Dataset schema (one record per row below: text, code_tokens, avg_line_len, score):

    text           string     lengths 89 to 104k
    code_tokens    sequence
    avg_line_len   float64    7.91 to 980
    score          float64    0 to 630
def _on_connected(self, future):
    """Invoked when connections have been established. If the client is in
    clustering mode, it will kick off the discovery step if needed. If not,
    it will select the configured database.

    :param future: The connection future
    :type future: tornado.concurrent.Future

    """
    if future.exception():
        self._connect_future.set_exception(future.exception())
        return
    conn = future.result()
    LOGGER.debug('Connected to %s (%r, %r, %r)', conn.name,
                 self._clustering, self._discovery, self._connected)
    if self._clustering:
        self._cluster[conn.name] = conn
        if not self._discovery:
            self.io_loop.add_future(self.cluster_nodes(),
                                    self._on_cluster_discovery)
        elif self.ready:
            LOGGER.debug('Cluster nodes all connected')
            if not self._connect_future.done():
                self._connect_future.set_result(True)
                self._connected.set()
    else:
        def on_selected(sfuture):
            LOGGER.debug('Initial setup and selection processed')
            if sfuture.exception():
                self._connect_future.set_exception(sfuture.exception())
            else:
                self._connect_future.set_result(True)
                self._connected.set()

        select_future = concurrent.Future()
        self.io_loop.add_future(select_future, on_selected)
        self._connection = conn
        cmd = Command(
            self._build_command(['SELECT', str(conn.database)]),
            self._connection, None, None)
        cmd.connection.execute(cmd, select_future)
[ "def", "_on_connected", "(", "self", ",", "future", ")", ":", "if", "future", ".", "exception", "(", ")", ":", "self", ".", "_connect_future", ".", "set_exception", "(", "future", ".", "exception", "(", ")", ")", "return", "conn", "=", "future", ".", "result", "(", ")", "LOGGER", ".", "debug", "(", "'Connected to %s (%r, %r, %r)'", ",", "conn", ".", "name", ",", "self", ".", "_clustering", ",", "self", ".", "_discovery", ",", "self", ".", "_connected", ")", "if", "self", ".", "_clustering", ":", "self", ".", "_cluster", "[", "conn", ".", "name", "]", "=", "conn", "if", "not", "self", ".", "_discovery", ":", "self", ".", "io_loop", ".", "add_future", "(", "self", ".", "cluster_nodes", "(", ")", ",", "self", ".", "_on_cluster_discovery", ")", "elif", "self", ".", "ready", ":", "LOGGER", ".", "debug", "(", "'Cluster nodes all connected'", ")", "if", "not", "self", ".", "_connect_future", ".", "done", "(", ")", ":", "self", ".", "_connect_future", ".", "set_result", "(", "True", ")", "self", ".", "_connected", ".", "set", "(", ")", "else", ":", "def", "on_selected", "(", "sfuture", ")", ":", "LOGGER", ".", "debug", "(", "'Initial setup and selection processed'", ")", "if", "sfuture", ".", "exception", "(", ")", ":", "self", ".", "_connect_future", ".", "set_exception", "(", "sfuture", ".", "exception", "(", ")", ")", "else", ":", "self", ".", "_connect_future", ".", "set_result", "(", "True", ")", "self", ".", "_connected", ".", "set", "(", ")", "select_future", "=", "concurrent", ".", "Future", "(", ")", "self", ".", "io_loop", ".", "add_future", "(", "select_future", ",", "on_selected", ")", "self", ".", "_connection", "=", "conn", "cmd", "=", "Command", "(", "self", ".", "_build_command", "(", "[", "'SELECT'", ",", "str", "(", "conn", ".", "database", ")", "]", ")", ",", "self", ".", "_connection", ",", "None", ",", "None", ")", "cmd", ".", "connection", ".", "execute", "(", "cmd", ",", "select_future", ")" ]
41.116279
17.44186
def transaction(self, connection=None, **kwargs):
    """A simple context manager, useful for when you want to wrap a bunch
    of db calls in a transaction.

    http://docs.python.org/2/library/contextlib.html
    http://docs.python.org/release/2.5/whatsnew/pep-343.html

    example --
        with self.transaction():
            # do a bunch of calls
        # those db calls will be committed by this line
    """
    with self.connection(connection) as connection:
        name = connection.transaction_name()
        connection.transaction_start(name)
        try:
            yield connection
            connection.transaction_stop()
        except Exception as e:
            connection.transaction_fail(name)
            self.raise_error(e)
[ "def", "transaction", "(", "self", ",", "connection", "=", "None", ",", "*", "*", "kwargs", ")", ":", "with", "self", ".", "connection", "(", "connection", ")", "as", "connection", ":", "name", "=", "connection", ".", "transaction_name", "(", ")", "connection", ".", "transaction_start", "(", "name", ")", "try", ":", "yield", "connection", "connection", ".", "transaction_stop", "(", ")", "except", "Exception", "as", "e", ":", "connection", ".", "transaction_fail", "(", "name", ")", "self", ".", "raise_error", "(", "e", ")" ]
38.190476
15.52381
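The commit-on-success, rollback-on-failure pattern that `transaction` implements can be sketched standalone; below is a minimal illustration using `contextlib` and `sqlite3` (the `connection()`, `transaction_name()`, and `raise_error()` helpers above are specific to that codebase and are not reproduced here):

import contextlib
import sqlite3

@contextlib.contextmanager
def transaction(conn):
    # Yield the connection; commit if the block succeeds, roll back otherwise.
    try:
        yield conn
        conn.commit()
    except Exception:
        conn.rollback()
        raise

conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE t (x INTEGER)')
with transaction(conn):
    conn.execute('INSERT INTO t VALUES (1)')  # committed when the block exits
print(conn.execute('SELECT COUNT(*) FROM t').fetchone())  # (1,)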
def redo_expansion_state(self, ignore_not_existing_rows=False):
    """Considers the tree to be collapsed and expands every tree item whose
    expansion flag is set to True.
    """
    def set_expansion_state(state_path):
        state_row_iter = self.state_row_iter_dict_by_state_path[state_path]
        if state_row_iter:  # many elements may be missing afterwards
            state_row_path = self.tree_store.get_path(state_row_iter)
            self.view.expand_to_path(state_row_path)

    if self.__my_selected_sm_id is not None and self.__my_selected_sm_id in self.__expansion_state:
        expansion_state = self.__expansion_state[self.__my_selected_sm_id]
        try:
            for state_path, state_row_expanded in expansion_state.items():
                if state_path in self.state_row_iter_dict_by_state_path:
                    if state_row_expanded:
                        set_expansion_state(state_path)
                else:
                    if not ignore_not_existing_rows and self._selected_sm_model and \
                            self._selected_sm_model.state_machine.get_state_by_path(state_path, as_check=True):
                        state = self._selected_sm_model.state_machine.get_state_by_path(state_path)
                        if isinstance(state, LibraryState) or state.is_root_state_of_library or \
                                state.get_next_upper_library_root_state():
                            continue
                        logger.error("State not in StateMachineTree but in StateMachine, {0}.".format(state_path))
        except (TypeError, KeyError):
            logger.error("Expansion state of state machine {0} could not be restored"
                         "".format(self.__my_selected_sm_id))
[ "def", "redo_expansion_state", "(", "self", ",", "ignore_not_existing_rows", "=", "False", ")", ":", "def", "set_expansion_state", "(", "state_path", ")", ":", "state_row_iter", "=", "self", ".", "state_row_iter_dict_by_state_path", "[", "state_path", "]", "if", "state_row_iter", ":", "# may elements are missing afterwards", "state_row_path", "=", "self", ".", "tree_store", ".", "get_path", "(", "state_row_iter", ")", "self", ".", "view", ".", "expand_to_path", "(", "state_row_path", ")", "if", "self", ".", "__my_selected_sm_id", "is", "not", "None", "and", "self", ".", "__my_selected_sm_id", "in", "self", ".", "__expansion_state", ":", "expansion_state", "=", "self", ".", "__expansion_state", "[", "self", ".", "__my_selected_sm_id", "]", "try", ":", "for", "state_path", ",", "state_row_expanded", "in", "expansion_state", ".", "items", "(", ")", ":", "if", "state_path", "in", "self", ".", "state_row_iter_dict_by_state_path", ":", "if", "state_row_expanded", ":", "set_expansion_state", "(", "state_path", ")", "else", ":", "if", "not", "ignore_not_existing_rows", "and", "self", ".", "_selected_sm_model", "and", "self", ".", "_selected_sm_model", ".", "state_machine", ".", "get_state_by_path", "(", "state_path", ",", "as_check", "=", "True", ")", ":", "state", "=", "self", ".", "_selected_sm_model", ".", "state_machine", ".", "get_state_by_path", "(", "state_path", ")", "if", "isinstance", "(", "state", ",", "LibraryState", ")", "or", "state", ".", "is_root_state_of_library", "or", "state", ".", "get_next_upper_library_root_state", "(", ")", ":", "continue", "logger", ".", "error", "(", "\"State not in StateMachineTree but in StateMachine, {0}.\"", ".", "format", "(", "state_path", ")", ")", "except", "(", "TypeError", ",", "KeyError", ")", ":", "logger", ".", "error", "(", "\"Expansion state of state machine {0} could not be restored\"", "\"\"", ".", "format", "(", "self", ".", "__my_selected_sm_id", ")", ")" ]
64.535714
33.642857
def initialize_app(flask_app, args):
    """Initialize the App."""
    # Setup gourde with the args.
    gourde.setup(args)

    # Register a custom health check.
    gourde.is_healthy = is_healthy

    # Add an optional API.
    initialize_api(flask_app)
[ "def", "initialize_app", "(", "flask_app", ",", "args", ")", ":", "# Setup gourde with the args.", "gourde", ".", "setup", "(", "args", ")", "# Register a custom health check.", "gourde", ".", "is_healthy", "=", "is_healthy", "# Add an optional API", "initialize_api", "(", "flask_app", ")" ]
24.5
14.4
def sum_distances(self, indices, distance_matrix):
    """Calculate the combinatorial distance between a select group of
    trajectories, indicated by indices.

    Arguments
    ---------
    indices : tuple
    distance_matrix : numpy.ndarray (M,M)

    Returns
    -------
    numpy.ndarray

    Notes
    -----
    This function can perhaps be sped up by calculating the plain sum of
    the distances. The calculated distances, as they are right now, are
    only used in a relative way. Purely summing the distances would lead
    to the same result, at a perhaps quicker rate.
    """
    combs_tup = np.array(tuple(combinations(indices, 2)))

    # Put indices from tuples into a two-dimensional array.
    combs = np.array([[i[0] for i in combs_tup],
                      [i[1] for i in combs_tup]])

    # Calculate the distance (vectorized).
    dist = np.sqrt(
        np.sum(np.square(distance_matrix[combs[0], combs[1]]), axis=0))
    return dist
[ "def", "sum_distances", "(", "self", ",", "indices", ",", "distance_matrix", ")", ":", "combs_tup", "=", "np", ".", "array", "(", "tuple", "(", "combinations", "(", "indices", ",", "2", ")", ")", ")", "# Put indices from tuples into two-dimensional array.", "combs", "=", "np", ".", "array", "(", "[", "[", "i", "[", "0", "]", "for", "i", "in", "combs_tup", "]", ",", "[", "i", "[", "1", "]", "for", "i", "in", "combs_tup", "]", "]", ")", "# Calculate distance (vectorized)", "dist", "=", "np", ".", "sqrt", "(", "np", ".", "sum", "(", "np", ".", "square", "(", "distance_matrix", "[", "combs", "[", "0", "]", ",", "combs", "[", "1", "]", "]", ")", ",", "axis", "=", "0", ")", ")", "return", "dist" ]
32.548387
22.483871
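To see what `sum_distances` computes, its body can be traced on a made-up 4x4 distance matrix (the values are arbitrary; `self` is unused apart from method binding):

import numpy as np
from itertools import combinations

indices = (0, 2, 3)
distance_matrix = np.arange(16, dtype=float).reshape(4, 4)

# Pairs (0, 2), (0, 3), (2, 3) -> entries 2.0, 3.0, 11.0 of the matrix
combs_tup = np.array(tuple(combinations(indices, 2)))
combs = np.array([combs_tup[:, 0], combs_tup[:, 1]])
dist = np.sqrt(np.sum(np.square(distance_matrix[combs[0], combs[1]]), axis=0))
print(dist)  # sqrt(2**2 + 3**2 + 11**2) = sqrt(134) ~ 11.58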
def calc_score(self, att_query, att_keys):
    """
    Calculate Bahdanau score

    :param att_query: b x t_q x n
    :param att_keys: b x t_k x n

    returns: b x t_q x t_k scores
    """
    b, t_k, n = att_keys.size()
    t_q = att_query.size(1)

    att_query = att_query.unsqueeze(2).expand(b, t_q, t_k, n)
    att_keys = att_keys.unsqueeze(1).expand(b, t_q, t_k, n)
    sum_qk = att_query + att_keys

    if self.normalize:
        sum_qk = sum_qk + self.normalize_bias
        linear_att = self.linear_att / self.linear_att.norm()
        linear_att = linear_att * self.normalize_scalar
    else:
        linear_att = self.linear_att

    out = torch.tanh(sum_qk).matmul(linear_att)
    return out
[ "def", "calc_score", "(", "self", ",", "att_query", ",", "att_keys", ")", ":", "b", ",", "t_k", ",", "n", "=", "att_keys", ".", "size", "(", ")", "t_q", "=", "att_query", ".", "size", "(", "1", ")", "att_query", "=", "att_query", ".", "unsqueeze", "(", "2", ")", ".", "expand", "(", "b", ",", "t_q", ",", "t_k", ",", "n", ")", "att_keys", "=", "att_keys", ".", "unsqueeze", "(", "1", ")", ".", "expand", "(", "b", ",", "t_q", ",", "t_k", ",", "n", ")", "sum_qk", "=", "att_query", "+", "att_keys", "if", "self", ".", "normalize", ":", "sum_qk", "=", "sum_qk", "+", "self", ".", "normalize_bias", "linear_att", "=", "self", ".", "linear_att", "/", "self", ".", "linear_att", ".", "norm", "(", ")", "linear_att", "=", "linear_att", "*", "self", ".", "normalize_scalar", "else", ":", "linear_att", "=", "self", ".", "linear_att", "out", "=", "torch", ".", "tanh", "(", "sum_qk", ")", ".", "matmul", "(", "linear_att", ")", "return", "out" ]
29.153846
17.384615
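The expand-and-add step in `calc_score` is the core of Bahdanau (additive) attention; a minimal shape check with random tensors, skipping the `normalize` branch and using a random vector in place of the learned `linear_att` parameter:

import torch

b, t_q, t_k, n = 2, 3, 5, 8
att_query = torch.randn(b, t_q, n)
att_keys = torch.randn(b, t_k, n)
linear_att = torch.randn(n)  # placeholder for the learned parameter

# (b, t_q, 1, n) + (b, 1, t_k, n) broadcasts to (b, t_q, t_k, n)
sum_qk = att_query.unsqueeze(2) + att_keys.unsqueeze(1)
out = torch.tanh(sum_qk).matmul(linear_att)  # contracts the last dim
print(out.shape)  # torch.Size([2, 3, 5]), i.e. b x t_q x t_k scores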
def rel_path(name, available_tools):
    """
    Extracts relative path to a tool (from the main cloned directory)
    out of available_tools based on the name it is given.
    """
    if name == '@' or name == '.' or name == '/':
        name = ''
    multi_tool = '@' in name
    for tool in available_tools:
        t_name = tool[0].lower()
        if multi_tool:
            if name.split('@')[-1] == t_name.split('@')[-1]:
                return t_name, t_name
        else:
            if name == t_name.split('/')[-1]:
                return t_name, tool[0]
            elif name == '' and t_name.split('@')[-1] == 'unspecified':
                return '', ''
    return None, None
[ "def", "rel_path", "(", "name", ",", "available_tools", ")", ":", "if", "name", "==", "'@'", "or", "name", "==", "'.'", "or", "name", "==", "'/'", ":", "name", "=", "''", "multi_tool", "=", "'@'", "in", "name", "for", "tool", "in", "available_tools", ":", "t_name", "=", "tool", "[", "0", "]", ".", "lower", "(", ")", "if", "multi_tool", ":", "if", "name", ".", "split", "(", "'@'", ")", "[", "-", "1", "]", "==", "t_name", ".", "split", "(", "'@'", ")", "[", "-", "1", "]", ":", "return", "t_name", ",", "t_name", "else", ":", "if", "name", "==", "t_name", ".", "split", "(", "'/'", ")", "[", "-", "1", "]", ":", "return", "t_name", ",", "tool", "[", "0", "]", "elif", "name", "==", "''", "and", "t_name", ".", "split", "(", "'@'", ")", "[", "-", "1", "]", "==", "'unspecified'", ":", "return", "''", ",", "''", "return", "None", ",", "None" ]
39
12.368421
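A usage sketch for `rel_path`, with a hypothetical `available_tools` list of `(path, ...)` tuples matching how the function indexes `tool[0]`:

available_tools = [('tools/scanner@v1', None), ('tools/parser', None)]

print(rel_path('parser', available_tools))   # ('tools/parser', 'tools/parser')
print(rel_path('x@v1', available_tools))     # ('tools/scanner@v1', 'tools/scanner@v1')
print(rel_path('missing', available_tools))  # (None, None)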
def cumulative_gain_curve(y_true, y_score, pos_label=None):
    """This function generates the points necessary to plot the Cumulative
    Gains chart.

    Note: This implementation is restricted to the binary classification task.

    Args:
        y_true (array-like, shape (n_samples)): True labels of the data.

        y_score (array-like, shape (n_samples)): Target scores, can either be
            probability estimates of the positive class, confidence values, or
            non-thresholded measure of decisions (as returned by
            decision_function on some classifiers).

        pos_label (int or str, default=None): Label considered as positive and
            others are considered negative.

    Returns:
        percentages (numpy.ndarray): An array containing the X-axis values for
            plotting the Cumulative Gains chart.

        gains (numpy.ndarray): An array containing the Y-axis values for one
            curve of the Cumulative Gains chart.

    Raises:
        ValueError: If `y_true` is not composed of 2 classes. The Cumulative
            Gains chart is only relevant in binary classification.
    """
    y_true, y_score = np.asarray(y_true), np.asarray(y_score)

    # ensure binary classification if pos_label is not specified
    classes = np.unique(y_true)
    if (pos_label is None and
        not (np.array_equal(classes, [0, 1]) or
             np.array_equal(classes, [-1, 1]) or
             np.array_equal(classes, [0]) or
             np.array_equal(classes, [-1]) or
             np.array_equal(classes, [1]))):
        raise ValueError("Data is not binary and pos_label is not specified")
    elif pos_label is None:
        pos_label = 1.

    # make y_true a boolean vector
    y_true = (y_true == pos_label)

    sorted_indices = np.argsort(y_score)[::-1]
    y_true = y_true[sorted_indices]
    gains = np.cumsum(y_true)

    percentages = np.arange(start=1, stop=len(y_true) + 1)

    gains = gains / float(np.sum(y_true))
    percentages = percentages / float(len(y_true))

    gains = np.insert(gains, 0, [0])
    percentages = np.insert(percentages, 0, [0])

    return percentages, gains
[ "def", "cumulative_gain_curve", "(", "y_true", ",", "y_score", ",", "pos_label", "=", "None", ")", ":", "y_true", ",", "y_score", "=", "np", ".", "asarray", "(", "y_true", ")", ",", "np", ".", "asarray", "(", "y_score", ")", "# ensure binary classification if pos_label is not specified", "classes", "=", "np", ".", "unique", "(", "y_true", ")", "if", "(", "pos_label", "is", "None", "and", "not", "(", "np", ".", "array_equal", "(", "classes", ",", "[", "0", ",", "1", "]", ")", "or", "np", ".", "array_equal", "(", "classes", ",", "[", "-", "1", ",", "1", "]", ")", "or", "np", ".", "array_equal", "(", "classes", ",", "[", "0", "]", ")", "or", "np", ".", "array_equal", "(", "classes", ",", "[", "-", "1", "]", ")", "or", "np", ".", "array_equal", "(", "classes", ",", "[", "1", "]", ")", ")", ")", ":", "raise", "ValueError", "(", "\"Data is not binary and pos_label is not specified\"", ")", "elif", "pos_label", "is", "None", ":", "pos_label", "=", "1.", "# make y_true a boolean vector", "y_true", "=", "(", "y_true", "==", "pos_label", ")", "sorted_indices", "=", "np", ".", "argsort", "(", "y_score", ")", "[", ":", ":", "-", "1", "]", "y_true", "=", "y_true", "[", "sorted_indices", "]", "gains", "=", "np", ".", "cumsum", "(", "y_true", ")", "percentages", "=", "np", ".", "arange", "(", "start", "=", "1", ",", "stop", "=", "len", "(", "y_true", ")", "+", "1", ")", "gains", "=", "gains", "/", "float", "(", "np", ".", "sum", "(", "y_true", ")", ")", "percentages", "=", "percentages", "/", "float", "(", "len", "(", "y_true", ")", ")", "gains", "=", "np", ".", "insert", "(", "gains", ",", "0", ",", "[", "0", "]", ")", "percentages", "=", "np", ".", "insert", "(", "percentages", ",", "0", ",", "[", "0", "]", ")", "return", "percentages", ",", "gains" ]
36.614035
22.54386
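A minimal check of `cumulative_gain_curve` on a four-sample toy problem (labels and scores picked for illustration):

import numpy as np

y_true = np.array([0, 0, 1, 1])
y_score = np.array([0.1, 0.4, 0.35, 0.8])

percentages, gains = cumulative_gain_curve(y_true, y_score)
print(percentages)  # [0.   0.25 0.5  0.75 1.  ]
print(gains)        # [0.  0.5 0.5 1.  1. ]

Sorting by score gives the label order [1, 0, 1, 0], so half the positives are captured in the top 25% of samples and all of them by the top 75%.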
def conv2d(x_input, w_matrix):
    """conv2d returns a 2d convolution layer with full stride."""
    return tf.nn.conv2d(x_input, w_matrix, strides=[1, 1, 1, 1], padding='SAME')
[ "def", "conv2d", "(", "x_input", ",", "w_matrix", ")", ":", "return", "tf", ".", "nn", ".", "conv2d", "(", "x_input", ",", "w_matrix", ",", "strides", "=", "[", "1", ",", "1", ",", "1", ",", "1", "]", ",", "padding", "=", "'SAME'", ")" ]
58.333333
16.666667
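A shape check for `conv2d`, assuming an NHWC input and an HWIO filter as `tf.nn.conv2d` expects; with 'SAME' padding and unit strides the spatial dimensions are preserved:

import tensorflow as tf

x = tf.random.normal([1, 28, 28, 3])  # batch, height, width, channels
w = tf.random.normal([5, 5, 3, 8])    # filter h, filter w, in-channels, out-channels
print(conv2d(x, w).shape)             # (1, 28, 28, 8)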
def RegresarAOrigenCTGRechazado(self, numero_carta_de_porte, numero_ctg,
                                km_a_recorrer=None, **kwargs):
    "When querying rejected CTGs, they can be returned to origin"
    ret = self.client.regresarAOrigenCTGRechazado(request=dict(
        auth={
            'token': self.Token, 'sign': self.Sign,
            'cuitRepresentado': self.Cuit,
        },
        datosRegresarAOrigenCTGRechazado=dict(
            cartaPorte=numero_carta_de_porte,
            ctg=numero_ctg,
            kmARecorrer=km_a_recorrer,
        )))['response']
    self.__analizar_errores(ret)
    datos = ret.get('datosResponse')
    if datos:
        self.CartaPorte = str(datos['cartaPorte'])
        self.NumeroCTG = str(datos['ctg'])
        self.FechaHora = str(datos['fechaHora'])
        self.CodigoTransaccion = str(datos['codigoOperacion'])
        self.Observaciones = ""
    return self.CodigoTransaccion
[ "def", "RegresarAOrigenCTGRechazado", "(", "self", ",", "numero_carta_de_porte", ",", "numero_ctg", ",", "km_a_recorrer", "=", "None", ",", "*", "*", "kwargs", ")", ":", "ret", "=", "self", ".", "client", ".", "regresarAOrigenCTGRechazado", "(", "request", "=", "dict", "(", "auth", "=", "{", "'token'", ":", "self", ".", "Token", ",", "'sign'", ":", "self", ".", "Sign", ",", "'cuitRepresentado'", ":", "self", ".", "Cuit", ",", "}", ",", "datosRegresarAOrigenCTGRechazado", "=", "dict", "(", "cartaPorte", "=", "numero_carta_de_porte", ",", "ctg", "=", "numero_ctg", ",", "kmARecorrer", "=", "km_a_recorrer", ",", ")", ")", ")", "[", "'response'", "]", "self", ".", "__analizar_errores", "(", "ret", ")", "datos", "=", "ret", ".", "get", "(", "'datosResponse'", ")", "if", "datos", ":", "self", ".", "CartaPorte", "=", "str", "(", "datos", "[", "'cartaPorte'", "]", ")", "self", ".", "NumeroCTG", "=", "str", "(", "datos", "[", "'ctg'", "]", ")", "self", ".", "FechaHora", "=", "str", "(", "datos", "[", "'fechaHora'", "]", ")", "self", ".", "CodigoTransaccion", "=", "str", "(", "datos", "[", "'codigoOperacion'", "]", ")", "self", ".", "Observaciones", "=", "\"\"", "return", "self", ".", "CodigoTransaccion" ]
51.095238
15.571429
def reshape_like_all_dims(a, b):
    """Reshapes a to match the shape of b."""
    ret = tf.reshape(a, tf.shape(b))
    if not tf.executing_eagerly():
        ret.set_shape(b.get_shape())
    return ret
[ "def", "reshape_like_all_dims", "(", "a", ",", "b", ")", ":", "ret", "=", "tf", ".", "reshape", "(", "a", ",", "tf", ".", "shape", "(", "b", ")", ")", "if", "not", "tf", ".", "executing_eagerly", "(", ")", ":", "ret", ".", "set_shape", "(", "b", ".", "get_shape", "(", ")", ")", "return", "ret" ]
30.833333
9.666667
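For example, in eager mode (where the static `set_shape` branch is skipped):

import tensorflow as tf

a = tf.range(6)        # shape (6,)
b = tf.zeros([2, 3])   # only the shape of b is used
print(reshape_like_all_dims(a, b).shape)  # (2, 3)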
def count_of_user(user_id):
    '''
    Get the count of views.
    '''
    return TabCollect.select(
        TabCollect,
        TabPost.uid.alias('post_uid'),
        TabPost.title.alias('post_title'),
        TabPost.view_count.alias('post_view_count')
    ).where(
        TabCollect.user_id == user_id
    ).join(
        TabPost, on=(TabCollect.post_id == TabPost.uid)
    ).count()
[ "def", "count_of_user", "(", "user_id", ")", ":", "return", "TabCollect", ".", "select", "(", "TabCollect", ",", "TabPost", ".", "uid", ".", "alias", "(", "'post_uid'", ")", ",", "TabPost", ".", "title", ".", "alias", "(", "'post_title'", ")", ",", "TabPost", ".", "view_count", ".", "alias", "(", "'post_view_count'", ")", ")", ".", "where", "(", "TabCollect", ".", "user_id", "==", "user_id", ")", ".", "join", "(", "TabPost", ",", "on", "=", "(", "TabCollect", ".", "post_id", "==", "TabPost", ".", "uid", ")", ")", ".", "count", "(", ")" ]
32
16.461538
def pty_wrapper_main():
    """ Main function of the pty wrapper script """
    # make sure we can import _pty even if pyqode is not installed (this is
    # the case in HackEdit where pyqode has been vendored).
    sys.path.insert(0, os.path.dirname(__file__))
    import _pty
    # fixme: find a way to use a pty and keep stdout and stderr as separate channels
    _pty.spawn(sys.argv[1:])
[ "def", "pty_wrapper_main", "(", ")", ":", "# make sure we can import _pty even if pyqode is not installed (this is the case in HackEdit where pyqode has", "# been vendored).", "sys", ".", "path", ".", "insert", "(", "0", ",", "os", ".", "path", ".", "dirname", "(", "__file__", ")", ")", "import", "_pty", "# fixme: find a way to use a pty and keep stdout and stderr as separate channels", "_pty", ".", "spawn", "(", "sys", ".", "argv", "[", "1", ":", "]", ")" ]
35.454545
22
def mutapply(self, function, fieldname):
    """
    Applies `function` in-place to the field name specified.

    In other words, `mutapply` overwrites column `fieldname` with the
    results of applying `function` to each element of that column.
    """
    self[fieldname] = self.apply(function, fieldname)
[ "def", "mutapply", "(", "self", ",", "function", ",", "fieldname", ")", ":", "self", "[", "fieldname", "]", "=", "self", ".", "apply", "(", "function", ",", "fieldname", ")" ]
40.625
17.875
def cancel(self):
    """ Cancel current statement """
    conn = self._assert_open()
    conn._try_activate_cursor(self)
    self._session.cancel_if_pending()
[ "def", "cancel", "(", "self", ")", ":", "conn", "=", "self", ".", "_assert_open", "(", ")", "conn", ".", "_try_activate_cursor", "(", "self", ")", "self", ".", "_session", ".", "cancel_if_pending", "(", ")" ]
29.666667
5.166667
def render_as_json(func):
    """
    Decorator to render as JSON
    :param func:
    :return:
    """
    if inspect.isclass(func):
        setattr(func, "_renderer", json_renderer)
        return func
    else:
        @functools.wraps(func)
        def decorated_view(*args, **kwargs):
            data = func(*args, **kwargs)
            return _build_response(data, jsonify)
        return decorated_view
[ "def", "render_as_json", "(", "func", ")", ":", "if", "inspect", ".", "isclass", "(", "func", ")", ":", "setattr", "(", "func", ",", "\"_renderer\"", ",", "json_renderer", ")", "return", "func", "else", ":", "@", "functools", ".", "wraps", "(", "func", ")", "def", "decorated_view", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "data", "=", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return", "_build_response", "(", "data", ",", "jsonify", ")", "return", "decorated_view" ]
26.4
12.133333
def build_status(self, _args):
    """Print the status of the specified build."""
    print('{Style.BRIGHT}Bootstraps whose core components are probably '
          'already built:{Style.RESET_ALL}'.format(Style=Out_Style))

    bootstrap_dir = join(self.ctx.build_dir, 'bootstrap_builds')
    if exists(bootstrap_dir):
        for filen in os.listdir(bootstrap_dir):
            print('    {Fore.GREEN}{Style.BRIGHT}{filen}{Style.RESET_ALL}'
                  .format(filen=filen, Fore=Out_Fore, Style=Out_Style))

    print('{Style.BRIGHT}Recipes that are probably already built:'
          '{Style.RESET_ALL}'.format(Style=Out_Style))
    other_builds_dir = join(self.ctx.build_dir, 'other_builds')
    if exists(other_builds_dir):
        for filen in sorted(os.listdir(other_builds_dir)):
            name = filen.split('-')[0]
            dependencies = filen.split('-')[1:]
            recipe_str = ('    {Style.BRIGHT}{Fore.GREEN}{name}'
                          '{Style.RESET_ALL}'.format(
                              Style=Out_Style, name=name, Fore=Out_Fore))
            if dependencies:
                recipe_str += (
                    ' ({Fore.BLUE}with ' + ', '.join(dependencies) +
                    '{Fore.RESET})').format(Fore=Out_Fore)
            recipe_str += '{Style.RESET_ALL}'.format(Style=Out_Style)
            print(recipe_str)
[ "def", "build_status", "(", "self", ",", "_args", ")", ":", "print", "(", "'{Style.BRIGHT}Bootstraps whose core components are probably '", "'already built:{Style.RESET_ALL}'", ".", "format", "(", "Style", "=", "Out_Style", ")", ")", "bootstrap_dir", "=", "join", "(", "self", ".", "ctx", ".", "build_dir", ",", "'bootstrap_builds'", ")", "if", "exists", "(", "bootstrap_dir", ")", ":", "for", "filen", "in", "os", ".", "listdir", "(", "bootstrap_dir", ")", ":", "print", "(", "' {Fore.GREEN}{Style.BRIGHT}{filen}{Style.RESET_ALL}'", ".", "format", "(", "filen", "=", "filen", ",", "Fore", "=", "Out_Fore", ",", "Style", "=", "Out_Style", ")", ")", "print", "(", "'{Style.BRIGHT}Recipes that are probably already built:'", "'{Style.RESET_ALL}'", ".", "format", "(", "Style", "=", "Out_Style", ")", ")", "other_builds_dir", "=", "join", "(", "self", ".", "ctx", ".", "build_dir", ",", "'other_builds'", ")", "if", "exists", "(", "other_builds_dir", ")", ":", "for", "filen", "in", "sorted", "(", "os", ".", "listdir", "(", "other_builds_dir", ")", ")", ":", "name", "=", "filen", ".", "split", "(", "'-'", ")", "[", "0", "]", "dependencies", "=", "filen", ".", "split", "(", "'-'", ")", "[", "1", ":", "]", "recipe_str", "=", "(", "' {Style.BRIGHT}{Fore.GREEN}{name}'", "'{Style.RESET_ALL}'", ".", "format", "(", "Style", "=", "Out_Style", ",", "name", "=", "name", ",", "Fore", "=", "Out_Fore", ")", ")", "if", "dependencies", ":", "recipe_str", "+=", "(", "' ({Fore.BLUE}with '", "+", "', '", ".", "join", "(", "dependencies", ")", "+", "'{Fore.RESET})'", ")", ".", "format", "(", "Fore", "=", "Out_Fore", ")", "recipe_str", "+=", "'{Style.RESET_ALL}'", ".", "format", "(", "Style", "=", "Out_Style", ")", "print", "(", "recipe_str", ")" ]
53.074074
21.481481
def porosimetry(im, sizes=25, inlets=None, access_limited=True,
                mode='hybrid'):
    r"""
    Performs a porosimetry simulation on the image

    Parameters
    ----------
    im : ND-array
        An ND image of the porous material containing True values in the
        pore space.

    sizes : array_like or scalar
        The sizes to invade.  If a list of values is provided they are used
        directly.  If a scalar is provided then that number of points
        spanning the min and max of the distance transform are used.

    inlets : ND-array, boolean
        A boolean mask with True values indicating where the invasion
        enters the image.  By default all faces are considered inlets,
        akin to a mercury porosimetry experiment.  Users can also apply
        solid boundaries to their image externally before passing it in,
        allowing for complex inlets like circular openings, etc.  This
        argument is only used if ``access_limited`` is ``True``.

    access_limited : Boolean
        This flag indicates if the intrusion should only occur from the
        surfaces (``access_limited`` is True, which is the default), or
        if the invading phase should be allowed to appear in the core of
        the image.  The former simulates experimental tools like mercury
        intrusion porosimetry, while the latter is useful for comparison
        to gauge the extent of shielding effects in the sample.

    mode : string
        Controls which method is used to compute the result.  Options are:

        'hybrid' - (default) Performs a distance transform of the void
        space, thresholds to find voxels larger than ``sizes[i]``, trims
        the resulting mask if ``access_limitations`` is ``True``, then
        dilates it using the efficient fft-method to obtain the non-wetting
        fluid configuration.

        'dt' - Same as 'hybrid', except uses a second distance transform,
        relative to the thresholded mask, to find the invading fluid
        configuration.  The choice of 'dt' or 'hybrid' depends on speed,
        which is system and installation specific.

        'mio' - Uses a single morphological image opening step to obtain
        the invading fluid configuration directly, *then* trims if
        ``access_limitations`` is ``True``.  This method is not ideal and
        is included mostly for comparison purposes.  The morphological
        operations are done using fft-based method implementations.

    Returns
    -------
    image : ND-array
        A copy of ``im`` with voxel values indicating the sphere radius at
        which it becomes accessible from the inlets.  This image can be
        used to find invading fluid configurations as a function of applied
        capillary pressure by applying a boolean comparison:
        ``inv_phase = im > r`` where ``r`` is the radius (in voxels) of the
        invading sphere.  Of course, ``r`` can be converted to capillary
        pressure using your favorite model.

    Notes
    -----
    There are many ways to perform this filter, and PoreSpy offers 3, which
    users can choose between via the ``mode`` argument.  These methods all
    work in a similar way by finding which foreground voxels can
    accommodate a sphere of a given radius, then repeating for smaller
    radii.

    See Also
    --------
    fftmorphology
    local_thickness

    """
    if im.ndim != im.squeeze().ndim:
        warnings.warn('Input image contains a singleton axis:' + str(im.shape) +
                      ' Reduce dimensionality with np.squeeze(im) to avoid' +
                      ' unexpected behavior.')
    dt = spim.distance_transform_edt(im > 0)

    if inlets is None:
        inlets = get_border(im.shape, mode='faces')

    if isinstance(sizes, int):
        sizes = sp.logspace(start=sp.log10(sp.amax(dt)), stop=0, num=sizes)
    else:
        sizes = sp.unique(sizes)[-1::-1]

    if im.ndim == 2:
        strel = ps_disk
    else:
        strel = ps_ball

    if mode == 'mio':
        pw = int(sp.floor(dt.max()))
        impad = sp.pad(im, mode='symmetric', pad_width=pw)
        inletspad = sp.pad(inlets, mode='symmetric', pad_width=pw)
        inlets = sp.where(inletspad)
        # sizes = sp.unique(sp.around(sizes, decimals=0).astype(int))[-1::-1]
        imresults = sp.zeros(sp.shape(impad))
        for r in tqdm(sizes):
            imtemp = fftmorphology(impad, strel(r), mode='erosion')
            if access_limited:
                imtemp = trim_disconnected_blobs(imtemp, inlets)
            imtemp = fftmorphology(imtemp, strel(r), mode='dilation')
            if sp.any(imtemp):
                imresults[(imresults == 0)*imtemp] = r
        imresults = extract_subsection(imresults, shape=im.shape)
    elif mode == 'dt':
        inlets = sp.where(inlets)
        imresults = sp.zeros(sp.shape(im))
        for r in tqdm(sizes):
            imtemp = dt >= r
            if access_limited:
                imtemp = trim_disconnected_blobs(imtemp, inlets)
            if sp.any(imtemp):
                imtemp = spim.distance_transform_edt(~imtemp) < r
                imresults[(imresults == 0)*imtemp] = r
    elif mode == 'hybrid':
        inlets = sp.where(inlets)
        imresults = sp.zeros(sp.shape(im))
        for r in tqdm(sizes):
            imtemp = dt >= r
            if access_limited:
                imtemp = trim_disconnected_blobs(imtemp, inlets)
            if sp.any(imtemp):
                imtemp = fftconvolve(imtemp, strel(r), mode='same') > 0.0001
                imresults[(imresults == 0)*imtemp] = r
    else:
        raise Exception('Unrecognized mode ' + mode)
    return imresults
[ "def", "porosimetry", "(", "im", ",", "sizes", "=", "25", ",", "inlets", "=", "None", ",", "access_limited", "=", "True", ",", "mode", "=", "'hybrid'", ")", ":", "if", "im", ".", "ndim", "!=", "im", ".", "squeeze", "(", ")", ".", "ndim", ":", "warnings", ".", "warn", "(", "'Input image conains a singleton axis:'", "+", "str", "(", "im", ".", "shape", ")", "+", "' Reduce dimensionality with np.squeeze(im) to avoid'", "+", "' unexpected behavior.'", ")", "dt", "=", "spim", ".", "distance_transform_edt", "(", "im", ">", "0", ")", "if", "inlets", "is", "None", ":", "inlets", "=", "get_border", "(", "im", ".", "shape", ",", "mode", "=", "'faces'", ")", "if", "isinstance", "(", "sizes", ",", "int", ")", ":", "sizes", "=", "sp", ".", "logspace", "(", "start", "=", "sp", ".", "log10", "(", "sp", ".", "amax", "(", "dt", ")", ")", ",", "stop", "=", "0", ",", "num", "=", "sizes", ")", "else", ":", "sizes", "=", "sp", ".", "unique", "(", "sizes", ")", "[", "-", "1", ":", ":", "-", "1", "]", "if", "im", ".", "ndim", "==", "2", ":", "strel", "=", "ps_disk", "else", ":", "strel", "=", "ps_ball", "if", "mode", "==", "'mio'", ":", "pw", "=", "int", "(", "sp", ".", "floor", "(", "dt", ".", "max", "(", ")", ")", ")", "impad", "=", "sp", ".", "pad", "(", "im", ",", "mode", "=", "'symmetric'", ",", "pad_width", "=", "pw", ")", "inletspad", "=", "sp", ".", "pad", "(", "inlets", ",", "mode", "=", "'symmetric'", ",", "pad_width", "=", "pw", ")", "inlets", "=", "sp", ".", "where", "(", "inletspad", ")", "# sizes = sp.unique(sp.around(sizes, decimals=0).astype(int))[-1::-1]", "imresults", "=", "sp", ".", "zeros", "(", "sp", ".", "shape", "(", "impad", ")", ")", "for", "r", "in", "tqdm", "(", "sizes", ")", ":", "imtemp", "=", "fftmorphology", "(", "impad", ",", "strel", "(", "r", ")", ",", "mode", "=", "'erosion'", ")", "if", "access_limited", ":", "imtemp", "=", "trim_disconnected_blobs", "(", "imtemp", ",", "inlets", ")", "imtemp", "=", "fftmorphology", "(", "imtemp", ",", "strel", "(", "r", ")", ",", "mode", "=", "'dilation'", ")", "if", "sp", ".", "any", "(", "imtemp", ")", ":", "imresults", "[", "(", "imresults", "==", "0", ")", "*", "imtemp", "]", "=", "r", "imresults", "=", "extract_subsection", "(", "imresults", ",", "shape", "=", "im", ".", "shape", ")", "elif", "mode", "==", "'dt'", ":", "inlets", "=", "sp", ".", "where", "(", "inlets", ")", "imresults", "=", "sp", ".", "zeros", "(", "sp", ".", "shape", "(", "im", ")", ")", "for", "r", "in", "tqdm", "(", "sizes", ")", ":", "imtemp", "=", "dt", ">=", "r", "if", "access_limited", ":", "imtemp", "=", "trim_disconnected_blobs", "(", "imtemp", ",", "inlets", ")", "if", "sp", ".", "any", "(", "imtemp", ")", ":", "imtemp", "=", "spim", ".", "distance_transform_edt", "(", "~", "imtemp", ")", "<", "r", "imresults", "[", "(", "imresults", "==", "0", ")", "*", "imtemp", "]", "=", "r", "elif", "mode", "==", "'hybrid'", ":", "inlets", "=", "sp", ".", "where", "(", "inlets", ")", "imresults", "=", "sp", ".", "zeros", "(", "sp", ".", "shape", "(", "im", ")", ")", "for", "r", "in", "tqdm", "(", "sizes", ")", ":", "imtemp", "=", "dt", ">=", "r", "if", "access_limited", ":", "imtemp", "=", "trim_disconnected_blobs", "(", "imtemp", ",", "inlets", ")", "if", "sp", ".", "any", "(", "imtemp", ")", ":", "imtemp", "=", "fftconvolve", "(", "imtemp", ",", "strel", "(", "r", ")", ",", "mode", "=", "'same'", ")", ">", "0.0001", "imresults", "[", "(", "imresults", "==", "0", ")", "*", "imtemp", "]", "=", "r", "else", ":", "raise", "Exception", "(", "'Unreckognized mode '", "+", 
"mode", ")", "return", "imresults" ]
41.676692
24.225564
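A hedged usage sketch for `porosimetry`, assuming PoreSpy's packaged API (`porespy.generators.blobs` for a synthetic test image, and `porespy.filters.porosimetry` as the released form of the function above):

import porespy as ps

im = ps.generators.blobs(shape=[200, 200], porosity=0.6)  # random binary image
sizes = ps.filters.porosimetry(im, sizes=25, access_limited=True)
# Non-wetting phase configuration for an invading sphere of radius 5 voxels:
inv_phase = sizes > 5
print(inv_phase.sum() / im.sum())  # non-wetting saturation at that radius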
def handleRestartRequest(self, req: Request) -> None:
    """
    Handles transaction of type POOL_RESTART
    Can schedule or cancel restart to a newer version at specified time

    :param req:
    """
    txn = req.operation
    if txn[TXN_TYPE] != POOL_RESTART:
        return
    action = txn[ACTION]
    if action == START:
        when = dateutil.parser.parse(txn[DATETIME]) \
            if DATETIME in txn.keys() and txn[DATETIME] not in ["0", "", None] \
            else None
        fail_timeout = txn.get(TIMEOUT, self.defaultActionTimeout)
        self.requestRestart(when, fail_timeout)
        return
    if action == CANCEL:
        if self.scheduledAction:
            self._cancelScheduledRestart()
            logger.info("Node '{}' cancels restart".format(self.nodeName))
        return
    logger.error(
        "Got {} transaction with unsupported action {}".format(
            POOL_RESTART, action))
[ "def", "handleRestartRequest", "(", "self", ",", "req", ":", "Request", ")", "->", "None", ":", "txn", "=", "req", ".", "operation", "if", "txn", "[", "TXN_TYPE", "]", "!=", "POOL_RESTART", ":", "return", "action", "=", "txn", "[", "ACTION", "]", "if", "action", "==", "START", ":", "when", "=", "dateutil", ".", "parser", ".", "parse", "(", "txn", "[", "DATETIME", "]", ")", "if", "DATETIME", "in", "txn", ".", "keys", "(", ")", "and", "txn", "[", "DATETIME", "]", "not", "in", "[", "\"0\"", ",", "\"\"", ",", "None", "]", "else", "None", "fail_timeout", "=", "txn", ".", "get", "(", "TIMEOUT", ",", "self", ".", "defaultActionTimeout", ")", "self", ".", "requestRestart", "(", "when", ",", "fail_timeout", ")", "return", "if", "action", "==", "CANCEL", ":", "if", "self", ".", "scheduledAction", ":", "self", ".", "_cancelScheduledRestart", "(", ")", "logger", ".", "info", "(", "\"Node '{}' cancels restart\"", ".", "format", "(", "self", ".", "nodeName", ")", ")", "return", "logger", ".", "error", "(", "\"Got {} transaction with unsupported action {}\"", ".", "format", "(", "POOL_RESTART", ",", "action", ")", ")" ]
32.967742
17.354839
def set_offload(devname, **kwargs):
    '''
    Changes the offload parameters and other features of the specified
    network device

    CLI Example:

    .. code-block:: bash

        salt '*' ethtool.set_offload <devname> tcp_segmentation_offload=on
    '''
    for param, value in kwargs.items():
        if param == 'tcp_segmentation_offload':
            value = value == "on" and 1 or 0
            try:
                ethtool.set_tso(devname, value)
            except IOError:
                return 'Not supported'

    return show_offload(devname)
[ "def", "set_offload", "(", "devname", ",", "*", "*", "kwargs", ")", ":", "for", "param", ",", "value", "in", "kwargs", ".", "items", "(", ")", ":", "if", "param", "==", "'tcp_segmentation_offload'", ":", "value", "=", "value", "==", "\"on\"", "and", "1", "or", "0", "try", ":", "ethtool", ".", "set_tso", "(", "devname", ",", "value", ")", "except", "IOError", ":", "return", "'Not supported'", "return", "show_offload", "(", "devname", ")" ]
26.9
22.8
def __expand_cluster(self, index_point):
    """!
    @brief Expands cluster from specified point in the input data space.

    @param[in] index_point (list): Index of a point from the data.

    @return (list) Return tuple of list of indexes that belong to the same
             cluster and list of points that are marked as noise:
             (cluster, noise), or None if nothing has been expanded.

    """
    cluster = None
    self.__visited[index_point] = True
    neighbors = self.__neighbor_searcher(index_point)

    if len(neighbors) >= self.__neighbors:
        cluster = [index_point]
        self.__belong[index_point] = True

        for i in neighbors:
            if self.__visited[i] is False:
                self.__visited[i] = True
                next_neighbors = self.__neighbor_searcher(i)
                if len(next_neighbors) >= self.__neighbors:
                    neighbors += [k for k in next_neighbors if
                                  ((k in neighbors) == False) and k != index_point]

            if self.__belong[i] is False:
                cluster.append(i)
                self.__belong[i] = True

    return cluster
[ "def", "__expand_cluster", "(", "self", ",", "index_point", ")", ":", "cluster", "=", "None", "self", ".", "__visited", "[", "index_point", "]", "=", "True", "neighbors", "=", "self", ".", "__neighbor_searcher", "(", "index_point", ")", "if", "len", "(", "neighbors", ")", ">=", "self", ".", "__neighbors", ":", "cluster", "=", "[", "index_point", "]", "self", ".", "__belong", "[", "index_point", "]", "=", "True", "for", "i", "in", "neighbors", ":", "if", "self", ".", "__visited", "[", "i", "]", "is", "False", ":", "self", ".", "__visited", "[", "i", "]", "=", "True", "next_neighbors", "=", "self", ".", "__neighbor_searcher", "(", "i", ")", "if", "len", "(", "next_neighbors", ")", ">=", "self", ".", "__neighbors", ":", "neighbors", "+=", "[", "k", "for", "k", "in", "next_neighbors", "if", "(", "(", "k", "in", "neighbors", ")", "==", "False", ")", "and", "k", "!=", "index_point", "]", "if", "self", ".", "__belong", "[", "i", "]", "is", "False", ":", "cluster", ".", "append", "(", "i", ")", "self", ".", "__belong", "[", "i", "]", "=", "True", "return", "cluster" ]
38.909091
23.666667
def all_patterns(name):
    u"""
    Accepts a string and returns a pattern of possible patterns involving
    that name.
    Called by simple_mapping_to_pattern for each name in the mapping it
    receives.
    """
    # i_ denotes an import-like node
    # u_ denotes a node that appears to be a usage of the name
    if u'.' in name:
        name, attr = name.split(u'.', 1)
        simple_name = simple_name_match % (name)
        simple_attr = subname_match % (attr)
        dotted_name = dotted_name_match % (simple_name, simple_attr)
        i_from = from_import_match % (dotted_name)
        i_from_submod = from_import_submod_match % (simple_name, simple_attr,
                                                    simple_attr, simple_attr,
                                                    simple_attr)
        i_name = name_import_match % (dotted_name, dotted_name)
        u_name = power_twoname_match % (simple_name, simple_attr)
        u_subname = power_subname_match % (simple_attr)
        return u' | \n'.join((i_name, i_from, i_from_submod,
                              u_name, u_subname))
    else:
        simple_name = simple_name_match % (name)
        i_name = name_import_match % (simple_name, simple_name)
        i_from = from_import_match % (simple_name)
        u_name = power_onename_match % (simple_name)
        return u' | \n'.join((i_name, i_from, u_name))
[ "def", "all_patterns", "(", "name", ")", ":", "# i_ denotes an import-like node", "# u_ denotes a node that appears to be a usage of the name", "if", "u'.'", "in", "name", ":", "name", ",", "attr", "=", "name", ".", "split", "(", "u'.'", ",", "1", ")", "simple_name", "=", "simple_name_match", "%", "(", "name", ")", "simple_attr", "=", "subname_match", "%", "(", "attr", ")", "dotted_name", "=", "dotted_name_match", "%", "(", "simple_name", ",", "simple_attr", ")", "i_from", "=", "from_import_match", "%", "(", "dotted_name", ")", "i_from_submod", "=", "from_import_submod_match", "%", "(", "simple_name", ",", "simple_attr", ",", "simple_attr", ",", "simple_attr", ",", "simple_attr", ")", "i_name", "=", "name_import_match", "%", "(", "dotted_name", ",", "dotted_name", ")", "u_name", "=", "power_twoname_match", "%", "(", "simple_name", ",", "simple_attr", ")", "u_subname", "=", "power_subname_match", "%", "(", "simple_attr", ")", "return", "u' | \\n'", ".", "join", "(", "(", "i_name", ",", "i_from", ",", "i_from_submod", ",", "u_name", ",", "u_subname", ")", ")", "else", ":", "simple_name", "=", "simple_name_match", "%", "(", "name", ")", "i_name", "=", "name_import_match", "%", "(", "simple_name", ",", "simple_name", ")", "i_from", "=", "from_import_match", "%", "(", "simple_name", ")", "u_name", "=", "power_onename_match", "%", "(", "simple_name", ")", "return", "u' | \\n'", ".", "join", "(", "(", "i_name", ",", "i_from", ",", "u_name", ")", ")" ]
49
20.56
def guess_width(self):
    """auto fit column width"""
    if len(self.header) <= 4:
        nspace = 6
    elif len(self.header) <= 6:
        nspace = 5
    else:
        nspace = 4
    ncol = len(self.header)
    self._width = [nspace] * ncol
    width = [0] * ncol

    # set initial width from header
    for idx, item in enumerate(self.header):
        width[idx] = len(str(item))

    # guess width of each column from first 10 lines of data
    samples = min(len(self.data), 10)
    for col in range(ncol):
        for idx in range(samples):
            data = self.data[idx][col]
            if not isinstance(data, (float, int)):
                temp = len(data)
            else:
                temp = 10
            if temp > width[col]:
                width[col] = temp

    for col in range(ncol):
        self._width[col] += width[col]
[ "def", "guess_width", "(", "self", ")", ":", "if", "len", "(", "self", ".", "header", ")", "<=", "4", ":", "nspace", "=", "6", "elif", "len", "(", "self", ".", "header", ")", "<=", "6", ":", "nspace", "=", "5", "else", ":", "nspace", "=", "4", "ncol", "=", "len", "(", "self", ".", "header", ")", "self", ".", "_width", "=", "[", "nspace", "]", "*", "ncol", "width", "=", "[", "0", "]", "*", "ncol", "# set initial width from header", "for", "idx", ",", "item", "in", "enumerate", "(", "self", ".", "header", ")", ":", "width", "[", "idx", "]", "=", "len", "(", "str", "(", "item", ")", ")", "# guess width of each column from first 10 lines of data", "samples", "=", "min", "(", "len", "(", "self", ".", "data", ")", ",", "10", ")", "for", "col", "in", "range", "(", "ncol", ")", ":", "for", "idx", "in", "range", "(", "samples", ")", ":", "data", "=", "self", ".", "data", "[", "idx", "]", "[", "col", "]", "if", "not", "isinstance", "(", "data", ",", "(", "float", ",", "int", ")", ")", ":", "temp", "=", "len", "(", "data", ")", "else", ":", "temp", "=", "10", "if", "temp", ">", "width", "[", "col", "]", ":", "width", "[", "col", "]", "=", "temp", "for", "col", "in", "range", "(", "ncol", ")", ":", "self", ".", "_width", "[", "col", "]", "+=", "width", "[", "col", "]" ]
30.9
12.333333
def _write_gml(G, path):
    """ Wrapper around nx.write_gml """
    import networkx as nx
    return nx.write_gml(G, path, stringizer=str)
[ "def", "_write_gml", "(", "G", ",", "path", ")", ":", "import", "networkx", "as", "nx", "return", "nx", ".", "write_gml", "(", "G", ",", "path", ",", "stringizer", "=", "str", ")" ]
23.666667
8
def sanitize_ssl(self):
    """Use a locally installed certificate file if available.
    Tries the system certificate file first, then certifi's, then the
    package's own bundled certificate file."""
    if self["sslverify"] is True:
        try:
            self["sslverify"] = get_system_cert_file()
        except ValueError:
            try:
                self["sslverify"] = get_certifi_file()
            except (ValueError, ImportError):
                try:
                    self["sslverify"] = get_share_file('cacert.pem')
                except ValueError:
                    pass
[ "def", "sanitize_ssl", "(", "self", ")", ":", "if", "self", "[", "\"sslverify\"", "]", "is", "True", ":", "try", ":", "self", "[", "\"sslverify\"", "]", "=", "get_system_cert_file", "(", ")", "except", "ValueError", ":", "try", ":", "self", "[", "\"sslverify\"", "]", "=", "get_certifi_file", "(", ")", "except", "(", "ValueError", ",", "ImportError", ")", ":", "try", ":", "self", "[", "\"sslverify\"", "]", "=", "get_share_file", "(", "'cacert.pem'", ")", "except", "ValueError", ":", "pass" ]
40.466667
13.066667
def reorder(self, dst_order, arr, src_order=None):
    """Reorder the output array to match that needed by the viewer."""
    if dst_order is None:
        dst_order = self.viewer.rgb_order
    if src_order is None:
        src_order = self.rgb_order
    if src_order != dst_order:
        arr = trcalc.reorder_image(dst_order, arr, src_order)
    return arr
[ "def", "reorder", "(", "self", ",", "dst_order", ",", "arr", ",", "src_order", "=", "None", ")", ":", "if", "dst_order", "is", "None", ":", "dst_order", "=", "self", ".", "viewer", ".", "rgb_order", "if", "src_order", "is", "None", ":", "src_order", "=", "self", ".", "rgb_order", "if", "src_order", "!=", "dst_order", ":", "arr", "=", "trcalc", ".", "reorder_image", "(", "dst_order", ",", "arr", ",", "src_order", ")", "return", "arr" ]
38.2
13.2
def block_sep0(self, Y):
    r"""Separate variable into component corresponding to
    :math:`\mathbf{y}_0` in :math:`\mathbf{y}\;\;`.
    """
    return Y[(slice(None),)*self.blkaxis + (slice(0, self.blkidx),)]
[ "def", "block_sep0", "(", "self", ",", "Y", ")", ":", "return", "Y", "[", "(", "slice", "(", "None", ")", ",", ")", "*", "self", ".", "blkaxis", "+", "(", "slice", "(", "0", ",", "self", ".", "blkidx", ")", ",", ")", "]" ]
37.166667
17.166667
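The tuple-of-slices trick in `block_sep0` builds the equivalent of `Y[:, ..., :blkidx]` with the split axis chosen at runtime; a small numpy check with arbitrary `blkaxis=1`, `blkidx=2`:

import numpy as np

Y = np.arange(24).reshape(2, 4, 3)
blkaxis, blkidx = 1, 2

block0 = Y[(slice(None),)*blkaxis + (slice(0, blkidx),)]
assert np.array_equal(block0, Y[:, :2])  # same as an explicit slice on axis 1
print(block0.shape)  # (2, 2, 3)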
def _latex_(self):
    r"""The representation routine for transitions.

    >>> g1 = State("Cs", 133, 6, 0, 1/Integer(2), 3)
    >>> g2 = State("Cs", 133, 6, 0, 1/Integer(2), 4)
    >>> Transition(g2, g1)._latex_()
    '^{133}\\mathrm{Cs}\\ 6S_{1/2}^{4}\\ \\nrightarrow \\ ^{133}\\mathrm{Cs}\\ 6S_{1/2}^{3}'

    """
    # self.allowed may be True, False, or undetermined (None); the original
    # `elif not self.allowed` made the undetermined branch (and a trailing
    # nleftrightarrow return) unreachable, so test for False explicitly.
    if self.allowed:
        return self.e1._latex_()+'\\ \\rightarrow \\ '+self.e2._latex_()
    elif self.allowed is False:
        return self.e1._latex_()+'\\ \\nrightarrow \\ '+self.e2._latex_()
    else:
        return self.e1._latex_()+'\\ \\rightarrow^? \\ '+self.e2._latex_()
[ "def", "_latex_", "(", "self", ")", ":", "if", "self", ".", "allowed", ":", "return", "self", ".", "e1", ".", "_latex_", "(", ")", "+", "'\\\\ \\\\rightarrow \\\\ '", "+", "self", ".", "e2", ".", "_latex_", "(", ")", "elif", "not", "self", ".", "allowed", ":", "return", "self", ".", "e1", ".", "_latex_", "(", ")", "+", "'\\\\ \\\\nrightarrow \\\\ '", "+", "self", ".", "e2", ".", "_latex_", "(", ")", "else", ":", "return", "self", ".", "e1", ".", "_latex_", "(", ")", "+", "'\\\\ \\\\rightarrow^? \\\\ '", "+", "self", ".", "e2", ".", "_latex_", "(", ")", "return", "self", ".", "e1", ".", "_latex_", "(", ")", "+", "'\\\\ \\\\nleftrightarrow \\\\ '", "+", "self", ".", "e2", ".", "_latex_", "(", ")" ]
41.411765
25.294118
def _add_install(self, context):
    """
    generates install.sh and adds it to included files
    """
    contents = self._render_template('install.sh', context)
    self.config.setdefault('files', [])  # file list might be empty
    # add install.sh to list of included files
    self._add_unique_file({
        "path": "/install.sh",
        "contents": contents,
        "mode": "755"
    })
[ "def", "_add_install", "(", "self", ",", "context", ")", ":", "contents", "=", "self", ".", "_render_template", "(", "'install.sh'", ",", "context", ")", "self", ".", "config", ".", "setdefault", "(", "'files'", ",", "[", "]", ")", "# file list might be empty", "# add install.sh to list of included files", "self", ".", "_add_unique_file", "(", "{", "\"path\"", ":", "\"/install.sh\"", ",", "\"contents\"", ":", "contents", ",", "\"mode\"", ":", "\"755\"", "}", ")" ]
35.75
13.083333
def datetimeobj_YmdHMS(value):
    """Convert timestamp string to a datetime object.

    Timestamp strings like '20130618120000' are able to be converted by
    this function.

    Args:
        value: A timestamp string in the format '%Y%m%d%H%M%S'.

    Returns:
        A datetime object.

    Raises:
        ValueError: If timestamp is invalid.

    Note:
        The timezone is assumed to be UTC/GMT.
    """
    i = int(value)
    S = i
    M = S//100
    H = M//100
    d = H//100
    m = d//100
    Y = m//100
    return datetime.datetime(
        Y % 10000, m % 100, d % 100,
        H % 100, M % 100, S % 100,
        tzinfo=TZ_GMT
    )
[ "def", "datetimeobj_YmdHMS", "(", "value", ")", ":", "i", "=", "int", "(", "value", ")", "S", "=", "i", "M", "=", "S", "//", "100", "H", "=", "M", "//", "100", "d", "=", "H", "//", "100", "m", "=", "d", "//", "100", "Y", "=", "m", "//", "100", "return", "datetime", ".", "datetime", "(", "Y", "%", "10000", ",", "m", "%", "100", ",", "d", "%", "100", ",", "H", "%", "100", ",", "M", "%", "100", ",", "S", "%", "100", ",", "tzinfo", "=", "TZ_GMT", ")" ]
22.296296
25.111111
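The repeated floor divisions peel two digits off at a time; tracing '20130618120000' through the arithmetic (with `datetime.timezone.utc` standing in for the module's `TZ_GMT` constant):

import datetime

value = '20130618120000'
S = int(value)   # 20130618120000 -> S % 100   = 0    (seconds)
M = S // 100     # 201306181200   -> M % 100   = 0    (minutes)
H = M // 100     # 2013061812     -> H % 100   = 12   (hours)
d = H // 100     # 20130618       -> d % 100   = 18   (day)
m = d // 100     # 201306         -> m % 100   = 6    (month)
Y = m // 100     # 2013           -> Y % 10000 = 2013 (year)
dt = datetime.datetime(Y % 10000, m % 100, d % 100, H % 100, M % 100, S % 100,
                       tzinfo=datetime.timezone.utc)
print(dt)  # 2013-06-18 12:00:00+00:00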
def to_dict(self):
    """
    Return the information from the pedigree file as a dictionary.
    The family id is the key and a list with dictionaries for each
    individual is the value.

    Returns:
        families (dict): A dictionary with the families
    """
    self.logger.debug("Return the information as a dictionary")
    families = {}
    for family_id in self.families:
        family = []
        for individual_id in self.families[family_id].individuals:
            individual = self.families[family_id].individuals[individual_id]
            family.append(individual.to_json())
            self.logger.debug("Adding individual {0} to family {1}".format(
                individual_id, family_id))

        self.logger.debug("Adding family {0}".format(family_id))
        families[family_id] = family

    return families
[ "def", "to_dict", "(", "self", ")", ":", "self", ".", "logger", ".", "debug", "(", "\"Return the information as a dictionary\"", ")", "families", "=", "{", "}", "for", "family_id", "in", "self", ".", "families", ":", "family", "=", "[", "]", "for", "individual_id", "in", "self", ".", "families", "[", "family_id", "]", ".", "individuals", ":", "individual", "=", "self", ".", "families", "[", "family_id", "]", ".", "individuals", "[", "individual_id", "]", "family", ".", "append", "(", "individual", ".", "to_json", "(", ")", ")", "self", ".", "logger", ".", "debug", "(", "\"Adding individual {0} to family {1}\"", ".", "format", "(", "individual_id", ",", "family_id", ")", ")", "self", ".", "logger", ".", "debug", "(", "\"Adding family {0}\"", ".", "format", "(", "family_id", ")", ")", "families", "[", "family_id", "]", "=", "family", "return", "families" ]
38.416667
20.916667
def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None,
          parse_int=None, parse_constant=None, **kw):
    """Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a JSON
    document) to a Python object.

    If ``s`` is a ``str`` instance and is encoded with an ASCII based encoding
    other than utf-8 (e.g. latin-1) then an appropriate ``encoding`` name
    must be specified. Encodings that are not ASCII based (such as UCS-2)
    are not allowed and should be decoded to ``unicode`` first.

    ``object_hook`` is an optional function that will be called with the
    result of any object literal decode (a ``dict``). The return value of
    ``object_hook`` will be used instead of the ``dict``. This feature
    can be used to implement custom decoders (e.g. JSON-RPC class hinting).

    ``parse_float``, if specified, will be called with the string
    of every JSON float to be decoded. By default this is equivalent to
    float(num_str). This can be used to use another datatype or parser
    for JSON floats (e.g. decimal.Decimal).

    ``parse_int``, if specified, will be called with the string
    of every JSON int to be decoded. By default this is equivalent to
    int(num_str). This can be used to use another datatype or parser
    for JSON integers (e.g. float).

    ``parse_constant``, if specified, will be called with one of the
    following strings: -Infinity, Infinity, NaN, null, true, false.
    This can be used to raise an exception if invalid JSON numbers
    are encountered.

    To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
    kwarg.
    """
    if (cls is None and encoding is None and object_hook is None and
            parse_int is None and parse_float is None and
            parse_constant is None and not kw):
        return _default_decoder.decode(s)
    if cls is None:
        cls = JSONDecoder
    if object_hook is not None:
        kw['object_hook'] = object_hook
    if parse_float is not None:
        kw['parse_float'] = parse_float
    if parse_int is not None:
        kw['parse_int'] = parse_int
    if parse_constant is not None:
        kw['parse_constant'] = parse_constant
    return cls(encoding=encoding, **kw).decode(s)
[ "def", "loads", "(", "s", ",", "encoding", "=", "None", ",", "cls", "=", "None", ",", "object_hook", "=", "None", ",", "parse_float", "=", "None", ",", "parse_int", "=", "None", ",", "parse_constant", "=", "None", ",", "*", "*", "kw", ")", ":", "if", "(", "cls", "is", "None", "and", "encoding", "is", "None", "and", "object_hook", "is", "None", "and", "parse_int", "is", "None", "and", "parse_float", "is", "None", "and", "parse_constant", "is", "None", "and", "not", "kw", ")", ":", "return", "_default_decoder", ".", "decode", "(", "s", ")", "if", "cls", "is", "None", ":", "cls", "=", "JSONDecoder", "if", "object_hook", "is", "not", "None", ":", "kw", "[", "'object_hook'", "]", "=", "object_hook", "if", "parse_float", "is", "not", "None", ":", "kw", "[", "'parse_float'", "]", "=", "parse_float", "if", "parse_int", "is", "not", "None", ":", "kw", "[", "'parse_int'", "]", "=", "parse_int", "if", "parse_constant", "is", "not", "None", ":", "kw", "[", "'parse_constant'", "]", "=", "parse_constant", "return", "cls", "(", "encoding", "=", "encoding", ",", "*", "*", "kw", ")", ".", "decode", "(", "s", ")" ]
44.795918
21.857143
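The standard-library `json` module exposes the same hooks, so the effect of `parse_float` and `parse_int` can be demonstrated without this package's `_default_decoder`:

import json
from decimal import Decimal

data = json.loads('{"price": 1.10, "qty": 3}', parse_float=Decimal, parse_int=str)
print(data)  # {'price': Decimal('1.10'), 'qty': '3'}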
def query(method='droplets', droplet_id=None, command=None, args=None,
          http_method='get'):
    '''
    Make a web call to DigitalOcean
    '''
    base_path = six.text_type(config.get_cloud_config_value(
        'api_root',
        get_configured_provider(),
        __opts__,
        search_global=False,
        default='https://api.digitalocean.com/v2'
    ))

    path = '{0}/{1}/'.format(base_path, method)

    if droplet_id:
        path += '{0}/'.format(droplet_id)

    if command:
        path += command

    if not isinstance(args, dict):
        args = {}

    personal_access_token = config.get_cloud_config_value(
        'personal_access_token', get_configured_provider(), __opts__,
        search_global=False
    )

    data = salt.utils.json.dumps(args)

    requester = getattr(requests, http_method)
    request = requester(path, data=data,
                        headers={'Authorization': 'Bearer ' + personal_access_token,
                                 'Content-Type': 'application/json'})
    if request.status_code > 299:
        raise SaltCloudSystemExit(
            'An error occurred while querying DigitalOcean. HTTP Code: {0} '
            'Error: \'{1}\''.format(
                request.status_code,
                # request.read()
                request.text
            )
        )

    log.debug(request.url)

    # success without data
    if request.status_code == 204:
        return True

    content = request.text

    result = salt.utils.json.loads(content)
    if result.get('status', '').lower() == 'error':
        raise SaltCloudSystemExit(
            pprint.pformat(result.get('error_message', {}))
        )

    return result
[ "def", "query", "(", "method", "=", "'droplets'", ",", "droplet_id", "=", "None", ",", "command", "=", "None", ",", "args", "=", "None", ",", "http_method", "=", "'get'", ")", ":", "base_path", "=", "six", ".", "text_type", "(", "config", ".", "get_cloud_config_value", "(", "'api_root'", ",", "get_configured_provider", "(", ")", ",", "__opts__", ",", "search_global", "=", "False", ",", "default", "=", "'https://api.digitalocean.com/v2'", ")", ")", "path", "=", "'{0}/{1}/'", ".", "format", "(", "base_path", ",", "method", ")", "if", "droplet_id", ":", "path", "+=", "'{0}/'", ".", "format", "(", "droplet_id", ")", "if", "command", ":", "path", "+=", "command", "if", "not", "isinstance", "(", "args", ",", "dict", ")", ":", "args", "=", "{", "}", "personal_access_token", "=", "config", ".", "get_cloud_config_value", "(", "'personal_access_token'", ",", "get_configured_provider", "(", ")", ",", "__opts__", ",", "search_global", "=", "False", ")", "data", "=", "salt", ".", "utils", ".", "json", ".", "dumps", "(", "args", ")", "requester", "=", "getattr", "(", "requests", ",", "http_method", ")", "request", "=", "requester", "(", "path", ",", "data", "=", "data", ",", "headers", "=", "{", "'Authorization'", ":", "'Bearer '", "+", "personal_access_token", ",", "'Content-Type'", ":", "'application/json'", "}", ")", "if", "request", ".", "status_code", ">", "299", ":", "raise", "SaltCloudSystemExit", "(", "'An error occurred while querying DigitalOcean. HTTP Code: {0} '", "'Error: \\'{1}\\''", ".", "format", "(", "request", ".", "status_code", ",", "# request.read()", "request", ".", "text", ")", ")", "log", ".", "debug", "(", "request", ".", "url", ")", "# success without data", "if", "request", ".", "status_code", "==", "204", ":", "return", "True", "content", "=", "request", ".", "text", "result", "=", "salt", ".", "utils", ".", "json", ".", "loads", "(", "content", ")", "if", "result", ".", "get", "(", "'status'", ",", "''", ")", ".", "lower", "(", ")", "==", "'error'", ":", "raise", "SaltCloudSystemExit", "(", "pprint", ".", "pformat", "(", "result", ".", "get", "(", "'error_message'", ",", "{", "}", ")", ")", ")", "return", "result" ]
27.910714
23.803571
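A usage sketch for the function above, assuming a configured salt-cloud DigitalOcean provider (so that `__opts__` and `get_configured_provider()` resolve); the droplet id '1234' and the power_off action are hypothetical example values:

    # List all droplets for the configured account.
    all_droplets = query(method='droplets')

    # Fetch one droplet, then POST a power_off action to it.
    one = query(method='droplets', droplet_id='1234')
    result = query(method='droplets', droplet_id='1234', command='actions',
                   args={'type': 'power_off'}, http_method='post')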
def git_sequence_editor_squash(fpath): r""" squashes wip messages CommandLine: python -m utool.util_git --exec-git_sequence_editor_squash Example: >>> # DISABLE_DOCTEST >>> # SCRIPT >>> import utool as ut >>> from utool.util_git import * # NOQA >>> fpath = ut.get_argval('--fpath', str, default=None) >>> git_sequence_editor_squash(fpath) Ignore: text = ut.codeblock( ''' pick 852aa05 better doctest for tips pick 3c779b8 wip pick 02bc21d wip pick 1853828 Fixed root tablename pick 9d50233 doctest updates pick 66230a5 wip pick c612e98 wip pick b298598 Fixed tablename error pick 1120a87 wip pick f6c4838 wip pick 7f92575 wip ''') Ignore: def squash_consecutive_commits_with_same_message(): # http://stackoverflow.com/questions/8226278/git-alias-to-squash-all-commits-with-a-particular-commit-message # Can do interactively with this. Can it be done automatically and pay attention to # Timestamps etc? git rebase --interactive HEAD~40 --autosquash git rebase --interactive $(git merge-base HEAD master) --autosquash # Lookbehind correct version %s/\([a-z]* [a-z0-9]* wip\n\)\@<=pick \([a-z0-9]*\) wip/squash \2 wip/gc # THE FULL NON-INTERACTIVE AUTOSQUASH SCRIPT # TODO: Don't squash if there is a one hour timedelta between commits GIT_EDITOR="cat $1" GIT_SEQUENCE_EDITOR="python -m utool.util_git --exec-git_sequence_editor_squash \ --fpath $1" git rebase -i $(git rev-list HEAD | tail -n 1) --autosquash --no-verify GIT_EDITOR="cat $1" GIT_SEQUENCE_EDITOR="python -m utool.util_git --exec-git_sequence_editor_squash \ --fpath $1" git rebase -i HEAD~10 --autosquash --no-verify GIT_EDITOR="cat $1" GIT_SEQUENCE_EDITOR="python -m utool.util_git --exec-git_sequence_editor_squash \ --fpath $1" git rebase -i $(git merge-base HEAD master) --autosquash --no-verify # 14d778fa30a93f85c61f34d09eddb6d2cafd11e2 # c509a95d4468ebb61097bd9f4d302367424772a3 # b0ffc26011e33378ee30730c5e0ef1994bfe1a90 # GIT_SEQUENCE_EDITOR=<script> git rebase -i <params> # GIT_SEQUENCE_EDITOR="echo 'FOOBAR $1' " git rebase -i HEAD~40 --autosquash # git checkout master # git branch -D tmp # git checkout -b tmp # option to get the tail commit $(git rev-list HEAD | tail -n 1) # GIT_SEQUENCE_EDITOR="python -m utool.util_git --exec-git_sequence_editor_squash \ --fpath $1" git rebase -i HEAD~40 --autosquash # GIT_SEQUENCE_EDITOR="python -m utool.util_git --exec-git_sequence_editor_squash \ --fpath $1" git rebase -i HEAD~40 --autosquash --no-verify <params> """ # print(sys.argv) import utool as ut text = ut.read_from(fpath) # print('fpath = %r' % (fpath,)) print(text) # Doesn't work because of fixed width requirement # search = (ut.util_regex.positive_lookbehind('[a-z]* [a-z0-9]* wip\n') + 'pick ' + # ut.reponamed_field('hash', '[a-z0-9]*') + ' wip') # repl = ('squash ' + ut.bref_field('hash') + ' wip') # import re # new_text = re.sub(search, repl, text, flags=re.MULTILINE) # print(new_text) prev_msg = None prev_dt = None new_lines = [] def get_commit_date(hashid): out, err, ret = ut.cmd('git show -s --format=%ci ' + hashid, verbose=False, quiet=True, pad_stdout=False) # from datetime import datetime from dateutil import parser # print('out = %r' % (out,)) stamp = out.strip('\n') # print('stamp = %r' % (stamp,)) dt = parser.parse(stamp) # dt = datetime.strptime(stamp, '%Y-%m-%d %H:%M:%S %Z') # print('dt = %r' % (dt,)) return dt for line in text.split('\n'): commit_line = line.split(' ') if len(commit_line) < 3: prev_msg = None prev_dt = None new_lines += [line] continue action = commit_line[0] hashid = commit_line[1] msg = ' '.join(commit_line[2:]) try: 
        dt = get_commit_date(hashid) except ValueError: prev_msg = None prev_dt = None new_lines += [line] continue orig_msg = msg can_squash = action == 'pick' and msg == 'wip' and prev_msg == 'wip' if prev_dt is not None and prev_msg == 'wip': tdelta = dt - prev_dt # Only squash closely consecutive commits threshold_minutes = 45 td_min = (tdelta.total_seconds() / 60.) # print(tdelta) can_squash &= td_min < threshold_minutes msg = msg + ' -- tdelta=%r' % (ut.get_timedelta_str(tdelta),) if can_squash: new_line = ' '.join(['squash', hashid, msg]) new_lines += [new_line] else: new_lines += [line] prev_msg = orig_msg prev_dt = dt new_text = '\n'.join(new_lines) # print('Dry run') # ut.dump_autogen_code(fpath, new_text) print(new_text) ut.write_to(fpath, new_text, n=None)
[ "def", "git_sequence_editor_squash", "(", "fpath", ")", ":", "# print(sys.argv)", "import", "utool", "as", "ut", "text", "=", "ut", ".", "read_from", "(", "fpath", ")", "# print('fpath = %r' % (fpath,))", "print", "(", "text", ")", "# Doesnt work because of fixed witdth requirement", "# search = (ut.util_regex.positive_lookbehind('[a-z]* [a-z0-9]* wip\\n') + 'pick ' +", "# ut.reponamed_field('hash', '[a-z0-9]*') + ' wip')", "# repl = ('squash ' + ut.bref_field('hash') + ' wip')", "# import re", "# new_text = re.sub(search, repl, text, flags=re.MULTILINE)", "# print(new_text)", "prev_msg", "=", "None", "prev_dt", "=", "None", "new_lines", "=", "[", "]", "def", "get_commit_date", "(", "hashid", ")", ":", "out", ",", "err", ",", "ret", "=", "ut", ".", "cmd", "(", "'git show -s --format=%ci '", "+", "hashid", ",", "verbose", "=", "False", ",", "quiet", "=", "True", ",", "pad_stdout", "=", "False", ")", "# from datetime import datetime", "from", "dateutil", "import", "parser", "# print('out = %r' % (out,))", "stamp", "=", "out", ".", "strip", "(", "'\\n'", ")", "# print('stamp = %r' % (stamp,))", "dt", "=", "parser", ".", "parse", "(", "stamp", ")", "# dt = datetime.strptime(stamp, '%Y-%m-%d %H:%M:%S %Z')", "# print('dt = %r' % (dt,))", "return", "dt", "for", "line", "in", "text", ".", "split", "(", "'\\n'", ")", ":", "commit_line", "=", "line", ".", "split", "(", "' '", ")", "if", "len", "(", "commit_line", ")", "<", "3", ":", "prev_msg", "=", "None", "prev_dt", "=", "None", "new_lines", "+=", "[", "line", "]", "continue", "action", "=", "commit_line", "[", "0", "]", "hashid", "=", "commit_line", "[", "1", "]", "msg", "=", "' '", ".", "join", "(", "commit_line", "[", "2", ":", "]", ")", "try", ":", "dt", "=", "get_commit_date", "(", "hashid", ")", "except", "ValueError", ":", "prev_msg", "=", "None", "prev_dt", "=", "None", "new_lines", "+=", "[", "line", "]", "continue", "orig_msg", "=", "msg", "can_squash", "=", "action", "==", "'pick'", "and", "msg", "==", "'wip'", "and", "prev_msg", "==", "'wip'", "if", "prev_dt", "is", "not", "None", "and", "prev_msg", "==", "'wip'", ":", "tdelta", "=", "dt", "-", "prev_dt", "# Only squash closely consecutive commits", "threshold_minutes", "=", "45", "td_min", "=", "(", "tdelta", ".", "total_seconds", "(", ")", "/", "60.", ")", "# print(tdelta)", "can_squash", "&=", "td_min", "<", "threshold_minutes", "msg", "=", "msg", "+", "' -- tdelta=%r'", "%", "(", "ut", ".", "get_timedelta_str", "(", "tdelta", ")", ",", ")", "if", "can_squash", ":", "new_line", "=", "' '", ".", "join", "(", "[", "'squash'", ",", "hashid", ",", "msg", "]", ")", "new_lines", "+=", "[", "new_line", "]", "else", ":", "new_lines", "+=", "[", "line", "]", "prev_msg", "=", "orig_msg", "prev_dt", "=", "dt", "new_text", "=", "'\\n'", ".", "join", "(", "new_lines", ")", "def", "get_commit_date", "(", "hashid", ")", ":", "out", "=", "ut", ".", "cmd", "(", "'git show -s --format=%ci '", "+", "hashid", ",", "verbose", "=", "False", ")", "print", "(", "'out = %r'", "%", "(", "out", ",", ")", ")", "# print('Dry run')", "# ut.dump_autogen_code(fpath, new_text)", "print", "(", "new_text", ")", "ut", ".", "write_to", "(", "fpath", ",", "new_text", ",", "n", "=", "None", ")" ]
38.950355
21.028369
def convert(dbus_obj): """Converts dbus_obj from dbus type to python type. :param dbus_obj: dbus object. :returns: dbus_obj in python type. """ _isinstance = partial(isinstance, dbus_obj) ConvertType = namedtuple('ConvertType', 'pytype dbustypes') pyint = ConvertType(int, (dbus.Byte, dbus.Int16, dbus.Int32, dbus.Int64, dbus.UInt16, dbus.UInt32, dbus.UInt64)) pybool = ConvertType(bool, (dbus.Boolean, )) pyfloat = ConvertType(float, (dbus.Double, )) pylist = ConvertType(lambda _obj: list(map(convert, dbus_obj)), (dbus.Array, )) pytuple = ConvertType(lambda _obj: tuple(map(convert, dbus_obj)), (dbus.Struct, )) types_str = (dbus.ObjectPath, dbus.Signature, dbus.String) if not PY3: types_str += (dbus.UTF8String,) pystr = ConvertType(str if PY3 else unicode, types_str) pydict = ConvertType( lambda _obj: dict(zip(map(convert, dbus_obj.keys()), map(convert, dbus_obj.values()) ) ), (dbus.Dictionary, ) ) for conv in (pyint, pybool, pyfloat, pylist, pytuple, pystr, pydict): if any(map(_isinstance, conv.dbustypes)): return conv.pytype(dbus_obj) else: return dbus_obj
[ "def", "convert", "(", "dbus_obj", ")", ":", "_isinstance", "=", "partial", "(", "isinstance", ",", "dbus_obj", ")", "ConvertType", "=", "namedtuple", "(", "'ConvertType'", ",", "'pytype dbustypes'", ")", "pyint", "=", "ConvertType", "(", "int", ",", "(", "dbus", ".", "Byte", ",", "dbus", ".", "Int16", ",", "dbus", ".", "Int32", ",", "dbus", ".", "Int64", ",", "dbus", ".", "UInt16", ",", "dbus", ".", "UInt32", ",", "dbus", ".", "UInt64", ")", ")", "pybool", "=", "ConvertType", "(", "bool", ",", "(", "dbus", ".", "Boolean", ",", ")", ")", "pyfloat", "=", "ConvertType", "(", "float", ",", "(", "dbus", ".", "Double", ",", ")", ")", "pylist", "=", "ConvertType", "(", "lambda", "_obj", ":", "list", "(", "map", "(", "convert", ",", "dbus_obj", ")", ")", ",", "(", "dbus", ".", "Array", ",", ")", ")", "pytuple", "=", "ConvertType", "(", "lambda", "_obj", ":", "tuple", "(", "map", "(", "convert", ",", "dbus_obj", ")", ")", ",", "(", "dbus", ".", "Struct", ",", ")", ")", "types_str", "=", "(", "dbus", ".", "ObjectPath", ",", "dbus", ".", "Signature", ",", "dbus", ".", "String", ")", "if", "not", "PY3", ":", "types_str", "+=", "(", "dbus", ".", "UTF8String", ",", ")", "pystr", "=", "ConvertType", "(", "str", "if", "PY3", "else", "unicode", ",", "types_str", ")", "pydict", "=", "ConvertType", "(", "lambda", "_obj", ":", "dict", "(", "zip", "(", "map", "(", "convert", ",", "dbus_obj", ".", "keys", "(", ")", ")", ",", "map", "(", "convert", ",", "dbus_obj", ".", "values", "(", ")", ")", ")", ")", ",", "(", "dbus", ".", "Dictionary", ",", ")", ")", "for", "conv", "in", "(", "pyint", ",", "pybool", ",", "pyfloat", ",", "pylist", ",", "pytuple", ",", "pystr", ",", "pydict", ")", ":", "if", "any", "(", "map", "(", "_isinstance", ",", "conv", ".", "dbustypes", ")", ")", ":", "return", "conv", ".", "pytype", "(", "dbus_obj", ")", "else", ":", "return", "dbus_obj" ]
38.028571
18.257143
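A short usage sketch, assuming `convert` from above is importable and python-dbus is installed; the dbus wrapper types unwrap to their plain Python equivalents:

    import dbus

    print(convert(dbus.Int32(7)))                                    # 7
    print(convert(dbus.Array([dbus.Boolean(True)], signature='b')))  # [True]
    print(convert(dbus.Dictionary({dbus.String('k'): dbus.Int16(1)},
                                  signature='sn')))                  # {'k': 1}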
def self_aware(fn): ''' decorating a function with this allows it to refer to itself as 'self' inside the function body. ''' if isgeneratorfunction(fn): @wraps(fn) def wrapper(*a,**k): generator = fn(*a,**k) if hasattr( generator, 'gi_frame' ) and hasattr( generator.gi_frame, 'f_builtins' ) and hasattr( generator.gi_frame.f_builtins, '__setitem__' ): generator.gi_frame.f_builtins[ 'self' ] = generator return generator # hand the generator back to the caller return wrapper else: fn=strict_globals(**fn.__globals__)(fn) fn.__globals__['self']=fn return fn
[ "def", "self_aware", "(", "fn", ")", ":", "if", "isgeneratorfunction", "(", "fn", ")", ":", "@", "wraps", "(", "fn", ")", "def", "wrapper", "(", "*", "a", ",", "*", "*", "k", ")", ":", "generator", "=", "fn", "(", "*", "a", ",", "*", "*", "k", ")", "if", "hasattr", "(", "generator", ",", "'gi_frame'", ")", "and", "hasattr", "(", "generator", ".", "gi_frame", ",", "'f_builtins'", ")", "and", "hasattr", "(", "generator", ".", "gi_frame", ".", "f_builtins", ",", "'__setitem__'", ")", ":", "generator", ".", "gi_frame", ".", "f_builtins", "[", "'self'", "]", "=", "generator", "return", "wrapper", "else", ":", "fn", "=", "strict_globals", "(", "*", "*", "fn", ".", "__globals__", ")", "(", "fn", ")", "fn", ".", "__globals__", "[", "'self'", "]", "=", "fn", "return", "fn" ]
28.407407
15
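A minimal usage sketch, assuming `self_aware` (and its `strict_globals` helper) is importable; the plain-function branch lets a function recurse through `self`:

    @self_aware
    def fib(n):
        # 'self' is bound to fib itself by the decorator
        return n if n < 2 else self(n - 1) + self(n - 2)

    print(fib(10))  # 55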
def _create_gitlab_runner_prometheus_instance(self, instance, init_config): """ Set up the gitlab_runner instance so it can be used in OpenMetricsBaseCheck """ # Mapping from Prometheus metrics names to Datadog ones # For now it's a 1:1 mapping allowed_metrics = init_config.get('allowed_metrics') if allowed_metrics is None: raise CheckException("At least one metric must be whitelisted in `allowed_metrics`.") gitlab_runner_instance = deepcopy(instance) # gitlab_runner uses 'prometheus_endpoint' and not 'prometheus_url', so we have to rename the key gitlab_runner_instance['prometheus_url'] = instance.get('prometheus_endpoint', None) gitlab_runner_instance.update( { 'namespace': 'gitlab_runner', 'metrics': allowed_metrics, # Defaults that were set when gitlab_runner was based on PrometheusCheck 'send_monotonic_counter': instance.get('send_monotonic_counter', False), 'health_service_check': instance.get('health_service_check', False), } ) return gitlab_runner_instance
[ "def", "_create_gitlab_runner_prometheus_instance", "(", "self", ",", "instance", ",", "init_config", ")", ":", "# Mapping from Prometheus metrics names to Datadog ones", "# For now it's a 1:1 mapping", "allowed_metrics", "=", "init_config", ".", "get", "(", "'allowed_metrics'", ")", "if", "allowed_metrics", "is", "None", ":", "raise", "CheckException", "(", "\"At least one metric must be whitelisted in `allowed_metrics`.\"", ")", "gitlab_runner_instance", "=", "deepcopy", "(", "instance", ")", "# gitlab_runner uses 'prometheus_endpoint' and not 'prometheus_url', so we have to rename the key", "gitlab_runner_instance", "[", "'prometheus_url'", "]", "=", "instance", ".", "get", "(", "'prometheus_endpoint'", ",", "None", ")", "gitlab_runner_instance", ".", "update", "(", "{", "'namespace'", ":", "'gitlab_runner'", ",", "'metrics'", ":", "allowed_metrics", ",", "# Defaults that were set when gitlab_runner was based on PrometheusCheck", "'send_monotonic_counter'", ":", "instance", ".", "get", "(", "'send_monotonic_counter'", ",", "False", ")", ",", "'health_service_check'", ":", "instance", ".", "get", "(", "'health_service_check'", ",", "False", ")", ",", "}", ")", "return", "gitlab_runner_instance" ]
45.269231
27.423077
def touch(self): """ Mark this update as complete. """ if self.marker_table_bound is None: self.create_marker_table() table = self.marker_table_bound id_exists = self.exists() with self.engine.begin() as conn: if not id_exists: ins = table.insert().values(update_id=self.update_id, target_table=self.target_table, inserted=datetime.datetime.now()) else: ins = table.update().where(sqlalchemy.and_(table.c.update_id == self.update_id, table.c.target_table == self.target_table)).\ values(update_id=self.update_id, target_table=self.target_table, inserted=datetime.datetime.now()) conn.execute(ins) assert self.exists()
[ "def", "touch", "(", "self", ")", ":", "if", "self", ".", "marker_table_bound", "is", "None", ":", "self", ".", "create_marker_table", "(", ")", "table", "=", "self", ".", "marker_table_bound", "id_exists", "=", "self", ".", "exists", "(", ")", "with", "self", ".", "engine", ".", "begin", "(", ")", "as", "conn", ":", "if", "not", "id_exists", ":", "ins", "=", "table", ".", "insert", "(", ")", ".", "values", "(", "update_id", "=", "self", ".", "update_id", ",", "target_table", "=", "self", ".", "target_table", ",", "inserted", "=", "datetime", ".", "datetime", ".", "now", "(", ")", ")", "else", ":", "ins", "=", "table", ".", "update", "(", ")", ".", "where", "(", "sqlalchemy", ".", "and_", "(", "table", ".", "c", ".", "update_id", "==", "self", ".", "update_id", ",", "table", ".", "c", ".", "target_table", "==", "self", ".", "target_table", ")", ")", ".", "values", "(", "update_id", "=", "self", ".", "update_id", ",", "target_table", "=", "self", ".", "target_table", ",", "inserted", "=", "datetime", ".", "datetime", ".", "now", "(", ")", ")", "conn", ".", "execute", "(", "ins", ")", "assert", "self", ".", "exists", "(", ")" ]
44.65
20.95
def getAllChildNodes(self): ''' getAllChildNodes - Gets all the children, and their children, and their children, and so on, all the way to the end as a TagCollection. Use .childNodes for a regular list @return TagCollection<AdvancedTag> - A TagCollection of all children (and their children, recursively) ''' ret = TagCollection() # Scan all the children of this node for child in self.children: # Append each child ret.append(child) # Append the child's children, recursively ret += child.getAllChildNodes() return ret
[ "def", "getAllChildNodes", "(", "self", ")", ":", "ret", "=", "TagCollection", "(", ")", "# Scan all the children of this node", "for", "child", "in", "self", ".", "children", ":", "# Append each child", "ret", ".", "append", "(", "child", ")", "# Append children's children recursive", "ret", "+=", "child", ".", "getAllChildNodes", "(", ")", "return", "ret" ]
31.666667
25.380952
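A usage sketch with the AdvancedHTMLParser package this method comes from; the HTML snippet is a made-up example:

    import AdvancedHTMLParser

    parser = AdvancedHTMLParser.AdvancedHTMLParser()
    parser.parseStr('<div><p><b>hi</b></p><span>x</span></div>')

    div = parser.getElementsByTagName('div')[0]
    # Depth-first: each child is followed by its own descendants.
    print([tag.tagName for tag in div.getAllChildNodes()])  # ['p', 'b', 'span']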
def disk_usage(path): """Return disk usage for the given path, in bytes. Parameters ---------- path : str Folder for which to return disk usage Returns ------- output : tuple Tuple containing total space in the folder and currently used space in the folder """ st = os.statvfs(path) total = st.f_blocks * st.f_frsize used = (st.f_blocks - st.f_bfree) * st.f_frsize return total, used
[ "def", "disk_usage", "(", "path", ")", ":", "st", "=", "os", ".", "statvfs", "(", "path", ")", "total", "=", "st", ".", "f_blocks", "*", "st", ".", "f_frsize", "used", "=", "(", "st", ".", "f_blocks", "-", "st", ".", "f_bfree", ")", "*", "st", ".", "f_frsize", "return", "total", ",", "used" ]
23.263158
20.052632
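A quick usage sketch, assuming `disk_usage` from above is in scope (POSIX only, since it relies on os.statvfs):

    total, used = disk_usage('/')
    print('used %.1f%% of %.1f GiB' % (100.0 * used / total, total / 2.0**30))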
def refresh_state_in_ec(self, ec_index): '''Get the up-to-date state of the component in an execution context. This function will update the state, rather than using the cached value. This may take time, if the component is executing on a remote node. @param ec_index The index of the execution context to check the state in. This index is into the total array of contexts, that is both owned and participating contexts. If the value of ec_index is greater than the length of @ref owned_ecs, that length is subtracted from ec_index and the result used as an index into @ref participating_ecs. ''' with self._mutex: if ec_index >= len(self.owned_ecs): ec_index -= len(self.owned_ecs) if ec_index >= len(self.participating_ecs): raise exceptions.BadECIndexError(ec_index) state = self._get_ec_state(self.participating_ecs[ec_index]) self.participating_ec_states[ec_index] = state else: state = self._get_ec_state(self.owned_ecs[ec_index]) self.owned_ec_states[ec_index] = state return state
[ "def", "refresh_state_in_ec", "(", "self", ",", "ec_index", ")", ":", "with", "self", ".", "_mutex", ":", "if", "ec_index", ">=", "len", "(", "self", ".", "owned_ecs", ")", ":", "ec_index", "-=", "len", "(", "self", ".", "owned_ecs", ")", "if", "ec_index", ">=", "len", "(", "self", ".", "participating_ecs", ")", ":", "raise", "exceptions", ".", "BadECIndexError", "(", "ec_index", ")", "state", "=", "self", ".", "_get_ec_state", "(", "self", ".", "participating_ecs", "[", "ec_index", "]", ")", "self", ".", "participating_ec_states", "[", "ec_index", "]", "=", "state", "else", ":", "state", "=", "self", ".", "_get_ec_state", "(", "self", ".", "owned_ecs", "[", "ec_index", "]", ")", "self", ".", "owned_ec_states", "[", "ec_index", "]", "=", "state", "return", "state" ]
48.777778
25.814815
def marketplace(self): """ :returns: The Marketplace version of Preview :rtype: twilio.rest.preview.marketplace.Marketplace """ if self._marketplace is None: self._marketplace = Marketplace(self) return self._marketplace
[ "def", "marketplace", "(", "self", ")", ":", "if", "self", ".", "_marketplace", "is", "None", ":", "self", ".", "_marketplace", "=", "Marketplace", "(", "self", ")", "return", "self", ".", "_marketplace" ]
33.625
8.125
def _get_all_merges(routing_table): """Get possible sets of entries to merge. Yields ------ :py:class:`~.Merge` """ # Memorise entries that have been considered as part of a merge considered_entries = set() for i, entry in enumerate(routing_table): # If we've already considered this entry then skip if i in considered_entries: continue # Construct a merge by including other routing table entries below this # one which have equivalent routes. merge = set([i]) merge.update( j for j, other_entry in enumerate(routing_table[i+1:], start=i+1) if entry.route == other_entry.route ) # Mark all these entries as considered considered_entries.update(merge) # If the merge contains multiple entries then yield it if len(merge) > 1: yield _Merge(routing_table, merge)
[ "def", "_get_all_merges", "(", "routing_table", ")", ":", "# Memorise entries that have been considered as part of a merge", "considered_entries", "=", "set", "(", ")", "for", "i", ",", "entry", "in", "enumerate", "(", "routing_table", ")", ":", "# If we've already considered this entry then skip", "if", "i", "in", "considered_entries", ":", "continue", "# Construct a merge by including other routing table entries below this", "# one which have equivalent routes.", "merge", "=", "set", "(", "[", "i", "]", ")", "merge", ".", "update", "(", "j", "for", "j", ",", "other_entry", "in", "enumerate", "(", "routing_table", "[", "i", "+", "1", ":", "]", ",", "start", "=", "i", "+", "1", ")", "if", "entry", ".", "route", "==", "other_entry", ".", "route", ")", "# Mark all these entries as considered", "considered_entries", ".", "update", "(", "merge", ")", "# If the merge contains multiple entries then yield it", "if", "len", "(", "merge", ")", ">", "1", ":", "yield", "_Merge", "(", "routing_table", ",", "merge", ")" ]
31.206897
19.551724
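A self-contained toy illustrating the grouping logic, with a stand-in Entry type instead of real routing table entries:

    from collections import namedtuple

    Entry = namedtuple('Entry', 'key route')  # stand-in for a routing entry

    table = [Entry('a', 1), Entry('b', 2), Entry('c', 1),
             Entry('d', 2), Entry('e', 3)]

    seen = set()
    for i, entry in enumerate(table):
        if i in seen:
            continue
        # Group this entry with every later entry sharing its route.
        merge = {i} | {j for j, other in enumerate(table[i + 1:], start=i + 1)
                       if other.route == entry.route}
        seen.update(merge)
        if len(merge) > 1:
            print(sorted(merge))  # prints [0, 2] then [1, 3]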
def extend_selection_to_next(self, what='word', direction='left'): """ Extend selection to next *what* ('word' or 'character') toward *direction* ('left' or 'right') """ self.__move_cursor_anchor(what, direction, QTextCursor.KeepAnchor)
[ "def", "extend_selection_to_next", "(", "self", ",", "what", "=", "'word'", ",", "direction", "=", "'left'", ")", ":", "self", ".", "__move_cursor_anchor", "(", "what", ",", "direction", ",", "QTextCursor", ".", "KeepAnchor", ")" ]
46
15.333333
def _remove(self, shard_name): """remove member from configuration""" result = self.router_command("removeShard", shard_name, is_eval=False) if result['ok'] == 1 and result['state'] == 'completed': shard = self._shards.pop(shard_name) if shard.get('isServer', False): Servers().remove(shard['_id']) if shard.get('isReplicaSet', False): ReplicaSets().remove(shard['_id']) return result
[ "def", "_remove", "(", "self", ",", "shard_name", ")", ":", "result", "=", "self", ".", "router_command", "(", "\"removeShard\"", ",", "shard_name", ",", "is_eval", "=", "False", ")", "if", "result", "[", "'ok'", "]", "==", "1", "and", "result", "[", "'state'", "]", "==", "'completed'", ":", "shard", "=", "self", ".", "_shards", ".", "pop", "(", "shard_name", ")", "if", "shard", ".", "get", "(", "'isServer'", ",", "False", ")", ":", "Servers", "(", ")", ".", "remove", "(", "shard", "[", "'_id'", "]", ")", "if", "shard", ".", "get", "(", "'isReplicaSet'", ",", "False", ")", ":", "ReplicaSets", "(", ")", ".", "remove", "(", "shard", "[", "'_id'", "]", ")", "return", "result" ]
47.5
12.7
def hide_arp_holder_arp_entry_interfacetype_TenGigabitEthernet_TenGigabitEthernet(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") hide_arp_holder = ET.SubElement(config, "hide-arp-holder", xmlns="urn:brocade.com:mgmt:brocade-arp") arp_entry = ET.SubElement(hide_arp_holder, "arp-entry") arp_ip_address_key = ET.SubElement(arp_entry, "arp-ip-address") arp_ip_address_key.text = kwargs.pop('arp_ip_address') interfacetype = ET.SubElement(arp_entry, "interfacetype") TenGigabitEthernet = ET.SubElement(interfacetype, "TenGigabitEthernet") TenGigabitEthernet = ET.SubElement(TenGigabitEthernet, "TenGigabitEthernet") TenGigabitEthernet.text = kwargs.pop('TenGigabitEthernet') callback = kwargs.pop('callback', self._callback) return callback(config)
[ "def", "hide_arp_holder_arp_entry_interfacetype_TenGigabitEthernet_TenGigabitEthernet", "(", "self", ",", "*", "*", "kwargs", ")", ":", "config", "=", "ET", ".", "Element", "(", "\"config\"", ")", "hide_arp_holder", "=", "ET", ".", "SubElement", "(", "config", ",", "\"hide-arp-holder\"", ",", "xmlns", "=", "\"urn:brocade.com:mgmt:brocade-arp\"", ")", "arp_entry", "=", "ET", ".", "SubElement", "(", "hide_arp_holder", ",", "\"arp-entry\"", ")", "arp_ip_address_key", "=", "ET", ".", "SubElement", "(", "arp_entry", ",", "\"arp-ip-address\"", ")", "arp_ip_address_key", ".", "text", "=", "kwargs", ".", "pop", "(", "'arp_ip_address'", ")", "interfacetype", "=", "ET", ".", "SubElement", "(", "arp_entry", ",", "\"interfacetype\"", ")", "TenGigabitEthernet", "=", "ET", ".", "SubElement", "(", "interfacetype", ",", "\"TenGigabitEthernet\"", ")", "TenGigabitEthernet", "=", "ET", ".", "SubElement", "(", "TenGigabitEthernet", ",", "\"TenGigabitEthernet\"", ")", "TenGigabitEthernet", ".", "text", "=", "kwargs", ".", "pop", "(", "'TenGigabitEthernet'", ")", "callback", "=", "kwargs", ".", "pop", "(", "'callback'", ",", "self", ".", "_callback", ")", "return", "callback", "(", "config", ")" ]
57.466667
27
def reduce(source, func, initializer=None): """Apply a function of two arguments cumulatively to the items of an asynchronous sequence, reducing the sequence to a single value. If ``initializer`` is present, it is placed before the items of the sequence in the calculation, and serves as a default when the sequence is empty. """ acc = accumulate.raw(source, func, initializer) return select.item.raw(acc, -1)
[ "def", "reduce", "(", "source", ",", "func", ",", "initializer", "=", "None", ")", ":", "acc", "=", "accumulate", ".", "raw", "(", "source", ",", "func", ",", "initializer", ")", "return", "select", ".", "item", ".", "raw", "(", "acc", ",", "-", "1", ")" ]
43.3
16.6
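A self-contained sketch of the same folding behaviour over an async sequence (plain asyncio, not the stream operators used above):

    import asyncio

    async def numbers():
        for n in (1, 2, 3, 4):
            yield n

    async def areduce(source, func, initializer=None):
        acc, first = initializer, initializer is None
        async for item in source:
            acc = item if first else func(acc, item)
            first = False
        return acc

    print(asyncio.run(areduce(numbers(), lambda a, b: a + b)))  # 10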
def nla_put_data(msg, attrtype, data): """Add abstract data as unspecific attribute to Netlink message. https://github.com/thom311/libnl/blob/libnl3_2_25/lib/attr.c#L527 Equivalent to nla_put() except that the length of the payload is derived from the bytearray data object. Positional arguments: msg -- Netlink message (nl_msg class instance). attrtype -- attribute type (integer). data -- data to be used as attribute payload (bytearray). Returns: 0 on success or a negative error code. """ return nla_put(msg, attrtype, len(data), data)
[ "def", "nla_put_data", "(", "msg", ",", "attrtype", ",", "data", ")", ":", "return", "nla_put", "(", "msg", ",", "attrtype", ",", "len", "(", "data", ")", ",", "data", ")" ]
35.75
21.6875
def recap(self, nc): # type: (int) -> None """recap changes the maximum size limit of the dynamic table. It also proceeds to a resize(), if the new size is lower than the previous one. @param int nc: the new cap of the dynamic table (that is the maximum-maximum size) # noqa: E501 @raise AssertionError """ assert(nc >= 0) t = self._dynamic_table_cap_size > nc self._dynamic_table_cap_size = nc if t: # The RFC is not clear about whether this resize should happen; # we do it anyway self.resize(nc)
[ "def", "recap", "(", "self", ",", "nc", ")", ":", "# type: (int) -> None", "assert", "(", "nc", ">=", "0", ")", "t", "=", "self", ".", "_dynamic_table_cap_size", ">", "nc", "self", ".", "_dynamic_table_cap_size", "=", "nc", "if", "t", ":", "# The RFC is not clear about whether this resize should happen;", "# we do it anyway", "self", ".", "resize", "(", "nc", ")" ]
40.133333
19.6
def ensure_coord_type(f): """ A decorator for class methods of the form .. code-block:: python Class.method(self, coords, **kwargs) where ``coords`` is an :obj:`astropy.coordinates.SkyCoord` object. The decorator raises a :obj:`TypeError` if the ``coords`` that gets passed to ``Class.method`` is not an :obj:`astropy.coordinates.SkyCoord` instance. Args: f (class method): A function with the signature ``(self, coords, **kwargs)``, where ``coords`` is a :obj:`SkyCoord` object containing an array. Returns: A function that raises a :obj:`TypeError` if ``coords`` is not an :obj:`astropy.coordinates.SkyCoord` object, but which otherwise behaves the same as the decorated function. """ @wraps(f) def _wrapper_func(self, coords, **kwargs): if not isinstance(coords, coordinates.SkyCoord): raise TypeError('`coords` must be an astropy.coordinates.SkyCoord object.') return f(self, coords, **kwargs) return _wrapper_func
[ "def", "ensure_coord_type", "(", "f", ")", ":", "@", "wraps", "(", "f", ")", "def", "_wrapper_func", "(", "self", ",", "coords", ",", "*", "*", "kwargs", ")", ":", "if", "not", "isinstance", "(", "coords", ",", "coordinates", ".", "SkyCoord", ")", ":", "raise", "TypeError", "(", "'`coords` must be an astropy.coordinates.SkyCoord object.'", ")", "return", "f", "(", "self", ",", "coords", ",", "*", "*", "kwargs", ")", "return", "_wrapper_func" ]
35.724138
23.655172
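A usage sketch, assuming `ensure_coord_type` from above is importable and astropy is installed; `Catalog` is a hypothetical class for illustration:

    from astropy import coordinates

    class Catalog(object):
        @ensure_coord_type
        def query(self, coords):
            return coords.icrs

    c = coordinates.SkyCoord(ra=[10.0], dec=[20.0], unit='deg')
    Catalog().query(c)          # fine: returns the coordinates in the ICRS frame
    Catalog().query('10 20')    # raises TypeError: `coords` must be a SkyCoord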
def when_built(self): """ Returns a Deferred that is callback()'d (with this Circuit instance) when this circuit hits BUILT. If it's already BUILT when this is called, you get an already-successful Deferred; otherwise, the state must change to BUILT. If the circuit will never hit BUILT (e.g. it is abandoned by Tor before it gets to BUILT) you will receive an errback """ # XXX note to self: we never do an errback; fix this behavior if self.state == 'BUILT': return defer.succeed(self) return self._when_built.when_fired()
[ "def", "when_built", "(", "self", ")", ":", "# XXX note to self: we never do an errback; fix this behavior", "if", "self", ".", "state", "==", "'BUILT'", ":", "return", "defer", ".", "succeed", "(", "self", ")", "return", "self", ".", "_when_built", ".", "when_fired", "(", ")" ]
38.6875
18.6875
def parseColors(colors, defaultColor): """ Parse command line color information. @param colors: A C{list} of space separated "value color" strings, such as ["0.9 red", "0.75 rgb(23, 190, 207)", "0.1 #CF3CF3"]. @param defaultColor: The C{str} color to use for cells that do not reach the identity fraction threshold of any color in C{colors}. @return: A C{list} of (threshold, color) tuples, where threshold is a C{float} (from C{colors}) and color is a C{str} (from C{colors}). The list will be sorted by decreasing threshold values. """ result = [] if colors: for colorInfo in colors: fields = colorInfo.split(maxsplit=1) if len(fields) == 2: threshold, color = fields try: threshold = float(threshold) except ValueError: print('--color arguments must be given as space-separated ' 'pairs of "value color" where the value is a ' 'numeric identity threshold. Your value %r is not ' 'numeric.' % threshold, file=sys.stderr) sys.exit(1) if not 0.0 <= threshold <= 1.0: print('--color arguments must be given as space-separated ' 'pairs of "value color" where the value is a ' 'numeric identity threshold from 0.0 to 1.0. Your ' 'value %r is not in that range.' % threshold, file=sys.stderr) sys.exit(1) result.append((threshold, color)) else: print('--color arguments must be given as space-separated ' 'pairs of "value color". You have given %r, which does ' 'not contain a space.' % colorInfo, file=sys.stderr) sys.exit(1) result.sort(key=itemgetter(0), reverse=True) if not result or result[-1][0] > 0.0: result.append((0.0, defaultColor)) return result
[ "def", "parseColors", "(", "colors", ",", "defaultColor", ")", ":", "result", "=", "[", "]", "if", "colors", ":", "for", "colorInfo", "in", "colors", ":", "fields", "=", "colorInfo", ".", "split", "(", "maxsplit", "=", "1", ")", "if", "len", "(", "fields", ")", "==", "2", ":", "threshold", ",", "color", "=", "fields", "try", ":", "threshold", "=", "float", "(", "threshold", ")", "except", "ValueError", ":", "print", "(", "'--color arguments must be given as space-separated '", "'pairs of \"value color\" where the value is a '", "'numeric identity threshold. Your value %r is not '", "'numeric.'", "%", "threshold", ",", "file", "=", "sys", ".", "stderr", ")", "sys", ".", "exit", "(", "1", ")", "if", "0.0", ">", "threshold", ">", "1.0", ":", "print", "(", "'--color arguments must be given as space-separated '", "'pairs of \"value color\" where the value is a '", "'numeric identity threshold from 0.0 to 1.0. Your '", "'value %r is not in that range.'", "%", "threshold", ",", "file", "=", "sys", ".", "stderr", ")", "sys", ".", "exit", "(", "1", ")", "result", ".", "append", "(", "(", "threshold", ",", "color", ")", ")", "else", ":", "print", "(", "'--color arguments must be given as space-separated '", "'pairs of \"value color\". You have given %r, which does '", "'not contain a space.'", "%", "colorInfo", ",", "file", "=", "sys", ".", "stderr", ")", "sys", ".", "exit", "(", "1", ")", "result", ".", "sort", "(", "key", "=", "itemgetter", "(", "0", ")", ",", "reverse", "=", "True", ")", "if", "not", "result", "or", "result", "[", "-", "1", "]", "[", "0", "]", ">", "0.0", ":", "result", ".", "append", "(", "(", "0.0", ",", "defaultColor", ")", ")", "return", "result" ]
44.106383
21.340426
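A quick check of the expected output, assuming `parseColors` from above is in scope:

    print(parseColors(['0.9 red', '0.75 rgb(23, 190, 207)'], 'grey'))
    # [(0.9, 'red'), (0.75, 'rgb(23, 190, 207)'), (0.0, 'grey')]

The thresholds come back sorted in decreasing order, with the default color appended at 0.0 as the catch-all.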
def read_stat(): """ Returns the system stat information. :returns: The system stat information. :rtype: list """ data = [] with open("/proc/stat", "rb") as stat_file: for line in stat_file: cpu_stat = line.split() if cpu_stat[0][:3] != b"cpu": break # First cpu line is aggregation of following lines, skip it if len(cpu_stat[0]) == 3: continue data.append( { "times": { "user": int(cpu_stat[1]), "nice": int(cpu_stat[2]), "sys": int(cpu_stat[3]), "idle": int(cpu_stat[4]), "irq": int(cpu_stat[6]), } } ) return data
[ "def", "read_stat", "(", ")", ":", "data", "=", "[", "]", "with", "open", "(", "\"/proc/stat\"", ",", "\"rb\"", ")", "as", "stat_file", ":", "for", "line", "in", "stat_file", ":", "cpu_stat", "=", "line", ".", "split", "(", ")", "if", "cpu_stat", "[", "0", "]", "[", ":", "3", "]", "!=", "b\"cpu\"", ":", "break", "# First cpu line is aggregation of following lines, skip it", "if", "len", "(", "cpu_stat", "[", "0", "]", ")", "==", "3", ":", "continue", "data", ".", "append", "(", "{", "\"times\"", ":", "{", "\"user\"", ":", "int", "(", "cpu_stat", "[", "1", "]", ")", ",", "\"nice\"", ":", "int", "(", "cpu_stat", "[", "2", "]", ")", ",", "\"sys\"", ":", "int", "(", "cpu_stat", "[", "3", "]", ")", ",", "\"idle\"", ":", "int", "(", "cpu_stat", "[", "4", "]", ")", ",", "\"irq\"", ":", "int", "(", "cpu_stat", "[", "6", "]", ")", ",", "}", "}", ")", "return", "data" ]
29.535714
14.107143
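A sketch that turns two samples into an approximate per-CPU busy fraction, assuming `read_stat` from above is importable (Linux only):

    import time

    a = read_stat()
    time.sleep(1.0)
    b = read_stat()
    for n, (s0, s1) in enumerate(zip(a, b)):
        d = {k: s1['times'][k] - s0['times'][k] for k in s1['times']}
        busy = d['user'] + d['nice'] + d['sys'] + d['irq']
        total = busy + d['idle']
        print('cpu%d: %.0f%% busy' % (n, 100.0 * busy / max(total, 1)))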
def repeater(call, args=None, kwargs=None, retries=4): """ repeat call x-times: docker API is just awesome :param call: function :param args: tuple, args for function :param kwargs: dict, kwargs for function :param retries: int, how many times we try? :return: response of the call """ args = args or () kwargs = kwargs or {} t = 1.0 for x in range(retries): try: return call(*args, **kwargs) except APIError as ex: logger.error("query #%d: docker returned an error: %r", x, ex) except Exception as ex: # this may be pretty bad log_last_traceback() logger.error("query #%d: generic error: %r", x, ex) t *= 2 time.sleep(t)
[ "def", "repeater", "(", "call", ",", "args", "=", "None", ",", "kwargs", "=", "None", ",", "retries", "=", "4", ")", ":", "args", "=", "args", "or", "(", ")", "kwargs", "=", "kwargs", "or", "{", "}", "t", "=", "1.0", "for", "x", "in", "range", "(", "retries", ")", ":", "try", ":", "return", "call", "(", "*", "args", ",", "*", "*", "kwargs", ")", "except", "APIError", "as", "ex", ":", "logger", ".", "error", "(", "\"query #%d: docker returned an error: %r\"", ",", "x", ",", "ex", ")", "except", "Exception", "as", "ex", ":", "# this may be pretty bad", "log_last_traceback", "(", ")", "logger", ".", "error", "(", "\"query #%d: generic error: %r\"", ",", "x", ",", "ex", ")", "t", "*=", "2", "time", ".", "sleep", "(", "t", ")" ]
31.125
13.958333
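A self-contained usage sketch, assuming `repeater` (with its module-level logger and helpers) is importable; the flaky callable stands in for a docker API call:

    import random

    def flaky():
        # Fails transiently about half the time.
        if random.random() < 0.5:
            raise RuntimeError('transient failure')
        return 'ok'

    print(repeater(flaky, retries=4))  # 'ok', or None if every attempt failed

Note that `repeater` returns None once the retries are exhausted, so callers should check for that.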
def srp1(*args,**kargs): """Send and receive packets at layer 2 and return only the first answer nofilter: put 1 to avoid use of bpf filters retry: if positive, how many times to resend unanswered packets if negative, how many times to retry when no more packets are answered timeout: how much time to wait after the last packet has been sent verbose: set verbosity level multi: whether to accept multiple answers for the same stimulus filter: provide a BPF filter iface: work only on the given interface""" if "timeout" not in kargs: kargs["timeout"] = -1 a,b=srp(*args,**kargs) if len(a) > 0: return a[0][1] else: return None
[ "def", "srp1", "(", "*", "args", ",", "*", "*", "kargs", ")", ":", "if", "not", "\"timeout\"", "in", "kargs", ":", "kargs", "[", "\"timeout\"", "]", "=", "-", "1", "a", ",", "b", "=", "srp", "(", "*", "args", ",", "*", "*", "kargs", ")", "if", "len", "(", "a", ")", ">", "0", ":", "return", "a", "[", "0", "]", "[", "1", "]", "else", ":", "return", "None" ]
39.882353
16.823529
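A typical scapy usage sketch (needs root privileges; the gateway address 192.168.1.1 is a placeholder):

    from scapy.all import Ether, ARP

    ans = srp1(Ether(dst='ff:ff:ff:ff:ff:ff') / ARP(pdst='192.168.1.1'),
               timeout=2, verbose=0)
    if ans is not None:
        print(ans[ARP].hwsrc)  # MAC address of the answering host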
def connect(self, hostname, family, hostaddr): """ Log new connections. """ self.client_ip = hostaddr[0] self.client_port = hostaddr[1] self.time_start = time.time() logger.debug('<{}> Connect from {}[{}]:{}'.format( self.id, hostname, self.client_ip, self.client_port)) return Milter.CONTINUE
[ "def", "connect", "(", "self", ",", "hostname", ",", "family", ",", "hostaddr", ")", ":", "self", ".", "client_ip", "=", "hostaddr", "[", "0", "]", "self", ".", "client_port", "=", "hostaddr", "[", "1", "]", "self", ".", "time_start", "=", "time", ".", "time", "(", ")", "logger", ".", "debug", "(", "'<{}> Connect from {}[{}]:{}'", ".", "format", "(", "self", ".", "id", ",", "hostname", ",", "self", ".", "client_ip", ",", "self", ".", "client_port", ")", ")", "return", "Milter", ".", "CONTINUE" ]
32.727273
10.909091
def rm_files(path, extension): """ Remove all files in the given directory with the given extension :param str path: Directory :param str extension: File type to remove :return: None """ files = list_files(extension, path) for file in files: if file.endswith(extension): os.remove(os.path.join(path, file)) return
[ "def", "rm_files", "(", "path", ",", "extension", ")", ":", "files", "=", "list_files", "(", "extension", ",", "path", ")", "for", "file", "in", "files", ":", "if", "file", ".", "endswith", "(", "extension", ")", ":", "os", ".", "remove", "(", "os", ".", "path", ".", "join", "(", "path", ",", "file", ")", ")", "return" ]
27.538462
13.538462
def var(ctx, clear_target, clear_all): """Install variable data to /var/[lib,cache]/hfos""" install_var(str(ctx.obj['instance']), clear_target, clear_all)
[ "def", "var", "(", "ctx", ",", "clear_target", ",", "clear_all", ")", ":", "install_var", "(", "str", "(", "ctx", ".", "obj", "[", "'instance'", "]", ")", ",", "clear_target", ",", "clear_all", ")" ]
40
17
def _record_count(self): """ Get number of records in file. This is maybe suboptimal because we have to seek to the end of the file. Side effect: returns file position to record_start. """ self.filepath_or_buffer.seek(0, 2) total_records_length = (self.filepath_or_buffer.tell() - self.record_start) if total_records_length % 80 != 0: warnings.warn("xport file may be corrupted") if self.record_length > 80: self.filepath_or_buffer.seek(self.record_start) return total_records_length // self.record_length self.filepath_or_buffer.seek(-80, 2) last_card = self.filepath_or_buffer.read(80) last_card = np.frombuffer(last_card, dtype=np.uint64) # 8 byte blank ix = np.flatnonzero(last_card == 2314885530818453536) if len(ix) == 0: tail_pad = 0 else: tail_pad = 8 * len(ix) self.filepath_or_buffer.seek(self.record_start) return (total_records_length - tail_pad) // self.record_length
[ "def", "_record_count", "(", "self", ")", ":", "self", ".", "filepath_or_buffer", ".", "seek", "(", "0", ",", "2", ")", "total_records_length", "=", "(", "self", ".", "filepath_or_buffer", ".", "tell", "(", ")", "-", "self", ".", "record_start", ")", "if", "total_records_length", "%", "80", "!=", "0", ":", "warnings", ".", "warn", "(", "\"xport file may be corrupted\"", ")", "if", "self", ".", "record_length", ">", "80", ":", "self", ".", "filepath_or_buffer", ".", "seek", "(", "self", ".", "record_start", ")", "return", "total_records_length", "//", "self", ".", "record_length", "self", ".", "filepath_or_buffer", ".", "seek", "(", "-", "80", ",", "2", ")", "last_card", "=", "self", ".", "filepath_or_buffer", ".", "read", "(", "80", ")", "last_card", "=", "np", ".", "frombuffer", "(", "last_card", ",", "dtype", "=", "np", ".", "uint64", ")", "# 8 byte blank", "ix", "=", "np", ".", "flatnonzero", "(", "last_card", "==", "2314885530818453536", ")", "if", "len", "(", "ix", ")", "==", "0", ":", "tail_pad", "=", "0", "else", ":", "tail_pad", "=", "8", "*", "len", "(", "ix", ")", "self", ".", "filepath_or_buffer", ".", "seek", "(", "self", ".", "record_start", ")", "return", "(", "total_records_length", "-", "tail_pad", ")", "//", "self", ".", "record_length" ]
30.527778
21.527778
def _parse_request(self, xml): """ Parse a request with metadata information :param xml: LXML Object :type xml: Union[lxml.etree._Element] """ for node in xml.xpath(".//ti:groupname", namespaces=XPATH_NAMESPACES): lang = node.get("xml:lang") or CtsText.DEFAULT_LANG self.metadata.add(RDF_NAMESPACES.CTS.groupname, lang=lang, value=node.text) self.set_creator(node.text, lang) for node in xml.xpath(".//ti:title", namespaces=XPATH_NAMESPACES): lang = node.get("xml:lang") or CtsText.DEFAULT_LANG self.metadata.add(RDF_NAMESPACES.CTS.title, lang=lang, value=node.text) self.set_title(node.text, lang) for node in xml.xpath(".//ti:label", namespaces=XPATH_NAMESPACES): lang = node.get("xml:lang") or CtsText.DEFAULT_LANG self.metadata.add(RDF_NAMESPACES.CTS.label, lang=lang, value=node.text) self.set_subject(node.text, lang) for node in xml.xpath(".//ti:description", namespaces=XPATH_NAMESPACES): lang = node.get("xml:lang") or CtsText.DEFAULT_LANG self.metadata.add(RDF_NAMESPACES.CTS.description, lang=lang, value=node.text) self.set_description(node.text, lang) # Need to code that p if not self.citation.is_set() and xml.xpath("//ti:citation", namespaces=XPATH_NAMESPACES): self.citation = CtsCollection.XmlCtsCitation.ingest( xml, xpath=".//ti:citation[not(ancestor::ti:citation)]" )
[ "def", "_parse_request", "(", "self", ",", "xml", ")", ":", "for", "node", "in", "xml", ".", "xpath", "(", "\".//ti:groupname\"", ",", "namespaces", "=", "XPATH_NAMESPACES", ")", ":", "lang", "=", "node", ".", "get", "(", "\"xml:lang\"", ")", "or", "CtsText", ".", "DEFAULT_LANG", "self", ".", "metadata", ".", "add", "(", "RDF_NAMESPACES", ".", "CTS", ".", "groupname", ",", "lang", "=", "lang", ",", "value", "=", "node", ".", "text", ")", "self", ".", "set_creator", "(", "node", ".", "text", ",", "lang", ")", "for", "node", "in", "xml", ".", "xpath", "(", "\".//ti:title\"", ",", "namespaces", "=", "XPATH_NAMESPACES", ")", ":", "lang", "=", "node", ".", "get", "(", "\"xml:lang\"", ")", "or", "CtsText", ".", "DEFAULT_LANG", "self", ".", "metadata", ".", "add", "(", "RDF_NAMESPACES", ".", "CTS", ".", "title", ",", "lang", "=", "lang", ",", "value", "=", "node", ".", "text", ")", "self", ".", "set_title", "(", "node", ".", "text", ",", "lang", ")", "for", "node", "in", "xml", ".", "xpath", "(", "\".//ti:label\"", ",", "namespaces", "=", "XPATH_NAMESPACES", ")", ":", "lang", "=", "node", ".", "get", "(", "\"xml:lang\"", ")", "or", "CtsText", ".", "DEFAULT_LANG", "self", ".", "metadata", ".", "add", "(", "RDF_NAMESPACES", ".", "CTS", ".", "label", ",", "lang", "=", "lang", ",", "value", "=", "node", ".", "text", ")", "self", ".", "set_subject", "(", "node", ".", "text", ",", "lang", ")", "for", "node", "in", "xml", ".", "xpath", "(", "\".//ti:description\"", ",", "namespaces", "=", "XPATH_NAMESPACES", ")", ":", "lang", "=", "node", ".", "get", "(", "\"xml:lang\"", ")", "or", "CtsText", ".", "DEFAULT_LANG", "self", ".", "metadata", ".", "add", "(", "RDF_NAMESPACES", ".", "CTS", ".", "description", ",", "lang", "=", "lang", ",", "value", "=", "node", ".", "text", ")", "self", ".", "set_description", "(", "node", ".", "text", ",", "lang", ")", "# Need to code that p", "if", "not", "self", ".", "citation", ".", "is_set", "(", ")", "and", "xml", ".", "xpath", "(", "\"//ti:citation\"", ",", "namespaces", "=", "XPATH_NAMESPACES", ")", ":", "self", ".", "citation", "=", "CtsCollection", ".", "XmlCtsCitation", ".", "ingest", "(", "xml", ",", "xpath", "=", "\".//ti:citation[not(ancestor::ti:citation)]\"", ")" ]
48.1875
26
def get_distinct_values_from_cols(self, l_col_list): """ returns the list of distinct combinations in a dataset based on the columns in the list. Note that this is currently implemented as MAX permutations of the combo so it is not guaranteed to have values in each case. """ uniq_vals = [] for l_col_name in l_col_list: #print('col_name: ' + l_col_name) uniq_vals.append(set(self.get_col_data_by_name(l_col_name))) #print(' unique values = ', uniq_vals) #print(' unique values[0] = ', uniq_vals[0]) #print(' unique values[1] = ', uniq_vals[1]) if len(l_col_list) == 0: return [] elif len(l_col_list) == 1: return sorted(uniq_vals[0]) elif len(l_col_list) == 2: res = [(a, b) for a in uniq_vals[0] for b in uniq_vals[1]] return res else: print("TODO: more than 2 columns not implemented") return -44
[ "def", "get_distinct_values_from_cols", "(", "self", ",", "l_col_list", ")", ":", "uniq_vals", "=", "[", "]", "for", "l_col_name", "in", "l_col_list", ":", "#print('col_name: ' + l_col_name) ", "uniq_vals", ".", "append", "(", "set", "(", "self", ".", "get_col_data_by_name", "(", "l_col_name", ")", ")", ")", "#print(' unique values = ', uniq_vals) ", "#print(' unique values[0] = ', uniq_vals[0])", "#print(' unique values[1] = ', uniq_vals[1])", "if", "len", "(", "l_col_list", ")", "==", "0", ":", "return", "[", "]", "elif", "len", "(", "l_col_list", ")", "==", "1", ":", "return", "sorted", "(", "[", "v", "for", "v", "in", "uniq_vals", "]", ")", "elif", "len", "(", "l_col_list", ")", "==", "2", ":", "res", "=", "[", "]", "res", "=", "[", "(", "a", ",", "b", ")", "for", "a", "in", "uniq_vals", "[", "0", "]", "for", "b", "in", "uniq_vals", "[", "1", "]", "]", "return", "res", "else", ":", "print", "(", "\"TODO \"", ")", "return", "-", "44" ]
38.730769
15.423077
def del_resource(self, service_name, resource_name, base_class=None): """ Deletes a resource class for a given service. Fails silently if no connection is found in the cache. :param service_name: The service a given ``Resource`` talks to. Ex. ``sqs``, ``sns``, ``dynamodb``, etc. :type service_name: string :param base_class: (Optional) The base class of the object. Prevents "magically" loading the wrong class (one with a different base). Default is ``default``. :type base_class: class """ # Unlike ``get_resource``, this should be fire & forget. # We don't really care, as long as it's not in the cache any longer. try: classpath = self.build_classpath(base_class) opts = self.services[service_name]['resources'][resource_name] del opts[classpath] except KeyError: pass
[ "def", "del_resource", "(", "self", ",", "service_name", ",", "resource_name", ",", "base_class", "=", "None", ")", ":", "# Unlike ``get_resource``, this should be fire & forget.", "# We don't really care, as long as it's not in the cache any longer.", "try", ":", "classpath", "=", "self", ".", "build_classpath", "(", "base_class", ")", "opts", "=", "self", ".", "services", "[", "service_name", "]", "[", "'resources'", "]", "[", "resource_name", "]", "del", "opts", "[", "classpath", "]", "except", "KeyError", ":", "pass" ]
40.652174
22.043478
def _GetStat(self): """Retrieves a stat object. Returns: VFSStat: a stat object. Raises: BackEndError: when the encoded stream is missing. """ stat_object = vfs_stat.VFSStat() # File data stat information. stat_object.size = self.path_spec.range_size # File entry type stat information. stat_object.type = stat_object.TYPE_FILE return stat_object
[ "def", "_GetStat", "(", "self", ")", ":", "stat_object", "=", "vfs_stat", ".", "VFSStat", "(", ")", "# File data stat information.", "stat_object", ".", "size", "=", "self", ".", "path_spec", ".", "range_size", "# File entry type stat information.", "stat_object", ".", "type", "=", "stat_object", ".", "TYPE_FILE", "return", "stat_object" ]
21.444444
19.222222
def render_impl(self, template, context, at_paths=None, **kwargs): """ Render given template file and return the result. :param template: Template file path :param context: A dict or dict-like object to instantiate given template file :param at_paths: Template search paths :param kwargs: Keyword arguments passed to the template engine to render templates with specific features enabled. :return: Rendered string """ # Override the path to pass it to tenjin.Engine. if at_paths is not None: paths = at_paths + self.engine_options.get("path", []) self.engine_options["path"] = paths engine = tenjin.Engine(**self.engine_options) LOGGER.warning("engine_options=%s", str(self.engine_options)) kwargs = self.filter_options(kwargs, self.render_valid_options()) return engine.render(template, context, **kwargs)
[ "def", "render_impl", "(", "self", ",", "template", ",", "context", ",", "at_paths", "=", "None", ",", "*", "*", "kwargs", ")", ":", "# Override the path to pass it to tenjin.Engine.", "if", "at_paths", "is", "not", "None", ":", "paths", "=", "at_paths", "+", "self", ".", "engine_options", ".", "get", "(", "\"path\"", ",", "[", "]", ")", "self", ".", "engine_options", "[", "\"path\"", "]", "=", "paths", "engine", "=", "tenjin", ".", "Engine", "(", "*", "*", "self", ".", "engine_options", ")", "LOGGER", ".", "warning", "(", "\"engine_options=%s\"", ",", "str", "(", "self", ".", "engine_options", ")", ")", "kwargs", "=", "self", ".", "filter_options", "(", "kwargs", ",", "self", ".", "render_valid_options", "(", ")", ")", "return", "engine", ".", "render", "(", "template", ",", "context", ",", "*", "*", "kwargs", ")" ]
41.217391
20.347826
def retrieve_all(self, sids, default_none=False): """ Retrieve all assets in `sids`. Parameters ---------- sids : iterable of int Assets to retrieve. default_none : bool If True, return None for failed lookups. If False, raise `SidsNotFound`. Returns ------- assets : list[Asset or None] A list of the same length as `sids` containing Assets (or Nones) corresponding to the requested sids. Raises ------ SidsNotFound When a requested sid is not found and default_none=False. """ sids = list(sids) hits, missing, failures = {}, set(), [] for sid in sids: try: asset = self._asset_cache[sid] if not default_none and asset is None: # Bail early if we've already cached that we don't know # about an asset. raise SidsNotFound(sids=[sid]) hits[sid] = asset except KeyError: missing.add(sid) # All requests were cache hits. Return requested sids in order. if not missing: return [hits[sid] for sid in sids] update_hits = hits.update # Look up cache misses by type. type_to_assets = self.group_by_type(missing) # Handle failures failures = {failure: None for failure in type_to_assets.pop(None, ())} update_hits(failures) self._asset_cache.update(failures) if failures and not default_none: raise SidsNotFound(sids=list(failures)) # We don't update the asset cache here because it should already be # updated by `self.retrieve_equities`. update_hits(self.retrieve_equities(type_to_assets.pop('equity', ()))) update_hits( self.retrieve_futures_contracts(type_to_assets.pop('future', ())) ) # We shouldn't know about any other asset types. if type_to_assets: raise AssertionError( "Found asset types: %s" % list(type_to_assets.keys()) ) return [hits[sid] for sid in sids]
[ "def", "retrieve_all", "(", "self", ",", "sids", ",", "default_none", "=", "False", ")", ":", "sids", "=", "list", "(", "sids", ")", "hits", ",", "missing", ",", "failures", "=", "{", "}", ",", "set", "(", ")", ",", "[", "]", "for", "sid", "in", "sids", ":", "try", ":", "asset", "=", "self", ".", "_asset_cache", "[", "sid", "]", "if", "not", "default_none", "and", "asset", "is", "None", ":", "# Bail early if we've already cached that we don't know", "# about an asset.", "raise", "SidsNotFound", "(", "sids", "=", "[", "sid", "]", ")", "hits", "[", "sid", "]", "=", "asset", "except", "KeyError", ":", "missing", ".", "add", "(", "sid", ")", "# All requests were cache hits. Return requested sids in order.", "if", "not", "missing", ":", "return", "[", "hits", "[", "sid", "]", "for", "sid", "in", "sids", "]", "update_hits", "=", "hits", ".", "update", "# Look up cache misses by type.", "type_to_assets", "=", "self", ".", "group_by_type", "(", "missing", ")", "# Handle failures", "failures", "=", "{", "failure", ":", "None", "for", "failure", "in", "type_to_assets", ".", "pop", "(", "None", ",", "(", ")", ")", "}", "update_hits", "(", "failures", ")", "self", ".", "_asset_cache", ".", "update", "(", "failures", ")", "if", "failures", "and", "not", "default_none", ":", "raise", "SidsNotFound", "(", "sids", "=", "list", "(", "failures", ")", ")", "# We don't update the asset cache here because it should already be", "# updated by `self.retrieve_equities`.", "update_hits", "(", "self", ".", "retrieve_equities", "(", "type_to_assets", ".", "pop", "(", "'equity'", ",", "(", ")", ")", ")", ")", "update_hits", "(", "self", ".", "retrieve_futures_contracts", "(", "type_to_assets", ".", "pop", "(", "'future'", ",", "(", ")", ")", ")", ")", "# We shouldn't know about any other asset types.", "if", "type_to_assets", ":", "raise", "AssertionError", "(", "\"Found asset types: %s\"", "%", "list", "(", "type_to_assets", ".", "keys", "(", ")", ")", ")", "return", "[", "hits", "[", "sid", "]", "for", "sid", "in", "sids", "]" ]
32.477612
19.58209
def mapPriority(levelno): """Map logging levels to journald priorities. Since Python log level numbers are "sparse", we have to map numbers in between the standard levels too. """ if levelno <= _logging.DEBUG: return LOG_DEBUG elif levelno <= _logging.INFO: return LOG_INFO elif levelno <= _logging.WARNING: return LOG_WARNING elif levelno <= _logging.ERROR: return LOG_ERR elif levelno <= _logging.CRITICAL: return LOG_CRIT else: return LOG_ALERT
[ "def", "mapPriority", "(", "levelno", ")", ":", "if", "levelno", "<=", "_logging", ".", "DEBUG", ":", "return", "LOG_DEBUG", "elif", "levelno", "<=", "_logging", ".", "INFO", ":", "return", "LOG_INFO", "elif", "levelno", "<=", "_logging", ".", "WARNING", ":", "return", "LOG_WARNING", "elif", "levelno", "<=", "_logging", ".", "ERROR", ":", "return", "LOG_ERR", "elif", "levelno", "<=", "_logging", ".", "CRITICAL", ":", "return", "LOG_CRIT", "else", ":", "return", "LOG_ALERT" ]
32.388889
11.277778
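A quick check of the sparse-level point, assuming `mapPriority` and the LOG_* constants from the surrounding module are in scope:

    import logging

    # A custom level between INFO (20) and WARNING (30) still maps cleanly.
    custom_level = 25  # hypothetical in-between level
    assert mapPriority(logging.INFO) == LOG_INFO
    assert mapPriority(custom_level) == LOG_WARNING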
def interpolation_linear(x, x1, x2, y1, y2): """ Linear interpolation returns (y2 - y1) / (x2 - x1) * (x - x1) + y1 """ m = (y2 - y1) / (x2 - x1) t = (x - x1) return m * t + y1
[ "def", "interpolation_linear", "(", "x", ",", "x1", ",", "x2", ",", "y1", ",", "y2", ")", ":", "m", "=", "(", "y2", "-", "y1", ")", "/", "(", "x2", "-", "x1", ")", "t", "=", "(", "x", "-", "x1", ")", "return", "m", "*", "t", "+", "y1" ]
24.625
10.375
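A worked check, assuming `interpolation_linear` from above is in scope:

    # Halfway between (x1=0, y1=0) and (x2=10, y2=100): m = 10, t = 5 -> 50.0
    print(interpolation_linear(5.0, 0.0, 10.0, 0.0, 100.0))  # 50.0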
def com_google_fonts_check_hinting_impact(font, ttfautohint_stats): """Show hinting filesize impact. Current implementation simply logs useful info but there's no fail scenario for this checker.""" hinted = ttfautohint_stats["hinted_size"] dehinted = ttfautohint_stats["dehinted_size"] increase = hinted - dehinted change = (float(hinted)/dehinted - 1) * 100 def filesize_formatting(s): if s < 1024: return f"{s} bytes" elif s < 1024*1024: return "{:.1f}kb".format(s/1024) else: return "{:.1f}Mb".format(s/(1024*1024)) hinted_size = filesize_formatting(hinted) dehinted_size = filesize_formatting(dehinted) increase = filesize_formatting(increase) results_table = "Hinting filesize impact:\n\n" results_table += f"| | {font} |\n" results_table += "|:--- | ---:|\n" results_table += f"| Dehinted Size | {dehinted_size} |\n" results_table += f"| Hinted Size | {hinted_size} |\n" results_table += f"| Increase | {increase} |\n" results_table += f"| Change | {change:.1f} % |\n" yield INFO, results_table
[ "def", "com_google_fonts_check_hinting_impact", "(", "font", ",", "ttfautohint_stats", ")", ":", "hinted", "=", "ttfautohint_stats", "[", "\"hinted_size\"", "]", "dehinted", "=", "ttfautohint_stats", "[", "\"dehinted_size\"", "]", "increase", "=", "hinted", "-", "dehinted", "change", "=", "(", "float", "(", "hinted", ")", "/", "dehinted", "-", "1", ")", "*", "100", "def", "filesize_formatting", "(", "s", ")", ":", "if", "s", "<", "1024", ":", "return", "f\"{s} bytes\"", "elif", "s", "<", "1024", "*", "1024", ":", "return", "\"{:.1f}kb\"", ".", "format", "(", "s", "/", "1024", ")", "else", ":", "return", "\"{:.1f}Mb\"", ".", "format", "(", "s", "/", "(", "1024", "*", "1024", ")", ")", "hinted_size", "=", "filesize_formatting", "(", "hinted", ")", "dehinted_size", "=", "filesize_formatting", "(", "dehinted", ")", "increase", "=", "filesize_formatting", "(", "increase", ")", "results_table", "=", "\"Hinting filesize impact:\\n\\n\"", "results_table", "+=", "f\"| | {font} |\\n\"", "results_table", "+=", "\"|:--- | ---:|\\n\"", "results_table", "+=", "f\"| Dehinted Size | {dehinted_size} |\\n\"", "results_table", "+=", "f\"| Hinted Size | {hinted_size} |\\n\"", "results_table", "+=", "f\"| Increase | {increase} |\\n\"", "results_table", "+=", "f\"| Change | {change:.1f} % |\\n\"", "yield", "INFO", ",", "results_table" ]
33.935484
14.967742
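The nested filesize_formatting helper picks a unit by magnitude with two thresholds. A standalone copy of the helper plus a few checks of its boundaries, for reference:

def filesize_formatting(s):
    # bytes below 1 KiB, one-decimal KiB below 1 MiB, one-decimal MiB above
    if s < 1024:
        return f"{s} bytes"
    elif s < 1024 * 1024:
        return "{:.1f}kb".format(s / 1024)
    else:
        return "{:.1f}Mb".format(s / (1024 * 1024))

assert filesize_formatting(512) == "512 bytes"
assert filesize_formatting(2048) == "2.0kb"
assert filesize_formatting(3 * 1024 * 1024) == "3.0Mb"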
def box_actions(results, times, N_matrix, ifprint):
    """
    Finds actions, angles and frequencies for box orbit.
    Takes a series of phase-space points from an orbit integration at times t and returns
    L = (act,ang,n_vec,toy_aa, pars) -- explained in find_actions() below.
    """
    if(ifprint):
        print("\n=====\nUsing triaxial harmonic toy potential")

    t = time.time()
    # Find best toy parameters
    omega = toy.findbestparams_ho(results)
    if(ifprint):
        print("Best omega "+str(omega)+" found in "+str(time.time()-t)+" seconds")

    # Now find toy actions and angles
    AA = np.array([toy.angact_ho(i,omega) for i in results])
    AA = AA[~np.isnan(AA).any(1)]
    if(len(AA)==0):
        return

    t = time.time()
    act = solver.solver(AA, N_matrix)
    if act is None:
        return

    if(ifprint):
        print("Action solution found for N_max = "+str(N_matrix)+", size "+str(len(act[0]))+" symmetric matrix in "+str(time.time()-t)+" seconds")

    np.savetxt("GF.Sn_box",np.vstack((act[1].T,act[0][3:])).T)

    ang = solver.angle_solver(AA,times,N_matrix,np.ones(3))
    if(ifprint):
        print("Angle solution found for N_max = "+str(N_matrix)+", size "+str(len(ang))+" symmetric matrix in "+str(time.time()-t)+" seconds")

    # Just some checks
    if(len(ang)>len(AA)):
        print("More unknowns than equations")

    return act[0], ang, act[1], AA, omega
[ "def", "box_actions", "(", "results", ",", "times", ",", "N_matrix", ",", "ifprint", ")", ":", "if", "(", "ifprint", ")", ":", "print", "(", "\"\\n=====\\nUsing triaxial harmonic toy potential\"", ")", "t", "=", "time", ".", "time", "(", ")", "# Find best toy parameters", "omega", "=", "toy", ".", "findbestparams_ho", "(", "results", ")", "if", "(", "ifprint", ")", ":", "print", "(", "\"Best omega \"", "+", "str", "(", "omega", ")", "+", "\" found in \"", "+", "str", "(", "time", ".", "time", "(", ")", "-", "t", ")", "+", "\" seconds\"", ")", "# Now find toy actions and angles", "AA", "=", "np", ".", "array", "(", "[", "toy", ".", "angact_ho", "(", "i", ",", "omega", ")", "for", "i", "in", "results", "]", ")", "AA", "=", "AA", "[", "~", "np", ".", "isnan", "(", "AA", ")", ".", "any", "(", "1", ")", "]", "if", "(", "len", "(", "AA", ")", "==", "0", ")", ":", "return", "t", "=", "time", ".", "time", "(", ")", "act", "=", "solver", ".", "solver", "(", "AA", ",", "N_matrix", ")", "if", "act", "==", "None", ":", "return", "if", "(", "ifprint", ")", ":", "print", "(", "\"Action solution found for N_max = \"", "+", "str", "(", "N_matrix", ")", "+", "\", size \"", "+", "str", "(", "len", "(", "act", "[", "0", "]", ")", ")", "+", "\" symmetric matrix in \"", "+", "str", "(", "time", ".", "time", "(", ")", "-", "t", ")", "+", "\" seconds\"", ")", "np", ".", "savetxt", "(", "\"GF.Sn_box\"", ",", "np", ".", "vstack", "(", "(", "act", "[", "1", "]", ".", "T", ",", "act", "[", "0", "]", "[", "3", ":", "]", ")", ")", ".", "T", ")", "ang", "=", "solver", ".", "angle_solver", "(", "AA", ",", "times", ",", "N_matrix", ",", "np", ".", "ones", "(", "3", ")", ")", "if", "(", "ifprint", ")", ":", "print", "(", "\"Angle solution found for N_max = \"", "+", "str", "(", "N_matrix", ")", "+", "\", size \"", "+", "str", "(", "len", "(", "ang", ")", ")", "+", "\" symmetric matrix in \"", "+", "str", "(", "time", ".", "time", "(", ")", "-", "t", ")", "+", "\" seconds\"", ")", "# Just some checks", "if", "(", "len", "(", "ang", ")", ">", "len", "(", "AA", ")", ")", ":", "print", "(", "\"More unknowns than equations\"", ")", "return", "act", "[", "0", "]", ",", "ang", ",", "act", "[", "1", "]", ",", "AA", ",", "omega" ]
34.7
26.85
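The line AA[~np.isnan(AA).any(1)] is a generic numpy idiom for dropping every row that contains at least one NaN. A small demonstration of just that idiom, independent of the toy/solver modules:

import numpy as np

AA = np.array([[1.0, 2.0],
               [np.nan, 3.0],
               [4.0, 5.0]])
clean = AA[~np.isnan(AA).any(1)]   # keep only rows with no NaN entries
assert clean.shape == (2, 2)
assert (clean == np.array([[1.0, 2.0], [4.0, 5.0]])).all()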
def rename_local_untracked(self): """ Rename local untracked files that would require pulls """ # Find what files have been added! new_upstream_files = self.find_upstream_changed('A') for f in new_upstream_files: if os.path.exists(f): # If there's a file extension, put the timestamp before that ts = datetime.datetime.now().strftime('__%Y%m%d%H%M%S') path_head, path_tail = os.path.split(f) path_tail = ts.join(os.path.splitext(path_tail)) new_file_name = os.path.join(path_head, path_tail) os.rename(f, new_file_name) yield 'Renamed {} to {} to avoid conflict with upstream'.format(f, new_file_name)
[ "def", "rename_local_untracked", "(", "self", ")", ":", "# Find what files have been added!", "new_upstream_files", "=", "self", ".", "find_upstream_changed", "(", "'A'", ")", "for", "f", "in", "new_upstream_files", ":", "if", "os", ".", "path", ".", "exists", "(", "f", ")", ":", "# If there's a file extension, put the timestamp before that", "ts", "=", "datetime", ".", "datetime", ".", "now", "(", ")", ".", "strftime", "(", "'__%Y%m%d%H%M%S'", ")", "path_head", ",", "path_tail", "=", "os", ".", "path", ".", "split", "(", "f", ")", "path_tail", "=", "ts", ".", "join", "(", "os", ".", "path", ".", "splitext", "(", "path_tail", ")", ")", "new_file_name", "=", "os", ".", "path", ".", "join", "(", "path_head", ",", "path_tail", ")", "os", ".", "rename", "(", "f", ",", "new_file_name", ")", "yield", "'Renamed {} to {} to avoid conflict with upstream'", ".", "format", "(", "f", ",", "new_file_name", ")" ]
50.6
16.866667
def get_input(context, conf): """Gets a user parameter, either from the console or from an outer submodule/system Assumes conf has name, default, prompt and debug """ name = conf['name']['value'] prompt = conf['prompt']['value'] default = conf['default']['value'] or conf['debug']['value'] if context.submodule or context.inputs: value = context.inputs.get(name, default) elif not context.test: # we skip user interaction during tests raw = raw_input("%s (default=%s) " % (encode(prompt), encode(default))) value = raw or default else: value = default return value
[ "def", "get_input", "(", "context", ",", "conf", ")", ":", "name", "=", "conf", "[", "'name'", "]", "[", "'value'", "]", "prompt", "=", "conf", "[", "'prompt'", "]", "[", "'value'", "]", "default", "=", "conf", "[", "'default'", "]", "[", "'value'", "]", "or", "conf", "[", "'debug'", "]", "[", "'value'", "]", "if", "context", ".", "submodule", "or", "context", ".", "inputs", ":", "value", "=", "context", ".", "inputs", ".", "get", "(", "name", ",", "default", ")", "elif", "not", "context", ".", "test", ":", "# we skip user interaction during tests", "raw", "=", "raw_input", "(", "\"%s (default=%s) \"", "%", "(", "encode", "(", "prompt", ")", ",", "encode", "(", "default", ")", ")", ")", "value", "=", "raw", "or", "default", "else", ":", "value", "=", "default", "return", "value" ]
31.95
17.6
def center_origin(self): """Sets the origin to the center of the image.""" self.set_origin(Vector2(self.image.get_width() / 2.0, self.image.get_height() / 2.0))
[ "def", "center_origin", "(", "self", ")", ":", "self", ".", "set_origin", "(", "Vector2", "(", "self", ".", "image", ".", "get_width", "(", ")", "/", "2.0", ",", "self", ".", "image", ".", "get_height", "(", ")", "/", "2.0", ")", ")" ]
58
23
def CountHuntOutputPluginLogEntries(self, hunt_id, output_plugin_id, with_type=None, cursor=None): """Counts hunt output plugin log entries.""" query = ("SELECT COUNT(*) " "FROM flow_output_plugin_log_entries " "FORCE INDEX (flow_output_plugin_log_entries_by_hunt) " "WHERE hunt_id = %s AND output_plugin_id = %s ") args = [ db_utils.HuntIDToInt(hunt_id), db_utils.OutputPluginIDToInt(output_plugin_id) ] if with_type is not None: query += "AND log_entry_type = %s" args.append(int(with_type)) cursor.execute(query, args) return cursor.fetchone()[0]
[ "def", "CountHuntOutputPluginLogEntries", "(", "self", ",", "hunt_id", ",", "output_plugin_id", ",", "with_type", "=", "None", ",", "cursor", "=", "None", ")", ":", "query", "=", "(", "\"SELECT COUNT(*) \"", "\"FROM flow_output_plugin_log_entries \"", "\"FORCE INDEX (flow_output_plugin_log_entries_by_hunt) \"", "\"WHERE hunt_id = %s AND output_plugin_id = %s \"", ")", "args", "=", "[", "db_utils", ".", "HuntIDToInt", "(", "hunt_id", ")", ",", "db_utils", ".", "OutputPluginIDToInt", "(", "output_plugin_id", ")", "]", "if", "with_type", "is", "not", "None", ":", "query", "+=", "\"AND log_entry_type = %s\"", "args", ".", "append", "(", "int", "(", "with_type", ")", ")", "cursor", ".", "execute", "(", "query", ",", "args", ")", "return", "cursor", ".", "fetchone", "(", ")", "[", "0", "]" ]
37.047619
14.761905
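The pattern here — build the base statement and the argument list together, then append optional filters to both — keeps the query fully parameterized. A sketch of the same pattern against an in-memory sqlite3 database (an assumption for illustration; sqlite3 uses ? placeholders where MySQL uses %s):

import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE log_entries (hunt_id INTEGER, log_entry_type INTEGER)')
conn.executemany('INSERT INTO log_entries VALUES (?, ?)', [(1, 0), (1, 1), (2, 0)])

def count_entries(hunt_id, with_type=None):
    # base query and args grow in lockstep, so placeholders always line up
    query = 'SELECT COUNT(*) FROM log_entries WHERE hunt_id = ? '
    args = [hunt_id]
    if with_type is not None:
        query += 'AND log_entry_type = ?'
        args.append(int(with_type))
    return conn.execute(query, args).fetchone()[0]

assert count_entries(1) == 2
assert count_entries(1, with_type=1) == 1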
def os_walk_pre_35(top, topdown=True, onerror=None, followlinks=False): """Pre Python 3.5 implementation of os.walk() that doesn't use scandir.""" islink, join, isdir = os.path.islink, os.path.join, os.path.isdir try: names = os.listdir(top) except OSError as err: if onerror is not None: onerror(err) return dirs, nondirs = [], [] for name in names: if isdir(join(top, name)): dirs.append(name) else: nondirs.append(name) if topdown: yield top, dirs, nondirs for name in dirs: new_path = join(top, name) if followlinks or not islink(new_path): for x in os_walk_pre_35(new_path, topdown, onerror, followlinks): yield x if not topdown: yield top, dirs, nondirs
[ "def", "os_walk_pre_35", "(", "top", ",", "topdown", "=", "True", ",", "onerror", "=", "None", ",", "followlinks", "=", "False", ")", ":", "islink", ",", "join", ",", "isdir", "=", "os", ".", "path", ".", "islink", ",", "os", ".", "path", ".", "join", ",", "os", ".", "path", ".", "isdir", "try", ":", "names", "=", "os", ".", "listdir", "(", "top", ")", "except", "OSError", "as", "err", ":", "if", "onerror", "is", "not", "None", ":", "onerror", "(", "err", ")", "return", "dirs", ",", "nondirs", "=", "[", "]", ",", "[", "]", "for", "name", "in", "names", ":", "if", "isdir", "(", "join", "(", "top", ",", "name", ")", ")", ":", "dirs", ".", "append", "(", "name", ")", "else", ":", "nondirs", ".", "append", "(", "name", ")", "if", "topdown", ":", "yield", "top", ",", "dirs", ",", "nondirs", "for", "name", "in", "dirs", ":", "new_path", "=", "join", "(", "top", ",", "name", ")", "if", "followlinks", "or", "not", "islink", "(", "new_path", ")", ":", "for", "x", "in", "os_walk_pre_35", "(", "new_path", ",", "topdown", ",", "onerror", ",", "followlinks", ")", ":", "yield", "x", "if", "not", "topdown", ":", "yield", "top", ",", "dirs", ",", "nondirs" ]
29.925926
19.185185
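Usage matches os.walk: it yields (dirpath, dirnames, filenames) tuples, top-down by default. A runnable check against a throwaway directory tree, assuming the definition above is in scope:

import os
import tempfile

root = tempfile.mkdtemp()
os.makedirs(os.path.join(root, 'sub'))
open(os.path.join(root, 'sub', 'a.txt'), 'w').close()

walked = list(os_walk_pre_35(root))
assert walked[0][0] == root        # the top directory comes first (topdown=True)
assert 'sub' in walked[0][1]       # its subdirectory is listed
assert walked[1][2] == ['a.txt']   # then the subdirectory's files are yielded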
def delete_snapshot(self, snapshot_id): """Remove a previously created snapshot.""" query = self.query_factory( action="DeleteSnapshot", creds=self.creds, endpoint=self.endpoint, other_params={"SnapshotId": snapshot_id}) d = query.submit() return d.addCallback(self.parser.truth_return)
[ "def", "delete_snapshot", "(", "self", ",", "snapshot_id", ")", ":", "query", "=", "self", ".", "query_factory", "(", "action", "=", "\"DeleteSnapshot\"", ",", "creds", "=", "self", ".", "creds", ",", "endpoint", "=", "self", ".", "endpoint", ",", "other_params", "=", "{", "\"SnapshotId\"", ":", "snapshot_id", "}", ")", "d", "=", "query", ".", "submit", "(", ")", "return", "d", ".", "addCallback", "(", "self", ".", "parser", ".", "truth_return", ")" ]
48
12.142857
def _decrypt(private_key, ciphertext, padding): """ Decrypts RSA ciphertext using a private key :param private_key: A PrivateKey object :param ciphertext: The ciphertext - a byte string :param padding: The padding mode to use, specified as a kSecPadding*Key value :raises: ValueError - when any of the parameters contain an invalid value TypeError - when any of the parameters are of the wrong type OSError - when an error is returned by the OS crypto library :return: A byte string of the plaintext """ if not isinstance(private_key, PrivateKey): raise TypeError(pretty_message( ''' private_key must be an instance of the PrivateKey class, not %s ''', type_name(private_key) )) if not isinstance(ciphertext, byte_cls): raise TypeError(pretty_message( ''' ciphertext must be a byte string, not %s ''', type_name(ciphertext) )) if not padding: raise ValueError('padding must be specified') cf_data = None sec_transform = None try: cf_data = CFHelpers.cf_data_from_bytes(ciphertext) error_pointer = new(CoreFoundation, 'CFErrorRef *') sec_transform = Security.SecDecryptTransformCreate( private_key.sec_key_ref, error_pointer ) handle_cf_error(error_pointer) Security.SecTransformSetAttribute( sec_transform, Security.kSecPaddingKey, padding, error_pointer ) handle_cf_error(error_pointer) Security.SecTransformSetAttribute( sec_transform, Security.kSecTransformInputAttributeName, cf_data, error_pointer ) handle_cf_error(error_pointer) plaintext = Security.SecTransformExecute(sec_transform, error_pointer) handle_cf_error(error_pointer) return CFHelpers.cf_data_to_bytes(plaintext) finally: if cf_data: CoreFoundation.CFRelease(cf_data) if sec_transform: CoreFoundation.CFRelease(sec_transform)
[ "def", "_decrypt", "(", "private_key", ",", "ciphertext", ",", "padding", ")", ":", "if", "not", "isinstance", "(", "private_key", ",", "PrivateKey", ")", ":", "raise", "TypeError", "(", "pretty_message", "(", "'''\n private_key must be an instance of the PrivateKey class, not %s\n '''", ",", "type_name", "(", "private_key", ")", ")", ")", "if", "not", "isinstance", "(", "ciphertext", ",", "byte_cls", ")", ":", "raise", "TypeError", "(", "pretty_message", "(", "'''\n ciphertext must be a byte string, not %s\n '''", ",", "type_name", "(", "ciphertext", ")", ")", ")", "if", "not", "padding", ":", "raise", "ValueError", "(", "'padding must be specified'", ")", "cf_data", "=", "None", "sec_transform", "=", "None", "try", ":", "cf_data", "=", "CFHelpers", ".", "cf_data_from_bytes", "(", "ciphertext", ")", "error_pointer", "=", "new", "(", "CoreFoundation", ",", "'CFErrorRef *'", ")", "sec_transform", "=", "Security", ".", "SecDecryptTransformCreate", "(", "private_key", ".", "sec_key_ref", ",", "error_pointer", ")", "handle_cf_error", "(", "error_pointer", ")", "Security", ".", "SecTransformSetAttribute", "(", "sec_transform", ",", "Security", ".", "kSecPaddingKey", ",", "padding", ",", "error_pointer", ")", "handle_cf_error", "(", "error_pointer", ")", "Security", ".", "SecTransformSetAttribute", "(", "sec_transform", ",", "Security", ".", "kSecTransformInputAttributeName", ",", "cf_data", ",", "error_pointer", ")", "handle_cf_error", "(", "error_pointer", ")", "plaintext", "=", "Security", ".", "SecTransformExecute", "(", "sec_transform", ",", "error_pointer", ")", "handle_cf_error", "(", "error_pointer", ")", "return", "CFHelpers", ".", "cf_data_to_bytes", "(", "plaintext", ")", "finally", ":", "if", "cf_data", ":", "CoreFoundation", ".", "CFRelease", "(", "cf_data", ")", "if", "sec_transform", ":", "CoreFoundation", ".", "CFRelease", "(", "sec_transform", ")" ]
26.95
20.75
def do_set(parser, token): '''Calls an arbitrary method on an object.''' code = token.contents firstspace = code.find(' ') if firstspace >= 0: code = code[firstspace+1:] return Setter(code)
[ "def", "do_set", "(", "parser", ",", "token", ")", ":", "code", "=", "token", ".", "contents", "firstspace", "=", "code", ".", "find", "(", "' '", ")", "if", "firstspace", ">=", "0", ":", "code", "=", "code", "[", "firstspace", "+", "1", ":", "]", "return", "Setter", "(", "code", ")" ]
28.142857
13.857143
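The tag body manipulation just strips the first whitespace-delimited word (the tag name itself) from token.contents before handing the rest to Setter. The slicing behaves like this:

code = 'set user.profile.update(name)'   # hypothetical tag contents
firstspace = code.find(' ')
if firstspace >= 0:
    code = code[firstspace + 1:]
assert code == 'user.profile.update(name)'   # tag name 'set' removed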
def _mine_flush(self, load, skip_verify=False): ''' Allow the minion to delete all of its own mine contents ''' if not skip_verify and 'id' not in load: return False if self.opts.get('minion_data_cache', False) or self.opts.get('enforce_mine_cache', False): return self.cache.flush('minions/{0}'.format(load['id']), 'mine') return True
[ "def", "_mine_flush", "(", "self", ",", "load", ",", "skip_verify", "=", "False", ")", ":", "if", "not", "skip_verify", "and", "'id'", "not", "in", "load", ":", "return", "False", "if", "self", ".", "opts", ".", "get", "(", "'minion_data_cache'", ",", "False", ")", "or", "self", ".", "opts", ".", "get", "(", "'enforce_mine_cache'", ",", "False", ")", ":", "return", "self", ".", "cache", ".", "flush", "(", "'minions/{0}'", ".", "format", "(", "load", "[", "'id'", "]", ")", ",", "'mine'", ")", "return", "True" ]
44.333333
25.444444
def get_syn_ids_by_lemma(self, lemma): """Returns a list of synset IDs based on a lemma""" if not isinstance(lemma,unicode): lemma = unicode(lemma,'utf-8') http, resp, content = self.connect() params = "" fragment = "" path = "cdb_syn" if self.debug: printf( "cornettodb/views/query_remote_syn_lemma: db_opt: %s" % path ) query_opt = "dict_search" if self.debug: printf( "cornettodb/views/query_remote_syn_lemma: query_opt: %s" % query_opt ) qdict = {} qdict[ "action" ] = "queryList" qdict[ "word" ] = lemma.encode('utf-8') query = urllib.urlencode( qdict ) db_url_tuple = ( self.scheme, self.host + ':' + str(self.port), path, params, query, fragment ) db_url = urlparse.urlunparse( db_url_tuple ) if self.debug: printf( "db_url: %s" % db_url ) resp, content = http.request( db_url, "GET" ) if self.debug: printf( "resp:\n%s" % resp ) printf( "content:\n%s" % content ) # printf( "content is of type: %s" % type( content ) ) dict_list = [] dict_list = eval( content ) # string to list synsets = [] items = len( dict_list ) if self.debug: printf( "items: %d" % items ) # syn dict: like lu dict, but without pos: part-of-speech for dict in dict_list: if self.debug: printf( dict ) seq_nr = dict[ "seq_nr" ] # sense number value = dict[ "value" ] # lexical unit identifier form = dict[ "form" ] # lemma label = dict[ "label" ] # label to be shown if self.debug: printf( "seq_nr: %s" % seq_nr ) printf( "value: %s" % value ) printf( "form: %s" % form ) printf( "label: %s" % label ) if value != "": synsets.append( value ) return synsets
[ "def", "get_syn_ids_by_lemma", "(", "self", ",", "lemma", ")", ":", "if", "not", "isinstance", "(", "lemma", ",", "unicode", ")", ":", "lemma", "=", "unicode", "(", "lemma", ",", "'utf-8'", ")", "http", ",", "resp", ",", "content", "=", "self", ".", "connect", "(", ")", "params", "=", "\"\"", "fragment", "=", "\"\"", "path", "=", "\"cdb_syn\"", "if", "self", ".", "debug", ":", "printf", "(", "\"cornettodb/views/query_remote_syn_lemma: db_opt: %s\"", "%", "path", ")", "query_opt", "=", "\"dict_search\"", "if", "self", ".", "debug", ":", "printf", "(", "\"cornettodb/views/query_remote_syn_lemma: query_opt: %s\"", "%", "query_opt", ")", "qdict", "=", "{", "}", "qdict", "[", "\"action\"", "]", "=", "\"queryList\"", "qdict", "[", "\"word\"", "]", "=", "lemma", ".", "encode", "(", "'utf-8'", ")", "query", "=", "urllib", ".", "urlencode", "(", "qdict", ")", "db_url_tuple", "=", "(", "self", ".", "scheme", ",", "self", ".", "host", "+", "':'", "+", "str", "(", "self", ".", "port", ")", ",", "path", ",", "params", ",", "query", ",", "fragment", ")", "db_url", "=", "urlparse", ".", "urlunparse", "(", "db_url_tuple", ")", "if", "self", ".", "debug", ":", "printf", "(", "\"db_url: %s\"", "%", "db_url", ")", "resp", ",", "content", "=", "http", ".", "request", "(", "db_url", ",", "\"GET\"", ")", "if", "self", ".", "debug", ":", "printf", "(", "\"resp:\\n%s\"", "%", "resp", ")", "printf", "(", "\"content:\\n%s\"", "%", "content", ")", "# printf( \"content is of type: %s\" % type( content ) )", "dict_list", "=", "[", "]", "dict_list", "=", "eval", "(", "content", ")", "# string to list", "synsets", "=", "[", "]", "items", "=", "len", "(", "dict_list", ")", "if", "self", ".", "debug", ":", "printf", "(", "\"items: %d\"", "%", "items", ")", "# syn dict: like lu dict, but without pos: part-of-speech", "for", "dict", "in", "dict_list", ":", "if", "self", ".", "debug", ":", "printf", "(", "dict", ")", "seq_nr", "=", "dict", "[", "\"seq_nr\"", "]", "# sense number", "value", "=", "dict", "[", "\"value\"", "]", "# lexical unit identifier", "form", "=", "dict", "[", "\"form\"", "]", "# lemma", "label", "=", "dict", "[", "\"label\"", "]", "# label to be shown", "if", "self", ".", "debug", ":", "printf", "(", "\"seq_nr: %s\"", "%", "seq_nr", ")", "printf", "(", "\"value: %s\"", "%", "value", ")", "printf", "(", "\"form: %s\"", "%", "form", ")", "printf", "(", "\"label: %s\"", "%", "label", ")", "if", "value", "!=", "\"\"", ":", "synsets", ".", "append", "(", "value", ")", "return", "synsets" ]
30.861538
20.661538
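The response body is a Python-literal list of dicts, and the method reduces it to the non-empty 'value' fields. A sketch of that parsing step on a canned payload; it uses ast.literal_eval, the safer alternative to the eval call in the original (the sample record is hypothetical):

import ast

content = "[{'seq_nr': '1', 'value': 'd_n-123', 'form': 'kat', 'label': 'kat (1)'}]"
dict_list = ast.literal_eval(content)   # string to list, without executing code
synsets = [d['value'] for d in dict_list if d['value'] != '']
assert synsets == ['d_n-123']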
def get(self, ring, angle): """Get RGB color tuple of color at index pixel""" pixel = self.angleToPixel(angle, ring) return self._get_base(pixel)
[ "def", "get", "(", "self", ",", "ring", ",", "angle", ")", ":", "pixel", "=", "self", ".", "angleToPixel", "(", "angle", ",", "ring", ")", "return", "self", ".", "_get_base", "(", "pixel", ")" ]
41.5
5.75
def _get_function_inputs(f, src_kwargs): """Filters inputs to be compatible with function `f`'s signature. Args: f: Function according to whose input signature we filter arguments. src_kwargs: Keyword arguments to filter according to `f`. Returns: kwargs: Dict of key-value pairs in `src_kwargs` which exist in `f`'s signature. """ if hasattr(f, "_func"): # functions returned by tf.make_template f = f._func # pylint: disable=protected-access try: # getargspec was deprecated in Python 3.6 argspec = inspect.getfullargspec(f) except AttributeError: argspec = inspect.getargspec(f) fkwargs = {k: v for k, v in six.iteritems(src_kwargs) if k in argspec.args} return fkwargs
[ "def", "_get_function_inputs", "(", "f", ",", "src_kwargs", ")", ":", "if", "hasattr", "(", "f", ",", "\"_func\"", ")", ":", "# functions returned by tf.make_template", "f", "=", "f", ".", "_func", "# pylint: disable=protected-access", "try", ":", "# getargspec was deprecated in Python 3.6", "argspec", "=", "inspect", ".", "getfullargspec", "(", "f", ")", "except", "AttributeError", ":", "argspec", "=", "inspect", ".", "getargspec", "(", "f", ")", "fkwargs", "=", "{", "k", ":", "v", "for", "k", ",", "v", "in", "six", ".", "iteritems", "(", "src_kwargs", ")", "if", "k", "in", "argspec", ".", "args", "}", "return", "fkwargs" ]
33.666667
21.952381
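The effect is plain kwargs filtering against a callable's declared argument names. A minimal check of the same mechanism with getfullargspec (assuming Python 3, so the getargspec fallback is not needed):

import inspect

def f(a, b):
    return a + b

src_kwargs = {'a': 1, 'b': 2, 'c': 3}
argspec = inspect.getfullargspec(f)
# keep only the keys that appear in f's signature
fkwargs = {k: v for k, v in src_kwargs.items() if k in argspec.args}
assert fkwargs == {'a': 1, 'b': 2}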
def exactly_n(l, n=1): ''' Tests that exactly N items in an iterable are "truthy" (neither None, False, nor 0). ''' i = iter(l) return all(any(i) for j in range(n)) and not any(i)
[ "def", "exactly_n", "(", "l", ",", "n", "=", "1", ")", ":", "i", "=", "iter", "(", "l", ")", "return", "all", "(", "any", "(", "i", ")", "for", "j", "in", "range", "(", "n", ")", ")", "and", "not", "any", "(", "i", ")" ]
28.142857
25.571429
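The trick is that all the any() calls share the single iterator i: each any(i) consumes items up to and including the next truthy one, so n truthy items satisfy the first clause, and the trailing not any(i) fails if any further truthy item remains. A few checks, assuming the definition above is in scope:

assert exactly_n([0, 1, 0, 1, 0], n=2)   # exactly two truthy items
assert not exactly_n([1, 1, 1], n=2)     # a third truthy item trips `not any(i)`
assert not exactly_n([1], n=2)           # too few truthy items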
def check_directory(placeholders): """ Find out, and create if needed, the directory in which the feed will be downloaded """ feed = placeholders.feed args = feed.args placeholders.directory = "This very directory" # wink, wink placeholders.fullpath = os.path.join( placeholders.directory, placeholders.filename) try: if args["downloaddirectory"]: ensure_dir(args["downloaddirectory"]) placeholders.directory = args["downloaddirectory"] except KeyError: pass download_path = os.path.expanduser( feed.retrieve_config("Download Directory", "~/Podcasts")) subdirectory = feed.retrieve_config( "Create subdirectory", "no") if "no" in subdirectory: placeholders.directory = download_path elif "yes" in subdirectory: subdnametemplate = feed.retrieve_config( "subdirectory_name", "{podcasttitle}") subdname = substitute_placeholders( subdnametemplate, placeholders) placeholders.directory = os.path.join(download_path, subdname) ensure_dir(placeholders.directory) placeholders.fullpath = os.path.join( placeholders.directory, placeholders.filename) return placeholders
[ "def", "check_directory", "(", "placeholders", ")", ":", "feed", "=", "placeholders", ".", "feed", "args", "=", "feed", ".", "args", "placeholders", ".", "directory", "=", "\"This very directory\"", "# wink, wink", "placeholders", ".", "fullpath", "=", "os", ".", "path", ".", "join", "(", "placeholders", ".", "directory", ",", "placeholders", ".", "filename", ")", "try", ":", "if", "args", "[", "\"downloaddirectory\"", "]", ":", "ensure_dir", "(", "args", "[", "\"downloaddirectory\"", "]", ")", "placeholders", ".", "directory", "=", "args", "[", "\"downloaddirectory\"", "]", "except", "KeyError", ":", "pass", "download_path", "=", "os", ".", "path", ".", "expanduser", "(", "feed", ".", "retrieve_config", "(", "\"Download Directory\"", ",", "\"~/Podcasts\"", ")", ")", "subdirectory", "=", "feed", ".", "retrieve_config", "(", "\"Create subdirectory\"", ",", "\"no\"", ")", "if", "\"no\"", "in", "subdirectory", ":", "placeholders", ".", "directory", "=", "download_path", "elif", "\"yes\"", "in", "subdirectory", ":", "subdnametemplate", "=", "feed", ".", "retrieve_config", "(", "\"subdirectory_name\"", ",", "\"{podcasttitle}\"", ")", "subdname", "=", "substitute_placeholders", "(", "subdnametemplate", ",", "placeholders", ")", "placeholders", ".", "directory", "=", "os", ".", "path", ".", "join", "(", "download_path", ",", "subdname", ")", "ensure_dir", "(", "placeholders", ".", "directory", ")", "placeholders", ".", "fullpath", "=", "os", ".", "path", ".", "join", "(", "placeholders", ".", "directory", ",", "placeholders", ".", "filename", ")", "return", "placeholders" ]
38.34375
11.09375
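ensure_dir is a project helper that is not shown here; a reasonable stand-in (an assumption, not the project's actual code) is the usual idempotent mkdir, which makes the double call in check_directory safe:

import os
import tempfile

def ensure_dir(path):
    # hypothetical stand-in for the project's ensure_dir helper:
    # create the directory if missing, do nothing if it already exists
    os.makedirs(path, exist_ok=True)

base = tempfile.mkdtemp()
target = os.path.join(base, 'Podcasts', 'My Show')  # download dir + per-feed subdirectory
ensure_dir(target)
ensure_dir(target)   # safe to call repeatedly
assert os.path.isdir(target)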
def _auth(self, load):
    '''
    Authenticate the client, use the sent public key to encrypt the AES key
    which was generated at start up.

    This method fires an event over the master event manager. The event is
    tagged "auth" and returns a dict with information about the auth
    event

    # Verify that the key we are receiving matches the stored key
    # Store the key if it is not there
    # Make an RSA key with the pub key
    # Encrypt the AES key as an encrypted salt.payload
    # Package the return and return it
    '''

    if not salt.utils.verify.valid_id(self.opts, load['id']):
        log.info('Authentication request from invalid id %s', load['id'])
        return {'enc': 'clear',
                'load': {'ret': False}}
    log.info('Authentication request from %s', load['id'])

    # 0 is default which should be 'unlimited'
    if self.opts['max_minions'] > 0:
        # use the ConCache if enabled, else use the minion utils
        if self.cache_cli:
            minions = self.cache_cli.get_cached()
        else:
            minions = self.ckminions.connected_ids()
            if len(minions) > 1000:
                log.info('With large numbers of minions it is advised '
                         'to enable the ConCache with \'con_cache: True\' '
                         'in the masters configuration file.')

        if not len(minions) <= self.opts['max_minions']:
            # we reject new minions, minions that are already
            # connected must be allowed for the mine, highstate, etc.
            if load['id'] not in minions:
                msg = ('Too many minions connected (max_minions={0}). '
                       'Rejecting connection from id '
                       '{1}'.format(self.opts['max_minions'],
                                    load['id']))
                log.info(msg)
                eload = {'result': False,
                         'act': 'full',
                         'id': load['id'],
                         'pub': load['pub']}

                if self.opts.get('auth_events') is True:
                    self.event.fire_event(eload, salt.utils.event.tagify(prefix='auth'))
                return {'enc': 'clear',
                        'load': {'ret': 'full'}}

    # Check if key is configured to be auto-rejected/signed
    auto_reject = self.auto_key.check_autoreject(load['id'])
    auto_sign = self.auto_key.check_autosign(load['id'], load.get(u'autosign_grains', None))

    pubfn = os.path.join(self.opts['pki_dir'],
                         'minions',
                         load['id'])
    pubfn_pend = os.path.join(self.opts['pki_dir'],
                              'minions_pre',
                              load['id'])
    pubfn_rejected = os.path.join(self.opts['pki_dir'],
                                  'minions_rejected',
                                  load['id'])
    pubfn_denied = os.path.join(self.opts['pki_dir'],
                                'minions_denied',
                                load['id'])
    if self.opts['open_mode']:
        # open mode is turned on, nuts to checks and overwrite whatever
        # is there
        pass
    elif os.path.isfile(pubfn_rejected):
        # The key has been rejected, don't place it in pending
        log.info('Public key rejected for %s. Key is present in '
                 'rejection key dir.', load['id'])
        eload = {'result': False,
                 'id': load['id'],
                 'pub': load['pub']}
        if self.opts.get('auth_events') is True:
            self.event.fire_event(eload, salt.utils.event.tagify(prefix='auth'))
        return {'enc': 'clear',
                'load': {'ret': False}}

    elif os.path.isfile(pubfn):
        # The key has been accepted, check it
        with salt.utils.files.fopen(pubfn, 'r') as pubfn_handle:
            if pubfn_handle.read().strip() != load['pub'].strip():
                log.error(
                    'Authentication attempt from %s failed, the public '
                    'keys did not match. This may be an attempt to compromise '
                    'the Salt cluster.', load['id']
                )
                # put denied minion key into minions_denied
                with salt.utils.files.fopen(pubfn_denied, 'w+') as fp_:
                    fp_.write(load['pub'])
                eload = {'result': False,
                         'id': load['id'],
                         'act': 'denied',
                         'pub': load['pub']}
                if self.opts.get('auth_events') is True:
                    self.event.fire_event(eload, salt.utils.event.tagify(prefix='auth'))
                return {'enc': 'clear',
                        'load': {'ret': False}}

    elif not os.path.isfile(pubfn_pend):
        # The key has not been accepted, this is a new minion
        if os.path.isdir(pubfn_pend):
            # The key path is a directory, error out
            log.info('New public key %s is a directory', load['id'])
            eload = {'result': False,
                     'id': load['id'],
                     'pub': load['pub']}
            if self.opts.get('auth_events') is True:
                self.event.fire_event(eload, salt.utils.event.tagify(prefix='auth'))
            return {'enc': 'clear',
                    'load': {'ret': False}}

        if auto_reject:
            key_path = pubfn_rejected
            log.info('New public key for %s rejected via autoreject_file', load['id'])
            key_act = 'reject'
            key_result = False
        elif not auto_sign:
            key_path = pubfn_pend
            log.info('New public key for %s placed in pending', load['id'])
            key_act = 'pend'
            key_result = True
        else:
            # The key is being automatically accepted, don't do anything
            # here and let the auto accept logic below handle it.
            key_path = None

        if key_path is not None:
            # Write the key to the appropriate location
            with salt.utils.files.fopen(key_path, 'w+') as fp_:
                fp_.write(load['pub'])
            ret = {'enc': 'clear',
                   'load': {'ret': key_result}}
            eload = {'result': key_result,
                     'act': key_act,
                     'id': load['id'],
                     'pub': load['pub']}
            if self.opts.get('auth_events') is True:
                self.event.fire_event(eload, salt.utils.event.tagify(prefix='auth'))
            return ret

    elif os.path.isfile(pubfn_pend):
        # This key is in the pending dir and is awaiting acceptance
        if auto_reject:
            # We don't care if the keys match, this minion is being
            # auto-rejected. Move the key file from the pending dir to the
            # rejected dir.
            try:
                shutil.move(pubfn_pend, pubfn_rejected)
            except (IOError, OSError):
                pass
            log.info('Pending public key for %s rejected via '
                     'autoreject_file', load['id'])
            ret = {'enc': 'clear',
                   'load': {'ret': False}}
            eload = {'result': False,
                     'act': 'reject',
                     'id': load['id'],
                     'pub': load['pub']}
            if self.opts.get('auth_events') is True:
                self.event.fire_event(eload, salt.utils.event.tagify(prefix='auth'))
            return ret

        elif not auto_sign:
            # This key is in the pending dir and is not being auto-signed.
            # Check if the keys are the same and error out if this is the
            # case. Otherwise log the fact that the minion is still
            # pending.
            with salt.utils.files.fopen(pubfn_pend, 'r') as pubfn_handle:
                if pubfn_handle.read() != load['pub']:
                    log.error(
                        'Authentication attempt from %s failed, the public '
                        'key in pending did not match. This may be an '
                        'attempt to compromise the Salt cluster.',
                        load['id']
                    )
                    # put denied minion key into minions_denied
                    with salt.utils.files.fopen(pubfn_denied, 'w+') as fp_:
                        fp_.write(load['pub'])
                    eload = {'result': False,
                             'id': load['id'],
                             'act': 'denied',
                             'pub': load['pub']}
                    if self.opts.get('auth_events') is True:
                        self.event.fire_event(eload, salt.utils.event.tagify(prefix='auth'))
                    return {'enc': 'clear',
                            'load': {'ret': False}}
                else:
                    log.info(
                        'Authentication failed from host %s, the key is in '
                        'pending and needs to be accepted with salt-key '
                        '-a %s', load['id'], load['id']
                    )
                    eload = {'result': True,
                             'act': 'pend',
                             'id': load['id'],
                             'pub': load['pub']}
                    if self.opts.get('auth_events') is True:
                        self.event.fire_event(eload, salt.utils.event.tagify(prefix='auth'))
                    return {'enc': 'clear',
                            'load': {'ret': True}}

        else:
            # This key is in pending and has been configured to be
            # auto-signed. Check to see if it is the same key, and if
            # so, pass on doing anything here, and let it get automatically
            # accepted below.
            with salt.utils.files.fopen(pubfn_pend, 'r') as pubfn_handle:
                if pubfn_handle.read() != load['pub']:
                    log.error(
                        'Authentication attempt from %s failed, the public '
                        'keys in pending did not match. This may be an '
                        'attempt to compromise the Salt cluster.',
                        load['id']
                    )
                    # put denied minion key into minions_denied
                    with salt.utils.files.fopen(pubfn_denied, 'w+') as fp_:
                        fp_.write(load['pub'])
                    eload = {'result': False,
                             'id': load['id'],
                             'pub': load['pub']}
                    if self.opts.get('auth_events') is True:
                        self.event.fire_event(eload, salt.utils.event.tagify(prefix='auth'))
                    return {'enc': 'clear',
                            'load': {'ret': False}}
                else:
                    os.remove(pubfn_pend)

    else:
        # Something happened that I have not accounted for, FAIL!
        log.warning('Unaccounted for authentication failure')
        eload = {'result': False,
                 'id': load['id'],
                 'pub': load['pub']}
        if self.opts.get('auth_events') is True:
            self.event.fire_event(eload, salt.utils.event.tagify(prefix='auth'))
        return {'enc': 'clear',
                'load': {'ret': False}}

    log.info('Authentication accepted from %s', load['id'])
    # only write to disk if you are adding the file, and in open mode,
    # which implies we accept any key from a minion.
if not os.path.isfile(pubfn) and not self.opts['open_mode']: with salt.utils.files.fopen(pubfn, 'w+') as fp_: fp_.write(load['pub']) elif self.opts['open_mode']: disk_key = '' if os.path.isfile(pubfn): with salt.utils.files.fopen(pubfn, 'r') as fp_: disk_key = fp_.read() if load['pub'] and load['pub'] != disk_key: log.debug('Host key change detected in open mode.') with salt.utils.files.fopen(pubfn, 'w+') as fp_: fp_.write(load['pub']) elif not load['pub']: log.error('Public key is empty: %s', load['id']) return {'enc': 'clear', 'load': {'ret': False}} pub = None # the con_cache is enabled, send the minion id to the cache if self.cache_cli: self.cache_cli.put_cache([load['id']]) # The key payload may sometimes be corrupt when using auto-accept # and an empty request comes in try: pub = salt.crypt.get_rsa_pub_key(pubfn) except (ValueError, IndexError, TypeError) as err: log.error('Corrupt public key "%s": %s', pubfn, err) return {'enc': 'clear', 'load': {'ret': False}} if not HAS_M2: cipher = PKCS1_OAEP.new(pub) ret = {'enc': 'pub', 'pub_key': self.master_key.get_pub_str(), 'publish_port': self.opts['publish_port']} # sign the master's pubkey (if enabled) before it is # sent to the minion that was just authenticated if self.opts['master_sign_pubkey']: # append the pre-computed signature to the auth-reply if self.master_key.pubkey_signature(): log.debug('Adding pubkey signature to auth-reply') log.debug(self.master_key.pubkey_signature()) ret.update({'pub_sig': self.master_key.pubkey_signature()}) else: # the master has its own signing-keypair, compute the master.pub's # signature and append that to the auth-reply # get the key_pass for the signing key key_pass = salt.utils.sdb.sdb_get(self.opts['signing_key_pass'], self.opts) log.debug("Signing master public key before sending") pub_sign = salt.crypt.sign_message(self.master_key.get_sign_paths()[1], ret['pub_key'], key_pass) ret.update({'pub_sig': binascii.b2a_base64(pub_sign)}) if not HAS_M2: mcipher = PKCS1_OAEP.new(self.master_key.key) if self.opts['auth_mode'] >= 2: if 'token' in load: try: if HAS_M2: mtoken = self.master_key.key.private_decrypt(load['token'], RSA.pkcs1_oaep_padding) else: mtoken = mcipher.decrypt(load['token']) aes = '{0}_|-{1}'.format(salt.master.SMaster.secrets['aes']['secret'].value, mtoken) except Exception: # Token failed to decrypt, send back the salty bacon to # support older minions pass else: aes = salt.master.SMaster.secrets['aes']['secret'].value if HAS_M2: ret['aes'] = pub.public_encrypt(aes, RSA.pkcs1_oaep_padding) else: ret['aes'] = cipher.encrypt(aes) else: if 'token' in load: try: if HAS_M2: mtoken = self.master_key.key.private_decrypt(load['token'], RSA.pkcs1_oaep_padding) ret['token'] = pub.public_encrypt(mtoken, RSA.pkcs1_oaep_padding) else: mtoken = mcipher.decrypt(load['token']) ret['token'] = cipher.encrypt(mtoken) except Exception: # Token failed to decrypt, send back the salty bacon to # support older minions pass aes = salt.master.SMaster.secrets['aes']['secret'].value if HAS_M2: ret['aes'] = pub.public_encrypt(aes, RSA.pkcs1_oaep_padding) else: ret['aes'] = cipher.encrypt(aes) # Be aggressive about the signature digest = salt.utils.stringutils.to_bytes(hashlib.sha256(aes).hexdigest()) ret['sig'] = salt.crypt.private_encrypt(self.master_key.key, digest) eload = {'result': True, 'act': 'accept', 'id': load['id'], 'pub': load['pub']} if self.opts.get('auth_events') is True: self.event.fire_event(eload, salt.utils.event.tagify(prefix='auth')) return ret
[ "def", "_auth", "(", "self", ",", "load", ")", ":", "if", "not", "salt", ".", "utils", ".", "verify", ".", "valid_id", "(", "self", ".", "opts", ",", "load", "[", "'id'", "]", ")", ":", "log", ".", "info", "(", "'Authentication request from invalid id %s'", ",", "load", "[", "'id'", "]", ")", "return", "{", "'enc'", ":", "'clear'", ",", "'load'", ":", "{", "'ret'", ":", "False", "}", "}", "log", ".", "info", "(", "'Authentication request from %s'", ",", "load", "[", "'id'", "]", ")", "# 0 is default which should be 'unlimited'", "if", "self", ".", "opts", "[", "'max_minions'", "]", ">", "0", ":", "# use the ConCache if enabled, else use the minion utils", "if", "self", ".", "cache_cli", ":", "minions", "=", "self", ".", "cache_cli", ".", "get_cached", "(", ")", "else", ":", "minions", "=", "self", ".", "ckminions", ".", "connected_ids", "(", ")", "if", "len", "(", "minions", ")", ">", "1000", ":", "log", ".", "info", "(", "'With large numbers of minions it is advised '", "'to enable the ConCache with \\'con_cache: True\\' '", "'in the masters configuration file.'", ")", "if", "not", "len", "(", "minions", ")", "<=", "self", ".", "opts", "[", "'max_minions'", "]", ":", "# we reject new minions, minions that are already", "# connected must be allowed for the mine, highstate, etc.", "if", "load", "[", "'id'", "]", "not", "in", "minions", ":", "msg", "=", "(", "'Too many minions connected (max_minions={0}). '", "'Rejecting connection from id '", "'{1}'", ".", "format", "(", "self", ".", "opts", "[", "'max_minions'", "]", ",", "load", "[", "'id'", "]", ")", ")", "log", ".", "info", "(", "msg", ")", "eload", "=", "{", "'result'", ":", "False", ",", "'act'", ":", "'full'", ",", "'id'", ":", "load", "[", "'id'", "]", ",", "'pub'", ":", "load", "[", "'pub'", "]", "}", "if", "self", ".", "opts", ".", "get", "(", "'auth_events'", ")", "is", "True", ":", "self", ".", "event", ".", "fire_event", "(", "eload", ",", "salt", ".", "utils", ".", "event", ".", "tagify", "(", "prefix", "=", "'auth'", ")", ")", "return", "{", "'enc'", ":", "'clear'", ",", "'load'", ":", "{", "'ret'", ":", "'full'", "}", "}", "# Check if key is configured to be auto-rejected/signed", "auto_reject", "=", "self", ".", "auto_key", ".", "check_autoreject", "(", "load", "[", "'id'", "]", ")", "auto_sign", "=", "self", ".", "auto_key", ".", "check_autosign", "(", "load", "[", "'id'", "]", ",", "load", ".", "get", "(", "u'autosign_grains'", ",", "None", ")", ")", "pubfn", "=", "os", ".", "path", ".", "join", "(", "self", ".", "opts", "[", "'pki_dir'", "]", ",", "'minions'", ",", "load", "[", "'id'", "]", ")", "pubfn_pend", "=", "os", ".", "path", ".", "join", "(", "self", ".", "opts", "[", "'pki_dir'", "]", ",", "'minions_pre'", ",", "load", "[", "'id'", "]", ")", "pubfn_rejected", "=", "os", ".", "path", ".", "join", "(", "self", ".", "opts", "[", "'pki_dir'", "]", ",", "'minions_rejected'", ",", "load", "[", "'id'", "]", ")", "pubfn_denied", "=", "os", ".", "path", ".", "join", "(", "self", ".", "opts", "[", "'pki_dir'", "]", ",", "'minions_denied'", ",", "load", "[", "'id'", "]", ")", "if", "self", ".", "opts", "[", "'open_mode'", "]", ":", "# open mode is turned on, nuts to checks and overwrite whatever", "# is there", "pass", "elif", "os", ".", "path", ".", "isfile", "(", "pubfn_rejected", ")", ":", "# The key has been rejected, don't place it in pending", "log", ".", "info", "(", "'Public key rejected for %s. 
Key is present in '", "'rejection key dir.'", ",", "load", "[", "'id'", "]", ")", "eload", "=", "{", "'result'", ":", "False", ",", "'id'", ":", "load", "[", "'id'", "]", ",", "'pub'", ":", "load", "[", "'pub'", "]", "}", "if", "self", ".", "opts", ".", "get", "(", "'auth_events'", ")", "is", "True", ":", "self", ".", "event", ".", "fire_event", "(", "eload", ",", "salt", ".", "utils", ".", "event", ".", "tagify", "(", "prefix", "=", "'auth'", ")", ")", "return", "{", "'enc'", ":", "'clear'", ",", "'load'", ":", "{", "'ret'", ":", "False", "}", "}", "elif", "os", ".", "path", ".", "isfile", "(", "pubfn", ")", ":", "# The key has been accepted, check it", "with", "salt", ".", "utils", ".", "files", ".", "fopen", "(", "pubfn", ",", "'r'", ")", "as", "pubfn_handle", ":", "if", "pubfn_handle", ".", "read", "(", ")", ".", "strip", "(", ")", "!=", "load", "[", "'pub'", "]", ".", "strip", "(", ")", ":", "log", ".", "error", "(", "'Authentication attempt from %s failed, the public '", "'keys did not match. This may be an attempt to compromise '", "'the Salt cluster.'", ",", "load", "[", "'id'", "]", ")", "# put denied minion key into minions_denied", "with", "salt", ".", "utils", ".", "files", ".", "fopen", "(", "pubfn_denied", ",", "'w+'", ")", "as", "fp_", ":", "fp_", ".", "write", "(", "load", "[", "'pub'", "]", ")", "eload", "=", "{", "'result'", ":", "False", ",", "'id'", ":", "load", "[", "'id'", "]", ",", "'act'", ":", "'denied'", ",", "'pub'", ":", "load", "[", "'pub'", "]", "}", "if", "self", ".", "opts", ".", "get", "(", "'auth_events'", ")", "is", "True", ":", "self", ".", "event", ".", "fire_event", "(", "eload", ",", "salt", ".", "utils", ".", "event", ".", "tagify", "(", "prefix", "=", "'auth'", ")", ")", "return", "{", "'enc'", ":", "'clear'", ",", "'load'", ":", "{", "'ret'", ":", "False", "}", "}", "elif", "not", "os", ".", "path", ".", "isfile", "(", "pubfn_pend", ")", ":", "# The key has not been accepted, this is a new minion", "if", "os", ".", "path", ".", "isdir", "(", "pubfn_pend", ")", ":", "# The key path is a directory, error out", "log", ".", "info", "(", "'New public key %s is a directory'", ",", "load", "[", "'id'", "]", ")", "eload", "=", "{", "'result'", ":", "False", ",", "'id'", ":", "load", "[", "'id'", "]", ",", "'pub'", ":", "load", "[", "'pub'", "]", "}", "if", "self", ".", "opts", ".", "get", "(", "'auth_events'", ")", "is", "True", ":", "self", ".", "event", ".", "fire_event", "(", "eload", ",", "salt", ".", "utils", ".", "event", ".", "tagify", "(", "prefix", "=", "'auth'", ")", ")", "return", "{", "'enc'", ":", "'clear'", ",", "'load'", ":", "{", "'ret'", ":", "False", "}", "}", "if", "auto_reject", ":", "key_path", "=", "pubfn_rejected", "log", ".", "info", "(", "'New public key for %s rejected via autoreject_file'", ",", "load", "[", "'id'", "]", ")", "key_act", "=", "'reject'", "key_result", "=", "False", "elif", "not", "auto_sign", ":", "key_path", "=", "pubfn_pend", "log", ".", "info", "(", "'New public key for %s placed in pending'", ",", "load", "[", "'id'", "]", ")", "key_act", "=", "'pend'", "key_result", "=", "True", "else", ":", "# The key is being automatically accepted, don't do anything", "# here and let the auto accept logic below handle it.", "key_path", "=", "None", "if", "key_path", "is", "not", "None", ":", "# Write the key to the appropriate location", "with", "salt", ".", "utils", ".", "files", ".", "fopen", "(", "key_path", ",", "'w+'", ")", "as", "fp_", ":", "fp_", ".", "write", "(", "load", "[", "'pub'", "]", ")", "ret", "=", "{", 
"'enc'", ":", "'clear'", ",", "'load'", ":", "{", "'ret'", ":", "key_result", "}", "}", "eload", "=", "{", "'result'", ":", "key_result", ",", "'act'", ":", "key_act", ",", "'id'", ":", "load", "[", "'id'", "]", ",", "'pub'", ":", "load", "[", "'pub'", "]", "}", "if", "self", ".", "opts", ".", "get", "(", "'auth_events'", ")", "is", "True", ":", "self", ".", "event", ".", "fire_event", "(", "eload", ",", "salt", ".", "utils", ".", "event", ".", "tagify", "(", "prefix", "=", "'auth'", ")", ")", "return", "ret", "elif", "os", ".", "path", ".", "isfile", "(", "pubfn_pend", ")", ":", "# This key is in the pending dir and is awaiting acceptance", "if", "auto_reject", ":", "# We don't care if the keys match, this minion is being", "# auto-rejected. Move the key file from the pending dir to the", "# rejected dir.", "try", ":", "shutil", ".", "move", "(", "pubfn_pend", ",", "pubfn_rejected", ")", "except", "(", "IOError", ",", "OSError", ")", ":", "pass", "log", ".", "info", "(", "'Pending public key for %s rejected via '", "'autoreject_file'", ",", "load", "[", "'id'", "]", ")", "ret", "=", "{", "'enc'", ":", "'clear'", ",", "'load'", ":", "{", "'ret'", ":", "False", "}", "}", "eload", "=", "{", "'result'", ":", "False", ",", "'act'", ":", "'reject'", ",", "'id'", ":", "load", "[", "'id'", "]", ",", "'pub'", ":", "load", "[", "'pub'", "]", "}", "if", "self", ".", "opts", ".", "get", "(", "'auth_events'", ")", "is", "True", ":", "self", ".", "event", ".", "fire_event", "(", "eload", ",", "salt", ".", "utils", ".", "event", ".", "tagify", "(", "prefix", "=", "'auth'", ")", ")", "return", "ret", "elif", "not", "auto_sign", ":", "# This key is in the pending dir and is not being auto-signed.", "# Check if the keys are the same and error out if this is the", "# case. Otherwise log the fact that the minion is still", "# pending.", "with", "salt", ".", "utils", ".", "files", ".", "fopen", "(", "pubfn_pend", ",", "'r'", ")", "as", "pubfn_handle", ":", "if", "pubfn_handle", ".", "read", "(", ")", "!=", "load", "[", "'pub'", "]", ":", "log", ".", "error", "(", "'Authentication attempt from %s failed, the public '", "'key in pending did not match. 
This may be an '", "'attempt to compromise the Salt cluster.'", ",", "load", "[", "'id'", "]", ")", "# put denied minion key into minions_denied", "with", "salt", ".", "utils", ".", "files", ".", "fopen", "(", "pubfn_denied", ",", "'w+'", ")", "as", "fp_", ":", "fp_", ".", "write", "(", "load", "[", "'pub'", "]", ")", "eload", "=", "{", "'result'", ":", "False", ",", "'id'", ":", "load", "[", "'id'", "]", ",", "'act'", ":", "'denied'", ",", "'pub'", ":", "load", "[", "'pub'", "]", "}", "if", "self", ".", "opts", ".", "get", "(", "'auth_events'", ")", "is", "True", ":", "self", ".", "event", ".", "fire_event", "(", "eload", ",", "salt", ".", "utils", ".", "event", ".", "tagify", "(", "prefix", "=", "'auth'", ")", ")", "return", "{", "'enc'", ":", "'clear'", ",", "'load'", ":", "{", "'ret'", ":", "False", "}", "}", "else", ":", "log", ".", "info", "(", "'Authentication failed from host %s, the key is in '", "'pending and needs to be accepted with salt-key '", "'-a %s'", ",", "load", "[", "'id'", "]", ",", "load", "[", "'id'", "]", ")", "eload", "=", "{", "'result'", ":", "True", ",", "'act'", ":", "'pend'", ",", "'id'", ":", "load", "[", "'id'", "]", ",", "'pub'", ":", "load", "[", "'pub'", "]", "}", "if", "self", ".", "opts", ".", "get", "(", "'auth_events'", ")", "is", "True", ":", "self", ".", "event", ".", "fire_event", "(", "eload", ",", "salt", ".", "utils", ".", "event", ".", "tagify", "(", "prefix", "=", "'auth'", ")", ")", "return", "{", "'enc'", ":", "'clear'", ",", "'load'", ":", "{", "'ret'", ":", "True", "}", "}", "else", ":", "# This key is in pending and has been configured to be", "# auto-signed. Check to see if it is the same key, and if", "# so, pass on doing anything here, and let it get automatically", "# accepted below.", "with", "salt", ".", "utils", ".", "files", ".", "fopen", "(", "pubfn_pend", ",", "'r'", ")", "as", "pubfn_handle", ":", "if", "pubfn_handle", ".", "read", "(", ")", "!=", "load", "[", "'pub'", "]", ":", "log", ".", "error", "(", "'Authentication attempt from %s failed, the public '", "'keys in pending did not match. 
This may be an '", "'attempt to compromise the Salt cluster.'", ",", "load", "[", "'id'", "]", ")", "# put denied minion key into minions_denied", "with", "salt", ".", "utils", ".", "files", ".", "fopen", "(", "pubfn_denied", ",", "'w+'", ")", "as", "fp_", ":", "fp_", ".", "write", "(", "load", "[", "'pub'", "]", ")", "eload", "=", "{", "'result'", ":", "False", ",", "'id'", ":", "load", "[", "'id'", "]", ",", "'pub'", ":", "load", "[", "'pub'", "]", "}", "if", "self", ".", "opts", ".", "get", "(", "'auth_events'", ")", "is", "True", ":", "self", ".", "event", ".", "fire_event", "(", "eload", ",", "salt", ".", "utils", ".", "event", ".", "tagify", "(", "prefix", "=", "'auth'", ")", ")", "return", "{", "'enc'", ":", "'clear'", ",", "'load'", ":", "{", "'ret'", ":", "False", "}", "}", "else", ":", "os", ".", "remove", "(", "pubfn_pend", ")", "else", ":", "# Something happened that I have not accounted for, FAIL!", "log", ".", "warning", "(", "'Unaccounted for authentication failure'", ")", "eload", "=", "{", "'result'", ":", "False", ",", "'id'", ":", "load", "[", "'id'", "]", ",", "'pub'", ":", "load", "[", "'pub'", "]", "}", "if", "self", ".", "opts", ".", "get", "(", "'auth_events'", ")", "is", "True", ":", "self", ".", "event", ".", "fire_event", "(", "eload", ",", "salt", ".", "utils", ".", "event", ".", "tagify", "(", "prefix", "=", "'auth'", ")", ")", "return", "{", "'enc'", ":", "'clear'", ",", "'load'", ":", "{", "'ret'", ":", "False", "}", "}", "log", ".", "info", "(", "'Authentication accepted from %s'", ",", "load", "[", "'id'", "]", ")", "# only write to disk if you are adding the file, and in open mode,", "# which implies we accept any key from a minion.", "if", "not", "os", ".", "path", ".", "isfile", "(", "pubfn", ")", "and", "not", "self", ".", "opts", "[", "'open_mode'", "]", ":", "with", "salt", ".", "utils", ".", "files", ".", "fopen", "(", "pubfn", ",", "'w+'", ")", "as", "fp_", ":", "fp_", ".", "write", "(", "load", "[", "'pub'", "]", ")", "elif", "self", ".", "opts", "[", "'open_mode'", "]", ":", "disk_key", "=", "''", "if", "os", ".", "path", ".", "isfile", "(", "pubfn", ")", ":", "with", "salt", ".", "utils", ".", "files", ".", "fopen", "(", "pubfn", ",", "'r'", ")", "as", "fp_", ":", "disk_key", "=", "fp_", ".", "read", "(", ")", "if", "load", "[", "'pub'", "]", "and", "load", "[", "'pub'", "]", "!=", "disk_key", ":", "log", ".", "debug", "(", "'Host key change detected in open mode.'", ")", "with", "salt", ".", "utils", ".", "files", ".", "fopen", "(", "pubfn", ",", "'w+'", ")", "as", "fp_", ":", "fp_", ".", "write", "(", "load", "[", "'pub'", "]", ")", "elif", "not", "load", "[", "'pub'", "]", ":", "log", ".", "error", "(", "'Public key is empty: %s'", ",", "load", "[", "'id'", "]", ")", "return", "{", "'enc'", ":", "'clear'", ",", "'load'", ":", "{", "'ret'", ":", "False", "}", "}", "pub", "=", "None", "# the con_cache is enabled, send the minion id to the cache", "if", "self", ".", "cache_cli", ":", "self", ".", "cache_cli", ".", "put_cache", "(", "[", "load", "[", "'id'", "]", "]", ")", "# The key payload may sometimes be corrupt when using auto-accept", "# and an empty request comes in", "try", ":", "pub", "=", "salt", ".", "crypt", ".", "get_rsa_pub_key", "(", "pubfn", ")", "except", "(", "ValueError", ",", "IndexError", ",", "TypeError", ")", "as", "err", ":", "log", ".", "error", "(", "'Corrupt public key \"%s\": %s'", ",", "pubfn", ",", "err", ")", "return", "{", "'enc'", ":", "'clear'", ",", "'load'", ":", "{", "'ret'", ":", "False", "}", "}", "if", 
"not", "HAS_M2", ":", "cipher", "=", "PKCS1_OAEP", ".", "new", "(", "pub", ")", "ret", "=", "{", "'enc'", ":", "'pub'", ",", "'pub_key'", ":", "self", ".", "master_key", ".", "get_pub_str", "(", ")", ",", "'publish_port'", ":", "self", ".", "opts", "[", "'publish_port'", "]", "}", "# sign the master's pubkey (if enabled) before it is", "# sent to the minion that was just authenticated", "if", "self", ".", "opts", "[", "'master_sign_pubkey'", "]", ":", "# append the pre-computed signature to the auth-reply", "if", "self", ".", "master_key", ".", "pubkey_signature", "(", ")", ":", "log", ".", "debug", "(", "'Adding pubkey signature to auth-reply'", ")", "log", ".", "debug", "(", "self", ".", "master_key", ".", "pubkey_signature", "(", ")", ")", "ret", ".", "update", "(", "{", "'pub_sig'", ":", "self", ".", "master_key", ".", "pubkey_signature", "(", ")", "}", ")", "else", ":", "# the master has its own signing-keypair, compute the master.pub's", "# signature and append that to the auth-reply", "# get the key_pass for the signing key", "key_pass", "=", "salt", ".", "utils", ".", "sdb", ".", "sdb_get", "(", "self", ".", "opts", "[", "'signing_key_pass'", "]", ",", "self", ".", "opts", ")", "log", ".", "debug", "(", "\"Signing master public key before sending\"", ")", "pub_sign", "=", "salt", ".", "crypt", ".", "sign_message", "(", "self", ".", "master_key", ".", "get_sign_paths", "(", ")", "[", "1", "]", ",", "ret", "[", "'pub_key'", "]", ",", "key_pass", ")", "ret", ".", "update", "(", "{", "'pub_sig'", ":", "binascii", ".", "b2a_base64", "(", "pub_sign", ")", "}", ")", "if", "not", "HAS_M2", ":", "mcipher", "=", "PKCS1_OAEP", ".", "new", "(", "self", ".", "master_key", ".", "key", ")", "if", "self", ".", "opts", "[", "'auth_mode'", "]", ">=", "2", ":", "if", "'token'", "in", "load", ":", "try", ":", "if", "HAS_M2", ":", "mtoken", "=", "self", ".", "master_key", ".", "key", ".", "private_decrypt", "(", "load", "[", "'token'", "]", ",", "RSA", ".", "pkcs1_oaep_padding", ")", "else", ":", "mtoken", "=", "mcipher", ".", "decrypt", "(", "load", "[", "'token'", "]", ")", "aes", "=", "'{0}_|-{1}'", ".", "format", "(", "salt", ".", "master", ".", "SMaster", ".", "secrets", "[", "'aes'", "]", "[", "'secret'", "]", ".", "value", ",", "mtoken", ")", "except", "Exception", ":", "# Token failed to decrypt, send back the salty bacon to", "# support older minions", "pass", "else", ":", "aes", "=", "salt", ".", "master", ".", "SMaster", ".", "secrets", "[", "'aes'", "]", "[", "'secret'", "]", ".", "value", "if", "HAS_M2", ":", "ret", "[", "'aes'", "]", "=", "pub", ".", "public_encrypt", "(", "aes", ",", "RSA", ".", "pkcs1_oaep_padding", ")", "else", ":", "ret", "[", "'aes'", "]", "=", "cipher", ".", "encrypt", "(", "aes", ")", "else", ":", "if", "'token'", "in", "load", ":", "try", ":", "if", "HAS_M2", ":", "mtoken", "=", "self", ".", "master_key", ".", "key", ".", "private_decrypt", "(", "load", "[", "'token'", "]", ",", "RSA", ".", "pkcs1_oaep_padding", ")", "ret", "[", "'token'", "]", "=", "pub", ".", "public_encrypt", "(", "mtoken", ",", "RSA", ".", "pkcs1_oaep_padding", ")", "else", ":", "mtoken", "=", "mcipher", ".", "decrypt", "(", "load", "[", "'token'", "]", ")", "ret", "[", "'token'", "]", "=", "cipher", ".", "encrypt", "(", "mtoken", ")", "except", "Exception", ":", "# Token failed to decrypt, send back the salty bacon to", "# support older minions", "pass", "aes", "=", "salt", ".", "master", ".", "SMaster", ".", "secrets", "[", "'aes'", "]", "[", "'secret'", "]", ".", "value", "if", "HAS_M2", 
":", "ret", "[", "'aes'", "]", "=", "pub", ".", "public_encrypt", "(", "aes", ",", "RSA", ".", "pkcs1_oaep_padding", ")", "else", ":", "ret", "[", "'aes'", "]", "=", "cipher", ".", "encrypt", "(", "aes", ")", "# Be aggressive about the signature", "digest", "=", "salt", ".", "utils", ".", "stringutils", ".", "to_bytes", "(", "hashlib", ".", "sha256", "(", "aes", ")", ".", "hexdigest", "(", ")", ")", "ret", "[", "'sig'", "]", "=", "salt", ".", "crypt", ".", "private_encrypt", "(", "self", ".", "master_key", ".", "key", ",", "digest", ")", "eload", "=", "{", "'result'", ":", "True", ",", "'act'", ":", "'accept'", ",", "'id'", ":", "load", "[", "'id'", "]", ",", "'pub'", ":", "load", "[", "'pub'", "]", "}", "if", "self", ".", "opts", ".", "get", "(", "'auth_events'", ")", "is", "True", ":", "self", ".", "event", ".", "fire_event", "(", "eload", ",", "salt", ".", "utils", ".", "event", ".", "tagify", "(", "prefix", "=", "'auth'", ")", ")", "return", "ret" ]
48.016807
20.072829
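The long branch over pubfn/pubfn_pend/pubfn_rejected reduces to a small state machine: given the auto_reject/auto_sign flags, decide where a brand-new key goes and whether the minion is told to wait. A compact sketch of just the new-key branch, with hypothetical names mirroring the logic above (not Salt's actual API):

def new_key_action(auto_reject, auto_sign):
    # mirrors the "not os.path.isfile(pubfn_pend)" branch of _auth
    if auto_reject:
        return 'reject', False   # key written to minions_rejected, auth denied
    elif not auto_sign:
        return 'pend', True      # key written to minions_pre, awaits salt-key -a
    return 'accept', True        # falls through to the auto-accept path

assert new_key_action(auto_reject=True, auto_sign=False) == ('reject', False)
assert new_key_action(auto_reject=False, auto_sign=False) == ('pend', True)
assert new_key_action(auto_reject=False, auto_sign=True) == ('accept', True)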
def disable_alarm_actions(self, alarm_names):
    """
    Disables actions for the specified alarms.

    :type alarm_names: list
    :param alarm_names: List of alarm names.
    """
    params = {}
    self.build_list_params(params, alarm_names, 'AlarmNames.member.%s')
    return self.get_status('DisableAlarmActions', params)
[ "def", "disable_alarm_actions", "(", "self", ",", "alarm_names", ")", ":", "params", "=", "{", "}", "self", ".", "build_list_params", "(", "params", ",", "alarm_names", ",", "'AlarmNames.member.%s'", ")", "return", "self", ".", "get_status", "(", "'DisableAlarmActions'", ",", "params", ")" ]
34.1
14.9
def config(data_folder=settings.data_folder,
           logs_folder=settings.logs_folder,
           imgs_folder=settings.imgs_folder,
           cache_folder=settings.cache_folder,
           use_cache=settings.use_cache,
           log_file=settings.log_file,
           log_console=settings.log_console,
           log_level=settings.log_level,
           log_name=settings.log_name,
           log_filename=settings.log_filename,
           useful_tags_node=settings.useful_tags_node,
           useful_tags_path=settings.useful_tags_path,
           osm_xml_node_attrs=settings.osm_xml_node_attrs,
           osm_xml_node_tags=settings.osm_xml_node_tags,
           osm_xml_way_attrs=settings.osm_xml_way_attrs,
           osm_xml_way_tags=settings.osm_xml_way_tags,
           default_access=settings.default_access,
           default_crs=settings.default_crs,
           default_user_agent=settings.default_user_agent,
           default_referer=settings.default_referer,
           default_accept_language=settings.default_accept_language):
    """
    Configure osmnx by setting the default global vars to desired values.

    Parameters
    ----------
    data_folder : string
        where to save and load data files
    logs_folder : string
        where to write the log files
    imgs_folder : string
        where to save figures
    cache_folder : string
        where to save the http response cache
    use_cache : bool
        if True, use a local cache to save/retrieve http responses instead
        of calling API repetitively for the same request URL
    log_file : bool
        if true, save log output to a log file in logs_folder
    log_console : bool
        if true, print log output to the console
    log_level : int
        one of the logger.level constants
    log_name : string
        name of the logger
    log_filename : string
        name of the log file
    useful_tags_node : list
        a list of useful OSM tags to attempt to save from node elements
    useful_tags_path : list
        a list of useful OSM tags to attempt to save from path elements
    osm_xml_node_attrs : list
        node attributes to include when saving .osm XML files
    osm_xml_node_tags : list
        node tags to include when saving .osm XML files
    osm_xml_way_attrs : list
        way attributes to include when saving .osm XML files
    osm_xml_way_tags : list
        way tags to include when saving .osm XML files
    default_access : string
        default filter for OSM "access" key
    default_crs : string
        default CRS to set when creating graphs
    default_user_agent : string
        HTTP header user-agent
    default_referer : string
        HTTP header referer
    default_accept_language : string
        HTTP header accept-language

    Returns
    -------
    None
    """
    # set each global variable to the passed-in parameter value
    settings.use_cache = use_cache
    settings.cache_folder = cache_folder
    settings.data_folder = data_folder
    settings.imgs_folder = imgs_folder
    settings.logs_folder = logs_folder
    settings.log_console = log_console
    settings.log_file = log_file
    settings.log_level = log_level
    settings.log_name = log_name
    settings.log_filename = log_filename
    settings.useful_tags_node = useful_tags_node
    settings.useful_tags_path = useful_tags_path
    settings.useful_tags_node = list(set(
        useful_tags_node + osm_xml_node_attrs + osm_xml_node_tags))
    settings.useful_tags_path = list(set(
        useful_tags_path + osm_xml_way_attrs + osm_xml_way_tags))
    settings.osm_xml_node_attrs = osm_xml_node_attrs
    settings.osm_xml_node_tags = osm_xml_node_tags
    settings.osm_xml_way_attrs = osm_xml_way_attrs
    settings.osm_xml_way_tags = osm_xml_way_tags
    settings.default_access = default_access
    settings.default_crs = default_crs
    settings.default_user_agent = default_user_agent
    settings.default_referer = default_referer
    settings.default_accept_language = default_accept_language

    # if logging is turned on, log that we are configured
    if settings.log_file or settings.log_console:
        log('Configured osmnx')
[ "def", "config", "(", "data_folder", "=", "settings", ".", "data_folder", ",", "logs_folder", "=", "settings", ".", "logs_folder", ",", "imgs_folder", "=", "settings", ".", "imgs_folder", ",", "cache_folder", "=", "settings", ".", "cache_folder", ",", "use_cache", "=", "settings", ".", "use_cache", ",", "log_file", "=", "settings", ".", "log_file", ",", "log_console", "=", "settings", ".", "log_console", ",", "log_level", "=", "settings", ".", "log_level", ",", "log_name", "=", "settings", ".", "log_name", ",", "log_filename", "=", "settings", ".", "log_filename", ",", "useful_tags_node", "=", "settings", ".", "useful_tags_node", ",", "useful_tags_path", "=", "settings", ".", "useful_tags_path", ",", "osm_xml_node_attrs", "=", "settings", ".", "osm_xml_node_attrs", ",", "osm_xml_node_tags", "=", "settings", ".", "osm_xml_node_tags", ",", "osm_xml_way_attrs", "=", "settings", ".", "osm_xml_way_attrs", ",", "osm_xml_way_tags", "=", "settings", ".", "osm_xml_way_tags", ",", "default_access", "=", "settings", ".", "default_access", ",", "default_crs", "=", "settings", ".", "default_crs", ",", "default_user_agent", "=", "settings", ".", "default_user_agent", ",", "default_referer", "=", "settings", ".", "default_referer", ",", "default_accept_language", "=", "settings", ".", "default_accept_language", ")", ":", "# set each global variable to the passed-in parameter value", "settings", ".", "use_cache", "=", "use_cache", "settings", ".", "cache_folder", "=", "cache_folder", "settings", ".", "data_folder", "=", "data_folder", "settings", ".", "imgs_folder", "=", "imgs_folder", "settings", ".", "logs_folder", "=", "logs_folder", "settings", ".", "log_console", "=", "log_console", "settings", ".", "log_file", "=", "log_file", "settings", ".", "log_level", "=", "log_level", "settings", ".", "log_name", "=", "log_name", "settings", ".", "log_filename", "=", "log_filename", "settings", ".", "useful_tags_node", "=", "useful_tags_node", "settings", ".", "useful_tags_path", "=", "useful_tags_path", "settings", ".", "useful_tags_node", "=", "list", "(", "set", "(", "useful_tags_node", "+", "osm_xml_node_attrs", "+", "osm_xml_node_tags", ")", ")", "settings", ".", "useful_tags_path", "=", "list", "(", "set", "(", "useful_tags_path", "+", "osm_xml_way_attrs", "+", "osm_xml_way_tags", ")", ")", "settings", ".", "osm_xml_node_attrs", "=", "osm_xml_node_attrs", "settings", ".", "osm_xml_node_tags", "=", "osm_xml_node_tags", "settings", ".", "osm_xml_way_attrs", "=", "osm_xml_way_attrs", "settings", ".", "osm_xml_way_tags", "=", "osm_xml_way_tags", "settings", ".", "default_access", "=", "default_access", "settings", ".", "default_crs", "=", "default_crs", "settings", ".", "default_user_agent", "=", "default_user_agent", "settings", ".", "default_referer", "=", "default_referer", "settings", ".", "default_accept_language", "=", "default_accept_language", "# if logging is turned on, log that we are configured", "if", "settings", ".", "log_file", "or", "settings", ".", "log_console", ":", "log", "(", "'Configured osmnx'", ")" ]
38.652632
13.094737
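Since config() above just copies its arguments onto the module-level settings, it is meant to be called once right after import; a short usage sketch, for the osmnx versions that expose config() at the package level:

import osmnx as ox

# cache HTTP responses locally and mirror log output to the console
ox.config(use_cache=True, log_console=True)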
def get_col_row_tot_array_from_data_record_array(array):  # TODO: max ToT
    '''Convert raw data array to column, row, and ToT array.

    Parameters
    ----------
    array : numpy.array
        Raw data array.

    Returns
    -------
    Tuple of column, row, and ToT arrays, with late/no-hit entries
    (ToT code >= 14) filtered out.
    '''
    def get_col_row_tot_1_array_from_data_record_array(value):
        return np.right_shift(np.bitwise_and(value, 0x00FE0000), 17), np.right_shift(np.bitwise_and(value, 0x0001FF00), 8), np.right_shift(np.bitwise_and(value, 0x000000F0), 4)

    def get_col_row_tot_2_array_from_data_record_array(value):
        return np.right_shift(np.bitwise_and(value, 0x00FE0000), 17), np.add(np.right_shift(np.bitwise_and(value, 0x0001FF00), 8), 1), np.bitwise_and(value, 0x0000000F)

    col_row_tot_1_array = np.column_stack(get_col_row_tot_1_array_from_data_record_array(array))
    col_row_tot_2_array = np.column_stack(get_col_row_tot_2_array_from_data_record_array(array))
    # interweave array here
    col_row_tot_array = np.vstack((col_row_tot_1_array.T, col_row_tot_2_array.T)).reshape((3, -1), order='F').T  # http://stackoverflow.com/questions/5347065/interweaving-two-numpy-arrays
    # remove ToT > 14 (late hit, no hit) from array, remove row > 336 in case we saw hit in row 336 (no double hit possible)
    try:
        col_row_tot_array_filtered = col_row_tot_array[col_row_tot_array[:, 2] < 14]  # [np.logical_and(col_row_tot_array[:,2]<14, col_row_tot_array[:,1]<=336)]
    except IndexError:
        # logging.warning('Array is empty')
        return np.array([], dtype=np.dtype('>u4')), np.array([], dtype=np.dtype('>u4')), np.array([], dtype=np.dtype('>u4'))
    return col_row_tot_array_filtered[:, 0], col_row_tot_array_filtered[:, 1], col_row_tot_array_filtered[:, 2]
[ "def", "get_col_row_tot_array_from_data_record_array", "(", "array", ")", ":", "# TODO: max ToT\r", "def", "get_col_row_tot_1_array_from_data_record_array", "(", "value", ")", ":", "return", "np", ".", "right_shift", "(", "np", ".", "bitwise_and", "(", "value", ",", "0x00FE0000", ")", ",", "17", ")", ",", "np", ".", "right_shift", "(", "np", ".", "bitwise_and", "(", "value", ",", "0x0001FF00", ")", ",", "8", ")", ",", "np", ".", "right_shift", "(", "np", ".", "bitwise_and", "(", "value", ",", "0x000000F0", ")", ",", "4", ")", "def", "get_col_row_tot_2_array_from_data_record_array", "(", "value", ")", ":", "return", "np", ".", "right_shift", "(", "np", ".", "bitwise_and", "(", "value", ",", "0x00FE0000", ")", ",", "17", ")", ",", "np", ".", "add", "(", "np", ".", "right_shift", "(", "np", ".", "bitwise_and", "(", "value", ",", "0x0001FF00", ")", ",", "8", ")", ",", "1", ")", ",", "np", ".", "bitwise_and", "(", "value", ",", "0x0000000F", ")", "col_row_tot_1_array", "=", "np", ".", "column_stack", "(", "get_col_row_tot_1_array_from_data_record_array", "(", "array", ")", ")", "col_row_tot_2_array", "=", "np", ".", "column_stack", "(", "get_col_row_tot_2_array_from_data_record_array", "(", "array", ")", ")", "# interweave array here\r", "col_row_tot_array", "=", "np", ".", "vstack", "(", "(", "col_row_tot_1_array", ".", "T", ",", "col_row_tot_2_array", ".", "T", ")", ")", ".", "reshape", "(", "(", "3", ",", "-", "1", ")", ",", "order", "=", "'F'", ")", ".", "T", "# http://stackoverflow.com/questions/5347065/interweaving-two-numpy-arrays\r", "# remove ToT > 14 (late hit, no hit) from array, remove row > 336 in case we saw hit in row 336 (no double hit possible)\r", "try", ":", "col_row_tot_array_filtered", "=", "col_row_tot_array", "[", "col_row_tot_array", "[", ":", ",", "2", "]", "<", "14", "]", "# [np.logical_and(col_row_tot_array[:,2]<14, col_row_tot_array[:,1]<=336)]\r", "except", "IndexError", ":", "# logging.warning('Array is empty')\r", "return", "np", ".", "array", "(", "[", "]", ",", "dtype", "=", "np", ".", "dtype", "(", "'>u4'", ")", ")", ",", "np", ".", "array", "(", "[", "]", ",", "dtype", "=", "np", ".", "dtype", "(", "'>u4'", ")", ")", ",", "np", ".", "array", "(", "[", "]", ",", "dtype", "=", "np", ".", "dtype", "(", "'>u4'", ")", ")", "return", "col_row_tot_array_filtered", "[", ":", ",", "0", "]", ",", "col_row_tot_array_filtered", "[", ":", ",", "1", "]", ",", "col_row_tot_array_filtered", "[", ":", ",", "2", "]" ]
60.344828
48.275862
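To make the bit layout above concrete, here is a worked decoding of one hypothetical raw word: the column sits in bits 17-23, the row in bits 8-16, and the two 4-bit ToT codes in bits 4-7 and 0-3 (the second code belongs to row + 1; the code 0xF means no hit and is filtered out downstream):

import numpy as np

raw = np.array([(10 << 17) | (20 << 8) | (5 << 4) | 0xF], dtype=np.uint32)
col  = np.right_shift(np.bitwise_and(raw, 0x00FE0000), 17)  # array([10]) -> column 10
row  = np.right_shift(np.bitwise_and(raw, 0x0001FF00), 8)   # array([20]) -> row 20
tot1 = np.right_shift(np.bitwise_and(raw, 0x000000F0), 4)   # array([5])  -> first hit ToT
tot2 = np.bitwise_and(raw, 0x0000000F)                      # array([15]) -> 0xF: no second hit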
def enumerate_all(vars, e, bn): """Return the sum of those entries in P(vars | e{others}) consistent with e, where P is the joint distribution represented by bn, and e{others} means e restricted to bn's other variables (the ones other than vars). Parents must precede children in vars.""" if not vars: return 1.0 Y, rest = vars[0], vars[1:] Ynode = bn.variable_node(Y) if Y in e: return Ynode.p(e[Y], e) * enumerate_all(rest, e, bn) else: return sum(Ynode.p(y, e) * enumerate_all(rest, extend(e, Y, y), bn) for y in bn.variable_values(Y))
[ "def", "enumerate_all", "(", "vars", ",", "e", ",", "bn", ")", ":", "if", "not", "vars", ":", "return", "1.0", "Y", ",", "rest", "=", "vars", "[", "0", "]", ",", "vars", "[", "1", ":", "]", "Ynode", "=", "bn", ".", "variable_node", "(", "Y", ")", "if", "Y", "in", "e", ":", "return", "Ynode", ".", "p", "(", "e", "[", "Y", "]", ",", "e", ")", "*", "enumerate_all", "(", "rest", ",", "e", ",", "bn", ")", "else", ":", "return", "sum", "(", "Ynode", ".", "p", "(", "y", ",", "e", ")", "*", "enumerate_all", "(", "rest", ",", "extend", "(", "e", ",", "Y", ",", "y", ")", ",", "bn", ")", "for", "y", "in", "bn", ".", "variable_values", "(", "Y", ")", ")" ]
43.142857
17.857143
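To see what the recursion computes, here is a self-contained toy version on a two-node network (Rain -> WetGrass) using plain dicts instead of the BayesNet class assumed above; the probability numbers are illustrative:

P_rain = {True: 0.2, False: 0.8}
P_wet = {True: {True: 0.9, False: 0.1},      # P(WetGrass | Rain=True)
         False: {True: 0.1, False: 0.9}}     # P(WetGrass | Rain=False)

def enum_all(variables, e):
    if not variables:
        return 1.0
    Y, rest = variables[0], variables[1:]
    # look up P(Y | parents(Y)) given the evidence accumulated so far
    p = (lambda y: P_rain[y]) if Y == 'Rain' else (lambda y: P_wet[e['Rain']][y])
    if Y in e:
        return p(e[Y]) * enum_all(rest, e)
    return sum(p(y) * enum_all(rest, dict(e, **{Y: y})) for y in (True, False))

# P(WetGrass=True) = 0.2 * 0.9 + 0.8 * 0.1 = 0.26
print(enum_all(['Rain', 'WetGrass'], {'WetGrass': True}))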
def transpose(self, trans, scale="C"):
        """ Transpose the chord

        :param int trans: Number of semitones to transpose by
        :param str scale: Key scale used to spell the transposed notes
        :return: None
        """
        if not isinstance(trans, int):
            raise TypeError("Expected integers, not {}".format(type(trans)))
        self._root = transpose_note(self._root, trans, scale)
        if self._on:
            self._on = transpose_note(self._on, trans, scale)
        self._reconfigure_chord()
[ "def", "transpose", "(", "self", ",", "trans", ",", "scale", "=", "\"C\"", ")", ":", "if", "not", "isinstance", "(", "trans", ",", "int", ")", ":", "raise", "TypeError", "(", "\"Expected integers, not {}\"", ".", "format", "(", "type", "(", "trans", ")", ")", ")", "self", ".", "_root", "=", "transpose_note", "(", "self", ".", "_root", ",", "trans", ",", "scale", ")", "if", "self", ".", "_on", ":", "self", ".", "_on", "=", "transpose_note", "(", "self", ".", "_on", ",", "trans", ",", "scale", ")", "self", ".", "_reconfigure_chord", "(", ")" ]
35.307692
13.769231
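A brief usage sketch, assuming the pychord-style Chord class this method appears to belong to, where the bass note of a slash chord is transposed along with the root:

from pychord import Chord

c = Chord("C/E")
c.transpose(2)   # shift root and bass up two semitones
print(c)         # D/F#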
def get_board_information(self, query_params=None): ''' Get all information for this board. Returns a dictionary of values. ''' return self.fetch_json( uri_path='/boards/' + self.id, query_params=query_params or {} )
[ "def", "get_board_information", "(", "self", ",", "query_params", "=", "None", ")", ":", "return", "self", ".", "fetch_json", "(", "uri_path", "=", "'/boards/'", "+", "self", ".", "id", ",", "query_params", "=", "query_params", "or", "{", "}", ")" ]
34.125
18.625
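A minimal usage sketch, assuming a trolly-style Trello client; the client construction, credentials, and board id below are assumptions for illustration, not verified API:

import trolly

client = trolly.client.Client(api_key='KEY', user_auth_token='TOKEN')
board = client.get_board('BOARD_ID')
info = board.get_board_information(query_params={'fields': 'name,closed'})
print(info['name'])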
def OnBackView(self, event):
        """Request to move backward in the history"""
        self.historyIndex -= 1
        try:
            self.RestoreHistory(self.history[self.historyIndex])
        except IndexError:
            self.SetStatusText(_('No further history available'))
[ "def", "OnBackView", "(", "self", ",", "event", ")", ":", "self", ".", "historyIndex", "-=", "1", "try", ":", "self", ".", "RestoreHistory", "(", "self", ".", "history", "[", "self", ".", "historyIndex", "]", ")", "except", "IndexError", ",", "err", ":", "self", ".", "SetStatusText", "(", "_", "(", "'No further history available'", ")", ")" ]
40.428571
15.428571
async def probe_message(self, _message, context): """Handle a probe message. See :meth:`AbstractDeviceAdapter.probe`. """ client_id = context.user_data await self.probe(client_id)
[ "async", "def", "probe_message", "(", "self", ",", "_message", ",", "context", ")", ":", "client_id", "=", "context", ".", "user_data", "await", "self", ".", "probe", "(", "client_id", ")" ]
26.75
13.125
def matchingTokens(self, tokenArray):
        '''Returns a list of tokens in the tokenArray that match this
           WordTemplate (i.e. tokens for which self.matches(token) returns
           True). Returns an empty list if no matching tokens appear in
           the input list.

           Parameters
           ----------
           tokenArray: list of word tokens;
               A list of word tokens along with their pyvabamorf analyses;
        '''
        assert isinstance(tokenArray, list), "tokenArray should be list " + str(tokenArray)
        matchingTok = []
        for token in tokenArray:
            if self.matches(token):
                matchingTok.append(token)
        return matchingTok
[ "def", "matchingTokens", "(", "self", ",", "tokenArray", ")", ":", "assert", "isinstance", "(", "tokenArray", ",", "list", ")", ",", "\"tokenArray should be list \"", "+", "str", "(", "tokenArray", ")", "matchingTok", "=", "[", "]", "for", "i", "in", "range", "(", "len", "(", "tokenArray", ")", ")", ":", "token", "=", "tokenArray", "[", "i", "]", "if", "self", ".", "matches", "(", "token", ")", ":", "matchingTok", ".", "append", "(", "token", ")", "return", "matchingTok" ]
43.470588
20.411765
def df_quantile(df, nb=100):
    """Returns the nb quantiles of the data in a dataframe
    """
    quantiles = np.linspace(0, 1., nb)
    res = pd.DataFrame()
    for q in quantiles:
        res = res.append(df.quantile(q), ignore_index=True)
    return res
[ "def", "df_quantile", "(", "df", ",", "nb", "=", "100", ")", ":", "quantiles", "=", "np", ".", "linspace", "(", "0", ",", "1.", ",", "nb", ")", "res", "=", "pd", ".", "DataFrame", "(", ")", "for", "q", "in", "quantiles", ":", "res", "=", "res", ".", "append", "(", "df", ".", "quantile", "(", "q", ")", ",", "ignore_index", "=", "True", ")", "return", "res" ]
31.125
11.5
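Note that df_quantile() relies on DataFrame.append(), which was deprecated in pandas 1.4 and removed in 2.0; an equivalent sketch for modern pandas concatenates the per-quantile rows instead:

import numpy as np
import pandas as pd

def df_quantile_modern(df, nb=100):
    quantiles = np.linspace(0, 1., nb)
    # df.quantile(q) returns a Series; transpose each into a one-row frame
    return pd.concat([df.quantile(q).to_frame().T for q in quantiles],
                     ignore_index=True)

res = df_quantile_modern(pd.DataFrame({'a': range(10)}))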
def parse_error(self, response): "Parse an error response" error_code = response.split(' ')[0] if error_code in self.EXCEPTION_CLASSES: response = response[len(error_code) + 1:] return self.EXCEPTION_CLASSES[error_code](response) return ResponseError(response)
[ "def", "parse_error", "(", "self", ",", "response", ")", ":", "error_code", "=", "response", ".", "split", "(", "' '", ")", "[", "0", "]", "if", "error_code", "in", "self", ".", "EXCEPTION_CLASSES", ":", "response", "=", "response", "[", "len", "(", "error_code", ")", "+", "1", ":", "]", "return", "self", ".", "EXCEPTION_CLASSES", "[", "error_code", "]", "(", "response", ")", "return", "ResponseError", "(", "response", ")" ]
44.285714
9.142857
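Since EXCEPTION_CLASSES and ResponseError are defined elsewhere in this redis-py-style client, here is a self-contained toy version of the same dispatch, with builtin exceptions standing in for the real classes:

EXCEPTION_CLASSES = {'NOAUTH': PermissionError, 'ERR': ValueError}  # stand-ins

def parse_error_toy(response):
    error_code = response.split(' ')[0]
    if error_code in EXCEPTION_CLASSES:
        return EXCEPTION_CLASSES[error_code](response[len(error_code) + 1:])
    return RuntimeError(response)  # stand-in for ResponseError

print(parse_error_toy('ERR unknown command'))  # ValueError('unknown command')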
def failure_data(self):
        '''Returns the failure data of the step that failed during this
        solid's execution, if any'''
        for result in itertools.chain(
            self.input_expectations, self.output_expectations, self.transforms
        ):
            if result.event_type == DagsterEventType.STEP_FAILURE:
                return result.step_failure_data
[ "def", "failure_data", "(", "self", ")", ":", "for", "result", "in", "itertools", ".", "chain", "(", "self", ".", "input_expectations", ",", "self", ".", "output_expectations", ",", "self", ".", "transforms", ")", ":", "if", "result", ".", "event_type", "==", "DagsterEventType", ".", "STEP_FAILURE", ":", "return", "result", ".", "step_failure_data" ]
51.285714
25.285714
def strippen(function): "Decorator. Strip excess whitespace from return value." def wrapper(*args, **kwargs): return strip_strings(function(*args, **kwargs)) return wrapper
[ "def", "strippen", "(", "function", ")", ":", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "strip_strings", "(", "function", "(", "*", "args", ",", "*", "*", "kwargs", ")", ")", "return", "wrapper" ]
37.6
16
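A quick demonstration of the decorator above, with a minimal stand-in for strip_strings() (the real helper is defined elsewhere and may also handle nested structures):

def strip_strings(value):  # stand-in: strips each string inside a flat list
    return [v.strip() for v in value] if isinstance(value, list) else value

@strippen
def read_fields():
    return ['  alpha  ', 'beta \n']

print(read_fields())  # ['alpha', 'beta']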
def set_file_license_comment(self, doc, text): """ Raises OrderError if no package or file defined. Raises SPDXValueError if text is not free form text. Raises CardinalityError if more than one per file. """ if self.has_package(doc) and self.has_file(doc): if not self.file_license_comment_set: self.file_license_comment_set = True if validations.validate_file_lics_comment(text): self.file(doc).license_comment = str_from_text(text) else: raise SPDXValueError('File::LicenseComment') else: raise CardinalityError('File::LicenseComment') else: raise OrderError('File::LicenseComment')
[ "def", "set_file_license_comment", "(", "self", ",", "doc", ",", "text", ")", ":", "if", "self", ".", "has_package", "(", "doc", ")", "and", "self", ".", "has_file", "(", "doc", ")", ":", "if", "not", "self", ".", "file_license_comment_set", ":", "self", ".", "file_license_comment_set", "=", "True", "if", "validations", ".", "validate_file_lics_comment", "(", "text", ")", ":", "self", ".", "file", "(", "doc", ")", ".", "license_comment", "=", "str_from_text", "(", "text", ")", "else", ":", "raise", "SPDXValueError", "(", "'File::LicenseComment'", ")", "else", ":", "raise", "CardinalityError", "(", "'File::LicenseComment'", ")", "else", ":", "raise", "OrderError", "(", "'File::LicenseComment'", ")" ]
44.941176
16.470588
def technical_words_from_shadow_contents(shadow_contents):
    """Get a set of technical words from :shadow_contents:.

    :shadow_contents: is an array of shadow contents, as returned by
    spellcheckable_and_shadow_contents.
    """
    technical_words = set()
    valid_symbol = re.compile(_VALID_SYMBOL_WORDS)
    for line in shadow_contents:
        # "Fix up" the shadow line by replacing zeros with spaces.
        line = "".join(" " if c == 0 else c for c in line)
        for sym in _split_into_symbol_words(line):
            if sym and valid_symbol.match(sym):
                technical_words.add(sym)

    return technical_words
[ "def", "technical_words_from_shadow_contents", "(", "shadow_contents", ")", ":", "technical_words", "=", "set", "(", ")", "for", "line", "in", "shadow_contents", ":", "# \"Fix up\" the shadow line by replacing zeros with spaces.", "line", "=", "\"\"", ".", "join", "(", "[", "(", "lambda", "c", ":", "\" \"", "if", "c", "==", "0", "else", "c", ")", "(", "c", ")", "for", "c", "in", "line", "]", ")", "for", "sym", "in", "_split_into_symbol_words", "(", "line", ")", ":", "if", "len", "(", "sym", ")", "and", "re", ".", "compile", "(", "_VALID_SYMBOL_WORDS", ")", ".", "match", "(", "sym", ")", ":", "technical_words", "|=", "set", "(", "[", "sym", "]", ")", "return", "technical_words" ]
41.533333
17.933333
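As an illustration of the shadow-contents convention (zeros mask out the spellcheckable prose, leaving only the code-like regions), a hand-built shadow line below keeps just the symbol word; the exact result depends on _split_into_symbol_words and _VALID_SYMBOL_WORDS, so the output shown is indicative:

line = list("see _do_fixup() for details")
shadow = [c if 4 <= i <= 12 else 0 for i, c in enumerate(line)]  # keep "_do_fixup"
print(technical_words_from_shadow_contents([shadow]))            # e.g. {'_do_fixup'}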
def is_empty(self): """ A group of modules is considered empty if it has no children or if all its children are empty. >>> from admin_tools.dashboard.modules import DashboardModule, LinkList >>> mod = Group() >>> mod.is_empty() True >>> mod.children.append(DashboardModule()) >>> mod.is_empty() True >>> mod.children.append(LinkList('links', children=[ ... {'title': 'example1', 'url': 'http://example.com'}, ... {'title': 'example2', 'url': 'http://example.com'}, ... ])) >>> mod.is_empty() False """ if super(Group, self).is_empty(): return True for child in self.children: if not child.is_empty(): return False return True
[ "def", "is_empty", "(", "self", ")", ":", "if", "super", "(", "Group", ",", "self", ")", ".", "is_empty", "(", ")", ":", "return", "True", "for", "child", "in", "self", ".", "children", ":", "if", "not", "child", ".", "is_empty", "(", ")", ":", "return", "False", "return", "True" ]
32.32
17.84