Dataset columns (feature type and value range):

| Column | Feature type | Values |
| --- | --- | --- |
| repository_name | stringlengths | 7 - 55 |
| func_path_in_repository | stringlengths | 4 - 223 |
| func_name | stringlengths | 1 - 134 |
| whole_func_string | stringlengths | 75 - 104k |
| language | stringclasses | 1 value |
| func_code_string | stringlengths | 75 - 104k |
| func_code_tokens | sequencelengths | 19 - 28.4k |
| func_documentation_string | stringlengths | 1 - 46.9k |
| func_documentation_tokens | sequencelengths | 1 - 1.97k |
| split_name | stringclasses | 1 value |
| func_code_url | stringlengths | 87 - 315 |
RudolfCardinal/pythonlib
cardinal_pythonlib/sqlalchemy/dump.py
dump_ddl
def dump_ddl(metadata: MetaData, dialect_name: str, fileobj: TextIO = sys.stdout, checkfirst: bool = True) -> None: """ Sends schema-creating DDL from the metadata to the dump engine. This makes ``CREATE TABLE`` statements. Args: metadata: SQLAlchemy :class:`MetaData` dialect_name: string name of SQL dialect to generate DDL in fileobj: file-like object to send DDL to checkfirst: if ``True``, use ``CREATE TABLE IF NOT EXISTS`` or equivalent. """ # http://docs.sqlalchemy.org/en/rel_0_8/faq.html#how-can-i-get-the-create-table-drop-table-output-as-a-string # noqa # http://stackoverflow.com/questions/870925/how-to-generate-a-file-with-ddl-in-the-engines-sql-dialect-in-sqlalchemy # noqa # https://github.com/plq/scripts/blob/master/pg_dump.py # noinspection PyUnusedLocal def dump(querysql, *multiparams, **params): compsql = querysql.compile(dialect=engine.dialect) writeline_nl(fileobj, "{sql};".format(sql=compsql)) writeline_nl(fileobj, sql_comment("Schema (for dialect {}):".format(dialect_name))) engine = create_engine('{dialect}://'.format(dialect=dialect_name), strategy='mock', executor=dump) metadata.create_all(engine, checkfirst=checkfirst)
python
def dump_ddl(metadata: MetaData, dialect_name: str, fileobj: TextIO = sys.stdout, checkfirst: bool = True) -> None: """ Sends schema-creating DDL from the metadata to the dump engine. This makes ``CREATE TABLE`` statements. Args: metadata: SQLAlchemy :class:`MetaData` dialect_name: string name of SQL dialect to generate DDL in fileobj: file-like object to send DDL to checkfirst: if ``True``, use ``CREATE TABLE IF NOT EXISTS`` or equivalent. """ # http://docs.sqlalchemy.org/en/rel_0_8/faq.html#how-can-i-get-the-create-table-drop-table-output-as-a-string # noqa # http://stackoverflow.com/questions/870925/how-to-generate-a-file-with-ddl-in-the-engines-sql-dialect-in-sqlalchemy # noqa # https://github.com/plq/scripts/blob/master/pg_dump.py # noinspection PyUnusedLocal def dump(querysql, *multiparams, **params): compsql = querysql.compile(dialect=engine.dialect) writeline_nl(fileobj, "{sql};".format(sql=compsql)) writeline_nl(fileobj, sql_comment("Schema (for dialect {}):".format(dialect_name))) engine = create_engine('{dialect}://'.format(dialect=dialect_name), strategy='mock', executor=dump) metadata.create_all(engine, checkfirst=checkfirst)
[ "def", "dump_ddl", "(", "metadata", ":", "MetaData", ",", "dialect_name", ":", "str", ",", "fileobj", ":", "TextIO", "=", "sys", ".", "stdout", ",", "checkfirst", ":", "bool", "=", "True", ")", "->", "None", ":", "# http://docs.sqlalchemy.org/en/rel_0_8/faq.html#how-can-i-get-the-create-table-drop-table-output-as-a-string # noqa", "# http://stackoverflow.com/questions/870925/how-to-generate-a-file-with-ddl-in-the-engines-sql-dialect-in-sqlalchemy # noqa", "# https://github.com/plq/scripts/blob/master/pg_dump.py", "# noinspection PyUnusedLocal", "def", "dump", "(", "querysql", ",", "*", "multiparams", ",", "*", "*", "params", ")", ":", "compsql", "=", "querysql", ".", "compile", "(", "dialect", "=", "engine", ".", "dialect", ")", "writeline_nl", "(", "fileobj", ",", "\"{sql};\"", ".", "format", "(", "sql", "=", "compsql", ")", ")", "writeline_nl", "(", "fileobj", ",", "sql_comment", "(", "\"Schema (for dialect {}):\"", ".", "format", "(", "dialect_name", ")", ")", ")", "engine", "=", "create_engine", "(", "'{dialect}://'", ".", "format", "(", "dialect", "=", "dialect_name", ")", ",", "strategy", "=", "'mock'", ",", "executor", "=", "dump", ")", "metadata", ".", "create_all", "(", "engine", ",", "checkfirst", "=", "checkfirst", ")" ]
Sends schema-creating DDL from the metadata to the dump engine. This makes ``CREATE TABLE`` statements. Args: metadata: SQLAlchemy :class:`MetaData` dialect_name: string name of SQL dialect to generate DDL in fileobj: file-like object to send DDL to checkfirst: if ``True``, use ``CREATE TABLE IF NOT EXISTS`` or equivalent.
[ "Sends", "schema", "-", "creating", "DDL", "from", "the", "metadata", "to", "the", "dump", "engine", ".", "This", "makes", "CREATE", "TABLE", "statements", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/sqlalchemy/dump.py#L79-L106
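A minimal usage sketch for `dump_ddl` as documented above. It is illustrative only: it assumes `cardinal_pythonlib` is installed alongside the SQLAlchemy 1.3-style API the code targets, and the table and columns are made up.

```python
import sys
from sqlalchemy import Column, Integer, MetaData, String, Table
from cardinal_pythonlib.sqlalchemy.dump import dump_ddl

# Hypothetical schema, purely for illustration.
metadata = MetaData()
Table(
    "person", metadata,
    Column("id", Integer, primary_key=True),
    Column("name", String(50)),
)

# Emit "CREATE TABLE IF NOT EXISTS ..." statements for the MySQL dialect to stdout.
dump_ddl(metadata, dialect_name="mysql", fileobj=sys.stdout, checkfirst=True)
```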
RudolfCardinal/pythonlib
cardinal_pythonlib/sqlalchemy/dump.py
quick_mapper
def quick_mapper(table: Table) -> Type[DeclarativeMeta]: """ Makes a new SQLAlchemy mapper for an existing table. See http://www.tylerlesmann.com/2009/apr/27/copying-databases-across-platforms-sqlalchemy/ Args: table: SQLAlchemy :class:`Table` object Returns: a :class:`DeclarativeMeta` class """ # noqa # noinspection PyPep8Naming Base = declarative_base() class GenericMapper(Base): __table__ = table # noinspection PyTypeChecker return GenericMapper
python
def quick_mapper(table: Table) -> Type[DeclarativeMeta]: """ Makes a new SQLAlchemy mapper for an existing table. See http://www.tylerlesmann.com/2009/apr/27/copying-databases-across-platforms-sqlalchemy/ Args: table: SQLAlchemy :class:`Table` object Returns: a :class:`DeclarativeMeta` class """ # noqa # noinspection PyPep8Naming Base = declarative_base() class GenericMapper(Base): __table__ = table # noinspection PyTypeChecker return GenericMapper
[ "def", "quick_mapper", "(", "table", ":", "Table", ")", "->", "Type", "[", "DeclarativeMeta", "]", ":", "# noqa", "# noinspection PyPep8Naming", "Base", "=", "declarative_base", "(", ")", "class", "GenericMapper", "(", "Base", ")", ":", "__table__", "=", "table", "# noinspection PyTypeChecker", "return", "GenericMapper" ]
Makes a new SQLAlchemy mapper for an existing table. See http://www.tylerlesmann.com/2009/apr/27/copying-databases-across-platforms-sqlalchemy/ Args: table: SQLAlchemy :class:`Table` object Returns: a :class:`DeclarativeMeta` class
[ "Makes", "a", "new", "SQLAlchemy", "mapper", "for", "an", "existing", "table", ".", "See", "http", ":", "//", "www", ".", "tylerlesmann", ".", "com", "/", "2009", "/", "apr", "/", "27", "/", "copying", "-", "databases", "-", "across", "-", "platforms", "-", "sqlalchemy", "/", "Args", ":", "table", ":", "SQLAlchemy", ":", "class", ":", "Table", "object" ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/sqlalchemy/dump.py#L113-L133
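A hedged sketch of `quick_mapper` in use: reflect an existing table and query it through the ad-hoc mapped class. The database path, table name, and `id` column are hypothetical; the reflection calls follow the SQLAlchemy 1.3-era API used elsewhere in this module.

```python
from sqlalchemy import MetaData, Table, create_engine
from sqlalchemy.orm import sessionmaker
from cardinal_pythonlib.sqlalchemy.dump import quick_mapper

engine = create_engine("sqlite:///example.db")        # hypothetical existing database
meta = MetaData(bind=engine)
person_table = Table("person", meta, autoload=True)   # reflect the existing table

PersonRecord = quick_mapper(person_table)  # ad-hoc declarative class for that table

session = sessionmaker(bind=engine)()
for record in session.query(PersonRecord).limit(5):
    print(record.id)  # assumes the table has an "id" column
```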
RudolfCardinal/pythonlib
cardinal_pythonlib/sqlalchemy/dump.py
get_literal_query
def get_literal_query(statement: Union[Query, Executable], bind: Connectable = None) -> str: """ Takes an SQLAlchemy statement and produces a literal SQL version, with values filled in. As per http://stackoverflow.com/questions/5631078/sqlalchemy-print-the-actual-query Notes: - for debugging purposes *only* - insecure; you should always separate queries from their values - please also note that this function is quite slow Args: statement: the SQL statement (a SQLAlchemy object) to use bind: if the statement is unbound, you will need to specify an object here that supports SQL execution Returns: a string literal version of the query. """ # noqa # log.debug("statement: {!r}", statement) # log.debug("statement.bind: {!r}", statement.bind) if isinstance(statement, Query): if bind is None: bind = statement.session.get_bind(statement._mapper_zero_or_none()) statement = statement.statement elif bind is None: bind = statement.bind if bind is None: # despite all that raise ValueError("Attempt to call get_literal_query with an unbound " "statement and no 'bind' parameter") # noinspection PyUnresolvedReferences dialect = bind.dialect compiler = statement._compiler(dialect) class LiteralCompiler(compiler.__class__): # noinspection PyMethodMayBeStatic def visit_bindparam(self, bindparam: BindParameter, within_columns_clause: bool = False, literal_binds: bool = False, **kwargs) -> str: return super().render_literal_bindparam( bindparam, within_columns_clause=within_columns_clause, literal_binds=literal_binds, **kwargs ) # noinspection PyUnusedLocal def render_literal_value(self, value: Any, type_) -> str: """Render the value of a bind parameter as a quoted literal. This is used for statement sections that do not accept bind paramters on the target driver/database. This should be implemented by subclasses using the quoting services of the DBAPI. """ if isinstance(value, str): value = value.replace("'", "''") return "'%s'" % value elif value is None: return "NULL" elif isinstance(value, (float, int)): return repr(value) elif isinstance(value, decimal.Decimal): return str(value) elif (isinstance(value, datetime.datetime) or isinstance(value, datetime.date) or isinstance(value, datetime.time) or isinstance(value, pendulum.DateTime) or isinstance(value, pendulum.Date) or isinstance(value, pendulum.Time)): # All have an isoformat() method. return "'{}'".format(value.isoformat()) # return ( # "TO_DATE('%s','YYYY-MM-DD HH24:MI:SS')" # % value.strftime("%Y-%m-%d %H:%M:%S") # ) else: raise NotImplementedError( "Don't know how to literal-quote value %r" % value) compiler = LiteralCompiler(dialect, statement) return compiler.process(statement) + ";"
python
def get_literal_query(statement: Union[Query, Executable], bind: Connectable = None) -> str: """ Takes an SQLAlchemy statement and produces a literal SQL version, with values filled in. As per http://stackoverflow.com/questions/5631078/sqlalchemy-print-the-actual-query Notes: - for debugging purposes *only* - insecure; you should always separate queries from their values - please also note that this function is quite slow Args: statement: the SQL statement (a SQLAlchemy object) to use bind: if the statement is unbound, you will need to specify an object here that supports SQL execution Returns: a string literal version of the query. """ # noqa # log.debug("statement: {!r}", statement) # log.debug("statement.bind: {!r}", statement.bind) if isinstance(statement, Query): if bind is None: bind = statement.session.get_bind(statement._mapper_zero_or_none()) statement = statement.statement elif bind is None: bind = statement.bind if bind is None: # despite all that raise ValueError("Attempt to call get_literal_query with an unbound " "statement and no 'bind' parameter") # noinspection PyUnresolvedReferences dialect = bind.dialect compiler = statement._compiler(dialect) class LiteralCompiler(compiler.__class__): # noinspection PyMethodMayBeStatic def visit_bindparam(self, bindparam: BindParameter, within_columns_clause: bool = False, literal_binds: bool = False, **kwargs) -> str: return super().render_literal_bindparam( bindparam, within_columns_clause=within_columns_clause, literal_binds=literal_binds, **kwargs ) # noinspection PyUnusedLocal def render_literal_value(self, value: Any, type_) -> str: """Render the value of a bind parameter as a quoted literal. This is used for statement sections that do not accept bind paramters on the target driver/database. This should be implemented by subclasses using the quoting services of the DBAPI. """ if isinstance(value, str): value = value.replace("'", "''") return "'%s'" % value elif value is None: return "NULL" elif isinstance(value, (float, int)): return repr(value) elif isinstance(value, decimal.Decimal): return str(value) elif (isinstance(value, datetime.datetime) or isinstance(value, datetime.date) or isinstance(value, datetime.time) or isinstance(value, pendulum.DateTime) or isinstance(value, pendulum.Date) or isinstance(value, pendulum.Time)): # All have an isoformat() method. return "'{}'".format(value.isoformat()) # return ( # "TO_DATE('%s','YYYY-MM-DD HH24:MI:SS')" # % value.strftime("%Y-%m-%d %H:%M:%S") # ) else: raise NotImplementedError( "Don't know how to literal-quote value %r" % value) compiler = LiteralCompiler(dialect, statement) return compiler.process(statement) + ";"
[ "def", "get_literal_query", "(", "statement", ":", "Union", "[", "Query", ",", "Executable", "]", ",", "bind", ":", "Connectable", "=", "None", ")", "->", "str", ":", "# noqa", "# log.debug(\"statement: {!r}\", statement)", "# log.debug(\"statement.bind: {!r}\", statement.bind)", "if", "isinstance", "(", "statement", ",", "Query", ")", ":", "if", "bind", "is", "None", ":", "bind", "=", "statement", ".", "session", ".", "get_bind", "(", "statement", ".", "_mapper_zero_or_none", "(", ")", ")", "statement", "=", "statement", ".", "statement", "elif", "bind", "is", "None", ":", "bind", "=", "statement", ".", "bind", "if", "bind", "is", "None", ":", "# despite all that", "raise", "ValueError", "(", "\"Attempt to call get_literal_query with an unbound \"", "\"statement and no 'bind' parameter\"", ")", "# noinspection PyUnresolvedReferences", "dialect", "=", "bind", ".", "dialect", "compiler", "=", "statement", ".", "_compiler", "(", "dialect", ")", "class", "LiteralCompiler", "(", "compiler", ".", "__class__", ")", ":", "# noinspection PyMethodMayBeStatic", "def", "visit_bindparam", "(", "self", ",", "bindparam", ":", "BindParameter", ",", "within_columns_clause", ":", "bool", "=", "False", ",", "literal_binds", ":", "bool", "=", "False", ",", "*", "*", "kwargs", ")", "->", "str", ":", "return", "super", "(", ")", ".", "render_literal_bindparam", "(", "bindparam", ",", "within_columns_clause", "=", "within_columns_clause", ",", "literal_binds", "=", "literal_binds", ",", "*", "*", "kwargs", ")", "# noinspection PyUnusedLocal", "def", "render_literal_value", "(", "self", ",", "value", ":", "Any", ",", "type_", ")", "->", "str", ":", "\"\"\"Render the value of a bind parameter as a quoted literal.\n\n This is used for statement sections that do not accept bind\n paramters on the target driver/database.\n\n This should be implemented by subclasses using the quoting services\n of the DBAPI.\n \"\"\"", "if", "isinstance", "(", "value", ",", "str", ")", ":", "value", "=", "value", ".", "replace", "(", "\"'\"", ",", "\"''\"", ")", "return", "\"'%s'\"", "%", "value", "elif", "value", "is", "None", ":", "return", "\"NULL\"", "elif", "isinstance", "(", "value", ",", "(", "float", ",", "int", ")", ")", ":", "return", "repr", "(", "value", ")", "elif", "isinstance", "(", "value", ",", "decimal", ".", "Decimal", ")", ":", "return", "str", "(", "value", ")", "elif", "(", "isinstance", "(", "value", ",", "datetime", ".", "datetime", ")", "or", "isinstance", "(", "value", ",", "datetime", ".", "date", ")", "or", "isinstance", "(", "value", ",", "datetime", ".", "time", ")", "or", "isinstance", "(", "value", ",", "pendulum", ".", "DateTime", ")", "or", "isinstance", "(", "value", ",", "pendulum", ".", "Date", ")", "or", "isinstance", "(", "value", ",", "pendulum", ".", "Time", ")", ")", ":", "# All have an isoformat() method.", "return", "\"'{}'\"", ".", "format", "(", "value", ".", "isoformat", "(", ")", ")", "# return (", "# \"TO_DATE('%s','YYYY-MM-DD HH24:MI:SS')\"", "# % value.strftime(\"%Y-%m-%d %H:%M:%S\")", "# )", "else", ":", "raise", "NotImplementedError", "(", "\"Don't know how to literal-quote value %r\"", "%", "value", ")", "compiler", "=", "LiteralCompiler", "(", "dialect", ",", "statement", ")", "return", "compiler", ".", "process", "(", "statement", ")", "+", "\";\"" ]
Takes an SQLAlchemy statement and produces a literal SQL version, with values filled in. As per http://stackoverflow.com/questions/5631078/sqlalchemy-print-the-actual-query Notes: - for debugging purposes *only* - insecure; you should always separate queries from their values - please also note that this function is quite slow Args: statement: the SQL statement (a SQLAlchemy object) to use bind: if the statement is unbound, you will need to specify an object here that supports SQL execution Returns: a string literal version of the query.
[ "Takes", "an", "SQLAlchemy", "statement", "and", "produces", "a", "literal", "SQL", "version", "with", "values", "filled", "in", ".", "As", "per", "http", ":", "//", "stackoverflow", ".", "com", "/", "questions", "/", "5631078", "/", "sqlalchemy", "-", "print", "-", "the", "-", "actual", "-", "query", "Notes", ":", "-", "for", "debugging", "purposes", "*", "only", "*", "-", "insecure", ";", "you", "should", "always", "separate", "queries", "from", "their", "values", "-", "please", "also", "note", "that", "this", "function", "is", "quite", "slow", "Args", ":", "statement", ":", "the", "SQL", "statement", "(", "a", "SQLAlchemy", "object", ")", "to", "use", "bind", ":", "if", "the", "statement", "is", "unbound", "you", "will", "need", "to", "specify", "an", "object", "here", "that", "supports", "SQL", "execution" ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/sqlalchemy/dump.py#L192-L281
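A sketch of `get_literal_query` rendering a SELECT with its bound values inlined, for debugging only (as the docstring warns). The table, columns, and the expected-output comment are illustrative.

```python
from sqlalchemy import (Column, Integer, MetaData, String, Table,
                        create_engine, select)
from cardinal_pythonlib.sqlalchemy.dump import get_literal_query

engine = create_engine("sqlite://")  # any engine will do; only its dialect is used
meta = MetaData()
person = Table(
    "person", meta,
    Column("id", Integer, primary_key=True),
    Column("name", String(50)),
)

stmt = select([person]).where(person.c.name == "Alice")  # SQLAlchemy 1.3-style select
print(get_literal_query(stmt, bind=engine))
# Roughly: SELECT person.id, person.name FROM person WHERE person.name = 'Alice';
```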
RudolfCardinal/pythonlib
cardinal_pythonlib/sqlalchemy/dump.py
dump_table_as_insert_sql
def dump_table_as_insert_sql(engine: Engine, table_name: str, fileobj: TextIO, wheredict: Dict[str, Any] = None, include_ddl: bool = False, multirow: bool = False) -> None: """ Reads a table from the database, and writes SQL to replicate the table's data to the output ``fileobj``. Args: engine: SQLAlchemy :class:`Engine` table_name: name of the table fileobj: file-like object to write to wheredict: optional dictionary of ``{column_name: value}`` to use as ``WHERE`` filters include_ddl: if ``True``, include the DDL to create the table as well multirow: write multi-row ``INSERT`` statements """ # http://stackoverflow.com/questions/5631078/sqlalchemy-print-the-actual-query # noqa # http://docs.sqlalchemy.org/en/latest/faq/sqlexpressions.html # http://www.tylerlesmann.com/2009/apr/27/copying-databases-across-platforms-sqlalchemy/ # noqa # https://github.com/plq/scripts/blob/master/pg_dump.py log.info("dump_data_as_insert_sql: table_name={}", table_name) writelines_nl(fileobj, [ SEP1, sql_comment("Data for table: {}".format(table_name)), SEP2, sql_comment("Filters: {}".format(wheredict)), ]) dialect = engine.dialect if not dialect.supports_multivalues_insert: multirow = False if multirow: log.warning("dump_data_as_insert_sql: multirow parameter substitution " "not working yet") multirow = False # literal_query = make_literal_query_fn(dialect) meta = MetaData(bind=engine) log.debug("... retrieving schema") table = Table(table_name, meta, autoload=True) if include_ddl: log.debug("... producing DDL") dump_ddl(table.metadata, dialect_name=engine.dialect.name, fileobj=fileobj) # NewRecord = quick_mapper(table) # columns = table.columns.keys() log.debug("... fetching records") # log.debug("meta: {}", meta) # obscures password # log.debug("table: {}", table) # log.debug("table.columns: {!r}", table.columns) # log.debug("multirow: {}", multirow) query = select(table.columns) if wheredict: for k, v in wheredict.items(): col = table.columns.get(k) query = query.where(col == v) # log.debug("query: {}", query) cursor = engine.execute(query) if multirow: row_dict_list = [] for r in cursor: row_dict_list.append(dict(r)) # log.debug("row_dict_list: {}", row_dict_list) if row_dict_list: statement = table.insert().values(row_dict_list) # log.debug("statement: {!r}", statement) # insert_str = literal_query(statement) insert_str = get_literal_query(statement, bind=engine) # NOT WORKING FOR MULTIROW INSERTS. ONLY SUBSTITUTES FIRST ROW. writeline_nl(fileobj, insert_str) else: writeline_nl(fileobj, sql_comment("No data!")) else: found_one = False for r in cursor: found_one = True row_dict = dict(r) statement = table.insert(values=row_dict) # insert_str = literal_query(statement) insert_str = get_literal_query(statement, bind=engine) # log.debug("row_dict: {}", row_dict) # log.debug("insert_str: {}", insert_str) writeline_nl(fileobj, insert_str) if not found_one: writeline_nl(fileobj, sql_comment("No data!")) writeline_nl(fileobj, SEP2) log.debug("... done")
python
def dump_table_as_insert_sql(engine: Engine, table_name: str, fileobj: TextIO, wheredict: Dict[str, Any] = None, include_ddl: bool = False, multirow: bool = False) -> None: """ Reads a table from the database, and writes SQL to replicate the table's data to the output ``fileobj``. Args: engine: SQLAlchemy :class:`Engine` table_name: name of the table fileobj: file-like object to write to wheredict: optional dictionary of ``{column_name: value}`` to use as ``WHERE`` filters include_ddl: if ``True``, include the DDL to create the table as well multirow: write multi-row ``INSERT`` statements """ # http://stackoverflow.com/questions/5631078/sqlalchemy-print-the-actual-query # noqa # http://docs.sqlalchemy.org/en/latest/faq/sqlexpressions.html # http://www.tylerlesmann.com/2009/apr/27/copying-databases-across-platforms-sqlalchemy/ # noqa # https://github.com/plq/scripts/blob/master/pg_dump.py log.info("dump_data_as_insert_sql: table_name={}", table_name) writelines_nl(fileobj, [ SEP1, sql_comment("Data for table: {}".format(table_name)), SEP2, sql_comment("Filters: {}".format(wheredict)), ]) dialect = engine.dialect if not dialect.supports_multivalues_insert: multirow = False if multirow: log.warning("dump_data_as_insert_sql: multirow parameter substitution " "not working yet") multirow = False # literal_query = make_literal_query_fn(dialect) meta = MetaData(bind=engine) log.debug("... retrieving schema") table = Table(table_name, meta, autoload=True) if include_ddl: log.debug("... producing DDL") dump_ddl(table.metadata, dialect_name=engine.dialect.name, fileobj=fileobj) # NewRecord = quick_mapper(table) # columns = table.columns.keys() log.debug("... fetching records") # log.debug("meta: {}", meta) # obscures password # log.debug("table: {}", table) # log.debug("table.columns: {!r}", table.columns) # log.debug("multirow: {}", multirow) query = select(table.columns) if wheredict: for k, v in wheredict.items(): col = table.columns.get(k) query = query.where(col == v) # log.debug("query: {}", query) cursor = engine.execute(query) if multirow: row_dict_list = [] for r in cursor: row_dict_list.append(dict(r)) # log.debug("row_dict_list: {}", row_dict_list) if row_dict_list: statement = table.insert().values(row_dict_list) # log.debug("statement: {!r}", statement) # insert_str = literal_query(statement) insert_str = get_literal_query(statement, bind=engine) # NOT WORKING FOR MULTIROW INSERTS. ONLY SUBSTITUTES FIRST ROW. writeline_nl(fileobj, insert_str) else: writeline_nl(fileobj, sql_comment("No data!")) else: found_one = False for r in cursor: found_one = True row_dict = dict(r) statement = table.insert(values=row_dict) # insert_str = literal_query(statement) insert_str = get_literal_query(statement, bind=engine) # log.debug("row_dict: {}", row_dict) # log.debug("insert_str: {}", insert_str) writeline_nl(fileobj, insert_str) if not found_one: writeline_nl(fileobj, sql_comment("No data!")) writeline_nl(fileobj, SEP2) log.debug("... done")
[ "def", "dump_table_as_insert_sql", "(", "engine", ":", "Engine", ",", "table_name", ":", "str", ",", "fileobj", ":", "TextIO", ",", "wheredict", ":", "Dict", "[", "str", ",", "Any", "]", "=", "None", ",", "include_ddl", ":", "bool", "=", "False", ",", "multirow", ":", "bool", "=", "False", ")", "->", "None", ":", "# http://stackoverflow.com/questions/5631078/sqlalchemy-print-the-actual-query # noqa", "# http://docs.sqlalchemy.org/en/latest/faq/sqlexpressions.html", "# http://www.tylerlesmann.com/2009/apr/27/copying-databases-across-platforms-sqlalchemy/ # noqa", "# https://github.com/plq/scripts/blob/master/pg_dump.py", "log", ".", "info", "(", "\"dump_data_as_insert_sql: table_name={}\"", ",", "table_name", ")", "writelines_nl", "(", "fileobj", ",", "[", "SEP1", ",", "sql_comment", "(", "\"Data for table: {}\"", ".", "format", "(", "table_name", ")", ")", ",", "SEP2", ",", "sql_comment", "(", "\"Filters: {}\"", ".", "format", "(", "wheredict", ")", ")", ",", "]", ")", "dialect", "=", "engine", ".", "dialect", "if", "not", "dialect", ".", "supports_multivalues_insert", ":", "multirow", "=", "False", "if", "multirow", ":", "log", ".", "warning", "(", "\"dump_data_as_insert_sql: multirow parameter substitution \"", "\"not working yet\"", ")", "multirow", "=", "False", "# literal_query = make_literal_query_fn(dialect)", "meta", "=", "MetaData", "(", "bind", "=", "engine", ")", "log", ".", "debug", "(", "\"... retrieving schema\"", ")", "table", "=", "Table", "(", "table_name", ",", "meta", ",", "autoload", "=", "True", ")", "if", "include_ddl", ":", "log", ".", "debug", "(", "\"... producing DDL\"", ")", "dump_ddl", "(", "table", ".", "metadata", ",", "dialect_name", "=", "engine", ".", "dialect", ".", "name", ",", "fileobj", "=", "fileobj", ")", "# NewRecord = quick_mapper(table)", "# columns = table.columns.keys()", "log", ".", "debug", "(", "\"... fetching records\"", ")", "# log.debug(\"meta: {}\", meta) # obscures password", "# log.debug(\"table: {}\", table)", "# log.debug(\"table.columns: {!r}\", table.columns)", "# log.debug(\"multirow: {}\", multirow)", "query", "=", "select", "(", "table", ".", "columns", ")", "if", "wheredict", ":", "for", "k", ",", "v", "in", "wheredict", ".", "items", "(", ")", ":", "col", "=", "table", ".", "columns", ".", "get", "(", "k", ")", "query", "=", "query", ".", "where", "(", "col", "==", "v", ")", "# log.debug(\"query: {}\", query)", "cursor", "=", "engine", ".", "execute", "(", "query", ")", "if", "multirow", ":", "row_dict_list", "=", "[", "]", "for", "r", "in", "cursor", ":", "row_dict_list", ".", "append", "(", "dict", "(", "r", ")", ")", "# log.debug(\"row_dict_list: {}\", row_dict_list)", "if", "row_dict_list", ":", "statement", "=", "table", ".", "insert", "(", ")", ".", "values", "(", "row_dict_list", ")", "# log.debug(\"statement: {!r}\", statement)", "# insert_str = literal_query(statement)", "insert_str", "=", "get_literal_query", "(", "statement", ",", "bind", "=", "engine", ")", "# NOT WORKING FOR MULTIROW INSERTS. 
ONLY SUBSTITUTES FIRST ROW.", "writeline_nl", "(", "fileobj", ",", "insert_str", ")", "else", ":", "writeline_nl", "(", "fileobj", ",", "sql_comment", "(", "\"No data!\"", ")", ")", "else", ":", "found_one", "=", "False", "for", "r", "in", "cursor", ":", "found_one", "=", "True", "row_dict", "=", "dict", "(", "r", ")", "statement", "=", "table", ".", "insert", "(", "values", "=", "row_dict", ")", "# insert_str = literal_query(statement)", "insert_str", "=", "get_literal_query", "(", "statement", ",", "bind", "=", "engine", ")", "# log.debug(\"row_dict: {}\", row_dict)", "# log.debug(\"insert_str: {}\", insert_str)", "writeline_nl", "(", "fileobj", ",", "insert_str", ")", "if", "not", "found_one", ":", "writeline_nl", "(", "fileobj", ",", "sql_comment", "(", "\"No data!\"", ")", ")", "writeline_nl", "(", "fileobj", ",", "SEP2", ")", "log", ".", "debug", "(", "\"... done\"", ")" ]
Reads a table from the database, and writes SQL to replicate the table's data to the output ``fileobj``. Args: engine: SQLAlchemy :class:`Engine` table_name: name of the table fileobj: file-like object to write to wheredict: optional dictionary of ``{column_name: value}`` to use as ``WHERE`` filters include_ddl: if ``True``, include the DDL to create the table as well multirow: write multi-row ``INSERT`` statements
[ "Reads", "a", "table", "from", "the", "database", "and", "writes", "SQL", "to", "replicate", "the", "table", "s", "data", "to", "the", "output", "fileobj", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/sqlalchemy/dump.py#L284-L373
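A short usage sketch for `dump_table_as_insert_sql` with a hypothetical database and table; the `wheredict` filter and `include_ddl` flag follow the signature shown above.

```python
import sys
from sqlalchemy import create_engine
from cardinal_pythonlib.sqlalchemy.dump import dump_table_as_insert_sql

engine = create_engine("sqlite:///example.db")  # hypothetical source database

dump_table_as_insert_sql(
    engine=engine,
    table_name="person",        # hypothetical table
    fileobj=sys.stdout,
    wheredict={"id": 3},        # optional filter: WHERE id = 3
    include_ddl=True,           # emit CREATE TABLE before the INSERTs
)
```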
RudolfCardinal/pythonlib
cardinal_pythonlib/sqlalchemy/dump.py
dump_database_as_insert_sql
def dump_database_as_insert_sql(engine: Engine, fileobj: TextIO = sys.stdout, include_ddl: bool = False, multirow: bool = False) -> None: """ Reads an entire database and writes SQL to replicate it to the output file-like object. Args: engine: SQLAlchemy :class:`Engine` fileobj: file-like object to write to include_ddl: if ``True``, include the DDL to create the table as well multirow: write multi-row ``INSERT`` statements """ for tablename in get_table_names(engine): dump_table_as_insert_sql( engine=engine, table_name=tablename, fileobj=fileobj, include_ddl=include_ddl, multirow=multirow )
python
def dump_database_as_insert_sql(engine: Engine, fileobj: TextIO = sys.stdout, include_ddl: bool = False, multirow: bool = False) -> None: """ Reads an entire database and writes SQL to replicate it to the output file-like object. Args: engine: SQLAlchemy :class:`Engine` fileobj: file-like object to write to include_ddl: if ``True``, include the DDL to create the table as well multirow: write multi-row ``INSERT`` statements """ for tablename in get_table_names(engine): dump_table_as_insert_sql( engine=engine, table_name=tablename, fileobj=fileobj, include_ddl=include_ddl, multirow=multirow )
[ "def", "dump_database_as_insert_sql", "(", "engine", ":", "Engine", ",", "fileobj", ":", "TextIO", "=", "sys", ".", "stdout", ",", "include_ddl", ":", "bool", "=", "False", ",", "multirow", ":", "bool", "=", "False", ")", "->", "None", ":", "for", "tablename", "in", "get_table_names", "(", "engine", ")", ":", "dump_table_as_insert_sql", "(", "engine", "=", "engine", ",", "table_name", "=", "tablename", ",", "fileobj", "=", "fileobj", ",", "include_ddl", "=", "include_ddl", ",", "multirow", "=", "multirow", ")" ]
Reads an entire database and writes SQL to replicate it to the output file-like object. Args: engine: SQLAlchemy :class:`Engine` fileobj: file-like object to write to include_ddl: if ``True``, include the DDL to create the table as well multirow: write multi-row ``INSERT`` statements
[ "Reads", "an", "entire", "database", "and", "writes", "SQL", "to", "replicate", "it", "to", "the", "output", "file", "-", "like", "object", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/sqlalchemy/dump.py#L376-L397
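A sketch of dumping a whole (hypothetical) database to a `.sql` file with `dump_database_as_insert_sql`.

```python
from sqlalchemy import create_engine
from cardinal_pythonlib.sqlalchemy.dump import dump_database_as_insert_sql

engine = create_engine("sqlite:///example.db")  # hypothetical source database

with open("dump.sql", "w") as f:
    dump_database_as_insert_sql(engine, fileobj=f, include_ddl=True)
```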
RudolfCardinal/pythonlib
cardinal_pythonlib/sqlalchemy/dump.py
dump_orm_object_as_insert_sql
def dump_orm_object_as_insert_sql(engine: Engine, obj: object, fileobj: TextIO) -> None: """ Takes a SQLAlchemy ORM object, and writes ``INSERT`` SQL to replicate it to the output file-like object. Args: engine: SQLAlchemy :class:`Engine` obj: SQLAlchemy ORM object to write fileobj: file-like object to write to """ # literal_query = make_literal_query_fn(engine.dialect) insp = inspect(obj) # insp: an InstanceState # http://docs.sqlalchemy.org/en/latest/orm/internals.html#sqlalchemy.orm.state.InstanceState # noqa # insp.mapper: a Mapper # http://docs.sqlalchemy.org/en/latest/orm/mapping_api.html#sqlalchemy.orm.mapper.Mapper # noqa # Don't do this: # table = insp.mapper.mapped_table # Do this instead. The method above gives you fancy data types like list # and Arrow on the Python side. We want the bog-standard datatypes drawn # from the database itself. meta = MetaData(bind=engine) table_name = insp.mapper.mapped_table.name # log.debug("table_name: {}", table_name) table = Table(table_name, meta, autoload=True) # log.debug("table: {}", table) # NewRecord = quick_mapper(table) # columns = table.columns.keys() query = select(table.columns) # log.debug("query: {}", query) for orm_pkcol in insp.mapper.primary_key: core_pkcol = table.columns.get(orm_pkcol.name) pkval = getattr(obj, orm_pkcol.name) query = query.where(core_pkcol == pkval) # log.debug("query: {}", query) cursor = engine.execute(query) row = cursor.fetchone() # should only be one... row_dict = dict(row) # log.debug("obj: {}", obj) # log.debug("row_dict: {}", row_dict) statement = table.insert(values=row_dict) # insert_str = literal_query(statement) insert_str = get_literal_query(statement, bind=engine) writeline_nl(fileobj, insert_str)
python
def dump_orm_object_as_insert_sql(engine: Engine, obj: object, fileobj: TextIO) -> None: """ Takes a SQLAlchemy ORM object, and writes ``INSERT`` SQL to replicate it to the output file-like object. Args: engine: SQLAlchemy :class:`Engine` obj: SQLAlchemy ORM object to write fileobj: file-like object to write to """ # literal_query = make_literal_query_fn(engine.dialect) insp = inspect(obj) # insp: an InstanceState # http://docs.sqlalchemy.org/en/latest/orm/internals.html#sqlalchemy.orm.state.InstanceState # noqa # insp.mapper: a Mapper # http://docs.sqlalchemy.org/en/latest/orm/mapping_api.html#sqlalchemy.orm.mapper.Mapper # noqa # Don't do this: # table = insp.mapper.mapped_table # Do this instead. The method above gives you fancy data types like list # and Arrow on the Python side. We want the bog-standard datatypes drawn # from the database itself. meta = MetaData(bind=engine) table_name = insp.mapper.mapped_table.name # log.debug("table_name: {}", table_name) table = Table(table_name, meta, autoload=True) # log.debug("table: {}", table) # NewRecord = quick_mapper(table) # columns = table.columns.keys() query = select(table.columns) # log.debug("query: {}", query) for orm_pkcol in insp.mapper.primary_key: core_pkcol = table.columns.get(orm_pkcol.name) pkval = getattr(obj, orm_pkcol.name) query = query.where(core_pkcol == pkval) # log.debug("query: {}", query) cursor = engine.execute(query) row = cursor.fetchone() # should only be one... row_dict = dict(row) # log.debug("obj: {}", obj) # log.debug("row_dict: {}", row_dict) statement = table.insert(values=row_dict) # insert_str = literal_query(statement) insert_str = get_literal_query(statement, bind=engine) writeline_nl(fileobj, insert_str)
[ "def", "dump_orm_object_as_insert_sql", "(", "engine", ":", "Engine", ",", "obj", ":", "object", ",", "fileobj", ":", "TextIO", ")", "->", "None", ":", "# literal_query = make_literal_query_fn(engine.dialect)", "insp", "=", "inspect", "(", "obj", ")", "# insp: an InstanceState", "# http://docs.sqlalchemy.org/en/latest/orm/internals.html#sqlalchemy.orm.state.InstanceState # noqa", "# insp.mapper: a Mapper", "# http://docs.sqlalchemy.org/en/latest/orm/mapping_api.html#sqlalchemy.orm.mapper.Mapper # noqa", "# Don't do this:", "# table = insp.mapper.mapped_table", "# Do this instead. The method above gives you fancy data types like list", "# and Arrow on the Python side. We want the bog-standard datatypes drawn", "# from the database itself.", "meta", "=", "MetaData", "(", "bind", "=", "engine", ")", "table_name", "=", "insp", ".", "mapper", ".", "mapped_table", ".", "name", "# log.debug(\"table_name: {}\", table_name)", "table", "=", "Table", "(", "table_name", ",", "meta", ",", "autoload", "=", "True", ")", "# log.debug(\"table: {}\", table)", "# NewRecord = quick_mapper(table)", "# columns = table.columns.keys()", "query", "=", "select", "(", "table", ".", "columns", ")", "# log.debug(\"query: {}\", query)", "for", "orm_pkcol", "in", "insp", ".", "mapper", ".", "primary_key", ":", "core_pkcol", "=", "table", ".", "columns", ".", "get", "(", "orm_pkcol", ".", "name", ")", "pkval", "=", "getattr", "(", "obj", ",", "orm_pkcol", ".", "name", ")", "query", "=", "query", ".", "where", "(", "core_pkcol", "==", "pkval", ")", "# log.debug(\"query: {}\", query)", "cursor", "=", "engine", ".", "execute", "(", "query", ")", "row", "=", "cursor", ".", "fetchone", "(", ")", "# should only be one...", "row_dict", "=", "dict", "(", "row", ")", "# log.debug(\"obj: {}\", obj)", "# log.debug(\"row_dict: {}\", row_dict)", "statement", "=", "table", ".", "insert", "(", "values", "=", "row_dict", ")", "# insert_str = literal_query(statement)", "insert_str", "=", "get_literal_query", "(", "statement", ",", "bind", "=", "engine", ")", "writeline_nl", "(", "fileobj", ",", "insert_str", ")" ]
Takes a SQLAlchemy ORM object, and writes ``INSERT`` SQL to replicate it to the output file-like object. Args: engine: SQLAlchemy :class:`Engine` obj: SQLAlchemy ORM object to write fileobj: file-like object to write to
[ "Takes", "a", "SQLAlchemy", "ORM", "object", "and", "writes", "INSERT", "SQL", "to", "replicate", "it", "to", "the", "output", "file", "-", "like", "object", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/sqlalchemy/dump.py#L400-L447
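A self-contained sketch for `dump_orm_object_as_insert_sql`, using a throwaway in-memory SQLite database and a hypothetical `Person` model; it assumes the SQLAlchemy 1.3-era API that the reflection code above relies on.

```python
import sys
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from cardinal_pythonlib.sqlalchemy.dump import dump_orm_object_as_insert_sql

Base = declarative_base()

class Person(Base):  # hypothetical mapped class
    __tablename__ = "person"
    id = Column(Integer, primary_key=True)
    name = Column(String(50))

engine = create_engine("sqlite://")  # in-memory database
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()
session.add(Person(id=1, name="Alice"))
session.commit()

obj = session.query(Person).get(1)
dump_orm_object_as_insert_sql(engine, obj, sys.stdout)
# Roughly: INSERT INTO person (id, name) VALUES (1, 'Alice');
```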
RudolfCardinal/pythonlib
cardinal_pythonlib/sqlalchemy/dump.py
bulk_insert_extras
def bulk_insert_extras(dialect_name: str, fileobj: TextIO, start: bool) -> None: """ Writes bulk ``INSERT`` preamble (start=True) or end (start=False). For MySQL, this temporarily switches off autocommit behaviour and index/FK checks, for speed, then re-enables them at the end and commits. Args: dialect_name: SQLAlchemy dialect name (see :class:`SqlaDialectName`) fileobj: file-like object to write to start: if ``True``, write preamble; if ``False``, write end """ lines = [] if dialect_name == SqlaDialectName.MYSQL: if start: lines = [ "SET autocommit=0;", "SET unique_checks=0;", "SET foreign_key_checks=0;", ] else: lines = [ "SET foreign_key_checks=1;", "SET unique_checks=1;", "COMMIT;", ] writelines_nl(fileobj, lines)
python
def bulk_insert_extras(dialect_name: str, fileobj: TextIO, start: bool) -> None: """ Writes bulk ``INSERT`` preamble (start=True) or end (start=False). For MySQL, this temporarily switches off autocommit behaviour and index/FK checks, for speed, then re-enables them at the end and commits. Args: dialect_name: SQLAlchemy dialect name (see :class:`SqlaDialectName`) fileobj: file-like object to write to start: if ``True``, write preamble; if ``False``, write end """ lines = [] if dialect_name == SqlaDialectName.MYSQL: if start: lines = [ "SET autocommit=0;", "SET unique_checks=0;", "SET foreign_key_checks=0;", ] else: lines = [ "SET foreign_key_checks=1;", "SET unique_checks=1;", "COMMIT;", ] writelines_nl(fileobj, lines)
[ "def", "bulk_insert_extras", "(", "dialect_name", ":", "str", ",", "fileobj", ":", "TextIO", ",", "start", ":", "bool", ")", "->", "None", ":", "lines", "=", "[", "]", "if", "dialect_name", "==", "SqlaDialectName", ".", "MYSQL", ":", "if", "start", ":", "lines", "=", "[", "\"SET autocommit=0;\"", ",", "\"SET unique_checks=0;\"", ",", "\"SET foreign_key_checks=0;\"", ",", "]", "else", ":", "lines", "=", "[", "\"SET foreign_key_checks=1;\"", ",", "\"SET unique_checks=1;\"", ",", "\"COMMIT;\"", ",", "]", "writelines_nl", "(", "fileobj", ",", "lines", ")" ]
Writes bulk ``INSERT`` preamble (start=True) or end (start=False). For MySQL, this temporarily switches off autocommit behaviour and index/FK checks, for speed, then re-enables them at the end and commits. Args: dialect_name: SQLAlchemy dialect name (see :class:`SqlaDialectName`) fileobj: file-like object to write to start: if ``True``, write preamble; if ``False``, write end
[ "Writes", "bulk", "INSERT", "preamble", "(", "start", "=", "True", ")", "or", "end", "(", "start", "=", "False", ")", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/sqlalchemy/dump.py#L450-L478
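A sketch of bracketing a block of INSERTs with `bulk_insert_extras`. The dialect string "mysql" is assumed to match `SqlaDialectName.MYSQL`; for other dialects the function writes nothing.

```python
import sys
from cardinal_pythonlib.sqlalchemy.dump import bulk_insert_extras

bulk_insert_extras(dialect_name="mysql", fileobj=sys.stdout, start=True)
sys.stdout.write("-- ... INSERT statements would be written here ...\n")
bulk_insert_extras(dialect_name="mysql", fileobj=sys.stdout, start=False)
```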
RudolfCardinal/pythonlib
cardinal_pythonlib/sqlalchemy/dump.py
dump_orm_tree_as_insert_sql
def dump_orm_tree_as_insert_sql(engine: Engine, baseobj: object, fileobj: TextIO) -> None: """ Sends an object, and all its relations (discovered via "relationship" links) as ``INSERT`` commands in SQL, to ``fileobj``. Args: engine: SQLAlchemy :class:`Engine` baseobj: starting SQLAlchemy ORM object fileobj: file-like object to write to Problem: foreign key constraints. - MySQL/InnoDB doesn't wait to the end of a transaction to check FK integrity (which it should): http://stackoverflow.com/questions/5014700/in-mysql-can-i-defer-referential-integrity-checks-until-commit # noqa - PostgreSQL can. - Anyway, slightly ugly hacks... https://dev.mysql.com/doc/refman/5.5/en/optimizing-innodb-bulk-data-loading.html - Not so obvious how we can iterate through the list of ORM objects and guarantee correct insertion order with respect to all FKs. """ # noqa writeline_nl( fileobj, sql_comment("Data for all objects related to the first below:")) bulk_insert_extras(engine.dialect.name, fileobj, start=True) for part in walk_orm_tree(baseobj): dump_orm_object_as_insert_sql(engine, part, fileobj) bulk_insert_extras(engine.dialect.name, fileobj, start=False)
python
def dump_orm_tree_as_insert_sql(engine: Engine, baseobj: object, fileobj: TextIO) -> None: """ Sends an object, and all its relations (discovered via "relationship" links) as ``INSERT`` commands in SQL, to ``fileobj``. Args: engine: SQLAlchemy :class:`Engine` baseobj: starting SQLAlchemy ORM object fileobj: file-like object to write to Problem: foreign key constraints. - MySQL/InnoDB doesn't wait to the end of a transaction to check FK integrity (which it should): http://stackoverflow.com/questions/5014700/in-mysql-can-i-defer-referential-integrity-checks-until-commit # noqa - PostgreSQL can. - Anyway, slightly ugly hacks... https://dev.mysql.com/doc/refman/5.5/en/optimizing-innodb-bulk-data-loading.html - Not so obvious how we can iterate through the list of ORM objects and guarantee correct insertion order with respect to all FKs. """ # noqa writeline_nl( fileobj, sql_comment("Data for all objects related to the first below:")) bulk_insert_extras(engine.dialect.name, fileobj, start=True) for part in walk_orm_tree(baseobj): dump_orm_object_as_insert_sql(engine, part, fileobj) bulk_insert_extras(engine.dialect.name, fileobj, start=False)
[ "def", "dump_orm_tree_as_insert_sql", "(", "engine", ":", "Engine", ",", "baseobj", ":", "object", ",", "fileobj", ":", "TextIO", ")", "->", "None", ":", "# noqa", "writeline_nl", "(", "fileobj", ",", "sql_comment", "(", "\"Data for all objects related to the first below:\"", ")", ")", "bulk_insert_extras", "(", "engine", ".", "dialect", ".", "name", ",", "fileobj", ",", "start", "=", "True", ")", "for", "part", "in", "walk_orm_tree", "(", "baseobj", ")", ":", "dump_orm_object_as_insert_sql", "(", "engine", ",", "part", ",", "fileobj", ")", "bulk_insert_extras", "(", "engine", ".", "dialect", ".", "name", ",", "fileobj", ",", "start", "=", "False", ")" ]
Sends an object, and all its relations (discovered via "relationship" links) as ``INSERT`` commands in SQL, to ``fileobj``. Args: engine: SQLAlchemy :class:`Engine` baseobj: starting SQLAlchemy ORM object fileobj: file-like object to write to Problem: foreign key constraints. - MySQL/InnoDB doesn't wait to the end of a transaction to check FK integrity (which it should): http://stackoverflow.com/questions/5014700/in-mysql-can-i-defer-referential-integrity-checks-until-commit # noqa - PostgreSQL can. - Anyway, slightly ugly hacks... https://dev.mysql.com/doc/refman/5.5/en/optimizing-innodb-bulk-data-loading.html - Not so obvious how we can iterate through the list of ORM objects and guarantee correct insertion order with respect to all FKs.
[ "Sends", "an", "object", "and", "all", "its", "relations", "(", "discovered", "via", "relationship", "links", ")", "as", "INSERT", "commands", "in", "SQL", "to", "fileobj", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/sqlalchemy/dump.py#L481-L510
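A self-contained sketch for `dump_orm_tree_as_insert_sql`, with a hypothetical parent/child model in an in-memory SQLite database so the relationship walk has something to traverse.

```python
import sys
from sqlalchemy import Column, ForeignKey, Integer, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship, sessionmaker
from cardinal_pythonlib.sqlalchemy.dump import dump_orm_tree_as_insert_sql

Base = declarative_base()

class Parent(Base):  # hypothetical model
    __tablename__ = "parent"
    id = Column(Integer, primary_key=True)
    children = relationship("Child")

class Child(Base):  # hypothetical model
    __tablename__ = "child"
    id = Column(Integer, primary_key=True)
    parent_id = Column(Integer, ForeignKey("parent.id"))

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()
session.add(Parent(id=1, children=[Child(id=1), Child(id=2)]))
session.commit()

parent = session.query(Parent).get(1)
dump_orm_tree_as_insert_sql(engine, parent, sys.stdout)
# Dumps the parent row and, via the relationship, both child rows.
```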
avihad/twistes
twistes/scroller.py
Scroller.next
def next(self): """Fetch next page from scroll API.""" d = None if self._first_results: d = succeed(EsUtils.extract_hits(self._first_results)) self._first_results = None elif self._scroll_id: d = self._scroll_next_results() else: raise StopIteration() return d
python
def next(self): """Fetch next page from scroll API.""" d = None if self._first_results: d = succeed(EsUtils.extract_hits(self._first_results)) self._first_results = None elif self._scroll_id: d = self._scroll_next_results() else: raise StopIteration() return d
[ "def", "next", "(", "self", ")", ":", "d", "=", "None", "if", "self", ".", "_first_results", ":", "d", "=", "succeed", "(", "EsUtils", ".", "extract_hits", "(", "self", ".", "_first_results", ")", ")", "self", ".", "_first_results", "=", "None", "elif", "self", ".", "_scroll_id", ":", "d", "=", "self", ".", "_scroll_next_results", "(", ")", "else", ":", "raise", "StopIteration", "(", ")", "return", "d" ]
Fetch next page from scroll API.
[ "Fetch", "next", "page", "from", "scroll", "API", "." ]
train
https://github.com/avihad/twistes/blob/9ab8f5aa088b8886aefe3dec85a400e5035e034a/twistes/scroller.py#L31-L41
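A hedged sketch of draining a twistes `Scroller` from Twisted code, based only on the `next()` behaviour shown above (one Deferred per page, `StopIteration` once exhausted); how the Scroller is obtained from a search call is not shown here.

```python
from twisted.internet import defer

@defer.inlineCallbacks
def collect_all_hits(scroller):
    """Accumulate every page of hits from a twistes Scroller."""
    hits = []
    while True:
        try:
            page = yield scroller.next()  # Deferred firing with one page of hits
        except StopIteration:             # raised once the scroll is exhausted
            break
        hits.extend(page)
    defer.returnValue(hits)
```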
RudolfCardinal/pythonlib
cardinal_pythonlib/source_reformatting.py
reformat_python_docstrings
def reformat_python_docstrings(top_dirs: List[str], correct_copyright_lines: List[str], show_only: bool = True, rewrite: bool = False, process_only_filenum: int = None) -> None: """ Walk a directory, finding Python files and rewriting them. Args: top_dirs: list of directories to descend into correct_copyright_lines: list of lines (without newlines) representing the copyright docstring block, including the transition lines of equals symbols show_only: show results (to stdout) only; don't rewrite rewrite: write the changes process_only_filenum: only process this file number (1-based index); for debugging only """ filenum = 0 for top_dir in top_dirs: for dirpath, dirnames, filenames in walk(top_dir): for filename in filenames: fullname = join(dirpath, filename) extension = splitext(filename)[1] if extension != PYTHON_EXTENSION: # log.debug("Skipping non-Python file: {}", fullname) continue filenum += 1 if process_only_filenum and filenum != process_only_filenum: continue log.info("Processing file {}: {}", filenum, fullname) proc = PythonProcessor( full_path=fullname, top_dir=top_dir, correct_copyright_lines=correct_copyright_lines) if show_only: proc.show() elif rewrite: proc.rewrite_file()
python
def reformat_python_docstrings(top_dirs: List[str], correct_copyright_lines: List[str], show_only: bool = True, rewrite: bool = False, process_only_filenum: int = None) -> None: """ Walk a directory, finding Python files and rewriting them. Args: top_dirs: list of directories to descend into correct_copyright_lines: list of lines (without newlines) representing the copyright docstring block, including the transition lines of equals symbols show_only: show results (to stdout) only; don't rewrite rewrite: write the changes process_only_filenum: only process this file number (1-based index); for debugging only """ filenum = 0 for top_dir in top_dirs: for dirpath, dirnames, filenames in walk(top_dir): for filename in filenames: fullname = join(dirpath, filename) extension = splitext(filename)[1] if extension != PYTHON_EXTENSION: # log.debug("Skipping non-Python file: {}", fullname) continue filenum += 1 if process_only_filenum and filenum != process_only_filenum: continue log.info("Processing file {}: {}", filenum, fullname) proc = PythonProcessor( full_path=fullname, top_dir=top_dir, correct_copyright_lines=correct_copyright_lines) if show_only: proc.show() elif rewrite: proc.rewrite_file()
[ "def", "reformat_python_docstrings", "(", "top_dirs", ":", "List", "[", "str", "]", ",", "correct_copyright_lines", ":", "List", "[", "str", "]", ",", "show_only", ":", "bool", "=", "True", ",", "rewrite", ":", "bool", "=", "False", ",", "process_only_filenum", ":", "int", "=", "None", ")", "->", "None", ":", "filenum", "=", "0", "for", "top_dir", "in", "top_dirs", ":", "for", "dirpath", ",", "dirnames", ",", "filenames", "in", "walk", "(", "top_dir", ")", ":", "for", "filename", "in", "filenames", ":", "fullname", "=", "join", "(", "dirpath", ",", "filename", ")", "extension", "=", "splitext", "(", "filename", ")", "[", "1", "]", "if", "extension", "!=", "PYTHON_EXTENSION", ":", "# log.debug(\"Skipping non-Python file: {}\", fullname)", "continue", "filenum", "+=", "1", "if", "process_only_filenum", "and", "filenum", "!=", "process_only_filenum", ":", "continue", "log", ".", "info", "(", "\"Processing file {}: {}\"", ",", "filenum", ",", "fullname", ")", "proc", "=", "PythonProcessor", "(", "full_path", "=", "fullname", ",", "top_dir", "=", "top_dir", ",", "correct_copyright_lines", "=", "correct_copyright_lines", ")", "if", "show_only", ":", "proc", ".", "show", "(", ")", "elif", "rewrite", ":", "proc", ".", "rewrite_file", "(", ")" ]
Walk a directory, finding Python files and rewriting them. Args: top_dirs: list of directories to descend into correct_copyright_lines: list of lines (without newlines) representing the copyright docstring block, including the transition lines of equals symbols show_only: show results (to stdout) only; don't rewrite rewrite: write the changes process_only_filenum: only process this file number (1-based index); for debugging only
[ "Walk", "a", "directory", "finding", "Python", "files", "and", "rewriting", "them", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/source_reformatting.py#L310-L352
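A sketch of calling `reformat_python_docstrings` in preview mode; the directories and copyright block are hypothetical, shaped as the docstring describes (transition lines of equals signs around the copyright text).

```python
from cardinal_pythonlib.source_reformatting import reformat_python_docstrings

COPYRIGHT_LINES = [
    "===============================================================================",
    "    Copyright (C) 2024 Example Author.",  # hypothetical copyright text
    "===============================================================================",
]

reformat_python_docstrings(
    top_dirs=["./mypackage", "./tests"],       # hypothetical source trees
    correct_copyright_lines=COPYRIGHT_LINES,
    show_only=True,    # print proposed files; use rewrite=True to modify in place
)
```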
RudolfCardinal/pythonlib
cardinal_pythonlib/source_reformatting.py
PythonProcessor._read_source
def _read_source(self) -> None: """ Reads the source file. """ with open(self.full_path, "rt") as f: for linenum, line_with_nl in enumerate(f.readlines(), start=1): line_without_newline = ( line_with_nl[:-1] if line_with_nl.endswith(NL) else line_with_nl ) if TAB in line_without_newline: self._warn("Tab character at line {}".format(linenum)) if CR in line_without_newline: self._warn("Carriage return character at line {} " "(Windows CR+LF endings?)".format(linenum)) self.source_lines.append(line_without_newline)
python
def _read_source(self) -> None: """ Reads the source file. """ with open(self.full_path, "rt") as f: for linenum, line_with_nl in enumerate(f.readlines(), start=1): line_without_newline = ( line_with_nl[:-1] if line_with_nl.endswith(NL) else line_with_nl ) if TAB in line_without_newline: self._warn("Tab character at line {}".format(linenum)) if CR in line_without_newline: self._warn("Carriage return character at line {} " "(Windows CR+LF endings?)".format(linenum)) self.source_lines.append(line_without_newline)
[ "def", "_read_source", "(", "self", ")", "->", "None", ":", "with", "open", "(", "self", ".", "full_path", ",", "\"rt\"", ")", "as", "f", ":", "for", "linenum", ",", "line_with_nl", "in", "enumerate", "(", "f", ".", "readlines", "(", ")", ",", "start", "=", "1", ")", ":", "line_without_newline", "=", "(", "line_with_nl", "[", ":", "-", "1", "]", "if", "line_with_nl", ".", "endswith", "(", "NL", ")", "else", "line_with_nl", ")", "if", "TAB", "in", "line_without_newline", ":", "self", ".", "_warn", "(", "\"Tab character at line {}\"", ".", "format", "(", "linenum", ")", ")", "if", "CR", "in", "line_without_newline", ":", "self", ".", "_warn", "(", "\"Carriage return character at line {} \"", "\"(Windows CR+LF endings?)\"", ".", "format", "(", "linenum", ")", ")", "self", ".", "source_lines", ".", "append", "(", "line_without_newline", ")" ]
Reads the source file.
[ "Reads", "the", "source", "file", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/source_reformatting.py#L95-L110
RudolfCardinal/pythonlib
cardinal_pythonlib/source_reformatting.py
PythonProcessor._create_dest
def _create_dest(self) -> None: """ Creates an internal representation of the destination file. This is where the thinking happens """ in_body = False in_docstring = False in_copyright = False copyright_done = False docstring_done = False swallow_blanks_and_filename_in_docstring = False for linenum, sl in enumerate(self.source_lines, start=1): dl = sl if dl.endswith(SPACE): self._debug("Line {} ends in whitespace".format(linenum)) dl = dl.rstrip() if not in_body: if linenum == 1: # Shebang if not dl.startswith(SHEBANG_START): self._warn("File does not start with shebang; " "first line was {!r}".format(dl)) self._too_risky() return if dl != CORRECT_SHEBANG: self._debug("Rewriting shebang; was {!r}".format(dl)) dl = CORRECT_SHEBANG if (linenum == 2 and dl.startswith(HASH_SPACE) and dl.endswith(PYTHON_EXTENSION)): self._debug( "Removing filename comment: {!r}".format(dl)) dl = None elif TRIPLE_DOUBLEQUOTE in dl: if (not dl.startswith(TRIPLE_DOUBLEQUOTE) and not dl.startswith(RAW_TRIPLE_DOUBLEQUOTE)): self._warn( "Triple-quote not at start of line, as follows") self._debug_line(linenum, dl) self._too_risky() return if in_docstring: # docstring finishing in_docstring = False docstring_done = True in_body = True # ... and keep dl, so we write the end of the # docstring, potentially with e.g. "# noqa" on the end elif not docstring_done: # docstring starting in_docstring = True # self._critical("adding our new docstring") # Write our new docstring's start tdq = "" # stops linter moaning if dl.startswith(TRIPLE_DOUBLEQUOTE): tdq = TRIPLE_DOUBLEQUOTE elif dl.startswith(RAW_TRIPLE_DOUBLEQUOTE): tdq = RAW_TRIPLE_DOUBLEQUOTE else: assert "Bug!" self.dest_lines.append(tdq) self.dest_lines.append(self.advertised_filename) self.dest_lines.append(BLANK) self.dest_lines.extend(self.correct_copyright_lines) self.dest_lines.append(BLANK) swallow_blanks_and_filename_in_docstring = True if dl == tdq: dl = None # don't write another triple-quote line else: dl = dl[len(tdq):] elif in_docstring: # Reading within the source docstring if dl == TRANSITION: if in_copyright: # copyright finishing in_copyright = False copyright_done = True dl = None # we've already replaced with our own elif not copyright_done: in_copyright = True dl = None # we've already replaced with our own elif in_copyright: dl = None # we've already replaced with our own elif dl == RST_COMMENT_LINE: dl = None # remove these elif swallow_blanks_and_filename_in_docstring: # self._debug_line(linenum, dl) if dl == BLANK or dl == self.advertised_filename: dl = None elif copyright_done: swallow_blanks_and_filename_in_docstring = False elif not dl.startswith(HASH) and not dl == BLANK: in_body = True if not docstring_done: # The source file didn't have a docstring! new_docstring_lines = [ BLANK, TRIPLE_DOUBLEQUOTE, self.advertised_filename, BLANK, ] + self.correct_copyright_lines + [ BLANK, MISSING_RST_TITLE, BLANK, TRIPLE_DOUBLEQUOTE ] self._warn("File had no docstring; adding one. " "Will need manual edit to add RST title. " "Search for {!r}".format(MISSING_RST_TITLE)) self.dest_lines[1:1] = new_docstring_lines if dl is not None: # self._debug_line(linenum, dl, "adding ") self.dest_lines.append(dl) self.needs_rewriting = self.dest_lines != self.source_lines
python
def _create_dest(self) -> None: """ Creates an internal representation of the destination file. This is where the thinking happens """ in_body = False in_docstring = False in_copyright = False copyright_done = False docstring_done = False swallow_blanks_and_filename_in_docstring = False for linenum, sl in enumerate(self.source_lines, start=1): dl = sl if dl.endswith(SPACE): self._debug("Line {} ends in whitespace".format(linenum)) dl = dl.rstrip() if not in_body: if linenum == 1: # Shebang if not dl.startswith(SHEBANG_START): self._warn("File does not start with shebang; " "first line was {!r}".format(dl)) self._too_risky() return if dl != CORRECT_SHEBANG: self._debug("Rewriting shebang; was {!r}".format(dl)) dl = CORRECT_SHEBANG if (linenum == 2 and dl.startswith(HASH_SPACE) and dl.endswith(PYTHON_EXTENSION)): self._debug( "Removing filename comment: {!r}".format(dl)) dl = None elif TRIPLE_DOUBLEQUOTE in dl: if (not dl.startswith(TRIPLE_DOUBLEQUOTE) and not dl.startswith(RAW_TRIPLE_DOUBLEQUOTE)): self._warn( "Triple-quote not at start of line, as follows") self._debug_line(linenum, dl) self._too_risky() return if in_docstring: # docstring finishing in_docstring = False docstring_done = True in_body = True # ... and keep dl, so we write the end of the # docstring, potentially with e.g. "# noqa" on the end elif not docstring_done: # docstring starting in_docstring = True # self._critical("adding our new docstring") # Write our new docstring's start tdq = "" # stops linter moaning if dl.startswith(TRIPLE_DOUBLEQUOTE): tdq = TRIPLE_DOUBLEQUOTE elif dl.startswith(RAW_TRIPLE_DOUBLEQUOTE): tdq = RAW_TRIPLE_DOUBLEQUOTE else: assert "Bug!" self.dest_lines.append(tdq) self.dest_lines.append(self.advertised_filename) self.dest_lines.append(BLANK) self.dest_lines.extend(self.correct_copyright_lines) self.dest_lines.append(BLANK) swallow_blanks_and_filename_in_docstring = True if dl == tdq: dl = None # don't write another triple-quote line else: dl = dl[len(tdq):] elif in_docstring: # Reading within the source docstring if dl == TRANSITION: if in_copyright: # copyright finishing in_copyright = False copyright_done = True dl = None # we've already replaced with our own elif not copyright_done: in_copyright = True dl = None # we've already replaced with our own elif in_copyright: dl = None # we've already replaced with our own elif dl == RST_COMMENT_LINE: dl = None # remove these elif swallow_blanks_and_filename_in_docstring: # self._debug_line(linenum, dl) if dl == BLANK or dl == self.advertised_filename: dl = None elif copyright_done: swallow_blanks_and_filename_in_docstring = False elif not dl.startswith(HASH) and not dl == BLANK: in_body = True if not docstring_done: # The source file didn't have a docstring! new_docstring_lines = [ BLANK, TRIPLE_DOUBLEQUOTE, self.advertised_filename, BLANK, ] + self.correct_copyright_lines + [ BLANK, MISSING_RST_TITLE, BLANK, TRIPLE_DOUBLEQUOTE ] self._warn("File had no docstring; adding one. " "Will need manual edit to add RST title. " "Search for {!r}".format(MISSING_RST_TITLE)) self.dest_lines[1:1] = new_docstring_lines if dl is not None: # self._debug_line(linenum, dl, "adding ") self.dest_lines.append(dl) self.needs_rewriting = self.dest_lines != self.source_lines
[ "def", "_create_dest", "(", "self", ")", "->", "None", ":", "in_body", "=", "False", "in_docstring", "=", "False", "in_copyright", "=", "False", "copyright_done", "=", "False", "docstring_done", "=", "False", "swallow_blanks_and_filename_in_docstring", "=", "False", "for", "linenum", ",", "sl", "in", "enumerate", "(", "self", ".", "source_lines", ",", "start", "=", "1", ")", ":", "dl", "=", "sl", "if", "dl", ".", "endswith", "(", "SPACE", ")", ":", "self", ".", "_debug", "(", "\"Line {} ends in whitespace\"", ".", "format", "(", "linenum", ")", ")", "dl", "=", "dl", ".", "rstrip", "(", ")", "if", "not", "in_body", ":", "if", "linenum", "==", "1", ":", "# Shebang", "if", "not", "dl", ".", "startswith", "(", "SHEBANG_START", ")", ":", "self", ".", "_warn", "(", "\"File does not start with shebang; \"", "\"first line was {!r}\"", ".", "format", "(", "dl", ")", ")", "self", ".", "_too_risky", "(", ")", "return", "if", "dl", "!=", "CORRECT_SHEBANG", ":", "self", ".", "_debug", "(", "\"Rewriting shebang; was {!r}\"", ".", "format", "(", "dl", ")", ")", "dl", "=", "CORRECT_SHEBANG", "if", "(", "linenum", "==", "2", "and", "dl", ".", "startswith", "(", "HASH_SPACE", ")", "and", "dl", ".", "endswith", "(", "PYTHON_EXTENSION", ")", ")", ":", "self", ".", "_debug", "(", "\"Removing filename comment: {!r}\"", ".", "format", "(", "dl", ")", ")", "dl", "=", "None", "elif", "TRIPLE_DOUBLEQUOTE", "in", "dl", ":", "if", "(", "not", "dl", ".", "startswith", "(", "TRIPLE_DOUBLEQUOTE", ")", "and", "not", "dl", ".", "startswith", "(", "RAW_TRIPLE_DOUBLEQUOTE", ")", ")", ":", "self", ".", "_warn", "(", "\"Triple-quote not at start of line, as follows\"", ")", "self", ".", "_debug_line", "(", "linenum", ",", "dl", ")", "self", ".", "_too_risky", "(", ")", "return", "if", "in_docstring", ":", "# docstring finishing", "in_docstring", "=", "False", "docstring_done", "=", "True", "in_body", "=", "True", "# ... and keep dl, so we write the end of the", "# docstring, potentially with e.g. 
\"# noqa\" on the end", "elif", "not", "docstring_done", ":", "# docstring starting", "in_docstring", "=", "True", "# self._critical(\"adding our new docstring\")", "# Write our new docstring's start", "tdq", "=", "\"\"", "# stops linter moaning", "if", "dl", ".", "startswith", "(", "TRIPLE_DOUBLEQUOTE", ")", ":", "tdq", "=", "TRIPLE_DOUBLEQUOTE", "elif", "dl", ".", "startswith", "(", "RAW_TRIPLE_DOUBLEQUOTE", ")", ":", "tdq", "=", "RAW_TRIPLE_DOUBLEQUOTE", "else", ":", "assert", "\"Bug!\"", "self", ".", "dest_lines", ".", "append", "(", "tdq", ")", "self", ".", "dest_lines", ".", "append", "(", "self", ".", "advertised_filename", ")", "self", ".", "dest_lines", ".", "append", "(", "BLANK", ")", "self", ".", "dest_lines", ".", "extend", "(", "self", ".", "correct_copyright_lines", ")", "self", ".", "dest_lines", ".", "append", "(", "BLANK", ")", "swallow_blanks_and_filename_in_docstring", "=", "True", "if", "dl", "==", "tdq", ":", "dl", "=", "None", "# don't write another triple-quote line", "else", ":", "dl", "=", "dl", "[", "len", "(", "tdq", ")", ":", "]", "elif", "in_docstring", ":", "# Reading within the source docstring", "if", "dl", "==", "TRANSITION", ":", "if", "in_copyright", ":", "# copyright finishing", "in_copyright", "=", "False", "copyright_done", "=", "True", "dl", "=", "None", "# we've already replaced with our own", "elif", "not", "copyright_done", ":", "in_copyright", "=", "True", "dl", "=", "None", "# we've already replaced with our own", "elif", "in_copyright", ":", "dl", "=", "None", "# we've already replaced with our own", "elif", "dl", "==", "RST_COMMENT_LINE", ":", "dl", "=", "None", "# remove these", "elif", "swallow_blanks_and_filename_in_docstring", ":", "# self._debug_line(linenum, dl)", "if", "dl", "==", "BLANK", "or", "dl", "==", "self", ".", "advertised_filename", ":", "dl", "=", "None", "elif", "copyright_done", ":", "swallow_blanks_and_filename_in_docstring", "=", "False", "elif", "not", "dl", ".", "startswith", "(", "HASH", ")", "and", "not", "dl", "==", "BLANK", ":", "in_body", "=", "True", "if", "not", "docstring_done", ":", "# The source file didn't have a docstring!", "new_docstring_lines", "=", "[", "BLANK", ",", "TRIPLE_DOUBLEQUOTE", ",", "self", ".", "advertised_filename", ",", "BLANK", ",", "]", "+", "self", ".", "correct_copyright_lines", "+", "[", "BLANK", ",", "MISSING_RST_TITLE", ",", "BLANK", ",", "TRIPLE_DOUBLEQUOTE", "]", "self", ".", "_warn", "(", "\"File had no docstring; adding one. \"", "\"Will need manual edit to add RST title. \"", "\"Search for {!r}\"", ".", "format", "(", "MISSING_RST_TITLE", ")", ")", "self", ".", "dest_lines", "[", "1", ":", "1", "]", "=", "new_docstring_lines", "if", "dl", "is", "not", "None", ":", "# self._debug_line(linenum, dl, \"adding \")", "self", ".", "dest_lines", ".", "append", "(", "dl", ")", "self", ".", "needs_rewriting", "=", "self", ".", "dest_lines", "!=", "self", ".", "source_lines" ]
Creates an internal representation of the destination file. This is where the thinking happens
[ "Creates", "an", "internal", "representation", "of", "the", "destination", "file", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/source_reformatting.py#L112-L236
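A hedged sketch of just the shebang-normalisation step that this method performs first. SHEBANG_START and CORRECT_SHEBANG are assumed values here (their definitions are not part of this record), and the sketch raises rather than warning-and-returning as the real method does.

.. code-block:: python

    SHEBANG_START = "#!"                       # assumption: not shown in this record
    CORRECT_SHEBANG = "#!/usr/bin/env python"  # assumption: not shown in this record

    def normalise_shebang(first_line: str) -> str:
        """Loosely mirrors the first-line handling above: reject non-shebang files, rewrite others."""
        if not first_line.startswith(SHEBANG_START):
            raise ValueError("File does not start with shebang: {!r}".format(first_line))
        return CORRECT_SHEBANG if first_line != CORRECT_SHEBANG else first_line

    normalise_shebang("#!/usr/bin/python")  # -> '#!/usr/bin/env python'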
RudolfCardinal/pythonlib
cardinal_pythonlib/source_reformatting.py
PythonProcessor._debug_line
def _debug_line(linenum: int, line: str, extramsg: str = "") -> None: """ Writes a debugging report on a line. """ log.critical("{}Line {}: {!r}", extramsg, linenum, line)
python
def _debug_line(linenum: int, line: str, extramsg: str = "") -> None: """ Writes a debugging report on a line. """ log.critical("{}Line {}: {!r}", extramsg, linenum, line)
[ "def", "_debug_line", "(", "linenum", ":", "int", ",", "line", ":", "str", ",", "extramsg", ":", "str", "=", "\"\"", ")", "->", "None", ":", "log", ".", "critical", "(", "\"{}Line {}: {!r}\"", ",", "extramsg", ",", "linenum", ",", "line", ")" ]
Writes a debugging report on a line.
[ "Writes", "a", "debugging", "report", "on", "a", "line", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/source_reformatting.py#L239-L243
RudolfCardinal/pythonlib
cardinal_pythonlib/source_reformatting.py
PythonProcessor.rewrite_file
def rewrite_file(self) -> None: """ Rewrites the source file. """ if not self.needs_rewriting: return self._info("Rewriting file") with open(self.full_path, "w") as outfile: self._write(outfile)
python
def rewrite_file(self) -> None: """ Rewrites the source file. """ if not self.needs_rewriting: return self._info("Rewriting file") with open(self.full_path, "w") as outfile: self._write(outfile)
[ "def", "rewrite_file", "(", "self", ")", "->", "None", ":", "if", "not", "self", ".", "needs_rewriting", ":", "return", "self", ".", "_info", "(", "\"Rewriting file\"", ")", "with", "open", "(", "self", ".", "full_path", ",", "\"w\"", ")", "as", "outfile", ":", "self", ".", "_write", "(", "outfile", ")" ]
Rewrites the source file.
[ "Rewrites", "the", "source", "file", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/source_reformatting.py#L288-L296
RudolfCardinal/pythonlib
cardinal_pythonlib/source_reformatting.py
PythonProcessor._write
def _write(self, destination: TextIO) -> None: """ Writes the converted output to a destination. """ for line in self.dest_lines: destination.write(line + NL)
python
def _write(self, destination: TextIO) -> None: """ Writes the converted output to a destination. """ for line in self.dest_lines: destination.write(line + NL)
[ "def", "_write", "(", "self", ",", "destination", ":", "TextIO", ")", "->", "None", ":", "for", "line", "in", "self", ".", "dest_lines", ":", "destination", ".", "write", "(", "line", "+", "NL", ")" ]
Writes the converted output to a destination.
[ "Writes", "the", "converted", "output", "to", "a", "destination", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/source_reformatting.py#L298-L303
RudolfCardinal/pythonlib
cardinal_pythonlib/lists.py
contains_duplicates
def contains_duplicates(values: Iterable[Any]) -> bool: """ Does the iterable contain any duplicate values? """ for v in Counter(values).values(): if v > 1: return True return False
python
def contains_duplicates(values: Iterable[Any]) -> bool: """ Does the iterable contain any duplicate values? """ for v in Counter(values).values(): if v > 1: return True return False
[ "def", "contains_duplicates", "(", "values", ":", "Iterable", "[", "Any", "]", ")", "->", "bool", ":", "for", "v", "in", "Counter", "(", "values", ")", ".", "values", "(", ")", ":", "if", "v", ">", "1", ":", "return", "True", "return", "False" ]
Does the iterable contain any duplicate values?
[ "Does", "the", "iterable", "contain", "any", "duplicate", "values?" ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/lists.py#L38-L45
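A short usage sketch; the results follow from the Counter-based check shown above.

.. code-block:: python

    from cardinal_pythonlib.lists import contains_duplicates

    contains_duplicates([1, 2, 3])     # False
    contains_duplicates([1, 2, 2, 3])  # True
    contains_duplicates("banana")      # True: 'a' and 'n' each occur more than once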
RudolfCardinal/pythonlib
cardinal_pythonlib/lists.py
index_list_for_sort_order
def index_list_for_sort_order(x: List[Any], key: Callable[[Any], Any] = None, reverse: bool = False) -> List[int]: """ Returns a list of indexes of ``x``, IF ``x`` WERE TO BE SORTED. Args: x: data key: function to be applied to the data to generate a sort key; this function is passed as the ``key=`` parameter to :func:`sorted`; the default is ``itemgetter(1)`` reverse: reverse the sort order? Returns: list of integer index values Example: .. code-block:: python z = ["a", "c", "b"] index_list_for_sort_order(z) # [0, 2, 1] index_list_for_sort_order(z, reverse=True) # [1, 2, 0] q = [("a", 9), ("b", 8), ("c", 7)] index_list_for_sort_order(q, key=itemgetter(1)) """ def key_with_user_func(idx_val: Tuple[int, Any]): return key(idx_val[1]) if key: sort_key = key_with_user_func # see the simpler version below else: sort_key = itemgetter(1) # enumerate, below, will return tuples of (index, value), so # itemgetter(1) means sort by the value index_value_list = sorted(enumerate(x), key=sort_key, reverse=reverse) return [i for i, _ in index_value_list]
python
def index_list_for_sort_order(x: List[Any], key: Callable[[Any], Any] = None, reverse: bool = False) -> List[int]: """ Returns a list of indexes of ``x``, IF ``x`` WERE TO BE SORTED. Args: x: data key: function to be applied to the data to generate a sort key; this function is passed as the ``key=`` parameter to :func:`sorted`; the default is ``itemgetter(1)`` reverse: reverse the sort order? Returns: list of integer index values Example: .. code-block:: python z = ["a", "c", "b"] index_list_for_sort_order(z) # [0, 2, 1] index_list_for_sort_order(z, reverse=True) # [1, 2, 0] q = [("a", 9), ("b", 8), ("c", 7)] index_list_for_sort_order(q, key=itemgetter(1)) """ def key_with_user_func(idx_val: Tuple[int, Any]): return key(idx_val[1]) if key: sort_key = key_with_user_func # see the simpler version below else: sort_key = itemgetter(1) # enumerate, below, will return tuples of (index, value), so # itemgetter(1) means sort by the value index_value_list = sorted(enumerate(x), key=sort_key, reverse=reverse) return [i for i, _ in index_value_list]
[ "def", "index_list_for_sort_order", "(", "x", ":", "List", "[", "Any", "]", ",", "key", ":", "Callable", "[", "[", "Any", "]", ",", "Any", "]", "=", "None", ",", "reverse", ":", "bool", "=", "False", ")", "->", "List", "[", "int", "]", ":", "def", "key_with_user_func", "(", "idx_val", ":", "Tuple", "[", "int", ",", "Any", "]", ")", ":", "return", "key", "(", "idx_val", "[", "1", "]", ")", "if", "key", ":", "sort_key", "=", "key_with_user_func", "# see the simpler version below", "else", ":", "sort_key", "=", "itemgetter", "(", "1", ")", "# enumerate, below, will return tuples of (index, value), so", "# itemgetter(1) means sort by the value", "index_value_list", "=", "sorted", "(", "enumerate", "(", "x", ")", ",", "key", "=", "sort_key", ",", "reverse", "=", "reverse", ")", "return", "[", "i", "for", "i", ",", "_", "in", "index_value_list", "]" ]
Returns a list of indexes of ``x``, IF ``x`` WERE TO BE SORTED. Args: x: data key: function to be applied to the data to generate a sort key; this function is passed as the ``key=`` parameter to :func:`sorted`; the default is ``itemgetter(1)`` reverse: reverse the sort order? Returns: list of integer index values Example: .. code-block:: python z = ["a", "c", "b"] index_list_for_sort_order(z) # [0, 2, 1] index_list_for_sort_order(z, reverse=True) # [1, 2, 0] q = [("a", 9), ("b", 8), ("c", 7)] index_list_for_sort_order(q, key=itemgetter(1))
[ "Returns", "a", "list", "of", "indexes", "of", "x", "IF", "x", "WERE", "TO", "BE", "SORTED", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/lists.py#L48-L84
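A runnable usage sketch expanding slightly on the docstring's own example, including the key= form.

.. code-block:: python

    from operator import itemgetter
    from cardinal_pythonlib.lists import index_list_for_sort_order

    z = ["a", "c", "b"]
    index_list_for_sort_order(z)                     # [0, 2, 1]
    index_list_for_sort_order(z, reverse=True)       # [1, 2, 0]

    q = [("a", 9), ("b", 8), ("c", 7)]
    index_list_for_sort_order(q, key=itemgetter(1))  # [2, 1, 0]: sorted by the second element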
RudolfCardinal/pythonlib
cardinal_pythonlib/lists.py
sort_list_by_index_list
def sort_list_by_index_list(x: List[Any], indexes: List[int]) -> None: """ Re-orders ``x`` by the list of ``indexes`` of ``x``, in place. Example: .. code-block:: python from cardinal_pythonlib.lists import sort_list_by_index_list z = ["a", "b", "c", "d", "e"] sort_list_by_index_list(z, [4, 0, 1, 2, 3]) z # ["e", "a", "b", "c", "d"] """ x[:] = [x[i] for i in indexes]
python
def sort_list_by_index_list(x: List[Any], indexes: List[int]) -> None: """ Re-orders ``x`` by the list of ``indexes`` of ``x``, in place. Example: .. code-block:: python from cardinal_pythonlib.lists import sort_list_by_index_list z = ["a", "b", "c", "d", "e"] sort_list_by_index_list(z, [4, 0, 1, 2, 3]) z # ["e", "a", "b", "c", "d"] """ x[:] = [x[i] for i in indexes]
[ "def", "sort_list_by_index_list", "(", "x", ":", "List", "[", "Any", "]", ",", "indexes", ":", "List", "[", "int", "]", ")", "->", "None", ":", "x", "[", ":", "]", "=", "[", "x", "[", "i", "]", "for", "i", "in", "indexes", "]" ]
Re-orders ``x`` by the list of ``indexes`` of ``x``, in place. Example: .. code-block:: python from cardinal_pythonlib.lists import sort_list_by_index_list z = ["a", "b", "c", "d", "e"] sort_list_by_index_list(z, [4, 0, 1, 2, 3]) z # ["e", "a", "b", "c", "d"]
[ "Re", "-", "orders", "x", "by", "the", "list", "of", "indexes", "of", "x", "in", "place", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/lists.py#L87-L101
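A usage sketch combining this with index_list_for_sort_order (from the same module) to reorder one list by the sort order of another.

.. code-block:: python

    from cardinal_pythonlib.lists import index_list_for_sort_order, sort_list_by_index_list

    names = ["carol", "alice", "bob"]
    ages = [35, 20, 30]
    order = index_list_for_sort_order(ages)  # [1, 2, 0]
    sort_list_by_index_list(names, order)
    sort_list_by_index_list(ages, order)
    names  # ["alice", "bob", "carol"]
    ages   # [20, 30, 35]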
RudolfCardinal/pythonlib
cardinal_pythonlib/lists.py
flatten_list
def flatten_list(x: List[Any]) -> List[Any]: """ Converts a list of lists into a flat list. Args: x: list of lists Returns: flat list As per http://stackoverflow.com/questions/952914/making-a-flat-list-out-of-list-of-lists-in-python """ # noqa return [item for sublist in x for item in sublist]
python
def flatten_list(x: List[Any]) -> List[Any]: """ Converts a list of lists into a flat list. Args: x: list of lists Returns: flat list As per http://stackoverflow.com/questions/952914/making-a-flat-list-out-of-list-of-lists-in-python """ # noqa return [item for sublist in x for item in sublist]
[ "def", "flatten_list", "(", "x", ":", "List", "[", "Any", "]", ")", "->", "List", "[", "Any", "]", ":", "# noqa", "return", "[", "item", "for", "sublist", "in", "x", "for", "item", "in", "sublist", "]" ]
Converts a list of lists into a flat list. Args: x: list of lists Returns: flat list As per http://stackoverflow.com/questions/952914/making-a-flat-list-out-of-list-of-lists-in-python
[ "Converts", "a", "list", "of", "lists", "into", "a", "flat", "list", ".", "Args", ":", "x", ":", "list", "of", "lists" ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/lists.py#L104-L118
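A short usage sketch; note that the comprehension above flattens exactly one level of nesting.

.. code-block:: python

    from cardinal_pythonlib.lists import flatten_list

    flatten_list([[1, 2], [3], [4, 5]])      # [1, 2, 3, 4, 5]
    flatten_list([["a"], [], ["b", "c"]])    # ["a", "b", "c"]
    flatten_list([[1, [2, 3]], [4]])         # [1, [2, 3], 4]: only one level is flattened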
RudolfCardinal/pythonlib
cardinal_pythonlib/lists.py
unique_list
def unique_list(seq: Iterable[Any]) -> List[Any]: """ Returns a list of all the unique elements in the input list. Args: seq: input list Returns: list of unique elements As per http://stackoverflow.com/questions/480214/how-do-you-remove-duplicates-from-a-list-in-whilst-preserving-order """ # noqa seen = set() seen_add = seen.add return [x for x in seq if not (x in seen or seen_add(x))]
python
def unique_list(seq: Iterable[Any]) -> List[Any]: """ Returns a list of all the unique elements in the input list. Args: seq: input list Returns: list of unique elements As per http://stackoverflow.com/questions/480214/how-do-you-remove-duplicates-from-a-list-in-whilst-preserving-order """ # noqa seen = set() seen_add = seen.add return [x for x in seq if not (x in seen or seen_add(x))]
[ "def", "unique_list", "(", "seq", ":", "Iterable", "[", "Any", "]", ")", "->", "List", "[", "Any", "]", ":", "# noqa", "seen", "=", "set", "(", ")", "seen_add", "=", "seen", ".", "add", "return", "[", "x", "for", "x", "in", "seq", "if", "not", "(", "x", "in", "seen", "or", "seen_add", "(", "x", ")", ")", "]" ]
Returns a list of all the unique elements in the input list. Args: seq: input list Returns: list of unique elements As per http://stackoverflow.com/questions/480214/how-do-you-remove-duplicates-from-a-list-in-whilst-preserving-order
[ "Returns", "a", "list", "of", "all", "the", "unique", "elements", "in", "the", "input", "list", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/lists.py#L121-L137
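A short usage sketch showing the order-preserving behaviour that distinguishes this from a plain set().

.. code-block:: python

    from cardinal_pythonlib.lists import unique_list

    unique_list([3, 1, 3, 2, 1])  # [3, 1, 2]: order of first appearance is kept
    list(set([3, 1, 3, 2, 1]))    # order not guaranteed with a plain set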
RudolfCardinal/pythonlib
cardinal_pythonlib/lists.py
chunks
def chunks(l: List[Any], n: int) -> Iterable[List[Any]]: """ Yield successive ``n``-sized chunks from ``l``. Args: l: input list n: chunk size Yields: successive chunks of size ``n`` """ for i in range(0, len(l), n): yield l[i:i + n]
python
def chunks(l: List[Any], n: int) -> Iterable[List[Any]]: """ Yield successive ``n``-sized chunks from ``l``. Args: l: input list n: chunk size Yields: successive chunks of size ``n`` """ for i in range(0, len(l), n): yield l[i:i + n]
[ "def", "chunks", "(", "l", ":", "List", "[", "Any", "]", ",", "n", ":", "int", ")", "->", "Iterable", "[", "List", "[", "Any", "]", "]", ":", "for", "i", "in", "range", "(", "0", ",", "len", "(", "l", ")", ",", "n", ")", ":", "yield", "l", "[", "i", ":", "i", "+", "n", "]" ]
Yield successive ``n``-sized chunks from ``l``. Args: l: input list n: chunk size Yields: successive chunks of size ``n``
[ "Yield", "successive", "n", "-", "sized", "chunks", "from", "l", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/lists.py#L140-L153
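A short usage sketch; the function is a generator, so wrap it in list() to see all chunks at once.

.. code-block:: python

    from cardinal_pythonlib.lists import chunks

    list(chunks([1, 2, 3, 4, 5, 6, 7], 3))  # [[1, 2, 3], [4, 5, 6], [7]]
    for chunk in chunks(list(range(10)), 4):
        print(chunk)  # [0, 1, 2, 3] / [4, 5, 6, 7] / [8, 9]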
RudolfCardinal/pythonlib
cardinal_pythonlib/text.py
escape_newlines
def escape_newlines(s: str) -> str: """ Escapes CR, LF, and backslashes. Its counterpart is :func:`unescape_newlines`. ``s.encode("string_escape")`` and ``s.encode("unicode_escape")`` are alternatives, but they mess around with quotes, too (specifically, backslash-escaping single quotes). """ if not s: return s s = s.replace("\\", r"\\") # replace \ with \\ s = s.replace("\n", r"\n") # escape \n; note ord("\n") == 10 s = s.replace("\r", r"\r") # escape \r; note ord("\r") == 13 return s
python
def escape_newlines(s: str) -> str: """ Escapes CR, LF, and backslashes. Its counterpart is :func:`unescape_newlines`. ``s.encode("string_escape")`` and ``s.encode("unicode_escape")`` are alternatives, but they mess around with quotes, too (specifically, backslash-escaping single quotes). """ if not s: return s s = s.replace("\\", r"\\") # replace \ with \\ s = s.replace("\n", r"\n") # escape \n; note ord("\n") == 10 s = s.replace("\r", r"\r") # escape \r; note ord("\r") == 13 return s
[ "def", "escape_newlines", "(", "s", ":", "str", ")", "->", "str", ":", "if", "not", "s", ":", "return", "s", "s", "=", "s", ".", "replace", "(", "\"\\\\\"", ",", "r\"\\\\\"", ")", "# replace \\ with \\\\", "s", "=", "s", ".", "replace", "(", "\"\\n\"", ",", "r\"\\n\"", ")", "# escape \\n; note ord(\"\\n\") == 10", "s", "=", "s", ".", "replace", "(", "\"\\r\"", ",", "r\"\\r\"", ")", "# escape \\r; note ord(\"\\r\") == 13", "return", "s" ]
Escapes CR, LF, and backslashes. Its counterpart is :func:`unescape_newlines`. ``s.encode("string_escape")`` and ``s.encode("unicode_escape")`` are alternatives, but they mess around with quotes, too (specifically, backslash-escaping single quotes).
[ "Escapes", "CR", "LF", "and", "backslashes", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/text.py#L40-L55
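A short usage sketch; the output shown is what the three replace() calls above produce.

.. code-block:: python

    from cardinal_pythonlib.text import escape_newlines

    s = "two\nlines with a \\ backslash"
    print(escape_newlines(s))
    # prints: two\nlines with a \\ backslash   (now a single physical line)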
RudolfCardinal/pythonlib
cardinal_pythonlib/text.py
unescape_newlines
def unescape_newlines(s: str) -> str: """ Reverses :func:`escape_newlines`. """ # See also http://stackoverflow.com/questions/4020539 if not s: return s d = "" # the destination string in_escape = False for i in range(len(s)): c = s[i] # the character being processed if in_escape: if c == "r": d += "\r" elif c == "n": d += "\n" else: d += c in_escape = False else: if c == "\\": in_escape = True else: d += c return d
python
def unescape_newlines(s: str) -> str: """ Reverses :func:`escape_newlines`. """ # See also http://stackoverflow.com/questions/4020539 if not s: return s d = "" # the destination string in_escape = False for i in range(len(s)): c = s[i] # the character being processed if in_escape: if c == "r": d += "\r" elif c == "n": d += "\n" else: d += c in_escape = False else: if c == "\\": in_escape = True else: d += c return d
[ "def", "unescape_newlines", "(", "s", ":", "str", ")", "->", "str", ":", "# See also http://stackoverflow.com/questions/4020539", "if", "not", "s", ":", "return", "s", "d", "=", "\"\"", "# the destination string", "in_escape", "=", "False", "for", "i", "in", "range", "(", "len", "(", "s", ")", ")", ":", "c", "=", "s", "[", "i", "]", "# the character being processed", "if", "in_escape", ":", "if", "c", "==", "\"r\"", ":", "d", "+=", "\"\\r\"", "elif", "c", "==", "\"n\"", ":", "d", "+=", "\"\\n\"", "else", ":", "d", "+=", "c", "in_escape", "=", "False", "else", ":", "if", "c", "==", "\"\\\\\"", ":", "in_escape", "=", "True", "else", ":", "d", "+=", "c", "return", "d" ]
Reverses :func:`escape_newlines`.
[ "Reverses", ":", "func", ":", "escape_newlines", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/text.py#L58-L82
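A round-trip sketch pairing this with escape_newlines from the same module.

.. code-block:: python

    from cardinal_pythonlib.text import escape_newlines, unescape_newlines

    original = "first\r\nsecond\\third"
    escaped = escape_newlines(original)           # single line, no raw CR/LF left
    assert unescape_newlines(escaped) == original  # round trip restores the original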
RudolfCardinal/pythonlib
cardinal_pythonlib/text.py
escape_tabs_newlines
def escape_tabs_newlines(s: str) -> str: """ Escapes CR, LF, tab, and backslashes. Its counterpart is :func:`unescape_tabs_newlines`. """ if not s: return s s = s.replace("\\", r"\\") # replace \ with \\ s = s.replace("\n", r"\n") # escape \n; note ord("\n") == 10 s = s.replace("\r", r"\r") # escape \r; note ord("\r") == 13 s = s.replace("\t", r"\t") # escape \t; note ord("\t") == 9 return s
python
def escape_tabs_newlines(s: str) -> str: """ Escapes CR, LF, tab, and backslashes. Its counterpart is :func:`unescape_tabs_newlines`. """ if not s: return s s = s.replace("\\", r"\\") # replace \ with \\ s = s.replace("\n", r"\n") # escape \n; note ord("\n") == 10 s = s.replace("\r", r"\r") # escape \r; note ord("\r") == 13 s = s.replace("\t", r"\t") # escape \t; note ord("\t") == 9 return s
[ "def", "escape_tabs_newlines", "(", "s", ":", "str", ")", "->", "str", ":", "if", "not", "s", ":", "return", "s", "s", "=", "s", ".", "replace", "(", "\"\\\\\"", ",", "r\"\\\\\"", ")", "# replace \\ with \\\\", "s", "=", "s", ".", "replace", "(", "\"\\n\"", ",", "r\"\\n\"", ")", "# escape \\n; note ord(\"\\n\") == 10", "s", "=", "s", ".", "replace", "(", "\"\\r\"", ",", "r\"\\r\"", ")", "# escape \\r; note ord(\"\\r\") == 13", "s", "=", "s", ".", "replace", "(", "\"\\t\"", ",", "r\"\\t\"", ")", "# escape \\t; note ord(\"\\t\") == 9", "return", "s" ]
Escapes CR, LF, tab, and backslashes. Its counterpart is :func:`unescape_tabs_newlines`.
[ "Escapes", "CR", "LF", "tab", "and", "backslashes", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/text.py#L85-L97
RudolfCardinal/pythonlib
cardinal_pythonlib/text.py
_unicode_def_src_to_str
def _unicode_def_src_to_str(srclist: List[Union[str, int]]) -> str: """ Used to create :data:`UNICODE_CATEGORY_STRINGS`. Args: srclist: list of integers or hex range strings like ``"0061-007A"`` Returns: a string with all characters described by ``srclist``: either the character corresponding to the integer Unicode character number, or all characters corresponding to the inclusive range described """ charlist = [] # type: List[str] for src in srclist: if isinstance(src, int): charlist.append(chr(src)) else: # Range like "0041-005A" first, last = [int(x, 16) for x in src.split("-")] charlist += [chr(x) for x in range(first, last + 1)] return "".join(charlist)
python
def _unicode_def_src_to_str(srclist: List[Union[str, int]]) -> str: """ Used to create :data:`UNICODE_CATEGORY_STRINGS`. Args: srclist: list of integers or hex range strings like ``"0061-007A"`` Returns: a string with all characters described by ``srclist``: either the character corresponding to the integer Unicode character number, or all characters corresponding to the inclusive range described """ charlist = [] # type: List[str] for src in srclist: if isinstance(src, int): charlist.append(chr(src)) else: # Range like "0041-005A" first, last = [int(x, 16) for x in src.split("-")] charlist += [chr(x) for x in range(first, last + 1)] return "".join(charlist)
[ "def", "_unicode_def_src_to_str", "(", "srclist", ":", "List", "[", "Union", "[", "str", ",", "int", "]", "]", ")", "->", "str", ":", "charlist", "=", "[", "]", "# type: List[str]", "for", "src", "in", "srclist", ":", "if", "isinstance", "(", "src", ",", "int", ")", ":", "charlist", ".", "append", "(", "chr", "(", "src", ")", ")", "else", ":", "# Range like \"0041-005A\"", "first", ",", "last", "=", "[", "int", "(", "x", ",", "16", ")", "for", "x", "in", "src", ".", "split", "(", "\"-\"", ")", "]", "charlist", "+=", "[", "chr", "(", "x", ")", "for", "x", "in", "range", "(", "first", ",", "last", "+", "1", ")", "]", "return", "\"\"", ".", "join", "(", "charlist", ")" ]
Used to create :data:`UNICODE_CATEGORY_STRINGS`. Args: srclist: list of integers or hex range strings like ``"0061-007A"`` Returns: a string with all characters described by ``srclist``: either the character corresponding to the integer Unicode character number, or all characters corresponding to the inclusive range described
[ "Used", "to", "create", ":", "data", ":", "UNICODE_CATEGORY_STRINGS", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/text.py#L134-L154
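A small illustration of the intended behaviour. The function is private (leading underscore), so this is only a demonstration of what the mixed int / hex-range input expands to.

.. code-block:: python

    from cardinal_pythonlib.text import _unicode_def_src_to_str

    # 0x41 is 'A'; "0061-0063" is the inclusive range 'a'..'c'.
    _unicode_def_src_to_str([0x41, "0061-0063"])  # 'Aabc'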
davenquinn/Attitude
attitude/__dustbin/__report/__init__.py
report
def report(*arrays, **kwargs): """ Outputs a standalone HTML 'report card' for a measurement (or several grouped measurements), including relevant statistical information. """ name = kwargs.pop("name",None) grouped = len(arrays) > 1 if grouped: arr = N.concatenate(arrays) components = [PCAOrientation(a) for a in arrays] else: arr = arrays[0] components = [] #r = LinearOrientation(arr) pca = PCAOrientation(arr) distances = list(distance_from_group(components,pca)) kwargs = dict( levels=[1,2,3], alpha=[0.8,0.5,0.2], linewidth=2) #ellipse=error_ellipse(pca) kwargs = dict(n=500,levels=[1,2], ellipse=True) stereonet_data = dict( main=pca.error_coords(**kwargs), components=[i.error_coords(**kwargs) for i in components]) t = env.get_template("report.html") return t.render( name=name, pca=pca, stereonet_data=stereonet_data, angular_errors=tuple(N.degrees(i) for i in pca.angular_errors()[::-1]), aligned=plot_aligned(pca), distances=distances)
python
def report(*arrays, **kwargs): """ Outputs a standalone HTML 'report card' for a measurement (or several grouped measurements), including relevant statistical information. """ name = kwargs.pop("name",None) grouped = len(arrays) > 1 if grouped: arr = N.concatenate(arrays) components = [PCAOrientation(a) for a in arrays] else: arr = arrays[0] components = [] #r = LinearOrientation(arr) pca = PCAOrientation(arr) distances = list(distance_from_group(components,pca)) kwargs = dict( levels=[1,2,3], alpha=[0.8,0.5,0.2], linewidth=2) #ellipse=error_ellipse(pca) kwargs = dict(n=500,levels=[1,2], ellipse=True) stereonet_data = dict( main=pca.error_coords(**kwargs), components=[i.error_coords(**kwargs) for i in components]) t = env.get_template("report.html") return t.render( name=name, pca=pca, stereonet_data=stereonet_data, angular_errors=tuple(N.degrees(i) for i in pca.angular_errors()[::-1]), aligned=plot_aligned(pca), distances=distances)
[ "def", "report", "(", "*", "arrays", ",", "*", "*", "kwargs", ")", ":", "name", "=", "kwargs", ".", "pop", "(", "\"name\"", ",", "None", ")", "grouped", "=", "len", "(", "arrays", ")", ">", "1", "if", "grouped", ":", "arr", "=", "N", ".", "concatenate", "(", "arrays", ")", "components", "=", "[", "PCAOrientation", "(", "a", ")", "for", "a", "in", "arrays", "]", "else", ":", "arr", "=", "arrays", "[", "0", "]", "components", "=", "[", "]", "#r = LinearOrientation(arr)", "pca", "=", "PCAOrientation", "(", "arr", ")", "distances", "=", "list", "(", "distance_from_group", "(", "components", ",", "pca", ")", ")", "kwargs", "=", "dict", "(", "levels", "=", "[", "1", ",", "2", ",", "3", "]", ",", "alpha", "=", "[", "0.8", ",", "0.5", ",", "0.2", "]", ",", "linewidth", "=", "2", ")", "#ellipse=error_ellipse(pca)", "kwargs", "=", "dict", "(", "n", "=", "500", ",", "levels", "=", "[", "1", ",", "2", "]", ",", "ellipse", "=", "True", ")", "stereonet_data", "=", "dict", "(", "main", "=", "pca", ".", "error_coords", "(", "*", "*", "kwargs", ")", ",", "components", "=", "[", "i", ".", "error_coords", "(", "*", "*", "kwargs", ")", "for", "i", "in", "components", "]", ")", "t", "=", "env", ".", "get_template", "(", "\"report.html\"", ")", "return", "t", ".", "render", "(", "name", "=", "name", ",", "pca", "=", "pca", ",", "stereonet_data", "=", "stereonet_data", ",", "angular_errors", "=", "tuple", "(", "N", ".", "degrees", "(", "i", ")", "for", "i", "in", "pca", ".", "angular_errors", "(", ")", "[", ":", ":", "-", "1", "]", ")", ",", "aligned", "=", "plot_aligned", "(", "pca", ")", ",", "distances", "=", "distances", ")" ]
Outputs a standalone HTML 'report card' for a measurement (or several grouped measurements), including relevant statistical information.
[ "Outputs", "a", "standalone", "HTML", "report", "card", "for", "a", "measurement", "(", "or", "several", "grouped", "measurements", ")", "including", "relevant", "statistical", "information", "." ]
train
https://github.com/davenquinn/Attitude/blob/2ce97b9aba0aa5deedc6617c2315e07e6396d240/attitude/__dustbin/__report/__init__.py#L34-L78
meyersj/geotweet
geotweet/twitter/stream_steps.py
GeoFilterStep.validate_geotweet
def validate_geotweet(self, record): """ check that stream record is actual tweet with coordinates """ if record and self._validate('user', record) \ and self._validate('coordinates', record): return True return False
python
def validate_geotweet(self, record): """ check that stream record is actual tweet with coordinates """ if record and self._validate('user', record) \ and self._validate('coordinates', record): return True return False
[ "def", "validate_geotweet", "(", "self", ",", "record", ")", ":", "if", "record", "and", "self", ".", "_validate", "(", "'user'", ",", "record", ")", "and", "self", ".", "_validate", "(", "'coordinates'", ",", "record", ")", ":", "return", "True", "return", "False" ]
check that stream record is actual tweet with coordinates
[ "check", "that", "stream", "record", "is", "actual", "tweet", "with", "coordinates" ]
train
https://github.com/meyersj/geotweet/blob/1a6b55f98adf34d1b91f172d9187d599616412d9/geotweet/twitter/stream_steps.py#L53-L58
davenquinn/Attitude
attitude/geom/conics.py
angle_subtended
def angle_subtended(ell, **kwargs): """ Compute the half angle subtended (or min and max angles) for an offset elliptical conic from the origin or an arbitrary viewpoint. kwargs: tangent Return tangent instead of angle (default false) viewpoint Defaults to origin """ return_tangent = kwargs.pop('tangent',False) con, transform, offset = ell.projection(**kwargs) v = N.linalg.norm(N.array(con.major_axes()),axis=1) A = N.sort(v)[::-1] # Sort highest values first A = N.squeeze(A) B = N.linalg.norm(offset) if return_tangent: return A/B return N.arctan2(A,B)
python
def angle_subtended(ell, **kwargs): """ Compute the half angle subtended (or min and max angles) for an offset elliptical conic from the origin or an arbitrary viewpoint. kwargs: tangent Return tangent instead of angle (default false) viewpoint Defaults to origin """ return_tangent = kwargs.pop('tangent',False) con, transform, offset = ell.projection(**kwargs) v = N.linalg.norm(N.array(con.major_axes()),axis=1) A = N.sort(v)[::-1] # Sort highest values first A = N.squeeze(A) B = N.linalg.norm(offset) if return_tangent: return A/B return N.arctan2(A,B)
[ "def", "angle_subtended", "(", "ell", ",", "*", "*", "kwargs", ")", ":", "return_tangent", "=", "kwargs", ".", "pop", "(", "'tangent'", ",", "False", ")", "con", ",", "transform", ",", "offset", "=", "ell", ".", "projection", "(", "*", "*", "kwargs", ")", "v", "=", "N", ".", "linalg", ".", "norm", "(", "N", ".", "array", "(", "con", ".", "major_axes", "(", ")", ")", ",", "axis", "=", "1", ")", "A", "=", "N", ".", "sort", "(", "v", ")", "[", ":", ":", "-", "1", "]", "# Sort highest values first", "A", "=", "N", ".", "squeeze", "(", "A", ")", "B", "=", "N", ".", "linalg", ".", "norm", "(", "offset", ")", "if", "return_tangent", ":", "return", "A", "/", "B", "return", "N", ".", "arctan2", "(", "A", ",", "B", ")" ]
Compute the half angle subtended (or min and max angles) for an offset elliptical conic from the origin or an arbitrary viewpoint. kwargs: tangent Return tangent instead of angle (default false) viewpoint Defaults to origin
[ "Compute", "the", "half", "angle", "subtended", "(", "or", "min", "and", "max", "angles", ")", "for", "an", "offset", "elliptical", "conic", "from", "the", "origin", "or", "an", "arbitrary", "viewpoint", "." ]
train
https://github.com/davenquinn/Attitude/blob/2ce97b9aba0aa5deedc6617c2315e07e6396d240/attitude/geom/conics.py#L8-L26
davenquinn/Attitude
attitude/geom/conics.py
Conic.from_axes
def from_axes(cls,axes): """ Get axis-aligned elliptical conic from axis lenths This can be converted into a hyperbola by getting the dual conic """ ax = list(axes) #ax[-1] *= -1 # Not sure what is going on here... arr = N.diag(ax + [-1]) return arr.view(cls)
python
def from_axes(cls,axes): """ Get axis-aligned elliptical conic from axis lenths This can be converted into a hyperbola by getting the dual conic """ ax = list(axes) #ax[-1] *= -1 # Not sure what is going on here... arr = N.diag(ax + [-1]) return arr.view(cls)
[ "def", "from_axes", "(", "cls", ",", "axes", ")", ":", "ax", "=", "list", "(", "axes", ")", "#ax[-1] *= -1 # Not sure what is going on here...", "arr", "=", "N", ".", "diag", "(", "ax", "+", "[", "-", "1", "]", ")", "return", "arr", ".", "view", "(", "cls", ")" ]
Get axis-aligned elliptical conic from axis lenths This can be converted into a hyperbola by getting the dual conic
[ "Get", "axis", "-", "aligned", "elliptical", "conic", "from", "axis", "lenths", "This", "can", "be", "converted", "into", "a", "hyperbola", "by", "getting", "the", "dual", "conic" ]
train
https://github.com/davenquinn/Attitude/blob/2ce97b9aba0aa5deedc6617c2315e07e6396d240/attitude/geom/conics.py#L30-L38
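A hedged sketch of the matrix this constructor produces, assuming `N` is `numpy` (as elsewhere in the module) and that from_axes is exposed as a classmethod (the decorator is not shown in this record).

.. code-block:: python

    import numpy as N
    from attitude.geom.conics import Conic

    c = Conic.from_axes([1, 2, 3])
    # Equivalent to N.diag([1, 2, 3, -1]) viewed as a Conic:
    # [[ 1  0  0  0]
    #  [ 0  2  0  0]
    #  [ 0  0  3  0]
    #  [ 0  0  0 -1]]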
davenquinn/Attitude
attitude/geom/conics.py
Conic.from_semiaxes
def from_semiaxes(cls,axes): """ Get axis-aligned elliptical conic from axis lenths This can be converted into a hyperbola by getting the dual conic """ ax = list(1/N.array(axes)**2) #ax[-1] *= -1 # Not sure what is going on here... arr = N.diag(ax + [-1]) return arr.view(cls)
python
def from_semiaxes(cls,axes): """ Get axis-aligned elliptical conic from axis lenths This can be converted into a hyperbola by getting the dual conic """ ax = list(1/N.array(axes)**2) #ax[-1] *= -1 # Not sure what is going on here... arr = N.diag(ax + [-1]) return arr.view(cls)
[ "def", "from_semiaxes", "(", "cls", ",", "axes", ")", ":", "ax", "=", "list", "(", "1", "/", "N", ".", "array", "(", "axes", ")", "**", "2", ")", "#ax[-1] *= -1 # Not sure what is going on here...", "arr", "=", "N", ".", "diag", "(", "ax", "+", "[", "-", "1", "]", ")", "return", "arr", ".", "view", "(", "cls", ")" ]
Get axis-aligned elliptical conic from axis lenths This can be converted into a hyperbola by getting the dual conic
[ "Get", "axis", "-", "aligned", "elliptical", "conic", "from", "axis", "lenths", "This", "can", "be", "converted", "into", "a", "hyperbola", "by", "getting", "the", "dual", "conic" ]
train
https://github.com/davenquinn/Attitude/blob/2ce97b9aba0aa5deedc6617c2315e07e6396d240/attitude/geom/conics.py#L41-L49
davenquinn/Attitude
attitude/geom/conics.py
Conic.contains
def contains(ell, p, shell_only=False): """ Check to see whether point is inside conic. :param exact: Only solutions exactly on conic are considered (default: False). """ v = augment(p) _ = ell.solve(v) return N.allclose(_,0) if shell_only else _ <= 0
python
def contains(ell, p, shell_only=False): """ Check to see whether point is inside conic. :param exact: Only solutions exactly on conic are considered (default: False). """ v = augment(p) _ = ell.solve(v) return N.allclose(_,0) if shell_only else _ <= 0
[ "def", "contains", "(", "ell", ",", "p", ",", "shell_only", "=", "False", ")", ":", "v", "=", "augment", "(", "p", ")", "_", "=", "ell", ".", "solve", "(", "v", ")", "return", "N", ".", "allclose", "(", "_", ",", "0", ")", "if", "shell_only", "else", "_", "<=", "0" ]
Check to see whether point is inside conic. :param exact: Only solutions exactly on conic are considered (default: False).
[ "Check", "to", "see", "whether", "point", "is", "inside", "conic", "." ]
train
https://github.com/davenquinn/Attitude/blob/2ce97b9aba0aa5deedc6617c2315e07e6396d240/attitude/geom/conics.py#L57-L67
davenquinn/Attitude
attitude/geom/conics.py
Conic.major_axes
def major_axes(ell): """ Gets major axes of ellipsoids """ _ = ell[:-1,:-1] U,s,V = N.linalg.svd(_) scalar = -(ell.sum()-_.sum()) return N.sqrt(s*scalar)*V
python
def major_axes(ell): """ Gets major axes of ellipsoids """ _ = ell[:-1,:-1] U,s,V = N.linalg.svd(_) scalar = -(ell.sum()-_.sum()) return N.sqrt(s*scalar)*V
[ "def", "major_axes", "(", "ell", ")", ":", "_", "=", "ell", "[", ":", "-", "1", ",", ":", "-", "1", "]", "U", ",", "s", ",", "V", "=", "N", ".", "linalg", ".", "svd", "(", "_", ")", "scalar", "=", "-", "(", "ell", ".", "sum", "(", ")", "-", "_", ".", "sum", "(", ")", ")", "return", "N", ".", "sqrt", "(", "s", "*", "scalar", ")", "*", "V" ]
Gets major axes of ellipsoids
[ "Gets", "major", "axes", "of", "ellipsoids" ]
train
https://github.com/davenquinn/Attitude/blob/2ce97b9aba0aa5deedc6617c2315e07e6396d240/attitude/geom/conics.py#L80-L87
davenquinn/Attitude
attitude/geom/conics.py
Conic.translate
def translate(conic, vector): """ Translates a conic by a vector """ # Translation matrix T = N.identity(len(conic)) T[:-1,-1] = -vector return conic.transform(T)
python
def translate(conic, vector): """ Translates a conic by a vector """ # Translation matrix T = N.identity(len(conic)) T[:-1,-1] = -vector return conic.transform(T)
[ "def", "translate", "(", "conic", ",", "vector", ")", ":", "# Translation matrix", "T", "=", "N", ".", "identity", "(", "len", "(", "conic", ")", ")", "T", "[", ":", "-", "1", ",", "-", "1", "]", "=", "-", "vector", "return", "conic", ".", "transform", "(", "T", ")" ]
Translates a conic by a vector
[ "Translates", "a", "conic", "by", "a", "vector" ]
train
https://github.com/davenquinn/Attitude/blob/2ce97b9aba0aa5deedc6617c2315e07e6396d240/attitude/geom/conics.py#L120-L127
davenquinn/Attitude
attitude/geom/conics.py
Conic.polar_plane
def polar_plane(conic, vector): """ Calculates the polar plane to a vector (a 'pole') for a given conic section. For poles outside the conic, the polar plane contains all vectors of tangency to the pole. """ pole = augment(vector) return dot(conic,pole).view(Plane)
python
def polar_plane(conic, vector): """ Calculates the polar plane to a vector (a 'pole') for a given conic section. For poles outside the conic, the polar plane contains all vectors of tangency to the pole. """ pole = augment(vector) return dot(conic,pole).view(Plane)
[ "def", "polar_plane", "(", "conic", ",", "vector", ")", ":", "pole", "=", "augment", "(", "vector", ")", "return", "dot", "(", "conic", ",", "pole", ")", ".", "view", "(", "Plane", ")" ]
Calculates the polar plane to a vector (a 'pole') for a given conic section. For poles outside the conic, the polar plane contains all vectors of tangency to the pole.
[ "Calculates", "the", "polar", "plane", "to", "a", "vector", "(", "a", "pole", ")", "for", "a", "given", "conic", "section", ".", "For", "poles", "outside", "the", "conic", "the", "polar", "plane", "contains", "all", "vectors", "of", "tangency", "to", "the", "pole", "." ]
train
https://github.com/davenquinn/Attitude/blob/2ce97b9aba0aa5deedc6617c2315e07e6396d240/attitude/geom/conics.py#L129-L137
davenquinn/Attitude
attitude/geom/conics.py
Conic.pole
def pole(conic, plane): """ Calculates the pole of a polar plane for a given conic section. """ v = dot(N.linalg.inv(conic),plane) return v[:-1]/v[-1]
python
def pole(conic, plane): """ Calculates the pole of a polar plane for a given conic section. """ v = dot(N.linalg.inv(conic),plane) return v[:-1]/v[-1]
[ "def", "pole", "(", "conic", ",", "plane", ")", ":", "v", "=", "dot", "(", "N", ".", "linalg", ".", "inv", "(", "conic", ")", ",", "plane", ")", "return", "v", "[", ":", "-", "1", "]", "/", "v", "[", "-", "1", "]" ]
Calculates the pole of a polar plane for a given conic section.
[ "Calculates", "the", "pole", "of", "a", "polar", "plane", "for", "a", "given", "conic", "section", "." ]
train
https://github.com/davenquinn/Attitude/blob/2ce97b9aba0aa5deedc6617c2315e07e6396d240/attitude/geom/conics.py#L139-L145
davenquinn/Attitude
attitude/geom/conics.py
Conic.projection
def projection(self, **kwargs): """ The elliptical cut of an ellipsoidal conic describing all points of tangency to the conic as viewed from the origin. """ viewpoint = kwargs.pop('viewpoint', None) if viewpoint is None: ndim = self.shape[0]-1 viewpoint = N.zeros(ndim) plane = self.polar_plane(viewpoint) return self.slice(plane, **kwargs)
python
def projection(self, **kwargs): """ The elliptical cut of an ellipsoidal conic describing all points of tangency to the conic as viewed from the origin. """ viewpoint = kwargs.pop('viewpoint', None) if viewpoint is None: ndim = self.shape[0]-1 viewpoint = N.zeros(ndim) plane = self.polar_plane(viewpoint) return self.slice(plane, **kwargs)
[ "def", "projection", "(", "self", ",", "*", "*", "kwargs", ")", ":", "viewpoint", "=", "kwargs", ".", "pop", "(", "'viewpoint'", ",", "None", ")", "if", "viewpoint", "is", "None", ":", "ndim", "=", "self", ".", "shape", "[", "0", "]", "-", "1", "viewpoint", "=", "N", ".", "zeros", "(", "ndim", ")", "plane", "=", "self", ".", "polar_plane", "(", "viewpoint", ")", "return", "self", ".", "slice", "(", "plane", ",", "*", "*", "kwargs", ")" ]
The elliptical cut of an ellipsoidal conic describing all points of tangency to the conic as viewed from the origin.
[ "The", "elliptical", "cut", "of", "an", "ellipsoidal", "conic", "describing", "all", "points", "of", "tangency", "to", "the", "conic", "as", "viewed", "from", "the", "origin", "." ]
train
https://github.com/davenquinn/Attitude/blob/2ce97b9aba0aa5deedc6617c2315e07e6396d240/attitude/geom/conics.py#L181-L192
ivanprjcts/sdklib
sdklib/http/renderers.py
guess_file_name_stream_type_header
def guess_file_name_stream_type_header(args): """ Guess filename, file stream, file type, file header from args. :param args: may be string (filepath), 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype) or 4-tuples (filename, fileobj, contentype, custom_headers). :return: filename, file stream, file type, file header """ ftype = None fheader = None if isinstance(args, (tuple, list)): if len(args) == 2: fname, fstream = args elif len(args) == 3: fname, fstream, ftype = args else: fname, fstream, ftype, fheader = args else: fname, fstream = guess_filename_stream(args) ftype = guess_content_type(fname) if isinstance(fstream, (str, bytes, bytearray)): fdata = fstream else: fdata = fstream.read() return fname, fdata, ftype, fheader
python
def guess_file_name_stream_type_header(args): """ Guess filename, file stream, file type, file header from args. :param args: may be string (filepath), 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype) or 4-tuples (filename, fileobj, contentype, custom_headers). :return: filename, file stream, file type, file header """ ftype = None fheader = None if isinstance(args, (tuple, list)): if len(args) == 2: fname, fstream = args elif len(args) == 3: fname, fstream, ftype = args else: fname, fstream, ftype, fheader = args else: fname, fstream = guess_filename_stream(args) ftype = guess_content_type(fname) if isinstance(fstream, (str, bytes, bytearray)): fdata = fstream else: fdata = fstream.read() return fname, fdata, ftype, fheader
[ "def", "guess_file_name_stream_type_header", "(", "args", ")", ":", "ftype", "=", "None", "fheader", "=", "None", "if", "isinstance", "(", "args", ",", "(", "tuple", ",", "list", ")", ")", ":", "if", "len", "(", "args", ")", "==", "2", ":", "fname", ",", "fstream", "=", "args", "elif", "len", "(", "args", ")", "==", "3", ":", "fname", ",", "fstream", ",", "ftype", "=", "args", "else", ":", "fname", ",", "fstream", ",", "ftype", ",", "fheader", "=", "args", "else", ":", "fname", ",", "fstream", "=", "guess_filename_stream", "(", "args", ")", "ftype", "=", "guess_content_type", "(", "fname", ")", "if", "isinstance", "(", "fstream", ",", "(", "str", ",", "bytes", ",", "bytearray", ")", ")", ":", "fdata", "=", "fstream", "else", ":", "fdata", "=", "fstream", ".", "read", "(", ")", "return", "fname", ",", "fdata", ",", "ftype", ",", "fheader" ]
Guess filename, file stream, file type, file header from args. :param args: may be string (filepath), 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype) or 4-tuples (filename, fileobj, contentype, custom_headers). :return: filename, file stream, file type, file header
[ "Guess", "filename", "file", "stream", "file", "type", "file", "header", "from", "args", "." ]
train
https://github.com/ivanprjcts/sdklib/blob/7ba4273a05c40e2e338f49f2dd564920ed98fcab/sdklib/http/renderers.py#L52-L77
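A usage sketch for the tuple forms; the bare-filepath form additionally needs a readable file on disk, so it is omitted here.

.. code-block:: python

    from sdklib.http.renderers import guess_file_name_stream_type_header

    # 2-tuple: (filename, fileobj-or-bytes); content type and headers come back as None
    guess_file_name_stream_type_header(("notes.txt", b"hello"))
    # -> ('notes.txt', b'hello', None, None)

    # 3-tuple: (filename, fileobj-or-bytes, content type)
    guess_file_name_stream_type_header(("notes.txt", b"hello", "text/plain"))
    # -> ('notes.txt', b'hello', 'text/plain', None)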
ivanprjcts/sdklib
sdklib/http/renderers.py
MultiPartRenderer.encode_params
def encode_params(self, data=None, files=None, **kwargs): """ Build the body for a multipart/form-data request. Will successfully encode files when passed as a dict or a list of tuples. Order is retained if data is a list of tuples but arbitrary if parameters are supplied as a dict. The tuples may be string (filepath), 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype) or 4-tuples (filename, fileobj, contentype, custom_headers). """ if isinstance(data, basestring): raise ValueError("Data must not be a string.") # optional args boundary = kwargs.get("boundary", None) output_str = kwargs.get("output_str", self.output_str) new_fields = [] fields = to_key_val_list(data or {}) files = to_key_val_list(files or {}) for field, value in fields: ctype = None if isinstance(value, (tuple, list)) and len(value) == 2: val, ctype = value else: val = value if isinstance(val, basestring) or not hasattr(val, '__iter__'): val = [val] for v in val: # Don't call str() on bytestrings: in Py3 it all goes wrong. if not isinstance(v, bytes): v = to_string(v, lang=output_str) field = field.decode('utf-8') if isinstance(field, bytes) else field v = v.encode('utf-8') if isinstance(v, str) else v rf = RequestField(name=field, data=v) rf.make_multipart(content_type=ctype) new_fields.append(rf) for (k, v) in files: fn, fdata, ft, fh = guess_file_name_stream_type_header(v) rf = RequestField(name=k, data=fdata, filename=fn, headers=fh) rf.make_multipart(content_type=ft) new_fields.append(rf) if boundary is None: boundary = self.boundary body, content_type = encode_multipart_formdata(new_fields, boundary=boundary) return body, content_type
python
def encode_params(self, data=None, files=None, **kwargs): """ Build the body for a multipart/form-data request. Will successfully encode files when passed as a dict or a list of tuples. Order is retained if data is a list of tuples but arbitrary if parameters are supplied as a dict. The tuples may be string (filepath), 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype) or 4-tuples (filename, fileobj, contentype, custom_headers). """ if isinstance(data, basestring): raise ValueError("Data must not be a string.") # optional args boundary = kwargs.get("boundary", None) output_str = kwargs.get("output_str", self.output_str) new_fields = [] fields = to_key_val_list(data or {}) files = to_key_val_list(files or {}) for field, value in fields: ctype = None if isinstance(value, (tuple, list)) and len(value) == 2: val, ctype = value else: val = value if isinstance(val, basestring) or not hasattr(val, '__iter__'): val = [val] for v in val: # Don't call str() on bytestrings: in Py3 it all goes wrong. if not isinstance(v, bytes): v = to_string(v, lang=output_str) field = field.decode('utf-8') if isinstance(field, bytes) else field v = v.encode('utf-8') if isinstance(v, str) else v rf = RequestField(name=field, data=v) rf.make_multipart(content_type=ctype) new_fields.append(rf) for (k, v) in files: fn, fdata, ft, fh = guess_file_name_stream_type_header(v) rf = RequestField(name=k, data=fdata, filename=fn, headers=fh) rf.make_multipart(content_type=ft) new_fields.append(rf) if boundary is None: boundary = self.boundary body, content_type = encode_multipart_formdata(new_fields, boundary=boundary) return body, content_type
[ "def", "encode_params", "(", "self", ",", "data", "=", "None", ",", "files", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "isinstance", "(", "data", ",", "basestring", ")", ":", "raise", "ValueError", "(", "\"Data must not be a string.\"", ")", "# optional args", "boundary", "=", "kwargs", ".", "get", "(", "\"boundary\"", ",", "None", ")", "output_str", "=", "kwargs", ".", "get", "(", "\"output_str\"", ",", "self", ".", "output_str", ")", "new_fields", "=", "[", "]", "fields", "=", "to_key_val_list", "(", "data", "or", "{", "}", ")", "files", "=", "to_key_val_list", "(", "files", "or", "{", "}", ")", "for", "field", ",", "value", "in", "fields", ":", "ctype", "=", "None", "if", "isinstance", "(", "value", ",", "(", "tuple", ",", "list", ")", ")", "and", "len", "(", "value", ")", "==", "2", ":", "val", ",", "ctype", "=", "value", "else", ":", "val", "=", "value", "if", "isinstance", "(", "val", ",", "basestring", ")", "or", "not", "hasattr", "(", "val", ",", "'__iter__'", ")", ":", "val", "=", "[", "val", "]", "for", "v", "in", "val", ":", "# Don't call str() on bytestrings: in Py3 it all goes wrong.", "if", "not", "isinstance", "(", "v", ",", "bytes", ")", ":", "v", "=", "to_string", "(", "v", ",", "lang", "=", "output_str", ")", "field", "=", "field", ".", "decode", "(", "'utf-8'", ")", "if", "isinstance", "(", "field", ",", "bytes", ")", "else", "field", "v", "=", "v", ".", "encode", "(", "'utf-8'", ")", "if", "isinstance", "(", "v", ",", "str", ")", "else", "v", "rf", "=", "RequestField", "(", "name", "=", "field", ",", "data", "=", "v", ")", "rf", ".", "make_multipart", "(", "content_type", "=", "ctype", ")", "new_fields", ".", "append", "(", "rf", ")", "for", "(", "k", ",", "v", ")", "in", "files", ":", "fn", ",", "fdata", ",", "ft", ",", "fh", "=", "guess_file_name_stream_type_header", "(", "v", ")", "rf", "=", "RequestField", "(", "name", "=", "k", ",", "data", "=", "fdata", ",", "filename", "=", "fn", ",", "headers", "=", "fh", ")", "rf", ".", "make_multipart", "(", "content_type", "=", "ft", ")", "new_fields", ".", "append", "(", "rf", ")", "if", "boundary", "is", "None", ":", "boundary", "=", "self", ".", "boundary", "body", ",", "content_type", "=", "encode_multipart_formdata", "(", "new_fields", ",", "boundary", "=", "boundary", ")", "return", "body", ",", "content_type" ]
Build the body for a multipart/form-data request. Will successfully encode files when passed as a dict or a list of tuples. Order is retained if data is a list of tuples but arbitrary if parameters are supplied as a dict. The tuples may be string (filepath), 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype) or 4-tuples (filename, fileobj, contentype, custom_headers).
[ "Build", "the", "body", "for", "a", "multipart", "/", "form", "-", "data", "request", ".", "Will", "successfully", "encode", "files", "when", "passed", "as", "a", "dict", "or", "a", "list", "of", "tuples", ".", "Order", "is", "retained", "if", "data", "is", "a", "list", "of", "tuples", "but", "arbitrary", "if", "parameters", "are", "supplied", "as", "a", "dict", ".", "The", "tuples", "may", "be", "string", "(", "filepath", ")", "2", "-", "tuples", "(", "filename", "fileobj", ")", "3", "-", "tuples", "(", "filename", "fileobj", "contentype", ")", "or", "4", "-", "tuples", "(", "filename", "fileobj", "contentype", "custom_headers", ")", "." ]
train
https://github.com/ivanprjcts/sdklib/blob/7ba4273a05c40e2e338f49f2dd564920ed98fcab/sdklib/http/renderers.py#L86-L137
ivanprjcts/sdklib
sdklib/http/renderers.py
FormRenderer.encode_params
def encode_params(self, data=None, **kwargs): """ Encode parameters in a piece of data. Will successfully encode parameters when passed as a dict or a list of 2-tuples. Order is retained if data is a list of 2-tuples but arbitrary if parameters are supplied as a dict. """ collection_format = kwargs.get("collection_format", self.collection_format) output_str = kwargs.get("output_str", self.output_str) sort = kwargs.get("sort", self.sort) if data is None: return "", self.content_type elif isinstance(data, (str, bytes)): return data, self.content_type elif hasattr(data, 'read'): return data, self.content_type elif collection_format == 'multi' and hasattr(data, '__iter__'): result = [] for k, vs in to_key_val_list(data, sort=sort): if isinstance(vs, basestring) or not hasattr(vs, '__iter__'): vs = [vs] for v in vs: result.append( (k.encode('utf-8') if isinstance(k, str) else k, v.encode('utf-8') if isinstance(v, str) else to_string(v, lang=output_str))) return urlencode(result, doseq=True), self.content_type elif collection_format == 'encoded' and hasattr(data, '__iter__'): return urlencode(data, doseq=False), self.content_type elif hasattr(data, '__iter__'): results = [] for k, vs in to_key_val_dict(data).items(): if isinstance(vs, list): v = self.COLLECTION_SEPARATORS[collection_format].join(quote_plus(e) for e in vs) key = k + '[]' else: v = quote_plus(vs) key = k results.append("%s=%s" % (key, v)) return '&'.join(results), self.content_type else: return data, self.content_type
python
def encode_params(self, data=None, **kwargs): """ Encode parameters in a piece of data. Will successfully encode parameters when passed as a dict or a list of 2-tuples. Order is retained if data is a list of 2-tuples but arbitrary if parameters are supplied as a dict. """ collection_format = kwargs.get("collection_format", self.collection_format) output_str = kwargs.get("output_str", self.output_str) sort = kwargs.get("sort", self.sort) if data is None: return "", self.content_type elif isinstance(data, (str, bytes)): return data, self.content_type elif hasattr(data, 'read'): return data, self.content_type elif collection_format == 'multi' and hasattr(data, '__iter__'): result = [] for k, vs in to_key_val_list(data, sort=sort): if isinstance(vs, basestring) or not hasattr(vs, '__iter__'): vs = [vs] for v in vs: result.append( (k.encode('utf-8') if isinstance(k, str) else k, v.encode('utf-8') if isinstance(v, str) else to_string(v, lang=output_str))) return urlencode(result, doseq=True), self.content_type elif collection_format == 'encoded' and hasattr(data, '__iter__'): return urlencode(data, doseq=False), self.content_type elif hasattr(data, '__iter__'): results = [] for k, vs in to_key_val_dict(data).items(): if isinstance(vs, list): v = self.COLLECTION_SEPARATORS[collection_format].join(quote_plus(e) for e in vs) key = k + '[]' else: v = quote_plus(vs) key = k results.append("%s=%s" % (key, v)) return '&'.join(results), self.content_type else: return data, self.content_type
[ "def", "encode_params", "(", "self", ",", "data", "=", "None", ",", "*", "*", "kwargs", ")", ":", "collection_format", "=", "kwargs", ".", "get", "(", "\"collection_format\"", ",", "self", ".", "collection_format", ")", "output_str", "=", "kwargs", ".", "get", "(", "\"output_str\"", ",", "self", ".", "output_str", ")", "sort", "=", "kwargs", ".", "get", "(", "\"sort\"", ",", "self", ".", "sort", ")", "if", "data", "is", "None", ":", "return", "\"\"", ",", "self", ".", "content_type", "elif", "isinstance", "(", "data", ",", "(", "str", ",", "bytes", ")", ")", ":", "return", "data", ",", "self", ".", "content_type", "elif", "hasattr", "(", "data", ",", "'read'", ")", ":", "return", "data", ",", "self", ".", "content_type", "elif", "collection_format", "==", "'multi'", "and", "hasattr", "(", "data", ",", "'__iter__'", ")", ":", "result", "=", "[", "]", "for", "k", ",", "vs", "in", "to_key_val_list", "(", "data", ",", "sort", "=", "sort", ")", ":", "if", "isinstance", "(", "vs", ",", "basestring", ")", "or", "not", "hasattr", "(", "vs", ",", "'__iter__'", ")", ":", "vs", "=", "[", "vs", "]", "for", "v", "in", "vs", ":", "result", ".", "append", "(", "(", "k", ".", "encode", "(", "'utf-8'", ")", "if", "isinstance", "(", "k", ",", "str", ")", "else", "k", ",", "v", ".", "encode", "(", "'utf-8'", ")", "if", "isinstance", "(", "v", ",", "str", ")", "else", "to_string", "(", "v", ",", "lang", "=", "output_str", ")", ")", ")", "return", "urlencode", "(", "result", ",", "doseq", "=", "True", ")", ",", "self", ".", "content_type", "elif", "collection_format", "==", "'encoded'", "and", "hasattr", "(", "data", ",", "'__iter__'", ")", ":", "return", "urlencode", "(", "data", ",", "doseq", "=", "False", ")", ",", "self", ".", "content_type", "elif", "hasattr", "(", "data", ",", "'__iter__'", ")", ":", "results", "=", "[", "]", "for", "k", ",", "vs", "in", "to_key_val_dict", "(", "data", ")", ".", "items", "(", ")", ":", "if", "isinstance", "(", "vs", ",", "list", ")", ":", "v", "=", "self", ".", "COLLECTION_SEPARATORS", "[", "collection_format", "]", ".", "join", "(", "quote_plus", "(", "e", ")", "for", "e", "in", "vs", ")", "key", "=", "k", "+", "'[]'", "else", ":", "v", "=", "quote_plus", "(", "vs", ")", "key", "=", "k", "results", ".", "append", "(", "\"%s=%s\"", "%", "(", "key", ",", "v", ")", ")", "return", "'&'", ".", "join", "(", "results", ")", ",", "self", ".", "content_type", "else", ":", "return", "data", ",", "self", ".", "content_type" ]
Encode parameters in a piece of data. Will successfully encode parameters when passed as a dict or a list of 2-tuples. Order is retained if data is a list of 2-tuples but arbitrary if parameters are supplied as a dict.
[ "Encode", "parameters", "in", "a", "piece", "of", "data", ".", "Will", "successfully", "encode", "parameters", "when", "passed", "as", "a", "dict", "or", "a", "list", "of", "2", "-", "tuples", ".", "Order", "is", "retained", "if", "data", "is", "a", "list", "of", "2", "-", "tuples", "but", "arbitrary", "if", "parameters", "are", "supplied", "as", "a", "dict", "." ]
train
https://github.com/ivanprjcts/sdklib/blob/7ba4273a05c40e2e338f49f2dd564920ed98fcab/sdklib/http/renderers.py#L161-L203
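A brief, hedged illustration of the dict-versus-list-of-2-tuples point made in the docstring above, using only the standard library's urlencode; it does not reproduce the sdklib renderer class or its helpers (to_key_val_list, to_string), which are assumed to live in that package.

# Illustrative sketch only: urlencode(..., doseq=True) flattens multi-valued
# parameters, which is what the 'multi' branch above assembles by hand.
from urllib.parse import urlencode

pairs = [("q", "python"), ("tag", ["http", "sdk"])]   # list of 2-tuples: order kept
print(urlencode(pairs, doseq=True))                    # q=python&tag=http&tag=sdk

as_dict = {"tag": ["http", "sdk"], "q": "python"}      # dict: order is the dict's own
print(urlencode(as_dict, doseq=True))                  # tag=http&tag=sdk&q=python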
ivanprjcts/sdklib
sdklib/http/renderers.py
PlainTextRenderer.encode_params
def encode_params(self, data=None, **kwargs): """ Build the body for a text/plain request. Will successfully encode parameters when passed as a dict or a list of 2-tuples. Order is retained if data is a list of 2-tuples but arbitrary if parameters are supplied as a dict. """ charset = kwargs.get("charset", self.charset) collection_format = kwargs.get("collection_format", self.collection_format) output_str = kwargs.get("output_str", self.output_str) if data is None: return "", self.get_content_type(charset) elif isinstance(data, (str, bytes)): return data, self.get_content_type(charset) elif hasattr(data, 'read'): return data, self.get_content_type(charset) elif collection_format == 'multi' and hasattr(data, '__iter__'): result = [] for k, vs in to_key_val_list(data): if isinstance(vs, basestring) or not hasattr(vs, '__iter__'): vs = [vs] for v in vs: result.append(b"=".join([self._encode(k, charset), self._encode(v, charset, output_str)])) return b'\n'.join(result), self.get_content_type(charset) elif collection_format == 'plain' and hasattr(data, '__iter__'): results = [] for k, vs in to_key_val_dict(data).items(): results.append(b"=".join([self._encode(k, charset), self._encode(vs, charset, output_str)])) return b'\n'.join(results), self.get_content_type(charset) elif hasattr(data, '__iter__'): results = [] for k, vs in to_key_val_dict(data).items(): if isinstance(vs, list): v = self.COLLECTION_SEPARATORS[collection_format].join(e for e in vs) key = k + '[]' else: v = vs key = k results.append(b"=".join([self._encode(key, charset), self._encode(v, charset, output_str)])) return b"\n".join(results), self.get_content_type(charset) else: return str(data).encode(charset) if charset else str(data), self.get_content_type(charset)
python
def encode_params(self, data=None, **kwargs): """ Build the body for a text/plain request. Will successfully encode parameters when passed as a dict or a list of 2-tuples. Order is retained if data is a list of 2-tuples but arbitrary if parameters are supplied as a dict. """ charset = kwargs.get("charset", self.charset) collection_format = kwargs.get("collection_format", self.collection_format) output_str = kwargs.get("output_str", self.output_str) if data is None: return "", self.get_content_type(charset) elif isinstance(data, (str, bytes)): return data, self.get_content_type(charset) elif hasattr(data, 'read'): return data, self.get_content_type(charset) elif collection_format == 'multi' and hasattr(data, '__iter__'): result = [] for k, vs in to_key_val_list(data): if isinstance(vs, basestring) or not hasattr(vs, '__iter__'): vs = [vs] for v in vs: result.append(b"=".join([self._encode(k, charset), self._encode(v, charset, output_str)])) return b'\n'.join(result), self.get_content_type(charset) elif collection_format == 'plain' and hasattr(data, '__iter__'): results = [] for k, vs in to_key_val_dict(data).items(): results.append(b"=".join([self._encode(k, charset), self._encode(vs, charset, output_str)])) return b'\n'.join(results), self.get_content_type(charset) elif hasattr(data, '__iter__'): results = [] for k, vs in to_key_val_dict(data).items(): if isinstance(vs, list): v = self.COLLECTION_SEPARATORS[collection_format].join(e for e in vs) key = k + '[]' else: v = vs key = k results.append(b"=".join([self._encode(key, charset), self._encode(v, charset, output_str)])) return b"\n".join(results), self.get_content_type(charset) else: return str(data).encode(charset) if charset else str(data), self.get_content_type(charset)
[ "def", "encode_params", "(", "self", ",", "data", "=", "None", ",", "*", "*", "kwargs", ")", ":", "charset", "=", "kwargs", ".", "get", "(", "\"charset\"", ",", "self", ".", "charset", ")", "collection_format", "=", "kwargs", ".", "get", "(", "\"collection_format\"", ",", "self", ".", "collection_format", ")", "output_str", "=", "kwargs", ".", "get", "(", "\"output_str\"", ",", "self", ".", "output_str", ")", "if", "data", "is", "None", ":", "return", "\"\"", ",", "self", ".", "get_content_type", "(", "charset", ")", "elif", "isinstance", "(", "data", ",", "(", "str", ",", "bytes", ")", ")", ":", "return", "data", ",", "self", ".", "get_content_type", "(", "charset", ")", "elif", "hasattr", "(", "data", ",", "'read'", ")", ":", "return", "data", ",", "self", ".", "get_content_type", "(", "charset", ")", "elif", "collection_format", "==", "'multi'", "and", "hasattr", "(", "data", ",", "'__iter__'", ")", ":", "result", "=", "[", "]", "for", "k", ",", "vs", "in", "to_key_val_list", "(", "data", ")", ":", "if", "isinstance", "(", "vs", ",", "basestring", ")", "or", "not", "hasattr", "(", "vs", ",", "'__iter__'", ")", ":", "vs", "=", "[", "vs", "]", "for", "v", "in", "vs", ":", "result", ".", "append", "(", "b\"=\"", ".", "join", "(", "[", "self", ".", "_encode", "(", "k", ",", "charset", ")", ",", "self", ".", "_encode", "(", "v", ",", "charset", ",", "output_str", ")", "]", ")", ")", "return", "b'\\n'", ".", "join", "(", "result", ")", ",", "self", ".", "get_content_type", "(", "charset", ")", "elif", "collection_format", "==", "'plain'", "and", "hasattr", "(", "data", ",", "'__iter__'", ")", ":", "results", "=", "[", "]", "for", "k", ",", "vs", "in", "to_key_val_dict", "(", "data", ")", ".", "items", "(", ")", ":", "results", ".", "append", "(", "b\"=\"", ".", "join", "(", "[", "self", ".", "_encode", "(", "k", ",", "charset", ")", ",", "self", ".", "_encode", "(", "vs", ",", "charset", ",", "output_str", ")", "]", ")", ")", "return", "b'\\n'", ".", "join", "(", "results", ")", ",", "self", ".", "get_content_type", "(", "charset", ")", "elif", "hasattr", "(", "data", ",", "'__iter__'", ")", ":", "results", "=", "[", "]", "for", "k", ",", "vs", "in", "to_key_val_dict", "(", "data", ")", ".", "items", "(", ")", ":", "if", "isinstance", "(", "vs", ",", "list", ")", ":", "v", "=", "self", ".", "COLLECTION_SEPARATORS", "[", "collection_format", "]", ".", "join", "(", "e", "for", "e", "in", "vs", ")", "key", "=", "k", "+", "'[]'", "else", ":", "v", "=", "vs", "key", "=", "k", "results", ".", "append", "(", "b\"=\"", ".", "join", "(", "[", "self", ".", "_encode", "(", "key", ",", "charset", ")", ",", "self", ".", "_encode", "(", "v", ",", "charset", ",", "output_str", ")", "]", ")", ")", "return", "b\"\\n\"", ".", "join", "(", "results", ")", ",", "self", ".", "get_content_type", "(", "charset", ")", "else", ":", "return", "str", "(", "data", ")", ".", "encode", "(", "charset", ")", "if", "charset", "else", "str", "(", "data", ")", ",", "self", ".", "get_content_type", "(", "charset", ")" ]
Build the body for a text/plain request. Will successfully encode parameters when passed as a dict or a list of 2-tuples. Order is retained if data is a list of 2-tuples but arbitrary if parameters are supplied as a dict.
[ "Build", "the", "body", "for", "a", "text", "/", "plain", "request", ".", "Will", "successfully", "encode", "parameters", "when", "passed", "as", "a", "dict", "or", "a", "list", "of", "2", "-", "tuples", ".", "Order", "is", "retained", "if", "data", "is", "a", "list", "of", "2", "-", "tuples", "but", "arbitrary", "if", "parameters", "are", "supplied", "as", "a", "dict", "." ]
train
https://github.com/ivanprjcts/sdklib/blob/7ba4273a05c40e2e338f49f2dd564920ed98fcab/sdklib/http/renderers.py#L236-L280
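A minimal standalone sketch of the newline-joined key=value body that the text/plain renderer above produces; it assumes UTF-8 and deliberately ignores the renderer's charset, collection_format and output_str options.

# Sketch of the text/plain body shape (assumes UTF-8; not the sdklib class itself).
def plain_body(data: dict) -> bytes:
    lines = []
    for key, value in data.items():
        lines.append(str(key).encode("utf-8") + b"=" + str(value).encode("utf-8"))
    return b"\n".join(lines)

print(plain_body({"name": "sdklib", "version": 1}))    # b'name=sdklib\nversion=1'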
ivanprjcts/sdklib
sdklib/http/renderers.py
JSONRenderer.encode_params
def encode_params(self, data=None, **kwargs): """ Build the body for a application/json request. """ if isinstance(data, basestring): raise ValueError("Data must not be a string.") if data is None: return b"", self.content_type fields = to_key_val_dict(data or "") try: body = json.dumps(fields) except: body = json.dumps(fields, encoding='latin-1') return str(body).encode(), self.content_type
python
def encode_params(self, data=None, **kwargs): """ Build the body for a application/json request. """ if isinstance(data, basestring): raise ValueError("Data must not be a string.") if data is None: return b"", self.content_type fields = to_key_val_dict(data or "") try: body = json.dumps(fields) except: body = json.dumps(fields, encoding='latin-1') return str(body).encode(), self.content_type
[ "def", "encode_params", "(", "self", ",", "data", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "isinstance", "(", "data", ",", "basestring", ")", ":", "raise", "ValueError", "(", "\"Data must not be a string.\"", ")", "if", "data", "is", "None", ":", "return", "b\"\"", ",", "self", ".", "content_type", "fields", "=", "to_key_val_dict", "(", "data", "or", "\"\"", ")", "try", ":", "body", "=", "json", ".", "dumps", "(", "fields", ")", "except", ":", "body", "=", "json", ".", "dumps", "(", "fields", ",", "encoding", "=", "'latin-1'", ")", "return", "str", "(", "body", ")", ".", "encode", "(", ")", ",", "self", ".", "content_type" ]
Build the body for a application/json request.
[ "Build", "the", "body", "for", "a", "application", "/", "json", "request", "." ]
train
https://github.com/ivanprjcts/sdklib/blob/7ba4273a05c40e2e338f49f2dd564920ed98fcab/sdklib/http/renderers.py#L290-L305
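A hedged sketch of the JSON body behaviour using only the standard json module: a mapping is serialised to bytes, a bare string is rejected, and None yields an empty body. (The encoding='latin-1' fallback in the original is a Python 2 signature of json.dumps; Python 3's json.dumps does not accept it.)

# Standard-library sketch mirroring the behaviour documented above.
import json

def json_body(data):
    if isinstance(data, str):
        raise ValueError("Data must not be a string.")
    if data is None:
        return b""
    return json.dumps(dict(data)).encode()

print(json_body({"name": "sdklib"}))    # b'{"name": "sdklib"}'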
RudolfCardinal/pythonlib
cardinal_pythonlib/cmdline.py
cmdline_split
def cmdline_split(s: str, platform: Union[int, str] = 'this') -> List[str]: """ As per https://stackoverflow.com/questions/33560364/python-windows-parsing-command-lines-with-shlex. Multi-platform variant of ``shlex.split()`` for command-line splitting. For use with ``subprocess``, for ``argv`` injection etc. Using fast REGEX. Args: s: string to split platform: - ``'this'`` = auto from current platform; - ``1`` = POSIX; - ``0`` = Windows/CMD - (other values reserved) """ # noqa if platform == 'this': platform = (sys.platform != 'win32') # RNC: includes 64-bit Windows if platform == 1: # POSIX re_cmd_lex = r'''"((?:\\["\\]|[^"])*)"|'([^']*)'|(\\.)|(&&?|\|\|?|\d?\>|[<])|([^\s'"\\&|<>]+)|(\s+)|(.)''' # noqa elif platform == 0: # Windows/CMD re_cmd_lex = r'''"((?:""|\\["\\]|[^"])*)"?()|(\\\\(?=\\*")|\\")|(&&?|\|\|?|\d?>|[<])|([^\s"&|<>]+)|(\s+)|(.)''' # noqa else: raise AssertionError('unknown platform %r' % platform) args = [] accu = None # collects pieces of one arg for qs, qss, esc, pipe, word, white, fail in re.findall(re_cmd_lex, s): if word: pass # most frequent elif esc: word = esc[1] elif white or pipe: if accu is not None: args.append(accu) if pipe: args.append(pipe) accu = None continue elif fail: raise ValueError("invalid or incomplete shell string") elif qs: word = qs.replace('\\"', '"').replace('\\\\', '\\') if platform == 0: word = word.replace('""', '"') else: word = qss # may be even empty; must be last accu = (accu or '') + word if accu is not None: args.append(accu) return args
python
def cmdline_split(s: str, platform: Union[int, str] = 'this') -> List[str]: """ As per https://stackoverflow.com/questions/33560364/python-windows-parsing-command-lines-with-shlex. Multi-platform variant of ``shlex.split()`` for command-line splitting. For use with ``subprocess``, for ``argv`` injection etc. Using fast REGEX. Args: s: string to split platform: - ``'this'`` = auto from current platform; - ``1`` = POSIX; - ``0`` = Windows/CMD - (other values reserved) """ # noqa if platform == 'this': platform = (sys.platform != 'win32') # RNC: includes 64-bit Windows if platform == 1: # POSIX re_cmd_lex = r'''"((?:\\["\\]|[^"])*)"|'([^']*)'|(\\.)|(&&?|\|\|?|\d?\>|[<])|([^\s'"\\&|<>]+)|(\s+)|(.)''' # noqa elif platform == 0: # Windows/CMD re_cmd_lex = r'''"((?:""|\\["\\]|[^"])*)"?()|(\\\\(?=\\*")|\\")|(&&?|\|\|?|\d?>|[<])|([^\s"&|<>]+)|(\s+)|(.)''' # noqa else: raise AssertionError('unknown platform %r' % platform) args = [] accu = None # collects pieces of one arg for qs, qss, esc, pipe, word, white, fail in re.findall(re_cmd_lex, s): if word: pass # most frequent elif esc: word = esc[1] elif white or pipe: if accu is not None: args.append(accu) if pipe: args.append(pipe) accu = None continue elif fail: raise ValueError("invalid or incomplete shell string") elif qs: word = qs.replace('\\"', '"').replace('\\\\', '\\') if platform == 0: word = word.replace('""', '"') else: word = qss # may be even empty; must be last accu = (accu or '') + word if accu is not None: args.append(accu) return args
[ "def", "cmdline_split", "(", "s", ":", "str", ",", "platform", ":", "Union", "[", "int", ",", "str", "]", "=", "'this'", ")", "->", "List", "[", "str", "]", ":", "# noqa", "if", "platform", "==", "'this'", ":", "platform", "=", "(", "sys", ".", "platform", "!=", "'win32'", ")", "# RNC: includes 64-bit Windows", "if", "platform", "==", "1", ":", "# POSIX", "re_cmd_lex", "=", "r'''\"((?:\\\\[\"\\\\]|[^\"])*)\"|'([^']*)'|(\\\\.)|(&&?|\\|\\|?|\\d?\\>|[<])|([^\\s'\"\\\\&|<>]+)|(\\s+)|(.)'''", "# noqa", "elif", "platform", "==", "0", ":", "# Windows/CMD", "re_cmd_lex", "=", "r'''\"((?:\"\"|\\\\[\"\\\\]|[^\"])*)\"?()|(\\\\\\\\(?=\\\\*\")|\\\\\")|(&&?|\\|\\|?|\\d?>|[<])|([^\\s\"&|<>]+)|(\\s+)|(.)'''", "# noqa", "else", ":", "raise", "AssertionError", "(", "'unknown platform %r'", "%", "platform", ")", "args", "=", "[", "]", "accu", "=", "None", "# collects pieces of one arg", "for", "qs", ",", "qss", ",", "esc", ",", "pipe", ",", "word", ",", "white", ",", "fail", "in", "re", ".", "findall", "(", "re_cmd_lex", ",", "s", ")", ":", "if", "word", ":", "pass", "# most frequent", "elif", "esc", ":", "word", "=", "esc", "[", "1", "]", "elif", "white", "or", "pipe", ":", "if", "accu", "is", "not", "None", ":", "args", ".", "append", "(", "accu", ")", "if", "pipe", ":", "args", ".", "append", "(", "pipe", ")", "accu", "=", "None", "continue", "elif", "fail", ":", "raise", "ValueError", "(", "\"invalid or incomplete shell string\"", ")", "elif", "qs", ":", "word", "=", "qs", ".", "replace", "(", "'\\\\\"'", ",", "'\"'", ")", ".", "replace", "(", "'\\\\\\\\'", ",", "'\\\\'", ")", "if", "platform", "==", "0", ":", "word", "=", "word", ".", "replace", "(", "'\"\"'", ",", "'\"'", ")", "else", ":", "word", "=", "qss", "# may be even empty; must be last", "accu", "=", "(", "accu", "or", "''", ")", "+", "word", "if", "accu", "is", "not", "None", ":", "args", ".", "append", "(", "accu", ")", "return", "args" ]
As per https://stackoverflow.com/questions/33560364/python-windows-parsing-command-lines-with-shlex. Multi-platform variant of ``shlex.split()`` for command-line splitting. For use with ``subprocess``, for ``argv`` injection etc. Using fast REGEX. Args: s: string to split platform: - ``'this'`` = auto from current platform; - ``1`` = POSIX; - ``0`` = Windows/CMD - (other values reserved)
[ "As", "per", "https", ":", "//", "stackoverflow", ".", "com", "/", "questions", "/", "33560364", "/", "python", "-", "windows", "-", "parsing", "-", "command", "-", "lines", "-", "with", "-", "shlex", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/cmdline.py#L36-L90
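A usage sketch for cmdline_split, assuming the cardinal_pythonlib package is installed; the import path follows the record above.

# POSIX-style splitting (platform=1): quotes group words, whitespace separates.
from cardinal_pythonlib.cmdline import cmdline_split

print(cmdline_split('grep -e "two words" file.txt', platform=1))
# expected: ['grep', '-e', 'two words', 'file.txt']

# Windows/CMD-style splitting (platform=0) applies CMD quoting rules instead.
print(cmdline_split('copy "C:\\Program Files\\x" d:\\', platform=0))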
davenquinn/Attitude
attitude/display/plot/cov_types/misc.py
percentiles
def percentiles(a, pcts, axis=None): """Like scoreatpercentile but can take and return array of percentiles. Parameters ---------- a : array data pcts : sequence of percentile values percentile or percentiles to find score at axis : int or None if not None, computes scores over this axis Returns ------- scores: array array of scores at requested percentiles first dimension is length of object passed to ``pcts`` """ scores = [] try: n = len(pcts) except TypeError: pcts = [pcts] n = 0 for i, p in enumerate(pcts): if axis is None: score = stats.scoreatpercentile(a.ravel(), p) else: score = N.apply_along_axis(stats.scoreatpercentile, axis, a, p) scores.append(score) scores = N.asarray(scores) if not n: scores = scores.squeeze() return scores
python
def percentiles(a, pcts, axis=None): """Like scoreatpercentile but can take and return array of percentiles. Parameters ---------- a : array data pcts : sequence of percentile values percentile or percentiles to find score at axis : int or None if not None, computes scores over this axis Returns ------- scores: array array of scores at requested percentiles first dimension is length of object passed to ``pcts`` """ scores = [] try: n = len(pcts) except TypeError: pcts = [pcts] n = 0 for i, p in enumerate(pcts): if axis is None: score = stats.scoreatpercentile(a.ravel(), p) else: score = N.apply_along_axis(stats.scoreatpercentile, axis, a, p) scores.append(score) scores = N.asarray(scores) if not n: scores = scores.squeeze() return scores
[ "def", "percentiles", "(", "a", ",", "pcts", ",", "axis", "=", "None", ")", ":", "scores", "=", "[", "]", "try", ":", "n", "=", "len", "(", "pcts", ")", "except", "TypeError", ":", "pcts", "=", "[", "pcts", "]", "n", "=", "0", "for", "i", ",", "p", "in", "enumerate", "(", "pcts", ")", ":", "if", "axis", "is", "None", ":", "score", "=", "stats", ".", "scoreatpercentile", "(", "a", ".", "ravel", "(", ")", ",", "p", ")", "else", ":", "score", "=", "N", ".", "apply_along_axis", "(", "stats", ".", "scoreatpercentile", ",", "axis", ",", "a", ",", "p", ")", "scores", ".", "append", "(", "score", ")", "scores", "=", "N", ".", "asarray", "(", "scores", ")", "if", "not", "n", ":", "scores", "=", "scores", ".", "squeeze", "(", ")", "return", "scores" ]
Like scoreatpercentile but can take and return array of percentiles. Parameters ---------- a : array data pcts : sequence of percentile values percentile or percentiles to find score at axis : int or None if not None, computes scores over this axis Returns ------- scores: array array of scores at requested percentiles first dimension is length of object passed to ``pcts``
[ "Like", "scoreatpercentile", "but", "can", "take", "and", "return", "array", "of", "percentiles", ".", "Parameters", "----------", "a", ":", "array", "data", "pcts", ":", "sequence", "of", "percentile", "values", "percentile", "or", "percentiles", "to", "find", "score", "at", "axis", ":", "int", "or", "None", "if", "not", "None", "computes", "scores", "over", "this", "axis", "Returns", "-------", "scores", ":", "array", "array", "of", "scores", "at", "requested", "percentiles", "first", "dimension", "is", "length", "of", "object", "passed", "to", "pcts" ]
train
https://github.com/davenquinn/Attitude/blob/2ce97b9aba0aa5deedc6617c2315e07e6396d240/attitude/display/plot/cov_types/misc.py#L6-L37
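A short, hedged example of the building blocks percentiles() wraps (the module above imports numpy as N and scipy.stats as stats); it assumes numpy and scipy are available rather than importing the attitude package itself.

# scoreatpercentile over the flattened array, or per column via apply_along_axis,
# is what percentiles() dispatches to depending on the axis argument.
import numpy as np
from scipy import stats

a = np.random.default_rng(0).normal(size=(100, 3))
print(stats.scoreatpercentile(a.ravel(), 50))                   # median of all values
print(np.apply_along_axis(stats.scoreatpercentile, 0, a, 95))   # 95th percentile per column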
davenquinn/Attitude
attitude/display/plot/cov_types/misc.py
ci
def ci(a, which=95, axis=None): """Return a percentile range from an array of values.""" p = 50 - which / 2, 50 + which / 2 return percentiles(a, p, axis)
python
def ci(a, which=95, axis=None): """Return a percentile range from an array of values.""" p = 50 - which / 2, 50 + which / 2 return percentiles(a, p, axis)
[ "def", "ci", "(", "a", ",", "which", "=", "95", ",", "axis", "=", "None", ")", ":", "p", "=", "50", "-", "which", "/", "2", ",", "50", "+", "which", "/", "2", "return", "percentiles", "(", "a", ",", "p", ",", "axis", ")" ]
Return a percentile range from an array of values.
[ "Return", "a", "percentile", "range", "from", "an", "array", "of", "values", "." ]
train
https://github.com/davenquinn/Attitude/blob/2ce97b9aba0aa5deedc6617c2315e07e6396d240/attitude/display/plot/cov_types/misc.py#L40-L43
RudolfCardinal/pythonlib
cardinal_pythonlib/configfiles.py
get_config_string_option
def get_config_string_option(parser: ConfigParser, section: str, option: str, default: str = None) -> str: """ Retrieves a string value from a parser. Args: parser: instance of :class:`ConfigParser` section: section name within config file option: option (variable) name within that section default: value to return if option is absent Returns: string value Raises: ValueError: if the section is absent """ if not parser.has_section(section): raise ValueError("config missing section: " + section) return parser.get(section, option, fallback=default)
python
def get_config_string_option(parser: ConfigParser, section: str, option: str, default: str = None) -> str: """ Retrieves a string value from a parser. Args: parser: instance of :class:`ConfigParser` section: section name within config file option: option (variable) name within that section default: value to return if option is absent Returns: string value Raises: ValueError: if the section is absent """ if not parser.has_section(section): raise ValueError("config missing section: " + section) return parser.get(section, option, fallback=default)
[ "def", "get_config_string_option", "(", "parser", ":", "ConfigParser", ",", "section", ":", "str", ",", "option", ":", "str", ",", "default", ":", "str", "=", "None", ")", "->", "str", ":", "if", "not", "parser", ".", "has_section", "(", "section", ")", ":", "raise", "ValueError", "(", "\"config missing section: \"", "+", "section", ")", "return", "parser", ".", "get", "(", "section", ",", "option", ",", "fallback", "=", "default", ")" ]
Retrieves a string value from a parser. Args: parser: instance of :class:`ConfigParser` section: section name within config file option: option (variable) name within that section default: value to return if option is absent Returns: string value Raises: ValueError: if the section is absent
[ "Retrieves", "a", "string", "value", "from", "a", "parser", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/configfiles.py#L42-L64
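A small self-contained usage example, assuming cardinal_pythonlib is installed; the import path is taken from the record above.

from configparser import ConfigParser
from cardinal_pythonlib.configfiles import get_config_string_option

parser = ConfigParser()
parser.read_string("""
[database]
host = localhost
""")

print(get_config_string_option(parser, "database", "host"))                   # 'localhost'
print(get_config_string_option(parser, "database", "port", default="5432"))   # default used
# Asking for a missing *section* (rather than a missing option) raises ValueError.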
RudolfCardinal/pythonlib
cardinal_pythonlib/configfiles.py
read_config_string_options
def read_config_string_options(obj: Any, parser: ConfigParser, section: str, options: Iterable[str], default: str = None) -> None: """ Reads config options and writes them as attributes of ``obj``, with attribute names as per ``options``. Args: obj: the object to modify parser: instance of :class:`ConfigParser` section: section name within config file options: option (variable) names within that section default: value to use for any missing options Returns: """ # enforce_str removed; ConfigParser always returns strings unless asked # specifically for o in options: setattr(obj, o, get_config_string_option(parser, section, o, default=default))
python
def read_config_string_options(obj: Any, parser: ConfigParser, section: str, options: Iterable[str], default: str = None) -> None: """ Reads config options and writes them as attributes of ``obj``, with attribute names as per ``options``. Args: obj: the object to modify parser: instance of :class:`ConfigParser` section: section name within config file options: option (variable) names within that section default: value to use for any missing options Returns: """ # enforce_str removed; ConfigParser always returns strings unless asked # specifically for o in options: setattr(obj, o, get_config_string_option(parser, section, o, default=default))
[ "def", "read_config_string_options", "(", "obj", ":", "Any", ",", "parser", ":", "ConfigParser", ",", "section", ":", "str", ",", "options", ":", "Iterable", "[", "str", "]", ",", "default", ":", "str", "=", "None", ")", "->", "None", ":", "# enforce_str removed; ConfigParser always returns strings unless asked", "# specifically", "for", "o", "in", "options", ":", "setattr", "(", "obj", ",", "o", ",", "get_config_string_option", "(", "parser", ",", "section", ",", "o", ",", "default", "=", "default", ")", ")" ]
Reads config options and writes them as attributes of ``obj``, with attribute names as per ``options``. Args: obj: the object to modify parser: instance of :class:`ConfigParser` section: section name within config file options: option (variable) names within that section default: value to use for any missing options Returns:
[ "Reads", "config", "options", "and", "writes", "them", "as", "attributes", "of", "obj", "with", "attribute", "names", "as", "per", "options", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/configfiles.py#L67-L90
RudolfCardinal/pythonlib
cardinal_pythonlib/configfiles.py
get_config_multiline_option
def get_config_multiline_option(parser: ConfigParser, section: str, option: str, default: List[str] = None) -> List[str]: """ Retrieves a multi-line string value from a parser as a list of strings (one per line, ignoring blank lines). Args: parser: instance of :class:`ConfigParser` section: section name within config file option: option (variable) name within that section default: value to return if option is absent (``None`` is mapped to ``[]``) Returns: list of strings Raises: ValueError: if the section is absent """ default = default or [] if not parser.has_section(section): raise ValueError("config missing section: " + section) try: multiline = parser.get(section, option) values = [x.strip() for x in multiline.splitlines() if x.strip()] return values except NoOptionError: return default
python
def get_config_multiline_option(parser: ConfigParser, section: str, option: str, default: List[str] = None) -> List[str]: """ Retrieves a multi-line string value from a parser as a list of strings (one per line, ignoring blank lines). Args: parser: instance of :class:`ConfigParser` section: section name within config file option: option (variable) name within that section default: value to return if option is absent (``None`` is mapped to ``[]``) Returns: list of strings Raises: ValueError: if the section is absent """ default = default or [] if not parser.has_section(section): raise ValueError("config missing section: " + section) try: multiline = parser.get(section, option) values = [x.strip() for x in multiline.splitlines() if x.strip()] return values except NoOptionError: return default
[ "def", "get_config_multiline_option", "(", "parser", ":", "ConfigParser", ",", "section", ":", "str", ",", "option", ":", "str", ",", "default", ":", "List", "[", "str", "]", "=", "None", ")", "->", "List", "[", "str", "]", ":", "default", "=", "default", "or", "[", "]", "if", "not", "parser", ".", "has_section", "(", "section", ")", ":", "raise", "ValueError", "(", "\"config missing section: \"", "+", "section", ")", "try", ":", "multiline", "=", "parser", ".", "get", "(", "section", ",", "option", ")", "values", "=", "[", "x", ".", "strip", "(", ")", "for", "x", "in", "multiline", ".", "splitlines", "(", ")", "if", "x", ".", "strip", "(", ")", "]", "return", "values", "except", "NoOptionError", ":", "return", "default" ]
Retrieves a multi-line string value from a parser as a list of strings (one per line, ignoring blank lines). Args: parser: instance of :class:`ConfigParser` section: section name within config file option: option (variable) name within that section default: value to return if option is absent (``None`` is mapped to ``[]``) Returns: list of strings Raises: ValueError: if the section is absent
[ "Retrieves", "a", "multi", "-", "line", "string", "value", "from", "a", "parser", "as", "a", "list", "of", "strings", "(", "one", "per", "line", "ignoring", "blank", "lines", ")", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/configfiles.py#L93-L123
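A companion sketch for the multiline variant (same installation assumption): an indented multi-line option comes back as a list of stripped lines with blank lines dropped.

from configparser import ConfigParser
from cardinal_pythonlib.configfiles import get_config_multiline_option

parser = ConfigParser()
parser.read_string("""
[export]
tables =
    patients
    visits

    notes
""")

print(get_config_multiline_option(parser, "export", "tables"))
# ['patients', 'visits', 'notes']  -- the blank line is ignored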
RudolfCardinal/pythonlib
cardinal_pythonlib/configfiles.py
read_config_multiline_options
def read_config_multiline_options(obj: Any, parser: ConfigParser, section: str, options: Iterable[str]) -> None: """ This is to :func:`read_config_string_options` as :func:`get_config_multiline_option` is to :func:`get_config_string_option`. """ for o in options: setattr(obj, o, get_config_multiline_option(parser, section, o))
python
def read_config_multiline_options(obj: Any, parser: ConfigParser, section: str, options: Iterable[str]) -> None: """ This is to :func:`read_config_string_options` as :func:`get_config_multiline_option` is to :func:`get_config_string_option`. """ for o in options: setattr(obj, o, get_config_multiline_option(parser, section, o))
[ "def", "read_config_multiline_options", "(", "obj", ":", "Any", ",", "parser", ":", "ConfigParser", ",", "section", ":", "str", ",", "options", ":", "Iterable", "[", "str", "]", ")", "->", "None", ":", "for", "o", "in", "options", ":", "setattr", "(", "obj", ",", "o", ",", "get_config_multiline_option", "(", "parser", ",", "section", ",", "o", ")", ")" ]
This is to :func:`read_config_string_options` as :func:`get_config_multiline_option` is to :func:`get_config_string_option`.
[ "This", "is", "to", ":", "func", ":", "read_config_string_options", "as", ":", "func", ":", "get_config_multiline_option", "is", "to", ":", "func", ":", "get_config_string_option", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/configfiles.py#L126-L135
RudolfCardinal/pythonlib
cardinal_pythonlib/configfiles.py
get_config_bool_option
def get_config_bool_option(parser: ConfigParser, section: str, option: str, default: bool = None) -> bool: """ Retrieves a boolean value from a parser. Args: parser: instance of :class:`ConfigParser` section: section name within config file option: option (variable) name within that section default: value to return if option is absent Returns: string value Raises: ValueError: if the section is absent """ if not parser.has_section(section): raise ValueError("config missing section: " + section) return parser.getboolean(section, option, fallback=default)
python
def get_config_bool_option(parser: ConfigParser, section: str, option: str, default: bool = None) -> bool: """ Retrieves a boolean value from a parser. Args: parser: instance of :class:`ConfigParser` section: section name within config file option: option (variable) name within that section default: value to return if option is absent Returns: string value Raises: ValueError: if the section is absent """ if not parser.has_section(section): raise ValueError("config missing section: " + section) return parser.getboolean(section, option, fallback=default)
[ "def", "get_config_bool_option", "(", "parser", ":", "ConfigParser", ",", "section", ":", "str", ",", "option", ":", "str", ",", "default", ":", "bool", "=", "None", ")", "->", "bool", ":", "if", "not", "parser", ".", "has_section", "(", "section", ")", ":", "raise", "ValueError", "(", "\"config missing section: \"", "+", "section", ")", "return", "parser", ".", "getboolean", "(", "section", ",", "option", ",", "fallback", "=", "default", ")" ]
Retrieves a boolean value from a parser. Args: parser: instance of :class:`ConfigParser` section: section name within config file option: option (variable) name within that section default: value to return if option is absent Returns: string value Raises: ValueError: if the section is absent
[ "Retrieves", "a", "boolean", "value", "from", "a", "parser", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/configfiles.py#L138-L160
RudolfCardinal/pythonlib
cardinal_pythonlib/configfiles.py
get_config_parameter
def get_config_parameter(config: ConfigParser, section: str, param: str, fn: Callable[[Any], Any], default: Any) -> Any: """ Fetch parameter from ``configparser`` ``.INI`` file. Args: config: :class:`ConfigParser` object section: section name within config file param: name of parameter within section fn: function to apply to string parameter (e.g. ``int``) default: default value Returns: parameter value, or ``None`` if ``default is None``, or ``fn(default)`` """ try: value = fn(config.get(section, param)) except (TypeError, ValueError, NoOptionError): log.warning( "Configuration variable {} not found or improper in section [{}]; " "using default of {!r}", param, section, default) if default is None: value = default else: value = fn(default) return value
python
def get_config_parameter(config: ConfigParser, section: str, param: str, fn: Callable[[Any], Any], default: Any) -> Any: """ Fetch parameter from ``configparser`` ``.INI`` file. Args: config: :class:`ConfigParser` object section: section name within config file param: name of parameter within section fn: function to apply to string parameter (e.g. ``int``) default: default value Returns: parameter value, or ``None`` if ``default is None``, or ``fn(default)`` """ try: value = fn(config.get(section, param)) except (TypeError, ValueError, NoOptionError): log.warning( "Configuration variable {} not found or improper in section [{}]; " "using default of {!r}", param, section, default) if default is None: value = default else: value = fn(default) return value
[ "def", "get_config_parameter", "(", "config", ":", "ConfigParser", ",", "section", ":", "str", ",", "param", ":", "str", ",", "fn", ":", "Callable", "[", "[", "Any", "]", ",", "Any", "]", ",", "default", ":", "Any", ")", "->", "Any", ":", "try", ":", "value", "=", "fn", "(", "config", ".", "get", "(", "section", ",", "param", ")", ")", "except", "(", "TypeError", ",", "ValueError", ",", "NoOptionError", ")", ":", "log", ".", "warning", "(", "\"Configuration variable {} not found or improper in section [{}]; \"", "\"using default of {!r}\"", ",", "param", ",", "section", ",", "default", ")", "if", "default", "is", "None", ":", "value", "=", "default", "else", ":", "value", "=", "fn", "(", "default", ")", "return", "value" ]
Fetch parameter from ``configparser`` ``.INI`` file. Args: config: :class:`ConfigParser` object section: section name within config file param: name of parameter within section fn: function to apply to string parameter (e.g. ``int``) default: default value Returns: parameter value, or ``None`` if ``default is None``, or ``fn(default)``
[ "Fetch", "parameter", "from", "configparser", ".", "INI", "file", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/configfiles.py#L171-L199
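A usage sketch showing the conversion function and the fallback path, again assuming cardinal_pythonlib is installed.

from configparser import ConfigParser
from cardinal_pythonlib.configfiles import get_config_parameter

parser = ConfigParser()
parser.read_string("[server]\nport = 8088\n")

print(get_config_parameter(parser, "server", "port", int, 80))      # 8088 (string -> int)
print(get_config_parameter(parser, "server", "timeout", int, 30))   # 30, after a logged warning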
RudolfCardinal/pythonlib
cardinal_pythonlib/configfiles.py
get_config_parameter_boolean
def get_config_parameter_boolean(config: ConfigParser, section: str, param: str, default: bool) -> bool: """ Get Boolean parameter from ``configparser`` ``.INI`` file. Args: config: :class:`ConfigParser` object section: section name within config file param: name of parameter within section default: default value Returns: parameter value, or default """ try: value = config.getboolean(section, param) except (TypeError, ValueError, NoOptionError): log.warning( "Configuration variable {} not found or improper in section [{}]; " "using default of {!r}", param, section, default) value = default return value
python
def get_config_parameter_boolean(config: ConfigParser, section: str, param: str, default: bool) -> bool: """ Get Boolean parameter from ``configparser`` ``.INI`` file. Args: config: :class:`ConfigParser` object section: section name within config file param: name of parameter within section default: default value Returns: parameter value, or default """ try: value = config.getboolean(section, param) except (TypeError, ValueError, NoOptionError): log.warning( "Configuration variable {} not found or improper in section [{}]; " "using default of {!r}", param, section, default) value = default return value
[ "def", "get_config_parameter_boolean", "(", "config", ":", "ConfigParser", ",", "section", ":", "str", ",", "param", ":", "str", ",", "default", ":", "bool", ")", "->", "bool", ":", "try", ":", "value", "=", "config", ".", "getboolean", "(", "section", ",", "param", ")", "except", "(", "TypeError", ",", "ValueError", ",", "NoOptionError", ")", ":", "log", ".", "warning", "(", "\"Configuration variable {} not found or improper in section [{}]; \"", "\"using default of {!r}\"", ",", "param", ",", "section", ",", "default", ")", "value", "=", "default", "return", "value" ]
Get Boolean parameter from ``configparser`` ``.INI`` file. Args: config: :class:`ConfigParser` object section: section name within config file param: name of parameter within section default: default value Returns: parameter value, or default
[ "Get", "Boolean", "parameter", "from", "configparser", ".", "INI", "file", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/configfiles.py#L202-L224
RudolfCardinal/pythonlib
cardinal_pythonlib/configfiles.py
get_config_parameter_loglevel
def get_config_parameter_loglevel(config: ConfigParser, section: str, param: str, default: int) -> int: """ Get ``loglevel`` parameter from ``configparser`` ``.INI`` file, e.g. mapping ``'debug'`` to ``logging.DEBUG``. Args: config: :class:`ConfigParser` object section: section name within config file param: name of parameter within section default: default value Returns: parameter value, or default """ try: value = config.get(section, param).lower() if value == "debug": return logging.DEBUG # 10 elif value == "info": return logging.INFO elif value in ["warn", "warning"]: return logging.WARN elif value == "error": return logging.ERROR elif value in ["critical", "fatal"]: return logging.CRITICAL # 50 else: raise ValueError except (TypeError, ValueError, NoOptionError, AttributeError): log.warning( "Configuration variable {} not found or improper in section [{}]; " "using default of {!r}", param, section, default) return default
python
def get_config_parameter_loglevel(config: ConfigParser, section: str, param: str, default: int) -> int: """ Get ``loglevel`` parameter from ``configparser`` ``.INI`` file, e.g. mapping ``'debug'`` to ``logging.DEBUG``. Args: config: :class:`ConfigParser` object section: section name within config file param: name of parameter within section default: default value Returns: parameter value, or default """ try: value = config.get(section, param).lower() if value == "debug": return logging.DEBUG # 10 elif value == "info": return logging.INFO elif value in ["warn", "warning"]: return logging.WARN elif value == "error": return logging.ERROR elif value in ["critical", "fatal"]: return logging.CRITICAL # 50 else: raise ValueError except (TypeError, ValueError, NoOptionError, AttributeError): log.warning( "Configuration variable {} not found or improper in section [{}]; " "using default of {!r}", param, section, default) return default
[ "def", "get_config_parameter_loglevel", "(", "config", ":", "ConfigParser", ",", "section", ":", "str", ",", "param", ":", "str", ",", "default", ":", "int", ")", "->", "int", ":", "try", ":", "value", "=", "config", ".", "get", "(", "section", ",", "param", ")", ".", "lower", "(", ")", "if", "value", "==", "\"debug\"", ":", "return", "logging", ".", "DEBUG", "# 10", "elif", "value", "==", "\"info\"", ":", "return", "logging", ".", "INFO", "elif", "value", "in", "[", "\"warn\"", ",", "\"warning\"", "]", ":", "return", "logging", ".", "WARN", "elif", "value", "==", "\"error\"", ":", "return", "logging", ".", "ERROR", "elif", "value", "in", "[", "\"critical\"", ",", "\"fatal\"", "]", ":", "return", "logging", ".", "CRITICAL", "# 50", "else", ":", "raise", "ValueError", "except", "(", "TypeError", ",", "ValueError", ",", "NoOptionError", ",", "AttributeError", ")", ":", "log", ".", "warning", "(", "\"Configuration variable {} not found or improper in section [{}]; \"", "\"using default of {!r}\"", ",", "param", ",", "section", ",", "default", ")", "return", "default" ]
Get ``loglevel`` parameter from ``configparser`` ``.INI`` file, e.g. mapping ``'debug'`` to ``logging.DEBUG``. Args: config: :class:`ConfigParser` object section: section name within config file param: name of parameter within section default: default value Returns: parameter value, or default
[ "Get", "loglevel", "parameter", "from", "configparser", ".", "INI", "file", "e", ".", "g", ".", "mapping", "debug", "to", "logging", ".", "DEBUG", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/configfiles.py#L227-L261
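A short example of the log-level mapping (same installation assumption as above).

import logging
from configparser import ConfigParser
from cardinal_pythonlib.configfiles import get_config_parameter_loglevel

parser = ConfigParser()
parser.read_string("[logging]\nlevel = warning\n")

level = get_config_parameter_loglevel(parser, "logging", "level", logging.INFO)
print(level == logging.WARNING)   # True: 'warn'/'warning' map to logging.WARN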
RudolfCardinal/pythonlib
cardinal_pythonlib/configfiles.py
get_config_parameter_multiline
def get_config_parameter_multiline(config: ConfigParser, section: str, param: str, default: List[str]) -> List[str]: """ Get multi-line string parameter from ``configparser`` ``.INI`` file, as a list of strings (one per line, ignoring blank lines). Args: config: :class:`ConfigParser` object section: section name within config file param: name of parameter within section default: default value Returns: parameter value, or default """ try: multiline = config.get(section, param) lines = [x.strip() for x in multiline.splitlines()] return [line for line in lines if line] except (TypeError, ValueError, NoOptionError): log.warning( "Configuration variable {} not found or improper in section [{}]; " "using default of {!r}", param, section, default) return default
python
def get_config_parameter_multiline(config: ConfigParser, section: str, param: str, default: List[str]) -> List[str]: """ Get multi-line string parameter from ``configparser`` ``.INI`` file, as a list of strings (one per line, ignoring blank lines). Args: config: :class:`ConfigParser` object section: section name within config file param: name of parameter within section default: default value Returns: parameter value, or default """ try: multiline = config.get(section, param) lines = [x.strip() for x in multiline.splitlines()] return [line for line in lines if line] except (TypeError, ValueError, NoOptionError): log.warning( "Configuration variable {} not found or improper in section [{}]; " "using default of {!r}", param, section, default) return default
[ "def", "get_config_parameter_multiline", "(", "config", ":", "ConfigParser", ",", "section", ":", "str", ",", "param", ":", "str", ",", "default", ":", "List", "[", "str", "]", ")", "->", "List", "[", "str", "]", ":", "try", ":", "multiline", "=", "config", ".", "get", "(", "section", ",", "param", ")", "lines", "=", "[", "x", ".", "strip", "(", ")", "for", "x", "in", "multiline", ".", "splitlines", "(", ")", "]", "return", "[", "line", "for", "line", "in", "lines", "if", "line", "]", "except", "(", "TypeError", ",", "ValueError", ",", "NoOptionError", ")", ":", "log", ".", "warning", "(", "\"Configuration variable {} not found or improper in section [{}]; \"", "\"using default of {!r}\"", ",", "param", ",", "section", ",", "default", ")", "return", "default" ]
Get multi-line string parameter from ``configparser`` ``.INI`` file, as a list of strings (one per line, ignoring blank lines). Args: config: :class:`ConfigParser` object section: section name within config file param: name of parameter within section default: default value Returns: parameter value, or default
[ "Get", "multi", "-", "line", "string", "parameter", "from", "configparser", ".", "INI", "file", "as", "a", "list", "of", "strings", "(", "one", "per", "line", "ignoring", "blank", "lines", ")", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/configfiles.py#L264-L288
AndrewWalker/glud
glud/predicates.py
is_definition
def is_definition(cursor): """Test if a cursor refers to a definition This occurs when the cursor has a definition, and shares the location of that definiton """ defn = cursor.get_definition() return (defn is not None) and (cursor.location == defn.location)
python
def is_definition(cursor): """Test if a cursor refers to a definition This occurs when the cursor has a definition, and shares the location of that definiton """ defn = cursor.get_definition() return (defn is not None) and (cursor.location == defn.location)
[ "def", "is_definition", "(", "cursor", ")", ":", "defn", "=", "cursor", ".", "get_definition", "(", ")", "return", "(", "defn", "is", "not", "None", ")", "and", "(", "cursor", ".", "location", "==", "defn", ".", "location", ")" ]
Test if a cursor refers to a definition This occurs when the cursor has a definition, and shares the location of that definiton
[ "Test", "if", "a", "cursor", "refers", "to", "a", "definition" ]
train
https://github.com/AndrewWalker/glud/blob/57de000627fed13d0c383f131163795b09549257/glud/predicates.py#L38-L44
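A hedged sketch of applying is_definition while walking a translation unit. It assumes the libclang Python bindings (clang.cindex) with a working libclang install, and that glud exposes the predicate at the module path shown in the record; none of that is confirmed here.

import clang.cindex
from glud.predicates import is_definition

index = clang.cindex.Index.create()
tu = index.parse("example.hpp", args=["-x", "c++", "-std=c++11"],
                 unsaved_files=[("example.hpp", "class Foo {}; class Bar;")])

for cursor in tu.cursor.walk_preorder():
    if cursor.kind == clang.cindex.CursorKind.CLASS_DECL:
        # Foo is defined here (True); Bar is only forward-declared (False).
        print(cursor.spelling, is_definition(cursor))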
davenquinn/Attitude
attitude/error/__init__.py
asymptotes
def asymptotes(hyp, n=1000): """ Gets a cone of asymptotes for hyperbola """ assert N.linalg.norm(hyp.center()) == 0 u = N.linspace(0,2*N.pi,n) _ = N.ones(len(u)) angles = N.array([N.cos(u),N.sin(u),_]).T return dot(angles,hyp[:-1,:-1])
python
def asymptotes(hyp, n=1000): """ Gets a cone of asymptotes for hyperbola """ assert N.linalg.norm(hyp.center()) == 0 u = N.linspace(0,2*N.pi,n) _ = N.ones(len(u)) angles = N.array([N.cos(u),N.sin(u),_]).T return dot(angles,hyp[:-1,:-1])
[ "def", "asymptotes", "(", "hyp", ",", "n", "=", "1000", ")", ":", "assert", "N", ".", "linalg", ".", "norm", "(", "hyp", ".", "center", "(", ")", ")", "==", "0", "u", "=", "N", ".", "linspace", "(", "0", ",", "2", "*", "N", ".", "pi", ",", "n", ")", "_", "=", "N", ".", "ones", "(", "len", "(", "u", ")", ")", "angles", "=", "N", ".", "array", "(", "[", "N", ".", "cos", "(", "u", ")", ",", "N", ".", "sin", "(", "u", ")", ",", "_", "]", ")", ".", "T", "return", "dot", "(", "angles", ",", "hyp", "[", ":", "-", "1", ",", ":", "-", "1", "]", ")" ]
Gets a cone of asymptotes for hyperbola
[ "Gets", "a", "cone", "of", "asymptotes", "for", "hyperbola" ]
train
https://github.com/davenquinn/Attitude/blob/2ce97b9aba0aa5deedc6617c2315e07e6396d240/attitude/error/__init__.py#L12-L21
davenquinn/Attitude
attitude/error/__init__.py
pca_to_mapping
def pca_to_mapping(pca,**extra_props): """ A helper to return a mapping of a PCA result set suitable for reconstructing a planar error surface in other software packages kwargs: method (defaults to sampling axes) """ from .axes import sampling_axes method = extra_props.pop('method',sampling_axes) return dict( axes=pca.axes.tolist(), covariance=method(pca).tolist(), **extra_props)
python
def pca_to_mapping(pca,**extra_props): """ A helper to return a mapping of a PCA result set suitable for reconstructing a planar error surface in other software packages kwargs: method (defaults to sampling axes) """ from .axes import sampling_axes method = extra_props.pop('method',sampling_axes) return dict( axes=pca.axes.tolist(), covariance=method(pca).tolist(), **extra_props)
[ "def", "pca_to_mapping", "(", "pca", ",", "*", "*", "extra_props", ")", ":", "from", ".", "axes", "import", "sampling_axes", "method", "=", "extra_props", ".", "pop", "(", "'method'", ",", "sampling_axes", ")", "return", "dict", "(", "axes", "=", "pca", ".", "axes", ".", "tolist", "(", ")", ",", "covariance", "=", "method", "(", "pca", ")", ".", "tolist", "(", ")", ",", "*", "*", "extra_props", ")" ]
A helper to return a mapping of a PCA result set suitable for reconstructing a planar error surface in other software packages kwargs: method (defaults to sampling axes)
[ "A", "helper", "to", "return", "a", "mapping", "of", "a", "PCA", "result", "set", "suitable", "for", "reconstructing", "a", "planar", "error", "surface", "in", "other", "software", "packages" ]
train
https://github.com/davenquinn/Attitude/blob/2ce97b9aba0aa5deedc6617c2315e07e6396d240/attitude/error/__init__.py#L23-L35
RudolfCardinal/pythonlib
cardinal_pythonlib/winservice.py
generic_service_main
def generic_service_main(cls: Type[WindowsService], name: str) -> None: """ Call this from your command-line entry point to manage a service. - Via inherited functions, enables you to ``install``, ``update``, ``remove``, ``start``, ``stop``, and ``restart`` the service. - Via our additional code, allows you to run the service function directly from the command line in debug mode, using the ``debug`` command. - Run with an invalid command like ``help`` to see help (!). See https://mail.python.org/pipermail/python-win32/2008-April/007299.html Args: cls: class deriving from :class:`WindowsService` name: name of this service """ argc = len(sys.argv) if argc == 1: try: print("Trying to start service directly...") evtsrc_dll = os.path.abspath(servicemanager.__file__) # noinspection PyUnresolvedReferences servicemanager.PrepareToHostSingle(cls) # <-- sets up the service # noinspection PyUnresolvedReferences servicemanager.Initialize(name, evtsrc_dll) # noinspection PyUnresolvedReferences servicemanager.StartServiceCtrlDispatcher() except win32service.error as details: print("Failed: {}".format(details)) # print(repr(details.__dict__)) errnum = details.winerror if errnum == winerror.ERROR_FAILED_SERVICE_CONTROLLER_CONNECT: win32serviceutil.usage() elif argc == 2 and sys.argv[1] == 'debug': s = cls() s.run_debug() else: win32serviceutil.HandleCommandLine(cls)
python
def generic_service_main(cls: Type[WindowsService], name: str) -> None: """ Call this from your command-line entry point to manage a service. - Via inherited functions, enables you to ``install``, ``update``, ``remove``, ``start``, ``stop``, and ``restart`` the service. - Via our additional code, allows you to run the service function directly from the command line in debug mode, using the ``debug`` command. - Run with an invalid command like ``help`` to see help (!). See https://mail.python.org/pipermail/python-win32/2008-April/007299.html Args: cls: class deriving from :class:`WindowsService` name: name of this service """ argc = len(sys.argv) if argc == 1: try: print("Trying to start service directly...") evtsrc_dll = os.path.abspath(servicemanager.__file__) # noinspection PyUnresolvedReferences servicemanager.PrepareToHostSingle(cls) # <-- sets up the service # noinspection PyUnresolvedReferences servicemanager.Initialize(name, evtsrc_dll) # noinspection PyUnresolvedReferences servicemanager.StartServiceCtrlDispatcher() except win32service.error as details: print("Failed: {}".format(details)) # print(repr(details.__dict__)) errnum = details.winerror if errnum == winerror.ERROR_FAILED_SERVICE_CONTROLLER_CONNECT: win32serviceutil.usage() elif argc == 2 and sys.argv[1] == 'debug': s = cls() s.run_debug() else: win32serviceutil.HandleCommandLine(cls)
[ "def", "generic_service_main", "(", "cls", ":", "Type", "[", "WindowsService", "]", ",", "name", ":", "str", ")", "->", "None", ":", "argc", "=", "len", "(", "sys", ".", "argv", ")", "if", "argc", "==", "1", ":", "try", ":", "print", "(", "\"Trying to start service directly...\"", ")", "evtsrc_dll", "=", "os", ".", "path", ".", "abspath", "(", "servicemanager", ".", "__file__", ")", "# noinspection PyUnresolvedReferences", "servicemanager", ".", "PrepareToHostSingle", "(", "cls", ")", "# <-- sets up the service", "# noinspection PyUnresolvedReferences", "servicemanager", ".", "Initialize", "(", "name", ",", "evtsrc_dll", ")", "# noinspection PyUnresolvedReferences", "servicemanager", ".", "StartServiceCtrlDispatcher", "(", ")", "except", "win32service", ".", "error", "as", "details", ":", "print", "(", "\"Failed: {}\"", ".", "format", "(", "details", ")", ")", "# print(repr(details.__dict__))", "errnum", "=", "details", ".", "winerror", "if", "errnum", "==", "winerror", ".", "ERROR_FAILED_SERVICE_CONTROLLER_CONNECT", ":", "win32serviceutil", ".", "usage", "(", ")", "elif", "argc", "==", "2", "and", "sys", ".", "argv", "[", "1", "]", "==", "'debug'", ":", "s", "=", "cls", "(", ")", "s", ".", "run_debug", "(", ")", "else", ":", "win32serviceutil", ".", "HandleCommandLine", "(", "cls", ")" ]
Call this from your command-line entry point to manage a service. - Via inherited functions, enables you to ``install``, ``update``, ``remove``, ``start``, ``stop``, and ``restart`` the service. - Via our additional code, allows you to run the service function directly from the command line in debug mode, using the ``debug`` command. - Run with an invalid command like ``help`` to see help (!). See https://mail.python.org/pipermail/python-win32/2008-April/007299.html Args: cls: class deriving from :class:`WindowsService` name: name of this service
[ "Call", "this", "from", "your", "command", "-", "line", "entry", "point", "to", "manage", "a", "service", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/winservice.py#L1004-L1042
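A hedged sketch of the entry-point pattern the docstring describes (Windows only; assumes pywin32 and cardinal_pythonlib are installed). The _svc_name_ / _svc_display_name_ attributes are the usual pywin32 conventions and are an assumption here, not something the record confirms.

from cardinal_pythonlib.winservice import WindowsService, generic_service_main

class MyService(WindowsService):
    # Conventional pywin32 service metadata (assumed attribute names).
    _svc_name_ = "my_service"
    _svc_display_name_ = "My demonstration service"

if __name__ == "__main__":
    # e.g. "python myservice.py install|start|stop|remove", or "debug"
    # to run the service function directly in the foreground.
    generic_service_main(MyService, "my_service")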
RudolfCardinal/pythonlib
cardinal_pythonlib/winservice.py
ProcessManager.fullname
def fullname(self) -> str: """ Description of the process. """ fullname = "Process {}/{} ({})".format(self.procnum, self.nprocs, self.details.name) if self.running: fullname += " (PID={})".format(self.process.pid) return fullname
python
def fullname(self) -> str: """ Description of the process. """ fullname = "Process {}/{} ({})".format(self.procnum, self.nprocs, self.details.name) if self.running: fullname += " (PID={})".format(self.process.pid) return fullname
[ "def", "fullname", "(", "self", ")", "->", "str", ":", "fullname", "=", "\"Process {}/{} ({})\"", ".", "format", "(", "self", ".", "procnum", ",", "self", ".", "nprocs", ",", "self", ".", "details", ".", "name", ")", "if", "self", ".", "running", ":", "fullname", "+=", "\" (PID={})\"", ".", "format", "(", "self", ".", "process", ".", "pid", ")", "return", "fullname" ]
Description of the process.
[ "Description", "of", "the", "process", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/winservice.py#L375-L383
RudolfCardinal/pythonlib
cardinal_pythonlib/winservice.py
ProcessManager.debug
def debug(self, msg: str) -> None: """ If we are being verbose, write a debug message to the Python disk log. """ if self.debugging: s = "{}: {}".format(self.fullname, msg) log.debug(s)
python
def debug(self, msg: str) -> None: """ If we are being verbose, write a debug message to the Python disk log. """ if self.debugging: s = "{}: {}".format(self.fullname, msg) log.debug(s)
[ "def", "debug", "(", "self", ",", "msg", ":", "str", ")", "->", "None", ":", "if", "self", ".", "debugging", ":", "s", "=", "\"{}: {}\"", ".", "format", "(", "self", ".", "fullname", ",", "msg", ")", "log", ".", "debug", "(", "s", ")" ]
If we are being verbose, write a debug message to the Python disk log.
[ "If", "we", "are", "being", "verbose", "write", "a", "debug", "message", "to", "the", "Python", "disk", "log", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/winservice.py#L389-L395
RudolfCardinal/pythonlib
cardinal_pythonlib/winservice.py
ProcessManager.info
def info(self, msg: str) -> None: """ Write an info message to the Windows Application log (± to the Python disk log). """ # noinspection PyUnresolvedReferences s = "{}: {}".format(self.fullname, msg) servicemanager.LogInfoMsg(s) if self.debugging: log.info(s)
python
def info(self, msg: str) -> None: """ Write an info message to the Windows Application log (± to the Python disk log). """ # noinspection PyUnresolvedReferences s = "{}: {}".format(self.fullname, msg) servicemanager.LogInfoMsg(s) if self.debugging: log.info(s)
[ "def", "info", "(", "self", ",", "msg", ":", "str", ")", "->", "None", ":", "# noinspection PyUnresolvedReferences", "s", "=", "\"{}: {}\"", ".", "format", "(", "self", ".", "fullname", ",", "msg", ")", "servicemanager", ".", "LogInfoMsg", "(", "s", ")", "if", "self", ".", "debugging", ":", "log", ".", "info", "(", "s", ")" ]
Write an info message to the Windows Application log (± to the Python disk log).
[ "Write", "an", "info", "message", "to", "the", "Windows", "Application", "log", "(", "±", "to", "the", "Python", "disk", "log", ")", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/winservice.py#L397-L406
RudolfCardinal/pythonlib
cardinal_pythonlib/winservice.py
ProcessManager.warning
def warning(self, msg: str) -> None: """ Write a warning message to the Windows Application log (± to the Python disk log). """ # Log messages go to the Windows APPLICATION log. # noinspection PyUnresolvedReferences s = "{}: {}".format(self.fullname, msg) servicemanager.LogWarningMsg(s) if self.debugging: log.warning(s)
python
def warning(self, msg: str) -> None: """ Write a warning message to the Windows Application log (± to the Python disk log). """ # Log messages go to the Windows APPLICATION log. # noinspection PyUnresolvedReferences s = "{}: {}".format(self.fullname, msg) servicemanager.LogWarningMsg(s) if self.debugging: log.warning(s)
[ "def", "warning", "(", "self", ",", "msg", ":", "str", ")", "->", "None", ":", "# Log messages go to the Windows APPLICATION log.", "# noinspection PyUnresolvedReferences", "s", "=", "\"{}: {}\"", ".", "format", "(", "self", ".", "fullname", ",", "msg", ")", "servicemanager", ".", "LogWarningMsg", "(", "s", ")", "if", "self", ".", "debugging", ":", "log", ".", "warning", "(", "s", ")" ]
Write a warning message to the Windows Application log (± to the Python disk log).
[ "Write", "a", "warning", "message", "to", "the", "Windows", "Application", "log", "(", "±", "to", "the", "Python", "disk", "log", ")", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/winservice.py#L408-L418
RudolfCardinal/pythonlib
cardinal_pythonlib/winservice.py
ProcessManager.error
def error(self, msg: str) -> None: """ Write an error message to the Windows Application log (± to the Python disk log). """ # noinspection PyUnresolvedReferences s = "{}: {}".format(self.fullname, msg) servicemanager.LogErrorMsg(s) if self.debugging: log.warning(s)
python
def error(self, msg: str) -> None: """ Write an error message to the Windows Application log (± to the Python disk log). """ # noinspection PyUnresolvedReferences s = "{}: {}".format(self.fullname, msg) servicemanager.LogErrorMsg(s) if self.debugging: log.warning(s)
[ "def", "error", "(", "self", ",", "msg", ":", "str", ")", "->", "None", ":", "# noinspection PyUnresolvedReferences", "s", "=", "\"{}: {}\"", ".", "format", "(", "self", ".", "fullname", ",", "msg", ")", "servicemanager", ".", "LogErrorMsg", "(", "s", ")", "if", "self", ".", "debugging", ":", "log", ".", "warning", "(", "s", ")" ]
Write an error message to the Windows Application log (± to the Python disk log).
[ "Write", "an", "error", "message", "to", "the", "Windows", "Application", "log", "(", "±", "to", "the", "Python", "disk", "log", ")", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/winservice.py#L420-L429
RudolfCardinal/pythonlib
cardinal_pythonlib/winservice.py
ProcessManager.open_logs
def open_logs(self) -> None: """ Open Python disk logs. """ if self.details.logfile_out: self.stdout = open(self.details.logfile_out, 'a') else: self.stdout = None if self.details.logfile_err: if self.details.logfile_err == self.details.logfile_out: self.stderr = subprocess.STDOUT else: self.stderr = open(self.details.logfile_err, 'a') else: self.stderr = None
python
def open_logs(self) -> None: """ Open Python disk logs. """ if self.details.logfile_out: self.stdout = open(self.details.logfile_out, 'a') else: self.stdout = None if self.details.logfile_err: if self.details.logfile_err == self.details.logfile_out: self.stderr = subprocess.STDOUT else: self.stderr = open(self.details.logfile_err, 'a') else: self.stderr = None
[ "def", "open_logs", "(", "self", ")", "->", "None", ":", "if", "self", ".", "details", ".", "logfile_out", ":", "self", ".", "stdout", "=", "open", "(", "self", ".", "details", ".", "logfile_out", ",", "'a'", ")", "else", ":", "self", ".", "stdout", "=", "None", "if", "self", ".", "details", ".", "logfile_err", ":", "if", "self", ".", "details", ".", "logfile_err", "==", "self", ".", "details", ".", "logfile_out", ":", "self", ".", "stderr", "=", "subprocess", ".", "STDOUT", "else", ":", "self", ".", "stderr", "=", "open", "(", "self", ".", "details", ".", "logfile_err", ",", "'a'", ")", "else", ":", "self", ".", "stderr", "=", "None" ]
Open Python disk logs.
[ "Open", "Python", "disk", "logs", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/winservice.py#L431-L445
RudolfCardinal/pythonlib
cardinal_pythonlib/winservice.py
ProcessManager.close_logs
def close_logs(self) -> None: """ Close Python disk logs. """ if self.stdout is not None: self.stdout.close() self.stdout = None if self.stderr is not None and self.stderr != subprocess.STDOUT: self.stderr.close() self.stderr = None
python
def close_logs(self) -> None: """ Close Python disk logs. """ if self.stdout is not None: self.stdout.close() self.stdout = None if self.stderr is not None and self.stderr != subprocess.STDOUT: self.stderr.close() self.stderr = None
[ "def", "close_logs", "(", "self", ")", "->", "None", ":", "if", "self", ".", "stdout", "is", "not", "None", ":", "self", ".", "stdout", ".", "close", "(", ")", "self", ".", "stdout", "=", "None", "if", "self", ".", "stderr", "is", "not", "None", "and", "self", ".", "stderr", "!=", "subprocess", ".", "STDOUT", ":", "self", ".", "stderr", ".", "close", "(", ")", "self", ".", "stderr", "=", "None" ]
Close Python disk logs.
[ "Close", "Python", "disk", "logs", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/winservice.py#L447-L456
RudolfCardinal/pythonlib
cardinal_pythonlib/winservice.py
ProcessManager.start
def start(self) -> None: """ Starts a subprocess. Optionally routes its output to our disk logs. """ if self.running: return self.info("Starting: {} (with logs stdout={}, stderr={})".format( self.details.procargs, self.details.logfile_out, self.details.logfile_err)) self.open_logs() creationflags = CREATE_NEW_PROCESS_GROUP if WINDOWS else 0 # self.warning("creationflags: {}".format(creationflags)) self.process = subprocess.Popen(self.details.procargs, stdin=None, stdout=self.stdout, stderr=self.stderr, creationflags=creationflags) self.running = True
python
def start(self) -> None: """ Starts a subprocess. Optionally routes its output to our disk logs. """ if self.running: return self.info("Starting: {} (with logs stdout={}, stderr={})".format( self.details.procargs, self.details.logfile_out, self.details.logfile_err)) self.open_logs() creationflags = CREATE_NEW_PROCESS_GROUP if WINDOWS else 0 # self.warning("creationflags: {}".format(creationflags)) self.process = subprocess.Popen(self.details.procargs, stdin=None, stdout=self.stdout, stderr=self.stderr, creationflags=creationflags) self.running = True
[ "def", "start", "(", "self", ")", "->", "None", ":", "if", "self", ".", "running", ":", "return", "self", ".", "info", "(", "\"Starting: {} (with logs stdout={}, stderr={})\"", ".", "format", "(", "self", ".", "details", ".", "procargs", ",", "self", ".", "details", ".", "logfile_out", ",", "self", ".", "details", ".", "logfile_err", ")", ")", "self", ".", "open_logs", "(", ")", "creationflags", "=", "CREATE_NEW_PROCESS_GROUP", "if", "WINDOWS", "else", "0", "# self.warning(\"creationflags: {}\".format(creationflags))", "self", ".", "process", "=", "subprocess", ".", "Popen", "(", "self", ".", "details", ".", "procargs", ",", "stdin", "=", "None", ",", "stdout", "=", "self", ".", "stdout", ",", "stderr", "=", "self", ".", "stderr", ",", "creationflags", "=", "creationflags", ")", "self", ".", "running", "=", "True" ]
Starts a subprocess. Optionally routes its output to our disk logs.
[ "Starts", "a", "subprocess", ".", "Optionally", "routes", "its", "output", "to", "our", "disk", "logs", "." ]
train
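A minimal sketch (standard library only, not this library's API) of the pattern used in start(): launch the child in its own process group on Windows so that console control events can be delivered to it later, and append its output to a disk log. The log file name is illustrative.

.. code-block:: python

    # Sketch: child in a new process group on Windows, output appended to a log.
    import subprocess
    import sys

    WINDOWS = sys.platform == "win32"
    # subprocess.CREATE_NEW_PROCESS_GROUP exists on Windows builds of Python;
    # the conditional keeps the attribute from being touched elsewhere.
    creationflags = subprocess.CREATE_NEW_PROCESS_GROUP if WINDOWS else 0

    with open("child_stdout.log", "a") as out:
        proc = subprocess.Popen(
            [sys.executable, "-c", "print('hello from child')"],
            stdin=None,
            stdout=out,
            stderr=subprocess.STDOUT,   # merge stderr into the same log
            creationflags=creationflags,
        )
        proc.wait()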
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/winservice.py#L462-L478
RudolfCardinal/pythonlib
cardinal_pythonlib/winservice.py
ProcessManager.stop
def stop(self) -> None: """ Stops a subprocess. Asks nicely. Waits. Asks less nicely. Repeat until subprocess is dead. .. todo:: cardinal_pythonlib.winservice.ProcessManager._kill: make it reliable under Windows """ if not self.running: return try: self.wait(timeout_s=0) # If we get here: stopped already except subprocess.TimeoutExpired: # still running for kill_level in self.ALL_KILL_LEVELS: tried_to_kill = self._terminate(level=kill_level) # please stop if tried_to_kill: try: self.wait(timeout_s=self.kill_timeout_sec) break except subprocess.TimeoutExpired: # failed to close self.warning("Subprocess didn't stop when asked") pass # carry on escalating self.close_logs() self.running = False
python
def stop(self) -> None: """ Stops a subprocess. Asks nicely. Waits. Asks less nicely. Repeat until subprocess is dead. .. todo:: cardinal_pythonlib.winservice.ProcessManager._kill: make it reliable under Windows """ if not self.running: return try: self.wait(timeout_s=0) # If we get here: stopped already except subprocess.TimeoutExpired: # still running for kill_level in self.ALL_KILL_LEVELS: tried_to_kill = self._terminate(level=kill_level) # please stop if tried_to_kill: try: self.wait(timeout_s=self.kill_timeout_sec) break except subprocess.TimeoutExpired: # failed to close self.warning("Subprocess didn't stop when asked") pass # carry on escalating self.close_logs() self.running = False
[ "def", "stop", "(", "self", ")", "->", "None", ":", "if", "not", "self", ".", "running", ":", "return", "try", ":", "self", ".", "wait", "(", "timeout_s", "=", "0", ")", "# If we get here: stopped already", "except", "subprocess", ".", "TimeoutExpired", ":", "# still running", "for", "kill_level", "in", "self", ".", "ALL_KILL_LEVELS", ":", "tried_to_kill", "=", "self", ".", "_terminate", "(", "level", "=", "kill_level", ")", "# please stop", "if", "tried_to_kill", ":", "try", ":", "self", ".", "wait", "(", "timeout_s", "=", "self", ".", "kill_timeout_sec", ")", "break", "except", "subprocess", ".", "TimeoutExpired", ":", "# failed to close", "self", ".", "warning", "(", "\"Subprocess didn't stop when asked\"", ")", "pass", "# carry on escalating", "self", ".", "close_logs", "(", ")", "self", ".", "running", "=", "False" ]
Stops a subprocess. Asks nicely. Waits. Asks less nicely. Repeat until subprocess is dead. .. todo:: cardinal_pythonlib.winservice.ProcessManager._kill: make it reliable under Windows
[ "Stops", "a", "subprocess", "." ]
train
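The escalation in stop() (poll first, ask politely, then force) can be sketched with the standard library alone; this is an analogy to the method above, not the library's code.

.. code-block:: python

    # Generic escalate-then-kill pattern, analogous to stop():
    # check whether the child already exited, ask it to stop, then force it.
    import subprocess
    import sys

    proc = subprocess.Popen([sys.executable, "-c", "import time; time.sleep(60)"])
    try:
        proc.wait(timeout=0)        # already finished?
    except subprocess.TimeoutExpired:
        proc.terminate()            # polite request (SIGTERM on POSIX)
        try:
            proc.wait(timeout=5)
        except subprocess.TimeoutExpired:
            proc.kill()             # last resort (SIGKILL on POSIX)
            proc.wait()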
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/winservice.py#L480-L506
RudolfCardinal/pythonlib
cardinal_pythonlib/winservice.py
ProcessManager._terminate
def _terminate(self, level: int) -> bool: """ Returns: succeeded in *attempting* a kill? """ if not self.running: return True # Already closed by itself? try: self.wait(0) return True except subprocess.TimeoutExpired: # failed to close pass # SEE NOTES ABOVE. This is tricky under Windows. suffix = " [to child process {}]".format(self.process.pid) if level == self.KILL_LEVEL_CTRL_C_OR_SOFT_KILL: if WINDOWS: success = 0 != ctypes.windll.kernel32.GenerateConsoleCtrlEvent( CTRL_C_EVENT, self.process.pid) if success: self.info("Sent CTRL-C to request stop" + suffix) # ... but that doesn't mean it'll stop... else: self.info("Failed to send CTRL-C" + suffix) return success else: self.warning("Asking process to stop (SIGTERM)" + suffix) self.process.terminate() # soft kill under POSIX return True elif level == self.KILL_LEVEL_CTRL_BREAK: if not WINDOWS: return False success = 0 != ctypes.windll.kernel32.GenerateConsoleCtrlEvent( CTRL_BREAK_EVENT, self.process.pid) if success: self.info("Sent CTRL-BREAK to request stop" + suffix) else: self.info("Failed to send CTRL-BREAK" + suffix) return success elif level == self.KILL_LEVEL_TASKKILL: if not WINDOWS: return False retcode = self._taskkill(force=False) # does its own info messages return retcode == winerror.ERROR_SUCCESS elif level == self.KILL_LEVEL_TASKKILL_FORCE: if not WINDOWS: return False retcode = self._taskkill(force=True) # does its own info messages return retcode == winerror.ERROR_SUCCESS elif level == self.KILL_LEVEL_HARD_KILL: # Last resort self._kill() # may do TASKKILL /F or some other method return True else: raise ValueError("Bad kill level requested")
python
def _terminate(self, level: int) -> bool: """ Returns: succeeded in *attempting* a kill? """ if not self.running: return True # Already closed by itself? try: self.wait(0) return True except subprocess.TimeoutExpired: # failed to close pass # SEE NOTES ABOVE. This is tricky under Windows. suffix = " [to child process {}]".format(self.process.pid) if level == self.KILL_LEVEL_CTRL_C_OR_SOFT_KILL: if WINDOWS: success = 0 != ctypes.windll.kernel32.GenerateConsoleCtrlEvent( CTRL_C_EVENT, self.process.pid) if success: self.info("Sent CTRL-C to request stop" + suffix) # ... but that doesn't mean it'll stop... else: self.info("Failed to send CTRL-C" + suffix) return success else: self.warning("Asking process to stop (SIGTERM)" + suffix) self.process.terminate() # soft kill under POSIX return True elif level == self.KILL_LEVEL_CTRL_BREAK: if not WINDOWS: return False success = 0 != ctypes.windll.kernel32.GenerateConsoleCtrlEvent( CTRL_BREAK_EVENT, self.process.pid) if success: self.info("Sent CTRL-BREAK to request stop" + suffix) else: self.info("Failed to send CTRL-BREAK" + suffix) return success elif level == self.KILL_LEVEL_TASKKILL: if not WINDOWS: return False retcode = self._taskkill(force=False) # does its own info messages return retcode == winerror.ERROR_SUCCESS elif level == self.KILL_LEVEL_TASKKILL_FORCE: if not WINDOWS: return False retcode = self._taskkill(force=True) # does its own info messages return retcode == winerror.ERROR_SUCCESS elif level == self.KILL_LEVEL_HARD_KILL: # Last resort self._kill() # may do TASKKILL /F or some other method return True else: raise ValueError("Bad kill level requested")
[ "def", "_terminate", "(", "self", ",", "level", ":", "int", ")", "->", "bool", ":", "if", "not", "self", ".", "running", ":", "return", "True", "# Already closed by itself?", "try", ":", "self", ".", "wait", "(", "0", ")", "return", "True", "except", "subprocess", ".", "TimeoutExpired", ":", "# failed to close", "pass", "# SEE NOTES ABOVE. This is tricky under Windows.", "suffix", "=", "\" [to child process {}]\"", ".", "format", "(", "self", ".", "process", ".", "pid", ")", "if", "level", "==", "self", ".", "KILL_LEVEL_CTRL_C_OR_SOFT_KILL", ":", "if", "WINDOWS", ":", "success", "=", "0", "!=", "ctypes", ".", "windll", ".", "kernel32", ".", "GenerateConsoleCtrlEvent", "(", "CTRL_C_EVENT", ",", "self", ".", "process", ".", "pid", ")", "if", "success", ":", "self", ".", "info", "(", "\"Sent CTRL-C to request stop\"", "+", "suffix", ")", "# ... but that doesn't mean it'll stop...", "else", ":", "self", ".", "info", "(", "\"Failed to send CTRL-C\"", "+", "suffix", ")", "return", "success", "else", ":", "self", ".", "warning", "(", "\"Asking process to stop (SIGTERM)\"", "+", "suffix", ")", "self", ".", "process", ".", "terminate", "(", ")", "# soft kill under POSIX", "return", "True", "elif", "level", "==", "self", ".", "KILL_LEVEL_CTRL_BREAK", ":", "if", "not", "WINDOWS", ":", "return", "False", "success", "=", "0", "!=", "ctypes", ".", "windll", ".", "kernel32", ".", "GenerateConsoleCtrlEvent", "(", "CTRL_BREAK_EVENT", ",", "self", ".", "process", ".", "pid", ")", "if", "success", ":", "self", ".", "info", "(", "\"Sent CTRL-BREAK to request stop\"", "+", "suffix", ")", "else", ":", "self", ".", "info", "(", "\"Failed to send CTRL-BREAK\"", "+", "suffix", ")", "return", "success", "elif", "level", "==", "self", ".", "KILL_LEVEL_TASKKILL", ":", "if", "not", "WINDOWS", ":", "return", "False", "retcode", "=", "self", ".", "_taskkill", "(", "force", "=", "False", ")", "# does its own info messages", "return", "retcode", "==", "winerror", ".", "ERROR_SUCCESS", "elif", "level", "==", "self", ".", "KILL_LEVEL_TASKKILL_FORCE", ":", "if", "not", "WINDOWS", ":", "return", "False", "retcode", "=", "self", ".", "_taskkill", "(", "force", "=", "True", ")", "# does its own info messages", "return", "retcode", "==", "winerror", ".", "ERROR_SUCCESS", "elif", "level", "==", "self", ".", "KILL_LEVEL_HARD_KILL", ":", "# Last resort", "self", ".", "_kill", "(", ")", "# may do TASKKILL /F or some other method", "return", "True", "else", ":", "raise", "ValueError", "(", "\"Bad kill level requested\"", ")" ]
Returns: succeeded in *attempting* a kill?
[ "Returns", ":", "succeeded", "in", "*", "attempting", "*", "a", "kill?" ]
train
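For the CTRL-BREAK branch above, a hedged Windows-only sketch using ctypes; signal.CTRL_BREAK_EVENT supplies the event constant, and Popen.send_signal offers the same effect without ctypes.

.. code-block:: python

    # Sketch (Windows only): send CTRL_BREAK_EVENT to a child that was started
    # with CREATE_NEW_PROCESS_GROUP, as _terminate() does via ctypes.
    import ctypes
    import signal
    import subprocess
    import sys

    child = subprocess.Popen(
        [sys.executable, "-c", "import time; time.sleep(60)"],
        creationflags=subprocess.CREATE_NEW_PROCESS_GROUP,
    )
    ok = ctypes.windll.kernel32.GenerateConsoleCtrlEvent(
        signal.CTRL_BREAK_EVENT, child.pid)
    if not ok:
        print("GenerateConsoleCtrlEvent failed")
    # Equivalent, without ctypes: child.send_signal(signal.CTRL_BREAK_EVENT)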
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/winservice.py#L508-L569
RudolfCardinal/pythonlib
cardinal_pythonlib/winservice.py
ProcessManager._taskkill
def _taskkill(self, force: bool = False) -> int: """ Executes a Windows ``TASKKILL /pid PROCESS_ID /t`` command (``/t`` for "tree kill" = "kill all children"). Args: force: also add ``/f`` (forcefully) Returns: return code from ``TASKKILL`` **Test code:** Firstly we need a program that won't let itself be killed. Save this as ``nokill.py``: .. code-block:: python #!/usr/bin/env python import logging import time import os from cardinal_pythonlib.logs import main_only_quicksetup_rootlogger from cardinal_pythonlib.signalfunc import trap_ctrl_c_ctrl_break main_only_quicksetup_rootlogger(level=logging.DEBUG) trap_ctrl_c_ctrl_break() while True: print("Process ID is {}; time is {} s".format(os.getpid(), time.clock())) time.sleep(1) Now run that with ``python nokill.py``. It should resist CTRL-C and CTRL-BREAK. Start another command prompt in which to play with ``TASKKILL``. .. code-block:: bat REM Firstly, avoid this single-ampersand syntax: REM taskkill /im notepad.exe & echo %errorlevel% REM ... as it prints the WRONG (previous?) errorlevel. notepad.exe taskkill /im notepad.exe echo %errorlevel% REM ... 0 for success (Windows 10), e.g. REM 'SUCCESS: Sent termination signal to the process "notepad.exe" with PID 6988.' taskkill /im notepad.exe echo %errorlevel% REM ... 128 for "not found" (Windows 10), e.g. REM 'ERROR: The process "notepad.exe" not found.' REM Now run notepad.exe as Administrator taskkill /im notepad.exe & echo %errorlevel% REM ... 1 for "access denied" (Windows 10) REM Now kill the nokill.py process by its PID (e.g. 11892 here): taskkill /pid 11892 echo %errorlevel% REM ... 1 for "not allowed" (Windows 10), e.g. REM 'ERROR: The process with PID 11892 could not be terminated.' REM 'Reason: This process can only be terminated forcefully (with /F option).' REM Now forcefully: taskkill /pid 11892 /f echo %errorlevel% REM ... 0 for success (Windows 10), e.g. REM 'SUCCESS: The process with PID 11892 has been terminated.' """ # noqa args = [ "taskkill", # built in to Windows XP and higher "/pid", str(self.process.pid), "/t", # tree kill: kill all children ] if force: args.append("/f") # forcefully callname = " ".join(args) retcode = subprocess.call(args) # http://stackoverflow.com/questions/18682681/what-are-exit-codes-from-the-taskkill-utility # noqa if retcode == winerror.ERROR_SUCCESS: # 0 self.info("Killed with " + repr(callname)) elif retcode == winerror.ERROR_INVALID_FUNCTION: # 1 self.warning( repr(callname) + " failed (error code 1 = ERROR_INVALID_FUNCTION; " "can mean 'Access denied', or 'This process can only be " "terminated forcefully (with /F option)').") elif retcode == winerror.ERROR_WAIT_NO_CHILDREN: # 128 self.warning( repr(callname) + " failed (error code 128 = ERROR_WAIT_NO_CHILDREN " "= 'There are no child processes to wait for', but also " "occurs when the process doesn't exist, and when processes " "require a forceful [/F] termination)") elif retcode == winerror.ERROR_EA_LIST_INCONSISTENT: # 255 self.warning( repr(callname) + " failed (error code 255 = ERROR_EA_LIST_INCONSISTENT " "= 'The extended attributes are inconsistent.')") else: self.warning(callname + " failed: error code {}".format(retcode)) return retcode
python
def _taskkill(self, force: bool = False) -> int: """ Executes a Windows ``TASKKILL /pid PROCESS_ID /t`` command (``/t`` for "tree kill" = "kill all children"). Args: force: also add ``/f`` (forcefully) Returns: return code from ``TASKKILL`` **Test code:** Firstly we need a program that won't let itself be killed. Save this as ``nokill.py``: .. code-block:: python #!/usr/bin/env python import logging import time import os from cardinal_pythonlib.logs import main_only_quicksetup_rootlogger from cardinal_pythonlib.signalfunc import trap_ctrl_c_ctrl_break main_only_quicksetup_rootlogger(level=logging.DEBUG) trap_ctrl_c_ctrl_break() while True: print("Process ID is {}; time is {} s".format(os.getpid(), time.clock())) time.sleep(1) Now run that with ``python nokill.py``. It should resist CTRL-C and CTRL-BREAK. Start another command prompt in which to play with ``TASKKILL``. .. code-block:: bat REM Firstly, avoid this single-ampersand syntax: REM taskkill /im notepad.exe & echo %errorlevel% REM ... as it prints the WRONG (previous?) errorlevel. notepad.exe taskkill /im notepad.exe echo %errorlevel% REM ... 0 for success (Windows 10), e.g. REM 'SUCCESS: Sent termination signal to the process "notepad.exe" with PID 6988.' taskkill /im notepad.exe echo %errorlevel% REM ... 128 for "not found" (Windows 10), e.g. REM 'ERROR: The process "notepad.exe" not found.' REM Now run notepad.exe as Administrator taskkill /im notepad.exe & echo %errorlevel% REM ... 1 for "access denied" (Windows 10) REM Now kill the nokill.py process by its PID (e.g. 11892 here): taskkill /pid 11892 echo %errorlevel% REM ... 1 for "not allowed" (Windows 10), e.g. REM 'ERROR: The process with PID 11892 could not be terminated.' REM 'Reason: This process can only be terminated forcefully (with /F option).' REM Now forcefully: taskkill /pid 11892 /f echo %errorlevel% REM ... 0 for success (Windows 10), e.g. REM 'SUCCESS: The process with PID 11892 has been terminated.' """ # noqa args = [ "taskkill", # built in to Windows XP and higher "/pid", str(self.process.pid), "/t", # tree kill: kill all children ] if force: args.append("/f") # forcefully callname = " ".join(args) retcode = subprocess.call(args) # http://stackoverflow.com/questions/18682681/what-are-exit-codes-from-the-taskkill-utility # noqa if retcode == winerror.ERROR_SUCCESS: # 0 self.info("Killed with " + repr(callname)) elif retcode == winerror.ERROR_INVALID_FUNCTION: # 1 self.warning( repr(callname) + " failed (error code 1 = ERROR_INVALID_FUNCTION; " "can mean 'Access denied', or 'This process can only be " "terminated forcefully (with /F option)').") elif retcode == winerror.ERROR_WAIT_NO_CHILDREN: # 128 self.warning( repr(callname) + " failed (error code 128 = ERROR_WAIT_NO_CHILDREN " "= 'There are no child processes to wait for', but also " "occurs when the process doesn't exist, and when processes " "require a forceful [/F] termination)") elif retcode == winerror.ERROR_EA_LIST_INCONSISTENT: # 255 self.warning( repr(callname) + " failed (error code 255 = ERROR_EA_LIST_INCONSISTENT " "= 'The extended attributes are inconsistent.')") else: self.warning(callname + " failed: error code {}".format(retcode)) return retcode
[ "def", "_taskkill", "(", "self", ",", "force", ":", "bool", "=", "False", ")", "->", "int", ":", "# noqa", "args", "=", "[", "\"taskkill\"", ",", "# built in to Windows XP and higher", "\"/pid\"", ",", "str", "(", "self", ".", "process", ".", "pid", ")", ",", "\"/t\"", ",", "# tree kill: kill all children", "]", "if", "force", ":", "args", ".", "append", "(", "\"/f\"", ")", "# forcefully", "callname", "=", "\" \"", ".", "join", "(", "args", ")", "retcode", "=", "subprocess", ".", "call", "(", "args", ")", "# http://stackoverflow.com/questions/18682681/what-are-exit-codes-from-the-taskkill-utility # noqa", "if", "retcode", "==", "winerror", ".", "ERROR_SUCCESS", ":", "# 0", "self", ".", "info", "(", "\"Killed with \"", "+", "repr", "(", "callname", ")", ")", "elif", "retcode", "==", "winerror", ".", "ERROR_INVALID_FUNCTION", ":", "# 1", "self", ".", "warning", "(", "repr", "(", "callname", ")", "+", "\" failed (error code 1 = ERROR_INVALID_FUNCTION; \"", "\"can mean 'Access denied', or 'This process can only be \"", "\"terminated forcefully (with /F option)').\"", ")", "elif", "retcode", "==", "winerror", ".", "ERROR_WAIT_NO_CHILDREN", ":", "# 128", "self", ".", "warning", "(", "repr", "(", "callname", ")", "+", "\" failed (error code 128 = ERROR_WAIT_NO_CHILDREN \"", "\"= 'There are no child processes to wait for', but also \"", "\"occurs when the process doesn't exist, and when processes \"", "\"require a forceful [/F] termination)\"", ")", "elif", "retcode", "==", "winerror", ".", "ERROR_EA_LIST_INCONSISTENT", ":", "# 255", "self", ".", "warning", "(", "repr", "(", "callname", ")", "+", "\" failed (error code 255 = ERROR_EA_LIST_INCONSISTENT \"", "\"= 'The extended attributes are inconsistent.')\"", ")", "else", ":", "self", ".", "warning", "(", "callname", "+", "\" failed: error code {}\"", ".", "format", "(", "retcode", ")", ")", "return", "retcode" ]
Executes a Windows ``TASKKILL /pid PROCESS_ID /t`` command (``/t`` for "tree kill" = "kill all children"). Args: force: also add ``/f`` (forcefully) Returns: return code from ``TASKKILL`` **Test code:** Firstly we need a program that won't let itself be killed. Save this as ``nokill.py``: .. code-block:: python #!/usr/bin/env python import logging import time import os from cardinal_pythonlib.logs import main_only_quicksetup_rootlogger from cardinal_pythonlib.signalfunc import trap_ctrl_c_ctrl_break main_only_quicksetup_rootlogger(level=logging.DEBUG) trap_ctrl_c_ctrl_break() while True: print("Process ID is {}; time is {} s".format(os.getpid(), time.clock())) time.sleep(1) Now run that with ``python nokill.py``. It should resist CTRL-C and CTRL-BREAK. Start another command prompt in which to play with ``TASKKILL``. .. code-block:: bat REM Firstly, avoid this single-ampersand syntax: REM taskkill /im notepad.exe & echo %errorlevel% REM ... as it prints the WRONG (previous?) errorlevel. notepad.exe taskkill /im notepad.exe echo %errorlevel% REM ... 0 for success (Windows 10), e.g. REM 'SUCCESS: Sent termination signal to the process "notepad.exe" with PID 6988.' taskkill /im notepad.exe echo %errorlevel% REM ... 128 for "not found" (Windows 10), e.g. REM 'ERROR: The process "notepad.exe" not found.' REM Now run notepad.exe as Administrator taskkill /im notepad.exe & echo %errorlevel% REM ... 1 for "access denied" (Windows 10) REM Now kill the nokill.py process by its PID (e.g. 11892 here): taskkill /pid 11892 echo %errorlevel% REM ... 1 for "not allowed" (Windows 10), e.g. REM 'ERROR: The process with PID 11892 could not be terminated.' REM 'Reason: This process can only be terminated forcefully (with /F option).' REM Now forcefully: taskkill /pid 11892 /f echo %errorlevel% REM ... 0 for success (Windows 10), e.g. REM 'SUCCESS: The process with PID 11892 has been terminated.'
[ "Executes", "a", "Windows", "TASKKILL", "/", "pid", "PROCESS_ID", "/", "t", "command", "(", "/", "t", "for", "tree", "kill", "=", "kill", "all", "children", ")", "." ]
train
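A standard-library sketch of the same TASKKILL call and return-code check; the winerror constants used above come from pywin32, but the raw integers (0, 1, 128, 255) behave identically. The PID is a made-up example.

.. code-block:: python

    # Sketch (Windows only): tree-kill a process with TASKKILL and inspect
    # the exit code, as _taskkill() does.
    import subprocess

    def taskkill(pid: int, force: bool = False) -> int:
        args = ["taskkill", "/pid", str(pid), "/t"]  # /t = kill child tree
        if force:
            args.append("/f")
        return subprocess.call(args)

    rc = taskkill(12345, force=True)   # 12345 is an illustrative PID
    if rc == 0:
        print("killed")
    else:
        print("taskkill returned", rc)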
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/winservice.py#L571-L675
RudolfCardinal/pythonlib
cardinal_pythonlib/winservice.py
ProcessManager._kill
def _kill(self) -> None: """ Hard kill. - PROBLEM: originally, via ``self.process.kill()``, could leave orphans under Windows. - SOLUTION: see https://stackoverflow.com/questions/1230669/subprocess-deleting-child-processes-in-windows, which uses ``psutil``. """ # noqa self.warning("Using a recursive hard kill; will assume it worked") pid = self.process.pid gone, still_alive = kill_proc_tree(pid, including_parent=True, timeout_s=self.kill_timeout_sec) self.debug("Killed: {!r}".format(gone)) self.warning("Still alive: {!r}".format(still_alive))
python
def _kill(self) -> None: """ Hard kill. - PROBLEM: originally, via ``self.process.kill()``, could leave orphans under Windows. - SOLUTION: see https://stackoverflow.com/questions/1230669/subprocess-deleting-child-processes-in-windows, which uses ``psutil``. """ # noqa self.warning("Using a recursive hard kill; will assume it worked") pid = self.process.pid gone, still_alive = kill_proc_tree(pid, including_parent=True, timeout_s=self.kill_timeout_sec) self.debug("Killed: {!r}".format(gone)) self.warning("Still alive: {!r}".format(still_alive))
[ "def", "_kill", "(", "self", ")", "->", "None", ":", "# noqa", "self", ".", "warning", "(", "\"Using a recursive hard kill; will assume it worked\"", ")", "pid", "=", "self", ".", "process", ".", "pid", "gone", ",", "still_alive", "=", "kill_proc_tree", "(", "pid", ",", "including_parent", "=", "True", ",", "timeout_s", "=", "self", ".", "kill_timeout_sec", ")", "self", ".", "debug", "(", "\"Killed: {!r}\"", ".", "format", "(", "gone", ")", ")", "self", ".", "warning", "(", "\"Still alive: {!r}\"", ".", "format", "(", "still_alive", ")", ")" ]
Hard kill. - PROBLEM: originally, via ``self.process.kill()``, could leave orphans under Windows. - SOLUTION: see https://stackoverflow.com/questions/1230669/subprocess-deleting-child-processes-in-windows, which uses ``psutil``.
[ "Hard", "kill", ".", "-", "PROBLEM", ":", "originally", "via", "self", ".", "process", ".", "kill", "()", "could", "leave", "orphans", "under", "Windows", ".", "-", "SOLUTION", ":", "see", "https", ":", "//", "stackoverflow", ".", "com", "/", "questions", "/", "1230669", "/", "subprocess", "-", "deleting", "-", "child", "-", "processes", "-", "in", "-", "windows", "which", "uses", "psutil", "." ]
train
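The kill_proc_tree() helper referenced above is not shown in this dump; a psutil-based sketch of the same idea (an assumption about its behaviour, not the library's exact code) looks like this.

.. code-block:: python

    # Sketch: recursive hard kill with psutil, returning (gone, still_alive)
    # in the same spirit as the kill_proc_tree() call in _kill().
    import psutil

    def kill_proc_tree(pid: int, including_parent: bool = True,
                       timeout_s: float = 5):
        parent = psutil.Process(pid)
        procs = parent.children(recursive=True)
        if including_parent:
            procs.append(parent)
        for p in procs:
            p.kill()
        gone, still_alive = psutil.wait_procs(procs, timeout=timeout_s)
        return gone, still_alive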
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/winservice.py#L677-L693
RudolfCardinal/pythonlib
cardinal_pythonlib/winservice.py
ProcessManager.wait
def wait(self, timeout_s: float = None) -> int: """ Wait for up to ``timeout_s`` for the child process to finish. Args: timeout_s: maximum time to wait or ``None`` to wait forever Returns: process return code; or ``0`` if it wasn't running, or ``1`` if it managed to exit without a return code Raises: subprocess.TimeoutExpired: if the process continues to run """ if not self.running: return 0 retcode = self.process.wait(timeout=timeout_s) # We won't get further unless the process has stopped. if retcode is None: self.error("Subprocess finished, but return code was None") retcode = 1 # we're promising to return an int elif retcode == 0: self.info("Subprocess finished cleanly (return code 0).") else: self.error( "Subprocess finished, but FAILED (return code {}). " "Logs were: {} (stdout), {} (stderr)".format( retcode, self.details.logfile_out, self.details.logfile_err)) self.running = False return retcode
python
def wait(self, timeout_s: float = None) -> int: """ Wait for up to ``timeout_s`` for the child process to finish. Args: timeout_s: maximum time to wait or ``None`` to wait forever Returns: process return code; or ``0`` if it wasn't running, or ``1`` if it managed to exit without a return code Raises: subprocess.TimeoutExpired: if the process continues to run """ if not self.running: return 0 retcode = self.process.wait(timeout=timeout_s) # We won't get further unless the process has stopped. if retcode is None: self.error("Subprocess finished, but return code was None") retcode = 1 # we're promising to return an int elif retcode == 0: self.info("Subprocess finished cleanly (return code 0).") else: self.error( "Subprocess finished, but FAILED (return code {}). " "Logs were: {} (stdout), {} (stderr)".format( retcode, self.details.logfile_out, self.details.logfile_err)) self.running = False return retcode
[ "def", "wait", "(", "self", ",", "timeout_s", ":", "float", "=", "None", ")", "->", "int", ":", "if", "not", "self", ".", "running", ":", "return", "0", "retcode", "=", "self", ".", "process", ".", "wait", "(", "timeout", "=", "timeout_s", ")", "# We won't get further unless the process has stopped.", "if", "retcode", "is", "None", ":", "self", ".", "error", "(", "\"Subprocess finished, but return code was None\"", ")", "retcode", "=", "1", "# we're promising to return an int", "elif", "retcode", "==", "0", ":", "self", ".", "info", "(", "\"Subprocess finished cleanly (return code 0).\"", ")", "else", ":", "self", ".", "error", "(", "\"Subprocess finished, but FAILED (return code {}). \"", "\"Logs were: {} (stdout), {} (stderr)\"", ".", "format", "(", "retcode", ",", "self", ".", "details", ".", "logfile_out", ",", "self", ".", "details", ".", "logfile_err", ")", ")", "self", ".", "running", "=", "False", "return", "retcode" ]
Wait for up to ``timeout_s`` for the child process to finish. Args: timeout_s: maximum time to wait or ``None`` to wait forever Returns: process return code; or ``0`` if it wasn't running, or ``1`` if it managed to exit without a return code Raises: subprocess.TimeoutExpired: if the process continues to run
[ "Wait", "for", "up", "to", "timeout_s", "for", "the", "child", "process", "to", "finish", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/winservice.py#L695-L727
RudolfCardinal/pythonlib
cardinal_pythonlib/winservice.py
WindowsService.info
def info(self, msg: str) -> None: """ Write an info message to the Windows Application log (± to the Python disk log). """ # noinspection PyUnresolvedReferences servicemanager.LogInfoMsg(str(msg)) if self.debugging: log.info(msg)
python
def info(self, msg: str) -> None: """ Write an info message to the Windows Application log (± to the Python disk log). """ # noinspection PyUnresolvedReferences servicemanager.LogInfoMsg(str(msg)) if self.debugging: log.info(msg)
[ "def", "info", "(", "self", ",", "msg", ":", "str", ")", "->", "None", ":", "# noinspection PyUnresolvedReferences", "servicemanager", ".", "LogInfoMsg", "(", "str", "(", "msg", ")", ")", "if", "self", ".", "debugging", ":", "log", ".", "info", "(", "msg", ")" ]
Write an info message to the Windows Application log (± to the Python disk log).
[ "Write", "an", "info", "message", "to", "the", "Windows", "Application", "log", "(", "±", "to", "the", "Python", "disk", "log", ")", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/winservice.py#L787-L795
RudolfCardinal/pythonlib
cardinal_pythonlib/winservice.py
WindowsService.error
def error(self, msg: str) -> None: """ Write an error message to the Windows Application log (± to the Python disk log). """ # noinspection PyUnresolvedReferences servicemanager.LogErrorMsg(str(msg)) if self.debugging: log.error(msg)
python
def error(self, msg: str) -> None: """ Write an error message to the Windows Application log (± to the Python disk log). """ # noinspection PyUnresolvedReferences servicemanager.LogErrorMsg(str(msg)) if self.debugging: log.error(msg)
[ "def", "error", "(", "self", ",", "msg", ":", "str", ")", "->", "None", ":", "# noinspection PyUnresolvedReferences", "servicemanager", ".", "LogErrorMsg", "(", "str", "(", "msg", ")", ")", "if", "self", ".", "debugging", ":", "log", ".", "error", "(", "msg", ")" ]
Write an error message to the Windows Application log (± to the Python disk log).
[ "Write", "an", "error", "message", "to", "the", "Windows", "Application", "log", "(", "±", "to", "the", "Python", "disk", "log", ")", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/winservice.py#L797-L805
RudolfCardinal/pythonlib
cardinal_pythonlib/winservice.py
WindowsService.SvcStop
def SvcStop(self) -> None: """ Called when the service is being shut down. """ # tell the SCM we're shutting down # noinspection PyUnresolvedReferences self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING) # fire the stop event win32event.SetEvent(self.h_stop_event)
python
def SvcStop(self) -> None: """ Called when the service is being shut down. """ # tell the SCM we're shutting down # noinspection PyUnresolvedReferences self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING) # fire the stop event win32event.SetEvent(self.h_stop_event)
[ "def", "SvcStop", "(", "self", ")", "->", "None", ":", "# tell the SCM we're shutting down", "# noinspection PyUnresolvedReferences", "self", ".", "ReportServiceStatus", "(", "win32service", ".", "SERVICE_STOP_PENDING", ")", "# fire the stop event", "win32event", ".", "SetEvent", "(", "self", ".", "h_stop_event", ")" ]
Called when the service is being shut down.
[ "Called", "when", "the", "service", "is", "being", "shut", "down", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/winservice.py#L812-L820
RudolfCardinal/pythonlib
cardinal_pythonlib/winservice.py
WindowsService.SvcDoRun
def SvcDoRun(self) -> None: """ Called when the service is started. """ # No need to self.ReportServiceStatus(win32service.SERVICE_RUNNING); # that is done by the framework (see win32serviceutil.py). # Similarly, no need to report a SERVICE_STOP_PENDING on exit. # noinspection PyUnresolvedReferences self.debug("Sending PYS_SERVICE_STARTED message") # noinspection PyUnresolvedReferences servicemanager.LogMsg(servicemanager.EVENTLOG_INFORMATION_TYPE, servicemanager.PYS_SERVICE_STARTED, (self._svc_name_, '')) # self.test_service() # test service self.main() # real service # noinspection PyUnresolvedReferences servicemanager.LogMsg(servicemanager.EVENTLOG_INFORMATION_TYPE, servicemanager.PYS_SERVICE_STOPPED, (self._svc_name_, '')) # noinspection PyUnresolvedReferences self.ReportServiceStatus(win32service.SERVICE_STOPPED)
python
def SvcDoRun(self) -> None: """ Called when the service is started. """ # No need to self.ReportServiceStatus(win32service.SERVICE_RUNNING); # that is done by the framework (see win32serviceutil.py). # Similarly, no need to report a SERVICE_STOP_PENDING on exit. # noinspection PyUnresolvedReferences self.debug("Sending PYS_SERVICE_STARTED message") # noinspection PyUnresolvedReferences servicemanager.LogMsg(servicemanager.EVENTLOG_INFORMATION_TYPE, servicemanager.PYS_SERVICE_STARTED, (self._svc_name_, '')) # self.test_service() # test service self.main() # real service # noinspection PyUnresolvedReferences servicemanager.LogMsg(servicemanager.EVENTLOG_INFORMATION_TYPE, servicemanager.PYS_SERVICE_STOPPED, (self._svc_name_, '')) # noinspection PyUnresolvedReferences self.ReportServiceStatus(win32service.SERVICE_STOPPED)
[ "def", "SvcDoRun", "(", "self", ")", "->", "None", ":", "# No need to self.ReportServiceStatus(win32service.SERVICE_RUNNING);", "# that is done by the framework (see win32serviceutil.py).", "# Similarly, no need to report a SERVICE_STOP_PENDING on exit.", "# noinspection PyUnresolvedReferences", "self", ".", "debug", "(", "\"Sending PYS_SERVICE_STARTED message\"", ")", "# noinspection PyUnresolvedReferences", "servicemanager", ".", "LogMsg", "(", "servicemanager", ".", "EVENTLOG_INFORMATION_TYPE", ",", "servicemanager", ".", "PYS_SERVICE_STARTED", ",", "(", "self", ".", "_svc_name_", ",", "''", ")", ")", "# self.test_service() # test service", "self", ".", "main", "(", ")", "# real service", "# noinspection PyUnresolvedReferences", "servicemanager", ".", "LogMsg", "(", "servicemanager", ".", "EVENTLOG_INFORMATION_TYPE", ",", "servicemanager", ".", "PYS_SERVICE_STOPPED", ",", "(", "self", ".", "_svc_name_", ",", "''", ")", ")", "# noinspection PyUnresolvedReferences", "self", ".", "ReportServiceStatus", "(", "win32service", ".", "SERVICE_STOPPED", ")" ]
Called when the service is started.
[ "Called", "when", "the", "service", "is", "started", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/winservice.py#L824-L846
RudolfCardinal/pythonlib
cardinal_pythonlib/winservice.py
WindowsService.main
def main(self) -> None: """ Main entry point. Runs :func:`service`. """ # Actual main service code. try: self.service() except Exception as e: self.error("Unexpected exception: {e}\n{t}".format( e=e, t=traceback.format_exc()))
python
def main(self) -> None: """ Main entry point. Runs :func:`service`. """ # Actual main service code. try: self.service() except Exception as e: self.error("Unexpected exception: {e}\n{t}".format( e=e, t=traceback.format_exc()))
[ "def", "main", "(", "self", ")", "->", "None", ":", "# Actual main service code.", "try", ":", "self", ".", "service", "(", ")", "except", "Exception", "as", "e", ":", "self", ".", "error", "(", "\"Unexpected exception: {e}\\n{t}\"", ".", "format", "(", "e", "=", "e", ",", "t", "=", "traceback", ".", "format_exc", "(", ")", ")", ")" ]
Main entry point. Runs :func:`service`.
[ "Main", "entry", "point", ".", "Runs", ":", "func", ":", "service", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/winservice.py#L895-L904
RudolfCardinal/pythonlib
cardinal_pythonlib/winservice.py
WindowsService.run_processes
def run_processes(self, procdetails: List[ProcessDetails], subproc_run_timeout_sec: float = 1, stop_event_timeout_ms: int = 1000, kill_timeout_sec: float = 5) -> None: """ Run multiple child processes. Args: procdetails: list of :class:`ProcessDetails` objects (q.v.) subproc_run_timeout_sec: time (in seconds) to wait for each process when polling child processes to see how they're getting on (default ``1``) stop_event_timeout_ms: time to wait (in ms) while checking the Windows stop event for this service (default ``1000``) kill_timeout_sec: how long (in seconds) will we wait for the subprocesses to end peacefully, before we try to kill them? .. todo:: cardinal_pythonlib.winservice.WindowsService: NOT YET IMPLEMENTED: Windows service autorestart """ # https://stackoverflow.com/questions/16333054 def cleanup(): self.debug("atexit function called: cleaning up") for pmgr_ in self.process_managers: pmgr_.stop() atexit.register(cleanup) # Set up process info self.process_managers = [] # type: List[ProcessManager] n = len(procdetails) for i, details in enumerate(procdetails): pmgr = ProcessManager(details, i + 1, n, kill_timeout_sec=kill_timeout_sec, debugging=self.debugging) self.process_managers.append(pmgr) # Start processes for pmgr in self.process_managers: pmgr.start() self.info("All started") # Run processes something_running = True stop_requested = False subproc_failed = False while something_running and not stop_requested and not subproc_failed: if (win32event.WaitForSingleObject( self.h_stop_event, stop_event_timeout_ms) == win32event.WAIT_OBJECT_0): stop_requested = True self.info("Stop requested; stopping") else: something_running = False for pmgr in self.process_managers: if subproc_failed: break try: retcode = pmgr.wait(timeout_s=subproc_run_timeout_sec) if retcode != 0: subproc_failed = True except subprocess.TimeoutExpired: something_running = True # Kill any outstanding processes # # (a) Slow way # for pmgr in self.process_managers: # pmgr.stop() # # (b) Faster (slightly more parallel) way # for pmgr in self.process_managers: # pmgr.terminate() # for pmgr in self.process_managers: # pmgr.stop_having_terminated() # # ... No, it's bad if we leave things orphaned. # Let's go for slow, clean code. for pmgr in self.process_managers: pmgr.stop() self.info("All stopped")
python
def run_processes(self, procdetails: List[ProcessDetails], subproc_run_timeout_sec: float = 1, stop_event_timeout_ms: int = 1000, kill_timeout_sec: float = 5) -> None: """ Run multiple child processes. Args: procdetails: list of :class:`ProcessDetails` objects (q.v.) subproc_run_timeout_sec: time (in seconds) to wait for each process when polling child processes to see how they're getting on (default ``1``) stop_event_timeout_ms: time to wait (in ms) while checking the Windows stop event for this service (default ``1000``) kill_timeout_sec: how long (in seconds) will we wait for the subprocesses to end peacefully, before we try to kill them? .. todo:: cardinal_pythonlib.winservice.WindowsService: NOT YET IMPLEMENTED: Windows service autorestart """ # https://stackoverflow.com/questions/16333054 def cleanup(): self.debug("atexit function called: cleaning up") for pmgr_ in self.process_managers: pmgr_.stop() atexit.register(cleanup) # Set up process info self.process_managers = [] # type: List[ProcessManager] n = len(procdetails) for i, details in enumerate(procdetails): pmgr = ProcessManager(details, i + 1, n, kill_timeout_sec=kill_timeout_sec, debugging=self.debugging) self.process_managers.append(pmgr) # Start processes for pmgr in self.process_managers: pmgr.start() self.info("All started") # Run processes something_running = True stop_requested = False subproc_failed = False while something_running and not stop_requested and not subproc_failed: if (win32event.WaitForSingleObject( self.h_stop_event, stop_event_timeout_ms) == win32event.WAIT_OBJECT_0): stop_requested = True self.info("Stop requested; stopping") else: something_running = False for pmgr in self.process_managers: if subproc_failed: break try: retcode = pmgr.wait(timeout_s=subproc_run_timeout_sec) if retcode != 0: subproc_failed = True except subprocess.TimeoutExpired: something_running = True # Kill any outstanding processes # # (a) Slow way # for pmgr in self.process_managers: # pmgr.stop() # # (b) Faster (slightly more parallel) way # for pmgr in self.process_managers: # pmgr.terminate() # for pmgr in self.process_managers: # pmgr.stop_having_terminated() # # ... No, it's bad if we leave things orphaned. # Let's go for slow, clean code. for pmgr in self.process_managers: pmgr.stop() self.info("All stopped")
[ "def", "run_processes", "(", "self", ",", "procdetails", ":", "List", "[", "ProcessDetails", "]", ",", "subproc_run_timeout_sec", ":", "float", "=", "1", ",", "stop_event_timeout_ms", ":", "int", "=", "1000", ",", "kill_timeout_sec", ":", "float", "=", "5", ")", "->", "None", ":", "# https://stackoverflow.com/questions/16333054", "def", "cleanup", "(", ")", ":", "self", ".", "debug", "(", "\"atexit function called: cleaning up\"", ")", "for", "pmgr_", "in", "self", ".", "process_managers", ":", "pmgr_", ".", "stop", "(", ")", "atexit", ".", "register", "(", "cleanup", ")", "# Set up process info", "self", ".", "process_managers", "=", "[", "]", "# type: List[ProcessManager]", "n", "=", "len", "(", "procdetails", ")", "for", "i", ",", "details", "in", "enumerate", "(", "procdetails", ")", ":", "pmgr", "=", "ProcessManager", "(", "details", ",", "i", "+", "1", ",", "n", ",", "kill_timeout_sec", "=", "kill_timeout_sec", ",", "debugging", "=", "self", ".", "debugging", ")", "self", ".", "process_managers", ".", "append", "(", "pmgr", ")", "# Start processes", "for", "pmgr", "in", "self", ".", "process_managers", ":", "pmgr", ".", "start", "(", ")", "self", ".", "info", "(", "\"All started\"", ")", "# Run processes", "something_running", "=", "True", "stop_requested", "=", "False", "subproc_failed", "=", "False", "while", "something_running", "and", "not", "stop_requested", "and", "not", "subproc_failed", ":", "if", "(", "win32event", ".", "WaitForSingleObject", "(", "self", ".", "h_stop_event", ",", "stop_event_timeout_ms", ")", "==", "win32event", ".", "WAIT_OBJECT_0", ")", ":", "stop_requested", "=", "True", "self", ".", "info", "(", "\"Stop requested; stopping\"", ")", "else", ":", "something_running", "=", "False", "for", "pmgr", "in", "self", ".", "process_managers", ":", "if", "subproc_failed", ":", "break", "try", ":", "retcode", "=", "pmgr", ".", "wait", "(", "timeout_s", "=", "subproc_run_timeout_sec", ")", "if", "retcode", "!=", "0", ":", "subproc_failed", "=", "True", "except", "subprocess", ".", "TimeoutExpired", ":", "something_running", "=", "True", "# Kill any outstanding processes", "#", "# (a) Slow way", "# for pmgr in self.process_managers:", "# pmgr.stop()", "#", "# (b) Faster (slightly more parallel) way", "# for pmgr in self.process_managers:", "# pmgr.terminate()", "# for pmgr in self.process_managers:", "# pmgr.stop_having_terminated()", "#", "# ... No, it's bad if we leave things orphaned.", "# Let's go for slow, clean code.", "for", "pmgr", "in", "self", ".", "process_managers", ":", "pmgr", ".", "stop", "(", ")", "self", ".", "info", "(", "\"All stopped\"", ")" ]
Run multiple child processes. Args: procdetails: list of :class:`ProcessDetails` objects (q.v.) subproc_run_timeout_sec: time (in seconds) to wait for each process when polling child processes to see how they're getting on (default ``1``) stop_event_timeout_ms: time to wait (in ms) while checking the Windows stop event for this service (default ``1000``) kill_timeout_sec: how long (in seconds) will we wait for the subprocesses to end peacefully, before we try to kill them? .. todo:: cardinal_pythonlib.winservice.WindowsService: NOT YET IMPLEMENTED: Windows service autorestart
[ "Run", "multiple", "child", "processes", "." ]
train
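A hypothetical usage sketch: subclass WindowsService, override service() (which main() above calls), and hand run_processes() a list of ProcessDetails. The ProcessDetails keyword arguments are inferred from the attribute names seen in this dump (procargs, logfile_out, logfile_err); the real constructor signature is not shown here.

.. code-block:: python

    # Hypothetical subclass sketch; ProcessDetails arguments are assumptions.
    from cardinal_pythonlib.winservice import ProcessDetails, WindowsService

    class MyService(WindowsService):
        # standard pywin32 service attributes
        _svc_name_ = "MyService"
        _svc_display_name_ = "My demo service"

        def service(self) -> None:          # called by main()
            self.run_processes([
                ProcessDetails(procargs=["python", "worker_a.py"],
                               logfile_out="a_out.log",
                               logfile_err="a_err.log"),
                ProcessDetails(procargs=["python", "worker_b.py"],
                               logfile_out="b_out.log",
                               logfile_err="b_err.log"),
            ])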
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/winservice.py#L912-L997
RudolfCardinal/pythonlib
cardinal_pythonlib/exceptions.py
add_info_to_exception
def add_info_to_exception(err: Exception, info: Dict) -> None: """ Adds an information dictionary to an exception. See http://stackoverflow.com/questions/9157210/how-do-i-raise-the-same-exception-with-a-custom-message-in-python Args: err: the exception to be modified info: the information to add """ # noqa if not err.args: err.args = ('', ) err.args += (info, )
python
def add_info_to_exception(err: Exception, info: Dict) -> None: """ Adds an information dictionary to an exception. See http://stackoverflow.com/questions/9157210/how-do-i-raise-the-same-exception-with-a-custom-message-in-python Args: err: the exception to be modified info: the information to add """ # noqa if not err.args: err.args = ('', ) err.args += (info, )
[ "def", "add_info_to_exception", "(", "err", ":", "Exception", ",", "info", ":", "Dict", ")", "->", "None", ":", "# noqa", "if", "not", "err", ".", "args", ":", "err", ".", "args", "=", "(", "''", ",", ")", "err", ".", "args", "+=", "(", "info", ",", ")" ]
Adds an information dictionary to an exception. See http://stackoverflow.com/questions/9157210/how-do-i-raise-the-same-exception-with-a-custom-message-in-python Args: err: the exception to be modified info: the information to add
[ "Adds", "an", "information", "dictionary", "to", "an", "exception", ".", "See", "http", ":", "//", "stackoverflow", ".", "com", "/", "questions", "/", "9157210", "/", "how", "-", "do", "-", "i", "-", "raise", "-", "the", "-", "same", "-", "exception", "-", "with", "-", "a", "-", "custom", "-", "message", "-", "in", "-", "python", "Args", ":", "err", ":", "the", "exception", "to", "be", "modified", "info", ":", "the", "information", "to", "add" ]
train

https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/exceptions.py#L42-L55
RudolfCardinal/pythonlib
cardinal_pythonlib/exceptions.py
recover_info_from_exception
def recover_info_from_exception(err: Exception) -> Dict: """ Retrieves the information added to an exception by :func:`add_info_to_exception`. """ if len(err.args) < 1: return {} info = err.args[-1] if not isinstance(info, dict): return {} return info
python
def recover_info_from_exception(err: Exception) -> Dict: """ Retrieves the information added to an exception by :func:`add_info_to_exception`. """ if len(err.args) < 1: return {} info = err.args[-1] if not isinstance(info, dict): return {} return info
[ "def", "recover_info_from_exception", "(", "err", ":", "Exception", ")", "->", "Dict", ":", "if", "len", "(", "err", ".", "args", ")", "<", "1", ":", "return", "{", "}", "info", "=", "err", ".", "args", "[", "-", "1", "]", "if", "not", "isinstance", "(", "info", ",", "dict", ")", ":", "return", "{", "}", "return", "info" ]
Retrieves the information added to an exception by :func:`add_info_to_exception`.
[ "Retrives", "the", "information", "added", "to", "an", "exception", "by", ":", "func", ":", "add_info_to_exception", "." ]
train
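A round-trip usage sketch for the two exception helpers above: attach context where the failure happens, re-raise, and read the context back at the top level. The process() call is a hypothetical stand-in for real work.

.. code-block:: python

    # Attach context to an exception at the point of failure, then recover it.
    from cardinal_pythonlib.exceptions import (
        add_info_to_exception,
        recover_info_from_exception,
    )

    def worker(row: dict) -> None:
        try:
            process(row)                       # process() is hypothetical
        except Exception as e:
            add_info_to_exception(e, {"row_id": row.get("id")})
            raise

    try:
        worker({"id": 42})
    except Exception as e:
        info = recover_info_from_exception(e)
        print("Failed on row:", info.get("row_id"))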
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/exceptions.py#L58-L68
RudolfCardinal/pythonlib
cardinal_pythonlib/exceptions.py
die
def die(exc: Exception = None, exit_code: int = 1) -> None: """ It is not clear that Python guarantees to exit with a non-zero exit code (errorlevel in DOS/Windows) upon an unhandled exception. So this function produces the usual stack trace then dies with the specified exit code. See http://stackoverflow.com/questions/9555133/e-printstacktrace-equivalent-in-python. Test code: .. code-block:: python import logging import sys import traceback logging.basicConfig(level=logging.DEBUG) log = logging.getLogger() def fail(): try: x = 1/0 except Exception as exc: die(exc) Then call .. code-block:: python fail() ... which should exit Python; then from Linux (for example): .. code-block:: bash echo $? # show exit code """ # noqa if exc: lines = traceback.format_exception( None, # etype: ignored exc, exc.__traceback__) # https://www.python.org/dev/peps/pep-3134/ msg = "".join(lines) # Method 1: # print("".join(lines), file=sys.stderr, flush=True) # Method 2: log.critical(msg) log.critical("Exiting with exit code {}", exit_code) sys.exit(exit_code)
python
def die(exc: Exception = None, exit_code: int = 1) -> None: """ It is not clear that Python guarantees to exit with a non-zero exit code (errorlevel in DOS/Windows) upon an unhandled exception. So this function produces the usual stack trace then dies with the specified exit code. See http://stackoverflow.com/questions/9555133/e-printstacktrace-equivalent-in-python. Test code: .. code-block:: python import logging import sys import traceback logging.basicConfig(level=logging.DEBUG) log = logging.getLogger() def fail(): try: x = 1/0 except Exception as exc: die(exc) Then call .. code-block:: python fail() ... which should exit Python; then from Linux (for example): .. code-block:: bash echo $? # show exit code """ # noqa if exc: lines = traceback.format_exception( None, # etype: ignored exc, exc.__traceback__) # https://www.python.org/dev/peps/pep-3134/ msg = "".join(lines) # Method 1: # print("".join(lines), file=sys.stderr, flush=True) # Method 2: log.critical(msg) log.critical("Exiting with exit code {}", exit_code) sys.exit(exit_code)
[ "def", "die", "(", "exc", ":", "Exception", "=", "None", ",", "exit_code", ":", "int", "=", "1", ")", "->", "None", ":", "# noqa", "if", "exc", ":", "lines", "=", "traceback", ".", "format_exception", "(", "None", ",", "# etype: ignored", "exc", ",", "exc", ".", "__traceback__", ")", "# https://www.python.org/dev/peps/pep-3134/", "msg", "=", "\"\"", ".", "join", "(", "lines", ")", "# Method 1:", "# print(\"\".join(lines), file=sys.stderr, flush=True)", "# Method 2:", "log", ".", "critical", "(", "msg", ")", "log", ".", "critical", "(", "\"Exiting with exit code {}\"", ",", "exit_code", ")", "sys", ".", "exit", "(", "exit_code", ")" ]
It is not clear that Python guarantees to exit with a non-zero exit code (errorlevel in DOS/Windows) upon an unhandled exception. So this function produces the usual stack trace then dies with the specified exit code. See http://stackoverflow.com/questions/9555133/e-printstacktrace-equivalent-in-python. Test code: .. code-block:: python import logging import sys import traceback logging.basicConfig(level=logging.DEBUG) log = logging.getLogger() def fail(): try: x = 1/0 except Exception as exc: die(exc) Then call .. code-block:: python fail() ... which should exit Python; then from Linux (for example): .. code-block:: bash echo $? # show exit code
[ "It", "is", "not", "clear", "that", "Python", "guarantees", "to", "exit", "with", "a", "non", "-", "zero", "exit", "code", "(", "errorlevel", "in", "DOS", "/", "Windows", ")", "upon", "an", "unhandled", "exception", ".", "So", "this", "function", "produces", "the", "usual", "stack", "trace", "then", "dies", "with", "the", "specified", "exit", "code", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/exceptions.py#L71-L120
henzk/featuremonkey
featuremonkey/tracing/serializer.py
_serialize_function
def _serialize_function(obj): """ Still needing this much try-except stuff. We should find a way to get rid of this. :param obj: :return: """ try: obj = inspect.getsource(obj) except (TypeError, IOError): try: obj = marshal.dumps(obj) except ValueError: if hasattr(obj, '__dict__'): obj = _serialize_dict(obj.__dict__) return obj
python
def _serialize_function(obj): """ Still needing this much try-except stuff. We should find a way to get rid of this. :param obj: :return: """ try: obj = inspect.getsource(obj) except (TypeError, IOError): try: obj = marshal.dumps(obj) except ValueError: if hasattr(obj, '__dict__'): obj = _serialize_dict(obj.__dict__) return obj
[ "def", "_serialize_function", "(", "obj", ")", ":", "try", ":", "obj", "=", "inspect", ".", "getsource", "(", "obj", ")", "except", "(", "TypeError", ",", "IOError", ")", ":", "try", ":", "obj", "=", "marshal", ".", "dumps", "(", "obj", ")", "except", "ValueError", ":", "if", "hasattr", "(", "obj", ",", "'__dict__'", ")", ":", "obj", "=", "_serialize_dict", "(", "obj", ".", "__dict__", ")", "return", "obj" ]
Still needing this much try-except stuff. We should find a way to get rid of this. :param obj: :return:
[ "Still", "needing", "this", "much", "try", "-", "except", "stuff", ".", "We", "should", "find", "a", "way", "to", "get", "rid", "of", "this", ".", ":", "param", "obj", ":", ":", "return", ":" ]
train
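A small demo of the building blocks behind _serialize_function(): inspect.getsource for functions defined in a file, with marshal as a fallback. Note that the function above calls marshal.dumps on the function object itself and relies on the ValueError it raises; marshalling the code object, as shown here, succeeds.

.. code-block:: python

    # Demo of the fallback ingredients used by _serialize_function().
    import inspect
    import marshal

    def greet(name):
        return "hello " + name

    print(inspect.getsource(greet))          # works for functions defined in a file
    blob = marshal.dumps(greet.__code__)     # code objects are marshal-serializable
    print(len(blob), "bytes of marshalled code object")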
https://github.com/henzk/featuremonkey/blob/e44414fc68427bcd71ad33ec2d816da0dd78eefa/featuremonkey/tracing/serializer.py#L52-L66
henzk/featuremonkey
featuremonkey/tracing/serializer.py
_serialize_module
def _serialize_module(obj): """ Tries to serialize a module by its __dict__ attr. Remove the builtins attr as this one is not relevant and extremely large. If its value is a callable, serialize it using serialize_obj, else, use its repr, because in this case we most likely run into max recursion depth errors. :param obj: :return: """ obj = dict(obj.__dict__) if '__builtins__' in obj.keys(): obj.pop('__builtins__') for k, v in obj.items(): if callable(v): obj[k] = serialize_obj(v) else: obj[k] = repr(v) return obj
python
def _serialize_module(obj): """ Tries to serialize a module by its __dict__ attr. Remove the builtins attr as this one is not relevant and extremely large. If its value is a callable, serialize it using serialize_obj, else, use its repr, because in this case we most likely run into max recursion depth errors. :param obj: :return: """ obj = dict(obj.__dict__) if '__builtins__' in obj.keys(): obj.pop('__builtins__') for k, v in obj.items(): if callable(v): obj[k] = serialize_obj(v) else: obj[k] = repr(v) return obj
[ "def", "_serialize_module", "(", "obj", ")", ":", "obj", "=", "dict", "(", "obj", ".", "__dict__", ")", "if", "'__builtins__'", "in", "obj", ".", "keys", "(", ")", ":", "obj", ".", "pop", "(", "'__builtins__'", ")", "for", "k", ",", "v", "in", "obj", ".", "items", "(", ")", ":", "if", "callable", "(", "v", ")", ":", "obj", "[", "k", "]", "=", "serialize_obj", "(", "v", ")", "else", ":", "obj", "[", "k", "]", "=", "repr", "(", "v", ")", "return", "obj" ]
Tries to serialize a module by its __dict__ attr. Remove the builtins attr as this one is not relevant and extremely large. If its value is a callable, serialize it using serialize_obj, else, use its repr, because in this case we most likely run into max recursion depth errors. :param obj: :return:
[ "Tries", "to", "serialize", "a", "module", "by", "its", "__dict__", "attr", ".", "Remove", "the", "builtins", "attr", "as", "this", "one", "is", "not", "relevant", "and", "extremely", "large", ".", "If", "its", "value", "is", "a", "callable", "serialize", "it", "using", "serialize_obj", "else", "use", "its", "repr", "because", "in", "this", "case", "we", "most", "likely", "run", "into", "max", "recursion", "depth", "errors", ".", ":", "param", "obj", ":", ":", "return", ":" ]
train
https://github.com/henzk/featuremonkey/blob/e44414fc68427bcd71ad33ec2d816da0dd78eefa/featuremonkey/tracing/serializer.py#L69-L86
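A rough standalone sketch of the same idea (standard library only, not the library's own code): copy a module's ``__dict__``, drop ``__builtins__`` if present, and fall back to ``repr`` for the non-callable values.

.. code-block:: python

    import math

    d = dict(math.__dict__)
    d.pop('__builtins__', None)                 # irrelevant and very large when present
    non_callables = {k: repr(v) for k, v in d.items() if not callable(v)}
    print(sorted(non_callables)[:5])            # e.g. ['__doc__', '__loader__', ...]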
henzk/featuremonkey
featuremonkey/tracing/serializer.py
_serialize_iterable
def _serialize_iterable(obj): """ Only for serializing list and tuples and stuff. Dicts and Strings/Unicode is treated differently. String/Unicode normally don't need further serialization and it would cause a max recursion error trying to do so. :param obj: :return: """ if isinstance(obj, (tuple, set)): # make a tuple assignable by casting it to list obj = list(obj) for item in obj: obj[obj.index(item)] = serialize_obj(item) return obj
python
def _serialize_iterable(obj): """ Only for serializing list and tuples and stuff. Dicts and Strings/Unicode is treated differently. String/Unicode normally don't need further serialization and it would cause a max recursion error trying to do so. :param obj: :return: """ if isinstance(obj, (tuple, set)): # make a tuple assignable by casting it to list obj = list(obj) for item in obj: obj[obj.index(item)] = serialize_obj(item) return obj
[ "def", "_serialize_iterable", "(", "obj", ")", ":", "if", "isinstance", "(", "obj", ",", "(", "tuple", ",", "set", ")", ")", ":", "# make a tuple assignable by casting it to list", "obj", "=", "list", "(", "obj", ")", "for", "item", "in", "obj", ":", "obj", "[", "obj", ".", "index", "(", "item", ")", "]", "=", "serialize_obj", "(", "item", ")", "return", "obj" ]
Only for serializing list and tuples and stuff. Dicts and Strings/Unicode is treated differently. String/Unicode normally don't need further serialization and it would cause a max recursion error trying to do so. :param obj: :return:
[ "Only", "for", "serializing", "list", "and", "tuples", "and", "stuff", ".", "Dicts", "and", "Strings", "/", "Unicode", "is", "treated", "differently", ".", "String", "/", "Unicode", "normally", "don", "t", "need", "further", "serialization", "and", "it", "would", "cause", "a", "max", "recursion", "error", "trying", "to", "do", "so", ".", ":", "param", "obj", ":", ":", "return", ":" ]
train
https://github.com/henzk/featuremonkey/blob/e44414fc68427bcd71ad33ec2d816da0dd78eefa/featuremonkey/tracing/serializer.py#L117-L131
RudolfCardinal/pythonlib
cardinal_pythonlib/django/admin.py
disable_bool_icon
def disable_bool_icon( fieldname: str, model) -> Callable[[Any], bool]: """ Disable boolean icons for a Django ModelAdmin field. The '_meta' attribute is present on Django model classes and instances. model_class: ``Union[Model, Type[Model]]`` ... only the type checker in Py3.5 is broken; see ``files.py`` """ # noinspection PyUnusedLocal def func(self, obj): return getattr(obj, fieldname) func.boolean = False func.admin_order_field = fieldname # func.short_description = \ # model._meta.get_field_by_name(fieldname)[0].verbose_name # get_field_by_name() deprecated in Django 1.9 and will go in 1.10 # https://docs.djangoproject.com/en/1.8/ref/models/meta/ # noinspection PyProtectedMember, PyUnresolvedReferences func.short_description = \ model._meta.get_field(fieldname).verbose_name return func
python
def disable_bool_icon( fieldname: str, model) -> Callable[[Any], bool]: """ Disable boolean icons for a Django ModelAdmin field. The '_meta' attribute is present on Django model classes and instances. model_class: ``Union[Model, Type[Model]]`` ... only the type checker in Py3.5 is broken; see ``files.py`` """ # noinspection PyUnusedLocal def func(self, obj): return getattr(obj, fieldname) func.boolean = False func.admin_order_field = fieldname # func.short_description = \ # model._meta.get_field_by_name(fieldname)[0].verbose_name # get_field_by_name() deprecated in Django 1.9 and will go in 1.10 # https://docs.djangoproject.com/en/1.8/ref/models/meta/ # noinspection PyProtectedMember, PyUnresolvedReferences func.short_description = \ model._meta.get_field(fieldname).verbose_name return func
[ "def", "disable_bool_icon", "(", "fieldname", ":", "str", ",", "model", ")", "->", "Callable", "[", "[", "Any", "]", ",", "bool", "]", ":", "# noinspection PyUnusedLocal", "def", "func", "(", "self", ",", "obj", ")", ":", "return", "getattr", "(", "obj", ",", "fieldname", ")", "func", ".", "boolean", "=", "False", "func", ".", "admin_order_field", "=", "fieldname", "# func.short_description = \\", "# model._meta.get_field_by_name(fieldname)[0].verbose_name", "# get_field_by_name() deprecated in Django 1.9 and will go in 1.10", "# https://docs.djangoproject.com/en/1.8/ref/models/meta/", "# noinspection PyProtectedMember, PyUnresolvedReferences", "func", ".", "short_description", "=", "model", ".", "_meta", ".", "get_field", "(", "fieldname", ")", ".", "verbose_name", "return", "func" ]
Disable boolean icons for a Django ModelAdmin field. The '_meta' attribute is present on Django model classes and instances. model_class: ``Union[Model, Type[Model]]`` ... only the type checker in Py3.5 is broken; see ``files.py``
[ "Disable", "boolean", "icons", "for", "a", "Django", "ModelAdmin", "field", ".", "The", "_meta", "attribute", "is", "present", "on", "Django", "model", "classes", "and", "instances", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/django/admin.py#L43-L67
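A hypothetical usage sketch (``Thing`` and its ``active`` field are invented, and this only runs inside a configured Django project): the returned callable is attached to a ``ModelAdmin`` so the changelist column shows the raw value rather than Django's boolean icon.

.. code-block:: python

    from django.contrib import admin
    from cardinal_pythonlib.django.admin import disable_bool_icon
    from myapp.models import Thing   # assumption: a model with a BooleanField "active"

    class ThingAdmin(admin.ModelAdmin):
        # disable_bool_icon() builds a method with boolean=False and an admin sort field
        active_plain = disable_bool_icon("active", Thing)
        list_display = ("id", "active_plain")

    admin.site.register(Thing, ThingAdmin)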
RudolfCardinal/pythonlib
cardinal_pythonlib/django/admin.py
admin_view_url
def admin_view_url(admin_site: AdminSite,
                   obj,
                   view_type: str = "change",
                   current_app: str = None) -> str:
    """
    Get a Django admin site URL for an object.
    """
    app_name = obj._meta.app_label.lower()
    model_name = obj._meta.object_name.lower()
    pk = obj.pk
    viewname = "admin:{}_{}_{}".format(app_name, model_name, view_type)
    if current_app is None:
        current_app = admin_site.name
    url = reverse(viewname, args=[pk], current_app=current_app)
    return url
python
def admin_view_url(admin_site: AdminSite,
                   obj,
                   view_type: str = "change",
                   current_app: str = None) -> str:
    """
    Get a Django admin site URL for an object.
    """
    app_name = obj._meta.app_label.lower()
    model_name = obj._meta.object_name.lower()
    pk = obj.pk
    viewname = "admin:{}_{}_{}".format(app_name, model_name, view_type)
    if current_app is None:
        current_app = admin_site.name
    url = reverse(viewname, args=[pk], current_app=current_app)
    return url
[ "def", "admin_view_url", "(", "admin_site", ":", "AdminSite", ",", "obj", ",", "view_type", ":", "str", "=", "\"change\"", ",", "current_app", ":", "str", "=", "None", ")", "->", "str", ":", "app_name", "=", "obj", ".", "_meta", ".", "app_label", ".", "lower", "(", ")", "model_name", "=", "obj", ".", "_meta", ".", "object_name", ".", "lower", "(", ")", "pk", "=", "obj", ".", "pk", "viewname", "=", "\"admin:{}_{}_{}\"", ".", "format", "(", "app_name", ",", "model_name", ",", "view_type", ")", "if", "current_app", "is", "None", ":", "current_app", "=", "admin_site", ".", "name", "url", "=", "reverse", "(", "viewname", ",", "args", "=", "[", "pk", "]", ",", "current_app", "=", "current_app", ")", "return", "url" ]
Get a Django admin site URL for an object.
[ "Get", "a", "Django", "admin", "site", "URL", "for", "an", "object", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/django/admin.py#L75-L89
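A short, hedged sketch of calling this from view code (``obj`` stands for any saved model instance; it only resolves inside a Django project with the admin URLs installed):

.. code-block:: python

    from django.contrib import admin
    from cardinal_pythonlib.django.admin import admin_view_url

    change_url = admin_view_url(admin.site, obj)                      # ".../<pk>/change/"
    delete_url = admin_view_url(admin.site, obj, view_type="delete")  # ".../<pk>/delete/"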
RudolfCardinal/pythonlib
cardinal_pythonlib/django/admin.py
admin_view_fk_link
def admin_view_fk_link(modeladmin: ModelAdmin, obj, fkfield: str, missing: str = "(None)", use_str: bool = True, view_type: str = "change", current_app: str = None) -> str: """ Get a Django admin site URL for an object that's found from a foreign key in our object of interest. """ if not hasattr(obj, fkfield): return missing linked_obj = getattr(obj, fkfield) app_name = linked_obj._meta.app_label.lower() model_name = linked_obj._meta.object_name.lower() viewname = "admin:{}_{}_{}".format(app_name, model_name, view_type) # https://docs.djangoproject.com/en/dev/ref/contrib/admin/#reversing-admin-urls # noqa if current_app is None: current_app = modeladmin.admin_site.name # ... plus a bit of home-grown magic; see Django source url = reverse(viewname, args=[linked_obj.pk], current_app=current_app) if use_str: label = escape(str(linked_obj)) else: label = "{} {}".format(escape(linked_obj._meta.object_name), linked_obj.pk) return '<a href="{}">{}</a>'.format(url, label)
python
def admin_view_fk_link(modeladmin: ModelAdmin, obj, fkfield: str, missing: str = "(None)", use_str: bool = True, view_type: str = "change", current_app: str = None) -> str: """ Get a Django admin site URL for an object that's found from a foreign key in our object of interest. """ if not hasattr(obj, fkfield): return missing linked_obj = getattr(obj, fkfield) app_name = linked_obj._meta.app_label.lower() model_name = linked_obj._meta.object_name.lower() viewname = "admin:{}_{}_{}".format(app_name, model_name, view_type) # https://docs.djangoproject.com/en/dev/ref/contrib/admin/#reversing-admin-urls # noqa if current_app is None: current_app = modeladmin.admin_site.name # ... plus a bit of home-grown magic; see Django source url = reverse(viewname, args=[linked_obj.pk], current_app=current_app) if use_str: label = escape(str(linked_obj)) else: label = "{} {}".format(escape(linked_obj._meta.object_name), linked_obj.pk) return '<a href="{}">{}</a>'.format(url, label)
[ "def", "admin_view_fk_link", "(", "modeladmin", ":", "ModelAdmin", ",", "obj", ",", "fkfield", ":", "str", ",", "missing", ":", "str", "=", "\"(None)\"", ",", "use_str", ":", "bool", "=", "True", ",", "view_type", ":", "str", "=", "\"change\"", ",", "current_app", ":", "str", "=", "None", ")", "->", "str", ":", "if", "not", "hasattr", "(", "obj", ",", "fkfield", ")", ":", "return", "missing", "linked_obj", "=", "getattr", "(", "obj", ",", "fkfield", ")", "app_name", "=", "linked_obj", ".", "_meta", ".", "app_label", ".", "lower", "(", ")", "model_name", "=", "linked_obj", ".", "_meta", ".", "object_name", ".", "lower", "(", ")", "viewname", "=", "\"admin:{}_{}_{}\"", ".", "format", "(", "app_name", ",", "model_name", ",", "view_type", ")", "# https://docs.djangoproject.com/en/dev/ref/contrib/admin/#reversing-admin-urls # noqa", "if", "current_app", "is", "None", ":", "current_app", "=", "modeladmin", ".", "admin_site", ".", "name", "# ... plus a bit of home-grown magic; see Django source", "url", "=", "reverse", "(", "viewname", ",", "args", "=", "[", "linked_obj", ".", "pk", "]", ",", "current_app", "=", "current_app", ")", "if", "use_str", ":", "label", "=", "escape", "(", "str", "(", "linked_obj", ")", ")", "else", ":", "label", "=", "\"{} {}\"", ".", "format", "(", "escape", "(", "linked_obj", ".", "_meta", ".", "object_name", ")", ",", "linked_obj", ".", "pk", ")", "return", "'<a href=\"{}\">{}</a>'", ".", "format", "(", "url", ",", "label", ")" ]
Get a Django admin site URL for an object that's found from a foreign key in our object of interest.
[ "Get", "a", "Django", "admin", "site", "URL", "for", "an", "object", "that", "s", "found", "from", "a", "foreign", "key", "in", "our", "object", "of", "interest", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/django/admin.py#L93-L120
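Typically this would be wired into a ``ModelAdmin`` as a read-only column; a hypothetical sketch (the ``Order``/``customer`` names are invented), which applies analogously to the reverse-foreign-key variant in the next record. Because the function returns a raw HTML string, modern Django needs it marked safe before it renders as a link.

.. code-block:: python

    from django.contrib import admin
    from django.utils.safestring import mark_safe
    from cardinal_pythonlib.django.admin import admin_view_fk_link

    class OrderAdmin(admin.ModelAdmin):
        readonly_fields = ("customer_link",)

        def customer_link(self, obj):
            # renders '<a href=".../change/">...</a>' for the "customer" foreign key
            return mark_safe(admin_view_fk_link(self, obj, "customer"))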
RudolfCardinal/pythonlib
cardinal_pythonlib/django/admin.py
admin_view_reverse_fk_links
def admin_view_reverse_fk_links(modeladmin: ModelAdmin, obj, reverse_fk_set_field: str, missing: str = "(None)", use_str: bool = True, separator: str = "<br>", view_type: str = "change", current_app: str = None) -> str: """ Get multiple Django admin site URL for multiple objects linked to our object of interest (where the other objects have foreign keys to our object). """ if not hasattr(obj, reverse_fk_set_field): return missing linked_objs = getattr(obj, reverse_fk_set_field).all() if not linked_objs: return missing first = linked_objs[0] app_name = first._meta.app_label.lower() model_name = first._meta.object_name.lower() viewname = "admin:{}_{}_{}".format(app_name, model_name, view_type) if current_app is None: current_app = modeladmin.admin_site.name links = [] for linked_obj in linked_objs: # log.debug("linked_obj: {}", linked_obj) url = reverse(viewname, args=[linked_obj.pk], current_app=current_app) if use_str: label = escape(str(linked_obj)) else: label = "{} {}".format(escape(linked_obj._meta.object_name), linked_obj.pk) links.append('<a href="{}">{}</a>'.format(url, label)) # log.debug("links: {}", links) return separator.join(links)
python
def admin_view_reverse_fk_links(modeladmin: ModelAdmin, obj, reverse_fk_set_field: str, missing: str = "(None)", use_str: bool = True, separator: str = "<br>", view_type: str = "change", current_app: str = None) -> str: """ Get multiple Django admin site URL for multiple objects linked to our object of interest (where the other objects have foreign keys to our object). """ if not hasattr(obj, reverse_fk_set_field): return missing linked_objs = getattr(obj, reverse_fk_set_field).all() if not linked_objs: return missing first = linked_objs[0] app_name = first._meta.app_label.lower() model_name = first._meta.object_name.lower() viewname = "admin:{}_{}_{}".format(app_name, model_name, view_type) if current_app is None: current_app = modeladmin.admin_site.name links = [] for linked_obj in linked_objs: # log.debug("linked_obj: {}", linked_obj) url = reverse(viewname, args=[linked_obj.pk], current_app=current_app) if use_str: label = escape(str(linked_obj)) else: label = "{} {}".format(escape(linked_obj._meta.object_name), linked_obj.pk) links.append('<a href="{}">{}</a>'.format(url, label)) # log.debug("links: {}", links) return separator.join(links)
[ "def", "admin_view_reverse_fk_links", "(", "modeladmin", ":", "ModelAdmin", ",", "obj", ",", "reverse_fk_set_field", ":", "str", ",", "missing", ":", "str", "=", "\"(None)\"", ",", "use_str", ":", "bool", "=", "True", ",", "separator", ":", "str", "=", "\"<br>\"", ",", "view_type", ":", "str", "=", "\"change\"", ",", "current_app", ":", "str", "=", "None", ")", "->", "str", ":", "if", "not", "hasattr", "(", "obj", ",", "reverse_fk_set_field", ")", ":", "return", "missing", "linked_objs", "=", "getattr", "(", "obj", ",", "reverse_fk_set_field", ")", ".", "all", "(", ")", "if", "not", "linked_objs", ":", "return", "missing", "first", "=", "linked_objs", "[", "0", "]", "app_name", "=", "first", ".", "_meta", ".", "app_label", ".", "lower", "(", ")", "model_name", "=", "first", ".", "_meta", ".", "object_name", ".", "lower", "(", ")", "viewname", "=", "\"admin:{}_{}_{}\"", ".", "format", "(", "app_name", ",", "model_name", ",", "view_type", ")", "if", "current_app", "is", "None", ":", "current_app", "=", "modeladmin", ".", "admin_site", ".", "name", "links", "=", "[", "]", "for", "linked_obj", "in", "linked_objs", ":", "# log.debug(\"linked_obj: {}\", linked_obj)", "url", "=", "reverse", "(", "viewname", ",", "args", "=", "[", "linked_obj", ".", "pk", "]", ",", "current_app", "=", "current_app", ")", "if", "use_str", ":", "label", "=", "escape", "(", "str", "(", "linked_obj", ")", ")", "else", ":", "label", "=", "\"{} {}\"", ".", "format", "(", "escape", "(", "linked_obj", ".", "_meta", ".", "object_name", ")", ",", "linked_obj", ".", "pk", ")", "links", ".", "append", "(", "'<a href=\"{}\">{}</a>'", ".", "format", "(", "url", ",", "label", ")", ")", "# log.debug(\"links: {}\", links)", "return", "separator", ".", "join", "(", "links", ")" ]
Get multiple Django admin site URL for multiple objects linked to our object of interest (where the other objects have foreign keys to our object).
[ "Get", "multiple", "Django", "admin", "site", "URL", "for", "multiple", "objects", "linked", "to", "our", "object", "of", "interest", "(", "where", "the", "other", "objects", "have", "foreign", "keys", "to", "our", "object", ")", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/django/admin.py#L124-L160
RudolfCardinal/pythonlib
cardinal_pythonlib/dsp.py
lowpass_filter
def lowpass_filter(data: FLOATS_TYPE, sampling_freq_hz: float, cutoff_freq_hz: float, numtaps: int) -> FLOATS_TYPE: """ Apply a low-pass filter to the data. Args: data: time series of the data sampling_freq_hz: sampling frequency :math:`f_s`, in Hz (or other consistent units) cutoff_freq_hz: filter cutoff frequency in Hz (or other consistent units) numtaps: number of filter taps Returns: filtered data Note: number of filter taps = filter order + 1 """ coeffs = firwin( numtaps=numtaps, cutoff=normalized_frequency(cutoff_freq_hz, sampling_freq_hz), pass_zero=True ) # coefficients of a finite impulse response (FIR) filter using window method # noqa filtered_data = lfilter(b=coeffs, a=1.0, x=data) return filtered_data
python
def lowpass_filter(data: FLOATS_TYPE, sampling_freq_hz: float, cutoff_freq_hz: float, numtaps: int) -> FLOATS_TYPE: """ Apply a low-pass filter to the data. Args: data: time series of the data sampling_freq_hz: sampling frequency :math:`f_s`, in Hz (or other consistent units) cutoff_freq_hz: filter cutoff frequency in Hz (or other consistent units) numtaps: number of filter taps Returns: filtered data Note: number of filter taps = filter order + 1 """ coeffs = firwin( numtaps=numtaps, cutoff=normalized_frequency(cutoff_freq_hz, sampling_freq_hz), pass_zero=True ) # coefficients of a finite impulse response (FIR) filter using window method # noqa filtered_data = lfilter(b=coeffs, a=1.0, x=data) return filtered_data
[ "def", "lowpass_filter", "(", "data", ":", "FLOATS_TYPE", ",", "sampling_freq_hz", ":", "float", ",", "cutoff_freq_hz", ":", "float", ",", "numtaps", ":", "int", ")", "->", "FLOATS_TYPE", ":", "coeffs", "=", "firwin", "(", "numtaps", "=", "numtaps", ",", "cutoff", "=", "normalized_frequency", "(", "cutoff_freq_hz", ",", "sampling_freq_hz", ")", ",", "pass_zero", "=", "True", ")", "# coefficients of a finite impulse response (FIR) filter using window method # noqa", "filtered_data", "=", "lfilter", "(", "b", "=", "coeffs", ",", "a", "=", "1.0", ",", "x", "=", "data", ")", "return", "filtered_data" ]
Apply a low-pass filter to the data. Args: data: time series of the data sampling_freq_hz: sampling frequency :math:`f_s`, in Hz (or other consistent units) cutoff_freq_hz: filter cutoff frequency in Hz (or other consistent units) numtaps: number of filter taps Returns: filtered data Note: number of filter taps = filter order + 1
[ "Apply", "a", "low", "-", "pass", "filter", "to", "the", "data", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/dsp.py#L71-L97
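A minimal usage sketch, assuming NumPy/SciPy are installed; the tap count of 101 is an arbitrary illustrative choice. It keeps the 1 Hz component of a two-tone signal sampled at 100 Hz and attenuates the 20 Hz component.

.. code-block:: python

    import numpy as np
    from cardinal_pythonlib.dsp import lowpass_filter

    fs = 100.0                                  # sampling frequency, Hz
    t = np.arange(0.0, 10.0, 1.0 / fs)
    x = np.sin(2 * np.pi * 1.0 * t) + 0.5 * np.sin(2 * np.pi * 20.0 * t)
    y = lowpass_filter(x, sampling_freq_hz=fs, cutoff_freq_hz=5.0, numtaps=101)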
RudolfCardinal/pythonlib
cardinal_pythonlib/dsp.py
bandpass_filter
def bandpass_filter(data: FLOATS_TYPE, sampling_freq_hz: float, lower_freq_hz: float, upper_freq_hz: float, numtaps: int) -> FLOATS_TYPE: """ Apply a band-pass filter to the data. Args: data: time series of the data sampling_freq_hz: sampling frequency :math:`f_s`, in Hz (or other consistent units) lower_freq_hz: filter cutoff lower frequency in Hz (or other consistent units) upper_freq_hz: filter cutoff upper frequency in Hz (or other consistent units) numtaps: number of filter taps Returns: filtered data Note: number of filter taps = filter order + 1 """ f1 = normalized_frequency(lower_freq_hz, sampling_freq_hz) f2 = normalized_frequency(upper_freq_hz, sampling_freq_hz) coeffs = firwin( numtaps=numtaps, cutoff=[f1, f2], pass_zero=False ) filtered_data = lfilter(b=coeffs, a=1.0, x=data) return filtered_data
python
def bandpass_filter(data: FLOATS_TYPE, sampling_freq_hz: float, lower_freq_hz: float, upper_freq_hz: float, numtaps: int) -> FLOATS_TYPE: """ Apply a band-pass filter to the data. Args: data: time series of the data sampling_freq_hz: sampling frequency :math:`f_s`, in Hz (or other consistent units) lower_freq_hz: filter cutoff lower frequency in Hz (or other consistent units) upper_freq_hz: filter cutoff upper frequency in Hz (or other consistent units) numtaps: number of filter taps Returns: filtered data Note: number of filter taps = filter order + 1 """ f1 = normalized_frequency(lower_freq_hz, sampling_freq_hz) f2 = normalized_frequency(upper_freq_hz, sampling_freq_hz) coeffs = firwin( numtaps=numtaps, cutoff=[f1, f2], pass_zero=False ) filtered_data = lfilter(b=coeffs, a=1.0, x=data) return filtered_data
[ "def", "bandpass_filter", "(", "data", ":", "FLOATS_TYPE", ",", "sampling_freq_hz", ":", "float", ",", "lower_freq_hz", ":", "float", ",", "upper_freq_hz", ":", "float", ",", "numtaps", ":", "int", ")", "->", "FLOATS_TYPE", ":", "f1", "=", "normalized_frequency", "(", "lower_freq_hz", ",", "sampling_freq_hz", ")", "f2", "=", "normalized_frequency", "(", "upper_freq_hz", ",", "sampling_freq_hz", ")", "coeffs", "=", "firwin", "(", "numtaps", "=", "numtaps", ",", "cutoff", "=", "[", "f1", ",", "f2", "]", ",", "pass_zero", "=", "False", ")", "filtered_data", "=", "lfilter", "(", "b", "=", "coeffs", ",", "a", "=", "1.0", ",", "x", "=", "data", ")", "return", "filtered_data" ]
Apply a band-pass filter to the data. Args: data: time series of the data sampling_freq_hz: sampling frequency :math:`f_s`, in Hz (or other consistent units) lower_freq_hz: filter cutoff lower frequency in Hz (or other consistent units) upper_freq_hz: filter cutoff upper frequency in Hz (or other consistent units) numtaps: number of filter taps Returns: filtered data Note: number of filter taps = filter order + 1
[ "Apply", "a", "band", "-", "pass", "filter", "to", "the", "data", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/dsp.py#L129-L160
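The band-pass version is called the same way, with the pass band given as lower and upper cutoffs; the values below are illustrative only, and ``x`` stands for any 1-D NumPy array sampled at 100 Hz.

.. code-block:: python

    from cardinal_pythonlib.dsp import bandpass_filter

    # Keep roughly the 8-12 Hz band.
    y_band = bandpass_filter(x, sampling_freq_hz=100.0,
                             lower_freq_hz=8.0, upper_freq_hz=12.0, numtaps=101)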
RudolfCardinal/pythonlib
cardinal_pythonlib/dsp.py
notch_filter
def notch_filter(data: FLOATS_TYPE, sampling_freq_hz: float, notch_freq_hz: float, quality_factor: float) -> FLOATS_TYPE: """ Design and use a notch (band reject) filter to filter the data. Args: data: time series of the data sampling_freq_hz: sampling frequency :math:`f_s`, in Hz (or other consistent units) notch_freq_hz: notch frequency, in Hz (or other consistent units) quality_factor: notch filter quality factor, :math:`Q` Returns: filtered data """ b, a = iirnotch( w0=normalized_frequency(notch_freq_hz, sampling_freq_hz), Q=quality_factor ) filtered_data = lfilter(b=b, a=a, x=data) return filtered_data
python
def notch_filter(data: FLOATS_TYPE, sampling_freq_hz: float, notch_freq_hz: float, quality_factor: float) -> FLOATS_TYPE: """ Design and use a notch (band reject) filter to filter the data. Args: data: time series of the data sampling_freq_hz: sampling frequency :math:`f_s`, in Hz (or other consistent units) notch_freq_hz: notch frequency, in Hz (or other consistent units) quality_factor: notch filter quality factor, :math:`Q` Returns: filtered data """ b, a = iirnotch( w0=normalized_frequency(notch_freq_hz, sampling_freq_hz), Q=quality_factor ) filtered_data = lfilter(b=b, a=a, x=data) return filtered_data
[ "def", "notch_filter", "(", "data", ":", "FLOATS_TYPE", ",", "sampling_freq_hz", ":", "float", ",", "notch_freq_hz", ":", "float", ",", "quality_factor", ":", "float", ")", "->", "FLOATS_TYPE", ":", "b", ",", "a", "=", "iirnotch", "(", "w0", "=", "normalized_frequency", "(", "notch_freq_hz", ",", "sampling_freq_hz", ")", ",", "Q", "=", "quality_factor", ")", "filtered_data", "=", "lfilter", "(", "b", "=", "b", ",", "a", "=", "a", ",", "x", "=", "data", ")", "return", "filtered_data" ]
Design and use a notch (band reject) filter to filter the data. Args: data: time series of the data sampling_freq_hz: sampling frequency :math:`f_s`, in Hz (or other consistent units) notch_freq_hz: notch frequency, in Hz (or other consistent units) quality_factor: notch filter quality factor, :math:`Q` Returns: filtered data
[ "Design", "and", "use", "a", "notch", "(", "band", "reject", ")", "filter", "to", "filter", "the", "data", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/dsp.py#L163-L186
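A common use is removing mains interference; a hedged sketch in which ``raw_signal`` stands for any 1-D array sampled at 1 kHz and the quality factor of 30 is chosen arbitrarily.

.. code-block:: python

    from cardinal_pythonlib.dsp import notch_filter

    # Reject a narrow band around 50 Hz (mains hum).
    cleaned = notch_filter(raw_signal, sampling_freq_hz=1000.0,
                           notch_freq_hz=50.0, quality_factor=30.0)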
davenquinn/Attitude
attitude/orientation/base.py
rotation
def rotation(angle):
    """Rotation about the Z axis (in the XY plane)"""
    return N.array([[N.cos(angle), -N.sin(angle), 0],
                    [N.sin(angle),  N.cos(angle), 0],
                    [0,             0,            1]])
python
def rotation(angle):
    """Rotation about the Z axis (in the XY plane)"""
    return N.array([[N.cos(angle), -N.sin(angle), 0],
                    [N.sin(angle),  N.cos(angle), 0],
                    [0,             0,            1]])
[ "def", "rotation", "(", "angle", ")", ":", "return", "N", ".", "array", "(", "[", "[", "N", ".", "cos", "(", "angle", ")", ",", "-", "N", ".", "sin", "(", "angle", ")", ",", "0", "]", ",", "[", "N", ".", "sin", "(", "angle", ")", ",", "N", ".", "cos", "(", "angle", ")", ",", "0", "]", ",", "[", "0", ",", "0", ",", "1", "]", "]", ")" ]
Rotation about the Z axis (in the XY plane)
[ "Rotation", "about", "the", "Z", "axis", "(", "in", "the", "XY", "plane", ")" ]
train
https://github.com/davenquinn/Attitude/blob/2ce97b9aba0aa5deedc6617c2315e07e6396d240/attitude/orientation/base.py#L10-L14
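A quick check of the convention (the module imports NumPy as ``N``; the import path below is assumed from the file location): rotating the unit x vector by 90 degrees about Z gives the unit y vector.

.. code-block:: python

    import numpy as N
    from attitude.orientation.base import rotation   # assumed import path

    R = rotation(N.pi / 2)                 # 90 degrees about the Z axis
    print(R.dot(N.array([1, 0, 0])))       # approximately [0, 1, 0]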
davenquinn/Attitude
attitude/orientation/base.py
ellipse
def ellipse(center, covariance_matrix, level=1, n=1000):
    """Returns error ellipse in slope-azimuth space"""
    # singular value decomposition
    U, s, rotation_matrix = N.linalg.svd(covariance_matrix)
    # semi-axes (largest first)
    saxes = N.sqrt(s)*level
    ## If the _area_ of a 2s ellipse is twice that of a 1s ellipse
    # If the _axes_ are supposed to be twice as long, then it should be N.sqrt(s)*width
    u = N.linspace(0, 2*N.pi, n)
    data = N.column_stack((saxes[0]*N.cos(u), saxes[1]*N.sin(u)))
    # rotate data
    return N.dot(data, rotation_matrix) + center
python
def ellipse(center, covariance_matrix, level=1, n=1000):
    """Returns error ellipse in slope-azimuth space"""
    # singular value decomposition
    U, s, rotation_matrix = N.linalg.svd(covariance_matrix)
    # semi-axes (largest first)
    saxes = N.sqrt(s)*level
    ## If the _area_ of a 2s ellipse is twice that of a 1s ellipse
    # If the _axes_ are supposed to be twice as long, then it should be N.sqrt(s)*width
    u = N.linspace(0, 2*N.pi, n)
    data = N.column_stack((saxes[0]*N.cos(u), saxes[1]*N.sin(u)))
    # rotate data
    return N.dot(data, rotation_matrix) + center
[ "def", "ellipse", "(", "center", ",", "covariance_matrix", ",", "level", "=", "1", ",", "n", "=", "1000", ")", ":", "# singular value decomposition", "U", ",", "s", ",", "rotation_matrix", "=", "N", ".", "linalg", ".", "svd", "(", "covariance_matrix", ")", "# semi-axes (largest first)", "saxes", "=", "N", ".", "sqrt", "(", "s", ")", "*", "level", "## If the _area_ of a 2s ellipse is twice that of a 1s ellipse", "# If the _axes_ are supposed to be twice as long, then it should be N.sqrt(s)*width", "u", "=", "N", ".", "linspace", "(", "0", ",", "2", "*", "N", ".", "pi", ",", "n", ")", "data", "=", "N", ".", "column_stack", "(", "(", "saxes", "[", "0", "]", "*", "N", ".", "cos", "(", "u", ")", ",", "saxes", "[", "1", "]", "*", "N", ".", "sin", "(", "u", ")", ")", ")", "# rotate data", "return", "N", ".", "dot", "(", "data", ",", "rotation_matrix", ")", "+", "center" ]
Returns error ellipse in slope-azimuth space
[ "Returns", "error", "ellipse", "in", "slope", "-", "azimuth", "space" ]
train
https://github.com/davenquinn/Attitude/blob/2ce97b9aba0aa5deedc6617c2315e07e6396d240/attitude/orientation/base.py#L16-L28
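A small sketch of tracing the error ellipse from a 2x2 covariance matrix (numbers invented): the result is an ``(n, 2)`` array of points around ``center``, scaled to the requested sigma level.

.. code-block:: python

    import numpy as N
    from attitude.orientation.base import ellipse    # assumed import path

    cov = N.array([[2.0, 0.5],
                   [0.5, 1.0]])
    pts = ellipse(center=N.array([10.0, 45.0]),
                  covariance_matrix=cov, level=2, n=200)
    print(pts.shape)   # (200, 2): the 2-sigma ellipse around (10, 45)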
davenquinn/Attitude
attitude/orientation/base.py
BaseOrientation.to_mapping
def to_mapping(self,**values): """ Create a JSON-serializable representation of the plane that is usable with the javascript frontend """ strike, dip, rake = self.strike_dip_rake() min, max = self.angular_errors() try: disabled = self.disabled except AttributeError: disabled = False mapping = dict( uid=self.hash, axes=self.axes.tolist(), hyperbolic_axes=self.hyperbolic_axes.tolist(), max_angular_error=max, min_angular_error=min, strike=strike, dip=dip, rake=rake, disabled=disabled) # Add in user-provided-values, overwriting if # necessary for k,v in values.items(): mapping[k] = v return mapping
python
def to_mapping(self,**values): """ Create a JSON-serializable representation of the plane that is usable with the javascript frontend """ strike, dip, rake = self.strike_dip_rake() min, max = self.angular_errors() try: disabled = self.disabled except AttributeError: disabled = False mapping = dict( uid=self.hash, axes=self.axes.tolist(), hyperbolic_axes=self.hyperbolic_axes.tolist(), max_angular_error=max, min_angular_error=min, strike=strike, dip=dip, rake=rake, disabled=disabled) # Add in user-provided-values, overwriting if # necessary for k,v in values.items(): mapping[k] = v return mapping
[ "def", "to_mapping", "(", "self", ",", "*", "*", "values", ")", ":", "strike", ",", "dip", ",", "rake", "=", "self", ".", "strike_dip_rake", "(", ")", "min", ",", "max", "=", "self", ".", "angular_errors", "(", ")", "try", ":", "disabled", "=", "self", ".", "disabled", "except", "AttributeError", ":", "disabled", "=", "False", "mapping", "=", "dict", "(", "uid", "=", "self", ".", "hash", ",", "axes", "=", "self", ".", "axes", ".", "tolist", "(", ")", ",", "hyperbolic_axes", "=", "self", ".", "hyperbolic_axes", ".", "tolist", "(", ")", ",", "max_angular_error", "=", "max", ",", "min_angular_error", "=", "min", ",", "strike", "=", "strike", ",", "dip", "=", "dip", ",", "rake", "=", "rake", ",", "disabled", "=", "disabled", ")", "# Add in user-provided-values, overwriting if", "# necessary", "for", "k", ",", "v", "in", "values", ".", "items", "(", ")", ":", "mapping", "[", "k", "]", "=", "v", "return", "mapping" ]
Create a JSON-serializable representation of the plane that is usable with the javascript frontend
[ "Create", "a", "JSON", "-", "serializable", "representation", "of", "the", "plane", "that", "is", "usable", "with", "the", "javascript", "frontend" ]
train
https://github.com/davenquinn/Attitude/blob/2ce97b9aba0aa5deedc6617c2315e07e6396d240/attitude/orientation/base.py#L91-L119
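Because the mapping is built from plain lists, numbers and strings, a typical (hypothetical) use is serializing it for the JavaScript frontend; extra keyword arguments override or extend the computed keys.

.. code-block:: python

    import json

    # "fit" stands for any object exposing to_mapping(); "member_of" is an invented extra key.
    payload = json.dumps(fit.to_mapping(member_of="group-1"))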
RudolfCardinal/pythonlib
cardinal_pythonlib/subproc.py
kill_child_processes
def kill_child_processes() -> None:
    """
    Kills children of this process that were registered in the
    :data:`processes` variable.

    Use with ``@atexit.register``.
    """
    timeout_sec = 5
    for p in processes:
        try:
            p.wait(timeout_sec)
        except TimeoutExpired:
            # failed to close
            p.kill()
python
def kill_child_processes() -> None:
    """
    Kills children of this process that were registered in the
    :data:`processes` variable.

    Use with ``@atexit.register``.
    """
    timeout_sec = 5
    for p in processes:
        try:
            p.wait(timeout_sec)
        except TimeoutExpired:
            # failed to close
            p.kill()
[ "def", "kill_child_processes", "(", ")", "->", "None", ":", "timeout_sec", "=", "5", "for", "p", "in", "processes", ":", "try", ":", "p", ".", "wait", "(", "timeout_sec", ")", "except", "TimeoutExpired", ":", "# failed to close", "p", ".", "kill", "(", ")" ]
Kills children of this process that were registered in the :data:`processes` variable. Use with ``@atexit.register``.
[ "Kills", "children", "of", "this", "process", "that", "were", "registered", "in", "the", ":", "data", ":", "processes", "variable", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/subproc.py#L78-L91
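As the docstring suggests, this is meant to be registered as an exit handler so that any children recorded in ``processes`` are cleaned up when the parent exits.

.. code-block:: python

    import atexit
    from cardinal_pythonlib.subproc import kill_child_processes

    atexit.register(kill_child_processes)   # runs automatically at interpreter exit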
RudolfCardinal/pythonlib
cardinal_pythonlib/subproc.py
start_process
def start_process(args: List[str], stdin: Any = None, stdout: Any = None, stderr: Any = None) -> Popen: """ Launch a child process and record it in our :data:`processes` variable. Args: args: program and its arguments, as a list stdin: typically None stdout: use None to perform no routing, which preserves console colour! Otherwise, specify somewhere to route stdout. See subprocess documentation. If either is PIPE, you'll need to deal with the output. stderr: As above. You can use stderr=STDOUT to route stderr to the same place as stdout. Returns: The process object (which is also stored in :data:`processes`). """ log.debug("{!r}", args) global processes global proc_args_list proc = Popen(args, stdin=stdin, stdout=stdout, stderr=stderr) # proc = Popen(args, stdin=None, stdout=PIPE, stderr=STDOUT) # proc = Popen(args, stdin=None, stdout=PIPE, stderr=PIPE) # Can't preserve colour: http://stackoverflow.com/questions/13299550/preserve-colored-output-from-python-os-popen # noqa processes.append(proc) proc_args_list.append(args) return proc
python
def start_process(args: List[str], stdin: Any = None, stdout: Any = None, stderr: Any = None) -> Popen: """ Launch a child process and record it in our :data:`processes` variable. Args: args: program and its arguments, as a list stdin: typically None stdout: use None to perform no routing, which preserves console colour! Otherwise, specify somewhere to route stdout. See subprocess documentation. If either is PIPE, you'll need to deal with the output. stderr: As above. You can use stderr=STDOUT to route stderr to the same place as stdout. Returns: The process object (which is also stored in :data:`processes`). """ log.debug("{!r}", args) global processes global proc_args_list proc = Popen(args, stdin=stdin, stdout=stdout, stderr=stderr) # proc = Popen(args, stdin=None, stdout=PIPE, stderr=STDOUT) # proc = Popen(args, stdin=None, stdout=PIPE, stderr=PIPE) # Can't preserve colour: http://stackoverflow.com/questions/13299550/preserve-colored-output-from-python-os-popen # noqa processes.append(proc) proc_args_list.append(args) return proc
[ "def", "start_process", "(", "args", ":", "List", "[", "str", "]", ",", "stdin", ":", "Any", "=", "None", ",", "stdout", ":", "Any", "=", "None", ",", "stderr", ":", "Any", "=", "None", ")", "->", "Popen", ":", "log", ".", "debug", "(", "\"{!r}\"", ",", "args", ")", "global", "processes", "global", "proc_args_list", "proc", "=", "Popen", "(", "args", ",", "stdin", "=", "stdin", ",", "stdout", "=", "stdout", ",", "stderr", "=", "stderr", ")", "# proc = Popen(args, stdin=None, stdout=PIPE, stderr=STDOUT)", "# proc = Popen(args, stdin=None, stdout=PIPE, stderr=PIPE)", "# Can't preserve colour: http://stackoverflow.com/questions/13299550/preserve-colored-output-from-python-os-popen # noqa", "processes", ".", "append", "(", "proc", ")", "proc_args_list", ".", "append", "(", "args", ")", "return", "proc" ]
Launch a child process and record it in our :data:`processes` variable. Args: args: program and its arguments, as a list stdin: typically None stdout: use None to perform no routing, which preserves console colour! Otherwise, specify somewhere to route stdout. See subprocess documentation. If either is PIPE, you'll need to deal with the output. stderr: As above. You can use stderr=STDOUT to route stderr to the same place as stdout. Returns: The process object (which is also stored in :data:`processes`).
[ "Launch", "a", "child", "process", "and", "record", "it", "in", "our", ":", "data", ":", "processes", "variable", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/subproc.py#L117-L146
RudolfCardinal/pythonlib
cardinal_pythonlib/subproc.py
wait_for_processes
def wait_for_processes(die_on_failure: bool = True, timeout_sec: float = 1) -> None: """ Wait for child processes (catalogued in :data:`processes`) to finish. If ``die_on_failure`` is ``True``, then whenever a subprocess returns failure, all are killed. If ``timeout_sec`` is None, the function waits for its first process to complete, then waits for the second, etc. So a subprocess dying does not trigger a full quit instantly (or potentially for ages). If ``timeout_sec`` is something else, each process is tried for that time; if it quits within that time, well and good (successful quit -> continue waiting for the others; failure -> kill everything, if ``die_on_failure``); if it doesn't, we try the next. That is much more responsive. """ global processes global proc_args_list n = len(processes) Pool(n).map(print_lines, processes) # in case of PIPE something_running = True while something_running: something_running = False for i, p in enumerate(processes): try: retcode = p.wait(timeout=timeout_sec) if retcode == 0: log.info("Process #{} (of {}) exited cleanly", i, n) if retcode != 0: log.critical( "Process #{} (of {}) exited with return code {} " "(indicating failure); its args were: {!r}", i, n, retcode, proc_args_list[i]) if die_on_failure: log.critical("Exiting top-level process (will kill " "all other children)") fail() # exit this process, therefore kill its children # noqa except TimeoutExpired: something_running = True processes.clear() proc_args_list.clear()
python
def wait_for_processes(die_on_failure: bool = True, timeout_sec: float = 1) -> None: """ Wait for child processes (catalogued in :data:`processes`) to finish. If ``die_on_failure`` is ``True``, then whenever a subprocess returns failure, all are killed. If ``timeout_sec`` is None, the function waits for its first process to complete, then waits for the second, etc. So a subprocess dying does not trigger a full quit instantly (or potentially for ages). If ``timeout_sec`` is something else, each process is tried for that time; if it quits within that time, well and good (successful quit -> continue waiting for the others; failure -> kill everything, if ``die_on_failure``); if it doesn't, we try the next. That is much more responsive. """ global processes global proc_args_list n = len(processes) Pool(n).map(print_lines, processes) # in case of PIPE something_running = True while something_running: something_running = False for i, p in enumerate(processes): try: retcode = p.wait(timeout=timeout_sec) if retcode == 0: log.info("Process #{} (of {}) exited cleanly", i, n) if retcode != 0: log.critical( "Process #{} (of {}) exited with return code {} " "(indicating failure); its args were: {!r}", i, n, retcode, proc_args_list[i]) if die_on_failure: log.critical("Exiting top-level process (will kill " "all other children)") fail() # exit this process, therefore kill its children # noqa except TimeoutExpired: something_running = True processes.clear() proc_args_list.clear()
[ "def", "wait_for_processes", "(", "die_on_failure", ":", "bool", "=", "True", ",", "timeout_sec", ":", "float", "=", "1", ")", "->", "None", ":", "global", "processes", "global", "proc_args_list", "n", "=", "len", "(", "processes", ")", "Pool", "(", "n", ")", ".", "map", "(", "print_lines", ",", "processes", ")", "# in case of PIPE", "something_running", "=", "True", "while", "something_running", ":", "something_running", "=", "False", "for", "i", ",", "p", "in", "enumerate", "(", "processes", ")", ":", "try", ":", "retcode", "=", "p", ".", "wait", "(", "timeout", "=", "timeout_sec", ")", "if", "retcode", "==", "0", ":", "log", ".", "info", "(", "\"Process #{} (of {}) exited cleanly\"", ",", "i", ",", "n", ")", "if", "retcode", "!=", "0", ":", "log", ".", "critical", "(", "\"Process #{} (of {}) exited with return code {} \"", "\"(indicating failure); its args were: {!r}\"", ",", "i", ",", "n", ",", "retcode", ",", "proc_args_list", "[", "i", "]", ")", "if", "die_on_failure", ":", "log", ".", "critical", "(", "\"Exiting top-level process (will kill \"", "\"all other children)\"", ")", "fail", "(", ")", "# exit this process, therefore kill its children # noqa", "except", "TimeoutExpired", ":", "something_running", "=", "True", "processes", ".", "clear", "(", ")", "proc_args_list", ".", "clear", "(", ")" ]
Wait for child processes (catalogued in :data:`processes`) to finish. If ``die_on_failure`` is ``True``, then whenever a subprocess returns failure, all are killed. If ``timeout_sec`` is None, the function waits for its first process to complete, then waits for the second, etc. So a subprocess dying does not trigger a full quit instantly (or potentially for ages). If ``timeout_sec`` is something else, each process is tried for that time; if it quits within that time, well and good (successful quit -> continue waiting for the others; failure -> kill everything, if ``die_on_failure``); if it doesn't, we try the next. That is much more responsive.
[ "Wait", "for", "child", "processes", "(", "catalogued", "in", ":", "data", ":", "processes", ")", "to", "finish", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/subproc.py#L149-L191
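A hedged end-to-end sketch combining :func:`start_process` and :func:`wait_for_processes` (the ``sleep`` commands are placeholders for real work and assume a Unix-like system):

.. code-block:: python

    from cardinal_pythonlib.subproc import start_process, wait_for_processes

    start_process(["sleep", "2"])
    start_process(["sleep", "3"])
    # Poll each child for up to 1 s at a time; if any exits non-zero, kill everything.
    wait_for_processes(die_on_failure=True, timeout_sec=1)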
RudolfCardinal/pythonlib
cardinal_pythonlib/subproc.py
print_lines
def print_lines(process: Popen) -> None:
    """
    Let a subprocess :func:`communicate`, then write both its ``stdout`` and
    its ``stderr`` to our ``stdout``.
    """
    out, err = process.communicate()
    if out:
        for line in out.decode("utf-8").splitlines():
            print(line)
    if err:
        for line in err.decode("utf-8").splitlines():
            print(line)
python
def print_lines(process: Popen) -> None:
    """
    Let a subprocess :func:`communicate`, then write both its ``stdout`` and
    its ``stderr`` to our ``stdout``.
    """
    out, err = process.communicate()
    if out:
        for line in out.decode("utf-8").splitlines():
            print(line)
    if err:
        for line in err.decode("utf-8").splitlines():
            print(line)
[ "def", "print_lines", "(", "process", ":", "Popen", ")", "->", "None", ":", "out", ",", "err", "=", "process", ".", "communicate", "(", ")", "if", "out", ":", "for", "line", "in", "out", ".", "decode", "(", "\"utf-8\"", ")", ".", "splitlines", "(", ")", ":", "print", "(", "line", ")", "if", "err", ":", "for", "line", "in", "err", ".", "decode", "(", "\"utf-8\"", ")", ".", "splitlines", "(", ")", ":", "print", "(", "line", ")" ]
Let a subprocess :func:`communicate`, then write both its ``stdout`` and its ``stderr`` to our ``stdout``.
[ "Let", "a", "subprocess", ":", "func", ":", "communicate", "then", "write", "both", "its", "stdout", "and", "its", "stderr", "to", "our", "stdout", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/subproc.py#L194-L205
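A minimal sketch with piped output (only needed when ``stdout``/``stderr`` were routed to ``PIPE``):

.. code-block:: python

    from subprocess import Popen, PIPE
    from cardinal_pythonlib.subproc import print_lines

    p = Popen(["echo", "hello"], stdout=PIPE, stderr=PIPE)
    print_lines(p)   # relays the captured stdout and stderr to our own stdout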
RudolfCardinal/pythonlib
cardinal_pythonlib/subproc.py
run_multiple_processes
def run_multiple_processes(args_list: List[List[str]],
                           die_on_failure: bool = True) -> None:
    """
    Fire up multiple processes, and wait for them to finish.

    Args:
        args_list: command arguments for each process
        die_on_failure: see :func:`wait_for_processes`
    """
    for procargs in args_list:
        start_process(procargs)
    # Wait for them all to finish
    wait_for_processes(die_on_failure=die_on_failure)
python
def run_multiple_processes(args_list: List[List[str]],
                           die_on_failure: bool = True) -> None:
    """
    Fire up multiple processes, and wait for them to finish.

    Args:
        args_list: command arguments for each process
        die_on_failure: see :func:`wait_for_processes`
    """
    for procargs in args_list:
        start_process(procargs)
    # Wait for them all to finish
    wait_for_processes(die_on_failure=die_on_failure)
[ "def", "run_multiple_processes", "(", "args_list", ":", "List", "[", "List", "[", "str", "]", "]", ",", "die_on_failure", ":", "bool", "=", "True", ")", "->", "None", ":", "for", "procargs", "in", "args_list", ":", "start_process", "(", "procargs", ")", "# Wait for them all to finish", "wait_for_processes", "(", "die_on_failure", "=", "die_on_failure", ")" ]
Fire up multiple processes, and wait for them to finish. Args: args_list: command arguments for each process die_on_failure: see :func:`wait_for_processes`
[ "Fire", "up", "multiple", "processes", "and", "wait", "for", "them", "to", "finihs", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/subproc.py#L208-L220
RudolfCardinal/pythonlib
cardinal_pythonlib/subproc.py
mimic_user_input
def mimic_user_input( args: List[str], source_challenge_response: List[Tuple[SubprocSource, str, Union[str, SubprocCommand]]], line_terminators: List[str] = None, print_stdout: bool = False, print_stderr: bool = False, print_stdin: bool = False, stdin_encoding: str = None, stdout_encoding: str = None, suppress_decoding_errors: bool = True, sleep_time_s: float = 0.1) -> None: r""" Run an external command. Pretend to be a human by sending text to the subcommand (responses) when the external command sends us triggers (challenges). This is a bit nasty. Args: args: command-line arguments source_challenge_response: list of tuples of the format ``(challsrc, challenge, response)``; see below line_terminators: valid line terminators print_stdout: print_stderr: print_stdin: stdin_encoding: stdout_encoding: suppress_decoding_errors: trap any ``UnicodeDecodeError``? sleep_time_s: The ``(challsrc, challenge, response)`` tuples have this meaning: - ``challsrc``: where is the challenge coming from? Must be one of the objects :data:`SOURCE_STDOUT` or :data:`SOURCE_STDERR`; - ``challenge``: text of challenge - ``response``: text of response (send to the subcommand's ``stdin``). Example (modified from :class:`CorruptedZipReader`): .. code-block:: python from cardinal_pythonlib.subproc import * SOURCE_FILENAME = "corrupt.zip" TMP_DIR = "/tmp" OUTPUT_FILENAME = "rescued.zip" cmdargs = [ "zip", # Linux zip tool "-FF", # or "--fixfix": "fix very broken things" SOURCE_FILENAME, # input file "--temp-path", TMP_DIR, # temporary storage path "--out", OUTPUT_FILENAME # output file ] # We would like to be able to say "y" automatically to # "Is this a single-disk archive? (y/n):" # The source code (api.c, zip.c, zipfile.c), from # ftp://ftp.info-zip.org/pub/infozip/src/ , suggests that "-q" # should do this (internally "-q" sets "noisy = 0") - but in # practice it doesn't work. This is a critical switch. # Therefore we will do something very ugly, and send raw text via # stdin. ZIP_PROMPTS_RESPONSES = [ (SOURCE_STDOUT, "Is this a single-disk archive? 
(y/n): ", "y\n"), (SOURCE_STDOUT, " or ENTER (try reading this split again): ", "q\n"), (SOURCE_STDERR, "zip: malloc.c:2394: sysmalloc: Assertion `(old_top == initial_top (av) " "&& old_size == 0) || ((unsigned long) (old_size) >= MINSIZE && " "prev_inuse (old_top) && ((unsigned long) old_end & (pagesize - 1)) " "== 0)' failed.", TERMINATE_SUBPROCESS), ] ZIP_STDOUT_TERMINATORS = ["\n", "): "] mimic_user_input(cmdargs, source_challenge_response=ZIP_PROMPTS_RESPONSES, line_terminators=ZIP_STDOUT_TERMINATORS, print_stdout=show_zip_output, print_stdin=show_zip_output) """ # noqa line_terminators = line_terminators or ["\n"] # type: List[str] stdin_encoding = stdin_encoding or sys.getdefaultencoding() stdout_encoding = stdout_encoding or sys.getdefaultencoding() # Launch the command p = Popen(args, stdin=PIPE, stdout=PIPE, stderr=PIPE, bufsize=0) # Launch the asynchronous readers of stdout and stderr stdout_queue = Queue() # noinspection PyTypeChecker stdout_reader = AsynchronousFileReader( fd=p.stdout, queue=stdout_queue, encoding=stdout_encoding, line_terminators=line_terminators, cmdargs=args, suppress_decoding_errors=suppress_decoding_errors ) stdout_reader.start() stderr_queue = Queue() # noinspection PyTypeChecker stderr_reader = AsynchronousFileReader( fd=p.stderr, queue=stderr_queue, encoding=stdout_encoding, # same as stdout line_terminators=line_terminators, cmdargs=args, suppress_decoding_errors=suppress_decoding_errors ) stderr_reader.start() while not stdout_reader.eof() or not stderr_reader.eof(): lines_with_source = [] # type: List[Tuple[SubprocSource, str]] while not stdout_queue.empty(): lines_with_source.append((SOURCE_STDOUT, stdout_queue.get())) while not stderr_queue.empty(): lines_with_source.append((SOURCE_STDERR, stderr_queue.get())) for src, line in lines_with_source: if src is SOURCE_STDOUT and print_stdout: print(line, end="") # terminator already in line if src is SOURCE_STDERR and print_stderr: print(line, end="") # terminator already in line for challsrc, challenge, response in source_challenge_response: # log.critical("challsrc={!r}", challsrc) # log.critical("challenge={!r}", challenge) # log.critical("line={!r}", line) # log.critical("response={!r}", response) if challsrc != src: continue if challenge in line: if response is TERMINATE_SUBPROCESS: log.warning("Terminating subprocess {!r} because input " "{!r} received", args, challenge) p.kill() return else: p.stdin.write(response.encode(stdin_encoding)) p.stdin.flush() if print_stdin: print(response, end="") # Sleep a bit before asking the readers again. sleep(sleep_time_s) stdout_reader.join() stderr_reader.join() p.stdout.close() p.stderr.close()
python
def mimic_user_input( args: List[str], source_challenge_response: List[Tuple[SubprocSource, str, Union[str, SubprocCommand]]], line_terminators: List[str] = None, print_stdout: bool = False, print_stderr: bool = False, print_stdin: bool = False, stdin_encoding: str = None, stdout_encoding: str = None, suppress_decoding_errors: bool = True, sleep_time_s: float = 0.1) -> None: r""" Run an external command. Pretend to be a human by sending text to the subcommand (responses) when the external command sends us triggers (challenges). This is a bit nasty. Args: args: command-line arguments source_challenge_response: list of tuples of the format ``(challsrc, challenge, response)``; see below line_terminators: valid line terminators print_stdout: print_stderr: print_stdin: stdin_encoding: stdout_encoding: suppress_decoding_errors: trap any ``UnicodeDecodeError``? sleep_time_s: The ``(challsrc, challenge, response)`` tuples have this meaning: - ``challsrc``: where is the challenge coming from? Must be one of the objects :data:`SOURCE_STDOUT` or :data:`SOURCE_STDERR`; - ``challenge``: text of challenge - ``response``: text of response (send to the subcommand's ``stdin``). Example (modified from :class:`CorruptedZipReader`): .. code-block:: python from cardinal_pythonlib.subproc import * SOURCE_FILENAME = "corrupt.zip" TMP_DIR = "/tmp" OUTPUT_FILENAME = "rescued.zip" cmdargs = [ "zip", # Linux zip tool "-FF", # or "--fixfix": "fix very broken things" SOURCE_FILENAME, # input file "--temp-path", TMP_DIR, # temporary storage path "--out", OUTPUT_FILENAME # output file ] # We would like to be able to say "y" automatically to # "Is this a single-disk archive? (y/n):" # The source code (api.c, zip.c, zipfile.c), from # ftp://ftp.info-zip.org/pub/infozip/src/ , suggests that "-q" # should do this (internally "-q" sets "noisy = 0") - but in # practice it doesn't work. This is a critical switch. # Therefore we will do something very ugly, and send raw text via # stdin. ZIP_PROMPTS_RESPONSES = [ (SOURCE_STDOUT, "Is this a single-disk archive? 
(y/n): ", "y\n"), (SOURCE_STDOUT, " or ENTER (try reading this split again): ", "q\n"), (SOURCE_STDERR, "zip: malloc.c:2394: sysmalloc: Assertion `(old_top == initial_top (av) " "&& old_size == 0) || ((unsigned long) (old_size) >= MINSIZE && " "prev_inuse (old_top) && ((unsigned long) old_end & (pagesize - 1)) " "== 0)' failed.", TERMINATE_SUBPROCESS), ] ZIP_STDOUT_TERMINATORS = ["\n", "): "] mimic_user_input(cmdargs, source_challenge_response=ZIP_PROMPTS_RESPONSES, line_terminators=ZIP_STDOUT_TERMINATORS, print_stdout=show_zip_output, print_stdin=show_zip_output) """ # noqa line_terminators = line_terminators or ["\n"] # type: List[str] stdin_encoding = stdin_encoding or sys.getdefaultencoding() stdout_encoding = stdout_encoding or sys.getdefaultencoding() # Launch the command p = Popen(args, stdin=PIPE, stdout=PIPE, stderr=PIPE, bufsize=0) # Launch the asynchronous readers of stdout and stderr stdout_queue = Queue() # noinspection PyTypeChecker stdout_reader = AsynchronousFileReader( fd=p.stdout, queue=stdout_queue, encoding=stdout_encoding, line_terminators=line_terminators, cmdargs=args, suppress_decoding_errors=suppress_decoding_errors ) stdout_reader.start() stderr_queue = Queue() # noinspection PyTypeChecker stderr_reader = AsynchronousFileReader( fd=p.stderr, queue=stderr_queue, encoding=stdout_encoding, # same as stdout line_terminators=line_terminators, cmdargs=args, suppress_decoding_errors=suppress_decoding_errors ) stderr_reader.start() while not stdout_reader.eof() or not stderr_reader.eof(): lines_with_source = [] # type: List[Tuple[SubprocSource, str]] while not stdout_queue.empty(): lines_with_source.append((SOURCE_STDOUT, stdout_queue.get())) while not stderr_queue.empty(): lines_with_source.append((SOURCE_STDERR, stderr_queue.get())) for src, line in lines_with_source: if src is SOURCE_STDOUT and print_stdout: print(line, end="") # terminator already in line if src is SOURCE_STDERR and print_stderr: print(line, end="") # terminator already in line for challsrc, challenge, response in source_challenge_response: # log.critical("challsrc={!r}", challsrc) # log.critical("challenge={!r}", challenge) # log.critical("line={!r}", line) # log.critical("response={!r}", response) if challsrc != src: continue if challenge in line: if response is TERMINATE_SUBPROCESS: log.warning("Terminating subprocess {!r} because input " "{!r} received", args, challenge) p.kill() return else: p.stdin.write(response.encode(stdin_encoding)) p.stdin.flush() if print_stdin: print(response, end="") # Sleep a bit before asking the readers again. sleep(sleep_time_s) stdout_reader.join() stderr_reader.join() p.stdout.close() p.stderr.close()
[ "def", "mimic_user_input", "(", "args", ":", "List", "[", "str", "]", ",", "source_challenge_response", ":", "List", "[", "Tuple", "[", "SubprocSource", ",", "str", ",", "Union", "[", "str", ",", "SubprocCommand", "]", "]", "]", ",", "line_terminators", ":", "List", "[", "str", "]", "=", "None", ",", "print_stdout", ":", "bool", "=", "False", ",", "print_stderr", ":", "bool", "=", "False", ",", "print_stdin", ":", "bool", "=", "False", ",", "stdin_encoding", ":", "str", "=", "None", ",", "stdout_encoding", ":", "str", "=", "None", ",", "suppress_decoding_errors", ":", "bool", "=", "True", ",", "sleep_time_s", ":", "float", "=", "0.1", ")", "->", "None", ":", "# noqa", "line_terminators", "=", "line_terminators", "or", "[", "\"\\n\"", "]", "# type: List[str]", "stdin_encoding", "=", "stdin_encoding", "or", "sys", ".", "getdefaultencoding", "(", ")", "stdout_encoding", "=", "stdout_encoding", "or", "sys", ".", "getdefaultencoding", "(", ")", "# Launch the command", "p", "=", "Popen", "(", "args", ",", "stdin", "=", "PIPE", ",", "stdout", "=", "PIPE", ",", "stderr", "=", "PIPE", ",", "bufsize", "=", "0", ")", "# Launch the asynchronous readers of stdout and stderr", "stdout_queue", "=", "Queue", "(", ")", "# noinspection PyTypeChecker", "stdout_reader", "=", "AsynchronousFileReader", "(", "fd", "=", "p", ".", "stdout", ",", "queue", "=", "stdout_queue", ",", "encoding", "=", "stdout_encoding", ",", "line_terminators", "=", "line_terminators", ",", "cmdargs", "=", "args", ",", "suppress_decoding_errors", "=", "suppress_decoding_errors", ")", "stdout_reader", ".", "start", "(", ")", "stderr_queue", "=", "Queue", "(", ")", "# noinspection PyTypeChecker", "stderr_reader", "=", "AsynchronousFileReader", "(", "fd", "=", "p", ".", "stderr", ",", "queue", "=", "stderr_queue", ",", "encoding", "=", "stdout_encoding", ",", "# same as stdout", "line_terminators", "=", "line_terminators", ",", "cmdargs", "=", "args", ",", "suppress_decoding_errors", "=", "suppress_decoding_errors", ")", "stderr_reader", ".", "start", "(", ")", "while", "not", "stdout_reader", ".", "eof", "(", ")", "or", "not", "stderr_reader", ".", "eof", "(", ")", ":", "lines_with_source", "=", "[", "]", "# type: List[Tuple[SubprocSource, str]]", "while", "not", "stdout_queue", ".", "empty", "(", ")", ":", "lines_with_source", ".", "append", "(", "(", "SOURCE_STDOUT", ",", "stdout_queue", ".", "get", "(", ")", ")", ")", "while", "not", "stderr_queue", ".", "empty", "(", ")", ":", "lines_with_source", ".", "append", "(", "(", "SOURCE_STDERR", ",", "stderr_queue", ".", "get", "(", ")", ")", ")", "for", "src", ",", "line", "in", "lines_with_source", ":", "if", "src", "is", "SOURCE_STDOUT", "and", "print_stdout", ":", "print", "(", "line", ",", "end", "=", "\"\"", ")", "# terminator already in line", "if", "src", "is", "SOURCE_STDERR", "and", "print_stderr", ":", "print", "(", "line", ",", "end", "=", "\"\"", ")", "# terminator already in line", "for", "challsrc", ",", "challenge", ",", "response", "in", "source_challenge_response", ":", "# log.critical(\"challsrc={!r}\", challsrc)", "# log.critical(\"challenge={!r}\", challenge)", "# log.critical(\"line={!r}\", line)", "# log.critical(\"response={!r}\", response)", "if", "challsrc", "!=", "src", ":", "continue", "if", "challenge", "in", "line", ":", "if", "response", "is", "TERMINATE_SUBPROCESS", ":", "log", ".", "warning", "(", "\"Terminating subprocess {!r} because input \"", "\"{!r} received\"", ",", "args", ",", "challenge", ")", "p", ".", "kill", "(", ")", "return", "else", ":", 
"p", ".", "stdin", ".", "write", "(", "response", ".", "encode", "(", "stdin_encoding", ")", ")", "p", ".", "stdin", ".", "flush", "(", ")", "if", "print_stdin", ":", "print", "(", "response", ",", "end", "=", "\"\"", ")", "# Sleep a bit before asking the readers again.", "sleep", "(", "sleep_time_s", ")", "stdout_reader", ".", "join", "(", ")", "stderr_reader", ".", "join", "(", ")", "p", ".", "stdout", ".", "close", "(", ")", "p", ".", "stderr", ".", "close", "(", ")" ]
r""" Run an external command. Pretend to be a human by sending text to the subcommand (responses) when the external command sends us triggers (challenges). This is a bit nasty. Args: args: command-line arguments source_challenge_response: list of tuples of the format ``(challsrc, challenge, response)``; see below line_terminators: valid line terminators print_stdout: print_stderr: print_stdin: stdin_encoding: stdout_encoding: suppress_decoding_errors: trap any ``UnicodeDecodeError``? sleep_time_s: The ``(challsrc, challenge, response)`` tuples have this meaning: - ``challsrc``: where is the challenge coming from? Must be one of the objects :data:`SOURCE_STDOUT` or :data:`SOURCE_STDERR`; - ``challenge``: text of challenge - ``response``: text of response (send to the subcommand's ``stdin``). Example (modified from :class:`CorruptedZipReader`): .. code-block:: python from cardinal_pythonlib.subproc import * SOURCE_FILENAME = "corrupt.zip" TMP_DIR = "/tmp" OUTPUT_FILENAME = "rescued.zip" cmdargs = [ "zip", # Linux zip tool "-FF", # or "--fixfix": "fix very broken things" SOURCE_FILENAME, # input file "--temp-path", TMP_DIR, # temporary storage path "--out", OUTPUT_FILENAME # output file ] # We would like to be able to say "y" automatically to # "Is this a single-disk archive? (y/n):" # The source code (api.c, zip.c, zipfile.c), from # ftp://ftp.info-zip.org/pub/infozip/src/ , suggests that "-q" # should do this (internally "-q" sets "noisy = 0") - but in # practice it doesn't work. This is a critical switch. # Therefore we will do something very ugly, and send raw text via # stdin. ZIP_PROMPTS_RESPONSES = [ (SOURCE_STDOUT, "Is this a single-disk archive? (y/n): ", "y\n"), (SOURCE_STDOUT, " or ENTER (try reading this split again): ", "q\n"), (SOURCE_STDERR, "zip: malloc.c:2394: sysmalloc: Assertion `(old_top == initial_top (av) " "&& old_size == 0) || ((unsigned long) (old_size) >= MINSIZE && " "prev_inuse (old_top) && ((unsigned long) old_end & (pagesize - 1)) " "== 0)' failed.", TERMINATE_SUBPROCESS), ] ZIP_STDOUT_TERMINATORS = ["\n", "): "] mimic_user_input(cmdargs, source_challenge_response=ZIP_PROMPTS_RESPONSES, line_terminators=ZIP_STDOUT_TERMINATORS, print_stdout=show_zip_output, print_stdin=show_zip_output)
[ "r", "Run", "an", "external", "command", ".", "Pretend", "to", "be", "a", "human", "by", "sending", "text", "to", "the", "subcommand", "(", "responses", ")", "when", "the", "external", "command", "sends", "us", "triggers", "(", "challenges", ")", ".", "This", "is", "a", "bit", "nasty", ".", "Args", ":", "args", ":", "command", "-", "line", "arguments", "source_challenge_response", ":", "list", "of", "tuples", "of", "the", "format", "(", "challsrc", "challenge", "response", ")", ";", "see", "below", "line_terminators", ":", "valid", "line", "terminators", "print_stdout", ":", "print_stderr", ":", "print_stdin", ":", "stdin_encoding", ":", "stdout_encoding", ":", "suppress_decoding_errors", ":", "trap", "any", "UnicodeDecodeError", "?", "sleep_time_s", ":", "The", "(", "challsrc", "challenge", "response", ")", "tuples", "have", "this", "meaning", ":", "-", "challsrc", ":", "where", "is", "the", "challenge", "coming", "from?", "Must", "be", "one", "of", "the", "objects", ":", "data", ":", "SOURCE_STDOUT", "or", ":", "data", ":", "SOURCE_STDERR", ";", "-", "challenge", ":", "text", "of", "challenge", "-", "response", ":", "text", "of", "response", "(", "send", "to", "the", "subcommand", "s", "stdin", ")", ".", "Example", "(", "modified", "from", ":", "class", ":", "CorruptedZipReader", ")", ":" ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/subproc.py#L301-L454
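As a complement to the zip-repair example embedded in the record above, here is a minimal, self-contained sketch of driving mimic_user_input against a small Python child process. It is illustrative only and not part of the dataset record: the child script, prompt text and terminator list are invented for the example; only mimic_user_input, SOURCE_STDOUT and the parameter names are taken from the documented API shown above.

import sys

from cardinal_pythonlib.subproc import SOURCE_STDOUT, mimic_user_input

# Hypothetical child process: prints a prompt (no trailing newline) and
# echoes whatever reply it receives on stdin.
CHILD_CODE = (
    "print('Proceed? (y/n): ', end='', flush=True); "
    "reply = input(); "
    "print('Got:', reply)"
)

mimic_user_input(
    [sys.executable, "-c", CHILD_CODE],
    source_challenge_response=[
        # When this prompt appears on the child's stdout, answer "y".
        (SOURCE_STDOUT, "Proceed? (y/n): ", "y\n"),
    ],
    line_terminators=["\n", ": "],  # treat the prompt itself as a complete "line"
    print_stdout=True,
    print_stdin=True,
)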
RudolfCardinal/pythonlib
cardinal_pythonlib/subproc.py
AsynchronousFileReader.run
def run(self) -> None: """ Read lines and put them on the queue. """ fd = self._fd encoding = self._encoding line_terminators = self._line_terminators queue = self._queue buf = "" while True: try: c = fd.read(1).decode(encoding) except UnicodeDecodeError as e: log.warning("Decoding error from {!r}: {!r}", self._cmdargs, e) if self._suppress_decoding_errors: continue else: raise # log.critical("c={!r}, returncode={!r}", c, p.returncode) if not c: # Subprocess has finished return buf += c # log.critical("buf={!r}", buf) # noinspection PyTypeChecker for t in line_terminators: try: t_idx = buf.index(t) + len(t) # include terminator fragment = buf[:t_idx] buf = buf[t_idx:] queue.put(fragment) except ValueError: pass
python
def run(self) -> None: """ Read lines and put them on the queue. """ fd = self._fd encoding = self._encoding line_terminators = self._line_terminators queue = self._queue buf = "" while True: try: c = fd.read(1).decode(encoding) except UnicodeDecodeError as e: log.warning("Decoding error from {!r}: {!r}", self._cmdargs, e) if self._suppress_decoding_errors: continue else: raise # log.critical("c={!r}, returncode={!r}", c, p.returncode) if not c: # Subprocess has finished return buf += c # log.critical("buf={!r}", buf) # noinspection PyTypeChecker for t in line_terminators: try: t_idx = buf.index(t) + len(t) # include terminator fragment = buf[:t_idx] buf = buf[t_idx:] queue.put(fragment) except ValueError: pass
[ "def", "run", "(", "self", ")", "->", "None", ":", "fd", "=", "self", ".", "_fd", "encoding", "=", "self", ".", "_encoding", "line_terminators", "=", "self", ".", "_line_terminators", "queue", "=", "self", ".", "_queue", "buf", "=", "\"\"", "while", "True", ":", "try", ":", "c", "=", "fd", ".", "read", "(", "1", ")", ".", "decode", "(", "encoding", ")", "except", "UnicodeDecodeError", "as", "e", ":", "log", ".", "warning", "(", "\"Decoding error from {!r}: {!r}\"", ",", "self", ".", "_cmdargs", ",", "e", ")", "if", "self", ".", "_suppress_decoding_errors", ":", "continue", "else", ":", "raise", "# log.critical(\"c={!r}, returncode={!r}\", c, p.returncode)", "if", "not", "c", ":", "# Subprocess has finished", "return", "buf", "+=", "c", "# log.critical(\"buf={!r}\", buf)", "# noinspection PyTypeChecker", "for", "t", "in", "line_terminators", ":", "try", ":", "t_idx", "=", "buf", ".", "index", "(", "t", ")", "+", "len", "(", "t", ")", "# include terminator", "fragment", "=", "buf", "[", ":", "t_idx", "]", "buf", "=", "buf", "[", "t_idx", ":", "]", "queue", ".", "put", "(", "fragment", ")", "except", "ValueError", ":", "pass" ]
Read lines and put them on the queue.
[ "Read", "lines", "and", "put", "them", "on", "the", "queue", "." ]
train
https://github.com/RudolfCardinal/pythonlib/blob/0b84cb35f38bd7d8723958dae51b480a829b7227/cardinal_pythonlib/subproc.py#L260-L292
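The run() method in the record above is the reader half of a common pattern: a background thread pumps a subprocess pipe into a Queue, one terminator-delimited fragment at a time, so the main thread can poll the queue without ever blocking on the pipe. A generic, library-independent sketch of that pattern (not the cardinal_pythonlib class itself) looks roughly like this:

import sys
from queue import Empty, Queue
from subprocess import PIPE, Popen
from threading import Thread


def pump(fd, queue, encoding="utf-8", terminators=("\n",)):
    """Read bytes one at a time and enqueue terminator-delimited fragments."""
    buf = ""
    while True:
        c = fd.read(1).decode(encoding, errors="replace")
        if not c:  # EOF: the subprocess closed its end of the pipe
            if buf:
                queue.put(buf)  # flush any unterminated tail
            return
        buf += c
        for t in terminators:
            idx = buf.find(t)
            if idx != -1:
                end = idx + len(t)  # keep the terminator in the fragment
                queue.put(buf[:end])
                buf = buf[end:]


p = Popen([sys.executable, "-c", "print('hello'); print('world')"],
          stdout=PIPE, bufsize=0)
q = Queue()
reader = Thread(target=pump, args=(p.stdout, q), daemon=True)
reader.start()
reader.join()  # in real use the main thread would poll q while doing other work
while True:
    try:
        print(repr(q.get_nowait()))
    except Empty:
        break
p.stdout.close()

One difference worth noting: this sketch flushes any unterminated tail at EOF, whereas the run() method above simply returns, so output that never receives a terminator stays in its local buffer.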
The-Politico/politico-civic-geography
geography/management/commands/bootstrap/fixtures/_nation.py
NationFixtures.create_nation_fixtures
def create_nation_fixtures(self): """ Create national US and State Map """ SHP_SLUG = "cb_{}_us_state_500k".format(self.YEAR) DOWNLOAD_PATH = os.path.join(self.DOWNLOAD_DIRECTORY, SHP_SLUG) shape = shapefile.Reader( os.path.join(DOWNLOAD_PATH, "{}.shp".format(SHP_SLUG)) ) fields = shape.fields[1:] field_names = [f[0] for f in fields] features = [] for shp in shape.shapeRecords(): state = dict(zip(field_names, shp.record)) geodata = { "type": "Feature", "geometry": shp.shape.__geo_interface__, "properties": { "state": state["STATEFP"], "name": state["NAME"], }, } features.append(geodata) Geometry.objects.update_or_create( division=self.NATION, subdivision_level=self.STATE_LEVEL, simplification=self.THRESHOLDS["nation"], source=os.path.join( self.SHP_SOURCE_BASE.format(self.YEAR), SHP_SLUG ) + ".zip", series=self.YEAR, defaults={ "topojson": self.toposimplify( geojson.FeatureCollection(features), self.THRESHOLDS["nation"], ) }, ) geo, created = Geometry.objects.update_or_create( division=self.NATION, subdivision_level=self.COUNTY_LEVEL, simplification=self.THRESHOLDS["nation"], source=os.path.join( self.SHP_SOURCE_BASE.format(self.YEAR), SHP_SLUG ) + ".zip", series=self.YEAR, defaults={"topojson": self.get_state_county_shps("00")}, ) tqdm.write("Nation\n") tqdm.write( self.TQDM_PREFIX + "> FIPS {} @ ~{}kb ".format( "00", round(len(json.dumps(geo.topojson)) / 1000) ) ) tqdm.write(self.style.SUCCESS("Done.\n"))
python
def create_nation_fixtures(self): """ Create national US and State Map """ SHP_SLUG = "cb_{}_us_state_500k".format(self.YEAR) DOWNLOAD_PATH = os.path.join(self.DOWNLOAD_DIRECTORY, SHP_SLUG) shape = shapefile.Reader( os.path.join(DOWNLOAD_PATH, "{}.shp".format(SHP_SLUG)) ) fields = shape.fields[1:] field_names = [f[0] for f in fields] features = [] for shp in shape.shapeRecords(): state = dict(zip(field_names, shp.record)) geodata = { "type": "Feature", "geometry": shp.shape.__geo_interface__, "properties": { "state": state["STATEFP"], "name": state["NAME"], }, } features.append(geodata) Geometry.objects.update_or_create( division=self.NATION, subdivision_level=self.STATE_LEVEL, simplification=self.THRESHOLDS["nation"], source=os.path.join( self.SHP_SOURCE_BASE.format(self.YEAR), SHP_SLUG ) + ".zip", series=self.YEAR, defaults={ "topojson": self.toposimplify( geojson.FeatureCollection(features), self.THRESHOLDS["nation"], ) }, ) geo, created = Geometry.objects.update_or_create( division=self.NATION, subdivision_level=self.COUNTY_LEVEL, simplification=self.THRESHOLDS["nation"], source=os.path.join( self.SHP_SOURCE_BASE.format(self.YEAR), SHP_SLUG ) + ".zip", series=self.YEAR, defaults={"topojson": self.get_state_county_shps("00")}, ) tqdm.write("Nation\n") tqdm.write( self.TQDM_PREFIX + "> FIPS {} @ ~{}kb ".format( "00", round(len(json.dumps(geo.topojson)) / 1000) ) ) tqdm.write(self.style.SUCCESS("Done.\n"))
[ "def", "create_nation_fixtures", "(", "self", ")", ":", "SHP_SLUG", "=", "\"cb_{}_us_state_500k\"", ".", "format", "(", "self", ".", "YEAR", ")", "DOWNLOAD_PATH", "=", "os", ".", "path", ".", "join", "(", "self", ".", "DOWNLOAD_DIRECTORY", ",", "SHP_SLUG", ")", "shape", "=", "shapefile", ".", "Reader", "(", "os", ".", "path", ".", "join", "(", "DOWNLOAD_PATH", ",", "\"{}.shp\"", ".", "format", "(", "SHP_SLUG", ")", ")", ")", "fields", "=", "shape", ".", "fields", "[", "1", ":", "]", "field_names", "=", "[", "f", "[", "0", "]", "for", "f", "in", "fields", "]", "features", "=", "[", "]", "for", "shp", "in", "shape", ".", "shapeRecords", "(", ")", ":", "state", "=", "dict", "(", "zip", "(", "field_names", ",", "shp", ".", "record", ")", ")", "geodata", "=", "{", "\"type\"", ":", "\"Feature\"", ",", "\"geometry\"", ":", "shp", ".", "shape", ".", "__geo_interface__", ",", "\"properties\"", ":", "{", "\"state\"", ":", "state", "[", "\"STATEFP\"", "]", ",", "\"name\"", ":", "state", "[", "\"NAME\"", "]", ",", "}", ",", "}", "features", ".", "append", "(", "geodata", ")", "Geometry", ".", "objects", ".", "update_or_create", "(", "division", "=", "self", ".", "NATION", ",", "subdivision_level", "=", "self", ".", "STATE_LEVEL", ",", "simplification", "=", "self", ".", "THRESHOLDS", "[", "\"nation\"", "]", ",", "source", "=", "os", ".", "path", ".", "join", "(", "self", ".", "SHP_SOURCE_BASE", ".", "format", "(", "self", ".", "YEAR", ")", ",", "SHP_SLUG", ")", "+", "\".zip\"", ",", "series", "=", "self", ".", "YEAR", ",", "defaults", "=", "{", "\"topojson\"", ":", "self", ".", "toposimplify", "(", "geojson", ".", "FeatureCollection", "(", "features", ")", ",", "self", ".", "THRESHOLDS", "[", "\"nation\"", "]", ",", ")", "}", ",", ")", "geo", ",", "created", "=", "Geometry", ".", "objects", ".", "update_or_create", "(", "division", "=", "self", ".", "NATION", ",", "subdivision_level", "=", "self", ".", "COUNTY_LEVEL", ",", "simplification", "=", "self", ".", "THRESHOLDS", "[", "\"nation\"", "]", ",", "source", "=", "os", ".", "path", ".", "join", "(", "self", ".", "SHP_SOURCE_BASE", ".", "format", "(", "self", ".", "YEAR", ")", ",", "SHP_SLUG", ")", "+", "\".zip\"", ",", "series", "=", "self", ".", "YEAR", ",", "defaults", "=", "{", "\"topojson\"", ":", "self", ".", "get_state_county_shps", "(", "\"00\"", ")", "}", ",", ")", "tqdm", ".", "write", "(", "\"Nation\\n\"", ")", "tqdm", ".", "write", "(", "self", ".", "TQDM_PREFIX", "+", "\"> FIPS {} @ ~{}kb \"", ".", "format", "(", "\"00\"", ",", "round", "(", "len", "(", "json", ".", "dumps", "(", "geo", ".", "topojson", ")", ")", "/", "1000", ")", ")", ")", "tqdm", ".", "write", "(", "self", ".", "style", ".", "SUCCESS", "(", "\"Done.\\n\"", ")", ")" ]
Create national US and State Map
[ "Create", "national", "US", "and", "State", "Map" ]
train
https://github.com/The-Politico/politico-civic-geography/blob/032b3ee773b50b65cfe672f230dda772df0f89e0/geography/management/commands/bootstrap/fixtures/_nation.py#L12-L71
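For readers unfamiliar with pyshp, the core of create_nation_fixtures above is the shapefile-to-GeoJSON conversion; the Django Geometry bookkeeping can be set aside. A stripped-down sketch of just that step follows; the file path is hypothetical, and any shapefile carrying STATEFP and NAME attributes would behave the same way.

import geojson
import shapefile  # pyshp

SHP_PATH = "cb_2017_us_state_500k/cb_2017_us_state_500k.shp"  # hypothetical path

reader = shapefile.Reader(SHP_PATH)
field_names = [f[0] for f in reader.fields[1:]]  # skip pyshp's DeletionFlag field

features = []
for shape_rec in reader.shapeRecords():
    attrs = dict(zip(field_names, shape_rec.record))
    features.append({
        "type": "Feature",
        "geometry": shape_rec.shape.__geo_interface__,  # GeoJSON-style mapping
        "properties": {"state": attrs["STATEFP"], "name": attrs["NAME"]},
    })

collection = geojson.FeatureCollection(features)
print(len(collection["features"]), "state features")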