Dataset columns (as reported by the dataset viewer):

| column        | type    | lengths / range / classes                     |
|---------------|---------|-----------------------------------------------|
| repo_name     | string  | lengths 5 to 100                              |
| path          | string  | lengths 4 to 299                              |
| copies        | string  | lengths 1 to 5                                |
| size          | string  | lengths 4 to 7                                |
| content       | string  | lengths 666 to 1.03M                          |
| license       | string  | 15 classes                                    |
| hash          | int64   | -9,223,351,895,964,839,000 to 9,223,297,778B  |
| line_mean     | float64 | 3.17 to 100                                   |
| line_max      | int64   | 7 to 1k                                       |
| alpha_frac    | float64 | 0.25 to 0.98                                  |
| autogenerated | bool    | 1 class                                       |
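The records below follow this schema directly. As a minimal sketch of how such a dump is typically consumed with the Hugging Face `datasets` library (the dataset path `"user/github-python"` is a hypothetical placeholder, not the actual source of this dump):

```python
# A minimal sketch, assuming the dump comes from a Hugging Face dataset
# with the columns listed above. "user/github-python" is a hypothetical
# placeholder for the real dataset path.
from datasets import load_dataset

ds = load_dataset("user/github-python", split="train", streaming=True)

for record in ds.take(3):
    # Each record carries the file itself plus per-file statistics.
    print(record["repo_name"], record["path"], record["license"])
    print("  size:", record["size"], "line_max:", record["line_max"],
          "alpha_frac:", record["alpha_frac"])
    # "content" holds the raw source text; skip machine-generated files.
    if not record["autogenerated"]:
        source = record["content"]
```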
repo_name: testmana2/test
path: ThirdParty/Pygments/pygments/lexers/erlang.py
copies: 72
size: 18195
content:
# -*- coding: utf-8 -*- """ pygments.lexers.erlang ~~~~~~~~~~~~~~~~~~~~~~ Lexers for Erlang. :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ import re from pygments.lexer import Lexer, RegexLexer, bygroups, words, do_insertions, \ include, default from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ Number, Punctuation, Generic __all__ = ['ErlangLexer', 'ErlangShellLexer', 'ElixirConsoleLexer', 'ElixirLexer'] line_re = re.compile('.*?\n') class ErlangLexer(RegexLexer): """ For the Erlang functional programming language. Blame Jeremy Thurgood (http://jerith.za.net/). .. versionadded:: 0.9 """ name = 'Erlang' aliases = ['erlang'] filenames = ['*.erl', '*.hrl', '*.es', '*.escript'] mimetypes = ['text/x-erlang'] keywords = ( 'after', 'begin', 'case', 'catch', 'cond', 'end', 'fun', 'if', 'let', 'of', 'query', 'receive', 'try', 'when', ) builtins = ( # See erlang(3) man page 'abs', 'append_element', 'apply', 'atom_to_list', 'binary_to_list', 'bitstring_to_list', 'binary_to_term', 'bit_size', 'bump_reductions', 'byte_size', 'cancel_timer', 'check_process_code', 'delete_module', 'demonitor', 'disconnect_node', 'display', 'element', 'erase', 'exit', 'float', 'float_to_list', 'fun_info', 'fun_to_list', 'function_exported', 'garbage_collect', 'get', 'get_keys', 'group_leader', 'hash', 'hd', 'integer_to_list', 'iolist_to_binary', 'iolist_size', 'is_atom', 'is_binary', 'is_bitstring', 'is_boolean', 'is_builtin', 'is_float', 'is_function', 'is_integer', 'is_list', 'is_number', 'is_pid', 'is_port', 'is_process_alive', 'is_record', 'is_reference', 'is_tuple', 'length', 'link', 'list_to_atom', 'list_to_binary', 'list_to_bitstring', 'list_to_existing_atom', 'list_to_float', 'list_to_integer', 'list_to_pid', 'list_to_tuple', 'load_module', 'localtime_to_universaltime', 'make_tuple', 'md5', 'md5_final', 'md5_update', 'memory', 'module_loaded', 'monitor', 'monitor_node', 'node', 'nodes', 'open_port', 'phash', 'phash2', 'pid_to_list', 'port_close', 'port_command', 'port_connect', 'port_control', 'port_call', 'port_info', 'port_to_list', 'process_display', 'process_flag', 'process_info', 'purge_module', 'put', 'read_timer', 'ref_to_list', 'register', 'resume_process', 'round', 'send', 'send_after', 'send_nosuspend', 'set_cookie', 'setelement', 'size', 'spawn', 'spawn_link', 'spawn_monitor', 'spawn_opt', 'split_binary', 'start_timer', 'statistics', 'suspend_process', 'system_flag', 'system_info', 'system_monitor', 'system_profile', 'term_to_binary', 'tl', 'trace', 'trace_delivered', 'trace_info', 'trace_pattern', 'trunc', 'tuple_size', 'tuple_to_list', 'universaltime_to_localtime', 'unlink', 'unregister', 'whereis' ) operators = r'(\+\+?|--?|\*|/|<|>|/=|=:=|=/=|=<|>=|==?|<-|!|\?)' word_operators = ( 'and', 'andalso', 'band', 'bnot', 'bor', 'bsl', 'bsr', 'bxor', 'div', 'not', 'or', 'orelse', 'rem', 'xor' ) atom_re = r"(?:[a-z]\w*|'[^\n']*[^\\]')" variable_re = r'(?:[A-Z_]\w*)' escape_re = r'(?:\\(?:[bdefnrstv\'"\\/]|[0-7][0-7]?[0-7]?|\^[a-zA-Z]))' macro_re = r'(?:'+variable_re+r'|'+atom_re+r')' base_re = r'(?:[2-9]|[12][0-9]|3[0-6])' tokens = { 'root': [ (r'\s+', Text), (r'%.*\n', Comment), (words(keywords, suffix=r'\b'), Keyword), (words(builtins, suffix=r'\b'), Name.Builtin), (words(word_operators, suffix=r'\b'), Operator.Word), (r'^-', Punctuation, 'directive'), (operators, Operator), (r'"', String, 'string'), (r'<<', Name.Label), (r'>>', Name.Label), ('(' + atom_re + ')(:)', bygroups(Name.Namespace, Punctuation)), 
('(?:^|(?<=:))(' + atom_re + r')(\s*)(\()', bygroups(Name.Function, Text, Punctuation)), (r'[+-]?' + base_re + r'#[0-9a-zA-Z]+', Number.Integer), (r'[+-]?\d+', Number.Integer), (r'[+-]?\d+.\d+', Number.Float), (r'[]\[:_@\".{}()|;,]', Punctuation), (variable_re, Name.Variable), (atom_re, Name), (r'\?'+macro_re, Name.Constant), (r'\$(?:'+escape_re+r'|\\[ %]|[^\\])', String.Char), (r'#'+atom_re+r'(:?\.'+atom_re+r')?', Name.Label), ], 'string': [ (escape_re, String.Escape), (r'"', String, '#pop'), (r'~[0-9.*]*[~#+bBcdefginpPswWxX]', String.Interpol), (r'[^"\\~]+', String), (r'~', String), ], 'directive': [ (r'(define)(\s*)(\()('+macro_re+r')', bygroups(Name.Entity, Text, Punctuation, Name.Constant), '#pop'), (r'(record)(\s*)(\()('+macro_re+r')', bygroups(Name.Entity, Text, Punctuation, Name.Label), '#pop'), (atom_re, Name.Entity, '#pop'), ], } class ErlangShellLexer(Lexer): """ Shell sessions in erl (for Erlang code). .. versionadded:: 1.1 """ name = 'Erlang erl session' aliases = ['erl'] filenames = ['*.erl-sh'] mimetypes = ['text/x-erl-shellsession'] _prompt_re = re.compile(r'\d+>(?=\s|\Z)') def get_tokens_unprocessed(self, text): erlexer = ErlangLexer(**self.options) curcode = '' insertions = [] for match in line_re.finditer(text): line = match.group() m = self._prompt_re.match(line) if m is not None: end = m.end() insertions.append((len(curcode), [(0, Generic.Prompt, line[:end])])) curcode += line[end:] else: if curcode: for item in do_insertions(insertions, erlexer.get_tokens_unprocessed(curcode)): yield item curcode = '' insertions = [] if line.startswith('*'): yield match.start(), Generic.Traceback, line else: yield match.start(), Generic.Output, line if curcode: for item in do_insertions(insertions, erlexer.get_tokens_unprocessed(curcode)): yield item def gen_elixir_string_rules(name, symbol, token): states = {} states['string_' + name] = [ (r'[^#%s\\]+' % (symbol,), token), include('escapes'), (r'\\.', token), (r'(%s)' % (symbol,), bygroups(token), "#pop"), include('interpol') ] return states def gen_elixir_sigstr_rules(term, token, interpol=True): if interpol: return [ (r'[^#%s\\]+' % (term,), token), include('escapes'), (r'\\.', token), (r'%s[a-zA-Z]*' % (term,), token, '#pop'), include('interpol') ] else: return [ (r'[^%s\\]+' % (term,), token), (r'\\.', token), (r'%s[a-zA-Z]*' % (term,), token, '#pop'), ] class ElixirLexer(RegexLexer): """ For the `Elixir language <http://elixir-lang.org>`_. .. 
versionadded:: 1.5 """ name = 'Elixir' aliases = ['elixir', 'ex', 'exs'] filenames = ['*.ex', '*.exs'] mimetypes = ['text/x-elixir'] KEYWORD = ('fn', 'do', 'end', 'after', 'else', 'rescue', 'catch') KEYWORD_OPERATOR = ('not', 'and', 'or', 'when', 'in') BUILTIN = ( 'case', 'cond', 'for', 'if', 'unless', 'try', 'receive', 'raise', 'quote', 'unquote', 'unquote_splicing', 'throw', 'super' ) BUILTIN_DECLARATION = ( 'def', 'defp', 'defmodule', 'defprotocol', 'defmacro', 'defmacrop', 'defdelegate', 'defexception', 'defstruct', 'defimpl', 'defcallback' ) BUILTIN_NAMESPACE = ('import', 'require', 'use', 'alias') CONSTANT = ('nil', 'true', 'false') PSEUDO_VAR = ('_', '__MODULE__', '__DIR__', '__ENV__', '__CALLER__') OPERATORS3 = ( '<<<', '>>>', '|||', '&&&', '^^^', '~~~', '===', '!==', '~>>', '<~>', '|~>', '<|>', ) OPERATORS2 = ( '==', '!=', '<=', '>=', '&&', '||', '<>', '++', '--', '|>', '=~', '->', '<-', '|', '.', '=', '~>', '<~', ) OPERATORS1 = ('<', '>', '+', '-', '*', '/', '!', '^', '&') PUNCTUATION = ( '\\\\', '<<', '>>', '=>', '(', ')', ':', ';', ',', '[', ']' ) def get_tokens_unprocessed(self, text): for index, token, value in RegexLexer.get_tokens_unprocessed(self, text): if token is Name: if value in self.KEYWORD: yield index, Keyword, value elif value in self.KEYWORD_OPERATOR: yield index, Operator.Word, value elif value in self.BUILTIN: yield index, Keyword, value elif value in self.BUILTIN_DECLARATION: yield index, Keyword.Declaration, value elif value in self.BUILTIN_NAMESPACE: yield index, Keyword.Namespace, value elif value in self.CONSTANT: yield index, Name.Constant, value elif value in self.PSEUDO_VAR: yield index, Name.Builtin.Pseudo, value else: yield index, token, value else: yield index, token, value def gen_elixir_sigil_rules(): # all valid sigil terminators (excluding heredocs) terminators = [ (r'\{', r'\}', 'cb'), (r'\[', r'\]', 'sb'), (r'\(', r'\)', 'pa'), (r'<', r'>', 'ab'), (r'/', r'/', 'slas'), (r'\|', r'\|', 'pipe'), ('"', '"', 'quot'), ("'", "'", 'apos'), ] # heredocs have slightly different rules triquotes = [(r'"""', 'triquot'), (r"'''", 'triapos')] token = String.Other states = {'sigils': []} for term, name in triquotes: states['sigils'] += [ (r'(~[a-z])(%s)' % (term,), bygroups(token, String.Heredoc), (name + '-end', name + '-intp')), (r'(~[A-Z])(%s)' % (term,), bygroups(token, String.Heredoc), (name + '-end', name + '-no-intp')), ] states[name + '-end'] = [ (r'[a-zA-Z]+', token, '#pop'), default('#pop'), ] states[name + '-intp'] = [ (r'^\s*' + term, String.Heredoc, '#pop'), include('heredoc_interpol'), ] states[name + '-no-intp'] = [ (r'^\s*' + term, String.Heredoc, '#pop'), include('heredoc_no_interpol'), ] for lterm, rterm, name in terminators: states['sigils'] += [ (r'~[a-z]' + lterm, token, name + '-intp'), (r'~[A-Z]' + lterm, token, name + '-no-intp'), ] states[name + '-intp'] = gen_elixir_sigstr_rules(rterm, token) states[name + '-no-intp'] = \ gen_elixir_sigstr_rules(rterm, token, interpol=False) return states op3_re = "|".join(re.escape(s) for s in OPERATORS3) op2_re = "|".join(re.escape(s) for s in OPERATORS2) op1_re = "|".join(re.escape(s) for s in OPERATORS1) ops_re = r'(?:%s|%s|%s)' % (op3_re, op2_re, op1_re) punctuation_re = "|".join(re.escape(s) for s in PUNCTUATION) alnum = '\w' name_re = r'(?:\.\.\.|[a-z_]%s*[!?]?)' % alnum modname_re = r'[A-Z]%(alnum)s*(?:\.[A-Z]%(alnum)s*)*' % {'alnum': alnum} complex_name_re = r'(?:%s|%s|%s)' % (name_re, modname_re, ops_re) special_atom_re = r'(?:\.\.\.|<<>>|%\{\}|%|\{\})' long_hex_char_re = 
r'(\\x\{)([\da-fA-F]+)(\})' hex_char_re = r'(\\x[\da-fA-F]{1,2})' escape_char_re = r'(\\[abdefnrstv])' tokens = { 'root': [ (r'\s+', Text), (r'#.*$', Comment.Single), # Various kinds of characters (r'(\?)' + long_hex_char_re, bygroups(String.Char, String.Escape, Number.Hex, String.Escape)), (r'(\?)' + hex_char_re, bygroups(String.Char, String.Escape)), (r'(\?)' + escape_char_re, bygroups(String.Char, String.Escape)), (r'\?\\?.', String.Char), # '::' has to go before atoms (r':::', String.Symbol), (r'::', Operator), # atoms (r':' + special_atom_re, String.Symbol), (r':' + complex_name_re, String.Symbol), (r':"', String.Symbol, 'string_double_atom'), (r":'", String.Symbol, 'string_single_atom'), # [keywords: ...] (r'(%s|%s)(:)(?=\s|\n)' % (special_atom_re, complex_name_re), bygroups(String.Symbol, Punctuation)), # @attributes (r'@' + name_re, Name.Attribute), # identifiers (name_re, Name), (r'(%%?)(%s)' % (modname_re,), bygroups(Punctuation, Name.Class)), # operators and punctuation (op3_re, Operator), (op2_re, Operator), (punctuation_re, Punctuation), (r'&\d', Name.Entity), # anon func arguments (op1_re, Operator), # numbers (r'0b[01]+', Number.Bin), (r'0o[0-7]+', Number.Oct), (r'0x[\da-fA-F]+', Number.Hex), (r'\d(_?\d)*\.\d(_?\d)*([eE][-+]?\d(_?\d)*)?', Number.Float), (r'\d(_?\d)*', Number.Integer), # strings and heredocs (r'"""\s*', String.Heredoc, 'heredoc_double'), (r"'''\s*$", String.Heredoc, 'heredoc_single'), (r'"', String.Double, 'string_double'), (r"'", String.Single, 'string_single'), include('sigils'), (r'%\{', Punctuation, 'map_key'), (r'\{', Punctuation, 'tuple'), ], 'heredoc_double': [ (r'^\s*"""', String.Heredoc, '#pop'), include('heredoc_interpol'), ], 'heredoc_single': [ (r"^\s*'''", String.Heredoc, '#pop'), include('heredoc_interpol'), ], 'heredoc_interpol': [ (r'[^#\\\n]+', String.Heredoc), include('escapes'), (r'\\.', String.Heredoc), (r'\n+', String.Heredoc), include('interpol'), ], 'heredoc_no_interpol': [ (r'[^\\\n]+', String.Heredoc), (r'\\.', String.Heredoc), (r'\n+', String.Heredoc), ], 'escapes': [ (long_hex_char_re, bygroups(String.Escape, Number.Hex, String.Escape)), (hex_char_re, String.Escape), (escape_char_re, String.Escape), ], 'interpol': [ (r'#\{', String.Interpol, 'interpol_string'), ], 'interpol_string': [ (r'\}', String.Interpol, "#pop"), include('root') ], 'map_key': [ include('root'), (r':', Punctuation, 'map_val'), (r'=>', Punctuation, 'map_val'), (r'\}', Punctuation, '#pop'), ], 'map_val': [ include('root'), (r',', Punctuation, '#pop'), (r'(?=\})', Punctuation, '#pop'), ], 'tuple': [ include('root'), (r'\}', Punctuation, '#pop'), ], } tokens.update(gen_elixir_string_rules('double', '"', String.Double)) tokens.update(gen_elixir_string_rules('single', "'", String.Single)) tokens.update(gen_elixir_string_rules('double_atom', '"', String.Symbol)) tokens.update(gen_elixir_string_rules('single_atom', "'", String.Symbol)) tokens.update(gen_elixir_sigil_rules()) class ElixirConsoleLexer(Lexer): """ For Elixir interactive console (iex) output like: .. sourcecode:: iex iex> [head | tail] = [1,2,3] [1,2,3] iex> head 1 iex> tail [2,3] iex> [head | tail] [1,2,3] iex> length [head | tail] 3 .. 
versionadded:: 1.5 """ name = 'Elixir iex session' aliases = ['iex'] mimetypes = ['text/x-elixir-shellsession'] _prompt_re = re.compile('(iex|\.{3})(\(\d+\))?> ') def get_tokens_unprocessed(self, text): exlexer = ElixirLexer(**self.options) curcode = '' in_error = False insertions = [] for match in line_re.finditer(text): line = match.group() if line.startswith(u'** '): in_error = True insertions.append((len(curcode), [(0, Generic.Error, line[:-1])])) curcode += line[-1:] else: m = self._prompt_re.match(line) if m is not None: in_error = False end = m.end() insertions.append((len(curcode), [(0, Generic.Prompt, line[:end])])) curcode += line[end:] else: if curcode: for item in do_insertions( insertions, exlexer.get_tokens_unprocessed(curcode)): yield item curcode = '' insertions = [] token = Generic.Error if in_error else Generic.Output yield match.start(), token, line if curcode: for item in do_insertions( insertions, exlexer.get_tokens_unprocessed(curcode)): yield item
license: gpl-3.0
hash: -6,485,025,965,252,877,000
line_mean: 34.606654
line_max: 87
alpha_frac: 0.452872
autogenerated: false
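This record's `content` is the Pygments Erlang/Elixir lexer module, stored flattened above. A short sketch of how those lexers are driven through the public Pygments API (this uses the upstream `pygments` package, which exposes the same `ErlangLexer` and `ElixirLexer` classes defined in the record, rather than the vendored ThirdParty copy named in the path):

```python
# A minimal sketch using the public Pygments API; the classes below are
# the ones defined in this record's erlang.py module.
from pygments import highlight
from pygments.lexers import ErlangLexer, ElixirLexer
from pygments.formatters import HtmlFormatter

erlang_src = '-module(hello).\n-export([greet/0]).\ngreet() -> io:format("hi~n").'
print(highlight(erlang_src, ErlangLexer(), HtmlFormatter()))

elixir_src = 'defmodule Hello do\n  def greet, do: IO.puts("hi")\nend'
print(highlight(elixir_src, ElixirLexer(), HtmlFormatter()))
```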
repo_name: Arcanemagus/SickRage
path: lib/six.py
copies: 172
size: 30888
content:
# Copyright (c) 2010-2017 Benjamin Peterson # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. """Utilities for writing code that runs on Python 2 and 3""" from __future__ import absolute_import import functools import itertools import operator import sys import types __author__ = "Benjamin Peterson <benjamin@python.org>" __version__ = "1.11.0" # Useful for very coarse version differentiation. PY2 = sys.version_info[0] == 2 PY3 = sys.version_info[0] == 3 PY34 = sys.version_info[0:2] >= (3, 4) if PY3: string_types = str, integer_types = int, class_types = type, text_type = str binary_type = bytes MAXSIZE = sys.maxsize else: string_types = basestring, integer_types = (int, long) class_types = (type, types.ClassType) text_type = unicode binary_type = str if sys.platform.startswith("java"): # Jython always uses 32 bits. MAXSIZE = int((1 << 31) - 1) else: # It's possible to have sizeof(long) != sizeof(Py_ssize_t). class X(object): def __len__(self): return 1 << 31 try: len(X()) except OverflowError: # 32-bit MAXSIZE = int((1 << 31) - 1) else: # 64-bit MAXSIZE = int((1 << 63) - 1) del X def _add_doc(func, doc): """Add documentation to a function.""" func.__doc__ = doc def _import_module(name): """Import module, returning the module after the last dot.""" __import__(name) return sys.modules[name] class _LazyDescr(object): def __init__(self, name): self.name = name def __get__(self, obj, tp): result = self._resolve() setattr(obj, self.name, result) # Invokes __set__. try: # This is a bit ugly, but it avoids running this again by # removing this descriptor. 
delattr(obj.__class__, self.name) except AttributeError: pass return result class MovedModule(_LazyDescr): def __init__(self, name, old, new=None): super(MovedModule, self).__init__(name) if PY3: if new is None: new = name self.mod = new else: self.mod = old def _resolve(self): return _import_module(self.mod) def __getattr__(self, attr): _module = self._resolve() value = getattr(_module, attr) setattr(self, attr, value) return value class _LazyModule(types.ModuleType): def __init__(self, name): super(_LazyModule, self).__init__(name) self.__doc__ = self.__class__.__doc__ def __dir__(self): attrs = ["__doc__", "__name__"] attrs += [attr.name for attr in self._moved_attributes] return attrs # Subclasses should override this _moved_attributes = [] class MovedAttribute(_LazyDescr): def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): super(MovedAttribute, self).__init__(name) if PY3: if new_mod is None: new_mod = name self.mod = new_mod if new_attr is None: if old_attr is None: new_attr = name else: new_attr = old_attr self.attr = new_attr else: self.mod = old_mod if old_attr is None: old_attr = name self.attr = old_attr def _resolve(self): module = _import_module(self.mod) return getattr(module, self.attr) class _SixMetaPathImporter(object): """ A meta path importer to import six.moves and its submodules. This class implements a PEP302 finder and loader. It should be compatible with Python 2.5 and all existing versions of Python3 """ def __init__(self, six_module_name): self.name = six_module_name self.known_modules = {} def _add_module(self, mod, *fullnames): for fullname in fullnames: self.known_modules[self.name + "." + fullname] = mod def _get_module(self, fullname): return self.known_modules[self.name + "." + fullname] def find_module(self, fullname, path=None): if fullname in self.known_modules: return self return None def __get_module(self, fullname): try: return self.known_modules[fullname] except KeyError: raise ImportError("This loader does not know module " + fullname) def load_module(self, fullname): try: # in case of a reload return sys.modules[fullname] except KeyError: pass mod = self.__get_module(fullname) if isinstance(mod, MovedModule): mod = mod._resolve() else: mod.__loader__ = self sys.modules[fullname] = mod return mod def is_package(self, fullname): """ Return true, if the named module is a package. 
We need this method to get correct spec objects with Python 3.4 (see PEP451) """ return hasattr(self.__get_module(fullname), "__path__") def get_code(self, fullname): """Return None Required, if is_package is implemented""" self.__get_module(fullname) # eventually raises ImportError return None get_source = get_code # same as get_code _importer = _SixMetaPathImporter(__name__) class _MovedItems(_LazyModule): """Lazy loading of moved objects""" __path__ = [] # mark as package _moved_attributes = [ MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), MovedAttribute("intern", "__builtin__", "sys"), MovedAttribute("map", "itertools", "builtins", "imap", "map"), MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), MovedAttribute("getoutput", "commands", "subprocess"), MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), MovedAttribute("reduce", "__builtin__", "functools"), MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), MovedAttribute("StringIO", "StringIO", "io"), MovedAttribute("UserDict", "UserDict", "collections"), MovedAttribute("UserList", "UserList", "collections"), MovedAttribute("UserString", "UserString", "collections"), MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), MovedModule("builtins", "__builtin__"), MovedModule("configparser", "ConfigParser"), MovedModule("copyreg", "copy_reg"), MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"), MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), MovedModule("http_cookies", "Cookie", "http.cookies"), MovedModule("html_entities", "htmlentitydefs", "html.entities"), MovedModule("html_parser", "HTMLParser", "html.parser"), MovedModule("http_client", "httplib", "http.client"), MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"), MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), MovedModule("cPickle", "cPickle", "pickle"), MovedModule("queue", "Queue"), MovedModule("reprlib", "repr"), MovedModule("socketserver", "SocketServer"), MovedModule("_thread", "thread", "_thread"), MovedModule("tkinter", "Tkinter"), MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), MovedModule("tkinter_tix", "Tix", "tkinter.tix"), MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), MovedModule("tkinter_constants", 
"Tkconstants", "tkinter.constants"), MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), MovedModule("tkinter_colorchooser", "tkColorChooser", "tkinter.colorchooser"), MovedModule("tkinter_commondialog", "tkCommonDialog", "tkinter.commondialog"), MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), MovedModule("tkinter_font", "tkFont", "tkinter.font"), MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", "tkinter.simpledialog"), MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), ] # Add windows specific modules. if sys.platform == "win32": _moved_attributes += [ MovedModule("winreg", "_winreg"), ] for attr in _moved_attributes: setattr(_MovedItems, attr.name, attr) if isinstance(attr, MovedModule): _importer._add_module(attr, "moves." + attr.name) del attr _MovedItems._moved_attributes = _moved_attributes moves = _MovedItems(__name__ + ".moves") _importer._add_module(moves, "moves") class Module_six_moves_urllib_parse(_LazyModule): """Lazy loading of moved objects in six.moves.urllib_parse""" _urllib_parse_moved_attributes = [ MovedAttribute("ParseResult", "urlparse", "urllib.parse"), MovedAttribute("SplitResult", "urlparse", "urllib.parse"), MovedAttribute("parse_qs", "urlparse", "urllib.parse"), MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), MovedAttribute("urldefrag", "urlparse", "urllib.parse"), MovedAttribute("urljoin", "urlparse", "urllib.parse"), MovedAttribute("urlparse", "urlparse", "urllib.parse"), MovedAttribute("urlsplit", "urlparse", "urllib.parse"), MovedAttribute("urlunparse", "urlparse", "urllib.parse"), MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), MovedAttribute("quote", "urllib", "urllib.parse"), MovedAttribute("quote_plus", "urllib", "urllib.parse"), MovedAttribute("unquote", "urllib", "urllib.parse"), MovedAttribute("unquote_plus", "urllib", "urllib.parse"), MovedAttribute("unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"), MovedAttribute("urlencode", "urllib", "urllib.parse"), MovedAttribute("splitquery", "urllib", "urllib.parse"), MovedAttribute("splittag", "urllib", "urllib.parse"), MovedAttribute("splituser", "urllib", "urllib.parse"), MovedAttribute("splitvalue", "urllib", "urllib.parse"), MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), MovedAttribute("uses_params", "urlparse", "urllib.parse"), MovedAttribute("uses_query", "urlparse", "urllib.parse"), MovedAttribute("uses_relative", "urlparse", "urllib.parse"), ] for attr in _urllib_parse_moved_attributes: setattr(Module_six_moves_urllib_parse, attr.name, attr) del attr Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes _importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), "moves.urllib_parse", "moves.urllib.parse") class Module_six_moves_urllib_error(_LazyModule): """Lazy loading of moved objects in six.moves.urllib_error""" _urllib_error_moved_attributes = [ MovedAttribute("URLError", "urllib2", "urllib.error"), MovedAttribute("HTTPError", "urllib2", 
"urllib.error"), MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), ] for attr in _urllib_error_moved_attributes: setattr(Module_six_moves_urllib_error, attr.name, attr) del attr Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes _importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), "moves.urllib_error", "moves.urllib.error") class Module_six_moves_urllib_request(_LazyModule): """Lazy loading of moved objects in six.moves.urllib_request""" _urllib_request_moved_attributes = [ MovedAttribute("urlopen", "urllib2", "urllib.request"), MovedAttribute("install_opener", "urllib2", "urllib.request"), MovedAttribute("build_opener", "urllib2", "urllib.request"), MovedAttribute("pathname2url", "urllib", "urllib.request"), MovedAttribute("url2pathname", "urllib", "urllib.request"), MovedAttribute("getproxies", "urllib", "urllib.request"), MovedAttribute("Request", "urllib2", "urllib.request"), MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"), MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), MovedAttribute("BaseHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), MovedAttribute("FileHandler", "urllib2", "urllib.request"), MovedAttribute("FTPHandler", "urllib2", "urllib.request"), MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), MovedAttribute("urlretrieve", "urllib", "urllib.request"), MovedAttribute("urlcleanup", "urllib", "urllib.request"), MovedAttribute("URLopener", "urllib", "urllib.request"), MovedAttribute("FancyURLopener", "urllib", "urllib.request"), MovedAttribute("proxy_bypass", "urllib", "urllib.request"), MovedAttribute("parse_http_list", "urllib2", "urllib.request"), MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"), ] for attr in _urllib_request_moved_attributes: setattr(Module_six_moves_urllib_request, attr.name, attr) del attr Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes _importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), "moves.urllib_request", "moves.urllib.request") class Module_six_moves_urllib_response(_LazyModule): """Lazy loading of moved objects in six.moves.urllib_response""" _urllib_response_moved_attributes = [ MovedAttribute("addbase", "urllib", "urllib.response"), MovedAttribute("addclosehook", "urllib", "urllib.response"), MovedAttribute("addinfo", "urllib", "urllib.response"), MovedAttribute("addinfourl", "urllib", "urllib.response"), ] for attr in 
_urllib_response_moved_attributes: setattr(Module_six_moves_urllib_response, attr.name, attr) del attr Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes _importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), "moves.urllib_response", "moves.urllib.response") class Module_six_moves_urllib_robotparser(_LazyModule): """Lazy loading of moved objects in six.moves.urllib_robotparser""" _urllib_robotparser_moved_attributes = [ MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), ] for attr in _urllib_robotparser_moved_attributes: setattr(Module_six_moves_urllib_robotparser, attr.name, attr) del attr Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes _importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), "moves.urllib_robotparser", "moves.urllib.robotparser") class Module_six_moves_urllib(types.ModuleType): """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" __path__ = [] # mark as package parse = _importer._get_module("moves.urllib_parse") error = _importer._get_module("moves.urllib_error") request = _importer._get_module("moves.urllib_request") response = _importer._get_module("moves.urllib_response") robotparser = _importer._get_module("moves.urllib_robotparser") def __dir__(self): return ['parse', 'error', 'request', 'response', 'robotparser'] _importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), "moves.urllib") def add_move(move): """Add an item to six.moves.""" setattr(_MovedItems, move.name, move) def remove_move(name): """Remove item from six.moves.""" try: delattr(_MovedItems, name) except AttributeError: try: del moves.__dict__[name] except KeyError: raise AttributeError("no such move, %r" % (name,)) if PY3: _meth_func = "__func__" _meth_self = "__self__" _func_closure = "__closure__" _func_code = "__code__" _func_defaults = "__defaults__" _func_globals = "__globals__" else: _meth_func = "im_func" _meth_self = "im_self" _func_closure = "func_closure" _func_code = "func_code" _func_defaults = "func_defaults" _func_globals = "func_globals" try: advance_iterator = next except NameError: def advance_iterator(it): return it.next() next = advance_iterator try: callable = callable except NameError: def callable(obj): return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) if PY3: def get_unbound_function(unbound): return unbound create_bound_method = types.MethodType def create_unbound_method(func, cls): return func Iterator = object else: def get_unbound_function(unbound): return unbound.im_func def create_bound_method(func, obj): return types.MethodType(func, obj, obj.__class__) def create_unbound_method(func, cls): return types.MethodType(func, None, cls) class Iterator(object): def next(self): return type(self).__next__(self) callable = callable _add_doc(get_unbound_function, """Get the function out of a possibly unbound function""") get_method_function = operator.attrgetter(_meth_func) get_method_self = operator.attrgetter(_meth_self) get_function_closure = operator.attrgetter(_func_closure) get_function_code = operator.attrgetter(_func_code) get_function_defaults = operator.attrgetter(_func_defaults) get_function_globals = operator.attrgetter(_func_globals) if PY3: def iterkeys(d, **kw): return iter(d.keys(**kw)) def itervalues(d, **kw): return iter(d.values(**kw)) def iteritems(d, **kw): return iter(d.items(**kw)) def iterlists(d, **kw): return 
iter(d.lists(**kw)) viewkeys = operator.methodcaller("keys") viewvalues = operator.methodcaller("values") viewitems = operator.methodcaller("items") else: def iterkeys(d, **kw): return d.iterkeys(**kw) def itervalues(d, **kw): return d.itervalues(**kw) def iteritems(d, **kw): return d.iteritems(**kw) def iterlists(d, **kw): return d.iterlists(**kw) viewkeys = operator.methodcaller("viewkeys") viewvalues = operator.methodcaller("viewvalues") viewitems = operator.methodcaller("viewitems") _add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") _add_doc(itervalues, "Return an iterator over the values of a dictionary.") _add_doc(iteritems, "Return an iterator over the (key, value) pairs of a dictionary.") _add_doc(iterlists, "Return an iterator over the (key, [values]) pairs of a dictionary.") if PY3: def b(s): return s.encode("latin-1") def u(s): return s unichr = chr import struct int2byte = struct.Struct(">B").pack del struct byte2int = operator.itemgetter(0) indexbytes = operator.getitem iterbytes = iter import io StringIO = io.StringIO BytesIO = io.BytesIO _assertCountEqual = "assertCountEqual" if sys.version_info[1] <= 1: _assertRaisesRegex = "assertRaisesRegexp" _assertRegex = "assertRegexpMatches" else: _assertRaisesRegex = "assertRaisesRegex" _assertRegex = "assertRegex" else: def b(s): return s # Workaround for standalone backslash def u(s): return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") unichr = unichr int2byte = chr def byte2int(bs): return ord(bs[0]) def indexbytes(buf, i): return ord(buf[i]) iterbytes = functools.partial(itertools.imap, ord) import StringIO StringIO = BytesIO = StringIO.StringIO _assertCountEqual = "assertItemsEqual" _assertRaisesRegex = "assertRaisesRegexp" _assertRegex = "assertRegexpMatches" _add_doc(b, """Byte literal""") _add_doc(u, """Text literal""") def assertCountEqual(self, *args, **kwargs): return getattr(self, _assertCountEqual)(*args, **kwargs) def assertRaisesRegex(self, *args, **kwargs): return getattr(self, _assertRaisesRegex)(*args, **kwargs) def assertRegex(self, *args, **kwargs): return getattr(self, _assertRegex)(*args, **kwargs) if PY3: exec_ = getattr(moves.builtins, "exec") def reraise(tp, value, tb=None): try: if value is None: value = tp() if value.__traceback__ is not tb: raise value.with_traceback(tb) raise value finally: value = None tb = None else: def exec_(_code_, _globs_=None, _locs_=None): """Execute code in a namespace.""" if _globs_ is None: frame = sys._getframe(1) _globs_ = frame.f_globals if _locs_ is None: _locs_ = frame.f_locals del frame elif _locs_ is None: _locs_ = _globs_ exec("""exec _code_ in _globs_, _locs_""") exec_("""def reraise(tp, value, tb=None): try: raise tp, value, tb finally: tb = None """) if sys.version_info[:2] == (3, 2): exec_("""def raise_from(value, from_value): try: if from_value is None: raise value raise value from from_value finally: value = None """) elif sys.version_info[:2] > (3, 2): exec_("""def raise_from(value, from_value): try: raise value from from_value finally: value = None """) else: def raise_from(value, from_value): raise value print_ = getattr(moves.builtins, "print", None) if print_ is None: def print_(*args, **kwargs): """The new-style print function for Python 2.4 and 2.5.""" fp = kwargs.pop("file", sys.stdout) if fp is None: return def write(data): if not isinstance(data, basestring): data = str(data) # If the file has an encoding, encode unicode with it. 
if (isinstance(fp, file) and isinstance(data, unicode) and fp.encoding is not None): errors = getattr(fp, "errors", None) if errors is None: errors = "strict" data = data.encode(fp.encoding, errors) fp.write(data) want_unicode = False sep = kwargs.pop("sep", None) if sep is not None: if isinstance(sep, unicode): want_unicode = True elif not isinstance(sep, str): raise TypeError("sep must be None or a string") end = kwargs.pop("end", None) if end is not None: if isinstance(end, unicode): want_unicode = True elif not isinstance(end, str): raise TypeError("end must be None or a string") if kwargs: raise TypeError("invalid keyword arguments to print()") if not want_unicode: for arg in args: if isinstance(arg, unicode): want_unicode = True break if want_unicode: newline = unicode("\n") space = unicode(" ") else: newline = "\n" space = " " if sep is None: sep = space if end is None: end = newline for i, arg in enumerate(args): if i: write(sep) write(arg) write(end) if sys.version_info[:2] < (3, 3): _print = print_ def print_(*args, **kwargs): fp = kwargs.get("file", sys.stdout) flush = kwargs.pop("flush", False) _print(*args, **kwargs) if flush and fp is not None: fp.flush() _add_doc(reraise, """Reraise an exception.""") if sys.version_info[0:2] < (3, 4): def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, updated=functools.WRAPPER_UPDATES): def wrapper(f): f = functools.wraps(wrapped, assigned, updated)(f) f.__wrapped__ = wrapped return f return wrapper else: wraps = functools.wraps def with_metaclass(meta, *bases): """Create a base class with a metaclass.""" # This requires a bit of explanation: the basic idea is to make a dummy # metaclass for one level of class instantiation that replaces itself with # the actual metaclass. class metaclass(type): def __new__(cls, name, this_bases, d): return meta(name, bases, d) @classmethod def __prepare__(cls, name, this_bases): return meta.__prepare__(name, bases) return type.__new__(metaclass, 'temporary_class', (), {}) def add_metaclass(metaclass): """Class decorator for creating a class with a metaclass.""" def wrapper(cls): orig_vars = cls.__dict__.copy() slots = orig_vars.get('__slots__') if slots is not None: if isinstance(slots, str): slots = [slots] for slots_var in slots: orig_vars.pop(slots_var) orig_vars.pop('__dict__', None) orig_vars.pop('__weakref__', None) return metaclass(cls.__name__, cls.__bases__, orig_vars) return wrapper def python_2_unicode_compatible(klass): """ A decorator that defines __unicode__ and __str__ methods under Python 2. Under Python 3 it does nothing. To support Python 2 and 3 with a single code base, define a __str__ method returning text and apply this decorator to the class. """ if PY2: if '__str__' not in klass.__dict__: raise ValueError("@python_2_unicode_compatible cannot be applied " "to %s because it doesn't define __str__()." % klass.__name__) klass.__unicode__ = klass.__str__ klass.__str__ = lambda self: self.__unicode__().encode('utf-8') return klass # Complete the moves implementation. # This code is at the end of this module to speed up module loading. # Turn this module into a package. __path__ = [] # required for PEP 302 and PEP 451 __package__ = __name__ # see PEP 366 @ReservedAssignment if globals().get("__spec__") is not None: __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable # Remove other six meta path importers, since they cause problems. This can # happen if six is removed from sys.modules and then reloaded. (Setuptools does # this for some reason.) 
if sys.meta_path: for i, importer in enumerate(sys.meta_path): # Here's some real nastiness: Another "instance" of the six module might # be floating around. Therefore, we can't use isinstance() to check for # the six meta path importer, since the other six instance will have # inserted an importer with different class. if (type(importer).__name__ == "_SixMetaPathImporter" and importer.name == __name__): del sys.meta_path[i] break del i, importer # Finally, add the importer to the meta path import hook. sys.meta_path.append(_importer)
license: gpl-3.0
hash: 3,833,100,061,493,608,400
line_mean: 33.666667
line_max: 98
alpha_frac: 0.629468
autogenerated: false
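This record's `content` is the well-known `six` Python 2/3 compatibility shim. A minimal sketch of its typical use; every name here (`PY2`, `PY3`, `moves`, `with_metaclass`, `python_2_unicode_compatible`) is part of the public interface defined in the record above:

```python
# A minimal sketch of the six API defined in this record.
import six
from six.moves import range  # xrange on Python 2, range on Python 3

class Meta(type):
    pass

class Base(six.with_metaclass(Meta)):  # same metaclass on both major versions
    pass

@six.python_2_unicode_compatible
class Greeting(object):
    def __str__(self):  # return text; six adds __unicode__ on Python 2
        return u"hello"

print(six.PY2, six.PY3, type(Base) is Meta)
```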
repo_name: LePastis/pyload
path: module/gui/MainWindow.py
copies: 41
size: 30215
content:
# -*- coding: utf-8 -*- """ This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see <http://www.gnu.org/licenses/>. @author: mkaay """ from PyQt4.QtCore import * from PyQt4.QtGui import * from os.path import join from module.gui.PackageDock import * from module.gui.LinkDock import * from module.gui.CaptchaDock import CaptchaDock from module.gui.SettingsWidget import SettingsWidget from module.gui.Collector import CollectorView, Package, Link from module.gui.Queue import QueueView from module.gui.Overview import OverviewView from module.gui.Accounts import AccountView from module.gui.AccountEdit import AccountEdit from module.remote.thriftbackend.ThriftClient import AccountInfo class MainWindow(QMainWindow): def __init__(self, connector): """ set up main window """ QMainWindow.__init__(self) #window stuff self.setWindowTitle(_("pyLoad Client")) self.setWindowIcon(QIcon(join(pypath, "icons","logo.png"))) self.resize(1000,600) #layout version self.version = 3 #init docks self.newPackDock = NewPackageDock() self.addDockWidget(Qt.RightDockWidgetArea, self.newPackDock) self.connect(self.newPackDock, SIGNAL("done"), self.slotAddPackage) self.captchaDock = CaptchaDock() self.addDockWidget(Qt.BottomDockWidgetArea, self.captchaDock) self.newLinkDock = NewLinkDock() self.addDockWidget(Qt.RightDockWidgetArea, self.newLinkDock) self.connect(self.newLinkDock, SIGNAL("done"), self.slotAddLinksToPackage) #central widget, layout self.masterlayout = QVBoxLayout() lw = QWidget() lw.setLayout(self.masterlayout) self.setCentralWidget(lw) #status self.statusw = QFrame() self.statusw.setFrameStyle(QFrame.StyledPanel | QFrame.Raised) self.statusw.setLineWidth(2) self.statusw.setLayout(QGridLayout()) #palette = self.statusw.palette() #palette.setColor(QPalette.Window, QColor(255, 255, 255)) #self.statusw.setPalette(palette) #self.statusw.setAutoFillBackground(True) l = self.statusw.layout() class BoldLabel(QLabel): def __init__(self, text): QLabel.__init__(self, text) f = self.font() f.setBold(True) self.setFont(f) self.setAlignment(Qt.AlignRight) class Seperator(QFrame): def __init__(self): QFrame.__init__(self) self.setFrameShape(QFrame.VLine) self.setFrameShadow(QFrame.Sunken) l.addWidget(BoldLabel(_("Packages:")), 0, 0) self.packageCount = QLabel("0") l.addWidget(self.packageCount, 0, 1) l.addWidget(BoldLabel(_("Files:")), 0, 2) self.fileCount = QLabel("0") l.addWidget(self.fileCount, 0, 3) l.addWidget(BoldLabel(_("Status:")), 0, 4) self.status = QLabel("running") l.addWidget(self.status, 0, 5) l.addWidget(BoldLabel(_("Space:")), 0, 6) self.space = QLabel("") l.addWidget(self.space, 0, 7) l.addWidget(BoldLabel(_("Speed:")), 0, 8) self.speed = QLabel("") l.addWidget(self.speed, 0, 9) #l.addWidget(BoldLabel(_("Max. downloads:")), 0, 9) #l.addWidget(BoldLabel(_("Max. 
chunks:")), 1, 9) #self.maxDownloads = QSpinBox() #self.maxDownloads.setEnabled(False) #self.maxChunks = QSpinBox() #self.maxChunks.setEnabled(False) #l.addWidget(self.maxDownloads, 0, 10) #l.addWidget(self.maxChunks, 1, 10) #set menubar and statusbar self.menubar = self.menuBar() #self.statusbar = self.statusBar() #self.connect(self.statusbar, SIGNAL("showMsg"), self.statusbar.showMessage) #self.serverStatus = QLabel(_("Status: Not Connected")) #self.statusbar.addPermanentWidget(self.serverStatus) #menu self.menus = {"file": self.menubar.addMenu(_("File")), "connections": self.menubar.addMenu(_("Connections"))} #menu actions self.mactions = {"exit": QAction(_("Exit"), self.menus["file"]), "manager": QAction(_("Connection manager"), self.menus["connections"])} #add menu actions self.menus["file"].addAction(self.mactions["exit"]) self.menus["connections"].addAction(self.mactions["manager"]) #toolbar self.actions = {} self.init_toolbar() #tabs self.tabw = QTabWidget() self.tabs = {"overview": {"w": QWidget()}, "queue": {"w": QWidget()}, "collector": {"w": QWidget()}, "accounts": {"w": QWidget()}, "settings": {}} #self.tabs["settings"]["s"] = QScrollArea() self.tabs["settings"]["w"] = SettingsWidget() #self.tabs["settings"]["s"].setWidgetResizable(True) #self.tabs["settings"]["s"].setWidget(self.tabs["settings"]["w"]) self.tabs["log"] = {"w":QWidget()} self.tabw.addTab(self.tabs["overview"]["w"], _("Overview")) self.tabw.addTab(self.tabs["queue"]["w"], _("Queue")) self.tabw.addTab(self.tabs["collector"]["w"], _("Collector")) self.tabw.addTab(self.tabs["accounts"]["w"], _("Accounts")) self.tabw.addTab(self.tabs["settings"]["w"], _("Settings")) self.tabw.addTab(self.tabs["log"]["w"], _("Log")) #init tabs self.init_tabs(connector) #context menus self.init_context() #layout self.masterlayout.addWidget(self.tabw) self.masterlayout.addWidget(self.statusw) #signals.. 
self.connect(self.mactions["manager"], SIGNAL("triggered()"), self.slotShowConnector) self.connect(self.tabs["queue"]["view"], SIGNAL('customContextMenuRequested(const QPoint &)'), self.slotQueueContextMenu) self.connect(self.tabs["collector"]["package_view"], SIGNAL('customContextMenuRequested(const QPoint &)'), self.slotCollectorContextMenu) self.connect(self.tabs["accounts"]["view"], SIGNAL('customContextMenuRequested(const QPoint &)'), self.slotAccountContextMenu) self.connect(self.tabw, SIGNAL("currentChanged(int)"), self.slotTabChanged) self.lastAddedID = None self.connector = connector def init_toolbar(self): """ create toolbar """ self.toolbar = self.addToolBar(_("Hide Toolbar")) self.toolbar.setObjectName("Main Toolbar") self.toolbar.setIconSize(QSize(30,30)) self.toolbar.setMovable(False) self.actions["toggle_status"] = self.toolbar.addAction(_("Toggle Pause/Resume")) pricon = QIcon() pricon.addFile(join(pypath, "icons","toolbar_start.png"), QSize(), QIcon.Normal, QIcon.Off) pricon.addFile(join(pypath, "icons","toolbar_pause.png"), QSize(), QIcon.Normal, QIcon.On) self.actions["toggle_status"].setIcon(pricon) self.actions["toggle_status"].setCheckable(True) self.actions["status_stop"] = self.toolbar.addAction(QIcon(join(pypath, "icons","toolbar_stop.png")), _("Stop")) self.toolbar.addSeparator() self.actions["add"] = self.toolbar.addAction(QIcon(join(pypath, "icons","toolbar_add.png")), _("Add")) self.toolbar.addSeparator() self.actions["clipboard"] = self.toolbar.addAction(QIcon(join(pypath, "icons","clipboard.png")), _("Check Clipboard")) self.actions["clipboard"].setCheckable(True) self.connect(self.actions["toggle_status"], SIGNAL("toggled(bool)"), self.slotToggleStatus) self.connect(self.actions["clipboard"], SIGNAL("toggled(bool)"), self.slotToggleClipboard) self.connect(self.actions["status_stop"], SIGNAL("triggered()"), self.slotStatusStop) self.addMenu = QMenu() packageAction = self.addMenu.addAction(_("Package")) containerAction = self.addMenu.addAction(_("Container")) accountAction = self.addMenu.addAction(_("Account")) linksAction = self.addMenu.addAction(_("Links")) self.connect(self.actions["add"], SIGNAL("triggered()"), self.slotAdd) self.connect(packageAction, SIGNAL("triggered()"), self.slotShowAddPackage) self.connect(containerAction, SIGNAL("triggered()"), self.slotShowAddContainer) self.connect(accountAction, SIGNAL("triggered()"), self.slotNewAccount) self.connect(linksAction, SIGNAL("triggered()"), self.slotShowAddLinks) def init_tabs(self, connector): """ create tabs """ #overview self.tabs["overview"]["l"] = QGridLayout() self.tabs["overview"]["w"].setLayout(self.tabs["overview"]["l"]) self.tabs["overview"]["view"] = OverviewView(connector) self.tabs["overview"]["l"].addWidget(self.tabs["overview"]["view"]) #queue self.tabs["queue"]["l"] = QGridLayout() self.tabs["queue"]["w"].setLayout(self.tabs["queue"]["l"]) self.tabs["queue"]["view"] = QueueView(connector) self.tabs["queue"]["l"].addWidget(self.tabs["queue"]["view"]) #collector toQueue = QPushButton(_("Push selected packages to queue")) self.tabs["collector"]["l"] = QGridLayout() self.tabs["collector"]["w"].setLayout(self.tabs["collector"]["l"]) self.tabs["collector"]["package_view"] = CollectorView(connector) self.tabs["collector"]["l"].addWidget(self.tabs["collector"]["package_view"], 0, 0) self.tabs["collector"]["l"].addWidget(toQueue, 1, 0) self.connect(toQueue, SIGNAL("clicked()"), self.slotPushPackageToQueue) self.tabs["collector"]["package_view"].setContextMenuPolicy(Qt.CustomContextMenu) 
self.tabs["queue"]["view"].setContextMenuPolicy(Qt.CustomContextMenu) #log self.tabs["log"]["l"] = QGridLayout() self.tabs["log"]["w"].setLayout(self.tabs["log"]["l"]) self.tabs["log"]["text"] = QTextEdit() self.tabs["log"]["text"].logOffset = 0 self.tabs["log"]["text"].setReadOnly(True) self.connect(self.tabs["log"]["text"], SIGNAL("append(QString)"), self.tabs["log"]["text"].append) self.tabs["log"]["l"].addWidget(self.tabs["log"]["text"]) #accounts self.tabs["accounts"]["view"] = AccountView(connector) self.tabs["accounts"]["w"].setLayout(QVBoxLayout()) self.tabs["accounts"]["w"].layout().addWidget(self.tabs["accounts"]["view"]) newbutton = QPushButton(_("New Account")) self.tabs["accounts"]["w"].layout().addWidget(newbutton) self.connect(newbutton, SIGNAL("clicked()"), self.slotNewAccount) self.tabs["accounts"]["view"].setContextMenuPolicy(Qt.CustomContextMenu) def init_context(self): """ create context menus """ self.activeMenu = None #queue self.queueContext = QMenu() self.queueContext.buttons = {} self.queueContext.item = (None, None) self.queueContext.buttons["remove"] = QAction(QIcon(join(pypath, "icons","remove_small.png")), _("Remove"), self.queueContext) self.queueContext.buttons["restart"] = QAction(QIcon(join(pypath, "icons","refresh_small.png")), _("Restart"), self.queueContext) self.queueContext.buttons["pull"] = QAction(QIcon(join(pypath, "icons","pull_small.png")), _("Pull out"), self.queueContext) self.queueContext.buttons["abort"] = QAction(QIcon(join(pypath, "icons","abort.png")), _("Abort"), self.queueContext) self.queueContext.buttons["edit"] = QAction(QIcon(join(pypath, "icons","edit_small.png")), _("Edit Name"), self.queueContext) self.queueContext.addAction(self.queueContext.buttons["pull"]) self.queueContext.addAction(self.queueContext.buttons["edit"]) self.queueContext.addAction(self.queueContext.buttons["remove"]) self.queueContext.addAction(self.queueContext.buttons["restart"]) self.queueContext.addAction(self.queueContext.buttons["abort"]) self.connect(self.queueContext.buttons["remove"], SIGNAL("triggered()"), self.slotRemoveDownload) self.connect(self.queueContext.buttons["restart"], SIGNAL("triggered()"), self.slotRestartDownload) self.connect(self.queueContext.buttons["pull"], SIGNAL("triggered()"), self.slotPullOutPackage) self.connect(self.queueContext.buttons["abort"], SIGNAL("triggered()"), self.slotAbortDownload) self.connect(self.queueContext.buttons["edit"], SIGNAL("triggered()"), self.slotEditPackage) #collector self.collectorContext = QMenu() self.collectorContext.buttons = {} self.collectorContext.item = (None, None) self.collectorContext.buttons["remove"] = QAction(QIcon(join(pypath, "icons","remove_small.png")), _("Remove"), self.collectorContext) self.collectorContext.buttons["push"] = QAction(QIcon(join(pypath, "icons","push_small.png")), _("Push to queue"), self.collectorContext) self.collectorContext.buttons["edit"] = QAction(QIcon(join(pypath, "icons","edit_small.png")), _("Edit Name"), self.collectorContext) self.collectorContext.buttons["restart"] = QAction(QIcon(join(pypath, "icons","refresh_small.png")), _("Restart"), self.collectorContext) self.collectorContext.buttons["refresh"] = QAction(QIcon(join(pypath, "icons","refresh1_small.png")),_("Refresh Status"), self.collectorContext) self.collectorContext.addAction(self.collectorContext.buttons["push"]) self.collectorContext.addSeparator() self.collectorContext.buttons["add"] = self.collectorContext.addMenu(QIcon(join(pypath, "icons","add_small.png")), _("Add")) 
self.collectorContext.addAction(self.collectorContext.buttons["edit"]) self.collectorContext.addAction(self.collectorContext.buttons["remove"]) self.collectorContext.addAction(self.collectorContext.buttons["restart"]) self.collectorContext.addSeparator() self.collectorContext.addAction(self.collectorContext.buttons["refresh"]) packageAction = self.collectorContext.buttons["add"].addAction(_("Package")) containerAction = self.collectorContext.buttons["add"].addAction(_("Container")) linkAction = self.collectorContext.buttons["add"].addAction(_("Links")) self.connect(self.collectorContext.buttons["remove"], SIGNAL("triggered()"), self.slotRemoveDownload) self.connect(self.collectorContext.buttons["push"], SIGNAL("triggered()"), self.slotPushPackageToQueue) self.connect(self.collectorContext.buttons["edit"], SIGNAL("triggered()"), self.slotEditPackage) self.connect(self.collectorContext.buttons["restart"], SIGNAL("triggered()"), self.slotRestartDownload) self.connect(self.collectorContext.buttons["refresh"], SIGNAL("triggered()"), self.slotRefreshPackage) self.connect(packageAction, SIGNAL("triggered()"), self.slotShowAddPackage) self.connect(containerAction, SIGNAL("triggered()"), self.slotShowAddContainer) self.connect(linkAction, SIGNAL("triggered()"), self.slotShowAddLinks) self.accountContext = QMenu() self.accountContext.buttons = {} self.accountContext.buttons["add"] = QAction(QIcon(join(pypath, "icons","add_small.png")), _("Add"), self.accountContext) self.accountContext.buttons["remove"] = QAction(QIcon(join(pypath, "icons","remove_small.png")), _("Remove"), self.accountContext) self.accountContext.buttons["edit"] = QAction(QIcon(join(pypath, "icons","edit_small.png")), _("Edit"), self.accountContext) self.accountContext.addAction(self.accountContext.buttons["add"]) self.accountContext.addAction(self.accountContext.buttons["edit"]) self.accountContext.addAction(self.accountContext.buttons["remove"]) self.connect(self.accountContext.buttons["add"], SIGNAL("triggered()"), self.slotNewAccount) self.connect(self.accountContext.buttons["edit"], SIGNAL("triggered()"), self.slotEditAccount) self.connect(self.accountContext.buttons["remove"], SIGNAL("triggered()"), self.slotRemoveAccount) def slotToggleStatus(self, status): """ pause/start toggle (toolbar) """ self.emit(SIGNAL("setDownloadStatus"), status) def slotStatusStop(self): """ stop button (toolbar) """ self.emit(SIGNAL("stopAllDownloads")) def slotAdd(self): """ add button (toolbar) show context menu (choice: links/package) """ self.addMenu.exec_(QCursor.pos()) def slotShowAddPackage(self): """ action from add-menu show new-package dock """ self.tabw.setCurrentIndex(1) self.newPackDock.show() def slotShowAddLinks(self): """ action from add-menu show new-links dock """ self.tabw.setCurrentIndex(1) self.newLinkDock.show() def slotShowConnector(self): """ connectionmanager action triggered let main to the stuff """ self.emit(SIGNAL("connector")) def slotAddPackage(self, name, links, password=None): """ new package let main to the stuff """ self.emit(SIGNAL("addPackage"), name, links, password) def slotAddLinksToPackage(self, links): """ adds links to currently selected package only in collector """ if self.tabw.currentIndex() != 1: return smodel = self.tabs["collector"]["package_view"].selectionModel() for index in smodel.selectedRows(0): item = index.internalPointer() if isinstance(item, Package): self.connector.proxy.addFiles(item.id, links) break def slotShowAddContainer(self): """ action from add-menu show file selector, emit 
upload """ typeStr = ";;".join([ _("All Container Types (%s)") % "*.dlc *.ccf *.rsdf *.txt", _("DLC (%s)") % "*.dlc", _("CCF (%s)") % "*.ccf", _("RSDF (%s)") % "*.rsdf", _("Text Files (%s)") % "*.txt" ]) fileNames = QFileDialog.getOpenFileNames(self, _("Open container"), "", typeStr) for name in fileNames: self.emit(SIGNAL("addContainer"), str(name)) def slotPushPackageToQueue(self): """ push collector pack to queue get child ids let main to the rest """ smodel = self.tabs["collector"]["package_view"].selectionModel() for index in smodel.selectedRows(0): item = index.internalPointer() if isinstance(item, Package): self.emit(SIGNAL("pushPackageToQueue"), item.id) else: self.emit(SIGNAL("pushPackageToQueue"), item.package.id) def saveWindow(self): """ get window state/geometry pass data to main """ state_raw = self.saveState(self.version) geo_raw = self.saveGeometry() state = str(state_raw.toBase64()) geo = str(geo_raw.toBase64()) self.emit(SIGNAL("saveMainWindow"), state, geo) def closeEvent(self, event): """ somebody wants to close me! let me first save my state """ self.saveWindow() event.ignore() self.hide() self.emit(SIGNAL("hidden")) # quit when no tray is available if not QSystemTrayIcon.isSystemTrayAvailable(): self.emit(SIGNAL("Quit")) def restoreWindow(self, state, geo): """ restore window state/geometry """ state = QByteArray(state) geo = QByteArray(geo) state_raw = QByteArray.fromBase64(state) geo_raw = QByteArray.fromBase64(geo) self.restoreState(state_raw, self.version) self.restoreGeometry(geo_raw) def slotQueueContextMenu(self, pos): """ custom context menu in queue view requested """ globalPos = self.tabs["queue"]["view"].mapToGlobal(pos) i = self.tabs["queue"]["view"].indexAt(pos) if not i: return item = i.internalPointer() menuPos = QCursor.pos() menuPos.setX(menuPos.x()+2) self.activeMenu = self.queueContext showAbort = False if isinstance(item, Link) and item.data["downloading"]: showAbort = True elif isinstance(item, Package): for child in item.children: if child.data["downloading"]: showAbort = True break if showAbort: self.queueContext.buttons["abort"].setEnabled(True) else: self.queueContext.buttons["abort"].setEnabled(False) if isinstance(item, Package): self.queueContext.index = i #self.queueContext.buttons["remove"].setEnabled(True) #self.queueContext.buttons["restart"].setEnabled(True) self.queueContext.buttons["pull"].setEnabled(True) self.queueContext.buttons["edit"].setEnabled(True) elif isinstance(item, Link): self.collectorContext.index = i self.collectorContext.buttons["edit"].setEnabled(False) self.collectorContext.buttons["remove"].setEnabled(True) self.collectorContext.buttons["push"].setEnabled(False) self.collectorContext.buttons["restart"].setEnabled(True) else: self.queueContext.index = None #self.queueContext.buttons["remove"].setEnabled(False) #self.queueContext.buttons["restart"].setEnabled(False) self.queueContext.buttons["pull"].setEnabled(False) self.queueContext.buttons["edit"].setEnabled(False) self.queueContext.exec_(menuPos) def slotCollectorContextMenu(self, pos): """ custom context menu in package collector view requested """ globalPos = self.tabs["collector"]["package_view"].mapToGlobal(pos) i = self.tabs["collector"]["package_view"].indexAt(pos) if not i: return item = i.internalPointer() menuPos = QCursor.pos() menuPos.setX(menuPos.x()+2) self.activeMenu = self.collectorContext if isinstance(item, Package): self.collectorContext.index = i self.collectorContext.buttons["edit"].setEnabled(True) 
self.collectorContext.buttons["remove"].setEnabled(True) self.collectorContext.buttons["push"].setEnabled(True) self.collectorContext.buttons["restart"].setEnabled(True) elif isinstance(item, Link): self.collectorContext.index = i self.collectorContext.buttons["edit"].setEnabled(False) self.collectorContext.buttons["remove"].setEnabled(True) self.collectorContext.buttons["push"].setEnabled(False) self.collectorContext.buttons["restart"].setEnabled(True) else: self.collectorContext.index = None self.collectorContext.buttons["edit"].setEnabled(False) self.collectorContext.buttons["remove"].setEnabled(False) self.collectorContext.buttons["push"].setEnabled(False) self.collectorContext.buttons["restart"].setEnabled(False) self.collectorContext.exec_(menuPos) def slotLinkCollectorContextMenu(self, pos): """ custom context menu in link collector view requested """ pass def slotRestartDownload(self): """ restart download action is triggered """ smodel = self.tabs["queue"]["view"].selectionModel() for index in smodel.selectedRows(0): item = index.internalPointer() self.emit(SIGNAL("restartDownload"), item.id, isinstance(item, Package)) def slotRemoveDownload(self): """ remove download action is triggered """ if self.activeMenu == self.queueContext: view = self.tabs["queue"]["view"] else: view = self.tabs["collector"]["package_view"] smodel = view.selectionModel() for index in smodel.selectedRows(0): item = index.internalPointer() self.emit(SIGNAL("removeDownload"), item.id, isinstance(item, Package)) def slotToggleClipboard(self, status): """ check clipboard (toolbar) """ self.emit(SIGNAL("setClipboardStatus"), status) def slotEditPackage(self): # in Queue, only edit name if self.activeMenu == self.queueContext: view = self.tabs["queue"]["view"] else: view = self.tabs["collector"]["package_view"] view.edit(self.activeMenu.index) def slotEditCommit(self, editor): self.emit(SIGNAL("changePackageName"), self.activeMenu.index.internalPointer().id, editor.text()) def slotPullOutPackage(self): """ pull package out of the queue """ smodel = self.tabs["queue"]["view"].selectionModel() for index in smodel.selectedRows(0): item = index.internalPointer() if isinstance(item, Package): self.emit(SIGNAL("pullOutPackage"), item.id) else: self.emit(SIGNAL("pullOutPackage"), item.package.id) def slotAbortDownload(self): view = self.tabs["queue"]["view"] smodel = view.selectionModel() for index in smodel.selectedRows(0): item = index.internalPointer() self.emit(SIGNAL("abortDownload"), item.id, isinstance(item, Package)) # TODO disabled because changing desktop on linux, main window disappears #def changeEvent(self, e): # if e.type() == QEvent.WindowStateChange and self.isMinimized(): # e.ignore() # self.hide() # self.emit(SIGNAL("hidden")) # else: # super(MainWindow, self).changeEvent(e) def slotTabChanged(self, index): if index == 2: self.emit(SIGNAL("reloadAccounts")) elif index == 3: self.tabs["settings"]["w"].loadConfig() def slotRefreshPackage(self): smodel = self.tabs["collector"]["package_view"].selectionModel() for index in smodel.selectedRows(0): item = index.internalPointer() pid = item.id if isinstance(item, Link): pid = item.package.id self.emit(SIGNAL("refreshStatus"), pid) def slotNewAccount(self): types = self.connector.proxy.getAccountTypes() self.accountEdit = AccountEdit.newAccount(types) #TODO make more easy n1, n2, n3 def save(data): if data["password"]: self.accountEdit.close() n1 = data["acctype"] n2 = data["login"] n3 = data["password"] self.connector.updateAccount(n1, n2, n3, None) 
self.accountEdit.connect(self.accountEdit, SIGNAL("done"), save) self.accountEdit.show() def slotEditAccount(self): types = self.connector.getAccountTypes() data = self.tabs["accounts"]["view"].selectedIndexes() if len(data) < 1: return data = data[0].internalPointer() self.accountEdit = AccountEdit.editAccount(types, data) #TODO make more easy n1, n2, n3 #TODO reload accounts tab after insert of edit account #TODO if account does not exist give error def save(data): self.accountEdit.close() n1 = data["acctype"] n2 = data["login"] if data["password"]: n3 = data["password"] self.connector.updateAccount(n1, n2, n3, None) self.accountEdit.connect(self.accountEdit, SIGNAL("done"), save) self.accountEdit.show() def slotRemoveAccount(self): data = self.tabs["accounts"]["view"].selectedIndexes() if len(data) < 1: return data = data[0].internalPointer() self.connector.removeAccount(data.type, data.login) def slotAccountContextMenu(self, pos): globalPos = self.tabs["accounts"]["view"].mapToGlobal(pos) i = self.tabs["accounts"]["view"].indexAt(pos) if not i: return data = i.internalPointer() if data is None: self.accountContext.buttons["edit"].setEnabled(False) self.accountContext.buttons["remove"].setEnabled(False) else: self.accountContext.buttons["edit"].setEnabled(True) self.accountContext.buttons["remove"].setEnabled(True) menuPos = QCursor.pos() menuPos.setX(menuPos.x()+2) self.accountContext.exec_(menuPos)
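
The saveWindow()/restoreWindow() pair above round-trips the Qt window state through base64 strings so the core can persist them. A minimal, self-contained sketch of that round-trip, assuming a PyQt4 environment like the code above (the widget here is a bare QMainWindow, not pyLoad's MainWindow):

# sketch of the save/restore round-trip used by saveWindow()/restoreWindow()
import sys
from PyQt4.QtCore import QByteArray
from PyQt4.QtGui import QApplication, QMainWindow

app = QApplication(sys.argv)
win = QMainWindow()

# serialize: QByteArray -> base64 str (what gets handed off for storage)
state = str(win.saveState().toBase64())
geo = str(win.saveGeometry().toBase64())

# restore: base64 str -> QByteArray -> widget
win.restoreState(QByteArray.fromBase64(QByteArray(state)))
win.restoreGeometry(QByteArray.fromBase64(QByteArray(geo)))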
gpl-3.0
4,450,976,661,799,134,700
42.350072
152
0.60556
false
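
The trailing fields of each record appear to be per-file statistics (mean and maximum line length, and the fraction of alphabetic characters). A hypothetical reconstruction, purely as an assumption about how such values could be derived from the content field:

# assumed definitions; the dataset's exact formulas are not documented here
def file_stats(content):
    lines = content.splitlines()
    line_mean = sum(len(l) for l in lines) / float(len(lines))
    line_max = max(len(l) for l in lines)
    alpha_frac = sum(c.isalpha() for c in content) / float(len(content))
    return line_mean, line_max, alpha_frac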
dfalt974/SickRage
lib/sqlalchemy/orm/unitofwork.py
78
23204
# orm/unitofwork.py # Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file> # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php """The internals for the unit of work system. The session's flush() process passes objects to a contextual object here, which assembles flush tasks based on mappers and their properties, organizes them in order of dependency, and executes. """ from .. import util, event from ..util import topological from . import attributes, persistence, util as orm_util def track_cascade_events(descriptor, prop): """Establish event listeners on object attributes which handle cascade-on-set/append. """ key = prop.key def append(state, item, initiator): # process "save_update" cascade rules for when # an instance is appended to the list of another instance if item is None: return sess = state.session if sess: if sess._warn_on_events: sess._flush_warning("collection append") prop = state.manager.mapper._props[key] item_state = attributes.instance_state(item) if prop._cascade.save_update and \ (prop.cascade_backrefs or key == initiator.key) and \ not sess._contains_state(item_state): sess._save_or_update_state(item_state) return item def remove(state, item, initiator): if item is None: return sess = state.session if sess: prop = state.manager.mapper._props[key] if sess._warn_on_events: sess._flush_warning( "collection remove" if prop.uselist else "related attribute delete") # expunge pending orphans item_state = attributes.instance_state(item) if prop._cascade.delete_orphan and \ item_state in sess._new and \ prop.mapper._is_orphan(item_state): sess.expunge(item) def set_(state, newvalue, oldvalue, initiator): # process "save_update" cascade rules for when an instance # is attached to another instance if oldvalue is newvalue: return newvalue sess = state.session if sess: if sess._warn_on_events: sess._flush_warning("related attribute set") prop = state.manager.mapper._props[key] if newvalue is not None: newvalue_state = attributes.instance_state(newvalue) if prop._cascade.save_update and \ (prop.cascade_backrefs or key == initiator.key) and \ not sess._contains_state(newvalue_state): sess._save_or_update_state(newvalue_state) if oldvalue is not None and \ oldvalue is not attributes.PASSIVE_NO_RESULT and \ prop._cascade.delete_orphan: # possible to reach here with attributes.NEVER_SET ? oldvalue_state = attributes.instance_state(oldvalue) if oldvalue_state in sess._new and \ prop.mapper._is_orphan(oldvalue_state): sess.expunge(oldvalue) return newvalue event.listen(descriptor, 'append', append, raw=True, retval=True) event.listen(descriptor, 'remove', remove, raw=True, retval=True) event.listen(descriptor, 'set', set_, raw=True, retval=True) class UOWTransaction(object): def __init__(self, session): self.session = session # dictionary used by external actors to # store arbitrary state information. self.attributes = {} # dictionary of mappers to sets of # DependencyProcessors, which are also # set to be part of the sorted flush actions, # which have that mapper as a parent. self.deps = util.defaultdict(set) # dictionary of mappers to sets of InstanceState # items pending for flush which have that mapper # as a parent. 
self.mappers = util.defaultdict(set) # a dictionary of Preprocess objects, which gather # additional states impacted by the flush # and determine if a flush action is needed self.presort_actions = {} # dictionary of PostSortRec objects, each # one issues work during the flush within # a certain ordering. self.postsort_actions = {} # a set of 2-tuples, each containing two # PostSortRec objects where the second # is dependent on the first being executed # first self.dependencies = set() # dictionary of InstanceState-> (isdelete, listonly) # tuples, indicating if this state is to be deleted # or insert/updated, or just refreshed self.states = {} # tracks InstanceStates which will be receiving # a "post update" call. Keys are mappers, # values are a set of states and a set of the # columns which should be included in the update. self.post_update_states = util.defaultdict(lambda: (set(), set())) @property def has_work(self): return bool(self.states) def is_deleted(self, state): """return true if the given state is marked as deleted within this uowtransaction.""" return state in self.states and self.states[state][0] def memo(self, key, callable_): if key in self.attributes: return self.attributes[key] else: self.attributes[key] = ret = callable_() return ret def remove_state_actions(self, state): """remove pending actions for a state from the uowtransaction.""" isdelete = self.states[state][0] self.states[state] = (isdelete, True) def get_attribute_history(self, state, key, passive=attributes.PASSIVE_NO_INITIALIZE): """facade to attributes.get_state_history(), including caching of results.""" hashkey = ("history", state, key) # cache the objects, not the states; the strong reference here # prevents newly loaded objects from being dereferenced during the # flush process if hashkey in self.attributes: history, state_history, cached_passive = self.attributes[hashkey] # if the cached lookup was "passive" and now # we want non-passive, do a non-passive lookup and re-cache if not cached_passive & attributes.SQL_OK \ and passive & attributes.SQL_OK: impl = state.manager[key].impl history = impl.get_history(state, state.dict, attributes.PASSIVE_OFF | attributes.LOAD_AGAINST_COMMITTED) if history and impl.uses_objects: state_history = history.as_state() else: state_history = history self.attributes[hashkey] = (history, state_history, passive) else: impl = state.manager[key].impl # TODO: store the history as (state, object) tuples # so we don't have to keep converting here history = impl.get_history(state, state.dict, passive | attributes.LOAD_AGAINST_COMMITTED) if history and impl.uses_objects: state_history = history.as_state() else: state_history = history self.attributes[hashkey] = (history, state_history, passive) return state_history def has_dep(self, processor): return (processor, True) in self.presort_actions def register_preprocessor(self, processor, fromparent): key = (processor, fromparent) if key not in self.presort_actions: self.presort_actions[key] = Preprocess(processor, fromparent) def register_object(self, state, isdelete=False, listonly=False, cancel_delete=False, operation=None, prop=None): if not self.session._contains_state(state): if not state.deleted and operation is not None: util.warn("Object of type %s not in session, %s operation " "along '%s' will not proceed" % (orm_util.state_class_str(state), operation, prop)) return False if state not in self.states: mapper = state.manager.mapper if mapper not in self.mappers: self._per_mapper_flush_actions(mapper) 
self.mappers[mapper].add(state) self.states[state] = (isdelete, listonly) else: if not listonly and (isdelete or cancel_delete): self.states[state] = (isdelete, False) return True def issue_post_update(self, state, post_update_cols): mapper = state.manager.mapper.base_mapper states, cols = self.post_update_states[mapper] states.add(state) cols.update(post_update_cols) def _per_mapper_flush_actions(self, mapper): saves = SaveUpdateAll(self, mapper.base_mapper) deletes = DeleteAll(self, mapper.base_mapper) self.dependencies.add((saves, deletes)) for dep in mapper._dependency_processors: dep.per_property_preprocessors(self) for prop in mapper.relationships: if prop.viewonly: continue dep = prop._dependency_processor dep.per_property_preprocessors(self) @util.memoized_property def _mapper_for_dep(self): """return a dynamic mapping of (Mapper, DependencyProcessor) to True or False, indicating if the DependencyProcessor operates on objects of that Mapper. The result is stored in the dictionary persistently once calculated. """ return util.PopulateDict( lambda tup: tup[0]._props.get(tup[1].key) is tup[1].prop ) def filter_states_for_dep(self, dep, states): """Filter the given list of InstanceStates to those relevant to the given DependencyProcessor. """ mapper_for_dep = self._mapper_for_dep return [s for s in states if mapper_for_dep[(s.manager.mapper, dep)]] def states_for_mapper_hierarchy(self, mapper, isdelete, listonly): checktup = (isdelete, listonly) for mapper in mapper.base_mapper.self_and_descendants: for state in self.mappers[mapper]: if self.states[state] == checktup: yield state def _generate_actions(self): """Generate the full, unsorted collection of PostSortRecs as well as dependency pairs for this UOWTransaction. """ # execute presort_actions, until all states # have been processed. a presort_action might # add new states to the uow. while True: ret = False for action in list(self.presort_actions.values()): if action.execute(self): ret = True if not ret: break # see if the graph of mapper dependencies has cycles. self.cycles = cycles = topological.find_cycles( self.dependencies, list(self.postsort_actions.values())) if cycles: # if yes, break the per-mapper actions into # per-state actions convert = dict( (rec, set(rec.per_state_flush_actions(self))) for rec in cycles ) # rewrite the existing dependencies to point to # the per-state actions for those per-mapper actions # that were broken up. 
for edge in list(self.dependencies): if None in edge or \ edge[0].disabled or edge[1].disabled or \ cycles.issuperset(edge): self.dependencies.remove(edge) elif edge[0] in cycles: self.dependencies.remove(edge) for dep in convert[edge[0]]: self.dependencies.add((dep, edge[1])) elif edge[1] in cycles: self.dependencies.remove(edge) for dep in convert[edge[1]]: self.dependencies.add((edge[0], dep)) return set([a for a in self.postsort_actions.values() if not a.disabled ] ).difference(cycles) def execute(self): postsort_actions = self._generate_actions() #sort = topological.sort(self.dependencies, postsort_actions) #print "--------------" #print "\ndependencies:", self.dependencies #print "\ncycles:", self.cycles #print "\nsort:", list(sort) #print "\nCOUNT OF POSTSORT ACTIONS", len(postsort_actions) # execute if self.cycles: for set_ in topological.sort_as_subsets( self.dependencies, postsort_actions): while set_: n = set_.pop() n.execute_aggregate(self, set_) else: for rec in topological.sort( self.dependencies, postsort_actions): rec.execute(self) def finalize_flush_changes(self): """mark processed objects as clean / deleted after a successful flush(). this method is called within the flush() method after the execute() method has succeeded and the transaction has been committed. """ states = set(self.states) isdel = set( s for (s, (isdelete, listonly)) in self.states.items() if isdelete ) other = states.difference(isdel) self.session._remove_newly_deleted(isdel) self.session._register_newly_persistent(other) class IterateMappersMixin(object): def _mappers(self, uow): if self.fromparent: return iter( m for m in self.dependency_processor.parent.self_and_descendants if uow._mapper_for_dep[(m, self.dependency_processor)] ) else: return self.dependency_processor.mapper.self_and_descendants class Preprocess(IterateMappersMixin): def __init__(self, dependency_processor, fromparent): self.dependency_processor = dependency_processor self.fromparent = fromparent self.processed = set() self.setup_flush_actions = False def execute(self, uow): delete_states = set() save_states = set() for mapper in self._mappers(uow): for state in uow.mappers[mapper].difference(self.processed): (isdelete, listonly) = uow.states[state] if not listonly: if isdelete: delete_states.add(state) else: save_states.add(state) if delete_states: self.dependency_processor.presort_deletes(uow, delete_states) self.processed.update(delete_states) if save_states: self.dependency_processor.presort_saves(uow, save_states) self.processed.update(save_states) if (delete_states or save_states): if not self.setup_flush_actions and ( self.dependency_processor.\ prop_has_changes(uow, delete_states, True) or self.dependency_processor.\ prop_has_changes(uow, save_states, False) ): self.dependency_processor.per_property_flush_actions(uow) self.setup_flush_actions = True return True else: return False class PostSortRec(object): disabled = False def __new__(cls, uow, *args): key = (cls, ) + args if key in uow.postsort_actions: return uow.postsort_actions[key] else: uow.postsort_actions[key] = \ ret = \ object.__new__(cls) return ret def execute_aggregate(self, uow, recs): self.execute(uow) def __repr__(self): return "%s(%s)" % ( self.__class__.__name__, ",".join(str(x) for x in self.__dict__.values()) ) class ProcessAll(IterateMappersMixin, PostSortRec): def __init__(self, uow, dependency_processor, delete, fromparent): self.dependency_processor = dependency_processor self.delete = delete self.fromparent = fromparent 
uow.deps[dependency_processor.parent.base_mapper].\ add(dependency_processor) def execute(self, uow): states = self._elements(uow) if self.delete: self.dependency_processor.process_deletes(uow, states) else: self.dependency_processor.process_saves(uow, states) def per_state_flush_actions(self, uow): # this is handled by SaveUpdateAll and DeleteAll, # since a ProcessAll should unconditionally be pulled # into per-state if either the parent/child mappers # are part of a cycle return iter([]) def __repr__(self): return "%s(%s, delete=%s)" % ( self.__class__.__name__, self.dependency_processor, self.delete ) def _elements(self, uow): for mapper in self._mappers(uow): for state in uow.mappers[mapper]: (isdelete, listonly) = uow.states[state] if isdelete == self.delete and not listonly: yield state class IssuePostUpdate(PostSortRec): def __init__(self, uow, mapper, isdelete): self.mapper = mapper self.isdelete = isdelete def execute(self, uow): states, cols = uow.post_update_states[self.mapper] states = [s for s in states if uow.states[s][0] == self.isdelete] persistence.post_update(self.mapper, states, uow, cols) class SaveUpdateAll(PostSortRec): def __init__(self, uow, mapper): self.mapper = mapper assert mapper is mapper.base_mapper def execute(self, uow): persistence.save_obj(self.mapper, uow.states_for_mapper_hierarchy(self.mapper, False, False), uow ) def per_state_flush_actions(self, uow): states = list(uow.states_for_mapper_hierarchy( self.mapper, False, False)) base_mapper = self.mapper.base_mapper delete_all = DeleteAll(uow, base_mapper) for state in states: # keep saves before deletes - # this ensures 'row switch' operations work action = SaveUpdateState(uow, state, base_mapper) uow.dependencies.add((action, delete_all)) yield action for dep in uow.deps[self.mapper]: states_for_prop = uow.filter_states_for_dep(dep, states) dep.per_state_flush_actions(uow, states_for_prop, False) class DeleteAll(PostSortRec): def __init__(self, uow, mapper): self.mapper = mapper assert mapper is mapper.base_mapper def execute(self, uow): persistence.delete_obj(self.mapper, uow.states_for_mapper_hierarchy(self.mapper, True, False), uow ) def per_state_flush_actions(self, uow): states = list(uow.states_for_mapper_hierarchy( self.mapper, True, False)) base_mapper = self.mapper.base_mapper save_all = SaveUpdateAll(uow, base_mapper) for state in states: # keep saves before deletes - # this ensures 'row switch' operations work action = DeleteState(uow, state, base_mapper) uow.dependencies.add((save_all, action)) yield action for dep in uow.deps[self.mapper]: states_for_prop = uow.filter_states_for_dep(dep, states) dep.per_state_flush_actions(uow, states_for_prop, True) class ProcessState(PostSortRec): def __init__(self, uow, dependency_processor, delete, state): self.dependency_processor = dependency_processor self.delete = delete self.state = state def execute_aggregate(self, uow, recs): cls_ = self.__class__ dependency_processor = self.dependency_processor delete = self.delete our_recs = [r for r in recs if r.__class__ is cls_ and r.dependency_processor is dependency_processor and r.delete is delete] recs.difference_update(our_recs) states = [self.state] + [r.state for r in our_recs] if delete: dependency_processor.process_deletes(uow, states) else: dependency_processor.process_saves(uow, states) def __repr__(self): return "%s(%s, %s, delete=%s)" % ( self.__class__.__name__, self.dependency_processor, orm_util.state_str(self.state), self.delete ) class SaveUpdateState(PostSortRec): def __init__(self, uow, 
state, mapper): self.state = state self.mapper = mapper def execute_aggregate(self, uow, recs): cls_ = self.__class__ mapper = self.mapper our_recs = [r for r in recs if r.__class__ is cls_ and r.mapper is mapper] recs.difference_update(our_recs) persistence.save_obj(mapper, [self.state] + [r.state for r in our_recs], uow) def __repr__(self): return "%s(%s)" % ( self.__class__.__name__, orm_util.state_str(self.state) ) class DeleteState(PostSortRec): def __init__(self, uow, state, mapper): self.state = state self.mapper = mapper def execute_aggregate(self, uow, recs): cls_ = self.__class__ mapper = self.mapper our_recs = [r for r in recs if r.__class__ is cls_ and r.mapper is mapper] recs.difference_update(our_recs) states = [self.state] + [r.state for r in our_recs] persistence.delete_obj(mapper, [s for s in states if uow.states[s][0]], uow) def __repr__(self): return "%s(%s)" % ( self.__class__.__name__, orm_util.state_str(self.state) )
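
The core idea of _generate_actions()/execute() above is that flush actions are ordered by topologically sorting a set of (before, after) dependency pairs. A minimal illustration of that idea using the standard library's graphlib (Python 3.9+) in place of SQLAlchemy's own topological module; the action names are hypothetical:

# toy flush actions and dependencies, e.g. saves happen before deletes
from graphlib import TopologicalSorter

actions = ["SaveUpdateAll(User)", "ProcessAll(addresses)", "DeleteAll(User)"]
dependencies = {
    ("SaveUpdateAll(User)", "ProcessAll(addresses)"),
    ("ProcessAll(addresses)", "DeleteAll(User)"),
}

ts = TopologicalSorter({a: set() for a in actions})
for before, after in dependencies:
    ts.add(after, before)  # 'after' depends on 'before'

print(list(ts.static_order()))
# ['SaveUpdateAll(User)', 'ProcessAll(addresses)', 'DeleteAll(User)']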
gpl-3.0
3,319,720,626,130,826,000
34.919505
84
0.564687
false
sandeepgupta2k4/tensorflow
tensorflow/python/ops/parsing_ops.py
21
49286
# Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Parsing Ops.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import collections import re from tensorflow.python.framework import constant_op from tensorflow.python.framework import dtypes from tensorflow.python.framework import ops from tensorflow.python.framework import sparse_tensor from tensorflow.python.framework import tensor_shape from tensorflow.python.ops import array_ops from tensorflow.python.ops import control_flow_ops from tensorflow.python.ops import gen_parsing_ops from tensorflow.python.ops import math_ops from tensorflow.python.ops import sparse_ops # go/tf-wildcard-import # pylint: disable=wildcard-import,undefined-variable from tensorflow.python.ops.gen_parsing_ops import * # pylint: enable=wildcard-import,undefined-variable from tensorflow.python.platform import tf_logging ops.NotDifferentiable("DecodeRaw") ops.NotDifferentiable("ParseTensor") ops.NotDifferentiable("StringToNumber") class VarLenFeature(collections.namedtuple("VarLenFeature", ["dtype"])): """Configuration for parsing a variable-length input feature. Fields: dtype: Data type of input. """ pass class SparseFeature( collections.namedtuple( "SparseFeature", ["index_key", "value_key", "dtype", "size", "already_sorted"])): """Configuration for parsing a sparse input feature from an `Example`. Note, preferably use `VarLenFeature` (possibly in combination with a `SequenceExample`) in order to parse out `SparseTensor`s instead of `SparseFeature` due to its simplicity. Closely mimicking the `SparseTensor` that will be obtained by parsing an `Example` with a `SparseFeature` config, a `SparseFeature` contains a * `value_key`: The name of key for a `Feature` in the `Example` whose parsed `Tensor` will be the resulting `SparseTensor.values`. * `index_key`: A list of names - one for each dimension in the resulting `SparseTensor` whose `indices[i][dim]` indicating the position of the `i`-th value in the `dim` dimension will be equal to the `i`-th value in the Feature with key named `index_key[dim]` in the `Example`. * `size`: A list of ints for the resulting `SparseTensor.dense_shape`. For example, we can represent the following 2D `SparseTensor` ```python SparseTensor(indices=[[3, 1], [20, 0]], values=[0.5, -1.0] dense_shape=[100, 3]) ``` with an `Example` input proto ```python features { feature { key: "val" value { float_list { value: [ 0.5, -1.0 ] } } } feature { key: "ix0" value { int64_list { value: [ 3, 20 ] } } } feature { key: "ix1" value { int64_list { value: [ 1, 0 ] } } } } ``` and `SparseFeature` config with 2 `index_key`s ```python SparseFeature(index_key=["ix0", "ix1"], value_key="val", dtype=tf.float32, size=[100, 3]) ``` Fields: index_key: A single string name or a list of string names of index features. 
For each key the underlying feature's type must be `int64` and its length must always match that of the `value_key` feature. To represent `SparseTensor`s with a `dense_shape` of `rank` higher than 1 a list of length `rank` should be used. value_key: Name of value feature. The underlying feature's type must be `dtype` and its length must always match that of all the `index_key`s' features. dtype: Data type of the `value_key` feature. size: A Python int or list thereof specifying the dense shape. Should be a list if and only if `index_key` is a list. In that case the list must be equal to the length of `index_key`. Each for each entry `i` all values in the `index_key`[i] feature must be in `[0, size[i])`. already_sorted: A Python boolean to specify whether the values in `value_key` are already sorted by their index position. If so skip sorting. False by default (optional). """ pass SparseFeature.__new__.__defaults__ = (False,) class FixedLenFeature(collections.namedtuple( "FixedLenFeature", ["shape", "dtype", "default_value"])): """Configuration for parsing a fixed-length input feature. To treat sparse input as dense, provide a `default_value`; otherwise, the parse functions will fail on any examples missing this feature. Fields: shape: Shape of input data. dtype: Data type of input. default_value: Value to be used if an example is missing this feature. It must be compatible with `dtype` and of the specified `shape`. """ pass FixedLenFeature.__new__.__defaults__ = (None,) class FixedLenSequenceFeature(collections.namedtuple( "FixedLenSequenceFeature", ["shape", "dtype", "allow_missing", "default_value"])): """Configuration for parsing a variable-length input feature into a `Tensor`. The resulting `Tensor` of parsing a single `SequenceExample` or `Example` has a static `shape` of `[None] + shape` and the specified `dtype`. The resulting `Tensor` of parsing a `batch_size` many `Example`s has a static `shape` of `[batch_size, None] + shape` and the specified `dtype`. The entries in the `batch` from different `Examples` will be padded with `default_value` to the maximum length present in the `batch`. To treat a sparse input as dense, provide `allow_missing=True`; otherwise, the parse functions will fail on any examples missing this feature. Fields: shape: Shape of input data for dimension 2 and higher. First dimension is of variable length `None`. dtype: Data type of input. allow_missing: Whether to allow this feature to be missing from a feature list item. Is available only for parsing `SequenceExample` not for parsing `Examples`. default_value: Scalar value to be used to pad multiple `Example`s to their maximum length. Irrelevant for parsing a single `Example` or `SequenceExample`. Defaults to "" for dtype string and 0 otherwise (optional). """ pass FixedLenSequenceFeature.__new__.__defaults__ = (False, None) def _features_to_raw_params(features, types): """Split feature tuples into raw params used by `gen_parsing_ops`. Args: features: A `dict` mapping feature keys to objects of a type in `types`. types: Type of features to allow, among `FixedLenFeature`, `VarLenFeature`, `SparseFeature`, and `FixedLenSequenceFeature`. Returns: Tuple of `sparse_keys`, `sparse_types`, `dense_keys`, `dense_types`, `dense_defaults`, `dense_shapes`. Raises: ValueError: if `features` contains an item not in `types`, or an invalid feature. 
""" sparse_keys = [] sparse_types = [] dense_keys = [] dense_types = [] dense_defaults = {} dense_shapes = [] if features: # NOTE: We iterate over sorted keys to keep things deterministic. for key in sorted(features.keys()): feature = features[key] if isinstance(feature, VarLenFeature): if VarLenFeature not in types: raise ValueError("Unsupported VarLenFeature %s.", feature) if not feature.dtype: raise ValueError("Missing type for feature %s." % key) sparse_keys.append(key) sparse_types.append(feature.dtype) elif isinstance(feature, SparseFeature): if SparseFeature not in types: raise ValueError("Unsupported SparseFeature %s.", feature) if not feature.index_key: raise ValueError( "Missing index_key for SparseFeature %s.", feature) if not feature.value_key: raise ValueError( "Missing value_key for SparseFeature %s.", feature) if not feature.dtype: raise ValueError("Missing type for feature %s." % key) index_keys = feature.index_key if isinstance(index_keys, str): index_keys = [index_keys] elif len(index_keys) > 1: tf_logging.warning("SparseFeature is a complicated feature config " "and should only be used after careful " "consideration of VarLenFeature.") for index_key in sorted(index_keys): if index_key in sparse_keys: dtype = sparse_types[sparse_keys.index(index_key)] if dtype != dtypes.int64: raise ValueError("Conflicting type %s vs int64 for feature %s." % (dtype, index_key)) else: sparse_keys.append(index_key) sparse_types.append(dtypes.int64) if feature.value_key in sparse_keys: dtype = sparse_types[sparse_keys.index(feature.value_key)] if dtype != feature.dtype: raise ValueError("Conflicting type %s vs %s for feature %s." % ( dtype, feature.dtype, feature.value_key)) else: sparse_keys.append(feature.value_key) sparse_types.append(feature.dtype) elif isinstance(feature, FixedLenFeature): if FixedLenFeature not in types: raise ValueError("Unsupported FixedLenFeature %s.", feature) if not feature.dtype: raise ValueError("Missing type for feature %s." % key) if feature.shape is None: raise ValueError("Missing shape for feature %s." % key) feature_tensor_shape = tensor_shape.as_shape(feature.shape) if (feature.shape and feature_tensor_shape.ndims and feature_tensor_shape.dims[0].value is None): raise ValueError("First dimension of shape for feature %s unknown. " "Consider using FixedLenSequenceFeature." % key) if (feature.shape is not None and not feature_tensor_shape.is_fully_defined()): raise ValueError("All dimensions of shape for feature %s need to be " "known but received %s." % (key, str(feature.shape))) dense_keys.append(key) dense_shapes.append(feature.shape) dense_types.append(feature.dtype) if feature.default_value is not None: dense_defaults[key] = feature.default_value elif isinstance(feature, FixedLenSequenceFeature): if FixedLenSequenceFeature not in types: raise ValueError("Unsupported FixedLenSequenceFeature %s.", feature) if not feature.dtype: raise ValueError("Missing type for feature %s." % key) if feature.shape is None: raise ValueError("Missing shape for feature %s." % key) dense_keys.append(key) dense_shapes.append(feature.shape) dense_types.append(feature.dtype) if feature.allow_missing: dense_defaults[key] = None if feature.default_value is not None: dense_defaults[key] = feature.default_value else: raise ValueError("Invalid feature %s:%s." 
% (key, feature)) return ( sparse_keys, sparse_types, dense_keys, dense_types, dense_defaults, dense_shapes) def _construct_sparse_tensors_for_sparse_features(features, tensor_dict): """Merges SparseTensors of indices and values of SparseFeatures. Constructs new dict based on `tensor_dict`. For `SparseFeatures` in the values of `features` expects their `index_key`s and `index_value`s to be present in `tensor_dict` mapping to `SparseTensor`s. Constructs a single `SparseTensor` from them, and adds it to the result with the key from `features`. Copies other keys and values from `tensor_dict` with keys present in `features`. Args: features: A `dict` mapping feature keys to `SparseFeature` values. Values of other types will be ignored. tensor_dict: A `dict` mapping feature keys to `Tensor` and `SparseTensor` values. Expected to contain keys of the `SparseFeature`s' `index_key`s and `value_key`s and mapping them to `SparseTensor`s. Returns: A `dict` mapping feature keys to `Tensor` and `SparseTensor` values. Similar to `tensor_dict` except each `SparseFeature`s in `features` results in a single `SparseTensor`. """ tensor_dict = dict(tensor_dict) # Do not modify argument passed in. # Construct SparseTensors for SparseFeatures. for key in sorted(features.keys()): feature = features[key] if isinstance(feature, SparseFeature): if isinstance(feature.index_key, str): sp_ids = tensor_dict[feature.index_key] else: sp_ids = [tensor_dict[index_key] for index_key in feature.index_key] sp_values = tensor_dict[feature.value_key] tensor_dict[key] = sparse_ops.sparse_merge( sp_ids, sp_values, vocab_size=feature.size, already_sorted=feature.already_sorted) # Remove tensors from dictionary that were only used to construct # SparseTensors for SparseFeature. for key in set(tensor_dict) - set(features): del tensor_dict[key] return tensor_dict def _prepend_none_dimension(features): if features: modified_features = dict(features) # Create a copy to modify for key, feature in features.items(): if isinstance(feature, FixedLenSequenceFeature): if not feature.allow_missing: raise ValueError("Unsupported: FixedLenSequenceFeature requires " "allow_missing to be True.") modified_features[key] = FixedLenSequenceFeature( [None] + list(feature.shape), feature.dtype, feature.allow_missing, feature.default_value) return modified_features else: return features def parse_example(serialized, features, name=None, example_names=None): # pylint: disable=line-too-long """Parses `Example` protos into a `dict` of tensors. Parses a number of serialized [`Example`](https://www.tensorflow.org/code/tensorflow/core/example/example.proto) protos given in `serialized`. We refer to `serialized` as a batch with `batch_size` many entries of individual `Example` protos. `example_names` may contain descriptive names for the corresponding serialized protos. These may be useful for debugging purposes, but they have no effect on the output. If not `None`, `example_names` must be the same length as `serialized`. This op parses serialized examples into a dictionary mapping keys to `Tensor` and `SparseTensor` objects. `features` is a dict from keys to `VarLenFeature`, `SparseFeature`, and `FixedLenFeature` objects. Each `VarLenFeature` and `SparseFeature` is mapped to a `SparseTensor`, and each `FixedLenFeature` is mapped to a `Tensor`. Each `VarLenFeature` maps to a `SparseTensor` of the specified type representing a ragged matrix. 
Its indices are `[batch, index]` where `batch` identifies the example in `serialized`, and `index` is the value's index in the list of values associated with that feature and example. Each `SparseFeature` maps to a `SparseTensor` of the specified type representing a Tensor of `dense_shape` `[batch_size] + SparseFeature.size`. Its `values` come from the feature in the examples with key `value_key`. A `values[i]` comes from a position `k` in the feature of an example at batch entry `batch`. This positional information is recorded in `indices[i]` as `[batch, index_0, index_1, ...]` where `index_j` is the `k-th` value of the feature in the example at with key `SparseFeature.index_key[j]. In other words, we split the indices (except the first index indicating the batch entry) of a `SparseTensor` by dimension into different features of the `Example`. Due to its complexity a `VarLenFeature` should be preferred over a `SparseFeature` whenever possible. Each `FixedLenFeature` `df` maps to a `Tensor` of the specified type (or `tf.float32` if not specified) and shape `(serialized.size(),) + df.shape`. `FixedLenFeature` entries with a `default_value` are optional. With no default value, we will fail if that `Feature` is missing from any example in `serialized`. Each `FixedLenSequenceFeature` `df` maps to a `Tensor` of the specified type (or `tf.float32` if not specified) and shape `(serialized.size(), None) + df.shape`. All examples in `serialized` will be padded with `default_value` along the second dimension. Examples: For example, if one expects a `tf.float32` `VarLenFeature` `ft` and three serialized `Example`s are provided: ``` serialized = [ features { feature { key: "ft" value { float_list { value: [1.0, 2.0] } } } }, features { feature []}, features { feature { key: "ft" value { float_list { value: [3.0] } } } ] ``` then the output will look like: ``` {"ft": SparseTensor(indices=[[0, 0], [0, 1], [2, 0]], values=[1.0, 2.0, 3.0], dense_shape=(3, 2)) } ``` If instead a `FixedLenSequenceFeature` with `default_value = -1.0` and `shape=[]` is used then the output will look like: ``` {"ft": [[1.0, 2.0], [3.0, -1.0]]} ``` Given two `Example` input protos in `serialized`: ``` [ features { feature { key: "kw" value { bytes_list { value: [ "knit", "big" ] } } } feature { key: "gps" value { float_list { value: [] } } } }, features { feature { key: "kw" value { bytes_list { value: [ "emmy" ] } } } feature { key: "dank" value { int64_list { value: [ 42 ] } } } feature { key: "gps" value { } } } ] ``` And arguments ``` example_names: ["input0", "input1"], features: { "kw": VarLenFeature(tf.string), "dank": VarLenFeature(tf.int64), "gps": VarLenFeature(tf.float32), } ``` Then the output is a dictionary: ```python { "kw": SparseTensor( indices=[[0, 0], [0, 1], [1, 0]], values=["knit", "big", "emmy"] dense_shape=[2, 2]), "dank": SparseTensor( indices=[[1, 0]], values=[42], dense_shape=[2, 1]), "gps": SparseTensor( indices=[], values=[], dense_shape=[2, 0]), } ``` For dense results in two serialized `Example`s: ``` [ features { feature { key: "age" value { int64_list { value: [ 0 ] } } } feature { key: "gender" value { bytes_list { value: [ "f" ] } } } }, features { feature { key: "age" value { int64_list { value: [] } } } feature { key: "gender" value { bytes_list { value: [ "f" ] } } } } ] ``` We can use arguments: ``` example_names: ["input0", "input1"], features: { "age": FixedLenFeature([], dtype=tf.int64, default_value=-1), "gender": FixedLenFeature([], dtype=tf.string), } ``` And the expected output is: 
```python { "age": [[0], [-1]], "gender": [["f"], ["f"]], } ``` An alternative to `VarLenFeature` to obtain a `SparseTensor` is `SparseFeature`. For example, given two `Example` input protos in `serialized`: ``` [ features { feature { key: "val" value { float_list { value: [ 0.5, -1.0 ] } } } feature { key: "ix" value { int64_list { value: [ 3, 20 ] } } } }, features { feature { key: "val" value { float_list { value: [ 0.0 ] } } } feature { key: "ix" value { int64_list { value: [ 42 ] } } } } ] ``` And arguments ``` example_names: ["input0", "input1"], features: { "sparse": SparseFeature( index_key="ix", value_key="val", dtype=tf.float32, size=100), } ``` Then the output is a dictionary: ```python { "sparse": SparseTensor( indices=[[0, 3], [0, 20], [1, 42]], values=[0.5, -1.0, 0.0] dense_shape=[2, 100]), } ``` Args: serialized: A vector (1-D Tensor) of strings, a batch of binary serialized `Example` protos. features: A `dict` mapping feature keys to `FixedLenFeature`, `VarLenFeature`, and `SparseFeature` values. name: A name for this operation (optional). example_names: A vector (1-D Tensor) of strings (optional), the names of the serialized protos in the batch. Returns: A `dict` mapping feature keys to `Tensor` and `SparseTensor` values. Raises: ValueError: if any feature is invalid. """ if not features: raise ValueError("Missing: features was %s." % features) features = _prepend_none_dimension(features) (sparse_keys, sparse_types, dense_keys, dense_types, dense_defaults, dense_shapes) = _features_to_raw_params( features, [VarLenFeature, SparseFeature, FixedLenFeature, FixedLenSequenceFeature]) outputs = _parse_example_raw( serialized, example_names, sparse_keys, sparse_types, dense_keys, dense_types, dense_defaults, dense_shapes, name) return _construct_sparse_tensors_for_sparse_features(features, outputs) def _parse_example_raw(serialized, names=None, sparse_keys=None, sparse_types=None, dense_keys=None, dense_types=None, dense_defaults=None, dense_shapes=None, name=None): """Parses `Example` protos. Args: serialized: A vector (1-D Tensor) of strings, a batch of binary serialized `Example` protos. names: A vector (1-D Tensor) of strings (optional), the names of the serialized protos. sparse_keys: A list of string keys in the examples' features. The results for these keys will be returned as `SparseTensor` objects. sparse_types: A list of `DTypes` of the same length as `sparse_keys`. Only `tf.float32` (`FloatList`), `tf.int64` (`Int64List`), and `tf.string` (`BytesList`) are supported. dense_keys: A list of string keys in the examples' features. The results for these keys will be returned as `Tensor`s dense_types: A list of DTypes of the same length as `dense_keys`. Only `tf.float32` (`FloatList`), `tf.int64` (`Int64List`), and `tf.string` (`BytesList`) are supported. dense_defaults: A dict mapping string keys to `Tensor`s. The keys of the dict must match the dense_keys of the feature. dense_shapes: A list of tuples with the same length as `dense_keys`. The shape of the data for each dense feature referenced by `dense_keys`. Required for any input tensors identified by `dense_keys`. Must be either fully defined, or may contain an unknown first dimension. An unknown first dimension means the feature is treated as having a variable number of blocks, and the output shape along this dimension is considered unknown at graph build time. Padding is applied for minibatch elements smaller than the maximum number of blocks for the given feature along this dimension. 
name: A name for this operation (optional). Returns: A `dict` mapping keys to `Tensor`s and `SparseTensor`s. Raises: ValueError: If sparse and dense key sets intersect, or input lengths do not match up. """ with ops.name_scope(name, "ParseExample", [serialized, names]): names = [] if names is None else names dense_defaults = {} if dense_defaults is None else dense_defaults sparse_keys = [] if sparse_keys is None else sparse_keys sparse_types = [] if sparse_types is None else sparse_types dense_keys = [] if dense_keys is None else dense_keys dense_types = [] if dense_types is None else dense_types dense_shapes = ( [[]] * len(dense_keys) if dense_shapes is None else dense_shapes) num_dense = len(dense_keys) num_sparse = len(sparse_keys) if len(dense_shapes) != num_dense: raise ValueError("len(dense_shapes) != len(dense_keys): %d vs. %d" % (len(dense_shapes), num_dense)) if len(dense_types) != num_dense: raise ValueError("len(dense_types) != len(num_dense): %d vs. %d" % (len(dense_types), num_dense)) if len(sparse_types) != num_sparse: raise ValueError("len(sparse_types) != len(sparse_keys): %d vs. %d" % (len(sparse_types), num_sparse)) if num_dense + num_sparse == 0: raise ValueError("Must provide at least one sparse key or dense key") if not set(dense_keys).isdisjoint(set(sparse_keys)): raise ValueError( "Dense and sparse keys must not intersect; intersection: %s" % set(dense_keys).intersection(set(sparse_keys))) # Convert dense_shapes to TensorShape object. dense_shapes = [tensor_shape.as_shape(shape) for shape in dense_shapes] dense_defaults_vec = [] for i, key in enumerate(dense_keys): default_value = dense_defaults.get(key) dense_shape = dense_shapes[i] if (dense_shape.ndims is not None and dense_shape.ndims > 0 and dense_shape[0].value is None): # Variable stride dense shape, the default value should be a # scalar padding value if default_value is None: default_value = ops.convert_to_tensor( "" if dense_types[i] == dtypes.string else 0, dtype=dense_types[i]) else: # Reshape to a scalar to ensure user gets an error if they # provide a tensor that's not intended to be a padding value # (0 or 2+ elements). key_name = "padding_" + re.sub("[^A-Za-z0-9_.\\-/]", "_", key) default_value = ops.convert_to_tensor( default_value, dtype=dense_types[i], name=key_name) default_value = array_ops.reshape(default_value, []) else: if default_value is None: default_value = constant_op.constant([], dtype=dense_types[i]) elif not isinstance(default_value, ops.Tensor): key_name = "key_" + re.sub("[^A-Za-z0-9_.\\-/]", "_", key) default_value = ops.convert_to_tensor( default_value, dtype=dense_types[i], name=key_name) default_value = array_ops.reshape(default_value, dense_shape) dense_defaults_vec.append(default_value) # Finally, convert dense_shapes to TensorShapeProto dense_shapes = [shape.as_proto() for shape in dense_shapes] # pylint: disable=protected-access outputs = gen_parsing_ops._parse_example( serialized=serialized, names=names, dense_defaults=dense_defaults_vec, sparse_keys=sparse_keys, sparse_types=sparse_types, dense_keys=dense_keys, dense_shapes=dense_shapes, name=name) # pylint: enable=protected-access (sparse_indices, sparse_values, sparse_shapes, dense_values) = outputs sparse_tensors = [ sparse_tensor.SparseTensor(ix, val, shape) for (ix, val, shape) in zip(sparse_indices, sparse_values, sparse_shapes)] return dict(zip(sparse_keys + dense_keys, sparse_tensors + dense_values)) def parse_single_example(serialized, features, name=None, example_names=None): """Parses a single `Example` proto. 
Similar to `parse_example`, except: For dense tensors, the returned `Tensor` is identical to the output of `parse_example`, except there is no batch dimension, the output shape is the same as the shape given in `dense_shape`. For `SparseTensor`s, the first (batch) column of the indices matrix is removed (the indices matrix is a column vector), the values vector is unchanged, and the first (`batch_size`) entry of the shape vector is removed (it is now a single element vector). One might see performance advantages by batching `Example` protos with `parse_example` instead of using this function directly. Args: serialized: A scalar string Tensor, a single serialized Example. See `_parse_single_example_raw` documentation for more details. features: A `dict` mapping feature keys to `FixedLenFeature` or `VarLenFeature` values. name: A name for this operation (optional). example_names: (Optional) A scalar string Tensor, the associated name. See `_parse_single_example_raw` documentation for more details. Returns: A `dict` mapping feature keys to `Tensor` and `SparseTensor` values. Raises: ValueError: if any feature is invalid. """ if not features: raise ValueError("Missing features.") features = _prepend_none_dimension(features) (sparse_keys, sparse_types, dense_keys, dense_types, dense_defaults, dense_shapes) = _features_to_raw_params( features, [VarLenFeature, FixedLenFeature, FixedLenSequenceFeature, SparseFeature]) outputs = _parse_single_example_raw( serialized, example_names, sparse_keys, sparse_types, dense_keys, dense_types, dense_defaults, dense_shapes, name) return _construct_sparse_tensors_for_sparse_features(features, outputs) def _parse_single_example_raw(serialized, names=None, sparse_keys=None, sparse_types=None, dense_keys=None, dense_types=None, dense_defaults=None, dense_shapes=None, name=None): """Parses a single `Example` proto. Args: serialized: A scalar string Tensor, a single serialized Example. See `_parse_example_raw` documentation for more details. names: (Optional) A scalar string Tensor, the associated name. See `_parse_example_raw` documentation for more details. sparse_keys: See `_parse_example_raw` documentation for more details. sparse_types: See `_parse_example_raw` documentation for more details. dense_keys: See `_parse_example_raw` documentation for more details. dense_types: See `_parse_example_raw` documentation for more details. dense_defaults: See `_parse_example_raw` documentation for more details. dense_shapes: See `_parse_example_raw` documentation for more details. name: A name for this operation (optional). Returns: A `dict` mapping feature keys to `Tensor` and `SparseTensor` values. Raises: ValueError: if any feature is invalid. 
""" with ops.name_scope(name, "ParseSingleExample", [serialized, names]): serialized = ops.convert_to_tensor(serialized) serialized_shape = serialized.get_shape() if serialized_shape.ndims is not None: if serialized_shape.ndims != 0: raise ValueError("Input serialized must be a scalar") else: serialized = control_flow_ops.with_dependencies( [control_flow_ops.Assert( math_ops.equal(array_ops.rank(serialized), 0), ["Input serialized must be a scalar"], name="SerializedIsScalar")], serialized, name="SerializedDependencies") serialized = array_ops.expand_dims(serialized, 0) if names is not None: names = ops.convert_to_tensor(names) names_shape = names.get_shape() if names_shape.ndims is not None: if names_shape.ndims != 0: raise ValueError("Input names must be a scalar") else: names = control_flow_ops.with_dependencies( [control_flow_ops.Assert( math_ops.equal(array_ops.rank(names), 0), ["Input names must be a scalar"], name="NamesIsScalar")], names, name="NamesDependencies") names = array_ops.expand_dims(names, 0) outputs = _parse_example_raw( serialized, names=names, sparse_keys=sparse_keys, sparse_types=sparse_types, dense_keys=dense_keys, dense_types=dense_types, dense_defaults=dense_defaults, dense_shapes=dense_shapes, name=name) if dense_keys is not None: for d in dense_keys: d_name = re.sub("[^A-Za-z0-9_.\\-/]", "_", d) outputs[d] = array_ops.squeeze( outputs[d], [0], name="Squeeze_%s" % d_name) if sparse_keys is not None: for s in sparse_keys: s_name = re.sub("[^A-Za-z0-9_.\\-/]", "_", s) outputs[s] = sparse_tensor.SparseTensor( array_ops.slice(outputs[s].indices, [0, 1], [-1, -1], name="Slice_Indices_%s" % s_name), outputs[s].values, array_ops.slice(outputs[s].dense_shape, [1], [-1], name="Squeeze_Shape_%s" % s_name)) return outputs def parse_single_sequence_example( serialized, context_features=None, sequence_features=None, example_name=None, name=None): # pylint: disable=line-too-long """Parses a single `SequenceExample` proto. Parses a single serialized [`SequenceExample`](https://www.tensorflow.org/code/tensorflow/core/example/example.proto) proto given in `serialized`. This op parses a serialized sequence example into a tuple of dictionaries mapping keys to `Tensor` and `SparseTensor` objects respectively. The first dictionary contains mappings for keys appearing in `context_features`, and the second dictionary contains mappings for keys appearing in `sequence_features`. At least one of `context_features` and `sequence_features` must be provided and non-empty. The `context_features` keys are associated with a `SequenceExample` as a whole, independent of time / frame. In contrast, the `sequence_features` keys provide a way to access variable-length data within the `FeatureList` section of the `SequenceExample` proto. While the shapes of `context_features` values are fixed with respect to frame, the frame dimension (the first dimension) of `sequence_features` values may vary between `SequenceExample` protos, and even between `feature_list` keys within the same `SequenceExample`. `context_features` contains `VarLenFeature` and `FixedLenFeature` objects. Each `VarLenFeature` is mapped to a `SparseTensor`, and each `FixedLenFeature` is mapped to a `Tensor`, of the specified type, shape, and default value. `sequence_features` contains `VarLenFeature` and `FixedLenSequenceFeature` objects. Each `VarLenFeature` is mapped to a `SparseTensor`, and each `FixedLenSequenceFeature` is mapped to a `Tensor`, each of the specified type. 
The shape will be `(T,) + df.dense_shape` for `FixedLenSequenceFeature` `df`, where `T` is the length of the associated `FeatureList` in the `SequenceExample`. For instance, `FixedLenSequenceFeature([])` yields a scalar 1-D `Tensor` of static shape `[None]` and dynamic shape `[T]`, while `FixedLenSequenceFeature([k])` (for `int k >= 1`) yields a 2-D matrix `Tensor` of static shape `[None, k]` and dynamic shape `[T, k]`. Each `SparseTensor` corresponding to `sequence_features` represents a ragged vector. Its indices are `[time, index]`, where `time` is the `FeatureList` entry and `index` is the value's index in the list of values associated with that time. `FixedLenFeature` entries with a `default_value` and `FixedLenSequenceFeature` entries with `allow_missing=True` are optional; otherwise, we will fail if that `Feature` or `FeatureList` is missing from any example in `serialized`. `example_name` may contain a descriptive name for the corresponding serialized proto. This may be useful for debugging purposes, but it has no effect on the output. If not `None`, `example_name` must be a scalar. Args: serialized: A scalar (0-D Tensor) of type string, a single binary serialized `SequenceExample` proto. context_features: A `dict` mapping feature keys to `FixedLenFeature` or `VarLenFeature` values. These features are associated with a `SequenceExample` as a whole. sequence_features: A `dict` mapping feature keys to `FixedLenSequenceFeature` or `VarLenFeature` values. These features are associated with data within the `FeatureList` section of the `SequenceExample` proto. example_name: A scalar (0-D Tensor) of strings (optional), the name of the serialized proto. name: A name for this operation (optional). Returns: A tuple of two `dict`s, each mapping keys to `Tensor`s and `SparseTensor`s. The first dict contains the context key/values. The second dict contains the feature_list key/values. Raises: ValueError: if any feature is invalid. """ # pylint: enable=line-too-long if not (context_features or sequence_features): raise ValueError("Missing features.") (context_sparse_keys, context_sparse_types, context_dense_keys, context_dense_types, context_dense_defaults, context_dense_shapes) = _features_to_raw_params( context_features, [VarLenFeature, FixedLenFeature]) (feature_list_sparse_keys, feature_list_sparse_types, feature_list_dense_keys, feature_list_dense_types, feature_list_dense_defaults, feature_list_dense_shapes) = _features_to_raw_params( sequence_features, [VarLenFeature, FixedLenSequenceFeature]) return _parse_single_sequence_example_raw( serialized, context_sparse_keys, context_sparse_types, context_dense_keys, context_dense_types, context_dense_defaults, context_dense_shapes, feature_list_sparse_keys, feature_list_sparse_types, feature_list_dense_keys, feature_list_dense_types, feature_list_dense_shapes, feature_list_dense_defaults, example_name, name) def _parse_single_sequence_example_raw(serialized, context_sparse_keys=None, context_sparse_types=None, context_dense_keys=None, context_dense_types=None, context_dense_defaults=None, context_dense_shapes=None, feature_list_sparse_keys=None, feature_list_sparse_types=None, feature_list_dense_keys=None, feature_list_dense_types=None, feature_list_dense_shapes=None, feature_list_dense_defaults=None, debug_name=None, name=None): """Parses a single `SequenceExample` proto. Args: serialized: A scalar (0-D Tensor) of type string, a single binary serialized `SequenceExample` proto. 
context_sparse_keys: A list of string keys in the `SequenceExample`'s
      features.  The results for these keys will be returned as
      `SparseTensor` objects.
    context_sparse_types: A list of `DTypes`, the same length as
      `context_sparse_keys`.  Only `tf.float32` (`FloatList`),
      `tf.int64` (`Int64List`), and `tf.string` (`BytesList`) are supported.
    context_dense_keys: A list of string keys in the examples' features.
      The results for these keys will be returned as `Tensor`s.
    context_dense_types: A list of DTypes, same length as `context_dense_keys`.
      Only `tf.float32` (`FloatList`), `tf.int64` (`Int64List`),
      and `tf.string` (`BytesList`) are supported.
    context_dense_defaults: A dict mapping string keys to `Tensor`s.
      The keys of the dict must match `context_dense_keys`.
    context_dense_shapes: A list of tuples, same length as
      `context_dense_keys`.  The shape of the data for each context_dense
      feature referenced by `context_dense_keys`.  Required for any input
      tensors identified by `context_dense_keys` whose shapes are anything
      other than `[]` or `[1]`.
    feature_list_sparse_keys: A list of string keys in the `SequenceExample`'s
      feature_lists.  The results for these keys will be returned as
      `SparseTensor` objects.
    feature_list_sparse_types: A list of `DTypes`, same length as
      `feature_list_sparse_keys`.  Only `tf.float32` (`FloatList`),
      `tf.int64` (`Int64List`), and `tf.string` (`BytesList`) are supported.
    feature_list_dense_keys: A list of string keys in the `SequenceExample`'s
      feature_lists.  The results for these keys will be returned as
      `Tensor`s.
    feature_list_dense_types: A list of `DTypes`, same length as
      `feature_list_dense_keys`.  Only `tf.float32` (`FloatList`),
      `tf.int64` (`Int64List`), and `tf.string` (`BytesList`) are supported.
    feature_list_dense_shapes: A list of tuples, same length as
      `feature_list_dense_keys`.  The shape of the data for each
      `FeatureList` feature referenced by `feature_list_dense_keys`.
    feature_list_dense_defaults: A dict mapping key strings to values.
      The only currently allowed value is `None`.  Any key appearing in this
      dict with value `None` is allowed to be missing from the
      `SequenceExample`.  If missing, the key is treated as zero-length.
    debug_name: A scalar (0-D Tensor) of strings (optional), the name of
      the serialized proto.
    name: A name for this operation (optional).

  Returns:
    A tuple of two `dict`s, each mapping keys to `Tensor`s and
    `SparseTensor`s. The first dict contains the context key/values.
    The second dict contains the feature_list key/values.

  Raises:
    ValueError: If context_sparse and context_dense key sets intersect,
      if input lengths do not match up, or if a value in
      feature_list_dense_defaults is not None.
    TypeError: if feature_list_dense_defaults is neither None nor a dict.
  """
  with ops.name_scope(name, "ParseSingleSequenceExample", [serialized]):
    context_dense_defaults = (
        {} if context_dense_defaults is None else context_dense_defaults)
    context_sparse_keys = (
        [] if context_sparse_keys is None else context_sparse_keys)
    context_sparse_types = (
        [] if context_sparse_types is None else context_sparse_types)
    context_dense_keys = (
        [] if context_dense_keys is None else context_dense_keys)
    context_dense_types = (
        [] if context_dense_types is None else context_dense_types)
    context_dense_shapes = (
        [[]] * len(context_dense_keys)
        if context_dense_shapes is None else context_dense_shapes)
    feature_list_sparse_keys = (
        [] if feature_list_sparse_keys is None else feature_list_sparse_keys)
    feature_list_sparse_types = (
        [] if feature_list_sparse_types is None else feature_list_sparse_types)
    feature_list_dense_keys = (
        [] if feature_list_dense_keys is None else feature_list_dense_keys)
    feature_list_dense_types = (
        [] if feature_list_dense_types is None else feature_list_dense_types)
    feature_list_dense_shapes = (
        [[]] * len(feature_list_dense_keys)
        if feature_list_dense_shapes is None else feature_list_dense_shapes)
    feature_list_dense_defaults = (
        dict() if feature_list_dense_defaults is None
        else feature_list_dense_defaults)
    debug_name = "" if debug_name is None else debug_name

    # Internal
    feature_list_dense_missing_assumed_empty = []

    num_context_dense = len(context_dense_keys)
    num_feature_list_dense = len(feature_list_dense_keys)
    num_context_sparse = len(context_sparse_keys)
    num_feature_list_sparse = len(feature_list_sparse_keys)

    if len(context_dense_shapes) != num_context_dense:
      raise ValueError(
          "len(context_dense_shapes) != len(context_dense_keys): %d vs. %d"
          % (len(context_dense_shapes), num_context_dense))
    if len(context_dense_types) != num_context_dense:
      raise ValueError(
          "len(context_dense_types) != len(context_dense_keys): %d vs. %d"
          % (len(context_dense_types), num_context_dense))
    if len(feature_list_dense_shapes) != num_feature_list_dense:
      raise ValueError(
          "len(feature_list_dense_shapes) != len(feature_list_dense_keys): "
          "%d vs. %d" % (len(feature_list_dense_shapes),
                         num_feature_list_dense))
    if len(feature_list_dense_types) != num_feature_list_dense:
      raise ValueError(
          "len(feature_list_dense_types) != len(feature_list_dense_keys): "
          "%d vs. %d" % (len(feature_list_dense_types),
                         num_feature_list_dense))
    if len(context_sparse_types) != num_context_sparse:
      raise ValueError(
          "len(context_sparse_types) != len(context_sparse_keys): %d vs. %d"
          % (len(context_sparse_types), num_context_sparse))
    if len(feature_list_sparse_types) != num_feature_list_sparse:
      raise ValueError(
          "len(feature_list_sparse_types) != len(feature_list_sparse_keys): "
          "%d vs. %d" % (len(feature_list_sparse_types),
                         num_feature_list_sparse))
    if (num_context_dense + num_context_sparse
        + num_feature_list_dense + num_feature_list_sparse) == 0:
      raise ValueError(
          "Must provide at least one context_sparse key, context_dense key, "
          "feature_list_sparse key, or feature_list_dense key")
    if not set(context_dense_keys).isdisjoint(set(context_sparse_keys)):
      raise ValueError(
          "context_dense and context_sparse keys must not intersect; "
          "intersection: %s" %
          set(context_dense_keys).intersection(set(context_sparse_keys)))
    if not set(feature_list_dense_keys).isdisjoint(
        set(feature_list_sparse_keys)):
      raise ValueError(
          "feature_list_dense and feature_list_sparse keys must not "
          "intersect; intersection: %s" %
          set(feature_list_dense_keys).intersection(
              set(feature_list_sparse_keys)))
    if not isinstance(feature_list_dense_defaults, dict):
      raise TypeError("feature_list_dense_defaults must be a dict")
    for k, v in feature_list_dense_defaults.items():
      if v is not None:
        raise ValueError("Value feature_list_dense_defaults[%s] must be None"
                         % k)
      feature_list_dense_missing_assumed_empty.append(k)

    context_dense_defaults_vec = []
    for i, key in enumerate(context_dense_keys):
      default_value = context_dense_defaults.get(key)
      if default_value is None:
        default_value = constant_op.constant([], dtype=context_dense_types[i])
      elif not isinstance(default_value, ops.Tensor):
        key_name = "key_" + re.sub("[^A-Za-z0-9_.\\-/]", "_", key)
        default_value = ops.convert_to_tensor(
            default_value, dtype=context_dense_types[i], name=key_name)
        default_value = array_ops.reshape(
            default_value, context_dense_shapes[i])

      context_dense_defaults_vec.append(default_value)

    context_dense_shapes = [tensor_shape.as_shape(shape).as_proto()
                            for shape in context_dense_shapes]
    feature_list_dense_shapes = [tensor_shape.as_shape(shape).as_proto()
                                 for shape in feature_list_dense_shapes]

    # pylint: disable=protected-access
    outputs = gen_parsing_ops._parse_single_sequence_example(
        serialized=serialized,
        debug_name=debug_name,
        context_dense_defaults=context_dense_defaults_vec,
        context_sparse_keys=context_sparse_keys,
        context_sparse_types=context_sparse_types,
        context_dense_keys=context_dense_keys,
        context_dense_shapes=context_dense_shapes,
        feature_list_sparse_keys=feature_list_sparse_keys,
        feature_list_sparse_types=feature_list_sparse_types,
        feature_list_dense_keys=feature_list_dense_keys,
        feature_list_dense_types=feature_list_dense_types,
        feature_list_dense_shapes=feature_list_dense_shapes,
        feature_list_dense_missing_assumed_empty=(
            feature_list_dense_missing_assumed_empty),
        name=name)
    # pylint: enable=protected-access

    (context_sparse_indices, context_sparse_values,
     context_sparse_shapes, context_dense_values,
     feature_list_sparse_indices, feature_list_sparse_values,
     feature_list_sparse_shapes, feature_list_dense_values) = outputs

    context_sparse_tensors = [
        sparse_tensor.SparseTensor(ix, val, shape) for (ix, val, shape)
        in zip(context_sparse_indices,
               context_sparse_values,
               context_sparse_shapes)]

    feature_list_sparse_tensors = [
        sparse_tensor.SparseTensor(ix, val, shape) for (ix, val, shape)
        in zip(feature_list_sparse_indices,
               feature_list_sparse_values,
               feature_list_sparse_shapes)]

    context_output = dict(
        zip(context_sparse_keys + context_dense_keys,
            context_sparse_tensors + context_dense_values))
    feature_list_output = dict(
        zip(feature_list_sparse_keys + feature_list_dense_keys,
            feature_list_sparse_tensors + feature_list_dense_values))

    return (context_output, feature_list_output)
apache-2.0
-5,405,936,189,678,612,000
41.634948
119
0.653674
false
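The parsing helpers in the record above are easiest to see end to end through the public wrapper; a minimal usage sketch, assuming the TF 1.x graph-mode API, with hypothetical feature keys "length" and "tokens":

import tensorflow as tf

# A real serialized SequenceExample would be produced elsewhere; here we
# just feed it through a placeholder at session run time.
serialized = tf.placeholder(tf.string, shape=[])
context, sequence = tf.parse_single_sequence_example(
    serialized,
    context_features={"length": tf.FixedLenFeature([], dtype=tf.int64)},
    sequence_features={"tokens": tf.FixedLenSequenceFeature([], dtype=tf.int64)})
# context["length"] is a scalar Tensor; sequence["tokens"] has dynamic shape [T].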
abonil91/ncanda-data-integration
scripts/redcap/scoring/ctq/__init__.py
1
3092
#!/usr/bin/env python

##
##  Copyright 2016 SRI International
##  See COPYING file distributed along with the package for the copyright and license terms.
##

import pandas

import Rwrapper

#
# Variables from surveys needed for CTQ
#

# LimeSurvey field names
lime_fields = [ "ctq_set1 [ctq1]", "ctq_set1 [ctq2]", "ctq_set1 [ctq3]", "ctq_set1 [ctq4]", "ctq_set1 [ctq5]", "ctq_set1 [ctq6]", "ctq_set1 [ctq7]",
                "ctq_set2 [ctq8]", "ctq_set2 [ctq9]", "ctq_set2 [ct10]", "ctq_set2 [ct11]", "ctq_set2 [ct12]", "ctq_set2 [ct13]", "ctq_set2 [ct14]",
                "ctq_set3 [ctq15]", "ctq_set3 [ctq16]", "ctq_set3 [ctq17]", "ctq_set3 [ctq18]", "ctq_set3 [ctq19]", "ctq_set3 [ctq20]", "ctq_set3 [ctq21]",
                "ctq_set4 [ctq22]", "ctq_set4 [ctq23]", "ctq_set4 [ctq24]", "ctq_set4 [ctq25]", "ctq_set4 [ctq26]", "ctq_set4 [ctq27]", "ctq_set4 [ctq28]" ]

# Dictionary to recover LimeSurvey field names from REDCap names
rc2lime = dict()
for field in lime_fields:
    rc2lime[Rwrapper.label_to_sri( 'youthreport2', field )] = field

# REDCap fields names
input_fields = { 'mrireport' : [ 'youth_report_2_complete', 'youthreport2_missing' ] + rc2lime.keys() }

#
# This determines the name of the form in REDCap where the results are posted.
#
output_form = 'clinical'

#
# CTQ field names mapping from R to REDCap
#
R2rc = { 'Emotional Abuse Scale Total Score' : 'ctq_ea',
         'Physical Abuse Scale Total Score' : 'ctq_pa',
         'Sexual Abuse Scale Total Score' : 'ctq_sa',
         'Emotional Neglect Scale Total Score' : 'ctq_en',
         'Physical Neglect Scale Total Score' : 'ctq_pn',
         'Minimization/Denial Scale Total Score' : 'ctq_minds' }

#
# Scoring function - take requested data (as requested by "input_fields") for each (subject,event), and demographics (date of birth, gender) for each subject.
#
def compute_scores( data, demographics ):
    # Get rid of all records that don't have YR2
    # (dropna returns a copy, so assign the result; drop rows, not columns)
    data = data.dropna( subset=['youth_report_2_complete'] )
    data = data[ data['youth_report_2_complete'] > 0 ]
    data = data[ ~(data['youthreport2_missing'] > 0) ]

    # If no records to score, return empty DF
    if len( data ) == 0:
        return pandas.DataFrame()

    # Replace all column labels with the original LimeSurvey names
    data.columns = Rwrapper.map_labels( data.columns, rc2lime )

    # Call the scoring function for all table rows
    scores = data.apply( Rwrapper.runscript, axis=1, Rscript='ctq/CTQ.R', scores_key='CTQ.ary' )

    # Replace all score columns with REDCap field names
    scores.columns = Rwrapper.map_labels( scores.columns, R2rc )

    # Simply copy completion status from the input surveys
    scores['ctq_complete'] = data['youth_report_2_complete'].map( int )

    # Make a proper multi-index for the scores table
    scores.index = pandas.MultiIndex.from_tuples(scores.index)
    scores.index.names = ['study_id', 'redcap_event_name']

    # Return the computed scores - this is what will be imported back into REDCap
    outfield_list = [ 'ctq_complete' ] + R2rc.values()
    return scores[ outfield_list ]
bsd-3-clause
6,952,695,894,297,899,000
39.684211
224
0.663648
false
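The row filtering at the top of compute_scores above is a generic pandas pattern; a self-contained sketch with toy data (column contents invented for illustration):

import pandas

data = pandas.DataFrame({'youth_report_2_complete': [2, None, 2],
                         'youthreport2_missing': [0, 0, 1],
                         'ctq1': [1, 2, 3]})
data = data.dropna(subset=['youth_report_2_complete'])  # drop incomplete rows
data = data[data['youth_report_2_complete'] > 0]        # keep completed surveys
data = data[~(data['youthreport2_missing'] > 0)]        # drop flagged-missing rows
print(data)  # only the first row survives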
Simran-B/arangodb
3rdParty/V8-4.3.61/third_party/python_26/Lib/site-packages/win32/Demos/security/account_rights.py
34
1472
import win32security,win32file,win32api,ntsecuritycon,win32con from security_enums import TRUSTEE_TYPE,TRUSTEE_FORM,ACE_FLAGS,ACCESS_MODE new_privs = ((win32security.LookupPrivilegeValue('',ntsecuritycon.SE_SECURITY_NAME),win32con.SE_PRIVILEGE_ENABLED), (win32security.LookupPrivilegeValue('',ntsecuritycon.SE_CREATE_PERMANENT_NAME),win32con.SE_PRIVILEGE_ENABLED), (win32security.LookupPrivilegeValue('','SeEnableDelegationPrivilege'),win32con.SE_PRIVILEGE_ENABLED) ##doesn't seem to be in ntsecuritycon.py ? ) ph = win32api.GetCurrentProcess() th = win32security.OpenProcessToken(ph,win32security.TOKEN_ALL_ACCESS) ##win32con.TOKEN_ADJUST_PRIVILEGES) win32security.AdjustTokenPrivileges(th,0,new_privs) policy_handle = win32security.GetPolicyHandle('',win32security.POLICY_ALL_ACCESS) tmp_sid = win32security.LookupAccountName('','tmp')[0] privs=[ntsecuritycon.SE_DEBUG_NAME,ntsecuritycon.SE_TCB_NAME,ntsecuritycon.SE_RESTORE_NAME,ntsecuritycon.SE_REMOTE_SHUTDOWN_NAME] win32security.LsaAddAccountRights(policy_handle,tmp_sid,privs) privlist=win32security.LsaEnumerateAccountRights(policy_handle,tmp_sid) for priv in privlist: print priv privs=[ntsecuritycon.SE_DEBUG_NAME,ntsecuritycon.SE_TCB_NAME] win32security.LsaRemoveAccountRights(policy_handle,tmp_sid,0,privs) privlist=win32security.LsaEnumerateAccountRights(policy_handle,tmp_sid) for priv in privlist: print priv win32security.LsaClose(policy_handle)
apache-2.0
-8,382,494,482,341,797,000
46.483871
156
0.80231
false
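The LSA demo above never releases policy_handle if one of the calls raises; a hedged sketch of just the enumerate step with cleanup (assuming pywin32 on Windows and an existing local account named 'tmp', as in the demo):

import win32security

policy_handle = win32security.GetPolicyHandle('', win32security.POLICY_ALL_ACCESS)
try:
    tmp_sid = win32security.LookupAccountName('', 'tmp')[0]
    for priv in win32security.LsaEnumerateAccountRights(policy_handle, tmp_sid):
        print(priv)
finally:
    # Release the LSA policy handle even when a lookup fails.
    win32security.LsaClose(policy_handle)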
deklungel/iRulez
src/dimmer/mqtt_sender.py
1
1164
import src.irulez.util as util import src.irulez.topic_factory as topic_factory import src.irulez.log as log import paho.mqtt.client as mqtt import uuid logger = log.get_logger('dimmer_mqtt_sender') class MqttSender: def __init__(self, client: mqtt.Client): self.__client = client def publish_dimming_action_to_timer(self, dimming_action_id: uuid.UUID, delay: int): publish_topic = topic_factory.create_timer_dimmer_timer_fired_topic() topic_name = topic_factory.create_timer_dimmer_timer_fired_response_topic() payload = util.serialize_json({ 'topic': topic_name, 'payload': str(dimming_action_id), 'delay': delay }) logger.debug(f"Publishing: {publish_topic}{payload}") self.__client.publish(publish_topic, payload, 0, False) def publish_dimming_action_to_arduino(self, arduino_name: str, pin_number: int, dim_value: int): publish_topic = topic_factory.create_arduino_dim_action_topic(arduino_name, pin_number) logger.debug(f"Publishing: {publish_topic} / {dim_value}") self.__client.publish(publish_topic, dim_value, 0, False)
mit
-1,275,875,996,077,186,000
39.137931
100
0.67354
false
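A minimal sketch of wiring the MqttSender above to a paho client; the broker host/port and device names here are hypothetical:

import uuid
import paho.mqtt.client as mqtt
from src.dimmer.mqtt_sender import MqttSender  # path as in the record above

client = mqtt.Client()
client.connect('localhost', 1883)  # hypothetical broker
sender = MqttSender(client)
# Dim pin 3 of a hypothetical 'arduino1' to value 80, then schedule a timer.
sender.publish_dimming_action_to_arduino('arduino1', pin_number=3, dim_value=80)
sender.publish_dimming_action_to_timer(uuid.uuid4(), delay=500)
client.loop_start()  # background network loop delivers the queued publishes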
dandan94/OpenGLTest
finalOpenGL/HelloGLFW/lib/boost_1_59_0/tools/build/test/clean.py
44
3076
#!/usr/bin/python # Copyright (C) Vladimir Prus 2006. # Distributed under the Boost Software License, Version 1.0. (See # accompanying file LICENSE_1_0.txt or copy at # http://www.boost.org/LICENSE_1_0.txt) import BoostBuild t = BoostBuild.Tester(use_test_config=False) t.write("a.cpp", "int main() {}\n") t.write("jamroot.jam", "exe a : a.cpp sub1//sub1 sub2//sub2 sub3//sub3 ;") t.write("sub1/jamfile.jam", """\ lib sub1 : sub1.cpp sub1_2 ../sub2//sub2 ; lib sub1_2 : sub1_2.cpp ; """) t.write("sub1/sub1.cpp", """\ #ifdef _WIN32 __declspec(dllexport) #endif void sub1() {} """) t.write("sub1/sub1_2.cpp", """\ #ifdef _WIN32 __declspec(dllexport) #endif void sub1() {} """) t.write("sub2/jamfile.jam", "lib sub2 : sub2.cpp ;") t.write("sub2/sub2.cpp", """\ #ifdef _WIN32 __declspec(dllexport) #endif void sub2() {} """) t.write("sub3/jamroot.jam", "lib sub3 : sub3.cpp ;") t.write("sub3/sub3.cpp", """\ #ifdef _WIN32 __declspec(dllexport) #endif void sub3() {} """) # 'clean' should not remove files under separate jamroot.jam. t.run_build_system() t.run_build_system(["--clean"]) t.expect_removal("bin/$toolset/debug/a.obj") t.expect_removal("sub1/bin/$toolset/debug/sub1.obj") t.expect_removal("sub1/bin/$toolset/debug/sub1_2.obj") t.expect_removal("sub2/bin/$toolset/debug/sub2.obj") t.expect_nothing("sub3/bin/$toolset/debug/sub3.obj") # 'clean-all' removes everything it can reach. t.run_build_system() t.run_build_system(["--clean-all"]) t.expect_removal("bin/$toolset/debug/a.obj") t.expect_removal("sub1/bin/$toolset/debug/sub1.obj") t.expect_removal("sub1/bin/$toolset/debug/sub1_2.obj") t.expect_removal("sub2/bin/$toolset/debug/sub2.obj") t.expect_nothing("sub3/bin/$toolset/debug/sub3.obj") # 'clean' together with project target removes only under that project. t.run_build_system() t.run_build_system(["sub1", "--clean"]) t.expect_nothing("bin/$toolset/debug/a.obj") t.expect_removal("sub1/bin/$toolset/debug/sub1.obj") t.expect_removal("sub1/bin/$toolset/debug/sub1_2.obj") t.expect_nothing("sub2/bin/$toolset/debug/sub2.obj") t.expect_nothing("sub3/bin/$toolset/debug/sub3.obj") # 'clean-all' removes everything. t.run_build_system() t.run_build_system(["sub1", "--clean-all"]) t.expect_nothing("bin/$toolset/debug/a.obj") t.expect_removal("sub1/bin/$toolset/debug/sub1.obj") t.expect_removal("sub1/bin/$toolset/debug/sub1_2.obj") t.expect_removal("sub2/bin/$toolset/debug/sub2.obj") t.expect_nothing("sub3/bin/$toolset/debug/sub3.obj") # If main target is explicitly named, we should not remove files from other # targets. t.run_build_system() t.run_build_system(["sub1//sub1", "--clean"]) t.expect_removal("sub1/bin/$toolset/debug/sub1.obj") t.expect_nothing("sub1/bin/$toolset/debug/sub1_2.obj") t.expect_nothing("sub2/bin/$toolset/debug/sub2.obj") t.expect_nothing("sub3/bin/$toolset/debug/sub3.obj") # Regression test: sources of the 'cast' rule were mistakenly deleted. t.rm(".") t.write("jamroot.jam", """\ import cast ; cast a cpp : a.h ; """) t.write("a.h", "") t.run_build_system(["--clean"]) t.expect_nothing("a.h") t.cleanup()
gpl-3.0
2,652,525,735,450,800,600
28.576923
75
0.696034
false
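Every check in the record above follows one write/build/clean/expect cycle; a minimal sketch of a new check using the same BoostBuild API (target names hypothetical):

import BoostBuild

t = BoostBuild.Tester(use_test_config=False)
t.write("hello.cpp", "int main() {}\n")
t.write("jamroot.jam", "exe hello : hello.cpp ;")

t.run_build_system()             # build
t.run_build_system(["--clean"])  # then clean the same project
t.expect_removal("bin/$toolset/debug/hello.obj")
t.cleanup()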
master2be1/pychess
lib/pychess/widgets/preferencesDialog.py
20
23931
from __future__ import print_function import sys, os from os import listdir from os.path import isdir, isfile, splitext from xml.dom import minidom from gi.repository import Gtk, GdkPixbuf from gi.repository import Gdk from pychess.System.prefix import addDataPrefix, getDataPrefix from pychess.System.glock import glock_connect_after from pychess.System import conf, gstreamer, uistuff from pychess.System.uistuff import POSITION_GOLDEN from pychess.Players.engineNest import discoverer from pychess.Utils.const import * from pychess.Utils.IconLoader import load_icon from pychess.gfx import Pieces firstRun = True def run(widgets): global firstRun if firstRun: initialize(widgets) firstRun = False widgets["preferences"].show() widgets["preferences"].present() def initialize(widgets): GeneralTab(widgets) HintTab(widgets) SoundTab(widgets) PanelTab(widgets) ThemeTab(widgets) uistuff.keepWindowSize("preferencesdialog", widgets["preferences"], defaultPosition=POSITION_GOLDEN) def delete_event (widget, *args): widgets["preferences"].hide() return True widgets["preferences"].connect("delete-event", delete_event) widgets["preferences"].connect("key-press-event", lambda w,e: w.event(Gdk.Event(Gdk.EventType.DELETE)) if e.keyval == Gdk.KEY_Escape else None) ################################################################################ # General initing # ################################################################################ class GeneralTab: def __init__ (self, widgets): conf.set("firstName", conf.get("firstName", conf.username)) conf.set("secondName", conf.get("secondName", _("Guest"))) # Give to uistuff.keeper for key in ("firstName", "secondName", "showEmt", "showEval", "hideTabs", "faceToFace", "showCords", "showCaptured", "figuresInNotation", "fullAnimation", "moveAnimation", "noAnimation"): uistuff.keep(widgets[key], key) # Options on by default for key in ("autoRotate", "fullAnimation", "showBlunder"): uistuff.keep(widgets[key], key, first_value=True) ################################################################################ # Hint initing # ################################################################################ def anal_combo_get_value (combobox): engine = list(discoverer.getAnalyzers())[combobox.get_active()] return engine.get("md5") def anal_combo_set_value (combobox, value, show_arrow_check, ana_check, analyzer_type): engine = discoverer.getEngineByMd5(value) if engine is None: combobox.set_active(0) # This return saves us from the None-engine being used # in later code -Jonas Thiem return else: try: index = list(discoverer.getAnalyzers()).index(engine) except ValueError: index = 0 combobox.set_active(index) from pychess.Main import gameDic from pychess.widgets.gamewidget import widgets for gmwidg in gameDic.keys(): spectators = gmwidg.gamemodel.spectators md5 = engine.get('md5') if analyzer_type in spectators and \ spectators[analyzer_type].md5 != md5: gmwidg.gamemodel.remove_analyzer(analyzer_type) gmwidg.gamemodel.start_analyzer(analyzer_type) if not widgets[show_arrow_check].get_active(): gmwidg.gamemodel.pause_analyzer(analyzer_type) class HintTab: def __init__ (self, widgets): self.widgets = widgets # Options on by default for key in ("opening_check", "endgame_check", "online_egtb_check", "analyzer_check", "inv_analyzer_check"): uistuff.keep(widgets[key], key, first_value=True) # Opening book default_path = os.path.join(addDataPrefix("pychess_book.bin")) path = conf.get("opening_file_entry", default_path) conf.set("opening_file_entry", path) book_chooser_dialog = 
Gtk.FileChooserDialog(_("Select book file"), None, Gtk.FileChooserAction.OPEN, (Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL, Gtk.STOCK_OPEN, Gtk.ResponseType.OK)) book_chooser_button = Gtk.FileChooserButton(book_chooser_dialog) filter = Gtk.FileFilter() filter.set_name(_("Opening books")) filter.add_pattern("*.bin") book_chooser_dialog.add_filter(filter) book_chooser_button.set_filename(path) self.widgets["bookChooserDock"].add(book_chooser_button) book_chooser_button.show() def select_new_book(button): new_book = book_chooser_dialog.get_filename() if new_book: conf.set("opening_file_entry", new_book) else: # restore the original book_chooser_dialog.set_filename(path) book_chooser_button.connect("file-set", select_new_book) def on_opening_check_toggled (check): widgets["opening_hbox"].set_sensitive(check.get_active()) widgets["opening_check"].connect_after("toggled", on_opening_check_toggled) # Endgame default_path = os.path.join(getDataPrefix()) egtb_path = conf.get("egtb_path", default_path) conf.set("egtb_path", egtb_path) egtb_chooser_dialog = Gtk.FileChooserDialog(_("Select Gaviota TB path"), None, Gtk.FileChooserAction.SELECT_FOLDER, (Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL, Gtk.STOCK_OPEN, Gtk.ResponseType.OK)) egtb_chooser_button = Gtk.FileChooserButton.new_with_dialog(egtb_chooser_dialog) egtb_chooser_button.set_current_folder(egtb_path) self.widgets["egtbChooserDock"].add(egtb_chooser_button) egtb_chooser_button.show() def select_egtb(button): new_directory = egtb_chooser_dialog.get_filename() if new_directory != egtb_path: conf.set("egtb_path", new_directory) egtb_chooser_button.connect("current-folder-changed", select_egtb) def on_endgame_check_toggled (check): widgets["endgame_hbox"].set_sensitive(check.get_active()) widgets["endgame_check"].connect_after("toggled", on_endgame_check_toggled) # Analyzing engines uistuff.createCombo(widgets["ana_combobox"]) uistuff.createCombo(widgets["inv_ana_combobox"]) from pychess.widgets import newGameDialog def update_analyzers_store(discoverer): data = [(item[0], item[1]) for item in newGameDialog.analyzerItems] uistuff.updateCombo(widgets["ana_combobox"], data) uistuff.updateCombo(widgets["inv_ana_combobox"], data) glock_connect_after(discoverer, "all_engines_discovered", update_analyzers_store) update_analyzers_store(discoverer) # Save, load and make analyze combos active conf.set("ana_combobox", conf.get("ana_combobox", 0)) conf.set("inv_ana_combobox", conf.get("inv_ana_combobox", 0)) def on_analyzer_check_toggled (check): widgets["analyzers_vbox"].set_sensitive(check.get_active()) from pychess.Main import gameDic if gameDic: if check.get_active(): for gmwidg in gameDic.keys(): gmwidg.gamemodel.restart_analyzer(HINT) if not widgets["hint_mode"].get_active(): gmwidg.gamemodel.pause_analyzer(HINT) else: for gmwidg in gameDic.keys(): gmwidg.gamemodel.remove_analyzer(HINT) widgets["analyzers_vbox"].set_sensitive( widgets["analyzer_check"].get_active()) widgets["analyzer_check"].connect_after("toggled", on_analyzer_check_toggled) def on_invanalyzer_check_toggled (check): widgets["inv_analyzers_vbox"].set_sensitive(check.get_active()) from pychess.Main import gameDic if gameDic: if check.get_active(): for gmwidg in gameDic.keys(): gmwidg.gamemodel.restart_analyzer(SPY) if not widgets["spy_mode"].get_active(): gmwidg.gamemodel.pause_analyzer(SPY) else: for gmwidg in gameDic.keys(): gmwidg.gamemodel.remove_analyzer(SPY) widgets["inv_analyzers_vbox"].set_sensitive( widgets["inv_analyzer_check"].get_active()) 
widgets["inv_analyzer_check"].connect_after("toggled", on_invanalyzer_check_toggled) # Give widgets to keeper uistuff.keep(widgets["ana_combobox"], "ana_combobox", anal_combo_get_value, lambda combobox, value: anal_combo_set_value(combobox, value, "hint_mode", "analyzer_check", HINT)) uistuff.keep(widgets["inv_ana_combobox"], "inv_ana_combobox", anal_combo_get_value, lambda combobox, value: anal_combo_set_value(combobox, value, "spy_mode", "inv_analyzer_check", SPY)) uistuff.keep(widgets["max_analysis_spin"], "max_analysis_spin", first_value=3) ################################################################################ # Sound initing # ################################################################################ # Setup default sounds for i in range(11): if not conf.hasKey("soundcombo%d" % i): conf.set("soundcombo%d" % i, SOUND_URI) if not conf.hasKey("sounduri0"): conf.set("sounduri0", "file://"+addDataPrefix("sounds/move1.ogg")) if not conf.hasKey("sounduri1"): conf.set("sounduri1", "file://"+addDataPrefix("sounds/check1.ogg")) if not conf.hasKey("sounduri2"): conf.set("sounduri2", "file://"+addDataPrefix("sounds/capture1.ogg")) if not conf.hasKey("sounduri3"): conf.set("sounduri3", "file://"+addDataPrefix("sounds/start1.ogg")) if not conf.hasKey("sounduri4"): conf.set("sounduri4", "file://"+addDataPrefix("sounds/win1.ogg")) if not conf.hasKey("sounduri5"): conf.set("sounduri5", "file://"+addDataPrefix("sounds/lose1.ogg")) if not conf.hasKey("sounduri6"): conf.set("sounduri6", "file://"+addDataPrefix("sounds/draw1.ogg")) if not conf.hasKey("sounduri7"): conf.set("sounduri7", "file://"+addDataPrefix("sounds/obs_mov.ogg")) if not conf.hasKey("sounduri8"): conf.set("sounduri8", "file://"+addDataPrefix("sounds/obs_end.ogg")) if not conf.hasKey("sounduri9"): conf.set("sounduri9", "file://"+addDataPrefix("sounds/alarm.ogg")) if not conf.hasKey("sounduri10"): conf.set("sounduri10", "file://"+addDataPrefix("sounds/invalid.ogg")) class SoundTab: SOUND_DIRS = (addDataPrefix("sounds"), "/usr/share/sounds", "/usr/local/share/sounds", os.environ["HOME"]) COUNT_OF_SOUNDS = 11 actionToKeyNo = { "aPlayerMoves": 0, "aPlayerChecks": 1, "aPlayerCaptures": 2, "gameIsSetup": 3, "gameIsWon": 4, "gameIsLost": 5, "gameIsDrawn": 6, "observedMoves": 7, "oberservedEnds": 8, "shortOnTime": 9, "invalidMove": 10, } _player = None @classmethod def getPlayer (cls): if not cls._player: cls._player = gstreamer.Player() return cls._player @classmethod def playAction (cls, action): if not conf.get("useSounds", True): return if isinstance(action, str): no = cls.actionToKeyNo[action] else: no = action typ = conf.get("soundcombo%d" % no, SOUND_MUTE) if typ == SOUND_BEEP: sys.stdout.write("\a") sys.stdout.flush() elif typ == SOUND_URI: uri = conf.get("sounduri%d" % no, "") if not os.path.isfile(uri[7:]): conf.set("soundcombo%d" % no, SOUND_MUTE) return cls.getPlayer().play(uri) def __init__ (self, widgets): # Init open dialog opendialog = Gtk.FileChooserDialog ( _("Open Sound File"), None, Gtk.FileChooserAction.OPEN, (Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL, Gtk.STOCK_OPEN, Gtk.ResponseType.ACCEPT)) for dir in self.SOUND_DIRS: if os.path.isdir(dir): opendialog.set_current_folder(dir) break soundfilter = Gtk.FileFilter() soundfilter.set_name(_("Sound files")) #soundfilter.add_custom(soundfilter.get_needed(), # lambda data: data[3] and data[3].startswith("audio/")) soundfilter.add_mime_type("audio/*") opendialog.add_filter(soundfilter) opendialog.set_filter(soundfilter) # Get combo icons icons = ((_("No sound"), 
"audio-volume-muted", "audio-volume-muted"), (_("Beep"), "stock_bell", "audio-x-generic"), (_("Select sound file..."), "gtk-open", "document-open")) items = [] for level, stock, altstock in icons: image = load_icon(16, stock, altstock) items += [(image, level)] audioIco = load_icon(16, "audio-x-generic") # Set-up combos def callback (combobox, index): if combobox.get_active() == SOUND_SELECT: if opendialog.run() == Gtk.ResponseType.ACCEPT: uri = opendialog.get_uri() model = combobox.get_model() conf.set("sounduri%d"%index, uri) label = os.path.split(uri)[1] if len(model) == 3: model.append([audioIco, label]) else: model.set(model.get_iter((3,)), 1, label) combobox.set_active(3) else: combobox.set_active(conf.get("soundcombo%d"%index,SOUND_MUTE)) opendialog.hide() for i in range(self.COUNT_OF_SOUNDS): combo = widgets["soundcombo%d"%i] uistuff.createCombo (combo, items) combo.set_active(0) combo.connect("changed", callback, i) label = widgets["soundlabel%d"%i] label.props.mnemonic_widget = combo uri = conf.get("sounduri%d"%i,"") if os.path.isfile(uri[7:]): model = combo.get_model() model.append([audioIco, os.path.split(uri)[1]]) combo.set_active(3) for i in range(self.COUNT_OF_SOUNDS): if conf.get("soundcombo%d"%i, SOUND_MUTE) == SOUND_URI and \ not os.path.isfile(conf.get("sounduri%d"%i,"")[7:]): conf.set("soundcombo%d"%i, SOUND_MUTE) uistuff.keep(widgets["soundcombo%d"%i], "soundcombo%d"%i) #widgets["soundcombo%d"%i].set_active(conf.get("soundcombo%d"%i, SOUND_MUTE)) # Init play button def playCallback (button, index): SoundTab.playAction(index) for i in range (self.COUNT_OF_SOUNDS): button = widgets["soundbutton%d"%i] button.connect("clicked", playCallback, i) # Init 'use sound" checkbutton def checkCallBack (*args): checkbox = widgets["useSounds"] widgets["frame23"].set_property("sensitive", checkbox.get_active()) conf.notify_add("useSounds", checkCallBack) widgets["useSounds"].set_active(True) uistuff.keep(widgets["useSounds"], "useSounds") checkCallBack() def soundError (player, gstmessage): widgets["useSounds"].set_sensitive(False) widgets["useSounds"].set_active(False) self.getPlayer().connect("error", soundError) uistuff.keep(widgets["alarm_spin"], "alarm_spin", first_value=15) ################################################################################ # Panel initing # ################################################################################ class PanelTab: def __init__ (self, widgets): # Put panels in trees self.widgets = widgets from pychess.widgets.gamewidget import sidePanels, dockLocation saved_panels = [] xmlOK = os.path.isfile(dockLocation) if xmlOK: doc = minidom.parse(dockLocation) for elem in doc.getElementsByTagName("panel"): saved_panels.append(elem.getAttribute("id")) store = Gtk.ListStore(bool, GdkPixbuf.Pixbuf, str, object) for panel in sidePanels: checked = True if not xmlOK else panel.__name__ in saved_panels panel_icon = GdkPixbuf.Pixbuf.new_from_file_at_size(panel.__icon__, 32, 32) text = "<b>%s</b>\n%s" % (panel.__title__, panel.__desc__) store.append((checked, panel_icon, text, panel)) self.tv = widgets["treeview1"] self.tv.set_model(store) self.widgets['panel_about_button'].connect('clicked', self.panel_about) self.widgets['panel_enable_button'].connect('toggled', self.panel_toggled) self.tv.get_selection().connect('changed', self.selection_changed) pixbuf = Gtk.CellRendererPixbuf() pixbuf.props.yalign = 0 pixbuf.props.ypad = 3 pixbuf.props.xpad = 3 self.tv.append_column(Gtk.TreeViewColumn("Icon", pixbuf, pixbuf=1, sensitive=0)) 
uistuff.appendAutowrapColumn(self.tv, "Name", markup=2, sensitive=0)

        widgets['notebook1'].connect("switch-page", self.__on_switch_page)
        widgets["preferences"].connect("show", self.__on_show_window)
        widgets["preferences"].connect("hide", self.__on_hide_window)

    def selection_changed(self, treeselection):
        store, iter = self.tv.get_selection().get_selected()
        self.widgets['panel_enable_button'].set_sensitive(bool(iter))
        self.widgets['panel_about_button'].set_sensitive(bool(iter))
        if iter:
            active = self.tv.get_model().get(iter, 0)[0]
            self.widgets['panel_enable_button'].set_active(active)

    def panel_about(self, button):
        store, iter = self.tv.get_selection().get_selected()
        assert iter # The button should only be clickable when we have a selection
        path = store.get_path(iter)
        panel = store[path][3]

        d = Gtk.MessageDialog (type=Gtk.MessageType.INFO, buttons=Gtk.ButtonsType.CLOSE)
        d.set_markup ("<big><b>%s</b></big>" % panel.__title__)
        text = panel.__about__ if hasattr(panel, '__about__') else _('Undescribed panel')
        d.format_secondary_text (text)
        d.run()
        d.hide()

    def panel_toggled(self, button):
        store, iter = self.tv.get_selection().get_selected()
        assert iter # The button should only be clickable when we have a selection
        path = store.get_path(iter)
        active = button.get_active()
        if store[path][0] == active:
            return

        store[path][0] = active
        self.__set_panel_active(store[path][3], active)

    def __set_panel_active(self, panel, active):
        name = panel.__name__
        from pychess.widgets.gamewidget import notebooks, docks
        from pychess.widgets.pydock import EAST
        if active:
            leaf = notebooks["board"].get_parent().get_parent()
            leaf.dock(docks[name][1], EAST, docks[name][0], name)
        else:
            try:
                notebooks[name].get_parent().get_parent().undock(notebooks[name])
            except AttributeError:
                # A new panel appeared in the panels directory
                leaf = notebooks["board"].get_parent().get_parent()
                leaf.dock(docks[name][1], EAST, docks[name][0], name)

    def showit(self):
        from pychess.widgets.gamewidget import showDesignGW
        showDesignGW()

    def hideit(self):
        from pychess.widgets.gamewidget import hideDesignGW
        hideDesignGW()

    def __on_switch_page(self, notebook, page, page_num):
        if notebook.get_nth_page(page_num) == self.widgets['sidepanels']:
            self.showit()
        else:
            self.hideit()

    def __on_show_window(self, widget):
        notebook = self.widgets['notebook1']
        page_num = notebook.get_current_page()
        if notebook.get_nth_page(page_num) == self.widgets['sidepanels']:
            self.showit()

    def __on_hide_window(self, widget):
        self.hideit()


class ThemeTab:
    def __init__ (self, widgets):
        self.themes = self.discover_themes()

        store = Gtk.ListStore(GdkPixbuf.Pixbuf, str)
        for theme in self.themes:
            pngfile = "%s/%s.png" % (addDataPrefix("pieces"), theme)
            if isfile(pngfile):
                pixbuf = GdkPixbuf.Pixbuf.new_from_file(pngfile)
                store.append((pixbuf, theme))
            else:
                print("WARNING: No piece theme preview icons found. Run create_theme_preview.sh !")
                break

        iconView = widgets["pieceTheme"]
        iconView.set_model(store)
        iconView.set_pixbuf_column(0)
        iconView.set_text_column(1)

        #############################################
        # Hack to fix spacing problem in iconview
        # http://stackoverflow.com/questions/14090094/what-causes-the-different-display-behaviour-for-a-gtkiconview-between-different
        def keep_size(crt, *args):
            crt.handler_block(crt_notify)
            crt.set_property('width', 40)
            crt.handler_unblock(crt_notify)

        crt, crp = iconView.get_cells()
        crt_notify = crt.connect('notify', keep_size)
        #############################################

        def _get_active(iconview):
            model = iconview.get_model()
            selected = iconview.get_selected_items()
            if len(selected) == 0:
                return conf.get("pieceTheme", "Pychess")
            i = selected[0][0]
            theme = model[i][1]
            Pieces.set_piece_theme(theme)
            return theme

        def _set_active(iconview, value):
            try:
                index = self.themes.index(value)
            except ValueError:
                index = 0
            iconview.select_path(Gtk.TreePath(index,))

        uistuff.keep(widgets["pieceTheme"], "pieceTheme", _get_active, _set_active, "Pychess")

    def discover_themes(self):
        themes = ['Pychess']

        pieces = addDataPrefix("pieces")
        themes += [d.capitalize() for d in listdir(pieces) if isdir(os.path.join(pieces,d)) and d != 'ttf']

        ttf = addDataPrefix("pieces/ttf")
        themes += ["ttf-" + splitext(d)[0].capitalize() for d in listdir(ttf) if splitext(d)[1] == '.ttf']

        themes.sort()
        return themes
gpl-3.0
-1,862,141,245,394,663,400
38.951586
133
0.551962
false
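The preferences code above binds every widget to a conf key through uistuff.keep; a sketch of persisting one more option in the same style (the 'showClock' key and 'clockFrame' widget are hypothetical):

# Inside a tab's __init__, following the existing pattern; defaults to on.
uistuff.keep(widgets["showClock"], "showClock", first_value=True)

# Reacting to changes, as SoundTab does with "useSounds":
def on_show_clock(*args):
    widgets["clockFrame"].set_property("sensitive", conf.get("showClock", True))
conf.notify_add("showClock", on_show_clock)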
telefonicaid/fiware-IoTAgent-Cplusplus
third_party/mosquitto-1.4.4/test/lib/03-publish-c2b-qos2.py
7
3223
#!/usr/bin/env python # Test whether a client sends a correct PUBLISH to a topic with QoS 2. # The client should connect to port 1888 with keepalive=60, clean session set, # and client id publish-qos2-test # The test will send a CONNACK message to the client with rc=0. Upon receiving # the CONNACK the client should verify that rc==0. If not, it should exit with # return code=1. # On a successful CONNACK, the client should send a PUBLISH message with topic # "pub/qos2/test", payload "message" and QoS=2. # The test will not respond to the first PUBLISH message, so the client must # resend the PUBLISH message with dup=1. Note that to keep test durations low, a # message retry timeout of less than 10 seconds is required for this test. # On receiving the second PUBLISH message, the test will send the correct # PUBREC response. On receiving the correct PUBREC response, the client should # send a PUBREL message. # The test will not respond to the first PUBREL message, so the client must # resend the PUBREL message with dup=1. On receiving the second PUBREL message, # the test will send the correct PUBCOMP response. On receiving the correct # PUBCOMP response, the client should send a DISCONNECT message. import inspect import os import subprocess import socket import sys import time # From http://stackoverflow.com/questions/279237/python-import-a-module-from-a-folder cmd_subfolder = os.path.realpath(os.path.abspath(os.path.join(os.path.split(inspect.getfile( inspect.currentframe() ))[0],".."))) if cmd_subfolder not in sys.path: sys.path.insert(0, cmd_subfolder) import mosq_test rc = 1 keepalive = 60 connect_packet = mosq_test.gen_connect("publish-qos2-test", keepalive=keepalive) connack_packet = mosq_test.gen_connack(rc=0) disconnect_packet = mosq_test.gen_disconnect() mid = 1 publish_packet = mosq_test.gen_publish("pub/qos2/test", qos=2, mid=mid, payload="message") publish_dup_packet = mosq_test.gen_publish("pub/qos2/test", qos=2, mid=mid, payload="message", dup=True) pubrec_packet = mosq_test.gen_pubrec(mid) pubrel_packet = mosq_test.gen_pubrel(mid) pubcomp_packet = mosq_test.gen_pubcomp(mid) sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) sock.settimeout(10) sock.bind(('', 1888)) sock.listen(5) client_args = sys.argv[1:] env = dict(os.environ) env['LD_LIBRARY_PATH'] = '../../lib:../../lib/cpp' try: pp = env['PYTHONPATH'] except KeyError: pp = '' env['PYTHONPATH'] = '../../lib/python:'+pp client = mosq_test.start_client(filename=sys.argv[1].replace('/', '-'), cmd=client_args, env=env) try: (conn, address) = sock.accept() conn.settimeout(10) if mosq_test.expect_packet(conn, "connect", connect_packet): conn.send(connack_packet) if mosq_test.expect_packet(conn, "publish", publish_packet): conn.send(pubrec_packet) if mosq_test.expect_packet(conn, "pubrel", pubrel_packet): conn.send(pubcomp_packet) if mosq_test.expect_packet(conn, "disconnect", disconnect_packet): rc = 0 conn.close() finally: client.terminate() client.wait() sock.close() exit(rc)
agpl-3.0
8,244,254,197,598,557,000
35.213483
129
0.711449
false
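The mosq_test helpers compose other QoS flows the same way; a compressed sketch of the QoS 1 analogue (client id and topic hypothetical, and gen_puback assumed to exist alongside the generators used above):

import mosq_test

mid = 1
connect_packet = mosq_test.gen_connect("publish-qos1-test", keepalive=60)
connack_packet = mosq_test.gen_connack(rc=0)
publish_packet = mosq_test.gen_publish("pub/qos1/test", qos=1, mid=mid, payload="message")
puback_packet = mosq_test.gen_puback(mid)
# Accept the client as above, then: expect CONNECT -> send CONNACK,
# expect PUBLISH -> send PUBACK, expect DISCONNECT.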
alsrgv/tensorflow
tensorflow/python/data/kernel_tests/interleave_test.py
2
10138
# Copyright 2017 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for `tf.data.Dataset.interleave()`.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import multiprocessing from absl.testing import parameterized import numpy as np from tensorflow.python.data.kernel_tests import test_base from tensorflow.python.data.ops import dataset_ops from tensorflow.python.framework import errors from tensorflow.python.framework import sparse_tensor from tensorflow.python.framework import test_util from tensorflow.python.ops import array_ops from tensorflow.python.ops import sparse_ops from tensorflow.python.platform import test def _interleave(lists, cycle_length, block_length): """Reference implementation of interleave used for testing. Args: lists: a list of lists to interleave cycle_length: the length of the interleave cycle block_length: the length of the interleave block Yields: Elements of `lists` interleaved in the order determined by `cycle_length` and `block_length`. """ num_open = 0 # `all_iterators` acts as a queue of iterators over each element of `lists`. all_iterators = [iter(l) for l in lists] # `open_iterators` are the iterators whose elements are currently being # interleaved. open_iterators = [] if cycle_length == dataset_ops.AUTOTUNE: cycle_length = multiprocessing.cpu_count() for i in range(cycle_length): if all_iterators: open_iterators.append(all_iterators.pop(0)) num_open += 1 else: open_iterators.append(None) while num_open or all_iterators: for i in range(cycle_length): if open_iterators[i] is None: if all_iterators: open_iterators[i] = all_iterators.pop(0) num_open += 1 else: continue for _ in range(block_length): try: yield next(open_iterators[i]) except StopIteration: open_iterators[i] = None num_open -= 1 break def _repeat(values, count): """Produces a list of lists suitable for testing interleave. Args: values: for each element `x` the result contains `[x] * x` count: determines how many times to repeat `[x] * x` in the result Returns: A list of lists of values suitable for testing interleave. 
""" return [[value] * value for value in np.tile(values, count)] @test_util.run_all_in_graph_and_eager_modes class InterleaveTest(test_base.DatasetTestBase, parameterized.TestCase): @parameterized.named_parameters( ("1", [4, 5, 6], 1, 1, [ 4, 4, 4, 4, 5, 5, 5, 5, 5, 6, 6, 6, 6, 6, 6, 4, 4, 4, 4, 5, 5, 5, 5, 5, 6, 6, 6, 6, 6, 6 ]), ("2", [4, 5, 6], 2, 1, [ 4, 5, 4, 5, 4, 5, 4, 5, 5, 6, 6, 4, 6, 4, 6, 4, 6, 4, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 6 ]), ("3", [4, 5, 6], 2, 3, [ 4, 4, 4, 5, 5, 5, 4, 5, 5, 6, 6, 6, 4, 4, 4, 6, 6, 6, 4, 5, 5, 5, 6, 6, 6, 5, 5, 6, 6, 6 ]), ("4", [4, 5, 6], 7, 2, [ 4, 4, 5, 5, 6, 6, 4, 4, 5, 5, 6, 6, 4, 4, 5, 5, 6, 6, 4, 4, 5, 5, 6, 6, 5, 6, 6, 5, 6, 6 ]), ("5", [4, 0, 6], 2, 1, [4, 4, 6, 4, 6, 4, 6, 6, 4, 6, 4, 6, 4, 4, 6, 6, 6, 6, 6, 6]), ) def testPythonImplementation(self, input_values, cycle_length, block_length, expected_elements): input_lists = _repeat(input_values, 2) for expected, produced in zip( expected_elements, _interleave(input_lists, cycle_length, block_length)): self.assertEqual(expected, produced) @parameterized.named_parameters( ("1", np.int64([4, 5, 6]), 1, 3, None), ("2", np.int64([4, 5, 6]), 1, 3, 1), ("3", np.int64([4, 5, 6]), 2, 1, None), ("4", np.int64([4, 5, 6]), 2, 1, 1), ("5", np.int64([4, 5, 6]), 2, 1, 2), ("6", np.int64([4, 5, 6]), 2, 3, None), ("7", np.int64([4, 5, 6]), 2, 3, 1), ("8", np.int64([4, 5, 6]), 2, 3, 2), ("9", np.int64([4, 5, 6]), 7, 2, None), ("10", np.int64([4, 5, 6]), 7, 2, 1), ("11", np.int64([4, 5, 6]), 7, 2, 3), ("12", np.int64([4, 5, 6]), 7, 2, 5), ("13", np.int64([4, 5, 6]), 7, 2, 7), ("14", np.int64([4, 5, 6]), dataset_ops.AUTOTUNE, 3, None), ("15", np.int64([4, 5, 6]), dataset_ops.AUTOTUNE, 3, 1), ("16", np.int64([]), 2, 3, None), ("17", np.int64([0, 0, 0]), 2, 3, None), ("18", np.int64([4, 0, 6]), 2, 3, None), ("19", np.int64([4, 0, 6]), 2, 3, 1), ("20", np.int64([4, 0, 6]), 2, 3, 2), ) def testInterleaveDataset(self, input_values, cycle_length, block_length, num_parallel_calls): count = 2 dataset = dataset_ops.Dataset.from_tensor_slices(input_values).repeat( count).interleave( lambda x: dataset_ops.Dataset.from_tensors(x).repeat(x), cycle_length, block_length, num_parallel_calls) expected_output = [ element for element in _interleave( _repeat(input_values, count), cycle_length, block_length) ] self.assertDatasetProduces(dataset, expected_output) @parameterized.named_parameters( ("1", np.float32([1., np.nan, 2., np.nan, 3.]), 1, 3, None), ("2", np.float32([1., np.nan, 2., np.nan, 3.]), 1, 3, 1), ("3", np.float32([1., np.nan, 2., np.nan, 3.]), 2, 1, None), ("4", np.float32([1., np.nan, 2., np.nan, 3.]), 2, 1, 1), ("5", np.float32([1., np.nan, 2., np.nan, 3.]), 2, 1, 2), ("6", np.float32([1., np.nan, 2., np.nan, 3.]), 2, 3, None), ("7", np.float32([1., np.nan, 2., np.nan, 3.]), 2, 3, 1), ("8", np.float32([1., np.nan, 2., np.nan, 3.]), 2, 3, 2), ("9", np.float32([1., np.nan, 2., np.nan, 3.]), 7, 2, None), ("10", np.float32([1., np.nan, 2., np.nan, 3.]), 7, 2, 1), ("11", np.float32([1., np.nan, 2., np.nan, 3.]), 7, 2, 3), ("12", np.float32([1., np.nan, 2., np.nan, 3.]), 7, 2, 5), ("13", np.float32([1., np.nan, 2., np.nan, 3.]), 7, 2, 7), ) def testInterleaveDatasetError(self, input_values, cycle_length, block_length, num_parallel_calls): dataset = dataset_ops.Dataset.from_tensor_slices(input_values).map( lambda x: array_ops.check_numerics(x, "message")).interleave( dataset_ops.Dataset.from_tensors, cycle_length, block_length, num_parallel_calls) get_next = self.getNext(dataset) for value in input_values: if 
np.isnan(value):
        with self.assertRaises(errors.InvalidArgumentError):
          self.evaluate(get_next())
      else:
        self.assertEqual(value, self.evaluate(get_next()))
    with self.assertRaises(errors.OutOfRangeError):
      self.evaluate(get_next())

  def testInterleaveSparse(self):

    def _map_fn(i):
      return sparse_tensor.SparseTensorValue(
          indices=[[0, 0], [1, 1]], values=(i * [1, -1]), dense_shape=[2, 2])

    def _interleave_fn(x):
      return dataset_ops.Dataset.from_tensor_slices(
          sparse_ops.sparse_to_dense(x.indices, x.dense_shape, x.values))

    dataset = dataset_ops.Dataset.range(10).map(_map_fn).interleave(
        _interleave_fn, cycle_length=1)
    get_next = self.getNext(dataset)
    for i in range(10):
      for j in range(2):
        expected = [i, 0] if j % 2 == 0 else [0, -i]
        self.assertAllEqual(expected, self.evaluate(get_next()))
    with self.assertRaises(errors.OutOfRangeError):
      self.evaluate(get_next())
    with self.assertRaises(errors.OutOfRangeError):
      self.evaluate(get_next())

  @parameterized.named_parameters(
      ("1", np.int64([4, 5, 6]), 1, 3, 1),
      ("2", np.int64([4, 5, 6]), 2, 1, 1),
      ("3", np.int64([4, 5, 6]), 2, 1, 2),
      ("4", np.int64([4, 5, 6]), 2, 3, 1),
      ("5", np.int64([4, 5, 6]), 2, 3, 2),
      ("6", np.int64([4, 5, 6]), 7, 2, 1),
      ("7", np.int64([4, 5, 6]), 7, 2, 3),
      ("8", np.int64([4, 5, 6]), 7, 2, 5),
      ("9", np.int64([4, 5, 6]), 7, 2, 7),
      ("10", np.int64([4, 5, 6]), dataset_ops.AUTOTUNE, 3, 1),
      ("11", np.int64([4, 0, 6]), 2, 3, 1),
      ("12", np.int64([4, 0, 6]), 2, 3, 2),
  )
  def testSloppyInterleaveDataset(self, input_values, cycle_length,
                                  block_length, num_parallel_calls):
    count = 2
    dataset = dataset_ops.Dataset.from_tensor_slices(input_values).repeat(
        count).interleave(
            lambda x: dataset_ops.Dataset.from_tensors(x).repeat(x),
            cycle_length, block_length, num_parallel_calls)
    options = dataset_ops.Options()
    options.experimental_deterministic = False
    dataset = dataset.with_options(options)
    expected_output = [
        element for element in _interleave(
            _repeat(input_values, count), cycle_length, block_length)
    ]
    get_next = self.getNext(dataset)
    actual_output = []
    for _ in range(len(expected_output)):
      actual_output.append(self.evaluate(get_next()))
    # list.sort() sorts in place and returns None, so use sorted() copies
    # for the order-insensitive comparison.
    self.assertAllEqual(sorted(expected_output), sorted(actual_output))

  def testInterleaveMap(self):
    dataset = dataset_ops.Dataset.range(100)

    def interleave_fn(x):
      dataset = dataset_ops.Dataset.from_tensors(x)
      return dataset.map(lambda x: x + x)

    dataset = dataset.interleave(interleave_fn, cycle_length=5)
    dataset = dataset.interleave(interleave_fn, cycle_length=5)

    self.assertDatasetProduces(dataset, [4 * x for x in range(100)])


if __name__ == "__main__":
  test.main()
apache-2.0
1,540,068,868,335,517,000
37.547529
80
0.574078
false
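A quick concrete check of the `_interleave` reference implementation from the file above, using its private helpers (count=1 here, so the tail differs from the named test cases):

lists = _repeat([4, 5, 6], 1)   # [[4]*4, [5]*5, [6]*6]
print(list(_interleave(lists, cycle_length=2, block_length=1)))
# -> [4, 5, 4, 5, 4, 5, 4, 5, 5, 6, 6, 6, 6, 6, 6]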
Neurita/boyle
scripts/compare_id_sets.py
1
16843
#!/usr/bin/env python #TODO #Transform this into a CLI import re import os import logging import collections import numpy as np from tabulate import tabulate from dcm_anonymize import get_all_patient_mri_ids #logging config logging.basicConfig(level=logging.DEBUG, filename='idset_comparator.log', format="%(asctime)-15s %(message)s") log = logging.getLogger('idset_comparing') class idset(np.ndarray): """ Array of identifiers. ... Attributes ---------- name: string Name of the set Methods ------- get_repetitions() print_counts() print_unique_nums() Prints the total number of IDs and the number of unique IDs """ #@classmethod def __new__(cls, input_array, name=None): # Input array is an already formed ndarray instance # We first cast to be our class type obj = np.asarray(input_array).view(cls) # add the new attribute to the created instance obj.name = name # Finally, we must return the newly created object: return obj def __array_finalize__(self, obj): # see idset.__array_finalize__ for comments if obj is None: return self.name = getattr(obj, 'name', None) def get_repetitions(self): return [i for i in np.unique(self) if i is not None and np.sum(self == i) > 1] def print_unique_nums(self): table = [] table.append(['Number of {0}: '.format(self.name), len(self)]) table.append(['Unique: ', len(np.unique(self))]) print(tabulate(table)) def print_counts(self): reps = self.get_repetitions() table = [[self.name, 'Repetitions']] for rep in reps: table.append([rep, np.sum(self == rep)]) #print('{0} {1} appears {2} times'.format(self.name, rep, # np.sum(self == rep))) print(tabulate(table, headers='firstrow')) def self_test(self): print('======================================================') print('Checking {0} values:'.format(self.name)) self.print_unique_nums() print('\n') self.print_counts() class idset_with_reference(idset): """ Array of identifiers with a list of references for each ID. The list of references and IDs must have the same number, we assume they are in the same order. ... Attributes ---------- name: string Name of the set reflst: list or ndarray List of references for each ID refname: string Name of the reference Methods ------- print_repeated_references() """ #@classmethod def __new__(cls, input_array, name=None, reflst=None, refname=None): # Input array is an already formed ndarray instance # We first cast to be our class type obj = idset.__new__(cls, input_array, name) obj.reflst = reflst obj.refname = refname # Finally, we must return the newly created object: return obj def __array_finalize__(self, obj): # see idset.__array_finalize__ for comments if obj is None: return super(idset_with_reference, self).__array_finalize__(obj) self.reflst = getattr(obj, 'reflst', None) self.refname = getattr(obj, 'refname', None) def print_repeatedid_references(self): reps = self.get_repetitions() refs = np.array(self.reflst) for rep in reps: if rep is None: continue try: if rep == np.array(None).astype(self.dtype): continue except: pass table = [] for name in refs[np.where(self == rep)]: # print('{0} {1} corresponds to {2} {3}'.format(self.name.capitalize(), rep, # self.refname.lower(), name)) table11 = '' table12 = name if not table: table11 = '{0} {1} corresponds to {2}'.format(self.name, rep, self.refname) table.append([table11, table12]) print(tabulate(table)) def get_noneid_references(self): """ Returns ------- ndarray Array of references in self.reflst whose self id is None. 
""" #return [self.reflst[idx] for idx, idval in enumerate(self) if idval is None] try: nun = np.array(None).astype(self.dtype) return np.array(self.reflst)[self == nun] except: nun = None return np.array(self.reflst)[self is None] def print_noneid_references(self): nune_refs = self.get_noneid_references() table = [['Has {0} as None'.format(self.name), '']] for ref in nune_refs: table.append(['', ref]) #print('{0} {1} has {2} as None.'.format(self.refname, ref, # self.name)) if len(table) > 1: print(tabulate(table)) def self_test(self): super(idset_with_reference, self).self_test() print('\n') self.print_repeatedid_references() print('\n') self.print_noneid_references() class idset_comparator(collections.OrderedDict): """ """ def __init__(self): super(idset_comparator, self).__init__() @staticmethod def _get_elem(list, idx, default=None): elem = default try: elem = list[idx] except: pass return elem @staticmethod def _tabulate_2_lists(list1, list2): """ """ tablst = [] for idx in list(range(max(len(list1), len(list2)))): elem1 = idset_comparator._get_elem(list1, idx, '') elem2 = idset_comparator._get_elem(list2, idx, '') tablst.append([elem1, elem2]) return tablst @staticmethod def _tabulate_4_lists(list1, list2, list3, list4): """ """ tablst = [] for idx in list(range(max(len(list1), len(list2), len(list3), len(list4)))): elem1 = idset_comparator._get_elem(list1, idx, '') elem2 = idset_comparator._get_elem(list2, idx, '') elem3 = idset_comparator._get_elem(list3, idx, '') elem4 = idset_comparator._get_elem(list4, idx, '') tablst.append([elem1, elem2, elem3, elem4]) return tablst def _print_general_vs_table(self, idset1, idset2): """ :param idset1: :param idset2: """ ref1name = '' set1_hasref = isinstance(idset1, idset_with_reference) if set1_hasref: ref1arr = np.array(idset1.reflst) ref1name = idset1.refname ref2name = ref1name set2_hasref = isinstance(idset2, idset_with_reference) if set2_hasref: ref2arr = np.array(idset2.reflst) ref2name = idset2.refname else: ref2name = ref1name #First show a general table hdr11 = '{0} > {1}'.format(idset1.name, idset2.name) hdr12 = '{0} > {1} {2}'.format(idset1.name, idset2.name, ref2name) hdr13 = '{0} < {1}'.format(idset1.name, idset2.name) hdr14 = '{0} < {1} {2}'.format(idset1.name, idset2.name, ref1name) table = [[hdr11, hdr12, hdr13, hdr14]] set1 = set(idset1) set2 = set(idset2) row11 = list(set1 - set2) if set1_hasref: row12 = [ref1arr[np.where(idset1 == nom)][0] for nom in row11] else: row12 = ['Not found' for _ in row11] row13 = list(set2 - set1) if set2_hasref: row14 = [ref2arr[np.where(idset2 == nom)][0] for nom in row13] else: row14 = ['Not found' for _ in row13] tablst = self._tabulate_4_lists(row11, row12, row13, row14) table.extend(tablst) if len(table) > 1: print(tabulate(table, headers='firstrow')) print('\n') def _print_foreign_repetition_table(self, idset1, idset2): """ :param idset1: :param idset2: """ assert(isinstance(idset1, idset_with_reference)) assert(isinstance(idset2, idset)) reps = idset2.get_repetitions() if len(reps) < 1: return refs = np.array(idset1.reflst) table = [['{0} {1} values of repetitions in {2}'.format(idset1.name, idset1.refname, idset2.name), '']] for rep in reps: if np.any(idset1 == rep): matches = refs[np.where(idset1 == rep)] myrep = rep for m in matches: table.append([myrep, m]) myrep = '' print(tabulate(table, headers='firstrow')) print('\n') def print_compare_idsets(self, idset1_name, idset2_name): """ """ try: idset1 = self[idset1_name] idset2 = self[idset2_name] except KeyError as ke: log.error('Error 
compare_idsets: getting keys {0} and {1}'.format(idset1_name, idset2_name))
            import sys, pdb
            pdb.post_mortem(sys.exc_info()[2])
            raise

        assert(isinstance(idset1, idset))
        assert(isinstance(idset2, idset))

        hdr11 = '{0} > {1}'.format(idset1_name, idset2_name)
        hdr12 = '{0} < {1}'.format(idset1_name, idset2_name)
        table = [[hdr11, hdr12]]

        set1 = set(idset1)
        set2 = set(idset2)
        row11 = list(set1 - set2)
        row12 = list(set2 - set1)

        tablst = self._tabulate_2_lists(row11, row12)
        table.extend(tablst)

        if len(table) > 1:
            print(tabulate(table, headers='firstrow'))
            print('\n')

    def print_compare_idsets_one_ref(self, idset1_name, idset2_name):
        """
        idset1_name: string
        key of an idset_with_reference

        idset2_name: string
        key of an idset
        """
        try:
            idset1 = self[idset1_name]
            idset2 = self[idset2_name]
        except KeyError as ke:
            log.error('Error compare_idsets: getting keys {0} and {1}'.format(idset1_name, idset2_name))
            import sys, pdb
            pdb.post_mortem(sys.exc_info()[2])
            raise

        assert(isinstance(idset1, idset_with_reference))
        assert(isinstance(idset2, idset))

        self._print_general_vs_table(idset1, idset2)
        self._print_foreign_repetition_table(idset1, idset2)

    def print_compare_idsets_two_refs(self, idset1_name, idset2_name):
        """
        idset1_name: string
        key of an idset_with_reference

        idset2_name: string
        key of an idset_with_reference
        """
        try:
            idset1 = self[idset1_name]
            idset2 = self[idset2_name]
        except KeyError as ke:
            log.error('Error compare_idsets: getting keys {0} and {1}'.format(idset1_name, idset2_name))
            import sys, pdb
            pdb.post_mortem(sys.exc_info()[2])
            raise

        assert(isinstance(idset1, idset_with_reference))
        assert(isinstance(idset2, idset_with_reference))

        self._print_general_vs_table(idset1, idset2)
        self._print_foreign_repetition_table(idset1, idset2)
        self._print_foreign_repetition_table(idset2, idset1)

    def print_all_comparisons(self):
        """
        """
        keys = self.keys()
        for idx, set1 in enumerate(keys):
            for set2 in keys[idx+1:]:
                if set1 == set2:
                    continue

                is_idset1 = isinstance(self[set1], idset)
                is_idset2 = isinstance(self[set2], idset)
                if not is_idset1 or not is_idset2:
                    continue

                print('======================================================')
                print('{0} VS. 
{1}'.format(set1, set2)) print('======================================================') is_idset1 = isinstance(self[set1], idset) and not isinstance(self[set1], idset_with_reference) is_idset2 = isinstance(self[set2], idset) and not isinstance(self[set2], idset_with_reference) if is_idset1 and is_idset2: self.print_compare_idsets(set1, set2) is_refidset1 = isinstance(self[set1], idset_with_reference) is_refidset2 = isinstance(self[set2], idset_with_reference) if is_refidset1 and not is_refidset2: self.print_compare_idsets_one_ref(set1, set2) elif is_refidset1 and is_refidset2: self.print_compare_idsets_two_refs(set1, set2) print('======================================================') print('\n') if __name__ == '__main__': curdir = '/data/santiago/data' curdir = os.path.abspath(curdir) subjlst = os.listdir(curdir) subjlst.sort() #get IDs from folder names idregex = r'[N|P]?\d\d*-?\d?$' dirids = [re.search(idregex, i).group(0) for i in subjlst] #print DICOM ids found for each subject enhe = '\xd1' all_dicids = collections.OrderedDict((i, get_all_patient_mri_ids(os.path.join(curdir, i))) for i in subjlst) idtab = [[k.replace(enhe, 'N'), str(list(all_dicids[k]))] for k in all_dicids.keys()] print(tabulate(idtab, headers=['Folder Name', 'DICOM IDs'])) #get IDs from DICOM files, if there are more than one, #looks for the one that matches dirids[i] dicids = [] for idx, i in enumerate(subjlst): sids = all_dicids[i] if len(sids) == 1: sid = sids.pop() elif len(sids) > 1: for sid in sids: if sid == dirids[idx]: break else: sid = 'None' dicids.append(sid) csvids = ['N122054', '99100167', '99106030', '886783', '99109169', 'P99106025', '99106027', 'N100712', 'P99117222', 'N640001', '1560237', '697987', '99135286', 'N859000', 'N640000', 'N968000', 'N890000', '55826', 'P99143948', '1542604', '1325097', 'N110626', '472315', 'N092449', 'N328000', 'N115611', '1113611', '49612', '261676', '26513', '734615', '890405', '687685', '1328215', '15817', '472717', '1365535', '921735', '470663', '1084809', '470848', '1578187', 'N104950', '381249', '320712', '99130021', '99135285', '99130019', '1124286', 'N265000', '1740968', '99141533', '886153', '238394', 'N102326', 'N140000', 'N031000', '375000', 'N781000', 'N111637', 'N8590001', 'N750000', 'N406000', 'N875000', 'N718000', '1954086', 'N281000', '1942888', '1328924', '1756647', '241599', '1313224', 'N103159', '2136656', '1969022', '1549266', '99072462', '1983029', 'N437000', '661449', '24525', '683479', '248966', '1939978', '1547734', '35207', '1738856', '1164737', '671898', '1314109', '1770861', '99135289', 'N718000', 'N312000', '45706', '99105995', '99109170', '46689', '400742', 'N093000', '99117888', '99117812', '925987', '99119960', '482470', 'N156000', '707383', '1952854', '685906', '66334', 'N173253', 'N133209', '1554723', '2233682', '1956804', '19797', '936976', '1539140', '96049', '458039', '1364867', '1335553'] #remove a nasty spanish symbol enhe = '\xd1' subjlst = [s.replace(enhe, 'N') for s in subjlst] #Set of IDs from folder names diridset = idset_with_reference(dirids, name='Folder ID', reflst=subjlst, refname='Folder') #Set of IDs from DICOM data dicidset = idset_with_reference(dicids, name='DICOM ID', reflst=subjlst, refname='Folder') csvidset = idset(csvids, name='CSV ID') diridset.self_test() dicidset.self_test() csvidset.self_test() idcomp = idset_comparator() idcomp[diridset.name] = diridset idcomp[dicidset.name] = dicidset idcomp[csvidset.name] = csvidset idcomp.print_all_comparisons()
bsd-3-clause
274,210,726,395,340,400
32.286561
112
0.519801
false
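The comparator above reduces to symmetric set differences printed side by side. A minimal, dependency-free sketch of that core pattern (illustrative names; the real class additionally renders the rows with tabulate):

def compare_id_lists(name1, ids1, name2, ids2):
    # IDs present in one list but missing from the other.
    set1, set2 = set(ids1), set(ids2)
    print('{0} > {1}: {2}'.format(name1, name2, sorted(set1 - set2)))
    print('{0} < {1}: {2}'.format(name1, name2, sorted(set2 - set1)))

compare_id_lists('Folder ID', ['N1', 'N2'], 'CSV ID', ['N2', 'N3'])
# Folder ID > CSV ID: ['N1']
# Folder ID < CSV ID: ['N3']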
foursquare/pants
contrib/node/src/python/pants/contrib/node/subsystems/package_managers.py
2
9205
# coding=utf-8
# Copyright 2018 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).

from __future__ import absolute_import, division, print_function, unicode_literals

import logging
from builtins import object

from pants.contrib.node.subsystems.command import command_gen


LOG = logging.getLogger(__name__)

PACKAGE_MANAGER_NPM = 'npm'
PACKAGE_MANAGER_YARNPKG = 'yarnpkg'
PACKAGE_MANAGER_YARNPKG_ALIAS = 'yarn'
VALID_PACKAGE_MANAGERS = [PACKAGE_MANAGER_NPM, PACKAGE_MANAGER_YARNPKG, PACKAGE_MANAGER_YARNPKG_ALIAS]


# TODO: Change to enum type when migrated to Python 3.4+
class PackageInstallationTypeOption(object):
  PROD = 'prod'
  DEV = 'dev'
  PEER = 'peer'
  BUNDLE = 'bundle'
  OPTIONAL = 'optional'
  NO_SAVE = 'not saved'


class PackageInstallationVersionOption(object):
  EXACT = 'exact'
  TILDE = 'tilde'


class PackageManager(object):
  """Defines node package manager functionalities."""

  def __init__(self, name, tool_installations):
    self.name = name
    self.tool_installations = tool_installations

  def _get_installation_args(self, install_optional, production_only, force, frozen_lockfile):
    """Returns command line args for installing a package.

    :param install_optional: True to request installing optional dependencies.
    :param production_only: True to only install production dependencies, i.e.
      ignore devDependencies.
    :param force: True to force re-download dependencies.
    :param frozen_lockfile: True to disallow automatic update of lock files.
    :rtype: list of strings
    """
    raise NotImplementedError

  def _get_run_script_args(self):
    """Returns command line args to run a package.json script.

    :rtype: list of strings
    """
    raise NotImplementedError

  def _get_add_package_args(self, package, type_option, version_option):
    """Returns command line args to add a node package.

    :rtype: list of strings
    """
    raise NotImplementedError()

  def run_command(self, args=None, node_paths=None):
    """Returns a command that when executed will run an arbitrary command via package manager."""
    return command_gen(
      self.tool_installations,
      self.name,
      args=args,
      node_paths=node_paths
    )

  def install_module(
    self,
    install_optional=False,
    production_only=False,
    force=False,
    frozen_lockfile=True,
    node_paths=None):
    """Returns a command that when executed will install node packages.

    :param install_optional: True to install optional dependencies.
    :param production_only: True to only install production dependencies, i.e.
      ignore devDependencies.
    :param force: True to force re-download dependencies.
    :param frozen_lockfile: True to disallow automatic update of lock files.
    :param node_paths: A list of paths that should be included in $PATH when running installation.
    """
    args = self._get_installation_args(
      install_optional=install_optional,
      production_only=production_only,
      force=force,
      frozen_lockfile=frozen_lockfile)
    return self.run_command(args=args, node_paths=node_paths)

  def run_script(self, script_name, script_args=None, node_paths=None):
    """Returns a command to execute a package.json script.

    :param script_name: Name of the script to run.  Note that script name 'test' can be used
      to run node tests.
    :param script_args: Args to be passed to package.json script.
    :param node_paths: A list of paths that should be included in $PATH when running the script.
    """
    # TODO: consider adding a pants.util function to manipulate command line.
    package_manager_args = self._get_run_script_args()
    package_manager_args.append(script_name)
    if script_args:
      package_manager_args.append('--')
      package_manager_args.extend(script_args)
    return self.run_command(args=package_manager_args, node_paths=node_paths)

  def add_package(
    self,
    package,
    node_paths=None,
    type_option=PackageInstallationTypeOption.PROD,
    version_option=None):
    """Returns a command that when executed will add a node package to the current node module.

    :param package: string.  A valid npm/yarn package description.  The accepted forms are
      package-name, package-name@version, package-name@tag, file:/folder, file:/path/to.tgz,
      https://url/to.tgz
    :param node_paths: A list of paths that should be included in $PATH when running the script.
    :param type_option: A value from PackageInstallationTypeOption that indicates the type of
      package to be installed.  Defaults to 'prod', which is a production dependency.
    :param version_option: A value from PackageInstallationVersionOption that indicates how to
      match version.  Defaults to None, which uses the package manager default.
    """
    args = self._get_add_package_args(
      package,
      type_option=type_option,
      version_option=version_option)
    return self.run_command(args=args, node_paths=node_paths)

  def run_cli(self, cli, args=None, node_paths=None):
    """Returns a command that when executed will run an installed cli via package manager."""
    cli_args = [cli]
    if args:
      cli_args.append('--')
      cli_args.extend(args)
    return self.run_command(args=cli_args, node_paths=node_paths)


class PackageManagerYarnpkg(PackageManager):
  def __init__(self, tool_installation):
    super(PackageManagerYarnpkg, self).__init__(PACKAGE_MANAGER_YARNPKG, tool_installation)

  def _get_run_script_args(self):
    return ['run']

  def _get_installation_args(self, install_optional, production_only, force, frozen_lockfile):
    return_args = ['--non-interactive']
    if not install_optional:
      return_args.append('--ignore-optional')
    if production_only:
      return_args.append('--production=true')
    if force:
      return_args.append('--force')
    if frozen_lockfile:
      return_args.append('--frozen-lockfile')
    return return_args

  def _get_add_package_args(self, package, type_option, version_option):
    return_args = ['add', package]
    package_type_option = {
      PackageInstallationTypeOption.PROD: '',  # Yarn save production is the default.
      PackageInstallationTypeOption.DEV: '--dev',
      PackageInstallationTypeOption.PEER: '--peer',
      PackageInstallationTypeOption.OPTIONAL: '--optional',
      PackageInstallationTypeOption.BUNDLE: None,
      PackageInstallationTypeOption.NO_SAVE: None,
    }.get(type_option)
    if package_type_option is None:
      LOG.warning('{} does not support {} packages, ignored.'.format(self.name, type_option))
    elif package_type_option:  # Skip over '' entries
      return_args.append(package_type_option)
    package_version_option = {
      PackageInstallationVersionOption.EXACT: '--exact',
      PackageInstallationVersionOption.TILDE: '--tilde',
    }.get(version_option)
    if package_version_option is None:
      LOG.warning(
        '{} does not support install with {} version, ignored.'.format(self.name, version_option))
    elif package_version_option:  # Skip over '' entries
      return_args.append(package_version_option)
    return return_args


class PackageManagerNpm(PackageManager):
  def __init__(self, tool_installation):
    super(PackageManagerNpm, self).__init__(PACKAGE_MANAGER_NPM, tool_installation)

  def _get_run_script_args(self):
    return ['run-script']

  def _get_installation_args(self, install_optional, production_only, force, frozen_lockfile):
    return_args = ['install']
    if not install_optional:
      return_args.append('--no-optional')
    if production_only:
      return_args.append('--production')
    if force:
      return_args.append('--force')
    if frozen_lockfile:
      LOG.warning('{} does not support frozen lockfile option.  Ignored.'.format(self.name))
    return return_args

  def _get_add_package_args(self, package, type_option, version_option):
    return_args = ['install', package]
    package_type_option = {
      PackageInstallationTypeOption.PROD: '--save-prod',
      PackageInstallationTypeOption.DEV: '--save-dev',
      PackageInstallationTypeOption.PEER: None,
      PackageInstallationTypeOption.OPTIONAL: '--save-optional',
      PackageInstallationTypeOption.BUNDLE: '--save-bundle',
      PackageInstallationTypeOption.NO_SAVE: '--no-save',
    }.get(type_option)
    if package_type_option is None:
      LOG.warning('{} does not support {} packages, ignored.'.format(self.name, type_option))
    elif package_type_option:  # Skip over '' entries
      return_args.append(package_type_option)
    package_version_option = {
      PackageInstallationVersionOption.EXACT: '--save-exact',
      PackageInstallationVersionOption.TILDE: None,
    }.get(version_option)
    if package_version_option is None:
      LOG.warning(
        '{} does not support install with {} version, ignored.'.format(self.name, version_option))
    elif package_version_option:  # Skip over '' entries
      return_args.append(package_version_option)
    return return_args

  def run_cli(self, cli, args=None, node_paths=None):
    raise RuntimeError('npm does not support running a cli directly.  Please use Yarn instead.')
apache-2.0
3,468,031,671,416,038,000
36.72541
102
0.705703
false
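For a feel of the option-to-flag translation above, here is the yarn variant reproduced as a standalone function (a sketch mirroring _get_installation_args, not the pants API itself):

def yarn_install_args(install_optional=False, production_only=False,
                      force=False, frozen_lockfile=True):
    # Mirrors PackageManagerYarnpkg._get_installation_args above.
    args = ['--non-interactive']
    if not install_optional:
        args.append('--ignore-optional')
    if production_only:
        args.append('--production=true')
    if force:
        args.append('--force')
    if frozen_lockfile:
        args.append('--frozen-lockfile')
    return args

print(yarn_install_args(production_only=True))
# ['--non-interactive', '--ignore-optional', '--production=true', '--frozen-lockfile']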
dictoon/blenderseed
docs/conf.py
2
8063
# -*- coding: utf-8 -*- # # Configuration file for the Sphinx documentation builder. # # This file does only contain a selection of the most common options. For a # full list see the documentation: # http://www.sphinx-doc.org/en/stable/config # -- Path setup -------------------------------------------------------------- # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # import os import sys import sphinx_bootstrap_theme # sys.path.insert(0, os.path.abspath('.')) sys.path.insert(0, os.path.abspath('../..')) # -- Project information ----------------------------------------------------- project = 'blenderseed' copyright = '2010-2018, The appleseedhq Organization' author = 'The appleseedhq Organization' # The short X.Y version version = '2.0.0-beta' # The full version, including alpha/beta/rc tags release = '2.0.0-beta' # -- General configuration --------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. # # needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = ['sphinx.ext.intersphinx'] intersphinx_mapping = {'appleseed_maya': ('http://appleseed.readthedocs.io/projects/appleseed-maya/en/latest/', None)} # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # # source_suffix = ['.rst', '.md'] source_suffix = '.rst' # The master toctree document. master_doc = 'index' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. language = 'en' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This pattern also affects html_static_path and html_extra_path . exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # -- Options for HTML output ------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # html_theme = 'bootstrap' html_theme_path = sphinx_bootstrap_theme.get_html_theme_path() # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # html_theme_options = { # Navigation bar title. (Default: ``project`` value) 'navbar_title': "blenderseed", # Tab name for entire site. (Default: "Site") 'navbar_site_name': "Site", # A list of tuples containing pages or urls to link to. # Valid tuples should be in the following forms: # (name, page) # a link to a page # (name, "/aa/bb", 1) # a link to an arbitrary relative url # (name, "http://example.com", True) # arbitrary absolute url # Note the "1" or "True" value above as the third argument to indicate # an arbitrary url. 
'navbar_links': [ ("Features", "features"), ("Installation", "installation"), ("Reference", "reference"), ("About", "about"), ("Tutorials", "tutorials"), ("appleseedhq", "https://appleseedhq.net", True), # ("vimeo", "https://vimeo.com/appleseedhq", True) ], # Render the next and previous page links in navbar. (Default: true) 'navbar_sidebarrel': False, # Render the current pages TOC in the navbar. (Default: true) 'navbar_pagenav': False, # Tab name for the current pages TOC. (Default: "Page") # 'navbar_pagenav_name': "Page", # Global TOC depth for "site" navbar tab. (Default: 1) # Switching to -1 shows all levels. 'globaltoc_depth': -1, # Include hidden TOCs in Site navbar? # # Note: If this is "false", you cannot have mixed ``:hidden:`` and # non-hidden ``toctree`` directives in the same page, or else the build # will break. # # Values: "true" (default) or "false" 'globaltoc_includehidden': "false", # HTML navbar class (Default: "navbar") to attach to <div> element. # For black navbar, do "navbar navbar-inverse" # 'navbar_class': "navbar navbar-inverse", 'navbar_class': "navbar navbar", # Fix navigation bar to top of page? # Values: "true" (default) or "false" 'navbar_fixed_top': "true", # Location of link to source. # Options are "nav" (default), "footer" or anything else to exclude. 'source_link_position': "footer", # Bootswatch (http://bootswatch.com/) theme. # # Options are nothing (default) or the name of a valid theme # such as "amelia" or "cosmo". # 'bootswatch_theme': "lumen", # 'bootswatch_theme': "sandstone", # 'bootswatch_theme': "readable", 'bootswatch_theme': "yeti", # Choose Bootstrap version. # Values: "3" (default) or "2" (in quotes) 'bootstrap_version': "3", } def setup(app): app.add_stylesheet("css/blockquote_custom1.css") html_logo = "_static/appleseed-logo.png" # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. html_favicon = "_static/appleseed-favicon.ico" # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # Custom sidebar templates, must be a dictionary that maps document names # to template names. # # The default sidebars (for documents that don't match any pattern) are # defined by theme itself. Builtin themes are using these templates by # default: ``['localtoc.html', 'relations.html', 'sourcelink.html', # 'searchbox.html']``. # html_sidebars = { '**': ['localtoc.html', 'searchbox.html'], 'using/windows': ['windowssidebar.html', 'searchbox.html'], } # -- Options for HTMLHelp output --------------------------------------------- # Output file base name for HTML help builder. htmlhelp_basename = 'blenderseedManualdoc' # -- Options for LaTeX output ------------------------------------------------ latex_elements = { # The paper size ('letterpaper' or 'a4paper'). # # 'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). # # 'pointsize': '10pt', # Additional stuff for the LaTeX preamble. # # 'preamble': '', # Latex figure (float) alignment # # 'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). 
latex_documents = [ (master_doc, 'blenderseed.tex', 'blenderseed Documentation', 'blenderseed Manual', 'manual'), ] # -- Options for manual page output ------------------------------------------ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ (master_doc, 'blenderseed', 'blenderseed Documentation', [author], 1) ] # -- Options for Texinfo output ---------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ (master_doc, 'blenderseed', 'blenderseed Documentation', author, 'blenderseed', 'appleseed plugin for Blender', 'Miscellaneous'), ]
mit
5,154,334,373,517,243,000
31.643725
118
0.644177
false
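The navbar_links entries above follow the sphinx-bootstrap-theme convention of (name, page) for document links and (name, url, True) for absolute URLs; a minimal conf.py fragment using the same pattern (example.org is a placeholder):

import sphinx_bootstrap_theme

html_theme = 'bootstrap'
html_theme_path = sphinx_bootstrap_theme.get_html_theme_path()
html_theme_options = {
    'navbar_links': [
        ('Docs', 'index'),                        # relative page link
        ('Source', 'https://example.org', True),  # absolute URL
    ],
}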
jimberlage/servo
components/script/dom/bindings/codegen/parser/tests/test_implements.py
264
5961
# Import the WebIDL module, so we can do isinstance checks and whatnot import WebIDL def WebIDLTest(parser, harness): # Basic functionality threw = False try: parser.parse(""" A implements B; interface B { attribute long x; }; interface A { attribute long y; }; """) results = parser.finish() except: threw = True harness.ok(not threw, "Should not have thrown on implements statement " "before interfaces") harness.check(len(results), 3, "We have three statements") harness.ok(isinstance(results[1], WebIDL.IDLInterface), "B is an interface") harness.check(len(results[1].members), 1, "B has one member") A = results[2] harness.ok(isinstance(A, WebIDL.IDLInterface), "A is an interface") harness.check(len(A.members), 2, "A has two members") harness.check(A.members[0].identifier.name, "y", "First member is 'y'") harness.check(A.members[1].identifier.name, "x", "Second member is 'x'") # Duplicated member names not allowed threw = False try: parser.parse(""" C implements D; interface D { attribute long x; }; interface C { attribute long x; }; """) parser.finish() except: threw = True harness.ok(threw, "Should have thrown on implemented interface duplicating " "a name on base interface") # Same, but duplicated across implemented interfaces threw = False try: parser.parse(""" E implements F; E implements G; interface F { attribute long x; }; interface G { attribute long x; }; interface E {}; """) parser.finish() except: threw = True harness.ok(threw, "Should have thrown on implemented interfaces " "duplicating each other's member names") # Same, but duplicated across indirectly implemented interfaces threw = False try: parser.parse(""" H implements I; H implements J; I implements K; interface K { attribute long x; }; interface L { attribute long x; }; interface I {}; interface J : L {}; interface H {}; """) parser.finish() except: threw = True harness.ok(threw, "Should have thrown on indirectly implemented interfaces " "duplicating each other's member names") # Same, but duplicated across an implemented interface and its parent threw = False try: parser.parse(""" M implements N; interface O { attribute long x; }; interface N : O { attribute long x; }; interface M {}; """) parser.finish() except: threw = True harness.ok(threw, "Should have thrown on implemented interface and its " "ancestor duplicating member names") # Reset the parser so we can actually find things where we expect # them in the list parser = parser.reset() # Diamonds should be allowed threw = False try: parser.parse(""" P implements Q; P implements R; Q implements S; R implements S; interface Q {}; interface R {}; interface S { attribute long x; }; interface P {}; """) results = parser.finish() except: threw = True harness.ok(not threw, "Diamond inheritance is fine") harness.check(results[6].identifier.name, "S", "We should be looking at 'S'") harness.check(len(results[6].members), 1, "S should have one member") harness.check(results[6].members[0].identifier.name, "x", "S's member should be 'x'") parser = parser.reset() threw = False try: parser.parse(""" interface TestInterface { }; callback interface TestCallbackInterface { }; TestInterface implements TestCallbackInterface; """) results = parser.finish() except: threw = True harness.ok(threw, "Should not allow callback interfaces on the right-hand side " "of 'implements'") parser = parser.reset() threw = False try: parser.parse(""" interface TestInterface { }; callback interface TestCallbackInterface { }; TestCallbackInterface implements TestInterface; """) results = parser.finish() 
except: threw = True harness.ok(threw, "Should not allow callback interfaces on the left-hand side of " "'implements'") parser = parser.reset() threw = False try: parser.parse(""" interface TestInterface { }; dictionary Dict { }; Dict implements TestInterface; """) results = parser.finish() except: threw = True harness.ok(threw, "Should not allow non-interfaces on the left-hand side " "of 'implements'") parser = parser.reset() threw = False try: parser.parse(""" interface TestInterface { }; dictionary Dict { }; TestInterface implements Dict; """) results = parser.finish() except: threw = True harness.ok(threw, "Should not allow non-interfaces on the right-hand side " "of 'implements'")
mpl-2.0
6,021,330,726,498,025,000
26.597222
81
0.522899
false
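Every negative case above repeats the same try/parse/except/ok shape; a condensed helper expressing that shape (hypothetical, written against the same parser and harness objects the test receives):

def assert_parse_fails(parser, harness, idl, message):
    # Parse the IDL snippet and assert that it is rejected.
    # Note: callers should parser.reset() afterwards, as the tests do.
    threw = False
    try:
        parser.parse(idl)
        parser.finish()
    except Exception:
        threw = True
    harness.ok(threw, message)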
segwitcoin/SegwitCoin
contrib/linearize/linearize-hashes.py
27
4579
#!/usr/bin/env python3 # # linearize-hashes.py: List blocks in a linear, no-fork version of the chain. # # Copyright (c) 2013-2016 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. # from __future__ import print_function try: # Python 3 import http.client as httplib except ImportError: # Python 2 import httplib import json import re import base64 import sys import os import os.path settings = {} ##### Switch endian-ness ##### def hex_switchEndian(s): """ Switches the endianness of a hex string (in pairs of hex chars) """ pairList = [s[i:i+2].encode() for i in range(0, len(s), 2)] return b''.join(pairList[::-1]).decode() class BitcoinRPC: def __init__(self, host, port, username, password): authpair = "%s:%s" % (username, password) authpair = authpair.encode('utf-8') self.authhdr = b"Basic " + base64.b64encode(authpair) self.conn = httplib.HTTPConnection(host, port=port, timeout=30) def execute(self, obj): try: self.conn.request('POST', '/', json.dumps(obj), { 'Authorization' : self.authhdr, 'Content-type' : 'application/json' }) except ConnectionRefusedError: print('RPC connection refused. Check RPC settings and the server status.', file=sys.stderr) return None resp = self.conn.getresponse() if resp is None: print("JSON-RPC: no response", file=sys.stderr) return None body = resp.read().decode('utf-8') resp_obj = json.loads(body) return resp_obj @staticmethod def build_request(idx, method, params): obj = { 'version' : '1.1', 'method' : method, 'id' : idx } if params is None: obj['params'] = [] else: obj['params'] = params return obj @staticmethod def response_is_error(resp_obj): return 'error' in resp_obj and resp_obj['error'] is not None def get_block_hashes(settings, max_blocks_per_call=10000): rpc = BitcoinRPC(settings['host'], settings['port'], settings['rpcuser'], settings['rpcpassword']) height = settings['min_height'] while height < settings['max_height']+1: num_blocks = min(settings['max_height']+1-height, max_blocks_per_call) batch = [] for x in range(num_blocks): batch.append(rpc.build_request(x, 'getblockhash', [height + x])) reply = rpc.execute(batch) if reply is None: print('Cannot continue. 
Program will halt.')
			return None

		for x, resp_obj in enumerate(reply):
			if rpc.response_is_error(resp_obj):
				print('JSON-RPC: error at height', height+x, ': ', resp_obj['error'], file=sys.stderr)
				sys.exit(1)
			assert(resp_obj['id'] == x) # assume replies are in-sequence
			if settings['rev_hash_bytes'] == 'true':
				resp_obj['result'] = hex_switchEndian(resp_obj['result'])
			print(resp_obj['result'])

		height += num_blocks

def get_rpc_cookie():
	# Open the cookie file
	with open(os.path.join(os.path.expanduser(settings['datadir']), '.cookie'), 'r') as f:
		combined = f.readline()
		combined_split = combined.split(":")
		settings['rpcuser'] = combined_split[0]
		settings['rpcpassword'] = combined_split[1]

if __name__ == '__main__':
	if len(sys.argv) != 2:
		print("Usage: linearize-hashes.py CONFIG-FILE")
		sys.exit(1)

	f = open(sys.argv[1])
	for line in f:
		# skip comment lines
		m = re.search(r'^\s*#', line)
		if m:
			continue

		# parse key=value lines
		m = re.search(r'^(\w+)\s*=\s*(\S.*)$', line)
		if m is None:
			continue
		settings[m.group(1)] = m.group(2)
	f.close()

	if 'host' not in settings:
		settings['host'] = '127.0.0.1'
	if 'port' not in settings:
		settings['port'] = 8332
	if 'min_height' not in settings:
		settings['min_height'] = 0
	if 'max_height' not in settings:
		settings['max_height'] = 313000
	if 'rev_hash_bytes' not in settings:
		settings['rev_hash_bytes'] = 'false'

	use_userpass = True
	use_datadir = False
	if 'rpcuser' not in settings or 'rpcpassword' not in settings:
		use_userpass = False
	if 'datadir' in settings and not use_userpass:
		use_datadir = True
	if not use_userpass and not use_datadir:
		print("Missing datadir or username and/or password in cfg file", file=sys.stderr)
		sys.exit(1)

	settings['port'] = int(settings['port'])
	settings['min_height'] = int(settings['min_height'])
	settings['max_height'] = int(settings['max_height'])

	# Force hash byte format setting to be lowercase to make comparisons easier.
	settings['rev_hash_bytes'] = settings['rev_hash_bytes'].lower()

	# Get the rpc user and pass from the cookie if the datadir is set
	if use_datadir:
		get_rpc_cookie()

	get_block_hashes(settings)
mit
2,729,803,349,214,221,000
28.165605
90
0.672199
false
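The batch that get_block_hashes sends is plain JSON-RPC 1.1: an array of request objects answered in one HTTP round trip. A minimal sketch of building such a payload (standalone, no RPC connection needed):

import json

def build_batch(method, params_list):
    # One request object per params entry, ids matching list positions.
    return json.dumps([
        {'version': '1.1', 'method': method, 'id': i, 'params': params}
        for i, params in enumerate(params_list)
    ])

# Ten getblockhash calls for heights 0..9 in a single round trip.
payload = build_batch('getblockhash', [[h] for h in range(10)])
print(payload[:80])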
triveous/LearnFlask
flask/lib/python2.7/site-packages/pip/_vendor/requests/exceptions.py
341
1877
# -*- coding: utf-8 -*- """ requests.exceptions ~~~~~~~~~~~~~~~~~~~ This module contains the set of Requests' exceptions. """ from .packages.urllib3.exceptions import HTTPError as BaseHTTPError class RequestException(IOError): """There was an ambiguous exception that occurred while handling your request.""" def __init__(self, *args, **kwargs): """ Initialize RequestException with `request` and `response` objects. """ response = kwargs.pop('response', None) self.response = response self.request = kwargs.pop('request', None) if (response is not None and not self.request and hasattr(response, 'request')): self.request = self.response.request super(RequestException, self).__init__(*args, **kwargs) class HTTPError(RequestException): """An HTTP error occurred.""" class ConnectionError(RequestException): """A Connection error occurred.""" class ProxyError(ConnectionError): """A proxy error occurred.""" class SSLError(ConnectionError): """An SSL error occurred.""" class Timeout(RequestException): """The request timed out.""" class URLRequired(RequestException): """A valid URL is required to make a request.""" class TooManyRedirects(RequestException): """Too many redirects.""" class MissingSchema(RequestException, ValueError): """The URL schema (e.g. http or https) is missing.""" class InvalidSchema(RequestException, ValueError): """See defaults.py for valid schemas.""" class InvalidURL(RequestException, ValueError): """ The URL provided was somehow invalid. """ class ChunkedEncodingError(RequestException): """The server declared chunked encoding but sent an invalid chunk.""" class ContentDecodingError(RequestException, BaseHTTPError): """Failed to decode response content"""
apache-2.0
2,154,816,855,264,491,500
24.026667
74
0.679808
false
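Since everything above ultimately derives from RequestException, callers can catch as narrowly or broadly as they like; a short illustration of layered handling (fetching a URL is just for demonstration):

import requests

def fetch(url):
    try:
        return requests.get(url, timeout=5).text
    except requests.exceptions.Timeout:
        return None   # specific: the request timed out
    except requests.exceptions.ConnectionError:
        return None   # ProxyError and SSLError land here too
    except requests.exceptions.RequestException:
        raise         # catch-all base class for the module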
perryjrandall/arsenalsuite
cpp/apps/bach/web/bach/models/keyword.py
10
1381
# # Copyright (c) 2009 Dr. D Studios. (Please refer to license for details) # SVN_META_HEADURL = "$HeadURL: $" # SVN_META_ID = "$Id: keyword.py 9408 2010-03-03 22:35:49Z brobison $" # from sqlalchemy import Column, Table, types, ForeignKey, Index from sqlalchemy.orm import relation, backref from ..config import mapper, metadata from .asset import Asset class Keyword( object ): def __init__( self ): self.keybachkeyword = None self.name = None @property def asset_count(self): return 0 #len(self.assets) def __repr__( self ): return '<%s:%s:%s>' % ( self.__class__.__name__, self.keybachkeyword, self.name ) table = Table( 'bachkeyword', metadata, Column( 'keybachkeyword', types.Integer, primary_key=True ), Column( 'name', types.String, nullable=False ) ) join_table = Table( 'bachkeywordmap', metadata, Column( 'fkeybachkeyword', types.Integer, ForeignKey( 'bachkeyword.keybachkeyword' ) ), Column( 'fkeybachasset', types.Integer, ForeignKey( 'bachasset.keybachasset' ) ) ) mapper( Keyword, table, properties={ 'assets':relation( Asset, secondary=join_table, # backref='buckets' ), } )
gpl-2.0
-8,533,436,228,025,473,000
33.525
107
0.566256
false
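The bachkeywordmap table above is a standard association table for a many-to-many relation; the same wiring in a self-contained snippet, assuming SQLAlchemy 1.4+ (table and class names are illustrative, not the Bach schema):

from sqlalchemy import Column, ForeignKey, Integer, String, Table, create_engine
from sqlalchemy.orm import declarative_base, relationship

Base = declarative_base()

# Association table: two foreign keys, no mapped class of its own.
tag_map = Table(
    'tag_map', Base.metadata,
    Column('tag_id', Integer, ForeignKey('tag.id')),
    Column('item_id', Integer, ForeignKey('item.id')),
)

class Item(Base):
    __tablename__ = 'item'
    id = Column(Integer, primary_key=True)

class Tag(Base):
    __tablename__ = 'tag'
    id = Column(Integer, primary_key=True)
    name = Column(String, nullable=False)
    items = relationship(Item, secondary=tag_map)  # many-to-many via tag_map

Base.metadata.create_all(create_engine('sqlite://'))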
liu602348184/django
tests/migrations/test_writer.py
65
22965
# -*- coding: utf-8 -*- from __future__ import unicode_literals import datetime import math import os import re import tokenize import unittest import custom_migration_operations.more_operations import custom_migration_operations.operations from django.conf import settings from django.core.validators import EmailValidator, RegexValidator from django.db import migrations, models from django.db.migrations.writer import ( MigrationWriter, OperationWriter, SettingsReference, ) from django.test import SimpleTestCase, ignore_warnings from django.utils import datetime_safe, six from django.utils._os import upath from django.utils.deconstruct import deconstructible from django.utils.timezone import FixedOffset, get_default_timezone, utc from django.utils.translation import ugettext_lazy as _ from .models import FoodManager, FoodQuerySet class TestModel1(object): def upload_to(self): return "somewhere dynamic" thing = models.FileField(upload_to=upload_to) class OperationWriterTests(SimpleTestCase): def test_empty_signature(self): operation = custom_migration_operations.operations.TestOperation() buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, 'custom_migration_operations.operations.TestOperation(\n' '),' ) def test_args_signature(self): operation = custom_migration_operations.operations.ArgsOperation(1, 2) buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, 'custom_migration_operations.operations.ArgsOperation(\n' ' arg1=1,\n' ' arg2=2,\n' '),' ) def test_kwargs_signature(self): operation = custom_migration_operations.operations.KwargsOperation(kwarg1=1) buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, 'custom_migration_operations.operations.KwargsOperation(\n' ' kwarg1=1,\n' '),' ) def test_args_kwargs_signature(self): operation = custom_migration_operations.operations.ArgsKwargsOperation(1, 2, kwarg2=4) buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, 'custom_migration_operations.operations.ArgsKwargsOperation(\n' ' arg1=1,\n' ' arg2=2,\n' ' kwarg2=4,\n' '),' ) def test_nested_args_signature(self): operation = custom_migration_operations.operations.ArgsOperation( custom_migration_operations.operations.ArgsOperation(1, 2), custom_migration_operations.operations.KwargsOperation(kwarg1=3, kwarg2=4) ) buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, 'custom_migration_operations.operations.ArgsOperation(\n' ' arg1=custom_migration_operations.operations.ArgsOperation(\n' ' arg1=1,\n' ' arg2=2,\n' ' ),\n' ' arg2=custom_migration_operations.operations.KwargsOperation(\n' ' kwarg1=3,\n' ' kwarg2=4,\n' ' ),\n' '),' ) def test_multiline_args_signature(self): operation = custom_migration_operations.operations.ArgsOperation("test\n arg1", "test\narg2") buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, "custom_migration_operations.operations.ArgsOperation(\n" " arg1='test\\n arg1',\n" " 
arg2='test\\narg2',\n" ")," ) def test_expand_args_signature(self): operation = custom_migration_operations.operations.ExpandArgsOperation([1, 2]) buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, 'custom_migration_operations.operations.ExpandArgsOperation(\n' ' arg=[\n' ' 1,\n' ' 2,\n' ' ],\n' '),' ) def test_nested_operation_expand_args_signature(self): operation = custom_migration_operations.operations.ExpandArgsOperation( arg=[ custom_migration_operations.operations.KwargsOperation( kwarg1=1, kwarg2=2, ), ] ) buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, 'custom_migration_operations.operations.ExpandArgsOperation(\n' ' arg=[\n' ' custom_migration_operations.operations.KwargsOperation(\n' ' kwarg1=1,\n' ' kwarg2=2,\n' ' ),\n' ' ],\n' '),' ) class WriterTests(SimpleTestCase): """ Tests the migration writer (makes migration files from Migration instances) """ def safe_exec(self, string, value=None): l = {} try: exec(string, globals(), l) except Exception as e: if value: self.fail("Could not exec %r (from value %r): %s" % (string.strip(), value, e)) else: self.fail("Could not exec %r: %s" % (string.strip(), e)) return l def serialize_round_trip(self, value): string, imports = MigrationWriter.serialize(value) return self.safe_exec("%s\ntest_value_result = %s" % ("\n".join(imports), string), value)['test_value_result'] def assertSerializedEqual(self, value): self.assertEqual(self.serialize_round_trip(value), value) def assertSerializedResultEqual(self, value, target): self.assertEqual(MigrationWriter.serialize(value), target) def assertSerializedFieldEqual(self, value): new_value = self.serialize_round_trip(value) self.assertEqual(value.__class__, new_value.__class__) self.assertEqual(value.max_length, new_value.max_length) self.assertEqual(value.null, new_value.null) self.assertEqual(value.unique, new_value.unique) def test_serialize_numbers(self): self.assertSerializedEqual(1) self.assertSerializedEqual(1.2) self.assertTrue(math.isinf(self.serialize_round_trip(float("inf")))) self.assertTrue(math.isinf(self.serialize_round_trip(float("-inf")))) self.assertTrue(math.isnan(self.serialize_round_trip(float("nan")))) def test_serialize_constants(self): self.assertSerializedEqual(None) self.assertSerializedEqual(True) self.assertSerializedEqual(False) def test_serialize_strings(self): self.assertSerializedEqual(b"foobar") string, imports = MigrationWriter.serialize(b"foobar") self.assertEqual(string, "b'foobar'") self.assertSerializedEqual("föobár") string, imports = MigrationWriter.serialize("foobar") self.assertEqual(string, "'foobar'") def test_serialize_multiline_strings(self): self.assertSerializedEqual(b"foo\nbar") string, imports = MigrationWriter.serialize(b"foo\nbar") self.assertEqual(string, "b'foo\\nbar'") self.assertSerializedEqual("föo\nbár") string, imports = MigrationWriter.serialize("foo\nbar") self.assertEqual(string, "'foo\\nbar'") def test_serialize_collections(self): self.assertSerializedEqual({1: 2}) self.assertSerializedEqual(["a", 2, True, None]) self.assertSerializedEqual({2, 3, "eighty"}) self.assertSerializedEqual({"lalalala": ["yeah", "no", "maybe"]}) self.assertSerializedEqual(_('Hello')) def test_serialize_builtin_types(self): self.assertSerializedEqual([list, tuple, dict, set, frozenset]) self.assertSerializedResultEqual( 
[list, tuple, dict, set, frozenset], ("[list, tuple, dict, set, frozenset]", set()) ) def test_serialize_functions(self): with six.assertRaisesRegex(self, ValueError, 'Cannot serialize function: lambda'): self.assertSerializedEqual(lambda x: 42) self.assertSerializedEqual(models.SET_NULL) string, imports = MigrationWriter.serialize(models.SET(42)) self.assertEqual(string, 'models.SET(42)') self.serialize_round_trip(models.SET(42)) def test_serialize_datetime(self): self.assertSerializedEqual(datetime.datetime.utcnow()) self.assertSerializedEqual(datetime.datetime.utcnow) self.assertSerializedEqual(datetime.datetime.today()) self.assertSerializedEqual(datetime.datetime.today) self.assertSerializedEqual(datetime.date.today()) self.assertSerializedEqual(datetime.date.today) self.assertSerializedEqual(datetime.datetime.now().time()) self.assertSerializedEqual(datetime.datetime(2014, 1, 1, 1, 1, tzinfo=get_default_timezone())) self.assertSerializedEqual(datetime.datetime(2013, 12, 31, 22, 1, tzinfo=FixedOffset(180))) self.assertSerializedResultEqual( datetime.datetime(2014, 1, 1, 1, 1), ("datetime.datetime(2014, 1, 1, 1, 1)", {'import datetime'}) ) self.assertSerializedResultEqual( datetime.datetime(2012, 1, 1, 1, 1, tzinfo=utc), ( "datetime.datetime(2012, 1, 1, 1, 1, tzinfo=utc)", {'import datetime', 'from django.utils.timezone import utc'}, ) ) def test_serialize_datetime_safe(self): self.assertSerializedResultEqual( datetime_safe.date(2014, 3, 31), ("datetime.date(2014, 3, 31)", {'import datetime'}) ) self.assertSerializedResultEqual( datetime_safe.time(10, 25), ("datetime.time(10, 25)", {'import datetime'}) ) self.assertSerializedResultEqual( datetime_safe.datetime(2014, 3, 31, 16, 4, 31), ("datetime.datetime(2014, 3, 31, 16, 4, 31)", {'import datetime'}) ) def test_serialize_fields(self): self.assertSerializedFieldEqual(models.CharField(max_length=255)) self.assertSerializedResultEqual( models.CharField(max_length=255), ("models.CharField(max_length=255)", {"from django.db import models"}) ) self.assertSerializedFieldEqual(models.TextField(null=True, blank=True)) self.assertSerializedResultEqual( models.TextField(null=True, blank=True), ("models.TextField(blank=True, null=True)", {'from django.db import models'}) ) def test_serialize_settings(self): self.assertSerializedEqual(SettingsReference(settings.AUTH_USER_MODEL, "AUTH_USER_MODEL")) self.assertSerializedResultEqual( SettingsReference("someapp.model", "AUTH_USER_MODEL"), ("settings.AUTH_USER_MODEL", {"from django.conf import settings"}) ) self.assertSerializedResultEqual( ((x, x * x) for x in range(3)), ("((0, 0), (1, 1), (2, 4))", set()) ) def test_serialize_compiled_regex(self): """ Make sure compiled regex can be serialized. """ regex = re.compile(r'^\w+$', re.U) self.assertSerializedEqual(regex) def test_serialize_class_based_validators(self): """ Ticket #22943: Test serialization of class-based validators, including compiled regexes. """ validator = RegexValidator(message="hello") string = MigrationWriter.serialize(validator)[0] self.assertEqual(string, "django.core.validators.RegexValidator(message='hello')") self.serialize_round_trip(validator) # Test with a compiled regex. 
validator = RegexValidator(regex=re.compile(r'^\w+$', re.U)) string = MigrationWriter.serialize(validator)[0] self.assertEqual(string, "django.core.validators.RegexValidator(regex=re.compile('^\\\\w+$', 32))") self.serialize_round_trip(validator) # Test a string regex with flag validator = RegexValidator(r'^[0-9]+$', flags=re.U) string = MigrationWriter.serialize(validator)[0] self.assertEqual(string, "django.core.validators.RegexValidator('^[0-9]+$', flags=32)") self.serialize_round_trip(validator) # Test message and code validator = RegexValidator('^[-a-zA-Z0-9_]+$', 'Invalid', 'invalid') string = MigrationWriter.serialize(validator)[0] self.assertEqual(string, "django.core.validators.RegexValidator('^[-a-zA-Z0-9_]+$', 'Invalid', 'invalid')") self.serialize_round_trip(validator) # Test with a subclass. validator = EmailValidator(message="hello") string = MigrationWriter.serialize(validator)[0] self.assertEqual(string, "django.core.validators.EmailValidator(message='hello')") self.serialize_round_trip(validator) validator = deconstructible(path="migrations.test_writer.EmailValidator")(EmailValidator)(message="hello") string = MigrationWriter.serialize(validator)[0] self.assertEqual(string, "migrations.test_writer.EmailValidator(message='hello')") validator = deconstructible(path="custom.EmailValidator")(EmailValidator)(message="hello") with six.assertRaisesRegex(self, ImportError, "No module named '?custom'?"): MigrationWriter.serialize(validator) validator = deconstructible(path="django.core.validators.EmailValidator2")(EmailValidator)(message="hello") with self.assertRaisesMessage(ValueError, "Could not find object EmailValidator2 in django.core.validators."): MigrationWriter.serialize(validator) def test_serialize_empty_nonempty_tuple(self): """ Ticket #22679: makemigrations generates invalid code for (an empty tuple) default_permissions = () """ empty_tuple = () one_item_tuple = ('a',) many_items_tuple = ('a', 'b', 'c') self.assertSerializedEqual(empty_tuple) self.assertSerializedEqual(one_item_tuple) self.assertSerializedEqual(many_items_tuple) @unittest.skipUnless(six.PY2, "Only applies on Python 2") def test_serialize_direct_function_reference(self): """ Ticket #22436: You cannot use a function straight from its body (e.g. define the method and use it in the same body) """ with self.assertRaises(ValueError): self.serialize_round_trip(TestModel1.thing) def test_serialize_local_function_reference(self): """ Neither py2 or py3 can serialize a reference in a local scope. 
""" class TestModel2(object): def upload_to(self): return "somewhere dynamic" thing = models.FileField(upload_to=upload_to) with self.assertRaises(ValueError): self.serialize_round_trip(TestModel2.thing) def test_serialize_local_function_reference_message(self): """ Make sure user is seeing which module/function is the issue """ class TestModel2(object): def upload_to(self): return "somewhere dynamic" thing = models.FileField(upload_to=upload_to) with six.assertRaisesRegex(self, ValueError, '^Could not find function upload_to in migrations.test_writer'): self.serialize_round_trip(TestModel2.thing) def test_serialize_managers(self): self.assertSerializedEqual(models.Manager()) self.assertSerializedResultEqual( FoodQuerySet.as_manager(), ('migrations.models.FoodQuerySet.as_manager()', {'import migrations.models'}) ) self.assertSerializedEqual(FoodManager('a', 'b')) self.assertSerializedEqual(FoodManager('x', 'y', c=3, d=4)) def test_serialize_frozensets(self): self.assertSerializedEqual(frozenset()) self.assertSerializedEqual(frozenset("let it go")) def test_serialize_timedelta(self): self.assertSerializedEqual(datetime.timedelta()) self.assertSerializedEqual(datetime.timedelta(minutes=42)) def test_simple_migration(self): """ Tests serializing a simple migration. """ fields = { 'charfield': models.DateTimeField(default=datetime.datetime.utcnow), 'datetimefield': models.DateTimeField(default=datetime.datetime.utcnow), } options = { 'verbose_name': 'My model', 'verbose_name_plural': 'My models', } migration = type(str("Migration"), (migrations.Migration,), { "operations": [ migrations.CreateModel("MyModel", tuple(fields.items()), options, (models.Model,)), migrations.CreateModel("MyModel2", tuple(fields.items()), bases=(models.Model,)), migrations.CreateModel(name="MyModel3", fields=tuple(fields.items()), options=options, bases=(models.Model,)), migrations.DeleteModel("MyModel"), migrations.AddField("OtherModel", "datetimefield", fields["datetimefield"]), ], "dependencies": [("testapp", "some_other_one")], }) writer = MigrationWriter(migration) output = writer.as_string() # It should NOT be unicode. self.assertIsInstance(output, six.binary_type, "Migration as_string returned unicode") # We don't test the output formatting - that's too fragile. # Just make sure it runs for now, and that things look alright. result = self.safe_exec(output) self.assertIn("Migration", result) # In order to preserve compatibility with Python 3.2 unicode literals # prefix shouldn't be added to strings. 
tokens = tokenize.generate_tokens(six.StringIO(str(output)).readline) for token_type, token_source, (srow, scol), __, line in tokens: if token_type == tokenize.STRING: self.assertFalse( token_source.startswith('u'), "Unicode literal prefix found at %d:%d: %r" % ( srow, scol, line.strip() ) ) # Silence warning on Python 2: Not importing directory # 'tests/migrations/migrations_test_apps/without_init_file/migrations': # missing __init__.py @ignore_warnings(category=ImportWarning) def test_migration_path(self): test_apps = [ 'migrations.migrations_test_apps.normal', 'migrations.migrations_test_apps.with_package_model', 'migrations.migrations_test_apps.without_init_file', ] base_dir = os.path.dirname(os.path.dirname(upath(__file__))) for app in test_apps: with self.modify_settings(INSTALLED_APPS={'append': app}): migration = migrations.Migration('0001_initial', app.split('.')[-1]) expected_path = os.path.join(base_dir, *(app.split('.') + ['migrations', '0001_initial.py'])) writer = MigrationWriter(migration) self.assertEqual(writer.path, expected_path) def test_custom_operation(self): migration = type(str("Migration"), (migrations.Migration,), { "operations": [ custom_migration_operations.operations.TestOperation(), custom_migration_operations.operations.CreateModel(), migrations.CreateModel("MyModel", (), {}, (models.Model,)), custom_migration_operations.more_operations.TestOperation() ], "dependencies": [] }) writer = MigrationWriter(migration) output = writer.as_string() result = self.safe_exec(output) self.assertIn("custom_migration_operations", result) self.assertNotEqual( result['custom_migration_operations'].operations.TestOperation, result['custom_migration_operations'].more_operations.TestOperation ) def test_sorted_imports(self): """ #24155 - Tests ordering of imports. """ migration = type(str("Migration"), (migrations.Migration,), { "operations": [ migrations.AddField("mymodel", "myfield", models.DateTimeField( default=datetime.datetime(2012, 1, 1, 1, 1, tzinfo=utc), )), ] }) writer = MigrationWriter(migration) output = writer.as_string().decode('utf-8') self.assertIn( "import datetime\n" "from django.db import migrations, models\n" "from django.utils.timezone import utc\n", output ) def test_models_import_omitted(self): """ django.db.models shouldn't be imported if unused. """ migration = type(str("Migration"), (migrations.Migration,), { "operations": [ migrations.AlterModelOptions( name='model', options={'verbose_name': 'model', 'verbose_name_plural': 'models'}, ), ] }) writer = MigrationWriter(migration) output = writer.as_string().decode('utf-8') self.assertIn("from django.db import migrations\n", output) def test_deconstruct_class_arguments(self): # Yes, it doesn't make sense to use a class as a default for a # CharField. It does make sense for custom fields though, for example # an enumfield that takes the enum class as an argument. class DeconstructableInstances(object): def deconstruct(self): return ('DeconstructableInstances', [], {}) string = MigrationWriter.serialize(models.CharField(default=DeconstructableInstances))[0] self.assertEqual(string, "models.CharField(default=migrations.test_writer.DeconstructableInstances)")
bsd-3-clause
-8,129,352,722,528,693,000
41.52037
126
0.622621
false
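The serialize-then-exec round trip these tests lean on can be reproduced for any deconstructible value in a few lines (a sketch assuming a configured Django settings module):

from django.db.migrations.writer import MigrationWriter

def round_trip(value):
    # Serialize a value as migration source code, then eval it back.
    string, imports = MigrationWriter.serialize(value)
    namespace = {}
    exec('\n'.join(imports) + '\nresult = ' + string, namespace)
    return namespace['result']

assert round_trip({'a': [1, 2]}) == {'a': [1, 2]}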
freyley/trello-traceability
rungui.py
1
12592
#!/usr/bin/env python import urwid from trello import TrelloClient import models import settings from models import Board, get_session, TrelloList from trellointerface import create_dbcard_and_ensure_checklist class RemoveOrgUser(object): def __init__(self, parent): self.items = [urwid.Text("foo"), urwid.Text("bar")] self.main_content = urwid.SimpleListWalker( [urwid.AttrMap(w, None, 'reveal focus') for w in self.items]) self.parent = parent self.listbox = urwid.ListBox(self.main_content) @property def widget(self): return urwid.AttrWrap(self.listbox, 'body') def handle_input(self, k): if k in ('u', 'U'): self.parent.set_view(Top) class NoRefocusColumns(urwid.Columns): def keypress(self, size, key): return key class NoRefocusPile(urwid.Pile): def keypress(self, size, key): return key class TrelloCard(object): def __init__(self, card, trello): self.card = card self.trello = trello self.initialize() def initialize(self): raise NotImplementedError() @property def trellocard(self): return self.trello.get_card(self.card.id) @property def url(self): return self.trellocard.url @property def id(self): return self.card.id @property def name(self): return self.card.name class Story(TrelloCard): def initialize(self): pass @property def meta_checklist(self): tc = self.trellocard tc.fetch(eager=True) return [ checklist for checklist in tc.checklists if checklist.id == self.card.magic_checklist_id ][0] def connect_to(self, epic): self.meta_checklist.add_checklist_item("Epic Connection: {}: {}".format(epic.id, epic.url)) epic.story_checklist.add_checklist_item("{}: {}".format(self.id, self.url)) self.card.connected_to_id = epic.id @property def more_info_area(self): if self.card.connected_to is None: return "No connection" else: epic = self.card.connected_to return "Connected to {}".format(epic.name) class Epic(TrelloCard): def initialize(self): pass @property def story_checklist(self): tc = self.trellocard tc.fetch(eager=True) return [ checklist for checklist in tc.checklists if checklist.id == self.card.magic_checklist_id ][0] @property def more_info_area(self): return "" class Panel(object): def __init__(self, parent, board, card_cls): self.db_session = parent.db_session self.trello = parent.trello self.parent = parent self.board = board self.card_list_ptr = 0 self.card_lists = self.db_session.query(TrelloList).filter_by(board=self.board) self.card_cls = card_cls self.content = urwid.SimpleListWalker( [urwid.AttrMap(w, None, 'reveal focus') for w in self.items]) self.listbox = urwid.ListBox(self.content) def reset_content(self): while self.content: self.content.pop() self.content += [ urwid.AttrMap(w, None, 'reveal focus') for w in self.items] if len(self.items) >2: self.listbox.set_focus(2) # TODO: this doesn't belong here exactly. 
self.parent.more_info_area.set_text(self.card.more_info_area) @property def trelloboard(self): return self.trello.get_board(self.board.id) @property def items(self): items = [urwid.Text(self.card_list.name), urwid.Text('-=-=-=-=-=-=-=-=-=-')] items += [urwid.Text("{}] {}".format(i, card.name)) for i, card in enumerate(self.get_cards())] return items @property def card_list(self): return self.card_lists[self.card_list_ptr] def get_cards(self): db_cards = self.db_session.query(models.Card).filter_by(trellolist=self.card_list) self.cards = [ self.card_cls(card, self.trello) for card in db_cards] return self.cards def set_focus(self, idx): self.listbox.set_focus(idx) @property def card(self): return self.cards[self.listbox.get_focus()[1] - 2] def go_left(self): if self.card_list_ptr > 0: self.card_list_ptr -= 1 self.reset_content() def go_right(self): if self.card_list_ptr < self.card_lists.count() - 1: self.card_list_ptr += 1 self.reset_content() def move_up(self): focus_widget, idx = self.listbox.get_focus() if idx > 2: idx = idx - 1 self.listbox.set_focus(idx) self.parent.more_info_area.set_text(self.card.more_info_area) def move_down(self): focus_widget, idx = self.listbox.get_focus() if idx < len(self.content) - 1: idx = idx + 1 self.listbox.set_focus(idx) self.parent.more_info_area.set_text(self.card.more_info_area) class Connect(object): def __init__(self, parent): self.db_session = get_session()() self.mid_cmd = self.old_focus = None self.parent = parent self.story_board = self.db_session.query(Board).filter_by(story_board=True).first() self.epic_board = self.db_session.query(Board).filter_by(epic_board=True).first() self.future_story_boards = self.db_session.query(Board).filter_by(future_story_board=True) self.panels = [Panel(self, board=self.story_board, card_cls=Story)] self.panels += [ Panel(self, board=fsb, card_cls=Story) for fsb in self.future_story_boards] self.left_panel_idx = 0 self.left_panel = self.panels[self.left_panel_idx] self.right_panel = Panel(self, board=self.epic_board, card_cls=Epic) self.left_panel.set_focus(2) self.columns = NoRefocusColumns([self.left_panel.listbox, self.right_panel.listbox], focus_column=0) self.more_info_area = urwid.Text(self.left_panel.card.more_info_area) self.command_area = urwid.Edit(caption="") self.edit_area_listbox = urwid.ListBox([urwid.Text("-=-=-=-=-=-=-=-"), self.more_info_area, self.command_area]) #urwid.AttrMap(self.command_area, "notfocus", "focus")]) self.frame = NoRefocusPile([self.columns, self.edit_area_listbox], focus_item=0) @property def widget(self): return urwid.AttrWrap(self.frame, 'body') @property def trello(self): return self.parent.trelloclient def _complete(self): self.command_area.set_edit_text("") self.mid_cmd = False self.left_panel.listbox.set_focus(self.old_focus) self.frame.set_focus(0) def complete_n(self): output = self.command_area.get_edit_text().strip() card_list = self.right_panel.card_list trello_list = self.right_panel.trelloboard.get_list(card_list.id) card = trello_list.add_card(output) db_card = create_dbcard_and_ensure_checklist(self.db_session, card, prefetch_checklists=True) self.db_session.commit() self.db_session = get_session()() self.left_panel.card.connect_to(Epic(db_card, self.trello)) self.right_panel.reset_content() self._complete() def complete_c(self): output = int(self.command_area.get_edit_text()) self._complete() self.left_panel.card.connect_to(self.right_panel.cards[output]) self.db_session.commit() def switch_story_boards(self): self.left_panel_idx += 1 if self.left_panel_idx == 
len(self.panels): self.left_panel_idx = 0 self.left_panel = self.panels[self.left_panel_idx] self.columns.widget_list = [self.left_panel.listbox, self.right_panel.listbox] self.columns.set_focus(0) self.left_panel.listbox.set_focus(2) def handle_input(self, k): if self.mid_cmd: if k == 'esc': self._complete() if k == 'enter': if self.mid_cmd == 'n': self.complete_n() elif self.mid_cmd == 'c': self.complete_c() else: self.command_area.keypress([0], k) return if k in ('u', 'U'): self.parent.set_view(Top) if k == 's': self.switch_story_boards() if k == 'c': self.frame.set_focus(1) self.command_area.set_edit_pos(0) self.mid_cmd = 'c' self.old_focus = self.left_panel.listbox.get_focus()[1] if k == 'n': self.frame.set_focus(1) self.command_area.set_edit_pos(0) self.mid_cmd = 'n' self.old_focus = self.left_panel.listbox.get_focus()[1] # navigation elif k == 'j': self.right_panel.go_left() elif k == 'l': self.right_panel.go_right() elif k == 'a': self.left_panel.go_left() elif k == 'd': self.left_panel.go_right() elif k == 'up': self.left_panel.move_up() elif k == 'down': self.left_panel.move_down() VIEWS = { "Remove Organization User": RemoveOrgUser, "Connect Stories to Epics": Connect, } class Top(object): def __init__(self, parent): self.commands = [urwid.Text(text) for text in VIEWS.keys() ] self.main_content = urwid.SimpleListWalker( [urwid.Text('Commands'), urwid.Text('-=-=-=-=-=-=-=-=-=-')]+ [urwid.AttrMap(w, None, 'reveal focus') for w in self.commands]) self.listbox = urwid.ListBox(self.main_content) self.listbox.set_focus(2) self.parent = parent @property def widget(self): return urwid.AttrWrap(self.listbox, 'body') def enter_command(self): focus_widget, idx = self.listbox.get_focus() item = self.main_content[idx].original_widget.text view = VIEWS[item] self.parent.set_view(view) # focus_widget, idx = self.listbox.get_focus() # item = self.main_content[idx].original_widget.text # new_item = urwid.Text(item) # self.commands.append(new_item) # self.main_content.append(urwid.AttrMap(new_item, None, 'reveal focus')) def handle_input(self, k): if k == 'up': focus_widget, idx = self.listbox.get_focus() if idx > 2: idx = idx - 1 self.listbox.set_focus(idx) elif k == 'down': focus_widget, idx = self.listbox.get_focus() if idx < len(self.main_content) - 1: idx = idx + 1 self.listbox.set_focus(idx) elif k == 'enter': self.enter_command() class TrelloTraceability: palette = [ ('body', 'black', 'light gray'), ('focus', 'light gray', 'dark blue', 'standout'), ('head', 'yellow', 'black', 'standout'), ('foot', 'light gray', 'black'), ('key', 'light cyan', 'black','underline'), ('title', 'white', 'black', 'bold'), ('flag', 'dark gray', 'light gray'), ('error', 'dark red', 'light gray'), ] cmdstrings = [ ('title', "Commands"), " ", ('key', "Q"), ' ', ('key', 'U') ] def __init__(self): self.current_view = Top(self) # header and footer self.header = urwid.Text( "Trello Traceability" ) self.cmds = urwid.AttrWrap(urwid.Text(self.cmdstrings), 'foot') self.view = urwid.Frame( self.current_view.widget, header=urwid.AttrWrap(self.header, 'head' ), footer=self.cmds ) self.trelloclient = TrelloClient( api_key=settings.TRELLO_API_KEY, api_secret=settings.TRELLO_API_SECRET, token=settings.TRELLO_OAUTH_TOKEN, ) self.organization = self.trelloclient.get_organization(settings.TRELLO_ORGANIZATION_ID) def set_view(self, cls): self.current_view = cls(self) self.view.body = self.current_view.widget def main(self): """Run the program.""" self.loop = urwid.MainLoop(self.view, self.palette, 
unhandled_input=self.unhandled_input) self.loop.run() def unhandled_input(self, k): if k in ('q','Q'): raise urwid.ExitMainLoop() else: self.current_view.handle_input(k) def main(): TrelloTraceability().main() if __name__=="__main__": main()
agpl-3.0
-8,318,284,779,255,094,000
31.456186
119
0.579018
false
tsgit/invenio
modules/bibfield/lib/functions/is_type_isbn.py
17
1815
## This file is part of Invenio.
## Copyright (C) 2013 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.


def _convert_x_to_10(x):
    if x != 'X':
        return int(x)
    else:
        return 10


def is_type_isbn10(val):
    """
    Test if argument is an ISBN-10 number

    Courtesy Wikipedia:
    http://en.wikipedia.org/wiki/International_Standard_Book_Number
    """
    val = val.replace("-", "").replace(" ", "")
    if len(val) != 10:
        return False
    r = sum([(10 - i) * (_convert_x_to_10(x)) for i, x in enumerate(val)])
    return not (r % 11)


def is_type_isbn13(val):
    """
    Test if argument is an ISBN-13 number

    Courtesy Wikipedia:
    http://en.wikipedia.org/wiki/International_Standard_Book_Number
    """
    val = val.replace("-", "").replace(" ", "")
    if len(val) != 13:
        return False
    total = sum([int(num) * weight for num, weight in zip(val, (1, 3) * 6)])
    ck = (10 - total) % 10
    return ck == int(val[-1])


def is_type_isbn(val):
    """
    Test if argument is an ISBN-10 or ISBN-13 number
    """
    try:
        return is_type_isbn10(val) or is_type_isbn13(val)
    except:
        return False
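A quick usage sketch for the validators above (the sample ISBNs are illustrative, not from the original file). ISBN-10 is valid when the weighted sum sum((10 - i) * digit_i) is divisible by 11; ISBN-13 is valid when the (1, 3, 1, 3, ...) weighted sum of the first twelve digits produces the final check digit via (10 - total) % 10.

if __name__ == '__main__':
    # weights 10..1 give 0+27+0+42+24+0+24+3+10+2 = 132, and 132 % 11 == 0
    assert is_type_isbn10('0-306-40615-2')
    # weighted sum of the first 12 digits is 93; (10 - 93) % 10 == 7 == last digit
    assert is_type_isbn13('978-0-306-40615-7')
    # wrong length / non-digits are rejected without raising
    assert not is_type_isbn('not-an-isbn')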
gpl-2.0
-7,659,831,587,807,510,000
29.25
76
0.638567
false
CiuffysHub/MITMf
mitmflib-0.18.4/build/lib.linux-i686-2.7/mitmflib/impacket/testcases/SMB_RPC/test_rrp.py
2
25098
############################################################################### # Tested so far: # # OpenClassesRoot # OpenCurrentUser # OpenLocalMachine # OpenPerformanceData # OpenUsers # BaseRegCloseKey # BaseRegCreateKey # BaseRegDeleteKey # BaseRegFlushKey # BaseRegGetKeySecurity # BaseRegOpenKey # BaseRegQueryInfoKey # BaseRegQueryValue # BaseRegReplaceKey # BaseRegRestoreKey # BaseRegSaveKey # BaseRegSetValue # BaseRegEnumValue # BaseRegEnumKey # BaseRegGetVersion # OpenCurrentConfig # BaseRegQueryMultipleValues # BaseRegSaveKeyEx # OpenPerformanceText # OpenPerformanceNlsText # BaseRegQueryMultipleValues2 # BaseRegDeleteKeyEx # BaseRegLoadKey # BaseRegUnLoadKey # BaseRegDeleteValue # # Not yet: # # BaseRegSetKeySecurity # # Shouldn't dump errors against a win7 # ################################################################################ import unittest import ConfigParser from mitmflib.impacket.dcerpc.v5 import transport from mitmflib.impacket.dcerpc.v5 import epm, rrp from mitmflib.impacket.dcerpc.v5.dtypes import NULL, MAXIMUM_ALLOWED, OWNER_SECURITY_INFORMATION class RRPTests(unittest.TestCase): def connect(self): rpctransport = transport.DCERPCTransportFactory(self.stringBinding) if len(self.hashes) > 0: lmhash, nthash = self.hashes.split(':') else: lmhash = '' nthash = '' if hasattr(rpctransport, 'set_credentials'): # This method exists only for selected protocol sequences. rpctransport.set_credentials(self.username,self.password, self.domain, lmhash, nthash) dce = rpctransport.get_dce_rpc() #dce.set_auth_level(RPC_C_AUTHN_LEVEL_PKT_INTEGRITY) dce.connect() dce.bind(rrp.MSRPC_UUID_RRP, transfer_syntax = self.ts) resp = rrp.hOpenLocalMachine(dce, MAXIMUM_ALLOWED | rrp.KEY_WOW64_32KEY | rrp.KEY_ENUMERATE_SUB_KEYS) return dce, rpctransport, resp['phKey'] def test_OpenClassesRoot(self): dce, rpctransport, phKey = self.connect() request = rrp.OpenClassesRoot() request['ServerName'] = NULL request['samDesired'] = MAXIMUM_ALLOWED resp = dce.request(request) resp.dump() def test_OpenCurrentUser(self): dce, rpctransport, phKey = self.connect() request = rrp.OpenCurrentUser() request['ServerName'] = NULL request['samDesired'] = MAXIMUM_ALLOWED resp = dce.request(request) resp.dump() def test_OpenLocalMachine(self): dce, rpctransport, phKey = self.connect() request = rrp.OpenLocalMachine() request['ServerName'] = NULL request['samDesired'] = MAXIMUM_ALLOWED resp = dce.request(request) resp.dump() def test_OpenPerformanceData(self): dce, rpctransport, phKey = self.connect() request = rrp.OpenPerformanceData() request['ServerName'] = NULL request['samDesired'] = MAXIMUM_ALLOWED resp = dce.request(request) resp.dump() def test_OpenUsers(self): dce, rpctransport, phKey = self.connect() request = rrp.OpenUsers() request['ServerName'] = NULL request['samDesired'] = MAXIMUM_ALLOWED resp = dce.request(request) resp.dump() def test_BaseRegCloseKey(self): dce, rpctransport, phKey = self.connect() request = rrp.BaseRegCloseKey() request['hKey'] = phKey resp = dce.request(request) resp.dump() def test_hBaseRegCreateKey_hBaseRegSetValue_hBaseRegDeleteKey(self): dce, rpctransport, phKey = self.connect() resp = rrp.hOpenClassesRoot(dce) resp.dump() regHandle = resp['phKey'] resp = rrp.hBaseRegCreateKey(dce, regHandle, 'BETO\x00') resp.dump() phKey = resp['phkResult'] try: resp = rrp.hBaseRegSetValue(dce, phKey, 'BETO2\x00', rrp.REG_SZ, 'HOLA COMO TE VA\x00') resp.dump() except Exception, e: print e type, data = rrp.hBaseRegQueryValue(dce, phKey, 'BETO2\x00') #print data resp = 
rrp.hBaseRegDeleteValue(dce, phKey, 'BETO2\x00') resp.dump() resp = rrp.hBaseRegDeleteKey(dce, regHandle, 'BETO\x00') resp.dump() self.assertTrue( 'HOLA COMO TE VA\x00' == data ) def test_BaseRegCreateKey_BaseRegSetValue_BaseRegDeleteKey(self): dce, rpctransport, phKey = self.connect() request = rrp.OpenClassesRoot() request['ServerName'] = NULL request['samDesired'] = MAXIMUM_ALLOWED resp = dce.request(request) resp.dump() regHandle = resp['phKey'] request = rrp.BaseRegCreateKey() request['hKey'] = regHandle request['lpSubKey'] = 'BETO\x00' request['lpClass'] = NULL request['dwOptions'] = 0x00000001 request['samDesired'] = MAXIMUM_ALLOWED request['lpSecurityAttributes']['RpcSecurityDescriptor']['lpSecurityDescriptor'] = NULL request['lpdwDisposition'] = rrp.REG_CREATED_NEW_KEY resp = dce.request(request) resp.dump() phKey = resp['phkResult'] request = rrp.BaseRegSetValue() request['hKey'] = phKey request['lpValueName'] = 'BETO\x00' request['dwType'] = rrp.REG_SZ request['lpData'] = 'HOLA COMO TE VA\x00'.encode('utf-16le') request['cbData'] = len('HOLA COMO TE VA\x00')*2 try: resp = dce.request(request) resp.dump() except Exception, e: print e request = rrp.BaseRegQueryValue() request['hKey'] = phKey request['lpValueName'] = 'BETO\x00' request['lpData'] = ' '*100 request['lpcbData'] = 100 request['lpcbLen'] = 100 resp = dce.request(request) resp.dump() resData = resp['lpData'] request = rrp.BaseRegDeleteKey() request['hKey'] = regHandle request['lpSubKey'] = 'BETO\x00' resp = dce.request(request) resp.dump() self.assertTrue( 'HOLA COMO TE VA\x00' == ''.join(resData).decode('utf-16le')) def test_BaseRegEnumKey(self): dce, rpctransport, phKey = self.connect() request = rrp.BaseRegOpenKey() request['hKey'] = phKey request['lpSubKey'] = 'SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\x00' request['dwOptions'] = 0x00000001 request['samDesired'] = MAXIMUM_ALLOWED | rrp.KEY_ENUMERATE_SUB_KEYS resp = dce.request(request) request = rrp.BaseRegEnumKey() request['hKey'] = resp['phkResult'] request['dwIndex'] = 1 # I gotta access the fields mannually :s request.fields['lpNameIn'].fields['MaximumLength'] = 510 request.fields['lpNameIn'].fields['Data'].fields['Data'].fields['MaximumCount'] = 255 request['lpClassIn'] = ' '*100 request['lpftLastWriteTime'] = NULL resp = dce.request(request) resp.dump() def test_hBaseRegEnumKey(self): dce, rpctransport, phKey = self.connect() request = rrp.BaseRegOpenKey() request['hKey'] = phKey request['lpSubKey'] = 'SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\x00' request['dwOptions'] = 0x00000001 request['samDesired'] = MAXIMUM_ALLOWED | rrp.KEY_ENUMERATE_SUB_KEYS resp = dce.request(request) resp = rrp.hBaseRegEnumKey(dce, resp['phkResult'], 1 ) resp.dump() def test_BaseRegEnumValue(self): dce, rpctransport, phKey = self.connect() request = rrp.BaseRegOpenKey() request['hKey'] = phKey request['lpSubKey'] = 'SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\x00' request['dwOptions'] = 0x00000001 request['samDesired'] = MAXIMUM_ALLOWED resp = dce.request(request) request = rrp.BaseRegEnumValue() request['hKey'] = resp['phkResult'] request['dwIndex'] = 6 request['lpValueNameIn'] = ' '*100 request['lpData'] = ' '*100 request['lpcbData'] = 100 request['lpcbLen'] = 100 resp = dce.request(request) resp.dump() def test_hBaseRegEnumValue(self): dce, rpctransport, phKey = self.connect() request = rrp.BaseRegOpenKey() request['hKey'] = phKey request['lpSubKey'] = 'SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\x00' request['dwOptions'] = 0x00000001 request['samDesired'] = 
MAXIMUM_ALLOWED resp = dce.request(request) resp = rrp.hBaseRegEnumValue(dce, resp['phkResult'], 7) resp.dump() def test_BaseRegFlushKey(self): dce, rpctransport, phKey = self.connect() resp = rrp.hBaseRegFlushKey(dce,phKey) resp.dump() def test_BaseRegGetKeySecurity(self): dce, rpctransport, phKey = self.connect() resp = rrp.hBaseRegGetKeySecurity(dce, phKey, OWNER_SECURITY_INFORMATION) resp.dump() def test_BaseRegOpenKey(self): dce, rpctransport, phKey = self.connect() request = rrp.BaseRegOpenKey() request['hKey'] = phKey request['lpSubKey'] = 'SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\x00' request['dwOptions'] = 0x00000001 request['samDesired'] = MAXIMUM_ALLOWED resp = dce.request(request) resp.dump() def test_hBaseRegQueryInfoKey(self): dce, rpctransport, phKey = self.connect() resp = rrp.hBaseRegOpenKey(dce, phKey, 'SYSTEM\\CurrentControlSet\\Control\\Lsa\\JD\x00' ) resp = rrp.hBaseRegQueryInfoKey(dce,resp['phkResult']) resp.dump() def test_BaseRegQueryValue(self): dce, rpctransport, phKey = self.connect() request = rrp.BaseRegOpenKey() request['hKey'] = phKey request['lpSubKey'] = 'SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\x00' request['dwOptions'] = 0x00000001 request['samDesired'] = MAXIMUM_ALLOWED resp = dce.request(request) resp.dump() request = rrp.BaseRegQueryValue() request['hKey'] = resp['phkResult'] request['lpValueName'] = 'ProductName\x00' request['lpData'] = ' '*100 request['lpcbData'] = 100 request['lpcbLen'] = 100 resp = dce.request(request) resp.dump() def test_hBaseRegQueryValue(self): dce, rpctransport, phKey = self.connect() resp = rrp.hBaseRegOpenKey(dce, phKey, 'SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\x00' ) resp.dump() resp = rrp.hBaseRegQueryValue(dce, resp['phkResult'], 'ProductName\x00') def test_BaseRegReplaceKey(self): dce, rpctransport, phKey = self.connect() request = rrp.BaseRegReplaceKey() request['hKey'] = phKey request['lpSubKey'] = 'SOFTWARE\x00' request['lpNewFile'] = 'SOFTWARE\x00' request['lpOldFile'] = 'SOFTWARE\x00' try: resp = dce.request(request) resp.dump() except Exception, e: if str(e).find('ERROR_FILE_NOT_FOUND') < 0: raise def test_hBaseRegReplaceKey(self): dce, rpctransport, phKey = self.connect() try: resp = rrp.hBaseRegReplaceKey(dce, phKey, 'SOFTWARE\x00', 'SOFTWARE\x00', 'SOFTWARE\x00') resp.dump() except Exception, e: if str(e).find('ERROR_FILE_NOT_FOUND') < 0: raise def test_BaseRegRestoreKey(self): dce, rpctransport, phKey = self.connect() request = rrp.BaseRegRestoreKey() request['hKey'] = phKey request['lpFile'] = 'SOFTWARE\x00' request['Flags'] = rrp.REG_REFRESH_HIVE try: resp = dce.request(request) resp.dump() except Exception, e: if str(e).find('ERROR_FILE_NOT_FOUND') < 0: raise def test_hBaseRegRestoreKey(self): dce, rpctransport, phKey = self.connect() try: resp = rrp.hBaseRegRestoreKey(dce, phKey, 'SOFTWARE\x00') resp.dump() except Exception, e: if str(e).find('ERROR_FILE_NOT_FOUND') < 0: raise def test_BaseRegSaveKey(self): dce, rpctransport, phKey = self.connect() request = rrp.OpenCurrentUser() request['ServerName'] = NULL request['samDesired'] = MAXIMUM_ALLOWED resp = dce.request(request) resp.dump() request = rrp.BaseRegSaveKey() request['hKey'] = resp['phKey'] request['lpFile'] = 'BETUSFILE2\x00' request['pSecurityAttributes'] = NULL resp = dce.request(request) resp.dump() # I gotta remove the file now :s smb = rpctransport.get_smb_connection() smb.deleteFile('ADMIN$', 'System32\\BETUSFILE2') def test_hBaseRegSaveKey(self): dce, rpctransport, phKey = self.connect() resp = rrp.hOpenCurrentUser(dce) 
        resp.dump()
        resp = rrp.hBaseRegSaveKey(dce, resp['phKey'], 'BETUSFILE2\x00')
        resp.dump()
        # I gotta remove the file now :s
        smb = rpctransport.get_smb_connection()
        smb.deleteFile('ADMIN$', 'System32\\BETUSFILE2')

    def test_BaseRegGetVersion(self):
        dce, rpctransport, phKey = self.connect()
        request = rrp.BaseRegGetVersion()
        request['hKey'] = phKey
        resp = dce.request(request)
        resp.dump()

    def test_hBaseRegGetVersion(self):
        dce, rpctransport, phKey = self.connect()
        resp = rrp.hBaseRegGetVersion(dce, phKey)
        resp.dump()

    def test_OpenCurrentConfig(self):
        dce, rpctransport, phKey = self.connect()
        request = rrp.OpenCurrentConfig()
        request['ServerName'] = NULL
        request['samDesired'] = MAXIMUM_ALLOWED
        resp = dce.request(request)
        resp.dump()

    def test_hOpenCurrentConfig(self):
        dce, rpctransport, phKey = self.connect()
        resp = rrp.hOpenCurrentConfig(dce)
        resp.dump()

    def test_BaseRegQueryMultipleValues(self):
        dce, rpctransport, phKey = self.connect()

        request = rrp.BaseRegOpenKey()
        request['hKey'] = phKey
        request['lpSubKey'] = 'SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\x00'
        request['dwOptions'] = 0x00000001
        request['samDesired'] = MAXIMUM_ALLOWED | rrp.KEY_QUERY_VALUE
        resp = dce.request(request)
        resp.dump()

        request = rrp.BaseRegQueryMultipleValues()

        item1 = rrp.RVALENT()
        item1['ve_valuename'] = 'ProductName\x00'
        item1['ve_valuelen'] = len('ProductName\x00')
        item1['ve_valueptr'] = NULL
        item1['ve_type'] = rrp.REG_SZ

        item2 = rrp.RVALENT()
        item2['ve_valuename'] = 'SystemRoot\x00'
        item2['ve_valuelen'] = len('SystemRoot\x00')
        item2['ve_valueptr'] = NULL
        item2['ve_type'] = rrp.REG_SZ

        item3 = rrp.RVALENT()
        item3['ve_valuename'] = 'EditionID\x00'
        item3['ve_valuelen'] = len('EditionID\x00')
        item3['ve_valueptr'] = NULL
        item3['ve_type'] = rrp.REG_SZ

        request['hKey'] = resp['phkResult']
        request['val_listIn'].append(item1)
        request['val_listIn'].append(item2)
        request['val_listIn'].append(item3)
        request['num_vals'] = len(request['val_listIn'])
        request['lpvalueBuf'] = list(' ' * 128)
        request['ldwTotsize'] = 128

        resp = dce.request(request)
        resp.dump()

    def test_hBaseRegQueryMultipleValues(self):
        dce, rpctransport, phKey = self.connect()

        resp = rrp.hBaseRegOpenKey(dce, phKey, 'SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\x00')
        resp.dump()

        valueIn = list()
        item1 = {}
        item1['ValueName'] = 'ProductName\x00'
        item1['ValueType'] = rrp.REG_SZ
        valueIn.append(item1)

        item2 = {}
        item2['ValueName'] = 'InstallDate\x00'
        item2['ValueType'] = rrp.REG_DWORD
        valueIn.append(item2)

        item3 = {}
        item3['ValueName'] = 'DigitalProductId\x00'
        item3['ValueType'] = rrp.REG_BINARY
        #valueIn.append(item3)

        resp = rrp.hBaseRegQueryMultipleValues(dce, resp['phkResult'], valueIn)
        #print resp

    def test_BaseRegSaveKeyEx(self):
        dce, rpctransport, phKey = self.connect()

        request = rrp.OpenCurrentUser()
        request['ServerName'] = NULL
        request['samDesired'] = MAXIMUM_ALLOWED
        resp = dce.request(request)
        resp.dump()

        request = rrp.BaseRegSaveKeyEx()
        request['hKey'] = resp['phKey']
        request['lpFile'] = 'BETUSFILE2\x00'
        request['pSecurityAttributes'] = NULL
        request['Flags'] = 4
        resp = dce.request(request)
        resp.dump()
        # I gotta remove the file now :s
        smb = rpctransport.get_smb_connection()
        smb.deleteFile('ADMIN$', 'System32\\BETUSFILE2')

    def test_hBaseRegSaveKeyEx(self):
        dce, rpctransport, phKey = self.connect()

        resp = rrp.hOpenCurrentUser(dce)
        resp.dump()

        resp = rrp.hBaseRegSaveKeyEx(dce, resp['phKey'], 'BETUSFILE2\x00')
        resp.dump()
        # I gotta remove the file now :s
        smb = rpctransport.get_smb_connection()
        smb.deleteFile('ADMIN$', 'System32\\BETUSFILE2')

    def test_OpenPerformanceText(self):
        dce, rpctransport, phKey = self.connect()
        request = rrp.OpenPerformanceText()
        request['ServerName'] = NULL
        request['samDesired'] = MAXIMUM_ALLOWED
        resp = dce.request(request)
        resp.dump()

    def test_hOpenPerformanceText(self):
        dce, rpctransport, phKey = self.connect()
        resp = rrp.hOpenPerformanceText(dce)
        resp.dump()

    def test_OpenPerformanceNlsText(self):
        dce, rpctransport, phKey = self.connect()
        request = rrp.OpenPerformanceNlsText()
        request['ServerName'] = NULL
        request['samDesired'] = MAXIMUM_ALLOWED
        resp = dce.request(request)
        resp.dump()

    def test_hOpenPerformanceNlsText(self):
        dce, rpctransport, phKey = self.connect()
        resp = rrp.hOpenPerformanceNlsText(dce)
        resp.dump()

    def test_BaseRegQueryMultipleValues2(self):
        dce, rpctransport, phKey = self.connect()

        request = rrp.BaseRegOpenKey()
        request['hKey'] = phKey
        request['lpSubKey'] = 'SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\x00'
        request['dwOptions'] = 0x00000001
        request['samDesired'] = MAXIMUM_ALLOWED | rrp.KEY_QUERY_VALUE
        resp = dce.request(request)
        resp.dump()

        request = rrp.BaseRegQueryMultipleValues2()

        item1 = rrp.RVALENT()
        item1['ve_valuename'] = 'ProductName\x00'
        item1['ve_valuelen'] = len('ProductName\x00')
        item1['ve_valueptr'] = NULL
        item1['ve_type'] = rrp.REG_SZ

        item2 = rrp.RVALENT()
        item2['ve_valuename'] = 'SystemRoot\x00'
        item2['ve_valuelen'] = len('SystemRoot\x00')
        item2['ve_valueptr'] = NULL
        item2['ve_type'] = rrp.REG_SZ

        item3 = rrp.RVALENT()
        item3['ve_valuename'] = 'EditionID\x00'
        item3['ve_valuelen'] = len('EditionID\x00')
        item3['ve_valueptr'] = NULL
        item3['ve_type'] = rrp.REG_SZ

        request['hKey'] = resp['phkResult']
        request['val_listIn'].append(item1)
        request['val_listIn'].append(item2)
        request['val_listIn'].append(item3)
        request['num_vals'] = len(request['val_listIn'])
        request['lpvalueBuf'] = list(' ' * 128)
        request['ldwTotsize'] = 128

        resp = dce.request(request)
        resp.dump()

    def test_BaseRegDeleteKeyEx(self):
        dce, rpctransport, phKey = self.connect()

        request = rrp.OpenClassesRoot()
        request['ServerName'] = NULL
        request['samDesired'] = MAXIMUM_ALLOWED
        resp = dce.request(request)
        resp.dump()

        regHandle = resp['phKey']

        request = rrp.BaseRegCreateKey()
        request['hKey'] = regHandle
        request['lpSubKey'] = 'BETO\x00'
        request['lpClass'] = NULL
        request['dwOptions'] = 0x00000001
        request['samDesired'] = MAXIMUM_ALLOWED
        request['lpSecurityAttributes']['RpcSecurityDescriptor']['lpSecurityDescriptor'] = NULL
        request['lpdwDisposition'] = rrp.REG_CREATED_NEW_KEY
        resp = dce.request(request)
        resp.dump()
        phKey = resp['phkResult']

        request = rrp.BaseRegDeleteKeyEx()
        request['hKey'] = regHandle
        request['lpSubKey'] = 'BETO\x00'
        request['AccessMask'] = rrp.KEY_WOW64_32KEY
        request['Reserved'] = 0
        resp = dce.request(request)
        resp.dump()

    def test_BaseRegLoadKey_BaseRegUnLoadKey(self):
        dce, rpctransport, phKey = self.connect()

        request = rrp.BaseRegOpenKey()
        request['hKey'] = phKey
        request['lpSubKey'] = 'SECURITY\x00'
        request['dwOptions'] = 0x00000001
        request['samDesired'] = MAXIMUM_ALLOWED
        resp = dce.request(request)
        resp.dump()

        request = rrp.BaseRegSaveKey()
        request['hKey'] = resp['phkResult']
        request['lpFile'] = 'SEC\x00'
        request['pSecurityAttributes'] = NULL
        resp = dce.request(request)
        resp.dump()

        request = rrp.BaseRegLoadKey()
        request['hKey'] = phKey
        request['lpSubKey'] = 'BETUS\x00'
        request['lpFile'] = 'SEC\x00'
        resp = dce.request(request)
        resp.dump()

        request = rrp.BaseRegUnLoadKey()
        request['hKey'] = phKey
        request['lpSubKey'] = 'BETUS\x00'
        resp = dce.request(request)
        resp.dump()

        smb = rpctransport.get_smb_connection()
        smb.deleteFile('ADMIN$', 'System32\\SEC')

    def test_hBaseRegLoadKey_hBaseRegUnLoadKey(self):
        dce, rpctransport, phKey = self.connect()

        resp = rrp.hBaseRegOpenKey(dce, phKey, 'SECURITY\x00')
        resp.dump()

        request = rrp.BaseRegSaveKey()
        request['hKey'] = resp['phkResult']
        request['lpFile'] = 'SEC\x00'
        request['pSecurityAttributes'] = NULL
        resp = dce.request(request)
        resp.dump()

        resp = rrp.hBaseRegLoadKey(dce, phKey, 'BETUS\x00', 'SEC\x00')
        resp.dump()

        resp = rrp.hBaseRegUnLoadKey(dce, phKey, 'BETUS\x00')
        resp.dump()

        smb = rpctransport.get_smb_connection()
        smb.deleteFile('ADMIN$', 'System32\\SEC')


class SMBTransport(RRPTests):
    def setUp(self):
        RRPTests.setUp(self)
        configFile = ConfigParser.ConfigParser()
        configFile.read('dcetests.cfg')
        self.username = configFile.get('SMBTransport', 'username')
        self.domain = configFile.get('SMBTransport', 'domain')
        self.serverName = configFile.get('SMBTransport', 'servername')
        self.password = configFile.get('SMBTransport', 'password')
        self.machine = configFile.get('SMBTransport', 'machine')
        self.hashes = configFile.get('SMBTransport', 'hashes')
        self.stringBinding = r'ncacn_np:%s[\PIPE\winreg]' % self.machine
        self.ts = ('8a885d04-1ceb-11c9-9fe8-08002b104860', '2.0')


class SMBTransport64(RRPTests):
    def setUp(self):
        RRPTests.setUp(self)
        configFile = ConfigParser.ConfigParser()
        configFile.read('dcetests.cfg')
        self.username = configFile.get('SMBTransport', 'username')
        self.domain = configFile.get('SMBTransport', 'domain')
        self.serverName = configFile.get('SMBTransport', 'servername')
        self.password = configFile.get('SMBTransport', 'password')
        self.machine = configFile.get('SMBTransport', 'machine')
        self.hashes = configFile.get('SMBTransport', 'hashes')
        self.stringBinding = r'ncacn_np:%s[\PIPE\winreg]' % self.machine
        self.ts = ('71710533-BEBA-4937-8319-B5DBEF9CCC36', '1.0')


class TCPTransport(RRPTests):
    def setUp(self):
        RRPTests.setUp(self)
        configFile = ConfigParser.ConfigParser()
        configFile.read('dcetests.cfg')
        self.username = configFile.get('TCPTransport', 'username')
        self.domain = configFile.get('TCPTransport', 'domain')
        self.serverName = configFile.get('TCPTransport', 'servername')
        self.password = configFile.get('TCPTransport', 'password')
        self.machine = configFile.get('TCPTransport', 'machine')
        self.hashes = configFile.get('TCPTransport', 'hashes')
        self.stringBinding = epm.hept_map(self.machine, rrp.MSRPC_UUID_RRP, protocol='ncacn_ip_tcp')


# Process command-line arguments.
if __name__ == '__main__':
    import sys
    if len(sys.argv) > 1:
        testcase = sys.argv[1]
        suite = unittest.TestLoader().loadTestsFromTestCase(globals()[testcase])
    else:
        suite = unittest.TestLoader().loadTestsFromTestCase(SMBTransport)
        suite.addTests(unittest.TestLoader().loadTestsFromTestCase(SMBTransport64))
    unittest.TextTestRunner(verbosity=1).run(suite)
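Below is a condensed, hypothetical sketch of the read path these tests exercise repeatedly (open HKLM, open a subkey, query one value) using the same impacket helpers; the connection parameters are placeholders for whatever dcetests.cfg supplies, and the function name is not part of the original suite.

def read_product_name(stringBinding, username, password, domain=''):
    rpctransport = transport.DCERPCTransportFactory(stringBinding)
    if hasattr(rpctransport, 'set_credentials'):
        rpctransport.set_credentials(username, password, domain)
    dce = rpctransport.get_dce_rpc()
    dce.connect()
    dce.bind(rrp.MSRPC_UUID_RRP)
    # HKLM -> SOFTWARE\Microsoft\Windows NT\CurrentVersion -> ProductName
    hRootKey = rrp.hOpenLocalMachine(dce)['phKey']
    hKey = rrp.hBaseRegOpenKey(
        dce, hRootKey,
        'SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\x00')['phkResult']
    dataType, data = rrp.hBaseRegQueryValue(dce, hKey, 'ProductName\x00')
    return data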
gpl-3.0
1,233,617,822,753,360,000
33.054274
109
0.606622
false
numerigraphe/odoo
addons/mrp_repair/__init__.py
380
1087
# -*- coding: utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Management Solution
#    Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

import mrp_repair
import wizard

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
1,353,090,048,141,671,400
42.48
79
0.612695
false
supergentle/migueltutorial
flask/lib/python2.7/site-packages/pbr/tests/test_core.py
86
5269
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Copyright (C) 2013 Association of Universities for Research in Astronomy
# (AURA)
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright
#    notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above
#    copyright notice, this list of conditions and the following
#    disclaimer in the documentation and/or other materials provided
#    with the distribution.
#
# 3. The name of AURA and its representatives may not be used to
#    endorse or promote products derived from this software without
#    specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED
# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS

import glob
import os
import tarfile

import fixtures

from pbr.tests import base


class TestCore(base.BaseTestCase):

    cmd_names = ('pbr_test_cmd', 'pbr_test_cmd_with_class')

    def check_script_install(self, install_stdout):
        for cmd_name in self.cmd_names:
            install_txt = 'Installing %s script to %s' % (cmd_name,
                                                          self.temp_dir)
            self.assertIn(install_txt, install_stdout)

            cmd_filename = os.path.join(self.temp_dir, cmd_name)

            script_txt = open(cmd_filename, 'r').read()
            self.assertNotIn('pkg_resources', script_txt)

            stdout, _, return_code = self._run_cmd(cmd_filename)
            self.assertIn("PBR", stdout)

    def test_setup_py_keywords(self):
        """setup.py --keywords.

        Test that the `./setup.py --keywords` command returns the correct
        value without balking.
        """
        self.run_setup('egg_info')
        stdout, _, _ = self.run_setup('--keywords')
        assert stdout == 'packaging,distutils,setuptools'

    def test_sdist_extra_files(self):
        """Test that the extra files are correctly added."""
        stdout, _, return_code = self.run_setup('sdist', '--formats=gztar')

        # There can be only one
        try:
            tf_path = glob.glob(os.path.join('dist', '*.tar.gz'))[0]
        except IndexError:
            assert False, 'source dist not found'

        tf = tarfile.open(tf_path)
        names = ['/'.join(p.split('/')[1:]) for p in tf.getnames()]

        self.assertIn('extra-file.txt', names)

    def test_console_script_install(self):
        """Test that we install a non-pkg-resources console script."""
        if os.name == 'nt':
            self.skipTest('Windows support is passthrough')

        stdout, _, return_code = self.run_setup(
            'install_scripts', '--install-dir=%s' % self.temp_dir)

        self.useFixture(
            fixtures.EnvironmentVariable('PYTHONPATH', '.'))

        self.check_script_install(stdout)

    def test_console_script_develop(self):
        """Test that we develop a non-pkg-resources console script."""
        if os.name == 'nt':
            self.skipTest('Windows support is passthrough')

        self.useFixture(
            fixtures.EnvironmentVariable(
                'PYTHONPATH', ".:%s" % self.temp_dir))

        stdout, _, return_code = self.run_setup(
            'develop', '--install-dir=%s' % self.temp_dir)

        self.check_script_install(stdout)


class TestGitSDist(base.BaseTestCase):

    def setUp(self):
        super(TestGitSDist, self).setUp()

        stdout, _, return_code = self._run_cmd('git', ('init',))
        if return_code:
            self.skipTest("git not installed")

        stdout, _, return_code = self._run_cmd('git', ('add', '.'))
        stdout, _, return_code = self._run_cmd(
            'git', ('commit', '-m', 'Turn this into a git repo'))

        stdout, _, return_code = self.run_setup('sdist', '--formats=gztar')

    def test_sdist_git_extra_files(self):
        """Test that extra files found in git are correctly added."""
        # There can be only one
        tf_path = glob.glob(os.path.join('dist', '*.tar.gz'))[0]
        tf = tarfile.open(tf_path)
        names = ['/'.join(p.split('/')[1:]) for p in tf.getnames()]

        self.assertIn('git-extra-file.txt', names)
bsd-3-clause
7,623,451,611,574,101,000
34.843537
77
0.633707
false
wolverineav/neutron
neutron/pecan_wsgi/hooks/body_validation.py
4
2556
# Copyright (c) 2015 Mirantis, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from oslo_log import log
from oslo_serialization import jsonutils
from pecan import hooks

from neutron.api.v2 import attributes as v2_attributes
from neutron.api.v2 import base as v2_base

LOG = log.getLogger(__name__)


class BodyValidationHook(hooks.PecanHook):

    priority = 120

    def before(self, state):
        if state.request.method not in ('POST', 'PUT'):
            return
        resource = state.request.context.get('resource')
        collection = state.request.context.get('collection')
        neutron_context = state.request.context['neutron_context']
        is_create = state.request.method == 'POST'
        if not resource:
            return

        try:
            json_data = jsonutils.loads(state.request.body)
        except ValueError:
            LOG.debug("No JSON Data in %(method)s request for %(collection)s",
                      {'method': state.request.method,
                       'collection': collection})
            return
        # Raw data are consumed by member actions such as add_router_interface
        state.request.context['request_data'] = json_data
        if not (resource in json_data or collection in json_data):
            # there is no resource in the request. This can happen when a
            # member action is being processed or on agent scheduler operations
            return
        # Prepare data to be passed to the plugin from request body
        data = v2_base.Controller.prepare_request_body(
            neutron_context,
            json_data,
            is_create,
            resource,
            v2_attributes.get_collection_info(collection),
            allow_bulk=is_create)
        if collection in data:
            state.request.context['resources'] = [item[resource]
                                                  for item in data[collection]]
        else:
            state.request.context['resources'] = [data[resource]]
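A minimal illustration of the pecan hook mechanism the class above relies on: before() runs ahead of the controller and can read or stash data on state.request. The LoggingHook name is hypothetical, not part of neutron.

class LoggingHook(hooks.PecanHook):
    # Hooks with a smaller priority value run earlier in the chain.
    priority = 100

    def before(self, state):
        LOG.debug("Handling %(method)s %(path)s",
                  {'method': state.request.method,
                   'path': state.request.path})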
apache-2.0
6,770,917,538,147,770,000
38.323077
79
0.634585
false
Chilledheart/chromium
build/android/gyp/process_resources.py
1
15150
#!/usr/bin/env python # # Copyright (c) 2012 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Process Android resources to generate R.java, and prepare for packaging. This will crunch images and generate v14 compatible resources (see generate_v14_compatible_resources.py). """ import codecs import optparse import os import re import shutil import sys import generate_v14_compatible_resources from util import build_utils # Import jinja2 from third_party/jinja2 sys.path.insert(1, os.path.join(os.path.dirname(__file__), '../../../third_party')) from jinja2 import Template # pylint: disable=F0401 def ParseArgs(args): """Parses command line options. Returns: An options object as from optparse.OptionsParser.parse_args() """ parser = optparse.OptionParser() build_utils.AddDepfileOption(parser) parser.add_option('--android-sdk', help='path to the Android SDK folder') parser.add_option('--aapt-path', help='path to the Android aapt tool') parser.add_option('--non-constant-id', action='store_true') parser.add_option('--android-manifest', help='AndroidManifest.xml path') parser.add_option('--custom-package', help='Java package for R.java') parser.add_option( '--shared-resources', action='store_true', help='Make a resource package that can be loaded by a different' 'application at runtime to access the package\'s resources.') parser.add_option('--resource-dirs', help='Directories containing resources of this target.') parser.add_option('--dependencies-res-zips', help='Resources from dependents.') parser.add_option('--resource-zip-out', help='Path for output zipped resources.') parser.add_option('--R-dir', help='directory to hold generated R.java.') parser.add_option('--srcjar-out', help='Path to srcjar to contain generated R.java.') parser.add_option('--r-text-out', help='Path to store the R.txt file generated by appt.') parser.add_option('--proguard-file', help='Path to proguard.txt generated file') parser.add_option( '--v14-skip', action="store_true", help='Do not generate nor verify v14 resources') parser.add_option( '--extra-res-packages', help='Additional package names to generate R.java files for') parser.add_option( '--extra-r-text-files', help='For each additional package, the R.txt file should contain a ' 'list of resources to be included in the R.java file in the format ' 'generated by aapt') parser.add_option( '--include-all-resources', action='store_true', help='Include every resource ID in every generated R.java file ' '(ignoring R.txt).') parser.add_option( '--all-resources-zip-out', help='Path for output of all resources. This includes resources in ' 'dependencies.') parser.add_option('--stamp', help='File to touch on success') (options, args) = parser.parse_args(args) if args: parser.error('No positional arguments should be given.') # Check that required options have been provided. 
required_options = ( 'android_sdk', 'aapt_path', 'android_manifest', 'dependencies_res_zips', 'resource_dirs', 'resource_zip_out', ) build_utils.CheckOptions(options, parser, required=required_options) if (options.R_dir is None) == (options.srcjar_out is None): raise Exception('Exactly one of --R-dir or --srcjar-out must be specified.') return options def CreateExtraRJavaFiles( r_dir, extra_packages, extra_r_text_files, shared_resources, include_all): if include_all: java_files = build_utils.FindInDirectory(r_dir, "R.java") if len(java_files) != 1: return r_java_file = java_files[0] r_java_contents = codecs.open(r_java_file, encoding='utf-8').read() for package in extra_packages: package_r_java_dir = os.path.join(r_dir, *package.split('.')) build_utils.MakeDirectory(package_r_java_dir) package_r_java_path = os.path.join(package_r_java_dir, 'R.java') new_r_java = re.sub(r'package [.\w]*;', u'package %s;' % package, r_java_contents) codecs.open(package_r_java_path, 'w', encoding='utf-8').write(new_r_java) else: if len(extra_packages) != len(extra_r_text_files): raise Exception('Need one R.txt file per extra package') all_resources = {} r_txt_file = os.path.join(r_dir, 'R.txt') if not os.path.exists(r_txt_file): return with open(r_txt_file) as f: for line in f: m = re.match(r'(int(?:\[\])?) (\w+) (\w+) (.+)$', line) if not m: raise Exception('Unexpected line in R.txt: %s' % line) java_type, resource_type, name, value = m.groups() all_resources[(resource_type, name)] = (java_type, value) for package, r_text_file in zip(extra_packages, extra_r_text_files): if os.path.exists(r_text_file): package_r_java_dir = os.path.join(r_dir, *package.split('.')) build_utils.MakeDirectory(package_r_java_dir) package_r_java_path = os.path.join(package_r_java_dir, 'R.java') CreateExtraRJavaFile( package, package_r_java_path, r_text_file, all_resources, shared_resources) def CreateExtraRJavaFile( package, r_java_path, r_text_file, all_resources, shared_resources): resources = {} with open(r_text_file) as f: for line in f: m = re.match(r'int(?:\[\])? (\w+) (\w+) ', line) if not m: raise Exception('Unexpected line in R.txt: %s' % line) resource_type, name = m.groups() java_type, value = all_resources[(resource_type, name)] if resource_type not in resources: resources[resource_type] = [] resources[resource_type].append((name, java_type, value)) template = Template("""/* AUTO-GENERATED FILE. DO NOT MODIFY. 
*/ package {{ package }}; public final class R { {% for resource_type in resources %} public static final class {{ resource_type }} { {% for name, java_type, value in resources[resource_type] %} {% if shared_resources %} public static {{ java_type }} {{ name }} = {{ value }}; {% else %} public static final {{ java_type }} {{ name }} = {{ value }}; {% endif %} {% endfor %} } {% endfor %} {% if shared_resources %} public static void onResourcesLoaded(int packageId) { {% for resource_type in resources %} {% for name, java_type, value in resources[resource_type] %} {% if java_type == 'int[]' %} for(int i = 0; i < {{ resource_type }}.{{ name }}.length; ++i) { {{ resource_type }}.{{ name }}[i] = ({{ resource_type }}.{{ name }}[i] & 0x00ffffff) | (packageId << 24); } {% else %} {{ resource_type }}.{{ name }} = ({{ resource_type }}.{{ name }} & 0x00ffffff) | (packageId << 24); {% endif %} {% endfor %} {% endfor %} } {% endif %} } """, trim_blocks=True, lstrip_blocks=True) output = template.render(package=package, resources=resources, shared_resources=shared_resources) with open(r_java_path, 'w') as f: f.write(output) def CrunchDirectory(aapt, input_dir, output_dir): """Crunches the images in input_dir and its subdirectories into output_dir. If an image is already optimized, crunching often increases image size. In this case, the crunched image is overwritten with the original image. """ aapt_cmd = [aapt, 'crunch', '-C', output_dir, '-S', input_dir, '--ignore-assets', build_utils.AAPT_IGNORE_PATTERN] build_utils.CheckOutput(aapt_cmd, stderr_filter=FilterCrunchStderr, fail_func=DidCrunchFail) # Check for images whose size increased during crunching and replace them # with their originals (except for 9-patches, which must be crunched). for dir_, _, files in os.walk(output_dir): for crunched in files: if crunched.endswith('.9.png'): continue if not crunched.endswith('.png'): raise Exception('Unexpected file in crunched dir: ' + crunched) crunched = os.path.join(dir_, crunched) original = os.path.join(input_dir, os.path.relpath(crunched, output_dir)) original_size = os.path.getsize(original) crunched_size = os.path.getsize(crunched) if original_size < crunched_size: shutil.copyfile(original, crunched) def FilterCrunchStderr(stderr): """Filters out lines from aapt crunch's stderr that can safely be ignored.""" filtered_lines = [] for line in stderr.splitlines(True): # Ignore this libpng warning, which is a known non-error condition. # http://crbug.com/364355 if ('libpng warning: iCCP: Not recognizing known sRGB profile that has ' + 'been edited' in line): continue filtered_lines.append(line) return ''.join(filtered_lines) def DidCrunchFail(returncode, stderr): """Determines whether aapt crunch failed from its return code and output. Because aapt's return code cannot be trusted, any output to stderr is an indication that aapt has failed (http://crbug.com/314885). """ return returncode != 0 or stderr def ZipResources(resource_dirs, zip_path): # Python zipfile does not provide a way to replace a file (it just writes # another file with the same name). So, first collect all the files to put # in the zip (with proper overriding), and then zip them. 
files_to_zip = dict() for d in resource_dirs: for root, _, files in os.walk(d): for f in files: archive_path = f parent_dir = os.path.relpath(root, d) if parent_dir != '.': archive_path = os.path.join(parent_dir, f) path = os.path.join(root, f) files_to_zip[archive_path] = path build_utils.DoZip(files_to_zip.iteritems(), zip_path) def CombineZips(zip_files, output_path): # When packaging resources, if the top-level directories in the zip file are # of the form 0, 1, ..., then each subdirectory will be passed to aapt as a # resources directory. While some resources just clobber others (image files, # etc), other resources (particularly .xml files) need to be more # intelligently merged. That merging is left up to aapt. def path_transform(name, src_zip): return '%d/%s' % (zip_files.index(src_zip), name) build_utils.MergeZips(output_path, zip_files, path_transform=path_transform) def main(): args = build_utils.ExpandFileArgs(sys.argv[1:]) options = ParseArgs(args) android_jar = os.path.join(options.android_sdk, 'android.jar') aapt = options.aapt_path input_files = [] with build_utils.TempDir() as temp_dir: deps_dir = os.path.join(temp_dir, 'deps') build_utils.MakeDirectory(deps_dir) v14_dir = os.path.join(temp_dir, 'v14') build_utils.MakeDirectory(v14_dir) gen_dir = os.path.join(temp_dir, 'gen') build_utils.MakeDirectory(gen_dir) input_resource_dirs = build_utils.ParseGypList(options.resource_dirs) if not options.v14_skip: for resource_dir in input_resource_dirs: generate_v14_compatible_resources.GenerateV14Resources( resource_dir, v14_dir) dep_zips = build_utils.ParseGypList(options.dependencies_res_zips) input_files += dep_zips dep_subdirs = [] for z in dep_zips: subdir = os.path.join(deps_dir, os.path.basename(z)) if os.path.exists(subdir): raise Exception('Resource zip name conflict: ' + os.path.basename(z)) build_utils.ExtractAll(z, path=subdir) dep_subdirs.append(subdir) # Generate R.java. This R.java contains non-final constants and is used only # while compiling the library jar (e.g. chromium_content.jar). When building # an apk, a new R.java file with the correct resource -> ID mappings will be # generated by merging the resources from all libraries and the main apk # project. package_command = [aapt, 'package', '-m', '-M', options.android_manifest, '--auto-add-overlay', '-I', android_jar, '--output-text-symbols', gen_dir, '-J', gen_dir, '--ignore-assets', build_utils.AAPT_IGNORE_PATTERN] for d in input_resource_dirs: package_command += ['-S', d] for d in dep_subdirs: package_command += ['-S', d] if options.non_constant_id: package_command.append('--non-constant-id') if options.custom_package: package_command += ['--custom-package', options.custom_package] if options.proguard_file: package_command += ['-G', options.proguard_file] if options.shared_resources: package_command.append('--shared-lib') build_utils.CheckOutput(package_command, print_stderr=False) if options.extra_res_packages: CreateExtraRJavaFiles( gen_dir, build_utils.ParseGypList(options.extra_res_packages), build_utils.ParseGypList(options.extra_r_text_files), options.shared_resources, options.include_all_resources) # This is the list of directories with resources to put in the final .zip # file. The order of these is important so that crunched/v14 resources # override the normal ones. zip_resource_dirs = input_resource_dirs + [v14_dir] base_crunch_dir = os.path.join(temp_dir, 'crunch') # Crunch image resources. This shrinks png files and is necessary for # 9-patch images to display correctly. 
'aapt crunch' accepts only a single # directory at a time and deletes everything in the output directory. for idx, input_dir in enumerate(input_resource_dirs): crunch_dir = os.path.join(base_crunch_dir, str(idx)) build_utils.MakeDirectory(crunch_dir) zip_resource_dirs.append(crunch_dir) CrunchDirectory(aapt, input_dir, crunch_dir) ZipResources(zip_resource_dirs, options.resource_zip_out) if options.all_resources_zip_out: CombineZips([options.resource_zip_out] + dep_zips, options.all_resources_zip_out) if options.R_dir: build_utils.DeleteDirectory(options.R_dir) shutil.copytree(gen_dir, options.R_dir) else: build_utils.ZipDir(options.srcjar_out, gen_dir) if options.r_text_out: r_text_path = os.path.join(gen_dir, 'R.txt') if os.path.exists(r_text_path): shutil.copyfile(r_text_path, options.r_text_out) else: open(options.r_text_out, 'w').close() if options.depfile: input_files += build_utils.GetPythonDependencies() build_utils.WriteDepfile(options.depfile, input_files) if options.stamp: build_utils.Touch(options.stamp) if __name__ == '__main__': main()
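A standalone toy version of the path_transform closure used by CombineZips above, showing how each source zip's entries receive a numbered top-level directory so aapt treats every dependency zip as its own resource dir (the file names are hypothetical):

def demo_path_transform():
    zip_files = ['base.zip', 'dep1.zip']
    def path_transform(name, src_zip):
        return '%d/%s' % (zip_files.index(src_zip), name)
    # An entry coming from dep1.zip ends up under the '1/' prefix.
    assert path_transform('res/values/strings.xml', 'dep1.zip') == \
        '1/res/values/strings.xml'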
bsd-3-clause
-3,804,866,868,311,570,400
35.244019
80
0.636964
false
eloquence/unisubs
apps/externalsites/migrations/0003_fix_thumb_options.py
4
27746
# encoding: utf-8 import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): pass def backwards(self, orm): pass models = { 'accountlinker.thirdpartyaccount': { 'Meta': {'unique_together': "(('type', 'username'),)", 'object_name': 'ThirdPartyAccount'}, 'full_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'null': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'oauth_access_token': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), 'oauth_refresh_token': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), 'type': ('django.db.models.fields.CharField', [], {'max_length': '10'}), 'username': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}) }, 'auth.customuser': { 'Meta': {'object_name': 'CustomUser', '_ormbases': ['auth.User']}, 'autoplay_preferences': ('django.db.models.fields.IntegerField', [], {'default': '1'}), 'award_points': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'biography': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'can_send_messages': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}), 'full_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '63', 'blank': 'True'}), 'homepage': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}), 'is_partner': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}), 'last_ip': ('django.db.models.fields.IPAddressField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}), 'notify_by_email': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}), 'notify_by_message': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}), 'partner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['teams.Partner']", 'null': 'True', 'blank': 'True'}), 'pay_rate_code': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '3', 'blank': 'True'}), 'picture': ('utils.amazon.fields.S3EnabledImageField', [], {'max_length': '100', 'blank': 'True'}), 'preferred_language': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}), 'third_party_accounts': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'users'", 'symmetrical': 'False', 'to': "orm['accountlinker.ThirdPartyAccount']"}), 'user_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True', 'primary_key': 'True'}), 'valid_email': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}), 'videos': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['videos.Video']", 'symmetrical': 'False', 'blank': 'True'}) }, 'auth.group': { 'Meta': {'object_name': 'Group'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) }, 'auth.permission': { 'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, 'codename': ('django.db.models.fields.CharField', [], 
{'max_length': '100'}), 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, 'auth.user': { 'Meta': {'object_name': 'User'}, 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 11, 15, 15, 56, 58, 428412)'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}), 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}), 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}), 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 11, 15, 15, 56, 58, 428337)'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) }, 'contenttypes.contenttype': { 'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, 'externalsites.kalturaaccount': { 'Meta': {'object_name': 'KalturaAccount'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'partner_id': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'secret': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'team': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['teams.Team']", 'unique': 'True'}) }, 'externalsites.syncedsubtitleversion': { 'Meta': {'unique_together': "(('account_type', 'account_id', 'video_url', 'language'),)", 'object_name': 'SyncedSubtitleVersion'}, 'account_id': ('django.db.models.fields.PositiveIntegerField', [], {}), 'account_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['subtitles.SubtitleLanguage']"}), 'version': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['subtitles.SubtitleVersion']"}), 'video_url': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.VideoUrl']"}) }, 'externalsites.synchistory': { 'Meta': {'object_name': 'SyncHistory'}, 'account_id': ('django.db.models.fields.PositiveIntegerField', [], {}), 'account_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}), 'action': 
('django.db.models.fields.CharField', [], {'max_length': '1'}),
            'datetime': ('django.db.models.fields.DateTimeField', [], {}),
            'details': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['subtitles.SubtitleLanguage']"}),
            'result': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
            'version': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['subtitles.SubtitleVersion']", 'null': 'True', 'blank': 'True'}),
            'video_url': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.VideoUrl']"})
        },
        'subtitles.subtitlelanguage': {
            'Meta': {'unique_together': "[('video', 'language_code')]", 'object_name': 'SubtitleLanguage'},
            'created': ('django.db.models.fields.DateTimeField', [], {}),
            'followers': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'new_followed_languages'", 'blank': 'True', 'to': "orm['auth.CustomUser']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_forked': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'language_code': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
            'official_signoff_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
            'pending_signoff_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
            'pending_signoff_expired_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
            'pending_signoff_unexpired_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
            'subtitles_complete': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'unofficial_signoff_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
            'video': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'newsubtitlelanguage_set'", 'to': "orm['videos.Video']"}),
            'writelock_owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'writelocked_newlanguages'", 'null': 'True', 'to': "orm['auth.CustomUser']"}),
            'writelock_session_key': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'writelock_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
        },
        'subtitles.subtitleversion': {
            'Meta': {'unique_together': "[('video', 'subtitle_language', 'version_number'), ('video', 'language_code', 'version_number')]", 'object_name': 'SubtitleVersion'},
            'author': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'newsubtitleversion_set'", 'to': "orm['auth.CustomUser']"}),
            'created': ('django.db.models.fields.DateTimeField', [], {}),
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'language_code': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
            'meta_1_content': ('videos.metadata.MetadataContentField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
            'meta_2_content': ('videos.metadata.MetadataContentField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
            'meta_3_content': ('videos.metadata.MetadataContentField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
            'note': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '512', 'blank': 'True'}),
            'origin': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
            'parents': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['subtitles.SubtitleVersion']", 'symmetrical': 'False', 'blank': 'True'}),
            'rollback_of_version_number': ('django.db.models.fields.PositiveIntegerField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
            'serialized_lineage': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'serialized_subtitles': ('django.db.models.fields.TextField', [], {}),
            'subtitle_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
            'subtitle_language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['subtitles.SubtitleLanguage']"}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'blank': 'True'}),
            'version_number': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
            'video': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'newsubtitleversion_set'", 'to': "orm['videos.Video']"}),
            'visibility': ('django.db.models.fields.CharField', [], {'default': "'public'", 'max_length': '10'}),
            'visibility_override': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '10', 'blank': 'True'})
        },
        'teams.application': {
            'Meta': {'unique_together': "(('team', 'user', 'status'),)", 'object_name': 'Application'},
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'history': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modified': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'note': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
            'team': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'applications'", 'to': "orm['teams.Team']"}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'team_applications'", 'to': "orm['auth.CustomUser']"})
        },
        'teams.partner': {
            'Meta': {'object_name': 'Partner'},
            'admins': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'managed_partners'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['auth.CustomUser']"}),
            'can_request_paid_captions': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '250'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'})
        },
        'teams.project': {
            'Meta': {'unique_together': "(('team', 'name'), ('team', 'slug'))", 'object_name': 'Project'},
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {'max_length': '2048', 'null': 'True', 'blank': 'True'}),
            'guidelines': ('django.db.models.fields.TextField', [], {'max_length': '2048', 'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modified': ('django.db.models.fields.DateTimeField', [], {'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'order': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
            'slug': ('django.db.models.fields.SlugField', [], {'db_index': 'True', 'max_length': '50', 'blank': 'True'}),
            'team': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['teams.Team']"}),
            'workflow_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'})
        },
        'teams.team': {
            'Meta': {'object_name': 'Team'},
            'applicants': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'applicated_teams'", 'symmetrical': 'False', 'through': "orm['teams.Application']", 'to': "orm['auth.CustomUser']"}),
            'application_text': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'auth_provider_code': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '24', 'blank': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'header_html_text': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
            'highlight': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_moderated': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'is_visible': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
            'last_notification_time': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'logo': ('utils.amazon.fields.S3EnabledImageField', [], {'max_length': '100', 'blank': 'True'}),
            'max_tasks_per_member': ('django.db.models.fields.PositiveIntegerField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
            'membership_policy': ('django.db.models.fields.IntegerField', [], {'default': '4'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '250'}),
            'notify_interval': ('django.db.models.fields.CharField', [], {'default': "'D'", 'max_length': '1'}),
            'page_content': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'partner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'teams'", 'null': 'True', 'to': "orm['teams.Partner']"}),
            'points': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'projects_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'}),
            'subtitle_policy': ('django.db.models.fields.IntegerField', [], {'default': '10'}),
            'task_assign_policy': ('django.db.models.fields.IntegerField', [], {'default': '10'}),
            'task_expiration': ('django.db.models.fields.PositiveIntegerField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
            'third_party_accounts': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'teams'", 'symmetrical': 'False', 'to': "orm['accountlinker.ThirdPartyAccount']"}),
            'translate_policy': ('django.db.models.fields.IntegerField', [], {'default': '10'}),
            'users': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'teams'", 'symmetrical': 'False', 'through': "orm['teams.TeamMember']", 'to': "orm['auth.CustomUser']"}),
            'video': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'intro_for_teams'", 'null': 'True', 'to': "orm['videos.Video']"}),
            'video_policy': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
            'videos': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['videos.Video']", 'through': "orm['teams.TeamVideo']", 'symmetrical': 'False'}),
            'workflow_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'})
        },
        'teams.teammember': {
            'Meta': {'unique_together': "(('team', 'user'),)", 'object_name': 'TeamMember'},
            'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'role': ('django.db.models.fields.CharField', [], {'default': "'contributor'", 'max_length': '16', 'db_index': 'True'}),
            'team': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'members'", 'to': "orm['teams.Team']"}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'team_members'", 'to': "orm['auth.CustomUser']"})
        },
        'teams.teamvideo': {
            'Meta': {'unique_together': "(('team', 'video'),)", 'object_name': 'TeamVideo'},
            'added_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']"}),
            'all_languages': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'partner_id': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['teams.Project']"}),
            'team': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['teams.Team']"}),
            'thumbnail': ('utils.amazon.fields.S3EnabledImageField', [], {'max_length': '100', 'null': 'True', 'thumb_sizes': '((288, 162), (120, 90))', 'blank': 'True'}),
            'video': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['videos.Video']", 'unique': 'True'})
        },
        'videos.video': {
            'Meta': {'object_name': 'Video'},
            'allow_community_edits': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'allow_video_urls_edit': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
            'complete_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'duration': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
            'edited': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
            'featured': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'followers': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'followed_videos'", 'blank': 'True', 'to': "orm['auth.CustomUser']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_public': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
            'is_subtitled': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'languages_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
            'meta_1_content': ('videos.metadata.MetadataContentField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
            'meta_1_type': ('videos.metadata.MetadataTypeField', [], {'null': 'True', 'blank': 'True'}),
            'meta_2_content': ('videos.metadata.MetadataContentField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
            'meta_2_type': ('videos.metadata.MetadataTypeField', [], {'null': 'True', 'blank': 'True'}),
            'meta_3_content': ('videos.metadata.MetadataContentField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
            'meta_3_type': ('videos.metadata.MetadataTypeField', [], {'null': 'True', 'blank': 'True'}),
            'moderated_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'moderating'", 'null': 'True', 'to': "orm['teams.Team']"}),
            'primary_audio_language_code': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '16', 'blank': 'True'}),
            's3_thumbnail': ('utils.amazon.fields.S3EnabledImageField', [], {'max_length': '100', 'thumb_sizes': '((288, 162), (120, 90))', 'blank': 'True'}),
            'small_thumbnail': ('django.db.models.fields.CharField', [], {'max_length': '500', 'blank': 'True'}),
            'thumbnail': ('django.db.models.fields.CharField', [], {'max_length': '500', 'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']", 'null': 'True', 'blank': 'True'}),
            'video_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
            'view_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
            'was_subtitled': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True', 'blank': 'True'}),
            'writelock_owner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'writelock_owners'", 'null': 'True', 'to': "orm['auth.CustomUser']"}),
            'writelock_session_key': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'writelock_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True'})
        },
        'videos.videourl': {
            'Meta': {'object_name': 'VideoUrl'},
            'added_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']", 'null': 'True', 'blank': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'original': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'owner_username': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'primary': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
            'url': ('django.db.models.fields.URLField', [], {'unique': 'True', 'max_length': '255'}),
            'video': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.Video']"}),
            'videoid': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'})
        }
    }

    complete_apps = ['externalsites']
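For readers unfamiliar with South's frozen-ORM format: each entry above is a (field class path, positional args, keyword args) triple keyed by field name, and orm['app.Model'] references resolve against this frozen snapshot rather than the live models. As a hedged, purely illustrative sketch (not code from the repo), the 'teams.teammember' entry roughly describes this Django model:

import datetime

from django.db import models


class TeamMember(models.Model):
    # Reconstructed by hand from the frozen dict; names and options are taken
    # from the entry above, the model class itself is hypothetical.
    created = models.DateTimeField(default=datetime.datetime.now,
                                   null=True, blank=True)
    role = models.CharField(default='contributor', max_length=16,
                            db_index=True)
    team = models.ForeignKey('teams.Team', related_name='members')
    user = models.ForeignKey('auth.CustomUser', related_name='team_members')

    class Meta:
        unique_together = (('team', 'user'),)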
agpl-3.0
6,987,934,475,690,635,000
93.696246
217
0.559576
false
rruebner/odoo
addons/point_of_sale/wizard/pos_confirm.py
343
2403
# -*- coding: utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Management Solution
#    Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

from openerp.osv import osv


class pos_confirm(osv.osv_memory):
    _name = 'pos.confirm'
    _description = 'Post POS Journal Entries'

    def action_confirm(self, cr, uid, ids, context=None):
        order_obj = self.pool.get('pos.order')
        ids = order_obj.search(cr, uid, [('state','=','paid')], context=context)
        for order in order_obj.browse(cr, uid, ids, context=context):
            # Only mark the order done once every linked statement line's
            # statement has been confirmed.
            todo = True
            for line in order.statement_ids:
                if line.statement_id.state != 'confirm':
                    todo = False
                    break
            if todo:
                order.signal_workflow('done')

        # Check whether there are invoiced orders whose open invoices still
        # need their payment lines reconciled.
        ids = order_obj.search(cr, uid, [('state','=','invoiced'),('invoice_id.state','=','open')], context=context)
        for order in order_obj.browse(cr, uid, ids, context=context):
            invoice = order.invoice_id
            data_lines = [x.id for x in invoice.move_id.line_id if x.account_id.id == invoice.account_id.id]
            for st in order.statement_ids:
                for move in st.move_ids:
                    data_lines += [x.id for x in move.line_id if x.account_id.id == invoice.account_id.id]
            self.pool.get('account.move.line').reconcile(cr, uid, data_lines, context=context)
        return {}

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
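The wizard above is normally triggered from a menu action, but it can also be driven from code. A minimal sketch, assuming an Odoo 8-style registry, superuser uid 1, and a placeholder database name:

import openerp

# 'db_name' is a placeholder; any database with point_of_sale installed works.
registry = openerp.modules.registry.RegistryManager.get('db_name')
with registry.cursor() as cr:
    wizard_obj = registry.get('pos.confirm')
    wizard_id = wizard_obj.create(cr, 1, {}, context={})
    # Posts entries for paid orders and reconciles open invoices, as above.
    wizard_obj.action_confirm(cr, 1, [wizard_id], context={})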
agpl-3.0
7,988,854,076,979,739,000
45.211538
116
0.589263
false
open-synergy/sale-workflow
sale_allotment/__openerp__.py
9
1444
# -*- coding: utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Management Solution
#    Copyright (C) 2015 Odoo.com.
#    Copyright (C) 2015 Openies.com.
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
    'name': 'Allotment on sale orders',
    'version': '8.0.1.1.0',
    'category': 'Sales',
    'summary': "Separate the shipment according to allotment partner",
    'author': u'Openies,Numérigraphe,Odoo Community Association (OCA)',
    'website': 'http://www.Openies.com/',
    'depends': ['sale_stock'],
    'data': [
        'views/sale_order_line_view.xml'
    ],
    'installable': True,
    'auto_install': False,
    'license': 'AGPL-3',
}
agpl-3.0
-148,017,888,377,820,260
38
78
0.597367
false
rdipietro/tensorflow
tensorflow/python/ops/split_benchmark.py
7
4025
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Benchmark for split and grad of split."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import numpy as np
import tensorflow as tf

from tensorflow.python.platform import benchmark
from tensorflow.python.platform import tf_logging as logging


def build_graph(device, input_shape, output_sizes, axis):
  """Build a graph containing a sequence of split operations.

  Args:
    device: string, the device to run on.
    input_shape: shape of the input tensor.
    output_sizes: size of each output along axis.
    axis: axis to be split along.

  Returns:
    An array of tensors to run()
  """
  with tf.device("/%s:0" % device):
    inp = tf.zeros(input_shape)

    outputs = []
    for _ in range(100):
      outputs.extend(tf.split_v(inp, output_sizes, axis))
    return tf.group(*outputs)


class SplitBenchmark(tf.test.Benchmark):
  """Benchmark split!"""

  def _run_graph(self, device, output_shape, variable, num_outputs, axis):
    """Run the graph and print its execution time.

    Args:
      device: string, the device to run on.
      output_shape: shape of each output tensors.
      variable: whether or not the output shape should be fixed
      num_outputs: the number of outputs to split the input into
      axis: axis to be split

    Returns:
      The duration of the run in seconds.
    """
    graph = tf.Graph()
    with graph.as_default():
      if not variable:
        if axis == 0:
          input_shape = [output_shape[0] * num_outputs, output_shape[1]]
          sizes = [output_shape[0] for _ in range(num_outputs)]
        else:
          input_shape = [output_shape[0], output_shape[1] * num_outputs]
          sizes = [output_shape[1] for _ in range(num_outputs)]
      else:
        sizes = np.random.randint(
            low=max(1, output_shape[axis] - 2),
            high=output_shape[axis] + 2,
            size=num_outputs)
        total_size = np.sum(sizes)
        if axis == 0:
          input_shape = [total_size, output_shape[1]]
        else:
          input_shape = [output_shape[0], total_size]

      outputs = build_graph(device, input_shape, sizes, axis)
      config = tf.ConfigProto(graph_options=tf.GraphOptions(
          optimizer_options=tf.OptimizerOptions(
              opt_level=tf.OptimizerOptions.L0)))
      with tf.Session(graph=graph, config=config) as session:
        logging.set_verbosity("info")
        tf.global_variables_initializer().run()
        bench = benchmark.TensorFlowBenchmark()
        bench.run_op_benchmark(
            session,
            outputs,
            mbs=input_shape[0] * input_shape[1] * 4 * 2 * 100 / 1e6,
            extras={
                "input_shape": input_shape,
                "variable": variable,
                "axis": axis
            })

  def benchmark_split(self):
    print("Forward vs backward split")
    shapes = [[2000, 8], [8, 2000], [100, 18], [1000, 18], [10000, 18],
              [100, 97], [1000, 97], [10000, 1], [1, 10000]]
    axis_ = [1]  # 0 is very fast because it doesn't actually do any copying
    num_outputs = 100
    variable = [False, True]  # fixed input size or not
    for shape in shapes:
      for axis in axis_:
        for v in variable:
          self._run_graph("gpu", shape, v, num_outputs, axis)


if __name__ == "__main__":
  tf.test.main()
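A minimal sketch of exercising build_graph outside the benchmark harness, assuming the same pre-1.0 TensorFlow API this file targets (tf.split_v, tf.Session); the shapes and sizes below are arbitrary:

import tensorflow as tf

g = tf.Graph()
with g.as_default():
    # Split a (300, 4) tensor along axis 1 into widths 1 and 3, 100 times.
    op = build_graph("cpu", [300, 4], [1, 3], axis=1)
with tf.Session(graph=g) as sess:
    sess.run(op)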
apache-2.0
1,387,651,702,457,593,900
33.698276
80
0.624099
false
byu-aml-lab/bzrflag
bzagents/bzrc.py
18
14582
#!/usr/bin/python -tt

# Control BZFlag tanks remotely with synchronous communication.

####################################################################
# NOTE TO STUDENTS:
# You CAN and probably SHOULD modify this code.  Just because it is
# in a separate file does not mean that you can ignore it or that
# you have to leave it alone.  Treat it as your code.  You are
# required to understand it enough to be able to modify it if you
# find something wrong.  This is merely a help to get you started
# on interacting with BZRC.  It is provided AS IS, with NO WARRANTY,
# express or implied.
####################################################################

from __future__ import division

import math
import sys
import socket
import time


class BZRC:
    """Class handles queries and responses with remote controled tanks."""

    def __init__(self, host, port, debug=False):
        """Given a hostname and port number, connect to the RC tanks."""
        self.debug = debug
        # Note that AF_INET and SOCK_STREAM are defaults.
        sock = socket.socket()
        sock.connect((host, port))
        # Make a line-buffered "file" from the socket.
        self.conn = sock.makefile(bufsize=1)
        self.handshake()

    def handshake(self):
        """Perform the handshake with the remote tanks."""
        self.expect(('bzrobots', '1'), True)
        print >>self.conn, 'agent 1'

    def close(self):
        """Close the socket."""
        self.conn.close()

    def read_arr(self):
        """Read a response from the RC tanks as an array split on whitespace.

        """
        try:
            line = self.conn.readline()
        except socket.error:
            print 'Server Shut down. Aborting'
            sys.exit(1)
        if self.debug:
            print 'Received: %s' % line.split()
        return line.split()

    def sendline(self, line):
        """Send a line to the RC tanks."""
        print >>self.conn, line

    def die_confused(self, expected, got_arr):
        """When we think the RC tanks should have responded differently, call
        this method with a string explaining what should have been sent and
        with the array containing what was actually sent.

        """
        raise UnexpectedResponse(expected, ' '.join(got_arr))

    def expect(self, expected, full=False):
        """Verify that server's response is as expected."""
        if isinstance(expected, str):
            expected = (expected,)
        line = self.read_arr()
        good = True
        if full and len(expected) != len(line):
            good = False
        else:
            for a, b in zip(expected, line):
                if a != b:
                    good = False
                    break
        if not good:
            self.die_confused(' '.join(expected), line)
        if full:
            return True
        return line[len(expected):]

    def expect_multi(self, *expecteds, **kwds):
        """Verify the server's response looks like one of several possible
        responses.  Return the index of the matched response, and the
        server's line response.

        """
        line = self.read_arr()
        for i, expected in enumerate(expecteds):
            for a, b in zip(expected, line):
                if a != b:
                    break
            else:
                if not kwds.get('full', False) or len(expected) == len(line):
                    break
        else:
            self.die_confused(' or '.join(' '.join(one) for one in expecteds),
                              line)
        return i, line[len(expected):]

    def read_ack(self):
        """Expect an "ack" line from the remote tanks.

        Raise an UnexpectedResponse exception if we get something else.

        """
        self.expect('ack')

    def read_bool(self):
        """Expect a boolean response from the remote tanks.

        Return True or False in accordance with the response.  Raise an
        UnexpectedResponse exception if we get something else.

        """
        i, rest = self.expect_multi(('ok',), ('fail',))
        return (True, False)[i]

    def read_teams(self):
        """Get team information."""
        self.expect('begin')
        teams = []
        while True:
            i, rest = self.expect_multi(('team',), ('end',))
            if i == 1:
                break
            team = Answer()
            team.color = rest[0]
            team.count = float(rest[1])
            team.base = [(float(x), float(y)) for (x, y) in
                         zip(rest[2:10:2], rest[3:10:2])]
            teams.append(team)
        return teams

    def read_obstacles(self):
        """Get obstacle information."""
        self.expect('begin')
        obstacles = []
        while True:
            i, rest = self.expect_multi(('obstacle',), ('end',))
            if i == 1:
                break
            obstacle = [(float(x), float(y)) for (x, y) in
                        zip(rest[::2], rest[1::2])]
            obstacles.append(obstacle)
        return obstacles

    def read_occgrid(self):
        """Read grid."""
        response = self.read_arr()
        if 'fail' in response:
            return None
        pos = tuple(int(a) for a in self.expect('at')[0].split(','))
        size = tuple(int(a) for a in self.expect('size')[0].split('x'))
        grid = [[0 for i in range(size[1])] for j in range(size[0])]
        for x in range(size[0]):
            line = self.read_arr()[0]
            for y in range(size[1]):
                if line[y] == '1':
                    grid[x][y] = 1
        self.expect('end', True)
        return pos, grid

    def read_flags(self):
        """Get flag information."""
        line = self.read_arr()
        if line[0] != 'begin':
            self.die_confused('begin', line)
        flags = []
        while True:
            line = self.read_arr()
            if line[0] == 'flag':
                flag = Answer()
                flag.color = line[1]
                flag.poss_color = line[2]
                flag.x = float(line[3])
                flag.y = float(line[4])
                flags.append(flag)
            elif line[0] == 'end':
                break
            else:
                self.die_confused('flag or end', line)
        return flags

    def read_shots(self):
        """Get shot information."""
        line = self.read_arr()
        if line[0] != 'begin':
            self.die_confused('begin', line)
        shots = []
        while True:
            line = self.read_arr()
            if line[0] == 'shot':
                shot = Answer()
                shot.x = float(line[1])
                shot.y = float(line[2])
                shot.vx = float(line[3])
                shot.vy = float(line[4])
                shots.append(shot)
            elif line[0] == 'end':
                break
            else:
                self.die_confused('shot or end', line)
        return shots

    def read_mytanks(self):
        """Get friendly tank information."""
        line = self.read_arr()
        if line[0] != 'begin':
            self.die_confused('begin', line)
        tanks = []
        while True:
            line = self.read_arr()
            if line[0] == 'mytank':
                tank = Answer()
                tank.index = int(line[1])
                tank.callsign = line[2]
                tank.status = line[3]
                tank.shots_avail = int(line[4])
                tank.time_to_reload = float(line[5])
                tank.flag = line[6]
                tank.x = float(line[7])
                tank.y = float(line[8])
                tank.angle = float(line[9])
                tank.vx = float(line[10])
                tank.vy = float(line[11])
                tank.angvel = float(line[12])
                tanks.append(tank)
            elif line[0] == 'end':
                break
            else:
                self.die_confused('mytank or end', line)
        return tanks

    def read_othertanks(self):
        """Get enemy tank information."""
        line = self.read_arr()
        if line[0] != 'begin':
            self.die_confused('begin', line)
        tanks = []
        while True:
            line = self.read_arr()
            if line[0] == 'othertank':
                tank = Answer()
                tank.callsign = line[1]
                tank.color = line[2]
                tank.status = line[3]
                tank.flag = line[4]
                tank.x = float(line[5])
                tank.y = float(line[6])
                tank.angle = float(line[7])
                tanks.append(tank)
            elif line[0] == 'end':
                break
            else:
                self.die_confused('othertank or end', line)
        return tanks

    def read_bases(self):
        """Get base information."""
        bases = []
        line = self.read_arr()
        if line[0] != 'begin':
            self.die_confused('begin', line)
        while True:
            line = self.read_arr()
            if line[0] == 'base':
                base = Answer()
                base.color = line[1]
                base.corner1_x = float(line[2])
                base.corner1_y = float(line[3])
                base.corner2_x = float(line[4])
                base.corner2_y = float(line[5])
                base.corner3_x = float(line[6])
                base.corner3_y = float(line[7])
                base.corner4_x = float(line[8])
                base.corner4_y = float(line[9])
                bases.append(base)
            elif line[0] == 'end':
                break
            else:
                self.die_confused('base or end', line)
        return bases

    def read_constants(self):
        """Get constants."""
        line = self.read_arr()
        if line[0] != 'begin':
            self.die_confused('begin', line)
        constants = {}
        while True:
            line = self.read_arr()
            if line[0] == 'constant':
                constants[line[1]] = line[2]
            elif line[0] == 'end':
                break
            else:
                self.die_confused('constant or end', line)
        return constants

    # Commands:

    def shoot(self, index):
        """Perform a shoot request."""
        self.sendline('shoot %s' % index)
        self.read_ack()
        return self.read_bool()

    def speed(self, index, value):
        """Set the desired speed to the specified value."""
        self.sendline('speed %s %s' % (index, value))
        self.read_ack()
        return self.read_bool()

    def angvel(self, index, value):
        """Set the desired angular velocity to the specified value."""
        self.sendline('angvel %s %s' % (index, value))
        self.read_ack()
        return self.read_bool()

    # Information Requests:

    def get_teams(self):
        """Request a list of teams."""
        self.sendline('teams')
        self.read_ack()
        return self.read_teams()

    def get_obstacles(self):
        """Request a list of obstacles."""
        self.sendline('obstacles')
        self.read_ack()
        return self.read_obstacles()

    def get_occgrid(self, tankid):
        """Request an occupancy grid for a tank"""
        self.sendline('occgrid %d' % tankid)
        self.read_ack()
        return self.read_occgrid()

    def get_flags(self):
        """Request a list of flags."""
        self.sendline('flags')
        self.read_ack()
        return self.read_flags()

    def get_shots(self):
        """Request a list of shots."""
        self.sendline('shots')
        self.read_ack()
        return self.read_shots()

    def get_mytanks(self):
        """Request a list of our tanks."""
        self.sendline('mytanks')
        self.read_ack()
        return self.read_mytanks()

    def get_othertanks(self):
        """Request a list of tanks that aren't ours."""
        self.sendline('othertanks')
        self.read_ack()
        return self.read_othertanks()

    def get_bases(self):
        """Request a list of bases."""
        self.sendline('bases')
        self.read_ack()
        return self.read_bases()

    def get_constants(self):
        """Request a dictionary of game constants."""
        self.sendline('constants')
        self.read_ack()
        return self.read_constants()

    # Optimized queries

    def get_lots_o_stuff(self):
        """Network-optimized request for mytanks, othertanks, flags, and
        shots.

        Returns a tuple with the four results.

        """
        self.sendline('mytanks')
        self.sendline('othertanks')
        self.sendline('flags')
        self.sendline('shots')

        self.read_ack()
        mytanks = self.read_mytanks()
        self.read_ack()
        othertanks = self.read_othertanks()
        self.read_ack()
        flags = self.read_flags()
        self.read_ack()
        shots = self.read_shots()

        return (mytanks, othertanks, flags, shots)

    def do_commands(self, commands):
        """Send commands for a bunch of tanks in a network-optimized way."""
        for cmd in commands:
            self.sendline('speed %s %s' % (cmd.index, cmd.speed))
            self.sendline('angvel %s %s' % (cmd.index, cmd.angvel))
            if cmd.shoot:
                self.sendline('shoot %s' % cmd.index)

        results = []
        for cmd in commands:
            self.read_ack()
            result_speed = self.read_bool()
            self.read_ack()
            result_angvel = self.read_bool()
            if cmd.shoot:
                self.read_ack()
                result_shoot = self.read_bool()
            else:
                result_shoot = False
            results.append((result_speed, result_angvel, result_shoot))
        return results


class Answer(object):
    """BZRC returns an Answer for things like tanks, obstacles, etc.

    You should probably write your own code for this sort of stuff.  We
    created this class just to keep things short and sweet.

    """
    pass


class Command(object):
    """Class for setting a command for a tank."""

    def __init__(self, index, speed, angvel, shoot):
        self.index = index
        self.speed = speed
        self.angvel = angvel
        self.shoot = shoot


class UnexpectedResponse(Exception):
    """Exception raised when the BZRC gets confused by a bad response."""

    def __init__(self, expected, got):
        self.expected = expected
        self.got = got

    def __str__(self):
        return 'BZRC: Expected "%s".  Instead got "%s".' % (self.expected,
                                                            self.got)

# vim: et sw=4 sts=4
gpl-3.0
26,795,852,626,714,104
29.698947
79
0.510012
false
chaluemwut/fbserver
venv/lib/python2.7/site-packages/scipy/linalg/tests/test_basic.py
18
23525
#!/usr/bin/env python
#
# Created by: Pearu Peterson, March 2002
#
""" Test functions for linalg.basic module

"""
from __future__ import division, print_function, absolute_import

"""
Bugs:
1) solve.check_random_sym_complex fails if a is complex
   and transpose(a) = conjugate(a) (a is Hermitian).
"""
__usage__ = """
Build linalg:
  python setup_linalg.py build
Run tests if scipy is installed:
  python -c 'import scipy;scipy.linalg.test()'
Run tests if linalg is not installed:
  python tests/test_basic.py
"""

import numpy as np
from numpy import arange, array, dot, zeros, identity, conjugate, transpose, \
    float32
import numpy.linalg as linalg

from numpy.testing import TestCase, rand, run_module_suite, assert_raises, \
    assert_equal, assert_almost_equal, assert_array_almost_equal, assert_, \
    assert_allclose

from scipy.linalg import solve, inv, det, lstsq, pinv, pinv2, pinvh, norm,\
    solve_banded, solveh_banded, solve_triangular
from scipy.linalg._testutils import assert_no_overwrite


def random(size):
    return rand(*size)


class TestSolveBanded(TestCase):

    def test_real(self):
        a = array([[1.0, 20, 0, 0],
                   [-30, 4, 6, 0],
                   [2, 1, 20, 2],
                   [0, -1, 7, 14]])
        ab = array([[0.0, 20, 6, 2],
                    [1, 4, 20, 14],
                    [-30, 1, 7, 0],
                    [2, -1, 0, 0]])
        l,u = 2,1
        b4 = array([10.0, 0.0, 2.0, 14.0])
        b4by1 = b4.reshape(-1,1)
        b4by2 = array([[2, 1],
                       [-30, 4],
                       [2, 3],
                       [1, 3]])
        b4by4 = array([[1, 0, 0, 0],
                       [0, 0, 0, 1],
                       [0, 1, 0, 0],
                       [0, 1, 0, 0]])
        for b in [b4, b4by1, b4by2, b4by4]:
            x = solve_banded((l, u), ab, b)
            assert_array_almost_equal(dot(a, x), b)

    def test_complex(self):
        a = array([[1.0, 20, 0, 0],
                   [-30, 4, 6, 0],
                   [2j, 1, 20, 2j],
                   [0, -1, 7, 14]])
        ab = array([[0.0, 20, 6, 2j],
                    [1, 4, 20, 14],
                    [-30, 1, 7, 0],
                    [2j, -1, 0, 0]])
        l,u = 2,1
        b4 = array([10.0, 0.0, 2.0, 14.0j])
        b4by1 = b4.reshape(-1,1)
        b4by2 = array([[2, 1],
                       [-30, 4],
                       [2, 3],
                       [1, 3]])
        b4by4 = array([[1, 0, 0, 0],
                       [0, 0, 0,1j],
                       [0, 1, 0, 0],
                       [0, 1, 0, 0]])
        for b in [b4, b4by1, b4by2, b4by4]:
            x = solve_banded((l, u), ab, b)
            assert_array_almost_equal(dot(a, x), b)

    def test_check_finite(self):
        a = array([[1.0, 20, 0, 0],
                   [-30, 4, 6, 0],
                   [2, 1, 20, 2],
                   [0, -1, 7, 14]])
        ab = array([[0.0, 20, 6, 2],
                    [1, 4, 20, 14],
                    [-30, 1, 7, 0],
                    [2, -1, 0, 0]])
        l,u = 2,1
        b4 = array([10.0, 0.0, 2.0, 14.0])
        x = solve_banded((l, u), ab, b4, check_finite=False)
        assert_array_almost_equal(dot(a, x), b4)

    def test_bad_shape(self):
        ab = array([[0.0, 20, 6, 2],
                    [1, 4, 20, 14],
                    [-30, 1, 7, 0],
                    [2, -1, 0, 0]])
        l,u = 2,1
        bad = array([1.0, 2.0, 3.0, 4.0]).reshape(-1,4)
        assert_raises(ValueError, solve_banded, (l, u), ab, bad)
        assert_raises(ValueError, solve_banded, (l, u), ab, [1.0, 2.0])

        # Values of (l,u) are not compatible with ab.
        assert_raises(ValueError, solve_banded, (1, 1), ab, [1.0, 2.0])


class TestSolveHBanded(TestCase):

    def test_01_upper(self):
        # Solve
        # [ 4 1 0]     [1]
        # [ 1 4 1] X = [4]
        # [ 0 1 4]     [1]
        # with the RHS as a 1D array.
        ab = array([[-99, 1.0, 1.0],
                    [4.0, 4.0, 4.0]])
        b = array([1.0, 4.0, 1.0])
        x = solveh_banded(ab, b)
        assert_array_almost_equal(x, [0.0, 1.0, 0.0])

    def test_02_upper(self):
        # Solve
        # [ 4 1 0]     [1 4]
        # [ 1 4 1] X = [4 2]
        # [ 0 1 4]     [1 4]
        #
        ab = array([[-99, 1.0, 1.0],
                    [4.0, 4.0, 4.0]])
        b = array([[1.0, 4.0],
                   [4.0, 2.0],
                   [1.0, 4.0]])
        x = solveh_banded(ab, b)
        expected = array([[0.0, 1.0],
                          [1.0, 0.0],
                          [0.0, 1.0]])
        assert_array_almost_equal(x, expected)

    def test_03_upper(self):
        # Solve
        # [ 4 1 0]     [1]
        # [ 1 4 1] X = [4]
        # [ 0 1 4]     [1]
        # with the RHS as a 2D array with shape (3,1).
        ab = array([[-99, 1.0, 1.0],
                    [4.0, 4.0, 4.0]])
        b = array([1.0, 4.0, 1.0]).reshape(-1,1)
        x = solveh_banded(ab, b)
        assert_array_almost_equal(x, array([0.0, 1.0, 0.0]).reshape(-1,1))

    def test_01_lower(self):
        # Solve
        # [ 4 1 0]     [1]
        # [ 1 4 1] X = [4]
        # [ 0 1 4]     [1]
        #
        ab = array([[4.0, 4.0, 4.0],
                    [1.0, 1.0, -99]])
        b = array([1.0, 4.0, 1.0])
        x = solveh_banded(ab, b, lower=True)
        assert_array_almost_equal(x, [0.0, 1.0, 0.0])

    def test_02_lower(self):
        # Solve
        # [ 4 1 0]     [1 4]
        # [ 1 4 1] X = [4 2]
        # [ 0 1 4]     [1 4]
        #
        ab = array([[4.0, 4.0, 4.0],
                    [1.0, 1.0, -99]])
        b = array([[1.0, 4.0],
                   [4.0, 2.0],
                   [1.0, 4.0]])
        x = solveh_banded(ab, b, lower=True)
        expected = array([[0.0, 1.0],
                          [1.0, 0.0],
                          [0.0, 1.0]])
        assert_array_almost_equal(x, expected)

    def test_01_float32(self):
        # Solve
        # [ 4 1 0]     [1]
        # [ 1 4 1] X = [4]
        # [ 0 1 4]     [1]
        #
        ab = array([[-99, 1.0, 1.0],
                    [4.0, 4.0, 4.0]], dtype=float32)
        b = array([1.0, 4.0, 1.0], dtype=float32)
        x = solveh_banded(ab, b)
        assert_array_almost_equal(x, [0.0, 1.0, 0.0])

    def test_02_float32(self):
        # Solve
        # [ 4 1 0]     [1 4]
        # [ 1 4 1] X = [4 2]
        # [ 0 1 4]     [1 4]
        #
        ab = array([[-99, 1.0, 1.0],
                    [4.0, 4.0, 4.0]], dtype=float32)
        b = array([[1.0, 4.0],
                   [4.0, 2.0],
                   [1.0, 4.0]], dtype=float32)
        x = solveh_banded(ab, b)
        expected = array([[0.0, 1.0],
                          [1.0, 0.0],
                          [0.0, 1.0]])
        assert_array_almost_equal(x, expected)

    def test_01_complex(self):
        # Solve
        # [ 4 -j 0]     [ -j]
        # [ j 4 -j] X = [4-j]
        # [ 0 j 4]      [4+j]
        #
        ab = array([[-99, -1.0j, -1.0j],
                    [4.0, 4.0, 4.0]])
        b = array([-1.0j, 4.0-1j, 4+1j])
        x = solveh_banded(ab, b)
        assert_array_almost_equal(x, [0.0, 1.0, 1.0])

    def test_02_complex(self):
        # Solve
        # [ 4 -j 0]     [ -j    4j]
        # [ j 4 -j] X = [4-j  -1-j]
        # [ 0 j 4]      [4+j   4 ]
        #
        ab = array([[-99, -1.0j, -1.0j],
                    [4.0, 4.0, 4.0]])
        b = array([[-1j, 4.0j],
                   [4.0-1j, -1.0-1j],
                   [4.0+1j, 4.0]])
        x = solveh_banded(ab, b)
        expected = array([[0.0, 1.0j],
                          [1.0, 0.0],
                          [1.0, 1.0]])
        assert_array_almost_equal(x, expected)

    def test_check_finite(self):
        # Solve
        # [ 4 1 0]     [1]
        # [ 1 4 1] X = [4]
        # [ 0 1 4]     [1]
        # with the RHS as a 1D array.
        ab = array([[-99, 1.0, 1.0],
                    [4.0, 4.0, 4.0]])
        b = array([1.0, 4.0, 1.0])
        x = solveh_banded(ab, b, check_finite=False)
        assert_array_almost_equal(x, [0.0, 1.0, 0.0])

    def test_bad_shapes(self):
        ab = array([[-99, 1.0, 1.0],
                    [4.0, 4.0, 4.0]])
        b = array([[1.0, 4.0],
                   [4.0, 2.0]])
        assert_raises(ValueError, solveh_banded, ab, b)
        assert_raises(ValueError, solveh_banded, ab, [1.0, 2.0])
        assert_raises(ValueError, solveh_banded, ab, [1.0])


class TestSolve(TestCase):
    def setUp(self):
        np.random.seed(1234)

    def test_20Feb04_bug(self):
        a = [[1,1],[1.0,0]]  # ok
        x0 = solve(a,[1,0j])
        assert_array_almost_equal(dot(a,x0),[1,0])

        a = [[1,1],[1.2,0]]  # gives failure with clapack.zgesv(..,rowmajor=0)
        b = [1,0j]
        x0 = solve(a,b)
        assert_array_almost_equal(dot(a,x0),[1,0])

    def test_simple(self):
        a = [[1,20],[-30,4]]
        for b in ([[1,0],[0,1]],[1,0],
                  [[2,1],[-30,4]]):
            x = solve(a,b)
            assert_array_almost_equal(dot(a,x),b)

    def test_simple_sym(self):
        a = [[2,3],[3,5]]
        for lower in [0,1]:
            for b in ([[1,0],[0,1]],[1,0]):
                x = solve(a,b,sym_pos=1,lower=lower)
                assert_array_almost_equal(dot(a,x),b)

    def test_simple_sym_complex(self):
        a = [[5,2],[2,4]]
        for b in [[1j,0],
                  [[1j,1j],
                   [0,2]],
                  ]:
            x = solve(a,b,sym_pos=1)
            assert_array_almost_equal(dot(a,x),b)

    def test_simple_complex(self):
        a = array([[5,2],[2j,4]],'D')
        for b in [[1j,0],
                  [[1j,1j],
                   [0,2]],
                  [1,0j],
                  array([1,0],'D'),
                  ]:
            x = solve(a,b)
            assert_array_almost_equal(dot(a,x),b)

    def test_nils_20Feb04(self):
        n = 2
        A = random([n,n])+random([n,n])*1j
        X = zeros((n,n),'D')
        Ainv = inv(A)
        R = identity(n)+identity(n)*0j
        for i in arange(0,n):
            r = R[:,i]
            X[:,i] = solve(A,r)
        assert_array_almost_equal(X,Ainv)

    def test_random(self):
        n = 20
        a = random([n,n])
        for i in range(n):
            a[i,i] = 20*(.1+a[i,i])
        for i in range(4):
            b = random([n,3])
            x = solve(a,b)
            assert_array_almost_equal(dot(a,x),b)

    def test_random_complex(self):
        n = 20
        a = random([n,n]) + 1j * random([n,n])
        for i in range(n):
            a[i,i] = 20*(.1+a[i,i])
        for i in range(2):
            b = random([n,3])
            x = solve(a,b)
            assert_array_almost_equal(dot(a,x),b)

    def test_random_sym(self):
        n = 20
        a = random([n,n])
        for i in range(n):
            a[i,i] = abs(20*(.1+a[i,i]))
            for j in range(i):
                a[i,j] = a[j,i]
        for i in range(4):
            b = random([n])
            x = solve(a,b,sym_pos=1)
            assert_array_almost_equal(dot(a,x),b)

    def test_random_sym_complex(self):
        n = 20
        a = random([n,n])
        # a  = a + 1j*random([n,n]) # XXX: with this the accuracy will be very low
        for i in range(n):
            a[i,i] = abs(20*(.1+a[i,i]))
            for j in range(i):
                a[i,j] = conjugate(a[j,i])
        b = random([n])+2j*random([n])
        for i in range(2):
            x = solve(a,b,sym_pos=1)
            assert_array_almost_equal(dot(a,x),b)

    def test_check_finite(self):
        a = [[1,20],[-30,4]]
        for b in ([[1,0],[0,1]],[1,0],
                  [[2,1],[-30,4]]):
            x = solve(a,b, check_finite=False)
            assert_array_almost_equal(dot(a,x),b)


class TestSolveTriangular(TestCase):

    def test_simple(self):
        """
        solve_triangular on a simple 2x2 matrix.
        """
        A = array([[1,0], [1,2]])
        b = [1, 1]
        sol = solve_triangular(A, b, lower=True)
        assert_array_almost_equal(sol, [1, 0])

        # check that it works also for non-contiguous matrices
        sol = solve_triangular(A.T, b, lower=False)
        assert_array_almost_equal(sol, [.5, .5])

        # and that it gives the same result as trans=1
        sol = solve_triangular(A, b, lower=True, trans=1)
        assert_array_almost_equal(sol, [.5, .5])

        b = identity(2)
        sol = solve_triangular(A, b, lower=True, trans=1)
        assert_array_almost_equal(sol, [[1., -.5], [0, 0.5]])

    def test_simple_complex(self):
        """
        solve_triangular on a simple 2x2 complex matrix
        """
        A = array([[1+1j, 0], [1j, 2]])
        b = identity(2)
        sol = solve_triangular(A, b, lower=True, trans=1)
        assert_array_almost_equal(sol, [[.5-.5j, -.25-.25j], [0, 0.5]])

    def test_check_finite(self):
        """
        solve_triangular on a simple 2x2 matrix.
        """
        A = array([[1,0], [1,2]])
        b = [1, 1]
        sol = solve_triangular(A, b, lower=True, check_finite=False)
        assert_array_almost_equal(sol, [1, 0])


class TestInv(TestCase):
    def setUp(self):
        np.random.seed(1234)

    def test_simple(self):
        a = [[1,2],[3,4]]
        a_inv = inv(a)
        assert_array_almost_equal(dot(a,a_inv), [[1,0],[0,1]])
        a = [[1,2,3],[4,5,6],[7,8,10]]
        a_inv = inv(a)
        assert_array_almost_equal(dot(a,a_inv), [[1,0,0],[0,1,0],[0,0,1]])

    def test_random(self):
        n = 20
        for i in range(4):
            a = random([n,n])
            for i in range(n):
                a[i,i] = 20*(.1+a[i,i])
            a_inv = inv(a)
            assert_array_almost_equal(dot(a,a_inv),
                                      identity(n))

    def test_simple_complex(self):
        a = [[1,2],[3,4j]]
        a_inv = inv(a)
        assert_array_almost_equal(dot(a,a_inv), [[1,0],[0,1]])

    def test_random_complex(self):
        n = 20
        for i in range(4):
            a = random([n,n])+2j*random([n,n])
            for i in range(n):
                a[i,i] = 20*(.1+a[i,i])
            a_inv = inv(a)
            assert_array_almost_equal(dot(a,a_inv),
                                      identity(n))

    def test_check_finite(self):
        a = [[1,2],[3,4]]
        a_inv = inv(a, check_finite=False)
        assert_array_almost_equal(dot(a,a_inv), [[1,0],[0,1]])


class TestDet(TestCase):
    def setUp(self):
        np.random.seed(1234)

    def test_simple(self):
        a = [[1,2],[3,4]]
        a_det = det(a)
        assert_almost_equal(a_det,-2.0)

    def test_simple_complex(self):
        a = [[1,2],[3,4j]]
        a_det = det(a)
        assert_almost_equal(a_det,-6+4j)

    def test_random(self):
        basic_det = linalg.det
        n = 20
        for i in range(4):
            a = random([n,n])
            d1 = det(a)
            d2 = basic_det(a)
            assert_almost_equal(d1,d2)

    def test_random_complex(self):
        basic_det = linalg.det
        n = 20
        for i in range(4):
            a = random([n,n]) + 2j*random([n,n])
            d1 = det(a)
            d2 = basic_det(a)
            assert_allclose(d1, d2, rtol=1e-13)

    def test_check_finite(self):
        a = [[1,2],[3,4]]
        a_det = det(a, check_finite=False)
        assert_almost_equal(a_det,-2.0)


def direct_lstsq(a,b,cmplx=0):
    at = transpose(a)
    if cmplx:
        at = conjugate(at)
    a1 = dot(at, a)
    b1 = dot(at, b)
    return solve(a1, b1)


class TestLstsq(TestCase):
    def setUp(self):
        np.random.seed(1234)

    def test_random_overdet_large(self):
        # bug report: Nils Wagner
        n = 200
        a = random([n,2])
        for i in range(2):
            a[i,i] = 20*(.1+a[i,i])
        b = random([n,3])
        x = lstsq(a,b)[0]
        assert_array_almost_equal(x,direct_lstsq(a,b))

    def test_simple_exact(self):
        a = [[1,20],[-30,4]]
        for b in ([[1,0],[0,1]],[1,0],
                  [[2,1],[-30,4]]):
            x = lstsq(a,b)[0]
            assert_array_almost_equal(dot(a,x),b)

    def test_simple_overdet(self):
        a = [[1,2],[4,5],[3,4]]
        b = [1,2,3]
        x,res,r,s = lstsq(a,b)
        assert_array_almost_equal(x,direct_lstsq(a,b))
        assert_almost_equal((abs(dot(a,x) - b)**2).sum(axis=0), res)

    def test_simple_overdet_complex(self):
        a = [[1+2j,2],[4,5],[3,4]]
        b = [1,2+4j,3]
        x,res,r,s = lstsq(a,b)
        assert_array_almost_equal(x,direct_lstsq(a,b,cmplx=1))
        assert_almost_equal(res, (abs(dot(a,x) - b)**2).sum(axis=0))

    def test_simple_underdet(self):
        a = [[1,2,3],[4,5,6]]
        b = [1,2]
        x,res,r,s = lstsq(a,b)
        # XXX: need independent check
        assert_array_almost_equal(x,[-0.05555556, 0.11111111, 0.27777778])

    def test_random_exact(self):
        n = 20
        a = random([n,n])
        for i in range(n):
            a[i,i] = 20*(.1+a[i,i])
        for i in range(4):
            b = random([n,3])
            x = lstsq(a,b)[0]
            assert_array_almost_equal(dot(a,x),b)

    def test_random_complex_exact(self):
        n = 20
        a = random([n,n]) + 1j * random([n,n])
        for i in range(n):
            a[i,i] = 20*(.1+a[i,i])
        for i in range(2):
            b = random([n,3])
            x = lstsq(a,b)[0]
            assert_array_almost_equal(dot(a,x),b)

    def test_random_overdet(self):
        n = 20
        m = 15
        a = random([n,m])
        for i in range(m):
            a[i,i] = 20*(.1+a[i,i])
        for i in range(4):
            b = random([n,3])
            x,res,r,s = lstsq(a,b)
            assert_(r == m, 'unexpected efficient rank')
            # XXX: check definition of res
            assert_array_almost_equal(x,direct_lstsq(a,b))

    def test_random_complex_overdet(self):
        n = 20
        m = 15
        a = random([n,m]) + 1j * random([n,m])
        for i in range(m):
            a[i,i] = 20*(.1+a[i,i])
        for i in range(2):
            b = random([n,3])
            x,res,r,s = lstsq(a,b)
            assert_(r == m, 'unexpected efficient rank')
            # XXX: check definition of res
            assert_array_almost_equal(x,direct_lstsq(a,b,1))

    def test_check_finite(self):
        a = [[1,20],[-30,4]]
        for b in ([[1,0],[0,1]],[1,0],
                  [[2,1],[-30,4]]):
            x = lstsq(a,b, check_finite=False)[0]
            assert_array_almost_equal(dot(a,x),b)


class TestPinv(TestCase):

    def test_simple_real(self):
        a = array([[1, 2, 3], [4, 5, 6], [7, 8, 10]], dtype=float)
        a_pinv = pinv(a)
        assert_array_almost_equal(dot(a,a_pinv), np.eye(3))
        a_pinv = pinv2(a)
        assert_array_almost_equal(dot(a,a_pinv), np.eye(3))

    def test_simple_complex(self):
        a = (array([[1, 2, 3], [4, 5, 6], [7, 8, 10]],
             dtype=float) + 1j * array([[10, 8, 7], [6, 5, 4], [3, 2, 1]],
                                       dtype=float))
        a_pinv = pinv(a)
        assert_array_almost_equal(dot(a, a_pinv), np.eye(3))
        a_pinv = pinv2(a)
        assert_array_almost_equal(dot(a, a_pinv), np.eye(3))

    def test_simple_singular(self):
        a = array([[1, 2, 3], [4, 5, 6], [7, 8, 9]], dtype=float)
        a_pinv = pinv(a)
        a_pinv2 = pinv2(a)
        assert_array_almost_equal(a_pinv,a_pinv2)

    def test_simple_cols(self):
        a = array([[1, 2, 3], [4, 5, 6]], dtype=float)
        a_pinv = pinv(a)
        a_pinv2 = pinv2(a)
        assert_array_almost_equal(a_pinv,a_pinv2)

    def test_simple_rows(self):
        a = array([[1, 2], [3, 4], [5, 6]], dtype=float)
        a_pinv = pinv(a)
        a_pinv2 = pinv2(a)
        assert_array_almost_equal(a_pinv,a_pinv2)

    def test_check_finite(self):
        a = array([[1,2,3],[4,5,6.],[7,8,10]])
        a_pinv = pinv(a, check_finite=False)
        assert_array_almost_equal(dot(a,a_pinv),[[1,0,0],[0,1,0],[0,0,1]])
        a_pinv = pinv2(a, check_finite=False)
        assert_array_almost_equal(dot(a,a_pinv),[[1,0,0],[0,1,0],[0,0,1]])


class TestPinvSymmetric(TestCase):

    def test_simple_real(self):
        a = array([[1, 2, 3], [4, 5, 6], [7, 8, 10]], dtype=float)
        a = np.dot(a, a.T)
        a_pinv = pinvh(a)
        assert_array_almost_equal(np.dot(a, a_pinv), np.eye(3))

    def test_nonpositive(self):
        a = array([[1, 2, 3], [4, 5, 6], [7, 8, 9]], dtype=float)
        a = np.dot(a, a.T)
        u, s, vt = np.linalg.svd(a)
        s[0] *= -1
        a = np.dot(u * s, vt)  # a is now symmetric non-positive and singular
        a_pinv = pinv2(a)
        a_pinvh = pinvh(a)
        assert_array_almost_equal(a_pinv, a_pinvh)

    def test_simple_complex(self):
        a = (array([[1, 2, 3], [4, 5, 6], [7, 8, 10]],
             dtype=float) + 1j * array([[10, 8, 7], [6, 5, 4], [3, 2, 1]],
                                       dtype=float))
        a = np.dot(a, a.conj().T)
        a_pinv = pinvh(a)
        assert_array_almost_equal(np.dot(a, a_pinv), np.eye(3))


class TestNorm(object):

    def test_types(self):
        for dtype in np.typecodes['AllFloat']:
            x = np.array([1,2,3], dtype=dtype)
            tol = max(1e-15, np.finfo(dtype).eps.real * 20)
            assert_allclose(norm(x), np.sqrt(14), rtol=tol)
            assert_allclose(norm(x, 2), np.sqrt(14), rtol=tol)

        for dtype in np.typecodes['Complex']:
            x = np.array([1j,2j,3j], dtype=dtype)
            tol = max(1e-15, np.finfo(dtype).eps.real * 20)
            assert_allclose(norm(x), np.sqrt(14), rtol=tol)
            assert_allclose(norm(x, 2), np.sqrt(14), rtol=tol)

    def test_overflow(self):
        # unlike numpy's norm, this one is
        # safer on overflow
        a = array([1e20], dtype=float32)
        assert_almost_equal(norm(a), a)

    def test_stable(self):
        # more stable than numpy's norm
        a = array([1e4] + [1]*10000, dtype=float32)
        try:
            # snrm in double precision; we obtain the same as for float64
            # -- large atol needed due to varying blas implementations
            assert_allclose(norm(a) - 1e4, 0.5, atol=1e-2)
        except AssertionError:
            # snrm implemented in single precision, == np.linalg.norm result
            msg = ": Result should equal either 0.0 or 0.5 (depending on " \
                  "implementation of snrm2)."
            assert_almost_equal(norm(a) - 1e4, 0.0, err_msg=msg)

    def test_zero_norm(self):
        assert_equal(norm([1,0,3], 0), 2)
        assert_equal(norm([1,2,3], 0), 3)


class TestOverwrite(object):
    def test_solve(self):
        assert_no_overwrite(solve, [(3,3), (3,)])

    def test_solve_triangular(self):
        assert_no_overwrite(solve_triangular, [(3,3), (3,)])

    def test_solve_banded(self):
        assert_no_overwrite(lambda ab, b: solve_banded((2,1), ab, b),
                            [(4,6), (6,)])

    def test_solveh_banded(self):
        assert_no_overwrite(solveh_banded, [(2,6), (6,)])

    def test_inv(self):
        assert_no_overwrite(inv, [(3,3)])

    def test_det(self):
        assert_no_overwrite(det, [(3,3)])

    def test_lstsq(self):
        assert_no_overwrite(lstsq, [(3,2), (3,)])

    def test_pinv(self):
        assert_no_overwrite(pinv, [(3,3)])

    def test_pinv2(self):
        assert_no_overwrite(pinv2, [(3,3)])

    def test_pinvh(self):
        assert_no_overwrite(pinvh, [(3,3)])


if __name__ == "__main__":
    run_module_suite()
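As a reading aid for the hand-built ab arrays in TestSolveBanded above: solve_banded expects the (l, u) diagonal-ordered form, where ab[u + i - j, j] holds a[i, j]. A small self-checking sketch using the same matrix as test_real:

import numpy as np
from scipy.linalg import solve_banded

a = np.array([[1.0, 20, 0, 0],
              [-30, 4, 6, 0],
              [2, 1, 20, 2],
              [0, -1, 7, 14]])
ab = np.array([[0.0, 20, 6, 2],    # first superdiagonal (u = 1); ab[0, 0] unused
               [1, 4, 20, 14],     # main diagonal
               [-30, 1, 7, 0],     # first subdiagonal
               [2, -1, 0, 0]])     # second subdiagonal (l = 2)
b = np.array([10.0, 0.0, 2.0, 14.0])
x = solve_banded((2, 1), ab, b)
assert np.allclose(a.dot(x), b)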
apache-2.0
-2,806,558,991,388,101,600
30.15894
82
0.447141
false
leiferikb/bitpop
src/third_party/pyftpdlib/src/demo/tls_ftpd.py
4
2359
#!/usr/bin/env python
# $Id: tls_ftpd.py 977 2012-01-22 23:05:09Z g.rodola $

#  pyftpdlib is released under the MIT license, reproduced below:
#  ======================================================================
#  Copyright (C) 2007-2012 Giampaolo Rodola' <g.rodola@gmail.com>
#
#                         All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
#  ======================================================================

"""An RFC-4217 asynchronous FTPS server supporting both SSL and TLS.

Requires PyOpenSSL module (http://pypi.python.org/pypi/pyOpenSSL).
"""

import os

from pyftpdlib import ftpserver
from pyftpdlib.contrib.handlers import TLS_FTPHandler

CERTFILE = os.path.abspath(os.path.join(os.path.dirname(__file__),
                                        "keycert.pem"))


def main():
    authorizer = ftpserver.DummyAuthorizer()
    authorizer.add_user('user', '12345', '.', perm='elradfmw')
    authorizer.add_anonymous('.')
    ftp_handler = TLS_FTPHandler
    ftp_handler.certfile = CERTFILE
    ftp_handler.authorizer = authorizer
    # requires SSL for both control and data channel
    #ftp_handler.tls_control_required = True
    #ftp_handler.tls_data_required = True
    ftpd = ftpserver.FTPServer(('', 8021), ftp_handler)
    ftpd.serve_forever()

if __name__ == '__main__':
    main()
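A hedged client-side counterpart using only the standard library, assuming the demo server above is running locally with its self-signed certificate:

from ftplib import FTP_TLS

ftps = FTP_TLS()
ftps.connect('127.0.0.1', 8021)
ftps.login('user', '12345')   # the credentials the demo's authorizer registers
ftps.prot_p()                 # protect the data channel with TLS as well
ftps.retrlines('LIST')        # prints the directory listing
ftps.quit()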
gpl-3.0
6,446,451,407,849,144,000
38.316667
73
0.673167
false
mith1979/ansible_automation
applied_python/applied_python/lib/python2.7/site-packages/pylint/test/functional/bad_reversed_sequence.py
12
2132
""" Checks that reversed() receive proper argument """ # pylint: disable=missing-docstring # pylint: disable=too-few-public-methods,no-self-use,no-absolute-import from collections import deque __revision__ = 0 class GoodReversed(object): """ Implements __reversed__ """ def __reversed__(self): return [1, 2, 3] class SecondGoodReversed(object): """ Implements __len__ and __getitem__ """ def __len__(self): return 3 def __getitem__(self, index): return index class BadReversed(object): """ implements only len() """ def __len__(self): return 3 class SecondBadReversed(object): """ implements only __getitem__ """ def __getitem__(self, index): return index class ThirdBadReversed(dict): """ dict subclass """ def uninferable(seq): """ This can't be infered at this moment, make sure we don't have a false positive. """ return reversed(seq) def test(path): """ test function """ seq = reversed() # No argument given seq = reversed(None) # [bad-reversed-sequence] seq = reversed([1, 2, 3]) seq = reversed((1, 2, 3)) seq = reversed(set()) # [bad-reversed-sequence] seq = reversed({'a': 1, 'b': 2}) # [bad-reversed-sequence] seq = reversed(iter([1, 2, 3])) # [bad-reversed-sequence] seq = reversed(GoodReversed()) seq = reversed(SecondGoodReversed()) seq = reversed(BadReversed()) # [bad-reversed-sequence] seq = reversed(SecondBadReversed()) # [bad-reversed-sequence] seq = reversed(range(100)) seq = reversed(ThirdBadReversed()) # [bad-reversed-sequence] seq = reversed(lambda: None) # [bad-reversed-sequence] seq = reversed(deque([])) seq = reversed("123") seq = uninferable([1, 2, 3]) seq = reversed(path.split("/")) return seq def test_dict_ancestor_and_reversed(): """Don't emit for subclasses of dict, with __reversed__ implemented.""" from collections import OrderedDict class Child(dict): def __reversed__(self): return reversed(range(10)) seq = reversed(OrderedDict()) return reversed(Child()), seq
apache-2.0
6,344,979,343,260,533,000
29.028169
75
0.625235
false
intel-analytics/analytics-zoo
pyzoo/zoo/chronos/model/Seq2Seq_pytorch.py
1
5221
#
# Copyright 2018 Analytics Zoo Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import torch
import torch.nn as nn

from zoo.automl.model.base_pytorch_model import PytorchBaseModel, \
    PYTORCH_REGRESSION_LOSS_MAP
import numpy as np


class LSTMSeq2Seq(nn.Module):
    def __init__(self,
                 input_feature_num,
                 future_seq_len,
                 output_feature_num,
                 lstm_hidden_dim=128,
                 lstm_layer_num=2,
                 dropout=0.25,
                 teacher_forcing=False):
        super(LSTMSeq2Seq, self).__init__()
        self.lstm_encoder = nn.LSTM(input_size=input_feature_num,
                                    hidden_size=lstm_hidden_dim,
                                    num_layers=lstm_layer_num,
                                    dropout=dropout,
                                    batch_first=True)
        self.lstm_decoder = nn.LSTM(input_size=output_feature_num,
                                    hidden_size=lstm_hidden_dim,
                                    num_layers=lstm_layer_num,
                                    dropout=dropout,
                                    batch_first=True)
        self.fc = nn.Linear(in_features=lstm_hidden_dim,
                            out_features=output_feature_num)
        self.future_seq_len = future_seq_len
        self.output_feature_num = output_feature_num
        self.teacher_forcing = teacher_forcing

    def forward(self, input_seq, target_seq=None):
        x, (hidden, cell) = self.lstm_encoder(input_seq)
        # The first `output_feature_num` input features are assumed to be the
        # target dimensions, so the last observed targets seed the decoder.
        decoder_input = input_seq[:, -1, :self.output_feature_num]
        decoder_input = decoder_input.unsqueeze(1)
        decoder_output = []
        for i in range(self.future_seq_len):
            decoder_output_step, (hidden, cell) = \
                self.lstm_decoder(decoder_input, (hidden, cell))
            out_step = self.fc(decoder_output_step)
            decoder_output.append(out_step)
            if not self.teacher_forcing or target_seq is None:
                # without teacher forcing, feed the prediction back in
                decoder_input = out_step
            else:
                # with teacher forcing, feed the ground-truth step in
                decoder_input = target_seq[:, i:i+1, :]
        decoder_output = torch.cat(decoder_output, dim=1)
        return decoder_output


def model_creator(config):
    return LSTMSeq2Seq(input_feature_num=config["input_feature_num"],
                       output_feature_num=config["output_feature_num"],
                       future_seq_len=config["future_seq_len"],
                       lstm_hidden_dim=config.get("lstm_hidden_dim", 128),
                       lstm_layer_num=config.get("lstm_layer_num", 2),
                       dropout=config.get("dropout", 0.25),
                       teacher_forcing=config.get("teacher_forcing", False))


def optimizer_creator(model, config):
    return getattr(torch.optim, config.get("optim", "Adam"))(model.parameters(),
                                                             lr=config.get("lr", 0.001))


def loss_creator(config):
    loss_name = config.get("loss", "mse")
    if loss_name in PYTORCH_REGRESSION_LOSS_MAP:
        loss_name = PYTORCH_REGRESSION_LOSS_MAP[loss_name]
    else:
        raise RuntimeError(f"Got \"{loss_name}\" for loss name, "
                           f"where \"mse\", \"mae\" or \"huber_loss\" is expected")
    return getattr(torch.nn, loss_name)()


class Seq2SeqPytorch(PytorchBaseModel):
    def __init__(self, check_optional_config=False):
        super().__init__(model_creator=model_creator,
                         optimizer_creator=optimizer_creator,
                         loss_creator=loss_creator,
                         check_optional_config=check_optional_config)

    def _input_check(self, x, y):
        if len(x.shape) < 3:
            raise RuntimeError(f"Invalid data x with {len(x.shape)} dim where 3 dim is required.")
        if len(y.shape) < 3:
            raise RuntimeError(f"Invalid data y with {len(y.shape)} dim where 3 dim is required.")
        if y.shape[-1] > x.shape[-1]:
            raise RuntimeError(f"output dim should not be larger than input dim, "
                               f"while we get {y.shape[-1]} > {x.shape[-1]}.")

    def _forward(self, x, y):
        self._input_check(x, y)
        return self.model(x, y)

    def _get_required_parameters(self):
        return {
            "input_feature_num",
            "future_seq_len",
            "output_feature_num"
        }

    def _get_optional_parameters(self):
        return {
            "lstm_hidden_dim",
            "lstm_layer_num",
            "teacher_forcing"
        } | super()._get_optional_parameters()
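A hedged sketch of a forward pass with random tensors; the (batch, seq_len, features) shapes below are arbitrary and only illustrate the model's conventions:

import torch

model = LSTMSeq2Seq(input_feature_num=4, future_seq_len=5,
                    output_feature_num=2)
x = torch.randn(8, 24, 4)   # 24 past steps, 4 input features
y_hat = model(x)            # no target_seq, so no teacher forcing
print(y_hat.shape)          # torch.Size([8, 5, 2])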
apache-2.0
2,251,469,979,598,284,800
40.110236
98
0.565984
false
yaoandw/joke
Pods/AVOSCloudCrashReporting/Breakpad/src/testing/test/gmock_output_test.py
986
5999
#!/usr/bin/env python
#
# Copyright 2008, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""Tests the text output of Google C++ Mocking Framework.

SYNOPSIS
       gmock_output_test.py --build_dir=BUILD/DIR --gengolden
         # where BUILD/DIR contains the built gmock_output_test_ file.
       gmock_output_test.py --gengolden
       gmock_output_test.py
"""

__author__ = 'wan@google.com (Zhanyong Wan)'

import os
import re
import sys

import gmock_test_utils


# The flag for generating the golden file
GENGOLDEN_FLAG = '--gengolden'

PROGRAM_PATH = gmock_test_utils.GetTestExecutablePath('gmock_output_test_')
COMMAND = [PROGRAM_PATH, '--gtest_stack_trace_depth=0',
           '--gtest_print_time=0']
GOLDEN_NAME = 'gmock_output_test_golden.txt'
GOLDEN_PATH = os.path.join(gmock_test_utils.GetSourceDir(), GOLDEN_NAME)


def ToUnixLineEnding(s):
  """Changes all Windows/Mac line endings in s to UNIX line endings."""

  return s.replace('\r\n', '\n').replace('\r', '\n')


def RemoveReportHeaderAndFooter(output):
  """Removes Google Test result report's header and footer from the output."""

  output = re.sub(r'.*gtest_main.*\n', '', output)
  output = re.sub(r'\[.*\d+ tests.*\n', '', output)
  output = re.sub(r'\[.* test environment .*\n', '', output)
  output = re.sub(r'\[=+\] \d+ tests .* ran.*', '', output)
  output = re.sub(r'.* FAILED TESTS\n', '', output)
  return output


def RemoveLocations(output):
  """Removes all file location info from a Google Test program's output.

  Args:
       output:  the output of a Google Test program.

  Returns:
       output with all file location info (in the form of
       'DIRECTORY/FILE_NAME:LINE_NUMBER: 'or
       'DIRECTORY\\FILE_NAME(LINE_NUMBER): ') replaced by
       'FILE:#: '.
  """

  return re.sub(r'.*[/\\](.+)(\:\d+|\(\d+\))\:', 'FILE:#:', output)


def NormalizeErrorMarker(output):
  """Normalizes the error marker, which is different on Windows vs on Linux."""

  return re.sub(r' error: ', ' Failure\n', output)


def RemoveMemoryAddresses(output):
  """Removes memory addresses from the test output."""

  return re.sub(r'@\w+', '@0x#', output)


def RemoveTestNamesOfLeakedMocks(output):
  """Removes the test names of leaked mock objects from the test output."""

  return re.sub(r'\(used in test .+\) ', '', output)


def GetLeakyTests(output):
  """Returns a list of test names that leak mock objects."""

  # findall() returns a list of all matches of the regex in output.
  # For example, if '(used in test FooTest.Bar)' is in output, the
  # list will contain 'FooTest.Bar'.
  return re.findall(r'\(used in test (.+)\)', output)


def GetNormalizedOutputAndLeakyTests(output):
  """Normalizes the output of gmock_output_test_.

  Args:
    output: The test output.

  Returns:
    A tuple (the normalized test output, the list of test names that have
    leaked mocks).
  """

  output = ToUnixLineEnding(output)
  output = RemoveReportHeaderAndFooter(output)
  output = NormalizeErrorMarker(output)
  output = RemoveLocations(output)
  output = RemoveMemoryAddresses(output)
  return (RemoveTestNamesOfLeakedMocks(output), GetLeakyTests(output))


def GetShellCommandOutput(cmd):
  """Runs a command in a sub-process, and returns its STDOUT in a string."""

  return gmock_test_utils.Subprocess(cmd, capture_stderr=False).output


def GetNormalizedCommandOutputAndLeakyTests(cmd):
  """Runs a command and returns its normalized output and a list of leaky
  tests.

  Args:
    cmd:  the shell command.
  """

  # Disables exception pop-ups on Windows.
  os.environ['GTEST_CATCH_EXCEPTIONS'] = '1'
  return GetNormalizedOutputAndLeakyTests(GetShellCommandOutput(cmd))


class GMockOutputTest(gmock_test_utils.TestCase):
  def testOutput(self):
    (output, leaky_tests) = GetNormalizedCommandOutputAndLeakyTests(COMMAND)
    golden_file = open(GOLDEN_PATH, 'rb')
    golden = golden_file.read()
    golden_file.close()

    # The normalized output should match the golden file.
    self.assertEquals(golden, output)

    # The raw output should contain 2 leaked mock object errors for
    # test GMockOutputTest.CatchesLeakedMocks.
    self.assertEquals(['GMockOutputTest.CatchesLeakedMocks',
                       'GMockOutputTest.CatchesLeakedMocks'],
                      leaky_tests)


if __name__ == '__main__':
  if sys.argv[1:] == [GENGOLDEN_FLAG]:
    (output, _) = GetNormalizedCommandOutputAndLeakyTests(COMMAND)
    golden_file = open(GOLDEN_PATH, 'wb')
    golden_file.write(output)
    golden_file.close()
  else:
    gmock_test_utils.Main()
mit
-4474121058681443000
32.327778
80
0.710118
false
slarosa/QGIS
python/plugins/sextante/tests/SextanteToolsTest.py
3
3013
# -*- coding: utf-8 -*- """ *************************************************************************** SextanteToolsTest.py --------------------- Date : April 2013 Copyright : (C) 2013 by Victor Olaya Email : volayaf at gmail dot com *************************************************************************** * * * This program is free software; you can redistribute it and/or modify * * it under the terms of the GNU General Public License as published by * * the Free Software Foundation; either version 2 of the License, or * * (at your option) any later version. * * * *************************************************************************** """ __author__ = 'Victor Olaya' __date__ = 'April 2013' __copyright__ = '(C) 2013, Victor Olaya' # This will get replaced with a git SHA1 when you do a git archive __revision__ = '$Format:%H$' import sextante import unittest from sextante.tests.TestData import points, points2, polygons, polygons2, lines, union,\ table, polygonsGeoJson, raster from sextante.core import Sextante from sextante.tools.vector import values from sextante.tools.general import getfromname class SextanteToolsTest(unittest.TestCase): '''tests the method imported when doing an "import sextante", and also in sextante.tools. They are mostly convenience tools''' def test_getobject(self): layer = sextante.getobject(points()); self.assertIsNotNone(layer) layer = sextante.getobject("points"); self.assertIsNotNone(layer) def test_runandload(self): sextante.runandload("qgis:countpointsinpolygon",polygons(),points(),"NUMPOINTS", None) layer = getfromname("Result") self.assertIsNotNone(layer) def test_featuresWithoutSelection(self): layer = sextante.getobject(points()) features = sextante.getfeatures(layer) self.assertEqual(12, len(features)) def test_featuresWithSelection(self): layer = sextante.getobject(points()) feature = layer.getFeatures().next() selected = [feature.id()] layer.setSelectedFeatures(selected) features = sextante.getfeatures(layer) self.assertEqual(1, len(features)) layer.setSelectedFeatures([]) def test_attributeValues(self): layer = sextante.getobject(points()) attributeValues = values(layer, "ID") i = 1 for value in attributeValues['ID']: self.assertEqual(int(i), int(value)) i+=1 self.assertEquals(13,i) def test_extent(self): pass def suite(): suite = unittest.makeSuite(SextanteToolsTest, 'test') return suite def runtests(): result = unittest.TestResult() testsuite = suite() testsuite.run(result) return result
gpl-2.0
6003430409727838000
34.447059
94
0.563226
false
largelymfs/IRModel
src/models/Tfidf.py
1
2000
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author: largelymfs
# @Date:   2014-12-23 20:06:10
# @Last Modified by:   largelymfs
# @Last Modified time: 2014-12-23 23:18:08

import numpy as np
import numpy.linalg as LA

class TFIDF:
	def __init__(self, filename):
		# build the vocabulary: word -> integer id
		self.vocab = {}
		id = 0
		doc = 0
		with open(filename) as fin:
			self.doc = [l.strip().split() for l in fin]
			for words in self.doc:
				for word in words:
					if word not in self.vocab:
						self.vocab[word] = id
						id += 1
				doc += 1
		self.word_number = id
		self.doc_number = doc
		self.matrix = np.zeros((self.doc_number, self.word_number))
		# document frequency, then idf = log(N / df)
		self.idf = {}
		for words in self.doc:
			now = set(words)
			for word in now:
				word_id = self.vocab[word]
				if word_id not in self.idf:
					self.idf[word_id] = 1
				else:
					self.idf[word_id] += 1
		for k in self.idf.keys():
			self.idf[k] = np.log((float(self.doc_number) / float(self.idf[k])))
		# term frequency, normalized by document length
		id = 0
		for words in self.doc:
			total = 0.0
			for word in words:
				self.matrix[id][self.vocab[word]] += 1.0
				total += 1.0
			if total == 0:
				# empty document: keep its (all-zero) row, but still advance
				# the row index so rows stay aligned with self.doc
				print words
				id += 1
				continue
			self.matrix[id] = self.matrix[id] * (1. / total)
			id += 1
		# scale each column (word) by its idf
		self.matrix = self.matrix.T
		for i in range(self.word_number):
			self.matrix[i] = self.matrix[i] * (self.idf[i])
		self.matrix = self.matrix.T

	def get_score(self, v1, v2):
		# cosine similarity
		return np.dot(v1, v2) / (LA.norm(v1) * LA.norm(v2))

	def querry(self, q):
		vector = np.zeros(self.word_number)
		total = 0.0
		for w in q:
			if w in self.vocab:
				vector[self.vocab[w]] += 1.0
				total += 1.0
		if total == 0:
			# none of the query words are known: avoid dividing by zero
			print 'no known words in the query'
			return
		vector = vector * (1. / total)
		for i in range(self.word_number):
			vector[i] *= (self.idf[i])
		# rank all documents by cosine similarity, keep the top 10
		result = [(i, self.get_score(self.matrix[i], vector)) for i in range(self.doc_number)]
		result = sorted(result, cmp=lambda x, y: -cmp(x[1], y[1]))[:10]
		for (id, score) in result:
			print id, "".join(self.doc[id])

if __name__ == '__main__':
	model = TFIDF("./../../data/demo.txt.out")
	model.querry(["进球", "晋级", "胜利"])
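
# Illustrative check, not from the original file: the weights built in
# __init__ follow the classic scheme
#     tfidf(t, d) = (count(t, d) / len(d)) * log(N / df(t))
# and querry() ranks documents by cosine similarity. A tiny hand check with
# invented numbers (2 docs, term in 1 of them, 4 tokens in the doc):
def _demo_weight():
	tf = 1 / 4.0              # 0.25
	idf = np.log(2 / 1.0)     # ~0.6931
	return tf * idf           # ~0.1733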
mit
-5200241484263836000
25.878378
88
0.606137
false
vprnet/traces
app/index.py
1
1098
#!/usr/local/bin/python2.7 from flask import Flask import sys from flask_frozen import Freezer from upload_s3 import set_metadata from config import AWS_DIRECTORY from query import get_slugs app = Flask(__name__) app.config.from_object('config') from views import * # Serving from s3 leads to some complications in how static files are served if len(sys.argv) > 1 and sys.argv[1] == 'build': PROJECT_ROOT = '/' + AWS_DIRECTORY else: PROJECT_ROOT = '/' class WebFactionMiddleware(object): def __init__(self, app): self.app = app def __call__(self, environ, start_response): environ['SCRIPT_NAME'] = PROJECT_ROOT return self.app(environ, start_response) app.wsgi_app = WebFactionMiddleware(app.wsgi_app) freezer = Freezer(app) @freezer.register_generator def post(): slugs, links = get_slugs(title=False) for i in slugs: yield {'title': i} if __name__ == '__main__': if len(sys.argv) > 1 and sys.argv[1] == 'build': app.debug = True freezer.freeze() set_metadata() else: app.run(debug=True)
apache-2.0
-6234646654411311000
21.875
76
0.652095
false
jorik041/scikit-learn
sklearn/linear_model/randomized_l1.py
95
23365
""" Randomized Lasso/Logistic: feature selection based on Lasso and sparse Logistic Regression """ # Author: Gael Varoquaux, Alexandre Gramfort # # License: BSD 3 clause import itertools from abc import ABCMeta, abstractmethod import warnings import numpy as np from scipy.sparse import issparse from scipy import sparse from scipy.interpolate import interp1d from .base import center_data from ..base import BaseEstimator, TransformerMixin from ..externals import six from ..externals.joblib import Memory, Parallel, delayed from ..utils import (as_float_array, check_random_state, check_X_y, check_array, safe_mask, ConvergenceWarning) from ..utils.validation import check_is_fitted from .least_angle import lars_path, LassoLarsIC from .logistic import LogisticRegression ############################################################################### # Randomized linear model: feature selection def _resample_model(estimator_func, X, y, scaling=.5, n_resampling=200, n_jobs=1, verbose=False, pre_dispatch='3*n_jobs', random_state=None, sample_fraction=.75, **params): random_state = check_random_state(random_state) # We are generating 1 - weights, and not weights n_samples, n_features = X.shape if not (0 < scaling < 1): raise ValueError( "'scaling' should be between 0 and 1. Got %r instead." % scaling) scaling = 1. - scaling scores_ = 0.0 for active_set in Parallel(n_jobs=n_jobs, verbose=verbose, pre_dispatch=pre_dispatch)( delayed(estimator_func)( X, y, weights=scaling * random_state.random_integers( 0, 1, size=(n_features,)), mask=(random_state.rand(n_samples) < sample_fraction), verbose=max(0, verbose - 1), **params) for _ in range(n_resampling)): scores_ += active_set scores_ /= n_resampling return scores_ class BaseRandomizedLinearModel(six.with_metaclass(ABCMeta, BaseEstimator, TransformerMixin)): """Base class to implement randomized linear models for feature selection This implements the strategy by Meinshausen and Buhlman: stability selection with randomized sampling, and random re-weighting of the penalty. """ @abstractmethod def __init__(self): pass _center_data = staticmethod(center_data) def fit(self, X, y): """Fit the model using X, y as training data. Parameters ---------- X : array-like, sparse matrix shape = [n_samples, n_features] Training data. y : array-like, shape = [n_samples] Target values. Returns ------- self : object Returns an instance of self. 
""" X, y = check_X_y(X, y, ['csr', 'csc', 'coo'], y_numeric=True) X = as_float_array(X, copy=False) n_samples, n_features = X.shape X, y, X_mean, y_mean, X_std = self._center_data(X, y, self.fit_intercept, self.normalize) estimator_func, params = self._make_estimator_and_params(X, y) memory = self.memory if isinstance(memory, six.string_types): memory = Memory(cachedir=memory) scores_ = memory.cache( _resample_model, ignore=['verbose', 'n_jobs', 'pre_dispatch'] )( estimator_func, X, y, scaling=self.scaling, n_resampling=self.n_resampling, n_jobs=self.n_jobs, verbose=self.verbose, pre_dispatch=self.pre_dispatch, random_state=self.random_state, sample_fraction=self.sample_fraction, **params) if scores_.ndim == 1: scores_ = scores_[:, np.newaxis] self.all_scores_ = scores_ self.scores_ = np.max(self.all_scores_, axis=1) return self def _make_estimator_and_params(self, X, y): """Return the parameters passed to the estimator""" raise NotImplementedError def get_support(self, indices=False): """Return a mask, or list, of the features/indices selected.""" check_is_fitted(self, 'scores_') mask = self.scores_ > self.selection_threshold return mask if not indices else np.where(mask)[0] # XXX: the two function below are copy/pasted from feature_selection, # Should we add an intermediate base class? def transform(self, X): """Transform a new matrix using the selected features""" mask = self.get_support() X = check_array(X) if len(mask) != X.shape[1]: raise ValueError("X has a different shape than during fitting.") return check_array(X)[:, safe_mask(X, mask)] def inverse_transform(self, X): """Transform a new matrix using the selected features""" support = self.get_support() if X.ndim == 1: X = X[None, :] Xt = np.zeros((X.shape[0], support.size)) Xt[:, support] = X return Xt ############################################################################### # Randomized lasso: regression settings def _randomized_lasso(X, y, weights, mask, alpha=1., verbose=False, precompute=False, eps=np.finfo(np.float).eps, max_iter=500): X = X[safe_mask(X, mask)] y = y[mask] # Center X and y to avoid fit the intercept X -= X.mean(axis=0) y -= y.mean() alpha = np.atleast_1d(np.asarray(alpha, dtype=np.float)) X = (1 - weights) * X with warnings.catch_warnings(): warnings.simplefilter('ignore', ConvergenceWarning) alphas_, _, coef_ = lars_path(X, y, Gram=precompute, copy_X=False, copy_Gram=False, alpha_min=np.min(alpha), method='lasso', verbose=verbose, max_iter=max_iter, eps=eps) if len(alpha) > 1: if len(alphas_) > 1: # np.min(alpha) < alpha_min interpolator = interp1d(alphas_[::-1], coef_[:, ::-1], bounds_error=False, fill_value=0.) scores = (interpolator(alpha) != 0.0) else: scores = np.zeros((X.shape[1], len(alpha)), dtype=np.bool) else: scores = coef_[:, -1] != 0.0 return scores class RandomizedLasso(BaseRandomizedLinearModel): """Randomized Lasso. Randomized Lasso works by resampling the train data and computing a Lasso on each resampling. In short, the features selected more often are good features. It is also known as stability selection. Read more in the :ref:`User Guide <randomized_l1>`. Parameters ---------- alpha : float, 'aic', or 'bic', optional The regularization parameter alpha parameter in the Lasso. Warning: this is not the alpha parameter in the stability selection article which is scaling. scaling : float, optional The alpha parameter in the stability selection article used to randomly scale the features. Should be between 0 and 1. 
sample_fraction : float, optional The fraction of samples to be used in each randomized design. Should be between 0 and 1. If 1, all samples are used. n_resampling : int, optional Number of randomized models. selection_threshold: float, optional The score above which features should be selected. fit_intercept : boolean, optional whether to calculate the intercept for this model. If set to false, no intercept will be used in calculations (e.g. data is expected to be already centered). verbose : boolean or integer, optional Sets the verbosity amount normalize : boolean, optional, default True If True, the regressors X will be normalized before regression. precompute : True | False | 'auto' Whether to use a precomputed Gram matrix to speed up calculations. If set to 'auto' let us decide. The Gram matrix can also be passed as argument. max_iter : integer, optional Maximum number of iterations to perform in the Lars algorithm. eps : float, optional The machine-precision regularization in the computation of the Cholesky diagonal factors. Increase this for very ill-conditioned systems. Unlike the 'tol' parameter in some iterative optimization-based algorithms, this parameter does not control the tolerance of the optimization. n_jobs : integer, optional Number of CPUs to use during the resampling. If '-1', use all the CPUs random_state : int, RandomState instance or None, optional (default=None) If int, random_state is the seed used by the random number generator; If RandomState instance, random_state is the random number generator; If None, the random number generator is the RandomState instance used by `np.random`. pre_dispatch : int, or string, optional Controls the number of jobs that get dispatched during parallel execution. Reducing this number can be useful to avoid an explosion of memory consumption when more jobs get dispatched than CPUs can process. This parameter can be: - None, in which case all the jobs are immediately created and spawned. Use this for lightweight and fast-running jobs, to avoid delays due to on-demand spawning of the jobs - An int, giving the exact number of total jobs that are spawned - A string, giving an expression as a function of n_jobs, as in '2*n_jobs' memory : Instance of joblib.Memory or string Used for internal caching. By default, no caching is done. If a string is given, it is the path to the caching directory. Attributes ---------- scores_ : array, shape = [n_features] Feature scores between 0 and 1. all_scores_ : array, shape = [n_features, n_reg_parameter] Feature scores between 0 and 1 for all values of the regularization \ parameter. The reference article suggests ``scores_`` is the max of \ ``all_scores_``. Examples -------- >>> from sklearn.linear_model import RandomizedLasso >>> randomized_lasso = RandomizedLasso() Notes ----- See examples/linear_model/plot_sparse_recovery.py for an example. 
References ---------- Stability selection Nicolai Meinshausen, Peter Buhlmann Journal of the Royal Statistical Society: Series B Volume 72, Issue 4, pages 417-473, September 2010 DOI: 10.1111/j.1467-9868.2010.00740.x See also -------- RandomizedLogisticRegression, LogisticRegression """ def __init__(self, alpha='aic', scaling=.5, sample_fraction=.75, n_resampling=200, selection_threshold=.25, fit_intercept=True, verbose=False, normalize=True, precompute='auto', max_iter=500, eps=np.finfo(np.float).eps, random_state=None, n_jobs=1, pre_dispatch='3*n_jobs', memory=Memory(cachedir=None, verbose=0)): self.alpha = alpha self.scaling = scaling self.sample_fraction = sample_fraction self.n_resampling = n_resampling self.fit_intercept = fit_intercept self.max_iter = max_iter self.verbose = verbose self.normalize = normalize self.precompute = precompute self.eps = eps self.random_state = random_state self.n_jobs = n_jobs self.selection_threshold = selection_threshold self.pre_dispatch = pre_dispatch self.memory = memory def _make_estimator_and_params(self, X, y): assert self.precompute in (True, False, None, 'auto') alpha = self.alpha if alpha in ('aic', 'bic'): model = LassoLarsIC(precompute=self.precompute, criterion=self.alpha, max_iter=self.max_iter, eps=self.eps) model.fit(X, y) self.alpha_ = alpha = model.alpha_ return _randomized_lasso, dict(alpha=alpha, max_iter=self.max_iter, eps=self.eps, precompute=self.precompute) ############################################################################### # Randomized logistic: classification settings def _randomized_logistic(X, y, weights, mask, C=1., verbose=False, fit_intercept=True, tol=1e-3): X = X[safe_mask(X, mask)] y = y[mask] if issparse(X): size = len(weights) weight_dia = sparse.dia_matrix((1 - weights, 0), (size, size)) X = X * weight_dia else: X *= (1 - weights) C = np.atleast_1d(np.asarray(C, dtype=np.float)) scores = np.zeros((X.shape[1], len(C)), dtype=np.bool) for this_C, this_scores in zip(C, scores.T): # XXX : would be great to do it with a warm_start ... clf = LogisticRegression(C=this_C, tol=tol, penalty='l1', dual=False, fit_intercept=fit_intercept) clf.fit(X, y) this_scores[:] = np.any( np.abs(clf.coef_) > 10 * np.finfo(np.float).eps, axis=0) return scores class RandomizedLogisticRegression(BaseRandomizedLinearModel): """Randomized Logistic Regression Randomized Regression works by resampling the train data and computing a LogisticRegression on each resampling. In short, the features selected more often are good features. It is also known as stability selection. Read more in the :ref:`User Guide <randomized_l1>`. Parameters ---------- C : float, optional, default=1 The regularization parameter C in the LogisticRegression. scaling : float, optional, default=0.5 The alpha parameter in the stability selection article used to randomly scale the features. Should be between 0 and 1. sample_fraction : float, optional, default=0.75 The fraction of samples to be used in each randomized design. Should be between 0 and 1. If 1, all samples are used. n_resampling : int, optional, default=200 Number of randomized models. selection_threshold : float, optional, default=0.25 The score above which features should be selected. fit_intercept : boolean, optional, default=True whether to calculate the intercept for this model. If set to false, no intercept will be used in calculations (e.g. data is expected to be already centered). 
verbose : boolean or integer, optional Sets the verbosity amount normalize : boolean, optional, default=True If True, the regressors X will be normalized before regression. tol : float, optional, default=1e-3 tolerance for stopping criteria of LogisticRegression n_jobs : integer, optional Number of CPUs to use during the resampling. If '-1', use all the CPUs random_state : int, RandomState instance or None, optional (default=None) If int, random_state is the seed used by the random number generator; If RandomState instance, random_state is the random number generator; If None, the random number generator is the RandomState instance used by `np.random`. pre_dispatch : int, or string, optional Controls the number of jobs that get dispatched during parallel execution. Reducing this number can be useful to avoid an explosion of memory consumption when more jobs get dispatched than CPUs can process. This parameter can be: - None, in which case all the jobs are immediately created and spawned. Use this for lightweight and fast-running jobs, to avoid delays due to on-demand spawning of the jobs - An int, giving the exact number of total jobs that are spawned - A string, giving an expression as a function of n_jobs, as in '2*n_jobs' memory : Instance of joblib.Memory or string Used for internal caching. By default, no caching is done. If a string is given, it is the path to the caching directory. Attributes ---------- scores_ : array, shape = [n_features] Feature scores between 0 and 1. all_scores_ : array, shape = [n_features, n_reg_parameter] Feature scores between 0 and 1 for all values of the regularization \ parameter. The reference article suggests ``scores_`` is the max \ of ``all_scores_``. Examples -------- >>> from sklearn.linear_model import RandomizedLogisticRegression >>> randomized_logistic = RandomizedLogisticRegression() Notes ----- See examples/linear_model/plot_sparse_recovery.py for an example. 
References ---------- Stability selection Nicolai Meinshausen, Peter Buhlmann Journal of the Royal Statistical Society: Series B Volume 72, Issue 4, pages 417-473, September 2010 DOI: 10.1111/j.1467-9868.2010.00740.x See also -------- RandomizedLasso, Lasso, ElasticNet """ def __init__(self, C=1, scaling=.5, sample_fraction=.75, n_resampling=200, selection_threshold=.25, tol=1e-3, fit_intercept=True, verbose=False, normalize=True, random_state=None, n_jobs=1, pre_dispatch='3*n_jobs', memory=Memory(cachedir=None, verbose=0)): self.C = C self.scaling = scaling self.sample_fraction = sample_fraction self.n_resampling = n_resampling self.fit_intercept = fit_intercept self.verbose = verbose self.normalize = normalize self.tol = tol self.random_state = random_state self.n_jobs = n_jobs self.selection_threshold = selection_threshold self.pre_dispatch = pre_dispatch self.memory = memory def _make_estimator_and_params(self, X, y): params = dict(C=self.C, tol=self.tol, fit_intercept=self.fit_intercept) return _randomized_logistic, params def _center_data(self, X, y, fit_intercept, normalize=False): """Center the data in X but not in y""" X, _, Xmean, _, X_std = center_data(X, y, fit_intercept, normalize=normalize) return X, y, Xmean, y, X_std ############################################################################### # Stability paths def _lasso_stability_path(X, y, mask, weights, eps): "Inner loop of lasso_stability_path" X = X * weights[np.newaxis, :] X = X[safe_mask(X, mask), :] y = y[mask] alpha_max = np.max(np.abs(np.dot(X.T, y))) / X.shape[0] alpha_min = eps * alpha_max # set for early stopping in path with warnings.catch_warnings(): warnings.simplefilter('ignore', ConvergenceWarning) alphas, _, coefs = lars_path(X, y, method='lasso', verbose=False, alpha_min=alpha_min) # Scale alpha by alpha_max alphas /= alphas[0] # Sort alphas in assending order alphas = alphas[::-1] coefs = coefs[:, ::-1] # Get rid of the alphas that are too small mask = alphas >= eps # We also want to keep the first one: it should be close to the OLS # solution mask[0] = True alphas = alphas[mask] coefs = coefs[:, mask] return alphas, coefs def lasso_stability_path(X, y, scaling=0.5, random_state=None, n_resampling=200, n_grid=100, sample_fraction=0.75, eps=4 * np.finfo(np.float).eps, n_jobs=1, verbose=False): """Stabiliy path based on randomized Lasso estimates Read more in the :ref:`User Guide <randomized_l1>`. Parameters ---------- X : array-like, shape = [n_samples, n_features] training data. y : array-like, shape = [n_samples] target values. scaling : float, optional, default=0.5 The alpha parameter in the stability selection article used to randomly scale the features. Should be between 0 and 1. random_state : integer or numpy.random.RandomState, optional The generator used to randomize the design. n_resampling : int, optional, default=200 Number of randomized models. n_grid : int, optional, default=100 Number of grid points. The path is linearly reinterpolated on a grid between 0 and 1 before computing the scores. sample_fraction : float, optional, default=0.75 The fraction of samples to be used in each randomized design. Should be between 0 and 1. If 1, all samples are used. eps : float, optional Smallest value of alpha / alpha_max considered n_jobs : integer, optional Number of CPUs to use during the resampling. 
If '-1', use all the CPUs verbose : boolean or integer, optional Sets the verbosity amount Returns ------- alphas_grid : array, shape ~ [n_grid] The grid points between 0 and 1: alpha/alpha_max scores_path : array, shape = [n_features, n_grid] The scores for each feature along the path. Notes ----- See examples/linear_model/plot_sparse_recovery.py for an example. """ rng = check_random_state(random_state) if not (0 < scaling < 1): raise ValueError("Parameter 'scaling' should be between 0 and 1." " Got %r instead." % scaling) n_samples, n_features = X.shape paths = Parallel(n_jobs=n_jobs, verbose=verbose)( delayed(_lasso_stability_path)( X, y, mask=rng.rand(n_samples) < sample_fraction, weights=1. - scaling * rng.random_integers(0, 1, size=(n_features,)), eps=eps) for k in range(n_resampling)) all_alphas = sorted(list(set(itertools.chain(*[p[0] for p in paths])))) # Take approximately n_grid values stride = int(max(1, int(len(all_alphas) / float(n_grid)))) all_alphas = all_alphas[::stride] if not all_alphas[-1] == 1: all_alphas.append(1.) all_alphas = np.array(all_alphas) scores_path = np.zeros((n_features, len(all_alphas))) for alphas, coefs in paths: if alphas[0] != 0: alphas = np.r_[0, alphas] coefs = np.c_[np.ones((n_features, 1)), coefs] if alphas[-1] != all_alphas[-1]: alphas = np.r_[alphas, all_alphas[-1]] coefs = np.c_[coefs, np.zeros((n_features, 1))] scores_path += (interp1d(alphas, coefs, kind='nearest', bounds_error=False, fill_value=0, axis=-1)(all_alphas) != 0) scores_path /= n_resampling return all_alphas, scores_path
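
# Illustrative sketch, not from the original module: minimal end-to-end use of
# the stability-selection estimator defined above. The data shape, alpha,
# n_resampling and the seed are invented for illustration.
def _stability_selection_demo():
    rng = np.random.RandomState(0)
    X = rng.randn(50, 10)
    y = X[:, 0] + 0.1 * rng.randn(50)   # only feature 0 is informative
    clf = RandomizedLasso(alpha=0.1, n_resampling=50, random_state=0)
    clf.fit(X, y)
    # boolean mask of features whose selection frequency beats the threshold
    return clf.scores_ > clf.selection_threshold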
bsd-3-clause
3757223006510591000
36.028526
79
0.599486
false
psb-seclab/CTFStuff
hacknight/HN_Lab_1/python_101.py
2
6598
# -*- coding: utf-8 -*- #import my_math from my_math import factorial import os #import my_math def test_db(): return def test_network(): return def test_exception(): # opening file failed try: fi = open("testfile", 'r') fh = open("testfile", "w") fh.write("This is my test file for exception handling!!") except IOError: print "Error: can\'t find file or read data" else: print fi.read() print "Written content in the file successfully" fh.close() fi.close() return def test_module(): print '10! = %d'%(factorial(10)) return class Employee: 'Common base class for all employees' empCount = 0 def __init__(self, name, salary): self.name = name self.salary = salary Employee.empCount += 1 def displayCount(self): print "Total Employee %d" % Employee.empCount def displayEmployee(self): print "Name : ", self.name, ", Salary: ", self.salary def test_class(): "This would create first object of Employee class" emp1 = Employee("Zara", 2000) "This would create second object of Employee class" emp2 = Employee("Manni", 5000) emp1.displayEmployee() emp2.displayEmployee() print "Total Employee %d" % Employee.empCount print emp1.empCount # inheritence # overiding # operator overloading return def fib_1(n): """Print a Fibonacci series up to n.""" a, b = 0, 1 while b < n: print b a, b = b, a+b return cnt = 0 fib_tmp = {}# make fib faster def fib_2(n): """return the nth fib num""" global cnt cnt += 1 if n == 0: return 0 elif n == 1: return 1 elif n > 1: return fib_2(n-1) + fib_2(n-2) else: print 'invalid input' return None def simple_func(a, b, c): return a + b + c**3 def test_function(): print simple_func(1, 2, 3) fib_1(100) print fib_2(5) print 'fib_2 is called %d times'%(cnt) return def test_generator(): l1 = range(100) print l1 # the first 100 odd numbers l2 = [2*x+1 for x in range(100)] print l2 # gen a dict # gen a ascii code table dict1 = {x:chr(x) for x in range(128)} print dict1 # gen a 10*10 array l3 = [[10*x+y for y in range(10)] for x in range(10)] print l3 # cross product vec1 = [2, 4, 6] vec2 = [1, 3, 5] cross_product = [x*y for x in vec1 for y in vec2] print cross_product # using if vec_if = [x for x in l1 if x % 7 == 0] print vec_if print len(vec_if) return def test_file_io(): # write to a file fo = open('testfile', 'wt') for x in range(20): fo.write(str(x) + ',') fo.close() # read from a file fi = open('testfile', 'rt') # read as much as possible at one time! contents = fi.read() print contents list_num = contents.split(',') # read a line at a time # reset file obj position fi.seek(0) for line in fi: print line fi.seek(10) print fi.read(10) # tell the current position print fi.tell() fi.close() # create a dir import os os.mkdir("test_dir") # return def test_io(): # print function a = ['hello', 'this is fun', 'I love wargames'] for item in a: print item, len(item) # get input from keyboard # raw_input, get a line of input from keyboard as string x = str(raw_input("enter something:")) print x # input x = input("input your python expression: ") print x return def test_loops(): # for loops, break, continue # problem: check prime n = 23 prime = True for x in range(2, n): if n % x == 0: print '%d is not a prime since it has a factor %d'%(n, x) prime = False break if prime: print '%d is a prime'%(n) # using while loop do the same prime = True x = 2 while x < n: if n % x == 0: print '%d is not a prime since it has a factor %d'%(n, x) prime = False break x += 1 if prime: print '%d is a prime'%(n) # do while? 
    n = 1
    while True:
        if n < 10:
            print n
            n += 1
        else:
            break
    return

def test_control_flow():
    # get input from keyboard
    #x = int(raw_input("Please enter #:"))
    x = 5
    if x < 0:
        x = 0
        print 'Negative changed to zero'
    elif x == 0:
        print 'Zero'
    elif x == 1:
        print 'Single'
    else:
        print 'More'
    # no case statement
    return

def test_dictionary():
    # create a dictionary
    dict_1 = {'Alice': '2341', 'Beth': '9102', 'Cecil': '3258'}
    print dict_1
    dict_2 = {x:x*'a' for x in range(10)}
    print dict_2
    # add a new entry
    dict_1['newguy'] = '2323'
    print dict_1
    # delete an entry
    del dict_1['Beth']
    print dict_1
    # check for existence
    print dict_1.has_key('Beth')
    print 'Beth' in dict_1
    print 'Alice' in dict_1
    # update dict
    print dict_1['Alice']
    dict_1['Alice'] = '323232'
    print dict_1['Alice']
    # no duplicates!
    # make a copy
    copy_dict_1 = dict_1.copy()
    print copy_dict_1
    # clear the dict
    dict_1.clear()
    print dict_1
    return

def test_list():
    # items are ordered
    # items in list can be heterogeneous
    a = ['spam', 'eggs', 100, 1234, 2*2]
    b = [1, 2 ,3, 4]
    c = range(12)
    print a
    print b
    print c
    # access list elements
    print a[0]
    # note: this loop does NOT modify b (num is a copy of each item)
    for num in b:
        num += 1
    print b
    # ... but assigning through an index does
    for i in range(len(b)):
        b[i] += 1
    print b
    # loop through a list
    for item in a:
        print item
    # add a new item to a list
    b.append(6)
    print b
    # delete an item based on location
    del b[0]
    del b[-1]
    print b
    # check membership
    if 'spam' in a:
        print 'got it'
    else:
        print 'spam is not in list a'
    # lists concatenation
    d = a + b + c
    print d
    # list repetition
    print 2*a
    # max, sort and reverse
    print max(a)
    a.sort()
    print a
    a.reverse()
    print a
    # index function
    print a.index('spam')
    return

def test_str():
    """play with string"""
    str_1 = "hacking is fun"
    print str_1 + 16*'a'
    print str_1 + 16*'\x61'
    print len(str_1)
    # take a substring
    # str[left:right]
    print str_1[:]
    print str_1[:5]
    # strings are immutable: cannot modify a char in place
    #str_1[0] = 'H'
    # print the last char
    print str_1[-1]
    # check a string's hex
    print str_1.encode('hex')
    # copy a string
    str_2 = str_1
    str_3 = str_1[:-1]
    print id(str_2) == id(str_1)
    print id(str_3) == id(str_1)
    print str_1
    print str_2
    return

def test_var():
    a = 5
    b = 1.2
    c = 0xdeadbeef
    d = u'\xde\xad\xbe\xef'
    e = 8 * '\x00'
    f = 'abcd'
    ff = '\x61\x62\x63\x64'
    kk = u'你好'
    g = True
    h = False
    j = 0x61
    print not g
    print a, b, c
    print a+b
    print type(c)
    print type(a)
    print type(d)
    print hex(c)
    print f, ff
    print chr(j)
    print kk.encode('utf-8')
    print d.encode('utf-8')
    # the id function
    # global var
    return

if __name__ == "__main__":
    #test_var()
    test_str()
    #test_list()
    #test_dictionary()
    #test_control_flow()
    #test_loops()
    #test_function()
    #test_generator()
    #test_module()
    #test_io()
    #test_file_io()
    #test_class()
    #test_exception()
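
# Illustrative sketch, not from the original lab: the unused fib_tmp dict near
# fib_2 hints at memoization ("make fib faster"); one way to finish that idea:
def fib_memo(n, _cache={0: 0, 1: 1}):
    if n not in _cache:
        _cache[n] = fib_memo(n - 1) + fib_memo(n - 2)
    return _cache[n]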
mit
384615790614466200
16.869919
62
0.615408
false
canvasnetworks/canvas
common/boto/s3/acl.py
17
5397
# Copyright (c) 2006,2007 Mitch Garnaat http://garnaat.org/ # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, dis- # tribute, sublicense, and/or sell copies of the Software, and to permit # persons to whom the Software is furnished to do so, subject to the fol- # lowing conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. from boto.s3.user import User CannedACLStrings = ['private', 'public-read', 'public-read-write', 'authenticated-read', 'bucket-owner-read', 'bucket-owner-full-control'] class Policy: def __init__(self, parent=None): self.parent = parent self.acl = None def __repr__(self): grants = [] for g in self.acl.grants: if g.id == self.owner.id: grants.append("%s (owner) = %s" % (g.display_name, g.permission)) else: if g.type == 'CanonicalUser': u = g.display_name elif g.type == 'Group': u = g.uri else: u = g.email_address grants.append("%s = %s" % (u, g.permission)) return "<Policy: %s>" % ", ".join(grants) def startElement(self, name, attrs, connection): if name == 'Owner': self.owner = User(self) return self.owner elif name == 'AccessControlList': self.acl = ACL(self) return self.acl else: return None def endElement(self, name, value, connection): if name == 'Owner': pass elif name == 'AccessControlList': pass else: setattr(self, name, value) def to_xml(self): s = '<AccessControlPolicy>' s += self.owner.to_xml() s += self.acl.to_xml() s += '</AccessControlPolicy>' return s class ACL: def __init__(self, policy=None): self.policy = policy self.grants = [] def add_grant(self, grant): self.grants.append(grant) def add_email_grant(self, permission, email_address): grant = Grant(permission=permission, type='AmazonCustomerByEmail', email_address=email_address) self.grants.append(grant) def add_user_grant(self, permission, user_id, display_name=None): grant = Grant(permission=permission, type='CanonicalUser', id=user_id, display_name=display_name) self.grants.append(grant) def startElement(self, name, attrs, connection): if name == 'Grant': self.grants.append(Grant(self)) return self.grants[-1] else: return None def endElement(self, name, value, connection): if name == 'Grant': pass else: setattr(self, name, value) def to_xml(self): s = '<AccessControlList>' for grant in self.grants: s += grant.to_xml() s += '</AccessControlList>' return s class Grant: NameSpace = 'xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"' def __init__(self, permission=None, type=None, id=None, display_name=None, uri=None, email_address=None): self.permission = permission self.id = id self.display_name = display_name self.uri = uri self.email_address = email_address self.type = type def startElement(self, name, attrs, connection): if name == 'Grantee': self.type = attrs['xsi:type'] return None def endElement(self, name, value, connection): if 
name == 'ID': self.id = value elif name == 'DisplayName': self.display_name = value elif name == 'URI': self.uri = value elif name == 'EmailAddress': self.email_address = value elif name == 'Grantee': pass elif name == 'Permission': self.permission = value else: setattr(self, name, value) def to_xml(self): s = '<Grant>' s += '<Grantee %s xsi:type="%s">' % (self.NameSpace, self.type) if self.type == 'CanonicalUser': s += '<ID>%s</ID>' % self.id s += '<DisplayName>%s</DisplayName>' % self.display_name elif self.type == 'Group': s += '<URI>%s</URI>' % self.uri else: s += '<EmailAddress>%s</EmailAddress>' % self.email_address s += '</Grantee>' s += '<Permission>%s</Permission>' % self.permission s += '</Grant>' return s
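
# Illustrative sketch, not from the original module: building an ACL by hand
# and serializing it with the to_xml() methods above; the e-mail address is
# invented.
def _acl_xml_demo():
    acl = ACL()
    acl.add_email_grant('READ', 'someone@example.com')
    # returns '<AccessControlList><Grant><Grantee ... xsi:type=
    # "AmazonCustomerByEmail"><EmailAddress>someone@example.com</EmailAddress>
    # </Grantee><Permission>READ</Permission></Grant></AccessControlList>'
    return acl.to_xml()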
bsd-3-clause
1424259395550491400
32.110429
105
0.569205
false
dana-i2cat/felix
vt_manager/src/python/vt_manager/communication/XmlRpcClient.py
3
1276
import xmlrpclib, logging
from urlparse import urlparse

"""
author: msune, CarolinaFernandez

XML-RPC client helpers
"""

class XmlRpcClient():

	@staticmethod
	def callRPCMethodBasicAuth(url,userName,password,methodName,*params):
		"""
		Calls a remote method, embedding HTTP basic-auth credentials in the URL
		"""
		result = None
		#Embed the credentials in the URL (scheme://user:password@host/path)
		parsed = urlparse(url)
		newUrl = parsed.scheme+"://"+userName+":"+password+"@"+parsed.netloc+parsed.path
		if not parsed.query == "":
			newUrl += "?"+parsed.query
		try:
			result = XmlRpcClient.callRPCMethod(newUrl,methodName,*params)
		except Exception:
			raise
		return result

	@staticmethod
	def callRPCMethod(url,methodName,*params):
		"""
		Calls a remote method with a variable number of parameters
		"""
		result = None
		try:
			server = xmlrpclib.Server(url)
			result = getattr(server,methodName)(*params)
		except Exception as e:
			#Strip user:password@ from the URL before logging it
			turl = url.split('@')
			if len(turl)>1:
				url = turl[0].split('//')[0]+'//'+turl[-1]
			#Scrub the credentials from the error message as well
			te = str(e)
			if '@' in te:
				e = te[0:te.find('for ')]+te[te.find('@')+1:]
			logging.error("XMLRPC Client error: can't connect to method %s at %s" % (methodName, url))
			logging.error(e)
			raise Exception("XMLRPC Client error: can't connect to method %s at %s\n" % (methodName, url) + str(e))
		return result
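
# Illustrative sketch, not from the original module: typical call pattern for
# the helper above. The endpoint, credentials and remote method name are
# invented.
def _usage_example():
	return XmlRpcClient.callRPCMethodBasicAuth(
		"https://vtam.example.org:8445/xmlrpc/plugin",
		"user", "secret",
		"ping", "hello")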
apache-2.0
4492516822640603000
27.355556
106
0.659091
false
westinedu/newertrends
django/template/__init__.py
561
3247
""" This is the Django template system. How it works: The Lexer.tokenize() function converts a template string (i.e., a string containing markup with custom template tags) to tokens, which can be either plain text (TOKEN_TEXT), variables (TOKEN_VAR) or block statements (TOKEN_BLOCK). The Parser() class takes a list of tokens in its constructor, and its parse() method returns a compiled template -- which is, under the hood, a list of Node objects. Each Node is responsible for creating some sort of output -- e.g. simple text (TextNode), variable values in a given context (VariableNode), results of basic logic (IfNode), results of looping (ForNode), or anything else. The core Node types are TextNode, VariableNode, IfNode and ForNode, but plugin modules can define their own custom node types. Each Node has a render() method, which takes a Context and returns a string of the rendered node. For example, the render() method of a Variable Node returns the variable's value as a string. The render() method of an IfNode returns the rendered output of whatever was inside the loop, recursively. The Template class is a convenient wrapper that takes care of template compilation and rendering. Usage: The only thing you should ever use directly in this file is the Template class. Create a compiled template object with a template_string, then call render() with a context. In the compilation stage, the TemplateSyntaxError exception will be raised if the template doesn't have proper syntax. Sample code: >>> from django import template >>> s = u'<html>{% if test %}<h1>{{ varvalue }}</h1>{% endif %}</html>' >>> t = template.Template(s) (t is now a compiled template, and its render() method can be called multiple times with multiple contexts) >>> c = template.Context({'test':True, 'varvalue': 'Hello'}) >>> t.render(c) u'<html><h1>Hello</h1></html>' >>> c = template.Context({'test':False, 'varvalue': 'Hello'}) >>> t.render(c) u'<html></html>' """ # Template lexing symbols from django.template.base import (ALLOWED_VARIABLE_CHARS, BLOCK_TAG_END, BLOCK_TAG_START, COMMENT_TAG_END, COMMENT_TAG_START, FILTER_ARGUMENT_SEPARATOR, FILTER_SEPARATOR, SINGLE_BRACE_END, SINGLE_BRACE_START, TOKEN_BLOCK, TOKEN_COMMENT, TOKEN_TEXT, TOKEN_VAR, TRANSLATOR_COMMENT_MARK, UNKNOWN_SOURCE, VARIABLE_ATTRIBUTE_SEPARATOR, VARIABLE_TAG_END, VARIABLE_TAG_START, filter_re, tag_re) # Exceptions from django.template.base import (ContextPopException, InvalidTemplateLibrary, TemplateDoesNotExist, TemplateEncodingError, TemplateSyntaxError, VariableDoesNotExist) # Template parts from django.template.base import (Context, FilterExpression, Lexer, Node, NodeList, Parser, RequestContext, Origin, StringOrigin, Template, TextNode, Token, TokenParser, Variable, VariableNode, constant_string, filter_raw_string) # Compiling templates from django.template.base import (compile_string, resolve_variable, unescape_string_literal, generic_tag_compiler) # Library management from django.template.base import (Library, add_to_builtins, builtins, get_library, get_templatetags_modules, get_text_list, import_library, libraries) __all__ = ('Template', 'Context', 'RequestContext', 'compile_string')
bsd-3-clause
7799414344892095000
39.5875
83
0.755159
false
dhermes/project-euler
python/complete/no190.py
1
1063
#!/usr/bin/env python # Let S_m = (x_1, x_2, ... , x_m) be the m-tuple of positive real # numbers with x_1 + x_2 + ... + x_m = m for which # P_m = x_1 * x_2^2 * ... * x_m^m is maximised. # For example, it can be verified that [P_10] = 4112 # ([] is the integer part function). # Find SUM[P_m] for 2 <= m <= 15. # -------- LAGRANGE -------- # maximize f(x,...) given g(x,....) = c # set ratio of partials equal to lambda # Since g = x_1 + ... + x_m # We need d(P_m)/d(x_i) = i P_m/x_i = lambda # Hence i/x_i = 1/x_1, x_i = i*x_1 # m = x_1(1 + ... + m) = x_1(m)(m+1)/2 # x_1 = 2/(m + 1) # P_m = (2/m+1)**(m*(m+1)/2)*(1*2**2*...*m**m) # P_10 = (2/11)**(55)*(1*4*...*(10**10)) = 4112.0850028536197 import operator from math import floor from python.decorators import euler_timer def P(m): return reduce(operator.mul, [((2 * n) / (1.0 * (m + 1))) ** n for n in range(1, m + 1)]) def main(verbose=False): return int(sum(floor(P(n)) for n in range(2, 16))) if __name__ == '__main__': print euler_timer(190)(main)(verbose=True)
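
# Illustrative check, not from the original file: sanity-check P() against the
# value quoted in the header comments (P_10 = 4112.0850028536197).
def _check_p10():
    return int(floor(P(10))) == 4112   # [P_10] = 4112, as stated above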
apache-2.0
3962063337530785000
25.575
78
0.521167
false
gcblue/gcblue
bin/Lib/test/test_msilib.py
129
1468
""" Test suite for the code in msilib """ import unittest import os from test_support import run_unittest, import_module msilib = import_module('msilib') class Test_make_id(unittest.TestCase): #http://msdn.microsoft.com/en-us/library/aa369212(v=vs.85).aspx """The Identifier data type is a text string. Identifiers may contain the ASCII characters A-Z (a-z), digits, underscores (_), or periods (.). However, every identifier must begin with either a letter or an underscore. """ def test_is_no_change_required(self): self.assertEqual( msilib.make_id("short"), "short") self.assertEqual( msilib.make_id("nochangerequired"), "nochangerequired") self.assertEqual( msilib.make_id("one.dot"), "one.dot") self.assertEqual( msilib.make_id("_"), "_") self.assertEqual( msilib.make_id("a"), "a") #self.assertEqual( # msilib.make_id(""), "") def test_invalid_first_char(self): self.assertEqual( msilib.make_id("9.short"), "_9.short") self.assertEqual( msilib.make_id(".short"), "_.short") def test_invalid_any_char(self): self.assertEqual( msilib.make_id(".s\x82ort"), "_.s_ort") self.assertEqual ( msilib.make_id(".s\x82o?*+rt"), "_.s_o___rt") def test_main(): run_unittest(__name__) if __name__ == '__main__': test_main()
bsd-3-clause
4218803510297476000
30.913043
77
0.583106
false
mhugo/QGIS
tests/src/python/test_provider_ogr_gpkg.py
4
59492
# -*- coding: utf-8 -*- """QGIS Unit tests for the OGR/GPKG provider. .. note:: This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. """ __author__ = 'Even Rouault' __date__ = '2016-04-21' __copyright__ = 'Copyright 2016, Even Rouault' import os import re import shutil import sys import tempfile import time import qgis # NOQA from osgeo import gdal, ogr from qgis.core import (QgsFeature, QgsCoordinateReferenceSystem, QgsFeatureRequest, QgsFeatureSink, QgsFields, QgsField, QgsFieldConstraints, QgsGeometry, QgsProviderRegistry, QgsRectangle, QgsSettings, QgsVectorLayer, QgsVectorLayerExporter, QgsPointXY, QgsProject, QgsWkbTypes, QgsDataProvider, QgsVectorDataProvider) from qgis.PyQt.QtCore import QCoreApplication, QVariant from qgis.testing import start_app, unittest from qgis.utils import spatialite_connect from utilities import unitTestDataPath TEST_DATA_DIR = unitTestDataPath() def GDAL_COMPUTE_VERSION(maj, min, rev): return ((maj) * 1000000 + (min) * 10000 + (rev) * 100) class ErrorReceiver(): def __init__(self): self.msg = None def receiveError(self, msg): self.msg = msg def count_opened_filedescriptors(filename_to_test): count = -1 if sys.platform.startswith('linux'): count = 0 open_files_dirname = '/proc/%d/fd' % os.getpid() filenames = os.listdir(open_files_dirname) for filename in filenames: full_filename = open_files_dirname + '/' + filename if os.path.exists(full_filename): link = os.readlink(full_filename) if os.path.basename(link) == os.path.basename(filename_to_test): count += 1 return count class TestPyQgsOGRProviderGpkg(unittest.TestCase): @classmethod def setUpClass(cls): """Run before all tests""" QCoreApplication.setOrganizationName("QGIS_Test") QCoreApplication.setOrganizationDomain("TestPyQgsOGRProviderGpkg.com") QCoreApplication.setApplicationName("TestPyQgsOGRProviderGpkg") QgsSettings().clear() start_app() # Create test layer cls.basetestpath = tempfile.mkdtemp() @classmethod def tearDownClass(cls): """Run after all tests""" shutil.rmtree(cls.basetestpath, True) QgsSettings().clear() def testDecodeUri(self): filename = '/home/to/path/my_file.gpkg' registry = QgsProviderRegistry.instance() uri = filename components = registry.decodeUri('ogr', uri) self.assertEqual(components["path"], filename) uri = '{}|layername=test'.format(filename) components = registry.decodeUri('ogr', uri) self.assertEqual(components["path"], filename) self.assertEqual(components["layerName"], 'test') uri = '{}|layerid=0'.format(filename) components = registry.decodeUri('ogr', uri) self.assertEqual(components["path"], filename) self.assertEqual(components["layerId"], 0) def testSingleToMultiPolygonPromotion(self): tmpfile = os.path.join(self.basetestpath, 'testSingleToMultiPolygonPromotion.gpkg') ds = ogr.GetDriverByName('GPKG').CreateDataSource(tmpfile) ds.CreateLayer('test', geom_type=ogr.wkbMultiPolygon) ds = None vl = QgsVectorLayer('{}|layerid=0'.format(tmpfile), 'test', 'ogr') f = QgsFeature() f.setGeometry(QgsGeometry.fromWkt('POLYGON ((0 0,0 1,1 1,0 0))')) vl.dataProvider().addFeatures([f]) got = [feat for feat in vl.getFeatures()][0] got_geom = got.geometry() reference = QgsGeometry.fromWkt('MultiPolygon (((0 0, 0 1, 1 1, 0 0)))') # The geometries must be binarily identical self.assertEqual(got_geom.asWkb(), reference.asWkb(), 'Expected {}, got {}'.format(reference.asWkt(), got_geom.asWkt())) def 
testCurveGeometryType(self): tmpfile = os.path.join(self.basetestpath, 'testCurveGeometryType.gpkg') ds = ogr.GetDriverByName('GPKG').CreateDataSource(tmpfile) ds.CreateLayer('test', geom_type=ogr.wkbCurvePolygon) ds = None vl = QgsVectorLayer('{}'.format(tmpfile), 'test', 'ogr') self.assertEqual(vl.dataProvider().subLayers(), [QgsDataProvider.SUBLAYER_SEPARATOR.join(['0', 'test', '0', 'CurvePolygon', 'geom'])]) f = QgsFeature() f.setGeometry(QgsGeometry.fromWkt('POLYGON ((0 0,0 1,1 1,0 0))')) vl.dataProvider().addFeatures([f]) got = [feat for feat in vl.getFeatures()][0] got_geom = got.geometry() reference = QgsGeometry.fromWkt('CurvePolygon (((0 0, 0 1, 1 1, 0 0)))') # The geometries must be binarily identical self.assertEqual(got_geom.asWkb(), reference.asWkb(), 'Expected {}, got {}'.format(reference.asWkt(), got_geom.asWkt())) def internalTestBug15351(self, orderClosing): tmpfile = os.path.join(self.basetestpath, 'testBug15351.gpkg') ds = ogr.GetDriverByName('GPKG').CreateDataSource(tmpfile) lyr = ds.CreateLayer('test', geom_type=ogr.wkbPoint) f = ogr.Feature(lyr.GetLayerDefn()) f.SetGeometry(ogr.CreateGeometryFromWkt('POINT(0 0)')) lyr.CreateFeature(f) f = None ds = None vl = QgsVectorLayer(u'{}'.format(tmpfile), u'test', u'ogr') self.assertTrue(vl.startEditing()) self.assertTrue(vl.changeGeometry(1, QgsGeometry.fromWkt('Point (3 50)'))) # Iterate over features (will open a new OGR connection), but do not # close the iterator for now it = vl.getFeatures() f = QgsFeature() it.nextFeature(f) if orderClosing == 'closeIter_commit_closeProvider': it = None # Commit changes cbk = ErrorReceiver() vl.dataProvider().raiseError.connect(cbk.receiveError) self.assertTrue(vl.commitChanges()) self.assertIsNone(cbk.msg) # Close layer and iterator in different orders if orderClosing == 'closeIter_commit_closeProvider': vl = None elif orderClosing == 'commit_closeProvider_closeIter': vl = None it = None else: assert orderClosing == 'commit_closeIter_closeProvider' it = None vl = None # Test that we succeeded restoring default journal mode, and we # are not let in WAL mode. 
ds = ogr.Open(tmpfile)
        lyr = ds.ExecuteSQL('PRAGMA journal_mode')
        f = lyr.GetNextFeature()
        res = f.GetField(0)
        ds.ReleaseResultSet(lyr)
        ds = None
        self.assertEqual(res, 'delete')

    # We need GDAL 2.0 to issue PRAGMA journal_mode
    # Note: in that case, we don't strictly need to turn on WAL
    def testBug15351_closeIter_commit_closeProvider(self):
        self.internalTestBug15351('closeIter_commit_closeProvider')

    # We need GDAL 2.0 to issue PRAGMA journal_mode
    def testBug15351_commit_closeProvider_closeIter(self):
        self.internalTestBug15351('commit_closeProvider_closeIter')

    # We need GDAL 2.0 to issue PRAGMA journal_mode
    def testBug15351_commit_closeIter_closeProvider(self):
        self.internalTestBug15351('commit_closeIter_closeProvider')

    @unittest.skipIf(int(gdal.VersionInfo('VERSION_NUM')) < GDAL_COMPUTE_VERSION(2, 1, 2),
                     'requires GDAL >= 2.1.2')
    def testGeopackageExtentUpdate(self):
        ''' test https://github.com/qgis/QGIS/issues/23209 '''
        tmpfile = os.path.join(self.basetestpath, 'testGeopackageExtentUpdate.gpkg')
        ds = ogr.GetDriverByName('GPKG').CreateDataSource(tmpfile)
        lyr = ds.CreateLayer('test', geom_type=ogr.wkbPoint)
        f = ogr.Feature(lyr.GetLayerDefn())
        f.SetGeometry(ogr.CreateGeometryFromWkt('POINT(0 0)'))
        lyr.CreateFeature(f)
        f = ogr.Feature(lyr.GetLayerDefn())
        f.SetGeometry(ogr.CreateGeometryFromWkt('POINT(1 1)'))
        lyr.CreateFeature(f)
        f = None
        f = ogr.Feature(lyr.GetLayerDefn())
        f.SetGeometry(ogr.CreateGeometryFromWkt('POINT(1 0.5)'))
        lyr.CreateFeature(f)
        f = None
        gdal.ErrorReset()
        ds.ExecuteSQL('RECOMPUTE EXTENT ON test')
        has_error = gdal.GetLastErrorMsg() != ''
        ds = None
        if has_error:
            print('Too old GDAL trunk version. Please update')
            return

        vl = QgsVectorLayer(u'{}'.format(tmpfile), u'test', u'ogr')

        # Test moving a geometry that touches the bbox
        self.assertTrue(vl.startEditing())
        self.assertTrue(vl.changeGeometry(1, QgsGeometry.fromWkt('Point (0.5 0)')))
        self.assertTrue(vl.commitChanges())
        reference = QgsGeometry.fromRect(QgsRectangle(0.5, 0.0, 1.0, 1.0))
        provider_extent = QgsGeometry.fromRect(vl.extent())
        self.assertTrue(QgsGeometry.compare(provider_extent.asPolygon()[0], reference.asPolygon()[0], 0.00001),
                        provider_extent.asPolygon()[0])

        # Test deleting a geometry that touches the bbox
        self.assertTrue(vl.startEditing())
        self.assertTrue(vl.deleteFeature(2))
        self.assertTrue(vl.commitChanges())
        reference = QgsGeometry.fromRect(QgsRectangle(0.5, 0.0, 1.0, 0.5))
        provider_extent = QgsGeometry.fromRect(vl.extent())
        self.assertTrue(QgsGeometry.compare(provider_extent.asPolygon()[0], reference.asPolygon()[0], 0.00001),
                        provider_extent.asPolygon()[0])

    def testSelectSubsetString(self):
        tmpfile = os.path.join(self.basetestpath, 'testSelectSubsetString.gpkg')
        ds = ogr.GetDriverByName('GPKG').CreateDataSource(tmpfile)
        lyr = ds.CreateLayer('test', geom_type=ogr.wkbMultiPolygon)
        lyr.CreateField(ogr.FieldDefn('foo', ogr.OFTString))
        f = ogr.Feature(lyr.GetLayerDefn())
        f['foo'] = 'bar'
        lyr.CreateFeature(f)
        f = None
        f = ogr.Feature(lyr.GetLayerDefn())
        f['foo'] = 'baz'
        lyr.CreateFeature(f)
        f = None
        ds = None
        vl = QgsVectorLayer('{}|layerid=0'.format(tmpfile), 'test', 'ogr')
        vl.setSubsetString("SELECT fid, foo FROM test WHERE foo = 'baz'")
        got = [feat for feat in vl.getFeatures()]
        self.assertEqual(len(got), 1)

        del vl
        testdata_path = unitTestDataPath('provider')
        shutil.copy(os.path.join(testdata_path, 'bug_19826.gpkg'), tmpfile)
        vl = QgsVectorLayer('{}|layerid=0'.format(tmpfile), 'test', 'ogr')
        vl.setSubsetString("name = 'two'")
        got = [feat for feat in vl.getFeatures()]
        self.assertEqual(len(got), 1)

        attributes = got[0].attributes()
self.assertEqual(got[0].id(), 2) self.assertEqual(attributes[0], 2) self.assertEqual(attributes[1], 'two') self.assertNotEqual(attributes[2], None) # Request by FeatureId on a subset layer got = [feat for feat in vl.getFeatures(QgsFeatureRequest(2))] self.assertEqual(len(got), 1) attributes = got[0].attributes() self.assertEqual(got[0].id(), 2) self.assertEqual(attributes[0], 2) self.assertEqual(attributes[1], 'two') self.assertNotEqual(attributes[2], None) request = QgsFeatureRequest(2).setSubsetOfAttributes([0]) got = [feat for feat in vl.getFeatures(request)] self.assertEqual(len(got), 1) attributes = got[0].attributes() self.assertEqual(got[0].id(), 2) self.assertEqual(attributes[0], 2) self.assertEqual(attributes[1], None) self.assertEqual(attributes[2], None) # Request by FeatureId on a subset layer. The name = 'two' filter # only returns FID 2, so requesting on FID 1 should return nothing # but this is broken now. got = [feat for feat in vl.getFeatures(QgsFeatureRequest(1))] self.assertEqual(len(got), 1) # this is the current behavior, broken def testEditSubsetString(self): tmpfile = os.path.join(self.basetestpath, 'testEditSubsetString.gpkg') ds = ogr.GetDriverByName('GPKG').CreateDataSource(tmpfile) lyr = ds.CreateLayer('test', geom_type=ogr.wkbMultiPolygon) lyr.CreateField(ogr.FieldDefn('foo', ogr.OFTString)) f = ogr.Feature(lyr.GetLayerDefn()) f['foo'] = 'bar' lyr.CreateFeature(f) f = None f = ogr.Feature(lyr.GetLayerDefn()) f['foo'] = 'baz' lyr.CreateFeature(f) f = None ds = None vl = QgsVectorLayer('{}|layerid=0'.format(tmpfile), 'test', 'ogr') self.assertEqual(vl.dataProvider().featureCount(), 2) # Test adding features vl.setSubsetString("foo = 'baz'") self.assertTrue(vl.startEditing()) feature = QgsFeature(vl.fields()) feature['foo'] = 'abc' vl.addFeature(feature) vl.commitChanges() vl.setSubsetString(None) self.assertEqual(vl.dataProvider().featureCount(), 3) # Test deleting a feature vl.setSubsetString("foo = 'baz'") self.assertTrue(vl.startEditing()) vl.deleteFeature(1) vl.commitChanges() vl.setSubsetString(None) self.assertEqual(vl.dataProvider().featureCount(), 2) # Test editing a feature vl.setSubsetString("foo = 'baz'") self.assertTrue(vl.startEditing()) vl.changeAttributeValue(2, 1, 'xx') vl.commitChanges() vl.setSubsetString(None) self.assertEqual(set((feat['foo'] for feat in vl.getFeatures())), set(['xx', 'abc'])) def testStyle(self): # First test with invalid URI vl = QgsVectorLayer('/idont/exist.gpkg', 'test', 'ogr') self.assertFalse(vl.dataProvider().isSaveAndLoadStyleToDatabaseSupported()) related_count, idlist, namelist, desclist, errmsg = vl.listStylesInDatabase() self.assertEqual(related_count, -1) self.assertEqual(idlist, []) self.assertEqual(namelist, []) self.assertEqual(desclist, []) self.assertNotEqual(errmsg, "") qml, errmsg = vl.getStyleFromDatabase("1") self.assertEqual(qml, "") self.assertNotEqual(errmsg, "") qml, success = vl.loadNamedStyle('/idont/exist.gpkg') self.assertFalse(success) errorMsg = vl.saveStyleToDatabase("name", "description", False, "") self.assertNotEqual(errorMsg, "") # Now with valid URI tmpfile = os.path.join(self.basetestpath, 'testStyle.gpkg') ds = ogr.GetDriverByName('GPKG').CreateDataSource(tmpfile) lyr = ds.CreateLayer('test', geom_type=ogr.wkbMultiPolygon) lyr.CreateField(ogr.FieldDefn('foo', ogr.OFTString)) f = ogr.Feature(lyr.GetLayerDefn()) f['foo'] = 'bar' lyr.CreateFeature(f) f = None lyr = ds.CreateLayer('test2', geom_type=ogr.wkbMultiPolygon) lyr.CreateField(ogr.FieldDefn('foo', ogr.OFTString)) f = 
ogr.Feature(lyr.GetLayerDefn()) f['foo'] = 'bar' lyr.CreateFeature(f) f = None ds = None vl = QgsVectorLayer('{}|layername=test'.format(tmpfile), 'test', 'ogr') self.assertTrue(vl.isValid()) vl2 = QgsVectorLayer('{}|layername=test2'.format(tmpfile), 'test2', 'ogr') self.assertTrue(vl2.isValid()) self.assertTrue(vl.dataProvider().isSaveAndLoadStyleToDatabaseSupported()) related_count, idlist, namelist, desclist, errmsg = vl.listStylesInDatabase() self.assertEqual(related_count, 0) self.assertEqual(idlist, []) self.assertEqual(namelist, []) self.assertEqual(desclist, []) self.assertNotEqual(errmsg, "") qml, errmsg = vl.getStyleFromDatabase("not_existing") self.assertEqual(qml, "") self.assertNotEqual(errmsg, "") qml, success = vl.loadNamedStyle('{}|layerid=0'.format(tmpfile)) self.assertFalse(success) errorMsg = vl.saveStyleToDatabase("name", "description", False, "") self.assertEqual(errorMsg, "") qml, errmsg = vl.getStyleFromDatabase("not_existing") self.assertEqual(qml, "") self.assertNotEqual(errmsg, "") related_count, idlist, namelist, desclist, errmsg = vl.listStylesInDatabase() self.assertEqual(related_count, 1) self.assertEqual(errmsg, "") self.assertEqual(idlist, ['1']) self.assertEqual(namelist, ['name']) self.assertEqual(desclist, ['description']) qml, errmsg = vl.getStyleFromDatabase("100") self.assertEqual(qml, "") self.assertNotEqual(errmsg, "") qml, errmsg = vl.getStyleFromDatabase("1") self.assertTrue(qml.startswith('<!DOCTYPE qgis'), qml) self.assertEqual(errmsg, "") # Try to overwrite it, but simulate answering no settings = QgsSettings() settings.setValue("/qgis/overwriteStyle", False) errorMsg = vl.saveStyleToDatabase("name", "description_bis", False, "") self.assertNotEqual(errorMsg, "") related_count, idlist, namelist, desclist, errmsg = vl.listStylesInDatabase() self.assertEqual(related_count, 1) self.assertEqual(errmsg, "") self.assertEqual(idlist, ['1']) self.assertEqual(namelist, ['name']) self.assertEqual(desclist, ['description']) # Try to overwrite it and simulate answering yes settings = QgsSettings() settings.setValue("/qgis/overwriteStyle", True) errorMsg = vl.saveStyleToDatabase("name", "description_bis", False, "") self.assertEqual(errorMsg, "") related_count, idlist, namelist, desclist, errmsg = vl.listStylesInDatabase() self.assertEqual(related_count, 1) self.assertEqual(errmsg, "") self.assertEqual(idlist, ['1']) self.assertEqual(namelist, ['name']) self.assertEqual(desclist, ['description_bis']) errorMsg = vl2.saveStyleToDatabase("name_test2", "description_test2", True, "") self.assertEqual(errorMsg, "") errorMsg = vl.saveStyleToDatabase("name2", "description2", True, "") self.assertEqual(errorMsg, "") errorMsg = vl.saveStyleToDatabase("name3", "description3", True, "") self.assertEqual(errorMsg, "") related_count, idlist, namelist, desclist, errmsg = vl.listStylesInDatabase() self.assertEqual(related_count, 3) self.assertEqual(errmsg, "") self.assertEqual(idlist, ['1', '3', '4', '2']) self.assertEqual(namelist, ['name', 'name2', 'name3', 'name_test2']) self.assertEqual(desclist, ['description_bis', 'description2', 'description3', 'description_test2']) # Check that the layers_style table is not listed in subLayers() vl = QgsVectorLayer(tmpfile, 'test', 'ogr') sublayers = vl.dataProvider().subLayers() self.assertEqual(len(sublayers), 2, sublayers) def testDisablewalForSqlite3(self): ''' Test disabling walForSqlite3 setting ''' QgsSettings().setValue("/qgis/walForSqlite3", False) tmpfile = os.path.join(self.basetestpath, 'testDisablewalForSqlite3.gpkg') ds =
ogr.GetDriverByName('GPKG').CreateDataSource(tmpfile) lyr = ds.CreateLayer('test', geom_type=ogr.wkbPoint) lyr.CreateField(ogr.FieldDefn('attr0', ogr.OFTInteger)) lyr.CreateField(ogr.FieldDefn('attr1', ogr.OFTInteger)) f = ogr.Feature(lyr.GetLayerDefn()) f.SetGeometry(ogr.CreateGeometryFromWkt('POINT(0 0)')) lyr.CreateFeature(f) f = None ds = None vl = QgsVectorLayer(u'{}'.format(tmpfile), u'test', u'ogr') # Test that we are using default delete mode and not WAL ds = ogr.Open(tmpfile) lyr = ds.ExecuteSQL('PRAGMA journal_mode') f = lyr.GetNextFeature() res = f.GetField(0) ds.ReleaseResultSet(lyr) ds = None self.assertEqual(res, 'delete') self.assertTrue(vl.startEditing()) feature = next(vl.getFeatures()) self.assertTrue(vl.changeAttributeValue(feature.id(), 1, 1001)) # Commit changes cbk = ErrorReceiver() vl.dataProvider().raiseError.connect(cbk.receiveError) self.assertTrue(vl.commitChanges()) self.assertIsNone(cbk.msg) vl = None QgsSettings().setValue("/qgis/walForSqlite3", None) def testSimulatedDBManagerImport(self): uri = 'point?field=f1:int' uri += '&field=f2:double(6,4)' uri += '&field=f3:string(20)' mem_lyr = QgsVectorLayer(uri, "x", "memory") self.assertTrue(mem_lyr.isValid()) f = QgsFeature(mem_lyr.fields()) f['f1'] = 1 f['f2'] = 123.456 f['f3'] = '12345678.90123456789' f2 = QgsFeature(mem_lyr.fields()) f2['f1'] = 2 mem_lyr.dataProvider().addFeatures([f, f2]) # Test creating new DB tmpfile = os.path.join(self.basetestpath, 'testSimulatedDBManagerImport.gpkg') options = {} options['driverName'] = 'GPKG' err = QgsVectorLayerExporter.exportLayer(mem_lyr, tmpfile, "ogr", mem_lyr.crs(), False, options) self.assertEqual(err[0], QgsVectorLayerExporter.NoError, 'unexpected import error {0}'.format(err)) lyr = QgsVectorLayer(tmpfile, "y", "ogr") self.assertTrue(lyr.isValid()) features = lyr.getFeatures() f = next(features) self.assertEqual(f['f1'], 1) self.assertEqual(f['f2'], 123.456) self.assertEqual(f['f3'], '12345678.90123456789') f = next(features) self.assertEqual(f['f1'], 2) features = None del lyr # Test updating existing DB, by adding a new layer mem_lyr = QgsVectorLayer(uri, "x", "memory") self.assertTrue(mem_lyr.isValid()) f = QgsFeature(mem_lyr.fields()) f['f1'] = 1 f['f2'] = 2 mem_lyr.dataProvider().addFeatures([f]) options = {} options['update'] = True options['driverName'] = 'GPKG' options['layerName'] = 'my_out_table' err = QgsVectorLayerExporter.exportLayer(mem_lyr, tmpfile, "ogr", mem_lyr.crs(), False, options) self.assertEqual(err[0], QgsVectorLayerExporter.NoError, 'unexpected import error {0}'.format(err)) lyr = QgsVectorLayer(tmpfile + "|layername=my_out_table", "y", "ogr") self.assertTrue(lyr.isValid()) features = lyr.getFeatures() f = next(features) self.assertEqual(f['f1'], 1) self.assertEqual(f['f2'], 2) features = None del lyr # Test overwriting without overwrite option err = QgsVectorLayerExporter.exportLayer(mem_lyr, tmpfile, "ogr", mem_lyr.crs(), False, options) self.assertEqual(err[0], QgsVectorLayerExporter.ErrCreateDataSource) # Test overwriting, without specifying a layer name mem_lyr = QgsVectorLayer(uri, "x", "memory") self.assertTrue(mem_lyr.isValid()) f = QgsFeature(mem_lyr.fields()) f['f1'] = 3 f['f2'] = 4 mem_lyr.dataProvider().addFeatures([f]) options = {} options['driverName'] = 'GPKG' options['overwrite'] = True err = QgsVectorLayerExporter.exportLayer(mem_lyr, tmpfile, "ogr", mem_lyr.crs(), False, options) self.assertEqual(err[0], QgsVectorLayerExporter.NoError, 'unexpected import error {0}'.format(err)) lyr = QgsVectorLayer(tmpfile, "y", "ogr") 
self.assertTrue(lyr.isValid()) features = lyr.getFeatures() f = next(features) self.assertEqual(f['f1'], 3) self.assertEqual(f['f2'], 4) features = None def testExportLayerToExistingDatabase(self): fields = QgsFields() fields.append(QgsField('f1', QVariant.Int)) tmpfile = os.path.join(self.basetestpath, 'testCreateNewGeopackage.gpkg') options = {} options['update'] = True options['driverName'] = 'GPKG' options['layerName'] = 'table1' exporter = QgsVectorLayerExporter(tmpfile, "ogr", fields, QgsWkbTypes.Polygon, QgsCoordinateReferenceSystem(3111), False, options) self.assertFalse(exporter.errorCode(), 'unexpected export error {}: {}'.format(exporter.errorCode(), exporter.errorMessage())) options['layerName'] = 'table2' exporter = QgsVectorLayerExporter(tmpfile, "ogr", fields, QgsWkbTypes.Point, QgsCoordinateReferenceSystem(3113), False, options) self.assertFalse(exporter.errorCode(), 'unexpected export error {} : {}'.format(exporter.errorCode(), exporter.errorMessage())) del exporter # make sure layers exist lyr = QgsVectorLayer('{}|layername=table1'.format(tmpfile), "lyr1", "ogr") self.assertTrue(lyr.isValid()) self.assertEqual(lyr.crs().authid(), 'EPSG:3111') self.assertEqual(lyr.wkbType(), QgsWkbTypes.Polygon) lyr2 = QgsVectorLayer('{}|layername=table2'.format(tmpfile), "lyr2", "ogr") self.assertTrue(lyr2.isValid()) self.assertEqual(lyr2.crs().authid(), 'EPSG:3113') self.assertEqual(lyr2.wkbType(), QgsWkbTypes.Point) def testGeopackageTwoLayerEdition(self): ''' test https://github.com/qgis/QGIS/issues/24933 ''' tmpfile = os.path.join(self.basetestpath, 'testGeopackageTwoLayerEdition.gpkg') ds = ogr.GetDriverByName('GPKG').CreateDataSource(tmpfile) lyr = ds.CreateLayer('layer1', geom_type=ogr.wkbPoint) lyr.CreateField(ogr.FieldDefn('attr', ogr.OFTInteger)) f = ogr.Feature(lyr.GetLayerDefn()) f.SetGeometry(ogr.CreateGeometryFromWkt('POINT(0 0)')) lyr.CreateFeature(f) f = None lyr = ds.CreateLayer('layer2', geom_type=ogr.wkbPoint) lyr.CreateField(ogr.FieldDefn('attr', ogr.OFTInteger)) f = ogr.Feature(lyr.GetLayerDefn()) f.SetGeometry(ogr.CreateGeometryFromWkt('POINT(1 1)')) lyr.CreateFeature(f) f = None ds = None vl1 = QgsVectorLayer(u'{}'.format(tmpfile) + "|layername=layer1", u'layer1', u'ogr') vl2 = QgsVectorLayer(u'{}'.format(tmpfile) + "|layername=layer2", u'layer2', u'ogr') # Edit vl1, vl2 multiple times self.assertTrue(vl1.startEditing()) self.assertTrue(vl2.startEditing()) self.assertTrue(vl1.changeGeometry(1, QgsGeometry.fromWkt('Point (2 2)'))) self.assertTrue(vl2.changeGeometry(1, QgsGeometry.fromWkt('Point (3 3)'))) self.assertTrue(vl1.commitChanges()) self.assertTrue(vl2.commitChanges()) self.assertTrue(vl1.startEditing()) self.assertTrue(vl2.startEditing()) self.assertTrue(vl1.changeAttributeValue(1, 1, 100)) self.assertTrue(vl2.changeAttributeValue(1, 1, 101)) self.assertTrue(vl1.commitChanges()) self.assertTrue(vl2.commitChanges()) self.assertTrue(vl1.startEditing()) self.assertTrue(vl2.startEditing()) self.assertTrue(vl1.changeGeometry(1, QgsGeometry.fromWkt('Point (4 4)'))) self.assertTrue(vl2.changeGeometry(1, QgsGeometry.fromWkt('Point (5 5)'))) self.assertTrue(vl1.commitChanges()) self.assertTrue(vl2.commitChanges()) vl1 = None vl2 = None # Check everything is as expected after re-opening vl1 = QgsVectorLayer(u'{}'.format(tmpfile) + "|layername=layer1", u'layer1', u'ogr') vl2 = QgsVectorLayer(u'{}'.format(tmpfile) + "|layername=layer2", u'layer2', u'ogr') got = [feat for feat in vl1.getFeatures()][0] got_geom = got.geometry() self.assertEqual(got['attr'], 100) reference 
= QgsGeometry.fromWkt('Point (4 4)') self.assertEqual(got_geom.asWkb(), reference.asWkb(), 'Expected {}, got {}'.format(reference.asWkt(), got_geom.asWkt())) got = [feat for feat in vl2.getFeatures()][0] got_geom = got.geometry() self.assertEqual(got['attr'], 101) reference = QgsGeometry.fromWkt('Point (5 5)') self.assertEqual(got_geom.asWkb(), reference.asWkb(), 'Expected {}, got {}'.format(reference.asWkt(), got_geom.asWkt())) def testReplaceLayerWhileOpen(self): ''' Replace an existing geopackage layer whilst it's open in the project''' tmpfile = os.path.join(self.basetestpath, 'testGeopackageReplaceOpenLayer.gpkg') ds = ogr.GetDriverByName('GPKG').CreateDataSource(tmpfile) lyr = ds.CreateLayer('layer1', geom_type=ogr.wkbPoint) lyr.CreateField(ogr.FieldDefn('attr', ogr.OFTInteger)) lyr.CreateField(ogr.FieldDefn('attr2', ogr.OFTInteger)) f = ogr.Feature(lyr.GetLayerDefn()) f.SetGeometry(ogr.CreateGeometryFromWkt('POINT(0 0)')) lyr.CreateFeature(f) f = None vl1 = QgsVectorLayer(u'{}'.format(tmpfile) + "|layername=layer1", u'layer1', u'ogr') p = QgsProject() p.addMapLayer(vl1) request = QgsFeatureRequest().setSubsetOfAttributes([0]) features = [f for f in vl1.getFeatures(request)] self.assertEqual(len(features), 1) # now, overwrite the layer with a different geometry type and fields ds.DeleteLayer('layer1') lyr = ds.CreateLayer('layer1', geom_type=ogr.wkbLineString) lyr.CreateField(ogr.FieldDefn('attr', ogr.OFTString)) f = ogr.Feature(lyr.GetLayerDefn()) f.SetGeometry(ogr.CreateGeometryFromWkt('LineString(0 0, 1 1)')) lyr.CreateFeature(f) f = None vl2 = QgsVectorLayer(u'{}'.format(tmpfile) + "|layername=layer1", u'layer2', u'ogr') p.addMapLayer(vl2) features = [f for f in vl1.getFeatures(request)] self.assertEqual(len(features), 1) def testSublayerWithComplexLayerName(self): ''' Test reading a gpkg with a sublayer name containing : ''' tmpfile = os.path.join(self.basetestpath, 'testGeopackageComplexLayerName.gpkg') ds = ogr.GetDriverByName('GPKG').CreateDataSource(tmpfile) lyr = ds.CreateLayer('layer1:', geom_type=ogr.wkbPoint, options=['GEOMETRY_NAME=geom:']) lyr.CreateField(ogr.FieldDefn('attr', ogr.OFTInteger)) f = ogr.Feature(lyr.GetLayerDefn()) f.SetGeometry(ogr.CreateGeometryFromWkt('POINT(0 0)')) lyr.CreateFeature(f) f = None vl = QgsVectorLayer(u'{}'.format(tmpfile), u'layer', u'ogr') self.assertEqual(vl.dataProvider().subLayers(), [QgsDataProvider.SUBLAYER_SEPARATOR.join(['0', 'layer1:', '1', 'Point', 'geom:'])]) def testGeopackageManyLayers(self): ''' test opening more than 64 layers without running out of Spatialite connections ''' tmpfile = os.path.join(self.basetestpath, 'testGeopackageManyLayers.gpkg') ds = ogr.GetDriverByName('GPKG').CreateDataSource(tmpfile) for i in range(70): lyr = ds.CreateLayer('layer%d' % i, geom_type=ogr.wkbPoint) f = ogr.Feature(lyr.GetLayerDefn()) f.SetGeometry(ogr.CreateGeometryFromWkt('POINT(%d 0)' % i)) lyr.CreateFeature(f) f = None ds = None vl_tab = [] for i in range(70): layername = 'layer%d' % i vl = QgsVectorLayer(u'{}'.format(tmpfile) + "|layername=" + layername, layername, u'ogr') self.assertTrue(vl.isValid()) vl_tab += [vl] count = count_opened_filedescriptors(tmpfile) if count > 0: self.assertEqual(count, 1) for i in range(70): got = [feat for feat in vl.getFeatures()] self.assertTrue(len(got) == 1) # We shouldn't have more than 2 file handles opened: # one shared by the QgsOgrProvider object # one shared by the feature iterators count = count_opened_filedescriptors(tmpfile) if count > 0: self.assertEqual(count, 2) # Re-open an already 
opened layer. We should get a new handle layername = 'layer%d' % 0 vl_extra0 = QgsVectorLayer(u'{}'.format(tmpfile) + "|layername=" + layername, layername, u'ogr') self.assertTrue(vl_extra0.isValid()) countNew = count_opened_filedescriptors(tmpfile) if countNew > 0: self.assertLessEqual(countNew, 4) # for some reason we get 4 and not 3 layername = 'layer%d' % 1 vl_extra1 = QgsVectorLayer(u'{}'.format(tmpfile) + "|layername=" + layername, layername, u'ogr') self.assertTrue(vl_extra1.isValid()) countNew2 = count_opened_filedescriptors(tmpfile) self.assertEqual(countNew2, countNew) def testGeopackageRefreshIfTableListUpdated(self): ''' test that creating/deleting a layer is reflected when opening a new layer ''' tmpfile = os.path.join(self.basetestpath, 'testGeopackageRefreshIfTableListUpdated.gpkg') ds = ogr.GetDriverByName('GPKG').CreateDataSource(tmpfile) ds.CreateLayer('test', geom_type=ogr.wkbPoint) ds = None vl = QgsVectorLayer(u'{}'.format(tmpfile) + "|layername=" + "test", 'test', u'ogr') self.assertTrue(vl.extent().isNull()) time.sleep(1) # so timestamp gets updated ds = ogr.Open(tmpfile, update=1) ds.CreateLayer('test2', geom_type=ogr.wkbPoint) ds = None vl2 = QgsVectorLayer(u'{}'.format(tmpfile), 'test', u'ogr') vl2.subLayers() self.assertEqual(vl2.dataProvider().subLayers(), [QgsDataProvider.SUBLAYER_SEPARATOR.join(['0', 'test', '0', 'Point', 'geom']), QgsDataProvider.SUBLAYER_SEPARATOR.join(['1', 'test2', '0', 'Point', 'geom'])]) def testGeopackageLargeFID(self): tmpfile = os.path.join(self.basetestpath, 'testGeopackageLargeFID.gpkg') ds = ogr.GetDriverByName('GPKG').CreateDataSource(tmpfile) lyr = ds.CreateLayer('test', geom_type=ogr.wkbPoint) lyr.CreateField(ogr.FieldDefn('str_field', ogr.OFTString)) ds = None vl = QgsVectorLayer(u'{}'.format(tmpfile) + "|layername=" + "test", 'test', u'ogr') f = QgsFeature() f.setAttributes([1234567890123, None]) self.assertTrue(vl.startEditing()) self.assertTrue(vl.dataProvider().addFeatures([f])) self.assertTrue(vl.commitChanges()) got = [feat for feat in vl.getFeatures()][0] self.assertEqual(got['fid'], 1234567890123) self.assertTrue(vl.startEditing()) self.assertTrue(vl.changeGeometry(1234567890123, QgsGeometry.fromWkt('Point (3 50)'))) self.assertTrue(vl.changeAttributeValue(1234567890123, 1, 'foo')) self.assertTrue(vl.commitChanges()) got = [feat for feat in vl.getFeatures()][0] self.assertEqual(got['str_field'], 'foo') got_geom = got.geometry() self.assertIsNotNone(got_geom) self.assertTrue(vl.startEditing()) self.assertTrue(vl.deleteFeature(1234567890123)) self.assertTrue(vl.commitChanges()) def test_AddFeatureNullFid(self): """Test gpkg feature with NULL fid can be added""" tmpfile = os.path.join(self.basetestpath, 'test_AddFeatureNullFid.gpkg') ds = ogr.GetDriverByName('GPKG').CreateDataSource(tmpfile) lyr = ds.CreateLayer('test', geom_type=ogr.wkbPolygon) lyr.CreateField(ogr.FieldDefn('str_field', ogr.OFTString)) ds = None layer = QgsVectorLayer(u'{}'.format(tmpfile) + "|layername=" + "test", 'test', u'ogr') # Check that pk field has unique constraint fields = layer.fields() pkfield = fields.at(0) self.assertTrue(pkfield.constraints().constraints() & QgsFieldConstraints.ConstraintUnique) # Test add feature with default Fid (NULL) layer.startEditing() feat = QgsFeature(layer.fields()) feat.setGeometry(QgsGeometry.fromWkt('Polygon ((0 0, 0 1, 1 1, 1 0, 0 0))')) feat.setAttribute(1, 'test_value') layer.addFeature(feat) self.assertTrue(layer.commitChanges()) self.assertEqual(layer.featureCount(), 1) def
test_SplitFeature(self): """Test gpkg feature can be split""" tmpfile = os.path.join(self.basetestpath, 'testGeopackageSplitFeatures.gpkg') ds = ogr.GetDriverByName('GPKG').CreateDataSource(tmpfile) lyr = ds.CreateLayer('test', geom_type=ogr.wkbPolygon) lyr.CreateField(ogr.FieldDefn('str_field', ogr.OFTString)) f = ogr.Feature(lyr.GetLayerDefn()) f.SetGeometry(ogr.CreateGeometryFromWkt('POLYGON ((0 0,0 1,1 1,1 0,0 0))')) lyr.CreateFeature(f) f = None ds = None # Split features layer = QgsVectorLayer(u'{}'.format(tmpfile) + "|layername=" + "test", 'test', u'ogr') self.assertTrue(layer.isValid()) self.assertTrue(layer.isSpatial()) self.assertEqual([f for f in layer.getFeatures()][0].geometry().asWkt(), 'Polygon ((0 0, 0 1, 1 1, 1 0, 0 0))') layer.startEditing() self.assertEqual(layer.splitFeatures([QgsPointXY(0.5, 0), QgsPointXY(0.5, 1)], 0), 0) self.assertTrue(layer.commitChanges()) self.assertEqual(layer.featureCount(), 2) layer = QgsVectorLayer(u'{}'.format(tmpfile) + "|layername=" + "test", 'test', u'ogr') self.assertEqual(layer.featureCount(), 2) self.assertEqual([f for f in layer.getFeatures()][0].geometry().asWkt(), 'Polygon ((0.5 0, 0.5 1, 1 1, 1 0, 0.5 0))') self.assertEqual([f for f in layer.getFeatures()][1].geometry().asWkt(), 'Polygon ((0.5 1, 0.5 0, 0 0, 0 1, 0.5 1))') def testCreateAttributeIndex(self): tmpfile = os.path.join(self.basetestpath, 'testGeopackageAttributeIndex.gpkg') ds = ogr.GetDriverByName('GPKG').CreateDataSource(tmpfile) lyr = ds.CreateLayer('test', geom_type=ogr.wkbPolygon) lyr.CreateField(ogr.FieldDefn('str_field', ogr.OFTString)) lyr.CreateField(ogr.FieldDefn('str_field2', ogr.OFTString)) f = None ds = None vl = QgsVectorLayer(u'{}'.format(tmpfile) + "|layername=" + "test", 'test', u'ogr') self.assertTrue(vl.isValid()) self.assertTrue(vl.dataProvider().capabilities() & QgsVectorDataProvider.CreateAttributeIndex) self.assertFalse(vl.dataProvider().createAttributeIndex(-1)) self.assertFalse(vl.dataProvider().createAttributeIndex(100)) # should not be allowed - there's already an index on the primary key self.assertFalse(vl.dataProvider().createAttributeIndex(0)) self.assertTrue(vl.dataProvider().createAttributeIndex(1)) con = spatialite_connect(tmpfile, isolation_level=None) cur = con.cursor() rs = cur.execute("SELECT * FROM sqlite_master WHERE type='index' AND tbl_name='test'") res = [row for row in rs] self.assertEqual(len(res), 1) index_name = res[0][1] rs = cur.execute("PRAGMA index_info({})".format(index_name)) res = [row for row in rs] self.assertEqual(len(res), 1) self.assertEqual(res[0][2], 'str_field') # second index self.assertTrue(vl.dataProvider().createAttributeIndex(2)) rs = cur.execute("SELECT * FROM sqlite_master WHERE type='index' AND tbl_name='test'") res = [row for row in rs] self.assertEqual(len(res), 2) indexed_columns = [] for row in res: index_name = row[1] rs = cur.execute("PRAGMA index_info({})".format(index_name)) res = [row for row in rs] self.assertEqual(len(res), 1) indexed_columns.append(res[0][2]) self.assertCountEqual(indexed_columns, ['str_field', 'str_field2']) con.close() def testCreateSpatialIndex(self): tmpfile = os.path.join(self.basetestpath, 'testGeopackageSpatialIndex.gpkg') ds = ogr.GetDriverByName('GPKG').CreateDataSource(tmpfile) lyr = ds.CreateLayer('test', geom_type=ogr.wkbPolygon, options=['SPATIAL_INDEX=NO']) lyr.CreateField(ogr.FieldDefn('str_field', ogr.OFTString)) lyr.CreateField(ogr.FieldDefn('str_field2', ogr.OFTString)) f = None ds = None vl = QgsVectorLayer(u'{}'.format(tmpfile) + "|layername=" + "test",
'test', u'ogr') self.assertTrue(vl.isValid()) self.assertTrue(vl.dataProvider().capabilities() & QgsVectorDataProvider.CreateSpatialIndex) self.assertTrue(vl.dataProvider().createSpatialIndex()) def testSubSetStringEditable_bug17795_but_with_modified_behavior(self): """Test that a layer is editable after setting a subset""" tmpfile = os.path.join(self.basetestpath, 'testSubSetStringEditable_bug17795.gpkg') shutil.copy(TEST_DATA_DIR + '/' + 'provider/bug_17795.gpkg', tmpfile) isEditable = QgsVectorDataProvider.ChangeAttributeValues testPath = tmpfile + '|layername=bug_17795' vl = QgsVectorLayer(testPath, 'subset_test', 'ogr') self.assertTrue(vl.isValid()) self.assertTrue(vl.dataProvider().capabilities() & isEditable) vl = QgsVectorLayer(testPath, 'subset_test', 'ogr') vl.setSubsetString('') self.assertTrue(vl.isValid()) self.assertTrue(vl.dataProvider().capabilities() & isEditable) vl = QgsVectorLayer(testPath, 'subset_test', 'ogr') vl.setSubsetString('"category" = \'one\'') self.assertTrue(vl.isValid()) self.assertTrue(vl.dataProvider().capabilities() & isEditable) vl.setSubsetString('') self.assertTrue(vl.dataProvider().capabilities() & isEditable) def testSubsetStringExtent_bug17863(self): """Check that the extent is correct when applied in the ctor and when modified after a subset string is set """ def _lessdigits(s): return re.sub(r'(\d+\.\d{3})\d+', r'\1', s) tmpfile = os.path.join(self.basetestpath, 'testSubsetStringExtent_bug17863.gpkg') shutil.copy(TEST_DATA_DIR + '/' + 'provider/bug_17795.gpkg', tmpfile) testPath = tmpfile + '|layername=bug_17795' subSetString = '"name" = \'int\'' subSet = '|layername=bug_17795|subset=%s' % subSetString # unfiltered vl = QgsVectorLayer(testPath, 'test', 'ogr') self.assertTrue(vl.isValid()) unfiltered_extent = _lessdigits(vl.extent().toString()) del(vl) # filter after construction ... subSet_vl2 = QgsVectorLayer(testPath, 'test', 'ogr') self.assertEqual(_lessdigits(subSet_vl2.extent().toString()), unfiltered_extent) # ... apply filter now! 
subSet_vl2.setSubsetString(subSetString) self.assertEqual(subSet_vl2.subsetString(), subSetString) self.assertNotEqual(_lessdigits(subSet_vl2.extent().toString()), unfiltered_extent) filtered_extent = _lessdigits(subSet_vl2.extent().toString()) del(subSet_vl2) # filtered in constructor subSet_vl = QgsVectorLayer(testPath + subSet, 'subset_test', 'ogr') self.assertEqual(subSet_vl.subsetString(), subSetString) self.assertTrue(subSet_vl.isValid()) # This was failing in bug 17863 self.assertEqual(_lessdigits(subSet_vl.extent().toString()), filtered_extent) self.assertNotEqual(_lessdigits(subSet_vl.extent().toString()), unfiltered_extent) def testRequestWithoutGeometryOnLayerMixedGeometry(self): """ Test bugfix for https://github.com/qgis/QGIS/issues/26907 """ # Issue is more a generic one of the OGR provider, but easy to trigger with GPKG tmpfile = os.path.join(self.basetestpath, 'testRequestWithoutGeometryOnLayerMixedGeometry.gpkg') ds = ogr.GetDriverByName('GPKG').CreateDataSource(tmpfile) lyr = ds.CreateLayer('test', geom_type=ogr.wkbUnknown, options=['SPATIAL_INDEX=NO']) f = ogr.Feature(lyr.GetLayerDefn()) f.SetGeometry(ogr.CreateGeometryFromWkt('POINT(0 1)')) lyr.CreateFeature(f) f = ogr.Feature(lyr.GetLayerDefn()) f.SetGeometry(ogr.CreateGeometryFromWkt('LINESTRING(0 0,1 0)')) lyr.CreateFeature(f) f = ogr.Feature(lyr.GetLayerDefn()) f.SetGeometry(ogr.CreateGeometryFromWkt('LINESTRING(0 0,1 0)')) lyr.CreateFeature(f) f = None ds = None vl = QgsVectorLayer(u'{}'.format(tmpfile) + "|geometrytype=Point|layername=" + "test", 'test', u'ogr') self.assertTrue(vl.isValid()) request = QgsFeatureRequest().setFlags(QgsFeatureRequest.NoGeometry) features = [f for f in vl.getFeatures(request)] self.assertEqual(len(features), 1) def testAddingTwoIntFieldsWithWidth(self): """ Test bugfix for https://github.com/qgis/QGIS/issues/26840 """ tmpfile = os.path.join(self.basetestpath, 'testAddingTwoIntFieldsWithWidth.gpkg') ds = ogr.GetDriverByName('GPKG').CreateDataSource(tmpfile) lyr = ds.CreateLayer('test', geom_type=ogr.wkbPoint, options=['SPATIAL_INDEX=NO']) lyr.CreateField(ogr.FieldDefn('a', ogr.OFTInteger)) ds = None vl = QgsVectorLayer(u'{}'.format(tmpfile) + "|layername=" + "test", 'test', u'ogr') self.assertTrue(vl.isValid()) vl.startEditing() self.assertTrue(vl.addAttribute(QgsField("b", QVariant.Int, "integer", 10))) self.assertTrue(vl.commitChanges()) vl.startEditing() self.assertTrue(vl.addAttribute(QgsField("c", QVariant.Int, "integer", 10))) self.assertTrue(vl.commitChanges()) def testApproxFeatureCountAndExtent(self): """ Test perf improvement for https://github.com/qgis/QGIS/issues/26292 """ tmpfile = os.path.join(self.basetestpath, 'testApproxFeatureCountAndExtent.gpkg') ds = ogr.GetDriverByName('GPKG').CreateDataSource(tmpfile) lyr = ds.CreateLayer('test', geom_type=ogr.wkbPoint) f = ogr.Feature(lyr.GetLayerDefn()) f.SetGeometry(ogr.CreateGeometryFromWkt('POINT(0 1)')) lyr.CreateFeature(f) f = ogr.Feature(lyr.GetLayerDefn()) f.SetGeometry(ogr.CreateGeometryFromWkt('POINT(2 3)')) lyr.CreateFeature(f) fid = f.GetFID() f = ogr.Feature(lyr.GetLayerDefn()) f.SetGeometry(ogr.CreateGeometryFromWkt('POINT(4 5)')) lyr.CreateFeature(f) lyr.DeleteFeature(fid) ds = None ds = ogr.Open(tmpfile, update=1) ds.ExecuteSQL('DROP TABLE gpkg_ogr_contents') ds = None os.environ['QGIS_GPKG_FC_THRESHOLD'] = '1' vl = QgsVectorLayer(u'{}'.format(tmpfile) + "|layername=" + "test", 'test', u'ogr') self.assertTrue(vl.isValid()) fc = vl.featureCount() del os.environ['QGIS_GPKG_FC_THRESHOLD']
self.assertEqual(fc, 3) # didn't notice the hole reference = QgsGeometry.fromRect(QgsRectangle(0, 1, 4, 5)) provider_extent = QgsGeometry.fromRect(vl.extent()) self.assertTrue(QgsGeometry.compare(provider_extent.asPolygon()[0], reference.asPolygon()[0], 0.00001), provider_extent.asPolygon()[0]) def testRegenerateFid(self): """ Test regenerating feature ids """ fields = QgsFields() fields.append(QgsField('fid', QVariant.Int)) fields.append(QgsField('f1', QVariant.Int)) tmpfile = os.path.join(self.basetestpath, 'testRegenerateFid.gpkg') options = {} options['update'] = True options['driverName'] = 'GPKG' options['layerName'] = 'table1' exporter = QgsVectorLayerExporter(tmpfile, "ogr", fields, QgsWkbTypes.Polygon, QgsCoordinateReferenceSystem(3111), False, options, QgsFeatureSink.RegeneratePrimaryKey) self.assertFalse(exporter.errorCode(), 'unexpected export error {}: {}'.format(exporter.errorCode(), exporter.errorMessage())) feat = QgsFeature(fields) feat['fid'] = 0 feat['f1'] = 10 exporter.addFeature(feat) feat['fid'] = 0 feat['f1'] = 20 exporter.addFeature(feat) feat['fid'] = 1 feat['f1'] = 30 exporter.addFeature(feat) feat['fid'] = 1 feat['f1'] = 40 exporter.addFeature(feat) del exporter # make sure layers exist lyr = QgsVectorLayer('{}|layername=table1'.format(tmpfile), "lyr1", "ogr") self.assertTrue(lyr.isValid()) self.assertEqual(lyr.crs().authid(), 'EPSG:3111') self.assertEqual(lyr.wkbType(), QgsWkbTypes.Polygon) values = set([f['f1'] for f in lyr.getFeatures()]) self.assertEqual(values, set([10, 20, 30, 40])) fids = set([f['fid'] for f in lyr.getFeatures()]) self.assertEqual(len(fids), 4) def testTransaction(self): tmpfile = os.path.join(self.basetestpath, 'testTransaction.gpkg') ds = ogr.GetDriverByName('GPKG').CreateDataSource(tmpfile) lyr = ds.CreateLayer('lyr1', geom_type=ogr.wkbPoint) f = ogr.Feature(lyr.GetLayerDefn()) f.SetGeometry(ogr.CreateGeometryFromWkt('POINT(0 1)')) lyr.CreateFeature(f) lyr = ds.CreateLayer('lyr2', geom_type=ogr.wkbPoint) f = ogr.Feature(lyr.GetLayerDefn()) f.SetGeometry(ogr.CreateGeometryFromWkt('POINT(2 3)')) lyr.CreateFeature(f) f = ogr.Feature(lyr.GetLayerDefn()) f.SetGeometry(ogr.CreateGeometryFromWkt('POINT(4 5)')) lyr.CreateFeature(f) ds = None vl1 = QgsVectorLayer(u'{}'.format(tmpfile) + "|layername=" + "lyr1", 'test', u'ogr') self.assertTrue(vl1.isValid()) vl2 = QgsVectorLayer(u'{}'.format(tmpfile) + "|layername=" + "lyr2", 'test', u'ogr') self.assertTrue(vl2.isValid()) # prepare a project with transactions enabled p = QgsProject() p.setAutoTransaction(True) p.addMapLayers([vl1, vl2]) self.assertTrue(vl1.startEditing()) self.assertIsNotNone(vl1.dataProvider().transaction()) self.assertTrue(vl1.deleteFeature(1)) # An iterator opened on the layer should see the feature deleted self.assertEqual(len([f for f in vl1.getFeatures(QgsFeatureRequest())]), 0) # But not if opened from another connection vl1_external = QgsVectorLayer(u'{}'.format(tmpfile) + "|layername=" + "lyr1", 'test', u'ogr') self.assertTrue(vl1_external.isValid()) self.assertEqual(len([f for f in vl1_external.getFeatures(QgsFeatureRequest())]), 1) del vl1_external self.assertTrue(vl1.commitChanges()) # Should still get zero features on vl1 self.assertEqual(len([f for f in vl1.getFeatures(QgsFeatureRequest())]), 0) self.assertEqual(len([f for f in vl2.getFeatures(QgsFeatureRequest())]), 2) # Test undo/redo self.assertTrue(vl2.startEditing()) self.assertIsNotNone(vl2.dataProvider().transaction()) self.assertTrue(vl2.editBuffer().deleteFeature(1)) self.assertEqual(len([f for f in 
vl2.getFeatures(QgsFeatureRequest())]), 1) self.assertTrue(vl2.editBuffer().deleteFeature(2)) self.assertEqual(len([f for f in vl2.getFeatures(QgsFeatureRequest())]), 0) vl2.undoStack().undo() self.assertEqual(len([f for f in vl2.getFeatures(QgsFeatureRequest())]), 1) vl2.undoStack().undo() self.assertEqual(len([f for f in vl2.getFeatures(QgsFeatureRequest())]), 2) vl2.undoStack().redo() self.assertEqual(len([f for f in vl2.getFeatures(QgsFeatureRequest())]), 1) self.assertTrue(vl2.commitChanges()) self.assertEqual(len([f for f in vl2.getFeatures(QgsFeatureRequest())]), 1) del vl1 del vl2 vl2_external = QgsVectorLayer(u'{}'.format(tmpfile) + "|layername=" + "lyr2", 'test', u'ogr') self.assertTrue(vl2_external.isValid()) self.assertEqual(len([f for f in vl2_external.getFeatures(QgsFeatureRequest())]), 1) del vl2_external def testJson(self): if int(gdal.VersionInfo('VERSION_NUM')) < GDAL_COMPUTE_VERSION(2, 4, 0): return tmpfile = os.path.join(self.basetestpath, 'test_json.gpkg') testdata_path = unitTestDataPath('provider') shutil.copy(os.path.join(testdata_path, 'test_json.gpkg'), tmpfile) vl = QgsVectorLayer('{}|layerid=0'.format(tmpfile), 'foo', 'ogr') self.assertTrue(vl.isValid()) fields = vl.dataProvider().fields() self.assertEqual(fields.at(fields.indexFromName('json_content')).type(), QVariant.Map) fi = vl.getFeatures(QgsFeatureRequest()) f = QgsFeature() #test reading dict value from attribute while fi.nextFeature(f): if f['fid'] == 1: self.assertIsInstance(f['json_content'], dict) self.assertEqual(f['json_content'], {'foo': 'bar'}) #test changing dict value in attribute f['json_content'] = {'foo': 'baz'} self.assertEqual(f['json_content'], {'foo': 'baz'}) #test changing dict to list f['json_content'] = ['eins', 'zwei', 'drei'] self.assertEqual(f['json_content'], ['eins', 'zwei', 'drei']) #test changing list value in attribute f['json_content'] = ['eins', 'zwei', 'drei', 4] self.assertEqual(f['json_content'], ['eins', 'zwei', 'drei', 4]) #test changing to complex json structure f['json_content'] = {'name': 'Lily', 'age': '0', 'cars': {'car1': ['fiat tipo', 'fiat punto', 'davoser schlitten'], 'car2': 'bobbycar', 'car3': 'tesla'}} self.assertEqual(f['json_content'], {'name': 'Lily', 'age': '0', 'cars': {'car1': ['fiat tipo', 'fiat punto', 'davoser schlitten'], 'car2': 'bobbycar', 'car3': 'tesla'}}) #test adding attribute vl.startEditing() self.assertTrue(vl.addAttribute(QgsField('json_content2', QVariant.Map, "JSON", 60, 0, 'no comment', QVariant.String))) self.assertTrue(vl.commitChanges()) vl.startEditing() self.assertTrue(vl.addAttribute(QgsField('json_content3', QVariant.Map, "JSON", 60, 0, 'no comment', QVariant.String))) self.assertTrue(vl.commitChanges()) #test setting values to new attributes while fi.nextFeature(f): if f['fid'] == 2: f['json_content'] = {'uno': 'foo'} f['json_content2'] = ['uno', 'due', 'tre'] f['json_content3'] = {'uno': ['uno', 'due', 'tre']} self.assertEqual(f['json_content'], {'foo': 'baz'}) self.assertEqual(f['json_content2'], ['uno', 'due', 'tre']) self.assertEqual(f['json_content3'], {'uno': ['uno', 'due', 'tre']}) #test deleting attribute vl.startEditing() self.assertTrue(vl.deleteAttribute(vl.fields().indexFromName('json_content3'))) self.assertTrue(vl.commitChanges()) #test that the index of the remaining field is not -1 and that of the deleted field is -1 self.assertNotEqual(vl.fields().indexFromName('json_content2'), -1) self.assertEqual(vl.fields().indexFromName('json_content3'), -1) def test_quote_identifier(self): """Regression #21100""" tmpfile
= os.path.join(self.basetestpath, 'bug_21100-wierd_field_names.gpkg') # spellok shutil.copy(os.path.join(unitTestDataPath(''), 'bug_21100-wierd_field_names.gpkg'), tmpfile) # spellok vl = QgsVectorLayer('{}|layerid=0'.format(tmpfile), 'foo', 'ogr') self.assertTrue(vl.isValid()) for i in range(1, len(vl.fields())): self.assertEqual(vl.uniqueValues(i), {'a', 'b', 'c'}) def testGeopackageLayerMetadata(self): """ Geopackage layer description and identifier should be read into layer metadata automatically """ tmpfile = os.path.join(self.basetestpath, 'testGeopackageLayerMetadata.gpkg') ds = ogr.GetDriverByName('GPKG').CreateDataSource(tmpfile) lyr = ds.CreateLayer('layer1', geom_type=ogr.wkbPoint) lyr.SetMetadataItem('DESCRIPTION', "my desc") lyr.SetMetadataItem('IDENTIFIER', "my title") # see geopackage specs -- "'identifier' is analogous to 'title'" lyr.CreateField(ogr.FieldDefn('attr', ogr.OFTInteger)) f = ogr.Feature(lyr.GetLayerDefn()) f.SetGeometry(ogr.CreateGeometryFromWkt('POINT(0 0)')) lyr.CreateFeature(f) f = None vl1 = QgsVectorLayer(u'{}'.format(tmpfile) + "|layername=" + "layer1", 'test', u'ogr') self.assertTrue(vl1.isValid()) self.assertEqual(vl1.metadata().title(), 'my title') self.assertEqual(vl1.metadata().abstract(), 'my desc') def testUniqueValuesOnFidColumn(self): """Test regression #21311 OGR provider returns an empty set for GPKG uniqueValues""" tmpfile = os.path.join(self.basetestpath, 'testGeopackageUniqueValuesOnFidColumn.gpkg') ds = ogr.GetDriverByName('GPKG').CreateDataSource(tmpfile) lyr = ds.CreateLayer('test', geom_type=ogr.wkbPolygon) lyr.CreateField(ogr.FieldDefn('str_field', ogr.OFTString)) f = ogr.Feature(lyr.GetLayerDefn()) f.SetGeometry(ogr.CreateGeometryFromWkt('POLYGON ((0 0,0 1,1 1,1 0,0 0))')) f.SetField('str_field', 'one') lyr.CreateFeature(f) f = ogr.Feature(lyr.GetLayerDefn()) f.SetGeometry(ogr.CreateGeometryFromWkt('POLYGON ((0 0,0 2,2 2,2 0,0 0))')) f.SetField('str_field', 'two') lyr.CreateFeature(f) f = None ds = None vl1 = QgsVectorLayer('{}'.format(tmpfile) + "|layername=" + "test", 'test', 'ogr') self.assertTrue(vl1.isValid()) self.assertEqual(vl1.uniqueValues(0), {1, 2}) self.assertEqual(vl1.uniqueValues(1), {'one', 'two'}) def testForeignKeyViolation(self): """Test that we can open a dataset with a foreign key violation""" tmpfile = os.path.join(self.basetestpath, 'testForeignKeyViolation.gpkg') ds = ogr.GetDriverByName('GPKG').CreateDataSource(tmpfile) lyr = ds.CreateLayer('test', geom_type=ogr.wkbPoint) f = ogr.Feature(lyr.GetLayerDefn()) f.SetGeometry(ogr.CreateGeometryFromWkt('POINT(0 1)')) lyr.CreateFeature(f) ds.ExecuteSQL("PRAGMA foreign_keys = OFF") ds.ExecuteSQL("CREATE TABLE foo(id INTEGER)") ds.ExecuteSQL("CREATE TABLE bar(fkey INTEGER, CONSTRAINT fkey_constraint FOREIGN KEY (fkey) REFERENCES foo(id))") ds.ExecuteSQL("INSERT INTO bar VALUES (1)") ds = None vl = QgsVectorLayer('{}'.format(tmpfile) + "|layername=" + "test", 'test', 'ogr') self.assertTrue(vl.isValid()) fids = set([f['fid'] for f in vl.getFeatures()]) self.assertEqual(len(fids), 1) if __name__ == '__main__': unittest.main()
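
# Quick reference (informal, derived from the tests above; the paths are
# placeholders): the OGR provider layer-URI forms exercised in this file.
#
#   QgsVectorLayer('/tmp/x.gpkg', 'lyr', 'ogr')                 # first layer
#   QgsVectorLayer('/tmp/x.gpkg|layerid=0', 'lyr', 'ogr')       # layer by index
#   QgsVectorLayer('/tmp/x.gpkg|layername=test', 'lyr', 'ogr')  # layer by name
#   QgsVectorLayer("/tmp/x.gpkg|layername=test|subset=\"foo\" = 'bar'", 'lyr', 'ogr')
#   QgsVectorLayer('/tmp/x.gpkg|geometrytype=Point|layername=test', 'lyr', 'ogr')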
gpl-2.0
-3,331,028,875,646,041,600
42.551977
186
0.623479
false
huanpc/lab_cloud_computing
docs/learning-by-doing/week09_10_connectDB_restApi/auto scaling system/constant.py
1
3300
__author__ = 'huanpc'

CPU_THRESHOLD_UP = 0.1
CPU_THRESHOLD_DOWN = 0.001
MEM_THRESHOLD_UP = 15700000.0
MEM_THRESHOLD_DOWN = 2097152.0
HOST = '25.22.28.94'
PORT = 8086
USER = 'root'
PASS = 'root'
DATABASE = 'cadvisor'
SELECT_CPU = 'derivative(cpu_cumulative_usage)'
SELECT_MEMORY = 'median(memory_usage)'
SERIES = '"stats"'
APP_NAME = 'demo-server'
NAME = ''
WHERE_BEGIN = 'container_name =~ /.*'
WHERE_END = '.*/ and time>now()-5m'
GROUP_BY = "time(10s), container_name"
CONDITION = " limit 1 "
JSON_APP_DEFINE = './demo_web_server.json'
APP_ID = 'demo-server'
MARATHON_URI = 'localhost:8080'
HEADER = {'Content-Type': 'application/json'}
# scale
SCALE_LINK = '/v2/apps/' + APP_ID + '?force=true'
TIME_DELAY_LONG = 15
TIME_DELAY_SORT = 5
ROOT_PASSWORD = '444455555'
MODEL_ENGINE = 'mysql+pymysql://root:autoscaling@secret@127.0.0.1:3306/policydb'
SCHEMA = '''
# PolicyDB
# apps.enabled: 0-not scaled, 1-scaled
# apps.locked: 0-unlocked, 1-locked
# apps.next_time: time in the future the app'll be checked for scaling
# next_time = last success caused by policyX + policyX.cooldown_period
# policies.metric_type: 0-CPU, 1-memory
# policies.cooldown_period: in second
# policies.measurement_period: in second
# deleted: 0-active, 1-deleted
DROP DATABASE IF EXISTS policydb;
CREATE DATABASE policydb;
USE policydb;
CREATE TABLE apps(\
    Id INT AUTO_INCREMENT PRIMARY KEY, \
    app_uuid VARCHAR(255), \
    name VARCHAR(255), \
    min_instances SMALLINT UNSIGNED, \
    max_instances SMALLINT UNSIGNED, \
    enabled TINYINT UNSIGNED, \
    locked TINYINT UNSIGNED, \
    next_time INT \
);
CREATE TABLE policies(\
    Id INT AUTO_INCREMENT PRIMARY KEY, \
    app_uuid VARCHAR(255), \
    policy_uuid VARCHAR(255), \
    metric_type TINYINT UNSIGNED, \
    upper_threshold FLOAT, \
    lower_threshold FLOAT, \
    instances_out SMALLINT UNSIGNED, \
    instances_in SMALLINT UNSIGNED, \
    cooldown_period SMALLINT UNSIGNED, \
    measurement_period SMALLINT UNSIGNED, \
    deleted TINYINT UNSIGNED \
);
# tuna
CREATE TABLE crons(\
    Id INT AUTO_INCREMENT PRIMARY KEY, \
    app_uuid VARCHAR(255), \
    cron_uuid VARCHAR(255), \
    min_instances SMALLINT UNSIGNED, \
    max_instances SMALLINT UNSIGNED, \
    cron_string VARCHAR(255), \
    deleted TINYINT UNSIGNED \
);
# end tuna -----
# Test data
# Stresser
INSERT INTO apps(app_uuid, name, min_instances, max_instances, enabled, locked, next_time) \
VALUES ("f5bfcbad-7daa-4317-97cc-e42ae46b6ad1", "java-allocateMemory", 1, 5, 1, 0, 0);
INSERT INTO policies(app_uuid, policy_uuid, metric_type, upper_threshold, lower_threshold, instances_out, instances_in, cooldown_period, measurement_period, deleted) \
VALUES ("f5bfcbad-7daa-4317-97cc-e42ae46b6ad1", "b3da4493-58f1-4d65-bf43-e52e7de62151", 1, 0.7, 0.3, 1, 1, 30, 10, 0);
# INSERT INTO policies(app_uuid, policy_uuid, metric_type, upper_threshold, lower_threshold, instances_out, instances_in, cooldown_period, measurement_period, deleted) \
# VALUES ("f5bfcbad-7daa-4317-97cc-e42ae46b6ad1", "b3da4493-58f1-4d65-bf43-e52e7dpolicy", 1, 0.7, 0.3, 1, 1, 30, 10, 0);
INSERT INTO crons(app_uuid, cron_uuid, min_instances, max_instances, cron_string, deleted) \
VALUES ("f5bfcbad-7daa-4317-97cc-e42ae46b6ad1", "b3da4493-58f1-4d65-bf43-e52eacascron", 1, 10, "* * * * * *", false);
'''
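
# A minimal sketch (not part of the original module) of how the fragments above
# are meant to compose into an InfluxDB query; the 'select ... from ... where ...
# group by ...' skeleton is an assumption, the constants are not.
#
# EXAMPLE_QUERY = 'select {} from {} where {}{}{} group by {}'.format(
#     SELECT_CPU, SERIES, WHERE_BEGIN, APP_NAME, WHERE_END, GROUP_BY)
# # -> select derivative(cpu_cumulative_usage) from "stats"
# #    where container_name =~ /.*demo-server.*/ and time>now()-5m
# #    group by time(10s), container_name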
apache-2.0
8,404,436,930,996,980,000
33.736842
169
0.701818
false
martinohanlon/pelmetcam
GPSController.py
1
2913
from gps import *
import time
import datetime
import threading
import math
import sys


class GpsUtils():
    MPS_TO_MPH = 2.2369362920544

    @staticmethod
    def latLongToXY(lat, lon):
        rMajor = 6378137  # Equatorial Radius, WGS84
        shift = math.pi * rMajor
        x = lon * shift / 180
        y = math.log(math.tan((90 + lat) * math.pi / 360)) / (math.pi / 180)
        y = y * shift / 180
        return x, y


class GpsController(threading.Thread):
    def __init__(self):
        threading.Thread.__init__(self)
        self.gpsd = gps(mode=WATCH_ENABLE)  # starting the stream of info
        self.running = False

    def run(self):
        self.running = True
        while self.running:
            # grab EACH set of gpsd info to clear the buffer
            self.gpsd.next()

    def stopController(self):
        self.running = False

    @property
    def fix(self):
        return self.gpsd.fix

    @property
    def utc(self):
        return self.gpsd.utc

    @property
    def satellites(self):
        return self.gpsd.satellites

    @property
    def fixdatetime(self):
        # return None if we can't get a time
        UTCTime = None
        try:
            # have we got a fix?
            if self.fix.mode != 1:
                # strip time from utc
                UTCTime = time.strptime(self.utc, "%Y-%m-%dT%H:%M:%S.%fz")
                # convert time struct to datetime
                UTCTime = datetime.datetime.fromtimestamp(time.mktime(UTCTime))
        except:
            # return None if we get an error
            UTCTime = None
        return UTCTime

if __name__ == '__main__':
    gpsc = GpsController()  # create the thread
    try:
        gpsc.start()  # start it up
        while True:
            print "latitude ", gpsc.fix.latitude
            print "longitude ", gpsc.fix.longitude
            print "time utc ", gpsc.utc, " + ", gpsc.gpsd.fix.time
            print "altitude (m)", gpsc.fix.altitude
            #print "eps ", gpsc.gpsd.fix.eps
            #print "epx ", gpsc.gpsd.fix.epx
            #print "epv ", gpsc.gpsd.fix.epv
            #print "ept ", gpsc.gpsd.fix.ept
            print "speed (m/s) ", gpsc.fix.speed
            print "track ", gpsc.gpsd.fix.track
            print "mode ", gpsc.gpsd.fix.mode
            #print "sats ", gpsc.satellites
            print "climb ", gpsc.fix.climb
            print gpsc.fixdatetime
            x, y = GpsUtils.latLongToXY(gpsc.fix.latitude, gpsc.fix.longitude)
            print "x", x
            print "y", y
            time.sleep(0.5)
    # Ctrl C
    except KeyboardInterrupt:
        print "User cancelled"
    # Error
    except:
        print "Unexpected error:", sys.exc_info()[0]
        raise
    finally:
        print "Stopping gps controller"
        gpsc.stopController()
        # wait for the thread to finish
        gpsc.join()
        print "Done"
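
# Worked example for GpsUtils.latLongToXY (the Web-Mercator-style projection
# implemented above); the values follow directly from the formula:
#   GpsUtils.latLongToXY(0, 0)   -> (0.0, 0.0)           since ln(tan(45 deg)) == 0
#   GpsUtils.latLongToXY(0, 180) -> (~20037508.34, 0.0)  since x == pi * 6378137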
mit
-870,046,744,598,906,500
26.742857
79
0.544113
false
Symmetry-Innovations-Pty-Ltd/Python-2.7-for-QNX6.5.0-x86
usr/pkg/lib/python2.7/idlelib/IOBinding.py
16
20975
# changes by dscherer@cmu.edu # - IOBinding.open() replaces the current window with the opened file, # if the current window is both unmodified and unnamed # - IOBinding.loadfile() interprets Windows, UNIX, and Macintosh # end-of-line conventions, instead of relying on the standard library, # which will only understand the local convention. import os import types import sys import codecs import tempfile import tkFileDialog import tkMessageBox import re from Tkinter import * from SimpleDialog import SimpleDialog from idlelib.configHandler import idleConf try: from codecs import BOM_UTF8 except ImportError: # only available since Python 2.3 BOM_UTF8 = '\xef\xbb\xbf' # Try setting the locale, so that we can find out # what encoding to use try: import locale locale.setlocale(locale.LC_CTYPE, "") except (ImportError, locale.Error): pass # Encoding for file names filesystemencoding = sys.getfilesystemencoding() encoding = "ascii" if sys.platform == 'win32': # On Windows, we could use "mbcs". However, to give the user # a portable encoding name, we need to find the code page try: encoding = locale.getdefaultlocale()[1] codecs.lookup(encoding) except LookupError: pass else: try: # Different things can fail here: the locale module may not be # loaded, it may not offer nl_langinfo, or CODESET, or the # resulting codeset may be unknown to Python. We ignore all # these problems, falling back to ASCII encoding = locale.nl_langinfo(locale.CODESET) if encoding is None or encoding == '': # situation occurs on Mac OS X encoding = 'ascii' codecs.lookup(encoding) except (NameError, AttributeError, LookupError): # Try getdefaultlocale as well: it parses environment variables, # which may give a clue. Unfortunately, getdefaultlocale has # bugs that can cause ValueError. try: encoding = locale.getdefaultlocale()[1] if encoding is None or encoding == '': # situation occurs on Mac OS X encoding = 'ascii' codecs.lookup(encoding) except (ValueError, LookupError): pass encoding = encoding.lower() coding_re = re.compile("coding[:=]\s*([-\w_.]+)") class EncodingMessage(SimpleDialog): "Inform user that an encoding declaration is needed." def __init__(self, master, enc): self.should_edit = False self.root = top = Toplevel(master) top.bind("<Return>", self.return_event) top.bind("<Escape>", self.do_ok) top.protocol("WM_DELETE_WINDOW", self.wm_delete_window) top.wm_title("I/O Warning") top.wm_iconname("I/O Warning") self.top = top l1 = Label(top, text="Non-ASCII found, yet no encoding declared. Add a line like") l1.pack(side=TOP, anchor=W) l2 = Entry(top, font="courier") l2.insert(0, "# -*- coding: %s -*-" % enc) # For some reason, the text is not selectable anymore if the # widget is disabled. # l2['state'] = DISABLED l2.pack(side=TOP, anchor = W, fill=X) l3 = Label(top, text="to your file\n" "Choose OK to save this file as %s\n" "Edit your general options to silence this warning" % enc) l3.pack(side=TOP, anchor = W) buttons = Frame(top) buttons.pack(side=TOP, fill=X) # Both return and cancel mean the same thing: do nothing self.default = self.cancel = 0 b1 = Button(buttons, text="Ok", default="active", command=self.do_ok) b1.pack(side=LEFT, fill=BOTH, expand=1) b2 = Button(buttons, text="Edit my file", command=self.do_edit) b2.pack(side=LEFT, fill=BOTH, expand=1) self._set_transient(master) def do_ok(self): self.done(0) def do_edit(self): self.done(1) def coding_spec(str): """Return the encoding declaration according to PEP 263. Raise LookupError if the encoding is declared but unknown.
""" # Only consider the first two lines str = str.split("\n")[:2] str = "\n".join(str) match = coding_re.search(str) if not match: return None name = match.group(1) # Check whether the encoding is known import codecs try: codecs.lookup(name) except LookupError: # The standard encoding error does not indicate the encoding raise LookupError, "Unknown encoding "+name return name class IOBinding: def __init__(self, editwin): self.editwin = editwin self.text = editwin.text self.__id_open = self.text.bind("<<open-window-from-file>>", self.open) self.__id_save = self.text.bind("<<save-window>>", self.save) self.__id_saveas = self.text.bind("<<save-window-as-file>>", self.save_as) self.__id_savecopy = self.text.bind("<<save-copy-of-window-as-file>>", self.save_a_copy) self.fileencoding = None self.__id_print = self.text.bind("<<print-window>>", self.print_window) def close(self): # Undo command bindings self.text.unbind("<<open-window-from-file>>", self.__id_open) self.text.unbind("<<save-window>>", self.__id_save) self.text.unbind("<<save-window-as-file>>",self.__id_saveas) self.text.unbind("<<save-copy-of-window-as-file>>", self.__id_savecopy) self.text.unbind("<<print-window>>", self.__id_print) # Break cycles self.editwin = None self.text = None self.filename_change_hook = None def get_saved(self): return self.editwin.get_saved() def set_saved(self, flag): self.editwin.set_saved(flag) def reset_undo(self): self.editwin.reset_undo() filename_change_hook = None def set_filename_change_hook(self, hook): self.filename_change_hook = hook filename = None dirname = None def set_filename(self, filename): if filename and os.path.isdir(filename): self.filename = None self.dirname = filename else: self.filename = filename self.dirname = None self.set_saved(1) if self.filename_change_hook: self.filename_change_hook() def open(self, event=None, editFile=None): if self.editwin.flist: if not editFile: filename = self.askopenfile() else: filename=editFile if filename: # If the current window has no filename and hasn't been # modified, we replace its contents (no loss). Otherwise # we open a new window. But we won't replace the # shell window (which has an interp(reter) attribute), which # gets set to "not modified" at every new prompt. try: interp = self.editwin.interp except AttributeError: interp = None if not self.filename and self.get_saved() and not interp: self.editwin.flist.open(filename, self.loadfile) else: self.editwin.flist.open(filename) else: self.text.focus_set() return "break" # # Code for use outside IDLE: if self.get_saved(): reply = self.maybesave() if reply == "cancel": self.text.focus_set() return "break" if not editFile: filename = self.askopenfile() else: filename=editFile if filename: self.loadfile(filename) else: self.text.focus_set() return "break" eol = r"(\r\n)|\n|\r" # \r\n (Windows), \n (UNIX), or \r (Mac) eol_re = re.compile(eol) eol_convention = os.linesep # Default def loadfile(self, filename): try: # open the file in binary mode so that we can handle # end-of-line convention ourselves. 
f = open(filename,'rb') chars = f.read() f.close() except IOError, msg: tkMessageBox.showerror("I/O Error", str(msg), master=self.text) return False chars = self.decode(chars) # We now convert all end-of-lines to '\n's firsteol = self.eol_re.search(chars) if firsteol: self.eol_convention = firsteol.group(0) if isinstance(self.eol_convention, unicode): # Make sure it is an ASCII string self.eol_convention = self.eol_convention.encode("ascii") chars = self.eol_re.sub(r"\n", chars) self.text.delete("1.0", "end") self.set_filename(None) self.text.insert("1.0", chars) self.reset_undo() self.set_filename(filename) self.text.mark_set("insert", "1.0") self.text.see("insert") self.updaterecentfileslist(filename) return True def decode(self, chars): """Create a Unicode string If that fails, let Tcl try its best """ # Check presence of a UTF-8 signature first if chars.startswith(BOM_UTF8): try: chars = chars[3:].decode("utf-8") except UnicodeError: # has UTF-8 signature, but fails to decode... return chars else: # Indicates that this file originally had a BOM self.fileencoding = BOM_UTF8 return chars # Next look for coding specification try: enc = coding_spec(chars) except LookupError, name: tkMessageBox.showerror( title="Error loading the file", message="The encoding '%s' is not known to this Python "\ "installation. The file may not display correctly" % name, master = self.text) enc = None if enc: try: return unicode(chars, enc) except UnicodeError: pass # If it is ASCII, we need not to record anything try: return unicode(chars, 'ascii') except UnicodeError: pass # Finally, try the locale's encoding. This is deprecated; # the user should declare a non-ASCII encoding try: chars = unicode(chars, encoding) self.fileencoding = encoding except UnicodeError: pass return chars def maybesave(self): if self.get_saved(): return "yes" message = "Do you want to save %s before closing?" % ( self.filename or "this untitled document") confirm = tkMessageBox.askyesnocancel( title="Save On Close", message=message, default=tkMessageBox.YES, master=self.text) if confirm: reply = "yes" self.save(None) if not self.get_saved(): reply = "cancel" elif confirm is None: reply = "cancel" else: reply = "no" self.text.focus_set() return reply def save(self, event): if not self.filename: self.save_as(event) else: if self.writefile(self.filename): self.set_saved(True) try: self.editwin.store_file_breaks() except AttributeError: # may be a PyShell pass self.text.focus_set() return "break" def save_as(self, event): filename = self.asksavefile() if filename: if self.writefile(filename): self.set_filename(filename) self.set_saved(1) try: self.editwin.store_file_breaks() except AttributeError: pass self.text.focus_set() self.updaterecentfileslist(filename) return "break" def save_a_copy(self, event): filename = self.asksavefile() if filename: self.writefile(filename) self.text.focus_set() self.updaterecentfileslist(filename) return "break" def writefile(self, filename): self.fixlastline() chars = self.encode(self.text.get("1.0", "end-1c")) if self.eol_convention != "\n": chars = chars.replace("\n", self.eol_convention) try: f = open(filename, "wb") f.write(chars) f.flush() f.close() return True except IOError, msg: tkMessageBox.showerror("I/O Error", str(msg), master=self.text) return False def encode(self, chars): if isinstance(chars, types.StringType): # This is either plain ASCII, or Tk was returning mixed-encoding # text to us. Don't try to guess further. return chars # See whether there is anything non-ASCII in it. 
# If not, no need to figure out the encoding. try: return chars.encode('ascii') except UnicodeError: pass # If there is an encoding declared, try this first. try: enc = coding_spec(chars) failed = None except LookupError, msg: failed = msg enc = None if enc: try: return chars.encode(enc) except UnicodeError: failed = "Invalid encoding '%s'" % enc if failed: tkMessageBox.showerror( "I/O Error", "%s. Saving as UTF-8" % failed, master = self.text) # If there was a UTF-8 signature, use that. This should not fail if self.fileencoding == BOM_UTF8 or failed: return BOM_UTF8 + chars.encode("utf-8") # Try the original file encoding next, if any if self.fileencoding: try: return chars.encode(self.fileencoding) except UnicodeError: tkMessageBox.showerror( "I/O Error", "Cannot save this as '%s' anymore. Saving as UTF-8" \ % self.fileencoding, master = self.text) return BOM_UTF8 + chars.encode("utf-8") # Nothing was declared, and we had not determined an encoding # on loading. Recommend an encoding line. config_encoding = idleConf.GetOption("main","EditorWindow", "encoding") if config_encoding == 'utf-8': # User has requested that we save files as UTF-8 return BOM_UTF8 + chars.encode("utf-8") ask_user = True try: chars = chars.encode(encoding) enc = encoding if config_encoding == 'locale': ask_user = False except UnicodeError: chars = BOM_UTF8 + chars.encode("utf-8") enc = "utf-8" if not ask_user: return chars dialog = EncodingMessage(self.editwin.top, enc) dialog.go() if dialog.num == 1: # User asked us to edit the file encline = "# -*- coding: %s -*-\n" % enc firstline = self.text.get("1.0", "2.0") if firstline.startswith("#!"): # Insert encoding after #! line self.text.insert("2.0", encline) else: self.text.insert("1.0", encline) return self.encode(self.text.get("1.0", "end-1c")) return chars def fixlastline(self): c = self.text.get("end-2c") if c != '\n': self.text.insert("end-1c", "\n") def print_window(self, event): confirm = tkMessageBox.askokcancel( title="Print", message="Print to Default Printer", default=tkMessageBox.OK, master=self.text) if not confirm: self.text.focus_set() return "break" tempfilename = None saved = self.get_saved() if saved: filename = self.filename # shell undo is reset after every prompt, looks saved, probably isn't if not saved or filename is None: (tfd, tempfilename) = tempfile.mkstemp(prefix='IDLE_tmp_') filename = tempfilename os.close(tfd) if not self.writefile(tempfilename): os.unlink(tempfilename) return "break" platform = os.name printPlatform = True if platform == 'posix': #posix platform command = idleConf.GetOption('main','General', 'print-command-posix') command = command + " 2>&1" elif platform == 'nt': #win32 platform command = idleConf.GetOption('main','General','print-command-win') else: #no printing for this platform printPlatform = False if printPlatform: #we can try to print for this platform command = command % filename pipe = os.popen(command, "r") # things can get ugly on NT if there is no printer available. 
output = pipe.read().strip() status = pipe.close() if status: output = "Printing failed (exit status 0x%x)\n" % \ status + output if output: output = "Printing command: %s\n" % repr(command) + output tkMessageBox.showerror("Print status", output, master=self.text) else: #no printing for this platform message = "Printing is not enabled for this platform: %s" % platform tkMessageBox.showinfo("Print status", message, master=self.text) if tempfilename: os.unlink(tempfilename) return "break" opendialog = None savedialog = None filetypes = [ ("Python files", "*.py *.pyw", "TEXT"), ("Text files", "*.txt", "TEXT"), ("All files", "*"), ] def askopenfile(self): dir, base = self.defaultfilename("open") if not self.opendialog: self.opendialog = tkFileDialog.Open(master=self.text, filetypes=self.filetypes) filename = self.opendialog.show(initialdir=dir, initialfile=base) if isinstance(filename, unicode): filename = filename.encode(filesystemencoding) return filename def defaultfilename(self, mode="open"): if self.filename: return os.path.split(self.filename) elif self.dirname: return self.dirname, "" else: try: pwd = os.getcwd() except os.error: pwd = "" return pwd, "" def asksavefile(self): dir, base = self.defaultfilename("save") if not self.savedialog: self.savedialog = tkFileDialog.SaveAs(master=self.text, filetypes=self.filetypes) filename = self.savedialog.show(initialdir=dir, initialfile=base) if isinstance(filename, unicode): filename = filename.encode(filesystemencoding) return filename def updaterecentfileslist(self,filename): "Update recent file list on all editor windows" self.editwin.update_recent_files_list(filename) def test(): root = Tk() class MyEditWin: def __init__(self, text): self.text = text self.flist = None self.text.bind("<Control-o>", self.open) self.text.bind("<Control-s>", self.save) self.text.bind("<Alt-s>", self.save_as) self.text.bind("<Alt-z>", self.save_a_copy) def get_saved(self): return 0 def set_saved(self, flag): pass def reset_undo(self): pass def open(self, event): self.text.event_generate("<<open-window-from-file>>") def save(self, event): self.text.event_generate("<<save-window>>") def save_as(self, event): self.text.event_generate("<<save-window-as-file>>") def save_a_copy(self, event): self.text.event_generate("<<save-copy-of-window-as-file>>") text = Text(root) text.pack() text.focus_set() editwin = MyEditWin(text) io = IOBinding(editwin) root.mainloop() if __name__ == "__main__": test()
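
# Illustration (not from the original module) of coding_spec(), which only
# inspects the first two lines of the text, per PEP 263:
#
#   coding_spec("# -*- coding: utf-8 -*-\n")                   -> 'utf-8'
#   coding_spec("#!/usr/bin/env python\n# coding: latin-1\n")  -> 'latin-1'
#   coding_spec("print 'hi'\n")                                -> None
#   coding_spec("# coding: no-such-codec\n")                   -> raises LookupError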
mit
3,166,070,688,643,800,000
34.311448
80
0.548653
false
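The encode method above tries ASCII first, then any PEP 263 coding declaration, then falls back to UTF-8 with a BOM. A standalone sketch of that ladder, assuming a simplified coding_spec (IDLE's real one is more thorough) and omitting the dialog/error handling:

import re
from codecs import BOM_UTF8

CODING_RE = re.compile(r'^[ \t\f]*#.*coding[:=][ \t]*([-\w.]+)')

def coding_spec(chars):
    # Look for a PEP 263 declaration in the first two lines only.
    for line in chars.split('\n')[:2]:
        match = CODING_RE.match(line)
        if match:
            return match.group(1)
    return None

def encode_for_save(chars):
    try:
        return chars.encode('ascii')        # pure ASCII needs no negotiation
    except UnicodeError:
        pass
    enc = coding_spec(chars)
    if enc:
        try:
            return chars.encode(enc)        # honor the declared encoding
        except (LookupError, UnicodeError):
            pass
    return BOM_UTF8 + chars.encode('utf-8') # last resort, as above

print(encode_for_save('# -*- coding: latin-1 -*-\nname = "caf\xe9"\n'))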
bmispelon/csvkit
csvkit/sql.py
21
3016
#!/usr/bin/env python import datetime import six from sqlalchemy import Column, MetaData, Table, create_engine from sqlalchemy import BigInteger, Boolean, Date, DateTime, Float, Integer, String, Time from sqlalchemy.schema import CreateTable NoneType = type(None) DIALECTS = { 'access': 'access.base', 'firebird': 'firebird.kinterbasdb', 'informix': 'informix.informixdb', 'maxdb': 'maxdb.sapdb', 'mssql': 'mssql.pyodbc', 'mysql': 'mysql.mysqlconnector', 'oracle': 'oracle.cx_oracle', 'postgresql': 'postgresql.psycopg2', 'sqlite': 'sqlite.pysqlite', 'sybase': 'sybase.pyodbc' } NULL_COLUMN_MAX_LENGTH = 32 SQL_INTEGER_MAX = 2147483647 SQL_INTEGER_MIN = -2147483647 def make_column(column, no_constraints=False): """ Creates a sqlalchemy column from a csvkit Column. """ sql_column_kwargs = {} sql_type_kwargs = {} column_types = { bool: Boolean, #int: Integer, see special case below float: Float, datetime.datetime: DateTime, datetime.date: Date, datetime.time: Time, NoneType: String, six.text_type: String } if column.type in column_types: sql_column_type = column_types[column.type] elif column.type is int: column_max = max([v for v in column if v is not None]) column_min = min([v for v in column if v is not None]) if column_max > SQL_INTEGER_MAX or column_min < SQL_INTEGER_MIN: sql_column_type = BigInteger else: sql_column_type = Integer else: raise ValueError('Unexpected normalized column type: %s' % column.type) if no_constraints is False: if column.type is NoneType: sql_type_kwargs['length'] = NULL_COLUMN_MAX_LENGTH elif column.type is six.text_type: sql_type_kwargs['length'] = column.max_length() sql_column_kwargs['nullable'] = column.has_nulls() return Column(column.name, sql_column_type(**sql_type_kwargs), **sql_column_kwargs) def get_connection(connection_string): engine = create_engine(connection_string) metadata = MetaData(engine) return engine, metadata def make_table(csv_table, name='table_name', no_constraints=False, db_schema=None, metadata=None): """ Creates a sqlalchemy table from a csvkit Table. """ if not metadata: metadata = MetaData() sql_table = Table(csv_table.name, metadata, schema=db_schema) for column in csv_table: sql_table.append_column(make_column(column, no_constraints)) return sql_table def make_create_table_statement(sql_table, dialect=None): """ Generates a CREATE TABLE statement for a sqlalchemy table. """ if dialect: module = __import__('sqlalchemy.dialects.%s' % DIALECTS[dialect], fromlist=['dialect']) sql_dialect = module.dialect() else: sql_dialect = None return six.text_type(CreateTable(sql_table).compile(dialect=sql_dialect)).strip() + ';'
mit
191,187,529,732,256,830
28.568627
98
0.647546
false
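make_create_table_statement above is essentially CreateTable plus an optional dialect. A hedged usage sketch with a hand-built sqlalchemy Table, since constructing a csvkit Table needs the rest of the library (the table and column names here are invented):

from sqlalchemy import Column, Integer, MetaData, String, Table
from sqlalchemy.schema import CreateTable

metadata = MetaData()
people = Table(
    'people', metadata,
    Column('id', Integer, nullable=False),
    Column('name', String(32), nullable=True),
)

# Same compile-and-terminate step as make_create_table_statement,
# using the default (generic) dialect:
print(str(CreateTable(people).compile()).strip() + ';')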
CloudServer/cinder
cinder/openstack/common/scheduler/filters/capabilities_filter.py
26
2792
# Copyright (c) 2011 OpenStack Foundation.
# All Rights Reserved.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

import logging

import six

from cinder.openstack.common.scheduler import filters
from cinder.openstack.common.scheduler.filters import extra_specs_ops

LOG = logging.getLogger(__name__)


class CapabilitiesFilter(filters.BaseHostFilter):
    """HostFilter to work with resource (instance & volume) type records."""

    def _satisfies_extra_specs(self, capabilities, resource_type):
        """Check that the capabilities provided by the services satisfy
        the extra specs associated with the resource type.
        """
        extra_specs = resource_type.get('extra_specs', [])
        if not extra_specs:
            return True

        for key, req in six.iteritems(extra_specs):
            # Either not scope format, or in capabilities scope
            scope = key.split(':')
            if len(scope) > 1 and scope[0] != "capabilities":
                continue
            elif scope[0] == "capabilities":
                del scope[0]

            cap = capabilities
            for index in range(len(scope)):
                try:
                    cap = cap.get(scope[index])
                except AttributeError:
                    return False
                if cap is None:
                    return False
            if not extra_specs_ops.match(cap, req):
                LOG.debug("extra_spec requirement '%(req)s' "
                          "does not match '%(cap)s'",
                          {'req': req, 'cap': cap})
                return False
        return True

    def host_passes(self, host_state, filter_properties):
        """Return True if the host satisfies the resource type's extra specs,
        False otherwise.
        """
        # Note(zhiteng) Currently only Cinder and Nova are using
        # this filter, so the resource type is either instance or
        # volume.
        resource_type = filter_properties.get('resource_type')
        if not self._satisfies_extra_specs(host_state.capabilities,
                                           resource_type):
            LOG.debug("%(host_state)s fails resource_type extra_specs "
                      "requirements", {'host_state': host_state})
            return False
        return True
apache-2.0
-1,256,998,803,532,345,900
38.323944
78
0.602077
false
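A self-contained sketch of the scoped-key traversal that _satisfies_extra_specs performs, with invented capability data: a key scoped as capabilities:a:b walks nested dicts a -> b, while keys under any other scope are skipped rather than failing the host:

def lookup(capabilities, key):
    scope = key.split(':')
    if len(scope) > 1 and scope[0] != 'capabilities':
        return None                 # foreign scope: the filter ignores it
    if scope[0] == 'capabilities':
        del scope[0]
    cap = capabilities
    for part in scope:
        if not isinstance(cap, dict):
            return None             # ran out of nesting: requirement fails
        cap = cap.get(part)
    return cap

caps = {'pool': {'thin_provisioning': 'True'}}
print(lookup(caps, 'capabilities:pool:thin_provisioning'))  # -> 'True'
print(lookup(caps, 'hw:cpu_policy'))                        # -> None (skipped)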
Technocaveman/There-is-no-Third-Step
node_modules/pygmentize-bundled/vendor/pygments/pygments/styles/fruity.py
364
1298
# -*- coding: utf-8 -*-
"""
    pygments.styles.fruity
    ~~~~~~~~~~~~~~~~~~~~~~

    pygments version of my "fruity" vim theme.

    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

from pygments.style import Style
from pygments.token import Token, Comment, Name, Keyword, \
    Generic, Number, String, Whitespace


class FruityStyle(Style):
    """
    Pygments version of the "fruity" vim theme.
    """

    background_color = '#111111'
    highlight_color = '#333333'

    styles = {
        Whitespace:         '#888888',
        Token:              '#ffffff',
        Generic.Output:     '#444444 bg:#222222',
        Keyword:            '#fb660a bold',
        Keyword.Pseudo:     'nobold',
        Number:             '#0086f7 bold',
        Name.Tag:           '#fb660a bold',
        Name.Variable:      '#fb660a',
        Comment:            '#008800 bg:#0f140f italic',
        Name.Attribute:     '#ff0086 bold',
        String:             '#0086d2',
        Name.Function:      '#ff0086 bold',
        Generic.Heading:    '#ffffff bold',
        Keyword.Type:       '#cdcaa9 bold',
        Generic.Subheading: '#ffffff bold',
        Name.Constant:      '#0086d2',
        Comment.Preproc:    '#ff0007 bold'
    }
mit
3,813,035,958,184,197,000
29.904762
70
0.531587
false
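A short usage sketch for a style like the one above: Pygments styles are selected by name (or class) when constructing a formatter. Only an installed Pygments is assumed; 'fruity' ships with the library:

from pygments import highlight
from pygments.lexers import PythonLexer
from pygments.formatters import HtmlFormatter

code = 'def greet(name):\n    return "hello %s" % name\n'
formatter = HtmlFormatter(style='fruity', full=True)  # full=True also emits the CSS
print(highlight(code, PythonLexer(), formatter)[:200])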
arizvisa/syringe
template/lnkfile.py
1
40522
import ptypes, ndk, office.propertyset from ptypes import * from ndk.datatypes import * class uint0(pint.uint_t): length = 0 class GUID(ndk.GUID): pass @pbinary.littleendian class LinkFlags(pbinary.flags): _fields_ = [ (1, 'HasLinkTargetIDList'), # The shell link is saved with an item ID list (IDList). If this bit is set, a LinkTargetIDList structure (section 2.2) MUST follow the ShellLinkHeader. If this bit is not set, this structure MUST NOT be present. (1, 'HasLinkInfo'), # The shell link is saved with link information. If this bit is set, a LinkInfo structure (section 2.3) MUST be present. If this bit is not set, this structure MUST NOT be present. (1, 'HasName'), # The shell link is saved with a name string. If this bit is set, a NAME_STRING StringData structure (section 2.4) MUST be present. If this bit is not set, this structure MUST NOT be present. (1, 'HasRelativePath'), # The shell link is saved with a relative path string. If this bit is set, a RELATIVE_PATH StringData structure (section 2.4) MUST be present. If this bit is not set, this structure MUST NOT be present. (1, 'HasWorkingDir'), # The shell link is saved with a working directory string. If this bit is set, a WORKING_DIR StringData structure (section 2.4) MUST be present. If this bit is not set, this structure MUST NOT be present. (1, 'HasArguments'), # The shell link is saved with command line arguments. If this bit is set, a COMMAND_LINE_ARGUMENTS StringData structure (section 2.4) MUST be present. If this bit is not set, this structure MUST NOT be present. (1, 'HasIconLocation'), # The shell link is saved with an icon location string. If this bit is set, an ICON_LOCATION StringData structure (section 2.4) MUST be present. If this bit is not set, this structure MUST NOT be present. (1, 'IsUnicode'), # The shell link contains Unicode encoded strings. This bit SHOULD be set. If this bit is set, the StringData section contains Unicode-encoded strings; otherwise, it contains strings that are encoded using the system default code page. (1, 'ForceNoLinkInfo'), # The LinkInfo structure (section 2.3) is ignored. (1, 'HasExpString'), # The shell link is saved with an EnvironmentVariableDataBlock (section 2.5.4). (1, 'RunInSeparateProcess'), # The target is run in a separate virtual machine when launching a link target that is a 16-bit application. (1, 'Unused1'), # A bit that is undefined and MUST be ignored. (1, 'HasDarwinID'), # The shell link is saved with a DarwinDataBlock (section 2.5.3). (1, 'RunAsUser'), # The application is run as a different user when the target of the shell link is activated. (1, 'HasExpIcon'), # The shell link is saved with an IconEnvironmentDataBlock (section 2.5.5). (1, 'NoPidlAlias'), # The file system location is represented in the shell namespace when the path to an item is parsed into an IDList. (1, 'Unused2'), # A bit that is undefined and MUST be ignored. (1, 'RunWithShimLayer'), # The shell link is saved with a ShimDataBlock (section 2.5.8). (1, 'ForceNoLinkTrack'), # The TrackerDataBlock (section 2.5.10) is ignored. (1, 'EnableTargetMetadata'), # The shell link attempts to collect target properties and store them in the PropertyStoreDataBlock (section 2.5.7) when the link target is set. (1, 'DisableLinkPathTracking'), # The EnvironmentVariableDataBlock is ignored. (1, 'DisableKnownFolderTracking'), # The SpecialFolderDataBlock (section 2.5.9) and the KnownFolderDataBlock (section 2.5.6) are ignored when loading the shell link. 
If this bit is set, these extra data blocks SHOULD NOT be saved when saving the shell link. (1, 'DisableKnownFolderAlias'), # If the link has a KnownFolderDataBlock (section 2.5.6), the unaliased form of the known folder IDList SHOULD be used when translating the target IDList at the time that the link is loaded. (1, 'AllowLinkToLink'), # Creating a link that references another link is enabled. Otherwise, specifying a link as the target IDList SHOULD NOT be allowed. (1, 'UnaliasOnSave'), # When saving a link for which the target IDList is under a known folder, either the unaliased form of that known folder or the target IDList SHOULD be used. (1, 'PreferEnvironmentPath'), # The target IDList SHOULD NOT be stored; instead, the path specified in the EnvironmentVariableDataBlock (section 2.5.4) SHOULD be used to refer to the target. (1, 'KeepLocalIDListForUNCTarget'), # When the target is a UNC name that refers to a location on a local machine, the local path IDList in the PropertyStoreDataBlock (section 2.5.7) SHOULD be stored, so it can be used when the link is loaded on the local machine. (5, 'Unused'), ][::-1] @pbinary.littleendian class FileAttributesFlags(pbinary.flags): _fields_ = [ (1, 'FILE_ATTRIBUTE_READONLY'), # The file or directory is read-only. For a file, if this bit is set, applications can read the file but cannot write to it or delete it. For a directory, if this bit is set, applications cannot delete the directory. (1, 'FILE_ATTRIBUTE_HIDDEN'), # The file or directory is hidden. If this bit is set, the file or folder is not included in an ordinary directory listing. (1, 'FILE_ATTRIBUTE_SYSTEM'), # The file or directory is part of the operating system or is used exclusively by the operating system. (1, 'Reserved1'), # A bit that MUST be zero. (1, 'FILE_ATTRIBUTE_DIRECTORY'), # The link target is a directory instead of a file. (1, 'FILE_ATTRIBUTE_ARCHIVE'), # The file or directory is an archive file. Applications use this flag to mark files for backup or removal. (1, 'Reserved2'), # A bit that MUST be zero. (1, 'FILE_ATTRIBUTE_NORMAL'), # The file or directory has no other flags set. If this bit is 1, all other bits in this structure MUST be clear. (1, 'FILE_ATTRIBUTE_TEMPORARY'), # The file is being used for temporary storage. (1, 'FILE_ATTRIBUTE_SPARSE_FILE'), # The file is a sparse file. (1, 'FILE_ATTRIBUTE_REPARSE_POINT'), # The file or directory has an associated reparse point. (1, 'FILE_ATTRIBUTE_COMPRESSED'), # The file or directory is compressed. For a file, this means that all data in the file is compressed. For a directory, this means that compression is the default for newly created files and subdirectories. (1, 'FILE_ATTRIBUTE_OFFLINE'), # The data of the file is not immediately available. (1, 'FILE_ATTRIBUTE_NOT_CONTENT_INDEXED'), # The contents of the file need to be indexed. (1, 'FILE_ATTRIBUTE_ENCRYPTED'), # The file or directory is encrypted. For a file, this means that all data in the file is encrypted. For a directory, this means that encryption is the default for newly created files and subdirectories. (17, 'Unused'), ][::-1] class SW_SHOW(pint.enum, DWORD): _values_ = [ ('NORMAL', 1), ('MAXIMIZED', 3), ('MINNOACTIVE', 7), ] class VK_(pint.enum, BYTE): _values_ = [ ('None', 0x00), # No key assigned. 
('VK_0', 0x30), # "0" key ('VK_1', 0x31), # "1" key ('VK_2', 0x32), # "2" key ('VK_3', 0x33), # "3" key ('VK_4', 0x34), # "4" key ('VK_5', 0x35), # "5" key ('VK_6', 0x36), # "6" key ('VK_7', 0x37), # "7" key ('VK_8', 0x38), # "8" key ('VK_9', 0x39), # "9" key ('VK_A', 0x41), # "A" key ('VK_B', 0x42), # "B" key ('VK_C', 0x43), # "C" key ('VK_D', 0x44), # "D" key ('VK_E', 0x45), # "E" key ('VK_F', 0x46), # "F" key ('VK_G', 0x47), # "G" key ('VK_H', 0x48), # "H" key ('VK_I', 0x49), # "I" key ('VK_J', 0x4A), # "J" key ('VK_K', 0x4B), # "K" key ('VK_L', 0x4C), # "L" key ('VK_M', 0x4D), # "M" key ('VK_N', 0x4E), # "N" key ('VK_O', 0x4F), # "O" key ('VK_P', 0x50), # "P" key ('VK_Q', 0x51), # "Q" key ('VK_R', 0x52), # "R" key ('VK_S', 0x53), # "S" key ('VK_T', 0x54), # "T" key ('VK_U', 0x55), # "U" key ('VK_V', 0x56), # "V" key ('VK_W', 0x57), # "W" key ('VK_X', 0x58), # "X" key ('VK_Y', 0x59), # "Y" key ('VK_Z', 0x5A), # "Z" key ('VK_F1', 0x70), # "F1" key ('VK_F2', 0x71), # "F2" key ('VK_F3', 0x72), # "F3" key ('VK_F4', 0x73), # "F4" key ('VK_F5', 0x74), # "F5" key ('VK_F6', 0x75), # "F6" key ('VK_F7', 0x76), # "F7" key ('VK_F8', 0x77), # "F8" key ('VK_F9', 0x78), # "F9" key ('VK_F10', 0x79), # "F10" key ('VK_F11', 0x7A), # "F11" key ('VK_F12', 0x7B), # "F12" key ('VK_F13', 0x7C), # "F13" key ('VK_F14', 0x7D), # "F14" key ('VK_F15', 0x7E), # "F15" key ('VK_F16', 0x7F), # "F16" key ('VK_F17', 0x80), # "F17" key ('VK_F18', 0x81), # "F18" key ('VK_F19', 0x82), # "F19" key ('VK_F20', 0x83), # "F20" key ('VK_F21', 0x84), # "F21" key ('VK_F22', 0x85), # "F22" key ('VK_F23', 0x86), # "F23" key ('VK_F24', 0x87), # "F24" key ('VK_NUMLOCK', 0x90), # "NUM LOCK" key ('VK_SCROLL', 0x91), # "SCROLL LOCK" key ] class HOTKEYF_(pbinary.flags): _fields_ = [ (5, 'RESERVED'), (1, 'ALT'), (1, 'CONTROL'), (1, 'SHIFT'), ] class HotKeyFlags(pstruct.type): _fields_ = [ (VK_, 'LowByte'), (HOTKEYF_, 'HighByte'), ] class ShellLinkHeader(pstruct.type): def blocksize(self): # If we're allocated, then we can just read our size field to determine # the blocksize, otherwise we need to cheat and assume it's a complete # structure. We do this by making a copy using the original blocksize # to allocate it and calculate the expected size. 
Fblocksize = super(ShellLinkHeader, self).blocksize return self['HeaderSize'].li.int() if self.value else self.copy(blocksize=Fblocksize).a.size() def __Reserved(type, required): def Freserved(self): expected = self['HeaderSize'].li return type if required <= expected.int() else uint0 return Freserved def __Padding(self): expected, fields = self['HeaderSize'].li, ['HeaderSize', 'LinkCLSID', 'LinkFlags', 'FileAttributes', 'CreationTime', 'AccessTime', 'WriteTime', 'FileSize', 'IconIndex', 'ShowCommand', 'HotKey', 'Reserved1', 'Reserved2', 'Reserved3'] return dyn.block(max(0, expected.int() - sum(self[fld].li.size() for fld in fields))) _fields_ = [ (DWORD, 'HeaderSize'), (CLSID, 'LinkCLSID'), (LinkFlags, 'LinkFlags'), (FileAttributesFlags, 'FileAttributes'), (FILETIME, 'CreationTime'), (FILETIME, 'AccessTime'), (FILETIME, 'WriteTime'), (DWORD, 'FileSize'), (DWORD, 'IconIndex'), (SW_SHOW, 'ShowCommand'), (HotKeyFlags, 'HotKey'), (__Reserved(WORD, 0x44), 'Reserved1'), (__Reserved(DWORD, 0x48), 'Reserved2'), (__Reserved(DWORD, 0x4c), 'Reserved3'), (__Padding, 'Padding'), ] class ItemID(pstruct.type): def __Data(self): expected, fields = self['ItemIDSize'].li, ['ItemIDSize'] return dyn.block(max(0, expected.int() - sum(self[fld].li.size() for fld in fields))) _fields_ = [ (WORD, 'ItemIDSize'), (__Data, 'Data'), ] class IDList(parray.terminated): _object_ = ItemID def isTerminator(self, item): return item['ItemIDSize'].int() == 0 class LinkTargetIDList(pstruct.type): def __padding_IDList(self): expected = self['IDListSize'].li.int() return dyn.block(max(0, expected - self['IDList'].li.size())) _fields_ = [ (WORD, 'IDListSize'), (IDList, 'IDList'), (__padding_IDList, 'padding(IDList)'), ] class DRIVE_(pint.enum, DWORD): _values_ = [ ('UNKNOWN', 0x00000000), # The drive type cannot be determined. ('NO_ROOT_DIR', 0x00000001), # The root path is invalid; for example, there is no volume mounted at the path. ('REMOVABLE', 0x00000002), # The drive has removable media, such as a floppy drive, thumb drive, or flash card reader. ('FIXED', 0x00000003), # The drive has fixed media, such as a hard drive or flash drive. ('REMOTE', 0x00000004), # The drive is a remote (network) drive. ('CDROM', 0x00000005), # The drive is a CD-ROM drive. ('RAMDISK', 0x00000006), # The drive is a RAM disk. ] class VolumeID(pstruct.type): def blocksize(self): # If we're allocated, then we can just read our size field to determine # the blocksize, otherwise we need to cheat and assume it's a complete # structure. We do this by making a copy using the original blocksize # to allocate it and calculate the expected size. 
Fblocksize = super(VolumeID, self).blocksize return self['VolumeIDSize'].li.int() if self.value else self.copy(blocksize=Fblocksize).a.size() def __VolumeLabelOffset(self): size = self['VolumeIDSize'].li if size.int() < 0x10: return dyn.rpointer(pstr.szstring, self, uint0) return dyn.rpointer(pstr.szstring, self, DWORD) def __VolumeLabelOffsetUnicode(self): size, offset = (self[fld].li for fld in ['VolumeIDSize', 'VolumeLabelOffset']) t = uint0 if any(item.int() < 0x14 for item in {size, offset}) else DWORD return dyn.rpointer(pstr.szwstring, self, t) def __Data(self): expected, fields = self['VolumeIDSize'].li.int(), ['VolumeIDSize', 'DriveType', 'DriveSerialNumber', 'VolumeLabelOffset', 'VolumeLabelOffsetUnicode'] return dyn.block(max(0, expected - sum(self[fld].li.size() for fld in fields))) _fields_ = [ (DWORD, 'VolumeIDSize'), (DRIVE_, 'DriveType'), (DWORD, 'DriveSerialNumber'), (__VolumeLabelOffset, 'VolumeLabelOffset'), (__VolumeLabelOffsetUnicode, 'VolumeLabelOffsetUnicode'), #(ptype.undefined, 'VolumeLabelOffset'), #(ptype.undefined, 'VolumeLabelOffsetUnicode'), # This data contains the two previously defined strings (__Data, 'Data'), ] class WNNC_NET_(pint.enum, DWORD): _values_ = [ ('AVID', 0x001A0000), ('DOCUSPACE', 0x001B0000), ('MANGOSOFT', 0x001C0000), ('SERNET', 0x001D0000), ('RIVERFRONT1', 0X001E0000), ('RIVERFRONT2', 0x001F0000), ('DECORB', 0x00200000), ('PROTSTOR', 0x00210000), ('FJ_REDIR', 0x00220000), ('DISTINCT', 0x00230000), ('TWINS', 0x00240000), ('RDR2SAMPLE', 0x00250000), ('CSC', 0x00260000), ('3IN1', 0x00270000), ('EXTENDNET', 0x00290000), ('STAC', 0x002A0000), ('FOXBAT', 0x002B0000), ('YAHOO', 0x002C0000), ('EXIFS', 0x002D0000), ('DAV', 0x002E0000), ('KNOWARE', 0x002F0000), ('OBJECT_DIRE', 0x00300000), ('MASFAX', 0x00310000), ('HOB_NFS', 0x00320000), ('SHIVA', 0x00330000), ('IBMAL', 0x00340000), ('LOCK', 0x00350000), ('TERMSRV', 0x00360000), ('SRT', 0x00370000), ('QUINCY', 0x00380000), ('OPENAFS', 0x00390000), ('AVID1', 0X003A0000), ('DFS', 0x003B0000), ('KWNP', 0x003C0000), ('ZENWORKS', 0x003D0000), ('DRIVEONWEB', 0x003E0000), ('VMWARE', 0x003F0000), ('RSFX', 0x00400000), ('MFILES', 0x00410000), ('MS_NFS', 0x00420000), ('GOOGLE', 0x00430000), ] class CommonNetworkRelativeLink(pstruct.type): def blocksize(self): # If we're allocated, then we can just read our size field to determine # the blocksize, otherwise we need to cheat and assume it's a complete # structure. We do this by making a copy using the original blocksize # to allocate it and calculate the expected size. 
        Fblocksize = super(CommonNetworkRelativeLink, self).blocksize
        return self['CommonNetworkRelativeLinkSize'].li.int() if self.value else self.copy(blocksize=Fblocksize).a.size()

    @pbinary.littleendian
    class _CommonNetworkRelativeLinkFlags(pbinary.flags):
        _fields_ = [
            (1, 'ValidDevice'),
            (1, 'ValidNetType'),
            (30, 'Unused'),
        ][::-1]

    def __CommonNetworkRelativeLinkOffset(target, required):
        def Foffset(self):
            expected = self['CommonNetworkRelativeLinkSize'].li
            # The offset field is only present when the structure is large
            # enough to contain it (same convention as LinkInfo below).
            t = DWORD if required <= expected.int() else uint0
            return dyn.rpointer(target, self, t)
        return Foffset

    def __CommonNetworkRelativeLinkData(self):
        expected, fields = self['CommonNetworkRelativeLinkSize'].li, ['CommonNetworkRelativeLinkSize', 'CommonNetworkRelativeLinkFlags', 'NetNameOffset', 'DeviceNameOffset', 'NetworkProviderType', 'NetNameOffsetUnicode', 'DeviceNameOffsetUnicode']
        return dyn.block(max(0, expected.int() - sum(self[fld].li.size() for fld in fields)))

    _fields_ = [
        (DWORD, 'CommonNetworkRelativeLinkSize'),
        (_CommonNetworkRelativeLinkFlags, 'CommonNetworkRelativeLinkFlags'),
        (__CommonNetworkRelativeLinkOffset(pstr.szstring, 0xc), 'NetNameOffset'),
        (__CommonNetworkRelativeLinkOffset(pstr.szstring, 0x10), 'DeviceNameOffset'),   # ValidDevice
        (WNNC_NET_, 'NetworkProviderType'),                                             # ValidNetType

        ### These are conditional depending on the size
        (__CommonNetworkRelativeLinkOffset(pstr.szwstring, 0x18), 'NetNameOffsetUnicode'),
        (__CommonNetworkRelativeLinkOffset(pstr.szwstring, 0x1c), 'DeviceNameOffsetUnicode'),

        ### These might be in an arbitrary order despite what the documentation claims
        #(pstr.szstring, 'NetName'),
        #(pstr.szstring, 'DeviceName'),
        #(pstr.szwstring, 'NetNameUnicode'),
        #(pstr.szwstring, 'DeviceNameUnicode'),
        (__CommonNetworkRelativeLinkData, 'CommonNetworkRelativeLinkData'),
    ]

class LinkInfo(pstruct.type):
    def blocksize(self):
        # If we're allocated, then we can just read our size field to determine
        # the blocksize, otherwise we need to cheat and assume it's a complete
        # structure. We do this by making a copy using the original blocksize
        # to allocate it and calculate the expected size.
Fblocksize = super(LinkInfo, self).blocksize return self['LinkInfoSize'].li.int() if self.value else self.copy(blocksize=Fblocksize).a.size() @pbinary.littleendian class _LinkInfoFlags(pbinary.flags): _fields_ = [ (1, 'VolumeIDAndLocalBasePath'), (1, 'CommonNetworkRelativeLinkAndPathSuffix'), (30, 'Unused'), ][::-1] def __LinkInfoHeaderOffset(target, required): def Foffset(self): expected = self['LinkInfoHeaderSize'].li t = DWORD if required <= expected.int() else uint0 return dyn.rpointer(target, self, t) return Foffset def __LinkInfoData(self): expected, header = (self[fld].li for fld in ['LinkInfoSize', 'LinkInfoHeaderSize']) return dyn.block(max(0, expected.int() - header.int())) _fields_ = [ (DWORD, 'LinkInfoSize'), (DWORD, 'LinkInfoHeaderSize'), (_LinkInfoFlags, 'LinkInfoFlags'), ### XXX: These are conditional depending on the LinkInfoFlags and sized by LinkInfoHeaderSize (__LinkInfoHeaderOffset(VolumeID, 0x10), 'VolumeIDOffset'), # VolumeIDAndLocalBasePath (__LinkInfoHeaderOffset(pstr.szstring, 0x14), 'LocalBasePathOffset'), # VolumeIDAndLocalBasePath (__LinkInfoHeaderOffset(CommonNetworkRelativeLink, 0x18), 'CommonNetworkRelativeLinkOffset'), # CommonNetworkRelativeLinkAndPathSuffix (__LinkInfoHeaderOffset(pstr.szstring, 0x1c), 'CommonPathSuffixOffset'), # (__LinkInfoHeaderOffset(pstr.szwstring, 0x20), 'LocalBasePathOffsetUnicode'), # VolumeIDAndLocalBasePath (__LinkInfoHeaderOffset(pstr.szwstring, 0x24), 'CommonPathSuffixOffsetUnicode'), # If size >= 0x24 ### These might be in an arbitrary order despite what the documentation claims #(VolumeID, 'VolumeID'), # #(pstr.szwstring, 'LocalBasePath'), # #(CommonNetworkRelativeLink, 'CommonNetworkRelativeLink'), #(pstr.szwstring, 'CommonPathSuffix'), # #(pstr.szwstring, 'LocalBasePathUnicode'), #(pstr.szwstring, 'CommonPathSuffixUnicode'), (__LinkInfoData, 'LinkInfoData'), ] class StringData(pstruct.type): _fields_ = [ (WORD, 'CountCharacters'), (pstr.string, 'String'), ] def str(self): item = self['String'] return item.str() def summary(self): count, string = self['CountCharacters'], self.str() return "(CountCharacters={:d}) String: {:s}".format(count.int(), string) class AnsiStringData(StringData): _fields_ = [ (WORD, 'CountCharacters'), (lambda self: dyn.clone(pstr.string, length=self['CountCharacters'].li.int()), 'String'), ] class UnicodeStringData(StringData): _fields_ = [ (WORD, 'CountCharacters'), (lambda self: dyn.clone(pstr.wstring, length=self['CountCharacters'].li.int()), 'String'), ] class EXTRA_DATA(ptype.definition): attribute, cache = 'signature', {} class EXTRA_DATA_BLOCK(pint.enum, DWORD): _values_ = [ ('CONSOLE_PROPS', 0xA0000002), # A ConsoleDataBlock structure (section 2.5.1). ('CONSOLE_FE_PROPS', 0xA0000004), # A ConsoleFEDataBlock structure (section 2.5.2). ('DARWIN_PROPS', 0xA0000006), # A DarwinDataBlock structure (section 2.5.3). ('ENVIRONMENT_PROPS', 0xA0000001), # An EnvironmentVariableDataBlock structure (section 2.5.4). ('ICON_ENVIRONMENT_PROPS', 0xA0000007), # An IconEnvironmentDataBlock structure (section 2.5.5). ('KNOWN_FOLDER_PROPS', 0xA000000B), # A KnownFolderDataBlock structure (section 2.5.6). ('PROPERTY_STORE_PROPS', 0xA0000009), # A PropertyStoreDataBlock structure (section 2.5.7). ('SHIM_PROPS', 0xA0000008), # A ShimDataBlock structure (section 2.5.8). ('SPECIAL_FOLDER_PROPS', 0xA0000005), # A SpecialFolderDataBlock structure (section 2.5.9). ('TRACKER_PROPS', 0xA0000003), # A TrackerDataBlock structure (section 2.5.10). 
        ('VISTA_AND_ABOVE_IDLIST_PROPS', 0xA000000C),   # A VistaAndAboveIDListDataBlock structure (section 2.5.11).
    ]

class ExtraDataBlock(pstruct.type):
    def __BlockData(self):
        size, signature = (self[fld].li for fld in ['BlockSize', 'BlockSignature'])
        total, fields = self['BlockSize'].li.int(), ['BlockSize', 'BlockSignature']
        expected = total - sum(self[fld].li.size() for fld in fields)
        return EXTRA_DATA.withdefault(signature.int(), ptype.block, length=max(0, expected))

    def __padding_BlockData(self):
        expected, fields = self['BlockSize'].li.int(), ['BlockSize', 'BlockSignature', 'BlockData']
        return dyn.block(max(0, expected - sum(self[fld].li.size() for fld in fields)))

    _fields_ = [
        (DWORD, 'BlockSize'),
        (lambda self: dyn.clone(EXTRA_DATA_BLOCK, length=0) if self['BlockSize'].li.int() < 8 else EXTRA_DATA_BLOCK, 'BlockSignature'),
        (__BlockData, 'BlockData'),
        (__padding_BlockData, 'padding(BlockData)'),
    ]

class ExtraData(parray.terminated):
    _object_ = ExtraDataBlock
    def isTerminator(self, item):
        return item['BlockSize'].int() < 4

### extra data blocks
class RGBI(pbinary.flags):
    _fields_ = [
        (1, 'INTENSITY'),
        (1, 'RED'),
        (1, 'GREEN'),
        (1, 'BLUE'),
    ]

class FOREGROUND_(RGBI): pass
class BACKGROUND_(RGBI): pass

class FF_(pbinary.enum):
    length, _values_ = 4, [
        ('DONTCARE', 0),
        ('ROMAN', 1),
        ('SWISS', 2),
        ('MODERN', 3),
        ('SCRIPT', 4),
        ('DECORATIVE', 5),
    ]

class TMPF_(pbinary.flags):
    _fields_ = [
        (1, 'DEVICE'),
        (1, 'TRUETYPE'),
        (1, 'VECTOR'),
        (1, 'FIXED_PITCH'),
    ]

@EXTRA_DATA.define
class ConsoleDataBlock(pstruct.type):
    signature = 0xA0000002

    @pbinary.littleendian
    class _FillAttributes(pbinary.flags):
        _fields_ = [
            (8, 'Unused'),
            (BACKGROUND_, 'BACKGROUND'),
            (FOREGROUND_, 'FOREGROUND'),
        ]

    @pbinary.littleendian
    class _FontFamily(pbinary.struct):
        _fields_ = [
            (24, 'Unused'),
            (FF_, 'Family'),
            (TMPF_, 'Pitch'),
        ]

    _fields_ = [
        (_FillAttributes, 'FillAttributes'),
        (_FillAttributes, 'PopupFillAttributes'),
        (INT16, 'ScreenBufferSizeX'),
        (INT16, 'ScreenBufferSizeY'),
        (INT16, 'WindowSizeX'),
        (INT16, 'WindowSizeY'),
        (INT16, 'WindowOriginX'),
        (INT16, 'WindowOriginY'),
        (DWORD, 'Unused1'),
        (DWORD, 'Unused2'),
        (DWORD, 'FontSize'),
        (_FontFamily, 'FontFamily'),
        (DWORD, 'FontWeight'),
        (dyn.clone(pstr.wstring, length=32), 'Face Name'),
        (DWORD, 'CursorSize'),
        (DWORD, 'FullScreen'),
        (DWORD, 'QuickEdit'),
        (DWORD, 'InsertMode'),
        (DWORD, 'AutoPosition'),
        (DWORD, 'HistoryBufferSize'),
        (DWORD, 'NumberOfHistoryBuffers'),
        (DWORD, 'HistoryNoDup'),
        (dyn.array(DWORD, 16), 'ColorTable'),
    ]

@EXTRA_DATA.define
class ConsoleFEDataBlock(pstruct.type):
    signature = 0xA0000004
    _fields_ = [
        (DWORD, 'CodePage'),
    ]

@EXTRA_DATA.define
class DarwinDataBlock(pstruct.type):
    signature = 0xA0000006

    def __padding(field, size):
        def Fpadding(self):
            return dyn.block(max(0, size - self[field].li.size()))
        return Fpadding

    _fields_ = [
        (pstr.szstring, 'DarwinDataAnsi'),
        (__padding('DarwinDataAnsi', 260), 'padding(DarwinDataAnsi)'),
        (pstr.szwstring, 'DarwinDataUnicode'),
        (__padding('DarwinDataUnicode', 520), 'padding(DarwinDataUnicode)'),
    ]

@EXTRA_DATA.define
class EnvironmentVariableDataBlock(pstruct.type):
    signature = 0xA0000001

    def __padding(field, size):
        def Fpadding(self):
            return dyn.block(max(0, size - self[field].li.size()))
        return Fpadding

    _fields_ = [
        (pstr.szstring, 'TargetAnsi'),
        (__padding('TargetAnsi', 260), 'padding(TargetAnsi)'),
        (pstr.szwstring, 'TargetUnicode'),
        (__padding('TargetUnicode', 520), 'padding(TargetUnicode)'),
    ]

@EXTRA_DATA.define
class IconEnvironmentDataBlock(EnvironmentVariableDataBlock):
    signature = 0xA0000007

@EXTRA_DATA.define
class KnownFolderDataBlock(pstruct.type):
    signature = 0xA000000B
    _fields_ = [
        (GUID, 'KnownFolderID'),
        (DWORD, 'Offset'),
    ]

class SerializedPropertyValueStringName(pstruct.type):
    _fields_ = [
        (DWORD, 'Value Size'),
        (DWORD, 'Name Size'),
        (BYTE, 'Reserved'),
        (pstr.szwstring, 'Name'),
        (lambda self: dyn.block(max(0, self['Name Size'].li.int() - self['Name'].li.size())), 'padding(Name)'),
        (office.propertyset.TypedPropertyValue, 'Value'),
        (lambda self: dyn.block(max(0, self['Value Size'].li.int() - self['Value'].li.size())), 'padding(Value)'),
    ]

class SerializedPropertyValueIntegerName(pstruct.type):
    _fields_ = [
        (DWORD, 'Value Size'),
        (DWORD, 'Id'),
        (BYTE, 'Reserved'),
        (office.propertyset.TypedPropertyValue, 'Value'),
        (lambda self: dyn.block(max(0, self['Value Size'].li.int() - self['Value'].li.size())), 'padding(Value)'),
    ]

@EXTRA_DATA.define
class PropertyStoreDataBlock(pstruct.type):
    signature = 0xA0000009

    class _Serialized_Property_Value(parray.terminated):
        def isTerminator(self, item):
            return item['Value Size'].int() == 0

    def __Serialized_Property_Value(self):
        format = self['Format ID']
        items = [component for component in format.iterate()]
        if items == [0xD5CDD505, 0x2E9C, 0x101B, 0x9397, 0x08002B2CF9AE]:
            t = SerializedPropertyValueStringName
        else:
            t = SerializedPropertyValueIntegerName
        return dyn.clone(self._Serialized_Property_Value, _object_=t)

    def __padding_Serialized_Property_Value(self):
        expected, fields = self['Storage Size'].li, ['Storage Size', 'Version', 'Format ID']
        return dyn.block(max(0, expected.int() - sum(self[fld].li.size() for fld in fields)))

    _fields_ = [
        (DWORD, 'Storage Size'),
        (DWORD, 'Version'),
        (GUID, 'Format ID'),
        (__Serialized_Property_Value, 'Serialized Property Value'),
        (__padding_Serialized_Property_Value, 'padding(Serialized Property Value)'),
    ]

@EXTRA_DATA.define
class ShimDataBlock(pstruct.type):
    signature = 0xA0000008

    def __LayerName(self):
        p = self.parent
        if p:
            size = p['BlockSize'].li.int() - sum(p[fld].li.size() for fld in ['BlockSize', 'BlockSignature'])
            return dyn.clone(pstr.wstring, length=size // 2)
        return pstr.wstring

    _fields_ = [
        (__LayerName, 'LayerName'),
    ]

@EXTRA_DATA.define
class SpecialFolderDataBlock(pstruct.type):
    signature = 0xA0000005
    _fields_ = [
        (DWORD, 'SpecialFolderID'),
        (DWORD, 'Offset'),
    ]

@EXTRA_DATA.define
class TrackerDataBlock(pstruct.type):
    signature = 0xA0000003

    class _Droid(parray.type):
        length, _object_ = 2, GUID
        def summary(self):
            items = [item.str() for item in self]
            return "({:d}) {:s}".format(len(items), ', '.join(items))

    _fields_ = [
        (DWORD, 'Length'),
        (DWORD, 'Version'),
        (dyn.clone(pstr.string, length=16), 'MachineID'),
        (_Droid, 'Droid'),
        (_Droid, 'DroidBirth'),
    ]

@EXTRA_DATA.define
class VistaAndAboveIDListDataBlock(IDList):
    signature = 0xA000000C

class File(pstruct.type):
    def __ConditionalType(flag, type):
        def Ftype(self):
            header = self['Header'].li
            return type if header['LinkFlags'][flag] else ptype.undefined
        return Ftype

    def __ConditionalStringData(flag):
        def FStringData(self):
            header = self['Header'].li
            string_t = UnicodeStringData if header['LinkFlags']['IsUnicode'] else AnsiStringData
            return string_t if header['LinkFlags'][flag] else ptype.undefined
        return FStringData

    _fields_ = [
        (ShellLinkHeader, 'Header'),
        (__ConditionalType('HasLinkTargetIDList', LinkTargetIDList), 'IDList'),     # HasLinkTargetIDList
        (__ConditionalType('HasLinkInfo', LinkInfo), 'Info'),                       # HasLinkInfo
        (__ConditionalStringData('HasName'), 'NAME_STRING'),                        # HasName
        (__ConditionalStringData('HasRelativePath'), 'RELATIVE_PATH'),              # HasRelativePath
(__ConditionalStringData('HasWorkingDir'), 'WORKING_DIR'), # HasWorkingDir (__ConditionalStringData('HasArguments'), 'COMMAND_LINE_ARGUMENTS'), # HasArguments (__ConditionalStringData('HasIconLocation'), 'ICON_LOCATION'), # HasIconLocation (ExtraData, 'Extra'), ] if __name__ == '__main__': import builtins, operator, os, math, functools, itertools, sys, types # x0 x1 x2 x3 x4 x5 x6 x7 x8 x9 xA xB xC xD xE xF hexadecimal_representation = ''' 0000 4C 00 00 00 01 14 02 00 00 00 00 00 C0 00 00 00 0010 00 00 00 46 9B 00 08 00 20 00 00 00 D0 E9 EE F2 0020 15 15 C9 01 D0 E9 EE F2 15 15 C9 01 D0 E9 EE F2 0030 15 15 C9 01 00 00 00 00 00 00 00 00 01 00 00 00 0040 00 00 00 00 00 00 00 00 00 00 00 00 BD 00 14 00 0050 1F 50 E0 4F D0 20 EA 3A 69 10 A2 D8 08 00 2B 30 0060 30 9D 19 00 2F 43 3A 5C 00 00 00 00 00 00 00 00 0070 00 00 00 00 00 00 00 00 00 00 00 46 00 31 00 00 0080 00 00 00 2C 39 69 A3 10 00 74 65 73 74 00 00 32 0090 00 07 00 04 00 EF BE 2C 39 65 A3 2C 39 69 A3 26 00A0 00 00 00 03 1E 00 00 00 00 F5 1E 00 00 00 00 00 00B0 00 00 00 00 00 74 00 65 00 73 00 74 00 00 00 14 00C0 00 48 00 32 00 00 00 00 00 2C 39 69 A3 20 00 61 00D0 2E 74 78 74 00 34 00 07 00 04 00 EF BE 2C 39 69 00E0 A3 2C 39 69 A3 26 00 00 00 2D 6E 00 00 00 00 96 00F0 01 00 00 00 00 00 00 00 00 00 00 61 00 2E 00 74 0100 00 78 00 74 00 00 00 14 00 00 00 3C 00 00 00 1C 0110 00 00 00 01 00 00 00 1C 00 00 00 2D 00 00 00 00 0120 00 00 00 3B 00 00 00 11 00 00 00 03 00 00 00 81 0130 8A 7A 30 10 00 00 00 00 43 3A 5C 74 65 73 74 5C 0140 61 2E 74 78 74 00 00 07 00 2E 00 5C 00 61 00 2E 0150 00 74 00 78 00 74 00 07 00 43 00 3A 00 5C 00 74 0160 00 65 00 73 00 74 00 60 00 00 00 03 00 00 A0 58 0170 00 00 00 00 00 00 00 63 68 72 69 73 2D 78 70 73 0180 00 00 00 00 00 00 00 40 78 C7 94 47 FA C7 46 B3 0190 56 5C 2D C6 B6 D1 15 EC 46 CD 7B 22 7F DD 11 94 01A0 99 00 13 72 16 87 4A 40 78 C7 94 47 FA C7 46 B3 01B0 56 5C 2D C6 B6 D1 15 EC 46 CD 7B 22 7F DD 11 94 01C0 99 00 13 72 16 87 4A 00 00 00 00 ''' rows = map(operator.methodcaller('strip'), hexadecimal_representation.split('\n')) items = [item.replace(' ', '') for offset, item in map(operator.methodcaller('split', ' ', 1), filter(None, rows))] data = bytes().join(map(operator.methodcaller('decode', 'hex') if sys.version_info.major < 3 else bytes.fromhex, items)) # HeaderSize: (4 bytes, offset 0x0000), 0x0000004C as required. # LinkCLSID: (16 bytes, offset 0x0004), 00021401-0000-0000-C000-000000000046. # LinkFlags: (4 bytes, offset 0x0014), 0x0008009B means the following LinkFlags (section 2.1.1) are set: # HasLinkTargetIDList # HasLinkInfo # HasRelativePath # HasWorkingDir # IsUnicode # EnableTargetMetadata # FileAttributes: (4 bytes, offset 0x0018), 0x00000020, means the following FileAttributesFlags (section 2.1.2) are set: # FILE_ATTRIBUTE_ARCHIVE # CreationTime: (8 bytes, offset 0x001C) FILETIME 9/12/08, 8:27:17PM. # AccessTime: (8 bytes, offset 0x0024) FILETIME 9/12/08, 8:27:17PM. # WriteTime: (8 bytes, offset 0x002C) FILETIME 9/12/08, 8:27:17PM. # FileSize: (4 bytes, offset 0x0034), 0x00000000. # IconIndex: (4 bytes, offset 0x0038), 0x00000000. # ShowCommand: (4 bytes, offset 0x003C), SW_SHOWNORMAL(1). # Hotkey: (2 bytes, offset 0x0040), 0x0000. # Reserved: (2 bytes, offset 0x0042), 0x0000. # Reserved2: (4 bytes, offset 0x0044), 0 x00000000. # Reserved3: (4 bytes, offset 0x0048), 0 x00000000. # Because HasLinkTargetIDList is set, a LinkTargetIDList structure (section 2.2) follows: # IDListSize: (2 bytes, offset 0x004C), 0x00BD, the size of IDList. 
# IDList: (189 bytes, offset 0x004E) an IDList structure (section 2.2.1) follows: # ItemIDList: (187 bytes, offset 0x004E), ItemID structures (section 2.2.2) follow: # ItemIDSize: (2 bytes, offset 0x004E), 0x0014 # Data: (12 bytes, offset 0x0050), <18 bytes of data> [computer] # ItemIDSize: (2 bytes, offset 0x0062), 0x0019 # Data: (23 bytes, offset 0x0064), <23 bytes of data> [c:] # ItemIDSize: (2 bytes, offset 0x007B), 0x0046 # Data: (68 bytes, offset 0x007D), <68 bytes of data> [test] # ItemIDSize: (2 bytes, offset 0x00C1), 0x0048 # Data: (68 bytes, offset 0x00C3), <70 bytes of data> [a.txt] # TerminalID: (2 bytes, offset 0x0109), 0x0000 indicates the end of the IDList. # Because HasLinkInfo is set, a LinkInfo structure (section 2.3) follows: # LinkInfoSize: (4 bytes, offset 0x010B), 0x0000003C # LinkInfoHeaderSize: (4 bytes, offset 0x010F), 0x0000001C as specified in the LinkInfo structure definition. # LinkInfoFlags: (4 bytes, offset 0x0113), 0x00000001 VolumeIDAndLocalBasePath is set. # VolumeIDOffset: (4 bytes, offset 0x0117), 0x0000001C, references offset 0x0127. # LocalBasePathOffset: (4 bytes, offset 0x011B), 0x0000002D, references the character string "C:\test\a.txt". # CommonNetworkRelativeLinkOffset: (4 bytes, offset 0x011F), 0x00000000 indicates CommonNetworkRelativeLink is not present. # CommonPathSuffixOffset: (4 bytes, offset 0x0123), 0x0000003B, references offset 0x00000146, the character string "" (empty string). # VolumeID: (17 bytes, offset 0x0127), because VolumeIDAndLocalBasePath is set, a VolumeID structure (section 2.3.1) follows: # VolumeIDSize: (4 bytes, offset 0x0127), 0x00000011 indicates the size of the VolumeID structure. # DriveType: (4 bytes, offset 0x012B), DRIVE_FIXED(3). # DriveSerialNumber: (4 bytes, offset 0x012F), 0x307A8A81. # VolumeLabelOffset: (4 bytes, offset 0x0133), 0x00000010, indicates that Volume Label Offset Unicode is not specified and references offset 0x0137 where the Volume Label is stored. # Data: (1 byte, offset 0x0137), "" an empty character string. # LocalBasePath: (14 bytes, offset 0x0138), because VolumeIDAndLocalBasePath is set, the character string "c:\test\a.txt" is present. # CommonPathSuffix: (1 byte, offset 0x0146), "" an empty character string. # Because HasRelativePath is set, the RELATIVE_PATH StringData structure (section 2.4) follows: # CountCharacters: (2 bytes, offset 0x0147), 0x0007 Unicode characters. # String (14 bytes, offset 0x0149), the Unicode string: ".\a.txt". # Because HasWorkingDir is set, the WORKING_DIR StringData structure (section 2.4) follows: # CountCharacters: (2 bytes, offset 0x0157), 0x0007 Unicode characters. # String (14 bytes, offset 0x0159), the Unicode string: "c:\test". # Extra data section: (100 bytes, offset 0x0167), an ExtraData structure (section 2.5) follows: # ExtraDataBlock (96 bytes, offset 0x0167), the TrackerDataBlock structure (section 2.5.10) follows: # BlockSize: (4 bytes, offset 0x0167), 0x00000060 # BlockSignature: (4 bytes, offset 0x016B), 0xA000003, which identifies the TrackerDataBlock structure (section 2.5.10). # Length: (4 bytes, offset 0x016F), 0x00000058, the required minimum size of this extra data block. # Version: (4 bytes, offset 0x0173), 0x00000000, the required version. # MachineID: (16 bytes, offset 0x0177), the character string "chris-xps", with zero fill. # Droid: (32 bytes, offset 0x0187), 2 GUID values. # DroidBirth: (32 bytes, offset 0x01A7), 2 GUID values. # TerminalBlock: (4 bytes, offset 0x01C7), 0x00000000 indicates the end of the extra data section. 
import ptypes, lnkfile from lnkfile import * #importlib.reload(lnkfile) source = ptypes.setsource(ptypes.prov.bytes(data)) z = File() z = z.l print(z)
bsd-2-clause
8,419,824,990,856,185,000
47.704327
279
0.605054
false
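The ShellLinkHeader layout that the ptypes definitions above describe can be illustrated without ptypes at all. A minimal stdlib sketch, assuming only the fixed 0x4C-byte header layout from the annotated walkthrough in the file's own comments (struct/uuid stand in for the ptypes machinery, and the demo bytes are synthesized, not a real .lnk file):

import struct
import uuid

def read_shell_link_header(data):
    # Fixed-size fields of the 0x4C-byte ShellLinkHeader, little-endian.
    (size,) = struct.unpack_from('<I', data, 0)
    clsid = uuid.UUID(bytes_le=data[4:20])
    flags, attrs = struct.unpack_from('<II', data, 20)
    ctime, atime, wtime = struct.unpack_from('<QQQ', data, 28)  # FILETIMEs
    filesize, icon, show = struct.unpack_from('<III', data, 52)
    return dict(HeaderSize=size, LinkCLSID=str(clsid), LinkFlags=hex(flags),
                FileAttributes=hex(attrs), FileSize=filesize,
                IconIndex=icon, ShowCommand=show)

# Synthesize the first 64 bytes of a header using values from the annotated
# hex dump above (size 0x4C, the shell link CLSID, LinkFlags 0x0008009B).
demo = struct.pack(
    '<I16sIIQQQIII', 0x4C,
    uuid.UUID('00021401-0000-0000-c000-000000000046').bytes_le,
    0x0008009B, 0x20, 0, 0, 0, 0, 0, 1)
print(read_shell_link_header(demo))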
chrisndodge/edx-platform
openedx/core/djangoapps/user_api/migrations/0001_initial.py
20
2731
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models import django.utils.timezone from django.conf import settings import model_utils.fields import django.core.validators from openedx.core.djangoapps.xmodule_django.models import CourseKeyField class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='UserCourseTag', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('key', models.CharField(max_length=255, db_index=True)), ('course_id', CourseKeyField(max_length=255, db_index=True)), ('value', models.TextField()), ('user', models.ForeignKey(related_name='+', to=settings.AUTH_USER_MODEL)), ], ), migrations.CreateModel( name='UserOrgTag', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, verbose_name='created', editable=False)), ('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, verbose_name='modified', editable=False)), ('key', models.CharField(max_length=255, db_index=True)), ('org', models.CharField(max_length=255, db_index=True)), ('value', models.TextField()), ('user', models.ForeignKey(related_name='+', to=settings.AUTH_USER_MODEL)), ], ), migrations.CreateModel( name='UserPreference', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('key', models.CharField(db_index=True, max_length=255, validators=[django.core.validators.RegexValidator(b'[-_a-zA-Z0-9]+')])), ('value', models.TextField()), ('user', models.ForeignKey(related_name='preferences', to=settings.AUTH_USER_MODEL)), ], ), migrations.AlterUniqueTogether( name='userpreference', unique_together=set([('user', 'key')]), ), migrations.AlterUniqueTogether( name='userorgtag', unique_together=set([('user', 'org', 'key')]), ), migrations.AlterUniqueTogether( name='usercoursetag', unique_together=set([('user', 'course_id', 'key')]), ), ]
agpl-3.0
5,830,344,502,259,138,000
43.048387
147
0.589894
false
jingxiang-li/kaggle-yelp
model/level3_model_rf.py
1
5669
from __future__ import division from __future__ import absolute_import from __future__ import print_function from __future__ import unicode_literals import numpy as np from sklearn.ensemble import RandomForestClassifier from sklearn.calibration import CalibratedClassifierCV from sklearn.metrics import f1_score import argparse from os import path import os from hyperopt import fmin, tpe, hp, STATUS_OK, Trials from utils import * import pickle np.random.seed(54568464) def parse_args(): parser = argparse.ArgumentParser() parser.add_argument('--yix', type=int, default=0) return parser.parse_args() # functions for hyperparameters optimization class Score: def __init__(self, X, y): self.y = y self.X = X def get_score(self, params): params['n_estimators'] = int(params['n_estimators']) params['max_depth'] = int(params['max_depth']) params['min_samples_split'] = int(params['min_samples_split']) params['min_samples_leaf'] = int(params['min_samples_leaf']) params['n_estimators'] = int(params['n_estimators']) print('Training with params:') print(params) # cross validation here scores = [] for train_ix, test_ix in makeKFold(5, self.y, 1): X_train, y_train = self.X[train_ix, :], self.y[train_ix] X_test, y_test = self.X[test_ix, :], self.y[test_ix] weight = y_train.shape[0] / (2 * np.bincount(y_train)) sample_weight = np.array([weight[i] for i in y_train]) clf = RandomForestClassifier(**params) cclf = CalibratedClassifierCV(base_estimator=clf, method='isotonic', cv=makeKFold(3, y_train, 1)) cclf.fit(X_train, y_train, sample_weight) pred = cclf.predict(X_test) scores.append(f1_score(y_true=y_test, y_pred=pred)) print(scores) score = np.mean(scores) print(score) return {'loss': -score, 'status': STATUS_OK} def optimize(trials, X, y, max_evals): space = { 'n_estimators': hp.quniform('n_estimators', 100, 500, 50), 'criterion': hp.choice('criterion', ['gini', 'entropy']), 'max_depth': hp.quniform('max_depth', 1, 7, 1), 'min_samples_split': hp.quniform('min_samples_split', 1, 9, 2), 'min_samples_leaf': hp.quniform('min_samples_leaf', 1, 5, 1), 'bootstrap': True, 'oob_score': True, 'n_jobs': -1 } s = Score(X, y) best = fmin(s.get_score, space, algo=tpe.suggest, trials=trials, max_evals=max_evals ) best['n_estimators'] = int(best['n_estimators']) best['max_depth'] = int(best['max_depth']) best['min_samples_split'] = int(best['min_samples_split']) best['min_samples_leaf'] = int(best['min_samples_leaf']) best['n_estimators'] = int(best['n_estimators']) best['criterion'] = ['gini', 'entropy'][best['criterion']] best['bootstrap'] = True best['oob_score'] = True best['n_jobs'] = -1 del s return best def out_fold_pred(params, X, y): # cross validation here preds = np.zeros((y.shape[0])) for train_ix, test_ix in makeKFold(5, y, 1): X_train, y_train = X[train_ix, :], y[train_ix] X_test = X[test_ix, :] weight = y_train.shape[0] / (2 * np.bincount(y_train)) sample_weight = np.array([weight[i] for i in y_train]) clf = RandomForestClassifier(**params) cclf = CalibratedClassifierCV(base_estimator=clf, method='isotonic', cv=makeKFold(3, y_train, 1)) cclf.fit(X_train, y_train, sample_weight) pred = cclf.predict_proba(X_test)[:, 1] preds[test_ix] = pred return preds def get_model(params, X, y): clf = RandomForestClassifier(**params) cclf = CalibratedClassifierCV(base_estimator=clf, method='isotonic', cv=makeKFold(3, y, 1)) weight = y.shape[0] / (2 * np.bincount(y)) sample_weight = np.array([weight[i] for i in y]) cclf.fit(X, y, sample_weight) return cclf args = parse_args() data_dir = '../level3-feature/' + str(args.yix) 
X_train = np.load(path.join(data_dir, 'X_train.npy')) X_test = np.load(path.join(data_dir, 'X_test.npy')) y_train = np.load(path.join(data_dir, 'y_train.npy')) print(X_train.shape, X_test.shape, y_train.shape) X_train_ext = np.load('../extra_ftrs/' + str(args.yix) + '/X_train_ext.npy') X_test_ext = np.load('../extra_ftrs/' + str(args.yix) + '/X_test_ext.npy') print(X_train_ext.shape, X_test_ext.shape) X_train = np.hstack((X_train, X_train_ext)) X_test = np.hstack((X_test, X_test_ext)) print('Add Extra') print(X_train.shape, X_test.shape, y_train.shape) # Now we have X_train, X_test, y_train trials = Trials() params = optimize(trials, X_train, y_train, 50) out_fold = out_fold_pred(params, X_train, y_train) clf = get_model(params, X_train, y_train) preds = clf.predict_proba(X_test)[:, 1] save_dir = '../level3-model-final/' + str(args.yix) print(save_dir) if not path.exists(save_dir): os.makedirs(save_dir) # save model, parameter, outFold_pred, pred with open(path.join(save_dir, 'model_rf.pkl'), 'wb') as f_model: pickle.dump(clf.calibrated_classifiers_, f_model) with open(path.join(save_dir, 'param_rf.pkl'), 'wb') as f_param: pickle.dump(params, f_param) np.save(path.join(save_dir, 'pred_rf.npy'), preds) np.save(path.join(save_dir, 'outFold_rf.npy'), out_fold)
mit
6,082,043,993,278,494,000
33.357576
76
0.594814
false
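The optimize routine above wraps hyperopt's fmin around an expensive cross-validated objective. A toy, self-contained sketch of the same fmin/tpe pattern, with a quadratic objective in place of the RandomForest score so it runs without the project's data:

from hyperopt import fmin, tpe, hp, STATUS_OK, Trials

def objective(params):
    # Minimum at x = 3.0; stands in for the negated f1 score above.
    x = params['x']
    return {'loss': (x - 3.0) ** 2, 'status': STATUS_OK}

trials = Trials()
best = fmin(objective, {'x': hp.uniform('x', -10, 10)},
            algo=tpe.suggest, trials=trials, max_evals=50)
print(best)  # should land near {'x': 3.0}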
jreback/pandas
pandas/io/formats/html.py
2
23192
""" Module for formatting output data in HTML. """ from textwrap import dedent from typing import Any, Dict, Iterable, List, Mapping, Optional, Tuple, Union, cast from pandas._config import get_option from pandas._libs import lib from pandas import MultiIndex, option_context from pandas.io.common import is_url from pandas.io.formats.format import DataFrameFormatter, get_level_lengths from pandas.io.formats.printing import pprint_thing class HTMLFormatter: """ Internal class for formatting output data in html. This class is intended for shared functionality between DataFrame.to_html() and DataFrame._repr_html_(). Any logic in common with other output formatting methods should ideally be inherited from classes in format.py and this class responsible for only producing html markup. """ indent_delta = 2 def __init__( self, formatter: DataFrameFormatter, classes: Optional[Union[str, List[str], Tuple[str, ...]]] = None, border: Optional[int] = None, table_id: Optional[str] = None, render_links: bool = False, ) -> None: self.fmt = formatter self.classes = classes self.frame = self.fmt.frame self.columns = self.fmt.tr_frame.columns self.elements: List[str] = [] self.bold_rows = self.fmt.bold_rows self.escape = self.fmt.escape self.show_dimensions = self.fmt.show_dimensions if border is None: border = cast(int, get_option("display.html.border")) self.border = border self.table_id = table_id self.render_links = render_links self.col_space = { column: f"{value}px" if isinstance(value, int) else value for column, value in self.fmt.col_space.items() } def to_string(self) -> str: lines = self.render() if any(isinstance(x, str) for x in lines): lines = [str(x) for x in lines] return "\n".join(lines) def render(self) -> List[str]: self._write_table() if self.should_show_dimensions: by = chr(215) # × self.write( f"<p>{len(self.frame)} rows {by} {len(self.frame.columns)} columns</p>" ) return self.elements @property def should_show_dimensions(self): return self.fmt.should_show_dimensions @property def show_row_idx_names(self) -> bool: return self.fmt.show_row_idx_names @property def show_col_idx_names(self) -> bool: return self.fmt.show_col_idx_names @property def row_levels(self) -> int: if self.fmt.index: # showing (row) index return self.frame.index.nlevels elif self.show_col_idx_names: # see gh-22579 # Column misalignment also occurs for # a standard index when the columns index is named. # If the row index is not displayed a column of # blank cells need to be included before the DataFrame values. return 1 # not showing (row) index return 0 def _get_columns_formatted_values(self) -> Iterable: return self.columns @property def is_truncated(self) -> bool: return self.fmt.is_truncated @property def ncols(self) -> int: return len(self.fmt.tr_frame.columns) def write(self, s: Any, indent: int = 0) -> None: rs = pprint_thing(s) self.elements.append(" " * indent + rs) def write_th( self, s: Any, header: bool = False, indent: int = 0, tags: Optional[str] = None ) -> None: """ Method for writing a formatted <th> cell. If col_space is set on the formatter then that is used for the value of min-width. Parameters ---------- s : object The data to be written inside the cell. header : bool, default False Set to True if the <th> is for use inside <thead>. This will cause min-width to be set if there is one. indent : int, default 0 The indentation level of the cell. tags : str, default None Tags to include in the cell. Returns ------- A written <th> cell. 
""" col_space = self.col_space.get(s, None) if header and col_space is not None: tags = tags or "" tags += f'style="min-width: {col_space};"' self._write_cell(s, kind="th", indent=indent, tags=tags) def write_td(self, s: Any, indent: int = 0, tags: Optional[str] = None) -> None: self._write_cell(s, kind="td", indent=indent, tags=tags) def _write_cell( self, s: Any, kind: str = "td", indent: int = 0, tags: Optional[str] = None ) -> None: if tags is not None: start_tag = f"<{kind} {tags}>" else: start_tag = f"<{kind}>" if self.escape: # escape & first to prevent double escaping of & esc = {"&": r"&amp;", "<": r"&lt;", ">": r"&gt;"} else: esc = {} rs = pprint_thing(s, escape_chars=esc).strip() if self.render_links and is_url(rs): rs_unescaped = pprint_thing(s, escape_chars={}).strip() start_tag += f'<a href="{rs_unescaped}" target="_blank">' end_a = "</a>" else: end_a = "" self.write(f"{start_tag}{rs}{end_a}</{kind}>", indent) def write_tr( self, line: Iterable, indent: int = 0, indent_delta: int = 0, header: bool = False, align: Optional[str] = None, tags: Optional[Dict[int, str]] = None, nindex_levels: int = 0, ) -> None: if tags is None: tags = {} if align is None: self.write("<tr>", indent) else: self.write(f'<tr style="text-align: {align};">', indent) indent += indent_delta for i, s in enumerate(line): val_tag = tags.get(i, None) if header or (self.bold_rows and i < nindex_levels): self.write_th(s, indent=indent, header=header, tags=val_tag) else: self.write_td(s, indent, tags=val_tag) indent -= indent_delta self.write("</tr>", indent) def _write_table(self, indent: int = 0) -> None: _classes = ["dataframe"] # Default class. use_mathjax = get_option("display.html.use_mathjax") if not use_mathjax: _classes.append("tex2jax_ignore") if self.classes is not None: if isinstance(self.classes, str): self.classes = self.classes.split() if not isinstance(self.classes, (list, tuple)): raise TypeError( "classes must be a string, list, " f"or tuple, not {type(self.classes)}" ) _classes.extend(self.classes) if self.table_id is None: id_section = "" else: id_section = f' id="{self.table_id}"' self.write( f'<table border="{self.border}" class="{" ".join(_classes)}"{id_section}>', indent, ) if self.fmt.header or self.show_row_idx_names: self._write_header(indent + self.indent_delta) self._write_body(indent + self.indent_delta) self.write("</table>", indent) def _write_col_header(self, indent: int) -> None: is_truncated_horizontally = self.fmt.is_truncated_horizontally if isinstance(self.columns, MultiIndex): template = 'colspan="{span:d}" halign="left"' if self.fmt.sparsify: # GH3547 sentinel = lib.no_default else: sentinel = False levels = self.columns.format(sparsify=sentinel, adjoin=False, names=False) level_lengths = get_level_lengths(levels, sentinel) inner_lvl = len(level_lengths) - 1 for lnum, (records, values) in enumerate(zip(level_lengths, levels)): if is_truncated_horizontally: # modify the header lines ins_col = self.fmt.tr_col_num if self.fmt.sparsify: recs_new = {} # Increment tags after ... col. for tag, span in list(records.items()): if tag >= ins_col: recs_new[tag + 1] = span elif tag + span > ins_col: recs_new[tag] = span + 1 if lnum == inner_lvl: values = ( values[:ins_col] + ("...",) + values[ins_col:] ) else: # sparse col headers do not receive a ... values = ( values[:ins_col] + (values[ins_col - 1],) + values[ins_col:] ) else: recs_new[tag] = span # if ins_col lies between tags, all col headers # get ... 
if tag + span == ins_col: recs_new[ins_col] = 1 values = values[:ins_col] + ("...",) + values[ins_col:] records = recs_new inner_lvl = len(level_lengths) - 1 if lnum == inner_lvl: records[ins_col] = 1 else: recs_new = {} for tag, span in list(records.items()): if tag >= ins_col: recs_new[tag + 1] = span else: recs_new[tag] = span recs_new[ins_col] = 1 records = recs_new values = values[:ins_col] + ["..."] + values[ins_col:] # see gh-22579 # Column Offset Bug with to_html(index=False) with # MultiIndex Columns and Index. # Initially fill row with blank cells before column names. # TODO: Refactor to remove code duplication with code # block below for standard columns index. row = [""] * (self.row_levels - 1) if self.fmt.index or self.show_col_idx_names: # see gh-22747 # If to_html(index_names=False) do not show columns # index names. # TODO: Refactor to use _get_column_name_list from # DataFrameFormatter class and create a # _get_formatted_column_labels function for code # parity with DataFrameFormatter class. if self.fmt.show_index_names: name = self.columns.names[lnum] row.append(pprint_thing(name or "")) else: row.append("") tags = {} j = len(row) for i, v in enumerate(values): if i in records: if records[i] > 1: tags[j] = template.format(span=records[i]) else: continue j += 1 row.append(v) self.write_tr(row, indent, self.indent_delta, tags=tags, header=True) else: # see gh-22579 # Column misalignment also occurs for # a standard index when the columns index is named. # Initially fill row with blank cells before column names. # TODO: Refactor to remove code duplication with code block # above for columns MultiIndex. row = [""] * (self.row_levels - 1) if self.fmt.index or self.show_col_idx_names: # see gh-22747 # If to_html(index_names=False) do not show columns # index names. # TODO: Refactor to use _get_column_name_list from # DataFrameFormatter class. 
if self.fmt.show_index_names: row.append(self.columns.name or "") else: row.append("") row.extend(self._get_columns_formatted_values()) align = self.fmt.justify if is_truncated_horizontally: ins_col = self.row_levels + self.fmt.tr_col_num row.insert(ins_col, "...") self.write_tr(row, indent, self.indent_delta, header=True, align=align) def _write_row_header(self, indent: int) -> None: is_truncated_horizontally = self.fmt.is_truncated_horizontally row = [x if x is not None else "" for x in self.frame.index.names] + [""] * ( self.ncols + (1 if is_truncated_horizontally else 0) ) self.write_tr(row, indent, self.indent_delta, header=True) def _write_header(self, indent: int) -> None: self.write("<thead>", indent) if self.fmt.header: self._write_col_header(indent + self.indent_delta) if self.show_row_idx_names: self._write_row_header(indent + self.indent_delta) self.write("</thead>", indent) def _get_formatted_values(self) -> Dict[int, List[str]]: with option_context("display.max_colwidth", None): fmt_values = {i: self.fmt.format_col(i) for i in range(self.ncols)} return fmt_values def _write_body(self, indent: int) -> None: self.write("<tbody>", indent) fmt_values = self._get_formatted_values() # write values if self.fmt.index and isinstance(self.frame.index, MultiIndex): self._write_hierarchical_rows(fmt_values, indent + self.indent_delta) else: self._write_regular_rows(fmt_values, indent + self.indent_delta) self.write("</tbody>", indent) def _write_regular_rows( self, fmt_values: Mapping[int, List[str]], indent: int ) -> None: is_truncated_horizontally = self.fmt.is_truncated_horizontally is_truncated_vertically = self.fmt.is_truncated_vertically nrows = len(self.fmt.tr_frame) if self.fmt.index: fmt = self.fmt._get_formatter("__index__") if fmt is not None: index_values = self.fmt.tr_frame.index.map(fmt) else: index_values = self.fmt.tr_frame.index.format() row: List[str] = [] for i in range(nrows): if is_truncated_vertically and i == (self.fmt.tr_row_num): str_sep_row = ["..."] * len(row) self.write_tr( str_sep_row, indent, self.indent_delta, tags=None, nindex_levels=self.row_levels, ) row = [] if self.fmt.index: row.append(index_values[i]) # see gh-22579 # Column misalignment also occurs for # a standard index when the columns index is named. # Add blank cell before data cells. elif self.show_col_idx_names: row.append("") row.extend(fmt_values[j][i] for j in range(self.ncols)) if is_truncated_horizontally: dot_col_ix = self.fmt.tr_col_num + self.row_levels row.insert(dot_col_ix, "...") self.write_tr( row, indent, self.indent_delta, tags=None, nindex_levels=self.row_levels ) def _write_hierarchical_rows( self, fmt_values: Mapping[int, List[str]], indent: int ) -> None: template = 'rowspan="{span}" valign="top"' is_truncated_horizontally = self.fmt.is_truncated_horizontally is_truncated_vertically = self.fmt.is_truncated_vertically frame = self.fmt.tr_frame nrows = len(frame) assert isinstance(frame.index, MultiIndex) idx_values = frame.index.format(sparsify=False, adjoin=False, names=False) idx_values = list(zip(*idx_values)) if self.fmt.sparsify: # GH3547 sentinel = lib.no_default levels = frame.index.format(sparsify=sentinel, adjoin=False, names=False) level_lengths = get_level_lengths(levels, sentinel) inner_lvl = len(level_lengths) - 1 if is_truncated_vertically: # Insert ... row and adjust idx_values and # level_lengths to take this into account. 
ins_row = self.fmt.tr_row_num inserted = False for lnum, records in enumerate(level_lengths): rec_new = {} for tag, span in list(records.items()): if tag >= ins_row: rec_new[tag + 1] = span elif tag + span > ins_row: rec_new[tag] = span + 1 # GH 14882 - Make sure insertion done once if not inserted: dot_row = list(idx_values[ins_row - 1]) dot_row[-1] = "..." idx_values.insert(ins_row, tuple(dot_row)) inserted = True else: dot_row = list(idx_values[ins_row]) dot_row[inner_lvl - lnum] = "..." idx_values[ins_row] = tuple(dot_row) else: rec_new[tag] = span # If ins_row lies between tags, all cols idx cols # receive ... if tag + span == ins_row: rec_new[ins_row] = 1 if lnum == 0: idx_values.insert( ins_row, tuple(["..."] * len(level_lengths)) ) # GH 14882 - Place ... in correct level elif inserted: dot_row = list(idx_values[ins_row]) dot_row[inner_lvl - lnum] = "..." idx_values[ins_row] = tuple(dot_row) level_lengths[lnum] = rec_new level_lengths[inner_lvl][ins_row] = 1 for ix_col in range(len(fmt_values)): fmt_values[ix_col].insert(ins_row, "...") nrows += 1 for i in range(nrows): row = [] tags = {} sparse_offset = 0 j = 0 for records, v in zip(level_lengths, idx_values[i]): if i in records: if records[i] > 1: tags[j] = template.format(span=records[i]) else: sparse_offset += 1 continue j += 1 row.append(v) row.extend(fmt_values[j][i] for j in range(self.ncols)) if is_truncated_horizontally: row.insert( self.row_levels - sparse_offset + self.fmt.tr_col_num, "..." ) self.write_tr( row, indent, self.indent_delta, tags=tags, nindex_levels=len(levels) - sparse_offset, ) else: row = [] for i in range(len(frame)): if is_truncated_vertically and i == (self.fmt.tr_row_num): str_sep_row = ["..."] * len(row) self.write_tr( str_sep_row, indent, self.indent_delta, tags=None, nindex_levels=self.row_levels, ) idx_values = list( zip(*frame.index.format(sparsify=False, adjoin=False, names=False)) ) row = [] row.extend(idx_values[i]) row.extend(fmt_values[j][i] for j in range(self.ncols)) if is_truncated_horizontally: row.insert(self.row_levels + self.fmt.tr_col_num, "...") self.write_tr( row, indent, self.indent_delta, tags=None, nindex_levels=frame.index.nlevels, ) class NotebookFormatter(HTMLFormatter): """ Internal class for formatting output data in html for display in Jupyter Notebooks. This class is intended for functionality specific to DataFrame._repr_html_() and DataFrame.to_html(notebook=True) """ def _get_formatted_values(self) -> Dict[int, List[str]]: return {i: self.fmt.format_col(i) for i in range(self.ncols)} def _get_columns_formatted_values(self) -> List[str]: return self.columns.format() def write_style(self) -> None: # We use the "scoped" attribute here so that the desired # style properties for the data frame are not then applied # throughout the entire notebook. 
template_first = """\ <style scoped>""" template_last = """\ </style>""" template_select = """\ .dataframe %s { %s: %s; }""" element_props = [ ("tbody tr th:only-of-type", "vertical-align", "middle"), ("tbody tr th", "vertical-align", "top"), ] if isinstance(self.columns, MultiIndex): element_props.append(("thead tr th", "text-align", "left")) if self.show_row_idx_names: element_props.append( ("thead tr:last-of-type th", "text-align", "right") ) else: element_props.append(("thead th", "text-align", "right")) template_mid = "\n\n".join(map(lambda t: template_select % t, element_props)) template = dedent("\n".join((template_first, template_mid, template_last))) self.write(template) def render(self) -> List[str]: self.write("<div>") self.write_style() super().render() self.write("</div>") return self.elements
bsd-3-clause
8,511,209,384,092,984,000
37.018033
88
0.486525
false
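A minimal usage sketch for the formatter code above (not part of the pandas file; it assumes only that pandas is installed, and the DataFrame shape is illustrative):

import pandas as pd

df = pd.DataFrame(
    [[1, 2, 3, 4], [5, 6, 7, 8]],
    columns=pd.MultiIndex.from_product([["a", "b"], ["x", "y"]]),
)

# MultiIndex columns exercise the sparsified _write_col_header branch.
html = df.to_html()

# max_cols forces horizontal truncation, inserting the "..." column handled above.
truncated = df.to_html(max_cols=2)

# notebook=True routes rendering through NotebookFormatter and its scoped <style>.
nb_html = df.to_html(notebook=True)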
cobrab11/black1-bot
extensions/collect.py
3
6973
# BS mark.1-55
# /* coding: utf-8 */

# BlackSmith plugin
# This should be rewritten!

BLACK_LIST = 'dynamic/blacklist.txt'

CHAT_CACHE = {}
AMSGBL = []
DirtyChats = []

def handler_chat_cache(stanza, ltype, source, body):
    try:
        subject = stanza.getTag('subject')
    except:
        subject = False
    if ltype != 'public' or subject or not source[2]:
        return
    header = u'[%s] %s» ' % (time.strftime('%H:%M:%S (%d.%m.%Y) GMT', time.gmtime()), source[2])
    CHAT_CACHE[source[1]]['1'] = CHAT_CACHE[source[1]]['2']
    if len(body) > 256:
        body = body[:256]+'[...]'
    CHAT_CACHE[source[1]]['2'] = header+body

def handler_clean(mType, source, body):
    if source[1] in GROUPCHATS:
        if source[1] in DirtyChats:
            DirtyChats.remove(source[1])
            if mType != "private":
                change_bot_status(source[1], u"Cleaning...", "dnd")
            zero = xmpp.Message(source[1], "", typ = "groupchat")
            zero.setTag("body")
            count = 24
            if check_number(body):
                number = int(body)
                if number < 51:
                    count = number
            for msg in xrange(count):
                try:
                    jClient.send(zero)
                except IOError:
                    return
                INFO['outmsg'] += 1
                if (msg != count - 1): # no need to sleep after the last message
                    time.sleep(1.4)
            if mType != "private":
                message = STATUS[source[1]]["message"]
                status = STATUS[source[1]]["status"]
                change_bot_status(source[1], message, status)
            CHAT_CACHE[source[1]] = {"1": "", "2": ""}
            DirtyChats.append(source[1])
        else:
            reply(mType, source, "In progress.")
    else:
        reply(mType, source, "Groupchats only!")

def last_chat_cache(type, source, body):
    confs = sorted(GROUPCHATS.keys())
    if body:
        body = body.lower()
        if body in confs:
            conf = body
        elif check_number(body):
            number = int(body) - 1
            if number >= 0 and number < len(confs): # "<=" here would allow an out-of-range index
                conf = confs[number]
            else:
                conf = False
        else:
            conf = False
        if conf:
            cache = ''
            if CHAT_CACHE[conf]['1']:
                cache += '\n'+CHAT_CACHE[conf]['1']
            if CHAT_CACHE[conf]['2']:
                cache += '\n'+CHAT_CACHE[conf]['2']
            if not cache:
                cache = u'empty'
            reply(type, source, cache)
        else:
            reply(type, source, u"I'm not there!")
    else:
        col, list = 0, ''
        for conf in confs:
            col = col + 1
            list += u'\n№ '+str(col)+'. - '+conf
        reply(type, source, list)

def handler_test(type, source, body):
    if time.localtime()[1:3] == (4, 1):
        testfr = [u"KITTEH IN DANGER!!11", u"TELOID11111",
                  u"GALAXY IN DANGER1111", u"PEWWWWW!!111",
                  u"ONEONE!!!!", u"XJSLO111", u"CHUCK DISAPPROVES......",
                  u"ONOTOLE IS ANGRY.", u"KITTEH APPROVES!1", u"PEEPS IN SHOCK11"]
        answer = random.choice(testfr)
    else:
        testfr = {0: u"Everything is fine! (No errors)",
                  1: u"Something broke… (1 error)",
                  2: u"Something broke twice... (2 errors)",
                  3: u"Something broke twice and then broke again… (3 errors) (!)",
                  4: u"Something went wrong… (4 errors) (!!)",
                  "more": u"Today is clearly not my day (%d errors) (!!!)"}
        Error = len(ERRORS.keys()) # meet you here!
        answer = testfr.get(Error, testfr["more"] % Error)
    reply(type, source, answer + (' (PID: %s)' % str(BOT_PID)))

def handler_admin_message(type, source, body):
    if body:
        args = body.split()
        if len(args) >= 2:
            jid = args[0].strip()
            if "@" in jid and "." in jid:
                inst = jid.split('/')[0].lower()
                if "@conf" in jid and inst not in GROUPCHATS:
                    reply(type, source, u"I'm not in that conference.")
                else:
                    mess = body[(body.find(' ') + 1):].strip()
                    if len(mess) <= 1024:
                        msg(jid, u'Message from '+source[2]+': '+mess)
                        reply(type, source, u'done')
                    else:
                        reply(type, source, u'The message is too long!')
            else:
                reply(type, source, u'That is not a JabberID at all!')
        else:
            reply(type, source, u'And what should I send?')

def handler_admin_say(type, source, body):
    if body:
        if len(body) <= 256:
            msg(source[1], body)
        else:
            msg(source[1], body[:256])
    else:
        reply(type, source, u'Well, and then what?')

def handler_global_message(type, source, body):
    if body:
        for conf in GROUPCHATS.keys():
            msg(conf, u'### Message from '+source[2]+':\n'+body)
        reply(type, source, u'The message was sent out successfully.')
    else:
        reply(type, source, u'And what should I send?')

def handler_auto_message(type, source, body):
    if body:
        jid = handler_jid(source[0])
        if jid in AMSGBL:
            reply(type, source, u'You are not allowed to send messages to the admin.')
        elif len(body) <= 1024:
            delivery(u'Message from '+source[2]+' ('+jid+'): '+body)
            reply(type, source, u'done')
        else:
            reply(type, source, u'The message is too long!')
    else:
        reply(type, source, u'Well, and then what?')

def handler_amsg_blacklist(type, source, body):
    if body:
        args = body.split()
        if len(args) == 2:
            jid = args[1].strip()
            if "@" in jid and "." in jid:
                check = args[0].strip()
                if check == '+':
                    if jid not in AMSGBL:
                        AMSGBL.append(jid)
                        write_file(BLACK_LIST, str(AMSGBL))
                        repl = u'added %s to the blacklist' % (jid)
                    else:
                        repl = u'that JID is already there'
                elif check == '-':
                    if jid in AMSGBL:
                        AMSGBL.remove(jid)
                        write_file(BLACK_LIST, str(AMSGBL))
                        repl = u'removed %s from the blacklist' % (jid)
                    else:
                        repl = u'that JID is not there anyway'
                else:
                    repl = u'invalid syntax'
            else:
                repl = u'nope, that is not a JID at all!'
        else:
            repl = u'invalid syntax'
    else:
        repl, col = u'Blacklist:', 0
        for jid in AMSGBL:
            col = col + 1
            repl += '\n'+str(col)+'. '+jid
        if col == 0:
            repl = u'The blacklist is empty'
    reply(type, source, repl)

def amsg_blacklist_init():
    if initialize_file(BLACK_LIST, '[]'):
        globals()['AMSGBL'] = eval(read_file(BLACK_LIST))
    else:
        Print("\n\nError: can't create black list file!", color2)

def chat_cache_init(chat):
    CHAT_CACHE[chat] = {'1': '', '2': ''}
    DirtyChats.append(chat)

handler_register("01eh", handler_chat_cache)

command_handler(handler_clean, 15, "collect")
command_handler(last_chat_cache, 20, "collect")
command_handler(handler_test, 10, "collect")
command_handler(handler_admin_message, 100, "collect")
command_handler(handler_admin_say, 20, "collect")
command_handler(handler_global_message, 100, "collect")
command_handler(handler_auto_message, 10, "collect")
command_handler(handler_amsg_blacklist, 100, "collect")

handler_register("00si", amsg_blacklist_init)
handler_register("01si", chat_cache_init)
apache-2.0
-2,683,236,169,448,569,300
28.237209
93
0.617404
false
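A standalone sketch of the blacklist persistence round-trip used above, with plain file I/O standing in for the bot's write_file/read_file helpers (the file name is illustrative):

jids = [u'spammer@example.com']
with open('blacklist.txt', 'w') as f:
    f.write(str(jids))          # stored as a Python literal, e.g. "[u'spammer@example.com']"
with open('blacklist.txt') as f:
    restored = eval(f.read())   # the plugin restores it with eval(); ast.literal_eval is a safer drop-in
assert restored == jids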
systers/mailman
src/mailman/handlers/acknowledge.py
7
3436
# Copyright (C) 1998-2015 by the Free Software Foundation, Inc.
#
# This file is part of GNU Mailman.
#
# GNU Mailman is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option)
# any later version.
#
# GNU Mailman is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# GNU Mailman.  If not, see <http://www.gnu.org/licenses/>.

"""Send an acknowledgment of the successful post to the sender.

This only happens if the sender has set their AcknowledgePosts attribute.
"""

__all__ = [
    'Acknowledge',
    ]


from mailman.core.i18n import _
from mailman.email.message import UserNotification
from mailman.interfaces.handler import IHandler
from mailman.interfaces.languages import ILanguageManager
from mailman.utilities.i18n import make
from mailman.utilities.string import oneline
from zope.component import getUtility
from zope.interface import implementer


@implementer(IHandler)
class Acknowledge:
    """Send an acknowledgment."""

    name = 'acknowledge'
    description = _("""Send an acknowledgment of a posting.""")

    def process(self, mlist, msg, msgdata):
        """See `IHandler`."""
        # Extract the sender's address and find them in the user database
        sender = msgdata.get('original_sender', msg.sender)
        member = mlist.members.get_member(sender)
        if member is None or not member.acknowledge_posts:
            # Either the sender is not a member, in which case we can't know
            # whether they want an acknowledgment or not, or they are a member
            # who definitely does not want an acknowledgment.
            return
        # Okay, they are a member that wants an acknowledgment of their post.
        # Give them their original subject.  BAW: do we want to use the
        # decoded header?
        original_subject = msgdata.get(
            'origsubj', msg.get('subject', _('(no subject)')))
        # Get the user's preferred language.
        language_manager = getUtility(ILanguageManager)
        language = (language_manager[msgdata['lang']]
                    if 'lang' in msgdata
                    else member.preferred_language)
        # Now get the acknowledgment template.
        display_name = mlist.display_name
        text = make('postack.txt',
                    mailing_list=mlist,
                    language=language.code,
                    wrap=False,
                    subject=oneline(original_subject, in_unicode=True),
                    list_name=mlist.list_name,
                    display_name=display_name,
                    listinfo_url=mlist.script_url('listinfo'),
                    optionsurl=member.options_url,
                    )
        # Craft the outgoing message, with all headers and attributes
        # necessary for general delivery.  Then enqueue it to the outgoing
        # queue.
        subject = _('$display_name post acknowledgment')
        usermsg = UserNotification(sender, mlist.bounces_address,
                                   subject, text, language)
        usermsg.send(mlist)
gpl-3.0
9,196,911,609,368,220,000
39.423529
78
0.655995
false
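A distilled sketch of the gating logic Acknowledge.process() applies before sending anything; FakeMember and FakeRoster are hypothetical stand-ins, not Mailman classes:

class FakeMember:
    acknowledge_posts = True

class FakeRoster:
    def get_member(self, sender):
        return FakeMember() if sender == 'ann@example.com' else None

def wants_ack(roster, sender):
    # Mirrors the handler's early return: non-members and members who
    # opted out get no acknowledgment.
    member = roster.get_member(sender)
    return member is not None and member.acknowledge_posts

assert wants_ack(FakeRoster(), 'ann@example.com')
assert not wants_ack(FakeRoster(), 'stranger@example.com')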
sciCloud/OLiMS
lims/browser/fields/datetimefield.py
2
2641
from time import strptime

from dependencies.dependency import ClassSecurityInfo
from dependencies.dependency import DateTime, safelocaltime
from dependencies.dependency import DateTimeError
from dependencies.dependency import registerField
from dependencies.dependency import IDateTimeField
from dependencies.dependency import *
from dependencies.dependency import DateTimeField as DTF
from lims import logger
from dependencies.dependency import implements


class DateTimeField(DTF):

    """A field that stores dates and times

    This is identical to the AT widget on which it's based, but it checks
    the i18n translation values for date formats.  This does not
    specifically check the date_format_short_datepicker, so this means
    that date_formats should be identical between the python strftime and
    the jquery version.
    """

    _properties = Field._properties.copy()
    _properties.update({
        'type': 'datetime',
        'widget': CalendarWidget,
    })

    implements(IDateTimeField)

    security = ClassSecurityInfo()

    security.declarePrivate('set')

    def set(self, instance, value, **kwargs):
        """
        Check if value is an actual date/time value. If not, attempt
        to convert it to one; otherwise, set to None. Assign all
        properties passed as kwargs to object.
        """
        val = value
        if not value:
            val = None
        elif not isinstance(value, DateTime):
            for fmt in ['date_format_long', 'date_format_short']:
                fmtstr = instance.translate(fmt, domain='bika', mapping={})
                fmtstr = fmtstr.replace(r"${", '%').replace('}', '')
                try:
                    val = strptime(value, fmtstr)
                except ValueError:
                    continue
                try:
                    val = DateTime(*list(val)[:-6])
                except DateTimeError:
                    val = None
                # val is None here if DateTime() raised above; guard the
                # timezone fix-up so we don't crash on a bad conversion.
                if val and val.timezoneNaive():
                    # Use local timezone for tz naive strings
                    # see http://dev.plone.org/plone/ticket/10141
                    zone = val.localZone(safelocaltime(val.timeTime()))
                    parts = val.parts()[:-1] + (zone,)
                    val = DateTime(*parts)
                break
            else:
                logger.warning("DateTimeField failed to format date "
                               "string '%s' with '%s'" % (value, fmtstr))

        super(DateTimeField, self).set(instance, val, **kwargs)

registerField(DateTimeField,
              title='Date Time',
              description='Used for storing date/time')
agpl-3.0
4,101,780,731,871,722,000
36.197183
77
0.605074
false
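The key move in set() above is rewriting an i18n date format like "${Y}-${m}-${d}" into a strptime pattern. A standalone sketch (the catalog value is a hypothetical example; only the stdlib is used):

from time import strptime

fmtstr = "${Y}-${m}-${d}".replace(r"${", '%').replace('}', '')  # -> '%Y-%m-%d'
parts = strptime("2015-06-01", fmtstr)
print(parts.tm_year, parts.tm_mon, parts.tm_mday)  # 2015 6 1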
gbiggs/rtcshell
rtcshell/rtmgr.py
1
6160
#!/usr/bin/env python
# -*- Python -*-
# -*- coding: utf-8 -*-

'''rtcshell

Copyright (C) 2009-2010
    Geoffrey Biggs
    RT-Synthesis Research Group
    Intelligent Systems Research Institute,
    National Institute of Advanced Industrial Science and Technology (AIST),
    Japan
    All rights reserved.
    Licensed under the Eclipse Public License -v 1.0 (EPL)
    http://www.opensource.org/licenses/eclipse-1.0.txt

File: rtmgr.py

Implementation of the command for controlling managers.

'''

# $Source$


from optparse import OptionParser, OptionError
import os
from rtctree.exceptions import RtcTreeError, FailedToLoadModuleError, \
                               FailedToUnloadModuleError, \
                               FailedToCreateComponentError, \
                               FailedToDeleteComponentError
from rtctree.tree import create_rtctree, InvalidServiceError, \
                         FailedToNarrowRootNamingError, \
                         NonRootPathError
from rtctree.path import parse_path
import sys

from rtcshell import RTSH_PATH_USAGE, RTSH_VERSION
from rtcshell.path import cmd_path_to_full_path


def get_manager(cmd_path, full_path, tree=None):
    path, port = parse_path(full_path)
    if port:
        # Can't configure a port
        print >>sys.stderr, '{0}: Cannot access {1}: No such \
object.'.format(sys.argv[0], cmd_path)
        # Return a pair so callers can always unpack the result.
        return None, None
    if not path[-1]:
        # There was a trailing slash - ignore it
        path = path[:-1]

    if not tree:
        tree = create_rtctree(paths=path)
    if not tree:
        return None, None

    object = tree.get_node(path)
    if not object:
        print >>sys.stderr, '{0}: Cannot access {1}: No such \
object.'.format(sys.argv[0], cmd_path)
        return tree, None
    if not object.is_manager:
        print >>sys.stderr, '{0}: Cannot access {1}: Not a \
manager.'.format(sys.argv[0], cmd_path)
        return tree, None

    return tree, object


def load_module(cmd_path, full_path, module_path, init_func, tree=None):
    tree, mgr = get_manager(cmd_path, full_path, tree)
    if not mgr:
        return 1

    try:
        mgr.load_module(module_path, init_func)
    except FailedToLoadModuleError:
        print >>sys.stderr, '{0}: Failed to load module {1}'.format(\
                sys.argv[0], module_path)
        return 1

    return 0


def unload_module(cmd_path, full_path, module_path, tree=None):
    tree, mgr = get_manager(cmd_path, full_path, tree)
    if not mgr:
        return 1

    try:
        mgr.unload_module(module_path)
    except FailedToUnloadModuleError:
        print >>sys.stderr, '{0}: Failed to unload module {1}'.format(\
                sys.argv[0], module_path)
        return 1

    return 0


def create_component(cmd_path, full_path, module_name, tree=None):
    tree, mgr = get_manager(cmd_path, full_path, tree)
    if not mgr:
        return 1

    try:
        mgr.create_component(module_name)
    except FailedToCreateComponentError:
        print >>sys.stderr, '{0}: Failed to create component from module \
{1}'.format(sys.argv[0], module_name)
        return 1

    return 0


def delete_component(cmd_path, full_path, instance_name, tree=None):
    tree, mgr = get_manager(cmd_path, full_path, tree)
    if not mgr:
        return 1

    try:
        mgr.delete_component(instance_name)
    except FailedToDeleteComponentError, e:
        print >>sys.stderr, '{0}: Failed to delete component {1}'.format(\
                sys.argv[0], instance_name)
        return 1

    return 0


def main(argv=None, tree=None):
    usage = '''Usage: %prog [options] <path> <command> [args]
Control a manager, adding and removing shared libraries and components.

To set a manager's configuration, use rtconf.

A command should be one of:
    load, unload, create, delete

load <file system path> <init function>
  Load a shared library (DLL file or .so file) into the manager.

unload <file system path>
  Unload a shared library (DLL file or .so file) from the manager.

create <module name>
  Create a new component instance from a loaded shared library.
Properties of the new component can be set by specifying them as part of the module name argument, prefixed by a question mark. For example, to set the instance name of a new component of type ConsoleIn, use: rtmgr manager.mgr create ConsoleIn?instance_name=blag delete <instance name> Delete a component instance from the manager, destroying it. ''' + RTSH_PATH_USAGE version = RTSH_VERSION parser = OptionParser(usage=usage, version=version) parser.add_option('-d', '--debug', dest='debug', action='store_true', default=False, help='Print debugging information. \ [Default: %default]') if argv: sys.argv = [sys.argv[0]] + argv try: options, args = parser.parse_args() except OptionError, e: print 'OptionError:', e return 1 if len(args) > 2: cmd_path = args[0] cmd = args[1] args = args[2:] else: print >>sys.stderr, usage return 1 full_path = cmd_path_to_full_path(cmd_path) if cmd == 'load': if len(args) != 2: print >>sys.stderr, '{0}: Incorrect number of arguments for load \ command.'.format(sys.argv[0]) return 1 return load_module(cmd_path, full_path, args[0], args[1], tree) elif cmd == 'unload': if len(args) != 1: print >>sys.stderr, '{0}: Incorrect number of arguments for \ unload command.'.format(sys.argv[0]) return 1 return unload_module(cmd_path, full_path, args[0], tree) elif cmd == 'create': if len(args) != 1: print >>sys.stderr, '{0}: Incorrect number of arguments for \ create command.'.format(sys.argv[0]) return 1 return create_component(cmd_path, full_path, args[0], tree) elif cmd == 'delete': if len(args) != 1: print >>sys.stderr, '{0}: Incorrect number of arguments for \ delete command.'.format(sys.argv[0]) return 1 return delete_component(cmd_path, full_path, args[0], tree) print >>sys.stderr, usage return 1 # vim: tw=79
epl-1.0
4,634,796,370,147,265,000
28.473684
78
0.631818
false
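Because main() above takes an optional argv list, the subcommands can be driven programmatically as well as from a shell. A sketch, assuming rtcshell and rtctree are installed and a live manager is reachable (the manager path, library path, and module names are illustrative):

from rtcshell.rtmgr import main

main(['/localhost/manager.mgr', 'load', '/usr/lib/rtc/ConsoleIn.so', 'ConsoleInInit'])
main(['/localhost/manager.mgr', 'create', 'ConsoleIn?instance_name=blag'])
main(['/localhost/manager.mgr', 'delete', 'blag'])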
dhoffman34/django
tests/servers/tests.py
23
5966
# -*- encoding: utf-8 -*- """ Tests for django.core.servers. """ from __future__ import unicode_literals import os import socket from django.core.exceptions import ImproperlyConfigured from django.test import LiveServerTestCase from django.test import override_settings from django.utils.http import urlencode from django.utils.six.moves.urllib.error import HTTPError from django.utils.six.moves.urllib.request import urlopen from django.utils._os import upath from .models import Person TEST_ROOT = os.path.dirname(upath(__file__)) TEST_SETTINGS = { 'MEDIA_URL': '/media/', 'MEDIA_ROOT': os.path.join(TEST_ROOT, 'media'), 'STATIC_URL': '/static/', 'STATIC_ROOT': os.path.join(TEST_ROOT, 'static'), } @override_settings(ROOT_URLCONF='servers.urls') class LiveServerBase(LiveServerTestCase): available_apps = [ 'servers', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', ] fixtures = ['testdata.json'] @classmethod def setUpClass(cls): # Override settings cls.settings_override = override_settings(**TEST_SETTINGS) cls.settings_override.enable() super(LiveServerBase, cls).setUpClass() @classmethod def tearDownClass(cls): # Restore original settings cls.settings_override.disable() super(LiveServerBase, cls).tearDownClass() def urlopen(self, url): return urlopen(self.live_server_url + url) class LiveServerAddress(LiveServerBase): """ Ensure that the address set in the environment variable is valid. Refs #2879. """ @classmethod def setUpClass(cls): # Backup original environment variable address_predefined = 'DJANGO_LIVE_TEST_SERVER_ADDRESS' in os.environ old_address = os.environ.get('DJANGO_LIVE_TEST_SERVER_ADDRESS') # Just the host is not accepted cls.raises_exception('localhost', ImproperlyConfigured) # The host must be valid cls.raises_exception('blahblahblah:8081', socket.error) # The list of ports must be in a valid format cls.raises_exception('localhost:8081,', ImproperlyConfigured) cls.raises_exception('localhost:8081,blah', ImproperlyConfigured) cls.raises_exception('localhost:8081-', ImproperlyConfigured) cls.raises_exception('localhost:8081-blah', ImproperlyConfigured) cls.raises_exception('localhost:8081-8082-8083', ImproperlyConfigured) # Restore original environment variable if address_predefined: os.environ['DJANGO_LIVE_TEST_SERVER_ADDRESS'] = old_address else: del os.environ['DJANGO_LIVE_TEST_SERVER_ADDRESS'] @classmethod def tearDownClass(cls): # skip it, as setUpClass doesn't call its parent either pass @classmethod def raises_exception(cls, address, exception): os.environ['DJANGO_LIVE_TEST_SERVER_ADDRESS'] = address try: super(LiveServerAddress, cls).setUpClass() raise Exception("The line above should have raised an exception") except exception: pass finally: super(LiveServerAddress, cls).tearDownClass() def test_test_test(self): # Intentionally empty method so that the test is picked up by the # test runner and the overridden setUpClass() method is executed. pass class LiveServerViews(LiveServerBase): def test_404(self): """ Ensure that the LiveServerTestCase serves 404s. Refs #2879. """ try: self.urlopen('/') except HTTPError as err: self.assertEqual(err.code, 404, 'Expected 404 response') else: self.fail('Expected 404 response') def test_view(self): """ Ensure that the LiveServerTestCase serves views. Refs #2879. """ f = self.urlopen('/example_view/') self.assertEqual(f.read(), b'example view') def test_static_files(self): """ Ensure that the LiveServerTestCase serves static files. Refs #2879. 
""" f = self.urlopen('/static/example_static_file.txt') self.assertEqual(f.read().rstrip(b'\r\n'), b'example static file') def test_no_collectstatic_emulation(self): """ Test that LiveServerTestCase reports a 404 status code when HTTP client tries to access a static file that isn't explicitly put under STATIC_ROOT. """ try: self.urlopen('/static/another_app/another_app_static_file.txt') except HTTPError as err: self.assertEqual(err.code, 404, 'Expected 404 response') else: self.fail('Expected 404 response (got %d)' % err.code) def test_media_files(self): """ Ensure that the LiveServerTestCase serves media files. Refs #2879. """ f = self.urlopen('/media/example_media_file.txt') self.assertEqual(f.read().rstrip(b'\r\n'), b'example media file') def test_environ(self): f = self.urlopen('/environ_view/?%s' % urlencode({'q': 'тест'})) self.assertIn(b"QUERY_STRING: 'q=%D1%82%D0%B5%D1%81%D1%82'", f.read()) class LiveServerDatabase(LiveServerBase): def test_fixtures_loaded(self): """ Ensure that fixtures are properly loaded and visible to the live server thread. Refs #2879. """ f = self.urlopen('/model_view/') self.assertEqual(f.read().splitlines(), [b'jane', b'robert']) def test_database_writes(self): """ Ensure that data written to the database by a view can be read. Refs #2879. """ self.urlopen('/create_model_instance/') self.assertQuerysetEqual( Person.objects.all().order_by('pk'), ['jane', 'robert', 'emily'], lambda b: b.name )
bsd-3-clause
7,758,314,232,677,597,000
30.882353
79
0.631164
false
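The LiveServerAddress test above pins down which DJANGO_LIVE_TEST_SERVER_ADDRESS values are accepted; a short sketch of the valid and invalid forms (the values are illustrative):

import os

os.environ['DJANGO_LIVE_TEST_SERVER_ADDRESS'] = 'localhost:8081'       # valid: host and port
os.environ['DJANGO_LIVE_TEST_SERVER_ADDRESS'] = 'localhost:8081-8083'  # valid: host and port range
# 'localhost' (no port), 'localhost:8081,' or 'localhost:8081-' raise
# ImproperlyConfigured when the test server starts, as exercised above.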
stone5495/NewsBlur
apps/profile/views.py
8
22968
import stripe import datetime from django.contrib.auth.decorators import login_required from django.views.decorators.http import require_POST from django.views.decorators.csrf import csrf_protect from django.contrib.auth import logout as logout_user from django.contrib.auth import login as login_user from django.db.models.aggregates import Sum from django.http import HttpResponse, HttpResponseRedirect from django.contrib.sites.models import Site from django.contrib.auth.models import User from django.contrib.admin.views.decorators import staff_member_required from django.core.urlresolvers import reverse from django.template import RequestContext from django.shortcuts import render_to_response from django.core.mail import mail_admins from django.conf import settings from apps.profile.models import Profile, PaymentHistory, RNewUserQueue, MRedeemedCode, MGiftCode from apps.reader.models import UserSubscription, UserSubscriptionFolders, RUserStory from apps.profile.forms import StripePlusPaymentForm, PLANS, DeleteAccountForm from apps.profile.forms import ForgotPasswordForm, ForgotPasswordReturnForm, AccountSettingsForm from apps.profile.forms import RedeemCodeForm from apps.reader.forms import SignupForm, LoginForm from apps.rss_feeds.models import MStarredStory, MStarredStoryCounts from apps.social.models import MSocialServices, MActivity, MSocialProfile from apps.analyzer.models import MClassifierTitle, MClassifierAuthor, MClassifierFeed, MClassifierTag from utils import json_functions as json from utils.user_functions import ajax_login_required from utils.view_functions import render_to from utils.user_functions import get_user from utils import log as logging from vendor.paypalapi.exceptions import PayPalAPIResponseError from vendor.paypal.standard.forms import PayPalPaymentsForm SINGLE_FIELD_PREFS = ('timezone','feed_pane_size','hide_mobile','send_emails', 'hide_getting_started', 'has_setup_feeds', 'has_found_friends', 'has_trained_intelligence',) SPECIAL_PREFERENCES = ('old_password', 'new_password', 'autofollow_friends', 'dashboard_date',) @ajax_login_required @require_POST @json.json_view def set_preference(request): code = 1 message = '' new_preferences = request.POST preferences = json.decode(request.user.profile.preferences) for preference_name, preference_value in new_preferences.items(): if preference_value in ['true','false']: preference_value = True if preference_value == 'true' else False if preference_name in SINGLE_FIELD_PREFS: setattr(request.user.profile, preference_name, preference_value) elif preference_name in SPECIAL_PREFERENCES: if preference_name == 'autofollow_friends': social_services = MSocialServices.get_user(request.user.pk) social_services.autofollow = preference_value social_services.save() elif preference_name == 'dashboard_date': request.user.profile.dashboard_date = datetime.datetime.utcnow() else: if preference_value in ["true", "false"]: preference_value = True if preference_value == "true" else False preferences[preference_name] = preference_value if preference_name == 'intro_page': logging.user(request, "~FBAdvancing intro to page ~FM~SB%s" % preference_value) request.user.profile.preferences = json.encode(preferences) request.user.profile.save() logging.user(request, "~FMSaving preference: %s" % new_preferences) response = dict(code=code, message=message, new_preferences=new_preferences) return response @ajax_login_required @json.json_view def get_preference(request): code = 1 preference_name = request.POST.get('preference') preferences = 
json.decode(request.user.profile.preferences) payload = preferences if preference_name: payload = preferences.get(preference_name) response = dict(code=code, payload=payload) return response @csrf_protect def login(request): form = LoginForm() if request.method == "POST": form = LoginForm(data=request.POST) if form.is_valid(): login_user(request, form.get_user()) logging.user(form.get_user(), "~FG~BBOAuth Login~FW") return HttpResponseRedirect(request.POST['next'] or reverse('index')) return render_to_response('accounts/login.html', { 'form': form, 'next': request.REQUEST.get('next', "") }, context_instance=RequestContext(request)) @csrf_protect def signup(request): form = SignupForm() if request.method == "POST": form = SignupForm(data=request.POST) if form.is_valid(): new_user = form.save() login_user(request, new_user) logging.user(new_user, "~FG~SB~BBNEW SIGNUP: ~FW%s" % new_user.email) new_user.profile.activate_free() return HttpResponseRedirect(request.POST['next'] or reverse('index')) return render_to_response('accounts/signup.html', { 'form': form, 'next': request.REQUEST.get('next', "") }, context_instance=RequestContext(request)) @login_required @csrf_protect def redeem_code(request): code = request.GET.get('code', None) form = RedeemCodeForm(initial={'gift_code': code}) if request.method == "POST": form = RedeemCodeForm(data=request.POST) if form.is_valid(): gift_code = request.POST['gift_code'] MRedeemedCode.redeem(user=request.user, gift_code=gift_code) return render_to_response('reader/paypal_return.xhtml', {}, context_instance=RequestContext(request)) return render_to_response('accounts/redeem_code.html', { 'form': form, 'code': request.REQUEST.get('code', ""), 'next': request.REQUEST.get('next', "") }, context_instance=RequestContext(request)) @ajax_login_required @require_POST @json.json_view def set_account_settings(request): code = -1 message = 'OK' form = AccountSettingsForm(user=request.user, data=request.POST) if form.is_valid(): form.save() code = 1 else: message = form.errors[form.errors.keys()[0]][0] payload = { "username": request.user.username, "email": request.user.email, "social_profile": MSocialProfile.profile(request.user.pk) } return dict(code=code, message=message, payload=payload) @ajax_login_required @require_POST @json.json_view def set_view_setting(request): code = 1 feed_id = request.POST['feed_id'] feed_view_setting = request.POST.get('feed_view_setting') feed_order_setting = request.POST.get('feed_order_setting') feed_read_filter_setting = request.POST.get('feed_read_filter_setting') feed_layout_setting = request.POST.get('feed_layout_setting') view_settings = json.decode(request.user.profile.view_settings) setting = view_settings.get(feed_id, {}) if isinstance(setting, basestring): setting = {'v': setting} if feed_view_setting: setting['v'] = feed_view_setting if feed_order_setting: setting['o'] = feed_order_setting if feed_read_filter_setting: setting['r'] = feed_read_filter_setting if feed_layout_setting: setting['l'] = feed_layout_setting view_settings[feed_id] = setting request.user.profile.view_settings = json.encode(view_settings) request.user.profile.save() logging.user(request, "~FMView settings: %s/%s/%s/%s" % (feed_view_setting, feed_order_setting, feed_read_filter_setting, feed_layout_setting)) response = dict(code=code) return response @ajax_login_required @require_POST @json.json_view def clear_view_setting(request): code = 1 view_setting_type = request.POST.get('view_setting_type') view_settings = 
json.decode(request.user.profile.view_settings) new_view_settings = {} removed = 0 for feed_id, view_setting in view_settings.items(): if view_setting_type == 'layout' and 'l' in view_setting: del view_setting['l'] removed += 1 if view_setting_type == 'view' and 'v' in view_setting: del view_setting['v'] removed += 1 if view_setting_type == 'order' and 'o' in view_setting: del view_setting['o'] removed += 1 if view_setting_type == 'order' and 'r' in view_setting: del view_setting['r'] removed += 1 new_view_settings[feed_id] = view_setting request.user.profile.view_settings = json.encode(new_view_settings) request.user.profile.save() logging.user(request, "~FMClearing view settings: %s (found %s)" % (view_setting_type, removed)) response = dict(code=code, view_settings=view_settings, removed=removed) return response @ajax_login_required @json.json_view def get_view_setting(request): code = 1 feed_id = request.POST['feed_id'] view_settings = json.decode(request.user.profile.view_settings) response = dict(code=code, payload=view_settings.get(feed_id)) return response @ajax_login_required @require_POST @json.json_view def set_collapsed_folders(request): code = 1 collapsed_folders = request.POST['collapsed_folders'] request.user.profile.collapsed_folders = collapsed_folders request.user.profile.save() logging.user(request, "~FMCollapsing folder: %s" % collapsed_folders) response = dict(code=code) return response @ajax_login_required def paypal_form(request): domain = Site.objects.get_current().domain paypal_dict = { "cmd": "_xclick-subscriptions", "business": "samuel@ofbrooklyn.com", "a3": "12.00", # price "p3": 1, # duration of each unit (depends on unit) "t3": "Y", # duration unit ("M for Month") "src": "1", # make payments recur "sra": "1", # reattempt payment on payment error "no_note": "1", # remove extra notes (optional) "item_name": "NewsBlur Premium Account", "notify_url": "http://%s%s" % (domain, reverse('paypal-ipn')), "return_url": "http://%s%s" % (domain, reverse('paypal-return')), "cancel_return": "http://%s%s" % (domain, reverse('index')), "custom": request.user.username, } # Create the instance. form = PayPalPaymentsForm(initial=paypal_dict, button_type="subscribe") logging.user(request, "~FBLoading paypal/feedchooser") # Output the button. 
return HttpResponse(form.render(), mimetype='text/html') def paypal_return(request): return render_to_response('reader/paypal_return.xhtml', { }, context_instance=RequestContext(request)) @login_required def activate_premium(request): return HttpResponseRedirect(reverse('index')) @ajax_login_required @json.json_view def profile_is_premium(request): # Check tries code = 0 retries = int(request.GET['retries']) profile = Profile.objects.get(user=request.user) subs = UserSubscription.objects.filter(user=request.user) total_subs = subs.count() activated_subs = subs.filter(active=True).count() if retries >= 30: code = -1 if not request.user.profile.is_premium: subject = "Premium activation failed: %s (%s/%s)" % (request.user, activated_subs, total_subs) message = """User: %s (%s) -- Email: %s""" % (request.user.username, request.user.pk, request.user.email) mail_admins(subject, message, fail_silently=True) request.user.profile.is_premium = True request.user.profile.save() return { 'is_premium': profile.is_premium, 'code': code, 'activated_subs': activated_subs, 'total_subs': total_subs, } @login_required def stripe_form(request): user = request.user success_updating = False stripe.api_key = settings.STRIPE_SECRET plan = int(request.GET.get('plan', 2)) plan = PLANS[plan-1][0] error = None if request.method == 'POST': zebra_form = StripePlusPaymentForm(request.POST, email=user.email) if zebra_form.is_valid(): user.email = zebra_form.cleaned_data['email'] user.save() current_premium = (user.profile.is_premium and user.profile.premium_expire and user.profile.premium_expire > datetime.datetime.now()) # Are they changing their existing card? if user.profile.stripe_id and current_premium: customer = stripe.Customer.retrieve(user.profile.stripe_id) try: card = customer.cards.create(card=zebra_form.cleaned_data['stripe_token']) except stripe.CardError: error = "This card was declined." else: customer.default_card = card.id customer.save() success_updating = True else: try: customer = stripe.Customer.create(**{ 'card': zebra_form.cleaned_data['stripe_token'], 'plan': zebra_form.cleaned_data['plan'], 'email': user.email, 'description': user.username, }) except stripe.CardError: error = "This card was declined." 
else: user.profile.strip_4_digits = zebra_form.cleaned_data['last_4_digits'] user.profile.stripe_id = customer.id user.profile.save() user.profile.activate_premium() # TODO: Remove, because webhooks are slow success_updating = True else: zebra_form = StripePlusPaymentForm(email=user.email, plan=plan) if success_updating: return render_to_response('reader/paypal_return.xhtml', {}, context_instance=RequestContext(request)) new_user_queue_count = RNewUserQueue.user_count() new_user_queue_position = RNewUserQueue.user_position(request.user.pk) new_user_queue_behind = 0 if new_user_queue_position >= 0: new_user_queue_behind = new_user_queue_count - new_user_queue_position new_user_queue_position -= 1 logging.user(request, "~BM~FBLoading Stripe form") return render_to_response('profile/stripe_form.xhtml', { 'zebra_form': zebra_form, 'publishable': settings.STRIPE_PUBLISHABLE, 'success_updating': success_updating, 'new_user_queue_count': new_user_queue_count - 1, 'new_user_queue_position': new_user_queue_position, 'new_user_queue_behind': new_user_queue_behind, 'error': error, }, context_instance=RequestContext(request) ) @render_to('reader/activities_module.xhtml') def load_activities(request): user = get_user(request) page = max(1, int(request.REQUEST.get('page', 1))) activities, has_next_page = MActivity.user(user.pk, page=page) return { 'activities': activities, 'page': page, 'has_next_page': has_next_page, 'username': 'You', } @ajax_login_required @json.json_view def payment_history(request): user = request.user if request.user.is_staff: user_id = request.REQUEST.get('user_id', request.user.pk) user = User.objects.get(pk=user_id) history = PaymentHistory.objects.filter(user=user) statistics = { "created_date": user.date_joined, "last_seen_date": user.profile.last_seen_on, "last_seen_ip": user.profile.last_seen_ip, "timezone": unicode(user.profile.timezone), "stripe_id": user.profile.stripe_id, "profile": user.profile, "feeds": UserSubscription.objects.filter(user=user).count(), "email": user.email, "read_story_count": RUserStory.read_story_count(user.pk), "feed_opens": UserSubscription.objects.filter(user=user).aggregate(sum=Sum('feed_opens'))['sum'], "training": { 'title': MClassifierTitle.objects.filter(user_id=user.pk).count(), 'tag': MClassifierTag.objects.filter(user_id=user.pk).count(), 'author': MClassifierAuthor.objects.filter(user_id=user.pk).count(), 'feed': MClassifierFeed.objects.filter(user_id=user.pk).count(), } } return { 'is_premium': user.profile.is_premium, 'premium_expire': user.profile.premium_expire, 'payments': history, 'statistics': statistics, } @ajax_login_required @json.json_view def cancel_premium(request): canceled = request.user.profile.cancel_premium() return { 'code': 1 if canceled else -1, } @staff_member_required @ajax_login_required @json.json_view def refund_premium(request): user_id = request.REQUEST.get('user_id') partial = request.REQUEST.get('partial', False) user = User.objects.get(pk=user_id) try: refunded = user.profile.refund_premium(partial=partial) except stripe.InvalidRequestError, e: refunded = e except PayPalAPIResponseError, e: refunded = e return {'code': 1 if refunded else -1, 'refunded': refunded} @staff_member_required @ajax_login_required @json.json_view def upgrade_premium(request): user_id = request.REQUEST.get('user_id') user = User.objects.get(pk=user_id) gift = MGiftCode.add(gifting_user_id=User.objects.get(username='samuel').pk, receiving_user_id=user.pk) MRedeemedCode.redeem(user, gift.gift_code) return {'code': 
user.profile.is_premium} @staff_member_required @ajax_login_required @json.json_view def never_expire_premium(request): user_id = request.REQUEST.get('user_id') user = User.objects.get(pk=user_id) if user.profile.is_premium: user.profile.premium_expire = None user.profile.save() return {'code': 1} return {'code': -1} @staff_member_required @ajax_login_required @json.json_view def update_payment_history(request): user_id = request.REQUEST.get('user_id') user = User.objects.get(pk=user_id) user.profile.setup_premium_history(check_premium=False) return {'code': 1} @login_required @render_to('profile/delete_account.xhtml') def delete_account(request): if request.method == 'POST': form = DeleteAccountForm(request.POST, user=request.user) if form.is_valid(): logging.user(request.user, "~SK~BC~FRDeleting ~SB%s~SN's account." % request.user.username) request.user.profile.delete_user(confirm=True) logout_user(request) return HttpResponseRedirect(reverse('index')) else: logging.user(request.user, "~BC~FRFailed attempt to delete ~SB%s~SN's account." % request.user.username) else: logging.user(request.user, "~BC~FRAttempting to delete ~SB%s~SN's account." % request.user.username) form = DeleteAccountForm(user=request.user) return { 'delete_form': form, } @render_to('profile/forgot_password.xhtml') def forgot_password(request): if request.method == 'POST': form = ForgotPasswordForm(request.POST) if form.is_valid(): logging.user(request.user, "~BC~FRForgot password: ~SB%s" % request.POST['email']) try: user = User.objects.get(email__iexact=request.POST['email']) except User.MultipleObjectsReturned: user = User.objects.filter(email__iexact=request.POST['email'])[0] user.profile.send_forgot_password_email() return HttpResponseRedirect(reverse('index')) else: logging.user(request.user, "~BC~FRFailed forgot password: ~SB%s~SN" % request.POST['email']) else: logging.user(request.user, "~BC~FRAttempting to retrieve forgotton password.") form = ForgotPasswordForm() return { 'forgot_password_form': form, } @login_required @render_to('profile/forgot_password_return.xhtml') def forgot_password_return(request): if request.method == 'POST': logging.user(request.user, "~BC~FRReseting ~SB%s~SN's password." % request.user.username) new_password = request.POST.get('password', '') request.user.set_password(new_password) request.user.save() return HttpResponseRedirect(reverse('index')) else: logging.user(request.user, "~BC~FRAttempting to reset ~SB%s~SN's password." 
% request.user.username) form = ForgotPasswordReturnForm() return { 'forgot_password_return_form': form, } @ajax_login_required @json.json_view def delete_starred_stories(request): timestamp = request.POST.get('timestamp', None) if timestamp: delete_date = datetime.datetime.fromtimestamp(int(timestamp)) else: delete_date = datetime.datetime.now() starred_stories = MStarredStory.objects.filter(user_id=request.user.pk, starred_date__lte=delete_date) stories_deleted = starred_stories.count() starred_stories.delete() MStarredStoryCounts.count_for_user(request.user.pk, total_only=True) starred_counts, starred_count = MStarredStoryCounts.user_counts(request.user.pk, include_total=True) logging.user(request.user, "~BC~FRDeleting %s/%s starred stories (%s)" % (stories_deleted, stories_deleted+starred_count, delete_date)) return dict(code=1, stories_deleted=stories_deleted, starred_counts=starred_counts, starred_count=starred_count) @ajax_login_required @json.json_view def delete_all_sites(request): request.user.profile.send_opml_export_email(reason="You have deleted all of your sites, so here's a backup just in case.") subs = UserSubscription.objects.filter(user=request.user) sub_count = subs.count() subs.delete() usf = UserSubscriptionFolders.objects.get(user=request.user) usf.folders = '[]' usf.save() logging.user(request.user, "~BC~FRDeleting %s sites" % sub_count) return dict(code=1) @login_required @render_to('profile/email_optout.xhtml') def email_optout(request): user = request.user user.profile.send_emails = False user.profile.save() return { "user": user, }
mit
5,065,385,919,666,274,000
36.408795
126
0.636973
false
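set_preference() above coerces the strings 'true'/'false' from the POST body into real booleans before storing them in the JSON preferences blob; a distilled, standalone sketch of that coercion:

def coerce(value):
    # Only the literal strings 'true'/'false' become booleans;
    # everything else is stored unchanged.
    if value in ('true', 'false'):
        return value == 'true'
    return value

assert coerce('true') is True
assert coerce('false') is False
assert coerce('America/New_York') == 'America/New_York'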
knewmanTE/FrameworkBenchmarks
toolset/benchmark/test_types/json_type.py
36
1216
from benchmark.test_types.framework_test_type import FrameworkTestType from benchmark.test_types.verifications import ( basic_body_verification, verify_headers, verify_helloworld_object ) import json class JsonTestType(FrameworkTestType): def __init__(self): kwargs = { 'name': 'json', 'accept_header': self.accept('json'), 'requires_db': False, 'args': ['json_url'] } FrameworkTestType.__init__(self, **kwargs) def get_url(self): return self.json_url def verify(self, base_url): '''Validates the response is a JSON object of { 'message' : 'hello, world!' }. Case insensitive and quoting style is ignored ''' url = base_url + self.json_url headers, body = self.request_headers_and_body(url) response, problems = basic_body_verification(body, url) if len(problems) > 0: return problems problems += verify_helloworld_object(response, url) problems += verify_headers(headers, url, should_be='json') if len(problems) > 0: return problems else: return [('pass', '', url)]
bsd-3-clause
-994,630,005,704,520,600
26.022222
70
0.586349
false
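A hypothetical sketch of exercising the test type above; json_url is normally populated from the parsed benchmark config, and the host and port are illustrative:

t = JsonTestType()
t.json_url = '/json'
problems = t.verify('http://localhost:8080')
print(problems)  # [('pass', '', 'http://localhost:8080/json')] on success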
Aaron1992/v2ex
mapreduce/handlers.py
20
27693
#!/usr/bin/env python
#
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

"""Defines executor tasks handlers for MapReduce implementation."""

# Disable "Invalid method name"
# pylint: disable-msg=C6409

import datetime
import logging
import math
import os
from mapreduce.lib import simplejson
import time

from google.appengine.api import memcache
from google.appengine.api.labs import taskqueue
from google.appengine.ext import db

from mapreduce import base_handler
from mapreduce import context
from mapreduce import model
from mapreduce import quota
from mapreduce import util


# TODO(user): Make this a product of the reader or in quotas.py
_QUOTA_BATCH_SIZE = 20

# The amount of time to perform scanning in one slice. New slice will be
# scheduled as soon as current one takes this long.
_SLICE_DURATION_SEC = 15

# Delay between consecutive controller callback invocations.
_CONTROLLER_PERIOD_SEC = 2


class Error(Exception):
  """Base class for exceptions in this module."""


class NotEnoughArgumentsError(Error):
  """Required argument is missing."""


class NoDataError(Error):
  """There is no data present for a desired input."""


class MapperWorkerCallbackHandler(base_handler.BaseHandler):
  """Callback handler for mapreduce worker task.

  Request Parameters:
    mapreduce_spec: MapreduceSpec of the mapreduce serialized to json.
    shard_id: id of the shard.
    slice_id: id of the slice.
  """

  def __init__(self, time_function=time.time):
    """Constructor.

    Args:
      time_function: time function to use to obtain current time.
    """
    base_handler.BaseHandler.__init__(self)
    self._time = time_function

  def post(self):
    """Handle post request."""
    spec = model.MapreduceSpec.from_json_str(
        self.request.get("mapreduce_spec"))
    self._start_time = self._time()
    shard_id = self.shard_id()

    # TODO(user): Make this prettier
    logging.debug("post: shard=%s slice=%s headers=%s",
                  shard_id, self.slice_id(), self.request.headers)

    shard_state, control = db.get([
        model.ShardState.get_key_by_shard_id(shard_id),
        model.MapreduceControl.get_key_by_job_id(spec.mapreduce_id),
    ])
    if not shard_state:
      # We're letting this task die. It's up to controller code to
      # reinitialize and restart the task.
      logging.error("State not found for shard ID %r; shutting down",
                    shard_id)
      return

    if control and control.command == model.MapreduceControl.ABORT:
      logging.info("Abort command received by shard %d of job '%s'",
                   shard_state.shard_number, shard_state.mapreduce_id)
      shard_state.active = False
      shard_state.result_status = model.ShardState.RESULT_ABORTED
      shard_state.put()
      model.MapreduceControl.abort(spec.mapreduce_id)
      return

    input_reader = self.input_reader(spec.mapper)

    if spec.mapper.params.get("enable_quota", True):
      quota_consumer = quota.QuotaConsumer(
          quota.QuotaManager(memcache.Client()),
          shard_id,
          _QUOTA_BATCH_SIZE)
    else:
      quota_consumer = None

    ctx = context.Context(spec, shard_state)
    context.Context._set(ctx)

    try:
      # consume quota ahead, because we do not want to run a datastore
      # query if there's not enough quota for the shard.
if not quota_consumer or quota_consumer.check(): scan_aborted = False entity = None # We shouldn't fetch an entity from the reader if there's not enough # quota to process it. Perform all quota checks proactively. if not quota_consumer or quota_consumer.consume(): for entity in input_reader: if isinstance(entity, db.Model): shard_state.last_work_item = repr(entity.key()) else: shard_state.last_work_item = repr(entity)[:100] scan_aborted = not self.process_entity(entity, ctx) # Check if we've got enough quota for the next entity. if (quota_consumer and not scan_aborted and not quota_consumer.consume()): scan_aborted = True if scan_aborted: break else: scan_aborted = True if not scan_aborted: logging.info("Processing done for shard %d of job '%s'", shard_state.shard_number, shard_state.mapreduce_id) # We consumed extra quota item at the end of for loop. # Just be nice here and give it back :) if quota_consumer: quota_consumer.put(1) shard_state.active = False shard_state.result_status = model.ShardState.RESULT_SUCCESS # TODO(user): Mike said we don't want this happen in case of # exception while scanning. Figure out when it's appropriate to skip. ctx.flush() finally: context.Context._set(None) if quota_consumer: quota_consumer.dispose() # Rescheduling work should always be the last statement. It shouldn't happen # if there were any exceptions in code before it. if shard_state.active: self.reschedule(spec, input_reader) def process_entity(self, entity, ctx): """Process a single entity. Call mapper handler on the entity. Args: entity: an entity to process. ctx: current execution context. Returns: True if scan should be continued, False if scan should be aborted. """ ctx.counters.increment(context.COUNTER_MAPPER_CALLS) handler = ctx.mapreduce_spec.mapper.handler if util.is_generator_function(handler): for result in handler(entity): if callable(result): result(ctx) else: try: if len(result) == 2: logging.error("Collectors not implemented yet") else: logging.error("Got bad output tuple of length %d", len(result)) except TypeError: logging.error( "Handler yielded type %s, expected a callable or a tuple", result.__class__.__name__) else: handler(entity) if self._time() - self._start_time > _SLICE_DURATION_SEC: logging.debug("Spent %s seconds. Rescheduling", self._time() - self._start_time) return False return True def shard_id(self): """Get shard unique identifier of this task from request. Returns: shard identifier as string. """ return str(self.request.get("shard_id")) def slice_id(self): """Get slice unique identifier of this task from request. Returns: slice identifier as int. """ return int(self.request.get("slice_id")) def input_reader(self, mapper_spec): """Get the reader from mapper_spec initialized with the request's state. Args: mapper_spec: a mapper spec containing the immutable mapper state. Returns: An initialized InputReader. """ input_reader_spec_dict = simplejson.loads( self.request.get("input_reader_state")) return mapper_spec.input_reader_class().from_json( input_reader_spec_dict) @staticmethod def worker_parameters(mapreduce_spec, shard_id, slice_id, input_reader): """Fill in mapper worker task parameters. Returned parameters map is to be used as task payload, and it contains all the data, required by mapper worker to perform its function. Args: mapreduce_spec: specification of the mapreduce. shard_id: id of the shard (part of the whole dataset). slice_id: id of the slice (part of the shard). input_reader: InputReader containing the remaining inputs for this shard. 
Returns: string->string map of parameters to be used as task payload. """ return {"mapreduce_spec": mapreduce_spec.to_json_str(), "shard_id": shard_id, "slice_id": str(slice_id), "input_reader_state": input_reader.to_json_str()} @staticmethod def get_task_name(shard_id, slice_id): """Compute single worker task name. Args: shard_id: id of the shard (part of the whole dataset) as string. slice_id: id of the slice (part of the shard) as int. Returns: task name which should be used to process specified shard/slice. """ # Prefix the task name with something unique to this framework's # namespace so we don't conflict with user tasks on the queue. return "appengine-mrshard-%s-%s" % (shard_id, slice_id) def reschedule(self, mapreduce_spec, input_reader): """Reschedule worker task to continue scanning work. Args: mapreduce_spec: mapreduce specification. input_reader: remaining input reader to process. """ MapperWorkerCallbackHandler.schedule_slice( self.base_path(), mapreduce_spec, self.shard_id(), self.slice_id() + 1, input_reader) @classmethod def schedule_slice(cls, base_path, mapreduce_spec, shard_id, slice_id, input_reader, queue_name=None, eta=None, countdown=None): """Schedule slice scanning by adding it to the task queue. Args: base_path: base_path of mapreduce request handlers as string. mapreduce_spec: mapreduce specification as MapreduceSpec. shard_id: current shard id as string. slice_id: slice id as int. input_reader: remaining InputReader for given shard. queue_name: Optional queue to run on; uses the current queue of execution or the default queue if unspecified. eta: Absolute time when the MR should execute. May not be specified if 'countdown' is also supplied. This may be timezone-aware or timezone-naive. countdown: Time in seconds into the future that this MR should execute. Defaults to zero. """ task_params = MapperWorkerCallbackHandler.worker_parameters( mapreduce_spec, shard_id, slice_id, input_reader) task_name = MapperWorkerCallbackHandler.get_task_name(shard_id, slice_id) queue_name = os.environ.get("HTTP_X_APPENGINE_QUEUENAME", queue_name or "default") try: taskqueue.Task(url=base_path + "/worker_callback", params=task_params, name=task_name, eta=eta, countdown=countdown).add(queue_name) except (taskqueue.TombstonedTaskError, taskqueue.TaskAlreadyExistsError), e: logging.warning("Task %r with params %r already exists. %s: %s", task_name, task_params, e.__class__, e) class ControllerCallbackHandler(base_handler.BaseHandler): """Supervises mapreduce execution. Is also responsible for gathering execution status from shards together. This task is "continuously" running by adding itself again to taskqueue if mapreduce is still active. """ def __init__(self, time_function=time.time): """Constructor. Args: time_function: time function to use to obtain current time. """ base_handler.BaseHandler.__init__(self) self._time = time_function def post(self): """Handle post request.""" spec = model.MapreduceSpec.from_json_str( self.request.get("mapreduce_spec")) # TODO(user): Make this logging prettier. 
logging.debug("post: id=%s headers=%s", spec.mapreduce_id, self.request.headers) state, control = db.get([ model.MapreduceState.get_key_by_job_id(spec.mapreduce_id), model.MapreduceControl.get_key_by_job_id(spec.mapreduce_id), ]) if not state: logging.error("State not found for mapreduce_id '%s'; skipping", spec.mapreduce_id) return shard_states = model.ShardState.find_by_mapreduce_id(spec.mapreduce_id) if state.active and len(shard_states) != spec.mapper.shard_count: # Some shards were lost logging.error("Incorrect number of shard states: %d vs %d; " "aborting job '%s'", len(shard_states), spec.mapper.shard_count, spec.mapreduce_id) state.active = False state.result_status = model.MapreduceState.RESULT_FAILED model.MapreduceControl.abort(spec.mapreduce_id) active_shards = [s for s in shard_states if s.active] failed_shards = [s for s in shard_states if s.result_status == model.ShardState.RESULT_FAILED] aborted_shards = [s for s in shard_states if s.result_status == model.ShardState.RESULT_ABORTED] if state.active: state.active = bool(active_shards) state.active_shards = len(active_shards) state.failed_shards = len(failed_shards) state.aborted_shards = len(aborted_shards) if (not state.active and control and control.command == model.MapreduceControl.ABORT): # User-initiated abort *after* all shards have completed. logging.info("Abort signal received for job '%s'", spec.mapreduce_id) state.result_status = model.MapreduceState.RESULT_ABORTED if not state.active: state.active_shards = 0 if not state.result_status: # Set final result status derived from shard states. if [s for s in shard_states if s.result_status != model.ShardState.RESULT_SUCCESS]: state.result_status = model.MapreduceState.RESULT_FAILED else: state.result_status = model.MapreduceState.RESULT_SUCCESS logging.info("Final result for job '%s' is '%s'", spec.mapreduce_id, state.result_status) # We don't need a transaction here, since we change only statistics data, # and we don't care if it gets overwritten/slightly inconsistent. self.aggregate_state(state, shard_states) poll_time = state.last_poll_time state.last_poll_time = datetime.datetime.utcfromtimestamp(self._time()) if not state.active: # This is the last execution. # Enqueue done_callback if needed. def put_state(state): state.put() done_callback = spec.params.get( model.MapreduceSpec.PARAM_DONE_CALLBACK) if done_callback: taskqueue.Task( url=done_callback, headers={"Mapreduce-Id": spec.mapreduce_id}).add( spec.params.get( model.MapreduceSpec.PARAM_DONE_CALLBACK_QUEUE, "default"), transactional=True) db.run_in_transaction(put_state, state) return else: state.put() processing_rate = int(spec.mapper.params.get( "processing_rate") or model._DEFAULT_PROCESSING_RATE_PER_SEC) self.refill_quotas(poll_time, processing_rate, active_shards) ControllerCallbackHandler.reschedule( self.base_path(), spec, self.serial_id() + 1) def aggregate_state(self, mapreduce_state, shard_states): """Update current mapreduce state by aggregating shard states. Args: mapreduce_state: current mapreduce state as MapreduceState. shard_states: all shard states (active and inactive). list of ShardState. 
""" processed_counts = [] mapreduce_state.counters_map.clear() for shard_state in shard_states: mapreduce_state.counters_map.add_map(shard_state.counters_map) processed_counts.append(shard_state.counters_map.get( context.COUNTER_MAPPER_CALLS)) mapreduce_state.set_processed_counts(processed_counts) def refill_quotas(self, last_poll_time, processing_rate, active_shard_states): """Refill quotas for all active shards. Args: last_poll_time: Datetime with the last time the job state was updated. processing_rate: How many items to process per second overall. active_shard_states: All active shard states, list of ShardState. """ if not active_shard_states: return quota_manager = quota.QuotaManager(memcache.Client()) current_time = int(self._time()) last_poll_time = time.mktime(last_poll_time.timetuple()) total_quota_refill = processing_rate * max(0, current_time - last_poll_time) quota_refill = int(math.ceil( 1.0 * total_quota_refill / len(active_shard_states))) if not quota_refill: return # TODO(user): use batch memcache API to refill quota in one API call. for shard_state in active_shard_states: quota_manager.put(shard_state.shard_id, quota_refill) def serial_id(self): """Get serial unique identifier of this task from request. Returns: serial identifier as int. """ return int(self.request.get("serial_id")) @staticmethod def get_task_name(mapreduce_spec, serial_id): """Compute single controller task name. Args: mapreduce_spec: specification of the mapreduce. serial_id: id of the invocation as int. Returns: task name which should be used to process specified shard/slice. """ # Prefix the task name with something unique to this framework's # namespace so we don't conflict with user tasks on the queue. return "appengine-mrcontrol-%s-%s" % ( mapreduce_spec.mapreduce_id, serial_id) @staticmethod def controller_parameters(mapreduce_spec, serial_id): """Fill in controller task parameters. Returned parameters map is to be used as task payload, and it contains all the data, required by controller to perform its function. Args: mapreduce_spec: specification of the mapreduce. serial_id: id of the invocation as int. Returns: string->string map of parameters to be used as task payload. """ return {"mapreduce_spec": mapreduce_spec.to_json_str(), "serial_id": str(serial_id)} @classmethod def reschedule(cls, base_path, mapreduce_spec, serial_id, queue_name=None): """Schedule new update status callback task. Args: base_path: mapreduce handlers url base path as string. mapreduce_spec: mapreduce specification as MapreduceSpec. serial_id: id of the invocation as int. queue_name: The queue to schedule this task on. Will use the current queue of execution if not supplied. """ task_name = ControllerCallbackHandler.get_task_name( mapreduce_spec, serial_id) task_params = ControllerCallbackHandler.controller_parameters( mapreduce_spec, serial_id) if not queue_name: queue_name = os.environ.get("HTTP_X_APPENGINE_QUEUENAME", "default") try: taskqueue.Task(url=base_path + "/controller_callback", name=task_name, params=task_params, countdown=_CONTROLLER_PERIOD_SEC).add(queue_name) except (taskqueue.TombstonedTaskError, taskqueue.TaskAlreadyExistsError), e: logging.warning("Task %r with params %r already exists. %s: %s", task_name, task_params, e.__class__, e) class KickOffJobHandler(base_handler.BaseHandler): """Taskqueue handler which kicks off a mapreduce processing. Request Parameters: mapreduce_spec: MapreduceSpec of the mapreduce serialized to json. input_readers: List of InputReaders objects separated by semi-colons. 
""" def post(self): """Handles kick off request.""" spec = model.MapreduceSpec.from_json_str( self._get_required_param("mapreduce_spec")) input_readers_json = simplejson.loads( self._get_required_param("input_readers")) queue_name = os.environ.get("HTTP_X_APPENGINE_QUEUENAME", "default") mapper_input_reader_class = spec.mapper.input_reader_class() input_readers = [mapper_input_reader_class.from_json_str(reader_json) for reader_json in input_readers_json] KickOffJobHandler._schedule_shards( spec, input_readers, queue_name, self.base_path()) ControllerCallbackHandler.reschedule( self.base_path(), spec, queue_name=queue_name, serial_id=0) def _get_required_param(self, param_name): """Get a required request parameter. Args: param_name: name of request parameter to fetch. Returns: parameter value Raises: NotEnoughArgumentsError: if parameter is not specified. """ value = self.request.get(param_name) if not value: raise NotEnoughArgumentsError(param_name + " not specified") return value @classmethod def _schedule_shards(cls, spec, input_readers, queue_name, base_path): """Prepares shard states and schedules their execution. Args: spec: mapreduce specification as MapreduceSpec. input_readers: list of InputReaders describing shard splits. queue_name: The queue to run this job on. base_path: The base url path of mapreduce callbacks. """ # Note: it's safe to re-attempt this handler because: # - shard state has deterministic and unique key. # - schedule_slice will fall back gracefully if a task already exists. shard_states = [] for shard_number, input_reader in enumerate(input_readers): shard = model.ShardState.create_new(spec.mapreduce_id, shard_number) shard.shard_description = str(input_reader) shard_states.append(shard) # Retrievs already existing shards. existing_shard_states = db.get(shard.key() for shard in shard_states) existing_shard_keys = set(shard.key() for shard in existing_shard_states if shard is not None) # Puts only non-existing shards. db.put(shard for shard in shard_states if shard.key() not in existing_shard_keys) for shard_number, input_reader in enumerate(input_readers): shard_id = model.ShardState.shard_id_from_number( spec.mapreduce_id, shard_number) MapperWorkerCallbackHandler.schedule_slice( base_path, spec, shard_id, 0, input_reader, queue_name=queue_name) class StartJobHandler(base_handler.JsonHandler): """Command handler starts a mapreduce job.""" def handle(self): """Handles start request.""" # Mapper spec as form arguments. mapreduce_name = self._get_required_param("name") mapper_input_reader_spec = self._get_required_param("mapper_input_reader") mapper_handler_spec = self._get_required_param("mapper_handler") mapper_params = self._get_params( "mapper_params_validator", "mapper_params.") params = self._get_params( "params_validator", "params.") # Set some mapper param defaults if not present. mapper_params["processing_rate"] = int(mapper_params.get( "processing_rate") or model._DEFAULT_PROCESSING_RATE_PER_SEC) queue_name = mapper_params["queue_name"] = mapper_params.get( "queue_name", "default") # Validate the Mapper spec, handler, and input reader. 
mapper_spec = model.MapperSpec( mapper_handler_spec, mapper_input_reader_spec, mapper_params, int(mapper_params.get("shard_count", model._DEFAULT_SHARD_COUNT))) mapreduce_id = type(self)._start_map( mapreduce_name, mapper_spec, params, base_path=self.base_path(), queue_name=queue_name, _app=mapper_params.get("_app")) self.json_response["mapreduce_id"] = mapreduce_id def _get_params(self, validator_parameter, name_prefix): """Retrieves additional user-supplied params for the job and validates them. Args: validator_parameter: name of the request parameter which supplies validator for this parameter set. name_prefix: common prefix for all parameter names in the request. Raises: Any exception raised by the 'params_validator' request parameter if the params fail to validate. """ params_validator = self.request.get(validator_parameter) user_params = {} for key in self.request.arguments(): if key.startswith(name_prefix): values = self.request.get_all(key) adjusted_key = key[len(name_prefix):] if len(values) == 1: user_params[adjusted_key] = values[0] else: user_params[adjusted_key] = values if params_validator: resolved_validator = util.for_name(params_validator) resolved_validator(user_params) return user_params def _get_required_param(self, param_name): """Get a required request parameter. Args: param_name: name of request parameter to fetch. Returns: parameter value Raises: NotEnoughArgumentsError: if parameter is not specified. """ value = self.request.get(param_name) if not value: raise NotEnoughArgumentsError(param_name + " not specified") return value @classmethod def _start_map(cls, name, mapper_spec, mapreduce_params, base_path="/mapreduce", queue_name="default", eta=None, countdown=None, _app=None): # Check that handler can be instantiated. mapper_spec.get_handler() mapper_input_reader_class = mapper_spec.input_reader_class() mapper_input_readers = mapper_input_reader_class.split_input(mapper_spec) if not mapper_input_readers: raise NoDataError("Found no mapper input readers to process.") mapper_spec.shard_count = len(mapper_input_readers) state = model.MapreduceState.create_new() mapreduce_spec = model.MapreduceSpec( name, state.key().id_or_name(), mapper_spec.to_json(), mapreduce_params) state.mapreduce_spec = mapreduce_spec state.active = True state.active_shards = mapper_spec.shard_count if _app: state.app_id = _app # TODO(user): Initialize UI fields correctly. state.char_url = "" state.sparkline_url = "" def schedule_mapreduce(state, mapper_input_readers, eta, countdown): state.put() readers_json = [reader.to_json_str() for reader in mapper_input_readers] taskqueue.Task( url=base_path + "/kickoffjob_callback", params={"mapreduce_spec": state.mapreduce_spec.to_json_str(), "input_readers": simplejson.dumps(readers_json)}, eta=eta, countdown=countdown).add(queue_name, transactional=True) # Point of no return: We're actually going to run this job! db.run_in_transaction( schedule_mapreduce, state, mapper_input_readers, eta, countdown) return state.key().id_or_name() class CleanUpJobHandler(base_handler.JsonHandler): """Command to kick off tasks to clean up a job's data.""" def handle(self): # TODO(user): Have this kick off a task to clean up all MapreduceState, # ShardState, and MapreduceControl entities for a job ID. self.json_response["status"] = "This does nothing yet." 
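# --- Illustrative sketch (added; not part of the original handlers). ---
# _start_map above validates the handler, splits the input, saves the new
# MapreduceState, and transactionally enqueues the kickoff task. Invoked
# programmatically it looks roughly like this; the handler path, reader
# class path, and params are hypothetical placeholders:
#
#   mapper_spec = model.MapperSpec(
#       "myapp.mappers.process_entity",                   # assumed handler
#       "mapreduce.input_readers.DatastoreInputReader",   # assumed reader
#       {"entity_kind": "myapp.models.Entry",
#        "processing_rate": 100,
#        "queue_name": "default"},
#       8)                                                # shard_count
#   mapreduce_id = StartJobHandler._start_map(
#       "process all entries", mapper_spec, {},
#       base_path="/mapreduce", queue_name="default")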
class AbortJobHandler(base_handler.JsonHandler): """Command to abort a running job.""" def handle(self): model.MapreduceControl.abort(self.request.get("mapreduce_id")) self.json_response["status"] = "Abort signal sent."
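# --- Usage note (added sketch, not part of the original file). ---
# AbortJobHandler is a thin wrapper: the actual abort signal is the
# model.MapreduceControl record it writes. Code running inside the same
# application can therefore request an abort directly, and the next
# controller iteration folds the aborted shard states into the final
# job result:
#
#   model.MapreduceControl.abort(mapreduce_id)  # id as returned by _start_map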
bsd-3-clause
8,927,027,168,797,040,000
34.232824
80
0.654245
false
duqiao/django
tests/migrations/test_writer.py
76
23489
# -*- coding: utf-8 -*- from __future__ import unicode_literals import datetime import functools import math import os import re import tokenize import unittest import custom_migration_operations.more_operations import custom_migration_operations.operations from django.conf import settings from django.core.validators import EmailValidator, RegexValidator from django.db import migrations, models from django.db.migrations.writer import ( MigrationWriter, OperationWriter, SettingsReference, ) from django.test import SimpleTestCase, ignore_warnings from django.utils import datetime_safe, six from django.utils._os import upath from django.utils.deconstruct import deconstructible from django.utils.timezone import FixedOffset, get_default_timezone, utc from django.utils.translation import ugettext_lazy as _ from .models import FoodManager, FoodQuerySet class TestModel1(object): def upload_to(self): return "somewhere dynamic" thing = models.FileField(upload_to=upload_to) class OperationWriterTests(SimpleTestCase): def test_empty_signature(self): operation = custom_migration_operations.operations.TestOperation() buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, 'custom_migration_operations.operations.TestOperation(\n' '),' ) def test_args_signature(self): operation = custom_migration_operations.operations.ArgsOperation(1, 2) buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, 'custom_migration_operations.operations.ArgsOperation(\n' ' arg1=1,\n' ' arg2=2,\n' '),' ) def test_kwargs_signature(self): operation = custom_migration_operations.operations.KwargsOperation(kwarg1=1) buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, 'custom_migration_operations.operations.KwargsOperation(\n' ' kwarg1=1,\n' '),' ) def test_args_kwargs_signature(self): operation = custom_migration_operations.operations.ArgsKwargsOperation(1, 2, kwarg2=4) buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, 'custom_migration_operations.operations.ArgsKwargsOperation(\n' ' arg1=1,\n' ' arg2=2,\n' ' kwarg2=4,\n' '),' ) def test_nested_args_signature(self): operation = custom_migration_operations.operations.ArgsOperation( custom_migration_operations.operations.ArgsOperation(1, 2), custom_migration_operations.operations.KwargsOperation(kwarg1=3, kwarg2=4) ) buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, 'custom_migration_operations.operations.ArgsOperation(\n' ' arg1=custom_migration_operations.operations.ArgsOperation(\n' ' arg1=1,\n' ' arg2=2,\n' ' ),\n' ' arg2=custom_migration_operations.operations.KwargsOperation(\n' ' kwarg1=3,\n' ' kwarg2=4,\n' ' ),\n' '),' ) def test_multiline_args_signature(self): operation = custom_migration_operations.operations.ArgsOperation("test\n arg1", "test\narg2") buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, "custom_migration_operations.operations.ArgsOperation(\n" " arg1='test\\n 
arg1',\n" " arg2='test\\narg2',\n" ")," ) def test_expand_args_signature(self): operation = custom_migration_operations.operations.ExpandArgsOperation([1, 2]) buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, 'custom_migration_operations.operations.ExpandArgsOperation(\n' ' arg=[\n' ' 1,\n' ' 2,\n' ' ],\n' '),' ) def test_nested_operation_expand_args_signature(self): operation = custom_migration_operations.operations.ExpandArgsOperation( arg=[ custom_migration_operations.operations.KwargsOperation( kwarg1=1, kwarg2=2, ), ] ) buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, 'custom_migration_operations.operations.ExpandArgsOperation(\n' ' arg=[\n' ' custom_migration_operations.operations.KwargsOperation(\n' ' kwarg1=1,\n' ' kwarg2=2,\n' ' ),\n' ' ],\n' '),' ) class WriterTests(SimpleTestCase): """ Tests the migration writer (makes migration files from Migration instances) """ def safe_exec(self, string, value=None): l = {} try: exec(string, globals(), l) except Exception as e: if value: self.fail("Could not exec %r (from value %r): %s" % (string.strip(), value, e)) else: self.fail("Could not exec %r: %s" % (string.strip(), e)) return l def serialize_round_trip(self, value): string, imports = MigrationWriter.serialize(value) return self.safe_exec("%s\ntest_value_result = %s" % ("\n".join(imports), string), value)['test_value_result'] def assertSerializedEqual(self, value): self.assertEqual(self.serialize_round_trip(value), value) def assertSerializedResultEqual(self, value, target): self.assertEqual(MigrationWriter.serialize(value), target) def assertSerializedFieldEqual(self, value): new_value = self.serialize_round_trip(value) self.assertEqual(value.__class__, new_value.__class__) self.assertEqual(value.max_length, new_value.max_length) self.assertEqual(value.null, new_value.null) self.assertEqual(value.unique, new_value.unique) def test_serialize_numbers(self): self.assertSerializedEqual(1) self.assertSerializedEqual(1.2) self.assertTrue(math.isinf(self.serialize_round_trip(float("inf")))) self.assertTrue(math.isinf(self.serialize_round_trip(float("-inf")))) self.assertTrue(math.isnan(self.serialize_round_trip(float("nan")))) def test_serialize_constants(self): self.assertSerializedEqual(None) self.assertSerializedEqual(True) self.assertSerializedEqual(False) def test_serialize_strings(self): self.assertSerializedEqual(b"foobar") string, imports = MigrationWriter.serialize(b"foobar") self.assertEqual(string, "b'foobar'") self.assertSerializedEqual("föobár") string, imports = MigrationWriter.serialize("foobar") self.assertEqual(string, "'foobar'") def test_serialize_multiline_strings(self): self.assertSerializedEqual(b"foo\nbar") string, imports = MigrationWriter.serialize(b"foo\nbar") self.assertEqual(string, "b'foo\\nbar'") self.assertSerializedEqual("föo\nbár") string, imports = MigrationWriter.serialize("foo\nbar") self.assertEqual(string, "'foo\\nbar'") def test_serialize_collections(self): self.assertSerializedEqual({1: 2}) self.assertSerializedEqual(["a", 2, True, None]) self.assertSerializedEqual({2, 3, "eighty"}) self.assertSerializedEqual({"lalalala": ["yeah", "no", "maybe"]}) self.assertSerializedEqual(_('Hello')) def test_serialize_builtin_types(self): self.assertSerializedEqual([list, tuple, dict, set, frozenset]) 
self.assertSerializedResultEqual( [list, tuple, dict, set, frozenset], ("[list, tuple, dict, set, frozenset]", set()) ) def test_serialize_functions(self): with six.assertRaisesRegex(self, ValueError, 'Cannot serialize function: lambda'): self.assertSerializedEqual(lambda x: 42) self.assertSerializedEqual(models.SET_NULL) string, imports = MigrationWriter.serialize(models.SET(42)) self.assertEqual(string, 'models.SET(42)') self.serialize_round_trip(models.SET(42)) def test_serialize_datetime(self): self.assertSerializedEqual(datetime.datetime.utcnow()) self.assertSerializedEqual(datetime.datetime.utcnow) self.assertSerializedEqual(datetime.datetime.today()) self.assertSerializedEqual(datetime.datetime.today) self.assertSerializedEqual(datetime.date.today()) self.assertSerializedEqual(datetime.date.today) self.assertSerializedEqual(datetime.datetime.now().time()) self.assertSerializedEqual(datetime.datetime(2014, 1, 1, 1, 1, tzinfo=get_default_timezone())) self.assertSerializedEqual(datetime.datetime(2013, 12, 31, 22, 1, tzinfo=FixedOffset(180))) self.assertSerializedResultEqual( datetime.datetime(2014, 1, 1, 1, 1), ("datetime.datetime(2014, 1, 1, 1, 1)", {'import datetime'}) ) self.assertSerializedResultEqual( datetime.datetime(2012, 1, 1, 1, 1, tzinfo=utc), ( "datetime.datetime(2012, 1, 1, 1, 1, tzinfo=utc)", {'import datetime', 'from django.utils.timezone import utc'}, ) ) def test_serialize_datetime_safe(self): self.assertSerializedResultEqual( datetime_safe.date(2014, 3, 31), ("datetime.date(2014, 3, 31)", {'import datetime'}) ) self.assertSerializedResultEqual( datetime_safe.time(10, 25), ("datetime.time(10, 25)", {'import datetime'}) ) self.assertSerializedResultEqual( datetime_safe.datetime(2014, 3, 31, 16, 4, 31), ("datetime.datetime(2014, 3, 31, 16, 4, 31)", {'import datetime'}) ) def test_serialize_fields(self): self.assertSerializedFieldEqual(models.CharField(max_length=255)) self.assertSerializedResultEqual( models.CharField(max_length=255), ("models.CharField(max_length=255)", {"from django.db import models"}) ) self.assertSerializedFieldEqual(models.TextField(null=True, blank=True)) self.assertSerializedResultEqual( models.TextField(null=True, blank=True), ("models.TextField(blank=True, null=True)", {'from django.db import models'}) ) def test_serialize_settings(self): self.assertSerializedEqual(SettingsReference(settings.AUTH_USER_MODEL, "AUTH_USER_MODEL")) self.assertSerializedResultEqual( SettingsReference("someapp.model", "AUTH_USER_MODEL"), ("settings.AUTH_USER_MODEL", {"from django.conf import settings"}) ) self.assertSerializedResultEqual( ((x, x * x) for x in range(3)), ("((0, 0), (1, 1), (2, 4))", set()) ) def test_serialize_compiled_regex(self): """ Make sure compiled regex can be serialized. """ regex = re.compile(r'^\w+$', re.U) self.assertSerializedEqual(regex) def test_serialize_class_based_validators(self): """ Ticket #22943: Test serialization of class-based validators, including compiled regexes. """ validator = RegexValidator(message="hello") string = MigrationWriter.serialize(validator)[0] self.assertEqual(string, "django.core.validators.RegexValidator(message='hello')") self.serialize_round_trip(validator) # Test with a compiled regex. 
validator = RegexValidator(regex=re.compile(r'^\w+$', re.U)) string = MigrationWriter.serialize(validator)[0] self.assertEqual(string, "django.core.validators.RegexValidator(regex=re.compile('^\\\\w+$', 32))") self.serialize_round_trip(validator) # Test a string regex with flag validator = RegexValidator(r'^[0-9]+$', flags=re.U) string = MigrationWriter.serialize(validator)[0] self.assertEqual(string, "django.core.validators.RegexValidator('^[0-9]+$', flags=32)") self.serialize_round_trip(validator) # Test message and code validator = RegexValidator('^[-a-zA-Z0-9_]+$', 'Invalid', 'invalid') string = MigrationWriter.serialize(validator)[0] self.assertEqual(string, "django.core.validators.RegexValidator('^[-a-zA-Z0-9_]+$', 'Invalid', 'invalid')") self.serialize_round_trip(validator) # Test with a subclass. validator = EmailValidator(message="hello") string = MigrationWriter.serialize(validator)[0] self.assertEqual(string, "django.core.validators.EmailValidator(message='hello')") self.serialize_round_trip(validator) validator = deconstructible(path="migrations.test_writer.EmailValidator")(EmailValidator)(message="hello") string = MigrationWriter.serialize(validator)[0] self.assertEqual(string, "migrations.test_writer.EmailValidator(message='hello')") validator = deconstructible(path="custom.EmailValidator")(EmailValidator)(message="hello") with six.assertRaisesRegex(self, ImportError, "No module named '?custom'?"): MigrationWriter.serialize(validator) validator = deconstructible(path="django.core.validators.EmailValidator2")(EmailValidator)(message="hello") with self.assertRaisesMessage(ValueError, "Could not find object EmailValidator2 in django.core.validators."): MigrationWriter.serialize(validator) def test_serialize_empty_nonempty_tuple(self): """ Ticket #22679: makemigrations generates invalid code for (an empty tuple) default_permissions = () """ empty_tuple = () one_item_tuple = ('a',) many_items_tuple = ('a', 'b', 'c') self.assertSerializedEqual(empty_tuple) self.assertSerializedEqual(one_item_tuple) self.assertSerializedEqual(many_items_tuple) def test_serialize_builtins(self): string, imports = MigrationWriter.serialize(range) self.assertEqual(string, 'range') self.assertEqual(imports, set()) @unittest.skipUnless(six.PY2, "Only applies on Python 2") def test_serialize_direct_function_reference(self): """ Ticket #22436: You cannot use a function straight from its body (e.g. define the method and use it in the same body) """ with self.assertRaises(ValueError): self.serialize_round_trip(TestModel1.thing) def test_serialize_local_function_reference(self): """ Neither py2 nor py3 can serialize a reference in a local scope.
""" class TestModel2(object): def upload_to(self): return "somewhere dynamic" thing = models.FileField(upload_to=upload_to) with self.assertRaises(ValueError): self.serialize_round_trip(TestModel2.thing) def test_serialize_local_function_reference_message(self): """ Make sure user is seeing which module/function is the issue """ class TestModel2(object): def upload_to(self): return "somewhere dynamic" thing = models.FileField(upload_to=upload_to) with six.assertRaisesRegex(self, ValueError, '^Could not find function upload_to in migrations.test_writer'): self.serialize_round_trip(TestModel2.thing) def test_serialize_managers(self): self.assertSerializedEqual(models.Manager()) self.assertSerializedResultEqual( FoodQuerySet.as_manager(), ('migrations.models.FoodQuerySet.as_manager()', {'import migrations.models'}) ) self.assertSerializedEqual(FoodManager('a', 'b')) self.assertSerializedEqual(FoodManager('x', 'y', c=3, d=4)) def test_serialize_frozensets(self): self.assertSerializedEqual(frozenset()) self.assertSerializedEqual(frozenset("let it go")) def test_serialize_timedelta(self): self.assertSerializedEqual(datetime.timedelta()) self.assertSerializedEqual(datetime.timedelta(minutes=42)) def test_serialize_functools_partial(self): value = functools.partial(datetime.timedelta, 1, seconds=2) result = self.serialize_round_trip(value) self.assertEqual(result.func, value.func) self.assertEqual(result.args, value.args) self.assertEqual(result.keywords, value.keywords) def test_simple_migration(self): """ Tests serializing a simple migration. """ fields = { 'charfield': models.DateTimeField(default=datetime.datetime.utcnow), 'datetimefield': models.DateTimeField(default=datetime.datetime.utcnow), } options = { 'verbose_name': 'My model', 'verbose_name_plural': 'My models', } migration = type(str("Migration"), (migrations.Migration,), { "operations": [ migrations.CreateModel("MyModel", tuple(fields.items()), options, (models.Model,)), migrations.CreateModel("MyModel2", tuple(fields.items()), bases=(models.Model,)), migrations.CreateModel(name="MyModel3", fields=tuple(fields.items()), options=options, bases=(models.Model,)), migrations.DeleteModel("MyModel"), migrations.AddField("OtherModel", "datetimefield", fields["datetimefield"]), ], "dependencies": [("testapp", "some_other_one")], }) writer = MigrationWriter(migration) output = writer.as_string() # It should NOT be unicode. self.assertIsInstance(output, six.binary_type, "Migration as_string returned unicode") # We don't test the output formatting - that's too fragile. # Just make sure it runs for now, and that things look alright. result = self.safe_exec(output) self.assertIn("Migration", result) # In order to preserve compatibility with Python 3.2 unicode literals # prefix shouldn't be added to strings. 
tokens = tokenize.generate_tokens(six.StringIO(str(output)).readline) for token_type, token_source, (srow, scol), __, line in tokens: if token_type == tokenize.STRING: self.assertFalse( token_source.startswith('u'), "Unicode literal prefix found at %d:%d: %r" % ( srow, scol, line.strip() ) ) # Silence warning on Python 2: Not importing directory # 'tests/migrations/migrations_test_apps/without_init_file/migrations': # missing __init__.py @ignore_warnings(category=ImportWarning) def test_migration_path(self): test_apps = [ 'migrations.migrations_test_apps.normal', 'migrations.migrations_test_apps.with_package_model', 'migrations.migrations_test_apps.without_init_file', ] base_dir = os.path.dirname(os.path.dirname(upath(__file__))) for app in test_apps: with self.modify_settings(INSTALLED_APPS={'append': app}): migration = migrations.Migration('0001_initial', app.split('.')[-1]) expected_path = os.path.join(base_dir, *(app.split('.') + ['migrations', '0001_initial.py'])) writer = MigrationWriter(migration) self.assertEqual(writer.path, expected_path) def test_custom_operation(self): migration = type(str("Migration"), (migrations.Migration,), { "operations": [ custom_migration_operations.operations.TestOperation(), custom_migration_operations.operations.CreateModel(), migrations.CreateModel("MyModel", (), {}, (models.Model,)), custom_migration_operations.more_operations.TestOperation() ], "dependencies": [] }) writer = MigrationWriter(migration) output = writer.as_string() result = self.safe_exec(output) self.assertIn("custom_migration_operations", result) self.assertNotEqual( result['custom_migration_operations'].operations.TestOperation, result['custom_migration_operations'].more_operations.TestOperation ) def test_sorted_imports(self): """ #24155 - Tests ordering of imports. """ migration = type(str("Migration"), (migrations.Migration,), { "operations": [ migrations.AddField("mymodel", "myfield", models.DateTimeField( default=datetime.datetime(2012, 1, 1, 1, 1, tzinfo=utc), )), ] }) writer = MigrationWriter(migration) output = writer.as_string().decode('utf-8') self.assertIn( "import datetime\n" "from django.db import migrations, models\n" "from django.utils.timezone import utc\n", output ) def test_models_import_omitted(self): """ django.db.models shouldn't be imported if unused. """ migration = type(str("Migration"), (migrations.Migration,), { "operations": [ migrations.AlterModelOptions( name='model', options={'verbose_name': 'model', 'verbose_name_plural': 'models'}, ), ] }) writer = MigrationWriter(migration) output = writer.as_string().decode('utf-8') self.assertIn("from django.db import migrations\n", output) def test_deconstruct_class_arguments(self): # Yes, it doesn't make sense to use a class as a default for a # CharField. It does make sense for custom fields though, for example # an enumfield that takes the enum class as an argument. class DeconstructableInstances(object): def deconstruct(self): return ('DeconstructableInstances', [], {}) string = MigrationWriter.serialize(models.CharField(default=DeconstructableInstances))[0] self.assertEqual(string, "models.CharField(default=migrations.test_writer.DeconstructableInstances)")
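# --- Added sketch (not part of the upstream test module). ---
# The round-trip helper above reduces to: serialize a value into source
# text plus the import statements it needs, exec the result, and compare.
# A minimal standalone version of the same idea, assuming Django settings
# are already configured:
#
#   from django.db.migrations.writer import MigrationWriter
#
#   def round_trip(value):
#       string, imports = MigrationWriter.serialize(value)
#       scope = {}
#       exec("\n".join(imports) + "\nresult = " + string, scope)
#       return scope["result"]
#
#   assert round_trip({"a": [1, 2.5, None]}) == {"a": [1, 2.5, None]}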
bsd-3-clause
-8,721,334,162,716,889,000
41.468354
126
0.624143
false
cnsoft/kbengine-cocos2dx
kbe/res/scripts/common/Lib/test/test_itertools.py
6
69610
import unittest from test import support from itertools import * from weakref import proxy from decimal import Decimal from fractions import Fraction import sys import operator import random import copy import pickle from functools import reduce maxsize = support.MAX_Py_ssize_t minsize = -maxsize-1 def lzip(*args): return list(zip(*args)) def onearg(x): 'Test function of one argument' return 2*x def errfunc(*args): 'Test function that raises an error' raise ValueError def gen3(): 'Non-restartable source sequence' for i in (0, 1, 2): yield i def isEven(x): 'Test predicate' return x%2==0 def isOdd(x): 'Test predicate' return x%2==1 class StopNow: 'Class emulating an empty iterable.' def __iter__(self): return self def __next__(self): raise StopIteration def take(n, seq): 'Convenience function for partially consuming a long or infinite iterable' return list(islice(seq, n)) def prod(iterable): return reduce(operator.mul, iterable, 1) def fact(n): 'Factorial' return prod(range(1, n+1)) class TestBasicOps(unittest.TestCase): def test_accumulate(self): self.assertEqual(list(accumulate(range(10))), # one positional arg [0, 1, 3, 6, 10, 15, 21, 28, 36, 45]) self.assertEqual(list(accumulate(iterable=range(10))), # kw arg [0, 1, 3, 6, 10, 15, 21, 28, 36, 45]) for typ in int, complex, Decimal, Fraction: # multiple types self.assertEqual( list(accumulate(map(typ, range(10)))), list(map(typ, [0, 1, 3, 6, 10, 15, 21, 28, 36, 45]))) self.assertEqual(list(accumulate('abc')), ['a', 'ab', 'abc']) # works with non-numeric self.assertEqual(list(accumulate([])), []) # empty iterable self.assertEqual(list(accumulate([7])), [7]) # iterable of length one self.assertRaises(TypeError, accumulate, range(10), 5) # too many args self.assertRaises(TypeError, accumulate) # too few args self.assertRaises(TypeError, accumulate, x=range(10)) # unexpected kwd arg self.assertRaises(TypeError, list, accumulate([1, []])) # args that don't add def test_chain(self): def chain2(*iterables): 'Pure python version in the docs' for it in iterables: for element in it: yield element for c in (chain, chain2): self.assertEqual(list(c('abc', 'def')), list('abcdef')) self.assertEqual(list(c('abc')), list('abc')) self.assertEqual(list(c('')), []) self.assertEqual(take(4, c('abc', 'def')), list('abcd')) self.assertRaises(TypeError, list,c(2, 3)) def test_chain_from_iterable(self): self.assertEqual(list(chain.from_iterable(['abc', 'def'])), list('abcdef')) self.assertEqual(list(chain.from_iterable(['abc'])), list('abc')) self.assertEqual(list(chain.from_iterable([''])), []) self.assertEqual(take(4, chain.from_iterable(['abc', 'def'])), list('abcd')) self.assertRaises(TypeError, list, chain.from_iterable([2, 3])) def test_combinations(self): self.assertRaises(TypeError, combinations, 'abc') # missing r argument self.assertRaises(TypeError, combinations, 'abc', 2, 1) # too many arguments self.assertRaises(TypeError, combinations, None) # pool is not iterable self.assertRaises(ValueError, combinations, 'abc', -2) # r is negative self.assertEqual(list(combinations('abc', 32)), []) # r > n self.assertEqual(list(combinations(range(4), 3)), [(0,1,2), (0,1,3), (0,2,3), (1,2,3)]) def combinations1(iterable, r): 'Pure python version shown in the docs' pool = tuple(iterable) n = len(pool) if r > n: return indices = list(range(r)) yield tuple(pool[i] for i in indices) while 1: for i in reversed(range(r)): if indices[i] != i + n - r: break else: return indices[i] += 1 for j in range(i+1, r): indices[j] = indices[j-1] + 1 yield tuple(pool[i] for i in
indices) def combinations2(iterable, r): 'Pure python version shown in the docs' pool = tuple(iterable) n = len(pool) for indices in permutations(range(n), r): if sorted(indices) == list(indices): yield tuple(pool[i] for i in indices) def combinations3(iterable, r): 'Pure python version from cwr()' pool = tuple(iterable) n = len(pool) for indices in combinations_with_replacement(range(n), r): if len(set(indices)) == r: yield tuple(pool[i] for i in indices) for n in range(7): values = [5*x-12 for x in range(n)] for r in range(n+2): result = list(combinations(values, r)) self.assertEqual(len(result), 0 if r>n else fact(n) / fact(r) / fact(n-r)) # right number of combs self.assertEqual(len(result), len(set(result))) # no repeats self.assertEqual(result, sorted(result)) # lexicographic order for c in result: self.assertEqual(len(c), r) # r-length combinations self.assertEqual(len(set(c)), r) # no duplicate elements self.assertEqual(list(c), sorted(c)) # keep original ordering self.assertTrue(all(e in values for e in c)) # elements taken from input iterable self.assertEqual(list(c), [e for e in values if e in c]) # comb is a subsequence of the input iterable self.assertEqual(result, list(combinations1(values, r))) # matches first pure python version self.assertEqual(result, list(combinations2(values, r))) # matches second pure python version self.assertEqual(result, list(combinations3(values, r))) # matches third pure python version # Test implementation detail: tuple re-use self.assertEqual(len(set(map(id, combinations('abcde', 3)))), 1) self.assertNotEqual(len(set(map(id, list(combinations('abcde', 3))))), 1) def test_combinations_with_replacement(self): cwr = combinations_with_replacement self.assertRaises(TypeError, cwr, 'abc') # missing r argument self.assertRaises(TypeError, cwr, 'abc', 2, 1) # too many arguments self.assertRaises(TypeError, cwr, None) # pool is not iterable self.assertRaises(ValueError, cwr, 'abc', -2) # r is negative self.assertEqual(list(cwr('ABC', 2)), [('A','A'), ('A','B'), ('A','C'), ('B','B'), ('B','C'), ('C','C')]) def cwr1(iterable, r): 'Pure python version shown in the docs' # number items returned: (n+r-1)! / r! / (n-1)!
when n>0 pool = tuple(iterable) n = len(pool) if not n and r: return indices = [0] * r yield tuple(pool[i] for i in indices) while 1: for i in reversed(range(r)): if indices[i] != n - 1: break else: return indices[i:] = [indices[i] + 1] * (r - i) yield tuple(pool[i] for i in indices) def cwr2(iterable, r): 'Pure python version shown in the docs' pool = tuple(iterable) n = len(pool) for indices in product(range(n), repeat=r): if sorted(indices) == list(indices): yield tuple(pool[i] for i in indices) def numcombs(n, r): if not n: return 0 if r else 1 return fact(n+r-1) / fact(r)/ fact(n-1) for n in range(7): values = [5*x-12 for x in range(n)] for r in range(n+2): result = list(cwr(values, r)) self.assertEqual(len(result), numcombs(n, r)) # right number of combs self.assertEqual(len(result), len(set(result))) # no repeats self.assertEqual(result, sorted(result)) # lexicographic order regular_combs = list(combinations(values, r)) # compare to combs without replacement if n == 0 or r <= 1: self.assertEqual(result, regular_combs) # cases that should be identical else: self.assertTrue(set(result) >= set(regular_combs)) # rest should be supersets of regular combs for c in result: self.assertEqual(len(c), r) # r-length combinations noruns = [k for k,v in groupby(c)] # combo without consecutive repeats self.assertEqual(len(noruns), len(set(noruns))) # no repeats other than consecutive self.assertEqual(list(c), sorted(c)) # keep original ordering self.assertTrue(all(e in values for e in c)) # elements taken from input iterable self.assertEqual(noruns, [e for e in values if e in c]) # comb is a subsequence of the input iterable self.assertEqual(result, list(cwr1(values, r))) # matches first pure python version self.assertEqual(result, list(cwr2(values, r))) # matches second pure python version # Test implementation detail: tuple re-use self.assertEqual(len(set(map(id, cwr('abcde', 3)))), 1) self.assertNotEqual(len(set(map(id, list(cwr('abcde', 3))))), 1) def test_permutations(self): self.assertRaises(TypeError, permutations) # too few arguments self.assertRaises(TypeError, permutations, 'abc', 2, 1) # too many arguments self.assertRaises(TypeError, permutations, None) # pool is not iterable self.assertRaises(ValueError, permutations, 'abc', -2) # r is negative self.assertEqual(list(permutations('abc', 32)), []) # r > n self.assertRaises(TypeError, permutations, 'abc', 's') # r is not an int or None self.assertEqual(list(permutations(range(3), 2)), [(0,1), (0,2), (1,0), (1,2), (2,0), (2,1)]) def permutations1(iterable, r=None): 'Pure python version shown in the docs' pool = tuple(iterable) n = len(pool) r = n if r is None else r if r > n: return indices = list(range(n)) cycles = list(range(n-r+1, n+1))[::-1] yield tuple(pool[i] for i in indices[:r]) while n: for i in reversed(range(r)): cycles[i] -= 1 if cycles[i] == 0: indices[i:] = indices[i+1:] + indices[i:i+1] cycles[i] = n - i else: j = cycles[i] indices[i], indices[-j] = indices[-j], indices[i] yield tuple(pool[i] for i in indices[:r]) break else: return def permutations2(iterable, r=None): 'Pure python version shown in the docs' pool = tuple(iterable) n = len(pool) r = n if r is None else r for indices in product(range(n), repeat=r): if len(set(indices)) == r: yield tuple(pool[i] for i in indices) for n in range(7): values = [5*x-12 for x in range(n)] for r in range(n+2): result = list(permutations(values, r)) self.assertEqual(len(result), 0 if r>n else fact(n) / fact(n-r)) # right number of perms self.assertEqual(len(result), len(set(result))) 
# no repeats self.assertEqual(result, sorted(result)) # lexicographic order for p in result: self.assertEqual(len(p), r) # r-length permutations self.assertEqual(len(set(p)), r) # no duplicate elements self.assertTrue(all(e in values for e in p)) # elements taken from input iterable self.assertEqual(result, list(permutations1(values, r))) # matches first pure python version self.assertEqual(result, list(permutations2(values, r))) # matches second pure python version if r == n: self.assertEqual(result, list(permutations(values, None))) # test r as None self.assertEqual(result, list(permutations(values))) # test default r # Test implementation detail: tuple re-use self.assertEqual(len(set(map(id, permutations('abcde', 3)))), 1) self.assertNotEqual(len(set(map(id, list(permutations('abcde', 3))))), 1) def test_combinatorics(self): # Test relationships between product(), permutations(), # combinations() and combinations_with_replacement(). for n in range(6): s = 'ABCDEFG'[:n] for r in range(8): prod = list(product(s, repeat=r)) cwr = list(combinations_with_replacement(s, r)) perm = list(permutations(s, r)) comb = list(combinations(s, r)) # Check size self.assertEqual(len(prod), n**r) self.assertEqual(len(cwr), (fact(n+r-1) / fact(r)/ fact(n-1)) if n else (not r)) self.assertEqual(len(perm), 0 if r>n else fact(n) / fact(n-r)) self.assertEqual(len(comb), 0 if r>n else fact(n) / fact(r) / fact(n-r)) # Check lexicographic order without repeated tuples self.assertEqual(prod, sorted(set(prod))) self.assertEqual(cwr, sorted(set(cwr))) self.assertEqual(perm, sorted(set(perm))) self.assertEqual(comb, sorted(set(comb))) # Check interrelationships self.assertEqual(cwr, [t for t in prod if sorted(t)==list(t)]) # cwr: prods which are sorted self.assertEqual(perm, [t for t in prod if len(set(t))==r]) # perm: prods with no dups self.assertEqual(comb, [t for t in perm if sorted(t)==list(t)]) # comb: perms that are sorted self.assertEqual(comb, [t for t in cwr if len(set(t))==r]) # comb: cwrs without dups self.assertEqual(comb, list(filter(set(cwr).__contains__, perm))) # comb: perm that is a cwr self.assertEqual(comb, list(filter(set(perm).__contains__, cwr))) # comb: cwr that is a perm self.assertEqual(comb, sorted(set(cwr) & set(perm))) # comb: both a cwr and a perm def test_compress(self): self.assertEqual(list(compress(data='ABCDEF', selectors=[1,0,1,0,1,1])), list('ACEF')) self.assertEqual(list(compress('ABCDEF', [1,0,1,0,1,1])), list('ACEF')) self.assertEqual(list(compress('ABCDEF', [0,0,0,0,0,0])), list('')) self.assertEqual(list(compress('ABCDEF', [1,1,1,1,1,1])), list('ABCDEF')) self.assertEqual(list(compress('ABCDEF', [1,0,1])), list('AC')) self.assertEqual(list(compress('ABC', [0,1,1,1,1,1])), list('BC')) n = 10000 data = chain.from_iterable(repeat(range(6), n)) selectors = chain.from_iterable(repeat((0, 1))) self.assertEqual(list(compress(data, selectors)), [1,3,5] * n) self.assertRaises(TypeError, compress, None, range(6)) # 1st arg not iterable self.assertRaises(TypeError, compress, range(6), None) # 2nd arg not iterable self.assertRaises(TypeError, compress, range(6)) # too few args self.assertRaises(TypeError, compress, range(6), None) # too many args def test_count(self): self.assertEqual(lzip('abc',count()), [('a', 0), ('b', 1), ('c', 2)]) self.assertEqual(lzip('abc',count(3)), [('a', 3), ('b', 4), ('c', 5)]) self.assertEqual(take(2, lzip('abc',count(3))), [('a', 3), ('b', 4)]) self.assertEqual(take(2, zip('abc',count(-1))), [('a', -1), ('b', 0)]) self.assertEqual(take(2, 
zip('abc',count(-3))), [('a', -3), ('b', -2)]) self.assertRaises(TypeError, count, 2, 3, 4) self.assertRaises(TypeError, count, 'a') self.assertEqual(list(islice(count(maxsize-5), 10)), list(range(maxsize-5, maxsize+5))) self.assertEqual(list(islice(count(-maxsize-5), 10)), list(range(-maxsize-5, -maxsize+5))) self.assertEqual(list(islice(count(10, maxsize+5), 3)), list(range(10, 10+3*(maxsize+5), maxsize+5))) c = count(3) self.assertEqual(repr(c), 'count(3)') next(c) self.assertEqual(repr(c), 'count(4)') c = count(-9) self.assertEqual(repr(c), 'count(-9)') next(c) self.assertEqual(repr(count(10.25)), 'count(10.25)') self.assertEqual(next(c), -8) for i in (-sys.maxsize-5, -sys.maxsize+5 ,-10, -1, 0, 10, sys.maxsize-5, sys.maxsize+5): # Test repr (ignoring the L in longs) r1 = repr(count(i)).replace('L', '') r2 = 'count(%r)'.__mod__(i).replace('L', '') self.assertEqual(r1, r2) # check copy, deepcopy, pickle for value in -3, 3, maxsize-5, maxsize+5: c = count(value) self.assertEqual(next(copy.copy(c)), value) self.assertEqual(next(copy.deepcopy(c)), value) self.assertEqual(next(pickle.loads(pickle.dumps(c))), value) # check proper internal error handling for large 'step' sizes count(1, maxsize+5); sys.exc_info() def test_count_with_stride(self): self.assertEqual(lzip('abc',count(2,3)), [('a', 2), ('b', 5), ('c', 8)]) self.assertEqual(lzip('abc',count(start=2,step=3)), [('a', 2), ('b', 5), ('c', 8)]) self.assertEqual(lzip('abc',count(step=-1)), [('a', 0), ('b', -1), ('c', -2)]) self.assertEqual(lzip('abc',count(2,0)), [('a', 2), ('b', 2), ('c', 2)]) self.assertEqual(lzip('abc',count(2,1)), [('a', 2), ('b', 3), ('c', 4)]) self.assertEqual(lzip('abc',count(2,3)), [('a', 2), ('b', 5), ('c', 8)]) self.assertEqual(take(20, count(maxsize-15, 3)), take(20, range(maxsize-15, maxsize+100, 3))) self.assertEqual(take(20, count(-maxsize-15, 3)), take(20, range(-maxsize-15,-maxsize+100, 3))) self.assertEqual(take(3, count(2, 3.25-4j)), [2, 5.25-4j, 8.5-8j]) self.assertEqual(take(3, count(Decimal('1.1'), Decimal('.1'))), [Decimal('1.1'), Decimal('1.2'), Decimal('1.3')]) self.assertEqual(take(3, count(Fraction(2,3), Fraction(1,7))), [Fraction(2,3), Fraction(17,21), Fraction(20,21)]) self.assertEqual(repr(take(3, count(10, 2.5))), repr([10, 12.5, 15.0])) c = count(3, 5) self.assertEqual(repr(c), 'count(3, 5)') next(c) self.assertEqual(repr(c), 'count(8, 5)') c = count(-9, 0) self.assertEqual(repr(c), 'count(-9, 0)') next(c) self.assertEqual(repr(c), 'count(-9, 0)') c = count(-9, -3) self.assertEqual(repr(c), 'count(-9, -3)') next(c) self.assertEqual(repr(c), 'count(-12, -3)') self.assertEqual(repr(c), 'count(-12, -3)') self.assertEqual(repr(count(10.5, 1.25)), 'count(10.5, 1.25)') self.assertEqual(repr(count(10.5, 1)), 'count(10.5)') # suppress step=1 when it's an int self.assertEqual(repr(count(10.5, 1.00)), 'count(10.5, 1.0)') # do show float values like 1.0 for i in (-sys.maxsize-5, -sys.maxsize+5 ,-10, -1, 0, 10, sys.maxsize-5, sys.maxsize+5): for j in (-sys.maxsize-5, -sys.maxsize+5 ,-10, -1, 0, 1, 10, sys.maxsize-5, sys.maxsize+5): # Test repr (ignoring the L in longs) r1 = repr(count(i, j)).replace('L', '') if j == 1: r2 = ('count(%r)' % i).replace('L', '') else: r2 = ('count(%r, %r)' % (i, j)).replace('L', '') self.assertEqual(r1, r2) def test_cycle(self): self.assertEqual(take(10, cycle('abc')), list('abcabcabca')) self.assertEqual(list(cycle('')), []) self.assertRaises(TypeError, cycle) self.assertRaises(TypeError, cycle, 5) self.assertEqual(list(islice(cycle(gen3()),10)), [0,1,2,0,1,2,0,1,2,0])
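# (Added note ahead of test_groupby:) groupby() only groups *consecutive*
# equal keys, which is why the pipes-and-filters recipes below sort first.
# A pure-python rendering of the "sort s | uniq" recipe exercised there:
#
#   def uniq(iterable):
#       return [k for k, g in groupby(sorted(iterable))]
#
#   assert uniq('abracadabra') == ['a', 'b', 'c', 'd', 'r']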
def test_groupby(self): # Check whether it accepts arguments correctly self.assertEqual([], list(groupby([]))) self.assertEqual([], list(groupby([], key=id))) self.assertRaises(TypeError, list, groupby('abc', [])) self.assertRaises(TypeError, groupby, None) self.assertRaises(TypeError, groupby, 'abc', lambda x:x, 10) # Check normal input s = [(0, 10, 20), (0, 11,21), (0,12,21), (1,13,21), (1,14,22), (2,15,22), (3,16,23), (3,17,23)] dup = [] for k, g in groupby(s, lambda r:r[0]): for elem in g: self.assertEqual(k, elem[0]) dup.append(elem) self.assertEqual(s, dup) # Check nested case dup = [] for k, g in groupby(s, lambda r:r[0]): for ik, ig in groupby(g, lambda r:r[2]): for elem in ig: self.assertEqual(k, elem[0]) self.assertEqual(ik, elem[2]) dup.append(elem) self.assertEqual(s, dup) # Check case where inner iterator is not used keys = [k for k, g in groupby(s, lambda r:r[0])] expectedkeys = set([r[0] for r in s]) self.assertEqual(set(keys), expectedkeys) self.assertEqual(len(keys), len(expectedkeys)) # Exercise pipes and filters style s = 'abracadabra' # sort s | uniq r = [k for k, g in groupby(sorted(s))] self.assertEqual(r, ['a', 'b', 'c', 'd', 'r']) # sort s | uniq -d r = [k for k, g in groupby(sorted(s)) if list(islice(g,1,2))] self.assertEqual(r, ['a', 'b', 'r']) # sort s | uniq -c r = [(len(list(g)), k) for k, g in groupby(sorted(s))] self.assertEqual(r, [(5, 'a'), (2, 'b'), (1, 'c'), (1, 'd'), (2, 'r')]) # sort s | uniq -c | sort -rn | head -3 r = sorted([(len(list(g)) , k) for k, g in groupby(sorted(s))], reverse=True)[:3] self.assertEqual(r, [(5, 'a'), (2, 'r'), (2, 'b')]) # iter.__next__ failure class ExpectedError(Exception): pass def delayed_raise(n=0): for i in range(n): yield 'yo' raise ExpectedError def gulp(iterable, keyp=None, func=list): return [func(g) for k, g in groupby(iterable, keyp)] # iter.__next__ failure on outer object self.assertRaises(ExpectedError, gulp, delayed_raise(0)) # iter.__next__ failure on inner object self.assertRaises(ExpectedError, gulp, delayed_raise(1)) # __eq__ failure class DummyCmp: def __eq__(self, dst): raise ExpectedError s = [DummyCmp(), DummyCmp(), None] # __eq__ failure on outer object self.assertRaises(ExpectedError, gulp, s, func=id) # __eq__ failure on inner object self.assertRaises(ExpectedError, gulp, s) # keyfunc failure def keyfunc(obj): if keyfunc.skip > 0: keyfunc.skip -= 1 return obj else: raise ExpectedError # keyfunc failure on outer object keyfunc.skip = 0 self.assertRaises(ExpectedError, gulp, [None], keyfunc) keyfunc.skip = 1 self.assertRaises(ExpectedError, gulp, [None, None], keyfunc) def test_filter(self): self.assertEqual(list(filter(isEven, range(6))), [0,2,4]) self.assertEqual(list(filter(None, [0,1,0,2,0])), [1,2]) self.assertEqual(list(filter(bool, [0,1,0,2,0])), [1,2]) self.assertEqual(take(4, filter(isEven, count())), [0,2,4,6]) self.assertRaises(TypeError, filter) self.assertRaises(TypeError, filter, lambda x:x) self.assertRaises(TypeError, filter, lambda x:x, range(6), 7) self.assertRaises(TypeError, filter, isEven, 3) self.assertRaises(TypeError, next, filter(range(6), range(6))) def test_filterfalse(self): self.assertEqual(list(filterfalse(isEven, range(6))), [1,3,5]) self.assertEqual(list(filterfalse(None, [0,1,0,2,0])), [0,0,0]) self.assertEqual(list(filterfalse(bool, [0,1,0,2,0])), [0,0,0]) self.assertEqual(take(4, filterfalse(isEven, count())), [1,3,5,7]) self.assertRaises(TypeError, filterfalse) self.assertRaises(TypeError, filterfalse, lambda x:x) self.assertRaises(TypeError, filterfalse, lambda
x:x, range(6), 7) self.assertRaises(TypeError, filterfalse, isEven, 3) self.assertRaises(TypeError, next, filterfalse(range(6), range(6))) def test_zip(self): # XXX This is rather silly now that builtin zip() calls zip()... ans = [(x,y) for x, y in zip('abc',count())] self.assertEqual(ans, [('a', 0), ('b', 1), ('c', 2)]) self.assertEqual(list(zip('abc', range(6))), lzip('abc', range(6))) self.assertEqual(list(zip('abcdef', range(3))), lzip('abcdef', range(3))) self.assertEqual(take(3,zip('abcdef', count())), lzip('abcdef', range(3))) self.assertEqual(list(zip('abcdef')), lzip('abcdef')) self.assertEqual(list(zip()), lzip()) self.assertRaises(TypeError, zip, 3) self.assertRaises(TypeError, zip, range(3), 3) # Check tuple re-use (implementation detail) self.assertEqual([tuple(list(pair)) for pair in zip('abc', 'def')], lzip('abc', 'def')) self.assertEqual([pair for pair in zip('abc', 'def')], lzip('abc', 'def')) ids = list(map(id, zip('abc', 'def'))) self.assertEqual(min(ids), max(ids)) ids = list(map(id, list(zip('abc', 'def')))) self.assertEqual(len(dict.fromkeys(ids)), len(ids)) def test_ziplongest(self): for args in [ ['abc', range(6)], [range(6), 'abc'], [range(1000), range(2000,2100), range(3000,3050)], [range(1000), range(0), range(3000,3050), range(1200), range(1500)], [range(1000), range(0), range(3000,3050), range(1200), range(1500), range(0)], ]: target = [tuple([arg[i] if i < len(arg) else None for arg in args]) for i in range(max(map(len, args)))] self.assertEqual(list(zip_longest(*args)), target) self.assertEqual(list(zip_longest(*args, **{})), target) target = [tuple((e is None and 'X' or e) for e in t) for t in target] # Replace None fills with 'X' self.assertEqual(list(zip_longest(*args, **dict(fillvalue='X'))), target) self.assertEqual(take(3,zip_longest('abcdef', count())), list(zip('abcdef', range(3)))) # take 3 from infinite input self.assertEqual(list(zip_longest()), list(zip())) self.assertEqual(list(zip_longest([])), list(zip([]))) self.assertEqual(list(zip_longest('abcdef')), list(zip('abcdef'))) self.assertEqual(list(zip_longest('abc', 'defg', **{})), list(zip(list('abc')+[None], 'defg'))) # empty keyword dict self.assertRaises(TypeError, zip_longest, 3) self.assertRaises(TypeError, zip_longest, range(3), 3) for stmt in [ "zip_longest('abc', fv=1)", "zip_longest('abc', fillvalue=1, bogus_keyword=None)", ]: try: eval(stmt, globals(), locals()) except TypeError: pass else: self.fail('Did not raise TypeError in: ' + stmt) # Check tuple re-use (implementation detail) self.assertEqual([tuple(list(pair)) for pair in zip_longest('abc', 'def')], list(zip('abc', 'def'))) self.assertEqual([pair for pair in zip_longest('abc', 'def')], list(zip('abc', 'def'))) ids = list(map(id, zip_longest('abc', 'def'))) self.assertEqual(min(ids), max(ids)) ids = list(map(id, list(zip_longest('abc', 'def')))) self.assertEqual(len(dict.fromkeys(ids)), len(ids)) def test_bug_7244(self): class Repeater: # this class is similar to itertools.repeat def __init__(self, o, t, e): self.o = o self.t = int(t) self.e = e def __iter__(self): # its iterator is itself return self def __next__(self): if self.t > 0: self.t -= 1 return self.o else: raise self.e # Formerly this code would fail in debug mode # with Undetected Error and StopIteration r1 = Repeater(1, 3, StopIteration) r2 = Repeater(2, 4, StopIteration) def run(r1, r2): result = [] for i, j in zip_longest(r1, r2, fillvalue=0): with support.captured_output('stdout'): print((i, j)) result.append((i, j)) return result self.assertEqual(run(r1, r2),
[(1,2), (1,2), (1,2), (0,2)]) # Formerly, the RuntimeError would be lost # and StopIteration would stop as expected r1 = Repeater(1, 3, RuntimeError) r2 = Repeater(2, 4, StopIteration) it = zip_longest(r1, r2, fillvalue=0) self.assertEqual(next(it), (1, 2)) self.assertEqual(next(it), (1, 2)) self.assertEqual(next(it), (1, 2)) self.assertRaises(RuntimeError, next, it) def test_product(self): for args, result in [ ([], [()]), # zero iterables (['ab'], [('a',), ('b',)]), # one iterable ([range(2), range(3)], [(0,0), (0,1), (0,2), (1,0), (1,1), (1,2)]), # two iterables ([range(0), range(2), range(3)], []), # first iterable with zero length ([range(2), range(0), range(3)], []), # middle iterable with zero length ([range(2), range(3), range(0)], []), # last iterable with zero length ]: self.assertEqual(list(product(*args)), result) for r in range(4): self.assertEqual(list(product(*(args*r))), list(product(*args, **dict(repeat=r)))) self.assertEqual(len(list(product(*[range(7)]*6))), 7**6) self.assertRaises(TypeError, product, range(6), None) def product1(*args, **kwds): pools = list(map(tuple, args)) * kwds.get('repeat', 1) n = len(pools) if n == 0: yield () return if any(len(pool) == 0 for pool in pools): return indices = [0] * n yield tuple(pool[i] for pool, i in zip(pools, indices)) while 1: for i in reversed(range(n)): # right to left if indices[i] == len(pools[i]) - 1: continue indices[i] += 1 for j in range(i+1, n): indices[j] = 0 yield tuple(pool[i] for pool, i in zip(pools, indices)) break else: return def product2(*args, **kwds): 'Pure python version used in docs' pools = list(map(tuple, args)) * kwds.get('repeat', 1) result = [[]] for pool in pools: result = [x+[y] for x in result for y in pool] for prod in result: yield tuple(prod) argtypes = ['', 'abc', '', range(0), range(4), dict(a=1, b=2, c=3), set('abcdefg'), range(11), tuple(range(13))] for i in range(100): args = [random.choice(argtypes) for j in range(random.randrange(5))] expected_len = prod(map(len, args)) self.assertEqual(len(list(product(*args))), expected_len) self.assertEqual(list(product(*args)), list(product1(*args))) self.assertEqual(list(product(*args)), list(product2(*args))) args = map(iter, args) self.assertEqual(len(list(product(*args))), expected_len) # Test implementation detail: tuple re-use self.assertEqual(len(set(map(id, product('abc', 'def')))), 1) self.assertNotEqual(len(set(map(id, list(product('abc', 'def'))))), 1) def test_repeat(self): self.assertEqual(list(repeat(object='a', times=3)), ['a', 'a', 'a']) self.assertEqual(lzip(range(3),repeat('a')), [(0, 'a'), (1, 'a'), (2, 'a')]) self.assertEqual(list(repeat('a', 3)), ['a', 'a', 'a']) self.assertEqual(take(3, repeat('a')), ['a', 'a', 'a']) self.assertEqual(list(repeat('a', 0)), []) self.assertEqual(list(repeat('a', -3)), []) self.assertRaises(TypeError, repeat) self.assertRaises(TypeError, repeat, None, 3, 4) self.assertRaises(TypeError, repeat, None, 'a') r = repeat(1+0j) self.assertEqual(repr(r), 'repeat((1+0j))') r = repeat(1+0j, 5) self.assertEqual(repr(r), 'repeat((1+0j), 5)') list(r) self.assertEqual(repr(r), 'repeat((1+0j), 0)') def test_map(self): self.assertEqual(list(map(operator.pow, range(3), range(1,7))), [0**1, 1**2, 2**3]) def tupleize(*args): return args self.assertEqual(list(map(tupleize, 'abc', range(5))), [('a',0),('b',1),('c',2)]) self.assertEqual(list(map(tupleize, 'abc', count())), [('a',0),('b',1),('c',2)]) self.assertEqual(take(2,map(tupleize, 'abc', count())), [('a',0),('b',1)]) self.assertEqual(list(map(operator.pow, [])), []) 
self.assertRaises(TypeError, map) self.assertRaises(TypeError, list, map(None, range(3), range(3))) self.assertRaises(TypeError, map, operator.neg) self.assertRaises(TypeError, next, map(10, range(5))) self.assertRaises(ValueError, next, map(errfunc, [4], [5])) self.assertRaises(TypeError, next, map(onearg, [4], [5])) def test_starmap(self): self.assertEqual(list(starmap(operator.pow, zip(range(3), range(1,7)))), [0**1, 1**2, 2**3]) self.assertEqual(take(3, starmap(operator.pow, zip(count(), count(1)))), [0**1, 1**2, 2**3]) self.assertEqual(list(starmap(operator.pow, [])), []) self.assertEqual(list(starmap(operator.pow, [iter([4,5])])), [4**5]) self.assertRaises(TypeError, list, starmap(operator.pow, [None])) self.assertRaises(TypeError, starmap) self.assertRaises(TypeError, starmap, operator.pow, [(4,5)], 'extra') self.assertRaises(TypeError, next, starmap(10, [(4,5)])) self.assertRaises(ValueError, next, starmap(errfunc, [(4,5)])) self.assertRaises(TypeError, next, starmap(onearg, [(4,5)])) def test_islice(self): for args in [ # islice(args) should agree with range(args) (10, 20, 3), (10, 3, 20), (10, 20), (10, 3), (20,) ]: self.assertEqual(list(islice(range(100), *args)), list(range(*args))) for args, tgtargs in [ # Stop when seqn is exhausted ((10, 110, 3), ((10, 100, 3))), ((10, 110), ((10, 100))), ((110,), (100,)) ]: self.assertEqual(list(islice(range(100), *args)), list(range(*tgtargs))) # Test stop=None self.assertEqual(list(islice(range(10), None)), list(range(10))) self.assertEqual(list(islice(range(10), None, None)), list(range(10))) self.assertEqual(list(islice(range(10), None, None, None)), list(range(10))) self.assertEqual(list(islice(range(10), 2, None)), list(range(2, 10))) self.assertEqual(list(islice(range(10), 1, None, 2)), list(range(1, 10, 2))) # Test number of items consumed SF #1171417 it = iter(range(10)) self.assertEqual(list(islice(it, 3)), list(range(3))) self.assertEqual(list(it), list(range(3, 10))) # Test invalid arguments self.assertRaises(TypeError, islice, range(10)) self.assertRaises(TypeError, islice, range(10), 1, 2, 3, 4) self.assertRaises(ValueError, islice, range(10), -5, 10, 1) self.assertRaises(ValueError, islice, range(10), 1, -5, -1) self.assertRaises(ValueError, islice, range(10), 1, 10, -1) self.assertRaises(ValueError, islice, range(10), 1, 10, 0) self.assertRaises(ValueError, islice, range(10), 'a') self.assertRaises(ValueError, islice, range(10), 'a', 1) self.assertRaises(ValueError, islice, range(10), 1, 'a') self.assertRaises(ValueError, islice, range(10), 'a', 1, 1) self.assertRaises(ValueError, islice, range(10), 1, 'a', 1) self.assertEqual(len(list(islice(count(), 1, 10, maxsize))), 1) # Issue #10323: Leave islice in a predictable state c = count() self.assertEqual(list(islice(c, 1, 3, 50)), [1]) self.assertEqual(next(c), 3) def test_takewhile(self): data = [1, 3, 5, 20, 2, 4, 6, 8] underten = lambda x: x<10 self.assertEqual(list(takewhile(underten, data)), [1, 3, 5]) self.assertEqual(list(takewhile(underten, [])), []) self.assertRaises(TypeError, takewhile) self.assertRaises(TypeError, takewhile, operator.pow) self.assertRaises(TypeError, takewhile, operator.pow, [(4,5)], 'extra') self.assertRaises(TypeError, next, takewhile(10, [(4,5)])) self.assertRaises(ValueError, next, takewhile(errfunc, [(4,5)])) t = takewhile(bool, [1, 1, 1, 0, 0, 0]) self.assertEqual(list(t), [1, 1, 1]) self.assertRaises(StopIteration, next, t) def test_dropwhile(self): data = [1, 3, 5, 20, 2, 4, 6, 8] underten = lambda x: x<10
self.assertEqual(list(dropwhile(underten, data)), [20, 2, 4, 6, 8]) self.assertEqual(list(dropwhile(underten, [])), []) self.assertRaises(TypeError, dropwhile) self.assertRaises(TypeError, dropwhile, operator.pow) self.assertRaises(TypeError, dropwhile, operator.pow, [(4,5)], 'extra') self.assertRaises(TypeError, next, dropwhile(10, [(4,5)])) self.assertRaises(ValueError, next, dropwhile(errfunc, [(4,5)])) def test_tee(self): n = 200 def irange(n): for i in range(n): yield i a, b = tee([]) # test empty iterator self.assertEqual(list(a), []) self.assertEqual(list(b), []) a, b = tee(irange(n)) # test 100% interleaved self.assertEqual(lzip(a,b), lzip(range(n), range(n))) a, b = tee(irange(n)) # test 0% interleaved self.assertEqual(list(a), list(range(n))) self.assertEqual(list(b), list(range(n))) a, b = tee(irange(n)) # test dealloc of leading iterator for i in range(100): self.assertEqual(next(a), i) del a self.assertEqual(list(b), list(range(n))) a, b = tee(irange(n)) # test dealloc of trailing iterator for i in range(100): self.assertEqual(next(a), i) del b self.assertEqual(list(a), list(range(100, n))) for j in range(5): # test randomly interleaved order = [0]*n + [1]*n random.shuffle(order) lists = ([], []) its = tee(irange(n)) for i in order: value = next(its[i]) lists[i].append(value) self.assertEqual(lists[0], list(range(n))) self.assertEqual(lists[1], list(range(n))) # test argument format checking self.assertRaises(TypeError, tee) self.assertRaises(TypeError, tee, 3) self.assertRaises(TypeError, tee, [1,2], 'x') self.assertRaises(TypeError, tee, [1,2], 3, 'x') # tee object should be instantiable a, b = tee('abc') c = type(a)('def') self.assertEqual(list(c), list('def')) # test long-lagged and multi-way split a, b, c = tee(range(2000), 3) for i in range(100): self.assertEqual(next(a), i) self.assertEqual(list(b), list(range(2000))) self.assertEqual([next(c), next(c)], list(range(2))) self.assertEqual(list(a), list(range(100,2000))) self.assertEqual(list(c), list(range(2,2000))) # test values of n self.assertRaises(TypeError, tee, 'abc', 'invalid') self.assertRaises(ValueError, tee, [], -1) for n in range(5): result = tee('abc', n) self.assertEqual(type(result), tuple) self.assertEqual(len(result), n) self.assertEqual([list(x) for x in result], [list('abc')]*n) # tee pass-through to copyable iterator a, b = tee('abc') c, d = tee(a) self.assertTrue(a is c) # test tee_new t1, t2 = tee('abc') tnew = type(t1) self.assertRaises(TypeError, tnew) self.assertRaises(TypeError, tnew, 10) t3 = tnew(t1) self.assertTrue(list(t1) == list(t2) == list(t3) == list('abc')) # test that tee objects are weak referencable a, b = tee(range(10)) p = proxy(a) self.assertEqual(getattr(p, '__class__'), type(b)) del a self.assertRaises(ReferenceError, getattr, p, '__class__') # Issue 13454: Crash when deleting backward iterator from tee() def test_tee_del_backward(self): forward, backward = tee(repeat(None, 20000000)) any(forward) # exhaust the iterator del backward def test_StopIteration(self): self.assertRaises(StopIteration, next, zip()) for f in (chain, cycle, zip, groupby): self.assertRaises(StopIteration, next, f([])) self.assertRaises(StopIteration, next, f(StopNow())) self.assertRaises(StopIteration, next, islice([], None)) self.assertRaises(StopIteration, next, islice(StopNow(), None)) p, q = tee([]) self.assertRaises(StopIteration, next, p) self.assertRaises(StopIteration, next, q) p, q = tee(StopNow()) self.assertRaises(StopIteration, next, p) self.assertRaises(StopIteration, next, q) 
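        # Illustrative sketch (editor's addition): an exhausted iterator keeps
        # raising StopIteration on every subsequent next() call.
        exhausted = iter([1])
        next(exhausted)
        self.assertRaises(StopIteration, next, exhausted)
        self.assertRaises(StopIteration, next, exhausted)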
        self.assertRaises(StopIteration, next, repeat(None, 0))

        for f in (filter, filterfalse, map, takewhile, dropwhile, starmap):
            self.assertRaises(StopIteration, next, f(lambda x:x, []))
            self.assertRaises(StopIteration, next, f(lambda x:x, StopNow()))


class TestExamples(unittest.TestCase):

    def test_accumulate(self):
        self.assertEqual(list(accumulate([1,2,3,4,5])), [1, 3, 6, 10, 15])

    def test_chain(self):
        self.assertEqual(''.join(chain('ABC', 'DEF')), 'ABCDEF')

    def test_chain_from_iterable(self):
        self.assertEqual(''.join(chain.from_iterable(['ABC', 'DEF'])), 'ABCDEF')

    def test_combinations(self):
        self.assertEqual(list(combinations('ABCD', 2)),
                         [('A','B'), ('A','C'), ('A','D'), ('B','C'), ('B','D'), ('C','D')])
        self.assertEqual(list(combinations(range(4), 3)),
                         [(0,1,2), (0,1,3), (0,2,3), (1,2,3)])

    def test_combinations_with_replacement(self):
        self.assertEqual(list(combinations_with_replacement('ABC', 2)),
                         [('A','A'), ('A','B'), ('A','C'), ('B','B'), ('B','C'), ('C','C')])

    def test_compress(self):
        self.assertEqual(list(compress('ABCDEF', [1,0,1,0,1,1])), list('ACEF'))

    def test_count(self):
        self.assertEqual(list(islice(count(10), 5)), [10, 11, 12, 13, 14])

    def test_cycle(self):
        self.assertEqual(list(islice(cycle('ABCD'), 12)), list('ABCDABCDABCD'))

    def test_dropwhile(self):
        self.assertEqual(list(dropwhile(lambda x: x<5, [1,4,6,4,1])), [6,4,1])

    def test_groupby(self):
        self.assertEqual([k for k, g in groupby('AAAABBBCCDAABBB')],
                         list('ABCDAB'))
        self.assertEqual([(list(g)) for k, g in groupby('AAAABBBCCD')],
                         [list('AAAA'), list('BBB'), list('CC'), list('D')])

    def test_filter(self):
        self.assertEqual(list(filter(lambda x: x%2, range(10))), [1,3,5,7,9])

    def test_filterfalse(self):
        self.assertEqual(list(filterfalse(lambda x: x%2, range(10))), [0,2,4,6,8])

    def test_map(self):
        self.assertEqual(list(map(pow, (2,3,10), (5,2,3))), [32, 9, 1000])

    def test_islice(self):
        self.assertEqual(list(islice('ABCDEFG', 2)), list('AB'))
        self.assertEqual(list(islice('ABCDEFG', 2, 4)), list('CD'))
        self.assertEqual(list(islice('ABCDEFG', 2, None)), list('CDEFG'))
        self.assertEqual(list(islice('ABCDEFG', 0, None, 2)), list('ACEG'))

    def test_zip(self):
        self.assertEqual(list(zip('ABCD', 'xy')), [('A', 'x'), ('B', 'y')])

    def test_zip_longest(self):
        self.assertEqual(list(zip_longest('ABCD', 'xy', fillvalue='-')),
                         [('A', 'x'), ('B', 'y'), ('C', '-'), ('D', '-')])

    def test_permutations(self):
        self.assertEqual(list(permutations('ABCD', 2)),
                         list(map(tuple, 'AB AC AD BA BC BD CA CB CD DA DB DC'.split())))
        self.assertEqual(list(permutations(range(3))),
                         [(0,1,2), (0,2,1), (1,0,2), (1,2,0), (2,0,1), (2,1,0)])

    def test_product(self):
        self.assertEqual(list(product('ABCD', 'xy')),
                         list(map(tuple, 'Ax Ay Bx By Cx Cy Dx Dy'.split())))
        self.assertEqual(list(product(range(2), repeat=3)),
                         [(0,0,0), (0,0,1), (0,1,0), (0,1,1),
                          (1,0,0), (1,0,1), (1,1,0), (1,1,1)])

    def test_repeat(self):
        self.assertEqual(list(repeat(10, 3)), [10, 10, 10])

    def test_starmap(self):
        self.assertEqual(list(starmap(pow, [(2,5), (3,2), (10,3)])),
                         [32, 9, 1000])

    def test_takewhile(self):
        self.assertEqual(list(takewhile(lambda x: x<5, [1,4,6,4,1])), [1,4])


class TestGC(unittest.TestCase):

    def makecycle(self, iterator, container):
        container.append(iterator)
        next(iterator)
        del container, iterator

    def test_accumulate(self):
        a = []
        self.makecycle(accumulate([1,2,a,3]), a)

    def test_chain(self):
        a = []
        self.makecycle(chain(a), a)

    def test_chain_from_iterable(self):
        a = []
        self.makecycle(chain.from_iterable([a]), a)

    def test_combinations(self):
        a = []
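        # Editor's note on the technique used throughout this class: makecycle()
        # builds a reference cycle -- the iterator is appended to `a`, which the
        # iterator itself holds as input -- so the pair is reclaimable only by
        # cyclic garbage collection.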
self.makecycle(combinations([1,2,a,3], 3), a) def test_combinations_with_replacement(self): a = [] self.makecycle(combinations_with_replacement([1,2,a,3], 3), a) def test_compress(self): a = [] self.makecycle(compress('ABCDEF', [1,0,1,0,1,0]), a) def test_count(self): a = [] Int = type('Int', (int,), dict(x=a)) self.makecycle(count(Int(0), Int(1)), a) def test_cycle(self): a = [] self.makecycle(cycle([a]*2), a) def test_dropwhile(self): a = [] self.makecycle(dropwhile(bool, [0, a, a]), a) def test_groupby(self): a = [] self.makecycle(groupby([a]*2, lambda x:x), a) def test_issue2246(self): # Issue 2246 -- the _grouper iterator was not included in GC n = 10 keyfunc = lambda x: x for i, j in groupby(range(n), key=keyfunc): keyfunc.__dict__.setdefault('x',[]).append(j) def test_filter(self): a = [] self.makecycle(filter(lambda x:True, [a]*2), a) def test_filterfalse(self): a = [] self.makecycle(filterfalse(lambda x:False, a), a) def test_zip(self): a = [] self.makecycle(zip([a]*2, [a]*3), a) def test_zip_longest(self): a = [] self.makecycle(zip_longest([a]*2, [a]*3), a) b = [a, None] self.makecycle(zip_longest([a]*2, [a]*3, fillvalue=b), a) def test_map(self): a = [] self.makecycle(map(lambda x:x, [a]*2), a) def test_islice(self): a = [] self.makecycle(islice([a]*2, None), a) def test_permutations(self): a = [] self.makecycle(permutations([1,2,a,3], 3), a) def test_product(self): a = [] self.makecycle(product([1,2,a,3], repeat=3), a) def test_repeat(self): a = [] self.makecycle(repeat(a), a) def test_starmap(self): a = [] self.makecycle(starmap(lambda *t: t, [(a,a)]*2), a) def test_takewhile(self): a = [] self.makecycle(takewhile(bool, [1, 0, a, a]), a) def R(seqn): 'Regular generator' for i in seqn: yield i class G: 'Sequence using __getitem__' def __init__(self, seqn): self.seqn = seqn def __getitem__(self, i): return self.seqn[i] class I: 'Sequence using iterator protocol' def __init__(self, seqn): self.seqn = seqn self.i = 0 def __iter__(self): return self def __next__(self): if self.i >= len(self.seqn): raise StopIteration v = self.seqn[self.i] self.i += 1 return v class Ig: 'Sequence using iterator protocol defined with a generator' def __init__(self, seqn): self.seqn = seqn self.i = 0 def __iter__(self): for val in self.seqn: yield val class X: 'Missing __getitem__ and __iter__' def __init__(self, seqn): self.seqn = seqn self.i = 0 def __next__(self): if self.i >= len(self.seqn): raise StopIteration v = self.seqn[self.i] self.i += 1 return v class N: 'Iterator missing __next__()' def __init__(self, seqn): self.seqn = seqn self.i = 0 def __iter__(self): return self class E: 'Test propagation of exceptions' def __init__(self, seqn): self.seqn = seqn self.i = 0 def __iter__(self): return self def __next__(self): 3 // 0 class S: 'Test immediate stop' def __init__(self, seqn): pass def __iter__(self): return self def __next__(self): raise StopIteration def L(seqn): 'Test multiple tiers of iterators' return chain(map(lambda x:x, R(Ig(G(seqn))))) class TestVariousIteratorArgs(unittest.TestCase): def test_accumulate(self): s = [1,2,3,4,5] r = [1,3,6,10,15] n = len(s) for g in (G, I, Ig, L, R): self.assertEqual(list(accumulate(g(s))), r) self.assertEqual(list(accumulate(S(s))), []) self.assertRaises(TypeError, accumulate, X(s)) self.assertRaises(TypeError, accumulate, N(s)) self.assertRaises(ZeroDivisionError, list, accumulate(E(s))) def test_chain(self): for s in ("123", "", range(1000), ('do', 1.2), range(2000,2200,5)): for g in (G, I, Ig, S, L, R): self.assertEqual(list(chain(g(s))), 
list(g(s))) self.assertEqual(list(chain(g(s), g(s))), list(g(s))+list(g(s))) self.assertRaises(TypeError, list, chain(X(s))) self.assertRaises(TypeError, list, chain(N(s))) self.assertRaises(ZeroDivisionError, list, chain(E(s))) def test_compress(self): for s in ("123", "", range(1000), ('do', 1.2), range(2000,2200,5)): n = len(s) for g in (G, I, Ig, S, L, R): self.assertEqual(list(compress(g(s), repeat(1))), list(g(s))) self.assertRaises(TypeError, compress, X(s), repeat(1)) self.assertRaises(TypeError, compress, N(s), repeat(1)) self.assertRaises(ZeroDivisionError, list, compress(E(s), repeat(1))) def test_product(self): for s in ("123", "", range(1000), ('do', 1.2), range(2000,2200,5)): self.assertRaises(TypeError, product, X(s)) self.assertRaises(TypeError, product, N(s)) self.assertRaises(ZeroDivisionError, product, E(s)) def test_cycle(self): for s in ("123", "", range(1000), ('do', 1.2), range(2000,2200,5)): for g in (G, I, Ig, S, L, R): tgtlen = len(s) * 3 expected = list(g(s))*3 actual = list(islice(cycle(g(s)), tgtlen)) self.assertEqual(actual, expected) self.assertRaises(TypeError, cycle, X(s)) self.assertRaises(TypeError, cycle, N(s)) self.assertRaises(ZeroDivisionError, list, cycle(E(s))) def test_groupby(self): for s in (range(10), range(0), range(1000), (7,11), range(2000,2200,5)): for g in (G, I, Ig, S, L, R): self.assertEqual([k for k, sb in groupby(g(s))], list(g(s))) self.assertRaises(TypeError, groupby, X(s)) self.assertRaises(TypeError, groupby, N(s)) self.assertRaises(ZeroDivisionError, list, groupby(E(s))) def test_filter(self): for s in (range(10), range(0), range(1000), (7,11), range(2000,2200,5)): for g in (G, I, Ig, S, L, R): self.assertEqual(list(filter(isEven, g(s))), [x for x in g(s) if isEven(x)]) self.assertRaises(TypeError, filter, isEven, X(s)) self.assertRaises(TypeError, filter, isEven, N(s)) self.assertRaises(ZeroDivisionError, list, filter(isEven, E(s))) def test_filterfalse(self): for s in (range(10), range(0), range(1000), (7,11), range(2000,2200,5)): for g in (G, I, Ig, S, L, R): self.assertEqual(list(filterfalse(isEven, g(s))), [x for x in g(s) if isOdd(x)]) self.assertRaises(TypeError, filterfalse, isEven, X(s)) self.assertRaises(TypeError, filterfalse, isEven, N(s)) self.assertRaises(ZeroDivisionError, list, filterfalse(isEven, E(s))) def test_zip(self): for s in ("123", "", range(1000), ('do', 1.2), range(2000,2200,5)): for g in (G, I, Ig, S, L, R): self.assertEqual(list(zip(g(s))), lzip(g(s))) self.assertEqual(list(zip(g(s), g(s))), lzip(g(s), g(s))) self.assertRaises(TypeError, zip, X(s)) self.assertRaises(TypeError, zip, N(s)) self.assertRaises(ZeroDivisionError, list, zip(E(s))) def test_ziplongest(self): for s in ("123", "", range(1000), ('do', 1.2), range(2000,2200,5)): for g in (G, I, Ig, S, L, R): self.assertEqual(list(zip_longest(g(s))), list(zip(g(s)))) self.assertEqual(list(zip_longest(g(s), g(s))), list(zip(g(s), g(s)))) self.assertRaises(TypeError, zip_longest, X(s)) self.assertRaises(TypeError, zip_longest, N(s)) self.assertRaises(ZeroDivisionError, list, zip_longest(E(s))) def test_map(self): for s in (range(10), range(0), range(100), (7,11), range(20,50,5)): for g in (G, I, Ig, S, L, R): self.assertEqual(list(map(onearg, g(s))), [onearg(x) for x in g(s)]) self.assertEqual(list(map(operator.pow, g(s), g(s))), [x**x for x in g(s)]) self.assertRaises(TypeError, map, onearg, X(s)) self.assertRaises(TypeError, map, onearg, N(s)) self.assertRaises(ZeroDivisionError, list, map(onearg, E(s))) def test_islice(self): for s in ("12345", "", 
range(1000), ('do', 1.2), range(2000,2200,5)): for g in (G, I, Ig, S, L, R): self.assertEqual(list(islice(g(s),1,None,2)), list(g(s))[1::2]) self.assertRaises(TypeError, islice, X(s), 10) self.assertRaises(TypeError, islice, N(s), 10) self.assertRaises(ZeroDivisionError, list, islice(E(s), 10)) def test_starmap(self): for s in (range(10), range(0), range(100), (7,11), range(20,50,5)): for g in (G, I, Ig, S, L, R): ss = lzip(s, s) self.assertEqual(list(starmap(operator.pow, g(ss))), [x**x for x in g(s)]) self.assertRaises(TypeError, starmap, operator.pow, X(ss)) self.assertRaises(TypeError, starmap, operator.pow, N(ss)) self.assertRaises(ZeroDivisionError, list, starmap(operator.pow, E(ss))) def test_takewhile(self): for s in (range(10), range(0), range(1000), (7,11), range(2000,2200,5)): for g in (G, I, Ig, S, L, R): tgt = [] for elem in g(s): if not isEven(elem): break tgt.append(elem) self.assertEqual(list(takewhile(isEven, g(s))), tgt) self.assertRaises(TypeError, takewhile, isEven, X(s)) self.assertRaises(TypeError, takewhile, isEven, N(s)) self.assertRaises(ZeroDivisionError, list, takewhile(isEven, E(s))) def test_dropwhile(self): for s in (range(10), range(0), range(1000), (7,11), range(2000,2200,5)): for g in (G, I, Ig, S, L, R): tgt = [] for elem in g(s): if not tgt and isOdd(elem): continue tgt.append(elem) self.assertEqual(list(dropwhile(isOdd, g(s))), tgt) self.assertRaises(TypeError, dropwhile, isOdd, X(s)) self.assertRaises(TypeError, dropwhile, isOdd, N(s)) self.assertRaises(ZeroDivisionError, list, dropwhile(isOdd, E(s))) def test_tee(self): for s in ("123", "", range(1000), ('do', 1.2), range(2000,2200,5)): for g in (G, I, Ig, S, L, R): it1, it2 = tee(g(s)) self.assertEqual(list(it1), list(g(s))) self.assertEqual(list(it2), list(g(s))) self.assertRaises(TypeError, tee, X(s)) self.assertRaises(TypeError, tee, N(s)) self.assertRaises(ZeroDivisionError, list, tee(E(s))[0]) class LengthTransparency(unittest.TestCase): def test_repeat(self): from test.test_iterlen import len self.assertEqual(len(repeat(None, 50)), 50) self.assertRaises(TypeError, len, repeat(None)) class RegressionTests(unittest.TestCase): def test_sf_793826(self): # Fix Armin Rigo's successful efforts to wreak havoc def mutatingtuple(tuple1, f, tuple2): # this builds a tuple t which is a copy of tuple1, # then calls f(t), then mutates t to be equal to tuple2 # (needs len(tuple1) == len(tuple2)). def g(value, first=[1]): if first: del first[:] f(next(z)) return value items = list(tuple2) items[1:1] = list(tuple1) gen = map(g, items) z = zip(*[gen]*len(tuple1)) next(z) def f(t): global T T = t first[:] = list(T) first = [] mutatingtuple((1,2,3), f, (4,5,6)) second = list(T) self.assertEqual(first, second) def test_sf_950057(self): # Make sure that chain() and cycle() catch exceptions immediately # rather than when shifting between input sources def gen1(): hist.append(0) yield 1 hist.append(1) raise AssertionError hist.append(2) def gen2(x): hist.append(3) yield 2 hist.append(4) if x: raise StopIteration hist = [] self.assertRaises(AssertionError, list, chain(gen1(), gen2(False))) self.assertEqual(hist, [0,1]) hist = [] self.assertRaises(AssertionError, list, chain(gen1(), gen2(True))) self.assertEqual(hist, [0,1]) hist = [] self.assertRaises(AssertionError, list, cycle(gen1())) self.assertEqual(hist, [0,1]) class SubclassWithKwargsTest(unittest.TestCase): def test_keywords_in_subclass(self): # count is not subclassable... 
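        # Editor's note: each Subclass below forwards only positional arguments
        # to the base initializer, so Subclass(newarg=1) must fail -- but with an
        # ordinary argument-count TypeError, not a message claiming that the
        # type rejects keyword arguments outright.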
for cls in (repeat, zip, filter, filterfalse, chain, map, starmap, islice, takewhile, dropwhile, cycle, compress): class Subclass(cls): def __init__(self, newarg=None, *args): cls.__init__(self, *args) try: Subclass(newarg=1) except TypeError as err: # we expect type errors because of wrong argument count self.assertNotIn("does not take keyword arguments", err.args[0]) libreftest = """ Doctest for examples in the library reference: libitertools.tex >>> amounts = [120.15, 764.05, 823.14] >>> for checknum, amount in zip(count(1200), amounts): ... print('Check %d is for $%.2f' % (checknum, amount)) ... Check 1200 is for $120.15 Check 1201 is for $764.05 Check 1202 is for $823.14 >>> import operator >>> for cube in map(operator.pow, range(1,4), repeat(3)): ... print(cube) ... 1 8 27 >>> reportlines = ['EuroPython', 'Roster', '', 'alex', '', 'laura', '', 'martin', '', 'walter', '', 'samuele'] >>> for name in islice(reportlines, 3, None, 2): ... print(name.title()) ... Alex Laura Martin Walter Samuele >>> from operator import itemgetter >>> d = dict(a=1, b=2, c=1, d=2, e=1, f=2, g=3) >>> di = sorted(sorted(d.items()), key=itemgetter(1)) >>> for k, g in groupby(di, itemgetter(1)): ... print(k, list(map(itemgetter(0), g))) ... 1 ['a', 'c', 'e'] 2 ['b', 'd', 'f'] 3 ['g'] # Find runs of consecutive numbers using groupby. The key to the solution # is differencing with a range so that consecutive numbers all appear in # same group. >>> data = [ 1, 4,5,6, 10, 15,16,17,18, 22, 25,26,27,28] >>> for k, g in groupby(enumerate(data), lambda t:t[0]-t[1]): ... print(list(map(operator.itemgetter(1), g))) ... [1] [4, 5, 6] [10] [15, 16, 17, 18] [22] [25, 26, 27, 28] >>> def take(n, iterable): ... "Return first n items of the iterable as a list" ... return list(islice(iterable, n)) >>> def enumerate(iterable, start=0): ... return zip(count(start), iterable) >>> def tabulate(function, start=0): ... "Return function(0), function(1), ..." ... return map(function, count(start)) >>> def nth(iterable, n, default=None): ... "Returns the nth item or a default value" ... return next(islice(iterable, n, None), default) >>> def quantify(iterable, pred=bool): ... "Count how many times the predicate is true" ... return sum(map(pred, iterable)) >>> def padnone(iterable): ... "Returns the sequence elements and then returns None indefinitely" ... return chain(iterable, repeat(None)) >>> def ncycles(iterable, n): ... "Returns the sequence elements n times" ... return chain(*repeat(iterable, n)) >>> def dotproduct(vec1, vec2): ... return sum(map(operator.mul, vec1, vec2)) >>> def flatten(listOfLists): ... return list(chain.from_iterable(listOfLists)) >>> def repeatfunc(func, times=None, *args): ... "Repeat calls to func with specified arguments." ... " Example: repeatfunc(random.random)" ... if times is None: ... return starmap(func, repeat(args)) ... else: ... return starmap(func, repeat(args, times)) >>> def pairwise(iterable): ... "s -> (s0,s1), (s1,s2), (s2, s3), ..." ... a, b = tee(iterable) ... try: ... next(b) ... except StopIteration: ... pass ... return zip(a, b) >>> def grouper(n, iterable, fillvalue=None): ... "grouper(3, 'ABCDEFG', 'x') --> ABC DEF Gxx" ... args = [iter(iterable)] * n ... return zip_longest(*args, fillvalue=fillvalue) >>> def roundrobin(*iterables): ... "roundrobin('ABC', 'D', 'EF') --> A D E B F C" ... # Recipe credited to George Sakkis ... pending = len(iterables) ... nexts = cycle(iter(it).__next__ for it in iterables) ... while pending: ... try: ... for next in nexts: ... yield next() ... 
except StopIteration: ... pending -= 1 ... nexts = cycle(islice(nexts, pending)) >>> def powerset(iterable): ... "powerset([1,2,3]) --> () (1,) (2,) (3,) (1,2) (1,3) (2,3) (1,2,3)" ... s = list(iterable) ... return chain.from_iterable(combinations(s, r) for r in range(len(s)+1)) >>> def unique_everseen(iterable, key=None): ... "List unique elements, preserving order. Remember all elements ever seen." ... # unique_everseen('AAAABBBCCDAABBB') --> A B C D ... # unique_everseen('ABBCcAD', str.lower) --> A B C D ... seen = set() ... seen_add = seen.add ... if key is None: ... for element in iterable: ... if element not in seen: ... seen_add(element) ... yield element ... else: ... for element in iterable: ... k = key(element) ... if k not in seen: ... seen_add(k) ... yield element >>> def unique_justseen(iterable, key=None): ... "List unique elements, preserving order. Remember only the element just seen." ... # unique_justseen('AAAABBBCCDAABBB') --> A B C D A B ... # unique_justseen('ABBCcAD', str.lower) --> A B C A D ... return map(next, map(itemgetter(1), groupby(iterable, key))) This is not part of the examples but it tests to make sure the definitions perform as purported. >>> take(10, count()) [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] >>> list(enumerate('abc')) [(0, 'a'), (1, 'b'), (2, 'c')] >>> list(islice(tabulate(lambda x: 2*x), 4)) [0, 2, 4, 6] >>> nth('abcde', 3) 'd' >>> nth('abcde', 9) is None True >>> quantify(range(99), lambda x: x%2==0) 50 >>> a = [[1, 2, 3], [4, 5, 6]] >>> flatten(a) [1, 2, 3, 4, 5, 6] >>> list(repeatfunc(pow, 5, 2, 3)) [8, 8, 8, 8, 8] >>> import random >>> take(5, map(int, repeatfunc(random.random))) [0, 0, 0, 0, 0] >>> list(pairwise('abcd')) [('a', 'b'), ('b', 'c'), ('c', 'd')] >>> list(pairwise([])) [] >>> list(pairwise('a')) [] >>> list(islice(padnone('abc'), 0, 6)) ['a', 'b', 'c', None, None, None] >>> list(ncycles('abc', 3)) ['a', 'b', 'c', 'a', 'b', 'c', 'a', 'b', 'c'] >>> dotproduct([1,2,3], [4,5,6]) 32 >>> list(grouper(3, 'abcdefg', 'x')) [('a', 'b', 'c'), ('d', 'e', 'f'), ('g', 'x', 'x')] >>> list(roundrobin('abc', 'd', 'ef')) ['a', 'd', 'e', 'b', 'f', 'c'] >>> list(powerset([1,2,3])) [(), (1,), (2,), (3,), (1, 2), (1, 3), (2, 3), (1, 2, 3)] >>> all(len(list(powerset(range(n)))) == 2**n for n in range(18)) True >>> list(powerset('abcde')) == sorted(sorted(set(powerset('abcde'))), key=len) True >>> list(unique_everseen('AAAABBBCCDAABBB')) ['A', 'B', 'C', 'D'] >>> list(unique_everseen('ABBCcAD', str.lower)) ['A', 'B', 'C', 'D'] >>> list(unique_justseen('AAAABBBCCDAABBB')) ['A', 'B', 'C', 'D', 'A', 'B'] >>> list(unique_justseen('ABBCcAD', str.lower)) ['A', 'B', 'C', 'A', 'D'] """ __test__ = {'libreftest' : libreftest} def test_main(verbose=None): test_classes = (TestBasicOps, TestVariousIteratorArgs, TestGC, RegressionTests, LengthTransparency, SubclassWithKwargsTest, TestExamples) support.run_unittest(*test_classes) # verify reference counting if verbose and hasattr(sys, "gettotalrefcount"): import gc counts = [None] * 5 for i in range(len(counts)): support.run_unittest(*test_classes) gc.collect() counts[i] = sys.gettotalrefcount() print(counts) # doctest the examples in the library reference support.run_doctest(sys.modules[__name__], verbose) if __name__ == "__main__": test_main(verbose=True)
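# Editor's note (illustrative, not from the original module): run_doctest above
# discovers `libreftest` through the module-level `__test__` mapping; a plain
# doctest run would pick it up the same way, e.g.:
#
#     import doctest
#     doctest.testmod()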
lgpl-3.0
4,241,538,907,568,273,400
39.612602
124
0.533889
false
slozier/ironpython2
Tests/test_bytes.py
2
65335
# Licensed to the .NET Foundation under one or more agreements. # The .NET Foundation licenses this file to you under the Apache 2.0 License. # See the LICENSE file in the project root for more information. import sys import unittest from iptest import IronPythonTestCase, ip_supported_encodings, is_cli, is_mono, is_osx, run_test types = [bytearray, bytes] class IndexableOC: def __init__(self, value): self.value = value def __index__(self): return self.value class Indexable(object): def __init__(self, value): self.value = value def __index__(self): return self.value class BytesTest(IronPythonTestCase): def test_capitalize(self): tests = [(b'foo', b'Foo'), (b' foo', b' foo'), (b'fOO', b'Foo'), (b' fOO BAR', b' foo bar'), (b'fOO BAR', b'Foo bar'), ] for testType in types: for data, result in tests: self.assertEqual(testType(data).capitalize(), result) y = b'' x = y.capitalize() self.assertEqual(id(x), id(y)) y = bytearray(b'') x = y.capitalize() self.assertTrue(id(x) != id(y), "bytearray.capitalize returned self") def test_center(self): for testType in types: self.assertEqual(testType(b'aa').center(4), b' aa ') self.assertEqual(testType(b'aa').center(4, b'*'), b'*aa*') self.assertEqual(testType(b'aa').center(4, '*'), b'*aa*') self.assertEqual(testType(b'aa').center(2), b'aa') self.assertEqual(testType(b'aa').center(2, '*'), b'aa') self.assertEqual(testType(b'aa').center(2, b'*'), b'aa') self.assertRaises(TypeError, testType(b'abc').center, 3, [2, ]) x = b'aa' self.assertEqual(id(x.center(2, '*')), id(x)) self.assertEqual(id(x.center(2, b'*')), id(x)) x = bytearray(b'aa') self.assertTrue(id(x.center(2, '*')) != id(x)) self.assertTrue(id(x.center(2, b'*')) != id(x)) def test_count(self): for testType in types: self.assertEqual(testType(b"adadad").count(b"d"), 3) self.assertEqual(testType(b"adbaddads").count(b"ad"), 3) self.assertEqual(testType(b"adbaddads").count(b"ad", 1, 8), 2) self.assertEqual(testType(b"adbaddads").count(b"ad", -1, -1), 0) self.assertEqual(testType(b"adbaddads").count(b"ad", 0, -1), 3) self.assertEqual(testType(b"adbaddads").count(b"", 0, -1), 9) self.assertEqual(testType(b"adbaddads").count(b"", 27), 0) self.assertRaises(TypeError, testType(b"adbaddads").count, [2,]) self.assertRaises(TypeError, testType(b"adbaddads").count, [2,], 0) self.assertRaises(TypeError, testType(b"adbaddads").count, [2,], 0, 1) def test_decode(self): for testType in types: self.assertEqual(testType(b'\xff\xfea\x00b\x00c\x00').decode('utf-16'), 'abc') def test_endswith(self): for testType in types: self.assertRaises(TypeError, testType(b'abcdef').endswith, ([], )) self.assertRaises(TypeError, testType(b'abcdef').endswith, []) self.assertRaises(TypeError, testType(b'abcdef').endswith, [], 0) self.assertRaises(TypeError, testType(b'abcdef').endswith, [], 0, 1) self.assertEqual(testType(b'abcdef').endswith(b'def'), True) self.assertEqual(testType(b'abcdef').endswith(b'def', -1, -2), False) self.assertEqual(testType(b'abcdef').endswith(b'def', 0, 42), True) self.assertEqual(testType(b'abcdef').endswith(b'def', 0, -7), False) self.assertEqual(testType(b'abcdef').endswith(b'def', 42, -7), False) self.assertEqual(testType(b'abcdef').endswith(b'def', 42), False) self.assertEqual(testType(b'abcdef').endswith(b'bar'), False) self.assertEqual(testType(b'abcdef').endswith((b'def', )), True) self.assertEqual(testType(b'abcdef').endswith((b'baz', )), False) self.assertEqual(testType(b'abcdef').endswith((b'baz', ), 0, 42), False) self.assertEqual(testType(b'abcdef').endswith((b'baz', ), 0, -42), False) for x in 
(0, 1, 2, 3, -10, -3, -4): self.assertEqual(testType(b"abcdef").endswith(b"def", x), True) self.assertEqual(testType(b"abcdef").endswith(b"de", x, 5), True) self.assertEqual(testType(b"abcdef").endswith(b"de", x, -1), True) self.assertEqual(testType(b"abcdef").endswith((b"def", ), x), True) self.assertEqual(testType(b"abcdef").endswith((b"de", ), x, 5), True) self.assertEqual(testType(b"abcdef").endswith((b"de", ), x, -1), True) for x in (4, 5, 6, 10, -1, -2): self.assertEqual(testType(b"abcdef").endswith((b"def", ), x), False) self.assertEqual(testType(b"abcdef").endswith((b"de", ), x, 5), False) self.assertEqual(testType(b"abcdef").endswith((b"de", ), x, -1), False) def test_expandtabs(self): for testType in types: self.assertTrue(testType(b"\ttext\t").expandtabs(0) == b"text") self.assertTrue(testType(b"\ttext\t").expandtabs(-10) == b"text") self.assertEqual(testType(b"\r\ntext\t").expandtabs(-10), b"\r\ntext") self.assertEqual(len(testType(b"aaa\taaa\taaa").expandtabs()), 19) self.assertEqual(testType(b"aaa\taaa\taaa").expandtabs(), b"aaa aaa aaa") self.assertRaises(OverflowError, bytearray(b'\t\t').expandtabs, sys.maxint) def test_extend(self): b = bytearray(b'abc') b.extend(b'def') self.assertEqual(b, b'abcdef') b.extend(bytearray(b'ghi')) self.assertEqual(b, b'abcdefghi') b = bytearray(b'abc') b.extend([2,3,4]) self.assertEqual(b, b'abc' + b'\x02\x03\x04') b = bytearray(b'abc') b.extend(memoryview(b"def")) self.assertEqual(b, b'abcdef') def test_find(self): for testType in types: self.assertEqual(testType(b"abcdbcda").find(b"cd", 1), 2) self.assertEqual(testType(b"abcdbcda").find(b"cd", 3), 5) self.assertEqual(testType(b"abcdbcda").find(b"cd", 7), -1) self.assertEqual(testType(b'abc').find(b'abc', -1, 1), -1) self.assertEqual(testType(b'abc').find(b'abc', 25), -1) self.assertEqual(testType(b'abc').find(b'add', 0, 3), -1) if testType == bytes: self.assertEqual(testType(b'abc').find(b'add', 0, None), -1) self.assertEqual(testType(b'abc').find(b'add', None, None), -1) self.assertEqual(testType(b'abc').find(b'', None, 0), 0) self.assertEqual(testType(b'x').find(b'x', None, 0), -1) self.assertEqual(testType(b'abc').find(b'', 0, 0), 0) self.assertEqual(testType(b'abc').find(b'', 0, 1), 0) self.assertEqual(testType(b'abc').find(b'', 0, 2), 0) self.assertEqual(testType(b'abc').find(b'', 0, 3), 0) self.assertEqual(testType(b'abc').find(b'', 0, 4), 0) self.assertEqual(testType(b'').find(b'', 0, 4), 0) self.assertEqual(testType(b'x').find(b'x', 0, 0), -1) self.assertEqual(testType(b'x').find(b'x', 3, 0), -1) self.assertEqual(testType(b'x').find(b'', 3, 0), -1) self.assertRaises(TypeError, testType(b'x').find, [1]) self.assertRaises(TypeError, testType(b'x').find, [1], 0) self.assertRaises(TypeError, testType(b'x').find, [1], 0, 1) def test_fromhex(self): for testType in types: if testType != str: self.assertRaises(ValueError, testType.fromhex, u'0') self.assertRaises(ValueError, testType.fromhex, u'A') self.assertRaises(ValueError, testType.fromhex, u'a') self.assertRaises(ValueError, testType.fromhex, u'aG') self.assertRaises(ValueError, testType.fromhex, u'Ga') self.assertEqual(testType.fromhex(u'00'), b'\x00') self.assertEqual(testType.fromhex(u'00 '), b'\x00') self.assertEqual(testType.fromhex(u'00 '), b'\x00') self.assertEqual(testType.fromhex(u'00 01'), b'\x00\x01') self.assertEqual(testType.fromhex(u'00 01 0a'), b'\x00\x01\x0a') self.assertEqual(testType.fromhex(u'00 01 0a 0B'), b'\x00\x01\x0a\x0B') self.assertEqual(testType.fromhex(u'00 a1 Aa 0B'), b'\x00\xA1\xAa\x0B') def 
test_index(self): for testType in types: self.assertRaises(TypeError, testType(b'abc').index, 257) self.assertEqual(testType(b'abc').index(b'a'), 0) self.assertEqual(testType(b'abc').index(b'a', 0, -1), 0) self.assertRaises(ValueError, testType(b'abc').index, b'c', 0, -1) self.assertRaises(ValueError, testType(b'abc').index, b'a', -1) self.assertEqual(testType(b'abc').index(b'ab'), 0) self.assertEqual(testType(b'abc').index(b'bc'), 1) self.assertRaises(ValueError, testType(b'abc').index, b'abcd') self.assertRaises(ValueError, testType(b'abc').index, b'e') self.assertRaises(TypeError, testType(b'x').index, [1]) self.assertRaises(TypeError, testType(b'x').index, [1], 0) self.assertRaises(TypeError, testType(b'x').index, [1], 0, 1) def test_insert(self): b = bytearray(b'abc') b.insert(0, ord('d')) self.assertEqual(b, b'dabc') b.insert(1000, ord('d')) self.assertEqual(b, b'dabcd') b.insert(-1, ord('d')) self.assertEqual(b, b'dabcdd') self.assertRaises(ValueError, b.insert, 0, 256) def check_is_method(self, methodName, result): for testType in types: self.assertEqual(getattr(testType(b''), methodName)(), False) for i in xrange(256): data = bytearray() data.append(i) self.assertTrue(getattr(testType(data), methodName)() == result(i), chr(i) + " (" + str(i) + ") should be " + str(result(i))) def test_isalnum(self): self.check_is_method('isalnum', lambda i : i >= ord('a') and i <= ord('z') or i >= ord('A') and i <= ord('Z') or i >= ord('0') and i <= ord('9')) def test_isalpha(self): self.check_is_method('isalpha', lambda i : i >= ord('a') and i <= ord('z') or i >= ord('A') and i <= ord('Z')) def test_isdigit(self): self.check_is_method('isdigit', lambda i : (i >= ord('0') and i <= ord('9'))) def test_islower(self): self.check_is_method('islower', lambda i : i >= ord('a') and i <= ord('z')) for testType in types: for i in xrange(256): if not chr(i).isupper(): self.assertEqual((testType(b'a') + testType([i])).islower(), True) def test_isspace(self): self.check_is_method('isspace', lambda i : i in [ord(' '), ord('\t'), ord('\f'), ord('\n'), ord('\r'), 11]) for testType in types: for i in xrange(256): if not chr(i).islower(): self.assertEqual((testType(b'A') + testType([i])).isupper(), True) def test_istitle(self): for testType in types: self.assertEqual(testType(b'').istitle(), False) self.assertEqual(testType(b'Foo').istitle(), True) self.assertEqual(testType(b'Foo Bar').istitle(), True) self.assertEqual(testType(b'FooBar').istitle(), False) self.assertEqual(testType(b'foo').istitle(), False) def test_isupper(self): self.check_is_method('isupper', lambda i : i >= ord('A') and i <= ord('Z')) def test_join(self): x = b'' self.assertEqual(id(x.join(b'')), id(x)) x = bytearray(x) self.assertTrue(id(x.join(b'')) != id(x)) x = b'abc' self.assertEqual(id(b'foo'.join([x])), id(x)) self.assertRaises(TypeError, b'foo'.join, [42]) x = bytearray(b'foo') self.assertTrue(id(bytearray(b'foo').join([x])) != id(x), "got back same object on single arg join w/ bytearray") for testType in types: self.assertEqual(testType(b'x').join([b'd', b'e', b'f']), b'dxexf') self.assertEqual(testType(b'x').join([b'd', b'e', b'f']), b'dxexf') self.assertEqual(type(testType(b'x').join([b'd', b'e', b'f'])), testType) if str != bytes: # works in Py3k/Ipy, not in Py2.6 self.assertEqual(b'x'.join([testType(b'd'), testType(b'e'), testType(b'f')]), b'dxexf') self.assertEqual(bytearray(b'x').join([testType(b'd'), testType(b'e'), testType(b'f')]), b'dxexf') self.assertEqual(testType(b'').join([]), b'') 
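            # Illustrative sketch (editor's addition): the separator goes
            # between elements only, never at either end.
            self.assertEqual(testType(b'-').join([b'a', b'b', b'c']), b'a-b-c')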
self.assertEqual(testType(b'').join((b'abc', )), b'abc') self.assertEqual(testType(b'').join((b'abc', b'def')), b'abcdef') self.assertRaises(TypeError, testType(b'').join, (42, )) def test_ljust(self): for testType in types: self.assertRaises(TypeError, testType(b'').ljust, 42, ' ') self.assertRaises(TypeError, testType(b'').ljust, 42, b' ') self.assertRaises(TypeError, testType(b'').ljust, 42, u'\u0100') self.assertEqual(testType(b'abc').ljust(4), b'abc ') self.assertEqual(testType(b'abc').ljust(4, b'x'), b'abcx') self.assertEqual(testType(b'abc').ljust(4, 'x'), b'abcx') x = b'abc' self.assertEqual(id(x.ljust(2)), id(x)) x = bytearray(x) self.assertTrue(id(x.ljust(2)) != id(x)) def test_lower(self): expected = b'\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f' \ b'\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !"#$%' \ b'&\'()*+,-./0123456789:;<=>?@abcdefghijklmnopqrstuvwxyz[\\]^_`' \ b'abcdefghijklmnopqrstuvwxyz{|}~\x7f\x80\x81\x82\x83\x84\x85\x86\x87\x88' \ b'\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90\x91\x92\x93\x94\x95\x96\x97\x98\x99' \ b'\x9a\x9b\x9c\x9d\x9e\x9f\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7\xa8\xa9\xaa' \ b'\xab\xac\xad\xae\xaf\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xba\xbb' \ b'\xbc\xbd\xbe\xbf\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7\xc8\xc9\xca\xcb\xcc' \ b'\xcd\xce\xcf\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7\xd8\xd9\xda\xdb\xdc\xdd' \ b'\xde\xdf\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7\xe8\xe9\xea\xeb\xec\xed\xee' \ b'\xef\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff' data = bytearray() for i in xrange(256): data.append(i) for testType in types: self.assertEqual(testType(data).lower(), expected) def test_lstrip(self): for testType in types: self.assertEqual(testType(b' abc').lstrip(), b'abc') self.assertEqual(testType(b' abc ').lstrip(), b'abc ') self.assertEqual(testType(b' ').lstrip(), b'') x = b'abc' self.assertEqual(id(x.lstrip()), id(x)) x = bytearray(x) self.assertTrue(id(x.lstrip()) != id(x)) def test_partition(self): for testType in types: self.assertRaises(TypeError, testType(b'').partition, None) self.assertRaises(ValueError, testType(b'').partition, b'') self.assertRaises(ValueError, testType(b'').partition, b'') if testType == bytearray: self.assertEqual(testType(b'a\x01c').partition([1]), (b'a', b'\x01', b'c')) else: self.assertRaises(TypeError, testType(b'a\x01c').partition, [1]) self.assertEqual(testType(b'abc').partition(b'b'), (b'a', b'b', b'c')) self.assertEqual(testType(b'abc').partition(b'd'), (b'abc', b'', b'')) x = testType(b'abc') one, two, three = x.partition(b'd') if testType == bytearray: self.assertTrue(id(one) != id(x)) else: self.assertEqual(id(one), id(x)) one, two, three = b''.partition(b'abc') self.assertEqual(id(one), id(two)) self.assertEqual(id(two), id(three)) one, two, three = bytearray().partition(b'abc') self.assertTrue(id(one) != id(two)) self.assertTrue(id(two) != id(three)) self.assertTrue(id(three) != id(one)) def test_pop(self): b = bytearray() self.assertRaises(IndexError, b.pop) self.assertRaises(IndexError, b.pop, 0) b = bytearray(b'abc') self.assertEqual(b.pop(), ord('c')) self.assertEqual(b, b'ab') b = bytearray(b'abc') b.pop(1) self.assertEqual(b, b'ac') b = bytearray(b'abc') b.pop(-1) self.assertEqual(b, b'ab') def test_replace(self): for testType in types: self.assertRaises(TypeError, testType(b'abc').replace, None, b'abc') self.assertRaises(TypeError, testType(b'abc').replace, b'abc', None) self.assertRaises(TypeError, testType(b'abc').replace, None, b'abc', 1) self.assertRaises(TypeError, 
testType(b'abc').replace, b'abc', None, 1)
            self.assertRaises(TypeError, testType(b'abc').replace, [1], b'abc')
            self.assertRaises(TypeError, testType(b'abc').replace, b'abc', [1])
            self.assertRaises(TypeError, testType(b'abc').replace, [1], b'abc', 1)
            self.assertRaises(TypeError, testType(b'abc').replace, b'abc', [1], 1)

            self.assertEqual(testType(b'abc').replace(b'b', b'foo'), b'afooc')
            self.assertEqual(testType(b'abc').replace(b'b', b''), b'ac')
            self.assertEqual(testType(b'abcb').replace(b'b', b'foo', 1), b'afoocb')
            self.assertEqual(testType(b'abcb').replace(b'b', b'foo', 2), b'afoocfoo')
            self.assertEqual(testType(b'abcb').replace(b'b', b'foo', 3), b'afoocfoo')
            self.assertEqual(testType(b'abcb').replace(b'b', b'foo', -1), b'afoocfoo')
            self.assertEqual(testType(b'abcb').replace(b'', b'foo', 100), b'fooafoobfoocfoobfoo')
            self.assertEqual(testType(b'abcb').replace(b'', b'foo', 0), b'abcb')
            self.assertEqual(testType(b'abcb').replace(b'', b'foo', 1), b'fooabcb')
            self.assertEqual(testType(b'ooooooo').replace(b'o', b'u'), b'uuuuuuu')

        x = b'abc'
        self.assertEqual(id(x.replace(b'foo', b'bar', 0)), id(x))

        if is_cli: # CPython bug in 2.6 - http://bugs.python.org/issue4348
            x = bytearray(b'abc')
            self.assertTrue(id(x.replace(b'foo', b'bar', 0)) != id(x))

    def test_remove(self):
        for toremove in (ord('a'), b'a', Indexable(ord('a')), IndexableOC(ord('a'))):
            b = bytearray(b'abc')
            b.remove(toremove)
            self.assertEqual(b, b'bc')
            self.assertRaises(ValueError, b.remove, ord('x'))

        b = bytearray(b'abc')
        self.assertRaises(TypeError, b.remove, bytearray(b'a'))

    def test_reverse(self):
        b = bytearray(b'abc')
        b.reverse()
        self.assertEqual(b, b'cba')

    # CoreCLR bug xxxx found in build 30324 from silverlight_w2
    def test_rfind(self):
        for testType in types:
            self.assertEqual(testType(b"abcdbcda").rfind(b"cd", 1), 5)
            self.assertEqual(testType(b"abcdbcda").rfind(b"cd", 3), 5)
            self.assertEqual(testType(b"abcdbcda").rfind(b"cd", 7), -1)
            self.assertEqual(testType(b"abcdbcda").rfind(b"cd", -1, -2), -1)
            self.assertEqual(testType(b"abc").rfind(b"add", 3, 0), -1)
            self.assertEqual(testType(b'abc').rfind(b'bd'), -1)
            self.assertRaises(TypeError, testType(b'abc').rfind, [1])
            self.assertRaises(TypeError, testType(b'abc').rfind, [1], 1)
            self.assertRaises(TypeError, testType(b'abc').rfind, [1], 1, 2)

            if testType == bytes:
                self.assertEqual(testType(b"abc").rfind(b"add", None, 0), -1)
                self.assertEqual(testType(b"abc").rfind(b"add", 3, None), -1)
                self.assertEqual(testType(b"abc").rfind(b"add", None, None), -1)

            self.assertEqual(testType(b'abc').rfind(b'', 0, 0), 0)
            self.assertEqual(testType(b'abc').rfind(b'', 0, 1), 1)
            self.assertEqual(testType(b'abc').rfind(b'', 0, 2), 2)
            self.assertEqual(testType(b'abc').rfind(b'', 0, 3), 3)
            self.assertEqual(testType(b'abc').rfind(b'', 0, 4), 3)
            self.assertEqual(testType(b'x').rfind(b'x', 0, 0), -1)
            self.assertEqual(testType(b'x').rfind(b'x', 3, 0), -1)
            self.assertEqual(testType(b'x').rfind(b'', 3, 0), -1)

    def test_rindex(self):
        for testType in types:
            self.assertRaises(TypeError, testType(b'abc').rindex, 257)
            self.assertEqual(testType(b'abc').rindex(b'a'), 0)
            self.assertEqual(testType(b'abc').rindex(b'a', 0, -1), 0)
            self.assertRaises(TypeError, testType(b'abc').rindex, [1])
            self.assertRaises(TypeError, testType(b'abc').rindex, [1], 1)
            self.assertRaises(TypeError, testType(b'abc').rindex, [1], 1, 2)
            self.assertRaises(ValueError, testType(b'abc').rindex, b'c', 0, -1)
            self.assertRaises(ValueError, testType(b'abc').rindex, b'a', -1)

    def test_rjust(self):
        for testType in types:
            self.assertRaises(TypeError, 
testType(b'').rjust, 42, ' ') self.assertRaises(TypeError, testType(b'').rjust, 42, b' ') self.assertRaises(TypeError, testType(b'').rjust, 42, u'\u0100') self.assertRaises(TypeError, testType(b'').rjust, 42, [2]) self.assertEqual(testType(b'abc').rjust(4), b' abc') self.assertEqual(testType(b'abc').rjust(4, b'x'), b'xabc') self.assertEqual(testType(b'abc').rjust(4, 'x'), b'xabc') x = b'abc' self.assertEqual(id(x.rjust(2)), id(x)) x = bytearray(x) self.assertTrue(id(x.rjust(2)) != id(x)) def test_rpartition(self): for testType in types: self.assertRaises(TypeError, testType(b'').rpartition, None) self.assertRaises(ValueError, testType(b'').rpartition, b'') if testType == bytearray: self.assertEqual(testType(b'a\x01c').rpartition([1]), (b'a', b'\x01', b'c')) else: self.assertRaises(TypeError, testType(b'a\x01c').rpartition, [1]) self.assertEqual(testType(b'abc').rpartition(b'b'), (b'a', b'b', b'c')) self.assertEqual(testType(b'abc').rpartition(b'd'), (b'', b'', b'abc')) x = testType(b'abc') one, two, three = x.rpartition(b'd') if testType == bytearray: self.assertTrue(id(three) != id(x)) else: self.assertEqual(id(three), id(x)) b = testType(b'mississippi') self.assertEqual(b.rpartition(b'i'), (b'mississipp', b'i', b'')) self.assertEqual(type(b.rpartition(b'i')[0]), testType) self.assertEqual(type(b.rpartition(b'i')[1]), testType) self.assertEqual(type(b.rpartition(b'i')[2]), testType) b = testType(b'abcdefgh') self.assertEqual(b.rpartition(b'a'), (b'', b'a', b'bcdefgh')) one, two, three = b''.rpartition(b'abc') self.assertEqual(id(one), id(two)) self.assertEqual(id(two), id(three)) one, two, three = bytearray().rpartition(b'abc') self.assertTrue(id(one) != id(two)) self.assertTrue(id(two) != id(three)) self.assertTrue(id(three) != id(one)) def test_rsplit(self): for testType in types: x=testType(b"Hello Worllds") self.assertEqual(x.rsplit(), [b'Hello', b'Worllds']) s = x.rsplit(b"ll") self.assertTrue(s[0] == b"He") self.assertTrue(s[1] == b"o Wor") self.assertTrue(s[2] == b"ds") self.assertTrue(testType(b"1--2--3--4--5--6--7--8--9--0").rsplit(b"--", 2) == [b'1--2--3--4--5--6--7--8', b'9', b'0']) for temp_string in [b"", b" ", b" ", b"\t", b" \t", b"\t ", b"\t\t", b"\n", b"\n\n", b"\n \n"]: self.assertEqual(temp_string.rsplit(None), []) self.assertEqual(testType(b"ab").rsplit(None), [b"ab"]) self.assertEqual(testType(b"a b").rsplit(None), [b"a", b"b"]) self.assertRaises(TypeError, testType(b'').rsplit, [2]) self.assertRaises(TypeError, testType(b'').rsplit, [2], 2) def test_rstrip(self): for testType in types: self.assertEqual(testType(b'abc ').rstrip(), b'abc') self.assertEqual(testType(b' abc ').rstrip(), b' abc') self.assertEqual(testType(b' ').rstrip(), b'') self.assertEqual(testType(b'abcx').rstrip(b'x'), b'abc') self.assertEqual(testType(b'xabc').rstrip(b'x'), b'xabc') self.assertEqual(testType(b'x').rstrip(b'x'), b'') self.assertRaises(TypeError, testType(b'').rstrip, [2]) x = b'abc' self.assertEqual(id(x.rstrip()), id(x)) x = bytearray(x) self.assertTrue(id(x.rstrip()) != id(x)) def test_split(self): for testType in types: x=testType(b"Hello Worllds") self.assertRaises(ValueError, x.split, b'') self.assertEqual(x.split(None, 0), [b'Hello Worllds']) self.assertEqual(x.split(None, -1), [b'Hello', b'Worllds']) self.assertEqual(x.split(None, 2), [b'Hello', b'Worllds']) self.assertEqual(x.split(), [b'Hello', b'Worllds']) self.assertEqual(testType(b'abc').split(b'c'), [b'ab', b'']) self.assertEqual(testType(b'abcd').split(b'c'), [b'ab', b'd']) 
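            # Illustrative sketch (editor's addition): separators at either end
            # produce leading/trailing empty fields.
            self.assertEqual(testType(b',a,').split(b','), [b'', b'a', b''])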
self.assertEqual(testType(b'abccdef').split(b'c'), [b'ab', b'', b'def']) s = x.split(b"ll") self.assertTrue(s[0] == b"He") self.assertTrue(s[1] == b"o Wor") self.assertTrue(s[2] == b"ds") self.assertTrue(testType(b"1,2,3,4,5,6,7,8,9,0").split(b",") == [b'1',b'2',b'3',b'4',b'5',b'6',b'7',b'8',b'9',b'0']) self.assertTrue(testType(b"1,2,3,4,5,6,7,8,9,0").split(b",", -1) == [b'1',b'2',b'3',b'4',b'5',b'6',b'7',b'8',b'9',b'0']) self.assertTrue(testType(b"1,2,3,4,5,6,7,8,9,0").split(b",", 2) == [b'1',b'2',b'3,4,5,6,7,8,9,0']) self.assertTrue(testType(b"1--2--3--4--5--6--7--8--9--0").split(b"--") == [b'1',b'2',b'3',b'4',b'5',b'6',b'7',b'8',b'9',b'0']) self.assertTrue(testType(b"1--2--3--4--5--6--7--8--9--0").split(b"--", -1) == [b'1',b'2',b'3',b'4',b'5',b'6',b'7',b'8',b'9',b'0']) self.assertTrue(testType(b"1--2--3--4--5--6--7--8--9--0").split(b"--", 2) == [b'1', b'2', b'3--4--5--6--7--8--9--0']) self.assertEqual(testType(b"").split(None), []) self.assertEqual(testType(b"ab").split(None), [b"ab"]) self.assertEqual(testType(b"a b").split(None), [b"a", b"b"]) self.assertEqual(bytearray(b' a bb c ').split(None, 1), [bytearray(b'a'), bytearray(b'bb c ')]) self.assertEqual(testType(b' ').split(), []) self.assertRaises(TypeError, testType(b'').split, [2]) self.assertRaises(TypeError, testType(b'').split, [2], 2) def test_splitlines(self): for testType in types: self.assertEqual(testType(b'foo\nbar\n').splitlines(), [b'foo', b'bar']) self.assertEqual(testType(b'foo\nbar\n').splitlines(True), [b'foo\n', b'bar\n']) self.assertEqual(testType(b'foo\r\nbar\r\n').splitlines(True), [b'foo\r\n', b'bar\r\n']) self.assertEqual(testType(b'foo\r\nbar\r\n').splitlines(), [b'foo', b'bar']) self.assertEqual(testType(b'foo\rbar\r').splitlines(True), [b'foo\r', b'bar\r']) self.assertEqual(testType(b'foo\nbar\nbaz').splitlines(), [b'foo', b'bar', b'baz']) self.assertEqual(testType(b'foo\nbar\nbaz').splitlines(True), [b'foo\n', b'bar\n', b'baz']) self.assertEqual(testType(b'foo\r\nbar\r\nbaz').splitlines(True), [b'foo\r\n', b'bar\r\n', b'baz']) self.assertEqual(testType(b'foo\rbar\rbaz').splitlines(True), [b'foo\r', b'bar\r', b'baz']) def test_startswith(self): for testType in types: self.assertRaises(TypeError, testType(b'abcdef').startswith, []) self.assertRaises(TypeError, testType(b'abcdef').startswith, [], 0) self.assertRaises(TypeError, testType(b'abcdef').startswith, [], 0, 1) self.assertEqual(testType(b"abcde").startswith(b'c', 2, 6), True) self.assertEqual(testType(b"abc").startswith(b'c', 4, 6), False) self.assertEqual(testType(b"abcde").startswith(b'cde', 2, 9), True) self.assertEqual(testType(b'abc').startswith(b'abcd', 4), False) self.assertEqual(testType(b'abc').startswith(b'abc', -3), True) self.assertEqual(testType(b'abc').startswith(b'abc', -10), True) self.assertEqual(testType(b'abc').startswith(b'abc', -3, 0), False) self.assertEqual(testType(b'abc').startswith(b'abc', -10, 0), False) self.assertEqual(testType(b'abc').startswith(b'abc', -10, -10), False) self.assertEqual(testType(b'abc').startswith(b'ab', 0, -1), True) self.assertEqual(testType(b'abc').startswith((b'abc', ), -10), True) self.assertEqual(testType(b'abc').startswith((b'abc', ), 10), False) self.assertEqual(testType(b'abc').startswith((b'abc', ), -10, 0), False) self.assertEqual(testType(b'abc').startswith((b'abc', ), 10, 0), False) self.assertEqual(testType(b'abc').startswith((b'abc', ), 1, -10), False) self.assertEqual(testType(b'abc').startswith((b'abc', ), 1, -1), False) self.assertEqual(testType(b'abc').startswith((b'abc', ), -1, -2), 
False) self.assertEqual(testType(b'abc').startswith((b'abc', b'def')), True) self.assertEqual(testType(b'abc').startswith((b'qrt', b'def')), False) self.assertEqual(testType(b'abc').startswith((b'abc', b'def'), -3), True) self.assertEqual(testType(b'abc').startswith((b'qrt', b'def'), -3), False) self.assertEqual(testType(b'abc').startswith((b'abc', b'def'), 0), True) self.assertEqual(testType(b'abc').startswith((b'qrt', b'def'), 0), False) self.assertEqual(testType(b'abc').startswith((b'abc', b'def'), -3, 3), True) self.assertEqual(testType(b'abc').startswith((b'qrt', b'def'), -3, 3), False) self.assertEqual(testType(b'abc').startswith((b'abc', b'def'), 0, 3), True) self.assertEqual(testType(b'abc').startswith((b'qrt', b'def'), 0, 3), False) hw = testType(b"hello world") self.assertTrue(hw.startswith(b"hello")) self.assertTrue(not hw.startswith(b"heloo")) self.assertTrue(hw.startswith(b"llo", 2)) self.assertTrue(not hw.startswith(b"lno", 2)) self.assertTrue(hw.startswith(b"wor", 6, 9)) self.assertTrue(not hw.startswith(b"wor", 6, 7)) self.assertTrue(not hw.startswith(b"wox", 6, 10)) self.assertTrue(not hw.startswith(b"wor", 6, 2)) def test_strip(self): for testType in types: self.assertEqual(testType(b'abc ').strip(), b'abc') self.assertEqual(testType(b' abc').strip(), b'abc') self.assertEqual(testType(b' abc ').strip(), b'abc') self.assertEqual(testType(b' ').strip(), b'') self.assertEqual(testType(b'abcx').strip(b'x'), b'abc') self.assertEqual(testType(b'xabc').strip(b'x'), b'abc') self.assertEqual(testType(b'xabcx').strip(b'x'), b'abc') self.assertEqual(testType(b'x').strip(b'x'), b'') x = b'abc' self.assertEqual(id(x.strip()), id(x)) x = bytearray(x) self.assertTrue(id(x.strip()) != id(x)) def test_swapcase(self): expected = b'\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f' \ b'\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !"#$%' \ b'&\'()*+,-./0123456789:;<=>?@abcdefghijklmnopqrstuvwxyz[\\]^_`' \ b'ABCDEFGHIJKLMNOPQRSTUVWXYZ{|}~\x7f\x80\x81\x82\x83\x84\x85\x86\x87\x88' \ b'\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90\x91\x92\x93\x94\x95\x96\x97\x98\x99' \ b'\x9a\x9b\x9c\x9d\x9e\x9f\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7\xa8\xa9\xaa' \ b'\xab\xac\xad\xae\xaf\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xba\xbb' \ b'\xbc\xbd\xbe\xbf\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7\xc8\xc9\xca\xcb\xcc' \ b'\xcd\xce\xcf\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7\xd8\xd9\xda\xdb\xdc\xdd' \ b'\xde\xdf\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7\xe8\xe9\xea\xeb\xec\xed\xee' \ b'\xef\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff' data = bytearray() for i in xrange(256): data.append(i) for testType in types: self.assertEqual(testType(b'123').swapcase(), b'123') b = testType(b'123') self.assertTrue(id(b.swapcase()) != id(b)) self.assertEqual(testType(b'abc').swapcase(), b'ABC') self.assertEqual(testType(b'ABC').swapcase(), b'abc') self.assertEqual(testType(b'ABc').swapcase(), b'abC') x = testType(data).swapcase() self.assertEqual(testType(data).swapcase(), expected) def test_title(self): for testType in types: self.assertEqual(testType(b'').title(), b'') self.assertEqual(testType(b'foo').title(), b'Foo') self.assertEqual(testType(b'Foo').title(), b'Foo') self.assertEqual(testType(b'foo bar baz').title(), b'Foo Bar Baz') for i in xrange(256): b = bytearray() b.append(i) if (b >= b'a' and b <= b'z') or (b >= b'A' and b <= 'Z'): continue inp = testType(b.join([b'foo', b'bar', b'baz'])) exp = b.join([b'Foo', b'Bar', b'Baz']) self.assertEqual(inp.title(), exp) x = b'' self.assertEqual(id(x.title()), id(x)) x = 
bytearray(b'') self.assertTrue(id(x.title()) != id(x)) def test_translate(self): identTable = bytearray() for i in xrange(256): identTable.append(i) repAtable = bytearray(identTable) repAtable[ord('A')] = ord('B') for testType in types: self.assertRaises(TypeError, testType(b'').translate, {}) self.assertRaises(ValueError, testType(b'foo').translate, b'') self.assertRaises(ValueError, testType(b'').translate, b'') self.assertEqual(testType(b'AAA').translate(repAtable), b'BBB') self.assertEqual(testType(b'AAA').translate(repAtable, b'A'), b'') self.assertRaises(TypeError, b''.translate, identTable, None) self.assertEqual(b'AAA'.translate(None, b'A'), b'') self.assertEqual(b'AAABBB'.translate(None, b'A'), b'BBB') self.assertEqual(b'AAA'.translate(None), b'AAA') self.assertEqual(bytearray(b'AAA').translate(None, b'A'), b'') self.assertEqual(bytearray(b'AAA').translate(None), b'AAA') b = b'abc' self.assertEqual(id(b.translate(None)), id(b)) b = b'' self.assertEqual(id(b.translate(identTable)), id(b)) b = b'' self.assertEqual(id(b.translate(identTable, b'')), id(b)) b = b'' self.assertEqual(id(b.translate(identTable, b'')), id(b)) if is_cli: # CPython bug 4348 - http://bugs.python.org/issue4348 b = bytearray(b'') self.assertTrue(id(b.translate(identTable)) != id(b)) self.assertRaises(TypeError, testType(b'').translate, []) self.assertRaises(TypeError, testType(b'').translate, [], []) def test_upper(self): expected = b'\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f' \ b'\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !"#$%' \ b'&\'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`' \ b'ABCDEFGHIJKLMNOPQRSTUVWXYZ{|}~\x7f\x80\x81\x82\x83\x84\x85\x86\x87\x88' \ b'\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90\x91\x92\x93\x94\x95\x96\x97\x98\x99' \ b'\x9a\x9b\x9c\x9d\x9e\x9f\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7\xa8\xa9\xaa' \ b'\xab\xac\xad\xae\xaf\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xba\xbb' \ b'\xbc\xbd\xbe\xbf\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7\xc8\xc9\xca\xcb\xcc' \ b'\xcd\xce\xcf\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7\xd8\xd9\xda\xdb\xdc\xdd' \ b'\xde\xdf\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7\xe8\xe9\xea\xeb\xec\xed\xee' \ b'\xef\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff' data = bytearray() for i in xrange(256): data.append(i) for testType in types: self.assertEqual(testType(data).upper(), expected) def test_zfill(self): for testType in types: self.assertEqual(testType(b'abc').zfill(0), b'abc') self.assertEqual(testType(b'abc').zfill(4), b'0abc') self.assertEqual(testType(b'+abc').zfill(5), b'+0abc') self.assertEqual(testType(b'-abc').zfill(5), b'-0abc') self.assertEqual(testType(b'').zfill(2), b'00') self.assertEqual(testType(b'+').zfill(2), b'+0') self.assertEqual(testType(b'-').zfill(2), b'-0') b = b'abc' self.assertEqual(id(b.zfill(0)), id(b)) b = bytearray(b) self.assertTrue(id(b.zfill(0)) != id(b)) def test_none(self): for testType in types: self.assertRaises(TypeError, testType(b'abc').replace, b"new") self.assertRaises(TypeError, testType(b'abc').replace, b"new", 2) self.assertRaises(TypeError, testType(b'abc').center, 0, None) if str != bytes: self.assertRaises(TypeError, testType(b'abc').fromhex, None) self.assertRaises(TypeError, testType(b'abc').decode, 'ascii', None) for fn in ['find', 'index', 'rfind', 'count', 'startswith', 'endswith']: f = getattr(testType(b'abc'), fn) self.assertRaises(TypeError, f, None) self.assertRaises(TypeError, f, None, 0) self.assertRaises(TypeError, f, None, 0, 2) self.assertRaises(TypeError, testType(b'abc').replace, None, 
b'ef') self.assertRaises(TypeError, testType(b'abc').replace, None, b'ef', 1) self.assertRaises(TypeError, testType(b'abc').replace, b'abc', None) self.assertRaises(TypeError, testType(b'abc').replace, b'abc', None, 1) def test_add_mul(self): for testType in types: self.assertRaises(TypeError, lambda: testType(b"a") + 3) self.assertRaises(TypeError, lambda: 3 + testType(b"a")) self.assertRaises(TypeError, lambda: "a" * "3") self.assertRaises(OverflowError, lambda: "a" * (sys.maxint + 1)) self.assertRaises(OverflowError, lambda: (sys.maxint + 1) * "a") class mylong(long): pass # multiply self.assertEqual("aaaa", "a" * 4L) self.assertEqual("aaaa", "a" * mylong(4L)) self.assertEqual("aaa", "a" * 3) self.assertEqual("a", "a" * True) self.assertEqual("", "a" * False) self.assertEqual("aaaa", 4L * "a") self.assertEqual("aaaa", mylong(4L) * "a") self.assertEqual("aaa", 3 * "a") self.assertEqual("a", True * "a") self.assertEqual("", False * "a" ) # zero-length string def test_empty_bytes(self): for testType in types: self.assertEqual(testType(b'').title(), b'') self.assertEqual(testType(b'').capitalize(), b'') self.assertEqual(testType(b'').count(b'a'), 0) table = testType(b'10') * 128 self.assertEqual(testType(b'').translate(table), b'') self.assertEqual(testType(b'').replace(b'a', b'ef'), b'') self.assertEqual(testType(b'').replace(b'bc', b'ef'), b'') self.assertEqual(testType(b'').split(), []) self.assertEqual(testType(b'').split(b' '), [b'']) self.assertEqual(testType(b'').split(b'a'), [b'']) def test_encode_decode(self): for testType in types: self.assertEqual(testType(b'abc').decode(), u'abc') def test_encode_decode_error(self): for testType in types: self.assertRaises(TypeError, testType(b'abc').decode, None) def test_bytes_subclass(self): for testType in types: class customstring(testType): def __str__(self): return 'xyz' def __repr__(self): return 'foo' def __hash__(self): return 42 def __mul__(self, count): return b'multiplied' def __add__(self, other): return 23 def __len__(self): return 2300 def __contains__(self, value): return False o = customstring(b'abc') self.assertEqual(str(o), "xyz") self.assertEqual(repr(o), "foo") self.assertEqual(hash(o), 42) self.assertEqual(o * 3, b'multiplied') self.assertEqual(o + b'abc', 23) self.assertEqual(len(o), 2300) self.assertEqual(b'a' in o, False) class custombytearray(bytearray): def __init__(self, value): bytearray.__init__(self) self.assertEqual(custombytearray(42), bytearray()) class custombytearray(bytearray): def __init__(self, value, **args): bytearray.__init__(self) self.assertEqual(custombytearray(42, x=42), bytearray()) def test_bytes_equals(self): for testType in types: x = testType(b'abc') == testType(b'abc') y = testType(b'def') == testType(b'def') self.assertEqual(id(x), id(y)) self.assertEqual(id(x), id(True)) x = testType(b'abc') != testType(b'abc') y = testType(b'def') != testType(b'def') self.assertEqual(id(x), id(y)) self.assertEqual(id(x), id(False)) x = testType(b'abcx') == testType(b'abc') y = testType(b'defx') == testType(b'def') self.assertEqual(id(x), id(y)) self.assertEqual(id(x), id(False)) x = testType(b'abcx') != testType(b'abc') y = testType(b'defx') != testType(b'def') self.assertEqual(id(x), id(y)) self.assertEqual(id(x), id(True)) def test_bytes_dict(self): self.assertTrue('__init__' not in bytes.__dict__.keys()) self.assertTrue('__init__' in bytearray.__dict__.keys()) for testType in types: extra_str_dict_keys = [ "__cmp__", "isdecimal", "isnumeric", "isunicode"] # "__radd__", #It's OK that __getattribute__ does 
not show up in the __dict__. It is #implemented. self.assertTrue(hasattr(testType, "__getattribute__"), str(testType) + " has no __getattribute__ method") for temp_key in extra_str_dict_keys: self.assertTrue(not temp_key in testType.__dict__.keys()) def test_bytes_to_numeric(self): for testType in types: class substring(testType): def __int__(self): return 1 def __complex__(self): return 1j def __float__(self): return 1.0 def __long__(self): return 1L class myfloat(float): pass class mylong(long): pass class myint(int): pass class mycomplex(complex): pass v = substring(b"123") self.assertEqual(float(v), 1.0) self.assertEqual(myfloat(v), 1.0) self.assertEqual(type(myfloat(v)), myfloat) self.assertEqual(long(v), 1L) self.assertEqual(mylong(v), 1L) self.assertEqual(type(mylong(v)), mylong) self.assertEqual(int(v), 1) self.assertEqual(myint(v), 1) self.assertEqual(type(myint(v)), myint) # str in 2.6 still supports this, but not in 3.0, we have the 3.0 behavior. if not is_cli and testType == bytes: self.assertEqual(complex(v), 123 + 0j) self.assertEqual(mycomplex(v), 123 + 0j) else: self.assertEqual(complex(v), 1j) self.assertEqual(mycomplex(v), 1j) class substring(testType): pass v = substring(b"123") self.assertEqual(long(v), 123L) self.assertEqual(int(v), 123) self.assertEqual(float(v), 123.0) self.assertEqual(mylong(v), 123L) self.assertEqual(type(mylong(v)), mylong) self.assertEqual(myint(v), 123) self.assertEqual(type(myint(v)), myint) if testType == str: # 2.6 allows this, 3.0 disallows this. self.assertEqual(complex(v), 123+0j) self.assertEqual(mycomplex(v), 123+0j) else: self.assertRaises(TypeError, complex, v) self.assertRaises(TypeError, mycomplex, v) def test_compares(self): a = b'A' b = b'B' bb = b'BB' aa = b'AA' ab = b'AB' ba = b'BA' for testType in types: for otherType in types: self.assertEqual(testType(a) > otherType(b), False) self.assertEqual(testType(a) < otherType(b), True) self.assertEqual(testType(a) <= otherType(b), True) self.assertEqual(testType(a) >= otherType(b), False) self.assertEqual(testType(a) == otherType(b), False) self.assertEqual(testType(a) != otherType(b), True) self.assertEqual(testType(b) > otherType(a), True) self.assertEqual(testType(b) < otherType(a), False) self.assertEqual(testType(b) <= otherType(a), False) self.assertEqual(testType(b) >= otherType(a), True) self.assertEqual(testType(b) == otherType(a), False) self.assertEqual(testType(b) != otherType(a), True) self.assertEqual(testType(a) > otherType(a), False) self.assertEqual(testType(a) < otherType(a), False) self.assertEqual(testType(a) <= otherType(a), True) self.assertEqual(testType(a) >= otherType(a), True) self.assertEqual(testType(a) == otherType(a), True) self.assertEqual(testType(a) != otherType(a), False) self.assertEqual(testType(aa) > otherType(b), False) self.assertEqual(testType(aa) < otherType(b), True) self.assertEqual(testType(aa) <= otherType(b), True) self.assertEqual(testType(aa) >= otherType(b), False) self.assertEqual(testType(aa) == otherType(b), False) self.assertEqual(testType(aa) != otherType(b), True) self.assertEqual(testType(bb) > otherType(a), True) self.assertEqual(testType(bb) < otherType(a), False) self.assertEqual(testType(bb) <= otherType(a), False) self.assertEqual(testType(bb) >= otherType(a), True) self.assertEqual(testType(bb) == otherType(a), False) self.assertEqual(testType(bb) != otherType(a), True) self.assertEqual(testType(ba) > otherType(b), True) self.assertEqual(testType(ba) < otherType(b), False) self.assertEqual(testType(ba) <= otherType(b), 
False) self.assertEqual(testType(ba) >= otherType(b), True) self.assertEqual(testType(ba) == otherType(b), False) self.assertEqual(testType(ba) != otherType(b), True) self.assertEqual(testType(ab) > otherType(a), True) self.assertEqual(testType(ab) < otherType(a), False) self.assertEqual(testType(ab) <= otherType(a), False) self.assertEqual(testType(ab) >= otherType(a), True) self.assertEqual(testType(ab) == otherType(a), False) self.assertEqual(testType(ab) != otherType(a), True) self.assertEqual(testType(ab) == [], False) self.assertEqual(testType(a) > None, True) self.assertEqual(testType(a) < None, False) self.assertEqual(testType(a) <= None, False) self.assertEqual(testType(a) >= None, True) self.assertEqual(None > testType(a), False) self.assertEqual(None < testType(a), True) self.assertEqual(None <= testType(a), True) self.assertEqual(None >= testType(a), False) def test_bytearray(self): self.assertRaises(TypeError, hash, bytearray(b'abc')) self.assertRaises(TypeError, bytearray(b'').__setitem__, None, b'abc') self.assertRaises(TypeError, bytearray(b'').__delitem__, None) x = bytearray(b'abc') del x[-1] self.assertEqual(x, b'ab') def f(): x = bytearray(b'abc') x[0:2] = [1j] self.assertRaises(TypeError, f) x = bytearray(b'abc') x[0:1] = [ord('d')] self.assertEqual(x, b'dbc') x = bytearray(b'abc') x[0:3] = x self.assertEqual(x, b'abc') x = bytearray(b'abc') del x[0] self.assertEqual(x, b'bc') x = bytearray(b'abc') x += b'foo' self.assertEqual(x, b'abcfoo') b = bytearray(b"abc") b1 = b b += b"def" self.assertEqual(b1, b) x = bytearray(b'abc') x += bytearray(b'foo') self.assertEqual(x, b'abcfoo') x = bytearray(b'abc') x *= 2 self.assertEqual(x, b'abcabc') x = bytearray(b'abcdefghijklmnopqrstuvwxyz') x[25:1] = b'x' * 24 self.assertEqual(x, b'abcdefghijklmnopqrstuvwxyxxxxxxxxxxxxxxxxxxxxxxxxz') x = bytearray(b'abcdefghijklmnopqrstuvwxyz') x[25:0] = b'x' * 25 self.assertEqual(x, b'abcdefghijklmnopqrstuvwxyxxxxxxxxxxxxxxxxxxxxxxxxxz') tests = ( ((0, 3, None), b'abc', b''), ((0, 2, None), b'abc', b'c'), ((4, 0, 2), b'abc', b'abc'), ((3, 0, 2), b'abc', b'abc'), ((3, 0, -2), b'abc', b'ab'), ((0, 3, 1), b'abc', b''), ((0, 2, 1), b'abc', b'c'), ((0, 3, 2), b'abc', b'b'), ((0, 2, 2), b'abc', b'bc'), ((0, 3, -1), b'abc', b'abc'), ((0, 2, -1), b'abc', b'abc'), ((3, 0, -1), b'abc', b'a'), ((2, 0, -1), b'abc', b'a'), ((4, 2, -1), b'abcdef', b'abcf'), ) for indexes, input, result in tests: x = bytearray(input) if indexes[2] == None: del x[indexes[0] : indexes[1]] self.assertEqual(x, result) else: del x[indexes[0] : indexes[1] : indexes[2]] self.assertEqual(x, result) class myint(int): pass class intobj(object): def __int__(self): return 42 x = bytearray(b'abe') x[-1] = ord('a') self.assertEqual(x, b'aba') x[-1] = IndexableOC(ord('r')) self.assertEqual(x, b'abr') x[-1] = Indexable(ord('s')) self.assertEqual(x, b'abs') def f(): x[-1] = IndexableOC(256) self.assertRaises(ValueError, f) def f(): x[-1] = Indexable(256) self.assertRaises(ValueError, f) x[-1] = b'b' self.assertEqual(x, b'abb') x[-1] = myint(ord('c')) self.assertEqual(x, b'abc') x[0:1] = 2 self.assertEqual(x, b'\x00\x00bc') x = bytearray(b'abc') x[0:1] = 2L self.assertEqual(x, b'\x00\x00bc') x[0:2] = b'a' self.assertEqual(x, b'abc') x[0:1] = b'd' self.assertEqual(x, b'dbc') x[0:1] = myint(3) self.assertEqual(x, b'\x00\x00\x00bc') x[0:3] = [ord('a'), ord('b'), ord('c')] self.assertEqual(x, b'abcbc') def f(): x[0:1] = intobj() self.assertRaises(TypeError, f) def f(): x[0:1] = sys.maxint # mono doesn't throw an OutOfMemoryException on Linux when 
the size is too large, # it does get a value error for trying to set capacity to a negative number if is_mono: self.assertRaises(ValueError, f) else: self.assertRaises(MemoryError, f) def f(): x[0:1] = sys.maxint+1 self.assertRaises(TypeError, f) for setval in [b'bar', bytearray(b'bar'), [b'b', b'a', b'r'], (b'b', b'a', b'r'), (98, b'a', b'r'), (Indexable(98), b'a', b'r'), (IndexableOC(98), b'a', b'r')]: x = bytearray(b'abc') x[0:3] = setval self.assertEqual(x, b'bar') x = bytearray(b'abc') x[1:4] = setval self.assertEqual(x, b'abar') x = bytearray(b'abc') x[0:2] = setval self.assertEqual(x, b'barc') x = bytearray(b'abc') x[4:0:2] = setval[-1:-1] self.assertEqual(x, b'abc') x = bytearray(b'abc') x[3:0:2] = setval[-1:-1] self.assertEqual(x, b'abc') x = bytearray(b'abc') x[3:0:-2] = setval[-1:-1] self.assertEqual(x, b'ab') x = bytearray(b'abc') x[3:0:-2] = setval[0:-2] self.assertEqual(x, b'abb') x = bytearray(b'abc') x[0:3:1] = setval self.assertEqual(x, b'bar') x = bytearray(b'abc') x[0:2:1] = setval self.assertEqual(x, b'barc') x = bytearray(b'abc') x[0:3:2] = setval[0:-1] self.assertEqual(x, b'bba') x = bytearray(b'abc') x[0:2:2] = setval[0:-2] self.assertEqual(x, b'bbc') x = bytearray(b'abc') x[0:3:-1] = setval[-1:-1] self.assertEqual(x, b'abc') x = bytearray(b'abc') x[0:2:-1] = setval[-1:-1] self.assertEqual(x, b'abc') x = bytearray(b'abc') x[3:0:-1] = setval[0:-1] self.assertEqual(x, b'aab') x = bytearray(b'abc') x[2:0:-1] = setval[0:-1] self.assertEqual(x, b'aab') x = bytearray(b'abcdef') def f():x[0:6:2] = b'a' self.assertRaises(ValueError, f) self.assertEqual(bytearray(source=b'abc'), bytearray(b'abc')) self.assertEqual(bytearray(source=2), bytearray(b'\x00\x00')) self.assertEqual(bytearray(b'abc').__alloc__(), 4) self.assertEqual(bytearray().__alloc__(), 0) def test_bytes(self): self.assertEqual(hash(b'abc'), hash(b'abc')) self.assertEqual(b'abc', B'abc') def test_operators(self): for testType in types: self.assertRaises(TypeError, lambda : testType(b'abc') * None) self.assertRaises(TypeError, lambda : testType(b'abc') + None) self.assertRaises(TypeError, lambda : None * testType(b'abc')) self.assertRaises(TypeError, lambda : None + testType(b'abc')) self.assertEqual(testType(b'abc') * 2, b'abcabc') if testType == bytearray: self.assertEqual(testType(b'abc')[0], ord('a')) self.assertEqual(testType(b'abc')[-1], ord('c')) else: self.assertEqual(testType(b'abc')[0], b'a') self.assertEqual(testType(b'abc')[-1], b'c') for otherType in types: self.assertEqual(testType(b'abc') + otherType(b'def'), b'abcdef') resType = type(testType(b'abc') + otherType(b'def')) if testType == bytearray or otherType == bytearray: self.assertEqual(resType, bytearray) else: self.assertEqual(resType, bytes) self.assertEqual(b'ab' in testType(b'abcd'), True) # 2.6 doesn't allow this for testType=bytes, so test for 3.0 in this case if testType is not bytes or hasattr(bytes, '__iter__'): self.assertEqual(ord(b'a') in testType(b'abcd'), True) self.assertRaises(ValueError, lambda : 256 in testType(b'abcd')) x = b'abc' self.assertEqual(x * 1, x) self.assertEqual(1 * x, x) self.assertEqual(id(x), id(x * 1)) self.assertEqual(id(x), id(1 * x)) x = bytearray(b'abc') self.assertEqual(x * 1, x) self.assertEqual(1 * x, x) self.assertTrue(id(x) != id(x * 1)) self.assertTrue(id(x) != id(1 * x)) def test_init(self): for testType in types: if testType != str: # skip on Cpy 2.6 for str type self.assertRaises(TypeError, testType, None, 'ascii') self.assertRaises(TypeError, testType, u'abc', None) self.assertRaises(TypeError, 
testType, [None]) self.assertEqual(testType(u'abc', 'ascii'), b'abc') self.assertEqual(testType(0), b'') self.assertEqual(testType(5), b'\x00\x00\x00\x00\x00') self.assertRaises(ValueError, testType, [256]) self.assertRaises(ValueError, testType, [257]) testType(range(256)) def f(): yield 42 self.assertEqual(bytearray(f()), b'*') def test_slicing(self): for testType in types: self.assertEqual(testType(b'abc')[0:3], b'abc') self.assertEqual(testType(b'abc')[0:2], b'ab') self.assertEqual(testType(b'abc')[3:0:2], b'') self.assertEqual(testType(b'abc')[3:0:2], b'') self.assertEqual(testType(b'abc')[3:0:-2], b'c') self.assertEqual(testType(b'abc')[3:0:-2], b'c') self.assertEqual(testType(b'abc')[0:3:1], b'abc') self.assertEqual(testType(b'abc')[0:2:1], b'ab') self.assertEqual(testType(b'abc')[0:3:2], b'ac') self.assertEqual(testType(b'abc')[0:2:2], b'a') self.assertEqual(testType(b'abc')[0:3:-1], b'') self.assertEqual(testType(b'abc')[0:2:-1], b'') self.assertEqual(testType(b'abc')[3:0:-1], b'cb') self.assertEqual(testType(b'abc')[2:0:-1], b'cb') self.assertRaises(TypeError, testType(b'abc').__getitem__, None) def test_ord(self): for testType in types: self.assertEqual(ord(testType(b'a')), 97) self.assertRaisesPartialMessage(TypeError, "expected a character, but string of length 2 found", ord, testType(b'aa')) def test_pickle(self): import cPickle for testType in types: self.assertEqual(cPickle.loads(cPickle.dumps(testType(range(256)))), testType(range(256))) @unittest.skipUnless(is_cli, 'IronPython specific test') def test_zzz_cli_features(self): import System import clr clr.AddReference('Microsoft.Dynamic') import Microsoft for testType in types: self.assertEqual(testType(b'abc').Count, 3) self.assertEqual(bytearray(b'abc').Contains(ord('a')), True) self.assertEqual(list(System.Collections.IEnumerable.GetEnumerator(bytearray(b'abc'))), [ord('a'), ord('b'), ord('c')]) self.assertEqual(testType(b'abc').IndexOf(ord('a')), 0) self.assertEqual(testType(b'abc').IndexOf(ord('d')), -1) myList = System.Collections.Generic.List[System.Byte]() myList.Add(ord('a')) myList.Add(ord('b')) myList.Add(ord('c')) self.assertEqual(testType(b'').join([myList]), b'abc') # bytearray ''' self.assertEqual(bytearray(b'abc') == 'abc', False) if not is_net40: self.assertEqual(Microsoft.Scripting.IValueEquality.ValueEquals(bytearray(b'abc'), 'abc'), False) ''' self.assertEqual(bytearray(b'abc') == 'abc', True) self.assertEqual(b'abc'.IsReadOnly, True) self.assertEqual(bytearray(b'abc').IsReadOnly, False) self.assertEqual(bytearray(b'abc').Remove(ord('a')), True) self.assertEqual(bytearray(b'abc').Remove(ord('d')), False) x = bytearray(b'abc') x.Clear() self.assertEqual(x, b'') x.Add(ord('a')) self.assertEqual(x, b'a') self.assertEqual(x.IndexOf(ord('a')), 0) self.assertEqual(x.IndexOf(ord('b')), -1) x.Insert(0, ord('b')) self.assertEqual(x, b'ba') x.RemoveAt(0) self.assertEqual(x, b'a') System.Collections.Generic.IList[System.Byte].__setitem__(x, 0, ord('b')) self.assertEqual(x, b'b') # bytes self.assertRaises(System.InvalidOperationException, b'abc'.Remove, ord('a')) self.assertRaises(System.InvalidOperationException, b'abc'.Remove, ord('d')) self.assertRaises(System.InvalidOperationException, b'abc'.Clear) self.assertRaises(System.InvalidOperationException, b'abc'.Add, ord('a')) self.assertRaises(System.InvalidOperationException, b'abc'.Insert, 0, ord('b')) self.assertRaises(System.InvalidOperationException, b'abc'.RemoveAt, 0) self.assertRaises(System.InvalidOperationException, 
System.Collections.Generic.IList[System.Byte].__setitem__, b'abc', 0, ord('b')) lst = System.Collections.Generic.List[System.Byte]() lst.Add(42) self.assertEqual(ord(lst), 42) lst.Add(42) self.assertRaisesMessage(TypeError, "expected a character, but string of length 2 found", ord, lst) def test_bytes_hashing(self): """test interaction of bytes w/ hashing modules""" import _sha, _sha256, _sha512, _md5 for hashLib in (_sha.new, _sha256.sha256, _sha512.sha512, _sha512.sha384, _md5.new): x = hashLib(b'abc') x.update(b'abc') #For now just make sure this doesn't throw temp = hashLib(bytearray(b'abc')) x.update(bytearray(b'abc')) def test_cp35493(self): self.assertEqual(bytearray(u'\xde\xad\xbe\xef\x80'), bytearray(b'\xde\xad\xbe\xef\x80')) def test_add(self): self.assertEqual(bytearray(b"abc") + memoryview(b"def"), b"abcdef") run_test(__name__)
apache-2.0
-8,719,912,513,521,506,000
44.089717
168
0.532088
false
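Editor's note: the bytes/bytearray test record above pins down CPython's slice semantics for bytearray. The spot checks below restate three of its cases, standard library only, so the behavior can be verified in isolation.

# Minimal sketch of the bytearray slice semantics exercised above.
x = bytearray(b'abcdef')
del x[4:2:-1]                 # extended-slice delete: removes indexes 4 and 3
assert x == bytearray(b'abcf')

x = bytearray(b'abc')
x[0:1] = [ord('d')]           # item assignment through a plain slice
assert x == bytearray(b'dbc')

x = bytearray(b'abc')
x[3:0] = b'x' * 3             # empty slice past the end inserts, never replaces
assert x == bytearray(b'abcxxx')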
offtools/linux-show-player
lisp/modules/gst_backend/elements/user_element.py
3
3423
# -*- coding: utf-8 -*-
#
# This file is part of Linux Show Player
#
# Copyright 2012-2016 Francesco Ceruti <ceppofrancy@gmail.com>
#
# Linux Show Player is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Linux Show Player is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Linux Show Player. If not, see <http://www.gnu.org/licenses/>.

from PyQt5.QtCore import QT_TRANSLATE_NOOP

from lisp.backend.media_element import ElementType, MediaType
from lisp.core.has_properties import Property
from lisp.modules.gst_backend.gi_repository import Gst
from lisp.modules.gst_backend.gst_element import GstMediaElement


class UserElement(GstMediaElement):
    ElementType = ElementType.Plugin
    MediaType = MediaType.Audio
    Name = QT_TRANSLATE_NOOP('MediaElementName', 'Custom Element')

    bin = Property(default='')

    def __init__(self, pipeline):
        super().__init__()

        self.pipeline = pipeline
        self.audio_convert_sink = Gst.ElementFactory.make("audioconvert", None)
        # A default assignment for the bin
        self.gst_bin = Gst.ElementFactory.make("identity", None)
        self.gst_bin.set_property("signal-handoffs", False)
        self.audio_convert_src = Gst.ElementFactory.make("audioconvert", None)

        pipeline.add(self.audio_convert_sink)
        pipeline.add(self.gst_bin)
        pipeline.add(self.audio_convert_src)

        self.audio_convert_sink.link(self.gst_bin)
        self.gst_bin.link(self.audio_convert_src)

        self._old_bin = self.gst_bin

        self.changed('bin').connect(self.__prepare_bin)

    def sink(self):
        return self.audio_convert_sink

    def src(self):
        return self.audio_convert_src

    def __prepare_bin(self, value):
        if value != '' and value != self._old_bin:
            self._old_bin = value

            # If in playing we need to restart the pipeline after unblocking
            playing = self.gst_bin.current_state == Gst.State.PLAYING

            # Block the stream
            pad = self.audio_convert_sink.sinkpads[0]
            probe = pad.add_probe(Gst.PadProbeType.BLOCK, lambda *a: 0, "")

            # Unlink the components
            self.audio_convert_sink.unlink(self.gst_bin)
            self.gst_bin.unlink(self.audio_convert_src)
            self.pipeline.remove(self.gst_bin)

            # Create the bin, when fail use a do-nothing element
            try:
                self.gst_bin = Gst.parse_bin_from_description(value, True)
            except Exception:
                self.gst_bin = Gst.ElementFactory.make("identity", None)
                self.gst_bin.set_property("signal-handoffs", False)

            # Link the components
            self.pipeline.add(self.gst_bin)
            self.audio_convert_sink.link(self.gst_bin)
            self.gst_bin.link(self.audio_convert_src)

            # Unblock the stream
            pad.remove_probe(probe)

            if playing:
                self.pipeline.set_state(Gst.State.PLAYING)
gpl-3.0
-7,158,419,560,938,443,000
36.626374
79
0.658779
false
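Editor's note: UserElement above swaps a sub-bin in a live GStreamer pipeline by blocking the upstream pad before relinking. Below is a minimal standalone sketch of that block/relink/unblock pattern, assuming GStreamer 1.x with PyGObject; the pipeline description and element names are illustrative, not taken from the record.

import gi
gi.require_version('Gst', '1.0')
from gi.repository import Gst

Gst.init(None)
pipeline = Gst.parse_launch(
    'audiotestsrc name=src ! identity name=stage ! fakesink')
pipeline.set_state(Gst.State.PLAYING)

stage = pipeline.get_by_name('stage')
pad = stage.get_static_pad('sink')

# Hold dataflow on the pad, rewire, then release the probe; this is the
# same add_probe/remove_probe sequence UserElement.__prepare_bin uses.
probe = pad.add_probe(Gst.PadProbeType.BLOCK_DOWNSTREAM,
                      lambda pad, info: Gst.PadProbeReturn.OK)
# ... unlink and replace elements here while the stream is held ...
pad.remove_probe(probe)

pipeline.set_state(Gst.State.NULL)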
pigeonflight/strider-plone
docker/appengine/lib/django-1.5/tests/regressiontests/serializers_regress/models.py
45
8072
""" A test spanning all the capabilities of all the serializers. This class sets up a model for each model field type (except for image types, because of the PIL dependency). """ from django.db import models from django.contrib.contenttypes import generic from django.contrib.contenttypes.models import ContentType # The following classes are for testing basic data # marshalling, including NULL values, where allowed. class BooleanData(models.Model): data = models.BooleanField() class CharData(models.Model): data = models.CharField(max_length=30, null=True) class DateData(models.Model): data = models.DateField(null=True) class DateTimeData(models.Model): data = models.DateTimeField(null=True) class DecimalData(models.Model): data = models.DecimalField(null=True, decimal_places=3, max_digits=5) class EmailData(models.Model): data = models.EmailField(null=True) class FileData(models.Model): data = models.FileField(null=True, upload_to='/foo/bar') class FilePathData(models.Model): data = models.FilePathField(null=True) class FloatData(models.Model): data = models.FloatField(null=True) class IntegerData(models.Model): data = models.IntegerField(null=True) class BigIntegerData(models.Model): data = models.BigIntegerField(null=True) # class ImageData(models.Model): # data = models.ImageField(null=True) class IPAddressData(models.Model): data = models.IPAddressField(null=True) class GenericIPAddressData(models.Model): data = models.GenericIPAddressField(null=True) class NullBooleanData(models.Model): data = models.NullBooleanField(null=True) class PositiveIntegerData(models.Model): data = models.PositiveIntegerField(null=True) class PositiveSmallIntegerData(models.Model): data = models.PositiveSmallIntegerField(null=True) class SlugData(models.Model): data = models.SlugField(null=True) class SmallData(models.Model): data = models.SmallIntegerField(null=True) class TextData(models.Model): data = models.TextField(null=True) class TimeData(models.Model): data = models.TimeField(null=True) class Tag(models.Model): """A tag on an item.""" data = models.SlugField() content_type = models.ForeignKey(ContentType) object_id = models.PositiveIntegerField() content_object = generic.GenericForeignKey() class Meta: ordering = ["data"] class GenericData(models.Model): data = models.CharField(max_length=30) tags = generic.GenericRelation(Tag) # The following test classes are all for validation # of related objects; in particular, forward, backward, # and self references. class Anchor(models.Model): """This is a model that can be used as something for other models to point at""" data = models.CharField(max_length=30) class Meta: ordering = ('id',) class NaturalKeyAnchorManager(models.Manager): def get_by_natural_key(self, data): return self.get(data=data) class NaturalKeyAnchor(models.Model): objects = NaturalKeyAnchorManager() data = models.CharField(max_length=100, unique=True) def natural_key(self): return (self.data,) class UniqueAnchor(models.Model): """This is a model that can be used as something for other models to point at""" data = models.CharField(unique=True, max_length=30) class FKData(models.Model): data = models.ForeignKey(Anchor, null=True) class FKDataNaturalKey(models.Model): data = models.ForeignKey(NaturalKeyAnchor, null=True) class M2MData(models.Model): data = models.ManyToManyField(Anchor, null=True) class O2OData(models.Model): # One to one field can't be null here, since it is a PK. 
data = models.OneToOneField(Anchor, primary_key=True) class FKSelfData(models.Model): data = models.ForeignKey('self', null=True) class M2MSelfData(models.Model): data = models.ManyToManyField('self', null=True, symmetrical=False) class FKDataToField(models.Model): data = models.ForeignKey(UniqueAnchor, null=True, to_field='data') class FKDataToO2O(models.Model): data = models.ForeignKey(O2OData, null=True) class M2MIntermediateData(models.Model): data = models.ManyToManyField(Anchor, null=True, through='Intermediate') class Intermediate(models.Model): left = models.ForeignKey(M2MIntermediateData) right = models.ForeignKey(Anchor) extra = models.CharField(max_length=30, blank=True, default="doesn't matter") # The following test classes are for validating the # deserialization of objects that use a user-defined # field as the primary key. # Some of these data types have been commented out # because they can't be used as a primary key on one # or all database backends. class BooleanPKData(models.Model): data = models.BooleanField(primary_key=True) class CharPKData(models.Model): data = models.CharField(max_length=30, primary_key=True) # class DatePKData(models.Model): # data = models.DateField(primary_key=True) # class DateTimePKData(models.Model): # data = models.DateTimeField(primary_key=True) class DecimalPKData(models.Model): data = models.DecimalField(primary_key=True, decimal_places=3, max_digits=5) class EmailPKData(models.Model): data = models.EmailField(primary_key=True) # class FilePKData(models.Model): # data = models.FileField(primary_key=True, upload_to='/foo/bar') class FilePathPKData(models.Model): data = models.FilePathField(primary_key=True) class FloatPKData(models.Model): data = models.FloatField(primary_key=True) class IntegerPKData(models.Model): data = models.IntegerField(primary_key=True) # class ImagePKData(models.Model): # data = models.ImageField(primary_key=True) class IPAddressPKData(models.Model): data = models.IPAddressField(primary_key=True) class GenericIPAddressPKData(models.Model): data = models.GenericIPAddressField(primary_key=True) # This is just a Boolean field with null=True, and we can't test a PK value of NULL. # class NullBooleanPKData(models.Model): # data = models.NullBooleanField(primary_key=True) class PositiveIntegerPKData(models.Model): data = models.PositiveIntegerField(primary_key=True) class PositiveSmallIntegerPKData(models.Model): data = models.PositiveSmallIntegerField(primary_key=True) class SlugPKData(models.Model): data = models.SlugField(primary_key=True) class SmallPKData(models.Model): data = models.SmallIntegerField(primary_key=True) # class TextPKData(models.Model): # data = models.TextField(primary_key=True) # class TimePKData(models.Model): # data = models.TimeField(primary_key=True) class ComplexModel(models.Model): field1 = models.CharField(max_length=10) field2 = models.CharField(max_length=10) field3 = models.CharField(max_length=10) # Tests for handling fields with pre_save functions, or # models with save functions that modify data class AutoNowDateTimeData(models.Model): data = models.DateTimeField(null=True, auto_now=True) class ModifyingSaveData(models.Model): data = models.IntegerField(null=True) def save(self): "A save method that modifies the data in the object" self.data = 666 super(ModifyingSaveData, self).save(raw) # Tests for serialization of models using inheritance. 
# Regression for #7202, #7350 class AbstractBaseModel(models.Model): parent_data = models.IntegerField() class Meta: abstract = True class InheritAbstractModel(AbstractBaseModel): child_data = models.IntegerField() class BaseModel(models.Model): parent_data = models.IntegerField() class InheritBaseModel(BaseModel): child_data = models.IntegerField() class ExplicitInheritBaseModel(BaseModel): parent = models.OneToOneField(BaseModel) child_data = models.IntegerField() class ProxyBaseModel(BaseModel): class Meta: proxy = True class ProxyProxyBaseModel(ProxyBaseModel): class Meta: proxy = True class LengthModel(models.Model): data = models.IntegerField() def __len__(self): return self.data
mit
-4,446,028,695,050,745,300
28.140794
84
0.739965
false
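Editor's note: the models record above wires up Django's natural-key hooks (natural_key() on the model, get_by_natural_key() on the manager). A minimal sketch of how they are consumed, assuming a configured Django 1.5-era project (matching the record's path) with these test models installed; the field values are illustrative.

from django.core import serializers

anchor = NaturalKeyAnchor.objects.create(data="unique-value")
fk_obj = FKDataNaturalKey.objects.create(data=anchor)

# With use_natural_keys=True the foreign key is emitted as ["unique-value"]
# instead of a raw pk; deserialization resolves it back to the existing row
# via NaturalKeyAnchorManager.get_by_natural_key().
payload = serializers.serialize("json", [fk_obj], use_natural_keys=True)
for obj in serializers.deserialize("json", payload):
    obj.save()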
modelkayak/python_signal_examples
energy_fft.py
1
2685
import numpy as np
import scipy
from matplotlib import pyplot as plt
from numpy import pi as pi

# Plotting logic switches
time_plot = True
freq_plot = True

# Oversample to make things look purty
oversample = 100

# Frequencies to simulate
f_min = 5   # [Hz]
f_max = 10  # [Hz]
f_list = np.arange(f_min, f_max)  # Note: arange does not include the stop pt

# Time array
t_start = 0                  # [s]
t_stop = oversample/f_min    # [s]
f_samp = oversample*f_max    # [Hz]
t_step = 1/f_samp            # [s]

# Create a time span, but do not care about the number of points.
# This will likely create sinc functions in the FFT.
#t = np.arange(t_start,t_stop,t_step)

# Use N points to make a faster FFT and to avoid
# the addition of zeros at the end of the FFT array.
# The addition of zeros is equivalent to multiplying by
# a box filter in the time domain, which results in
# a sinc function in the frequency domain.
N = int(np.power(2, np.ceil(np.log2(t_stop/t_step))))

# Create a time span, but care about the number of points such that
# the signal does not look like a sinc function in the freq. domain.
# Source: U of RI ECE, ELE 436: Comm. Sys., FFT Tutorial
t = np.linspace(t_start, t_stop, num=N, endpoint=True)

# Create random amplitudes
a_list = [np.random.randint(1, 10) for i in f_list]

# Create a time signal with random amplitudes for each frequency
x = 0
for a, f in zip(a_list, f_list):
    x += a*np.sin(2*pi*f*t)

# Take the FFT of the signal.
# Normalize by the size of x due to how a DTFT is taken.
# Take the absolute value because we only care about the
# magnitude of the spectrum.
X = np.abs(np.fft.fft(x)/x.size)

# Get the labels for the frequencies, num pts and delta between them
freq_labels = np.fft.fftfreq(N, t[1]-t[0])

# Plot the time signal
if time_plot and not freq_plot:
    plt.figure('Time Domain View')
    plt.title("Time domain view of signal x")
    plt.plot(t, x)
    plt.xlim([0, 5/f_min])
    plt.xlabel("Time [s]")
    plt.ylabel("Amplitude")
    plt.show()

# Or plot the frequency
if freq_plot and not time_plot:
    plt.figure('Frequency Domain View')
    plt.title("Frequency domain view of signal x")
    plt.plot(freq_labels, X)
    plt.xlim([-f_max, f_max])
    plt.show()

# Or plot both
if freq_plot and time_plot:
    plt.subplot(211)
    plt.title("Time and frequency domain view of real signal x")
    plt.plot(t, x)
    plt.xlim([0, 5/f_min])      # Limit the time shown to a small amount
    plt.xlabel("Time [s]")
    plt.ylabel("Amplitude")

    plt.subplot(212)
    plt.plot(freq_labels, X)
    plt.xlim([-f_max, f_max])   # Limit the freq shown to a small amount
    plt.xlabel("Frequency [Hz]")
    plt.ylabel("Magnitude (linear)")
    plt.show()
mit
-7,617,899,990,671,451,000
28.505495
75
0.67933
false
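Editor's note: energy_fft.py above justifies rounding the sample count up to a power of two so the FFT is fast and no implicit zero-padding (a box window, hence a sinc in frequency) is introduced. A minimal numpy check of that rule; the span, step, and test tone here are illustrative.

import numpy as np

t_stop, t_step = 20.0, 1e-3
N = int(2 ** np.ceil(np.log2(t_stop / t_step)))   # 20000 rounds up to 32768

t = np.linspace(0.0, t_stop, num=N, endpoint=True)
x = np.sin(2 * np.pi * 5.0 * t)                   # single 5 Hz tone

X = np.abs(np.fft.fft(x) / x.size)
freqs = np.fft.fftfreq(N, d=t[1] - t[0])
print(freqs[np.argmax(X)])                        # peak lands at ~ +/-5.0 Hz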
moxon6/chemlab
build/lib/chemlab/graphics/qttrajectory.py
5
12898
from PyQt4.QtGui import QMainWindow, QApplication, QDockWidget from PyQt4 import QtGui, QtCore from PyQt4.QtCore import Qt import os from .qtviewer import app from .qchemlabwidget import QChemlabWidget from .. import resources import numpy as np resources_dir = os.path.dirname(resources.__file__) class PlayStopButton(QtGui.QPushButton): play = QtCore.pyqtSignal() pause = QtCore.pyqtSignal() def __init__(self): css = ''' PlayStopButton { width: 30px; height: 30px; } ''' super(PlayStopButton, self).__init__() self.setStyleSheet(css) icon = QtGui.QIcon(os.path.join(resources_dir, 'play_icon.svg')) self.setIcon(icon) self.status = 'paused' self.clicked.connect(self.on_click) def on_click(self): if self.status == 'paused': self.status = 'playing' icon = QtGui.QIcon(os.path.join(resources_dir, 'pause_icon.svg')) self.setIcon(icon) self.play.emit() else: self.status = 'paused' icon = QtGui.QIcon(os.path.join(resources_dir, 'play_icon.svg')) self.setIcon(icon) self.pause.emit() def set_pause(self): self.status = 'paused' icon = QtGui.QIcon(os.path.join(resources_dir, 'play_icon.svg')) self.setIcon(icon) def set_play(self): self.status = 'playing' icon = QtGui.QIcon(os.path.join(resources_dir, 'pause_icon.svg')) self.setIcon(icon) class AnimationSlider(QtGui.QSlider): def __init__(self): super(AnimationSlider, self).__init__(Qt.Horizontal) self._cursor_adjustment = 7 #px def mousePressEvent(self, event): if event.button() == Qt.LeftButton: value = self.__pixelPosToRangeValue(event.x()-self._cursor_adjustment) self.setValue(value) event.accept() super(AnimationSlider, self).mousePressEvent(event) def __pixelPosToRangeValue(self, pos): opt = QtGui.QStyleOptionSlider() self.initStyleOption(opt) style = QtGui.QApplication.style() gr = style.subControlRect(style.CC_Slider, opt, style.SC_SliderGroove, self) sr = style.subControlRect(style.CC_Slider, opt, style.SC_SliderHandle, self) if self.orientation() == QtCore.Qt.Horizontal: slider_length = sr.width() slider_min = gr.x() slider_max = gr.right() - slider_length + 1 else: slider_length = sr.height() slider_min = gr.y() slider_max = gr.bottom() - slider_length + 1 return style.sliderValueFromPosition(self.minimum(), self.maximum(), pos-slider_min, slider_max-slider_min, opt.upsideDown) class TrajectoryControls(QtGui.QWidget): play = QtCore.pyqtSignal() pause = QtCore.pyqtSignal() frame_changed = QtCore.pyqtSignal(int) speed_changed = QtCore.pyqtSignal() def __init__(self, parent=None): super(TrajectoryControls, self).__init__(parent) self.current_index = 0 self.max_index = 0 self._timer = QtCore.QTimer(self) self._timer.timeout.connect(self.do_update) containerhb2 = QtGui.QWidget(parent) hb = QtGui.QHBoxLayout() # For controls vb = QtGui.QVBoxLayout() hb2 = QtGui.QHBoxLayout() # For settings vb.addWidget(containerhb2) vb.addLayout(hb) containerhb2.setLayout(hb2) containerhb2.setSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Minimum) hb2.addWidget(QtGui.QLabel('Speed')) self._speed_slider = QtGui.QSlider(Qt.Horizontal) self._speed_slider.resize(100, self._speed_slider.height()) self._speed_slider.setSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed) self.speeds = np.linspace(15, 250, 11).astype(int) self.speeds = self.speeds.tolist() self.speeds.reverse() self._speed_slider.setMaximum(10) self._speed_slider.setValue(7) #self._speed_slider.valueChanged.connect(self.on_speed_changed) hb2.addWidget(self._speed_slider) hb2.addStretch(1) # Control buttons self.play_stop = PlayStopButton() hb.addWidget(self.play_stop) self.slider = 
AnimationSlider() hb.addWidget(self.slider, 2) self._label_tmp = '<b><FONT SIZE=30>{}</b>' self.timelabel = QtGui.QLabel(self._label_tmp.format('0.0')) hb.addWidget(self.timelabel) self._settings_button = QtGui.QPushButton() self._settings_button.setStyleSheet(''' QPushButton { width: 30px; height: 30px; }''') icon = QtGui.QIcon(os.path.join(resources_dir, 'settings_icon.svg')) self._settings_button.setIcon(icon) self._settings_button.clicked.connect(self._toggle_settings) hb.addWidget(self._settings_button) self.play_stop.setFocus() vb.setSizeConstraint(QtGui.QLayout.SetMaximumSize) containerhb2.setVisible(False) self._settings_pan = containerhb2 self.setLayout(vb) self.speed = self.speeds[self._speed_slider.value()] # Connecting all the signals self.play_stop.play.connect(self.on_play) self.play_stop.pause.connect(self.on_pause) self.slider.valueChanged.connect(self.on_slider_change) self.slider.sliderPressed.connect(self.on_slider_down) self.play_stop.setFocus() def _toggle_settings(self): self._settings_pan.setVisible(not self._settings_pan.isVisible()) def on_play(self): if self.current_index == self.max_index - 1: # Restart self.current_index = 0 self._timer.start(self.speed) def do_update(self): if self.current_index >= self.max_index: self.current_index = self.max_index - 1 self._timer.stop() self.play_stop.set_pause() else: self.current_index += 1 # This triggers on_slider_change self.slider.setSliderPosition(self.current_index) def next(self, skip=1): if self.current_index + skip >= self.max_index - 1: raise StopIteration else: self.slider.setValue(self.current_index + skip) # The current_index is changes def goto_frame(self, framenum): self.slider.setValue(framenum) def on_pause(self): self._timer.stop() def on_slider_change(self, value): #print 'Slider moved', value self.current_index = value self.frame_changed.emit(self.current_index) def on_slider_down(self): self._timer.stop() self.play_stop.set_pause() def on_speed_changed(self, index): self.speed = self.speeds[index] if self._timer.isActive(): self._timer.stop() self._timer.start(self.speed) def set_ticks(self, number): '''Set the number of frames to animate. ''' self.max_index = number self.current_index = 0 self.slider.setMaximum(self.max_index-1) self.slider.setMinimum(0) self.slider.setPageStep(1) def set_time(self, t): stime = format_time(t) label_tmp = '<b><FONT SIZE=30>{}</b>' self.timelabel.setText(label_tmp.format(stime)) class QtTrajectoryViewer(QMainWindow): """Bases: `PyQt4.QtGui.QMainWindow` Interface for viewing trajectory. It provides interface elements to play/pause and set the speed of the animation. **Example** To set up a QtTrajectoryViewer you have to add renderers to the scene, set the number of frames present in the animation by calling ;py:meth:`~chemlab.graphics.QtTrajectoryViewer.set_ticks` and define an update function. Below is an example taken from the function :py:func:`chemlab.graphics.display_trajectory`:: from chemlab.graphics import QtTrajectoryViewer # sys = some System # coords_list = some list of atomic coordinates v = QtTrajectoryViewer() sr = v.add_renderer(AtomRenderer, sys.r_array, sys.type_array, backend='impostors') br = v.add_renderer(BoxRenderer, sys.box_vectors) v.set_ticks(len(coords_list)) @v.update_function def on_update(index): sr.update_positions(coords_list[index]) br.update(sys.box_vectors) v.set_text(format_time(times[index])) v.widget.repaint() v.run() .. warning:: Use with caution, the API for this element is not fully stabilized and may be subject to change. 
""" def __init__(self): super(QtTrajectoryViewer, self).__init__() self.controls = QDockWidget() # Eliminate the dock titlebar title_widget = QtGui.QWidget(self) self.controls.setTitleBarWidget(title_widget) traj_controls = TrajectoryControls(self) self.controls.setWidget(traj_controls) # Molecular viewer self.widget = QChemlabWidget(self) self.setCentralWidget(self.widget) self.addDockWidget(Qt.DockWidgetArea(Qt.BottomDockWidgetArea), self.controls) self.show() # Replace in this way traj_controls.frame_changed.connect(self.on_frame_changed) self.traj_controls = traj_controls def set_ticks(self, number): self.traj_controls.set_ticks(number) def set_text(self, text): '''Update the time indicator in the interface. ''' self.traj_controls.timelabel.setText(self.traj_controls._label_tmp.format(text)) def on_frame_changed(self, index): self._update_function(index) def on_pause(self): self._timer.stop() def on_slider_change(self, value): self.current_index = value self._update_function(self.current_index) def on_slider_down(self): self._timer.stop() self.play_stop.set_pause() def on_speed_changed(self, index): self.speed = self.speeds[index] if self._timer.isActive(): self._timer.stop() self._timer.start(self.speed) def add_renderer(self, klass, *args, **kwargs): '''The behaviour of this function is the same as :py:meth:`chemlab.graphics.QtViewer.add_renderer`. ''' renderer = klass(self.widget, *args, **kwargs) self.widget.renderers.append(renderer) return renderer def add_ui(self, klass, *args, **kwargs): '''Add an UI element for the current scene. The approach is the same as renderers. .. warning:: The UI api is not yet finalized ''' ui = klass(self.widget, *args, **kwargs) self.widget.uis.append(ui) return ui def add_post_processing(self, klass, *args, **kwargs): pp = klass(self.widget, *args, **kwargs) self.widget.post_processing.append(pp) return pp def run(self): app.exec_() def update_function(self, func, frames=None): '''Set the function to be called when it's time to display a frame. *func* should be a function that takes one integer argument that represents the frame that has to be played:: def func(index): # Update the renderers to match the # current animation index ''' # Back-compatibility if frames is not None: self.traj_controls.set_ticks(frames) self._update_function = func def _toggle_settings(self): self._settings_pan.setVisible(not self._settings_pan.isVisible()) def format_time(t): if 0.0 <= t < 100.0: return '%.1f ps' % t elif 100.0 <= t < 1.0e5: return '%.1f ns' % (t/1e3) elif 1.0e5 <= t < 1.0e8: return '%.1f us' % (t/1e6) elif 1.0e8 <= t < 1.0e12: return '%.1f ms' % (t/1e9) elif 1.0e12 <= t < 1.0e15: return '%.1f s' % (t/1e12) if __name__ == '__main__': v = QtTrajectoryViewer() v.show() app.exec_()
gpl-3.0
5,636,350,738,547,121,000
30.615196
88
0.575361
false
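Editor's note: the trajectory viewer's time label above is driven by format_time(), which takes a time in picoseconds. A standalone copy with spot checks, runnable without Qt; note the record leaves t >= 1.0e15 unhandled (the function returns None there).

def format_time(t):
    if 0.0 <= t < 100.0:
        return '%.1f ps' % t
    elif 100.0 <= t < 1.0e5:
        return '%.1f ns' % (t/1e3)
    elif 1.0e5 <= t < 1.0e8:
        return '%.1f us' % (t/1e6)
    elif 1.0e8 <= t < 1.0e12:
        return '%.1f ms' % (t/1e9)
    elif 1.0e12 <= t < 1.0e15:
        return '%.1f s' % (t/1e12)

assert format_time(50.0) == '50.0 ps'
assert format_time(2.5e6) == '2.5 us'
assert format_time(3.0e10) == '30.0 ms'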
gautam1858/tensorflow
tensorflow/python/kernel_tests/numerics_test.py
15
5145
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.ops.numerics."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import numpy as np

from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import numerics
from tensorflow.python.platform import test


class VerifyTensorAllFiniteTest(test.TestCase):

  def testVerifyTensorAllFiniteSucceeds(self):
    x_shape = [5, 4]
    x = np.random.random_sample(x_shape).astype(np.float32)
    with test_util.use_gpu():
      t = constant_op.constant(x, shape=x_shape, dtype=dtypes.float32)
      t_verified = numerics.verify_tensor_all_finite(t,
                                                     "Input is not a number.")
      self.assertAllClose(x, self.evaluate(t_verified))

  def testVerifyTensorAllFiniteFails(self):
    x_shape = [5, 4]
    x = np.random.random_sample(x_shape).astype(np.float32)
    my_msg = "Input is not a number."

    # Test NaN.
    x[0] = np.nan
    with test_util.use_gpu():
      with self.assertRaisesOpError(my_msg):
        t = constant_op.constant(x, shape=x_shape, dtype=dtypes.float32)
        t_verified = numerics.verify_tensor_all_finite(t, my_msg)
        self.evaluate(t_verified)

    # Test Inf.
    x[0] = np.inf
    with test_util.use_gpu():
      with self.assertRaisesOpError(my_msg):
        t = constant_op.constant(x, shape=x_shape, dtype=dtypes.float32)
        t_verified = numerics.verify_tensor_all_finite(t, my_msg)
        self.evaluate(t_verified)


@test_util.run_v1_only("b/120545219")
class NumericsTest(test.TestCase):

  def testInf(self):
    with self.session(graph=ops.Graph()):
      t1 = constant_op.constant(1.0)
      t2 = constant_op.constant(0.0)
      a = math_ops.div(t1, t2)
      check = numerics.add_check_numerics_ops()
      a = control_flow_ops.with_dependencies([check], a)
      with self.assertRaisesOpError("Inf"):
        self.evaluate(a)

  def testNaN(self):
    with self.session(graph=ops.Graph()):
      t1 = constant_op.constant(0.0)
      t2 = constant_op.constant(0.0)
      a = math_ops.div(t1, t2)
      check = numerics.add_check_numerics_ops()
      a = control_flow_ops.with_dependencies([check], a)
      with self.assertRaisesOpError("NaN"):
        self.evaluate(a)

  def testBoth(self):
    with self.session(graph=ops.Graph()):
      t1 = constant_op.constant([1.0, 0.0])
      t2 = constant_op.constant([0.0, 0.0])
      a = math_ops.div(t1, t2)
      check = numerics.add_check_numerics_ops()
      a = control_flow_ops.with_dependencies([check], a)
      with self.assertRaisesOpError("Inf and NaN"):
        self.evaluate(a)

  def testPassThrough(self):
    with self.session(graph=ops.Graph()):
      t1 = constant_op.constant([1.0, 2.0, 3.0, 4.0, 5.0, 6.0], shape=[2, 3])
      checked = array_ops.check_numerics(t1, message="pass through test")
      value = self.evaluate(checked)
      self.assertAllEqual(np.array([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]]), value)
      self.assertEqual([2, 3], checked.get_shape())

  def testControlFlowCond(self):
    predicate = array_ops.placeholder(dtypes.bool, shape=[])
    _ = control_flow_ops.cond(predicate,
                              lambda: constant_op.constant([37.]),
                              lambda: constant_op.constant([42.]))
    with self.assertRaisesRegexp(
        ValueError,
        r"`tf\.add_check_numerics_ops\(\) is not compatible with "
        r"TensorFlow control flow operations such as `tf\.cond\(\)` "
        r"or `tf.while_loop\(\)`\."):
      numerics.add_check_numerics_ops()

  def testControlFlowWhile(self):
    predicate = array_ops.placeholder(dtypes.bool, shape=[])
    _ = control_flow_ops.while_loop(lambda _: predicate,
                                    lambda _: constant_op.constant([37.]),
                                    [constant_op.constant([42.])])
    with self.assertRaisesRegexp(
        ValueError,
        r"`tf\.add_check_numerics_ops\(\) is not compatible with "
        r"TensorFlow control flow operations such as `tf\.cond\(\)` "
        r"or `tf.while_loop\(\)`\."):
      numerics.add_check_numerics_ops()


if __name__ == "__main__":
  test.main()
apache-2.0
-8,638,399,950,881,211,000
37.395522
80
0.641399
false
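Editor's note: the tests above exercise TensorFlow's numerics checks through internal modules. A minimal sketch of the same op through the public TF 1.x API (assumes tensorflow 1.x is installed; the message text is illustrative).

import tensorflow as tf

x = tf.constant([1.0, 2.0]) / tf.constant([2.0, 0.0])   # second entry is Inf
checked = tf.check_numerics(x, message="x blew up")

with tf.Session() as sess:
    sess.run(checked)   # raises InvalidArgumentError mentioning "x blew up"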
SnappleCap/oh-mainline
vendor/packages/Django/tests/regressiontests/utils/crypto.py
108
4625
from __future__ import unicode_literals

import binascii
import math
import timeit
import hashlib

from django.utils import unittest
from django.utils.crypto import constant_time_compare, pbkdf2


class TestUtilsCryptoMisc(unittest.TestCase):
    def test_constant_time_compare(self):
        # It's hard to test for constant time, just test the result.
        self.assertTrue(constant_time_compare(b'spam', b'spam'))
        self.assertFalse(constant_time_compare(b'spam', b'eggs'))
        self.assertTrue(constant_time_compare('spam', 'spam'))
        self.assertFalse(constant_time_compare('spam', 'eggs'))


class TestUtilsCryptoPBKDF2(unittest.TestCase):

    # http://tools.ietf.org/html/draft-josefsson-pbkdf2-test-vectors-06
    rfc_vectors = [
        {
            "args": {
                "password": "password",
                "salt": "salt",
                "iterations": 1,
                "dklen": 20,
                "digest": hashlib.sha1,
            },
            "result": "0c60c80f961f0e71f3a9b524af6012062fe037a6",
        },
        {
            "args": {
                "password": "password",
                "salt": "salt",
                "iterations": 2,
                "dklen": 20,
                "digest": hashlib.sha1,
            },
            "result": "ea6c014dc72d6f8ccd1ed92ace1d41f0d8de8957",
        },
        {
            "args": {
                "password": "password",
                "salt": "salt",
                "iterations": 4096,
                "dklen": 20,
                "digest": hashlib.sha1,
            },
            "result": "4b007901b765489abead49d926f721d065a429c1",
        },
        # # this takes way too long :(
        # {
        #     "args": {
        #         "password": "password",
        #         "salt": "salt",
        #         "iterations": 16777216,
        #         "dklen": 20,
        #         "digest": hashlib.sha1,
        #     },
        #     "result": "eefe3d61cd4da4e4e9945b3d6ba2158c2634e984",
        # },
        {
            "args": {
                "password": "passwordPASSWORDpassword",
                "salt": "saltSALTsaltSALTsaltSALTsaltSALTsalt",
                "iterations": 4096,
                "dklen": 25,
                "digest": hashlib.sha1,
            },
            "result": "3d2eec4fe41c849b80c8d83662c0e44a8b291a964cf2f07038",
        },
        {
            "args": {
                "password": "pass\0word",
                "salt": "sa\0lt",
                "iterations": 4096,
                "dklen": 16,
                "digest": hashlib.sha1,
            },
            "result": "56fa6aa75548099dcc37d7f03425e0c3",
        },
    ]

    regression_vectors = [
        {
            "args": {
                "password": "password",
                "salt": "salt",
                "iterations": 1,
                "dklen": 20,
                "digest": hashlib.sha256,
            },
            "result": "120fb6cffcf8b32c43e7225256c4f837a86548c9",
        },
        {
            "args": {
                "password": "password",
                "salt": "salt",
                "iterations": 1,
                "dklen": 20,
                "digest": hashlib.sha512,
            },
            "result": "867f70cf1ade02cff3752599a3a53dc4af34c7a6",
        },
        {
            "args": {
                "password": "password",
                "salt": "salt",
                "iterations": 1000,
                "dklen": 0,
                "digest": hashlib.sha512,
            },
            "result": ("afe6c5530785b6cc6b1c6453384731bd5ee432ee"
                       "549fd42fb6695779ad8a1c5bf59de69c48f774ef"
                       "c4007d5298f9033c0241d5ab69305e7b64eceeb8d"
                       "834cfec"),
        },
        # Check leading zeros are not stripped (#17481)
        {
            "args": {
                "password": b'\xba',
                "salt": "salt",
                "iterations": 1,
                "dklen": 20,
                "digest": hashlib.sha1,
            },
            "result": '0053d3b91a7f1e54effebd6d68771e8a6e0b2c5b',
        },
    ]

    def test_public_vectors(self):
        for vector in self.rfc_vectors:
            result = pbkdf2(**vector['args'])
            self.assertEqual(binascii.hexlify(result).decode('ascii'),
                             vector['result'])

    def test_regression_vectors(self):
        for vector in self.regression_vectors:
            result = pbkdf2(**vector['args'])
            self.assertEqual(binascii.hexlify(result).decode('ascii'),
                             vector['result'])
agpl-3.0
-801,880,466,785,421,400
30.678082
75
0.463568
false
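Editor's note: the first RFC test vector in the record above can be cross-checked against the standard library's PBKDF2 implementation (hashlib.pbkdf2_hmac, available on Python 2.7.8+ and 3.4+), independent of Django's pbkdf2().

import binascii
import hashlib

derived = hashlib.pbkdf2_hmac('sha1', b'password', b'salt', 1, dklen=20)
assert (binascii.hexlify(derived) ==
        b'0c60c80f961f0e71f3a9b524af6012062fe037a6')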
jordanemedlock/psychtruths
temboo/core/Library/Basecamp/GetProject.py
5
3773
# -*- coding: utf-8 -*-

###############################################################################
#
# GetProject
# Retrieves an individual project using a project id that you specify.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################

from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution

import json

class GetProject(Choreography):

    def __init__(self, temboo_session):
        """
        Create a new instance of the GetProject Choreo. A TembooSession object, containing a valid
        set of Temboo credentials, must be supplied.
        """
        super(GetProject, self).__init__(temboo_session, '/Library/Basecamp/GetProject')


    def new_input_set(self):
        return GetProjectInputSet()

    def _make_result_set(self, result, path):
        return GetProjectResultSet(result, path)

    def _make_execution(self, session, exec_id, path):
        return GetProjectChoreographyExecution(session, exec_id, path)

class GetProjectInputSet(InputSet):
    """
    An InputSet with methods appropriate for specifying the inputs to the GetProject
    Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
    """
    def set_AccountName(self, value):
        """
        Set the value of the AccountName input for this Choreo. ((required, string) The Basecamp account name for you or your company. This is the first part of your account URL.)
        """
        super(GetProjectInputSet, self)._set_input('AccountName', value)
    def set_Password(self, value):
        """
        Set the value of the Password input for this Choreo. ((required, password) Your Basecamp password. You can use the value 'X' when specifying an API Key for the Username input.)
        """
        super(GetProjectInputSet, self)._set_input('Password', value)
    def set_ProjectId(self, value):
        """
        Set the value of the ProjectId input for this Choreo. ((required, integer) The ID for the project you want to retrieve.)
        """
        super(GetProjectInputSet, self)._set_input('ProjectId', value)
    def set_Username(self, value):
        """
        Set the value of the Username input for this Choreo. ((required, string) Your Basecamp username or API Key.)
        """
        super(GetProjectInputSet, self)._set_input('Username', value)

class GetProjectResultSet(ResultSet):
    """
    A ResultSet with methods tailored to the values returned by the GetProject Choreo.
    The ResultSet object is used to retrieve the results of a Choreo execution.
    """

    def getJSONFromString(self, str):
        return json.loads(str)

    def get_Response(self):
        """
        Retrieve the value for the "Response" output from this Choreo execution. ((xml) The response from Basecamp.)
        """
        return self._output.get('Response', None)

class GetProjectChoreographyExecution(ChoreographyExecution):

    def _make_result_set(self, response, path):
        return GetProjectResultSet(response, path)
apache-2.0
4,416,435,095,684,486,000
37.896907
185
0.670289
false
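Editor's note: a hypothetical execution sketch for the Choreo above, following Temboo's documented session-and-input-set pattern; all credentials, the account name, and the project id below are placeholders, not values from the record.

from temboo.core.session import TembooSession

session = TembooSession('ACCOUNT_NAME', 'APP_KEY_NAME', 'APP_KEY_VALUE')
choreo = GetProject(session)

inputs = choreo.new_input_set()
inputs.set_AccountName('mycompany')
inputs.set_Username('my-basecamp-api-key')
inputs.set_Password('X')          # 'X' is allowed when an API key is used
inputs.set_ProjectId(12345)

results = choreo.execute_with_results(inputs)
print(results.get_Response())     # XML payload from Basecamp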
zzsza/TIL
python/crawling/advanced_link_crawler.py
1
1657
import re
from urllib import robotparser
from urllib.parse import urljoin

from downloader import Downloader


def get_robots_parser(robots_url):
    rp = robotparser.RobotFileParser()
    rp.set_url(robots_url)
    rp.read()
    return rp


def get_links(html):
    webpage_regex = re.compile("""<a[^>]+href=["'](.*?)["']""", re.IGNORECASE)
    return webpage_regex.findall(html)


def link_crawler(start_url, link_regex, robots_url=None, user_agent='wswp',
                 proxies=None, delay=3, max_depth=4, num_retries=2,
                 cache={}, scraper_callback=None):
    crawl_queue = [start_url]
    seen = {}
    if not robots_url:
        robots_url = '{}/robots.txt'.format(start_url)
    rp = get_robots_parser(robots_url)
    D = Downloader(delay=delay, user_agent=user_agent,
                   proxies=proxies, cache=cache)
    while crawl_queue:
        url = crawl_queue.pop()
        if rp.can_fetch(user_agent, url):
            depth = seen.get(url, 0)
            if depth == max_depth:
                print('Skipping %s due to depth' % url)
                continue
            html = D(url, num_retries=num_retries)
            if not html:
                continue
            if scraper_callback:
                links = scraper_callback(url, html) or []
            else:
                links = []
            for link in get_links(html) + links:
                if re.match(link_regex, link):
                    abs_link = urljoin(start_url, link)
                    if abs_link not in seen:
                        seen[abs_link] = depth + 1
                        crawl_queue.append(abs_link)
        else:
            print('Blocked by robots.txt:', url)
mit
-5,075,583,295,079,017,000
33.520833
101
0.554617
false
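Editor's note: a hypothetical invocation of link_crawler() above; the site URL and link regex are placeholders, and the Downloader class must be importable from a sibling downloader.py as the record assumes.

link_crawler('http://example.webscraping.com',
             link_regex=r'.*/(index|view)/',   # follow listing and detail pages
             user_agent='wswp',
             max_depth=2,
             delay=5)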
openstack/manila
manila/scheduler/scheduler_options.py
2
3492
# Copyright (c) 2011 OpenStack, LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""
SchedulerOptions monitors a local .json file for changes and loads
it if needed. This file is converted to a data structure and passed
into the filtering and weighing functions which can use it for
dynamic configuration.
"""

import datetime
import os

from oslo_config import cfg
from oslo_log import log
from oslo_serialization import jsonutils
from oslo_utils import timeutils

scheduler_json_config_location_opt = cfg.StrOpt(
    'scheduler_json_config_location',
    default='',
    help='Absolute path to scheduler configuration JSON file.')

CONF = cfg.CONF
CONF.register_opt(scheduler_json_config_location_opt)

LOG = log.getLogger(__name__)


class SchedulerOptions(object):
    """Monitor and load local .json file for filtering and weighing.

    SchedulerOptions monitors a local .json file for changes and loads
    it if needed. This file is converted to a data structure and passed
    into the filtering and weighing functions which can use it for
    dynamic configuration.
    """

    def __init__(self):
        super(SchedulerOptions, self).__init__()
        self.data = {}
        self.last_modified = None
        self.last_checked = None

    def _get_file_handle(self, filename):
        """Get file handle. Broken out for testing."""
        return open(filename)

    def _get_file_timestamp(self, filename):
        """Get the last modified datetime. Broken out for testing."""
        try:
            return os.path.getmtime(filename)
        except os.error:
            LOG.exception("Could not stat scheduler options file "
                          "%(filename)s.",
                          {"filename": filename})
            raise

    def _load_file(self, handle):
        """Decode the JSON file. Broken out for testing."""
        try:
            return jsonutils.load(handle)
        except ValueError:
            LOG.exception("Could not decode scheduler options.")
            return {}

    def _get_time_now(self):
        """Get current UTC. Broken out for testing."""
        return timeutils.utcnow()

    def get_configuration(self, filename=None):
        """Check the json file for changes and load it if needed."""
        if not filename:
            filename = CONF.scheduler_json_config_location
        if not filename:
            return self.data
        if self.last_checked:
            now = self._get_time_now()
            if now - self.last_checked < datetime.timedelta(minutes=5):
                return self.data
        last_modified = self._get_file_timestamp(filename)
        if (not last_modified or not self.last_modified or
                last_modified > self.last_modified):
            self.data = self._load_file(self._get_file_handle(filename))
            self.last_modified = last_modified
        if not self.data:
            self.data = {}
        return self.data
apache-2.0
1,188,902,303,416,747,500
33.235294
78
0.649771
false
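Editor's note: a minimal consumption sketch for SchedulerOptions above; the JSON path and the option key are illustrative, and the file must exist since _get_file_timestamp() re-raises on a failed stat.

# /etc/manila/scheduler_options.json (illustrative contents):
#     {"capacity_weight_multiplier": 2.0}
from manila.scheduler.scheduler_options import SchedulerOptions

opts = SchedulerOptions()
config = opts.get_configuration('/etc/manila/scheduler_options.json')
multiplier = config.get('capacity_weight_multiplier', 1.0)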
rbalda/neural_ocr
env/lib/python2.7/site-packages/django/contrib/gis/geoip/base.py
334
11859
import os
import re
import warnings
from ctypes import c_char_p

from django.contrib.gis.geoip.libgeoip import GEOIP_SETTINGS
from django.contrib.gis.geoip.prototypes import (
    GeoIP_country_code_by_addr, GeoIP_country_code_by_name,
    GeoIP_country_name_by_addr, GeoIP_country_name_by_name,
    GeoIP_database_info, GeoIP_delete, GeoIP_lib_version, GeoIP_open,
    GeoIP_record_by_addr, GeoIP_record_by_name,
)
from django.core.validators import ipv4_re
from django.utils import six
from django.utils.deprecation import RemovedInDjango20Warning
from django.utils.encoding import force_bytes, force_text

# Regular expressions for recognizing the GeoIP free database editions.
free_regex = re.compile(r'^GEO-\d{3}FREE')
lite_regex = re.compile(r'^GEO-\d{3}LITE')


class GeoIPException(Exception):
    pass


class GeoIP(object):
    # The flags for GeoIP memory caching.
    # GEOIP_STANDARD - read database from filesystem, uses least memory.
    #
    # GEOIP_MEMORY_CACHE - load database into memory, faster performance
    #        but uses more memory
    #
    # GEOIP_CHECK_CACHE - check for updated database.  If database has been
    #        updated, reload filehandle and/or memory cache.  This option
    #        is not thread safe.
    #
    # GEOIP_INDEX_CACHE - just cache the most frequently accessed index
    #        portion of the database, resulting in faster lookups than
    #        GEOIP_STANDARD, but less memory usage than GEOIP_MEMORY_CACHE -
    #        useful for larger databases such as GeoIP Organization and
    #        GeoIP City.  Note, for GeoIP Country, Region and Netspeed
    #        databases, GEOIP_INDEX_CACHE is equivalent to GEOIP_MEMORY_CACHE
    #
    # GEOIP_MMAP_CACHE - load database into mmap shared memory (not available
    #        on Windows).
    GEOIP_STANDARD = 0
    GEOIP_MEMORY_CACHE = 1
    GEOIP_CHECK_CACHE = 2
    GEOIP_INDEX_CACHE = 4
    GEOIP_MMAP_CACHE = 8
    cache_options = {opt: None for opt in (0, 1, 2, 4, 8)}

    # Paths to the city & country binary databases.
    _city_file = ''
    _country_file = ''

    # Initially, pointers to GeoIP file references are NULL.
    _city = None
    _country = None

    def __init__(self, path=None, cache=0, country=None, city=None):
        """
        Initializes the GeoIP object. No parameters are required to use the
        default settings.  Keyword arguments may be passed in to customize
        the locations of the GeoIP data sets.

        * path: Base directory to where GeoIP data is located or the full path
            to where the city or country data files (*.dat) are located.
            Assumes that both the city and country data sets are located in
            this directory; overrides the GEOIP_PATH settings attribute.

        * cache: The cache settings when opening up the GeoIP datasets,
            and may be an integer in (0, 1, 2, 4, 8) corresponding to
            the GEOIP_STANDARD, GEOIP_MEMORY_CACHE, GEOIP_CHECK_CACHE,
            GEOIP_INDEX_CACHE, and GEOIP_MMAP_CACHE `GeoIPOptions` C API
            settings, respectively.  Defaults to 0, meaning that the data is
            read from the disk.

        * country: The name of the GeoIP country data file.  Defaults to
            'GeoIP.dat'; overrides the GEOIP_COUNTRY settings attribute.

        * city: The name of the GeoIP city data file.  Defaults to
            'GeoLiteCity.dat'; overrides the GEOIP_CITY settings attribute.
        """
        warnings.warn(
            "django.contrib.gis.geoip is deprecated in favor of "
            "django.contrib.gis.geoip2 and the MaxMind GeoLite2 database "
            "format.", RemovedInDjango20Warning, 2
        )

        # Checking the given cache option.
        if cache in self.cache_options:
            self._cache = cache
        else:
            raise GeoIPException('Invalid GeoIP caching option: %s' % cache)

        # Getting the GeoIP data path.
        if not path:
            path = GEOIP_SETTINGS.get('GEOIP_PATH')
            if not path:
                raise GeoIPException('GeoIP path must be provided via parameter or the GEOIP_PATH setting.')
        if not isinstance(path, six.string_types):
            raise TypeError('Invalid path type: %s' % type(path).__name__)

        if os.path.isdir(path):
            # Constructing the GeoIP database filenames using the settings
            # dictionary.  If the database files for the GeoLite country
            # and/or city datasets exist, then try and open them.
            country_db = os.path.join(path, country or GEOIP_SETTINGS.get('GEOIP_COUNTRY', 'GeoIP.dat'))
            if os.path.isfile(country_db):
                self._country = GeoIP_open(force_bytes(country_db), cache)
                self._country_file = country_db

            city_db = os.path.join(path, city or GEOIP_SETTINGS.get('GEOIP_CITY', 'GeoLiteCity.dat'))
            if os.path.isfile(city_db):
                self._city = GeoIP_open(force_bytes(city_db), cache)
                self._city_file = city_db
        elif os.path.isfile(path):
            # Otherwise, some detective work will be needed to figure out
            # whether the given database path is for the GeoIP country or
            # city databases.
            ptr = GeoIP_open(force_bytes(path), cache)
            info = GeoIP_database_info(ptr)
            if lite_regex.match(info):
                # GeoLite City database detected.
                self._city = ptr
                self._city_file = path
            elif free_regex.match(info):
                # GeoIP Country database detected.
                self._country = ptr
                self._country_file = path
            else:
                raise GeoIPException('Unable to recognize database edition: %s' % info)
        else:
            raise GeoIPException('GeoIP path must be a valid file or directory.')

    def __del__(self):
        # Cleaning any GeoIP file handles lying around.
        if GeoIP_delete is None:
            return
        if self._country:
            GeoIP_delete(self._country)
        if self._city:
            GeoIP_delete(self._city)

    def __repr__(self):
        version = ''
        if GeoIP_lib_version is not None:
            version += ' [v%s]' % force_text(GeoIP_lib_version())
        return '<%(cls)s%(version)s _country_file="%(country)s", _city_file="%(city)s">' % {
            'cls': self.__class__.__name__,
            'version': version,
            'country': self._country_file,
            'city': self._city_file,
        }

    def _check_query(self, query, country=False, city=False, city_or_country=False):
        "Helper routine for checking the query and database availability."
        # Making sure a string was passed in for the query.
        if not isinstance(query, six.string_types):
            raise TypeError('GeoIP query must be a string, not type %s' % type(query).__name__)

        # Extra checks for the existence of country and city databases.
        if city_or_country and not (self._country or self._city):
            raise GeoIPException('Invalid GeoIP country and city data files.')
        elif country and not self._country:
            raise GeoIPException('Invalid GeoIP country data file: %s' % self._country_file)
        elif city and not self._city:
            raise GeoIPException('Invalid GeoIP city data file: %s' % self._city_file)

        # Return the query string back to the caller. GeoIP only takes bytestrings.
        return force_bytes(query)

    def city(self, query):
        """
        Returns a dictionary of city information for the given IP address or
        Fully Qualified Domain Name (FQDN).  Some information in the dictionary
        may be undefined (None).
        """
        enc_query = self._check_query(query, city=True)
        if ipv4_re.match(query):
            # If an IP address was passed in
            return GeoIP_record_by_addr(self._city, c_char_p(enc_query))
        else:
            # If a FQDN was passed in.
            return GeoIP_record_by_name(self._city, c_char_p(enc_query))

    def country_code(self, query):
        "Returns the country code for the given IP Address or FQDN."
        enc_query = self._check_query(query, city_or_country=True)
        if self._country:
            if ipv4_re.match(query):
                return GeoIP_country_code_by_addr(self._country, enc_query)
            else:
                return GeoIP_country_code_by_name(self._country, enc_query)
        else:
            return self.city(query)['country_code']

    def country_name(self, query):
        "Returns the country name for the given IP Address or FQDN."
        enc_query = self._check_query(query, city_or_country=True)
        if self._country:
            if ipv4_re.match(query):
                return GeoIP_country_name_by_addr(self._country, enc_query)
            else:
                return GeoIP_country_name_by_name(self._country, enc_query)
        else:
            return self.city(query)['country_name']

    def country(self, query):
        """
        Returns a dictionary with the country code and name when given an
        IP address or a Fully Qualified Domain Name (FQDN).  For example, both
        '24.124.1.80' and 'djangoproject.com' are valid parameters.
        """
        # Returning the country code and name
        return {'country_code': self.country_code(query),
                'country_name': self.country_name(query),
                }

    # #### Coordinate retrieval routines ####
    def coords(self, query, ordering=('longitude', 'latitude')):
        cdict = self.city(query)
        if cdict is None:
            return None
        else:
            return tuple(cdict[o] for o in ordering)

    def lon_lat(self, query):
        "Returns a tuple of the (longitude, latitude) for the given query."
        return self.coords(query)

    def lat_lon(self, query):
        "Returns a tuple of the (latitude, longitude) for the given query."
        return self.coords(query, ('latitude', 'longitude'))

    def geos(self, query):
        "Returns a GEOS Point object for the given query."
        ll = self.lon_lat(query)
        if ll:
            from django.contrib.gis.geos import Point
            return Point(ll, srid=4326)
        else:
            return None

    # #### GeoIP Database Information Routines ####
    @property
    def country_info(self):
        "Returns information about the GeoIP country database."
        if self._country is None:
            ci = 'No GeoIP Country data in "%s"' % self._country_file
        else:
            ci = GeoIP_database_info(self._country)
        return ci

    @property
    def city_info(self):
        "Returns information about the GeoIP city database."
        if self._city is None:
            ci = 'No GeoIP City data in "%s"' % self._city_file
        else:
            ci = GeoIP_database_info(self._city)
        return ci

    @property
    def info(self):
        "Returns information about the GeoIP library and databases in use."
        info = ''
        if GeoIP_lib_version:
            info += 'GeoIP Library:\n\t%s\n' % GeoIP_lib_version()
        return info + 'Country:\n\t%s\nCity:\n\t%s' % (self.country_info, self.city_info)

    # #### Methods for compatibility w/the GeoIP-Python API. ####
    @classmethod
    def open(cls, full_path, cache):
        return GeoIP(full_path, cache)

    def _rec_by_arg(self, arg):
        if self._city:
            return self.city(arg)
        else:
            return self.country(arg)
    region_by_addr = city
    region_by_name = city
    record_by_addr = _rec_by_arg
    record_by_name = _rec_by_arg
    country_code_by_addr = country_code
    country_code_by_name = country_code
    country_name_by_addr = country_name
    country_name_by_name = country_name
mit
2,515,451,417,160,843,300
39.613014
108
0.612362
false
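A short usage sketch for the deprecated GeoIP wrapper in the record above. The data directory is an assumption for illustration; the legacy GeoIP.dat/GeoLiteCity.dat files must exist there for the lookups to succeed:

# Sketch: querying django.contrib.gis.geoip (pre-Django-2.0 API).
from django.contrib.gis.geoip import GeoIP

g = GeoIP(path='/usr/local/share/geoip')   # hypothetical data directory
country = g.country('djangoproject.com')   # {'country_code': ..., 'country_name': ...}
record = g.city('24.124.1.80')             # full city record dict, or None
lat_lon = g.lat_lon('djangoproject.com')   # (latitude, longitude) via coords()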
zhanghenry/stocks
tests/utils_tests/test_http.py
220
8102
from __future__ import unicode_literals

import sys
import unittest
from datetime import datetime

from django.utils import http, six
from django.utils.datastructures import MultiValueDict


class TestUtilsHttp(unittest.TestCase):

    def test_same_origin_true(self):
        # Identical
        self.assertTrue(http.same_origin('http://foo.com/', 'http://foo.com/'))
        # One with trailing slash - see #15617
        self.assertTrue(http.same_origin('http://foo.com', 'http://foo.com/'))
        self.assertTrue(http.same_origin('http://foo.com/', 'http://foo.com'))
        # With port
        self.assertTrue(http.same_origin('https://foo.com:8000', 'https://foo.com:8000/'))
        # No port given but according to RFC6454 still the same origin
        self.assertTrue(http.same_origin('http://foo.com', 'http://foo.com:80/'))
        self.assertTrue(http.same_origin('https://foo.com', 'https://foo.com:443/'))

    def test_same_origin_false(self):
        # Different scheme
        self.assertFalse(http.same_origin('http://foo.com', 'https://foo.com'))
        # Different host
        self.assertFalse(http.same_origin('http://foo.com', 'http://goo.com'))
        # Different host again
        self.assertFalse(http.same_origin('http://foo.com', 'http://foo.com.evil.com'))
        # Different port
        self.assertFalse(http.same_origin('http://foo.com:8000', 'http://foo.com:8001'))
        # No port given
        self.assertFalse(http.same_origin('http://foo.com', 'http://foo.com:8000/'))
        self.assertFalse(http.same_origin('https://foo.com', 'https://foo.com:8000/'))

    def test_urlencode(self):
        # 2-tuples (the norm)
        result = http.urlencode((('a', 1), ('b', 2), ('c', 3)))
        self.assertEqual(result, 'a=1&b=2&c=3')
        # A dictionary
        result = http.urlencode({'a': 1, 'b': 2, 'c': 3})
        acceptable_results = [
            # Need to allow all of these as dictionaries have to be treated as
            # unordered
            'a=1&b=2&c=3',
            'a=1&c=3&b=2',
            'b=2&a=1&c=3',
            'b=2&c=3&a=1',
            'c=3&a=1&b=2',
            'c=3&b=2&a=1'
        ]
        self.assertIn(result, acceptable_results)
        result = http.urlencode({'a': [1, 2]}, doseq=False)
        self.assertEqual(result, 'a=%5B%271%27%2C+%272%27%5D')
        result = http.urlencode({'a': [1, 2]}, doseq=True)
        self.assertEqual(result, 'a=1&a=2')
        result = http.urlencode({'a': []}, doseq=True)
        self.assertEqual(result, '')
        # A MultiValueDict
        result = http.urlencode(MultiValueDict({
            'name': ['Adrian', 'Simon'],
            'position': ['Developer']
        }), doseq=True)
        acceptable_results = [
            # MultiValueDicts are similarly unordered
            'name=Adrian&name=Simon&position=Developer',
            'position=Developer&name=Adrian&name=Simon'
        ]
        self.assertIn(result, acceptable_results)

    def test_base36(self):
        # reciprocity works
        for n in [0, 1, 1000, 1000000]:
            self.assertEqual(n, http.base36_to_int(http.int_to_base36(n)))
        if six.PY2:
            self.assertEqual(sys.maxint, http.base36_to_int(http.int_to_base36(sys.maxint)))

        # bad input
        self.assertRaises(ValueError, http.int_to_base36, -1)
        if six.PY2:
            self.assertRaises(ValueError, http.int_to_base36, sys.maxint + 1)
        for n in ['1', 'foo', {1: 2}, (1, 2, 3), 3.141]:
            self.assertRaises(TypeError, http.int_to_base36, n)

        for n in ['#', ' ']:
            self.assertRaises(ValueError, http.base36_to_int, n)
        for n in [123, {1: 2}, (1, 2, 3), 3.141]:
            self.assertRaises(TypeError, http.base36_to_int, n)

        # more explicit output testing
        for n, b36 in [(0, '0'), (1, '1'), (42, '16'), (818469960, 'django')]:
            self.assertEqual(http.int_to_base36(n), b36)
            self.assertEqual(http.base36_to_int(b36), n)

    def test_is_safe_url(self):
        for bad_url in ('http://example.com',
                        'http:///example.com',
                        'https://example.com',
                        'ftp://exampel.com',
                        r'\\example.com',
                        r'\\\example.com',
                        r'/\\/example.com',
                        r'\\\example.com',
                        r'\\example.com',
                        r'\\//example.com',
                        r'/\/example.com',
                        r'\/example.com',
                        r'/\example.com',
                        'http:///example.com',
                        'http:/\//example.com',
                        'http:\/example.com',
                        'http:/\example.com',
                        'javascript:alert("XSS")',
                        '\njavascript:alert(x)',
                        '\x08//example.com',
                        '\n'):
            self.assertFalse(http.is_safe_url(bad_url, host='testserver'), "%s should be blocked" % bad_url)
        for good_url in ('/view/?param=http://example.com',
                         '/view/?param=https://example.com',
                         '/view?param=ftp://exampel.com',
                         'view/?param=//example.com',
                         'https://testserver/',
                         'HTTPS://testserver/',
                         '//testserver/',
                         '/url%20with%20spaces/'):
            self.assertTrue(http.is_safe_url(good_url, host='testserver'), "%s should be allowed" % good_url)

    def test_urlsafe_base64_roundtrip(self):
        bytestring = b'foo'
        encoded = http.urlsafe_base64_encode(bytestring)
        decoded = http.urlsafe_base64_decode(encoded)
        self.assertEqual(bytestring, decoded)

    def test_urlquote(self):
        self.assertEqual(http.urlquote('Paris & Orl\xe9ans'),
                         'Paris%20%26%20Orl%C3%A9ans')
        self.assertEqual(http.urlquote('Paris & Orl\xe9ans', safe="&"),
                         'Paris%20&%20Orl%C3%A9ans')
        self.assertEqual(
            http.urlunquote('Paris%20%26%20Orl%C3%A9ans'),
            'Paris & Orl\xe9ans')
        self.assertEqual(
            http.urlunquote('Paris%20&%20Orl%C3%A9ans'),
            'Paris & Orl\xe9ans')
        self.assertEqual(http.urlquote_plus('Paris & Orl\xe9ans'),
                         'Paris+%26+Orl%C3%A9ans')
        self.assertEqual(http.urlquote_plus('Paris & Orl\xe9ans', safe="&"),
                         'Paris+&+Orl%C3%A9ans')
        self.assertEqual(
            http.urlunquote_plus('Paris+%26+Orl%C3%A9ans'),
            'Paris & Orl\xe9ans')
        self.assertEqual(
            http.urlunquote_plus('Paris+&+Orl%C3%A9ans'),
            'Paris & Orl\xe9ans')


class ETagProcessingTests(unittest.TestCase):
    def test_parsing(self):
        etags = http.parse_etags(r'"", "etag", "e\"t\"ag", "e\\tag", W/"weak"')
        self.assertEqual(etags, ['', 'etag', 'e"t"ag', r'e\tag', 'weak'])

    def test_quoting(self):
        quoted_etag = http.quote_etag(r'e\t"ag')
        self.assertEqual(quoted_etag, r'"e\\t\"ag"')


class HttpDateProcessingTests(unittest.TestCase):
    def test_http_date(self):
        t = 1167616461.0
        self.assertEqual(http.http_date(t), 'Mon, 01 Jan 2007 01:54:21 GMT')

    def test_cookie_date(self):
        t = 1167616461.0
        self.assertEqual(http.cookie_date(t), 'Mon, 01-Jan-2007 01:54:21 GMT')

    def test_parsing_rfc1123(self):
        parsed = http.parse_http_date('Sun, 06 Nov 1994 08:49:37 GMT')
        self.assertEqual(datetime.utcfromtimestamp(parsed), datetime(1994, 11, 6, 8, 49, 37))

    def test_parsing_rfc850(self):
        parsed = http.parse_http_date('Sunday, 06-Nov-94 08:49:37 GMT')
        self.assertEqual(datetime.utcfromtimestamp(parsed), datetime(1994, 11, 6, 8, 49, 37))

    def test_parsing_asctime(self):
        parsed = http.parse_http_date('Sun Nov  6 08:49:37 1994')
        self.assertEqual(datetime.utcfromtimestamp(parsed), datetime(1994, 11, 6, 8, 49, 37))
bsd-3-clause
-5,960,138,391,426,787,000
40.979275
109
0.540731
false
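The utilities exercised by the test file above are easy to show directly. A small sketch of the round trips the tests assert, using only calls that appear in the file:

# Sketch: the django.utils.http helpers covered by the tests above.
from django.utils import http

# base36 conversion is a bijection on non-negative integers
assert http.int_to_base36(818469960) == 'django'
assert http.base36_to_int('django') == 818469960

# urlencode flattens 2-tuples (or a dict) into a query string
assert http.urlencode((('a', 1), ('b', 2))) == 'a=1&b=2'

# is_safe_url rejects redirect targets that point off-host
assert http.is_safe_url('/view/?param=http://example.com', host='testserver')
assert not http.is_safe_url('http://example.com', host='testserver')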
vismartltd/edx-platform
common/djangoapps/track/tests/test_tracker.py
88
3481
from django.conf import settings
from django.test import TestCase
from django.test.utils import override_settings

import track.tracker as tracker
from track.backends import BaseBackend

SIMPLE_SETTINGS = {
    'default': {
        'ENGINE': 'track.tests.test_tracker.DummyBackend',
        'OPTIONS': {
            'flag': True
        }
    }
}

MULTI_SETTINGS = {
    'first': {
        'ENGINE': 'track.tests.test_tracker.DummyBackend',
    },
    'second': {
        'ENGINE': 'track.tests.test_tracker.DummyBackend',
    }
}


class TestTrackerInstantiation(TestCase):
    """Test that a helper function can instantiate backends from their name."""
    def setUp(self):
        # pylint: disable=protected-access
        super(TestTrackerInstantiation, self).setUp()
        self.get_backend = tracker._instantiate_backend_from_name

    def test_instatiate_backend(self):
        name = 'track.tests.test_tracker.DummyBackend'
        options = {'flag': True}
        backend = self.get_backend(name, options)

        self.assertIsInstance(backend, DummyBackend)
        self.assertTrue(backend.flag)

    def test_instatiate_backends_with_invalid_values(self):
        def get_invalid_backend(name, parameters):
            return self.get_backend(name, parameters)

        options = {}
        name = 'track.backends.logger'
        self.assertRaises(ValueError, get_invalid_backend, name, options)

        name = 'track.backends.logger.Foo'
        self.assertRaises(ValueError, get_invalid_backend, name, options)

        name = 'this.package.does.not.exists'
        self.assertRaises(ValueError, get_invalid_backend, name, options)

        name = 'unittest.TestCase'
        self.assertRaises(ValueError, get_invalid_backend, name, options)


class TestTrackerDjangoInstantiation(TestCase):
    """Test if backends are initialized properly from Django settings."""
    @override_settings(TRACKING_BACKENDS=SIMPLE_SETTINGS)
    def test_django_simple_settings(self):
        """Test configuration of a simple backend"""
        backends = self._reload_backends()
        self.assertEqual(len(backends), 1)
        tracker.send({})
        self.assertEqual(backends.values()[0].count, 1)

    @override_settings(TRACKING_BACKENDS=MULTI_SETTINGS)
    def test_django_multi_settings(self):
        """Test if multiple backends can be configured properly."""
        backends = self._reload_backends().values()
        self.assertEqual(len(backends), 2)
        event_count = 10
        for _ in xrange(event_count):
            tracker.send({})
        self.assertEqual(backends[0].count, event_count)
        self.assertEqual(backends[1].count, event_count)

    @override_settings(TRACKING_BACKENDS=MULTI_SETTINGS)
    def test_django_remove_settings(self):
        """Test if a backend can be removed by setting it to None."""
        settings.TRACKING_BACKENDS.update({'second': None})
        backends = self._reload_backends()
        self.assertEqual(len(backends), 1)

    def _reload_backends(self):
        # pylint: disable=protected-access
        # Reset backends
        tracker._initialize_backends_from_django_settings()
        return tracker.backends


class DummyBackend(BaseBackend):
    def __init__(self, **options):
        super(DummyBackend, self).__init__(**options)
        self.flag = options.get('flag', False)
        self.count = 0

    # pylint: disable=unused-argument
    def send(self, event):
        self.count += 1
agpl-3.0
6,338,887,002,531,866,000
28.5
79
0.65441
false
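The settings shape these tests exercise is worth seeing on its own. A minimal sketch mirroring the test file; in a real deployment the dict lives in the Django settings module, and DummyBackend here is the test double defined in the file itself:

# Sketch: wiring the event tracker from Django settings, as the tests do.
TRACKING_BACKENDS = {
    'default': {
        'ENGINE': 'track.tests.test_tracker.DummyBackend',
        'OPTIONS': {'flag': True},
    },
}

import track.tracker as tracker
tracker._initialize_backends_from_django_settings()  # private hook, called the same way the tests call it
tracker.send({'event_type': 'example'})              # fans the event out to every configured backend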
andela-bojengwa/talk
venv/lib/python2.7/site-packages/pip/_vendor/requests/sessions.py
204
24273
# -*- coding: utf-8 -*- """ requests.session ~~~~~~~~~~~~~~~~ This module provides a Session object to manage and persist settings across requests (cookies, auth, proxies). """ import os from collections import Mapping from datetime import datetime from .auth import _basic_auth_str from .compat import cookielib, OrderedDict, urljoin, urlparse from .cookies import ( cookiejar_from_dict, extract_cookies_to_jar, RequestsCookieJar, merge_cookies) from .models import Request, PreparedRequest, DEFAULT_REDIRECT_LIMIT from .hooks import default_hooks, dispatch_hook from .utils import to_key_val_list, default_headers, to_native_string from .exceptions import ( TooManyRedirects, InvalidSchema, ChunkedEncodingError, ContentDecodingError) from .packages.urllib3._collections import RecentlyUsedContainer from .structures import CaseInsensitiveDict from .adapters import HTTPAdapter from .utils import ( requote_uri, get_environ_proxies, get_netrc_auth, should_bypass_proxies, get_auth_from_url ) from .status_codes import codes # formerly defined here, reexposed here for backward compatibility from .models import REDIRECT_STATI REDIRECT_CACHE_SIZE = 1000 def merge_setting(request_setting, session_setting, dict_class=OrderedDict): """ Determines appropriate setting for a given request, taking into account the explicit setting on that request, and the setting in the session. If a setting is a dictionary, they will be merged together using `dict_class` """ if session_setting is None: return request_setting if request_setting is None: return session_setting # Bypass if not a dictionary (e.g. verify) if not ( isinstance(session_setting, Mapping) and isinstance(request_setting, Mapping) ): return request_setting merged_setting = dict_class(to_key_val_list(session_setting)) merged_setting.update(to_key_val_list(request_setting)) # Remove keys that are set to None. for (k, v) in request_setting.items(): if v is None: del merged_setting[k] merged_setting = dict((k, v) for (k, v) in merged_setting.items() if v is not None) return merged_setting def merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict): """ Properly merges both requests and session hooks. This is necessary because when request_hooks == {'response': []}, the merge breaks Session hooks entirely. """ if session_hooks is None or session_hooks.get('response') == []: return request_hooks if request_hooks is None or request_hooks.get('response') == []: return session_hooks return merge_setting(request_hooks, session_hooks, dict_class) class SessionRedirectMixin(object): def resolve_redirects(self, resp, req, stream=False, timeout=None, verify=True, cert=None, proxies=None): """Receives a Response. Returns a generator of Responses.""" i = 0 hist = [] # keep track of history while resp.is_redirect: prepared_request = req.copy() if i > 0: # Update history and keep track of redirects. hist.append(resp) new_hist = list(hist) resp.history = new_hist try: resp.content # Consume socket so it can be released except (ChunkedEncodingError, ContentDecodingError, RuntimeError): resp.raw.read(decode_content=False) if i >= self.max_redirects: raise TooManyRedirects('Exceeded %s redirects.' % self.max_redirects) # Release the connection back into the pool. resp.close() url = resp.headers['location'] method = req.method # Handle redirection without scheme (see: RFC 1808 Section 4) if url.startswith('//'): parsed_rurl = urlparse(resp.url) url = '%s:%s' % (parsed_rurl.scheme, url) # The scheme should be lower case... 
parsed = urlparse(url) url = parsed.geturl() # Facilitate relative 'location' headers, as allowed by RFC 7231. # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource') # Compliant with RFC3986, we percent encode the url. if not parsed.netloc: url = urljoin(resp.url, requote_uri(url)) else: url = requote_uri(url) prepared_request.url = to_native_string(url) # Cache the url, unless it redirects to itself. if resp.is_permanent_redirect and req.url != prepared_request.url: self.redirect_cache[req.url] = prepared_request.url # http://tools.ietf.org/html/rfc7231#section-6.4.4 if (resp.status_code == codes.see_other and method != 'HEAD'): method = 'GET' # Do what the browsers do, despite standards... # First, turn 302s into GETs. if resp.status_code == codes.found and method != 'HEAD': method = 'GET' # Second, if a POST is responded to with a 301, turn it into a GET. # This bizarre behaviour is explained in Issue 1704. if resp.status_code == codes.moved and method == 'POST': method = 'GET' prepared_request.method = method # https://github.com/kennethreitz/requests/issues/1084 if resp.status_code not in (codes.temporary_redirect, codes.permanent_redirect): if 'Content-Length' in prepared_request.headers: del prepared_request.headers['Content-Length'] prepared_request.body = None headers = prepared_request.headers try: del headers['Cookie'] except KeyError: pass extract_cookies_to_jar(prepared_request._cookies, prepared_request, resp.raw) prepared_request._cookies.update(self.cookies) prepared_request.prepare_cookies(prepared_request._cookies) # Rebuild auth and proxy information. proxies = self.rebuild_proxies(prepared_request, proxies) self.rebuild_auth(prepared_request, resp) # Override the original request. req = prepared_request resp = self.send( req, stream=stream, timeout=timeout, verify=verify, cert=cert, proxies=proxies, allow_redirects=False, ) extract_cookies_to_jar(self.cookies, prepared_request, resp.raw) i += 1 yield resp def rebuild_auth(self, prepared_request, response): """ When being redirected we may want to strip authentication from the request to avoid leaking credentials. This method intelligently removes and reapplies authentication where possible to avoid credential loss. """ headers = prepared_request.headers url = prepared_request.url if 'Authorization' in headers: # If we get redirected to a new host, we should strip out any # authentication headers. original_parsed = urlparse(response.request.url) redirect_parsed = urlparse(url) if (original_parsed.hostname != redirect_parsed.hostname): del headers['Authorization'] # .netrc might have more auth for us on our new host. new_auth = get_netrc_auth(url) if self.trust_env else None if new_auth is not None: prepared_request.prepare_auth(new_auth) return def rebuild_proxies(self, prepared_request, proxies): """ This method re-evaluates the proxy configuration by considering the environment variables. If we are redirected to a URL covered by NO_PROXY, we strip the proxy configuration. Otherwise, we set missing proxy keys for this URL (in case they were stripped by a previous redirect). This method also replaces the Proxy-Authorization header where necessary. 
""" headers = prepared_request.headers url = prepared_request.url scheme = urlparse(url).scheme new_proxies = proxies.copy() if proxies is not None else {} if self.trust_env and not should_bypass_proxies(url): environ_proxies = get_environ_proxies(url) proxy = environ_proxies.get(scheme) if proxy: new_proxies.setdefault(scheme, environ_proxies[scheme]) if 'Proxy-Authorization' in headers: del headers['Proxy-Authorization'] try: username, password = get_auth_from_url(new_proxies[scheme]) except KeyError: username, password = None, None if username and password: headers['Proxy-Authorization'] = _basic_auth_str(username, password) return new_proxies class Session(SessionRedirectMixin): """A Requests session. Provides cookie persistence, connection-pooling, and configuration. Basic Usage:: >>> import requests >>> s = requests.Session() >>> s.get('http://httpbin.org/get') 200 """ __attrs__ = [ 'headers', 'cookies', 'auth', 'proxies', 'hooks', 'params', 'verify', 'cert', 'prefetch', 'adapters', 'stream', 'trust_env', 'max_redirects', ] def __init__(self): #: A case-insensitive dictionary of headers to be sent on each #: :class:`Request <Request>` sent from this #: :class:`Session <Session>`. self.headers = default_headers() #: Default Authentication tuple or object to attach to #: :class:`Request <Request>`. self.auth = None #: Dictionary mapping protocol to the URL of the proxy (e.g. #: {'http': 'foo.bar:3128'}) to be used on each #: :class:`Request <Request>`. self.proxies = {} #: Event-handling hooks. self.hooks = default_hooks() #: Dictionary of querystring data to attach to each #: :class:`Request <Request>`. The dictionary values may be lists for #: representing multivalued query parameters. self.params = {} #: Stream response content default. self.stream = False #: SSL Verification default. self.verify = True #: SSL certificate default. self.cert = None #: Maximum number of redirects allowed. If the request exceeds this #: limit, a :class:`TooManyRedirects` exception is raised. self.max_redirects = DEFAULT_REDIRECT_LIMIT #: Should we trust the environment? self.trust_env = True #: A CookieJar containing all currently outstanding cookies set on this #: session. By default it is a #: :class:`RequestsCookieJar <requests.cookies.RequestsCookieJar>`, but #: may be any other ``cookielib.CookieJar`` compatible object. self.cookies = cookiejar_from_dict({}) # Default connection adapters. self.adapters = OrderedDict() self.mount('https://', HTTPAdapter()) self.mount('http://', HTTPAdapter()) # Only store 1000 redirects to prevent using infinite memory self.redirect_cache = RecentlyUsedContainer(REDIRECT_CACHE_SIZE) def __enter__(self): return self def __exit__(self, *args): self.close() def prepare_request(self, request): """Constructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it. The :class:`PreparedRequest` has settings merged from the :class:`Request <Request>` instance and those of the :class:`Session`. :param request: :class:`Request` instance to prepare with this session's settings. """ cookies = request.cookies or {} # Bootstrap CookieJar. if not isinstance(cookies, cookielib.CookieJar): cookies = cookiejar_from_dict(cookies) # Merge with session cookies merged_cookies = merge_cookies( merge_cookies(RequestsCookieJar(), self.cookies), cookies) # Set environment's basic authentication if not explicitly set. 
auth = request.auth if self.trust_env and not auth and not self.auth: auth = get_netrc_auth(request.url) p = PreparedRequest() p.prepare( method=request.method.upper(), url=request.url, files=request.files, data=request.data, json=request.json, headers=merge_setting(request.headers, self.headers, dict_class=CaseInsensitiveDict), params=merge_setting(request.params, self.params), auth=merge_setting(auth, self.auth), cookies=merged_cookies, hooks=merge_hooks(request.hooks, self.hooks), ) return p def request(self, method, url, params=None, data=None, headers=None, cookies=None, files=None, auth=None, timeout=None, allow_redirects=True, proxies=None, hooks=None, stream=None, verify=None, cert=None, json=None): """Constructs a :class:`Request <Request>`, prepares it and sends it. Returns :class:`Response <Response>` object. :param method: method for the new :class:`Request` object. :param url: URL for the new :class:`Request` object. :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`. :param data: (optional) Dictionary or bytes to send in the body of the :class:`Request`. :param json: (optional) json to send in the body of the :class:`Request`. :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`. :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`. :param files: (optional) Dictionary of ``'filename': file-like-objects`` for multipart encoding upload. :param auth: (optional) Auth tuple or callable to enable Basic/Digest/Custom HTTP Auth. :param timeout: (optional) How long to wait for the server to send data before giving up, as a float, or a (`connect timeout, read timeout <user/advanced.html#timeouts>`_) tuple. :type timeout: float or tuple :param allow_redirects: (optional) Set to True by default. :type allow_redirects: bool :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy. :param stream: (optional) whether to immediately download the response content. Defaults to ``False``. :param verify: (optional) if ``True``, the SSL cert will be verified. A CA_BUNDLE path can also be provided. :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair. """ method = to_native_string(method) # Create the Request. req = Request( method = method.upper(), url = url, headers = headers, files = files, data = data or {}, json = json, params = params or {}, auth = auth, cookies = cookies, hooks = hooks, ) prep = self.prepare_request(req) proxies = proxies or {} settings = self.merge_environment_settings( prep.url, proxies, stream, verify, cert ) # Send the request. send_kwargs = { 'timeout': timeout, 'allow_redirects': allow_redirects, } send_kwargs.update(settings) resp = self.send(prep, **send_kwargs) return resp def get(self, url, **kwargs): """Sends a GET request. Returns :class:`Response` object. :param url: URL for the new :class:`Request` object. :param \*\*kwargs: Optional arguments that ``request`` takes. """ kwargs.setdefault('allow_redirects', True) return self.request('GET', url, **kwargs) def options(self, url, **kwargs): """Sends a OPTIONS request. Returns :class:`Response` object. :param url: URL for the new :class:`Request` object. :param \*\*kwargs: Optional arguments that ``request`` takes. """ kwargs.setdefault('allow_redirects', True) return self.request('OPTIONS', url, **kwargs) def head(self, url, **kwargs): """Sends a HEAD request. Returns :class:`Response` object. 
:param url: URL for the new :class:`Request` object. :param \*\*kwargs: Optional arguments that ``request`` takes. """ kwargs.setdefault('allow_redirects', False) return self.request('HEAD', url, **kwargs) def post(self, url, data=None, json=None, **kwargs): """Sends a POST request. Returns :class:`Response` object. :param url: URL for the new :class:`Request` object. :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. :param json: (optional) json to send in the body of the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. """ return self.request('POST', url, data=data, json=json, **kwargs) def put(self, url, data=None, **kwargs): """Sends a PUT request. Returns :class:`Response` object. :param url: URL for the new :class:`Request` object. :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. """ return self.request('PUT', url, data=data, **kwargs) def patch(self, url, data=None, **kwargs): """Sends a PATCH request. Returns :class:`Response` object. :param url: URL for the new :class:`Request` object. :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. """ return self.request('PATCH', url, data=data, **kwargs) def delete(self, url, **kwargs): """Sends a DELETE request. Returns :class:`Response` object. :param url: URL for the new :class:`Request` object. :param \*\*kwargs: Optional arguments that ``request`` takes. """ return self.request('DELETE', url, **kwargs) def send(self, request, **kwargs): """Send a given PreparedRequest.""" # Set defaults that the hooks can utilize to ensure they always have # the correct parameters to reproduce the previous request. kwargs.setdefault('stream', self.stream) kwargs.setdefault('verify', self.verify) kwargs.setdefault('cert', self.cert) kwargs.setdefault('proxies', self.proxies) # It's possible that users might accidentally send a Request object. # Guard against that specific failure case. if not isinstance(request, PreparedRequest): raise ValueError('You can only send PreparedRequests.') checked_urls = set() while request.url in self.redirect_cache: checked_urls.add(request.url) new_url = self.redirect_cache.get(request.url) if new_url in checked_urls: break request.url = new_url # Set up variables needed for resolve_redirects and dispatching of hooks allow_redirects = kwargs.pop('allow_redirects', True) stream = kwargs.get('stream') timeout = kwargs.get('timeout') verify = kwargs.get('verify') cert = kwargs.get('cert') proxies = kwargs.get('proxies') hooks = request.hooks # Get the appropriate adapter to use adapter = self.get_adapter(url=request.url) # Start time (approximately) of the request start = datetime.utcnow() # Send the request r = adapter.send(request, **kwargs) # Total elapsed time of the request (approximately) r.elapsed = datetime.utcnow() - start # Response manipulation hooks r = dispatch_hook('response', hooks, r, **kwargs) # Persist cookies if r.history: # If the hooks create history then we want those cookies too for resp in r.history: extract_cookies_to_jar(self.cookies, resp.request, resp.raw) extract_cookies_to_jar(self.cookies, request, r.raw) # Redirect resolving generator. 
gen = self.resolve_redirects(r, request, stream=stream, timeout=timeout, verify=verify, cert=cert, proxies=proxies) # Resolve redirects if allowed. history = [resp for resp in gen] if allow_redirects else [] # Shuffle things around if there's history. if history: # Insert the first (original) request at the start history.insert(0, r) # Get the last request made r = history.pop() r.history = history if not stream: r.content return r def merge_environment_settings(self, url, proxies, stream, verify, cert): """Check the environment and merge it with some settings.""" # Gather clues from the surrounding environment. if self.trust_env: # Set environment's proxies. env_proxies = get_environ_proxies(url) or {} for (k, v) in env_proxies.items(): proxies.setdefault(k, v) # Look for requests environment configuration and be compatible # with cURL. if verify is True or verify is None: verify = (os.environ.get('REQUESTS_CA_BUNDLE') or os.environ.get('CURL_CA_BUNDLE')) # Merge all the kwargs. proxies = merge_setting(proxies, self.proxies) stream = merge_setting(stream, self.stream) verify = merge_setting(verify, self.verify) cert = merge_setting(cert, self.cert) return {'verify': verify, 'proxies': proxies, 'stream': stream, 'cert': cert} def get_adapter(self, url): """Returns the appropriate connnection adapter for the given URL.""" for (prefix, adapter) in self.adapters.items(): if url.lower().startswith(prefix): return adapter # Nothing matches :-/ raise InvalidSchema("No connection adapters were found for '%s'" % url) def close(self): """Closes all adapters and as such the session""" for v in self.adapters.values(): v.close() def mount(self, prefix, adapter): """Registers a connection adapter to a prefix. Adapters are sorted in descending order by key length.""" self.adapters[prefix] = adapter keys_to_move = [k for k in self.adapters if len(k) < len(prefix)] for key in keys_to_move: self.adapters[key] = self.adapters.pop(key) def __getstate__(self): state = dict((attr, getattr(self, attr, None)) for attr in self.__attrs__) state['redirect_cache'] = dict(self.redirect_cache) return state def __setstate__(self, state): redirect_cache = state.pop('redirect_cache', {}) for attr, value in state.items(): setattr(self, attr, value) self.redirect_cache = RecentlyUsedContainer(REDIRECT_CACHE_SIZE) for redirect, to in redirect_cache.items(): self.redirect_cache[redirect] = to def session(): """Returns a :class:`Session` for context-management.""" return Session()
mit
4,329,638,928,754,115,600
34.589443
115
0.604194
false
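The Session machinery above (merged headers and params, persistent cookie jar, adapter pooling) shows best end to end. A minimal sketch using httpbin.org as the class docstring does; network access is assumed:

# Sketch: persistent state across requests on one Session.
import requests

with requests.Session() as s:                        # __enter__/__exit__ close the adapters
    s.headers.update({'User-Agent': 'example/1.0'})  # merged into every request via merge_setting
    s.params = {'source': 'docs'}                    # default query parameters

    # Cookies set by a response land in s.cookies and are replayed later.
    s.get('http://httpbin.org/cookies/set?demo=1')
    r = s.get('http://httpbin.org/get', params={'q': 'x'})  # q merged with s.params
    demo = s.cookies.get('demo')                     # '1'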
bala4901/odoo
addons/l10n_cn/__openerp__.py
91
1827
# -*- encoding: utf-8 -*-
##############################################################################
#
#    Copyright (C) 2009 Gábor Dukai
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU General Public License as published by
#    the Free Software Foundation, either version 3 of the License, or
#    (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU General Public License for more details.
#
#    You should have received a copy of the GNU General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
    'name': '中国会计科目表 - Accounting',
    'version': '1.0',
    'category': 'Localization/Account Charts',
    'author': 'openerp-china.org',
    'maintainer': 'openerp-china.org',
    'website': 'http://openerp-china.org',
    'url': 'http://code.google.com/p/openerp-china/source/browse/#svn/trunk/l10n_cn',
    'description': """
添加中文省份数据
科目类型\会计科目表模板\增值税\辅助核算类别\管理会计凭证簿\财务会计凭证簿
============================================================
    """,
    'depends': ['base', 'account'],
    'demo': [],
    'data': [
        'account_chart.xml',
        'l10n_chart_cn_wizard.xml',
        'base_data.xml',
    ],
    'license': 'GPL-3',
    'auto_install': False,
    'installable': True,
    'images': ['images/config_chart_l10n_cn.jpeg', 'images/l10n_cn_chart.jpeg'],
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
198,857,496,429,203,550
35.765957
85
0.573495
false
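A manifest like the one above is a single Python dict literal, which makes it readable outside the OpenERP server too. A hedged sketch; the addon path below is an assumption, and reading with ast.literal_eval is one common approach rather than the server's exact loader:

# Sketch: parsing an __openerp__.py manifest as a dict literal.
import ast

with open('addons/l10n_cn/__openerp__.py') as f:
    manifest = ast.literal_eval(f.read())  # comments around the dict are ignored

depends = manifest['depends']    # ['base', 'account']
data_files = manifest['data']    # XML files loaded at module install time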
buzzer/pr2_imagination
race_simulation_run/tools/race_fluent_modification.py
2
6269
#! /usr/bin/env python
import yaml
import random
import roslib


class Fluent(yaml.YAMLObject):
    yaml_tag = '!Fluent'

    def __setstate__(self, state):
        """
        PyYaml does not call __init__. this is an init replacement.
        """
        self.properties = []
        self.Class_Instance = state['Class_Instance']
        # fix for bug:
        if type(self.Class_Instance[0]) == type(False):
            self.Class_Instance[0] = '"On"'
        if self.Class_Instance[0] == 'On':
            self.Class_Instance[0] = '"On"'
        self.StartTime = state['StartTime']
        self.FinishTime = state['FinishTime']
        for preProp in state['Properties']:
            self.properties.append(Property(preProp[0], preProp[1], preProp[2]))

    def toYamlString(self, line_break='\n'):
        yString = ''
        yString += '!Fluent' + line_break
        yString += '{:<16} {:<20}'.format('Class_Instance: ', '[' + self.Class_Instance[0] + ', ' + self.Class_Instance[1] + ']') + line_break
        yString += '{:<16} {:<20}'.format('StartTime: ', str(self.StartTime)) + line_break
        yString += '{:<16} {:<20}'.format('FinishTime: ', str(self.FinishTime)) + line_break
        if len(self.properties) > 0:
            yString += 'Properties:' + line_break
            for prop in self.properties:
                yString += prop.toYamlString()
        else:
            yString += 'Properties: []' + line_break
        return yString

    def __str__(self):
        return self.toYamlString()


class Property:
    def __init__(self, role_type, filler_type, role_filler):
        self.role_type = role_type
        self.filler_type = filler_type
        self.role_filler = role_filler
        for key, value in self.__dict__.items():
            if type(value) == type(False):
                setattr(self, key, 'On')
            #if value == 'On':
            #    setattr(self, key, '"On"')

    def toYamlString(self, line_break='\n'):
        return ' - [{}, {}, {}]'.format(self.role_type, '"' + self.filler_type + '"', self.role_filler) + line_break

    def __str__(self):
        return self.toYamlString()


class FluentPoseModification(Fluent):
    yaml_tag = '!FluentPoseModification'
    GROUP_CHOICE_MAP = {}

    def __setstate__(self, state):
        """
        PyYaml does not call __init__. this is an init replacement.
        """
        # apply group state...
        if not FluentPoseModification.GROUP_CHOICE_MAP.has_key(state['Group']):
            choice = random.randint(0, state['Choices']-1)
            FluentPoseModification.GROUP_CHOICE_MAP[state['Group']] = choice
        self.choice = FluentPoseModification.GROUP_CHOICE_MAP[state['Group']]
        self.Instance = state['Instance']
        self.Modifications = state['Modifications']
        self.Attachments = state['Attachments']
        # fix for bug:
        #if type(self.Class_Instance[0]) == type(False):
        #    self.Class_Instance[0] = '"On"'
        #if self.Class_Instance[0] == 'On':
        #    self.Class_Instance[0] = '"On"'
        #self.StartTime = state['StartTime']
        #self.FinishTime = state['FinishTime']

    def getMod(self, propertyString):
        for mod in self.Modifications:
            if mod[0] == propertyString:
                return mod[1][self.choice]
        return 0


if __name__ == '__main__':

#    filepath = roslib.packages.get_pkg_dir('race_simulation_run') + '/data/test.yaml'
#    changedFilepath = roslib.packages.get_pkg_dir('race_simulation_run') + '/data/output.yaml'
#    replacementsPath = roslib.packages.get_pkg_dir('race_simulation_run') + '/data/replace.yaml'
#
#    # load fluents:
#    fluents = []
#    with open(filepath) as f:
#        for fluent in yaml.load_all(f):
#            fluents.append(fluent)
#
#    # load modifications:
#    fluentPoseModifications = []
#    with open(replacementsPath) as f:
#        for poseMod in yaml.load_all(f):
#            fluentPoseModifications.append(poseMod)
#
#    # modify fluents poses:
#    for fluent in fluents[:]:
#        for poseMod in fluentPoseModifications:
#            if (poseMod.Instance == fluent.Class_Instance[1]) \
#                    or (fluent.Class_Instance[1] in poseMod.Attachments):
#                for prop in fluent.properties:
#                    if poseMod.getMod(prop.role_type) != 0:
#                        prop.role_filler += poseMod.getMod(prop.role_type)
#
#    # generate new file:
#    with open(changedFilepath, 'w') as cf:
#        string = ('---\n').join(str(fluent) for fluent in fluents)
#        cf.write(string)

    # load initial knowledge and spawn objects fluents
    initialPath = roslib.packages.get_pkg_dir('race_static_knowledge') + '/data/race_initial_knowledge.yaml'
    spawnPath = roslib.packages.get_pkg_dir('race_simulation_run') + '/data/spawn_objects.yaml'
    replacementsPath = roslib.packages.get_pkg_dir('race_simulation_run') + '/data/replace.yaml'
    #replacementsPath = roslib.packages.get_pkg_dir('race_simulation_run') + '/data/replace_simple.yaml'
    tempFilePath = roslib.packages.get_pkg_dir('race_simulation_run') + '/data/output.yaml'

    # load fluents:
    fluents = []
    with open(initialPath) as f:
        for fluent in yaml.load_all(f):
            fluents.append(fluent)
    with open(spawnPath) as f:
        for fluent in yaml.load_all(f):
            fluents.append(fluent)

    # load modifications:
    fluentPoseModifications = []
    with open(replacementsPath) as f:
        for poseMod in yaml.load_all(f):
            fluentPoseModifications.append(poseMod)

    # modify fluents poses:
    for fluent in fluents[:]:
        for poseMod in fluentPoseModifications:
            if (poseMod.Instance == fluent.Class_Instance[1]) \
                    or (fluent.Class_Instance[1] in poseMod.Attachments):
                for prop in fluent.properties:
                    if poseMod.getMod(prop.role_type) != 0:
                        prop.role_filler += poseMod.getMod(prop.role_type)

    # generate new file:
    with open(tempFilePath, 'w') as cf:
        string = ('---\n').join(str(fluent) for fluent in fluents)
        cf.write(string)
bsd-2-clause
-449,458,975,980,125,250
36.094675
142
0.580156
false
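The YAML shape those classes expect is implicit in __setstate__. A small round-trip sketch; the instance name and property values are made-up illustrations, and yaml.load without a Loader argument matches the older PyYAML this script targets:

# Sketch: loading a !Fluent document and applying one pose tweak by hand.
import yaml

doc = """\
!Fluent
Class_Instance: [mug, mug1]
StartTime: 0
FinishTime: 100
Properties:
- [hasXCoordinate, "float", 1.5]
"""
fluent = yaml.load(doc)                    # YAMLObject routes the !Fluent tag to __setstate__
fluent.properties[0].role_filler += 0.25   # what a matching FluentPoseModification would do
text = str(fluent)                         # re-serialized via toYamlString()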
nirmeshk/oh-mainline
vendor/packages/twisted/twisted/protocols/sip.py
20
41745
# -*- test-case-name: twisted.test.test_sip -*- # Copyright (c) Twisted Matrix Laboratories. # See LICENSE for details. """Session Initialization Protocol. Documented in RFC 2543. [Superceded by 3261] This module contains a deprecated implementation of HTTP Digest authentication. See L{twisted.cred.credentials} and L{twisted.cred._digest} for its new home. """ # system imports import socket, time, sys, random, warnings from zope.interface import implements, Interface # twisted imports from twisted.python import log, util from twisted.python.deprecate import deprecated from twisted.python.versions import Version from twisted.python.hashlib import md5 from twisted.internet import protocol, defer, reactor from twisted import cred import twisted.cred.error from twisted.cred.credentials import UsernameHashedPassword, UsernamePassword # sibling imports from twisted.protocols import basic PORT = 5060 # SIP headers have short forms shortHeaders = {"call-id": "i", "contact": "m", "content-encoding": "e", "content-length": "l", "content-type": "c", "from": "f", "subject": "s", "to": "t", "via": "v", } longHeaders = {} for k, v in shortHeaders.items(): longHeaders[v] = k del k, v statusCodes = { 100: "Trying", 180: "Ringing", 181: "Call Is Being Forwarded", 182: "Queued", 183: "Session Progress", 200: "OK", 300: "Multiple Choices", 301: "Moved Permanently", 302: "Moved Temporarily", 303: "See Other", 305: "Use Proxy", 380: "Alternative Service", 400: "Bad Request", 401: "Unauthorized", 402: "Payment Required", 403: "Forbidden", 404: "Not Found", 405: "Method Not Allowed", 406: "Not Acceptable", 407: "Proxy Authentication Required", 408: "Request Timeout", 409: "Conflict", # Not in RFC3261 410: "Gone", 411: "Length Required", # Not in RFC3261 413: "Request Entity Too Large", 414: "Request-URI Too Large", 415: "Unsupported Media Type", 416: "Unsupported URI Scheme", 420: "Bad Extension", 421: "Extension Required", 423: "Interval Too Brief", 480: "Temporarily Unavailable", 481: "Call/Transaction Does Not Exist", 482: "Loop Detected", 483: "Too Many Hops", 484: "Address Incomplete", 485: "Ambiguous", 486: "Busy Here", 487: "Request Terminated", 488: "Not Acceptable Here", 491: "Request Pending", 493: "Undecipherable", 500: "Internal Server Error", 501: "Not Implemented", 502: "Bad Gateway", # no donut 503: "Service Unavailable", 504: "Server Time-out", 505: "SIP Version not supported", 513: "Message Too Large", 600: "Busy Everywhere", 603: "Decline", 604: "Does not exist anywhere", 606: "Not Acceptable", } specialCases = { 'cseq': 'CSeq', 'call-id': 'Call-ID', 'www-authenticate': 'WWW-Authenticate', } def dashCapitalize(s): ''' Capitalize a string, making sure to treat - as a word seperator ''' return '-'.join([ x.capitalize() for x in s.split('-')]) def unq(s): if s[0] == s[-1] == '"': return s[1:-1] return s def DigestCalcHA1( pszAlg, pszUserName, pszRealm, pszPassword, pszNonce, pszCNonce, ): m = md5() m.update(pszUserName) m.update(":") m.update(pszRealm) m.update(":") m.update(pszPassword) HA1 = m.digest() if pszAlg == "md5-sess": m = md5() m.update(HA1) m.update(":") m.update(pszNonce) m.update(":") m.update(pszCNonce) HA1 = m.digest() return HA1.encode('hex') DigestCalcHA1 = deprecated(Version("Twisted", 9, 0, 0))(DigestCalcHA1) def DigestCalcResponse( HA1, pszNonce, pszNonceCount, pszCNonce, pszQop, pszMethod, pszDigestUri, pszHEntity, ): m = md5() m.update(pszMethod) m.update(":") m.update(pszDigestUri) if pszQop == "auth-int": m.update(":") m.update(pszHEntity) HA2 = m.digest().encode('hex') m 
= md5() m.update(HA1) m.update(":") m.update(pszNonce) m.update(":") if pszNonceCount and pszCNonce: # pszQop: m.update(pszNonceCount) m.update(":") m.update(pszCNonce) m.update(":") m.update(pszQop) m.update(":") m.update(HA2) hash = m.digest().encode('hex') return hash DigestCalcResponse = deprecated(Version("Twisted", 9, 0, 0))(DigestCalcResponse) _absent = object() class Via(object): """ A L{Via} is a SIP Via header, representing a segment of the path taken by the request. See RFC 3261, sections 8.1.1.7, 18.2.2, and 20.42. @ivar transport: Network protocol used for this leg. (Probably either "TCP" or "UDP".) @type transport: C{str} @ivar branch: Unique identifier for this request. @type branch: C{str} @ivar host: Hostname or IP for this leg. @type host: C{str} @ivar port: Port used for this leg. @type port C{int}, or None. @ivar rportRequested: Whether to request RFC 3581 client processing or not. @type rportRequested: C{bool} @ivar rportValue: Servers wishing to honor requests for RFC 3581 processing should set this parameter to the source port the request was received from. @type rportValue: C{int}, or None. @ivar ttl: Time-to-live for requests on multicast paths. @type ttl: C{int}, or None. @ivar maddr: The destination multicast address, if any. @type maddr: C{str}, or None. @ivar hidden: Obsolete in SIP 2.0. @type hidden: C{bool} @ivar otherParams: Any other parameters in the header. @type otherParams: C{dict} """ def __init__(self, host, port=PORT, transport="UDP", ttl=None, hidden=False, received=None, rport=_absent, branch=None, maddr=None, **kw): """ Set parameters of this Via header. All arguments correspond to attributes of the same name. To maintain compatibility with old SIP code, the 'rport' argument is used to determine the values of C{rportRequested} and C{rportValue}. If None, C{rportRequested} is set to True. (The deprecated method for doing this is to pass True.) If an integer, C{rportValue} is set to the given value. Any arguments not explicitly named here are collected into the C{otherParams} dict. """ self.transport = transport self.host = host self.port = port self.ttl = ttl self.hidden = hidden self.received = received if rport is True: warnings.warn( "rport=True is deprecated since Twisted 9.0.", DeprecationWarning, stacklevel=2) self.rportValue = None self.rportRequested = True elif rport is None: self.rportValue = None self.rportRequested = True elif rport is _absent: self.rportValue = None self.rportRequested = False else: self.rportValue = rport self.rportRequested = False self.branch = branch self.maddr = maddr self.otherParams = kw def _getrport(self): """ Returns the rport value expected by the old SIP code. """ if self.rportRequested == True: return True elif self.rportValue is not None: return self.rportValue else: return None def _setrport(self, newRPort): """ L{Base._fixupNAT} sets C{rport} directly, so this method sets C{rportValue} based on that. @param newRPort: The new rport value. @type newRPort: C{int} """ self.rportValue = newRPort self.rportRequested = False rport = property(_getrport, _setrport) def toString(self): """ Serialize this header for use in a request or response. 
""" s = "SIP/2.0/%s %s:%s" % (self.transport, self.host, self.port) if self.hidden: s += ";hidden" for n in "ttl", "branch", "maddr", "received": value = getattr(self, n) if value is not None: s += ";%s=%s" % (n, value) if self.rportRequested: s += ";rport" elif self.rportValue is not None: s += ";rport=%s" % (self.rport,) etc = self.otherParams.items() etc.sort() for k, v in etc: if v is None: s += ";" + k else: s += ";%s=%s" % (k, v) return s def parseViaHeader(value): """ Parse a Via header. @return: The parsed version of this header. @rtype: L{Via} """ parts = value.split(";") sent, params = parts[0], parts[1:] protocolinfo, by = sent.split(" ", 1) by = by.strip() result = {} pname, pversion, transport = protocolinfo.split("/") if pname != "SIP" or pversion != "2.0": raise ValueError, "wrong protocol or version: %r" % value result["transport"] = transport if ":" in by: host, port = by.split(":") result["port"] = int(port) result["host"] = host else: result["host"] = by for p in params: # it's the comment-striping dance! p = p.strip().split(" ", 1) if len(p) == 1: p, comment = p[0], "" else: p, comment = p if p == "hidden": result["hidden"] = True continue parts = p.split("=", 1) if len(parts) == 1: name, value = parts[0], None else: name, value = parts if name in ("rport", "ttl"): value = int(value) result[name] = value return Via(**result) class URL: """A SIP URL.""" def __init__(self, host, username=None, password=None, port=None, transport=None, usertype=None, method=None, ttl=None, maddr=None, tag=None, other=None, headers=None): self.username = username self.host = host self.password = password self.port = port self.transport = transport self.usertype = usertype self.method = method self.tag = tag self.ttl = ttl self.maddr = maddr if other == None: self.other = [] else: self.other = other if headers == None: self.headers = {} else: self.headers = headers def toString(self): l = []; w = l.append w("sip:") if self.username != None: w(self.username) if self.password != None: w(":%s" % self.password) w("@") w(self.host) if self.port != None: w(":%d" % self.port) if self.usertype != None: w(";user=%s" % self.usertype) for n in ("transport", "ttl", "maddr", "method", "tag"): v = getattr(self, n) if v != None: w(";%s=%s" % (n, v)) for v in self.other: w(";%s" % v) if self.headers: w("?") w("&".join([("%s=%s" % (specialCases.get(h) or dashCapitalize(h), v)) for (h, v) in self.headers.items()])) return "".join(l) def __str__(self): return self.toString() def __repr__(self): return '<URL %s:%s@%s:%r/%s>' % (self.username, self.password, self.host, self.port, self.transport) def parseURL(url, host=None, port=None): """Return string into URL object. URIs are of of form 'sip:user@example.com'. """ d = {} if not url.startswith("sip:"): raise ValueError("unsupported scheme: " + url[:4]) parts = url[4:].split(";") userdomain, params = parts[0], parts[1:] udparts = userdomain.split("@", 1) if len(udparts) == 2: userpass, hostport = udparts upparts = userpass.split(":", 1) if len(upparts) == 1: d["username"] = upparts[0] else: d["username"] = upparts[0] d["password"] = upparts[1] else: hostport = udparts[0] hpparts = hostport.split(":", 1) if len(hpparts) == 1: d["host"] = hpparts[0] else: d["host"] = hpparts[0] d["port"] = int(hpparts[1]) if host != None: d["host"] = host if port != None: d["port"] = port for p in params: if p == params[-1] and "?" 
in p: d["headers"] = h = {} p, headers = p.split("?", 1) for header in headers.split("&"): k, v = header.split("=") h[k] = v nv = p.split("=", 1) if len(nv) == 1: d.setdefault("other", []).append(p) continue name, value = nv if name == "user": d["usertype"] = value elif name in ("transport", "ttl", "maddr", "method", "tag"): if name == "ttl": value = int(value) d[name] = value else: d.setdefault("other", []).append(p) return URL(**d) def cleanRequestURL(url): """Clean a URL from a Request line.""" url.transport = None url.maddr = None url.ttl = None url.headers = {} def parseAddress(address, host=None, port=None, clean=0): """Return (name, uri, params) for From/To/Contact header. @param clean: remove unnecessary info, usually for From and To headers. """ address = address.strip() # simple 'sip:foo' case if address.startswith("sip:"): return "", parseURL(address, host=host, port=port), {} params = {} name, url = address.split("<", 1) name = name.strip() if name.startswith('"'): name = name[1:] if name.endswith('"'): name = name[:-1] url, paramstring = url.split(">", 1) url = parseURL(url, host=host, port=port) paramstring = paramstring.strip() if paramstring: for l in paramstring.split(";"): if not l: continue k, v = l.split("=") params[k] = v if clean: # rfc 2543 6.21 url.ttl = None url.headers = {} url.transport = None url.maddr = None return name, url, params class SIPError(Exception): def __init__(self, code, phrase=None): if phrase is None: phrase = statusCodes[code] Exception.__init__(self, "SIP error (%d): %s" % (code, phrase)) self.code = code self.phrase = phrase class RegistrationError(SIPError): """Registration was not possible.""" class Message: """A SIP message.""" length = None def __init__(self): self.headers = util.OrderedDict() # map name to list of values self.body = "" self.finished = 0 def addHeader(self, name, value): name = name.lower() name = longHeaders.get(name, name) if name == "content-length": self.length = int(value) self.headers.setdefault(name,[]).append(value) def bodyDataReceived(self, data): self.body += data def creationFinished(self): if (self.length != None) and (self.length != len(self.body)): raise ValueError, "wrong body length" self.finished = 1 def toString(self): s = "%s\r\n" % self._getHeaderLine() for n, vs in self.headers.items(): for v in vs: s += "%s: %s\r\n" % (specialCases.get(n) or dashCapitalize(n), v) s += "\r\n" s += self.body return s def _getHeaderLine(self): raise NotImplementedError class Request(Message): """A Request for a URI""" def __init__(self, method, uri, version="SIP/2.0"): Message.__init__(self) self.method = method if isinstance(uri, URL): self.uri = uri else: self.uri = parseURL(uri) cleanRequestURL(self.uri) def __repr__(self): return "<SIP Request %d:%s %s>" % (id(self), self.method, self.uri.toString()) def _getHeaderLine(self): return "%s %s SIP/2.0" % (self.method, self.uri.toString()) class Response(Message): """A Response to a URI Request""" def __init__(self, code, phrase=None, version="SIP/2.0"): Message.__init__(self) self.code = code if phrase == None: phrase = statusCodes[code] self.phrase = phrase def __repr__(self): return "<SIP Response %d:%s>" % (id(self), self.code) def _getHeaderLine(self): return "SIP/2.0 %s %s" % (self.code, self.phrase) class MessagesParser(basic.LineReceiver): """A SIP messages parser. Expects dataReceived, dataDone repeatedly, in that order. Shouldn't be connected to actual transport. 
""" version = "SIP/2.0" acceptResponses = 1 acceptRequests = 1 state = "firstline" # or "headers", "body" or "invalid" debug = 0 def __init__(self, messageReceivedCallback): self.messageReceived = messageReceivedCallback self.reset() def reset(self, remainingData=""): self.state = "firstline" self.length = None # body length self.bodyReceived = 0 # how much of the body we received self.message = None self.setLineMode(remainingData) def invalidMessage(self): self.state = "invalid" self.setRawMode() def dataDone(self): # clear out any buffered data that may be hanging around self.clearLineBuffer() if self.state == "firstline": return if self.state != "body": self.reset() return if self.length == None: # no content-length header, so end of data signals message done self.messageDone() elif self.length < self.bodyReceived: # aborted in the middle self.reset() else: # we have enough data and message wasn't finished? something is wrong raise RuntimeError, "this should never happen" def dataReceived(self, data): try: basic.LineReceiver.dataReceived(self, data) except: log.err() self.invalidMessage() def handleFirstLine(self, line): """Expected to create self.message.""" raise NotImplementedError def lineLengthExceeded(self, line): self.invalidMessage() def lineReceived(self, line): if self.state == "firstline": while line.startswith("\n") or line.startswith("\r"): line = line[1:] if not line: return try: a, b, c = line.split(" ", 2) except ValueError: self.invalidMessage() return if a == "SIP/2.0" and self.acceptResponses: # response try: code = int(b) except ValueError: self.invalidMessage() return self.message = Response(code, c) elif c == "SIP/2.0" and self.acceptRequests: self.message = Request(a, b) else: self.invalidMessage() return self.state = "headers" return else: assert self.state == "headers" if line: # XXX support multi-line headers try: name, value = line.split(":", 1) except ValueError: self.invalidMessage() return self.message.addHeader(name, value.lstrip()) if name.lower() == "content-length": try: self.length = int(value.lstrip()) except ValueError: self.invalidMessage() return else: # CRLF, we now have message body until self.length bytes, # or if no length was given, until there is no more data # from the connection sending us data. 
self.state = "body" if self.length == 0: self.messageDone() return self.setRawMode() def messageDone(self, remainingData=""): assert self.state == "body" self.message.creationFinished() self.messageReceived(self.message) self.reset(remainingData) def rawDataReceived(self, data): assert self.state in ("body", "invalid") if self.state == "invalid": return if self.length == None: self.message.bodyDataReceived(data) else: dataLen = len(data) expectedLen = self.length - self.bodyReceived if dataLen > expectedLen: self.message.bodyDataReceived(data[:expectedLen]) self.messageDone(data[expectedLen:]) return else: self.bodyReceived += dataLen self.message.bodyDataReceived(data) if self.bodyReceived == self.length: self.messageDone() class Base(protocol.DatagramProtocol): """Base class for SIP clients and servers.""" PORT = PORT debug = False def __init__(self): self.messages = [] self.parser = MessagesParser(self.addMessage) def addMessage(self, msg): self.messages.append(msg) def datagramReceived(self, data, addr): self.parser.dataReceived(data) self.parser.dataDone() for m in self.messages: self._fixupNAT(m, addr) if self.debug: log.msg("Received %r from %r" % (m.toString(), addr)) if isinstance(m, Request): self.handle_request(m, addr) else: self.handle_response(m, addr) self.messages[:] = [] def _fixupNAT(self, message, (srcHost, srcPort)): # RFC 2543 6.40.2, senderVia = parseViaHeader(message.headers["via"][0]) if senderVia.host != srcHost: senderVia.received = srcHost if senderVia.port != srcPort: senderVia.rport = srcPort message.headers["via"][0] = senderVia.toString() elif senderVia.rport == True: senderVia.received = srcHost senderVia.rport = srcPort message.headers["via"][0] = senderVia.toString() def deliverResponse(self, responseMessage): """Deliver response. Destination is based on topmost Via header.""" destVia = parseViaHeader(responseMessage.headers["via"][0]) # XXX we don't do multicast yet host = destVia.received or destVia.host port = destVia.rport or destVia.port or self.PORT destAddr = URL(host=host, port=port) self.sendMessage(destAddr, responseMessage) def responseFromRequest(self, code, request): """Create a response to a request message.""" response = Response(code) for name in ("via", "to", "from", "call-id", "cseq"): response.headers[name] = request.headers.get(name, [])[:] return response def sendMessage(self, destURL, message): """Send a message. @param destURL: C{URL}. This should be a *physical* URL, not a logical one. @param message: The message to send. """ if destURL.transport not in ("udp", None): raise RuntimeError, "only UDP currently supported" if self.debug: log.msg("Sending %r to %r" % (message.toString(), destURL)) self.transport.write(message.toString(), (destURL.host, destURL.port or self.PORT)) def handle_request(self, message, addr): """Override to define behavior for requests received @type message: C{Message} @type addr: C{tuple} """ raise NotImplementedError def handle_response(self, message, addr): """Override to define behavior for responses received. @type message: C{Message} @type addr: C{tuple} """ raise NotImplementedError class IContact(Interface): """A user of a registrar or proxy""" class Registration: def __init__(self, secondsToExpiry, contactURL): self.secondsToExpiry = secondsToExpiry self.contactURL = contactURL class IRegistry(Interface): """Allows registration of logical->physical URL mapping.""" def registerAddress(domainURL, logicalURL, physicalURL): """Register the physical address of a logical URL. 
        @return: Deferred of C{Registration} or failure with RegistrationError.
        """

    def unregisterAddress(domainURL, logicalURL, physicalURL):
        """Unregister the physical address of a logical URL.

        @return: Deferred of C{Registration} or failure with RegistrationError.
        """

    def getRegistrationInfo(logicalURL):
        """Get registration info for logical URL.

        @return: Deferred of C{Registration} object or failure of LookupError.
        """


class ILocator(Interface):
    """Allow looking up physical address for logical URL."""

    def getAddress(logicalURL):
        """Return physical URL of server for logical URL of user.

        @param logicalURL: a logical C{URL}.
        @return: Deferred which becomes URL or fails with LookupError.
        """


class Proxy(Base):
    """SIP proxy."""

    PORT = PORT

    locator = None # object implementing ILocator

    def __init__(self, host=None, port=PORT):
        """Create new instance.

        @param host: our hostname/IP as set in Via headers.
        @param port: our port as set in Via headers.
        """
        self.host = host or socket.getfqdn()
        self.port = port
        Base.__init__(self)

    def getVia(self):
        """Return value of Via header for this proxy."""
        return Via(host=self.host, port=self.port)

    def handle_request(self, message, addr):
        # send immediate 100/trying message before processing
        #self.deliverResponse(self.responseFromRequest(100, message))
        f = getattr(self, "handle_%s_request" % message.method, None)
        if f is None:
            f = self.handle_request_default
        try:
            d = f(message, addr)
        except SIPError, e:
            self.deliverResponse(self.responseFromRequest(e.code, message))
        except:
            log.err()
            self.deliverResponse(self.responseFromRequest(500, message))
        else:
            if d is not None:
                d.addErrback(lambda e:
                    self.deliverResponse(self.responseFromRequest(e.code, message))
                )

    def handle_request_default(self, message, (srcHost, srcPort)):
        """Default request handler.

        Default behaviour for OPTIONS and unknown methods for proxies
        is to forward the message on to the client.

        Since at the moment we are a stateless proxy, that's basically
        everything.
        """
        def _mungContactHeader(uri, message):
            message.headers['contact'][0] = uri.toString()
            return self.sendMessage(uri, message)

        viaHeader = self.getVia()
        if viaHeader.toString() in message.headers["via"]:
            # must be a loop, so drop message
            log.msg("Dropping looped message.")
            return

        message.headers["via"].insert(0, viaHeader.toString())
        name, uri, tags = parseAddress(message.headers["to"][0], clean=1)

        # this is broken and needs refactoring to use cred
        d = self.locator.getAddress(uri)
        d.addCallback(self.sendMessage, message)
        d.addErrback(self._cantForwardRequest, message)

    def _cantForwardRequest(self, error, message):
        error.trap(LookupError)
        del message.headers["via"][0] # this'll be us
        self.deliverResponse(self.responseFromRequest(404, message))

    def deliverResponse(self, responseMessage):
        """Deliver response. Destination is based on topmost Via header."""
        destVia = parseViaHeader(responseMessage.headers["via"][0])
        # XXX we don't do multicast yet
        host = destVia.received or destVia.host
        port = destVia.rport or destVia.port or self.PORT
        destAddr = URL(host=host, port=port)
        self.sendMessage(destAddr, responseMessage)

    def responseFromRequest(self, code, request):
        """Create a response to a request message."""
        response = Response(code)
        for name in ("via", "to", "from", "call-id", "cseq"):
            response.headers[name] = request.headers.get(name, [])[:]
        return response

    def handle_response(self, message, addr):
        """Default response handler."""
        v = parseViaHeader(message.headers["via"][0])
        if (v.host, v.port) != (self.host, self.port):
            # we got a message not intended for us?
# XXX note this check breaks if we have multiple external IPs # yay for suck protocols log.msg("Dropping incorrectly addressed message") return del message.headers["via"][0] if not message.headers["via"]: # this message is addressed to us self.gotResponse(message, addr) return self.deliverResponse(message) def gotResponse(self, message, addr): """Called with responses that are addressed at this server.""" pass class IAuthorizer(Interface): def getChallenge(peer): """Generate a challenge the client may respond to. @type peer: C{tuple} @param peer: The client's address @rtype: C{str} @return: The challenge string """ def decode(response): """Create a credentials object from the given response. @type response: C{str} """ class BasicAuthorizer: """Authorizer for insecure Basic (base64-encoded plaintext) authentication. This form of authentication is broken and insecure. Do not use it. """ implements(IAuthorizer) def __init__(self): """ This method exists solely to issue a deprecation warning. """ warnings.warn( "twisted.protocols.sip.BasicAuthorizer was deprecated " "in Twisted 9.0.0", category=DeprecationWarning, stacklevel=2) def getChallenge(self, peer): return None def decode(self, response): # At least one SIP client improperly pads its Base64 encoded messages for i in range(3): try: creds = (response + ('=' * i)).decode('base64') except: pass else: break else: # Totally bogus raise SIPError(400) p = creds.split(':', 1) if len(p) == 2: return UsernamePassword(*p) raise SIPError(400) class DigestedCredentials(UsernameHashedPassword): """Yet Another Simple Digest-MD5 authentication scheme""" def __init__(self, username, fields, challenges): warnings.warn( "twisted.protocols.sip.DigestedCredentials was deprecated " "in Twisted 9.0.0", category=DeprecationWarning, stacklevel=2) self.username = username self.fields = fields self.challenges = challenges def checkPassword(self, password): method = 'REGISTER' response = self.fields.get('response') uri = self.fields.get('uri') nonce = self.fields.get('nonce') cnonce = self.fields.get('cnonce') nc = self.fields.get('nc') algo = self.fields.get('algorithm', 'MD5') qop = self.fields.get('qop-options', 'auth') opaque = self.fields.get('opaque') if opaque not in self.challenges: return False del self.challenges[opaque] user, domain = self.username.split('@', 1) if uri is None: uri = 'sip:' + domain expected = DigestCalcResponse( DigestCalcHA1(algo, user, domain, password, nonce, cnonce), nonce, nc, cnonce, qop, method, uri, None, ) return expected == response class DigestAuthorizer: CHALLENGE_LIFETIME = 15 implements(IAuthorizer) def __init__(self): warnings.warn( "twisted.protocols.sip.DigestAuthorizer was deprecated " "in Twisted 9.0.0", category=DeprecationWarning, stacklevel=2) self.outstanding = {} def generateNonce(self): c = tuple([random.randrange(sys.maxint) for _ in range(3)]) c = '%d%d%d' % c return c def generateOpaque(self): return str(random.randrange(sys.maxint)) def getChallenge(self, peer): c = self.generateNonce() o = self.generateOpaque() self.outstanding[o] = c return ','.join(( 'nonce="%s"' % c, 'opaque="%s"' % o, 'qop-options="auth"', 'algorithm="MD5"', )) def decode(self, response): response = ' '.join(response.splitlines()) parts = response.split(',') auth = dict([(k.strip(), unq(v.strip())) for (k, v) in [p.split('=', 1) for p in parts]]) try: username = auth['username'] except KeyError: raise SIPError(401) try: return DigestedCredentials(username, auth, self.outstanding) except: raise SIPError(400) class RegisterProxy(Proxy): 
"""A proxy that allows registration for a specific domain. Unregistered users won't be handled. """ portal = None registry = None # should implement IRegistry authorizers = {} def __init__(self, *args, **kw): Proxy.__init__(self, *args, **kw) self.liveChallenges = {} if "digest" not in self.authorizers: self.authorizers["digest"] = DigestAuthorizer() def handle_ACK_request(self, message, (host, port)): # XXX # ACKs are a client's way of indicating they got the last message # Responding to them is not a good idea. # However, we should keep track of terminal messages and re-transmit # if no ACK is received. pass def handle_REGISTER_request(self, message, (host, port)): """Handle a registration request. Currently registration is not proxied. """ if self.portal is None: # There is no portal. Let anyone in. self.register(message, host, port) else: # There is a portal. Check for credentials. if not message.headers.has_key("authorization"): return self.unauthorized(message, host, port) else: return self.login(message, host, port) def unauthorized(self, message, host, port): m = self.responseFromRequest(401, message) for (scheme, auth) in self.authorizers.iteritems(): chal = auth.getChallenge((host, port)) if chal is None: value = '%s realm="%s"' % (scheme.title(), self.host) else: value = '%s %s,realm="%s"' % (scheme.title(), chal, self.host) m.headers.setdefault('www-authenticate', []).append(value) self.deliverResponse(m) def login(self, message, host, port): parts = message.headers['authorization'][0].split(None, 1) a = self.authorizers.get(parts[0].lower()) if a: try: c = a.decode(parts[1]) except SIPError: raise except: log.err() self.deliverResponse(self.responseFromRequest(500, message)) else: c.username += '@' + self.host self.portal.login(c, None, IContact ).addCallback(self._cbLogin, message, host, port ).addErrback(self._ebLogin, message, host, port ).addErrback(log.err ) else: self.deliverResponse(self.responseFromRequest(501, message)) def _cbLogin(self, (i, a, l), message, host, port): # It's stateless, matey. What a joke. self.register(message, host, port) def _ebLogin(self, failure, message, host, port): failure.trap(cred.error.UnauthorizedLogin) self.unauthorized(message, host, port) def register(self, message, host, port): """Allow all users to register""" name, toURL, params = parseAddress(message.headers["to"][0], clean=1) contact = None if message.headers.has_key("contact"): contact = message.headers["contact"][0] if message.headers.get("expires", [None])[0] == "0": self.unregister(message, toURL, contact) else: # XXX Check expires on appropriate URL, and pass it to registry # instead of having registry hardcode it. 
if contact is not None: name, contactURL, params = parseAddress(contact, host=host, port=port) d = self.registry.registerAddress(message.uri, toURL, contactURL) else: d = self.registry.getRegistrationInfo(toURL) d.addCallbacks(self._cbRegister, self._ebRegister, callbackArgs=(message,), errbackArgs=(message,) ) def _cbRegister(self, registration, message): response = self.responseFromRequest(200, message) if registration.contactURL != None: response.addHeader("contact", registration.contactURL.toString()) response.addHeader("expires", "%d" % registration.secondsToExpiry) response.addHeader("content-length", "0") self.deliverResponse(response) def _ebRegister(self, error, message): error.trap(RegistrationError, LookupError) # XXX return error message, and alter tests to deal with # this, currently tests assume no message sent on failure def unregister(self, message, toURL, contact): try: expires = int(message.headers["expires"][0]) except ValueError: self.deliverResponse(self.responseFromRequest(400, message)) else: if expires == 0: if contact == "*": contactURL = "*" else: name, contactURL, params = parseAddress(contact) d = self.registry.unregisterAddress(message.uri, toURL, contactURL) d.addCallback(self._cbUnregister, message ).addErrback(self._ebUnregister, message ) def _cbUnregister(self, registration, message): msg = self.responseFromRequest(200, message) msg.headers.setdefault('contact', []).append(registration.contactURL.toString()) msg.addHeader("expires", "0") self.deliverResponse(msg) def _ebUnregister(self, registration, message): pass class InMemoryRegistry: """A simplistic registry for a specific domain.""" implements(IRegistry, ILocator) def __init__(self, domain): self.domain = domain # the domain we handle registration for self.users = {} # map username to (IDelayedCall for expiry, address URI) def getAddress(self, userURI): if userURI.host != self.domain: return defer.fail(LookupError("unknown domain")) if self.users.has_key(userURI.username): dc, url = self.users[userURI.username] return defer.succeed(url) else: return defer.fail(LookupError("no such user")) def getRegistrationInfo(self, userURI): if userURI.host != self.domain: return defer.fail(LookupError("unknown domain")) if self.users.has_key(userURI.username): dc, url = self.users[userURI.username] return defer.succeed(Registration(int(dc.getTime() - time.time()), url)) else: return defer.fail(LookupError("no such user")) def _expireRegistration(self, username): try: dc, url = self.users[username] except KeyError: return defer.fail(LookupError("no such user")) else: dc.cancel() del self.users[username] return defer.succeed(Registration(0, url)) def registerAddress(self, domainURL, logicalURL, physicalURL): if domainURL.host != self.domain: log.msg("Registration for domain we don't handle.") return defer.fail(RegistrationError(404)) if logicalURL.host != self.domain: log.msg("Registration for domain we don't handle.") return defer.fail(RegistrationError(404)) if self.users.has_key(logicalURL.username): dc, old = self.users[logicalURL.username] dc.reset(3600) else: dc = reactor.callLater(3600, self._expireRegistration, logicalURL.username) log.msg("Registered %s at %s" % (logicalURL.toString(), physicalURL.toString())) self.users[logicalURL.username] = (dc, physicalURL) return defer.succeed(Registration(int(dc.getTime() - time.time()), physicalURL)) def unregisterAddress(self, domainURL, logicalURL, physicalURL): return self._expireRegistration(logicalURL.username)
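

# --- Editor's illustrative sketch (not part of the original module) ---
# A minimal demonstration of how parseAddress() above splits a From/To
# header value into a display name, a URL object, and a parameter dict.
# The address used here is hypothetical.
if __name__ == '__main__':
    name, url, params = parseAddress('"Alice" <sip:alice@example.com>;tag=abc')
    print name            # Alice (surrounding quotes are stripped)
    print url.toString()  # sip:alice@example.com
    print params          # {'tag': 'abc'}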
agpl-3.0
6,215,315,143,037,585,000
30.293103
119
0.569362
false
Grarak/grakernel-msm8930
scripts/gcc-wrapper.py
364
3936
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2011-2012, The Linux Foundation. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#     * Redistributions of source code must retain the above copyright
#       notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above copyright
#       notice, this list of conditions and the following disclaimer in the
#       documentation and/or other materials provided with the distribution.
#     * Neither the name of The Linux Foundation nor
#       the names of its contributors may be used to endorse or promote
#       products derived from this software without specific prior written
#       permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NON-INFRINGEMENT ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

# Invoke gcc, looking for warnings, and causing a failure if there are
# non-whitelisted warnings.

import errno
import re
import os
import sys
import subprocess

# Note that gcc uses unicode, which may depend on the locale.  TODO:
# force LANG to be set to en_US.UTF-8 to get consistent warnings.

allowed_warnings = set([
    "alignment.c:327",
    "mmu.c:602",
    "return_address.c:62",
    "swab.h:49",
    "SemaLambda.cpp:946",
    "CGObjCGNU.cpp:1414",
    "BugReporter.h:146",
    "RegionStore.cpp:1904",
    "SymbolManager.cpp:484",
    "RewriteObjCFoundationAPI.cpp:737",
    "RewriteObjCFoundationAPI.cpp:696",
    "CommentParser.cpp:394",
    "CommentParser.cpp:391",
    "CommentParser.cpp:356",
    "LegalizeDAG.cpp:3646",
    "IRBuilder.h:844",
    "DataLayout.cpp:193",
    "transport.c:653",
    "xt_socket.c:307",
    "xt_socket.c:161",
    "inet_hashtables.h:356",
    "xc4000.c:1049",
    "xc4000.c:1063",
 ])

# Capture the name of the object file, so we can find it.
ofile = None

warning_re = re.compile(r'''(.*/|)([^/]+\.[a-z]+:\d+):(\d+:)? warning:''')
def interpret_warning(line):
    """Decode the message from gcc.  The messages we care about have a filename, and a warning"""
    line = line.rstrip('\n')
    m = warning_re.match(line)
    if m and m.group(2) not in allowed_warnings:
        print "error, forbidden warning:", m.group(2)

        # If there is a warning, remove any object if it exists.
        if ofile:
            try:
                os.remove(ofile)
            except OSError:
                pass
        sys.exit(1)

def run_gcc():
    args = sys.argv[1:]
    # Look for -o
    try:
        i = args.index('-o')
        global ofile
        ofile = args[i+1]
    except (ValueError, IndexError):
        pass

    compiler = sys.argv[0]

    try:
        proc = subprocess.Popen(args, stderr=subprocess.PIPE)
        for line in proc.stderr:
            print line,
            interpret_warning(line)

        result = proc.wait()
    except OSError as e:
        result = e.errno
        if result == errno.ENOENT:
            print args[0] + ':', e.strerror
            print 'Is your PATH set correctly?'
        else:
            print ' '.join(args), str(e)

    return result

if __name__ == '__main__':
    status = run_gcc()
    sys.exit(status)
gpl-2.0
-4,037,045,619,782,924,300
32.355932
97
0.663872
false
landryb/QGIS
python/plugins/processing/algs/exampleprovider/ProcessingExampleProviderPlugin.py
26
1655
# -*- coding: utf-8 -*- """ *************************************************************************** __init__.py --------------------- Date : July 2013 Copyright : (C) 2013 by Victor Olaya Email : volayaf at gmail dot com *************************************************************************** * * * This program is free software; you can redistribute it and/or modify * * it under the terms of the GNU General Public License as published by * * the Free Software Foundation; either version 2 of the License, or * * (at your option) any later version. * * * *************************************************************************** """ __author__ = 'Victor Olaya' __date__ = 'July 2013' __copyright__ = '(C) 2013, Victor Olaya' # This will get replaced with a git SHA1 when you do a git archive __revision__ = '$Format:%H$' import os import sys import inspect from processing.core.Processing import Processing from exampleprovider.ExampleAlgorithmProvider import ExampleAlgorithmProvider cmd_folder = os.path.split(inspect.getfile(inspect.currentframe()))[0] if cmd_folder not in sys.path: sys.path.insert(0, cmd_folder) class ProcessingExampleProviderPlugin: def __init__(self): self.provider = ExampleAlgorithmProvider() def initGui(self): Processing.addProvider(self.provider) def unload(self): Processing.removeProvider(self.provider)
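

# Editor's note (illustrative, not part of the original module): QGIS loads
# the plugin through the package's classFactory() hook and then drives it
# through the two methods above, roughly like this:
#
#     plugin = ProcessingExampleProviderPlugin()
#     plugin.initGui()   # registers ExampleAlgorithmProvider with Processing
#     ...                # the provider's algorithms are now available
#     plugin.unload()    # removes the provider again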
gpl-2.0
6,290,126,447,250,553,000
32.1
77
0.49426
false
MalloyPower/parsing-python
front-end/testsuite-python-lib/Python-2.6/Lib/curses/has_key.py
81
5633
# # Emulation of has_key() function for platforms that don't use ncurses # import _curses # Table mapping curses keys to the terminfo capability name _capability_names = { _curses.KEY_A1: 'ka1', _curses.KEY_A3: 'ka3', _curses.KEY_B2: 'kb2', _curses.KEY_BACKSPACE: 'kbs', _curses.KEY_BEG: 'kbeg', _curses.KEY_BTAB: 'kcbt', _curses.KEY_C1: 'kc1', _curses.KEY_C3: 'kc3', _curses.KEY_CANCEL: 'kcan', _curses.KEY_CATAB: 'ktbc', _curses.KEY_CLEAR: 'kclr', _curses.KEY_CLOSE: 'kclo', _curses.KEY_COMMAND: 'kcmd', _curses.KEY_COPY: 'kcpy', _curses.KEY_CREATE: 'kcrt', _curses.KEY_CTAB: 'kctab', _curses.KEY_DC: 'kdch1', _curses.KEY_DL: 'kdl1', _curses.KEY_DOWN: 'kcud1', _curses.KEY_EIC: 'krmir', _curses.KEY_END: 'kend', _curses.KEY_ENTER: 'kent', _curses.KEY_EOL: 'kel', _curses.KEY_EOS: 'ked', _curses.KEY_EXIT: 'kext', _curses.KEY_F0: 'kf0', _curses.KEY_F1: 'kf1', _curses.KEY_F10: 'kf10', _curses.KEY_F11: 'kf11', _curses.KEY_F12: 'kf12', _curses.KEY_F13: 'kf13', _curses.KEY_F14: 'kf14', _curses.KEY_F15: 'kf15', _curses.KEY_F16: 'kf16', _curses.KEY_F17: 'kf17', _curses.KEY_F18: 'kf18', _curses.KEY_F19: 'kf19', _curses.KEY_F2: 'kf2', _curses.KEY_F20: 'kf20', _curses.KEY_F21: 'kf21', _curses.KEY_F22: 'kf22', _curses.KEY_F23: 'kf23', _curses.KEY_F24: 'kf24', _curses.KEY_F25: 'kf25', _curses.KEY_F26: 'kf26', _curses.KEY_F27: 'kf27', _curses.KEY_F28: 'kf28', _curses.KEY_F29: 'kf29', _curses.KEY_F3: 'kf3', _curses.KEY_F30: 'kf30', _curses.KEY_F31: 'kf31', _curses.KEY_F32: 'kf32', _curses.KEY_F33: 'kf33', _curses.KEY_F34: 'kf34', _curses.KEY_F35: 'kf35', _curses.KEY_F36: 'kf36', _curses.KEY_F37: 'kf37', _curses.KEY_F38: 'kf38', _curses.KEY_F39: 'kf39', _curses.KEY_F4: 'kf4', _curses.KEY_F40: 'kf40', _curses.KEY_F41: 'kf41', _curses.KEY_F42: 'kf42', _curses.KEY_F43: 'kf43', _curses.KEY_F44: 'kf44', _curses.KEY_F45: 'kf45', _curses.KEY_F46: 'kf46', _curses.KEY_F47: 'kf47', _curses.KEY_F48: 'kf48', _curses.KEY_F49: 'kf49', _curses.KEY_F5: 'kf5', _curses.KEY_F50: 'kf50', _curses.KEY_F51: 'kf51', _curses.KEY_F52: 'kf52', _curses.KEY_F53: 'kf53', _curses.KEY_F54: 'kf54', _curses.KEY_F55: 'kf55', _curses.KEY_F56: 'kf56', _curses.KEY_F57: 'kf57', _curses.KEY_F58: 'kf58', _curses.KEY_F59: 'kf59', _curses.KEY_F6: 'kf6', _curses.KEY_F60: 'kf60', _curses.KEY_F61: 'kf61', _curses.KEY_F62: 'kf62', _curses.KEY_F63: 'kf63', _curses.KEY_F7: 'kf7', _curses.KEY_F8: 'kf8', _curses.KEY_F9: 'kf9', _curses.KEY_FIND: 'kfnd', _curses.KEY_HELP: 'khlp', _curses.KEY_HOME: 'khome', _curses.KEY_IC: 'kich1', _curses.KEY_IL: 'kil1', _curses.KEY_LEFT: 'kcub1', _curses.KEY_LL: 'kll', _curses.KEY_MARK: 'kmrk', _curses.KEY_MESSAGE: 'kmsg', _curses.KEY_MOVE: 'kmov', _curses.KEY_NEXT: 'knxt', _curses.KEY_NPAGE: 'knp', _curses.KEY_OPEN: 'kopn', _curses.KEY_OPTIONS: 'kopt', _curses.KEY_PPAGE: 'kpp', _curses.KEY_PREVIOUS: 'kprv', _curses.KEY_PRINT: 'kprt', _curses.KEY_REDO: 'krdo', _curses.KEY_REFERENCE: 'kref', _curses.KEY_REFRESH: 'krfr', _curses.KEY_REPLACE: 'krpl', _curses.KEY_RESTART: 'krst', _curses.KEY_RESUME: 'kres', _curses.KEY_RIGHT: 'kcuf1', _curses.KEY_SAVE: 'ksav', _curses.KEY_SBEG: 'kBEG', _curses.KEY_SCANCEL: 'kCAN', _curses.KEY_SCOMMAND: 'kCMD', _curses.KEY_SCOPY: 'kCPY', _curses.KEY_SCREATE: 'kCRT', _curses.KEY_SDC: 'kDC', _curses.KEY_SDL: 'kDL', _curses.KEY_SELECT: 'kslt', _curses.KEY_SEND: 'kEND', _curses.KEY_SEOL: 'kEOL', _curses.KEY_SEXIT: 'kEXT', _curses.KEY_SF: 'kind', _curses.KEY_SFIND: 'kFND', _curses.KEY_SHELP: 'kHLP', _curses.KEY_SHOME: 'kHOM', _curses.KEY_SIC: 'kIC', _curses.KEY_SLEFT: 'kLFT', _curses.KEY_SMESSAGE: 'kMSG', 
_curses.KEY_SMOVE: 'kMOV', _curses.KEY_SNEXT: 'kNXT', _curses.KEY_SOPTIONS: 'kOPT', _curses.KEY_SPREVIOUS: 'kPRV', _curses.KEY_SPRINT: 'kPRT', _curses.KEY_SR: 'kri', _curses.KEY_SREDO: 'kRDO', _curses.KEY_SREPLACE: 'kRPL', _curses.KEY_SRIGHT: 'kRIT', _curses.KEY_SRSUME: 'kRES', _curses.KEY_SSAVE: 'kSAV', _curses.KEY_SSUSPEND: 'kSPD', _curses.KEY_STAB: 'khts', _curses.KEY_SUNDO: 'kUND', _curses.KEY_SUSPEND: 'kspd', _curses.KEY_UNDO: 'kund', _curses.KEY_UP: 'kcuu1' } def has_key(ch): if isinstance(ch, str): ch = ord(ch) # Figure out the correct capability name for the keycode. capability_name = _capability_names.get(ch) if capability_name is None: return False #Check the current terminal description for that capability; #if present, return true, else return false. if _curses.tigetstr( capability_name ): return True else: return False if __name__ == '__main__': # Compare the output of this implementation and the ncurses has_key, # on platforms where has_key is already available try: L = [] _curses.initscr() for key in _capability_names.keys(): system = _curses.has_key(key) python = has_key(key) if system != python: L.append( 'Mismatch for key %s, system=%i, Python=%i' % (_curses.keyname( key ), system, python) ) finally: _curses.endwin() for i in L: print i
mit
-3,731,281,184,704,951,300
28.338542
72
0.571987
false
mscuthbert/abjad
abjad/tools/timespantools/test/test_timespantools_Timespan_is_congruent_to_timespan.py
2
2925
# -*- encoding: utf-8 -*- from abjad import * def test_timespantools_Timespan_is_congruent_to_timespan_01(): timespan_1 = timespantools.Timespan(0, 15) timespan_2 = timespantools.Timespan(-10, -5) assert not timespan_1.is_congruent_to_timespan(timespan_2) def test_timespantools_Timespan_is_congruent_to_timespan_02(): timespan_1 = timespantools.Timespan(0, 15) timespan_2 = timespantools.Timespan(-10, 0) assert not timespan_1.is_congruent_to_timespan(timespan_2) def test_timespantools_Timespan_is_congruent_to_timespan_03(): timespan_1 = timespantools.Timespan(0, 15) timespan_2 = timespantools.Timespan(-10, 5) assert not timespan_1.is_congruent_to_timespan(timespan_2) def test_timespantools_Timespan_is_congruent_to_timespan_04(): timespan_1 = timespantools.Timespan(0, 15) timespan_2 = timespantools.Timespan(-10, 15) assert not timespan_1.is_congruent_to_timespan(timespan_2) def test_timespantools_Timespan_is_congruent_to_timespan_05(): timespan_1 = timespantools.Timespan(0, 15) timespan_2 = timespantools.Timespan(-10, 25) assert not timespan_1.is_congruent_to_timespan(timespan_2) def test_timespantools_Timespan_is_congruent_to_timespan_06(): timespan_1 = timespantools.Timespan(0, 15) timespan_2 = timespantools.Timespan(0, 10) assert not timespan_1.is_congruent_to_timespan(timespan_2) def test_timespantools_Timespan_is_congruent_to_timespan_07(): timespan_1 = timespantools.Timespan(0, 15) timespan_2 = timespantools.Timespan(0, 15) assert timespan_1.is_congruent_to_timespan(timespan_2) def test_timespantools_Timespan_is_congruent_to_timespan_08(): timespan_1 = timespantools.Timespan(0, 15) timespan_2 = timespantools.Timespan(5, 10) assert not timespan_1.is_congruent_to_timespan(timespan_2) def test_timespantools_Timespan_is_congruent_to_timespan_09(): timespan_1 = timespantools.Timespan(0, 15) timespan_2 = timespantools.Timespan(5, 15) assert not timespan_1.is_congruent_to_timespan(timespan_2) def test_timespantools_Timespan_is_congruent_to_timespan_10(): timespan_1 = timespantools.Timespan(0, 15) timespan_2 = timespantools.Timespan(0, 25) assert not timespan_1.is_congruent_to_timespan(timespan_2) def test_timespantools_Timespan_is_congruent_to_timespan_11(): timespan_1 = timespantools.Timespan(0, 15) timespan_2 = timespantools.Timespan(5, 25) assert not timespan_1.is_congruent_to_timespan(timespan_2) def test_timespantools_Timespan_is_congruent_to_timespan_12(): timespan_1 = timespantools.Timespan(0, 15) timespan_2 = timespantools.Timespan(15, 25) assert not timespan_1.is_congruent_to_timespan(timespan_2) def test_timespantools_Timespan_is_congruent_to_timespan_13(): timespan_1 = timespantools.Timespan(0, 15) timespan_2 = timespantools.Timespan(20, 25) assert not timespan_1.is_congruent_to_timespan(timespan_2)
gpl-3.0
-4,915,905,206,886,809,000
42.029412
62
0.742906
false
PySide/Shiboken
tests/samplebinding/multi_cpp_inheritance_test.py
6
3178
#!/usr/bin/env python # -*- coding: utf-8 -*- # # This file is part of the Shiboken Python Bindings Generator project. # # Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies). # # Contact: PySide team <contact@pyside.org> # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public License # version 2.1 as published by the Free Software Foundation. Please # review the following information to ensure the GNU Lesser General # Public License version 2.1 requirements will be met: # http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html. # # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA '''Test cases for multiple inheritance''' import sys import unittest from sample import * class SimpleUseCase(ObjectType, Str): def __init__(self, name): ObjectType.__init__(self) Str.__init__(self, name) class SimpleUseCaseReverse(Str, ObjectType): def __init__(self, name): ObjectType.__init__(self) Str.__init__(self, name) class SimpleUseCase2(SimpleUseCase): def __init__(self, name): SimpleUseCase.__init__(self, name) class ComplexUseCase(SimpleUseCase2, Point): def __init__(self, name): SimpleUseCase2.__init__(self, name) Point.__init__(self) class ComplexUseCaseReverse(Point, SimpleUseCase2): def __init__(self, name): SimpleUseCase2.__init__(self, name) Point.__init__(self) class MultipleCppDerivedTest(unittest.TestCase): def testInstanciation(self): s = SimpleUseCase("Hi") self.assertEqual(s, "Hi") s.setObjectName(s) self.assertEqual(s.objectName(), "Hi") def testInstanciation2(self): s = SimpleUseCase2("Hi") self.assertEqual(s, "Hi") s.setObjectName(s) self.assertEqual(s.objectName(), "Hi") def testComplexInstanciation(self): c = ComplexUseCase("Hi") self.assertEqual(c, "Hi") c.setObjectName(c) self.assertEqual(c.objectName(), "Hi") c.setX(2); self.assertEqual(c.x(), 2) class MultipleCppDerivedReverseTest(unittest.TestCase): def testInstanciation(self): s = SimpleUseCaseReverse("Hi") self.assertEqual(s, "Hi") s.setObjectName(s) self.assertEqual(s.objectName(), "Hi") def testInstanciation2(self): s = SimpleUseCase2("Hi") self.assertEqual(s, "Hi") s.setObjectName(s) self.assertEqual(s.objectName(), "Hi") def testComplexInstanciation(self): c = ComplexUseCaseReverse("Hi") c.setObjectName(c) self.assertEqual(c.objectName(), "Hi") c.setX(2); self.assertEqual(c, Point(2, 0)) if __name__ == '__main__': unittest.main()
gpl-2.0
8,331,902,494,146,692,000
30.78
70
0.662052
false
bazitur/brackets-python-tools
pythonfiles/parso/python/token.py
2
1777
from __future__ import absolute_import

from itertools import count
from token import *

from parso._compatibility import py_version

_counter = count(N_TOKENS)
# Never want to see this thing again.
del N_TOKENS

COMMENT = next(_counter)
tok_name[COMMENT] = 'COMMENT'

NL = next(_counter)
tok_name[NL] = 'NL'

# Sets the attributes that don't exist in these tok_name versions.
if py_version >= 30:
    BACKQUOTE = next(_counter)
    tok_name[BACKQUOTE] = 'BACKQUOTE'
else:
    RARROW = next(_counter)
    tok_name[RARROW] = 'RARROW'
    ELLIPSIS = next(_counter)
    tok_name[ELLIPSIS] = 'ELLIPSIS'
if py_version < 35:
    ATEQUAL = next(_counter)
    tok_name[ATEQUAL] = 'ATEQUAL'

ERROR_DEDENT = next(_counter)
tok_name[ERROR_DEDENT] = 'ERROR_DEDENT'


# Map from operator to number (since tokenize doesn't do this)

opmap_raw = """\
( LPAR
) RPAR
[ LSQB
] RSQB
: COLON
, COMMA
; SEMI
+ PLUS
- MINUS
* STAR
/ SLASH
| VBAR
& AMPER
< LESS
> GREATER
= EQUAL
. DOT
% PERCENT
` BACKQUOTE
{ LBRACE
} RBRACE
@ AT
== EQEQUAL
!= NOTEQUAL
<> NOTEQUAL
<= LESSEQUAL
>= GREATEREQUAL
~ TILDE
^ CIRCUMFLEX
<< LEFTSHIFT
>> RIGHTSHIFT
** DOUBLESTAR
+= PLUSEQUAL
-= MINEQUAL
*= STAREQUAL
/= SLASHEQUAL
%= PERCENTEQUAL
&= AMPEREQUAL
|= VBAREQUAL
@= ATEQUAL
^= CIRCUMFLEXEQUAL
<<= LEFTSHIFTEQUAL
>>= RIGHTSHIFTEQUAL
**= DOUBLESTAREQUAL
// DOUBLESLASH
//= DOUBLESLASHEQUAL
-> RARROW
... ELLIPSIS
"""

opmap = {}
for line in opmap_raw.splitlines():
    op, name = line.split()
    opmap[op] = globals()[name]


def generate_token_id(string):
    """
    Uses a token in the grammar (e.g. `'+'` or `'and'`) and returns the
    corresponding ID for it. The strings are part of the grammar file.
    """
    try:
        return opmap[string]
    except KeyError:
        pass
    return globals()[string]
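

# --- Editor's illustrative sketch (not part of the original module) ---
# generate_token_id() resolves operator strings through `opmap` and token
# names through the globals populated by `from token import *` above.
if __name__ == '__main__':
    print(generate_token_id('+'))     # the numeric ID of PLUS
    print(generate_token_id('NAME'))  # falls back to globals()['NAME']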
mit
-2,425,001,939,370,699,300
16.086538
79
0.665729
false
glaubitz/fs-uae-debian
arcade/launcher/settings/settings_header.py
2
1384
import fsui from fsbc.util import unused from launcher.ui.skin import Skin class SettingsHeader(fsui.Group): ICON_LEFT = 0 ICON_RIGHT = 1 def __init__( self, parent, icon, title, subtitle="", icon_position=ICON_RIGHT ): unused(subtitle) fsui.Group.__init__(self, parent) self.layout = fsui.HorizontalLayout() image = icon.image(48) self.image_view = fsui.ImageView(self, image) if icon_position == self.ICON_LEFT: self.layout.add(self.image_view) self.layout.add_spacer(20) # vert_layout = fsui.VerticalLayout() # self.layout.add( # vert_layout, expand=True, fill=False, valign=0.5) self.title_label = fsui.HeadingLabel(self, title) if Skin.fws() or True: font = fsui.Font("Roboto", 26) self.title_label.set_font(font) self.layout.add( self.title_label, expand=True, fill=False, valign=0.0 ) else: font = self.title_label.get_font() font.increase_size(3) self.title_label.set_font(font) self.layout.add( self.title_label, expand=True, fill=False, valign=0.5 ) if icon_position == self.ICON_RIGHT: self.layout.add_spacer(20) self.layout.add(self.image_view)
gpl-2.0
-7,743,135,693,777,367,000
30.454545
72
0.567197
false
twoh/leevee
env/Lib/encodings/aliases.py
418
14848
""" Encoding Aliases Support This module is used by the encodings package search function to map encodings names to module names. Note that the search function normalizes the encoding names before doing the lookup, so the mapping will have to map normalized encoding names to module names. Contents: The following aliases dictionary contains mappings of all IANA character set names for which the Python core library provides codecs. In addition to these, a few Python specific codec aliases have also been added. """ aliases = { # Please keep this list sorted alphabetically by value ! # ascii codec '646' : 'ascii', 'ansi_x3.4_1968' : 'ascii', 'ansi_x3_4_1968' : 'ascii', # some email headers use this non-standard name 'ansi_x3.4_1986' : 'ascii', 'cp367' : 'ascii', 'csascii' : 'ascii', 'ibm367' : 'ascii', 'iso646_us' : 'ascii', 'iso_646.irv_1991' : 'ascii', 'iso_ir_6' : 'ascii', 'us' : 'ascii', 'us_ascii' : 'ascii', # base64_codec codec 'base64' : 'base64_codec', 'base_64' : 'base64_codec', # big5 codec 'big5_tw' : 'big5', 'csbig5' : 'big5', # big5hkscs codec 'big5_hkscs' : 'big5hkscs', 'hkscs' : 'big5hkscs', # bz2_codec codec 'bz2' : 'bz2_codec', # cp037 codec '037' : 'cp037', 'csibm037' : 'cp037', 'ebcdic_cp_ca' : 'cp037', 'ebcdic_cp_nl' : 'cp037', 'ebcdic_cp_us' : 'cp037', 'ebcdic_cp_wt' : 'cp037', 'ibm037' : 'cp037', 'ibm039' : 'cp037', # cp1026 codec '1026' : 'cp1026', 'csibm1026' : 'cp1026', 'ibm1026' : 'cp1026', # cp1140 codec '1140' : 'cp1140', 'ibm1140' : 'cp1140', # cp1250 codec '1250' : 'cp1250', 'windows_1250' : 'cp1250', # cp1251 codec '1251' : 'cp1251', 'windows_1251' : 'cp1251', # cp1252 codec '1252' : 'cp1252', 'windows_1252' : 'cp1252', # cp1253 codec '1253' : 'cp1253', 'windows_1253' : 'cp1253', # cp1254 codec '1254' : 'cp1254', 'windows_1254' : 'cp1254', # cp1255 codec '1255' : 'cp1255', 'windows_1255' : 'cp1255', # cp1256 codec '1256' : 'cp1256', 'windows_1256' : 'cp1256', # cp1257 codec '1257' : 'cp1257', 'windows_1257' : 'cp1257', # cp1258 codec '1258' : 'cp1258', 'windows_1258' : 'cp1258', # cp424 codec '424' : 'cp424', 'csibm424' : 'cp424', 'ebcdic_cp_he' : 'cp424', 'ibm424' : 'cp424', # cp437 codec '437' : 'cp437', 'cspc8codepage437' : 'cp437', 'ibm437' : 'cp437', # cp500 codec '500' : 'cp500', 'csibm500' : 'cp500', 'ebcdic_cp_be' : 'cp500', 'ebcdic_cp_ch' : 'cp500', 'ibm500' : 'cp500', # cp775 codec '775' : 'cp775', 'cspc775baltic' : 'cp775', 'ibm775' : 'cp775', # cp850 codec '850' : 'cp850', 'cspc850multilingual' : 'cp850', 'ibm850' : 'cp850', # cp852 codec '852' : 'cp852', 'cspcp852' : 'cp852', 'ibm852' : 'cp852', # cp855 codec '855' : 'cp855', 'csibm855' : 'cp855', 'ibm855' : 'cp855', # cp857 codec '857' : 'cp857', 'csibm857' : 'cp857', 'ibm857' : 'cp857', # cp858 codec '858' : 'cp858', 'csibm858' : 'cp858', 'ibm858' : 'cp858', # cp860 codec '860' : 'cp860', 'csibm860' : 'cp860', 'ibm860' : 'cp860', # cp861 codec '861' : 'cp861', 'cp_is' : 'cp861', 'csibm861' : 'cp861', 'ibm861' : 'cp861', # cp862 codec '862' : 'cp862', 'cspc862latinhebrew' : 'cp862', 'ibm862' : 'cp862', # cp863 codec '863' : 'cp863', 'csibm863' : 'cp863', 'ibm863' : 'cp863', # cp864 codec '864' : 'cp864', 'csibm864' : 'cp864', 'ibm864' : 'cp864', # cp865 codec '865' : 'cp865', 'csibm865' : 'cp865', 'ibm865' : 'cp865', # cp866 codec '866' : 'cp866', 'csibm866' : 'cp866', 'ibm866' : 'cp866', # cp869 codec '869' : 'cp869', 'cp_gr' : 'cp869', 'csibm869' : 'cp869', 'ibm869' : 'cp869', # cp932 codec '932' : 'cp932', 'ms932' : 'cp932', 'mskanji' : 'cp932', 'ms_kanji' : 'cp932', # cp949 codec '949' : 
'cp949', 'ms949' : 'cp949', 'uhc' : 'cp949', # cp950 codec '950' : 'cp950', 'ms950' : 'cp950', # euc_jis_2004 codec 'jisx0213' : 'euc_jis_2004', 'eucjis2004' : 'euc_jis_2004', 'euc_jis2004' : 'euc_jis_2004', # euc_jisx0213 codec 'eucjisx0213' : 'euc_jisx0213', # euc_jp codec 'eucjp' : 'euc_jp', 'ujis' : 'euc_jp', 'u_jis' : 'euc_jp', # euc_kr codec 'euckr' : 'euc_kr', 'korean' : 'euc_kr', 'ksc5601' : 'euc_kr', 'ks_c_5601' : 'euc_kr', 'ks_c_5601_1987' : 'euc_kr', 'ksx1001' : 'euc_kr', 'ks_x_1001' : 'euc_kr', # gb18030 codec 'gb18030_2000' : 'gb18030', # gb2312 codec 'chinese' : 'gb2312', 'csiso58gb231280' : 'gb2312', 'euc_cn' : 'gb2312', 'euccn' : 'gb2312', 'eucgb2312_cn' : 'gb2312', 'gb2312_1980' : 'gb2312', 'gb2312_80' : 'gb2312', 'iso_ir_58' : 'gb2312', # gbk codec '936' : 'gbk', 'cp936' : 'gbk', 'ms936' : 'gbk', # hex_codec codec 'hex' : 'hex_codec', # hp_roman8 codec 'roman8' : 'hp_roman8', 'r8' : 'hp_roman8', 'csHPRoman8' : 'hp_roman8', # hz codec 'hzgb' : 'hz', 'hz_gb' : 'hz', 'hz_gb_2312' : 'hz', # iso2022_jp codec 'csiso2022jp' : 'iso2022_jp', 'iso2022jp' : 'iso2022_jp', 'iso_2022_jp' : 'iso2022_jp', # iso2022_jp_1 codec 'iso2022jp_1' : 'iso2022_jp_1', 'iso_2022_jp_1' : 'iso2022_jp_1', # iso2022_jp_2 codec 'iso2022jp_2' : 'iso2022_jp_2', 'iso_2022_jp_2' : 'iso2022_jp_2', # iso2022_jp_2004 codec 'iso_2022_jp_2004' : 'iso2022_jp_2004', 'iso2022jp_2004' : 'iso2022_jp_2004', # iso2022_jp_3 codec 'iso2022jp_3' : 'iso2022_jp_3', 'iso_2022_jp_3' : 'iso2022_jp_3', # iso2022_jp_ext codec 'iso2022jp_ext' : 'iso2022_jp_ext', 'iso_2022_jp_ext' : 'iso2022_jp_ext', # iso2022_kr codec 'csiso2022kr' : 'iso2022_kr', 'iso2022kr' : 'iso2022_kr', 'iso_2022_kr' : 'iso2022_kr', # iso8859_10 codec 'csisolatin6' : 'iso8859_10', 'iso_8859_10' : 'iso8859_10', 'iso_8859_10_1992' : 'iso8859_10', 'iso_ir_157' : 'iso8859_10', 'l6' : 'iso8859_10', 'latin6' : 'iso8859_10', # iso8859_11 codec 'thai' : 'iso8859_11', 'iso_8859_11' : 'iso8859_11', 'iso_8859_11_2001' : 'iso8859_11', # iso8859_13 codec 'iso_8859_13' : 'iso8859_13', 'l7' : 'iso8859_13', 'latin7' : 'iso8859_13', # iso8859_14 codec 'iso_8859_14' : 'iso8859_14', 'iso_8859_14_1998' : 'iso8859_14', 'iso_celtic' : 'iso8859_14', 'iso_ir_199' : 'iso8859_14', 'l8' : 'iso8859_14', 'latin8' : 'iso8859_14', # iso8859_15 codec 'iso_8859_15' : 'iso8859_15', 'l9' : 'iso8859_15', 'latin9' : 'iso8859_15', # iso8859_16 codec 'iso_8859_16' : 'iso8859_16', 'iso_8859_16_2001' : 'iso8859_16', 'iso_ir_226' : 'iso8859_16', 'l10' : 'iso8859_16', 'latin10' : 'iso8859_16', # iso8859_2 codec 'csisolatin2' : 'iso8859_2', 'iso_8859_2' : 'iso8859_2', 'iso_8859_2_1987' : 'iso8859_2', 'iso_ir_101' : 'iso8859_2', 'l2' : 'iso8859_2', 'latin2' : 'iso8859_2', # iso8859_3 codec 'csisolatin3' : 'iso8859_3', 'iso_8859_3' : 'iso8859_3', 'iso_8859_3_1988' : 'iso8859_3', 'iso_ir_109' : 'iso8859_3', 'l3' : 'iso8859_3', 'latin3' : 'iso8859_3', # iso8859_4 codec 'csisolatin4' : 'iso8859_4', 'iso_8859_4' : 'iso8859_4', 'iso_8859_4_1988' : 'iso8859_4', 'iso_ir_110' : 'iso8859_4', 'l4' : 'iso8859_4', 'latin4' : 'iso8859_4', # iso8859_5 codec 'csisolatincyrillic' : 'iso8859_5', 'cyrillic' : 'iso8859_5', 'iso_8859_5' : 'iso8859_5', 'iso_8859_5_1988' : 'iso8859_5', 'iso_ir_144' : 'iso8859_5', # iso8859_6 codec 'arabic' : 'iso8859_6', 'asmo_708' : 'iso8859_6', 'csisolatinarabic' : 'iso8859_6', 'ecma_114' : 'iso8859_6', 'iso_8859_6' : 'iso8859_6', 'iso_8859_6_1987' : 'iso8859_6', 'iso_ir_127' : 'iso8859_6', # iso8859_7 codec 'csisolatingreek' : 'iso8859_7', 'ecma_118' : 'iso8859_7', 'elot_928' : 
'iso8859_7', 'greek' : 'iso8859_7', 'greek8' : 'iso8859_7', 'iso_8859_7' : 'iso8859_7', 'iso_8859_7_1987' : 'iso8859_7', 'iso_ir_126' : 'iso8859_7', # iso8859_8 codec 'csisolatinhebrew' : 'iso8859_8', 'hebrew' : 'iso8859_8', 'iso_8859_8' : 'iso8859_8', 'iso_8859_8_1988' : 'iso8859_8', 'iso_ir_138' : 'iso8859_8', # iso8859_9 codec 'csisolatin5' : 'iso8859_9', 'iso_8859_9' : 'iso8859_9', 'iso_8859_9_1989' : 'iso8859_9', 'iso_ir_148' : 'iso8859_9', 'l5' : 'iso8859_9', 'latin5' : 'iso8859_9', # johab codec 'cp1361' : 'johab', 'ms1361' : 'johab', # koi8_r codec 'cskoi8r' : 'koi8_r', # latin_1 codec # # Note that the latin_1 codec is implemented internally in C and a # lot faster than the charmap codec iso8859_1 which uses the same # encoding. This is why we discourage the use of the iso8859_1 # codec and alias it to latin_1 instead. # '8859' : 'latin_1', 'cp819' : 'latin_1', 'csisolatin1' : 'latin_1', 'ibm819' : 'latin_1', 'iso8859' : 'latin_1', 'iso8859_1' : 'latin_1', 'iso_8859_1' : 'latin_1', 'iso_8859_1_1987' : 'latin_1', 'iso_ir_100' : 'latin_1', 'l1' : 'latin_1', 'latin' : 'latin_1', 'latin1' : 'latin_1', # mac_cyrillic codec 'maccyrillic' : 'mac_cyrillic', # mac_greek codec 'macgreek' : 'mac_greek', # mac_iceland codec 'maciceland' : 'mac_iceland', # mac_latin2 codec 'maccentraleurope' : 'mac_latin2', 'maclatin2' : 'mac_latin2', # mac_roman codec 'macroman' : 'mac_roman', # mac_turkish codec 'macturkish' : 'mac_turkish', # mbcs codec 'dbcs' : 'mbcs', # ptcp154 codec 'csptcp154' : 'ptcp154', 'pt154' : 'ptcp154', 'cp154' : 'ptcp154', 'cyrillic_asian' : 'ptcp154', # quopri_codec codec 'quopri' : 'quopri_codec', 'quoted_printable' : 'quopri_codec', 'quotedprintable' : 'quopri_codec', # rot_13 codec 'rot13' : 'rot_13', # shift_jis codec 'csshiftjis' : 'shift_jis', 'shiftjis' : 'shift_jis', 'sjis' : 'shift_jis', 's_jis' : 'shift_jis', # shift_jis_2004 codec 'shiftjis2004' : 'shift_jis_2004', 'sjis_2004' : 'shift_jis_2004', 's_jis_2004' : 'shift_jis_2004', # shift_jisx0213 codec 'shiftjisx0213' : 'shift_jisx0213', 'sjisx0213' : 'shift_jisx0213', 's_jisx0213' : 'shift_jisx0213', # tactis codec 'tis260' : 'tactis', # tis_620 codec 'tis620' : 'tis_620', 'tis_620_0' : 'tis_620', 'tis_620_2529_0' : 'tis_620', 'tis_620_2529_1' : 'tis_620', 'iso_ir_166' : 'tis_620', # utf_16 codec 'u16' : 'utf_16', 'utf16' : 'utf_16', # utf_16_be codec 'unicodebigunmarked' : 'utf_16_be', 'utf_16be' : 'utf_16_be', # utf_16_le codec 'unicodelittleunmarked' : 'utf_16_le', 'utf_16le' : 'utf_16_le', # utf_32 codec 'u32' : 'utf_32', 'utf32' : 'utf_32', # utf_32_be codec 'utf_32be' : 'utf_32_be', # utf_32_le codec 'utf_32le' : 'utf_32_le', # utf_7 codec 'u7' : 'utf_7', 'utf7' : 'utf_7', 'unicode_1_1_utf_7' : 'utf_7', # utf_8 codec 'u8' : 'utf_8', 'utf' : 'utf_8', 'utf8' : 'utf_8', 'utf8_ucs2' : 'utf_8', 'utf8_ucs4' : 'utf_8', # uu_codec codec 'uu' : 'uu_codec', # zlib_codec codec 'zip' : 'zlib_codec', 'zlib' : 'zlib_codec', }
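

# Editor's illustrative sketch (not part of the original module): the table
# above is consumed by the encodings package search function, which is what
# makes differently spelled encoding names resolve to the same codec.
if __name__ == '__main__':
    import codecs
    assert codecs.lookup('latin1').name == codecs.lookup('iso8859-1').name
    assert codecs.lookup('u8').name == 'utf-8'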
mit
1,315,960,443,830,221,000
27.174573
83
0.419652
false
djw8605/htcondor
src/condor_contrib/condor_pigeon/src/condor_pigeon_client/skype_linux_tools/find_chat.py
10
4722
#! /usr/bin/python
#
# Displays all multi-chats and allows you to open them.
#
# (c) Copyright 2007, Vincent Oberle, vincent@oberle.org
#
# This software may be used and distributed according to the terms
# of the GNU Public License, incorporated herein by reference.


import sys
import re
from optparse import OptionParser

from skype_api import *


appname = 'chat_finder'

class SkypeChat:

	def __init__(self, _chunk_size = 5, debug = False):
		self.ids = None
		self.chunk = 0
		self.chunk_size = _chunk_size
		self.topics = {}
		self.members = {}
		self.friendlyname = {}
		self.api = SkypeAPI(appname, debug)

	def init_chat_ids(self):
		ret = self.api.send_and_block('SEARCH CHATS')
		r = re.search (r'CHATS (.*)', ret)
		if r:
			self.ids = r.group(1).strip().split(', ')
		# convert percentage to actual value
		self.chunk_size = len(self.ids) * self.chunk_size / 100

	# Gets chat info by chunks
	def get_all_chats(self):
		if not self.ids:
			self.init_chat_ids()
		lo = self.chunk
		self.hi = min([self.chunk + self.chunk_size - 1, len(self.ids) - 1])
		for i in self.ids[lo:self.hi]:
			self.api.send('GET CHAT ' + i + ' TOPIC')
			self.api.send('GET CHAT ' + i + ' MEMBERS')
			self.api.send('GET CHAT ' + i + ' FRIENDLYNAME')
		while True:
			msgs = self.api.response_wait(1)
			if not msgs:
				break
			for reply in msgs:
				r = re.search (r'CHAT (\S+) TOPIC (.+)', reply)
				if r:
					self.topics[r.group(1).strip()] = r.group(2).strip()
				r = re.search (r'CHAT (\S+) MEMBERS (.+)', reply)
				if r:
					self.members[r.group(1).strip()] = r.group(2).strip()
				r = re.search (r'CHAT (\S+) FRIENDLYNAME (.+)', reply)
				if r:
					self.friendlyname[r.group(1).strip()] = r.group(2).strip()
		self.chunk = min([self.chunk + self.chunk_size, len(self.ids) - 1])

	def open_chat(self, name):
		self.api.send('OPEN CHAT ' + name)

	def open_chat_by_index(self, index):
		self.open_chat(self.ids[index])

	def print_chat_list(self, filter):
		for i in self.ids[:self.hi]:
			# display only multi-chats
			if not self.members.has_key(i) or len(self.members[i].split()) < 3:
				continue
			# string we filter on
			search_str = ''
			if self.topics.has_key(i):
				search_str = search_str + self.topics[i]
			elif self.friendlyname.has_key(i):
				search_str = search_str + self.friendlyname[i]
			if self.members.has_key(i):
				search_str = search_str + self.members[i]
			search_str = search_str.lower()
			if not filter or search_str.find(filter) >= 0:
				t = ''
				if self.topics.has_key(i):
					t = '**' + self.topics[i] + '**'
				elif self.friendlyname.has_key(i):
					t = '"' + self.friendlyname[i] + '"'
				if self.members.has_key(i):
					t = t + ' ' + self.members[i]
				if t:
					print str(self.ids.index(i)) + ': ' + t

	def perc_loaded(self):
		return str( int(float(self.chunk) / len(self.ids) * 100) )


if __name__ == "__main__":
	parser = OptionParser('%prog [options]')
	parser.add_option('-c', '--chunk', dest='chunk', default = 5,
		help='Percentage of total number of chats to load each time (100 for all)')
	parser.add_option('-d', '--debug', action='store_true', dest='debug', default = False,
		help='Print debug messages')
	options, args = parser.parse_args()
	if len(args):
		parser.print_help()
		sys.exit(0)

	try:
		api = SkypeChat(int(options.chunk), options.debug)
	except StandardError:
		print 'Could not connect to Skype. Check if "' + appname + '" is authorized to connect to Skype (Options - Public API)'
		sys.exit(0)

	print 'Please be patient while chats are being loaded...'
api.get_all_chats() filter = None refresh = True while True: if refresh: print '' api.print_chat_list(filter) refresh = False print 'Loaded: ' + api.perc_loaded() + ' %' print 'Quick help: "/word" filter by word; "/" clean filter; "m" load more chats; number to open a chat; "q" to quit' print '> ', sys.stdout.flush() result = sys.stdin.readline().strip().lower() if not result: continue elif result == '/': filter = None refresh = True elif result[0] == '/': filter = result[1:] refresh = True elif result.isdigit(): api.open_chat_by_index(int(result)) elif result[0] == '#': api.open_chat(result) elif result == 'm': print 'Loading more chats...' api.get_all_chats() refresh = True elif result == 'h': print 'Skype Chat Finder: Finds old multi-chats and allows to open them' print 'Commands:' print ' /word Filters chat topic and member list with "word"' print ' / Show all chats (reset filter)' print ' m Loads more chats' print ' Chat number Opens the chat window in Skype' print ' q Quits' elif result == 'q': break
apache-2.0
1,836,465,713,021,663,000
28.5125
121
0.623041
false
dangtrinhnt/django-saml2
djangosaml2/templatetags/idplist.py
8
1419
# Copyright (C) 2011-2012 Yaco Sistemas (http://www.yaco.es) # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from django import template from djangosaml2.conf import config_settings_loader register = template.Library() class IdPListNode(template.Node): def __init__(self, variable_name): self.variable_name = variable_name def render(self, context): conf = config_settings_loader() context[self.variable_name] = conf.idps() return '' @register.tag def idplist(parser, token): try: tag_name, as_part, variable = token.split_contents() except ValueError: raise template.TemplateSyntaxError( '%r tag requires two arguments' % token.contents.split()[0]) if not as_part == 'as': raise template.TemplateSyntaxError( '%r tag first argument must be the literal "as"' % tag_name) return IdPListNode(variable)
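

# Editor's note (illustrative, not part of the original module): in a Django
# template the tag defined above would be used as
#
#     {% load idplist %}
#     {% idplist as idps %}
#
# leaving the value returned by conf.idps() in the context variable ``idps``.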
apache-2.0
-4,295,575,218,021,276,700
31.25
74
0.696265
false
nevir/plexability
extern/depot_tools/third_party/boto/mashups/server.py
91
14043
# Copyright (c) 2006,2007 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.

"""
High-level abstraction of an EC2 server
"""
import boto
import boto.utils
from boto.mashups.iobject import IObject
from boto.pyami.config import Config, BotoConfigPath
from boto.mashups.interactive import interactive_shell
from boto.sdb.db.model import Model
from boto.sdb.db.property import StringProperty
import os
import StringIO


class ServerSet(list):

    def __getattr__(self, name):
        results = []
        is_callable = False
        for server in self:
            try:
                val = getattr(server, name)
                if callable(val):
                    is_callable = True
                results.append(val)
            except Exception:
                # A server missing the attribute (or a failing property
                # getter) contributes None rather than aborting the set.
                results.append(None)
        if is_callable:
            self.map_list = results
            return self.map
        return results

    def map(self, *args):
        results = []
        for fn in self.map_list:
            results.append(fn(*args))
        return results


class Server(Model):

    @property
    def ec2(self):
        if self._ec2 is None:
            self._ec2 = boto.connect_ec2()
        return self._ec2

    @classmethod
    def Inventory(cls):
        """
        Returns a list of Server instances, one for each Server object
        persisted in the db
        """
        l = ServerSet()
        rs = cls.find()
        for server in rs:
            l.append(server)
        return l

    @classmethod
    def Register(cls, name, instance_id, description=''):
        s = cls()
        s.name = name
        s.instance_id = instance_id
        s.description = description
        s.save()
        return s

    def __init__(self, id=None, **kw):
        Model.__init__(self, id, **kw)
        self._reservation = None
        self._instance = None
        self._ssh_client = None
        self._pkey = None
        self._config = None
        self._ec2 = None

    name = StringProperty(unique=True, verbose_name="Name")
    instance_id = StringProperty(verbose_name="Instance ID")
    config_uri = StringProperty()
    ami_id = StringProperty(verbose_name="AMI ID")
    zone = StringProperty(verbose_name="Availability Zone")
    security_group = StringProperty(verbose_name="Security Group", default="default")
    key_name = StringProperty(verbose_name="Key Name")
    elastic_ip = StringProperty(verbose_name="Elastic IP")
    instance_type = StringProperty(verbose_name="Instance Type")
    description = StringProperty(verbose_name="Description")
    log = StringProperty()

    def setReadOnly(self, value):
        raise AttributeError

    def getInstance(self):
        if not self._instance:
            if self.instance_id:
                try:
                    rs = self.ec2.get_all_instances([self.instance_id])
                except Exception:
                    # Lookup failed (bad instance id or transient EC2 error)
                    return None
                if len(rs) > 0:
                    self._reservation = rs[0]
                    self._instance = self._reservation.instances[0]
        return self._instance

    instance = property(getInstance, setReadOnly, None,
                        'The Instance for the server')

    def getAMI(self):
        if self.instance:
            return self.instance.image_id

    ami = property(getAMI, setReadOnly, None, 'The AMI for the server')

    def getStatus(self):
        if self.instance:
            self.instance.update()
            return self.instance.state

    status = property(getStatus, setReadOnly, None,
                      'The status of the server')

    def getHostname(self):
        if self.instance:
            return self.instance.public_dns_name

    hostname = property(getHostname, setReadOnly, None,
                        'The public DNS name of the server')

    def getPrivateHostname(self):
        if self.instance:
            return self.instance.private_dns_name

    private_hostname = property(getPrivateHostname, setReadOnly, None,
                                'The private DNS name of the server')

    def getLaunchTime(self):
        if self.instance:
            return self.instance.launch_time

    launch_time = property(getLaunchTime, setReadOnly, None,
                           'The time the Server was started')

    def getConsoleOutput(self):
        if self.instance:
            return self.instance.get_console_output()

    console_output = property(getConsoleOutput, setReadOnly, None,
                              'Retrieve the console output for server')

    def getGroups(self):
        if self._reservation:
            return self._reservation.groups
        else:
            return None

    groups = property(getGroups, setReadOnly, None,
                      'The Security Groups controlling access to this server')

    def getConfig(self):
        if not self._config:
            remote_file = BotoConfigPath
            local_file = '%s.ini' % self.instance.id
            self.get_file(remote_file, local_file)
            self._config = Config(local_file)
        return self._config

    def setConfig(self, config):
        local_file = '%s.ini' % self.instance.id
        # Open for writing: the original opened the file read-only,
        # which makes the following config.write() fail.
        fp = open(local_file, 'w')
        config.write(fp)
        fp.close()
        self.put_file(local_file, BotoConfigPath)
        self._config = config

    config = property(getConfig, setConfig, None,
                      'The instance data for this server')

    def set_config(self, config):
        """
        Set SDB based config
        """
        self._config = config
        self._config.dump_to_sdb("botoConfigs", self.id)

    def load_config(self):
        self._config = Config(do_load=False)
        self._config.load_from_sdb("botoConfigs", self.id)

    def stop(self):
        if self.instance:
            self.instance.stop()

    def start(self):
        self.stop()
        ec2 = boto.connect_ec2()
        ami = ec2.get_all_images(image_ids=[str(self.ami_id)])[0]
        groups = ec2.get_all_security_groups(groupnames=[str(self.security_group)])
        if not self._config:
            self.load_config()
        if not self._config.has_section("Credentials"):
            self._config.add_section("Credentials")
            self._config.set("Credentials", "aws_access_key_id", ec2.aws_access_key_id)
            self._config.set("Credentials", "aws_secret_access_key", ec2.aws_secret_access_key)
        if not self._config.has_section("Pyami"):
            self._config.add_section("Pyami")
        if self._manager.domain:
            self._config.set('Pyami', 'server_sdb_domain', self._manager.domain.name)
            self._config.set("Pyami", 'server_sdb_name', self.name)
        cfg = StringIO.StringIO()
        self._config.write(cfg)
        cfg = cfg.getvalue()
        r = ami.run(min_count=1,
                    max_count=1,
                    key_name=self.key_name,
                    security_groups=groups,
                    instance_type=self.instance_type,
                    placement=self.zone,
                    user_data=cfg)
        i = r.instances[0]
        self.instance_id = i.id
        self.put()
        if self.elastic_ip:
            ec2.associate_address(self.instance_id, self.elastic_ip)

    def reboot(self):
        if self.instance:
            self.instance.reboot()

    def get_ssh_client(self, key_file=None, host_key_file='~/.ssh/known_hosts',
                       uname='root'):
        import paramiko
        if not self.instance:
            print 'No instance yet!'
            return
        if not self._ssh_client:
            if not key_file:
                iobject = IObject()
                key_file = iobject.get_filename('Path to OpenSSH Key file')
            self._pkey = paramiko.RSAKey.from_private_key_file(key_file)
            self._ssh_client = paramiko.SSHClient()
            self._ssh_client.load_system_host_keys()
            self._ssh_client.load_host_keys(os.path.expanduser(host_key_file))
            self._ssh_client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
            self._ssh_client.connect(self.instance.public_dns_name,
                                     username=uname, pkey=self._pkey)
        return self._ssh_client

    def get_file(self, remotepath, localpath):
        ssh_client = self.get_ssh_client()
        sftp_client = ssh_client.open_sftp()
        sftp_client.get(remotepath, localpath)

    def put_file(self, localpath, remotepath):
        ssh_client = self.get_ssh_client()
        sftp_client = ssh_client.open_sftp()
        sftp_client.put(localpath, remotepath)

    def listdir(self, remotepath):
        ssh_client = self.get_ssh_client()
        sftp_client = ssh_client.open_sftp()
        return sftp_client.listdir(remotepath)

    def shell(self, key_file=None):
        ssh_client = self.get_ssh_client(key_file)
        channel = ssh_client.invoke_shell()
        interactive_shell(channel)

    def bundle_image(self, prefix, key_file, cert_file, size):
        print 'bundling image...'
        print '\tcopying cert and pk over to /mnt directory on server'
        ssh_client = self.get_ssh_client()
        sftp_client = ssh_client.open_sftp()
        path, name = os.path.split(key_file)
        remote_key_file = '/mnt/%s' % name
        self.put_file(key_file, remote_key_file)
        path, name = os.path.split(cert_file)
        remote_cert_file = '/mnt/%s' % name
        self.put_file(cert_file, remote_cert_file)
        print '\tdeleting %s' % BotoConfigPath
        # delete the metadata.ini file if it exists
        try:
            sftp_client.remove(BotoConfigPath)
        except IOError:
            # paramiko's SFTP client raises IOError if the file is absent
            pass
        command = 'sudo ec2-bundle-vol '
        command += '-c %s -k %s ' % (remote_cert_file, remote_key_file)
        command += '-u %s ' % self._reservation.owner_id
        command += '-p %s ' % prefix
        command += '-s %d ' % size
        command += '-d /mnt '
        # The original compared self.instance_type (the SDB property) in the
        # second clause, masking the live instance's real type; check the
        # running instance for both sizes.
        if self.instance.instance_type in ('m1.small', 'c1.medium'):
            command += '-r i386'
        else:
            command += '-r x86_64'
        print '\t%s' % command
        t = ssh_client.exec_command(command)
        response = t[1].read()
        print '\t%s' % response
        print '\t%s' % t[2].read()
        print '...complete!'

    def upload_bundle(self, bucket, prefix):
        print 'uploading bundle...'
        command = 'ec2-upload-bundle '
        command += '-m /mnt/%s.manifest.xml ' % prefix
        command += '-b %s ' % bucket
        command += '-a %s ' % self.ec2.aws_access_key_id
        command += '-s %s ' % self.ec2.aws_secret_access_key
        print '\t%s' % command
        ssh_client = self.get_ssh_client()
        t = ssh_client.exec_command(command)
        response = t[1].read()
        print '\t%s' % response
        print '\t%s' % t[2].read()
        print '...complete!'

    def create_image(self, bucket=None, prefix=None, key_file=None,
                     cert_file=None, size=None):
        iobject = IObject()
        if not bucket:
            bucket = iobject.get_string('Name of S3 bucket')
        if not prefix:
            prefix = iobject.get_string('Prefix for AMI file')
        if not key_file:
            key_file = iobject.get_filename('Path to RSA private key file')
        if not cert_file:
            cert_file = iobject.get_filename('Path to RSA public cert file')
        if not size:
            size = iobject.get_int('Size (in MB) of bundled image')
        self.bundle_image(prefix, key_file, cert_file, size)
        self.upload_bundle(bucket, prefix)
        print 'registering image...'
        self.image_id = self.ec2.register_image('%s/%s.manifest.xml' % (bucket, prefix))
        return self.image_id

    def attach_volume(self, volume, device="/dev/sdp"):
        """
        Attach an EBS volume to this server

        :param volume: EBS Volume to attach
        :type volume: boto.ec2.volume.Volume

        :param device: Device to attach to (default to /dev/sdp)
        :type device: string
        """
        if hasattr(volume, "id"):
            volume_id = volume.id
        else:
            volume_id = volume
        return self.ec2.attach_volume(volume_id=volume_id,
                                      instance_id=self.instance_id,
                                      device=device)

    def detach_volume(self, volume):
        """
        Detach an EBS volume from this server

        :param volume: EBS Volume to detach
        :type volume: boto.ec2.volume.Volume
        """
        if hasattr(volume, "id"):
            volume_id = volume.id
        else:
            volume_id = volume
        return self.ec2.detach_volume(volume_id=volume_id,
                                      instance_id=self.instance_id)

    def install_package(self, package_name):
        print 'installing %s...' % package_name
        command = 'yum -y install %s' % package_name
        print '\t%s' % command
        ssh_client = self.get_ssh_client()
        t = ssh_client.exec_command(command)
        response = t[1].read()
        print '\t%s' % response
        print '\t%s' % t[2].read()
        print '...complete!'
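A minimal usage sketch, not part of the boto source: how the Server model above might be driven. The server name, instance id, and volume id are made up, and working AWS credentials plus an SDB-backed Model store are assumed.

# Hypothetical driver for the Server class above (Python 2, like the file).
# 'webserver', 'i-12345678' and 'vol-87654321' are placeholder values.
if __name__ == '__main__':
    # Persist a record for an instance that is already running
    server = Server.Register('webserver', 'i-12345678', 'primary web host')
    print server.status                              # live EC2 state, e.g. 'running'
    print server.hostname                            # public DNS name of the instance
    server.attach_volume('vol-87654321', device='/dev/sdh')
    # server.shell()                                 # interactive SSH session (prompts for a key)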
gpl-2.0
551,411,079,860,995,260
34.551899
103
0.588407
false
kalahbrown/HueBigSQL
desktop/core/ext-py/django-extensions-1.5.0/docs/conf.py
33
6439
# -*- coding: utf-8 -*-
#
# django-extensions documentation build configuration file, created by
# sphinx-quickstart on Wed Apr 1 20:39:40 2009.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.

#import sys, os

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.append(os.path.abspath('.'))

# -- General configuration -----------------------------------------------------

# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = []

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix of source filenames.
source_suffix = '.rst'

# The encoding of source files.
#source_encoding = 'utf-8'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'django-extensions'
copyright = u'Copyright (C) 2008-2015 Michael Trier, Bas van Oostveen and contributors'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.5'
# The full version, including alpha/beta/rc tags.
release = '1.5.0'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'

# List of documents that shouldn't be included in the build.
#unused_docs = []

# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = ['_build']

# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []


# -- Options for HTML output ---------------------------------------------------

# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
html_theme = 'default'

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}

# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []

# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None

# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None

# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}

# If false, no module index is generated.
#html_use_modindex = True

# If false, no index is generated.
#html_use_index = True

# If true, the index is split into individual pages for each letter.
#html_split_index = False

# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''

# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''

# Output file base name for HTML help builder.
htmlhelp_basename = 'django-extensionsdoc'


# -- Options for LaTeX output --------------------------------------------------

# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'

# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
    ('index', 'django-extensions.tex', u'django-extensions Documentation',
     u'Michael Trier, Bas van Oostveen, and contributors', 'manual'),
]

# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False

# Additional stuff for the LaTeX preamble.
#latex_preamble = ''

# Documents to append as an appendix to all manuals.
#latex_appendices = []

# If false, no module index is generated.
#latex_use_modindex = True
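A hedged sketch of how a conf.py like the one above is consumed: Sphinx's Python API can drive a build directly instead of the sphinx-build CLI. The directory layout below is an assumption, not something stated in this file.

# Hedged sketch: building the docs through Sphinx's application API.
# The 'docs' source dir and '_build' output paths are assumptions.
from sphinx.application import Sphinx

app = Sphinx(srcdir='docs',                      # where conf.py and index.rst live
             confdir='docs',
             outdir='docs/_build/html',
             doctreedir='docs/_build/doctrees',
             buildername='html')
app.build()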
apache-2.0
4,065,926,971,494,356,000
32.190722
87
0.714397
false
kriswuollett/grpc
examples/python/multiplex/route_guide_resources.py
115
2221
# Copyright 2015, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""Common resources used in the gRPC route guide example."""

import json

import route_guide_pb2


def read_route_guide_database():
    """Reads the route guide database.

    Returns:
      The full contents of the route guide database as a sequence of
        route_guide_pb2.Features.
    """
    feature_list = []
    with open("route_guide_db.json") as route_guide_db_file:
        for item in json.load(route_guide_db_file):
            feature = route_guide_pb2.Feature(
                name=item["name"],
                location=route_guide_pb2.Point(
                    latitude=item["location"]["latitude"],
                    longitude=item["location"]["longitude"]))
            feature_list.append(feature)
    return feature_list
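A small usage sketch, an assumption rather than part of the gRPC example: loading the database with the function above and printing each feature. It expects route_guide_db.json in the working directory, exactly as read_route_guide_database() does.

# Hypothetical driver for read_route_guide_database() above.
if __name__ == '__main__':
    for feature in read_route_guide_database():
        # Each Feature carries a name and a Point location with integer
        # latitude/longitude fields.
        print("%s at (%d, %d)" % (feature.name,
                                  feature.location.latitude,
                                  feature.location.longitude))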
bsd-3-clause
145,743,123,724,359,040
40.90566
72
0.742909
false
howethomas/synapse
tests/rest/client/v1/test_presence.py
2
12591
# -*- coding: utf-8 -*-
# Copyright 2014 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Tests REST events for /presence paths."""

from tests import unittest
from twisted.internet import defer

from mock import Mock

from ....utils import MockHttpResource, setup_test_homeserver

from synapse.api.constants import PresenceState
from synapse.handlers.presence import PresenceHandler
from synapse.rest.client.v1 import presence
from synapse.rest.client.v1 import events
from synapse.types import UserID
from synapse.util.async import run_on_reactor

from collections import namedtuple


OFFLINE = PresenceState.OFFLINE
UNAVAILABLE = PresenceState.UNAVAILABLE
ONLINE = PresenceState.ONLINE


myid = "@apple:test"
PATH_PREFIX = "/_matrix/client/api/v1"


class JustPresenceHandlers(object):
    def __init__(self, hs):
        self.presence_handler = PresenceHandler(hs)


class PresenceStateTestCase(unittest.TestCase):

    @defer.inlineCallbacks
    def setUp(self):
        self.mock_resource = MockHttpResource(prefix=PATH_PREFIX)
        hs = yield setup_test_homeserver(
            datastore=Mock(spec=[
                "get_presence_state",
                "set_presence_state",
                "insert_client_ip",
            ]),
            http_client=None,
            resource_for_client=self.mock_resource,
            resource_for_federation=self.mock_resource,
        )
        hs.handlers = JustPresenceHandlers(hs)

        self.datastore = hs.get_datastore()
        self.datastore.get_app_service_by_token = Mock(return_value=None)

        def get_presence_list(*a, **kw):
            return defer.succeed([])
        self.datastore.get_presence_list = get_presence_list

        def _get_user_by_token(token=None):
            return {
                "user": UserID.from_string(myid),
                "admin": False,
                "device_id": None,
                "token_id": 1,
            }

        hs.get_v1auth().get_user_by_token = _get_user_by_token

        room_member_handler = hs.handlers.room_member_handler = Mock(
            spec=[
                "get_joined_rooms_for_user",
            ]
        )

        def get_rooms_for_user(user):
            return defer.succeed([])
        room_member_handler.get_joined_rooms_for_user = get_rooms_for_user

        presence.register_servlets(hs, self.mock_resource)

        self.u_apple = UserID.from_string(myid)

    @defer.inlineCallbacks
    def test_get_my_status(self):
        mocked_get = self.datastore.get_presence_state
        mocked_get.return_value = defer.succeed(
            {"state": ONLINE, "status_msg": "Available"}
        )

        (code, response) = yield self.mock_resource.trigger(
            "GET", "/presence/%s/status" % (myid), None)

        self.assertEquals(200, code)
        self.assertEquals(
            {"presence": ONLINE, "status_msg": "Available"},
            response
        )
        mocked_get.assert_called_with("apple")

    @defer.inlineCallbacks
    def test_set_my_status(self):
        mocked_set = self.datastore.set_presence_state
        mocked_set.return_value = defer.succeed({"state": OFFLINE})

        (code, response) = yield self.mock_resource.trigger(
            "PUT", "/presence/%s/status" % (myid),
            '{"presence": "unavailable", "status_msg": "Away"}')

        self.assertEquals(200, code)
        mocked_set.assert_called_with(
            "apple", {"state": UNAVAILABLE, "status_msg": "Away"}
        )


class PresenceListTestCase(unittest.TestCase):

    @defer.inlineCallbacks
    def setUp(self):
        self.mock_resource = MockHttpResource(prefix=PATH_PREFIX)

        hs = yield setup_test_homeserver(
            datastore=Mock(spec=[
                "has_presence_state",
                "get_presence_state",
                "allow_presence_visible",
                "is_presence_visible",
                "add_presence_list_pending",
                "set_presence_list_accepted",
                "del_presence_list",
                "get_presence_list",
                "insert_client_ip",
            ]),
            http_client=None,
            resource_for_client=self.mock_resource,
            resource_for_federation=self.mock_resource,
        )
        hs.handlers = JustPresenceHandlers(hs)

        self.datastore = hs.get_datastore()
        self.datastore.get_app_service_by_token = Mock(return_value=None)

        def has_presence_state(user_localpart):
            return defer.succeed(
                user_localpart in ("apple", "banana",)
            )
        self.datastore.has_presence_state = has_presence_state

        def _get_user_by_token(token=None):
            return {
                "user": UserID.from_string(myid),
                "admin": False,
                "device_id": None,
                "token_id": 1,
            }

        hs.handlers.room_member_handler = Mock(
            spec=[
                "get_joined_rooms_for_user",
            ]
        )

        hs.get_v1auth().get_user_by_token = _get_user_by_token

        presence.register_servlets(hs, self.mock_resource)

        self.u_apple = UserID.from_string("@apple:test")
        self.u_banana = UserID.from_string("@banana:test")

    @defer.inlineCallbacks
    def test_get_my_list(self):
        self.datastore.get_presence_list.return_value = defer.succeed(
            [{"observed_user_id": "@banana:test", "accepted": True}],
        )

        (code, response) = yield self.mock_resource.trigger(
            "GET", "/presence/list/%s" % (myid), None)

        self.assertEquals(200, code)
        self.assertEquals([
            {"user_id": "@banana:test", "presence": OFFLINE, "accepted": True},
        ], response)

        self.datastore.get_presence_list.assert_called_with(
            "apple", accepted=True
        )

    @defer.inlineCallbacks
    def test_invite(self):
        self.datastore.add_presence_list_pending.return_value = (
            defer.succeed(())
        )
        self.datastore.is_presence_visible.return_value = defer.succeed(
            True
        )

        (code, response) = yield self.mock_resource.trigger(
            "POST", "/presence/list/%s" % (myid),
            """{"invite": ["@banana:test"]}"""
        )

        self.assertEquals(200, code)

        self.datastore.add_presence_list_pending.assert_called_with(
            "apple", "@banana:test"
        )
        self.datastore.set_presence_list_accepted.assert_called_with(
            "apple", "@banana:test"
        )

    @defer.inlineCallbacks
    def test_drop(self):
        self.datastore.del_presence_list.return_value = (
            defer.succeed(())
        )

        (code, response) = yield self.mock_resource.trigger(
            "POST", "/presence/list/%s" % (myid),
            """{"drop": ["@banana:test"]}"""
        )

        self.assertEquals(200, code)

        self.datastore.del_presence_list.assert_called_with(
            "apple", "@banana:test"
        )


class PresenceEventStreamTestCase(unittest.TestCase):

    @defer.inlineCallbacks
    def setUp(self):
        self.mock_resource = MockHttpResource(prefix=PATH_PREFIX)

        # HIDEOUS HACKERY
        # TODO(paul): This should be injected in via the HomeServer DI system
        from synapse.streams.events import (
            PresenceEventSource, NullSource, EventSources
        )

        old_SOURCE_TYPES = EventSources.SOURCE_TYPES

        def tearDown():
            EventSources.SOURCE_TYPES = old_SOURCE_TYPES
        self.tearDown = tearDown

        EventSources.SOURCE_TYPES = {
            k: NullSource for k in old_SOURCE_TYPES.keys()
        }
        EventSources.SOURCE_TYPES["presence"] = PresenceEventSource

        hs = yield setup_test_homeserver(
            http_client=None,
            resource_for_client=self.mock_resource,
            resource_for_federation=self.mock_resource,
            datastore=Mock(spec=[
                "set_presence_state",
                "get_presence_list",
                "get_rooms_for_user",
            ]),
            clock=Mock(spec=[
                "call_later",
                "cancel_call_later",
                "time_msec",
                "looping_call",
            ]),
        )

        hs.get_clock().time_msec.return_value = 1000000

        def _get_user_by_req(req=None):
            return (UserID.from_string(myid), "")

        hs.get_v1auth().get_user_by_req = _get_user_by_req

        presence.register_servlets(hs, self.mock_resource)
        events.register_servlets(hs, self.mock_resource)

        hs.handlers.room_member_handler = Mock(spec=[])

        self.room_members = []

        def get_rooms_for_user(user):
            if user in self.room_members:
                return ["a-room"]
            else:
                return []
        hs.handlers.room_member_handler.get_joined_rooms_for_user = get_rooms_for_user
        hs.handlers.room_member_handler.get_room_members = (
            lambda r: self.room_members if r == "a-room" else []
        )

        self.mock_datastore = hs.get_datastore()
        self.mock_datastore.get_app_service_by_token = Mock(return_value=None)
        self.mock_datastore.get_app_service_by_user_id = Mock(
            return_value=defer.succeed(None)
        )
        self.mock_datastore.get_rooms_for_user = (
            lambda u: [
                namedtuple("Room", "room_id")(r)
                for r in get_rooms_for_user(UserID.from_string(u))
            ]
        )

        def get_profile_displayname(user_id):
            return defer.succeed("Frank")
        self.mock_datastore.get_profile_displayname = get_profile_displayname

        def get_profile_avatar_url(user_id):
            return defer.succeed(None)
        self.mock_datastore.get_profile_avatar_url = get_profile_avatar_url

        def user_rooms_intersect(user_list):
            room_member_ids = map(lambda u: u.to_string(), self.room_members)
            shared = all(map(lambda i: i in room_member_ids, user_list))
            return defer.succeed(shared)
        self.mock_datastore.user_rooms_intersect = user_rooms_intersect

        def get_joined_hosts_for_room(room_id):
            return []
        self.mock_datastore.get_joined_hosts_for_room = get_joined_hosts_for_room

        self.presence = hs.get_handlers().presence_handler

        self.u_apple = UserID.from_string("@apple:test")
        self.u_banana = UserID.from_string("@banana:test")

    @defer.inlineCallbacks
    def test_shortpoll(self):
        self.room_members = [self.u_apple, self.u_banana]

        self.mock_datastore.set_presence_state.return_value = defer.succeed(
            {"state": ONLINE}
        )
        self.mock_datastore.get_presence_list.return_value = defer.succeed(
            []
        )

        (code, response) = yield self.mock_resource.trigger(
            "GET", "/events?timeout=0", None)

        self.assertEquals(200, code)

        # We've forced there to be only one data stream so the tokens will
        # all be ours

        # I'll already get my own presence state change
        self.assertEquals(
            {"start": "0_1_0", "end": "0_1_0", "chunk": []},
            response
        )

        self.mock_datastore.set_presence_state.return_value = defer.succeed(
            {"state": ONLINE}
        )
        self.mock_datastore.get_presence_list.return_value = defer.succeed([])

        yield self.presence.set_state(
            self.u_banana, self.u_banana, state={"presence": ONLINE}
        )

        yield run_on_reactor()

        (code, response) = yield self.mock_resource.trigger(
            "GET", "/events?from=s0_1_0&timeout=0", None)

        self.assertEquals(200, code)
        self.assertEquals({"start": "s0_1_0", "end": "s0_2_0", "chunk": [
            {"type": "m.presence",
             "content": {
                 "user_id": "@banana:test",
                 "presence": ONLINE,
                 "displayname": "Frank",
                 "last_active_ago": 0,
             }},
        ]}, response)
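A self-contained distillation, assumed rather than taken from synapse, of the mocking pattern these tests lean on: async datastore methods are replaced with Mocks whose return values are already-fired Deferreds from defer.succeed(), so @defer.inlineCallbacks test bodies can yield them under Twisted's trial runner.

# Standalone sketch of the Mock + defer.succeed() pattern used above.
from mock import Mock
from twisted.internet import defer
from twisted.trial import unittest


class MockedDatastoreExample(unittest.TestCase):

    @defer.inlineCallbacks
    def test_presence_lookup(self):
        # spec restricts the mock to the one method we stub out
        datastore = Mock(spec=["get_presence_state"])
        datastore.get_presence_state.return_value = defer.succeed(
            {"state": "online"}
        )
        # yield resolves the already-fired Deferred immediately
        result = yield datastore.get_presence_state("apple")
        self.assertEqual({"state": "online"}, result)
        datastore.get_presence_state.assert_called_with("apple")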
apache-2.0
-3,441,427,267,454,832,000
31.534884
86
0.581526
false
leiferikb/bitpop
src/third_party/pyftpdlib/src/demo/basic_ftpd.py
4
2796
#!/usr/bin/env python
# $Id: basic_ftpd.py 977 2012-01-22 23:05:09Z g.rodola $

# pyftpdlib is released under the MIT license, reproduced below:
# ======================================================================
# Copyright (C) 2007-2012 Giampaolo Rodola' <g.rodola@gmail.com>
#
# All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
# ======================================================================

"""A basic FTP server which uses a DummyAuthorizer for managing 'virtual
users', setting a limit for incoming connections.
"""

import os

from pyftpdlib import ftpserver


def main():
    # Instantiate a dummy authorizer for managing 'virtual' users
    authorizer = ftpserver.DummyAuthorizer()

    # Define a new user having full r/w permissions and a read-only
    # anonymous user
    authorizer.add_user('user', '12345', os.getcwd(), perm='elradfmwM')
    authorizer.add_anonymous(os.getcwd())

    # Instantiate FTP handler class
    ftp_handler = ftpserver.FTPHandler
    ftp_handler.authorizer = authorizer

    # Define a customized banner (string returned when client connects)
    ftp_handler.banner = "pyftpdlib %s based ftpd ready." % ftpserver.__ver__

    # Specify a masquerade address and the range of ports to use for
    # passive connections. Uncomment in case you're behind a NAT.
    #ftp_handler.masquerade_address = '151.25.42.11'
    #ftp_handler.passive_ports = range(60000, 65535)

    # Instantiate FTP server class and listen to 0.0.0.0:21
    address = ('', 21)
    ftpd = ftpserver.FTPServer(address, ftp_handler)

    # set a limit for connections
    ftpd.max_cons = 256
    ftpd.max_cons_per_ip = 5

    # start ftp server
    ftpd.serve_forever()

if __name__ == '__main__':
    main()
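A quick smoke test, an assumption rather than part of the demo: once the server above is running, the standard library's ftplib can exercise it from another process, using the credentials registered with add_user().

# Hypothetical client-side check for the demo server above.
from ftplib import FTP

ftp = FTP()
ftp.connect('127.0.0.1', 21)   # the demo binds ('', 21)
ftp.login('user', '12345')     # user added via authorizer.add_user()
ftp.retrlines('LIST')          # list the served directory (os.getcwd())
ftp.quit()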
gpl-3.0
-593,978,601,895,551,600
36.28
76
0.683834
false