repo_name stringlengths 5–100 | ref stringlengths 12–67 | path stringlengths 4–244 | copies stringlengths 1–8 | content stringlengths 0–1.05M ⌀
---|---|---|---|---
aohta/shell-doctest | refs/heads/develop | shelldoctest/shelldoctest.py | 1 | #!/usr/bin/env python
"""
Shell Doctest module.
:Copyright: (c) 2009, the Shell Doctest Team All rights reserved.
:license: BSD, see LICENSE for more details.
"""
import commands
import doctest
import inspect
import re
import subprocess
import sys
import os
master = None
_EXC_WRAPPER = 'system_command("%s")'
def system_command(cmd, shell="bash"):
p = subprocess.Popen('%(shell)s -c "%(cmd)s"' % vars(),
shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
status, stdout, stderr = p.wait(), p.stdout.read().strip(), p.stderr.read().strip()
if status == 0 and stderr == "":
format = "%(stdout)s"
elif stdout != "":
format = "(%(status)d)%(stderr)s\n%(stdout)s"
else:
format = "(%(status)d)%(stderr)s"
result = format % vars()
    print(result)
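# Illustrative call (assumed, per the formats above): system_command("echo hi")
# prints "hi"; a failing command prints "(<status>)<stderr>" and, if there is
# also stdout, "(<status>)<stderr>\n<stdout>".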
class ShellExample(doctest.Example):
    def __init__(self, source, want, exc_msg=None, lineno=0, indent=0,
                 label=None,
                 options=None):
        # forward exc_msg and options instead of discarding them
        doctest.Example.__init__(self, source, want, exc_msg=exc_msg,
                                 lineno=lineno, indent=indent,
                                 options=options)
        self.label = label
class ShellDocTestParser(doctest.DocTestParser):
_PROMPT = "$"
_EXC_WRAPPER = _EXC_WRAPPER
_EXAMPLE_RE = re.compile(r'''
# Source consists of a PS1 line followed by zero or more PS2 lines.
(?P<source>
(?:^ (?P<indent> [ ]*)) # PS0 line: indent
(?: \[(?P<label>.+)\]\n)? # PS0 line: label
(?: (?P<user>[\w]*)@(?P<host>[\w\.-]*)\n)? # PS0 line: user@host
(?: [ ]* \$ .*) # PS1 line
(?:\n [ ]* \. [ ].*)*) # PS2 lines
\n?
# Want consists of any non-blank lines that do not start with PS1.
(?P<want> (?:(?![ ]*$) # Not a blank line
(?![ ]*\$) # Not a line starting with PS1
.*$\n? # But any other line
)*)
''', re.MULTILINE | re.VERBOSE)
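    # Illustrative block (assumed) that the regex above matches: an optional
    # [label] line, an optional user@host line, a "$" PS1 line, "." PS2
    # continuation lines, then the expected output:
    #
    #   [greeting]
    #   alice@example.com
    #   $ echo hello
    #   hello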
def parse(self, string, name='<string>'):
string = string.expandtabs()
min_indent = self._min_indent(string)
if min_indent > 0:
string = '\n'.join([l[min_indent:] for l in string.split('\n')])
output = []
charno, lineno = 0, 0
for m in self._EXAMPLE_RE.finditer(string):
output.append(string[charno:m.start()])
lineno += string.count('\n', charno, m.start())
(source, options, want, exc_msg) = \
self._parse_example(m, name, lineno)
if not self._IS_BLANK_OR_COMMENT(source):
source = source.replace("\n","; ")
user = m.group('user')
host = m.group('host')
if host:
if user:
cmd_base = "ssh %(user)s@%(host)s '%(source)s'"
else:
cmd_base = "ssh %(host)s '%(source)s'"
source = cmd_base % vars()
output.append( ShellExample(self._EXC_WRAPPER % source.replace("\n","; "),
want, exc_msg, lineno=lineno,
label=m.group('label'),
indent=min_indent+len(m.group('indent')),
options=options) )
lineno += string.count('\n', m.start(), m.end())
charno = m.end()
output.append(string[charno:])
return output
def _parse_example(self, m, name, lineno):
indent = len(m.group('indent'))
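        # keep only prompt ("$ ") and continuation (". ") lines; the optional
        # label and user@host lines captured in the source group are dropped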
source_lines = [sl for sl in m.group('source').split('\n') if sl.strip()[1] == " "]
self._check_prompt_blank(source_lines, indent, name, lineno)
self._check_prefix(source_lines[1:], ' '*indent + '.', name, lineno)
source = '\n'.join([sl[indent+len(self._PROMPT)+1:] for sl in source_lines])
want = m.group('want')
want_lines = want.split('\n')
if len(want_lines) > 1 and re.match(r' *$', want_lines[-1]):
del want_lines[-1]
self._check_prefix(want_lines, ' '*indent, name,
lineno + len(source_lines))
want = '\n'.join([wl[indent:] for wl in want_lines])
m = self._EXCEPTION_RE.match(want)
if m:
exc_msg = m.group('msg')
else:
exc_msg = None
options = self._find_options(source, name, lineno)
return source, options, want, exc_msg
def _check_prompt_blank(self, lines, indent, name, lineno):
for i, line in enumerate(lines):
if len(line) >= indent+len(self._PROMPT)+1 and line[indent+len(self._PROMPT)] != ' ':
raise ValueError('line %r of the docstring for %s '
'lacks blank after %s: %r' %
(lineno+i+1, name,
line[indent:indent+len(self._PROMPT)], line))
class ShellDocTestRunner(doctest.DocTestRunner):
_EXC_WRAPPER = _EXC_WRAPPER
_BEFORE, _AFTER = [len(i) for i in _EXC_WRAPPER.split("%s")]
def __init__(self, checker=None, verbose=None, verbose_level=None, optionflags=0):
doctest.DocTestRunner.__init__(self, checker=checker, verbose=verbose, optionflags=optionflags)
self._verbose_level = verbose_level
def report_start(self, out, test, example):
source = example.source[self._BEFORE:-(self._AFTER+1)] + "\n"
if self._verbose_level > 1:
out('Label:%s\n' % example.label)
if self._verbose:
if example.want:
out('Trying:\n' + doctest._indent(source) +
'Expecting:\n' + doctest._indent(example.want))
else:
out('Trying:\n' + doctest._indent(source) +
'Expecting nothing\n')
def _failure_header(self, test, example):
out = [self.DIVIDER]
if test.filename:
if test.lineno is not None and example.lineno is not None:
lineno = test.lineno + example.lineno + 1
else:
lineno = '?'
out.append('File "%s", line %s, in %s' %
(test.filename, lineno, test.name))
else:
out.append('Line %s, in %s' % (example.lineno+1, test.name))
out.append('Failed example:')
source = example.source[self._BEFORE:-(self._AFTER+1)] + "\n"
out.append(doctest._indent(source))
return '\n'.join(out)
def testfile(filename, module_relative=True, name=None, package=None,
globs=None, verbose=None, report=True, optionflags=doctest.ELLIPSIS|doctest.NORMALIZE_WHITESPACE,
extraglobs=None, raise_on_error=False, exclude_empty=False,
verbose_level=None, filters=None, encoding=None):
# Assemble the globals.
    if globs is None:
globs = dict()
else:
globs = globs.copy()
if extraglobs is not None:
globs.update(extraglobs)
if '__name__' not in globs:
globs['__name__'] = '__main__'
globs.update({"system_command": system_command})
global master
# Relativize the path
with open(filename, 'U') as fp:
text = fp.read()
# If no name was given, then use the file's name.
if name is None:
name = os.path.basename(filename)
finder = doctest.DocTestFinder(parser=ShellDocTestParser(), exclude_empty=exclude_empty)
if raise_on_error:
runner = doctest.DebugRunner(verbose=verbose, optionflags=optionflags)
else:
runner = ShellDocTestRunner(verbose=verbose, verbose_level=verbose_level, optionflags=optionflags)
if encoding is not None:
text = text.decode(encoding)
# quote escape
text = text.replace('\"', '\\"').replace("\'", "\\'")
# Read the file, convert it to a test, and run it.
parser = ShellDocTestParser()
test = parser.get_doctest(text, globs, name, filename, 0)
# run shell doctest
runner.run(test)
if report:
runner.summarize()
if master is None:
master = runner
else:
master.merge(runner)
if sys.version_info < (2, 6):
return runner.failures, runner.tries
return doctest.TestResults(runner.failures, runner.tries)
def testmod(m=None, name=None, globs=None, verbose=None,
report=True, optionflags=doctest.ELLIPSIS, extraglobs=None,
raise_on_error=False, exclude_empty=False,
verbose_level=None, filters=None,
):
    if globs is None:
        globs = dict()
    else:
        # copy so the caller's dict is not mutated (matches testfile above)
        globs = globs.copy()
globs.update({"system_command": system_command})
global master
if m is None:
m = sys.modules.get('__main__')
if not inspect.ismodule(m):
raise TypeError("testmod: module required; %r" % (m,))
if name is None:
name = m.__name__
finder = doctest.DocTestFinder(parser=ShellDocTestParser(), exclude_empty=exclude_empty)
if raise_on_error:
runner = doctest.DebugRunner(verbose=verbose, optionflags=optionflags)
else:
runner = ShellDocTestRunner(verbose=verbose, verbose_level=verbose_level, optionflags=optionflags)
tests = finder.find(m, name, globs=globs, extraglobs=extraglobs)
if filters:
_tests = list()
z = dict([(k,v) for v,k in enumerate(filters)])
for test in tests:
test.examples = sorted(filter(lambda x: x.label in filters, test.examples),
cmp=lambda x,y: cmp(z[x.label], z[y.label]))
_tests.append(test)
tests = _tests
for test in tests:
runner.run(test)
if report:
runner.summarize()
if master is None:
master = runner
else:
master.merge(runner)
if sys.version_info < (2, 6):
return runner.failures, runner.tries
return doctest.TestResults(runner.failures, runner.tries)
if __name__ == "__main__":
testmod()
|
release-engineering/dockpulp | refs/heads/master | tests/test_imgutils.py | 2 | #!/usr/bin/python
# -*- coding: utf-8 -*-
from dockpulp import imgutils
import pytest
import tarfile
import os
from io import BytesIO
class TarWriter(object):
def __init__(self, outfile, directory=None):
mode = "w|bz2"
if hasattr(outfile, "write"):
self.tarfile = tarfile.open(fileobj=outfile, mode=mode)
else:
self.tarfile = tarfile.open(name=outfile, mode=mode)
self.directory = directory or ""
def __enter__(self):
"""Open Tarfile."""
return self
def __exit__(self, typ, val, tb):
"""Close Tarfile."""
self.tarfile.close()
def write_file(self, name, content):
buf = BytesIO(content)
arcname = os.path.join(self.directory, name)
ti = tarfile.TarInfo(arcname)
ti.size = len(content)
self.tarfile.addfile(ti, fileobj=buf)
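# Illustrative usage of the helper above (mirroring the test below):
#
#   with TarWriter("archive.tar", directory="test/dir") as t:
#       t.write_file("repositories", b'{"foo": "bar"}')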
# tests
class TestImgutils(object):
# Tests of methods from imgutils
@pytest.mark.parametrize('path', ['repositories', './repositories', ''])
@pytest.mark.parametrize('tarjson',
['{"foo": "test1", "bar": "test2"}',
'{"bar":{"test2": "a", "test3": "b"}}',
'{"bar":{"test2": "testmember", "test3": "testmember"}}'])
def test_check_repo(self, tmpdir, path, tarjson):
filename = str(tmpdir.join("archive.tar"))
with TarWriter(filename, directory='test/dir') as t:
t.write_file(path, str.encode(tarjson))
t.write_file('testmember', str.encode('testdata'))
if path == '':
assert imgutils.check_repo(filename) == 1
elif tarjson == '{"foo": "test1", "bar": "test2"}':
assert imgutils.check_repo(filename) == 2
elif tarjson == '{"bar":{"test2": "a", "test3": "b"}}':
assert imgutils.check_repo(filename) == 3
else:
assert imgutils.check_repo(filename) == 0
|
bryceliu/ansible | refs/heads/devel | lib/ansible/vars/__init__.py | 16 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from collections import defaultdict
from collections import MutableMapping
from six import iteritems
from jinja2.exceptions import UndefinedError
try:
from hashlib import sha1
except ImportError:
from sha import sha as sha1
from ansible import constants as C
from ansible.cli import CLI
from ansible.errors import AnsibleError, AnsibleParserError, AnsibleUndefinedVariable, AnsibleFileNotFound
from ansible.inventory.host import Host
from ansible.parsing import DataLoader
from ansible.plugins import lookup_loader
from ansible.plugins.cache import FactCache
from ansible.template import Templar
from ansible.utils.debug import debug
from ansible.utils.listify import listify_lookup_plugin_terms
from ansible.utils.vars import combine_vars
from ansible.vars.hostvars import HostVars
from ansible.vars.unsafe_proxy import UnsafeProxy
VARIABLE_CACHE = dict()
HOSTVARS_CACHE = dict()
try:
from __main__ import display
display = display
except ImportError:
from ansible.utils.display import Display
display = Display()
def preprocess_vars(a):
'''
Ensures that vars contained in the parameter passed in are
returned as a list of dictionaries, to ensure for instance
that vars loaded from a file conform to an expected state.
'''
if a is None:
return None
elif not isinstance(a, list):
data = [ a ]
else:
data = a
for item in data:
if not isinstance(item, MutableMapping):
raise AnsibleError("variable files must contain either a dictionary of variables, or a list of dictionaries. Got: %s (%s)" % (a, type(a)))
return data
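# Illustrative behaviour (follows from the code above): preprocess_vars({'a': 1})
# returns [{'a': 1}], a list of mappings is returned unchanged, and any
# non-mapping item raises AnsibleError.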
class VariableManager:
def __init__(self):
self._fact_cache = FactCache()
self._nonpersistent_fact_cache = defaultdict(dict)
self._vars_cache = defaultdict(dict)
self._extra_vars = defaultdict(dict)
self._host_vars_files = defaultdict(dict)
self._group_vars_files = defaultdict(dict)
self._inventory = None
self._omit_token = '__omit_place_holder__%s' % sha1(os.urandom(64)).hexdigest()
def __getstate__(self):
data = dict(
fact_cache = self._fact_cache.copy(),
np_fact_cache = self._nonpersistent_fact_cache.copy(),
vars_cache = self._vars_cache.copy(),
extra_vars = self._extra_vars.copy(),
host_vars_files = self._host_vars_files.copy(),
group_vars_files = self._group_vars_files.copy(),
omit_token = self._omit_token,
)
return data
def __setstate__(self, data):
self._fact_cache = data.get('fact_cache', defaultdict(dict))
self._nonpersistent_fact_cache = data.get('np_fact_cache', defaultdict(dict))
self._vars_cache = data.get('vars_cache', defaultdict(dict))
self._extra_vars = data.get('extra_vars', dict())
self._host_vars_files = data.get('host_vars_files', defaultdict(dict))
self._group_vars_files = data.get('group_vars_files', defaultdict(dict))
self._omit_token = data.get('omit_token', '__omit_place_holder__%s' % sha1(os.urandom(64)).hexdigest())
self._inventory = None
def _get_cache_entry(self, play=None, host=None, task=None):
play_id = "NONE"
if play:
play_id = play._uuid
host_id = "NONE"
if host:
host_id = host.get_name()
task_id = "NONE"
if task:
task_id = task._uuid
return "PLAY:%s;HOST:%s;TASK:%s" % (play_id, host_id, task_id)
@property
def extra_vars(self):
''' ensures a clean copy of the extra_vars are made '''
return self._extra_vars.copy()
@extra_vars.setter
def extra_vars(self, value):
''' ensures a clean copy of the extra_vars are used to set the value '''
assert isinstance(value, MutableMapping)
self._extra_vars = value.copy()
def set_inventory(self, inventory):
self._inventory = inventory
def _preprocess_vars(self, a):
'''
Ensures that vars contained in the parameter passed in are
returned as a list of dictionaries, to ensure for instance
that vars loaded from a file conform to an expected state.
'''
if a is None:
return None
elif not isinstance(a, list):
data = [ a ]
else:
data = a
for item in data:
if not isinstance(item, MutableMapping):
raise AnsibleError("variable files must contain either a dictionary of variables, or a list of dictionaries. Got: %s (%s)" % (a, type(a)))
return data
def get_vars(self, loader, play=None, host=None, task=None, include_hostvars=True, include_delegate_to=True, use_cache=True):
'''
Returns the variables, with optional "context" given via the parameters
for the play, host, and task (which could possibly result in different
sets of variables being returned due to the additional context).
The order of precedence is:
- play->roles->get_default_vars (if there is a play context)
- group_vars_files[host] (if there is a host context)
- host_vars_files[host] (if there is a host context)
- host->get_vars (if there is a host context)
- fact_cache[host] (if there is a host context)
- play vars (if there is a play context)
- play vars_files (if there's no host context, ignore
file names that cannot be templated)
- task->get_vars (if there is a task context)
- vars_cache[host] (if there is a host context)
- extra vars
'''
debug("in VariableManager get_vars()")
cache_entry = self._get_cache_entry(play=play, host=host, task=task)
if cache_entry in VARIABLE_CACHE and use_cache:
debug("vars are cached, returning them now")
return VARIABLE_CACHE[cache_entry]
all_vars = defaultdict(dict)
if play:
# first we compile any vars specified in defaults/main.yml
# for all roles within the specified play
for role in play.get_roles():
all_vars = combine_vars(all_vars, role.get_default_vars())
# if we have a task in this context, and that task has a role, make
# sure it sees its defaults above any other roles, as we previously
# (v1) made sure each task had a copy of its roles default vars
if task and task._role is not None:
all_vars = combine_vars(all_vars, task._role.get_default_vars())
if host:
# next, if a host is specified, we load any vars from group_vars
# files and then any vars from host_vars files which may apply to
# this host or the groups it belongs to
# we merge in vars from groups specified in the inventory (INI or script)
all_vars = combine_vars(all_vars, host.get_group_vars())
# then we merge in the special 'all' group_vars first, if they exist
if 'all' in self._group_vars_files:
data = preprocess_vars(self._group_vars_files['all'])
for item in data:
all_vars = combine_vars(all_vars, item)
for group in host.get_groups():
if group.name in self._group_vars_files and group.name != 'all':
for data in self._group_vars_files[group.name]:
data = preprocess_vars(data)
for item in data:
all_vars = combine_vars(all_vars, item)
# then we merge in vars from the host specified in the inventory (INI or script)
all_vars = combine_vars(all_vars, host.get_vars())
# then we merge in the host_vars/<hostname> file, if it exists
host_name = host.get_name()
if host_name in self._host_vars_files:
for data in self._host_vars_files[host_name]:
data = preprocess_vars(data)
for item in data:
all_vars = combine_vars(all_vars, item)
# finally, the facts caches for this host, if it exists
try:
host_facts = self._fact_cache.get(host.name, dict())
for k in host_facts.keys():
if host_facts[k] is not None and not isinstance(host_facts[k], UnsafeProxy):
host_facts[k] = UnsafeProxy(host_facts[k])
all_vars = combine_vars(all_vars, host_facts)
except KeyError:
pass
if play:
all_vars = combine_vars(all_vars, play.get_vars())
for vars_file_item in play.get_vars_files():
# create a set of temporary vars here, which incorporate the
# extra vars so we can properly template the vars_files entries
temp_vars = combine_vars(all_vars, self._extra_vars)
templar = Templar(loader=loader, variables=temp_vars)
# we assume each item in the list is itself a list, as we
# support "conditional includes" for vars_files, which mimics
# the with_first_found mechanism.
#vars_file_list = templar.template(vars_file_item)
vars_file_list = vars_file_item
if not isinstance(vars_file_list, list):
vars_file_list = [ vars_file_list ]
# now we iterate through the (potential) files, and break out
# as soon as we read one from the list. If none are found, we
# raise an error, which is silently ignored at this point.
try:
for vars_file in vars_file_list:
vars_file = templar.template(vars_file)
try:
data = preprocess_vars(loader.load_from_file(vars_file))
if data is not None:
for item in data:
all_vars = combine_vars(all_vars, item)
break
except AnsibleFileNotFound as e:
# we continue on loader failures
continue
except AnsibleParserError as e:
raise
else:
raise AnsibleFileNotFound("vars file %s was not found" % vars_file_item)
except (UndefinedError, AnsibleUndefinedVariable):
if host is not None and self._fact_cache.get(host.name, dict()).get('module_setup') and task is not None:
raise AnsibleUndefinedVariable("an undefined variable was found when attempting to template the vars_files item '%s'" % vars_file_item, obj=vars_file_item)
else:
# we do not have a full context here, and the missing variable could be
# because of that, so just show a warning and continue
display.vvv("skipping vars_file '%s' due to an undefined variable" % vars_file_item)
continue
if not C.DEFAULT_PRIVATE_ROLE_VARS:
for role in play.get_roles():
all_vars = combine_vars(all_vars, role.get_vars(include_params=False))
if task:
if task._role:
all_vars = combine_vars(all_vars, task._role.get_vars())
all_vars = combine_vars(all_vars, task.get_vars())
if host:
all_vars = combine_vars(all_vars, self._vars_cache.get(host.get_name(), dict()))
all_vars = combine_vars(all_vars, self._nonpersistent_fact_cache.get(host.name, dict()))
all_vars = combine_vars(all_vars, self._extra_vars)
# FIXME: make sure all special vars are here
# Finally, we create special vars
all_vars['playbook_dir'] = loader.get_basedir()
if host:
all_vars['group_names'] = [group.name for group in host.get_groups()]
if self._inventory is not None:
all_vars['groups'] = dict()
for (group_name, group) in iteritems(self._inventory.groups):
all_vars['groups'][group_name] = [h.name for h in group.get_hosts()]
if include_hostvars:
hostvars_cache_entry = self._get_cache_entry(play=play)
if hostvars_cache_entry in HOSTVARS_CACHE:
hostvars = HOSTVARS_CACHE[hostvars_cache_entry]
else:
hostvars = HostVars(play=play, inventory=self._inventory, loader=loader, variable_manager=self)
HOSTVARS_CACHE[hostvars_cache_entry] = hostvars
all_vars['hostvars'] = hostvars
if task:
if task._role:
all_vars['role_path'] = task._role._role_path
# if we have a task and we're delegating to another host, figure out the
# variables for that host now so we don't have to rely on hostvars later
if task.delegate_to is not None and include_delegate_to:
# we unfortunately need to template the delegate_to field here,
# as we're fetching vars before post_validate has been called on
# the task that has been passed in
templar = Templar(loader=loader, variables=all_vars)
items = []
if task.loop is not None:
if task.loop in lookup_loader:
#TODO: remove convert_bare true and deprecate this in with_
try:
loop_terms = listify_lookup_plugin_terms(terms=task.loop_args, templar=templar, loader=loader, fail_on_undefined=True, convert_bare=True)
except AnsibleUndefinedVariable as e:
if 'has no attribute' in str(e):
loop_terms = []
self._display.deprecated("Skipping task due to undefined attribute, in the future this will be a fatal error.")
else:
raise
items = lookup_loader.get(task.loop, loader=loader, templar=templar).run(terms=loop_terms, variables=all_vars)
else:
raise AnsibleError("Unexpected failure in finding the lookup named '%s' in the available lookup plugins" % task.loop)
else:
items = [None]
vars_copy = all_vars.copy()
delegated_host_vars = dict()
for item in items:
# update the variables with the item value for templating, in case we need it
if item is not None:
vars_copy['item'] = item
templar.set_available_variables(vars_copy)
delegated_host_name = templar.template(task.delegate_to, fail_on_undefined=False)
if delegated_host_name in delegated_host_vars:
# no need to repeat ourselves, as the delegate_to value
# does not appear to be tied to the loop item variable
continue
# a dictionary of variables to use if we have to create a new host below
new_delegated_host_vars = dict(
ansible_host=delegated_host_name,
ansible_user=C.DEFAULT_REMOTE_USER,
ansible_connection=C.DEFAULT_TRANSPORT,
)
# now try to find the delegated-to host in inventory, or failing that,
# create a new host on the fly so we can fetch variables for it
delegated_host = None
if self._inventory is not None:
delegated_host = self._inventory.get_host(delegated_host_name)
# try looking it up based on the address field, and finally
# fall back to creating a host on the fly to use for the var lookup
if delegated_host is None:
for h in self._inventory.get_hosts(ignore_limits_and_restrictions=True):
# check if the address matches, or if both the delegated_to host
# and the current host are in the list of localhost aliases
if h.address == delegated_host_name or h.name in C.LOCALHOST and delegated_host_name in C.LOCALHOST:
delegated_host = h
break
else:
delegated_host = Host(name=delegated_host_name)
delegated_host.vars.update(new_delegated_host_vars)
else:
delegated_host = Host(name=delegated_host_name)
delegated_host.vars.update(new_delegated_host_vars)
# now we go fetch the vars for the delegated-to host and save them in our
# master dictionary of variables to be used later in the TaskExecutor/PlayContext
delegated_host_vars[delegated_host_name] = self.get_vars(
loader=loader,
play=play,
host=delegated_host,
task=task,
include_delegate_to=False,
include_hostvars=False,
)
all_vars['ansible_delegated_vars'] = delegated_host_vars
if self._inventory is not None:
all_vars['inventory_dir'] = self._inventory.basedir()
if play:
# add the list of hosts in the play, as adjusted for limit/filters
# DEPRECATED: play_hosts should be deprecated in favor of ansible_play_hosts,
# however this would take work in the templating engine, so for now
# we'll add both so we can give users something transitional to use
host_list = [x.name for x in self._inventory.get_hosts()]
all_vars['play_hosts'] = host_list
all_vars['ansible_play_hosts'] = host_list
        # the 'omit' value allows params to be left out if the variable they are based on is undefined
all_vars['omit'] = self._omit_token
all_vars['ansible_version'] = CLI.version_info(gitinfo=False)
if 'hostvars' in all_vars and host:
all_vars['vars'] = all_vars['hostvars'][host.get_name()]
#VARIABLE_CACHE[cache_entry] = all_vars
debug("done with get_vars()")
return all_vars
def _get_inventory_basename(self, path):
'''
Returns the basename minus the extension of the given path, so the
bare filename can be matched against host/group names later
'''
(name, ext) = os.path.splitext(os.path.basename(path))
if ext not in ('.yml', '.yaml'):
return os.path.basename(path)
else:
return name
def _load_inventory_file(self, path, loader):
'''
helper function, which loads the file and gets the
basename of the file without the extension
'''
if loader.is_directory(path):
data = dict()
try:
names = loader.list_directory(path)
except os.error as err:
raise AnsibleError("This folder cannot be listed: %s: %s." % (path, err.strerror))
# evaluate files in a stable order rather than whatever
# order the filesystem lists them.
names.sort()
# do not parse hidden files or dirs, e.g. .svn/
paths = [os.path.join(path, name) for name in names if not name.startswith('.')]
for p in paths:
_found, results = self._load_inventory_file(path=p, loader=loader)
if results is not None:
data = combine_vars(data, results)
else:
file_name, ext = os.path.splitext(path)
data = None
if not ext or ext not in C.YAML_FILENAME_EXTENSIONS:
for test_ext in C.YAML_FILENAME_EXTENSIONS:
new_path = path + test_ext
if loader.path_exists(new_path):
data = loader.load_from_file(new_path)
break
else:
if loader.path_exists(path):
data = loader.load_from_file(path)
name = self._get_inventory_basename(path)
return (name, data)
def add_host_vars_file(self, path, loader):
'''
Loads and caches a host_vars file in the _host_vars_files dict,
where the key to that dictionary is the basename of the file, minus
the extension, for matching against a given inventory host name
'''
(name, data) = self._load_inventory_file(path, loader)
if data:
if name not in self._host_vars_files:
self._host_vars_files[name] = []
self._host_vars_files[name].append(data)
return data
else:
return dict()
def add_group_vars_file(self, path, loader):
'''
Loads and caches a host_vars file in the _host_vars_files dict,
where the key to that dictionary is the basename of the file, minus
the extension, for matching against a given inventory host name
'''
(name, data) = self._load_inventory_file(path, loader)
if data:
if name not in self._group_vars_files:
self._group_vars_files[name] = []
self._group_vars_files[name].append(data)
return data
else:
return dict()
def set_host_facts(self, host, facts):
'''
Sets or updates the given facts for a host in the fact cache.
'''
assert isinstance(facts, dict)
if host.name not in self._fact_cache:
self._fact_cache[host.name] = facts
else:
try:
self._fact_cache[host.name].update(facts)
except KeyError:
self._fact_cache[host.name] = facts
def set_nonpersistent_facts(self, host, facts):
'''
Sets or updates the given facts for a host in the fact cache.
'''
assert isinstance(facts, dict)
if host.name not in self._nonpersistent_fact_cache:
self._nonpersistent_fact_cache[host.name] = facts
else:
try:
self._nonpersistent_fact_cache[host.name].update(facts)
except KeyError:
self._nonpersistent_fact_cache[host.name] = facts
def set_host_variable(self, host, varname, value):
'''
Sets a value in the vars_cache for a host.
'''
host_name = host.get_name()
if host_name not in self._vars_cache:
self._vars_cache[host_name] = dict()
self._vars_cache[host_name][varname] = value
|
technogesic/govhack2017 | refs/heads/master | music-lab/10_penrith-central/subway.py | 5 | ##
# TRACK 1
# TWO TRAINS
# From Data-Driven DJ (datadrivendj.com) by Brian Foo (brianfoo.com)
# This file builds the sequence file for use with ChucK from the data supplied
##
# Library dependencies
import csv
import json
import math
import os
import random
import re
import time
# Config
BPM = 120 # Beats per minute, e.g. 60, 75, 100, 120, 150
METERS_PER_BEAT = 75 # Higher numbers create shorter songs
DIVISIONS_PER_BEAT = 4 # e.g. 4 = quarter notes, 8 = eighth notes
VARIANCE_MS = 20 # +/- milliseconds an instrument note should be off by to give it a little more "natural" feel
VARIANCE_RATE = 0 # for adding variance to the playback rate
INSTRUMENTS_INPUT_FILE = 'data/instruments.csv'
STATIONS_INPUT_FILE = 'data/stations.csv'
REPORT_SUMMARY_OUTPUT_FILE = 'data/report_summary.csv'
REPORT_SEQUENCE_OUTPUT_FILE = 'data/report_sequence.csv'
INSTRUMENTS_OUTPUT_FILE = 'data/ck_instruments.csv'
SEQUENCE_OUTPUT_FILE = 'data/ck_sequence.csv'
STATIONS_VISUALIZATION_OUTPUT_FILE = 'visualization/stations/data/stations.json'
MAP_VISUALIZATION_OUTPUT_FILE = 'visualization/map/data/stations.json'
INSTRUMENTS_DIR = 'instruments/'
WRITE_SEQUENCE = True
WRITE_REPORT = True
WRITE_JSON = True
# Calculations
BEAT_MS = round(60.0 / BPM * 1000)
ROUND_TO_NEAREST = round(BEAT_MS/DIVISIONS_PER_BEAT)
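# Illustrative check (computed from the defaults above): at BPM = 120,
# BEAT_MS = round(60.0 / 120 * 1000) = 500.0 ms per beat, and with
# DIVISIONS_PER_BEAT = 4 the sequence grid ROUND_TO_NEAREST = 125.0 ms.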
print('Building sequence at '+str(BPM)+' BPM ('+str(BEAT_MS)+'ms per beat)')
# Initialize Variables
instruments = []
stations = []
sequence = []
hindex = 0
# For creating pseudo-random numbers
def halton(index, base):
result = 0.0
f = 1.0 / base
i = 1.0 * index
while(i > 0):
result += f * (i % base)
i = math.floor(i / base)
f = f / base
return result
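# Illustrative values (computed from the definition above): with base 3,
# indices 1, 2, 3, 4 yield 1/3, 2/3, 1/9, 4/9: a deterministic low-discrepancy
# (Halton) sequence used below instead of uniform random numbers.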
# Find index of first item that matches value
def findInList(list, key, value):
found = -1
for index, item in enumerate(list):
if item[key] == value:
found = index
break
return found
def roundToNearest(n, nearest):
return 1.0 * round(1.0*n/nearest) * nearest
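# e.g. (illustrative) roundToNearest(180, 125) == 125.0 and
# roundToNearest(190, 125) == 250.0, snapping times onto the beat grid.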
# Read instruments from file
with open(INSTRUMENTS_INPUT_FILE, 'rb') as f:
r = csv.reader(f, delimiter=',')
next(r, None) # remove header
for name,type,price,bracket_min,bracket_max,file,from_gain,to_gain,from_tempo,to_tempo,gain_phase,tempo_phase,tempo_offset,interval_phase,interval,interval_offset,active in r:
if file and int(active):
index = len(instruments)
# build instrument object
instrument = {
'index': index,
'name': name,
'type': type.lower().replace(' ', '_'),
'bracket_min': float(bracket_min),
'bracket_max': float(bracket_max),
'price': int(price),
'file': INSTRUMENTS_DIR + file,
'from_gain': round(float(from_gain), 2),
'to_gain': round(float(to_gain), 2),
'from_tempo': float(from_tempo),
'to_tempo': float(to_tempo),
'gain_phase': int(gain_phase),
'tempo_phase': int(tempo_phase),
'from_beat_ms': int(round(BEAT_MS/float(from_tempo))),
'to_beat_ms': int(round(BEAT_MS/float(to_tempo))),
'tempo_offset': float(tempo_offset),
'interval_ms': int(int(interval_phase)*BEAT_MS),
'interval': int(interval),
'interval_offset': int(interval_offset)
}
# add instrument to instruments
instruments.append(instrument)
# Read stations from file
with open(STATIONS_INPUT_FILE, 'rb') as f:
r = csv.reader(f, delimiter=',')
next(r, None) # remove header
for name,lat,lng,income,borough in r:
index = len(stations)
stations.append({
'index': index,
'name': name,
        'budget': float(int(income) * 52) / 12,  # income is a CSV string; convert before scaling
'percentile': 0.0,
'lat': float(lat),
'lng': float(lng),
'beats': 0,
'distance': 0,
'duration': 0,
'borough': borough,
'borough_next': borough,
'instruments': []
})
# For calculating distance between two coords(lat, lng)
def distBetweenCoords(lat1, lng1, lat2, lng2):
earthRadius = 6371000 # meters
dLat = math.radians(lat2-lat1)
dLng = math.radians(lng2-lng1)
a = math.sin(dLat/2) * math.sin(dLat/2) + math.cos(math.radians(lat1)) * math.cos(math.radians(lat2)) * math.sin(dLng/2) * math.sin(dLng/2)
c = 2 * math.atan2(math.sqrt(a), math.sqrt(1-a))
dist = float(earthRadius * c)
return dist
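# The function above is the haversine great-circle formula; as an illustrative
# sanity check, two points 0.01 degrees of latitude apart should return
# roughly 1112 meters (6371000 * 0.01 * pi / 180).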
def getIncomePercentile(station, sorted_station_list):
percentile = 0.0
index = findInList(sorted_station_list, 'index', station['index'])
if index >= 0:
percentile = 1.0 * index / len(sorted_station_list) * 100
return percentile
# Buy instruments based on a specified budget
def buyInstruments(station, instruments_shelf):
budget = station['budget']
percentile = station['percentile']
instruments_cart = []
for i in instruments_shelf:
# skip if not in bracket
if percentile < i['bracket_min'] or percentile >= i['bracket_max']:
continue
# add to cart if in budget
elif i['price'] <= budget:
budget -= i['price']
if i['type'] != 'placeholder':
instruments_cart.append(i)
# out of budget, finished
else:
break
return instruments_cart
# Pre-process stations
min_distance = 0
max_distance = 0
total_distance = 0
total_beats = 0
total_ms = 0
min_duration = 0
max_duration = 0
# Create a list of stations sorted by budget
sorted_stations = stations[:]
sorted_stations = sorted(sorted_stations, key=lambda k: k['budget'])
# Loop through stations
for index, station in enumerate(stations):
# determine station's income percentile
stations[index]['percentile'] = getIncomePercentile(station, sorted_stations)
# determine the station's instruments based on budget
stations[index]['instruments'] = buyInstruments(stations[index], instruments)
if index > 0:
# determine distance between last station
distance = distBetweenCoords(station['lat'], station['lng'], stations[index-1]['lat'], stations[index-1]['lng'])
beats = int(round(distance / METERS_PER_BEAT))
duration = beats * BEAT_MS
stations[index-1]['distance'] = distance
stations[index-1]['beats'] = beats
stations[index-1]['duration'] = duration
stations[index-1]['borough_next'] = station['borough']
total_distance += distance
total_beats += beats
total_ms += duration
if distance > max_distance:
max_distance = distance
max_duration = duration
if distance < min_distance or min_distance == 0:
min_distance = distance
min_duration = duration
# Calculate how many beats
station_count = len(stations)-1
total_seconds = int(1.0*total_ms/1000)
seconds_per_station = int(1.0*total_seconds/station_count)
print('Total distance in meters: '+str(round(total_distance)))
print('Distance range in meters: ['+str(min_distance)+','+str(max_distance)+']')
print('Average beats per station: '+str(1.0*total_beats/station_count))
print('Average time per station: '+time.strftime('%M:%S', time.gmtime(seconds_per_station)))
print('Main sequence beats: '+str(total_beats))
print('Main sequence time: '+time.strftime('%M:%S', time.gmtime(total_seconds)) + ' (' + str(total_seconds) + 's)')
# Multiplier based on sine curve
def getMultiplier(percent_complete):
radians = percent_complete * math.pi
multiplier = math.sin(radians)
    if multiplier < 0:
        multiplier = 0.0
    elif multiplier > 1:
        multiplier = 1.0
return multiplier
# Retrieve gain based on current beat
def getGain(instrument, beat):
beats_per_phase = instrument['gain_phase']
percent_complete = float(beat % beats_per_phase) / beats_per_phase
multiplier = getMultiplier(percent_complete)
from_gain = instrument['from_gain']
to_gain = instrument['to_gain']
min_gain = min(from_gain, to_gain)
gain = multiplier * (to_gain - from_gain) + from_gain
gain = max(min_gain, round(gain, 2))
return gain
# Get beat duration in ms based on current point in time
def getBeatMs(instrument, beat, round_to):
from_beat_ms = instrument['from_beat_ms']
to_beat_ms = instrument['to_beat_ms']
beats_per_phase = instrument['tempo_phase']
percent_complete = float(beat % beats_per_phase) / beats_per_phase
multiplier = getMultiplier(percent_complete)
ms = multiplier * (to_beat_ms - from_beat_ms) + from_beat_ms
ms = int(roundToNearest(ms, round_to))
return ms
# Return if the instrument should be played in the given interval
def isValidInterval(instrument, elapsed_ms):
interval_ms = instrument['interval_ms']
interval = instrument['interval']
interval_offset = instrument['interval_offset']
return int(math.floor(1.0*elapsed_ms/interval_ms)) % interval == interval_offset
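# Illustrative reading (assumed values): with interval_ms=2000, interval=4 and
# interval_offset=1, an instrument sounds only while
# floor(elapsed_ms / 2000) % 4 == 1, i.e. the second 2-second window of every
# 8-second cycle.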
# Make sure there's no sudden drop in gain
def continueFromPrevious(instrument):
return instrument['bracket_min'] > 0 or instrument['bracket_max'] < 100
# Add beats to sequence
def addBeatsToSequence(instrument, duration, ms, beat_ms, round_to):
global sequence
global hindex
offset_ms = int(instrument['tempo_offset'] * beat_ms)
ms += offset_ms
previous_ms = int(ms)
from_beat_ms = instrument['from_beat_ms']
to_beat_ms = instrument['to_beat_ms']
min_ms = min(from_beat_ms, to_beat_ms)
remaining_duration = int(duration)
elapsed_duration = offset_ms
continue_from_prev = continueFromPrevious(instrument)
while remaining_duration >= min_ms:
elapsed_ms = int(ms)
elapsed_beat = int((elapsed_ms-previous_ms) / beat_ms)
# continue beat from previous
if continue_from_prev:
elapsed_beat = int(elapsed_ms / beat_ms)
this_beat_ms = getBeatMs(instrument, elapsed_beat, round_to)
# add to sequence if in valid interval
if isValidInterval(instrument, elapsed_ms):
h = halton(hindex, 3)
variance = int(h * VARIANCE_MS * 2 - VARIANCE_MS)
rate_variance = float(h * VARIANCE_RATE * 2 - VARIANCE_RATE)
sequence.append({
'instrument_index': instrument['index'],
'instrument': instrument,
'position': 0,
'gain': getGain(instrument, elapsed_beat),
'rate': 1.0 + rate_variance,
'elapsed_ms': max([elapsed_ms + variance, 0])
})
hindex += 1
remaining_duration -= this_beat_ms
elapsed_duration += this_beat_ms
ms += this_beat_ms
# Build main sequence
for instrument in instruments:
ms = 0
station_queue_duration = 0
if instrument['type'] == 'misc':
continue
# Each station in stations
for station in stations:
# Check if instrument is in this station
instrument_index = findInList(station['instruments'], 'index', instrument['index'])
# Instrument not here, just add the station duration and continue
if instrument_index < 0 and station_queue_duration > 0:
addBeatsToSequence(instrument, station_queue_duration, ms, BEAT_MS, ROUND_TO_NEAREST)
ms += station_queue_duration + station['duration']
station_queue_duration = 0
elif instrument_index < 0:
ms += station['duration']
else:
station_queue_duration += station['duration']
if station_queue_duration > 0:
addBeatsToSequence(instrument, station_queue_duration, ms, BEAT_MS, ROUND_TO_NEAREST)
# Calculate total time
total_seconds = int(1.0*total_ms/1000)
print('Total sequence time: '+time.strftime('%M:%S', time.gmtime(total_seconds)) + ' (' + str(total_seconds) + 's)')
# Sort sequence
sequence = sorted(sequence, key=lambda k: k['elapsed_ms'])
# Add milliseconds to sequence
elapsed = 0
for index, step in enumerate(sequence):
sequence[index]['milliseconds'] = step['elapsed_ms'] - elapsed
elapsed = step['elapsed_ms']
# Write instruments to file
if WRITE_SEQUENCE:
with open(INSTRUMENTS_OUTPUT_FILE, 'wb') as f:
w = csv.writer(f)
for index, instrument in enumerate(instruments):
w.writerow([index])
w.writerow([instrument['file']])
f.seek(-2, os.SEEK_END) # remove newline
f.truncate()
print('Successfully wrote instruments to file: '+INSTRUMENTS_OUTPUT_FILE)
# Write sequence to file
if WRITE_SEQUENCE:
with open(SEQUENCE_OUTPUT_FILE, 'wb') as f:
w = csv.writer(f)
for step in sequence:
w.writerow([step['instrument_index']])
w.writerow([step['position']])
w.writerow([step['gain']])
w.writerow([step['rate']])
w.writerow([step['milliseconds']])
f.seek(-2, os.SEEK_END) # remove newline
f.truncate()
print('Successfully wrote sequence to file: '+SEQUENCE_OUTPUT_FILE)
# Write summary file
if WRITE_REPORT:
with open(REPORT_SUMMARY_OUTPUT_FILE, 'wb') as f:
w = csv.writer(f)
w.writerow(['Time', 'Name', 'Distance', 'Duration', 'Beats', 'Instruments'])
elapsed = 0
for station in stations:
duration = station['duration']
duration_f = time.strftime('%M:%S', time.gmtime(int(duration/1000)))
elapsed_f = time.strftime('%M:%S', time.gmtime(int(elapsed/1000)))
elapsed += duration
w.writerow([elapsed_f, station['name'], round(station['distance'], 2), duration_f, station['beats'], ' '.join([i['name'] for i in station['instruments']])])
print('Successfully wrote summary file: '+REPORT_SUMMARY_OUTPUT_FILE)
# Write sequence report to file
if WRITE_REPORT:
with open(REPORT_SEQUENCE_OUTPUT_FILE, 'wb') as f:
w = csv.writer(f)
w.writerow(['Time', 'Instrument', 'Gain'])
for step in sequence:
instrument = instruments[step['instrument_index']]
elapsed = step['elapsed_ms']
elapsed_f = time.strftime('%M:%S', time.gmtime(int(elapsed/1000)))
ms = elapsed % 1000
            elapsed_f += '.' + str(ms).zfill(3)  # zero-pad so 5 ms reads ".005", not ".5"
w.writerow([elapsed_f, instrument['file'], step['gain']])
f.seek(-2, os.SEEK_END) # remove newline
f.truncate()
print('Successfully wrote sequence report to file: '+REPORT_SEQUENCE_OUTPUT_FILE)
# Write JSON data for the visualization
if WRITE_JSON:
json_data = []
elapsed_duration = 0
for station in stations:
json_data.append({
'name': station['name'],
'borough': station['borough'].upper(),
'borough_next': station['borough_next'].upper(),
'duration': station['duration'],
'elapsed_duration': elapsed_duration,
'min_duration': min_duration,
'lat': station['lat'],
'lng': station['lng']
})
elapsed_duration += station['duration']
with open(STATIONS_VISUALIZATION_OUTPUT_FILE, 'w') as outfile:
json.dump(json_data, outfile)
print('Successfully wrote to JSON file: '+STATIONS_VISUALIZATION_OUTPUT_FILE)
with open(MAP_VISUALIZATION_OUTPUT_FILE, 'w') as outfile:
json.dump(json_data, outfile)
print('Successfully wrote to JSON file: '+MAP_VISUALIZATION_OUTPUT_FILE)
|
emercs/BeagleBone-linux | refs/heads/3.14 | tools/perf/scripts/python/sctop.py | 11180 | # system call top
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Periodically displays system-wide system call totals, broken down by
# syscall. If a [comm] arg is specified, only syscalls called by
# [comm] are displayed. If an [interval] arg is specified, the display
# will be refreshed every [interval] seconds. The default interval is
# 3 seconds.
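# Illustrative invocations (assumed, per the usage string below):
#   perf script -s sctop.py           # all comms, 3-second refresh
#   perf script -s sctop.py 5         # all comms, 5-second refresh
#   perf script -s sctop.py bash 5    # only syscalls made by "bash"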
import os, sys, thread, time
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
usage = "perf script -s sctop.py [comm] [interval]\n";
for_comm = None
default_interval = 3
interval = default_interval
if len(sys.argv) > 3:
sys.exit(usage)
if len(sys.argv) > 2:
for_comm = sys.argv[1]
interval = int(sys.argv[2])
elif len(sys.argv) > 1:
try:
interval = int(sys.argv[1])
except ValueError:
for_comm = sys.argv[1]
interval = default_interval
syscalls = autodict()
def trace_begin():
    thread.start_new_thread(print_syscall_totals, (interval,))
def raw_syscalls__sys_enter(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
id, args):
if for_comm is not None:
if common_comm != for_comm:
return
try:
syscalls[id] += 1
except TypeError:
syscalls[id] = 1
def print_syscall_totals(interval):
while 1:
clear_term()
if for_comm is not None:
print "\nsyscall events for %s:\n\n" % (for_comm),
else:
print "\nsyscall events:\n\n",
print "%-40s %10s\n" % ("event", "count"),
print "%-40s %10s\n" % ("----------------------------------------", \
"----------"),
for id, val in sorted(syscalls.iteritems(), key = lambda(k, v): (v, k), \
reverse = True):
try:
print "%-40s %10d\n" % (syscall_name(id), val),
except TypeError:
pass
syscalls.clear()
time.sleep(interval)
|
yushu9/kosmosfs | refs/heads/master | src/python/flogger/flogger.py | 11 | #!/usr/bin/env python
#
# $Id$
#
# Copyright 2006 Kosmix Corp.
#
# Author: Blake Lewis (Kosmix Corp.)
#
# This file is part of Kosmos File System (KFS).
#
# Licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied. See the License for the specific language governing
# permissions and limitations under the License.
#
# \file flogger.py
# \brief perform random sequence of KFS operations for testing
#
import kfs
import sys
import random
import ConfigParser
from stat import *
from readdirplus import *
TESTNAME = "flogger"
#
# Test parameter handling
#
default_test_params = {
"kfs_properties": "KfsClient.prp", # KFS property file
"log_file": TESTNAME + ".log", # log of operations
"op_count": "5000", # no. of ops to perform
"max_file_size": "100000", # maximum file size
"max_tree_depth": "100", # max. directory tree depth
"max_files": "10000", # max. no. of files
"max_directories": "1000", # max. no. of directories
"max_subdirs": "50", # max. subdirs in a directory
"max_dirsize": "500", # max. entries in a directory
"max_rw_size": "4096", # max. read or write length
"oplist" : "ascend descend mkdir rmdir create remove read write truncate",
"opweight" : "100 100 60 40 200 180 220 300 150" }
default_config_file = TESTNAME + ".cfg" # test configuration file
param_section = "Test parameters" # section heading in file
config = ConfigParser.ConfigParser(default_test_params)
def setup_params(config_file):
"""Read in the configuration file"""
if config_file:
config.read(config_file)
else:
config.add_section(param_section)
def get_param_int(name):
"""Look up an integer parameter"""
return config.getint(param_section, name)
def get_optional_param_int(name):
"""Look up a parameter; return -1 if undefined"""
if config.has_option(param_section, name):
return get_param_int(name)
else:
return -1
def get_param_string(name):
"""Look up a string parameter"""
return config.get(param_section, name)
#
# Initialize KFS client
#
def start_client(props):
"""Create an instance of the KFS client.
The KFS meta and chunkservers must already be running.
"""
try:
return kfs.client(props)
except:
print "Unable to start the KFS client."
print "Make sure that the meta- and chunkservers are running."
sys.exit(1)
#
# File system state information
# XXX make this a class to allow multiple test instances?
#
fs_state = {
"depth" : 0, # depth of current directory in tree
"files_created" : 0, # no. of files created
"dirs_created" : 0 } # no. of directorires created
def get_state(name):
return fs_state[name]
def set_state(name, value):
fs_state[name] = value
def change_state(name, delta):
fs_state[name] += delta
#
# Utility functions
#
def special(dir):
"""Is this a special directory name?"""
return dir in (".", "..", "/")
def isempty(client, path):
"""Is this an empty directory?"""
dirlist = client.readdir(path)
normal = [x for x in dirlist if not special(x)]
return len(normal) == 0
def files(dirlist):
"""extract plain files from a readdirplus list"""
return [f for f in dirlist if rd_isreg(f)]
def nonzero_files(dirlist):
return [f for f in files(dirlist) if rd_size(f) != 0]
def subdirs(dirlist):
"""extract subdirectories from a readdir list"""
return [d for d in dirlist if rd_isdir(d) and not special(rd_name(d))]
def emptydirs(client, dirlist):
"""extract empty subdirectories from a readdir list"""
return [d for d in subdirs(dirlist) if isempty(client, rd_name(d))]
def pick_one(namelist):
"""Pick a random name from the list"""
return random.choice(namelist)
def weighted_pick(weights):
"""
Given a list of N weights, return a random index 0 <= i < N
with the probability of each choice proportional to its weight.
"""
total = sum(weights)
fraction = random.randint(1, total)
i = -1
accum = 0
while accum < fraction:
i += 1
accum += weights[i]
return i
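# Illustrative behaviour (follows from the code above): weighted_pick([1, 3])
# returns index 0 with probability 1/4 and index 1 with probability 3/4.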
def invent_name(dirlist):
"""Choose a random name not already present"""
done = False
dirnames = [rd_name(d) for d in dirlist]
while not done:
name = pick_one(filenames)
done = name not in dirnames
return name
#
# Classes representing each test operation. Each class provides
# a weight function that determines the probability that the operation
# will get picked next, and a "doit" function that actually carries
# it out. A log is recorded in the form of a series of Unix commands
# so that the same sequence can be tried on a non-KFS file system
# (however, the read and write operations are not real commands).
#
# The naming scheme is important: for each operation listed in the
# "oplist" entry of the config file, we expect to find a corresponding
# <operation>_op class defined here.
#
class test_op:
"""Base class for test ops"""
def __init__(self, client, logfp, wt):
self.client = client # kfs client object
self.logfp = logfp # file handle for log
self.maxweight = wt # maximum weight for op
self.count = 0 # no. of times done
def weight(self, dirlist):
"Default: return max. weight unmodified"
return self.maxweight
def doit(self, dirlist):
self.count += 1
def log(self, msg):
"Append messages to the log file"
print >> self.logfp, msg
class ascend_op(test_op):
"""Go up one directory level"""
def weight(self, dirlist):
"""Weight to give to ascend op"""
d = get_state("depth")
reldepth = float(d) / get_param_int("max_tree_depth")
return int(self.maxweight * reldepth)
def doit(self, dirlist):
"""Move up one level in the directory tree"""
self.client.cd("..")
change_state("depth", -1)
self.log("cd ..")
test_op.doit(self, dirlist)
class descend_op(test_op):
"""Descend into a subdirectory"""
def weight(self, dirlist):
"""Weight for descend op"""
nsub = len(subdirs(dirlist))
if nsub == 0: return 0
d = get_state("depth")
reldepth = float(d) / get_param_int("max_tree_depth")
return int(self.maxweight * (1 - reldepth))
def doit(self, dirlist):
"""Move down into a random subdirectory"""
dirtuple = pick_one(subdirs(dirlist))
dir = rd_name(dirtuple)
self.client.cd(dir)
change_state("depth", 1)
self.log("cd " + dir)
test_op.doit(self, dirlist)
class mkdir_op(test_op):
"""Create a directory"""
def weight(self, dirlist):
"""Weight for mkdir"""
if get_state("dirs_created") == get_param_int("max_directories"):
return 0
nsub = len(subdirs(dirlist))
relpop = float(nsub) / get_param_int("max_subdirs")
return int(self.maxweight * (1 - relpop))
def doit(self, dirlist):
dir = invent_name(dirlist)
self.client.mkdir(dir)
change_state("dirs_created", 1)
self.log("mkdir " + dir)
test_op.doit(self, dirlist)
class rmdir_op(test_op):
"""Remove an empty directory"""
def weight(self, dirlist):
"""Weight for rmdir"""
nsub = len(emptydirs(self.client, dirlist))
return int(self.maxweight * nsub)
def doit(self, dirlist):
dirtuple = pick_one(emptydirs(self.client, dirlist))
dir = rd_name(dirtuple)
self.client.rmdir(dir)
change_state("dirs_created", -1)
self.log("rmdir " + dir)
test_op.doit(self, dirlist)
class create_op(test_op):
"""Create a file"""
def weight(self, dirlist):
"""Weight for create"""
if get_state("files_created") == get_param_int("max_files"):
return 0
nfile = len(files(dirlist))
relpop = float(nfile) / get_param_int("max_dirsize")
return int(self.maxweight * (1 - relpop))
def doit(self, dirlist):
name = invent_name(dirlist)
f = self.client.create(name)
f.close()
change_state("files_created", 1)
self.log("touch " + name)
test_op.doit(self, dirlist)
class remove_op(test_op):
"""Remove a file"""
def weight(self, dirlist):
"""Weight for remove"""
nfile = len(files(dirlist))
relpop = float(nfile) / get_param_int("max_dirsize")
return int(self.maxweight * relpop)
def doit(self, dirlist):
nametuple = pick_one(files(dirlist))
name = rd_name(nametuple)
self.client.remove(name)
change_state("files_created", -1)
self.log("rm " + name)
test_op.doit(self, dirlist)
class read_op(test_op):
"""Read from a file (no verification yet)"""
def weight(self, dirlist):
if len(nonzero_files(dirlist)) == 0:
return 0
else:
return self.maxweight
def doit(self, dirlist):
nametuple = pick_one(nonzero_files(dirlist))
name = rd_name(nametuple)
size = rd_size(nametuple)
if size == 0:
return
offset = random.randint(0, size - 1)
length = min(random.randint(1, size - offset),
get_param_int("max_rw_size"))
f = self.client.open(name, "r")
f.seek(offset)
result = f.read(length)
f.close()
self.log("read %d from %s @%d" % (length, name, offset))
test_op.doit(self, dirlist)
class write_op(test_op):
"""Write to a random file offset"""
def weight(self, dirlist):
if len(files(dirlist)) == 0:
return 0
else:
return self.maxweight
def doit(self, dirlist):
nametuple = pick_one(files(dirlist))
name = rd_name(nametuple)
biggest = get_param_int("max_file_size")
offset = random.randint(0, biggest - 1)
length = min(random.randint(1, biggest - offset),
get_param_int("max_rw_size"))
f = self.client.open(name, "w+")
f.seek(offset)
output = "x" * length
result = f.write(output)
f.close()
self.log("write %d to %s @%d" % (length, name, offset))
test_op.doit(self, dirlist)
class truncate_op(test_op):
"""Truncate at a random offset"""
def weight(self, dirlist):
if len(nonzero_files(dirlist)) == 0:
return 0
else:
return self.maxweight
def doit(self, dirlist):
nametuple = pick_one(nonzero_files(dirlist))
name = rd_name(nametuple)
size = rd_size(nametuple)
offset = random.randint(0, size - 1)
f = self.client.open(name, "w+")
f.truncate(offset)
f.close()
self.log("truncated %s @%d" % (name, offset))
test_op.doit(self, dirlist)
def run_test(config_file = default_config_file):
#
# Read test parameters, start up KFS client, open log, etc.
#
setup_params(config_file)
client = start_client(get_param_string("kfs_properties"))
logfp = open(get_param_string("log_file"), "w")
seed = get_optional_param_int("random_seed")
if seed != -1:
random.seed(seed)
global filenames
filenames = []
for w in open("/usr/share/dict/words", "r"):
filenames.append(w.rstrip())
#
# Instantiate objects for each test operation
#
opname = get_param_string("oplist").split(" ")
opweight = [int(w) for w in get_param_string("opweight").split(" ")]
ops = []
for i in range(len(opname)):
classname = opname[i] + "_op"
opclass = globals()[classname]
opinstance = opclass(client, logfp, opweight[i])
ops.append(opinstance)
#
# Repeatedly perform random operations until done
#
opsdone = 0
while opsdone != get_param_int("op_count"):
dirlist = client.readdirplus(".")
weights = [op.weight(dirlist) for op in ops]
i = weighted_pick(weights)
ops[i].doit(dirlist)
opsdone += 1
print "%d ops completed:" % (opsdone,)
for i in range(len(ops)):
print "%s\t%d" % (opname[i], ops[i].count)
if __name__ == "__main__":
if len(sys.argv) == 1:
config_file = default_config_file
elif sys.argv[1] == "-":
config_file = None
else:
config_file = sys.argv[1]
run_test(config_file)
|
surgebiswas/poker | refs/heads/master | PokerBots_2017/Johnny/wheel/tool/__init__.py | 232 | """
Wheel command-line utility.
"""
import os
import hashlib
import sys
import json
import wheel.paths
from glob import iglob
from .. import signatures
from ..util import (urlsafe_b64decode, urlsafe_b64encode, native, binary,
matches_requirement)
from ..install import WheelFile
def require_pkgresources(name):
try:
import pkg_resources
except ImportError:
raise RuntimeError("'{0}' needs pkg_resources (part of setuptools).".format(name))
import argparse
class WheelError(Exception): pass
# For testability
def get_keyring():
try:
from ..signatures import keys
import keyring
assert keyring.get_keyring().priority
except (ImportError, AssertionError):
raise WheelError("Install wheel[signatures] (requires keyring, keyrings.alt, pyxdg) for signatures.")
return keys.WheelKeys, keyring
def keygen(get_keyring=get_keyring):
"""Generate a public/private key pair."""
WheelKeys, keyring = get_keyring()
ed25519ll = signatures.get_ed25519ll()
wk = WheelKeys().load()
keypair = ed25519ll.crypto_sign_keypair()
vk = native(urlsafe_b64encode(keypair.vk))
sk = native(urlsafe_b64encode(keypair.sk))
kr = keyring.get_keyring()
kr.set_password("wheel", vk, sk)
sys.stdout.write("Created Ed25519 keypair with vk={0}\n".format(vk))
sys.stdout.write("in {0!r}\n".format(kr))
sk2 = kr.get_password('wheel', vk)
if sk2 != sk:
raise WheelError("Keyring is broken. Could not retrieve secret key.")
sys.stdout.write("Trusting {0} to sign and verify all packages.\n".format(vk))
wk.add_signer('+', vk)
wk.trust('+', vk)
wk.save()
def sign(wheelfile, replace=False, get_keyring=get_keyring):
"""Sign a wheel"""
WheelKeys, keyring = get_keyring()
ed25519ll = signatures.get_ed25519ll()
wf = WheelFile(wheelfile, append=True)
wk = WheelKeys().load()
name = wf.parsed_filename.group('name')
sign_with = wk.signers(name)[0]
sys.stdout.write("Signing {0} with {1}\n".format(name, sign_with[1]))
vk = sign_with[1]
kr = keyring.get_keyring()
sk = kr.get_password('wheel', vk)
keypair = ed25519ll.Keypair(urlsafe_b64decode(binary(vk)),
urlsafe_b64decode(binary(sk)))
record_name = wf.distinfo_name + '/RECORD'
sig_name = wf.distinfo_name + '/RECORD.jws'
if sig_name in wf.zipfile.namelist():
raise WheelError("Wheel is already signed.")
record_data = wf.zipfile.read(record_name)
payload = {"hash":"sha256=" + native(urlsafe_b64encode(hashlib.sha256(record_data).digest()))}
sig = signatures.sign(payload, keypair)
wf.zipfile.writestr(sig_name, json.dumps(sig, sort_keys=True))
wf.zipfile.close()
def unsign(wheelfile):
"""
Remove RECORD.jws from a wheel by truncating the zip file.
RECORD.jws must be at the end of the archive. The zip file must be an
ordinary archive, with the compressed files and the directory in the same
order, and without any non-zip content after the truncation point.
"""
import wheel.install
vzf = wheel.install.VerifyingZipFile(wheelfile, "a")
info = vzf.infolist()
if not (len(info) and info[-1].filename.endswith('/RECORD.jws')):
raise WheelError("RECORD.jws not found at end of archive.")
vzf.pop()
vzf.close()
def verify(wheelfile):
"""Verify a wheel.
The signature will be verified for internal consistency ONLY and printed.
Wheel's own unpack/install commands verify the manifest against the
signature and file contents.
"""
wf = WheelFile(wheelfile)
sig_name = wf.distinfo_name + '/RECORD.jws'
sig = json.loads(native(wf.zipfile.open(sig_name).read()))
verified = signatures.verify(sig)
sys.stderr.write("Signatures are internally consistent.\n")
sys.stdout.write(json.dumps(verified, indent=2))
sys.stdout.write('\n')
def unpack(wheelfile, dest='.'):
"""Unpack a wheel.
Wheel content will be unpacked to {dest}/{name}-{ver}, where {name}
is the package name and {ver} its version.
:param wheelfile: The path to the wheel.
    :param dest: Destination directory (defaults to the current directory).
"""
wf = WheelFile(wheelfile)
namever = wf.parsed_filename.group('namever')
destination = os.path.join(dest, namever)
sys.stderr.write("Unpacking to: %s\n" % (destination))
wf.zipfile.extractall(destination)
wf.zipfile.close()
def install(requirements, requirements_file=None,
wheel_dirs=None, force=False, list_files=False,
dry_run=False):
"""Install wheels.
:param requirements: A list of requirements or wheel files to install.
:param requirements_file: A file containing requirements to install.
:param wheel_dirs: A list of directories to search for wheels.
:param force: Install a wheel file even if it is not compatible.
:param list_files: Only list the files to install, don't install them.
:param dry_run: Do everything but the actual install.
"""
# If no wheel directories specified, use the WHEELPATH environment
# variable, or the current directory if that is not set.
if not wheel_dirs:
wheelpath = os.getenv("WHEELPATH")
if wheelpath:
wheel_dirs = wheelpath.split(os.pathsep)
else:
            wheel_dirs = [os.path.curdir]
# Get a list of all valid wheels in wheel_dirs
all_wheels = []
for d in wheel_dirs:
for w in os.listdir(d):
if w.endswith('.whl'):
wf = WheelFile(os.path.join(d, w))
if wf.compatible:
all_wheels.append(wf)
# If there is a requirements file, add it to the list of requirements
if requirements_file:
# If the file doesn't exist, search for it in wheel_dirs
# This allows standard requirements files to be stored with the
# wheels.
if not os.path.exists(requirements_file):
for d in wheel_dirs:
name = os.path.join(d, requirements_file)
if os.path.exists(name):
requirements_file = name
break
with open(requirements_file) as fd:
requirements.extend(fd)
to_install = []
for req in requirements:
if req.endswith('.whl'):
# Explicitly specified wheel filename
if os.path.exists(req):
wf = WheelFile(req)
if wf.compatible or force:
to_install.append(wf)
else:
msg = ("{0} is not compatible with this Python. "
"--force to install anyway.".format(req))
raise WheelError(msg)
else:
# We could search on wheel_dirs, but it's probably OK to
# assume the user has made an error.
raise WheelError("No such wheel file: {}".format(req))
continue
# We have a requirement spec
# If we don't have pkg_resources, this will raise an exception
matches = matches_requirement(req, all_wheels)
if not matches:
raise WheelError("No match for requirement {}".format(req))
to_install.append(max(matches))
# We now have a list of wheels to install
if list_files:
sys.stdout.write("Installing:\n")
if dry_run:
return
for wf in to_install:
if list_files:
sys.stdout.write(" {0}\n".format(wf.filename))
continue
wf.install(force=force)
wf.zipfile.close()
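# Example usage (hypothetical requirement and directory names): list which
# wheels would satisfy a requirement without installing anything:
#
#     install(['somepackage'], wheel_dirs=['./wheelhouse'], list_files=True)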
def install_scripts(distributions):
"""
Regenerate the entry_points console_scripts for the named distribution.
"""
try:
from setuptools.command import easy_install
import pkg_resources
except ImportError:
raise RuntimeError("'wheel install_scripts' needs setuptools.")
for dist in distributions:
pkg_resources_dist = pkg_resources.get_distribution(dist)
install = wheel.paths.get_install_command(dist)
command = easy_install.easy_install(install.distribution)
command.args = ['wheel'] # dummy argument
command.finalize_options()
command.install_egg_scripts(pkg_resources_dist)
def convert(installers, dest_dir, verbose):
require_pkgresources('wheel convert')
# Only support wheel convert if pkg_resources is present
from ..wininst2wheel import bdist_wininst2wheel
from ..egg2wheel import egg2wheel
for pat in installers:
for installer in iglob(pat):
if os.path.splitext(installer)[1] == '.egg':
conv = egg2wheel
else:
conv = bdist_wininst2wheel
if verbose:
sys.stdout.write("{0}... ".format(installer))
sys.stdout.flush()
conv(installer, dest_dir)
if verbose:
sys.stdout.write("OK\n")
def parser():
p = argparse.ArgumentParser()
s = p.add_subparsers(help="commands")
def keygen_f(args):
keygen()
keygen_parser = s.add_parser('keygen', help='Generate signing key')
keygen_parser.set_defaults(func=keygen_f)
def sign_f(args):
sign(args.wheelfile)
sign_parser = s.add_parser('sign', help='Sign wheel')
sign_parser.add_argument('wheelfile', help='Wheel file')
sign_parser.set_defaults(func=sign_f)
def unsign_f(args):
unsign(args.wheelfile)
unsign_parser = s.add_parser('unsign', help=unsign.__doc__)
unsign_parser.add_argument('wheelfile', help='Wheel file')
unsign_parser.set_defaults(func=unsign_f)
def verify_f(args):
verify(args.wheelfile)
verify_parser = s.add_parser('verify', help=verify.__doc__)
verify_parser.add_argument('wheelfile', help='Wheel file')
verify_parser.set_defaults(func=verify_f)
def unpack_f(args):
unpack(args.wheelfile, args.dest)
unpack_parser = s.add_parser('unpack', help='Unpack wheel')
unpack_parser.add_argument('--dest', '-d', help='Destination directory',
default='.')
unpack_parser.add_argument('wheelfile', help='Wheel file')
unpack_parser.set_defaults(func=unpack_f)
def install_f(args):
install(args.requirements, args.requirements_file,
args.wheel_dirs, args.force, args.list_files)
install_parser = s.add_parser('install', help='Install wheels')
install_parser.add_argument('requirements', nargs='*',
help='Requirements to install.')
install_parser.add_argument('--force', default=False,
action='store_true',
help='Install incompatible wheel files.')
install_parser.add_argument('--wheel-dir', '-d', action='append',
dest='wheel_dirs',
help='Directories containing wheels.')
install_parser.add_argument('--requirements-file', '-r',
help="A file containing requirements to "
"install.")
install_parser.add_argument('--list', '-l', default=False,
dest='list_files',
action='store_true',
help="List wheels which would be installed, "
"but don't actually install anything.")
install_parser.set_defaults(func=install_f)
def install_scripts_f(args):
install_scripts(args.distributions)
install_scripts_parser = s.add_parser('install-scripts', help='Install console_scripts')
install_scripts_parser.add_argument('distributions', nargs='*',
help='Regenerate console_scripts for these distributions')
install_scripts_parser.set_defaults(func=install_scripts_f)
def convert_f(args):
convert(args.installers, args.dest_dir, args.verbose)
convert_parser = s.add_parser('convert', help='Convert egg or wininst to wheel')
convert_parser.add_argument('installers', nargs='*', help='Installers to convert')
convert_parser.add_argument('--dest-dir', '-d', default=os.path.curdir,
help="Directory to store wheels (default %(default)s)")
convert_parser.add_argument('--verbose', '-v', action='store_true')
convert_parser.set_defaults(func=convert_f)
def version_f(args):
from .. import __version__
sys.stdout.write("wheel %s\n" % __version__)
version_parser = s.add_parser('version', help='Print version and exit')
version_parser.set_defaults(func=version_f)
def help_f(args):
p.print_help()
help_parser = s.add_parser('help', help='Show this help')
help_parser.set_defaults(func=help_f)
return p
def main():
p = parser()
args = p.parse_args()
if not hasattr(args, 'func'):
p.print_help()
else:
# XXX on Python 3.3 we get 'args has no func' rather than short help.
try:
args.func(args)
return 0
except WheelError as e:
sys.stderr.write(e.message + "\n")
return 1
|
PolicyStat/django | refs/heads/master | tests/staticfiles_tests/test_liveserver.py | 34 | """
A subset of the tests in tests/servers/tests exercising
django.contrib.staticfiles.testing.StaticLiveServerTestCase instead of
django.test.LiveServerTestCase.
"""
import os
from django.core.exceptions import ImproperlyConfigured
from django.test import modify_settings, override_settings
from django.utils.six.moves.urllib.request import urlopen
from django.utils._os import upath
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
TEST_ROOT = os.path.dirname(upath(__file__))
TEST_SETTINGS = {
'MEDIA_URL': '/media/',
'STATIC_URL': '/static/',
'MEDIA_ROOT': os.path.join(TEST_ROOT, 'project', 'site_media', 'media'),
'STATIC_ROOT': os.path.join(TEST_ROOT, 'project', 'site_media', 'static'),
}
class LiveServerBase(StaticLiveServerTestCase):
available_apps = []
@classmethod
def setUpClass(cls):
# Override settings
cls.settings_override = override_settings(**TEST_SETTINGS)
cls.settings_override.enable()
super(LiveServerBase, cls).setUpClass()
@classmethod
def tearDownClass(cls):
# Restore original settings
cls.settings_override.disable()
super(LiveServerBase, cls).tearDownClass()
class StaticLiveServerChecks(LiveServerBase):
@classmethod
def setUpClass(cls):
# Backup original environment variable
address_predefined = 'DJANGO_LIVE_TEST_SERVER_ADDRESS' in os.environ
old_address = os.environ.get('DJANGO_LIVE_TEST_SERVER_ADDRESS')
# If contrib.staticfiles isn't configured properly, the exception
# should bubble up to the main thread.
old_STATIC_URL = TEST_SETTINGS['STATIC_URL']
TEST_SETTINGS['STATIC_URL'] = None
cls.raises_exception('localhost:8081', ImproperlyConfigured)
TEST_SETTINGS['STATIC_URL'] = old_STATIC_URL
# Restore original environment variable
if address_predefined:
os.environ['DJANGO_LIVE_TEST_SERVER_ADDRESS'] = old_address
else:
del os.environ['DJANGO_LIVE_TEST_SERVER_ADDRESS']
@classmethod
def tearDownClass(cls):
# skip it, as setUpClass doesn't call its parent either
pass
@classmethod
def raises_exception(cls, address, exception):
os.environ['DJANGO_LIVE_TEST_SERVER_ADDRESS'] = address
try:
super(StaticLiveServerChecks, cls).setUpClass()
raise Exception("The line above should have raised an exception")
except exception:
pass
finally:
super(StaticLiveServerChecks, cls).tearDownClass()
def test_test_test(self):
# Intentionally empty method so that the test is picked up by the
# test runner and the overridden setUpClass() method is executed.
pass
class StaticLiveServerView(LiveServerBase):
def urlopen(self, url):
return urlopen(self.live_server_url + url)
# The test is going to access a static file stored in this application.
@modify_settings(INSTALLED_APPS={'append': 'staticfiles_tests.apps.test'})
def test_collectstatic_emulation(self):
"""
        Test that StaticLiveServerTestCase's use of staticfiles' serve() allows it
        to discover the app's static assets without having to run collectstatic first.
"""
f = self.urlopen('/static/test/file.txt')
self.assertEqual(f.read().rstrip(b'\r\n'), b'In app media directory.')
|
josenavas/labman | refs/heads/master | labman/gui/test/test_plate.py | 1 | # ----------------------------------------------------------------------------
# Copyright (c) 2017-, labman development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
# ----------------------------------------------------------------------------
from json import dumps
from unittest import main
from tornado.escape import json_decode
from tornado.web import HTTPError
from labman.gui.testing import TestHandlerBase
from labman.db.plate import Plate
from labman.db.user import User
from labman.gui.handlers.plate import (
_get_plate, plate_handler_patch_request, plate_layout_handler_get_request,
plate_map_handler_get_request)
class TestUtils(TestHandlerBase):
def test_get_plate(self):
self.assertEqual(_get_plate('21'), Plate(21))
regex = 'Plate 100 doesn\'t exist'
with self.assertRaisesRegex(HTTPError, regex):
_get_plate(100)
def test_plate_map_handler_get_request(self):
regex = 'Plating process 100 doesn\'t exist'
with self.assertRaisesRegex(HTTPError, regex):
plate_map_handler_get_request(100)
obs = plate_map_handler_get_request(10)
exp_plate_confs = [[1, '96-well deep-well plate', 8, 12],
[2, '96-well microtiter plate', 8, 12],
[3, '384-well microtiter plate', 16, 24]]
exp_contr_desc = [
{'external_id': 'blank',
'description': 'gDNA extraction blanks. Represents an empty '
'extraction well.'},
{'external_id': 'empty',
'description': 'Empty well. Represents an empty well that should '
'not be included in library preparation.'},
{'external_id': 'vibrio.positive.control',
'description': 'Bacterial isolate control (Vibrio fischeri ES114)'
'. Represents an extraction well loaded with '
'Vibrio.'},
{'external_id': 'zymo.mock',
'description': 'Bacterial community control (Zymo Mock D6306). '
'Represents an extraction well loaded with Zymo '
'Mock community.'}]
exp = {'plate_confs': exp_plate_confs, 'plate_id': 21,
'process_id': 10, 'controls_description': exp_contr_desc}
self.assertEqual(obs, exp)
obs = plate_map_handler_get_request(None)
exp = {'plate_confs': exp_plate_confs, 'plate_id': None,
'process_id': None, 'controls_description': exp_contr_desc}
self.assertEqual(obs, exp)
def test_plate_handler_patch_request(self):
tester = Plate(21)
user = User('test@foo.bar')
# Incorrect path parameter
regex = 'Incorrect path parameter'
with self.assertRaisesRegex(HTTPError, regex):
plate_handler_patch_request(user, 21, 'replace', '/name/newname',
'NewName', None)
# Unknown attribute
regex = 'Attribute unknown not recognized'
with self.assertRaisesRegex(HTTPError, regex):
plate_handler_patch_request(user, 21, 'replace', '/unknown/',
'NewName', None)
# Unknown operation
regex = ('Operation add not supported. Current supported '
'operations: replace')
with self.assertRaisesRegex(HTTPError, regex):
plate_handler_patch_request(user, 21, 'add', '/name/',
'NewName', None)
# Plate doesn't exist
regex = 'Plate 100 doesn\'t exist'
with self.assertRaisesRegex(HTTPError, regex):
plate_handler_patch_request(user, 100, 'replace', '/name/',
'NewName', None)
# Test success - Name
plate_handler_patch_request(user, 21, 'replace', '/name/',
'NewName', None)
self.assertEqual(tester.external_id, 'NewName')
tester.external_id = 'Test plate 1'
# Test success - discarded
plate_handler_patch_request(user, 21, 'replace', '/discarded/',
True, None)
self.assertEqual(tester.discarded, True)
tester.discarded = False
def test_plate_layout_handler_get_request(self):
obs = plate_layout_handler_get_request(21)
self.assertEqual(len(obs), 8)
exp = [{'sample': '1.SKB1.640202.21.A1', 'notes': None},
{'sample': '1.SKB2.640194.21.A2', 'notes': None},
{'sample': '1.SKB3.640195.21.A3', 'notes': None},
{'sample': '1.SKB4.640189.21.A4', 'notes': None},
{'sample': '1.SKB5.640181.21.A5', 'notes': None},
{'sample': '1.SKB6.640176.21.A6', 'notes': None},
{'sample': '1.SKB7.640196.21.A7', 'notes': None},
{'sample': '1.SKB8.640193.21.A8', 'notes': None},
{'sample': '1.SKB9.640200.21.A9', 'notes': None},
{'sample': '1.SKD1.640179.21.A10', 'notes': None},
{'sample': '1.SKD2.640178.21.A11', 'notes': None},
{'sample': '1.SKD3.640198.21.A12', 'notes': None}]
self.assertEqual(obs[0], exp)
        # The 7th row contains vibrio controls
exp = [{'sample': 'vibrio.positive.control.21.G%s' % i, 'notes': None}
for i in range(1, 13)]
self.assertEqual(obs[6], exp)
# The 8th row contains blanks
exp = [{'sample': 'blank.21.H%s' % i, 'notes': None}
for i in range(1, 12)]
self.assertEqual(obs[7][:-1], exp)
self.assertEqual(obs[7][11], {'sample': 'empty.21.H12', 'notes': None})
regex = 'Plate 100 doesn\'t exist'
with self.assertRaisesRegex(HTTPError, regex):
plate_layout_handler_get_request(100)
class TestPlateHandlers(TestHandlerBase):
def test_get_plate_list_handler(self):
response = self.get('/plate_list')
self.assertEqual(response.code, 200)
obs = json_decode(response.body)
self.assertCountEqual(obs.keys(), ['data'])
obs_data = obs['data']
self.assertEqual(len(obs_data), 26)
self.assertEqual(obs_data[0], [1, 'EMP 16S V4 primer plate 1', None])
response = self.get('/plate_list?plate_type=%5B%22sample%22%5D')
self.assertEqual(response.code, 200)
obs = json_decode(response.body)
self.assertCountEqual(obs.keys(), ['data'])
obs_data = obs['data']
self.assertEqual(len(obs_data), 1)
self.assertEqual(
obs_data[0], [
21, 'Test plate 1',
['Identification of the Microbiomes for Cannabis Soils']])
response = self.get(
'/plate_list?plate_type=%5B%22compressed+gDNA%22%2C+%22'
'normalized+gDNA%22%5D')
self.assertEqual(response.code, 200)
obs = json_decode(response.body)
self.assertCountEqual(obs.keys(), ['data'])
obs_data = obs['data']
self.assertEqual(len(obs_data), 2)
self.assertEqual(
obs_data,
[[24, 'Test compressed gDNA plate 1',
['Identification of the Microbiomes for Cannabis Soils']],
[25, 'Test normalized gDNA plate 1',
['Identification of the Microbiomes for Cannabis Soils']]])
response = self.get(
'/plate_list?plate_type=%5B%22compressed+gDNA%22%2C+%22'
'normalized+gDNA%22%5D&only_quantified=true')
self.assertEqual(response.code, 200)
obs = json_decode(response.body)
self.assertCountEqual(obs.keys(), ['data'])
obs_data = obs['data']
self.assertEqual(len(obs_data), 1)
self.assertEqual(
obs_data,
[[24, 'Test compressed gDNA plate 1',
['Identification of the Microbiomes for Cannabis Soils']]])
def test_get_plate_map_handler(self):
response = self.get('/plate')
self.assertEqual(response.code, 200)
self.assertNotEqual(response.body, '')
response = self.get('/plate?process_id=10')
self.assertEqual(response.code, 200)
self.assertNotEqual(response.body, '')
response = self.get('/plate?process_id=100')
self.assertEqual(response.code, 404)
self.assertNotEqual(response.body, '')
def test_get_plate_name_handler(self):
response = self.get('/platename')
# It is missing the parameter
self.assertEqual(response.code, 400)
# It doesn't exist
response = self.get('/platename?new-name=something')
self.assertEqual(response.code, 404)
# It exists
response = self.get('/platename?new-name=Test%20plate%201')
self.assertEqual(response.code, 200)
def test_get_plate_handler(self):
response = self.get('/plate/21/')
self.assertEqual(response.code, 200)
obs = json_decode(response.body)
exp = {'plate_id': 21,
'plate_name': 'Test plate 1',
'discarded': False,
'plate_configuration': [1, '96-well deep-well plate', 8, 12],
'notes': None,
'studies': [1],
'duplicates': [
[1, 1, '1.SKB1.640202.21.A1'],
[2, 1, '1.SKB1.640202.21.B1'],
[3, 1, '1.SKB1.640202.21.C1'],
[4, 1, '1.SKB1.640202.21.D1'],
[5, 1, '1.SKB1.640202.21.E1'],
[6, 1, '1.SKB1.640202.21.F1'],
[1, 2, '1.SKB2.640194.21.A2'],
[2, 2, '1.SKB2.640194.21.B2'],
[3, 2, '1.SKB2.640194.21.C2'],
[4, 2, '1.SKB2.640194.21.D2'],
[5, 2, '1.SKB2.640194.21.E2'],
[6, 2, '1.SKB2.640194.21.F2'],
[1, 3, '1.SKB3.640195.21.A3'],
[2, 3, '1.SKB3.640195.21.B3'],
[3, 3, '1.SKB3.640195.21.C3'],
[4, 3, '1.SKB3.640195.21.D3'],
[5, 3, '1.SKB3.640195.21.E3'],
[6, 3, '1.SKB3.640195.21.F3'],
[1, 4, '1.SKB4.640189.21.A4'],
[2, 4, '1.SKB4.640189.21.B4'],
[3, 4, '1.SKB4.640189.21.C4'],
[4, 4, '1.SKB4.640189.21.D4'],
[5, 4, '1.SKB4.640189.21.E4'],
[6, 4, '1.SKB4.640189.21.F4'],
[1, 5, '1.SKB5.640181.21.A5'],
[2, 5, '1.SKB5.640181.21.B5'],
[3, 5, '1.SKB5.640181.21.C5'],
[4, 5, '1.SKB5.640181.21.D5'],
[5, 5, '1.SKB5.640181.21.E5'],
[6, 5, '1.SKB5.640181.21.F5'],
[1, 6, '1.SKB6.640176.21.A6'],
[2, 6, '1.SKB6.640176.21.B6'],
[3, 6, '1.SKB6.640176.21.C6'],
[4, 6, '1.SKB6.640176.21.D6'],
[5, 6, '1.SKB6.640176.21.E6'],
[6, 6, '1.SKB6.640176.21.F6'],
[1, 7, '1.SKB7.640196.21.A7'],
[2, 7, '1.SKB7.640196.21.B7'],
[3, 7, '1.SKB7.640196.21.C7'],
[4, 7, '1.SKB7.640196.21.D7'],
[5, 7, '1.SKB7.640196.21.E7'],
[6, 7, '1.SKB7.640196.21.F7'],
[1, 8, '1.SKB8.640193.21.A8'],
[2, 8, '1.SKB8.640193.21.B8'],
[3, 8, '1.SKB8.640193.21.C8'],
[4, 8, '1.SKB8.640193.21.D8'],
[5, 8, '1.SKB8.640193.21.E8'],
[6, 8, '1.SKB8.640193.21.F8'],
[1, 9, '1.SKB9.640200.21.A9'],
[2, 9, '1.SKB9.640200.21.B9'],
[3, 9, '1.SKB9.640200.21.C9'],
[4, 9, '1.SKB9.640200.21.D9'],
[5, 9, '1.SKB9.640200.21.E9'],
[6, 9, '1.SKB9.640200.21.F9'],
[1, 10, '1.SKD1.640179.21.A10'],
[2, 10, '1.SKD1.640179.21.B10'],
[3, 10, '1.SKD1.640179.21.C10'],
[4, 10, '1.SKD1.640179.21.D10'],
[5, 10, '1.SKD1.640179.21.E10'],
[6, 10, '1.SKD1.640179.21.F10'],
[1, 11, '1.SKD2.640178.21.A11'],
[2, 11, '1.SKD2.640178.21.B11'],
[3, 11, '1.SKD2.640178.21.C11'],
[4, 11, '1.SKD2.640178.21.D11'],
[5, 11, '1.SKD2.640178.21.E11'],
[6, 11, '1.SKD2.640178.21.F11'],
[1, 12, '1.SKD3.640198.21.A12'],
[2, 12, '1.SKD3.640198.21.B12'],
[3, 12, '1.SKD3.640198.21.C12'],
[4, 12, '1.SKD3.640198.21.D12'],
[5, 12, '1.SKD3.640198.21.E12'],
[6, 12, '1.SKD3.640198.21.F12']],
'previous_plates': [],
'unknowns': []}
obs_duplicates = obs.pop('duplicates')
exp_duplicates = exp.pop('duplicates')
self.assertEqual(obs, exp)
self.assertCountEqual(obs_duplicates, exp_duplicates)
# Plate doesn't exist
response = self.get('/plate/100/')
self.assertEqual(response.code, 404)
def test_patch_plate_handler(self):
tester = Plate(21)
data = {'op': 'replace', 'path': '/name/', 'value': 'NewName'}
response = self.patch('/plate/21/', data)
self.assertEqual(response.code, 200)
self.assertEqual(tester.external_id, 'NewName')
tester.external_id = 'Test plate 1'
def test_patch_plate_discarded_handler(self):
tester = Plate(21)
data = {'op': 'replace', 'path': '/discarded/', 'value': True}
response = self.patch('/plate/21/', data)
self.assertEqual(response.code, 200)
self.assertEqual(tester.discarded, True)
tester.discarded = False
def test_get_plate_layout_handler(self):
response = self.get('/plate/21/layout')
self.assertEqual(response.code, 200)
obs = json_decode(response.body)
# Spot check some positions, since a more in-depth test has already
# been performed in test_plate_layout_handler_get_request
self.assertEqual(obs[0][0],
{'sample': '1.SKB1.640202.21.A1', 'notes': None})
self.assertEqual(obs[5][9],
{'sample': '1.SKD1.640179.21.F10', 'notes': None})
self.assertEqual(
obs[6][1], {'sample':
'vibrio.positive.control.21.G2', 'notes': None})
self.assertEqual(obs[7][4], {'sample': 'blank.21.H5', 'notes': None})
def test_get_plate_search_handler(self):
response = self.get('/plate_search')
self.assertEqual(response.code, 200)
self.assertNotEqual(response.body, '')
def test_post_plate_search_handler(self):
# Note: these tests don't exercise all the cases covered in
# db/tests/test_plate.py test_search; instead, they focus on
# testing at least one search based on each of the input
# fields, to verify that these are being passed through
# correctly to the db's Plate.search method.
# Test search by sample names:
post_data = {
'sample_names': dumps(['1.SKB1.640202', '1.SKB2.640194']),
'plate_comment_keywords': "",
'well_comment_keywords': "",
'operation': "INTERSECT"
}
response = self.post('/plate_search', post_data)
self.assertEqual(response.code, 200)
obs = json_decode(response.body)
self.assertCountEqual(obs.keys(), ['data'])
obs_data = obs['data']
self.assertEqual(len(obs_data), 1)
self.assertEqual(obs_data[0], [21, 'Test plate 1'])
# Test search by plate comment keywords:
# It looks like none of the plates in the test database have
# any notes, so it is necessary to add some to be able to
# test the keywords search functionality; the below is lifted
# verbatim from db/tests/test_plate.py test_search
plate22 = Plate(22)
plate23 = Plate(23)
# Add comments to a plate so we can actually test the
# search functionality
plate22.notes = 'Some interesting notes'
plate23.notes = 'More boring notes'
# end verbatim lift
post_data = {
'sample_names': dumps([]),
'plate_comment_keywords': 'interesting boring',
'well_comment_keywords': "",
'operation': "INTERSECT"
}
response = self.post('/plate_search', post_data)
self.assertEqual(response.code, 200)
obs = json_decode(response.body)
self.assertCountEqual(obs.keys(), ['data'])
obs_data = obs['data']
self.assertEqual(len(obs_data), 0)
# Test search by intersecting or unioning multiple search terms:
post_data = {
'sample_names': dumps(['1.SKB1.640202']),
'plate_comment_keywords': 'interesting boring',
'well_comment_keywords': "",
'operation': "INTERSECT"
}
response = self.post('/plate_search', post_data)
self.assertEqual(response.code, 200)
obs = json_decode(response.body)
self.assertCountEqual(obs.keys(), ['data'])
obs_data = obs['data']
self.assertEqual(len(obs_data), 0)
post_data = {
'sample_names': dumps(['1.SKB1.640202']),
'plate_comment_keywords': 'interesting boring',
'well_comment_keywords': "",
'operation': "UNION"
}
response = self.post('/plate_search', post_data)
self.assertEqual(response.code, 200)
obs = json_decode(response.body)
self.assertCountEqual(obs.keys(), ['data'])
obs_data = obs['data']
self.assertEqual(len(obs_data), 1)
self.assertEqual(obs_data[0], [21, 'Test plate 1'])
# Test search by well comment keywords:
# Add comments to some wells so can test well comment search
plate23.get_well(1, 1).composition.notes = 'What should I write?'
post_data = {
'sample_names': dumps([]),
'plate_comment_keywords': '',
'well_comment_keywords': "write",
'operation': "INTERSECT"
}
response = self.post('/plate_search', post_data)
self.assertEqual(response.code, 200)
obs = json_decode(response.body)
self.assertCountEqual(obs.keys(), ['data'])
obs_data = obs['data']
self.assertEqual(len(obs_data), 1)
self.assertEqual(obs_data[0], [23, 'Test 16S plate 1'])
def test_get_plate_process_handler(self):
response = self.get('/plate/21/process')
self.assertEqual(response.code, 200)
self.assertTrue(
response.effective_url.endswith('/plate?process_id=10'))
response = self.get('/plate/22/process')
self.assertEqual(response.code, 200)
self.assertTrue(
response.effective_url.endswith(
'/process/gdna_extraction?process_id=1'))
response = self.get('/plate/23/process')
self.assertEqual(response.code, 200)
self.assertTrue(
response.effective_url.endswith(
'/process/library_prep_16S?process_id=1'))
response = self.get('/plate/24/process')
self.assertEqual(response.code, 200)
self.assertTrue(
response.effective_url.endswith(
'/process/gdna_compression?process_id=1'))
response = self.get('/plate/25/process')
self.assertEqual(response.code, 200)
self.assertTrue(
response.effective_url.endswith(
'/process/normalize?process_id=1'))
response = self.get('/plate/26/process')
self.assertEqual(response.code, 200)
self.assertTrue(
response.effective_url.endswith(
'/process/library_prep_shotgun?process_id=1'))
if __name__ == '__main__':
main()
|
crosswalk-project/chromium-crosswalk-efl | refs/heads/efl/crosswalk-10/39.0.2171.19 | tools/telemetry/telemetry/core/browser_credentials.py | 25 | # Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import logging
import os
from telemetry.core import util
from telemetry.core.backends import codepen_credentials_backend
from telemetry.core.backends import facebook_credentials_backend
from telemetry.core.backends import google_credentials_backend
from telemetry.page.actions import action_runner
from telemetry.unittest import options_for_unittests
class CredentialsError(Exception):
"""Error that can be thrown when logging in."""
class BrowserCredentials(object):
def __init__(self, backends = None):
self._credentials = {}
self._credentials_path = None
self._extra_credentials = {}
if backends is None:
backends = [
codepen_credentials_backend.CodePenCredentialsBackend(),
facebook_credentials_backend.FacebookCredentialsBackend(),
google_credentials_backend.GoogleCredentialsBackend()]
self._backends = {}
for backend in backends:
self._backends[backend.credentials_type] = backend
def AddBackend(self, backend):
assert backend.credentials_type not in self._backends
self._backends[backend.credentials_type] = backend
def IsLoggedIn(self, credentials_type):
if credentials_type not in self._backends:
raise CredentialsError(
          'Unrecognized credentials type: %s' % credentials_type)
if credentials_type not in self._credentials:
return False
return self._backends[credentials_type].IsLoggedIn()
def CanLogin(self, credentials_type):
if credentials_type not in self._backends:
raise CredentialsError(
          'Unrecognized credentials type: %s' % credentials_type)
return credentials_type in self._credentials
def LoginNeeded(self, tab, credentials_type):
if credentials_type not in self._backends:
raise CredentialsError(
          'Unrecognized credentials type: %s' % credentials_type)
if credentials_type not in self._credentials:
return False
runner = action_runner.ActionRunner(tab)
return self._backends[credentials_type].LoginNeeded(
tab, runner, self._credentials[credentials_type])
def LoginNoLongerNeeded(self, tab, credentials_type):
assert credentials_type in self._backends
self._backends[credentials_type].LoginNoLongerNeeded(tab)
@property
def credentials_path(self): # pylint: disable=E0202
return self._credentials_path
@credentials_path.setter
def credentials_path(self, credentials_path): # pylint: disable=E0202
self._credentials_path = credentials_path
self._RebuildCredentials()
def Add(self, credentials_type, data):
if credentials_type not in self._extra_credentials:
self._extra_credentials[credentials_type] = {}
for k, v in data.items():
assert k not in self._extra_credentials[credentials_type]
self._extra_credentials[credentials_type][k] = v
self._RebuildCredentials()
def _ResetLoggedInState(self):
"""Makes the backends think we're not logged in even though we are.
Should only be used in unit tests to simulate --dont-override-profile.
"""
for backend in self._backends.keys():
self._backends[backend]._ResetLoggedInState() # pylint: disable=W0212
def _RebuildCredentials(self):
credentials = {}
    if self._credentials_path is None:
pass
elif os.path.exists(self._credentials_path):
with open(self._credentials_path, 'r') as f:
credentials = json.loads(f.read())
# TODO(nduca): use system keychain, if possible.
homedir_credentials_path = os.path.expanduser('~/.telemetry-credentials')
homedir_credentials = {}
if (not options_for_unittests.GetCopy() and
os.path.exists(homedir_credentials_path)):
logging.info("Found ~/.telemetry-credentials. Its contents will be used "
"when no other credentials can be found.")
with open(homedir_credentials_path, 'r') as f:
homedir_credentials = json.loads(f.read())
self._credentials = {}
all_keys = set(credentials.keys()).union(
homedir_credentials.keys()).union(
self._extra_credentials.keys())
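    # Precedence, lowest to highest: the credentials file, then
    # ~/.telemetry-credentials, then credentials added via Add() -- each
    # later assignment below overwrites the earlier one for the same key.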
for k in all_keys:
if k in credentials:
self._credentials[k] = credentials[k]
if k in homedir_credentials:
logging.info("Will use ~/.telemetry-credentials for %s logins." % k)
self._credentials[k] = homedir_credentials[k]
if k in self._extra_credentials:
self._credentials[k] = self._extra_credentials[k]
def WarnIfMissingCredentials(self, page_set):
num_pages_missing_login = 0
missing_credentials = set()
for page in page_set:
if (page.credentials
and not self.CanLogin(page.credentials)):
num_pages_missing_login += 1
missing_credentials.add(page.credentials)
if num_pages_missing_login > 0:
files_to_tweak = []
if page_set.credentials_path:
files_to_tweak.append(
os.path.relpath(os.path.join(os.path.dirname(page_set.file_path),
page_set.credentials_path)))
files_to_tweak.append('~/.telemetry-credentials')
example_credentials_file = os.path.join(
util.GetTelemetryDir(), 'examples', 'credentials_example.json')
logging.warning("""
Credentials for %s were not found. %i pages will not be tested.
To fix this, either follow the instructions to authenticate to gsutil
here:
http://www.chromium.org/developers/telemetry/upload_to_cloud_storage,
or add your own credentials to:
%s
An example credentials file you can copy from is here:
%s\n""" % (', '.join(missing_credentials),
num_pages_missing_login,
' or '.join(files_to_tweak),
example_credentials_file))
|
rsjohnco/rez | refs/heads/resources2 | src/rez/utils/colorize.py | 3 | import sys
import logging
from rez.vendor import colorama
from rez.config import config
from rez.utils.platform_ import platform_
_initialised = False
def _init_colorama():
global _initialised
if not _initialised:
colorama.init()
_initialised = True
def stream_is_tty(stream):
"""Return true if the stream is a tty stream.
Returns:
bool
"""
isatty = getattr(stream, 'isatty', None)
return isatty and isatty()
def critical(str_):
""" Return the string wrapped with the appropriate styling of a critical
message. The styling will be determined based on the rez configuration.
Args:
str_ (str): The string to be wrapped.
Returns:
str: The string styled with the appropriate escape sequences.
"""
return _color_level(str_, 'critical')
def error(str_):
""" Return the string wrapped with the appropriate styling of an error
message. The styling will be determined based on the rez configuration.
Args:
str_ (str): The string to be wrapped.
Returns:
str: The string styled with the appropriate escape sequences.
"""
return _color_level(str_, 'error')
def warning(str_):
""" Return the string wrapped with the appropriate styling of a warning
message. The styling will be determined based on the rez configuration.
Args:
str_ (str): The string to be wrapped.
Returns:
str: The string styled with the appropriate escape sequences.
"""
return _color_level(str_, 'warning')
def info(str_):
""" Return the string wrapped with the appropriate styling of an info
message. The styling will be determined based on the rez configuration.
Args:
str_ (str): The string to be wrapped.
Returns:
str: The string styled with the appropriate escape sequences.
"""
return _color_level(str_, 'info')
def debug(str_):
""" Return the string wrapped with the appropriate styling of a debug
message. The styling will be determined based on the rez configuration.
Args:
str_ (str): The string to be wrapped.
Returns:
str: The string styled with the appropriate escape sequences.
"""
return _color_level(str_, 'debug')
def heading(str_):
""" Return the string wrapped with the appropriate styling of a heading
message. The styling will be determined based on the rez configuration.
Args:
str_ (str): The string to be wrapped.
Returns:
str: The string styled with the appropriate escape sequences.
"""
return _color_level(str_, 'heading')
def local(str_):
""" Return the string wrapped with the appropriate styling to display a
local package. The styling will be determined based on the rez
configuration.
Args:
str_ (str): The string to be wrapped.
Returns:
str: The string styled with the appropriate escape sequences.
"""
return _color_level(str_, 'local')
def implicit(str_):
""" Return the string wrapped with the appropriate styling to display an
implicit package. The styling will be determined based on the rez
configuration.
Args:
str_ (str): The string to be wrapped.
Returns:
str: The string styled with the appropriate escape sequences.
"""
return _color_level(str_, 'implicit')
def alias(str_):
""" Return the string wrapped with the appropriate styling to display a
tool alias. The styling will be determined based on the rez configuration.
Args:
str_ (str): The string to be wrapped.
Returns:
str: The string styled with the appropriate escape sequences.
"""
return _color_level(str_, 'alias')
def notset(str_):
""" Return the string wrapped with the appropriate escape sequences to
remove all styling.
Args:
str_ (str): The string to be wrapped.
Returns:
str: The string styled with the appropriate escape sequences.
"""
return _color(str_)
def _color_level(str_, level):
""" Return the string wrapped with the appropriate styling for the message
level. The styling will be determined based on the rez configuration.
Args:
str_ (str): The string to be wrapped.
level (str): The message level. Should be one of 'critical', 'error',
'warning', 'info' or 'debug'.
Returns:
str: The string styled with the appropriate escape sequences.
"""
fore_color, back_color, styles = _get_style_from_config(level)
return _color(str_, fore_color, back_color, styles)
def _color(str_, fore_color=None, back_color=None, styles=None):
""" Return the string wrapped with the appropriate styling escape sequences.
Args:
str_ (str): The string to be wrapped.
fore_color (str, optional): Any foreground color supported by the
`Colorama`_ module.
back_color (str, optional): Any background color supported by the
`Colorama`_ module.
styles (list of str, optional): Any styles supported by the `Colorama`_
module.
Returns:
str: The string styled with the appropriate escape sequences.
.. _Colorama:
https://pypi.python.org/pypi/colorama
"""
    # TODO: Colorama is documented to work on Windows, and a trivial test case
    # proves this to be the case, but it doesn't work in Rez. If the
    # initialisation is done in src/rez/__init__.py then it does work; however,
    # as discussed in the following comment, this is not always desirable. So,
    # until we can work out why, we forcibly turn it off.
if not config.get("color_enabled", False) or platform_.name == "windows":
return str_
# lazily init colorama. This is important - we don't want to init at startup,
# because colorama prints a RESET_ALL character atexit. This in turn adds
# unexpected output when capturing the output of a command run in a
# ResolvedContext, for example.
_init_colorama()
colored = ""
if not styles:
styles = []
if fore_color:
colored += getattr(colorama.Fore, fore_color.upper(), '')
if back_color:
colored += getattr(colorama.Back, back_color.upper(), '')
for style in styles:
colored += getattr(colorama.Style, style.upper(), '')
return colored + str_ + colorama.Style.RESET_ALL
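# Example: _color("build ok", fore_color="green", styles=["bright"]) returns
# the string wrapped in the corresponding colorama escape sequences, or the
# string unchanged when color is disabled.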
def _get_style_from_config(key):
fore_color = config.get("%s_fore" % key, '')
back_color = config.get("%s_back" % key, '')
styles = config.get("%s_styles" % key, None)
return fore_color, back_color, styles
class ColorizedStreamHandler(logging.StreamHandler):
"""A stream handler for use with the Python logger.
This handler uses the `Colorama`_ module to style the log messages based
on the rez configuration.
Attributes:
STYLES (dict): A mapping between the Python logger levels and a function
that can be used to provide the appropriate styling.
.. _Colorama:
https://pypi.python.org/pypi/colorama
"""
STYLES = {
50: critical,
40: error,
30: warning,
20: info,
10: debug,
0: notset,
}
@property
def is_tty(self):
"""Return true if the stream associated with this handler is a tty
stream.
Returns:
bool
"""
return stream_is_tty(self.stream)
def _get_style_function_for_level(self, level):
return self.STYLES.get(level, notset)
def emit(self, record):
"""Emit a record.
If the stream associated with this handler provides tty then the record
that is emitted with be formatted to include escape sequences for
appropriate styling.
"""
try:
message = self.format(record)
if not self.is_tty:
self.stream.write(message)
else:
style = self._get_style_function_for_level(record.levelno)
self.stream.write(style(message))
self.stream.write(getattr(self, 'terminator', '\n'))
self.flush()
except (KeyboardInterrupt, SystemExit):
raise
except:
self.handleError(record)
class Printer(object):
def __init__(self, buf=sys.stdout):
self.buf = buf
self.tty = stream_is_tty(buf)
def __call__(self, msg='', style=None):
print >> self.buf, self.get(msg, style)
def get(self, msg, style=None):
if style and self.tty:
msg = style(msg)
return msg
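# Example usage: route styled output through a Printer bound to stderr:
#
#     pr = Printer(sys.stderr)
#     pr("could not load config", style=warning)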
|
charbeljc/OCB | refs/heads/8.0 | addons/account/project/wizard/account_analytic_cost_ledger_report.py | 378 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import osv, fields
class account_analytic_cost_ledger(osv.osv_memory):
_name = 'account.analytic.cost.ledger'
_description = 'Account Analytic Cost Ledger'
_columns = {
'date1': fields.date('Start of period', required=True),
'date2': fields.date('End of period', required=True),
}
_defaults = {
'date1': lambda *a: time.strftime('%Y-01-01'),
'date2': lambda *a: time.strftime('%Y-%m-%d')
}
def check_report(self, cr, uid, ids, context=None):
if context is None:
context = {}
data = self.read(cr, uid, ids)[0]
datas = {
'ids': context.get('active_ids',[]),
'model': 'account.analytic.account',
'form': data
}
datas['form']['active_ids'] = context.get('active_ids', False)
return self.pool['report'].get_action(cr, uid, [], 'account.report_analyticcostledger', data=datas, context=context)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
q1ang/scikit-learn | refs/heads/master | sklearn/linear_model/perceptron.py | 245 | # Author: Mathieu Blondel
# License: BSD 3 clause
from .stochastic_gradient import BaseSGDClassifier
from ..feature_selection.from_model import _LearntSelectorMixin
class Perceptron(BaseSGDClassifier, _LearntSelectorMixin):
"""Perceptron
Read more in the :ref:`User Guide <perceptron>`.
Parameters
----------
penalty : None, 'l2' or 'l1' or 'elasticnet'
The penalty (aka regularization term) to be used. Defaults to None.
alpha : float
Constant that multiplies the regularization term if regularization is
used. Defaults to 0.0001
fit_intercept : bool
Whether the intercept should be estimated or not. If False, the
data is assumed to be already centered. Defaults to True.
n_iter : int, optional
The number of passes over the training data (aka epochs).
Defaults to 5.
shuffle : bool, optional, default True
Whether or not the training data should be shuffled after each epoch.
random_state : int seed, RandomState instance, or None (default)
The seed of the pseudo random number generator to use when
shuffling the data.
verbose : integer, optional
The verbosity level
n_jobs : integer, optional
The number of CPUs to use to do the OVA (One Versus All, for
multi-class problems) computation. -1 means 'all CPUs'. Defaults
to 1.
eta0 : double
Constant by which the updates are multiplied. Defaults to 1.
class_weight : dict, {class_label: weight} or "balanced" or None, optional
Preset for the class_weight fit parameter.
Weights associated with classes. If not given, all classes
are supposed to have weight one.
The "balanced" mode uses the values of y to automatically adjust
weights inversely proportional to class frequencies in the input data
as ``n_samples / (n_classes * np.bincount(y))``
warm_start : bool, optional
When set to True, reuse the solution of the previous call to fit as
initialization, otherwise, just erase the previous solution.
Attributes
----------
coef_ : array, shape = [1, n_features] if n_classes == 2 else [n_classes,\
n_features]
Weights assigned to the features.
intercept_ : array, shape = [1] if n_classes == 2 else [n_classes]
Constants in decision function.
Notes
-----
`Perceptron` and `SGDClassifier` share the same underlying implementation.
In fact, `Perceptron()` is equivalent to `SGDClassifier(loss="perceptron",
eta0=1, learning_rate="constant", penalty=None)`.
See also
--------
SGDClassifier
References
----------
http://en.wikipedia.org/wiki/Perceptron and references therein.
"""
def __init__(self, penalty=None, alpha=0.0001, fit_intercept=True,
n_iter=5, shuffle=True, verbose=0, eta0=1.0, n_jobs=1,
random_state=0, class_weight=None, warm_start=False):
super(Perceptron, self).__init__(loss="perceptron",
penalty=penalty,
alpha=alpha, l1_ratio=0,
fit_intercept=fit_intercept,
n_iter=n_iter,
shuffle=shuffle,
verbose=verbose,
random_state=random_state,
learning_rate="constant",
eta0=eta0,
power_t=0.5,
warm_start=warm_start,
class_weight=class_weight,
n_jobs=n_jobs)
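# Minimal usage sketch (assumes the standard scikit-learn estimator API and
# illustrative data names):
#
#     from sklearn.linear_model import Perceptron
#     clf = Perceptron(n_iter=10, random_state=0)
#     clf.fit(X_train, y_train)
#     y_pred = clf.predict(X_test)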
|
mindbody/API-Examples | refs/heads/master | SDKs/Python/test/test_get_client_referral_types_response.py | 1 | # coding: utf-8
"""
MINDBODY Public API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: v6
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import swagger_client
from swagger_client.models.get_client_referral_types_response import GetClientReferralTypesResponse # noqa: E501
from swagger_client.rest import ApiException
class TestGetClientReferralTypesResponse(unittest.TestCase):
"""GetClientReferralTypesResponse unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testGetClientReferralTypesResponse(self):
"""Test GetClientReferralTypesResponse"""
# FIXME: construct object with mandatory attributes with example values
# model = swagger_client.models.get_client_referral_types_response.GetClientReferralTypesResponse() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
|
GeraldLoeffler/nupic | refs/heads/master | nupic/encoders/multi.py | 7 | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
import capnp
from nupic.encoders.base import Encoder
from nupic.encoders.scalar import ScalarEncoder
from nupic.encoders.adaptivescalar import AdaptiveScalarEncoder
from nupic.encoders.date import DateEncoder
from nupic.encoders.logenc import LogEncoder
from nupic.encoders.category import CategoryEncoder
from nupic.encoders.sdrcategory import SDRCategoryEncoder
from nupic.encoders.delta import DeltaEncoder
from nupic.encoders.scalarspace import ScalarSpaceEncoder
from nupic.encoders.pass_through_encoder import PassThroughEncoder
from nupic.encoders.sparse_pass_through_encoder import SparsePassThroughEncoder
from nupic.encoders.coordinate import CoordinateEncoder
from nupic.encoders.geospatial_coordinate import GeospatialCoordinateEncoder
# multiencoder must be imported last because it imports * from this module!
from nupic.encoders.utils import bitsToString
from nupic.encoders.random_distributed_scalar import RandomDistributedScalarEncoder
from nupic.encoders.base import Encoder
from nupic.encoders.scalar_capnp import ScalarEncoderProto
# Map class to Cap'n Proto schema union attribute
_CLASS_ATTR_MAP = {
ScalarEncoder: "scalarEncoder",
AdaptiveScalarEncoder: "adaptivescalar",
DateEncoder: "dateEncoder",
LogEncoder: "logEncoder",
CategoryEncoder: "categoryEncoder",
CoordinateEncoder: "coordinateEncoder",
SDRCategoryEncoder: "sdrCategoryEncoder",
DeltaEncoder: "deltaEncoder",
PassThroughEncoder: "passThroughEncoder",
SparsePassThroughEncoder: "sparsePassThroughEncoder",
RandomDistributedScalarEncoder: "randomDistributedScalarEncoder"
}
# Invert for fast lookup in MultiEncoder.read()
_ATTR_CLASS_MAP = {value:key for key, value in _CLASS_ATTR_MAP.items()}
class MultiEncoder(Encoder):
"""A MultiEncoder encodes a dictionary or object with
  multiple components. A MultiEncoder contains a number
of sub-encoders, each of which encodes a separate component."""
# TODO expand this docstring to explain how the multiple encoders are combined
def __init__(self, encoderDescriptions=None):
self.width = 0
self.encoders = []
self.description = []
self.name = ''
if encoderDescriptions is not None:
self.addMultipleEncoders(encoderDescriptions)
def setFieldStats(self, fieldName, fieldStatistics ):
for (name, encoder, offset) in self.encoders:
encoder.setFieldStats(name, fieldStatistics)
def addEncoder(self, name, encoder):
self.encoders.append((name, encoder, self.width))
for d in encoder.getDescription():
self.description.append((d[0], d[1] + self.width))
self.width += encoder.getWidth()
self._flattenedEncoderList = None
self._flattenedFieldTypeList = None
def encodeIntoArray(self, obj, output):
for name, encoder, offset in self.encoders:
encoder.encodeIntoArray(self._getInputValue(obj, name), output[offset:])
def getDescription(self):
return self.description
def getWidth(self):
"""Represents the sum of the widths of each fields encoding."""
return self.width
def setLearning(self,learningEnabled):
encoders = self.getEncoderList()
for encoder in encoders:
encoder.setLearning(learningEnabled)
return
def encodeField(self, fieldName, value):
for name, encoder, offset in self.encoders:
if name == fieldName:
return encoder.encode(value)
def encodeEachField(self, inputRecord):
encodings = []
for name, encoder, offset in self.encoders:
encodings.append(encoder.encode(getattr(inputRecord, name)))
return encodings
def addMultipleEncoders(self, fieldEncodings):
"""
fieldEncodings -- a dict of dicts, mapping field names to the field params
dict.
Each field params dict has the following keys
1) data fieldname that matches the key ('fieldname')
2) an encoder type ('type')
3) and the encoder params (all other keys)
For example,
fieldEncodings={
'dateTime': dict(fieldname='dateTime', type='DateEncoder',
timeOfDay=(5,5)),
'attendeeCount': dict(fieldname='attendeeCount', type='ScalarEncoder',
name='attendeeCount', minval=0, maxval=250,
clipInput=True, w=5, resolution=10),
'consumption': dict(fieldname='consumption',type='ScalarEncoder',
name='consumption', minval=0,maxval=110,
clipInput=True, w=5, resolution=5),
}
    would yield a vector with one part encoded by the DateEncoder,
    and two parts separately taken care of by ScalarEncoders with the specified parameters.
    The three separate encodings are then merged into the final vector, in such a way that
    they always end up at the same locations within the vector.
"""
# Sort the encoders so that they end up in a controlled order
encoderList = sorted(fieldEncodings.items())
for key, fieldParams in encoderList:
if ':' not in key and fieldParams is not None:
fieldParams = fieldParams.copy()
fieldName = fieldParams.pop('fieldname')
encoderName = fieldParams.pop('type')
try:
self.addEncoder(fieldName, eval(encoderName)(**fieldParams))
except TypeError, e:
print ("#### Error in constructing %s encoder. Possibly missing "
"some required constructor parameters. Parameters "
"that were provided are: %s" % (encoderName, fieldParams))
raise
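  # Usage sketch for addMultipleEncoders (field names and parameters are
  # illustrative only):
  #
  #   encoder = MultiEncoder()
  #   encoder.addMultipleEncoders({
  #       'consumption': dict(fieldname='consumption', type='ScalarEncoder',
  #                           name='consumption', minval=0, maxval=110,
  #                           clipInput=True, w=5, resolution=5),
  #   })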
@classmethod
def read(cls, proto):
encoder = object.__new__(cls)
encoder.encoders = [None] * len(proto.encoders)
encoder.width = 0
for index, encoderProto in enumerate(proto.encoders):
# Identify which attr is set in union
encoderType = encoderProto.which()
encoderDetails = getattr(encoderProto, encoderType)
encoder.encoders[index] = (
encoderProto.name,
# Call class.read() where class is determined by _ATTR_CLASS_MAP
_ATTR_CLASS_MAP.get(encoderType).read(encoderDetails),
encoderProto.offset
)
encoder.width += encoder.encoders[index][1].getWidth()
# Derive description from encoder list
encoder.description = [(enc[1].name, enc[2]) for enc in encoder.encoders]
encoder.name = proto.name
return encoder
def write(self, proto):
proto.init("encoders", len(self.encoders))
for index, (name, encoder, offset) in enumerate(self.encoders):
encoderProto = proto.encoders[index]
encoderType = _CLASS_ATTR_MAP.get(encoder.__class__)
encoderProto.init(encoderType)
encoderDetails = getattr(encoderProto, encoderType)
encoder.write(encoderDetails)
encoderProto.name = name
encoderProto.offset = offset
proto.name = self.name
|
AKToronto/Bubba-Zombie | refs/heads/kk-4.4.x | scripts/tracing/draw_functrace.py | 14679 | #!/usr/bin/python
"""
Copyright 2008 (c) Frederic Weisbecker <fweisbec@gmail.com>
Licensed under the terms of the GNU GPL License version 2
This script parses a trace provided by the function tracer in
kernel/trace/trace_functions.c
The resulting trace is processed into a tree to produce a more human-readable
view of the call stack by drawing a textual but hierarchical tree of
calls. Only the functions' names and the call times are provided.
Usage:
Be sure that you have CONFIG_FUNCTION_TRACER
# mount -t debugfs nodev /sys/kernel/debug
# echo function > /sys/kernel/debug/tracing/current_tracer
$ cat /sys/kernel/debug/tracing/trace_pipe > ~/raw_trace_func
Wait some time, but not too much; the script is a bit slow.
Break the pipe (Ctrl + Z)
$ scripts/draw_functrace.py < raw_trace_func > draw_functrace
Then you have your drawn trace in draw_functrace
"""
import sys, re
class CallTree:
""" This class provides a tree representation of the functions
call stack. If a function has no parent in the kernel (interrupt,
syscall, kernel thread...) then it is attached to a virtual parent
called ROOT.
"""
ROOT = None
def __init__(self, func, time = None, parent = None):
self._func = func
self._time = time
if parent is None:
self._parent = CallTree.ROOT
else:
self._parent = parent
self._children = []
def calls(self, func, calltime):
""" If a function calls another one, call this method to insert it
into the tree at the appropriate place.
@return: A reference to the newly created child node.
"""
child = CallTree(func, calltime, self)
self._children.append(child)
return child
def getParent(self, func):
""" Retrieve the last parent of the current node that
		has the name given by func. If no parent with that
		name exists, then create it as a new child of root.
@return: A reference to the parent.
"""
tree = self
while tree != CallTree.ROOT and tree._func != func:
tree = tree._parent
if tree == CallTree.ROOT:
child = CallTree.ROOT.calls(func, None)
return child
return tree
def __repr__(self):
return self.__toString("", True)
def __toString(self, branch, lastChild):
if self._time is not None:
s = "%s----%s (%s)\n" % (branch, self._func, self._time)
else:
s = "%s----%s\n" % (branch, self._func)
i = 0
if lastChild:
branch = branch[:-1] + " "
while i < len(self._children):
if i != len(self._children) - 1:
s += "%s" % self._children[i].__toString(branch +\
" |", False)
else:
s += "%s" % self._children[i].__toString(branch +\
" |", True)
i += 1
return s
class BrokenLineException(Exception):
"""If the last line is not complete because of the pipe breakage,
we want to stop the processing and ignore this line.
"""
pass
class CommentLineException(Exception):
""" If the line is a comment (as in the beginning of the trace file),
just ignore it.
"""
pass
def parseLine(line):
line = line.strip()
if line.startswith("#"):
raise CommentLineException
m = re.match("[^]]+?\\] +([0-9.]+): (\\w+) <-(\\w+)", line)
if m is None:
raise BrokenLineException
return (m.group(1), m.group(2), m.group(3))
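# A trace line this regex accepts looks like (illustrative values):
#     bash-16939 [000]  6075.461561: mutex_unlock <-tracing_set_tracer
# yielding ("6075.461561", "mutex_unlock", "tracing_set_tracer").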
def main():
CallTree.ROOT = CallTree("Root (Nowhere)", None, None)
tree = CallTree.ROOT
for line in sys.stdin:
try:
calltime, callee, caller = parseLine(line)
except BrokenLineException:
break
except CommentLineException:
continue
tree = tree.getParent(caller)
tree = tree.calls(callee, calltime)
print CallTree.ROOT
if __name__ == "__main__":
main()
|
Statoil/SegyIO | refs/heads/master | python/examples/about.py | 1 | import sys
from segyio import TraceField
import segyio
def list_byte_offset_names():
print("Available offsets and their corresponding byte value:")
for x in TraceField.enums():
print(" {}: {}".format(str(x), x))
if __name__ == '__main__':
if len(sys.argv) < 4:
list_byte_offset_names()
sys.exit("Usage: about.py [file] [inline] [crossline]")
    # We need a way to convert the run-time inline/crossline arguments (given
    # as text) to the internally used TraceField enum, so we build a
    # string -> TraceField map and look names up in it. This dictionary
    # comprehension creates that map.
fieldmap = {str(x).lower(): x for x in TraceField.enums()}
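    # For example, assuming segyio's enum spells these members as in recent
    # releases, fieldmap["inline_3d"] would map to TraceField.INLINE_3D and
    # fieldmap["crossline_3d"] to TraceField.CROSSLINE_3D.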
filename = sys.argv[1]
inline_name, crossline_name = sys.argv[2].lower(), sys.argv[3].lower()
# exit if inline or crossline are unknown
if inline_name not in fieldmap:
list_byte_offset_names()
sys.exit("Unknown inline field '{}'".format(sys.argv[2]))
if crossline_name not in fieldmap:
list_byte_offset_names()
sys.exit("Unknown crossline field '{}'".format(sys.argv[3]))
inline, crossline = fieldmap[inline_name], fieldmap[crossline_name]
with segyio.open(filename, "r", inline, crossline) as f:
print("About '{}':".format(filename))
print("Format type: {}".format(f.format))
print("Offset count: {}".format(f.offsets))
print("ilines: {}".format(", ".join(map(str, f.ilines))))
print("xlines: {}".format(", ".join(map(str, f.xlines))))
print("+------+")
with segyio.open(filename, "r", crossline, inline) as f:
# with swapped inline/crossline
print("About '{}':".format(filename))
print("Format type: {}".format(f.format))
print("Offset count: {}".format(f.offsets))
print("ilines: {}".format(", ".join(map(str, f.ilines))))
print("xlines: {}".format(", ".join(map(str, f.xlines))))
|
fly19890211/edx-platform | refs/heads/master | lms/djangoapps/courseware/migrations/0013_auto__add_field_studentfieldoverride_created__add_field_studentfieldov.py | 94 | # -*- coding: utf-8 -*-
# pylint: disable=invalid-name, missing-docstring, unused-argument, unused-import, line-too-long
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'StudentFieldOverride.created'
db.add_column('courseware_studentfieldoverride', 'created',
self.gf('model_utils.fields.AutoCreatedField')(default=datetime.datetime.now),
keep_default=False)
# Adding field 'StudentFieldOverride.modified'
db.add_column('courseware_studentfieldoverride', 'modified',
self.gf('model_utils.fields.AutoLastModifiedField')(default=datetime.datetime.now),
keep_default=False)
# Adding index on 'StudentFieldOverride', fields ['course_id', 'location', 'student']
db.create_index('courseware_studentfieldoverride', ['course_id', 'location', 'student_id'])
def backwards(self, orm):
# Deleting field 'StudentFieldOverride.created'
db.delete_column('courseware_studentfieldoverride', 'created')
# Deleting field 'StudentFieldOverride.modified'
db.delete_column('courseware_studentfieldoverride', 'modified')
# Removing index on 'StudentFieldOverride', fields ['course_id', 'location', 'student']
db.delete_index('courseware_studentfieldoverride', ['course_id', 'location', 'student_id'])
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'courseware.offlinecomputedgrade': {
'Meta': {'unique_together': "(('user', 'course_id'),)", 'object_name': 'OfflineComputedGrade'},
'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'db_index': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
'gradeset': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'courseware.offlinecomputedgradelog': {
'Meta': {'ordering': "['-created']", 'object_name': 'OfflineComputedGradeLog'},
'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'db_index': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nstudents': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'seconds': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'courseware.studentfieldoverride': {
'Meta': {'unique_together': "(('course_id', 'field', 'location', 'student'),)", 'object_name': 'StudentFieldOverride'},
'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'db_index': 'True'}),
'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
'field': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('xmodule_django.models.LocationKeyField', [], {'max_length': '255', 'db_index': 'True'}),
'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'}),
'student': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'value': ('django.db.models.fields.TextField', [], {'default': "'null'"})
},
'courseware.studentmodule': {
'Meta': {'unique_together': "(('student', 'module_state_key', 'course_id'),)", 'object_name': 'StudentModule'},
'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'db_index': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'done': ('django.db.models.fields.CharField', [], {'default': "'na'", 'max_length': '8', 'db_index': 'True'}),
'grade': ('django.db.models.fields.FloatField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'max_grade': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'module_state_key': ('xmodule_django.models.LocationKeyField', [], {'max_length': '255', 'db_column': "'module_id'", 'db_index': 'True'}),
'module_type': ('django.db.models.fields.CharField', [], {'default': "'problem'", 'max_length': '32', 'db_index': 'True'}),
'state': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'student': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'courseware.studentmodulehistory': {
'Meta': {'object_name': 'StudentModuleHistory'},
'created': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'grade': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'max_grade': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'state': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'student_module': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['courseware.StudentModule']"}),
'version': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'null': 'True', 'blank': 'True'})
},
'courseware.xmodulestudentinfofield': {
'Meta': {'unique_together': "(('student', 'field_name'),)", 'object_name': 'XModuleStudentInfoField'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'field_name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'student': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'value': ('django.db.models.fields.TextField', [], {'default': "'null'"})
},
'courseware.xmodulestudentprefsfield': {
'Meta': {'unique_together': "(('student', 'module_type', 'field_name'),)", 'object_name': 'XModuleStudentPrefsField'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'field_name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'module_type': ('xmodule_django.models.BlockTypeKeyField', [], {'max_length': '64', 'db_index': 'True'}),
'student': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'value': ('django.db.models.fields.TextField', [], {'default': "'null'"})
},
'courseware.xmoduleuserstatesummaryfield': {
'Meta': {'unique_together': "(('usage_id', 'field_name'),)", 'object_name': 'XModuleUserStateSummaryField'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'field_name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'usage_id': ('xmodule_django.models.LocationKeyField', [], {'max_length': '255', 'db_index': 'True'}),
'value': ('django.db.models.fields.TextField', [], {'default': "'null'"})
}
}
complete_apps = ['courseware']
|
CameronLonsdale/sec-tools | refs/heads/master | python2/lib/python2.7/site-packages/setuptools/depends.py | 336 | import sys
import imp
import marshal
from distutils.version import StrictVersion
from imp import PKG_DIRECTORY, PY_COMPILED, PY_SOURCE, PY_FROZEN
from .py33compat import Bytecode
__all__ = [
'Require', 'find_module', 'get_module_constant', 'extract_constant'
]
class Require:
"""A prerequisite to building or installing a distribution"""
def __init__(self, name, requested_version, module, homepage='',
attribute=None, format=None):
if format is None and requested_version is not None:
format = StrictVersion
if format is not None:
requested_version = format(requested_version)
if attribute is None:
attribute = '__version__'
self.__dict__.update(locals())
del self.self
def full_name(self):
"""Return full package/distribution name, w/version"""
if self.requested_version is not None:
return '%s-%s' % (self.name, self.requested_version)
return self.name
def version_ok(self, version):
"""Is 'version' sufficiently up-to-date?"""
return self.attribute is None or self.format is None or \
str(version) != "unknown" and version >= self.requested_version
def get_version(self, paths=None, default="unknown"):
"""Get version number of installed module, 'None', or 'default'
Search 'paths' for module. If not found, return 'None'. If found,
return the extracted version attribute, or 'default' if no version
attribute was specified, or the value cannot be determined without
importing the module. The version is formatted according to the
requirement's version format (if any), unless it is 'None' or the
supplied 'default'.
"""
if self.attribute is None:
try:
f, p, i = find_module(self.module, paths)
if f:
f.close()
return default
except ImportError:
return None
v = get_module_constant(self.module, self.attribute, default, paths)
if v is not None and v is not default and self.format is not None:
return self.format(v)
return v
def is_present(self, paths=None):
"""Return true if dependency is present on 'paths'"""
return self.get_version(paths) is not None
def is_current(self, paths=None):
"""Return true if dependency is present and up-to-date on 'paths'"""
version = self.get_version(paths)
if version is None:
return False
return self.version_ok(version)
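# Illustrative usage sketch (names are hypothetical): declare a requirement
# on a module exposing __version__ and probe the installed copy.
#
#     req = Require('MyPackage', '1.0', 'mypackage')
#     req.full_name()    # -> 'MyPackage-1.0'
#     req.is_present()   # True if 'mypackage' can be found on sys.path
#     req.is_current()   # True if its __version__ compares >= '1.0'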
def find_module(module, paths=None):
"""Just like 'imp.find_module()', but with package support"""
parts = module.split('.')
while parts:
part = parts.pop(0)
f, path, (suffix, mode, kind) = info = imp.find_module(part, paths)
if kind == PKG_DIRECTORY:
parts = parts or ['__init__']
paths = [path]
elif parts:
raise ImportError("Can't find %r in %s" % (parts, module))
return info
def get_module_constant(module, symbol, default=-1, paths=None):
"""Find 'module' by searching 'paths', and extract 'symbol'
Return 'None' if 'module' does not exist on 'paths', or it does not define
'symbol'. If the module defines 'symbol' as a constant, return the
constant. Otherwise, return 'default'."""
try:
f, path, (suffix, mode, kind) = find_module(module, paths)
except ImportError:
# Module doesn't exist
return None
try:
if kind == PY_COMPILED:
f.read(8) # skip magic & date
code = marshal.load(f)
elif kind == PY_FROZEN:
code = imp.get_frozen_object(module)
elif kind == PY_SOURCE:
code = compile(f.read(), path, 'exec')
else:
# Not something we can parse; we'll have to import it. :(
if module not in sys.modules:
imp.load_module(module, f, path, (suffix, mode, kind))
return getattr(sys.modules[module], symbol, None)
finally:
if f:
f.close()
return extract_constant(code, symbol, default)
def extract_constant(code, symbol, default=-1):
"""Extract the constant value of 'symbol' from 'code'
If the name 'symbol' is bound to a constant value by the Python code
object 'code', return that value. If 'symbol' is bound to an expression,
return 'default'. Otherwise, return 'None'.
Return value is based on the first assignment to 'symbol'. 'symbol' must
be a global, or at least a non-"fast" local in the code block. That is,
only 'STORE_NAME' and 'STORE_GLOBAL' opcodes are checked, and 'symbol'
must be present in 'code.co_names'.
"""
if symbol not in code.co_names:
# name's not there, can't possibly be an assignment
return None
name_idx = list(code.co_names).index(symbol)
STORE_NAME = 90
STORE_GLOBAL = 97
LOAD_CONST = 100
const = default
for byte_code in Bytecode(code):
op = byte_code.opcode
arg = byte_code.arg
if op == LOAD_CONST:
const = code.co_consts[arg]
elif arg == name_idx and (op == STORE_NAME or op == STORE_GLOBAL):
return const
else:
const = default
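# Illustrative check of extract_constant() on a freshly compiled module
# body: '__version__' is bound by STORE_NAME right after a LOAD_CONST, so
# its value is recovered without importing anything.
#
#     code = compile("__version__ = '1.0'", "<example>", "exec")
#     extract_constant(code, '__version__')   # -> '1.0'
#     extract_constant(code, 'missing')       # -> None (name never bound)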
def _update_globals():
"""
Patch the globals to remove the objects not available on some platforms.
XXX it'd be better to test assertions about bytecode instead.
"""
if not sys.platform.startswith('java') and sys.platform != 'cli':
return
incompatible = 'extract_constant', 'get_module_constant'
for name in incompatible:
del globals()[name]
__all__.remove(name)
_update_globals()
|
tempbottle/Nuitka | refs/heads/develop | nuitka/codegen/ConstantCodes.py | 1 | # Copyright 2015, Kay Hayen, mailto:kay.hayen@gmail.com
#
# Part of "Nuitka", an optimizing Python compiler that is compatible and
# integrates with CPython, but also works on its own.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
""" Low level constant code generation.
This deals with constants: their creation, their access, and some checks about
them. Even mutable constants should not change during the course of the
program.
There are shared constants, which are created for multiple modules to use, you
can think of them as globals. And there are module local constants, which are
for a single module only.
"""
import ctypes
import re
import struct
from logging import warning
import marshal
from nuitka import Options
from nuitka.__past__ import iterItems, long, unicode # pylint: disable=W0622
from nuitka.codegen import Emission
from nuitka.Constants import (
constant_builtin_types,
getConstantWeight,
isMutable
)
from .BlobCodes import StreamData
from .Emission import SourceCodeCollector
from .Indentation import indented
from .Pickling import getStreamedConstant
from .templates.CodeTemplatesConstants import template_constants_reading
def generateConstantReferenceCode(to_name, expression, emit, context):
""" Assign the constant behind the expression to to_name."""
getConstantAccess(
to_name = to_name,
constant = expression.getConstant(),
emit = emit,
context = context
)
# One global stream of constant information. In the future it might make
# sense to have per module ones, for better locality of indexes within it,
# but we don't do this yet.
stream_data = StreamData()
# TODO: This is deprecated, and should be removed.
def getConstantCode(context, constant):
return context.getConstantCode(constant)
def getConstantCodeName(context, constant):
return context.getConstantCode(constant)
# TODO: The determination of this should already happen in Building or in a
# helper not during code generation.
_match_attribute_names = re.compile(r"[a-zA-Z_][a-zA-Z0-9_]*$")
def _isAttributeName(value):
# TODO: The exception is to make sure we intern the ".0" argument name
# used for generator expressions, iterator value.
return _match_attribute_names.match(value) or value == ".0"
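# For example, _isAttributeName("my_attr2") matches, and ".0" is accepted
# through the special case for generator expression iterator arguments.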
# Indicator to standalone mode code whether we need the pickling module early
# on, which we try to avoid, but which can happen with things we cannot
# create directly.
_needs_pickle = False
def needsPickleInit():
return _needs_pickle
def _getUnstreamCode2(constant_value):
saved = getStreamedConstant(
constant_value = constant_value
)
assert type(saved) is bytes
# We need to remember having to use pickle, pylint: disable=W0603
global _needs_pickle
_needs_pickle = True
return stream_data.getStreamDataCode(saved)
def _getUnstreamCode(constant_value, constant_identifier):
""" Get code to assign given constant value to an identifier from a stream.
This uses pickle, and usage should be minimized.
"""
return "%s = UNSTREAM_CONSTANT( %s );" % (
constant_identifier,
_getUnstreamCode2(constant_value)
)
sizeof_long = ctypes.sizeof(ctypes.c_long)
max_unsigned_long = 2**(sizeof_long*8)-1
# The gcc gives a warning for -2**sizeof_long*8-1, which is still an "int", but
# seems to not work (without warning) as literal, so avoid it.
min_signed_long = -(2**(sizeof_long*8-1)-1)
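# For example, with a 4-byte C "long" (32-bit platforms and 64-bit Windows),
# max_unsigned_long is 4294967295 and min_signed_long is -2147483647; with
# an 8-byte "long" the bounds widen accordingly.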
done = set()
def _getConstantInitValueCode(constant_value, constant_type):
""" Return code, if possible, to create a constant.
It's only used for module local constants, like error messages, and
    provides no caching of the values. A return value of "None" indicates
    an error.
"""
# This function is a case driven by returns, pylint: disable=R0911
if constant_type is unicode:
try:
encoded = constant_value.encode("utf-8")
if str is not unicode:
return "UNSTREAM_UNICODE( %s )" % (
stream_data.getStreamDataCode(encoded)
)
else:
return "UNSTREAM_STRING( %s, %d, %d )" % (
stream_data.getStreamDataCode(encoded, fixed_size = True),
len(constant_value),
1 if _isAttributeName(constant_value) else 0
)
except UnicodeEncodeError:
# TODO: try and use "surrogateescape" for this
return None
elif constant_type is str:
# Python3: Strings that can be encoded as UTF-8 are done more or less
        # directly. When they cannot be expressed as UTF-8, which is rare, we
        # can indeed use pickling.
assert str is not unicode
if len(constant_value) == 1:
return "UNSTREAM_CHAR( %d, %d )" % (
ord(constant_value[0]),
1 if _isAttributeName(constant_value) else 0
)
else:
return "UNSTREAM_STRING( %s, %d )" % (
stream_data.getStreamDataCode(constant_value),
1 if _isAttributeName(constant_value) else 0
)
elif constant_type is bytes:
assert str is unicode
return "UNSTREAM_BYTES( %s )" % (
stream_data.getStreamDataCode(constant_value)
)
else:
return None
def decideMarshal(constant_value):
""" Decide of a constant can be created using "marshal" module methods.
This is not the case for everything. A prominent exception is types,
they are constants, but the "marshal" module refuses to work with
them.
"""
constant_type = type(constant_value)
if constant_type is type:
# Types cannot be marshaled, there is no choice about it.
return False
elif constant_type is dict:
        # Look at all the keys and values; if any of them cannot or should
        # not be marshaled, neither can the dictionary.
for key, value in iterItems(constant_value):
if not decideMarshal(key):
return False
if not decideMarshal(value):
return False
elif constant_type in (tuple, list, set, frozenset):
for element_value in constant_value:
if not decideMarshal(element_value):
return False
return True
def isMarshalConstant(constant_value):
""" Decide if we want to use marshal to create a constant.
    The reason we do this is that creating dictionaries with 700
    elements creates a lot of C code, while usually gaining no performance
    at all. The MSVC compiler is especially notorious for hanging nearly
    forever with this active, due to its optimizer not scaling.
Therefore we use a constant "weight" (how expensive it is), and apply
that to decide.
If marshal is not possible, or constant "weight" is too large, we
don't do it. Also, for some constants, marshal can fail, and return
other values. Check that too. In that case, we have to create it.
"""
if not decideMarshal(constant_value):
return False
if getConstantWeight(constant_value) < 20:
return False
marshal_value = marshal.dumps(constant_value)
restored = marshal.loads(marshal_value)
# TODO: Potentially warn about these.
return constant_value == restored
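# Illustrative decisions: a type anywhere inside a constant forbids marshal,
# while a large but plain container both can and should use it (assuming its
# constant "weight" clears the threshold above).
#
#     decideMarshal({1: type})                      # -> False
#     decideMarshal({1: 2})                         # -> True
#     isMarshalConstant(dict.fromkeys(range(700)))  # -> True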
def attemptToMarshal(constant_identifier, constant_value, emit):
""" Try and marshal a value, if so decided. Indicate with return value.
See above for why marshal is only used in problematic cases.
"""
if not isMarshalConstant(constant_value):
return False
marshal_value = marshal.dumps(constant_value)
restored = marshal.loads(marshal_value)
# TODO: The check in isMarshalConstant is currently preventing this from
# happening.
if constant_value != restored:
warning("Problem with marshal of constant %r", constant_value)
return False
emit(
"%s = PyMarshal_ReadObjectFromString( (char *)%s );" % (
constant_identifier,
stream_data.getStreamDataCode(marshal_value)
)
)
return True
def _addConstantInitCode(context, emit, check, constant_type, constant_value,
constant_identifier, module_level):
""" Emit code for a specific constant to be prepared during init.
This may be module or global init. Code makes sure that nested
constants belong into the same scope.
"""
# This is just a wrapper to make sure that hash values become initialized
# for every constant too.
if constant_value in constant_builtin_types:
return
if constant_value is None:
return
if constant_value is False:
return
if constant_value is True:
return
if constant_value is Ellipsis:
return
# Do not repeat ourselves.
if constant_identifier in done:
return
if Options.shallTraceExecution():
emit("""puts("Creating constant: %s");""" % constant_identifier)
# Then it's a real named constant not yet created.
__addConstantInitCode(context, emit, check, constant_type, constant_value,
constant_identifier, module_level)
if Options.isDebug():
emit(
"""\
hash_%(constant_identifier)s = DEEP_HASH( %(constant_identifier)s );""" % {
"constant_identifier" : constant_identifier
}
)
check(
"""\
CHECK_OBJECT( %(constant_identifier)s );
assert( hash_%(constant_identifier)s == DEEP_HASH( %(constant_identifier)s ) );""" % {
"constant_identifier" : constant_identifier
}
)
if Options.isExperimental():
check(
"""\
if ( hash_%(constant_identifier)s == -1 ) puts("Note: Weak hash for: %(constant_identifier)s.");""" % {
"constant_identifier" : constant_identifier
}
)
def __addConstantInitCode(context, emit, check, constant_type, constant_value,
constant_identifier, module_level):
""" Emit code for a specific constant to be prepared during init.
This may be module or global init. Code makes sure that nested
constants belong into the same scope.
"""
# This has many cases, that all return, and do a lot.
# pylint: disable=R0911,R0912,R0914,R0915
    # For the module level, we only mean to create constants that are used only
    # inside of it. For the global level, it must be single use.
if module_level:
if context.global_context.getConstantUseCount(constant_identifier) != 1:
return
else:
if context.getConstantUseCount(constant_identifier) == 1:
return
# Adding it to "done". We cannot have recursive constants, so this is OK
# to be done now.
done.add(constant_identifier)
# Use shortest code for ints and longs.
if constant_type is long:
        # See above, same for long values. Note: These of course do not
        # exist with Python3, which would have been covered before.
if constant_value >= 0 and constant_value <= max_unsigned_long:
emit (
"%s = PyLong_FromUnsignedLong( %sul );" % (
constant_identifier,
constant_value
)
)
return
elif constant_value < 0 and constant_value >= min_signed_long:
emit (
"%s = PyLong_FromLong( %sl );" % (
constant_identifier,
constant_value
)
)
return
elif constant_value == min_signed_long-1:
# There are compilers out there, that give warnings for the literal
# MININT when used. We work around that warning here.
emit(
"""\
%s = PyLong_FromLong( %sl ); // To be corrected with -1 in-place next lines.
CHECK_OBJECT( const_int_pos_1 );
%s = PyNumber_InPlaceSubtract( %s, const_int_pos_1 );""" % (
constant_identifier,
min_signed_long,
constant_identifier,
constant_identifier
)
)
return
else:
# Note, other longs cannot be handled like that yet. We might create
# code that does it better in the future, abusing e.g. internal
# representation of "long" integer values.
pass
elif constant_type is int:
if constant_value >= min_signed_long:
emit(
"%s = PyInt_FromLong( %sl );" % (
constant_identifier,
constant_value
)
)
return
else:
# There are compilers out there, that give warnings for the literal
# MININT when used. We work around that warning here.
assert constant_value == min_signed_long-1
emit(
"""\
%s = PyInt_FromLong( %sl ); // To be corrected in next line.
%s = PyNumber_InPlaceSubtract( %s, const_int_pos_1 );""" % (
constant_identifier,
min_signed_long,
constant_identifier,
constant_identifier
)
)
return
if constant_type is unicode:
# Attempting to marshal is OK, but esp. Python2 cannot do it for all
# "unicode" values.
if attemptToMarshal(constant_identifier, constant_value, emit):
return
try:
encoded = constant_value.encode("utf-8")
if str is not unicode:
emit(
"%s = UNSTREAM_UNICODE( %s );" % (
constant_identifier,
stream_data.getStreamDataCode(encoded)
)
)
else:
emit(
"%s = UNSTREAM_STRING( %s, %d );" % (
constant_identifier,
stream_data.getStreamDataCode(encoded),
1 if _isAttributeName(constant_value) else 0
)
)
return
except UnicodeEncodeError:
# So fall back to below code, which will unstream it then.
pass
elif constant_type is str:
# Python3: Strings that can be encoded as UTF-8 are done more or less
        # directly. When they cannot be expressed as UTF-8, which is rare, we
        # can indeed use pickling.
assert str is not unicode
if len(constant_value) == 1:
emit(
"%s = UNSTREAM_CHAR( %d, %d );" % (
constant_identifier,
ord(constant_value[0]),
1 if _isAttributeName(constant_value) else 0
)
)
else:
emit(
"%s = UNSTREAM_STRING( %s, %d );" % (
constant_identifier,
stream_data.getStreamDataCode(constant_value),
1 if _isAttributeName(constant_value) else 0
)
)
return
elif constant_type is bytes:
# Python3 only, for Python2, bytes do not happen.
assert str is unicode
emit(
"%s = UNSTREAM_BYTES( %s );" % (
constant_identifier,
stream_data.getStreamDataCode(constant_value)
)
)
return
if constant_type is float:
emit(
"%s = UNSTREAM_FLOAT( %s );" % (
constant_identifier,
stream_data.getStreamDataCode(
value = struct.pack("<d", constant_value),
fixed_size = True
)
)
)
return
if constant_type is dict:
# Not all dictionaries can or should be marshaled. For small ones,
# or ones with strange values, like "{1:type}", we have to do it.
if attemptToMarshal(constant_identifier, constant_value, emit):
return
emit(
"%s = _PyDict_NewPresized( %d );" % (
constant_identifier,
len(constant_value)
)
)
for key, value in iterItems(constant_value):
key_name = getConstantCodeName(context, key)
_addConstantInitCode(
emit = emit,
check = check,
constant_type = type(key),
constant_value = key,
constant_identifier = key_name,
module_level = module_level,
context = context
)
value_name = getConstantCodeName(context, value)
_addConstantInitCode(
emit = emit,
check = check,
constant_type = type(value),
constant_value = value,
constant_identifier = value_name,
module_level = module_level,
context = context
)
# TODO: Error checking for debug.
emit(
"PyDict_SetItem( %s, %s, %s );" % (
constant_identifier,
key_name,
value_name
)
)
emit(
"assert( PyDict_Size( %s ) == %d );" % (
constant_identifier,
len(constant_value)
)
)
return
if constant_type is tuple:
# Not all tuples can or should be marshaled. For small ones,
# or ones with strange values, like "(type,)", we have to do it.
if attemptToMarshal(constant_identifier, constant_value, emit):
return
emit(
"%s = PyTuple_New( %d );" % (
constant_identifier,
len(constant_value)
)
)
for count, element_value in enumerate(constant_value):
element_name = getConstantCodeName(
context = context,
constant = element_value
)
_addConstantInitCode(
emit = emit,
check = check,
constant_type = type(element_value),
constant_value = element_value,
constant_identifier = getConstantCodeName(
context = context,
constant = element_value
),
module_level = module_level,
context = context
)
# Do not take references, these won't be deleted ever.
emit(
"PyTuple_SET_ITEM( %s, %d, %s ); Py_INCREF( %s );" % (
constant_identifier,
count,
element_name,
element_name
)
)
return
if constant_type is list:
# Not all lists can or should be marshaled. For small ones,
# or ones with strange values, like "[type]", we have to do it.
if attemptToMarshal(constant_identifier, constant_value, emit):
return
emit(
"%s = PyList_New( %d );" % (
constant_identifier,
len(constant_value)
)
)
for count, element_value in enumerate(constant_value):
element_name = getConstantCodeName(
context = context,
constant = element_value
)
_addConstantInitCode(
emit = emit,
check = check,
constant_type = type(element_value),
constant_value = element_value,
constant_identifier = element_name,
module_level = module_level,
context = context
)
# Do not take references, these won't be deleted ever.
emit(
"PyList_SET_ITEM( %s, %d, %s ); Py_INCREF( %s );" % (
constant_identifier,
count,
element_name,
element_name
)
)
return
if constant_type is set:
# Not all sets can or should be marshaled. For small ones,
# or ones with strange values, like "{type}", we have to do it.
if attemptToMarshal(constant_identifier, constant_value, emit):
return
# TODO: Hinting size is really not possible?
emit(
"%s = PySet_New( NULL );" % constant_identifier
)
for element_value in constant_value:
element_name = getConstantCodeName(
context = context,
constant = element_value
)
_addConstantInitCode(
emit = emit,
check = check,
constant_type = type(element_value),
constant_value = element_value,
constant_identifier = element_name,
module_level = module_level,
context = context
)
emit(
"PySet_Add( %s, %s );" % (
constant_identifier,
element_name
)
)
emit(
"assert( PySet_Size( %s ) == %d );" % (
constant_identifier,
len(constant_value)
)
)
return
if constant_type is slice:
slice1_name = getConstantCodeName(context, constant_value.start)
_addConstantInitCode(
emit = emit,
check = check,
constant_type = type(constant_value.start),
constant_value = constant_value.start,
constant_identifier = slice1_name,
module_level = module_level,
context = context
)
slice2_name = getConstantCodeName(context, constant_value.stop)
_addConstantInitCode(
emit = emit,
check = check,
constant_type = type(constant_value.stop),
constant_value = constant_value.stop,
constant_identifier = slice2_name,
module_level = module_level,
context = context
)
slice3_name = getConstantCodeName(context, constant_value.step)
_addConstantInitCode(
emit = emit,
check = check,
constant_type = type(constant_value.step),
constant_value = constant_value.step,
constant_identifier = slice3_name,
module_level = module_level,
context = context
)
emit(
"%s = PySlice_New( %s, %s, %s );" % (
constant_identifier,
slice1_name,
slice2_name,
slice3_name
)
)
return
    # TODO: Ranges could very well be created for Python3. And "frozenset"
    # and set are to be examined.
if constant_type in (frozenset, complex, unicode, long, range):
# Lets attempt marshal these.
if attemptToMarshal(constant_identifier, constant_value, emit):
return
emit(
_getUnstreamCode(constant_value, constant_identifier)
)
return
# Must not reach this, if we did, it's in error, and we need to know.
assert False, (type(constant_value), constant_value, constant_identifier)
def getConstantsInitCode(context):
emit = SourceCodeCollector()
check = SourceCodeCollector()
# Sort items by length and name, so we are deterministic and pretty.
sorted_constants = sorted(
iterItems(context.getConstants()),
key = lambda k: (len(k[0]), k[0])
)
for constant_identifier, constant_value in sorted_constants:
_addConstantInitCode(
emit = emit,
check = check,
constant_type = type(constant_value),
constant_value = constant_value,
constant_identifier = constant_identifier,
module_level = False,
context = context
)
return emit.codes, check.codes
def getConstantsDeclCode(context):
statements = []
# Sort items by length and name, so we are deterministic and pretty.
sorted_constants = sorted(
iterItems(context.getConstants()),
key = lambda k: (len(k[0]), k[0])
)
for constant_identifier, constant_value in sorted_constants:
# Need not declare built-in types.
if constant_value in constant_builtin_types:
continue
if constant_value is None:
continue
if constant_value is False:
continue
if constant_value is True:
continue
if constant_value is Ellipsis:
continue
if context.getConstantUseCount(constant_identifier) != 1:
statements.append("PyObject *%s;" % constant_identifier)
if Options.isDebug():
statements.append("Py_hash_t hash_%s;" % constant_identifier)
return statements
def getConstantAccess(to_name, constant, emit, context):
# Many cases, because for each type, we may copy or optimize by creating
# empty. pylint: disable=R0912,R0915
if type(constant) is dict:
if constant:
for key, value in iterItems(constant):
# key cannot be mutable.
assert not isMutable(key)
if isMutable(value):
needs_deep = True
break
else:
needs_deep = False
if needs_deep:
code = "DEEP_COPY( %s )" % getConstantCode(
constant = constant,
context = context
)
else:
code = "PyDict_Copy( %s )" % getConstantCode(
constant = constant,
context = context
)
else:
code = "PyDict_New()"
ref_count = 1
elif type(constant) is set:
if constant:
code = "PySet_New( %s )" % getConstantCode(
constant = constant,
context = context
)
else:
code = "PySet_New( NULL )"
ref_count = 1
elif type(constant) is list:
if constant:
for value in constant:
if isMutable(value):
needs_deep = True
break
else:
needs_deep = False
if needs_deep:
code = "DEEP_COPY( %s )" % getConstantCode(
constant = constant,
context = context
)
else:
code = "LIST_COPY( %s )" % getConstantCode(
constant = constant,
context = context
)
else:
code = "PyList_New( 0 )"
ref_count = 1
elif type(constant) is tuple:
for value in constant:
if isMutable(value):
needs_deep = True
break
else:
needs_deep = False
if needs_deep:
code = "DEEP_COPY( %s )" % getConstantCode(
constant = constant,
context = context
)
ref_count = 1
else:
code = getConstantCode(
context = context,
constant = constant
)
ref_count = 0
else:
code = getConstantCode(
context = context,
constant = constant
)
ref_count = 0
emit(
"%s = %s;" % (
to_name,
code,
)
)
if ref_count:
context.addCleanupTempName(to_name)
def getModuleConstantCode(constant):
assert type(constant) is str
result = _getConstantInitValueCode(
constant_value = constant,
constant_type = type(constant)
)
assert result is not None
return result
constant_counts = {}
def getConstantInitCodes(module_context):
decls = []
inits = Emission.SourceCodeCollector()
checks = Emission.SourceCodeCollector()
sorted_constants = sorted(
module_context.getConstants(),
key = lambda k: (len(k[0]), k[0])
)
global_context = module_context.global_context
for constant_identifier in sorted_constants:
if not constant_identifier.startswith("const_"):
continue
if global_context.getConstantUseCount(constant_identifier) == 1:
qualifier = "static"
constant_value = global_context.constants[constant_identifier]
_addConstantInitCode(
emit = inits,
check = checks,
constant_type = type(constant_value),
constant_value = constant_value,
constant_identifier = constant_identifier,
module_level = True,
context = module_context
)
else:
qualifier = "extern"
decls.append(
"%s PyObject *%s;" % (
qualifier,
constant_identifier
)
)
if Options.isDebug():
decls.append(
"%s Py_hash_t hash_%s;" % (
qualifier,
constant_identifier
)
)
return decls, inits.codes, checks.codes
def allocateNestedConstants(module_context):
def considerForDeferral(constant_value):
if isMarshalConstant(constant_value):
return
module_context.getConstantCode(constant_value)
constant_type = type(constant_value)
if constant_type in (tuple, list, set, frozenset):
for element in constant_value:
considerForDeferral(element)
elif constant_type is dict:
for key, value in iterItems(constant_value):
considerForDeferral(key)
considerForDeferral(value)
elif constant_type is slice:
considerForDeferral(constant_value.start)
considerForDeferral(constant_value.step)
considerForDeferral(constant_value.stop)
for constant_identifier in set(module_context.getConstants()):
constant_value = module_context.global_context.constants[
constant_identifier
]
constant_type = type(constant_value)
if constant_type in (tuple, dict, list, set, frozenset, slice):
considerForDeferral(constant_value)
def getConstantsDefinitionCode(context):
""" Create the code code "__constants.cpp" file.
This needs to create code to make all global constants (used in more
than one module) and create them.
"""
constant_inits, constant_checks = getConstantsInitCode(
context = context
)
constant_declarations = getConstantsDeclCode(
context = context
)
return template_constants_reading % {
"constant_declarations" : '\n'.join(constant_declarations),
"constant_inits" : indented(constant_inits),
"constant_checks" : indented(constant_checks)
}
|
diego-d5000/MisValesMd | refs/heads/master | env/lib/python2.7/site-packages/django/contrib/gis/geos/prototypes/errcheck.py | 1 | """
Error checking functions for GEOS ctypes prototype functions.
"""
from ctypes import c_void_p, string_at
from django.contrib.gis.geos.error import GEOSException
from django.contrib.gis.geos.prototypes.threadsafe import GEOSFunc
# Getting the `free` routine used to free the memory allocated for
# string pointers returned by GEOS.
free = GEOSFunc('GEOSFree')
free.argtypes = [c_void_p]
free.restype = None
def last_arg_byref(args):
"Returns the last C argument's value by reference."
return args[-1]._obj.value
def check_dbl(result, func, cargs):
"Checks the status code and returns the double value passed in by reference."
# Checking the status code
if result != 1:
return None
# Double passed in by reference, return its value.
return last_arg_byref(cargs)
def check_geom(result, func, cargs):
"Error checking on routines that return Geometries."
if not result:
raise GEOSException('Error encountered checking Geometry returned from GEOS C function "%s".' % func.__name__)
return result
def check_minus_one(result, func, cargs):
"Error checking on routines that should not return -1."
if result == -1:
raise GEOSException('Error encountered in GEOS C function "%s".' % func.__name__)
else:
return result
def check_predicate(result, func, cargs):
"Error checking for unary/binary predicate functions."
val = ord(result) # getting the ordinal from the character
if val == 1:
return True
elif val == 0:
return False
else:
raise GEOSException('Error encountered on GEOS C predicate function "%s".' % func.__name__)
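# For example, a predicate such as GEOSContains hands back the C char
# '\x01' for true; ord('\x01') is 1, so check_predicate turns it into True.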
def check_sized_string(result, func, cargs):
"""
Error checking for routines that return explicitly sized strings.
This frees the memory allocated by GEOS at the result pointer.
"""
if not result:
raise GEOSException('Invalid string pointer returned by GEOS C function "%s"' % func.__name__)
# A c_size_t object is passed in by reference for the second
    # argument on these routines, and it's needed to determine the
# correct size.
s = string_at(result, last_arg_byref(cargs))
# Freeing the memory allocated within GEOS
free(result)
return s
def check_string(result, func, cargs):
"""
Error checking for routines that return strings.
This frees the memory allocated by GEOS at the result pointer.
"""
if not result:
raise GEOSException('Error encountered checking string return value in GEOS C function "%s".' % func.__name__)
# Getting the string value at the pointer address.
s = string_at(result)
# Freeing the memory allocated within GEOS
free(result)
return s
def check_zero(result, func, cargs):
"Error checking on routines that should not return 0."
if result == 0:
raise GEOSException('Error encountered in GEOS C function "%s".' % func.__name__)
else:
return result
|
cristiana214/cristianachavez214-cristianachavez | refs/heads/master | python/src/Lib/lib2to3/fixes/fix_idioms.py | 53 | """Adjust some old Python 2 idioms to their modern counterparts.
* Change some type comparisons to isinstance() calls:
type(x) == T -> isinstance(x, T)
type(x) is T -> isinstance(x, T)
type(x) != T -> not isinstance(x, T)
type(x) is not T -> not isinstance(x, T)
* Change "while 1:" into "while True:".
* Change both
v = list(EXPR)
v.sort()
foo(v)
and the more general
v = EXPR
v.sort()
foo(v)
into
v = sorted(EXPR)
foo(v)
"""
# Author: Jacques Frechet, Collin Winter
# Local imports
from .. import fixer_base
from ..fixer_util import Call, Comma, Name, Node, syms
CMP = "(n='!=' | '==' | 'is' | n=comp_op< 'is' 'not' >)"
TYPE = "power< 'type' trailer< '(' x=any ')' > >"
class FixIdioms(fixer_base.BaseFix):
explicit = True # The user must ask for this fixer
PATTERN = r"""
isinstance=comparison< %s %s T=any >
|
isinstance=comparison< T=any %s %s >
|
while_stmt< 'while' while='1' ':' any+ >
|
sorted=any<
any*
simple_stmt<
expr_stmt< id1=any '='
power< list='list' trailer< '(' (not arglist<any+>) any ')' > >
>
'\n'
>
sort=
simple_stmt<
power< id2=any
trailer< '.' 'sort' > trailer< '(' ')' >
>
'\n'
>
next=any*
>
|
sorted=any<
any*
simple_stmt< expr_stmt< id1=any '=' expr=any > '\n' >
sort=
simple_stmt<
power< id2=any
trailer< '.' 'sort' > trailer< '(' ')' >
>
'\n'
>
next=any*
>
""" % (TYPE, CMP, CMP, TYPE)
def match(self, node):
r = super(FixIdioms, self).match(node)
# If we've matched one of the sort/sorted subpatterns above, we
# want to reject matches where the initial assignment and the
# subsequent .sort() call involve different identifiers.
if r and "sorted" in r:
if r["id1"] == r["id2"]:
return r
return None
return r
def transform(self, node, results):
if "isinstance" in results:
return self.transform_isinstance(node, results)
elif "while" in results:
return self.transform_while(node, results)
elif "sorted" in results:
return self.transform_sort(node, results)
else:
raise RuntimeError("Invalid match")
def transform_isinstance(self, node, results):
x = results["x"].clone() # The thing inside of type()
T = results["T"].clone() # The type being compared against
x.set_prefix("")
T.set_prefix(" ")
test = Call(Name("isinstance"), [x, Comma(), T])
if "n" in results:
test.set_prefix(" ")
test = Node(syms.not_test, [Name("not"), test])
test.set_prefix(node.get_prefix())
return test
def transform_while(self, node, results):
one = results["while"]
one.replace(Name("True", prefix=one.get_prefix()))
def transform_sort(self, node, results):
sort_stmt = results["sort"]
next_stmt = results["next"]
list_call = results.get("list")
simple_expr = results.get("expr")
if list_call:
list_call.replace(Name("sorted", prefix=list_call.get_prefix()))
elif simple_expr:
new = simple_expr.clone()
new.set_prefix("")
simple_expr.replace(Call(Name("sorted"), [new],
prefix=simple_expr.get_prefix()))
else:
raise RuntimeError("should not have reached here")
sort_stmt.remove()
if next_stmt:
next_stmt[0].set_prefix(sort_stmt.get_prefix())
|
higgsd/euler | refs/heads/master | py/34.py | 1 | # 40730
import euler
f = [1]
f += [euler.product(range(1, i + 1)) for i in range(1, 10)]
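# Project Euler 34: sum all numbers equal to the sum of the factorials of
# their digits (145 and 40585, totalling 40730; 1 and 2 are excluded as not
# being sums). f caches 0! through 9!. The search can stop at 7 * 9! because
# eight digit-factorials sum to at most 8 * 9! = 2903040, only a 7-digit
# number, so no 8-digit number can ever qualify.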
s = 0
for n in xrange(11, 7 * f[9]):
if n == sum([f[int(c)] for c in str(n)]):
s += n
print s
|
onecrayon/PopClip-Extensions | refs/heads/master | source/OneNote/rauth/utils.py | 25 | # -*- coding: utf-8 -*-
'''
rauth.utils
-----------
General utilities.
'''
from rauth.compat import quote, parse_qsl, is_basestring
from requests.structures import CaseInsensitiveDict as cidict
from requests.auth import AuthBase
FORM_URLENCODED = 'application/x-www-form-urlencoded'
ENTITY_METHODS = ('POST', 'PUT', 'PATCH')
OPTIONAL_OAUTH_PARAMS = ('oauth_callback', 'oauth_verifier', 'oauth_version')
def absolute_url(url):
return url.startswith(('http://', 'https://'))
def parse_utf8_qsl(s):
d = dict(parse_qsl(s))
for k, v in d.items(): # pragma: no cover
if not isinstance(k, bytes) and not isinstance(v, bytes):
# skip this iteration if we have no keys or values to update
continue
d.pop(k)
if isinstance(k, bytes):
k = k.decode('utf-8')
if isinstance(v, bytes):
v = v.decode('utf-8')
d[k] = v
return d
def get_sorted_params(params):
def sorting_gen():
for k in sorted(params.keys()):
yield '='.join((k, params[k]))
return '&'.join(sorting_gen())
class CaseInsensitiveDict(cidict):
def __init__(self, d=None):
lowered_d = {}
if d is not None:
if isinstance(d, dict):
lowered_d = self._get_lowered_d(d)
elif isinstance(d, list):
return self.__init__(dict(d))
return super(CaseInsensitiveDict, self).__init__(lowered_d)
def _get_lowered_d(self, d):
lowered_d = {}
for key in d:
if is_basestring(key):
lowered_d[key.lower()] = d[key]
else: # pragma: no cover
lowered_d[key] = d[key]
return lowered_d
def setdefault(self, key, default):
if is_basestring(key):
key = key.lower()
super(CaseInsensitiveDict, self).setdefault(key, default)
def update(self, d):
super(CaseInsensitiveDict, self).update(self._get_lowered_d(d))
class OAuth2Auth(AuthBase):
''' Attaches OAuth 2 Authentication to a given Request object. '''
def __init__(self, access_token):
self.access_token = access_token
def __call__(self, r):
r.headers['Authorization'] = 'Bearer ' + self.access_token
return r
class OAuth1Auth(AuthBase):
''' Attaches OAuth 1 Authentication to a given Request object. '''
def __init__(self, oauth_params, realm=None):
self.oauth_params = oauth_params
self.realm = realm or ''
def _get_auth_header(self):
''' Constructs and returns an authentication header. '''
realm = 'realm="{realm}"'.format(realm=self.realm)
params = ['{k}="{v}"'.format(k=k, v=quote(str(v), safe=''))
for k, v in self.oauth_params.items()]
return 'OAuth ' + ','.join([realm] + params)
def __call__(self, r):
r.headers['Authorization'] = self._get_auth_header()
return r
|
turbokongen/home-assistant | refs/heads/dev | homeassistant/util/location.py | 16 | """
Module with location helpers.
detect_location_info and elevation are mocked by default during tests.
"""
import asyncio
import collections
import math
from typing import Any, Dict, Optional, Tuple
import aiohttp
ELEVATION_URL = "https://api.open-elevation.com/api/v1/lookup"
IP_API = "http://ip-api.com/json"
IPAPI = "https://ipapi.co/json/"
# Constants from https://github.com/maurycyp/vincenty
# Earth ellipsoid according to WGS 84
# Axis a of the ellipsoid (Radius of the earth in meters)
AXIS_A = 6378137
# Flattening f = (a-b) / a
FLATTENING = 1 / 298.257223563
# Axis b of the ellipsoid in meters.
AXIS_B = 6356752.314245
MILES_PER_KILOMETER = 0.621371
MAX_ITERATIONS = 200
CONVERGENCE_THRESHOLD = 1e-12
LocationInfo = collections.namedtuple(
"LocationInfo",
[
"ip",
"country_code",
"country_name",
"region_code",
"region_name",
"city",
"zip_code",
"time_zone",
"latitude",
"longitude",
"use_metric",
],
)
async def async_detect_location_info(
session: aiohttp.ClientSession,
) -> Optional[LocationInfo]:
"""Detect location information."""
data = await _get_ipapi(session)
if data is None:
data = await _get_ip_api(session)
if data is None:
return None
data["use_metric"] = data["country_code"] not in ("US", "MM", "LR")
return LocationInfo(**data)
def distance(
lat1: Optional[float], lon1: Optional[float], lat2: float, lon2: float
) -> Optional[float]:
"""Calculate the distance in meters between two points.
Async friendly.
"""
if lat1 is None or lon1 is None:
return None
result = vincenty((lat1, lon1), (lat2, lon2))
if result is None:
return None
return result * 1000
# Author: https://github.com/maurycyp
# Source: https://github.com/maurycyp/vincenty
# License: https://github.com/maurycyp/vincenty/blob/master/LICENSE
def vincenty(
point1: Tuple[float, float], point2: Tuple[float, float], miles: bool = False
) -> Optional[float]:
"""
Vincenty formula (inverse method) to calculate the distance.
Result in kilometers or miles between two points on the surface of a
spheroid.
Async friendly.
"""
# short-circuit coincident points
if point1[0] == point2[0] and point1[1] == point2[1]:
return 0.0
# pylint: disable=invalid-name
U1 = math.atan((1 - FLATTENING) * math.tan(math.radians(point1[0])))
U2 = math.atan((1 - FLATTENING) * math.tan(math.radians(point2[0])))
L = math.radians(point2[1] - point1[1])
Lambda = L
sinU1 = math.sin(U1)
cosU1 = math.cos(U1)
sinU2 = math.sin(U2)
cosU2 = math.cos(U2)
for _ in range(MAX_ITERATIONS):
sinLambda = math.sin(Lambda)
cosLambda = math.cos(Lambda)
sinSigma = math.sqrt(
(cosU2 * sinLambda) ** 2 + (cosU1 * sinU2 - sinU1 * cosU2 * cosLambda) ** 2
)
if sinSigma == 0.0:
return 0.0 # coincident points
cosSigma = sinU1 * sinU2 + cosU1 * cosU2 * cosLambda
sigma = math.atan2(sinSigma, cosSigma)
sinAlpha = cosU1 * cosU2 * sinLambda / sinSigma
cosSqAlpha = 1 - sinAlpha ** 2
try:
cos2SigmaM = cosSigma - 2 * sinU1 * sinU2 / cosSqAlpha
except ZeroDivisionError:
cos2SigmaM = 0
C = FLATTENING / 16 * cosSqAlpha * (4 + FLATTENING * (4 - 3 * cosSqAlpha))
LambdaPrev = Lambda
Lambda = L + (1 - C) * FLATTENING * sinAlpha * (
sigma
+ C * sinSigma * (cos2SigmaM + C * cosSigma * (-1 + 2 * cos2SigmaM ** 2))
)
if abs(Lambda - LambdaPrev) < CONVERGENCE_THRESHOLD:
break # successful convergence
else:
return None # failure to converge
uSq = cosSqAlpha * (AXIS_A ** 2 - AXIS_B ** 2) / (AXIS_B ** 2)
A = 1 + uSq / 16384 * (4096 + uSq * (-768 + uSq * (320 - 175 * uSq)))
B = uSq / 1024 * (256 + uSq * (-128 + uSq * (74 - 47 * uSq)))
deltaSigma = (
B
* sinSigma
* (
cos2SigmaM
+ B
/ 4
* (
cosSigma * (-1 + 2 * cos2SigmaM ** 2)
- B
/ 6
* cos2SigmaM
* (-3 + 4 * sinSigma ** 2)
* (-3 + 4 * cos2SigmaM ** 2)
)
)
)
s = AXIS_B * A * (sigma - deltaSigma)
s /= 1000 # Conversion of meters to kilometers
if miles:
s *= MILES_PER_KILOMETER # kilometers to miles
return round(s, 6)
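# Illustrative spot-check (coordinates and expected values taken from the
# upstream vincenty package's examples, so treat them as approximate):
#
#     boston = (42.3541165, -71.0693514)
#     newyork = (40.7791472, -73.9680804)
#     vincenty(boston, newyork)              # ~ 298.396057 (kilometers)
#     vincenty(boston, newyork, miles=True)  # ~ 185.414657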
async def _get_ipapi(session: aiohttp.ClientSession) -> Optional[Dict[str, Any]]:
"""Query ipapi.co for location data."""
try:
resp = await session.get(IPAPI, timeout=5)
except (aiohttp.ClientError, asyncio.TimeoutError):
return None
try:
raw_info = await resp.json()
except (aiohttp.ClientError, ValueError):
return None
# ipapi allows 30k free requests/month. Some users exhaust those.
if raw_info.get("latitude") == "Sign up to access":
return None
return {
"ip": raw_info.get("ip"),
"country_code": raw_info.get("country"),
"country_name": raw_info.get("country_name"),
"region_code": raw_info.get("region_code"),
"region_name": raw_info.get("region"),
"city": raw_info.get("city"),
"zip_code": raw_info.get("postal"),
"time_zone": raw_info.get("timezone"),
"latitude": raw_info.get("latitude"),
"longitude": raw_info.get("longitude"),
}
async def _get_ip_api(session: aiohttp.ClientSession) -> Optional[Dict[str, Any]]:
"""Query ip-api.com for location data."""
try:
resp = await session.get(IP_API, timeout=5)
except (aiohttp.ClientError, asyncio.TimeoutError):
return None
try:
raw_info = await resp.json()
except (aiohttp.ClientError, ValueError):
return None
return {
"ip": raw_info.get("query"),
"country_code": raw_info.get("countryCode"),
"country_name": raw_info.get("country"),
"region_code": raw_info.get("region"),
"region_name": raw_info.get("regionName"),
"city": raw_info.get("city"),
"zip_code": raw_info.get("zip"),
"time_zone": raw_info.get("timezone"),
"latitude": raw_info.get("lat"),
"longitude": raw_info.get("lon"),
}
|
c-kuhlman/vision | refs/heads/master | software/builder/switchWindowsBuild.py | 5 | import os, sys, argparse, stat, traceback, shutil, subprocess, logging, json
import glob
logging.basicConfig(level=logging.WARNING)
log = logging.getLogger(os.path.basename(__file__))
mapping = [
[ "backend/*.cpp" , None ],
[ "backend/*.d" , None ],
[ "backend/*.h" , None ],
[ "backend/*.i" , None ],
[ "backend/*.l" , None ],
[ "backend/*.lo" , None ],
[ "backend/*.y" , None ],
[ "backend/*.yo" , None ],
[ "dbupdate/*.cpp" , None ],
[ "dbupdate/*.d" , None ],
[ "dbupdate/*.h" , None ],
[ "dbupdate/*.i" , None ],
[ "network/*.cpp" , None ],
[ "network/*.d" , None ],
[ "network/*.h" , None ],
[ "network/*.i" , None ],
[ "tools/*.cpp" , None ],
[ "tools/*.d" , None ],
[ "tools/*.h" , None ],
[ "tools/*.i" , None ],
[ "kernel/*.cpp" , None ],
[ "kernel/*.d" , None ],
[ "kernel/*.h" , None ],
[ "kernel/*.i" , None ],
[ "backend\VpOpenDialog_Not.h" , "VpOpenDialog.h" ],
[ "kernel\VpFileMapping_Win32_1x.h" , "VpFileMapping.h" ],
[ "kernel\VpSocket_Win32_1x.h" , "VpSocket.h" ],
[ "kernel\Vp_Win32_1x.h" , "Vp.h" ],
]
sourceMap = {}
destMap = {}
movesFile = ".windowsMoves.json"
def addMove(source, dest):
if dest in destMap:
log.warning(
"destination collision at '" + dest + "': '" + source + "' and '" + destMap[dest]
)
del sourceMap[destMap[dest]]
if source in sourceMap:
log.warning(
"source collision at '" + source + "': '" + dest + "' and '" + sourceMap[source]
)
del destMap[sourceMap[source]]
sourceMap[source] = dest
destMap[dest] = source
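# For example, addMove("backend/foo.cpp", "foo.cpp") records the move in both
# directions; a later source or destination collision evicts the earlier
# entry and logs a warning.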
def buildMoves():
for maps in mapping:
srcglob = maps[0]
dest = maps[1]
moves = glob.glob(srcglob)
if dest is not None:
if len(moves) == 0:
log.warning(
"destination '" + dest + "' has no source: '" + str(srcglob)
)
elif len(moves) == 1:
source = moves[0]
addMove(source, dest)
else:
                raise RuntimeError(
                    "collision within a single glob: " + srcglob + " -> " + str(moves)
                )
else:
if len(moves) == 0:
log.warning(
"mapping '" + str(srcglob) + "' had no matches"
)
for move in moves:
source = move
dest = os.path.basename(source)
addMove(source, dest)
def saveMoves(filename):
with open(filename, "w") as fo:
json.dump(destMap, fo)
def readMoves(filename):
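    # The saved JSON maps destination -> original source, so loading it into
    # sourceMap makes the subsequent doMoves() call move every file back to
    # where it came from.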
global sourceMap
with open(filename) as fi:
sourceMap = json.load(fi)
def doMoves(movesDict):
for src, dest in movesDict.iteritems():
shutil.move(src, dest)
def main(args):
root = args.directory
os.chdir(os.path.join(root, "src"))
if not os.path.exists(movesFile):
print "Building File Moves"
buildMoves()
saveMoves(movesFile)
else:
readMoves(movesFile)
os.remove(movesFile)
print "Moving " + str(len(sourceMap)) + " files"
doMoves(sourceMap)
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="switchWindowsBuild.py moves vision source to a layout that builds in Visual Studio. And back again"
)
parser.add_argument("directory",
help="directory to switch"
)
main(parser.parse_args())
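# Example round trip (the path below is illustrative, not a real checkout):
#   python switchWindowsBuild.py C:\vision  (flattens src/ and writes .windowsMoves.json)
#   python switchWindowsBuild.py C:\vision  (reads that file and restores the layout)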
|
raybuhr/grab | refs/heads/master | grab/spider/__init__.py | 12 | from grab.spider.base import Spider # noqa
from grab.spider.data import Data # noqa
from grab.spider.task import Task, inline_task # noqa
from grab.spider.error import * # noqa
|
EvgeneOskin/termius-cli | refs/heads/master | termius/account/__init__.py | 4 | # -*- coding: utf-8 -*-
"""Package with account command set."""
|
songmonit/CTTMSONLINE_V8 | refs/heads/master | addons/email_template/wizard/mail_compose_message.py | 197 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2010-Today OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
from openerp import tools
from openerp.osv import osv, fields
def _reopen(self, res_id, model):
return {'type': 'ir.actions.act_window',
'view_mode': 'form',
'view_type': 'form',
'res_id': res_id,
'res_model': self._name,
'target': 'new',
# save original model in context, because selecting the list of available
# templates requires a model in context
'context': {
'default_model': model,
},
}
class mail_compose_message(osv.TransientModel):
_inherit = 'mail.compose.message'
def default_get(self, cr, uid, fields, context=None):
""" Override to pre-fill the data when having a template in single-email mode
and not going through the view: the on_change is not called in that case. """
if context is None:
context = {}
res = super(mail_compose_message, self).default_get(cr, uid, fields, context=context)
if res.get('composition_mode') != 'mass_mail' and context.get('default_template_id') and res.get('model') and res.get('res_id'):
res.update(
self.onchange_template_id(
cr, uid, [], context['default_template_id'], res.get('composition_mode'),
res.get('model'), res.get('res_id'), context=context
)['value']
)
        if fields is not None:
            for field in list(res.keys()):
                if field not in fields:
                    res.pop(field, None)
return res
_columns = {
'template_id': fields.many2one('email.template', 'Use template', select=True),
}
def send_mail(self, cr, uid, ids, context=None):
""" Override of send_mail to duplicate attachments linked to the email.template.
Indeed, basic mail.compose.message wizard duplicates attachments in mass
mailing mode. But in 'single post' mode, attachments of an email template
also have to be duplicated to avoid changing their ownership. """
if context is None:
context = {}
wizard_context = dict(context)
for wizard in self.browse(cr, uid, ids, context=context):
if wizard.template_id:
wizard_context['mail_notify_user_signature'] = False # template user_signature is added when generating body_html
wizard_context['mail_auto_delete'] = wizard.template_id.auto_delete # mass mailing: use template auto_delete value -> note, for emails mass mailing only
wizard_context['mail_server_id'] = wizard.template_id.mail_server_id.id
if not wizard.attachment_ids or wizard.composition_mode == 'mass_mail' or not wizard.template_id:
continue
new_attachment_ids = []
for attachment in wizard.attachment_ids:
if attachment in wizard.template_id.attachment_ids:
new_attachment_ids.append(self.pool.get('ir.attachment').copy(cr, uid, attachment.id, {'res_model': 'mail.compose.message', 'res_id': wizard.id}, context=context))
else:
new_attachment_ids.append(attachment.id)
self.write(cr, uid, wizard.id, {'attachment_ids': [(6, 0, new_attachment_ids)]}, context=context)
return super(mail_compose_message, self).send_mail(cr, uid, ids, context=wizard_context)
def onchange_template_id(self, cr, uid, ids, template_id, composition_mode, model, res_id, context=None):
""" - mass_mailing: we cannot render, so return the template values
- normal mode: return rendered values """
if template_id and composition_mode == 'mass_mail':
fields = ['subject', 'body_html', 'email_from', 'reply_to', 'mail_server_id']
template = self.pool['email.template'].browse(cr, uid, template_id, context=context)
values = dict((field, getattr(template, field)) for field in fields if getattr(template, field))
if template.attachment_ids:
values['attachment_ids'] = [att.id for att in template.attachment_ids]
if template.mail_server_id:
values['mail_server_id'] = template.mail_server_id.id
if template.user_signature and 'body_html' in values:
signature = self.pool.get('res.users').browse(cr, uid, uid, context).signature
values['body_html'] = tools.append_content_to_html(values['body_html'], signature, plaintext=False)
elif template_id:
values = self.generate_email_for_composer_batch(cr, uid, template_id, [res_id], context=context)[res_id]
# transform attachments into attachment_ids; not attached to the document because this will
# be done further in the posting process, allowing to clean database if email not send
ir_attach_obj = self.pool.get('ir.attachment')
for attach_fname, attach_datas in values.pop('attachments', []):
data_attach = {
'name': attach_fname,
'datas': attach_datas,
'datas_fname': attach_fname,
'res_model': 'mail.compose.message',
'res_id': 0,
'type': 'binary', # override default_type from context, possibly meant for another model!
}
values.setdefault('attachment_ids', list()).append(ir_attach_obj.create(cr, uid, data_attach, context=context))
else:
default_context = dict(context, default_composition_mode=composition_mode, default_model=model, default_res_id=res_id)
default_values = self.default_get(cr, uid, ['composition_mode', 'model', 'res_id', 'parent_id', 'partner_ids', 'subject', 'body', 'email_from', 'reply_to', 'attachment_ids', 'mail_server_id'], context=default_context)
values = dict((key, default_values[key]) for key in ['subject', 'body', 'partner_ids', 'email_from', 'reply_to', 'attachment_ids', 'mail_server_id'] if key in default_values)
if values.get('body_html'):
values['body'] = values.pop('body_html')
return {'value': values}
def save_as_template(self, cr, uid, ids, context=None):
""" hit save as template button: current form value will be a new
template attached to the current document. """
email_template = self.pool.get('email.template')
ir_model_pool = self.pool.get('ir.model')
for record in self.browse(cr, uid, ids, context=context):
model_ids = ir_model_pool.search(cr, uid, [('model', '=', record.model or 'mail.message')], context=context)
model_id = model_ids and model_ids[0] or False
model_name = ''
if model_id:
model_name = ir_model_pool.browse(cr, uid, model_id, context=context).name
template_name = "%s: %s" % (model_name, tools.ustr(record.subject))
values = {
'name': template_name,
'subject': record.subject or False,
'body_html': record.body or False,
'model_id': model_id or False,
'attachment_ids': [(6, 0, [att.id for att in record.attachment_ids])],
}
template_id = email_template.create(cr, uid, values, context=context)
# generate the saved template
template_values = record.onchange_template_id(template_id, record.composition_mode, record.model, record.res_id)['value']
template_values['template_id'] = template_id
record.write(template_values)
return _reopen(self, record.id, record.model)
#------------------------------------------------------
# Wizard validation and send
#------------------------------------------------------
def generate_email_for_composer_batch(self, cr, uid, template_id, res_ids, context=None, fields=None):
""" Call email_template.generate_email(), get fields relevant for
mail.compose.message, transform email_cc and email_to into partner_ids """
if context is None:
context = {}
if fields is None:
fields = ['subject', 'body_html', 'email_from', 'email_to', 'partner_to', 'email_cc', 'reply_to', 'attachment_ids', 'mail_server_id']
returned_fields = fields + ['partner_ids', 'attachments']
values = dict.fromkeys(res_ids, False)
ctx = dict(context, tpl_partners_only=True)
template_values = self.pool.get('email.template').generate_email_batch(cr, uid, template_id, res_ids, fields=fields, context=ctx)
for res_id in res_ids:
res_id_values = dict((field, template_values[res_id][field]) for field in returned_fields if template_values[res_id].get(field))
res_id_values['body'] = res_id_values.pop('body_html', '')
values[res_id] = res_id_values
return values
def render_message_batch(self, cr, uid, wizard, res_ids, context=None):
""" Override to handle templates. """
# generate composer values
composer_values = super(mail_compose_message, self).render_message_batch(cr, uid, wizard, res_ids, context)
# generate template-based values
if wizard.template_id:
template_values = self.generate_email_for_composer_batch(
cr, uid, wizard.template_id.id, res_ids,
fields=['email_to', 'partner_to', 'email_cc', 'attachment_ids', 'mail_server_id'],
context=context)
else:
template_values = {}
for res_id in res_ids:
if template_values.get(res_id):
# recipients are managed by the template
composer_values[res_id].pop('partner_ids')
composer_values[res_id].pop('email_to')
composer_values[res_id].pop('email_cc')
# remove attachments from template values as they should not be rendered
template_values[res_id].pop('attachment_ids', None)
else:
template_values[res_id] = dict()
# update template values by composer values
template_values[res_id].update(composer_values[res_id])
return template_values
def render_template_batch(self, cr, uid, template, model, res_ids, context=None, post_process=False):
return self.pool.get('email.template').render_template_batch(cr, uid, template, model, res_ids, context=context, post_process=post_process)
# Compatibility methods
def generate_email_for_composer(self, cr, uid, template_id, res_id, context=None):
return self.generate_email_for_composer_batch(cr, uid, template_id, [res_id], context)[res_id]
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
radzhome/AWS-ElasticBeanstalk-CLI | refs/heads/master | eb/macosx/python2.7/lib/iam/servicecall.py | 4 | #!/usr/bin/env python
# ==============================================================================
# Copyright 2012 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Amazon Software License (the "License"). You may not use
# this file except in compliance with the License. A copy of the License is
# located at
#
# http://aws.amazon.com/asl/
#
# or in the "license" file accompanying this file. This file is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, express or
# implied. See the License for the specific language governing permissions
# and limitations under the License.
#==============================================================================
import logging as _logging
from lib.utility import misc
from lib.aws.webservice import AWSQueryClient, AWSSignature
from lib.aws.exception import AccessDeniedException, AwsErrorCode, AwsServiceException, \
MissingParameterException, InsufficientPrivilegesException, \
InvalidParameterValueException, OptInRequiredException
from lib.iam.exception import IamErrorCode, IamEntityAlreadyExistsException, \
IamNoSuchEntityException, IamMalformedPolicyDocumentException, IamLimitExceededException
from lib.iam.request import Request, Response
from lib.iam.model import InstanceProfile, Role
from scli.constants import IamEndpoint, IamRegion
log = _logging.getLogger('aws')
class IamClient(object):
'''
Web service client for IAM
'''
_signature_version = AWSSignature.SigV4
_api_version = u'2010-05-08'
_service_name = u'iam'
def __init__(self, accessKey, secretKey, result_format='json'):
'''
Constructor
'''
self._accessKey = accessKey
self._secretKey = secretKey
self._endpoint = IamEndpoint
self._format = result_format
self._region = IamRegion
self._client = AWSQueryClient(self._accessKey, self._secretKey,
self._endpoint, self._region,
self._service_name, self._format,
self._signature_version, self._api_version)
def call(self, request):
'''Make API call and translate AWSServiceException to more specific exception'''
try:
log.debug(request)
return_msg = self._client.call(request, self._format)
log.debug(u'Request ID: {0}'.format(return_msg.json().values()[0] \
[u'ResponseMetadata'][u'RequestId']))
return return_msg.json()
except AwsServiceException as ex:
log.debug(misc.to_unicode(ex))
# Translate general IAM exception
if misc.string_equal_ignore_case(ex.code, AwsErrorCode.AccessDenied):
raise AccessDeniedException(ex)
elif misc.string_equal_ignore_case(ex.code, AwsErrorCode.OptInRequired):
raise OptInRequiredException(ex)
elif misc.string_equal_ignore_case(ex.code, AwsErrorCode.InsufficientPrivileges):
raise InsufficientPrivilegesException(ex)
elif misc.string_equal_ignore_case(ex.code, AwsErrorCode.InvalidParameterValue):
raise InvalidParameterValueException(ex)
elif misc.string_equal_ignore_case(ex.code, AwsErrorCode.MissingParameter):
raise MissingParameterException(ex)
elif misc.string_equal_ignore_case(ex.code, IamErrorCode.EntityAlreadyExists):
raise IamEntityAlreadyExistsException(ex)
elif misc.string_equal_ignore_case(ex.code, IamErrorCode.NoSuchEntity):
raise IamNoSuchEntityException(ex)
elif misc.string_equal_ignore_case(ex.code, IamErrorCode.MalformedPolicyDocument):
raise IamMalformedPolicyDocumentException(ex)
elif misc.string_equal_ignore_case(ex.code, IamErrorCode.LimitExceeded):
raise IamLimitExceededException(ex)
raise
#---------------------------------------
# service calls
def create_role(self, role_name, assume_role_policy_document, path=None):
request = Request()
request.set_action(u'CreateRole')
request.set_role_name(role_name)
request.set_assume_role_policy_document(assume_role_policy_document)
if path is not None:
request.set_path(path)
try:
response = self.call(request)
except AwsServiceException:
raise
role = Role.from_json(response[u'CreateRoleResponse'][u'CreateRoleResult'][u'Role'])
request_id = response[u'CreateRoleResponse'][u'ResponseMetadata'][u'RequestId']
return Response(request_id, role)
def create_instance_profile(self, instance_profile_name, path=None):
request = Request()
request.set_action(u'CreateInstanceProfile')
request.set_instance_profile_name(instance_profile_name)
if path is not None:
request.set_path(path)
try:
response = self.call(request)
except AwsServiceException:
raise
profile = InstanceProfile.from_json(response[u'CreateInstanceProfileResponse'] \
[u'CreateInstanceProfileResult'][u'InstanceProfile'])
request_id = response[u'CreateInstanceProfileResponse'] \
[u'ResponseMetadata'][u'RequestId']
return Response(request_id, profile)
def add_role_to_instance_profile(self, role_name, instance_profile_name):
request = Request()
request.set_action(u'AddRoleToInstanceProfile')
request.set_role_name(role_name)
request.set_instance_profile_name(instance_profile_name)
try:
response = self.call(request)
except AwsServiceException:
raise
request_id = response[u'AddRoleToInstanceProfileResponse'] \
[u'ResponseMetadata'][u'RequestId']
return Response(request_id)
def put_role_policy(self, role_name, policy_name, policy_document):
request = Request()
request.set_action(u'PutRolePolicy')
request.set_role_name(role_name)
request.set_policy_name(policy_name)
request.set_policy_document(policy_document)
try:
response = self.call(request)
except AwsServiceException:
raise
request_id = response[u'PutRolePolicyResponse'] \
[u'ResponseMetadata'][u'RequestId']
return Response(request_id)
def list_instance_profiles(self, max_items=None, path_prefix=None, marker=None):
request = Request()
request.set_action(u'ListInstanceProfiles')
if max_items is not None:
request.set_max_items(max_items)
if path_prefix is not None:
request.set_path_prefix(path_prefix)
if marker is not None:
request.set_marker(marker)
try:
response = self.call(request)
except AwsServiceException:
raise
results = response[u'ListInstanceProfilesResponse'] \
[u'ListInstanceProfilesResult'][u'InstanceProfiles']
request_id = response[u'ListInstanceProfilesResponse'] \
[u'ResponseMetadata'][u'RequestId']
profiles = []
for result in results:
profiles.append(InstanceProfile.from_json(result))
return Response(request_id, profiles)
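# Rough usage sketch; the credential strings are placeholders and the
# attribute names on Response are assumptions based on how it is constructed
# above:
#   client = IamClient(u'ACCESS_KEY_ID', u'SECRET_ACCESS_KEY')
#   response = client.list_instance_profiles(max_items=10)
#   log.info(u'Request %s returned %d profiles',
#            response.request_id, len(response.result))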
|
lidavidm/mathics-heroku | refs/heads/master | venv/lib/python2.7/site-packages/django/contrib/admin/options.py | 47 | import copy
import operator
from functools import partial, reduce, update_wrapper
from django import forms
from django.conf import settings
from django.contrib import messages
from django.contrib.admin import widgets, helpers
from django.contrib.admin.util import (unquote, flatten_fieldsets, get_deleted_objects,
model_format_dict, NestedObjects, lookup_needs_distinct)
from django.contrib.admin import validation
from django.contrib.admin.templatetags.admin_static import static
from django.contrib.admin.templatetags.admin_urls import add_preserved_filters
from django.contrib.auth import get_permission_codename
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import PermissionDenied, ValidationError, FieldError
from django.core.paginator import Paginator
from django.core.urlresolvers import reverse
from django.db import models, transaction, router
from django.db.models.constants import LOOKUP_SEP
from django.db.models.related import RelatedObject
from django.db.models.fields import BLANK_CHOICE_DASH, FieldDoesNotExist
from django.db.models.sql.constants import QUERY_TERMS
from django.forms.formsets import all_valid, DELETION_FIELD_NAME
from django.forms.models import (modelform_factory, modelformset_factory,
inlineformset_factory, BaseInlineFormSet, modelform_defines_fields)
from django.http import Http404, HttpResponseRedirect
from django.http.response import HttpResponseBase
from django.shortcuts import get_object_or_404
from django.template.response import SimpleTemplateResponse, TemplateResponse
from django.utils.decorators import method_decorator
from django.utils.datastructures import SortedDict
from django.utils.html import escape, escapejs
from django.utils.safestring import mark_safe
from django.utils import six
from django.utils.deprecation import RenameMethodsBase
from django.utils.http import urlencode
from django.utils.text import capfirst, get_text_list
from django.utils.translation import ugettext as _
from django.utils.translation import ungettext
from django.utils.encoding import force_text
from django.views.decorators.csrf import csrf_protect
IS_POPUP_VAR = '_popup'
HORIZONTAL, VERTICAL = 1, 2
# returns the <ul> class for a given radio_admin field
get_ul_class = lambda x: 'radiolist%s' % (' inline' if x == HORIZONTAL else '')
class IncorrectLookupParameters(Exception):
pass
# Defaults for formfield_overrides. ModelAdmin subclasses can change this
# by adding to ModelAdmin.formfield_overrides.
FORMFIELD_FOR_DBFIELD_DEFAULTS = {
models.DateTimeField: {
'form_class': forms.SplitDateTimeField,
'widget': widgets.AdminSplitDateTime
},
models.DateField: {'widget': widgets.AdminDateWidget},
models.TimeField: {'widget': widgets.AdminTimeWidget},
models.TextField: {'widget': widgets.AdminTextareaWidget},
models.URLField: {'widget': widgets.AdminURLFieldWidget},
models.IntegerField: {'widget': widgets.AdminIntegerFieldWidget},
models.BigIntegerField: {'widget': widgets.AdminBigIntegerFieldWidget},
models.CharField: {'widget': widgets.AdminTextInputWidget},
models.ImageField: {'widget': widgets.AdminFileWidget},
models.FileField: {'widget': widgets.AdminFileWidget},
models.EmailField: {'widget': widgets.AdminEmailInputWidget},
}
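# For instance, a ModelAdmin subclass can restyle every TextField by extending
# these defaults (a sketch; `MyRichTextWidget` is hypothetical):
#
#     class ArticleAdmin(ModelAdmin):
#         formfield_overrides = {
#             models.TextField: {'widget': MyRichTextWidget},
#         }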
csrf_protect_m = method_decorator(csrf_protect)
class RenameBaseModelAdminMethods(forms.MediaDefiningClass, RenameMethodsBase):
renamed_methods = (
('queryset', 'get_queryset', PendingDeprecationWarning),
)
class BaseModelAdmin(six.with_metaclass(RenameBaseModelAdminMethods)):
"""Functionality common to both ModelAdmin and InlineAdmin."""
raw_id_fields = ()
fields = None
exclude = None
fieldsets = None
form = forms.ModelForm
filter_vertical = ()
filter_horizontal = ()
radio_fields = {}
prepopulated_fields = {}
formfield_overrides = {}
readonly_fields = ()
ordering = None
# validation
validator_class = validation.BaseValidator
@classmethod
def validate(cls, model):
validator = cls.validator_class()
validator.validate(cls, model)
def __init__(self):
overrides = FORMFIELD_FOR_DBFIELD_DEFAULTS.copy()
overrides.update(self.formfield_overrides)
self.formfield_overrides = overrides
def formfield_for_dbfield(self, db_field, **kwargs):
"""
Hook for specifying the form Field instance for a given database Field
instance.
If kwargs are given, they're passed to the form Field's constructor.
"""
request = kwargs.pop("request", None)
# If the field specifies choices, we don't need to look for special
# admin widgets - we just need to use a select widget of some kind.
if db_field.choices:
return self.formfield_for_choice_field(db_field, request, **kwargs)
# ForeignKey or ManyToManyFields
if isinstance(db_field, (models.ForeignKey, models.ManyToManyField)):
# Combine the field kwargs with any options for formfield_overrides.
# Make sure the passed in **kwargs override anything in
# formfield_overrides because **kwargs is more specific, and should
# always win.
if db_field.__class__ in self.formfield_overrides:
kwargs = dict(self.formfield_overrides[db_field.__class__], **kwargs)
# Get the correct formfield.
if isinstance(db_field, models.ForeignKey):
formfield = self.formfield_for_foreignkey(db_field, request, **kwargs)
elif isinstance(db_field, models.ManyToManyField):
formfield = self.formfield_for_manytomany(db_field, request, **kwargs)
# For non-raw_id fields, wrap the widget with a wrapper that adds
# extra HTML -- the "add other" interface -- to the end of the
# rendered output. formfield can be None if it came from a
# OneToOneField with parent_link=True or a M2M intermediary.
if formfield and db_field.name not in self.raw_id_fields:
related_modeladmin = self.admin_site._registry.get(
db_field.rel.to)
can_add_related = bool(related_modeladmin and
related_modeladmin.has_add_permission(request))
formfield.widget = widgets.RelatedFieldWidgetWrapper(
formfield.widget, db_field.rel, self.admin_site,
can_add_related=can_add_related)
return formfield
# If we've got overrides for the formfield defined, use 'em. **kwargs
# passed to formfield_for_dbfield override the defaults.
for klass in db_field.__class__.mro():
if klass in self.formfield_overrides:
kwargs = dict(copy.deepcopy(self.formfield_overrides[klass]), **kwargs)
return db_field.formfield(**kwargs)
# For any other type of field, just call its formfield() method.
return db_field.formfield(**kwargs)
def formfield_for_choice_field(self, db_field, request=None, **kwargs):
"""
Get a form Field for a database Field that has declared choices.
"""
# If the field is named as a radio_field, use a RadioSelect
if db_field.name in self.radio_fields:
# Avoid stomping on custom widget/choices arguments.
if 'widget' not in kwargs:
kwargs['widget'] = widgets.AdminRadioSelect(attrs={
'class': get_ul_class(self.radio_fields[db_field.name]),
})
if 'choices' not in kwargs:
kwargs['choices'] = db_field.get_choices(
include_blank=db_field.blank,
blank_choice=[('', _('None'))]
)
return db_field.formfield(**kwargs)
def get_field_queryset(self, db, db_field, request):
"""
If the ModelAdmin specifies ordering, the queryset should respect that
ordering. Otherwise don't specify the queryset, let the field decide
(returns None in that case).
"""
related_admin = self.admin_site._registry.get(db_field.rel.to, None)
if related_admin is not None:
ordering = related_admin.get_ordering(request)
if ordering is not None and ordering != ():
return db_field.rel.to._default_manager.using(db).order_by(*ordering).complex_filter(db_field.rel.limit_choices_to)
return None
def formfield_for_foreignkey(self, db_field, request=None, **kwargs):
"""
Get a form Field for a ForeignKey.
"""
db = kwargs.get('using')
if db_field.name in self.raw_id_fields:
kwargs['widget'] = widgets.ForeignKeyRawIdWidget(db_field.rel,
self.admin_site, using=db)
elif db_field.name in self.radio_fields:
kwargs['widget'] = widgets.AdminRadioSelect(attrs={
'class': get_ul_class(self.radio_fields[db_field.name]),
})
kwargs['empty_label'] = _('None') if db_field.blank else None
        if 'queryset' not in kwargs:
queryset = self.get_field_queryset(db, db_field, request)
if queryset is not None:
kwargs['queryset'] = queryset
return db_field.formfield(**kwargs)
def formfield_for_manytomany(self, db_field, request=None, **kwargs):
"""
Get a form Field for a ManyToManyField.
"""
# If it uses an intermediary model that isn't auto created, don't show
# a field in admin.
if not db_field.rel.through._meta.auto_created:
return None
db = kwargs.get('using')
if db_field.name in self.raw_id_fields:
kwargs['widget'] = widgets.ManyToManyRawIdWidget(db_field.rel,
self.admin_site, using=db)
kwargs['help_text'] = ''
elif db_field.name in (list(self.filter_vertical) + list(self.filter_horizontal)):
kwargs['widget'] = widgets.FilteredSelectMultiple(db_field.verbose_name, (db_field.name in self.filter_vertical))
        if 'queryset' not in kwargs:
queryset = self.get_field_queryset(db, db_field, request)
if queryset is not None:
kwargs['queryset'] = queryset
return db_field.formfield(**kwargs)
def _declared_fieldsets(self):
if self.fieldsets:
return self.fieldsets
elif self.fields:
return [(None, {'fields': self.fields})]
return None
declared_fieldsets = property(_declared_fieldsets)
def get_ordering(self, request):
"""
Hook for specifying field ordering.
"""
return self.ordering or () # otherwise we might try to *None, which is bad ;)
def get_readonly_fields(self, request, obj=None):
"""
Hook for specifying custom readonly fields.
"""
return self.readonly_fields
def get_prepopulated_fields(self, request, obj=None):
"""
Hook for specifying custom prepopulated fields.
"""
return self.prepopulated_fields
def get_queryset(self, request):
"""
Returns a QuerySet of all model instances that can be edited by the
admin site. This is used by changelist_view.
"""
qs = self.model._default_manager.get_queryset()
# TODO: this should be handled by some parameter to the ChangeList.
ordering = self.get_ordering(request)
if ordering:
qs = qs.order_by(*ordering)
return qs
def lookup_allowed(self, lookup, value):
model = self.model
# Check FKey lookups that are allowed, so that popups produced by
# ForeignKeyRawIdWidget, on the basis of ForeignKey.limit_choices_to,
# are allowed to work.
for l in model._meta.related_fkey_lookups:
for k, v in widgets.url_params_from_lookup_dict(l).items():
if k == lookup and v == value:
return True
parts = lookup.split(LOOKUP_SEP)
# Last term in lookup is a query term (__exact, __startswith etc)
# This term can be ignored.
if len(parts) > 1 and parts[-1] in QUERY_TERMS:
parts.pop()
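            # e.g. 'author__name__iexact' -> ['author', 'name']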
# Special case -- foo__id__exact and foo__id queries are implied
# if foo has been specifically included in the lookup list; so
# drop __id if it is the last part. However, first we need to find
# the pk attribute name.
rel_name = None
for part in parts[:-1]:
try:
field, _, _, _ = model._meta.get_field_by_name(part)
except FieldDoesNotExist:
# Lookups on non-existent fields are ok, since they're ignored
# later.
return True
if hasattr(field, 'rel'):
if field.rel is None:
# This property or relation doesn't exist, but it's allowed
# since it's ignored in ChangeList.get_filters().
return True
model = field.rel.to
rel_name = field.rel.get_related_field().name
elif isinstance(field, RelatedObject):
model = field.model
rel_name = model._meta.pk.name
else:
rel_name = None
if rel_name and len(parts) > 1 and parts[-1] == rel_name:
parts.pop()
if len(parts) == 1:
return True
clean_lookup = LOOKUP_SEP.join(parts)
return clean_lookup in self.list_filter or clean_lookup == self.date_hierarchy
def has_add_permission(self, request):
"""
Returns True if the given request has permission to add an object.
Can be overridden by the user in subclasses.
"""
opts = self.opts
codename = get_permission_codename('add', opts)
return request.user.has_perm("%s.%s" % (opts.app_label, codename))
def has_change_permission(self, request, obj=None):
"""
Returns True if the given request has permission to change the given
Django model instance, the default implementation doesn't examine the
`obj` parameter.
Can be overridden by the user in subclasses. In such case it should
return True if the given request has permission to change the `obj`
model instance. If `obj` is None, this should return True if the given
request has permission to change *any* object of the given type.
"""
opts = self.opts
codename = get_permission_codename('change', opts)
return request.user.has_perm("%s.%s" % (opts.app_label, codename))
def has_delete_permission(self, request, obj=None):
"""
Returns True if the given request has permission to change the given
Django model instance, the default implementation doesn't examine the
`obj` parameter.
Can be overridden by the user in subclasses. In such case it should
return True if the given request has permission to delete the `obj`
model instance. If `obj` is None, this should return True if the given
request has permission to delete *any* object of the given type.
"""
opts = self.opts
codename = get_permission_codename('delete', opts)
return request.user.has_perm("%s.%s" % (opts.app_label, codename))
class ModelAdmin(BaseModelAdmin):
"Encapsulates all admin options and functionality for a given model."
list_display = ('__str__',)
list_display_links = ()
list_filter = ()
list_select_related = False
list_per_page = 100
list_max_show_all = 200
list_editable = ()
search_fields = ()
date_hierarchy = None
save_as = False
save_on_top = False
paginator = Paginator
preserve_filters = True
inlines = []
# Custom templates (designed to be over-ridden in subclasses)
add_form_template = None
change_form_template = None
change_list_template = None
delete_confirmation_template = None
delete_selected_confirmation_template = None
object_history_template = None
# Actions
actions = []
action_form = helpers.ActionForm
actions_on_top = True
actions_on_bottom = False
actions_selection_counter = True
# validation
validator_class = validation.ModelAdminValidator
def __init__(self, model, admin_site):
self.model = model
self.opts = model._meta
self.admin_site = admin_site
super(ModelAdmin, self).__init__()
def get_inline_instances(self, request, obj=None):
inline_instances = []
for inline_class in self.inlines:
inline = inline_class(self.model, self.admin_site)
if request:
if not (inline.has_add_permission(request) or
inline.has_change_permission(request, obj) or
inline.has_delete_permission(request, obj)):
continue
if not inline.has_add_permission(request):
inline.max_num = 0
inline_instances.append(inline)
return inline_instances
def get_urls(self):
from django.conf.urls import patterns, url
def wrap(view):
def wrapper(*args, **kwargs):
return self.admin_site.admin_view(view)(*args, **kwargs)
return update_wrapper(wrapper, view)
info = self.model._meta.app_label, self.model._meta.model_name
urlpatterns = patterns('',
url(r'^$',
wrap(self.changelist_view),
name='%s_%s_changelist' % info),
url(r'^add/$',
wrap(self.add_view),
name='%s_%s_add' % info),
url(r'^(.+)/history/$',
wrap(self.history_view),
name='%s_%s_history' % info),
url(r'^(.+)/delete/$',
wrap(self.delete_view),
name='%s_%s_delete' % info),
url(r'^(.+)/$',
wrap(self.change_view),
name='%s_%s_change' % info),
)
return urlpatterns
def urls(self):
return self.get_urls()
urls = property(urls)
@property
def media(self):
extra = '' if settings.DEBUG else '.min'
js = [
'core.js',
'admin/RelatedObjectLookups.js',
'jquery%s.js' % extra,
'jquery.init.js'
]
if self.actions is not None:
js.append('actions%s.js' % extra)
if self.prepopulated_fields:
js.extend(['urlify.js', 'prepopulate%s.js' % extra])
return forms.Media(js=[static('admin/js/%s' % url) for url in js])
def get_model_perms(self, request):
"""
Returns a dict of all perms for this model. This dict has the keys
``add``, ``change``, and ``delete`` mapping to the True/False for each
of those actions.
"""
return {
'add': self.has_add_permission(request),
'change': self.has_change_permission(request),
'delete': self.has_delete_permission(request),
}
def get_fieldsets(self, request, obj=None):
"Hook for specifying fieldsets for the add form."
if self.declared_fieldsets:
return self.declared_fieldsets
form = self.get_form(request, obj, fields=None)
fields = list(form.base_fields) + list(self.get_readonly_fields(request, obj))
return [(None, {'fields': fields})]
def get_form(self, request, obj=None, **kwargs):
"""
Returns a Form class for use in the admin add view. This is used by
add_view and change_view.
"""
if 'fields' in kwargs:
fields = kwargs.pop('fields')
else:
fields = flatten_fieldsets(self.get_fieldsets(request, obj))
if self.exclude is None:
exclude = []
else:
exclude = list(self.exclude)
exclude.extend(self.get_readonly_fields(request, obj))
if self.exclude is None and hasattr(self.form, '_meta') and self.form._meta.exclude:
# Take the custom ModelForm's Meta.exclude into account only if the
# ModelAdmin doesn't define its own.
exclude.extend(self.form._meta.exclude)
# if exclude is an empty list we pass None to be consistent with the
# default on modelform_factory
exclude = exclude or None
defaults = {
"form": self.form,
"fields": fields,
"exclude": exclude,
"formfield_callback": partial(self.formfield_for_dbfield, request=request),
}
defaults.update(kwargs)
if defaults['fields'] is None and not modelform_defines_fields(defaults['form']):
defaults['fields'] = forms.ALL_FIELDS
try:
return modelform_factory(self.model, **defaults)
except FieldError as e:
raise FieldError('%s. Check fields/fieldsets/exclude attributes of class %s.'
% (e, self.__class__.__name__))
def get_changelist(self, request, **kwargs):
"""
Returns the ChangeList class for use on the changelist page.
"""
from django.contrib.admin.views.main import ChangeList
return ChangeList
def get_object(self, request, object_id):
"""
Returns an instance matching the primary key provided. ``None`` is
returned if no match is found (or the object_id failed validation
against the primary key field).
"""
queryset = self.get_queryset(request)
model = queryset.model
try:
object_id = model._meta.pk.to_python(object_id)
return queryset.get(pk=object_id)
except (model.DoesNotExist, ValidationError, ValueError):
return None
def get_changelist_form(self, request, **kwargs):
"""
Returns a Form class for use in the Formset on the changelist page.
"""
defaults = {
"formfield_callback": partial(self.formfield_for_dbfield, request=request),
}
defaults.update(kwargs)
if (defaults.get('fields') is None
and not modelform_defines_fields(defaults.get('form'))):
defaults['fields'] = forms.ALL_FIELDS
return modelform_factory(self.model, **defaults)
def get_changelist_formset(self, request, **kwargs):
"""
Returns a FormSet class for use on the changelist page if list_editable
is used.
"""
defaults = {
"formfield_callback": partial(self.formfield_for_dbfield, request=request),
}
defaults.update(kwargs)
return modelformset_factory(self.model,
self.get_changelist_form(request), extra=0,
fields=self.list_editable, **defaults)
def get_formsets(self, request, obj=None):
for inline in self.get_inline_instances(request, obj):
yield inline.get_formset(request, obj)
def get_paginator(self, request, queryset, per_page, orphans=0, allow_empty_first_page=True):
return self.paginator(queryset, per_page, orphans, allow_empty_first_page)
def log_addition(self, request, object):
"""
Log that an object has been successfully added.
The default implementation creates an admin LogEntry object.
"""
from django.contrib.admin.models import LogEntry, ADDITION
LogEntry.objects.log_action(
user_id=request.user.pk,
content_type_id=ContentType.objects.get_for_model(object).pk,
object_id=object.pk,
object_repr=force_text(object),
action_flag=ADDITION
)
def log_change(self, request, object, message):
"""
Log that an object has been successfully changed.
The default implementation creates an admin LogEntry object.
"""
from django.contrib.admin.models import LogEntry, CHANGE
LogEntry.objects.log_action(
user_id=request.user.pk,
content_type_id=ContentType.objects.get_for_model(object).pk,
object_id=object.pk,
object_repr=force_text(object),
action_flag=CHANGE,
change_message=message
)
def log_deletion(self, request, object, object_repr):
"""
Log that an object will be deleted. Note that this method is called
before the deletion.
The default implementation creates an admin LogEntry object.
"""
from django.contrib.admin.models import LogEntry, DELETION
LogEntry.objects.log_action(
user_id=request.user.pk,
content_type_id=ContentType.objects.get_for_model(self.model).pk,
object_id=object.pk,
object_repr=object_repr,
action_flag=DELETION
)
def action_checkbox(self, obj):
"""
A list_display column containing a checkbox widget.
"""
return helpers.checkbox.render(helpers.ACTION_CHECKBOX_NAME, force_text(obj.pk))
action_checkbox.short_description = mark_safe('<input type="checkbox" id="action-toggle" />')
action_checkbox.allow_tags = True
def get_actions(self, request):
"""
Return a dictionary mapping the names of all actions for this
ModelAdmin to a tuple of (callable, name, description) for each action.
"""
# If self.actions is explicitly set to None that means that we don't
# want *any* actions enabled on this page.
from django.contrib.admin.views.main import _is_changelist_popup
if self.actions is None or _is_changelist_popup(request):
return SortedDict()
actions = []
# Gather actions from the admin site first
for (name, func) in self.admin_site.actions:
description = getattr(func, 'short_description', name.replace('_', ' '))
actions.append((func, name, description))
# Then gather them from the model admin and all parent classes,
# starting with self and working back up.
for klass in self.__class__.mro()[::-1]:
class_actions = getattr(klass, 'actions', [])
# Avoid trying to iterate over None
if not class_actions:
continue
actions.extend([self.get_action(action) for action in class_actions])
# get_action might have returned None, so filter any of those out.
actions = filter(None, actions)
# Convert the actions into a SortedDict keyed by name.
actions = SortedDict([
(name, (func, name, desc))
for func, name, desc in actions
])
return actions
def get_action_choices(self, request, default_choices=BLANK_CHOICE_DASH):
"""
Return a list of choices for use in a form object. Each choice is a
tuple (name, description).
"""
choices = [] + default_choices
for func, name, description in six.itervalues(self.get_actions(request)):
choice = (name, description % model_format_dict(self.opts))
choices.append(choice)
return choices
def get_action(self, action):
"""
Return a given action from a parameter, which can either be a callable,
or the name of a method on the ModelAdmin. Return is a tuple of
(callable, name, description).
"""
# If the action is a callable, just use it.
if callable(action):
func = action
action = action.__name__
# Next, look for a method. Grab it off self.__class__ to get an unbound
# method instead of a bound one; this ensures that the calling
# conventions are the same for functions and methods.
elif hasattr(self.__class__, action):
func = getattr(self.__class__, action)
# Finally, look for a named method on the admin site
else:
try:
func = self.admin_site.get_action(action)
except KeyError:
return None
if hasattr(func, 'short_description'):
description = func.short_description
else:
description = capfirst(action.replace('_', ' '))
return func, action, description
def get_list_display(self, request):
"""
Return a sequence containing the fields to be displayed on the
changelist.
"""
return self.list_display
def get_list_display_links(self, request, list_display):
"""
Return a sequence containing the fields to be displayed as links
on the changelist. The list_display parameter is the list of fields
returned by get_list_display().
"""
if self.list_display_links or not list_display:
return self.list_display_links
else:
# Use only the first item in list_display as link
return list(list_display)[:1]
def get_list_filter(self, request):
"""
Returns a sequence containing the fields to be displayed as filters in
the right sidebar of the changelist page.
"""
return self.list_filter
def get_search_results(self, request, queryset, search_term):
"""
Returns a tuple containing a queryset to implement the search,
and a boolean indicating if the results may contain duplicates.
"""
# Apply keyword searches.
def construct_search(field_name):
if field_name.startswith('^'):
return "%s__istartswith" % field_name[1:]
elif field_name.startswith('='):
return "%s__iexact" % field_name[1:]
elif field_name.startswith('@'):
return "%s__search" % field_name[1:]
else:
return "%s__icontains" % field_name
use_distinct = False
if self.search_fields and search_term:
orm_lookups = [construct_search(str(search_field))
for search_field in self.search_fields]
for bit in search_term.split():
or_queries = [models.Q(**{orm_lookup: bit})
for orm_lookup in orm_lookups]
queryset = queryset.filter(reduce(operator.or_, or_queries))
if not use_distinct:
for search_spec in orm_lookups:
if lookup_needs_distinct(self.opts, search_spec):
use_distinct = True
break
return queryset, use_distinct
def get_preserved_filters(self, request):
"""
Returns the preserved filters querystring.
"""
match = request.resolver_match
if self.preserve_filters and match:
opts = self.model._meta
current_url = '%s:%s' % (match.app_name, match.url_name)
changelist_url = 'admin:%s_%s_changelist' % (opts.app_label, opts.model_name)
if current_url == changelist_url:
preserved_filters = request.GET.urlencode()
else:
preserved_filters = request.GET.get('_changelist_filters')
if preserved_filters:
return urlencode({'_changelist_filters': preserved_filters})
return ''
def construct_change_message(self, request, form, formsets):
"""
Construct a change message from a changed object.
"""
change_message = []
if form.changed_data:
change_message.append(_('Changed %s.') % get_text_list(form.changed_data, _('and')))
if formsets:
for formset in formsets:
for added_object in formset.new_objects:
change_message.append(_('Added %(name)s "%(object)s".')
% {'name': force_text(added_object._meta.verbose_name),
'object': force_text(added_object)})
for changed_object, changed_fields in formset.changed_objects:
change_message.append(_('Changed %(list)s for %(name)s "%(object)s".')
% {'list': get_text_list(changed_fields, _('and')),
'name': force_text(changed_object._meta.verbose_name),
'object': force_text(changed_object)})
for deleted_object in formset.deleted_objects:
change_message.append(_('Deleted %(name)s "%(object)s".')
% {'name': force_text(deleted_object._meta.verbose_name),
'object': force_text(deleted_object)})
change_message = ' '.join(change_message)
return change_message or _('No fields changed.')
def message_user(self, request, message, level=messages.INFO, extra_tags='',
fail_silently=False):
"""
Send a message to the user. The default implementation
posts a message using the django.contrib.messages backend.
Exposes almost the same API as messages.add_message(), but accepts the
positional arguments in a different order to maintain backwards
compatibility. For convenience, it accepts the `level` argument as
a string rather than the usual level number.
"""
if not isinstance(level, int):
# attempt to get the level if passed a string
try:
level = getattr(messages.constants, level.upper())
except AttributeError:
levels = messages.constants.DEFAULT_TAGS.values()
levels_repr = ', '.join('`%s`' % l for l in levels)
raise ValueError('Bad message level string: `%s`. '
'Possible values are: %s' % (level, levels_repr))
messages.add_message(request, level, message, extra_tags=extra_tags,
fail_silently=fail_silently)
def save_form(self, request, form, change):
"""
Given a ModelForm return an unsaved instance. ``change`` is True if
the object is being changed, and False if it's being added.
"""
return form.save(commit=False)
def save_model(self, request, obj, form, change):
"""
Given a model instance save it to the database.
"""
obj.save()
def delete_model(self, request, obj):
"""
Given a model instance delete it from the database.
"""
obj.delete()
def save_formset(self, request, form, formset, change):
"""
Given an inline formset save it to the database.
"""
formset.save()
def save_related(self, request, form, formsets, change):
"""
Given the ``HttpRequest``, the parent ``ModelForm`` instance, the
list of inline formsets and a boolean value based on whether the
parent is being added or changed, save the related objects to the
database. Note that at this point save_form() and save_model() have
already been called.
"""
form.save_m2m()
for formset in formsets:
self.save_formset(request, form, formset, change=change)
def render_change_form(self, request, context, add=False, change=False, form_url='', obj=None):
opts = self.model._meta
app_label = opts.app_label
preserved_filters = self.get_preserved_filters(request)
form_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, form_url)
context.update({
'add': add,
'change': change,
'has_add_permission': self.has_add_permission(request),
'has_change_permission': self.has_change_permission(request, obj),
'has_delete_permission': self.has_delete_permission(request, obj),
'has_file_field': True, # FIXME - this should check if form or formsets have a FileField,
'has_absolute_url': hasattr(self.model, 'get_absolute_url'),
'form_url': form_url,
'opts': opts,
'content_type_id': ContentType.objects.get_for_model(self.model).id,
'save_as': self.save_as,
'save_on_top': self.save_on_top,
})
if add and self.add_form_template is not None:
form_template = self.add_form_template
else:
form_template = self.change_form_template
return TemplateResponse(request, form_template or [
"admin/%s/%s/change_form.html" % (app_label, opts.model_name),
"admin/%s/change_form.html" % app_label,
"admin/change_form.html"
], context, current_app=self.admin_site.name)
def response_add(self, request, obj, post_url_continue=None):
"""
Determines the HttpResponse for the add_view stage.
"""
opts = obj._meta
pk_value = obj._get_pk_val()
preserved_filters = self.get_preserved_filters(request)
msg_dict = {'name': force_text(opts.verbose_name), 'obj': force_text(obj)}
# Here, we distinguish between different save types by checking for
# the presence of keys in request.POST.
if IS_POPUP_VAR in request.POST:
return SimpleTemplateResponse('admin/popup_response.html', {
'pk_value': escape(pk_value),
'obj': escapejs(obj)
})
elif "_continue" in request.POST:
msg = _('The %(name)s "%(obj)s" was added successfully. You may edit it again below.') % msg_dict
self.message_user(request, msg, messages.SUCCESS)
if post_url_continue is None:
post_url_continue = reverse('admin:%s_%s_change' %
(opts.app_label, opts.model_name),
args=(pk_value,),
current_app=self.admin_site.name)
post_url_continue = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, post_url_continue)
return HttpResponseRedirect(post_url_continue)
elif "_addanother" in request.POST:
msg = _('The %(name)s "%(obj)s" was added successfully. You may add another %(name)s below.') % msg_dict
self.message_user(request, msg, messages.SUCCESS)
redirect_url = request.path
redirect_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, redirect_url)
return HttpResponseRedirect(redirect_url)
else:
msg = _('The %(name)s "%(obj)s" was added successfully.') % msg_dict
self.message_user(request, msg, messages.SUCCESS)
return self.response_post_save_add(request, obj)
def response_change(self, request, obj):
"""
Determines the HttpResponse for the change_view stage.
"""
opts = self.model._meta
pk_value = obj._get_pk_val()
preserved_filters = self.get_preserved_filters(request)
msg_dict = {'name': force_text(opts.verbose_name), 'obj': force_text(obj)}
if "_continue" in request.POST:
msg = _('The %(name)s "%(obj)s" was changed successfully. You may edit it again below.') % msg_dict
self.message_user(request, msg, messages.SUCCESS)
redirect_url = request.path
redirect_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, redirect_url)
return HttpResponseRedirect(redirect_url)
elif "_saveasnew" in request.POST:
msg = _('The %(name)s "%(obj)s" was added successfully. You may edit it again below.') % msg_dict
self.message_user(request, msg, messages.SUCCESS)
redirect_url = reverse('admin:%s_%s_change' %
(opts.app_label, opts.model_name),
args=(pk_value,),
current_app=self.admin_site.name)
redirect_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, redirect_url)
return HttpResponseRedirect(redirect_url)
elif "_addanother" in request.POST:
msg = _('The %(name)s "%(obj)s" was changed successfully. You may add another %(name)s below.') % msg_dict
self.message_user(request, msg, messages.SUCCESS)
redirect_url = reverse('admin:%s_%s_add' %
(opts.app_label, opts.model_name),
current_app=self.admin_site.name)
redirect_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, redirect_url)
return HttpResponseRedirect(redirect_url)
else:
msg = _('The %(name)s "%(obj)s" was changed successfully.') % msg_dict
self.message_user(request, msg, messages.SUCCESS)
return self.response_post_save_change(request, obj)
def response_post_save_add(self, request, obj):
"""
Figure out where to redirect after the 'Save' button has been pressed
when adding a new object.
"""
opts = self.model._meta
if self.has_change_permission(request, None):
post_url = reverse('admin:%s_%s_changelist' %
(opts.app_label, opts.model_name),
current_app=self.admin_site.name)
preserved_filters = self.get_preserved_filters(request)
post_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, post_url)
else:
post_url = reverse('admin:index',
current_app=self.admin_site.name)
return HttpResponseRedirect(post_url)
def response_post_save_change(self, request, obj):
"""
Figure out where to redirect after the 'Save' button has been pressed
when editing an existing object.
"""
opts = self.model._meta
if self.has_change_permission(request, None):
post_url = reverse('admin:%s_%s_changelist' %
(opts.app_label, opts.model_name),
current_app=self.admin_site.name)
preserved_filters = self.get_preserved_filters(request)
post_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, post_url)
else:
post_url = reverse('admin:index',
current_app=self.admin_site.name)
return HttpResponseRedirect(post_url)
def response_action(self, request, queryset):
"""
Handle an admin action. This is called if a request is POSTed to the
changelist; it returns an HttpResponse if the action was handled, and
None otherwise.
"""
# There can be multiple action forms on the page (at the top
# and bottom of the change list, for example). Get the action
# whose button was pushed.
try:
action_index = int(request.POST.get('index', 0))
except ValueError:
action_index = 0
# Construct the action form.
data = request.POST.copy()
data.pop(helpers.ACTION_CHECKBOX_NAME, None)
data.pop("index", None)
# Use the action whose button was pushed
try:
data.update({'action': data.getlist('action')[action_index]})
except IndexError:
            # If we didn't get an action from the chosen form, the POST data
            # is invalid; leaving 'action' out makes the validation check
            # below fail, so there is nothing more to do here.
pass
action_form = self.action_form(data, auto_id=None)
action_form.fields['action'].choices = self.get_action_choices(request)
# If the form's valid we can handle the action.
if action_form.is_valid():
action = action_form.cleaned_data['action']
select_across = action_form.cleaned_data['select_across']
func = self.get_actions(request)[action][0]
# Get the list of selected PKs. If nothing's selected, we can't
# perform an action on it, so bail. Except we want to perform
# the action explicitly on all objects.
selected = request.POST.getlist(helpers.ACTION_CHECKBOX_NAME)
if not selected and not select_across:
# Reminder that something needs to be selected or nothing will happen
msg = _("Items must be selected in order to perform "
"actions on them. No items have been changed.")
self.message_user(request, msg, messages.WARNING)
return None
if not select_across:
# Perform the action only on the selected objects
queryset = queryset.filter(pk__in=selected)
response = func(self, request, queryset)
# Actions may return an HttpResponse-like object, which will be
# used as the response from the POST. If not, we'll be a good
# little HTTP citizen and redirect back to the changelist page.
if isinstance(response, HttpResponseBase):
return response
else:
return HttpResponseRedirect(request.get_full_path())
else:
msg = _("No action selected.")
self.message_user(request, msg, messages.WARNING)
return None
@csrf_protect_m
@transaction.atomic
def add_view(self, request, form_url='', extra_context=None):
"The 'add' admin view for this model."
model = self.model
opts = model._meta
if not self.has_add_permission(request):
raise PermissionDenied
ModelForm = self.get_form(request)
formsets = []
inline_instances = self.get_inline_instances(request, None)
if request.method == 'POST':
form = ModelForm(request.POST, request.FILES)
if form.is_valid():
new_object = self.save_form(request, form, change=False)
form_validated = True
else:
form_validated = False
new_object = self.model()
prefixes = {}
for FormSet, inline in zip(self.get_formsets(request), inline_instances):
prefix = FormSet.get_default_prefix()
prefixes[prefix] = prefixes.get(prefix, 0) + 1
if prefixes[prefix] != 1 or not prefix:
prefix = "%s-%s" % (prefix, prefixes[prefix])
formset = FormSet(data=request.POST, files=request.FILES,
instance=new_object,
save_as_new="_saveasnew" in request.POST,
prefix=prefix, queryset=inline.get_queryset(request))
formsets.append(formset)
if all_valid(formsets) and form_validated:
self.save_model(request, new_object, form, False)
self.save_related(request, form, formsets, False)
self.log_addition(request, new_object)
return self.response_add(request, new_object)
else:
# Prepare the dict of initial data from the request.
# We have to special-case M2Ms as a list of comma-separated PKs.
initial = dict(request.GET.items())
for k in initial:
try:
f = opts.get_field(k)
except models.FieldDoesNotExist:
continue
if isinstance(f, models.ManyToManyField):
initial[k] = initial[k].split(",")
form = ModelForm(initial=initial)
prefixes = {}
for FormSet, inline in zip(self.get_formsets(request), inline_instances):
prefix = FormSet.get_default_prefix()
prefixes[prefix] = prefixes.get(prefix, 0) + 1
if prefixes[prefix] != 1 or not prefix:
prefix = "%s-%s" % (prefix, prefixes[prefix])
formset = FormSet(instance=self.model(), prefix=prefix,
queryset=inline.get_queryset(request))
formsets.append(formset)
adminForm = helpers.AdminForm(form, list(self.get_fieldsets(request)),
self.get_prepopulated_fields(request),
self.get_readonly_fields(request),
model_admin=self)
media = self.media + adminForm.media
inline_admin_formsets = []
for inline, formset in zip(inline_instances, formsets):
fieldsets = list(inline.get_fieldsets(request))
readonly = list(inline.get_readonly_fields(request))
prepopulated = dict(inline.get_prepopulated_fields(request))
inline_admin_formset = helpers.InlineAdminFormSet(inline, formset,
fieldsets, prepopulated, readonly, model_admin=self)
inline_admin_formsets.append(inline_admin_formset)
media = media + inline_admin_formset.media
context = {
'title': _('Add %s') % force_text(opts.verbose_name),
'adminform': adminForm,
'is_popup': IS_POPUP_VAR in request.REQUEST,
'media': media,
'inline_admin_formsets': inline_admin_formsets,
'errors': helpers.AdminErrorList(form, formsets),
'app_label': opts.app_label,
'preserved_filters': self.get_preserved_filters(request),
}
context.update(extra_context or {})
return self.render_change_form(request, context, form_url=form_url, add=True)
@csrf_protect_m
@transaction.atomic
def change_view(self, request, object_id, form_url='', extra_context=None):
"The 'change' admin view for this model."
model = self.model
opts = model._meta
obj = self.get_object(request, unquote(object_id))
if not self.has_change_permission(request, obj):
raise PermissionDenied
if obj is None:
raise Http404(_('%(name)s object with primary key %(key)r does not exist.') % {'name': force_text(opts.verbose_name), 'key': escape(object_id)})
if request.method == 'POST' and "_saveasnew" in request.POST:
return self.add_view(request, form_url=reverse('admin:%s_%s_add' %
(opts.app_label, opts.model_name),
current_app=self.admin_site.name))
ModelForm = self.get_form(request, obj)
formsets = []
inline_instances = self.get_inline_instances(request, obj)
if request.method == 'POST':
form = ModelForm(request.POST, request.FILES, instance=obj)
if form.is_valid():
form_validated = True
new_object = self.save_form(request, form, change=True)
else:
form_validated = False
new_object = obj
prefixes = {}
for FormSet, inline in zip(self.get_formsets(request, new_object), inline_instances):
prefix = FormSet.get_default_prefix()
prefixes[prefix] = prefixes.get(prefix, 0) + 1
if prefixes[prefix] != 1 or not prefix:
prefix = "%s-%s" % (prefix, prefixes[prefix])
formset = FormSet(request.POST, request.FILES,
instance=new_object, prefix=prefix,
queryset=inline.get_queryset(request))
formsets.append(formset)
if all_valid(formsets) and form_validated:
self.save_model(request, new_object, form, True)
self.save_related(request, form, formsets, True)
change_message = self.construct_change_message(request, form, formsets)
self.log_change(request, new_object, change_message)
return self.response_change(request, new_object)
else:
form = ModelForm(instance=obj)
prefixes = {}
for FormSet, inline in zip(self.get_formsets(request, obj), inline_instances):
prefix = FormSet.get_default_prefix()
prefixes[prefix] = prefixes.get(prefix, 0) + 1
if prefixes[prefix] != 1 or not prefix:
prefix = "%s-%s" % (prefix, prefixes[prefix])
formset = FormSet(instance=obj, prefix=prefix,
queryset=inline.get_queryset(request))
formsets.append(formset)
adminForm = helpers.AdminForm(form, self.get_fieldsets(request, obj),
self.get_prepopulated_fields(request, obj),
self.get_readonly_fields(request, obj),
model_admin=self)
media = self.media + adminForm.media
inline_admin_formsets = []
for inline, formset in zip(inline_instances, formsets):
fieldsets = list(inline.get_fieldsets(request, obj))
readonly = list(inline.get_readonly_fields(request, obj))
prepopulated = dict(inline.get_prepopulated_fields(request, obj))
inline_admin_formset = helpers.InlineAdminFormSet(inline, formset,
fieldsets, prepopulated, readonly, model_admin=self)
inline_admin_formsets.append(inline_admin_formset)
media = media + inline_admin_formset.media
context = {
'title': _('Change %s') % force_text(opts.verbose_name),
'adminform': adminForm,
'object_id': object_id,
'original': obj,
'is_popup': IS_POPUP_VAR in request.REQUEST,
'media': media,
'inline_admin_formsets': inline_admin_formsets,
'errors': helpers.AdminErrorList(form, formsets),
'app_label': opts.app_label,
'preserved_filters': self.get_preserved_filters(request),
}
context.update(extra_context or {})
return self.render_change_form(request, context, change=True, obj=obj, form_url=form_url)
@csrf_protect_m
def changelist_view(self, request, extra_context=None):
"""
The 'change list' admin view for this model.
"""
from django.contrib.admin.views.main import ERROR_FLAG
opts = self.model._meta
app_label = opts.app_label
if not self.has_change_permission(request, None):
raise PermissionDenied
list_display = self.get_list_display(request)
list_display_links = self.get_list_display_links(request, list_display)
list_filter = self.get_list_filter(request)
# Check actions to see if any are available on this changelist
actions = self.get_actions(request)
if actions:
# Add the action checkboxes if there are any actions available.
list_display = ['action_checkbox'] + list(list_display)
ChangeList = self.get_changelist(request)
try:
cl = ChangeList(request, self.model, list_display,
list_display_links, list_filter, self.date_hierarchy,
self.search_fields, self.list_select_related,
self.list_per_page, self.list_max_show_all, self.list_editable,
self)
except IncorrectLookupParameters:
# Wacky lookup parameters were given, so redirect to the main
# changelist page, without parameters, and pass an 'invalid=1'
# parameter via the query string. If wacky parameters were given
# and the 'invalid=1' parameter was already in the query string,
# something is screwed up with the database, so display an error
# page.
if ERROR_FLAG in request.GET.keys():
return SimpleTemplateResponse('admin/invalid_setup.html', {
'title': _('Database error'),
})
return HttpResponseRedirect(request.path + '?' + ERROR_FLAG + '=1')
# If the request was POSTed, this might be a bulk action or a bulk
# edit. Try to look up an action or confirmation first, but if this
# isn't an action the POST will fall through to the bulk edit check,
# below.
action_failed = False
selected = request.POST.getlist(helpers.ACTION_CHECKBOX_NAME)
# Actions with no confirmation
if (actions and request.method == 'POST' and
'index' in request.POST and '_save' not in request.POST):
if selected:
response = self.response_action(request, queryset=cl.get_queryset(request))
if response:
return response
else:
action_failed = True
else:
msg = _("Items must be selected in order to perform "
"actions on them. No items have been changed.")
self.message_user(request, msg, messages.WARNING)
action_failed = True
# Actions with confirmation
if (actions and request.method == 'POST' and
helpers.ACTION_CHECKBOX_NAME in request.POST and
'index' not in request.POST and '_save' not in request.POST):
if selected:
response = self.response_action(request, queryset=cl.get_queryset(request))
if response:
return response
else:
action_failed = True
# If we're allowing changelist editing, we need to construct a formset
# for the changelist given all the fields to be edited. Then we'll
# use the formset to validate/process POSTed data.
formset = cl.formset = None
# Handle POSTed bulk-edit data.
if (request.method == "POST" and cl.list_editable and
'_save' in request.POST and not action_failed):
FormSet = self.get_changelist_formset(request)
formset = cl.formset = FormSet(request.POST, request.FILES, queryset=cl.result_list)
if formset.is_valid():
changecount = 0
for form in formset.forms:
if form.has_changed():
obj = self.save_form(request, form, change=True)
self.save_model(request, obj, form, change=True)
self.save_related(request, form, formsets=[], change=True)
change_msg = self.construct_change_message(request, form, None)
self.log_change(request, obj, change_msg)
changecount += 1
if changecount:
if changecount == 1:
name = force_text(opts.verbose_name)
else:
name = force_text(opts.verbose_name_plural)
msg = ungettext("%(count)s %(name)s was changed successfully.",
"%(count)s %(name)s were changed successfully.",
changecount) % {'count': changecount,
'name': name,
'obj': force_text(obj)}
self.message_user(request, msg, messages.SUCCESS)
return HttpResponseRedirect(request.get_full_path())
# Handle GET -- construct a formset for display.
elif cl.list_editable:
FormSet = self.get_changelist_formset(request)
formset = cl.formset = FormSet(queryset=cl.result_list)
# Build the list of media to be used by the formset.
if formset:
media = self.media + formset.media
else:
media = self.media
# Build the action form and populate it with available actions.
if actions:
action_form = self.action_form(auto_id=None)
action_form.fields['action'].choices = self.get_action_choices(request)
else:
action_form = None
selection_note_all = ungettext('%(total_count)s selected',
'All %(total_count)s selected', cl.result_count)
context = {
'module_name': force_text(opts.verbose_name_plural),
'selection_note': _('0 of %(cnt)s selected') % {'cnt': len(cl.result_list)},
'selection_note_all': selection_note_all % {'total_count': cl.result_count},
'title': cl.title,
'is_popup': cl.is_popup,
'cl': cl,
'media': media,
'has_add_permission': self.has_add_permission(request),
'opts': cl.opts,
'app_label': app_label,
'action_form': action_form,
'actions_on_top': self.actions_on_top,
'actions_on_bottom': self.actions_on_bottom,
'actions_selection_counter': self.actions_selection_counter,
'preserved_filters': self.get_preserved_filters(request),
}
context.update(extra_context or {})
return TemplateResponse(request, self.change_list_template or [
'admin/%s/%s/change_list.html' % (app_label, opts.model_name),
'admin/%s/change_list.html' % app_label,
'admin/change_list.html'
], context, current_app=self.admin_site.name)
@csrf_protect_m
@transaction.atomic
def delete_view(self, request, object_id, extra_context=None):
"The 'delete' admin view for this model."
opts = self.model._meta
app_label = opts.app_label
obj = self.get_object(request, unquote(object_id))
if not self.has_delete_permission(request, obj):
raise PermissionDenied
if obj is None:
raise Http404(
_('%(name)s object with primary key %(key)r does not exist.') %
{'name': force_text(opts.verbose_name), 'key': escape(object_id)}
)
using = router.db_for_write(self.model)
# Populate deleted_objects, a data structure of all related objects that
# will also be deleted.
(deleted_objects, perms_needed, protected) = get_deleted_objects(
[obj], opts, request.user, self.admin_site, using)
if request.POST: # The user has already confirmed the deletion.
if perms_needed:
raise PermissionDenied
obj_display = force_text(obj)
self.log_deletion(request, obj, obj_display)
self.delete_model(request, obj)
self.message_user(request, _(
'The %(name)s "%(obj)s" was deleted successfully.') % {
'name': force_text(opts.verbose_name),
'obj': force_text(obj_display)},
messages.SUCCESS)
if self.has_change_permission(request, None):
post_url = reverse('admin:%s_%s_changelist' %
(opts.app_label, opts.model_name),
current_app=self.admin_site.name)
preserved_filters = self.get_preserved_filters(request)
post_url = add_preserved_filters(
{'preserved_filters': preserved_filters, 'opts': opts}, post_url
)
else:
post_url = reverse('admin:index',
current_app=self.admin_site.name)
return HttpResponseRedirect(post_url)
object_name = force_text(opts.verbose_name)
if perms_needed or protected:
title = _("Cannot delete %(name)s") % {"name": object_name}
else:
title = _("Are you sure?")
context = {
"title": title,
"object_name": object_name,
"object": obj,
"deleted_objects": deleted_objects,
"perms_lacking": perms_needed,
"protected": protected,
"opts": opts,
"app_label": app_label,
'preserved_filters': self.get_preserved_filters(request),
}
context.update(extra_context or {})
return TemplateResponse(request, self.delete_confirmation_template or [
"admin/%s/%s/delete_confirmation.html" % (app_label, opts.model_name),
"admin/%s/delete_confirmation.html" % app_label,
"admin/delete_confirmation.html"
], context, current_app=self.admin_site.name)
def history_view(self, request, object_id, extra_context=None):
"The 'history' admin view for this model."
from django.contrib.admin.models import LogEntry
# First check if the user can see this history.
model = self.model
obj = get_object_or_404(model, pk=unquote(object_id))
if not self.has_change_permission(request, obj):
raise PermissionDenied
# Then get the history for this object.
opts = model._meta
app_label = opts.app_label
action_list = LogEntry.objects.filter(
object_id=unquote(object_id),
content_type__id__exact=ContentType.objects.get_for_model(model).id
).select_related().order_by('action_time')
context = {
'title': _('Change history: %s') % force_text(obj),
'action_list': action_list,
'module_name': capfirst(force_text(opts.verbose_name_plural)),
'object': obj,
'app_label': app_label,
'opts': opts,
'preserved_filters': self.get_preserved_filters(request),
}
context.update(extra_context or {})
return TemplateResponse(request, self.object_history_template or [
"admin/%s/%s/object_history.html" % (app_label, opts.model_name),
"admin/%s/object_history.html" % app_label,
"admin/object_history.html"
], context, current_app=self.admin_site.name)
class InlineModelAdmin(BaseModelAdmin):
"""
Options for inline editing of ``model`` instances.
    Provide ``fk_name`` to specify the attribute name of the ``ForeignKey`` from
``model`` to its parent. This is required if ``model`` has more than one
``ForeignKey`` to its parent.
"""
model = None
fk_name = None
formset = BaseInlineFormSet
extra = 3
max_num = None
template = None
verbose_name = None
verbose_name_plural = None
can_delete = True
# validation
validator_class = validation.InlineValidator
def __init__(self, parent_model, admin_site):
self.admin_site = admin_site
self.parent_model = parent_model
self.opts = self.model._meta
super(InlineModelAdmin, self).__init__()
if self.verbose_name is None:
self.verbose_name = self.model._meta.verbose_name
if self.verbose_name_plural is None:
self.verbose_name_plural = self.model._meta.verbose_name_plural
@property
def media(self):
extra = '' if settings.DEBUG else '.min'
js = ['jquery%s.js' % extra, 'jquery.init.js', 'inlines%s.js' % extra]
if self.prepopulated_fields:
js.extend(['urlify.js', 'prepopulate%s.js' % extra])
if self.filter_vertical or self.filter_horizontal:
js.extend(['SelectBox.js', 'SelectFilter2.js'])
return forms.Media(js=[static('admin/js/%s' % url) for url in js])
def get_extra(self, request, obj=None, **kwargs):
"""Hook for customizing the number of extra inline forms."""
return self.extra
def get_max_num(self, request, obj=None, **kwargs):
"""Hook for customizing the max number of extra inline forms."""
return self.max_num
def get_formset(self, request, obj=None, **kwargs):
"""Returns a BaseInlineFormSet class for use in admin add/change views."""
if 'fields' in kwargs:
fields = kwargs.pop('fields')
else:
fields = flatten_fieldsets(self.get_fieldsets(request, obj))
if self.exclude is None:
exclude = []
else:
exclude = list(self.exclude)
exclude.extend(self.get_readonly_fields(request, obj))
if self.exclude is None and hasattr(self.form, '_meta') and self.form._meta.exclude:
# Take the custom ModelForm's Meta.exclude into account only if the
# InlineModelAdmin doesn't define its own.
exclude.extend(self.form._meta.exclude)
# if exclude is an empty list we use None, since that's the actual
# default
exclude = exclude or None
can_delete = self.can_delete and self.has_delete_permission(request, obj)
defaults = {
"form": self.form,
"formset": self.formset,
"fk_name": self.fk_name,
"fields": fields,
"exclude": exclude,
"formfield_callback": partial(self.formfield_for_dbfield, request=request),
"extra": self.get_extra(request, obj, **kwargs),
"max_num": self.get_max_num(request, obj, **kwargs),
"can_delete": can_delete,
}
defaults.update(kwargs)
base_model_form = defaults['form']
class DeleteProtectedModelForm(base_model_form):
def hand_clean_DELETE(self):
"""
We don't validate the 'DELETE' field itself because on
templates it's not rendered using the field information, but
just using a generic "deletion_field" of the InlineModelAdmin.
"""
if self.cleaned_data.get(DELETION_FIELD_NAME, False):
using = router.db_for_write(self._meta.model)
collector = NestedObjects(using=using)
collector.collect([self.instance])
if collector.protected:
objs = []
for p in collector.protected:
objs.append(
# Translators: Model verbose name and instance representation, suitable to be an item in a list
_('%(class_name)s %(instance)s') % {
'class_name': p._meta.verbose_name,
'instance': p}
)
params = {'class_name': self._meta.model._meta.verbose_name,
'instance': self.instance,
'related_objects': get_text_list(objs, _('and'))}
msg = _("Deleting %(class_name)s %(instance)s would require "
"deleting the following protected related objects: "
"%(related_objects)s")
raise ValidationError(msg, code='deleting_protected', params=params)
def is_valid(self):
result = super(DeleteProtectedModelForm, self).is_valid()
self.hand_clean_DELETE()
return result
defaults['form'] = DeleteProtectedModelForm
if defaults['fields'] is None and not modelform_defines_fields(defaults['form']):
defaults['fields'] = forms.ALL_FIELDS
return inlineformset_factory(self.parent_model, self.model, **defaults)
def get_fieldsets(self, request, obj=None):
if self.declared_fieldsets:
return self.declared_fieldsets
form = self.get_formset(request, obj, fields=None).form
fields = list(form.base_fields) + list(self.get_readonly_fields(request, obj))
return [(None, {'fields': fields})]
def get_queryset(self, request):
queryset = super(InlineModelAdmin, self).get_queryset(request)
if not self.has_change_permission(request):
queryset = queryset.none()
return queryset
def has_add_permission(self, request):
if self.opts.auto_created:
# We're checking the rights to an auto-created intermediate model,
# which doesn't have its own individual permissions. The user needs
# to have the change permission for the related model in order to
# be able to do anything with the intermediate model.
return self.has_change_permission(request)
return super(InlineModelAdmin, self).has_add_permission(request)
def has_change_permission(self, request, obj=None):
opts = self.opts
if opts.auto_created:
# The model was auto-created as intermediary for a
# ManyToMany-relationship, find the target model
for field in opts.fields:
if field.rel and field.rel.to != self.parent_model:
opts = field.rel.to._meta
break
codename = get_permission_codename('change', opts)
return request.user.has_perm("%s.%s" % (opts.app_label, codename))
def has_delete_permission(self, request, obj=None):
if self.opts.auto_created:
# We're checking the rights to an auto-created intermediate model,
# which doesn't have its own individual permissions. The user needs
# to have the change permission for the related model in order to
# be able to do anything with the intermediate model.
return self.has_change_permission(request, obj)
return super(InlineModelAdmin, self).has_delete_permission(request, obj)
class StackedInline(InlineModelAdmin):
template = 'admin/edit_inline/stacked.html'
class TabularInline(InlineModelAdmin):
template = 'admin/edit_inline/tabular.html'
|
wswld/python-notes | refs/heads/master | patterns/descriptors/descriptor_as_validator.py | 1 | # Descriptors could be used as validators
import re
class DescriptorValidator(object):
def __get__(self, obj, obj_cls):
return self.x
def __set__(self, obj, value):
self.validate(value)
self.x = value
def validate(self, value):
# raise some exception if not validated
pass
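# Note: __set__ stores the value as self.x on the descriptor itself, and the
# descriptor is a class attribute of its owner, so every instance of the
# owning class shares a single value; a per-instance variant would write to
# obj.__dict__ instead.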
class RegexValidator(DescriptorValidator):
def __init__(self, regexp):
self.regexp = regexp
def validate(self, value):
if not re.match(self.regexp, value):
raise ValueError("{} doesn't match: {}".format(value, str(self.regexp)))
else:
print 'Validated {} w/ regexp {}'.format(value, str(self.regexp))
class Site(object):
# This is not the perfect URL regexp out there, but it'll do
url = RegexValidator(regexp='http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+')
def __init__(self, name, url):
self.name = name
self.url = url
s1 = Site('first', 'http://first.com')
s2 = Site('second', 'https://second.co.uk')
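# '###' does not match the URL regexp, so this last assignment raises ValueError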
s3 = Site('third', '###') |
nilnvoid/wagtail | refs/heads/master | wagtail/contrib/settings/registry.py | 4 | from __future__ import absolute_import, unicode_literals
from django.apps import apps
from django.contrib.auth.models import Permission
from django.core.urlresolvers import reverse
from django.utils.text import capfirst
from wagtail.wagtailadmin.menu import MenuItem
from wagtail.wagtailcore import hooks
from .permissions import user_can_edit_setting_type
class SettingMenuItem(MenuItem):
def __init__(self, model, icon='cog', classnames='', **kwargs):
icon_classes = 'icon icon-' + icon
if classnames:
classnames += ' ' + icon_classes
else:
classnames = icon_classes
self.model = model
super(SettingMenuItem, self).__init__(
label=capfirst(model._meta.verbose_name),
url=reverse('wagtailsettings:edit', args=[
model._meta.app_label, model._meta.model_name]),
classnames=classnames,
**kwargs)
def is_shown(self, request):
return user_can_edit_setting_type(request.user, self.model)
class Registry(list):
def register(self, model, **kwargs):
"""
Register a model as a setting, adding it to the wagtail admin menu
"""
# Don't bother registering this if it is already registered
if model in self:
return model
self.append(model)
# Register a new menu item in the settings menu
@hooks.register('register_settings_menu_item')
def menu_hook():
return SettingMenuItem(model, **kwargs)
@hooks.register('register_permissions')
def permissions_hook():
return Permission.objects.filter(
content_type__app_label=model._meta.app_label,
codename='change_{}'.format(model._meta.model_name))
return model
def register_decorator(self, model=None, **kwargs):
"""
Register a model as a setting in the Wagtail admin
"""
if model is None:
return lambda model: self.register(model, **kwargs)
return self.register(model, **kwargs)
def get_by_natural_key(self, app_label, model_name):
"""
Get a setting model using its app_label and model_name.
If the app_label.model_name combination is not a valid model, or the
model is not registered as a setting, returns None.
"""
try:
Model = apps.get_model(app_label, model_name)
except LookupError:
return None
if Model not in registry:
return None
return Model
registry = Registry()
register_setting = registry.register_decorator
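# A usage sketch (SocialSettings is a hypothetical BaseSetting subclass):
#
#     @register_setting(icon='cog')
#     class SocialSettings(BaseSetting):
#         facebook = models.URLField()
#
# register_setting(SocialSettings) works as a plain call as well.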
|
eerwitt/tensorflow | refs/heads/master | tensorflow/contrib/ffmpeg/decode_audio_op_test.py | 75 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Tests for third_party.tensorflow.contrib.ffmpeg.decode_audio_op."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os.path
from tensorflow.contrib import ffmpeg
from tensorflow.python.platform import resource_loader
from tensorflow.python.platform import test
class DecodeAudioOpTest(test.TestCase):
def _loadFileAndTest(self, filename, file_format, duration_sec,
samples_per_second, channel_count):
"""Loads an audio file and validates the output tensor.
Args:
filename: The filename of the input file.
file_format: The format of the input file.
duration_sec: The duration of the audio contained in the file in seconds.
samples_per_second: The desired sample rate in the output tensor.
channel_count: The desired channel count in the output tensor.
"""
with self.test_session():
path = os.path.join(resource_loader.get_data_files_path(), 'testdata',
filename)
with open(path, 'rb') as f:
contents = f.read()
audio_op = ffmpeg.decode_audio(
contents,
file_format=file_format,
samples_per_second=samples_per_second,
channel_count=channel_count)
audio = audio_op.eval()
self.assertEqual(len(audio.shape), 2)
self.assertNear(
duration_sec * samples_per_second,
audio.shape[0],
# Duration should be specified within 10%:
0.1 * audio.shape[0])
self.assertEqual(audio.shape[1], channel_count)
def testMonoMp3(self):
self._loadFileAndTest('mono_16khz.mp3', 'mp3', 0.57, 20000, 1)
self._loadFileAndTest('mono_16khz.mp3', 'mp3', 0.57, 20000, 2)
def testMonoMp4Mp3Codec(self):
# mp3 compressed audio streams in mp4 container.
self._loadFileAndTest('mono_16khz_mp3.mp4', 'mp4', 2.77, 20000, 1)
self._loadFileAndTest('mono_16khz_mp3.mp4', 'mp4', 2.77, 20000, 2)
def testMonoMp4AacCodec(self):
# aac compressed audio streams in mp4 container.
self._loadFileAndTest('mono_32khz_aac.mp4', 'mp4', 2.77, 20000, 1)
self._loadFileAndTest('mono_32khz_aac.mp4', 'mp4', 2.77, 20000, 2)
def testStereoMp3(self):
self._loadFileAndTest('stereo_48khz.mp3', 'mp3', 0.79, 50000, 1)
self._loadFileAndTest('stereo_48khz.mp3', 'mp3', 0.79, 20000, 2)
def testStereoMp4Mp3Codec(self):
# mp3 compressed audio streams in mp4 container.
self._loadFileAndTest('stereo_48khz_mp3.mp4', 'mp4', 0.79, 50000, 1)
self._loadFileAndTest('stereo_48khz_mp3.mp4', 'mp4', 0.79, 20000, 2)
def testStereoMp4AacCodec(self):
# aac compressed audio streams in mp4 container.
self._loadFileAndTest('stereo_48khz_aac.mp4', 'mp4', 0.79, 50000, 1)
self._loadFileAndTest('stereo_48khz_aac.mp4', 'mp4', 0.79, 20000, 2)
def testMonoWav(self):
self._loadFileAndTest('mono_10khz.wav', 'wav', 0.57, 5000, 1)
self._loadFileAndTest('mono_10khz.wav', 'wav', 0.57, 10000, 4)
def testOgg(self):
self._loadFileAndTest('mono_10khz.ogg', 'ogg', 0.57, 10000, 1)
def testInvalidFile(self):
with self.test_session():
contents = 'invalid file'
audio_op = ffmpeg.decode_audio(
contents,
file_format='wav',
samples_per_second=10000,
channel_count=2)
audio = audio_op.eval()
self.assertEqual(audio.shape, (0, 0))
if __name__ == '__main__':
test.main()
|
ryfeus/lambda-packs | refs/heads/master | HDF4_H5_NETCDF/source2.7/setuptools/command/py36compat.py | 286 | import os
from glob import glob
from distutils.util import convert_path
from distutils.command import sdist
from setuptools.extern.six.moves import filter
class sdist_add_defaults:
"""
Mix-in providing forward-compatibility for functionality as found in
distutils on Python 3.7.
Do not edit the code in this class except to update functionality
as implemented in distutils. Instead, override in the subclass.
"""
def add_defaults(self):
"""Add all the default files to self.filelist:
- README or README.txt
- setup.py
- test/test*.py
- all pure Python modules mentioned in setup script
- all files pointed by package_data (build_py)
- all files defined in data_files.
- all files defined as scripts.
- all C sources listed as part of extensions or C libraries
in the setup script (doesn't catch C headers!)
Warns if (README or README.txt) or setup.py are missing; everything
else is optional.
"""
self._add_defaults_standards()
self._add_defaults_optional()
self._add_defaults_python()
self._add_defaults_data_files()
self._add_defaults_ext()
self._add_defaults_c_libs()
self._add_defaults_scripts()
@staticmethod
def _cs_path_exists(fspath):
"""
Case-sensitive path existence check
>>> sdist_add_defaults._cs_path_exists(__file__)
True
>>> sdist_add_defaults._cs_path_exists(__file__.upper())
False
"""
if not os.path.exists(fspath):
return False
# make absolute so we always have a directory
abspath = os.path.abspath(fspath)
directory, filename = os.path.split(abspath)
return filename in os.listdir(directory)
def _add_defaults_standards(self):
standards = [self.READMES, self.distribution.script_name]
for fn in standards:
if isinstance(fn, tuple):
alts = fn
got_it = False
for fn in alts:
if self._cs_path_exists(fn):
got_it = True
self.filelist.append(fn)
break
if not got_it:
self.warn("standard file not found: should have one of " +
', '.join(alts))
else:
if self._cs_path_exists(fn):
self.filelist.append(fn)
else:
self.warn("standard file '%s' not found" % fn)
def _add_defaults_optional(self):
optional = ['test/test*.py', 'setup.cfg']
for pattern in optional:
files = filter(os.path.isfile, glob(pattern))
self.filelist.extend(files)
def _add_defaults_python(self):
# build_py is used to get:
# - python modules
# - files defined in package_data
build_py = self.get_finalized_command('build_py')
# getting python files
if self.distribution.has_pure_modules():
self.filelist.extend(build_py.get_source_files())
# getting package_data files
# (computed in build_py.data_files by build_py.finalize_options)
for pkg, src_dir, build_dir, filenames in build_py.data_files:
for filename in filenames:
self.filelist.append(os.path.join(src_dir, filename))
def _add_defaults_data_files(self):
# getting distribution.data_files
if self.distribution.has_data_files():
for item in self.distribution.data_files:
if isinstance(item, str):
# plain file
item = convert_path(item)
if os.path.isfile(item):
self.filelist.append(item)
else:
# a (dirname, filenames) tuple
dirname, filenames = item
for f in filenames:
f = convert_path(f)
if os.path.isfile(f):
self.filelist.append(f)
def _add_defaults_ext(self):
if self.distribution.has_ext_modules():
build_ext = self.get_finalized_command('build_ext')
self.filelist.extend(build_ext.get_source_files())
def _add_defaults_c_libs(self):
if self.distribution.has_c_libraries():
build_clib = self.get_finalized_command('build_clib')
self.filelist.extend(build_clib.get_source_files())
def _add_defaults_scripts(self):
if self.distribution.has_scripts():
build_scripts = self.get_finalized_command('build_scripts')
self.filelist.extend(build_scripts.get_source_files())
if hasattr(sdist.sdist, '_add_defaults_standards'):
# disable the functionality already available upstream
class sdist_add_defaults:
pass
|
igormarfin/trading-with-python | refs/heads/master | cookbook/cython/setup.py | 77 | from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
import numpy
ext = Extension("mean_c", ["mean_c.pyx"],
include_dirs = [numpy.get_include()])
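# A usage note: build the extension in place, after which "import mean_c"
# resolves from this directory (assuming mean_c.pyx declares that module):
#   python setup.py build_ext --inplace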
setup(ext_modules=[ext],
cmdclass = {'build_ext': build_ext}) |
gvangool/django-extensions | refs/heads/master | django_extensions/management/commands/set_fake_emails.py | 27 | """
set_fake_emails.py
Give all users a new email account. Useful for testing in a
development environment. As such, this command is only available when
setting.DEBUG is True.
"""
from optparse import make_option
from django.conf import settings
from django.core.management.base import CommandError, NoArgsCommand
from django_extensions.management.utils import signalcommand
DEFAULT_FAKE_EMAIL = '%(username)s@example.com'
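# The format may reference username, first_name and last_name; a hypothetical
# invocation overriding the default:
#   ./manage.py set_fake_emails --email='%(first_name)s.%(last_name)s@example.org'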
class Command(NoArgsCommand):
option_list = NoArgsCommand.option_list + (
make_option('--email', dest='default_email', default=DEFAULT_FAKE_EMAIL,
help='Use this as the new email format.'),
make_option('-a', '--no-admin', action="store_true", dest='no_admin', default=False,
help='Do not change administrator accounts'),
make_option('-s', '--no-staff', action="store_true", dest='no_staff', default=False,
help='Do not change staff accounts'),
make_option('--include', dest='include_regexp', default=None,
help='Include usernames matching this regexp.'),
make_option('--exclude', dest='exclude_regexp', default=None,
help='Exclude usernames matching this regexp.'),
make_option('--include-groups', dest='include_groups', default=None,
                    help='Include users matching this group. (use comma separation for multiple groups)'),
make_option('--exclude-groups', dest='exclude_groups', default=None,
                    help='Exclude users matching this group. (use comma separation for multiple groups)'),
)
help = '''DEBUG only: give all users a new email based on their account data ("%s" by default). Possible parameters are: username, first_name, last_name''' % (DEFAULT_FAKE_EMAIL, )
requires_system_checks = False
@signalcommand
def handle_noargs(self, **options):
if not settings.DEBUG:
raise CommandError('Only available in debug mode')
from django_extensions.compat import get_user_model
from django.contrib.auth.models import Group
email = options.get('default_email', DEFAULT_FAKE_EMAIL)
include_regexp = options.get('include_regexp', None)
exclude_regexp = options.get('exclude_regexp', None)
include_groups = options.get('include_groups', None)
exclude_groups = options.get('exclude_groups', None)
no_admin = options.get('no_admin', False)
no_staff = options.get('no_staff', False)
User = get_user_model()
users = User.objects.all()
if no_admin:
users = users.exclude(is_superuser=True)
if no_staff:
users = users.exclude(is_staff=True)
if exclude_groups:
groups = Group.objects.filter(name__in=exclude_groups.split(","))
if groups:
users = users.exclude(groups__in=groups)
else:
raise CommandError("No group matches filter: %s" % exclude_groups)
if include_groups:
groups = Group.objects.filter(name__in=include_groups.split(","))
if groups:
users = users.filter(groups__in=groups)
else:
raise CommandError("No groups matches filter: %s" % include_groups)
if exclude_regexp:
users = users.exclude(username__regex=exclude_regexp)
if include_regexp:
users = users.filter(username__regex=include_regexp)
for user in users:
user.email = email % {'username': user.username,
'first_name': user.first_name,
'last_name': user.last_name}
user.save()
print('Changed %d emails' % users.count())
|
israeleriston/scientific-week | refs/heads/master | backend/app/person/models.py | 1 | from app import db
class Person(db.Model):
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(60))
cpf = db.Column(db.String(14))
def __init__(self, name, cpf):
self.name = name
self.cpf = cpf
def __repr__(self):
        return '<Person %d>' % self.id
|
rahulunair/nova | refs/heads/master | nova/tests/functional/notification_sample_tests/test_metrics.py | 2 | # Copyright 2018 NTT Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import nova.conf
from nova import context
from nova.tests.functional.notification_sample_tests \
import notification_sample_base
from nova.tests.unit import fake_notifier
CONF = nova.conf.CONF
class TestMetricsNotificationSample(
notification_sample_base.NotificationSampleTestBase):
def setUp(self):
self.flags(compute_monitors=['cpu.virt_driver'])
super(TestMetricsNotificationSample, self).setUp()
# Reset the cpu stats of the 'cpu.virt_driver' monitor
self.compute.manager.rt.monitors[0]._cpu_stats = {}
def test_metrics_update(self):
self.compute.manager.update_available_resource(
context.get_admin_context())
self.assertEqual(1, len(fake_notifier.VERSIONED_NOTIFICATIONS))
self._verify_notification(
'metrics-update',
replacements={'host_ip': CONF.my_ip},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
|
eblot/miscripts | refs/heads/master | Python/jtag/state.py | 1 | #!/usr/bin/env python
# Experiment with a JTAG TAP controller state machine
class State(object):
"""
"""
def __init__(self, name):
self.name = name
def __repr__(self):
return self.name
def setx(self, fstate, tstate):
self.exits = [fstate, tstate]
class StateMachine(object):
"""
"""
def __init__(self):
self.states = {}
for s in ['test_logic_reset',
'run_test_idle',
'select_dr_scan',
'capture_dr',
'shift_dr',
'exit_1_dr',
'pause_dr',
'exit_2_dr',
'update_dr',
'select_ir_scan',
'capture_ir',
'shift_ir',
'exit_1_ir',
'pause_ir',
'exit_2_ir',
'update_ir']:
self.states[s] = State(s)
self['test_logic_reset'].setx(self['run_test_idle'],
self['test_logic_reset'])
self['run_test_idle'].setx(self['run_test_idle'],
self['select_dr_scan'])
self['select_dr_scan'].setx(self['capture_dr'],
self['select_ir_scan'])
self['capture_dr'].setx(self['shift_dr'], self['exit_1_dr'])
self['shift_dr'].setx(self['shift_dr'], self['exit_1_dr'])
self['exit_1_dr'].setx(self['pause_dr'], self['update_dr'])
self['pause_dr'].setx(self['pause_dr'], self['exit_2_dr'])
self['exit_2_dr'].setx(self['shift_dr'], self['update_dr'])
self['update_dr'].setx(self['run_test_idle'],
self['select_dr_scan'])
self['select_ir_scan'].setx(self['capture_ir'],
self['test_logic_reset'])
self['capture_ir'].setx(self['shift_ir'], self['exit_1_ir'])
self['shift_ir'].setx(self['shift_ir'], self['exit_1_ir'])
self['exit_1_ir'].setx(self['pause_ir'], self['update_ir'])
self['pause_ir'].setx(self['pause_ir'], self['exit_2_ir'])
self['exit_2_ir'].setx(self['shift_ir'], self['update_ir'])
self['update_ir'].setx(self['run_test_idle'], self['select_dr_scan'])
self._current = self['test_logic_reset']
def __getitem__(self, name):
return self.states[name]
def find_best_path(self, from_, to):
source = self[from_]
target = self[to]
paths = []
def next_path(state, target, path):
# this test match the target, path is valid
if state == target:
return path+[state]
# candidate paths
paths = []
for n,x in enumerate(state.exits):
# next state is self (loop around), kill the path
if x == state:
continue
                # next state already appears upstream (loop back), kill the path
if x in path:
continue
# try the current path
npath = next_path(x, target, path + [state])
# downstream is a valid path, store it
if npath:
paths.append(npath)
# keep the shortest path
return paths and min([(len(l), l) for l in paths],
key=lambda x: x[0])[1] or []
return next_path(source, target, [])
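    # In setx(fstate, tstate) the exit index equals the TMS value (0 or 1),
    # so the event list built below is the TMS bit sequence that clocks the
    # TAP controller from path[0] to path[-1].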
def get_events(self, path):
events = []
for s,d in zip(path[:-1], path[1:]):
for e,x in enumerate(s.exits):
if x == d:
events.append(e)
if len(events) != len(path) - 1:
raise AssertionError("Invalid path")
return events
if __name__ == '__main__':
sm = StateMachine()
#path = sm.find_best_path('exit_2_ir', 'pause_ir')
#print path
path = sm.find_best_path('capture_dr', 'exit_1_ir')
print path
events = sm.get_events(path)
print events |
rexfrommars/havefun | refs/heads/master | python/RawEdoc/_permutations_x.py | 1 | #!/usr/bin/env python3
# TODO: a non-recursive implementation
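# _p() yields length-`left` arrangements from x, a sorted table of
# [character, remaining-count] pairs; tracking counts rather than positions
# makes these multiset permutations, so duplicate characters in s produce
# each distinct result once. When left == 1 exactly one count is nonzero,
# so the first available character closes the branch; hence the break.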
def _p(x, left):
for index in range(len(x)):
if x[index][1] and left > 1:
x[index][1] -= 1
for item in _p(x, left - 1):
yield [x[index][0]] + item
x[index][1] += 1
elif x[index][1]:
yield [x[index][0]]
break
def permutations(s):
x = [[c, s.count(c)] for c in sorted(set(s))]
for p in _p(x, len(s)):
yield ''.join(p)
def test(s):
print('=' * 32)
print(' >> %s' % s)
for p in permutations(s):
print(' -- %s' % (p,) )
if __name__ == '__main__':
test('')
test('a')
test('ab')
test('abc')
|
zordsdavini/qtile | refs/heads/develop | test/layouts/test_verticaltile.py | 2 | # Copyright (c) 2011 Florian Mounier
# Copyright (c) 2012, 2014-2015 Tycho Andersen
# Copyright (c) 2013 Mattias Svala
# Copyright (c) 2013 Craig Barnes
# Copyright (c) 2014 ramnes
# Copyright (c) 2014 Sean Vig
# Copyright (c) 2014 Adi Sieker
# Copyright (c) 2014 Chris Wesseling
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import pytest
import libqtile.config
from libqtile import layout
from test.conftest import no_xinerama
from test.layouts.layout_utils import (
assert_dimensions,
assert_focus_path,
assert_focused,
)
class VerticalTileConfig:
auto_fullscreen = True
main = None
groups = [
libqtile.config.Group("a"),
libqtile.config.Group("b"),
libqtile.config.Group("c"),
libqtile.config.Group("d")
]
layouts = [
layout.VerticalTile(columns=2)
]
floating_layout = libqtile.layout.floating.Floating()
keys = []
mouse = []
screens = []
def verticaltile_config(x):
return no_xinerama(pytest.mark.parametrize("qtile", [VerticalTileConfig], indirect=True)(x))
@verticaltile_config
def test_verticaltile_simple(qtile):
qtile.test_window("one")
assert_dimensions(qtile, 0, 0, 800, 600)
qtile.test_window("two")
assert_dimensions(qtile, 0, 300, 798, 298)
qtile.test_window("three")
assert_dimensions(qtile, 0, 400, 798, 198)
@verticaltile_config
def test_verticaltile_maximize(qtile):
qtile.test_window("one")
assert_dimensions(qtile, 0, 0, 800, 600)
qtile.test_window("two")
assert_dimensions(qtile, 0, 300, 798, 298)
# Maximize the bottom layout, taking 75% of space
qtile.c.layout.maximize()
assert_dimensions(qtile, 0, 150, 798, 448)
@verticaltile_config
def test_verticaltile_window_focus_cycle(qtile):
# setup 3 tiled and two floating clients
qtile.test_window("one")
qtile.test_window("two")
qtile.test_window("float1")
qtile.c.window.toggle_floating()
qtile.test_window("float2")
qtile.c.window.toggle_floating()
qtile.test_window("three")
# test preconditions
assert qtile.c.layout.info()['clients'] == ['one', 'two', 'three']
# last added window has focus
assert_focused(qtile, "three")
# assert window focus cycle, according to order in layout
assert_focus_path(qtile, 'float1', 'float2', 'one', 'two', 'three')
|
ecliptik/ansible-modules-core | refs/heads/devel | cloud/rackspace/rax_cbs_attachments.py | 157 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# This is a DOCUMENTATION stub specific to this module, it extends
# a documentation fragment located in ansible.utils.module_docs_fragments
DOCUMENTATION = '''
---
module: rax_cbs_attachments
short_description: Manipulate Rackspace Cloud Block Storage Volume Attachments
description:
- Manipulate Rackspace Cloud Block Storage Volume Attachments
version_added: 1.6
options:
device:
description:
- The device path to attach the volume to, e.g. /dev/xvde
default: null
required: true
volume:
description:
- Name or id of the volume to attach/detach
default: null
required: true
server:
description:
- Name or id of the server to attach/detach
default: null
required: true
state:
description:
- Indicate desired state of the resource
choices:
- present
- absent
default: present
required: true
wait:
description:
- wait for the volume to be in 'in-use'/'available' state before returning
default: "no"
choices:
- "yes"
- "no"
wait_timeout:
description:
- how long before wait gives up, in seconds
default: 300
author:
- "Christopher H. Laco (@claco)"
- "Matt Martz (@sivel)"
extends_documentation_fragment: rackspace.openstack
'''
EXAMPLES = '''
- name: Attach a Block Storage Volume
gather_facts: False
hosts: local
connection: local
tasks:
- name: Storage volume attach request
local_action:
module: rax_cbs_attachments
credentials: ~/.raxpub
volume: my-volume
server: my-server
device: /dev/xvdd
region: DFW
wait: yes
state: present
register: my_volume
'''
try:
import pyrax
HAS_PYRAX = True
except ImportError:
HAS_PYRAX = False
def cloud_block_storage_attachments(module, state, volume, server, device,
wait, wait_timeout):
cbs = pyrax.cloud_blockstorage
cs = pyrax.cloudservers
if cbs is None or cs is None:
module.fail_json(msg='Failed to instantiate client. This '
'typically indicates an invalid region or an '
'incorrectly capitalized region name.')
changed = False
instance = {}
volume = rax_find_volume(module, pyrax, volume)
if not volume:
module.fail_json(msg='No matching storage volumes were found')
if state == 'present':
server = rax_find_server(module, pyrax, server)
if (volume.attachments and
volume.attachments[0]['server_id'] == server.id):
changed = False
elif volume.attachments:
module.fail_json(msg='Volume is attached to another server')
else:
try:
volume.attach_to_instance(server, mountpoint=device)
changed = True
except Exception, e:
module.fail_json(msg='%s' % e.message)
volume.get()
for key, value in vars(volume).iteritems():
if (isinstance(value, NON_CALLABLES) and
not key.startswith('_')):
instance[key] = value
result = dict(changed=changed)
if volume.status == 'error':
result['msg'] = '%s failed to build' % volume.id
elif wait:
attempts = wait_timeout / 5
pyrax.utils.wait_until(volume, 'status', 'in-use',
interval=5, attempts=attempts)
volume.get()
result['volume'] = rax_to_dict(volume)
if 'msg' in result:
module.fail_json(**result)
else:
module.exit_json(**result)
elif state == 'absent':
server = rax_find_server(module, pyrax, server)
if (volume.attachments and
volume.attachments[0]['server_id'] == server.id):
try:
volume.detach()
if wait:
pyrax.utils.wait_until(volume, 'status', 'available',
interval=3, attempts=0,
verbose=False)
changed = True
except Exception, e:
module.fail_json(msg='%s' % e.message)
volume.get()
changed = True
elif volume.attachments:
module.fail_json(msg='Volume is attached to another server')
result = dict(changed=changed, volume=rax_to_dict(volume))
if volume.status == 'error':
result['msg'] = '%s failed to build' % volume.id
if 'msg' in result:
module.fail_json(**result)
else:
module.exit_json(**result)
module.exit_json(changed=changed, volume=instance)
def main():
argument_spec = rax_argument_spec()
argument_spec.update(
dict(
device=dict(required=True),
volume=dict(required=True),
server=dict(required=True),
state=dict(default='present', choices=['present', 'absent']),
wait=dict(type='bool', default=False),
wait_timeout=dict(type='int', default=300)
)
)
module = AnsibleModule(
argument_spec=argument_spec,
required_together=rax_required_together()
)
if not HAS_PYRAX:
module.fail_json(msg='pyrax is required for this module')
device = module.params.get('device')
volume = module.params.get('volume')
server = module.params.get('server')
state = module.params.get('state')
wait = module.params.get('wait')
wait_timeout = module.params.get('wait_timeout')
setup_rax_module(module, pyrax)
cloud_block_storage_attachments(module, state, volume, server, device,
wait, wait_timeout)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.rax import *
### invoke the module
main()
|
broferek/ansible | refs/heads/devel | test/units/modules/network/check_point/test_checkpoint_host.py | 40 | # Copyright (c) 2018 Red Hat
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import absolute_import
import pytest
from units.modules.utils import set_module_args, exit_json, fail_json, AnsibleFailJson, AnsibleExitJson
from ansible.module_utils import basic
from ansible.modules.network.check_point import checkpoint_host
OBJECT = {'name': 'foo', 'ipv4-address': '192.168.0.15'}
CREATE_PAYLOAD = {'name': 'foo', 'ip_address': '192.168.0.15'}
UPDATE_PAYLOAD = {'name': 'foo', 'ip_address': '192.168.0.16'}
DELETE_PAYLOAD = {'name': 'foo', 'state': 'absent'}
class TestCheckpointHost(object):
module = checkpoint_host
@pytest.fixture(autouse=True)
def module_mock(self, mocker):
return mocker.patch.multiple(basic.AnsibleModule, exit_json=exit_json, fail_json=fail_json)
@pytest.fixture
def connection_mock(self, mocker):
connection_class_mock = mocker.patch('ansible.modules.network.check_point.checkpoint_host.Connection')
return connection_class_mock.return_value
@pytest.fixture
def get_host_200(self, mocker):
mock_function = mocker.patch('ansible.modules.network.check_point.checkpoint_host.get_host')
mock_function.return_value = (200, OBJECT)
return mock_function.return_value
@pytest.fixture
def get_host_404(self, mocker):
mock_function = mocker.patch('ansible.modules.network.check_point.checkpoint_host.get_host')
mock_function.return_value = (404, 'Object not found')
return mock_function.return_value
def test_create(self, get_host_404, connection_mock):
connection_mock.send_request.return_value = (200, OBJECT)
result = self._run_module(CREATE_PAYLOAD)
assert result['changed']
assert 'checkpoint_hosts' in result
def test_create_idempotent(self, get_host_200, connection_mock):
connection_mock.send_request.return_value = (200, OBJECT)
result = self._run_module(CREATE_PAYLOAD)
assert not result['changed']
def test_update(self, get_host_200, connection_mock):
connection_mock.send_request.return_value = (200, OBJECT)
result = self._run_module(UPDATE_PAYLOAD)
assert result['changed']
def test_delete(self, get_host_200, connection_mock):
connection_mock.send_request.return_value = (200, OBJECT)
result = self._run_module(DELETE_PAYLOAD)
assert result['changed']
def test_delete_idempotent(self, get_host_404, connection_mock):
connection_mock.send_request.return_value = (200, OBJECT)
result = self._run_module(DELETE_PAYLOAD)
assert not result['changed']
def _run_module(self, module_args):
set_module_args(module_args)
with pytest.raises(AnsibleExitJson) as ex:
self.module.main()
return ex.value.args[0]
def _run_module_with_fail_json(self, module_args):
set_module_args(module_args)
with pytest.raises(AnsibleFailJson) as exc:
self.module.main()
result = exc.value.args[0]
return result
|
nbborlongan/geonode | refs/heads/master | geonode/people/utils.py | 33 | #########################################################################
#
# Copyright (C) 2012 OpenPlans
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
from django.contrib.auth import get_user_model
from geonode import GeoNodeException
def get_default_user():
"""Create a default user
"""
superusers = get_user_model().objects.filter(
is_superuser=True).order_by('id')
if superusers.count() > 0:
# Return the first created superuser
return superusers[0]
else:
raise GeoNodeException('You must have an admin account configured '
'before importing data. '
'Try: django-admin.py createsuperuser')
def get_valid_user(user=None):
"""Gets the default user or creates it if it does not exist
"""
if user is None:
theuser = get_default_user()
elif isinstance(user, basestring):
theuser = get_user_model().objects.get(username=user)
elif user == user.get_anonymous():
raise GeoNodeException('The user uploading files must not '
'be anonymous')
else:
theuser = user
# FIXME: Pass a user in the unit tests that is not yet saved ;)
assert isinstance(theuser, get_user_model())
return theuser
def format_address(street=None, zipcode=None, city=None, area=None, country=None):
if country is not None and country == "USA":
address = ""
if city and area:
if street:
address += street+", "
address += city+", "+area
if zipcode:
address += " "+zipcode
elif (not city) and area:
if street:
address += street+", "
address += area
if zipcode:
address += " "+zipcode
elif city and (not area):
if street:
address += street+", "
address += city
if zipcode:
address += " "+zipcode
else:
if street:
address += ", "+street
if zipcode:
address += " "+zipcode
if address:
address += ", United States"
else:
address += "United States"
return address
else:
address = []
if street:
address.append(street)
if zipcode:
address.append(zipcode)
if city:
address.append(city)
if area:
address.append(area)
address.append(country)
return " ".join(address)
|
awkspace/ansible | refs/heads/devel | lib/ansible/modules/storage/netapp/na_ontap_export_policy.py | 59 | #!/usr/bin/python
# (c) 2018, NetApp, Inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = '''
module: na_ontap_export_policy
short_description: NetApp ONTAP manage export-policy
extends_documentation_fragment:
- netapp.na_ontap
version_added: '2.6'
author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com>
description:
    - Create, destroy, or rename export-policies on ONTAP
options:
state:
description:
- Whether the specified export policy should exist or not.
choices: ['present', 'absent']
default: present
name:
description:
- The name of the export-policy to manage.
required: true
from_name:
description:
- The name of the export-policy to be renamed.
version_added: '2.7'
vserver:
description:
- Name of the vserver to use.
'''
EXAMPLES = """
- name: Create Export Policy
na_ontap_export_policy:
state: present
name: ansiblePolicyName
vserver: vs_hack
hostname: "{{ netapp_hostname }}"
username: "{{ netapp_username }}"
password: "{{ netapp_password }}"
- name: Rename Export Policy
na_ontap_export_policy:
    state: present
from_name: ansiblePolicyName
vserver: vs_hack
name: newPolicyName
hostname: "{{ netapp_hostname }}"
username: "{{ netapp_username }}"
password: "{{ netapp_password }}"
- name: Delete Export Policy
na_ontap_export_policy:
state: absent
name: ansiblePolicyName
vserver: vs_hack
hostname: "{{ netapp_hostname }}"
username: "{{ netapp_username }}"
password: "{{ netapp_password }}"
"""
RETURN = """
"""
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
import ansible.module_utils.netapp as netapp_utils
HAS_NETAPP_LIB = netapp_utils.has_netapp_lib()
class NetAppONTAPExportPolicy(object):
"""
Class with export policy methods
"""
def __init__(self):
self.argument_spec = netapp_utils.na_ontap_host_argument_spec()
self.argument_spec.update(dict(
state=dict(required=False, type='str', choices=['present', 'absent'], default='present'),
name=dict(required=True, type='str'),
from_name=dict(required=False, type='str', default=None),
vserver=dict(required=False, type='str')
))
self.module = AnsibleModule(
argument_spec=self.argument_spec,
required_if=[
('state', 'present', ['vserver'])
],
supports_check_mode=True
)
parameters = self.module.params
# set up state variables
self.state = parameters['state']
self.name = parameters['name']
self.from_name = parameters['from_name']
self.vserver = parameters['vserver']
if HAS_NETAPP_LIB is False:
self.module.fail_json(msg="the python NetApp-Lib module is required")
else:
self.server = netapp_utils.setup_na_ontap_zapi(module=self.module, vserver=self.vserver)
def get_export_policy(self, name=None):
"""
Return details about the export-policy
:param:
name : Name of the export-policy
:return: Details about the export-policy. None if not found.
:rtype: dict
"""
if name is None:
name = self.name
export_policy_iter = netapp_utils.zapi.NaElement('export-policy-get-iter')
export_policy_info = netapp_utils.zapi.NaElement('export-policy-info')
export_policy_info.add_new_child('policy-name', name)
query = netapp_utils.zapi.NaElement('query')
query.add_child_elem(export_policy_info)
export_policy_iter.add_child_elem(query)
result = self.server.invoke_successfully(export_policy_iter, True)
return_value = None
# check if query returns the expected export-policy
if result.get_child_by_name('num-records') and \
int(result.get_child_content('num-records')) == 1:
export_policy = result.get_child_by_name('attributes-list').get_child_by_name('export-policy-info').get_child_by_name('policy-name')
return_value = {
'policy-name': export_policy
}
return return_value
def create_export_policy(self):
"""
Creates an export policy
"""
export_policy_create = netapp_utils.zapi.NaElement.create_node_with_children(
'export-policy-create', **{'policy-name': self.name})
try:
self.server.invoke_successfully(export_policy_create,
enable_tunneling=True)
except netapp_utils.zapi.NaApiError as error:
self.module.fail_json(msg='Error creating export-policy %s: %s'
% (self.name, to_native(error)),
exception=traceback.format_exc())
def delete_export_policy(self):
"""
Delete export-policy
"""
export_policy_delete = netapp_utils.zapi.NaElement.create_node_with_children(
'export-policy-destroy', **{'policy-name': self.name, })
try:
self.server.invoke_successfully(export_policy_delete,
enable_tunneling=True)
except netapp_utils.zapi.NaApiError as error:
self.module.fail_json(msg='Error deleting export-policy %s: %s'
% (self.name,
to_native(error)), exception=traceback.format_exc())
def rename_export_policy(self):
"""
Rename the export-policy.
"""
export_policy_rename = netapp_utils.zapi.NaElement.create_node_with_children(
'export-policy-rename', **{'policy-name': self.from_name,
'new-policy-name': self.name})
try:
self.server.invoke_successfully(export_policy_rename,
enable_tunneling=True)
except netapp_utils.zapi.NaApiError as error:
            self.module.fail_json(msg='Error renaming export-policy %s: %s'
% (self.name, to_native(error)),
exception=traceback.format_exc())
def apply(self):
"""
Apply action to export-policy
"""
changed = False
export_policy_exists = False
netapp_utils.ems_log_event("na_ontap_export_policy", self.server)
rename_flag = False
export_policy_details = self.get_export_policy()
if export_policy_details:
export_policy_exists = True
if self.state == 'absent': # delete
changed = True
else:
if self.state == 'present': # create or rename
if self.from_name is not None:
if self.get_export_policy(self.from_name):
changed = True
rename_flag = True
else:
                        self.module.fail_json(msg='Error renaming export-policy %s: does not exist' % self.from_name)
else: # create
changed = True
if changed:
if self.module.check_mode:
pass
else:
if self.state == 'present': # execute create or rename_export_policy
if rename_flag:
self.rename_export_policy()
else:
self.create_export_policy()
elif self.state == 'absent': # execute delete
self.delete_export_policy()
self.module.exit_json(changed=changed)
def main():
"""
Execute action
"""
export_policy = NetAppONTAPExportPolicy()
export_policy.apply()
if __name__ == '__main__':
main()
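# Illustrative ad-hoc invocation of this module (a sketch only; the host,
# credentials, and policy names below are placeholders, not values defined
# in this file):
#   ansible localhost -m na_ontap_export_policy \
#     -a "state=present name=policy1 vserver=vs1 hostname=na1 username=admin password=secret"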
|
Yannig/ansible | refs/heads/devel | lib/ansible/module_utils/crypto.py | 19 | # -*- coding: utf-8 -*-
#
# (c) 2016, Yanis Guenane <yanis+ansible@guenane.org>
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
try:
from OpenSSL import crypto
except ImportError:
# An error will be raised in the calling class to let the end
# user know that OpenSSL couldn't be found.
pass
import abc
import errno
import hashlib
import os
from ansible.module_utils import six
from ansible.module_utils._text import to_bytes
class OpenSSLObjectError(Exception):
pass
def get_fingerprint(path, passphrase=None):
"""Generate the fingerprint of the public key. """
fingerprint = {}
privatekey = load_privatekey(path, passphrase)
try:
publickey = crypto.dump_publickey(crypto.FILETYPE_ASN1, privatekey)
for algo in hashlib.algorithms:
f = getattr(hashlib, algo)
pubkey_digest = f(publickey).hexdigest()
fingerprint[algo] = ':'.join(pubkey_digest[i:i + 2] for i in range(0, len(pubkey_digest), 2))
except AttributeError:
# If PyOpenSSL < 16.0 crypto.dump_publickey() will fail.
# By doing this we prevent the code from raising an error
# yet we return no value in the fingerprint hash.
pass
return fingerprint
def load_privatekey(path, passphrase=None):
"""Load the specified OpenSSL private key."""
try:
if passphrase:
privatekey = crypto.load_privatekey(crypto.FILETYPE_PEM,
open(path, 'rb').read(),
to_bytes(passphrase))
else:
privatekey = crypto.load_privatekey(crypto.FILETYPE_PEM,
open(path, 'rb').read())
return privatekey
except (IOError, OSError) as exc:
raise OpenSSLObjectError(exc)
def load_certificate(path):
"""Load the specified certificate."""
try:
cert_content = open(path, 'rb').read()
cert = crypto.load_certificate(crypto.FILETYPE_PEM, cert_content)
return cert
except (IOError, OSError) as exc:
raise OpenSSLObjectError(exc)
def load_certificate_request(path):
"""Load the specified certificate signing request."""
try:
csr_content = open(path, 'rb').read()
csr = crypto.load_certificate_request(crypto.FILETYPE_PEM, csr_content)
return csr
except (IOError, OSError) as exc:
raise OpenSSLObjectError(exc)
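# Illustrative use of the loaders above (a sketch; the key path and the exact
# digest values are assumptions for demonstration):
#   privatekey = load_privatekey('/etc/ssl/private/server.key')
#   fingerprint = get_fingerprint('/etc/ssl/private/server.key')
#   # fingerprint maps each hashlib algorithm name to a colon-separated hex
#   # digest of the DER-encoded public key, e.g. {'sha256': '12:34:...'}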
@six.add_metaclass(abc.ABCMeta)
class OpenSSLObject(object):
def __init__(self, path, state, force, check_mode):
self.path = path
self.state = state
self.force = force
self.name = os.path.basename(path)
self.changed = False
self.check_mode = check_mode
def check(self, module, perms_required=True):
"""Ensure the resource is in its desired state."""
def _check_state():
return os.path.exists(self.path)
def _check_perms(module):
file_args = module.load_file_common_arguments(module.params)
return not module.set_fs_attributes_if_different(file_args, False)
if not perms_required:
return _check_state()
return _check_state() and _check_perms(module)
@abc.abstractmethod
def dump(self):
"""Serialize the object into a dictionary."""
pass
@abc.abstractmethod
def generate(self):
"""Generate the resource."""
pass
def remove(self):
"""Remove the resource from the filesystem."""
try:
os.remove(self.path)
self.changed = True
except OSError as exc:
if exc.errno != errno.ENOENT:
raise OpenSSLObjectError(exc)
else:
pass
|
bbmt-bbmt/MosaicoUtil | refs/heads/master | util.py | 1 | #!/usr/bin/python3
# -*- coding: utf-8 -*-
from bs4 import BeautifulSoup
from bs4 import NavigableString
import json
import re
import sys
from tkinter import messagebox
import urllib.parse
import psutil
import os
def modif_text(text):
    # Must run before the whitespace handling, otherwise the "-e :" sequence
    # could end up with a regular space instead of a non-breaking one.
    # UTF-8 escape codes must be used here, otherwise str(soup) would escape the &
    # Rewrite feminized suffixes (-e, -e-s) with non-breaking hyphens
result = re.sub("-e-s|-e", lambda m: "\u2011e" if m.group(0) == "-e" else "\u2011e\u2011s ", text)
    # Collapse repeated whitespace
result = re.sub("( |\u00a0| )+", " ", result)
    # Normalize spacing around punctuation
result = re.sub("[\u00a0 ]?([:!\?])[\u00a0 ]?", "\u00a0\\1 ", result)
    # Replace straight quotes with French guillemets
result = re.sub('"[\u00a0 ]?((.|\s)*?)[\u00a0 ]?"', "\u00ab\u00a0\\1\u00a0\u00bb", result)
return result
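# Illustrative inputs/outputs for modif_text (assumed examples, not taken from
# the original code):
#   modif_text("motivé-e-s")   -> "motivé\u2011e\u2011s " (non-breaking hyphens)
#   modif_text("Attention !")  -> "Attention\u00a0! " (no-break space before "!")
#   modif_text('" citation "') -> "\u00ab\u00a0citation\u00a0\u00bb" (guillemets)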
def parse_json(json_dict, img_path):
# json_file = open("14112016.json", "r", encoding="utf-8")
# json_dict = json.load(json_file)
# result = parse_json(json_dict)
json_str = json.dumps(json_dict, indent=True)
json_list = json_str.split("\n")
i=0
for ligne in json_list:
try:
key, value = ligne.split(":", maxsplit=1)
except ValueError:
i=i+1
continue
        # the [-1] check strips the trailing comma at the end of the line
if value[-1] == ",":
value = value[:-1]
value = value.strip('" ')
value = value.replace('\\"','"')
if "text" in key[-5:].lower():
soup = BeautifulSoup(value, "lxml")
final_value = soup.decode(formatter=None)
final_value = final_value.replace('"','\\"')
texts = soup.find_all(string=True)
for t in texts:
if type(t) == NavigableString and t.parent.name != "style":
final_value = final_value.replace(t.strip(), modif_text(t).strip())
                    # the strip() here is important
# json_list[i]=json_list[i].replace(t.encode(formatter=None).strip(), modif_text(t).strip())
json_list[i]= key + ': "' + final_value +'"'
if "}" not in json_list[i+1]:
json_list[i] = json_list[i] +','
if '"src"' in key and img_path != "":
if img_path[-1:] != "/":
img_path = img_path + "/"
            # unquote twice because of double-encoded sequences like %2520
src = urllib.parse.unquote(value)
src = urllib.parse.unquote(src)
reg_img_name = re.search("/.*/(.*\.[\w]{3})",src)
try:
img_name = reg_img_name.group(1)
except AttributeError:
pass
else:
                # if "mosaico" is in the file name, it is not an image we process
if "mosaico" not in img_name.lower():
json_list[i] = key + ': "' + img_path + urllib.parse.quote(img_name) +'"'
if "}" not in json_list[i+1]:
json_list[i] = json_list[i] +','
i=i+1
return "\n".join(json_list)
def modif_balise(html):
    # strip the data-mce-* attributes, which serve no purpose
html = re.sub(" data-mce-.*?=[\"'].*?[\"']","",html)
    # tag closing is handled automatically by the lxml parser
    # convert <br> to <br/>
    # html = html.replace("<br>", "<br/>")
    # convert <img ...> to <img ... />
    # html = re.sub("(<img.*?)>", r"\1/>", html)
    # convert <meta ...> the same way
    # html = re.sub("(<meta.*?)>", r"\1/>", html)
    # convert <hr ...> the same way
    # html = re.sub("(<hr.*?)>", r"\1/>", html)
    # remove the footer block
html = re.sub("<!-- footerBlock -->(.|\s)*?<!-- /footerBlock -->", "", html)
return html
def verif_html(html):
soup = BeautifulSoup(html, "lxml")
imgs = soup.find_all("img")
alt_text = 0
href_img = 0
for img in imgs:
try:
if img["alt"] == "":
alt_text = alt_text + 1
except KeyError:
alt_text = alt_text + 1
if img.parent.name != "a":
href_img = href_img + 1
    message = "Attention il y a:\n"
    if alt_text != 0:
        message = message + " %s image(s) qui n'ont pas de texte alternatif.\n" % alt_text
    if href_img != 0:
        message = message + " %s image(s) qui n'ont pas de lien" % href_img
    if alt_text != 0 or href_img != 0:
        # show the message built above instead of a hard-coded duplicate
        messagebox.showinfo(message=message, title="Avertissement")
return
def firefox_running():
    firefox_on = False
    # process_iter() yields psutil.Process objects, not raw pids
    for proc in psutil.process_iter():
        if "firefox" in proc.name():
            firefox_on = True
            break
    return firefox_on
def firefox_path():
if "nt" in os.name:
return win_firefox_path()
elif "posix" in os.name:
return "firefox"
else:
        raise Warning("unable to determine the OS")
def win_firefox_path():
win32_firefox_path = "C:\\Program Files (x86)" + "\\Mozilla Firefox\\firefox.exe"
if not os.path.isfile(win32_firefox_path):
win32_firefox_path = ""
win64_firefox_path = "C:\\Program Files" + "\\Mozilla Firefox\\firefox.exe"
if not os.path.isfile(win64_firefox_path):
win64_firefox_path = ""
    if win64_firefox_path == "" and win32_firefox_path == "":
        raise Warning("no valid path found")
firefox_path = win32_firefox_path or win64_firefox_path
return firefox_path
def main(args):
pass
if __name__ == "__main__":
main(sys.argv)
|
ForgottenKahz/CloudOPC | refs/heads/master | venv/Lib/encodings/iso8859_1.py | 266 | """ Python Character Mapping Codec iso8859_1 generated from 'MAPPINGS/ISO8859/8859-1.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_table)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='iso8859-1',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
### Decoding Table
decoding_table = (
'\x00' # 0x00 -> NULL
'\x01' # 0x01 -> START OF HEADING
'\x02' # 0x02 -> START OF TEXT
'\x03' # 0x03 -> END OF TEXT
'\x04' # 0x04 -> END OF TRANSMISSION
'\x05' # 0x05 -> ENQUIRY
'\x06' # 0x06 -> ACKNOWLEDGE
'\x07' # 0x07 -> BELL
'\x08' # 0x08 -> BACKSPACE
'\t' # 0x09 -> HORIZONTAL TABULATION
'\n' # 0x0A -> LINE FEED
'\x0b' # 0x0B -> VERTICAL TABULATION
'\x0c' # 0x0C -> FORM FEED
'\r' # 0x0D -> CARRIAGE RETURN
'\x0e' # 0x0E -> SHIFT OUT
'\x0f' # 0x0F -> SHIFT IN
'\x10' # 0x10 -> DATA LINK ESCAPE
'\x11' # 0x11 -> DEVICE CONTROL ONE
'\x12' # 0x12 -> DEVICE CONTROL TWO
'\x13' # 0x13 -> DEVICE CONTROL THREE
'\x14' # 0x14 -> DEVICE CONTROL FOUR
'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
'\x16' # 0x16 -> SYNCHRONOUS IDLE
'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
'\x18' # 0x18 -> CANCEL
'\x19' # 0x19 -> END OF MEDIUM
'\x1a' # 0x1A -> SUBSTITUTE
'\x1b' # 0x1B -> ESCAPE
'\x1c' # 0x1C -> FILE SEPARATOR
'\x1d' # 0x1D -> GROUP SEPARATOR
'\x1e' # 0x1E -> RECORD SEPARATOR
'\x1f' # 0x1F -> UNIT SEPARATOR
' ' # 0x20 -> SPACE
'!' # 0x21 -> EXCLAMATION MARK
'"' # 0x22 -> QUOTATION MARK
'#' # 0x23 -> NUMBER SIGN
'$' # 0x24 -> DOLLAR SIGN
'%' # 0x25 -> PERCENT SIGN
'&' # 0x26 -> AMPERSAND
"'" # 0x27 -> APOSTROPHE
'(' # 0x28 -> LEFT PARENTHESIS
')' # 0x29 -> RIGHT PARENTHESIS
'*' # 0x2A -> ASTERISK
'+' # 0x2B -> PLUS SIGN
',' # 0x2C -> COMMA
'-' # 0x2D -> HYPHEN-MINUS
'.' # 0x2E -> FULL STOP
'/' # 0x2F -> SOLIDUS
'0' # 0x30 -> DIGIT ZERO
'1' # 0x31 -> DIGIT ONE
'2' # 0x32 -> DIGIT TWO
'3' # 0x33 -> DIGIT THREE
'4' # 0x34 -> DIGIT FOUR
'5' # 0x35 -> DIGIT FIVE
'6' # 0x36 -> DIGIT SIX
'7' # 0x37 -> DIGIT SEVEN
'8' # 0x38 -> DIGIT EIGHT
'9' # 0x39 -> DIGIT NINE
':' # 0x3A -> COLON
';' # 0x3B -> SEMICOLON
'<' # 0x3C -> LESS-THAN SIGN
'=' # 0x3D -> EQUALS SIGN
'>' # 0x3E -> GREATER-THAN SIGN
'?' # 0x3F -> QUESTION MARK
'@' # 0x40 -> COMMERCIAL AT
'A' # 0x41 -> LATIN CAPITAL LETTER A
'B' # 0x42 -> LATIN CAPITAL LETTER B
'C' # 0x43 -> LATIN CAPITAL LETTER C
'D' # 0x44 -> LATIN CAPITAL LETTER D
'E' # 0x45 -> LATIN CAPITAL LETTER E
'F' # 0x46 -> LATIN CAPITAL LETTER F
'G' # 0x47 -> LATIN CAPITAL LETTER G
'H' # 0x48 -> LATIN CAPITAL LETTER H
'I' # 0x49 -> LATIN CAPITAL LETTER I
'J' # 0x4A -> LATIN CAPITAL LETTER J
'K' # 0x4B -> LATIN CAPITAL LETTER K
'L' # 0x4C -> LATIN CAPITAL LETTER L
'M' # 0x4D -> LATIN CAPITAL LETTER M
'N' # 0x4E -> LATIN CAPITAL LETTER N
'O' # 0x4F -> LATIN CAPITAL LETTER O
'P' # 0x50 -> LATIN CAPITAL LETTER P
'Q' # 0x51 -> LATIN CAPITAL LETTER Q
'R' # 0x52 -> LATIN CAPITAL LETTER R
'S' # 0x53 -> LATIN CAPITAL LETTER S
'T' # 0x54 -> LATIN CAPITAL LETTER T
'U' # 0x55 -> LATIN CAPITAL LETTER U
'V' # 0x56 -> LATIN CAPITAL LETTER V
'W' # 0x57 -> LATIN CAPITAL LETTER W
'X' # 0x58 -> LATIN CAPITAL LETTER X
'Y' # 0x59 -> LATIN CAPITAL LETTER Y
'Z' # 0x5A -> LATIN CAPITAL LETTER Z
'[' # 0x5B -> LEFT SQUARE BRACKET
'\\' # 0x5C -> REVERSE SOLIDUS
']' # 0x5D -> RIGHT SQUARE BRACKET
'^' # 0x5E -> CIRCUMFLEX ACCENT
'_' # 0x5F -> LOW LINE
'`' # 0x60 -> GRAVE ACCENT
'a' # 0x61 -> LATIN SMALL LETTER A
'b' # 0x62 -> LATIN SMALL LETTER B
'c' # 0x63 -> LATIN SMALL LETTER C
'd' # 0x64 -> LATIN SMALL LETTER D
'e' # 0x65 -> LATIN SMALL LETTER E
'f' # 0x66 -> LATIN SMALL LETTER F
'g' # 0x67 -> LATIN SMALL LETTER G
'h' # 0x68 -> LATIN SMALL LETTER H
'i' # 0x69 -> LATIN SMALL LETTER I
'j' # 0x6A -> LATIN SMALL LETTER J
'k' # 0x6B -> LATIN SMALL LETTER K
'l' # 0x6C -> LATIN SMALL LETTER L
'm' # 0x6D -> LATIN SMALL LETTER M
'n' # 0x6E -> LATIN SMALL LETTER N
'o' # 0x6F -> LATIN SMALL LETTER O
'p' # 0x70 -> LATIN SMALL LETTER P
'q' # 0x71 -> LATIN SMALL LETTER Q
'r' # 0x72 -> LATIN SMALL LETTER R
's' # 0x73 -> LATIN SMALL LETTER S
't' # 0x74 -> LATIN SMALL LETTER T
'u' # 0x75 -> LATIN SMALL LETTER U
'v' # 0x76 -> LATIN SMALL LETTER V
'w' # 0x77 -> LATIN SMALL LETTER W
'x' # 0x78 -> LATIN SMALL LETTER X
'y' # 0x79 -> LATIN SMALL LETTER Y
'z' # 0x7A -> LATIN SMALL LETTER Z
'{' # 0x7B -> LEFT CURLY BRACKET
'|' # 0x7C -> VERTICAL LINE
'}' # 0x7D -> RIGHT CURLY BRACKET
'~' # 0x7E -> TILDE
'\x7f' # 0x7F -> DELETE
'\x80' # 0x80 -> <control>
'\x81' # 0x81 -> <control>
'\x82' # 0x82 -> <control>
'\x83' # 0x83 -> <control>
'\x84' # 0x84 -> <control>
'\x85' # 0x85 -> <control>
'\x86' # 0x86 -> <control>
'\x87' # 0x87 -> <control>
'\x88' # 0x88 -> <control>
'\x89' # 0x89 -> <control>
'\x8a' # 0x8A -> <control>
'\x8b' # 0x8B -> <control>
'\x8c' # 0x8C -> <control>
'\x8d' # 0x8D -> <control>
'\x8e' # 0x8E -> <control>
'\x8f' # 0x8F -> <control>
'\x90' # 0x90 -> <control>
'\x91' # 0x91 -> <control>
'\x92' # 0x92 -> <control>
'\x93' # 0x93 -> <control>
'\x94' # 0x94 -> <control>
'\x95' # 0x95 -> <control>
'\x96' # 0x96 -> <control>
'\x97' # 0x97 -> <control>
'\x98' # 0x98 -> <control>
'\x99' # 0x99 -> <control>
'\x9a' # 0x9A -> <control>
'\x9b' # 0x9B -> <control>
'\x9c' # 0x9C -> <control>
'\x9d' # 0x9D -> <control>
'\x9e' # 0x9E -> <control>
'\x9f' # 0x9F -> <control>
'\xa0' # 0xA0 -> NO-BREAK SPACE
'\xa1' # 0xA1 -> INVERTED EXCLAMATION MARK
'\xa2' # 0xA2 -> CENT SIGN
'\xa3' # 0xA3 -> POUND SIGN
'\xa4' # 0xA4 -> CURRENCY SIGN
'\xa5' # 0xA5 -> YEN SIGN
'\xa6' # 0xA6 -> BROKEN BAR
'\xa7' # 0xA7 -> SECTION SIGN
'\xa8' # 0xA8 -> DIAERESIS
'\xa9' # 0xA9 -> COPYRIGHT SIGN
'\xaa' # 0xAA -> FEMININE ORDINAL INDICATOR
'\xab' # 0xAB -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
'\xac' # 0xAC -> NOT SIGN
'\xad' # 0xAD -> SOFT HYPHEN
'\xae' # 0xAE -> REGISTERED SIGN
'\xaf' # 0xAF -> MACRON
'\xb0' # 0xB0 -> DEGREE SIGN
'\xb1' # 0xB1 -> PLUS-MINUS SIGN
'\xb2' # 0xB2 -> SUPERSCRIPT TWO
'\xb3' # 0xB3 -> SUPERSCRIPT THREE
'\xb4' # 0xB4 -> ACUTE ACCENT
'\xb5' # 0xB5 -> MICRO SIGN
'\xb6' # 0xB6 -> PILCROW SIGN
'\xb7' # 0xB7 -> MIDDLE DOT
'\xb8' # 0xB8 -> CEDILLA
'\xb9' # 0xB9 -> SUPERSCRIPT ONE
'\xba' # 0xBA -> MASCULINE ORDINAL INDICATOR
'\xbb' # 0xBB -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
'\xbc' # 0xBC -> VULGAR FRACTION ONE QUARTER
'\xbd' # 0xBD -> VULGAR FRACTION ONE HALF
'\xbe' # 0xBE -> VULGAR FRACTION THREE QUARTERS
'\xbf' # 0xBF -> INVERTED QUESTION MARK
'\xc0' # 0xC0 -> LATIN CAPITAL LETTER A WITH GRAVE
'\xc1' # 0xC1 -> LATIN CAPITAL LETTER A WITH ACUTE
'\xc2' # 0xC2 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
'\xc3' # 0xC3 -> LATIN CAPITAL LETTER A WITH TILDE
'\xc4' # 0xC4 -> LATIN CAPITAL LETTER A WITH DIAERESIS
'\xc5' # 0xC5 -> LATIN CAPITAL LETTER A WITH RING ABOVE
'\xc6' # 0xC6 -> LATIN CAPITAL LETTER AE
'\xc7' # 0xC7 -> LATIN CAPITAL LETTER C WITH CEDILLA
'\xc8' # 0xC8 -> LATIN CAPITAL LETTER E WITH GRAVE
'\xc9' # 0xC9 -> LATIN CAPITAL LETTER E WITH ACUTE
'\xca' # 0xCA -> LATIN CAPITAL LETTER E WITH CIRCUMFLEX
'\xcb' # 0xCB -> LATIN CAPITAL LETTER E WITH DIAERESIS
'\xcc' # 0xCC -> LATIN CAPITAL LETTER I WITH GRAVE
'\xcd' # 0xCD -> LATIN CAPITAL LETTER I WITH ACUTE
'\xce' # 0xCE -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
'\xcf' # 0xCF -> LATIN CAPITAL LETTER I WITH DIAERESIS
'\xd0' # 0xD0 -> LATIN CAPITAL LETTER ETH (Icelandic)
'\xd1' # 0xD1 -> LATIN CAPITAL LETTER N WITH TILDE
'\xd2' # 0xD2 -> LATIN CAPITAL LETTER O WITH GRAVE
'\xd3' # 0xD3 -> LATIN CAPITAL LETTER O WITH ACUTE
'\xd4' # 0xD4 -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
'\xd5' # 0xD5 -> LATIN CAPITAL LETTER O WITH TILDE
'\xd6' # 0xD6 -> LATIN CAPITAL LETTER O WITH DIAERESIS
'\xd7' # 0xD7 -> MULTIPLICATION SIGN
'\xd8' # 0xD8 -> LATIN CAPITAL LETTER O WITH STROKE
'\xd9' # 0xD9 -> LATIN CAPITAL LETTER U WITH GRAVE
'\xda' # 0xDA -> LATIN CAPITAL LETTER U WITH ACUTE
'\xdb' # 0xDB -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX
'\xdc' # 0xDC -> LATIN CAPITAL LETTER U WITH DIAERESIS
'\xdd' # 0xDD -> LATIN CAPITAL LETTER Y WITH ACUTE
'\xde' # 0xDE -> LATIN CAPITAL LETTER THORN (Icelandic)
'\xdf' # 0xDF -> LATIN SMALL LETTER SHARP S (German)
'\xe0' # 0xE0 -> LATIN SMALL LETTER A WITH GRAVE
'\xe1' # 0xE1 -> LATIN SMALL LETTER A WITH ACUTE
'\xe2' # 0xE2 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
'\xe3' # 0xE3 -> LATIN SMALL LETTER A WITH TILDE
'\xe4' # 0xE4 -> LATIN SMALL LETTER A WITH DIAERESIS
'\xe5' # 0xE5 -> LATIN SMALL LETTER A WITH RING ABOVE
'\xe6' # 0xE6 -> LATIN SMALL LETTER AE
'\xe7' # 0xE7 -> LATIN SMALL LETTER C WITH CEDILLA
'\xe8' # 0xE8 -> LATIN SMALL LETTER E WITH GRAVE
'\xe9' # 0xE9 -> LATIN SMALL LETTER E WITH ACUTE
'\xea' # 0xEA -> LATIN SMALL LETTER E WITH CIRCUMFLEX
'\xeb' # 0xEB -> LATIN SMALL LETTER E WITH DIAERESIS
'\xec' # 0xEC -> LATIN SMALL LETTER I WITH GRAVE
'\xed' # 0xED -> LATIN SMALL LETTER I WITH ACUTE
'\xee' # 0xEE -> LATIN SMALL LETTER I WITH CIRCUMFLEX
'\xef' # 0xEF -> LATIN SMALL LETTER I WITH DIAERESIS
'\xf0' # 0xF0 -> LATIN SMALL LETTER ETH (Icelandic)
'\xf1' # 0xF1 -> LATIN SMALL LETTER N WITH TILDE
'\xf2' # 0xF2 -> LATIN SMALL LETTER O WITH GRAVE
'\xf3' # 0xF3 -> LATIN SMALL LETTER O WITH ACUTE
'\xf4' # 0xF4 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
'\xf5' # 0xF5 -> LATIN SMALL LETTER O WITH TILDE
'\xf6' # 0xF6 -> LATIN SMALL LETTER O WITH DIAERESIS
'\xf7' # 0xF7 -> DIVISION SIGN
'\xf8' # 0xF8 -> LATIN SMALL LETTER O WITH STROKE
'\xf9' # 0xF9 -> LATIN SMALL LETTER U WITH GRAVE
'\xfa' # 0xFA -> LATIN SMALL LETTER U WITH ACUTE
'\xfb' # 0xFB -> LATIN SMALL LETTER U WITH CIRCUMFLEX
'\xfc' # 0xFC -> LATIN SMALL LETTER U WITH DIAERESIS
'\xfd' # 0xFD -> LATIN SMALL LETTER Y WITH ACUTE
'\xfe' # 0xFE -> LATIN SMALL LETTER THORN (Icelandic)
'\xff' # 0xFF -> LATIN SMALL LETTER Y WITH DIAERESIS
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
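# Illustrative round-trip (a sketch; in practice the encodings package
# registers this codec automatically under the name 'iso8859-1'):
#   data = 'caf\xe9'.encode('iso8859-1')   # -> b'caf\xe9'
#   assert data.decode('iso8859-1') == 'caf\xe9'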
|
Godiyos/python-for-android | refs/heads/master | python-modules/twisted/twisted/conch/insults/colors.py | 146 | """
You don't really want to use this module. Try helper.py instead.
"""
CLEAR = 0
BOLD = 1
DIM = 2
ITALIC = 3
UNDERSCORE = 4
BLINK_SLOW = 5
BLINK_FAST = 6
REVERSE = 7
CONCEALED = 8
FG_BLACK = 30
FG_RED = 31
FG_GREEN = 32
FG_YELLOW = 33
FG_BLUE = 34
FG_MAGENTA = 35
FG_CYAN = 36
FG_WHITE = 37
BG_BLACK = 40
BG_RED = 41
BG_GREEN = 42
BG_YELLOW = 43
BG_BLUE = 44
BG_MAGENTA = 45
BG_CYAN = 46
BG_WHITE = 47
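# Illustrative composition of an ANSI SGR escape sequence from these constants
# (a sketch; as the docstring notes, helper.py is the preferred interface):
#   print('\x1b[%d;%dm%s\x1b[%dm' % (BOLD, FG_RED, 'alert', CLEAR))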
|
googleapis/python-bigquery | refs/heads/master | tests/unit/test_dbapi__helpers.py | 1 | # Copyright 2017 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
import decimal
import math
import operator as op
import unittest
import pytest
try:
import pyarrow
except ImportError: # pragma: NO COVER
pyarrow = None
import google.cloud._helpers
from google.cloud.bigquery import table, enums
from google.cloud.bigquery.dbapi import _helpers
from google.cloud.bigquery.dbapi import exceptions
from tests.unit.helpers import _to_pyarrow
class TestQueryParameters(unittest.TestCase):
def test_scalar_to_query_parameter(self):
expected_types = [
(True, "BOOL"),
(False, "BOOL"),
(123, "INT64"),
(-123456789, "INT64"),
(1.25, "FLOAT64"),
(b"I am some bytes", "BYTES"),
("I am a string", "STRING"),
(datetime.date(2017, 4, 1), "DATE"),
(datetime.time(12, 34, 56), "TIME"),
(datetime.datetime(2012, 3, 4, 5, 6, 7), "DATETIME"),
(
datetime.datetime(
2012, 3, 4, 5, 6, 7, tzinfo=google.cloud._helpers.UTC
),
"TIMESTAMP",
),
(decimal.Decimal("1.25"), "NUMERIC"),
(decimal.Decimal("9.9999999999999999999999999999999999999E+28"), "NUMERIC"),
(decimal.Decimal("1.0E+29"), "BIGNUMERIC"), # more than max NUMERIC value
(decimal.Decimal("1.123456789"), "NUMERIC"),
(decimal.Decimal("1.1234567891"), "BIGNUMERIC"), # scale > 9
(decimal.Decimal("12345678901234567890123456789.012345678"), "NUMERIC"),
(
decimal.Decimal("12345678901234567890123456789012345678"),
"BIGNUMERIC", # larger than max NUMERIC value, despite precision <=38
),
]
for value, expected_type in expected_types:
msg = "value: {} expected_type: {}".format(value, expected_type)
parameter = _helpers.scalar_to_query_parameter(value)
self.assertIsNone(parameter.name, msg=msg)
self.assertEqual(parameter.type_, expected_type, msg=msg)
self.assertEqual(parameter.value, value, msg=msg)
named_parameter = _helpers.scalar_to_query_parameter(value, name="myvar")
self.assertEqual(named_parameter.name, "myvar", msg=msg)
self.assertEqual(named_parameter.type_, expected_type, msg=msg)
self.assertEqual(named_parameter.value, value, msg=msg)
def test_scalar_to_query_parameter_w_unexpected_type(self):
with self.assertRaises(exceptions.ProgrammingError):
_helpers.scalar_to_query_parameter(value={"a": "dictionary"})
def test_scalar_to_query_parameter_w_special_floats(self):
nan_parameter = _helpers.scalar_to_query_parameter(float("nan"))
self.assertTrue(math.isnan(nan_parameter.value))
self.assertEqual(nan_parameter.type_, "FLOAT64")
inf_parameter = _helpers.scalar_to_query_parameter(float("inf"))
self.assertTrue(math.isinf(inf_parameter.value))
self.assertEqual(inf_parameter.type_, "FLOAT64")
def test_array_to_query_parameter_valid_argument(self):
expected_types = [
([True, False], "BOOL"),
([123, -456, 0], "INT64"),
([1.25, 2.50], "FLOAT64"),
([decimal.Decimal("1.25")], "NUMERIC"),
([decimal.Decimal("{d38}.{d38}".format(d38="9" * 38))], "BIGNUMERIC"),
([b"foo", b"bar"], "BYTES"),
(["foo", "bar"], "STRING"),
([datetime.date(2017, 4, 1), datetime.date(2018, 4, 1)], "DATE"),
([datetime.time(12, 34, 56), datetime.time(10, 20, 30)], "TIME"),
(
[
datetime.datetime(2012, 3, 4, 5, 6, 7),
datetime.datetime(2013, 1, 1, 10, 20, 30),
],
"DATETIME",
),
(
[
datetime.datetime(
2012, 3, 4, 5, 6, 7, tzinfo=google.cloud._helpers.UTC
),
datetime.datetime(
2013, 1, 1, 10, 20, 30, tzinfo=google.cloud._helpers.UTC
),
],
"TIMESTAMP",
),
]
for values, expected_type in expected_types:
msg = "value: {} expected_type: {}".format(values, expected_type)
parameter = _helpers.array_to_query_parameter(values)
self.assertIsNone(parameter.name, msg=msg)
self.assertEqual(parameter.array_type, expected_type, msg=msg)
self.assertEqual(parameter.values, values, msg=msg)
named_param = _helpers.array_to_query_parameter(values, name="my_param")
self.assertEqual(named_param.name, "my_param", msg=msg)
self.assertEqual(named_param.array_type, expected_type, msg=msg)
self.assertEqual(named_param.values, values, msg=msg)
def test_array_to_query_parameter_empty_argument(self):
with self.assertRaises(exceptions.ProgrammingError):
_helpers.array_to_query_parameter([])
def test_array_to_query_parameter_unsupported_sequence(self):
unsupported_iterables = [{10, 20, 30}, "foo", b"bar", bytearray([65, 75, 85])]
for iterable in unsupported_iterables:
with self.assertRaises(exceptions.ProgrammingError):
_helpers.array_to_query_parameter(iterable)
def test_array_to_query_parameter_sequence_w_invalid_elements(self):
with self.assertRaises(exceptions.ProgrammingError):
_helpers.array_to_query_parameter([object(), 2, 7])
def test_to_query_parameters_w_dict(self):
parameters = {"somebool": True, "somestring": "a-string-value"}
query_parameters = _helpers.to_query_parameters(parameters, {})
query_parameter_tuples = []
for param in query_parameters:
query_parameter_tuples.append((param.name, param.type_, param.value))
self.assertSequenceEqual(
sorted(query_parameter_tuples),
sorted(
[
("somebool", "BOOL", True),
("somestring", "STRING", "a-string-value"),
]
),
)
def test_to_query_parameters_w_dict_array_param(self):
parameters = {"somelist": [10, 20]}
query_parameters = _helpers.to_query_parameters(parameters, {})
self.assertEqual(len(query_parameters), 1)
param = query_parameters[0]
self.assertEqual(param.name, "somelist")
self.assertEqual(param.array_type, "INT64")
self.assertEqual(param.values, [10, 20])
def test_to_query_parameters_w_dict_dict_param(self):
parameters = {"my_param": {"foo": "bar"}}
with self.assertRaises(NotImplementedError):
_helpers.to_query_parameters(parameters, {})
def test_to_query_parameters_w_list(self):
parameters = [True, "a-string-value"]
query_parameters = _helpers.to_query_parameters(parameters, [None, None])
query_parameter_tuples = []
for param in query_parameters:
query_parameter_tuples.append((param.name, param.type_, param.value))
self.assertSequenceEqual(
sorted(query_parameter_tuples),
sorted([(None, "BOOL", True), (None, "STRING", "a-string-value")]),
)
def test_to_query_parameters_w_list_array_param(self):
parameters = [[10, 20]]
query_parameters = _helpers.to_query_parameters(parameters, [None])
self.assertEqual(len(query_parameters), 1)
param = query_parameters[0]
self.assertIsNone(param.name)
self.assertEqual(param.array_type, "INT64")
self.assertEqual(param.values, [10, 20])
def test_to_query_parameters_w_list_dict_param(self):
parameters = [{"foo": "bar"}]
with self.assertRaises(NotImplementedError):
_helpers.to_query_parameters(parameters, [None])
def test_to_query_parameters_none_argument(self):
query_parameters = _helpers.to_query_parameters(None, None)
self.assertEqual(query_parameters, [])
class TestToBqTableRows(unittest.TestCase):
def test_empty_iterable(self):
rows_iterable = iter([])
result = _helpers.to_bq_table_rows(rows_iterable)
self.assertEqual(list(result), [])
@unittest.skipIf(pyarrow is None, "Requires `pyarrow`")
def test_non_empty_iterable(self):
rows_iterable = [
dict(
one=_to_pyarrow(1.1),
four=_to_pyarrow(1.4),
two=_to_pyarrow(1.2),
three=_to_pyarrow(1.3),
),
dict(
one=_to_pyarrow(2.1),
four=_to_pyarrow(2.4),
two=_to_pyarrow(2.2),
three=_to_pyarrow(2.3),
),
]
result = _helpers.to_bq_table_rows(rows_iterable)
rows = list(result)
self.assertEqual(len(rows), 2)
row_1, row_2 = rows
self.assertIsInstance(row_1, table.Row)
self.assertIsInstance(row_2, table.Row)
field_value = op.itemgetter(1)
items = sorted(row_1.items(), key=field_value)
expected_items = [("one", 1.1), ("two", 1.2), ("three", 1.3), ("four", 1.4)]
self.assertEqual(items, expected_items)
items = sorted(row_2.items(), key=field_value)
expected_items = [("one", 2.1), ("two", 2.2), ("three", 2.3), ("four", 2.4)]
self.assertEqual(items, expected_items)
class TestRaiseOnClosedDecorator(unittest.TestCase):
def _make_class(self):
class Foo(object):
class_member = "class member"
def __init__(self):
self._closed = False
self.instance_member = "instance member"
def instance_method(self):
return self.instance_member
@classmethod
def class_method(cls): # pragma: NO COVER
return cls.class_member
@staticmethod
def static_method(): # pragma: NO COVER
return "static return value"
def _private_method(self):
return self.instance_member
return Foo
def test_preserves_method_names(self):
klass = self._make_class()
decorated_class = _helpers.raise_on_closed("I'm closed!")(klass)
instance = decorated_class()
self.assertEqual(instance.instance_method.__name__, "instance_method")
self.assertEqual(instance.class_method.__name__, "class_method")
self.assertEqual(instance.static_method.__name__, "static_method")
self.assertEqual(instance._private_method.__name__, "_private_method")
def test_methods_on_not_closed_instance(self):
klass = self._make_class()
decorated_class = _helpers.raise_on_closed("I'm closed!")(klass)
instance = decorated_class()
instance._closed = False
self.assertEqual(instance.instance_method(), "instance member")
self.assertEqual(instance.class_method(), "class member")
self.assertEqual(instance.static_method(), "static return value")
self.assertEqual(instance._private_method(), "instance member")
def test_public_instance_methods_on_closed_instance(self):
klass = self._make_class()
decorated_class = _helpers.raise_on_closed("I'm closed!")(klass)
instance = decorated_class()
instance._closed = True
with self.assertRaisesRegex(exceptions.ProgrammingError, "I'm closed!"):
instance.instance_method()
def test_methods_wo_public_instance_methods_on_closed_instance(self):
klass = self._make_class()
decorated_class = _helpers.raise_on_closed("I'm closed!")(klass)
instance = decorated_class()
instance._closed = True
# no errors expected
self.assertEqual(instance.class_method(), "class member")
self.assertEqual(instance.static_method(), "static return value")
self.assertEqual(instance._private_method(), "instance member")
def test_custom_class_closed_attribute(self):
klass = self._make_class()
decorated_class = _helpers.raise_on_closed(
"I'm closed!", closed_attr_name="_really_closed"
)(klass)
instance = decorated_class()
instance._closed = False
instance._really_closed = True
with self.assertRaisesRegex(exceptions.ProgrammingError, "I'm closed!"):
instance.instance_method()
def test_custom_on_closed_error_type(self):
klass = self._make_class()
decorated_class = _helpers.raise_on_closed(
"I'm closed!", exc_class=RuntimeError
)(klass)
instance = decorated_class()
instance._closed = True
with self.assertRaisesRegex(RuntimeError, "I'm closed!"):
instance.instance_method()
VALID_BQ_TYPES = [
(name, getattr(enums.SqlParameterScalarTypes, name)._type)
for name in dir(enums.SqlParameterScalarTypes)
if not name.startswith("_")
]
@pytest.mark.parametrize("alias, type_", VALID_BQ_TYPES)
def test_scalar_to_query_parameter_honors_given_type(alias, type_):
from google.cloud import bigquery
assert _helpers.scalar_to_query_parameter(1.23, None, alias) == (
bigquery.ScalarQueryParameter(None, type_, 1.23)
)
assert _helpers.scalar_to_query_parameter(None, "foo", alias) == (
bigquery.ScalarQueryParameter("foo", type_, None)
)
def test_scalar_to_query_parameter_honors_given_type_errors_on_invalid():
with pytest.raises(
google.cloud.bigquery.dbapi.exceptions.ProgrammingError,
match="The given parameter type, INT, for foo is not a valid BigQuery scalar type.",
):
_helpers.scalar_to_query_parameter(None, "foo", "INT")
@pytest.mark.parametrize("alias, type_", VALID_BQ_TYPES)
def test_array_to_query_parameter_honors_given_type(alias, type_):
from google.cloud import bigquery
assert _helpers.array_to_query_parameter([1.23], None, alias) == (
bigquery.ArrayQueryParameter(None, type_, [1.23])
)
assert _helpers.array_to_query_parameter((), "foo", alias) == (
bigquery.ArrayQueryParameter("foo", type_, ())
)
def test_array_to_query_parameter_honors_given_type_errors_on_invalid():
with pytest.raises(
google.cloud.bigquery.dbapi.exceptions.ProgrammingError,
match="The given parameter type, INT, for foo is not a valid BigQuery scalar type.",
):
_helpers.array_to_query_parameter((), "foo", "INT")
def test_to_query_parameters_dict_w_types():
from google.cloud import bigquery
assert sorted(
_helpers.to_query_parameters(
dict(i=1, x=1.2, y=None, z=[]), dict(x="numeric", y="string", z="float64")
),
key=lambda p: p.name,
) == [
bigquery.ScalarQueryParameter("i", "INT64", 1),
bigquery.ScalarQueryParameter("x", "NUMERIC", 1.2),
bigquery.ScalarQueryParameter("y", "STRING", None),
bigquery.ArrayQueryParameter("z", "FLOAT64", []),
]
def test_to_query_parameters_list_w_types():
from google.cloud import bigquery
assert _helpers.to_query_parameters(
[1, 1.2, None, []], [None, "numeric", "string", "float64"]
) == [
bigquery.ScalarQueryParameter(None, "INT64", 1),
bigquery.ScalarQueryParameter(None, "NUMERIC", 1.2),
bigquery.ScalarQueryParameter(None, "STRING", None),
bigquery.ArrayQueryParameter(None, "FLOAT64", []),
]
|
xapharius/HadoopML | refs/heads/master | Engine/src/tests/protocol/n_images_input_protocol_test.py | 2 | '''
Created on Apr 8, 2014
@author: Simon
'''
import unittest
from protocol.n_image_input_protocol import NImageInputProtocol
from skimage import io as skio
import struct
from numpy.testing.utils import assert_array_equal
from numpy.ma.testutils import assert_equal
import io
from encodings.base64_codec import base64_encode
import base64
class NImagesInputProtocolTest(unittest.TestCase):
def setUp(self):
self.protocol = NImageInputProtocol()
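    # Wire format exercised by these tests (inferred from the assertions below,
    # not from separate documentation): each image is framed as a 4-byte
    # big-endian length prefix followed by the raw file bytes; the concatenated
    # frames are base64-encoded and prefixed with a tab-separated key.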
def testReadSingleImage(self):
f = open('../../../../data/test-images/cat_programming.png', "rb")
file_bytes = bytearray(f.read())
len_bytes = bytearray(struct.pack('>i', len(file_bytes)))
_, images = self.protocol.read(f.name + '\t' + base64_encode(str(len_bytes + file_bytes))[0])
exp = skio.imread('../../../../data/test-images/cat_programming.png')
assert_array_equal(images[0], exp)
def testWriteSingleImage(self):
image = skio.imread('../../../../data/test-images/cat_programming.png')
img_list = [image]
key = 'cat'
image_bytes = self.protocol.write(key, img_list)
byte_stream = io.BytesIO()
skio.imsave(byte_stream, image)
file_bytes = byte_stream.getvalue()
byte_stream.close()
len_bytes = bytearray(struct.pack('>i', len(file_bytes)))
        assert_equal(image_bytes, key + '\t' + base64.b64encode(len_bytes + file_bytes))
def testReadWriteReadSingleImage(self):
f = open('../../../../data/test-images/cat_programming.png', "rb")
file_bytes = bytearray(f.read())
len_bytes = bytearray(struct.pack('>i', len(file_bytes)))
exp = skio.imread('../../../../data/test-images/cat_programming.png')
key = 'cat'
key, images = self.protocol.read(key + '\t' + base64.b64encode(str(len_bytes + file_bytes)))
image_bytes = self.protocol.write(key, images)
_, images = self.protocol.read(image_bytes)
assert_array_equal(images[0], exp)
def testReadMultipleImages(self):
f = open('../../../../data/test-images/cat_programming.png', "rb")
file_bytes = bytearray(f.read())
len_bytes = bytearray(struct.pack('>i', len(file_bytes)))
# five times the same image
_, images = self.protocol.read(f.name + '\t' + base64_encode(str(len_bytes + file_bytes))[0]*5)
exp = skio.imread('../../../../data/test-images/cat_programming.png')
for img in images:
assert_array_equal(img, exp)
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testReadSingleImage']
unittest.main() |
eugeneponomarenko/qualitybots | refs/heads/master | src/appengine/models/test_suite.py | 26 | #!/usr/bin/python2.4
#
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""TestSuite model.
TestSuite model stores the date and time of a test suite, datastore keys of
test browsers and reference browser and its status.
"""
import datetime
import re
from google.appengine.ext import db
from common import enum
from models import browser
class TestSuite(db.Model):
"""TestSuite Model which stores various information about Test Run.
Attributes:
date: DateTime of test suite.
ref_browser: Reference browser entity (Reference Property).
test_browsers: List of reference browser keys.
description: Text description of test suite.
"""
date = db.DateTimeProperty()
ref_browser = db.ReferenceProperty(browser.Browser)
test_browsers = db.ListProperty(db.Key)
description = db.TextProperty(default='')
def GetNumSites(self):
"""Gets an estimate on number of URLs tested.
Returns:
Estimated number of sites tested.
"""
test_browsers_count = len(self.test_browsers)
ref_browsers_count = 1
test_data = self.page_data_set
return test_data.count() / (test_browsers_count + ref_browsers_count)
def GetTestBrowsers(self):
"""Gets list of test browser entities.
Returns:
List of test browser entities.
"""
return browser.Browser.get(self.test_browsers)
def GetTestBrowsersStringWithFlag(self):
"""Gets the TestBrowsers String with flag.
Returns:
Testbrowsers string with flag.
"""
return u', '.join([b.GetBrowserStringWithFlag()
for b in self.GetTestBrowsers()])
def AddTestBrowser(self, test_browser):
"""Add a given test browser's key name into test browser key list.
This method avoids adding duplicates.
Args:
test_browser: Test Browser Entity.
"""
key_to_add = test_browser.key()
# Let's make sure we don't add duplicate values.
if key_to_add not in self.test_browsers:
self.test_browsers.append(key_to_add)
self.put()
def _SplitDatetimeString(datetime_string):
"""Splits a datetime string into list of its components.
Args:
datetime_string: DateTime String Value.
Returns:
Componentized values of datetime string.
"""
  p = r'([0-9]+)-([0-9]+)-([0-9]+) ([0-9]+):([0-9]+):([0-9]+)\.([0-9]+)'
return re.search(p, datetime_string).groups()
def GetDatetimeFromDatetimeString(datetime_string):
"""Creates datetime object from string datetime value.
Args:
datetime_string: DateTime String Value.
Returns:
DateTime Object.
"""
d = [int(v) for v in _SplitDatetimeString(datetime_string)]
return datetime.datetime(d[0], d[1], d[2], d[3], d[4], d[5], d[6])
def GetSuiteKeyNameFromDatetimeString(datetime_string):
"""Generates test suite key name from datetime string value.
Args:
datetime_string: DateTime String Value.
Returns:
Test Suite Key Name.
"""
d = _SplitDatetimeString(datetime_string)
return 'suite_' + ('_'.join(d))
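# Illustrative example (the datetime string format is assumed from
# _SplitDatetimeString above):
#   GetSuiteKeyNameFromDatetimeString('2011-06-01 12:30:45.123456')
#   -> 'suite_2011_06_01_12_30_45_123456'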
def GetOrInsertSuite(suite_date, ref_browser_user_agent, ref_browser_channel):
"""Gets or inserts TestSuite.
Args:
suite_date: Test Suite Date.
ref_browser_user_agent: Reference Browser User Agent.
ref_browser_channel: String representing reference browser channel.
Returns:
Inserted/Retrieved Test Suite Key Name.
"""
key_name = GetSuiteKeyNameFromDatetimeString(suite_date)
# Let's see if suite exist already.
test_suite = TestSuite.get_by_key_name(key_name)
if not test_suite:
flag = None
date = GetDatetimeFromDatetimeString(suite_date)
# Let's check if ref_browser has flag or not. Flag are pipe separated
# from browser user agent. So let's check for pipe ('|') and parse it.
if ref_browser_user_agent.count('|'):
flag = ref_browser_user_agent.split('|')[1]
ref_browser = browser.GetOrInsertBrowser(ref_browser_user_agent,
ref_browser_channel, flag=flag)
else:
ref_browser = browser.GetOrInsertBrowser(ref_browser_user_agent,
ref_browser_channel)
test_suite = TestSuite.get_or_insert(key_name=key_name, date=date,
ref_browser=ref_browser,
test_browsers=[])
return test_suite
def GetLatestSuite():
"""Returns latest TestSuite entity."""
q = TestSuite.all().order('-date')
return q.get()
def UpdateRefBrowser(suite, new_ref_browser, delete_old_ref=False):
"""Updates reference browser in TestSuite.
Args:
suite: Test Suite Entity.
new_ref_browser: Reference Browser Entity.
delete_old_ref: Delete Old Reference flag (default: False).
Returns:
Updated Test Suite Entity.
"""
old_ref_browser = suite.ref_browser.key()
suite.ref_browser = new_ref_browser
suite.put()
if delete_old_ref:
# Let's delete old reference now.
db.delete(old_ref_browser)
return suite
|
ravindrapanda/tensorflow | refs/heads/master | tensorflow/python/platform/tf_logging.py | 9 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Logging utilities."""
# pylint: disable=unused-import
# pylint: disable=g-bad-import-order
# pylint: disable=invalid-name
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import logging as _logging
import os as _os
import sys as _sys
import time as _time
from logging import DEBUG
from logging import ERROR
from logging import FATAL
from logging import INFO
from logging import WARN
import threading
import six
from tensorflow.python.util.all_util import remove_undocumented
from tensorflow.python.util.tf_export import tf_export
# Don't use this directly. Use _get_logger() instead.
_logger = None
_logger_lock = threading.Lock()
def _get_logger():
global _logger
# Use double-checked locking to avoid taking lock unnecessarily.
if _logger:
return _logger
_logger_lock.acquire()
try:
if _logger:
return _logger
# Scope the TensorFlow logger to not conflict with users' loggers.
logger = _logging.getLogger('tensorflow')
# Don't further configure the TensorFlow logger if the root logger is
# already configured. This prevents double logging in those cases.
if not _logging.getLogger().handlers:
# Determine whether we are in an interactive environment
_interactive = False
try:
# This is only defined in interactive shells.
if _sys.ps1: _interactive = True
except AttributeError:
# Even now, we may be in an interactive shell with `python -i`.
_interactive = _sys.flags.interactive
# If we are in an interactive environment (like Jupyter), set loglevel
# to INFO and pipe the output to stdout.
if _interactive:
logger.setLevel(INFO)
_logging_target = _sys.stdout
else:
_logging_target = _sys.stderr
# Add the output handler.
_handler = _logging.StreamHandler(_logging_target)
_handler.setFormatter(_logging.Formatter(_logging.BASIC_FORMAT, None))
logger.addHandler(_handler)
_logger = logger
return _logger
finally:
_logger_lock.release()
@tf_export('logging.log')
def log(level, msg, *args, **kwargs):
_get_logger().log(level, msg, *args, **kwargs)
@tf_export('logging.debug')
def debug(msg, *args, **kwargs):
_get_logger().debug(msg, *args, **kwargs)
@tf_export('logging.error')
def error(msg, *args, **kwargs):
_get_logger().error(msg, *args, **kwargs)
@tf_export('logging.fatal')
def fatal(msg, *args, **kwargs):
_get_logger().fatal(msg, *args, **kwargs)
@tf_export('logging.info')
def info(msg, *args, **kwargs):
_get_logger().info(msg, *args, **kwargs)
@tf_export('logging.warn')
def warn(msg, *args, **kwargs):
_get_logger().warn(msg, *args, **kwargs)
@tf_export('logging.warning')
def warning(msg, *args, **kwargs):
_get_logger().warning(msg, *args, **kwargs)
_level_names = {
FATAL: 'FATAL',
ERROR: 'ERROR',
WARN: 'WARN',
INFO: 'INFO',
DEBUG: 'DEBUG',
}
# Mask to convert integer thread ids to unsigned quantities for logging
# purposes
_THREAD_ID_MASK = 2 * _sys.maxsize + 1
_log_prefix = None # later set to google2_log_prefix
# Counter to keep track of number of log entries per token.
_log_counter_per_token = {}
@tf_export('logging.TaskLevelStatusMessage')
def TaskLevelStatusMessage(msg):
error(msg)
@tf_export('logging.flush')
def flush():
raise NotImplementedError()
# Code below is taken from pyglib/logging
@tf_export('logging.vlog')
def vlog(level, msg, *args, **kwargs):
_get_logger().log(level, msg, *args, **kwargs)
def _GetNextLogCountPerToken(token):
"""Wrapper for _log_counter_per_token.
Args:
token: The token for which to look up the count.
Returns:
The number of times this function has been called with
*token* as an argument (starting at 0)
"""
global _log_counter_per_token # pylint: disable=global-variable-not-assigned
_log_counter_per_token[token] = 1 + _log_counter_per_token.get(token, -1)
return _log_counter_per_token[token]
@tf_export('logging.log_every_n')
def log_every_n(level, msg, n, *args):
"""Log 'msg % args' at level 'level' once per 'n' times.
Logs the 1st call, (N+1)st call, (2N+1)st call, etc.
Not threadsafe.
Args:
level: The level at which to log.
msg: The message to be logged.
n: The number of times this should be called before it is logged.
*args: The args to be substituted into the msg.
"""
count = _GetNextLogCountPerToken(_GetFileAndLine())
log_if(level, msg, not (count % n), *args)
@tf_export('logging.log_first_n')
def log_first_n(level, msg, n, *args): # pylint: disable=g-bad-name
"""Log 'msg % args' at level 'level' only first 'n' times.
Not threadsafe.
Args:
level: The level at which to log.
msg: The message to be logged.
n: The number of times this should be called before it is logged.
*args: The args to be substituted into the msg.
"""
count = _GetNextLogCountPerToken(_GetFileAndLine())
log_if(level, msg, count < n, *args)
@tf_export('logging.log_if')
def log_if(level, msg, condition, *args):
"""Log 'msg % args' at level 'level' only if condition is fulfilled."""
if condition:
vlog(level, msg, *args)
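# Illustrative usage of the throttled logging helpers above (a sketch):
#   tf.logging.set_verbosity(tf.logging.INFO)
#   tf.logging.log_every_n(tf.logging.INFO, 'step %d', 100, step)  # 1st, 101st, ...
#   tf.logging.log_first_n(tf.logging.WARN, 'fallback used for %s', 1, name)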
def _GetFileAndLine():
"""Returns (filename, linenumber) for the stack frame."""
# Use sys._getframe(). This avoids creating a traceback object.
# pylint: disable=protected-access
f = _sys._getframe()
# pylint: enable=protected-access
our_file = f.f_code.co_filename
f = f.f_back
while f:
code = f.f_code
if code.co_filename != our_file:
return (code.co_filename, f.f_lineno)
f = f.f_back
return ('<unknown>', 0)
def google2_log_prefix(level, timestamp=None, file_and_line=None):
"""Assemble a logline prefix using the google2 format."""
# pylint: disable=global-variable-not-assigned
global _level_names
# pylint: enable=global-variable-not-assigned
# Record current time
now = timestamp or _time.time()
now_tuple = _time.localtime(now)
now_microsecond = int(1e6 * (now % 1.0))
(filename, line) = file_and_line or _GetFileAndLine()
basename = _os.path.basename(filename)
# Severity string
severity = 'I'
if level in _level_names:
severity = _level_names[level][0]
s = '%c%02d%02d %02d:%02d:%02d.%06d %5d %s:%d] ' % (
severity,
now_tuple[1], # month
now_tuple[2], # day
now_tuple[3], # hour
now_tuple[4], # min
now_tuple[5], # sec
now_microsecond,
_get_thread_id(),
basename,
line)
return s
@tf_export('logging.get_verbosity')
def get_verbosity():
"""Return how much logging output will be produced."""
return _get_logger().getEffectiveLevel()
@tf_export('logging.set_verbosity')
def set_verbosity(v):
"""Sets the threshold for what messages will be logged."""
_get_logger().setLevel(v)
def _get_thread_id():
"""Get id of current thread, suitable for logging as an unsigned quantity."""
# pylint: disable=protected-access
thread_id = six.moves._thread.get_ident()
# pylint:enable=protected-access
return thread_id & _THREAD_ID_MASK
_log_prefix = google2_log_prefix
# Controls which methods from pyglib.logging are available within the project.
# Do not add methods here without also adding to platform/tf_logging.py.
_allowed_symbols = [
'DEBUG',
'ERROR',
'FATAL',
'INFO',
'TaskLevelStatusMessage',
'WARN',
'debug',
'error',
'fatal',
'flush',
'get_verbosity',
'info',
'log',
'log_if',
'log_every_n',
'log_first_n',
'set_verbosity',
'vlog',
'warn',
'warning',
]
tf_export('logging.DEBUG').export_constant(__name__, 'DEBUG')
tf_export('logging.ERROR').export_constant(__name__, 'ERROR')
tf_export('logging.FATAL').export_constant(__name__, 'FATAL')
tf_export('logging.INFO').export_constant(__name__, 'INFO')
tf_export('logging.WARN').export_constant(__name__, 'WARN')
remove_undocumented(__name__, _allowed_symbols)
|
sephii/django | refs/heads/master | tests/m2m_and_m2o/tests.py | 6 | from django.db.models import Q
from django.test import TestCase
from .models import Issue, User, UnicodeReferenceModel
class RelatedObjectTests(TestCase):
def test_related_objects_have_name_attribute(self):
for field_name in ('test_issue_client', 'test_issue_cc'):
obj = User._meta.get_field(field_name)
self.assertEqual(field_name, obj.field.related_query_name())
def test_m2m_and_m2o(self):
r = User.objects.create(username="russell")
g = User.objects.create(username="gustav")
i1 = Issue(num=1)
i1.client = r
i1.save()
i2 = Issue(num=2)
i2.client = r
i2.save()
i2.cc.add(r)
i3 = Issue(num=3)
i3.client = g
i3.save()
i3.cc.add(r)
self.assertQuerysetEqual(
Issue.objects.filter(client=r.id), [
1,
2,
],
lambda i: i.num
)
self.assertQuerysetEqual(
Issue.objects.filter(client=g.id), [
3,
],
lambda i: i.num
)
self.assertQuerysetEqual(
Issue.objects.filter(cc__id__exact=g.id), []
)
self.assertQuerysetEqual(
Issue.objects.filter(cc__id__exact=r.id), [
2,
3,
],
lambda i: i.num
)
# These queries combine results from the m2m and the m2o relationships.
# They're three ways of saying the same thing.
self.assertQuerysetEqual(
Issue.objects.filter(Q(cc__id__exact=r.id) | Q(client=r.id)), [
1,
2,
3,
],
lambda i: i.num
)
self.assertQuerysetEqual(
Issue.objects.filter(cc__id__exact=r.id) | Issue.objects.filter(client=r.id), [
1,
2,
3,
],
lambda i: i.num
)
self.assertQuerysetEqual(
Issue.objects.filter(Q(client=r.id) | Q(cc__id__exact=r.id)), [
1,
2,
3,
],
lambda i: i.num
)
class RelatedObjectUnicodeTests(TestCase):
def test_m2m_with_unicode_reference(self):
"""
Regression test for #6045: references to other models can be unicode
        strings, provided they are directly convertible to ASCII.
"""
m1 = UnicodeReferenceModel.objects.create()
m2 = UnicodeReferenceModel.objects.create()
m2.others.add(m1) # used to cause an error (see ticket #6045)
m2.save()
list(m2.others.all()) # Force retrieval.
|
TAJaroszewski/lma_contrail_monitoring | refs/heads/master | deployment_scripts/puppet/modules/lma_contrail_monitoring/files/scripts/vrouter-flows-drop.py | 1 | #!/usr/bin/python
import signal
import sys
import urllib2
import xml.dom.minidom
plugin_name = "vrouter-flows-drop"
plugin_instance = "lma-contrail-extension"
plugin_interval = 60
plugin_type = 'gauge'
plugin_request = 'flow_action_drop'
def restore_sigchld():
signal.signal(signal.SIGCHLD, signal.SIG_DFL)
def log_verbose(msg):
collectd.info('%s plugin [verbose]: %s' % (plugin_name, msg))
def payload():
url = 'http://127.0.0.1:8085/Snh_KDropStatsReq'
req = urllib2.Request(url)
response = urllib2.urlopen(req)
p = response.read()
px = xml.dom.minidom.parseString(p)
itemlist = px.getElementsByTagName('KDropStatsResp')
return itemlist[0].getElementsByTagName("ds_" + plugin_request)[0].childNodes[0].toxml()
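# The Snh_KDropStatsReq introspect response is XML shaped roughly like this
# (illustrative and trimmed, not a verbatim capture):
#   <KDropStatsResp>
#     <ds_flow_action_drop>42</ds_flow_action_drop>
#     ...
#   </KDropStatsResp>
# payload() above extracts the ds_flow_action_drop counter text from it.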
def configure_callback(conf):
for node in conf.children:
val = str(node.values[0])
def payload_callback():
log_verbose('Read callback called')
value = payload()
# log_verbose(
# 'Sending value: %s.%s=%s' % (plugin_name, '-'.join([val.plugin, val.type]), value))
val = collectd.Values(
plugin=plugin_name, # metric source
plugin_instance=plugin_instance,
type=plugin_type,
type_instance=plugin_name,
interval=plugin_interval,
meta={'0': True},
values=[value]
)
val.dispatch()
if __name__ == '__main__':
print "Plugin: " + plugin_name
payload = payload()
print("%s" % (payload))
sys.exit(0)
else:
import collectd
collectd.register_init(restore_sigchld)
collectd.register_config(configure_callback)
collectd.register_read(payload_callback, plugin_interval)
|
OCA/purchase-workflow | refs/heads/12.0 | purchase_allowed_product/models/__init__.py | 1 | from . import res_partner
from . import supplied_product_mixin
from . import account_invoice
from . import product
from . import purchase_order
from . import product_supplierinfo
|
foursquare/pants | refs/heads/master | src/python/pants/backend/jvm/tasks/jvm_compile/execution_graph.py | 1 | # coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import absolute_import, division, print_function, unicode_literals
import queue
import sys
import threading
import traceback
from builtins import map, object, str
from collections import defaultdict, deque
from heapq import heappop, heappush
from pants.base.worker_pool import Work
class Job(object):
"""A unit of scheduling for the ExecutionGraph.
  The ExecutionGraph represents a DAG of dependent work. A Job is a node in the graph along with the
keys of its dependent jobs.
"""
def __init__(self, key, fn, dependencies, size=0, on_success=None, on_failure=None):
"""
:param key: Key used to reference and look up jobs
:param fn callable: The work to perform
:param dependencies: List of keys for dependent jobs
:param size: Estimated job size used for prioritization
:param on_success: Zero parameter callback to run if job completes successfully. Run on main
thread.
    :param on_failure: Zero parameter callback to run if the job fails. Run on main
thread."""
self.key = key
self.fn = fn
self.dependencies = dependencies
self.size = size
self.on_success = on_success
self.on_failure = on_failure
def __call__(self):
self.fn()
def run_success_callback(self):
if self.on_success:
self.on_success()
def run_failure_callback(self):
if self.on_failure:
self.on_failure()
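# Illustrative Job construction (a sketch; the key naming scheme shown here is
# an assumption, not mandated by this module):
#   Job(key='compile(a)', fn=lambda: compile_target_a(),
#       dependencies=['compile(b)'], size=10)
# The graph would run 'compile(b)' first and only then schedule 'compile(a)'.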
UNSTARTED = 'Unstarted'
QUEUED = 'Queued'
SUCCESSFUL = 'Successful'
FAILED = 'Failed'
CANCELED = 'Canceled'
class StatusTable(object):
DONE_STATES = {SUCCESSFUL, FAILED, CANCELED}
def __init__(self, keys, pending_dependencies_count):
self._statuses = {key: UNSTARTED for key in keys}
self._pending_dependencies_count = pending_dependencies_count
def mark_as(self, state, key):
self._statuses[key] = state
def mark_queued(self, key):
self.mark_as(QUEUED, key)
def unfinished_items(self):
"""Returns a list of (name, status) tuples, only including entries marked as unfinished."""
return [(key, stat) for key, stat in self._statuses.items() if stat not in self.DONE_STATES]
def failed_keys(self):
return [key for key, stat in self._statuses.items() if stat == FAILED]
def is_unstarted(self, key):
return self._statuses.get(key) is UNSTARTED
def mark_one_successful_dependency(self, key):
self._pending_dependencies_count[key] -= 1
def is_ready_to_submit(self, key):
return self.is_unstarted(key) and self._pending_dependencies_count[key] == 0
def are_all_done(self):
return all(s in self.DONE_STATES for s in self._statuses.values())
def has_failures(self):
return any(stat is FAILED for stat in self._statuses.values())
class ExecutionFailure(Exception):
"""Raised when work units fail during execution"""
def __init__(self, message, cause=None):
if cause:
message = "{}: {}".format(message, str(cause))
super(ExecutionFailure, self).__init__(message)
self.cause = cause
class UnexecutableGraphError(Exception):
"""Base exception class for errors that make an ExecutionGraph not executable"""
def __init__(self, msg):
super(UnexecutableGraphError, self).__init__("Unexecutable graph: {}".format(msg))
class NoRootJobError(UnexecutableGraphError):
def __init__(self):
super(NoRootJobError, self).__init__(
"All scheduled jobs have dependencies. There must be a circular dependency.")
class UnknownJobError(UnexecutableGraphError):
def __init__(self, undefined_dependencies):
super(UnknownJobError, self).__init__("Undefined dependencies {}"
.format(", ".join(map(repr, undefined_dependencies))))
class JobExistsError(UnexecutableGraphError):
def __init__(self, key):
super(JobExistsError, self).__init__("Job already scheduled {!r}"
.format(key))
class ThreadSafeCounter(object):
def __init__(self):
self.lock = threading.Lock()
self._counter = 0
def get(self):
with self.lock:
return self._counter
def increment(self):
with self.lock:
self._counter += 1
def decrement(self):
with self.lock:
self._counter -= 1
class ExecutionGraph(object):
"""A directed acyclic graph of work to execute.
This is currently only used within jvm compile, but the intent is to unify it with the future
global execution graph.
"""
def __init__(self, job_list, print_stack_trace):
"""
:param job_list Job: list of Jobs to schedule and run.
:param print_stack_trace: If True, log the full stack trace for jobs that fail.
"""
self._print_stack_trace = print_stack_trace
self._dependencies = defaultdict(list)
self._dependees = defaultdict(list)
self._jobs = {}
self._job_keys_as_scheduled = []
self._job_keys_with_no_dependencies = []
for job in job_list:
self._schedule(job)
unscheduled_dependencies = set(self._dependees.keys()) - set(self._job_keys_as_scheduled)
if unscheduled_dependencies:
raise UnknownJobError(unscheduled_dependencies)
if len(self._job_keys_with_no_dependencies) == 0:
raise NoRootJobError()
self._job_priority = self._compute_job_priorities(job_list)
def format_dependee_graph(self):
return "\n".join([
"{} -> {{\n {}\n}}".format(key, ',\n '.join(self._dependees[key]))
for key in self._job_keys_as_scheduled
])
def _schedule(self, job):
key = job.key
dependency_keys = job.dependencies
self._job_keys_as_scheduled.append(key)
if key in self._jobs:
raise JobExistsError(key)
self._jobs[key] = job
if len(dependency_keys) == 0:
self._job_keys_with_no_dependencies.append(key)
self._dependencies[key] = dependency_keys
for dependency_key in dependency_keys:
self._dependees[dependency_key].append(key)
def _compute_job_priorities(self, job_list):
"""Walks the dependency graph breadth-first, starting from the most dependent tasks,
and computes the job priority as the sum of the job sizes along the critical path."""
job_size = {job.key: job.size for job in job_list}
job_priority = defaultdict(int)
bfs_queue = deque()
for job in job_list:
if len(self._dependees[job.key]) == 0:
job_priority[job.key] = job_size[job.key]
bfs_queue.append(job.key)
satisfied_dependees_count = defaultdict(int)
while len(bfs_queue) > 0:
job_key = bfs_queue.popleft()
for dependency_key in self._dependencies[job_key]:
job_priority[dependency_key] = \
max(job_priority[dependency_key],
job_size[dependency_key] + job_priority[job_key])
satisfied_dependees_count[dependency_key] += 1
if satisfied_dependees_count[dependency_key] == len(self._dependees[dependency_key]):
bfs_queue.append(dependency_key)
return job_priority
def execute(self, pool, log):
"""Runs scheduled work, ensuring all dependencies for each element are done before execution.
:param pool: A WorkerPool to run jobs on
:param log: logger for logging debug information and progress
submits all the work without any dependencies to the worker pool
when a unit of work finishes,
if it is successful
calls success callback
checks for dependees whose dependencies are all successful, and submits them
if it fails
calls failure callback
marks dependees as failed and queues them directly into the finished work queue
when all work is either successful or failed,
cleans up the work pool
if there's an exception on the main thread,
calls failure callback for unfinished work
aborts work pool
re-raises
"""
log.debug(self.format_dependee_graph())
status_table = StatusTable(self._job_keys_as_scheduled,
{key: len(self._jobs[key].dependencies) for key in self._job_keys_as_scheduled})
finished_queue = queue.Queue()
heap = []
jobs_in_flight = ThreadSafeCounter()
def put_jobs_into_heap(job_keys):
for job_key in job_keys:
# minus because jobs with larger priority should go first
heappush(heap, (-self._job_priority[job_key], job_key))
def try_to_submit_jobs_from_heap():
def worker(worker_key, work):
try:
work()
result = (worker_key, SUCCESSFUL, None)
except Exception:
_, exc_value, exc_traceback = sys.exc_info()
result = (worker_key, FAILED, (exc_value, traceback.format_tb(exc_traceback)))
finished_queue.put(result)
jobs_in_flight.decrement()
while len(heap) > 0 and jobs_in_flight.get() < pool.num_workers:
priority, job_key = heappop(heap)
jobs_in_flight.increment()
status_table.mark_queued(job_key)
pool.submit_async_work(Work(worker, [(job_key, (self._jobs[job_key]))]))
def submit_jobs(job_keys):
put_jobs_into_heap(job_keys)
try_to_submit_jobs_from_heap()
try:
submit_jobs(self._job_keys_with_no_dependencies)
while not status_table.are_all_done():
try:
finished_key, result_status, value = finished_queue.get(timeout=10)
except queue.Empty:
log.debug("Waiting on \n {}\n".format("\n ".join(
"{}: {}".format(key, state) for key, state in status_table.unfinished_items())))
try_to_submit_jobs_from_heap()
continue
finished_job = self._jobs[finished_key]
direct_dependees = self._dependees[finished_key]
status_table.mark_as(result_status, finished_key)
# Queue downstream tasks.
if result_status is SUCCESSFUL:
try:
finished_job.run_success_callback()
except Exception as e:
log.debug(traceback.format_exc())
raise ExecutionFailure("Error in on_success for {}".format(finished_key), e)
ready_dependees = []
for dependee in direct_dependees:
status_table.mark_one_successful_dependency(dependee)
if status_table.is_ready_to_submit(dependee):
ready_dependees.append(dependee)
submit_jobs(ready_dependees)
else: # Failed or canceled.
try:
finished_job.run_failure_callback()
except Exception as e:
log.debug(traceback.format_exc())
raise ExecutionFailure("Error in on_failure for {}".format(finished_key), e)
# Propagate failures downstream.
for dependee in direct_dependees:
if status_table.is_unstarted(dependee):
status_table.mark_queued(dependee)
finished_queue.put((dependee, CANCELED, None))
# Log success or failure for this job.
if result_status is FAILED:
exception, tb = value
log.error("{} failed: {}".format(finished_key, exception))
if self._print_stack_trace:
log.error('Traceback:\n{}'.format('\n'.join(tb)))
else:
log.debug("{} finished with status {}".format(finished_key, result_status))
except ExecutionFailure:
raise
except Exception as e:
# Call failure callbacks for jobs that are unfinished.
for key, state in status_table.unfinished_items():
self._jobs[key].run_failure_callback()
log.debug(traceback.format_exc())
raise ExecutionFailure("Error running job", e)
if status_table.has_failures():
raise ExecutionFailure("Failed jobs: {}".format(', '.join(status_table.failed_keys())))
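# --- Illustrative sketch (not part of the original module) ---
# A minimal example of wiring Jobs into an ExecutionGraph. Running execute()
# needs a real WorkerPool and logger, so this only builds the graph and
# inspects the dependee edges and critical-path priorities computed in
# __init__ (the job keys and sizes below are made up).
if __name__ == '__main__':
    demo_jobs = [
        Job('a', lambda: None, dependencies=[], size=2),
        Job('b', lambda: None, dependencies=[], size=1),
        Job('c', lambda: None, dependencies=['a', 'b'], size=3),
    ]
    demo_graph = ExecutionGraph(demo_jobs, print_stack_trace=False)
    print(demo_graph.format_dependee_graph())
    # 'a' has priority 5 (2 + 3 along a -> c), 'b' has 4, 'c' has 3.
    print(demo_graph._job_priority)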
|
pwoodworth/intellij-community | refs/heads/master | python/testData/inspections/PyUnresolvedReferencesInspection/staticMethodParameter.py | 83 | class A:
@staticmethod
def foo(q):
q.bar()
|
ThreatConnect-Inc/tcex | refs/heads/master | tcex/sessions/tc_session.py | 2 | """ThreatConnect Requests Session"""
# standard library
import base64
import hashlib
import hmac
import logging
import time
# third-party
import urllib3
from requests import Session, adapters, auth
from urllib3.util.retry import Retry
from ..utils import Utils
# disable ssl warning message
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
class HmacAuth(auth.AuthBase):
"""ThreatConnect HMAC Authorization"""
def __init__(self, access_id, secret_key):
"""Initialize the Class properties."""
super().__init__()
self._access_id = access_id
self._secret_key = secret_key
def __call__(self, r):
"""Override of parent __call__ method."""
timestamp = int(time.time())
signature = f'{r.path_url}:{r.method}:{timestamp}'
hmac_signature = hmac.new(
self._secret_key.encode(), signature.encode(), digestmod=hashlib.sha256
).digest()
authorization = f'TC {self._access_id}:{base64.b64encode(hmac_signature).decode()}'
r.headers['Authorization'] = authorization
r.headers['Timestamp'] = timestamp
return r
class TokenAuth(auth.AuthBase):
"""ThreatConnect Token Authorization"""
def __init__(self, token):
"""Initialize Class Properties."""
super().__init__()
self.token = token
def __call__(self, r):
"""Override of parent __call__ method."""
r.headers['Authorization'] = f'TC-Token {self.token.token}'
return r
class TcSession(Session):
"""ThreatConnect REST API Requests Session"""
def __init__(self, api_access_id, api_secret_key, base_url, logger=None):
"""Initialize the Class properties."""
super().__init__()
self.api_access_id = api_access_id
self.api_secret_key = api_secret_key
self.base_url = base_url.strip('/')
self.log = logger or logging.getLogger('session')
# properties
self._log_curl: bool = False
self._token = None
self.auth = None
self.utils = Utils()
# Add Retry
self.retry()
def _configure_auth(self):
"""Return Auth property for session."""
# Add ThreatConnect Authorization
if self.token_available:
# service Apps only use tokens and playbook/runtime Apps will use token if available
self.auth = TokenAuth(self.token)
self.log.debug('feature=tc-session, event=auth, type=token')
elif self.api_access_id and self.api_secret_key:
try:
# for external Apps or testing Apps locally
self.auth = HmacAuth(self.api_access_id, self.api_secret_key)
self.log.debug('feature=tc-session, event=auth, type=hmac')
except AttributeError: # pragma: no cover
raise RuntimeError('No valid ThreatConnect API credentials provided.')
else: # pragma: no cover
raise RuntimeError('No valid ThreatConnect API credentials provided.')
@property
def log_curl(self) -> bool:
"""Return whether or not requests will be logged as a curl command."""
return self._log_curl
@log_curl.setter
def log_curl(self, log_curl: bool):
"""Enable or disable logging curl commands."""
self._log_curl = log_curl
@property
def token(self):
"""Return token."""
return self._token
@token.setter
def token(self, token):
"""Set token."""
self._token = token
@property
def token_available(self):
"""Return True if a token is available for authorization."""
return (
self.token is not None
and self.token.token is not None
and self.token.token_expires is not None
)
def request(self, method, url, **kwargs): # pylint: disable=arguments-differ
"""Override the request method to configure auth, accept API paths in place of full URLs, and log requests."""
if self.auth is None:
self._configure_auth()
# accept path for API calls instead of full URL
if not url.startswith('https'):
url = f'{self.base_url}{url}'
response = super().request(method, url, **kwargs)
# don't show curl message for logging commands
if '/v2/logs/app' not in url:
# APP-79 - adding logging of request as curl commands
if not response.ok or self.log_curl:
try:
self.log.debug(
self.utils.requests_to_curl(
response.request, proxies=self.proxies, verify=self.verify
)
)
except Exception: # nosec
pass # logging curl command is best effort
self.log.debug(
f'feature=tc-session, request-url={response.request.url}, '
f'status_code={response.status_code}, elapsed={response.elapsed}'
)
return response
def retry(self, retries=3, backoff_factor=0.3, status_forcelist=(500, 502, 504)):
"""Add retry to Requests Session
https://urllib3.readthedocs.io/en/latest/reference/urllib3.util.html#urllib3.util.retry.Retry
"""
retries = Retry(
total=retries,
read=retries,
connect=retries,
backoff_factor=backoff_factor,
status_forcelist=status_forcelist,
)
# mount all https requests
self.mount('https://', adapters.HTTPAdapter(max_retries=retries))
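# --- Illustrative sketch (not part of the original module) ---
# HmacAuth above signs "<path>:<method>:<timestamp>" with HMAC-SHA256 and
# base64-encodes the digest. The same computation standalone, using made-up
# credentials (no request is sent):
if __name__ == '__main__':
    demo_access_id, demo_secret_key = 'demo-access-id', 'demo-secret-key'
    demo_timestamp = int(time.time())
    demo_message = f'/v2/owners:GET:{demo_timestamp}'
    demo_digest = hmac.new(
        demo_secret_key.encode(), demo_message.encode(), digestmod=hashlib.sha256
    ).digest()
    print(f'Authorization: TC {demo_access_id}:{base64.b64encode(demo_digest).decode()}')
    print(f'Timestamp: {demo_timestamp}')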
|
gquirozbogner/contentbox-master | refs/heads/master | third_party/django/conf/locale/nn/formats.py | 118 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j. F Y'
TIME_FORMAT = 'H:i'
DATETIME_FORMAT = 'j. F Y H:i'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j. F'
SHORT_DATE_FORMAT = 'd.m.Y'
SHORT_DATETIME_FORMAT = 'd.m.Y H:i'
FIRST_DAY_OF_WEEK = 1 # Monday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
# Kept ISO formats as they are in first position
DATE_INPUT_FORMATS = (
'%Y-%m-%d', '%d.%m.%Y', '%d.%m.%y', # '2006-10-25', '25.10.2006', '25.10.06'
# '%d. %b %Y', '%d %b %Y', # '25. okt 2006', '25 okt 2006'
# '%d. %b. %Y', '%d %b. %Y', # '25. okt. 2006', '25 okt. 2006'
# '%d. %B %Y', '%d %B %Y', # '25. oktober 2006', '25 oktober 2006'
)
DATETIME_INPUT_FORMATS = (
'%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59'
'%Y-%m-%d %H:%M:%S.%f', # '2006-10-25 14:30:59.000200'
'%Y-%m-%d %H:%M', # '2006-10-25 14:30'
'%Y-%m-%d', # '2006-10-25'
'%d.%m.%Y %H:%M:%S', # '25.10.2006 14:30:59'
'%d.%m.%Y %H:%M:%S.%f', # '25.10.2006 14:30:59.000200'
'%d.%m.%Y %H:%M', # '25.10.2006 14:30'
'%d.%m.%Y', # '25.10.2006'
'%d.%m.%y %H:%M:%S', # '25.10.06 14:30:59'
'%d.%m.%y %H:%M:%S.%f', # '25.10.06 14:30:59.000200'
'%d.%m.%y %H:%M', # '25.10.06 14:30'
'%d.%m.%y', # '25.10.06'
)
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '\xa0' # non-breaking space
NUMBER_GROUPING = 3
|
wainersm/buildbot | refs/heads/master | master/buildbot/steps/package/rpm/rpmspec.py | 11 | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Portions Copyright Buildbot Team Members
# Portions Copyright Dan Radez <dradez+buildbot@redhat.com>
# Portions Copyright Steve 'Ashcrow' Milner <smilner+buildbot@redhat.com>
"""
library to populate parameters from an rpmspec file into a memory structure
"""
from __future__ import absolute_import
from __future__ import print_function
import re
from buildbot.steps.shell import ShellCommand
class RpmSpec(ShellCommand):
"""
read parameters out of an rpm spec file
"""
# initialize spec info vars and get them from the spec file
n_regex = re.compile(r'^Name:[ ]*([^\s]*)')
v_regex = re.compile(r'^Version:[ ]*([0-9\.]*)')
def __init__(self, specfile=None, **kwargs):
"""
Creates the RpmSpec object.
@type specfile: str
@param specfile: the name of the specfile to get the package
name and version from
@type kwargs: dict
@param kwargs: All further keyword arguments.
"""
ShellCommand.__init__(self, **kwargs)
self.specfile = specfile
self._pkg_name = None
self._pkg_version = None
self._loaded = False
def load(self):
"""
call this function after the file exists to populate properties
"""
# If we are given a string, open it up else assume it's something we
# can call read on.
if isinstance(self.specfile, str):
f = open(self.specfile, 'r')
else:
f = self.specfile
for line in f:
if self.v_regex.match(line):
self._pkg_version = self.v_regex.match(line).group(1)
if self.n_regex.match(line):
self._pkg_name = self.n_regex.match(line).group(1)
f.close()
self._loaded = True
# Read-only properties
loaded = property(lambda self: self._loaded)
pkg_name = property(lambda self: self._pkg_name)
pkg_version = property(lambda self: self._pkg_version)
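# --- Illustrative sketch (not part of the original module) ---
# The Name/Version regexes above can be exercised without constructing an
# RpmSpec instance (which needs the usual buildbot ShellCommand kwargs).
# The spec lines below are made-up sample input:
if __name__ == '__main__':
    for demo_line in ['Name: mypackage', 'Version: 1.2.3', 'Release: 1']:
        n_match = RpmSpec.n_regex.match(demo_line)
        v_match = RpmSpec.v_regex.match(demo_line)
        if n_match:
            print('name ->', n_match.group(1))
        if v_match:
            print('version ->', v_match.group(1))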
|
drptbl/splinter | refs/heads/master | tests/test_element_list.py | 7 | # -*- coding: utf-8 -*-
# Copyright 2013 splinter authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
import unittest
from splinter.element_list import ElementList
from splinter.exceptions import ElementDoesNotExist
class Person(object):
"""Very simple class, just for tests"""
def __init__(self):
self.current_action = None
def walk(self):
self.current_action = "walking"
class ElementListTest(unittest.TestCase):
def test_method_that_verifies_if_the_list_is_empty(self):
"should verify if the list is empty"
the_list = ElementList([1, 2, 3])
self.assertFalse(the_list.is_empty())
self.assertTrue(ElementList([]).is_empty())
def test_property_first_and_last(self):
"""
should provide \"first\" and \"last\" properties
which return the first and last elements
"""
the_list = ElementList([1, 2, 3])
self.assertEqual(the_list[0], the_list.first)
self.assertEqual(the_list[2], the_list.last)
def test_call_method_on_first_element(self):
"""
when a method is missing on ElementList but
present on the element, the call should be delegated
"""
the_list = ElementList([Person(), Person(), Person()])
the_list.walk()
the_person = the_list.first
self.assertEqual("walking", the_person.current_action)
def test_raise_exception_on_indexerror(self):
"should raise ElementDoesNotExist exception on IndexError"
with self.assertRaises(ElementDoesNotExist):
ElementList([]).first
def test_raise_exception_on_indexerror_with_unicode_query(self):
"should raise ElementDoesNotExist exception on IndexError"
with self.assertRaises(ElementDoesNotExist):
ElementList([], query=u'.element[title=título]').first
def test_raise_attribute_error(self):
"""
should raise AttributeError when trying to access
a non-existent method on list and element
"""
with self.assertRaises(AttributeError):
the_list = ElementList([Person(), Person()])
the_list.talk()
def test_attribute_error_for_empty(self):
"""
should raise AttributeError when the list is empty
and someone tries to access a method or property on it
"""
with self.assertRaises(AttributeError):
the_list = ElementList([])
the_list.unknown_method()
def test_attribute_error_content(self):
"should raise AttributeError with right content"
with self.assertRaises(AttributeError) as cm:
the_list = ElementList([Person(), Person()])
the_list.talk()
expected_message = "'ElementList' object has no attribute 'talk'"
e = cm.exception
self.assertEqual(expected_message, e.args[0])
def test_not_found_exception_with_query_and_method(self):
"""
should receive the find method
and the query and use them in exception
"""
with self.assertRaises(ElementDoesNotExist) as cm:
the_list = ElementList([], find_by="id", query="menu")
the_list.first
expected_message = 'no elements could be found with id "menu"'
e = cm.exception
self.assertEqual(expected_message, e.args[0])
|
sauloal/cnidaria | refs/heads/master | scripts/venv/lib/python2.7/site-packages/matplotlib/tri/trifinder.py | 11 | from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
from matplotlib.tri import Triangulation
import matplotlib._tri as _tri
class TriFinder(object):
"""
Abstract base class for classes used to find the triangles of a
Triangulation in which (x,y) points lie.
Rather than instantiate an object of a class derived from TriFinder, it is
usually better to use the function
:func:`matplotlib.tri.Triangulation.get_trifinder`.
Derived classes implement __call__(x,y) where x,y are array_like point
coordinates of the same shape.
"""
def __init__(self, triangulation):
if not isinstance(triangulation, Triangulation):
raise ValueError('Expected a Triangulation object')
self._triangulation = triangulation
class TrapezoidMapTriFinder(TriFinder):
"""
:class:`~matplotlib.tri.TriFinder` class implemented using the trapezoid
map algorithm from the book "Computational Geometry, Algorithms and
Applications", second edition, by M. de Berg, M. van Kreveld, M. Overmars
and O. Schwarzkopf.
The triangulation must be valid, i.e. it must not have duplicate points,
triangles formed from colinear points, or overlapping triangles. The
algorithm has some tolerance to triangles formed from colinear points, but
this should not be relied upon.
"""
def __init__(self, triangulation):
TriFinder.__init__(self, triangulation)
self._cpp_trifinder = _tri.TrapezoidMapTriFinder(
triangulation.get_cpp_triangulation())
self._initialize()
def __call__(self, x, y):
"""
Return an array containing the indices of the triangles in which the
specified x,y points lie, or -1 for points that do not lie within a
triangle.
*x*, *y* are array_like x and y coordinates of the same shape and any
number of dimensions.
Returns an integer array with the same shape as *x* and *y*.
"""
# C++ checks arguments are OK.
return self._cpp_trifinder.find_many(x, y)
def _get_tree_stats(self):
"""
Return a python list containing the statistics about the node tree:
0: number of nodes (tree size)
1: number of unique nodes
2: number of trapezoids (tree leaf nodes)
3: number of unique trapezoids
4: maximum parent count (max number of times a node is repeated in
tree)
5: maximum depth of tree (one more than the maximum number of
comparisons needed to search through the tree)
6: mean of all trapezoid depths (one more than the average number
of comparisons needed to search through the tree)
"""
return self._cpp_trifinder.get_tree_stats()
def _initialize(self):
"""
Initialize the underlying C++ object. Can be called multiple times if,
for example, the triangulation is modified.
"""
self._cpp_trifinder.initialize()
def _print_tree(self):
"""
Print a text representation of the node tree, which is useful for
debugging purposes.
"""
self._cpp_trifinder.print_tree()
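# --- Illustrative sketch (not part of the original module) ---
# Typical use goes through Triangulation.get_trifinder(), as the TriFinder
# docstring recommends. A tiny Delaunay triangulation of the unit square:
if __name__ == '__main__':
    import numpy as np
    demo_x = np.asarray([0.0, 1.0, 0.0, 1.0])
    demo_y = np.asarray([0.0, 0.0, 1.0, 1.0])
    demo_trifinder = Triangulation(demo_x, demo_y).get_trifinder()
    print(demo_trifinder(np.asarray([0.25]), np.asarray([0.25])))  # inside: a triangle index
    print(demo_trifinder(np.asarray([2.0]), np.asarray([2.0])))    # outside: -1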
|
canwe/NewsBlur | refs/heads/master | utils/backups/s3.py | 14 | from boto.s3.connection import S3Connection
from boto.s3.key import Key
import os
import sys
if '/srv/newsblur' not in ' '.join(sys.path):
sys.path.append("/srv/newsblur")
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
from django.conf import settings
ACCESS_KEY = settings.S3_ACCESS_KEY
SECRET = settings.S3_SECRET
BUCKET_NAME = settings.S3_BACKUP_BUCKET # Note that you need to create this bucket first
def save_file_in_s3(filename, name=None):
conn = S3Connection(ACCESS_KEY, SECRET)
bucket = conn.get_bucket(BUCKET_NAME)
k = Key(bucket)
k.key = name or filename
k.set_contents_from_filename(filename)
def get_file_from_s3(filename):
conn = S3Connection(ACCESS_KEY, SECRET)
bucket = conn.get_bucket(BUCKET_NAME)
k = Key(bucket)
k.key = filename
k.get_contents_to_filename(filename)
def list_backup_in_s3():
conn = S3Connection(ACCESS_KEY, SECRET)
bucket = conn.get_bucket(BUCKET_NAME)
for i, key in enumerate(bucket.get_all_keys()):
print "[%s] %s" % (i, key.name)
def delete_all_backups():
#FIXME: validate filename exists
conn = S3Connection(ACCESS_KEY, SECRET)
bucket = conn.get_bucket(BUCKET_NAME)
for i, key in enumerate(bucket.get_all_keys()):
print "deleting %s" % (key.name)
key.delete()
if __name__ == '__main__':
import sys
if len(sys.argv) < 3:
print 'Usage: %s <get/set/list/delete> <backup_filename>' % (sys.argv[0])
else:
if sys.argv[1] == 'set':
save_file_in_s3(sys.argv[2])
elif sys.argv[1] == 'get':
get_file_from_s3(sys.argv[2])
elif sys.argv[1] == 'list':
list_backup_in_s3()
elif sys.argv[1] == 'delete':
delete_all_backups()
else:
print 'Usage: %s <get/set/list/delete> <backup_filename>' % (sys.argv[0])
|
cychenyin/windmill | refs/heads/master | apscheduler/jobstores/memory.py | 1 | # coding: utf-8
from __future__ import absolute_import
from apscheduler.jobstores.base import BaseJobStore, JobLookupError, ConflictingIdError
from apscheduler.util import datetime_to_utc_timestamp
class MemoryJobStore(BaseJobStore):
"""
Stores jobs in an array in RAM. Provides no persistence support.
Plugin alias: ``memory``
"""
def __init__(self):
super(MemoryJobStore, self).__init__()
self._jobs = [] # list of (job, timestamp), sorted by next_run_time and job id (ascending)
self._jobs_index = {} # id -> (job, timestamp) lookup table
def lookup_job(self, job_id):
return self._jobs_index.get(job_id, (None, None))[0]
def get_due_jobs(self, now):
now_timestamp = datetime_to_utc_timestamp(now)
pending = []
for job, timestamp in self._jobs:
if timestamp is None or timestamp > now_timestamp:
break
pending.append(job)
return pending
def get_next_run_time(self):
return self._jobs[0][0].next_run_time if self._jobs else None
def get_all_jobs(self):
return [j[0] for j in self._jobs]
def add_job(self, job):
if job.id in self._jobs_index:
raise ConflictingIdError(job.id)
timestamp = datetime_to_utc_timestamp(job.next_run_time)
index = self._get_job_index(timestamp, job.id)
self._jobs.insert(index, (job, timestamp))
self._jobs_index[job.id] = (job, timestamp)
def update_job(self, job):
old_job, old_timestamp = self._jobs_index.get(job.id, (None, None))
if old_job is None:
raise JobLookupError(job.id)
# If the next run time has not changed, simply replace the job in its present index.
# Otherwise, reinsert the job to the list to preserve the ordering.
old_index = self._get_job_index(old_timestamp, old_job.id)
new_timestamp = datetime_to_utc_timestamp(job.next_run_time)
if old_timestamp == new_timestamp:
self._jobs[old_index] = (job, new_timestamp)
else:
del self._jobs[old_index]
new_index = self._get_job_index(new_timestamp, job.id)
self._jobs.insert(new_index, (job, new_timestamp))
self._jobs_index[old_job.id] = (job, new_timestamp)
def remove_job(self, job_id):
job, timestamp = self._jobs_index.get(job_id, (None, None))
if job is None:
raise JobLookupError(job_id)
index = self._get_job_index(timestamp, job_id)
del self._jobs[index]
del self._jobs_index[job.id]
def remove_all_jobs(self):
self._jobs = []
self._jobs_index = {}
def shutdown(self):
self.remove_all_jobs()
def _get_job_index(self, timestamp, job_id):
"""
Returns the index of the given job, or if it's not found, the index where the job should be inserted based on
the given timestamp.
:type timestamp: int
:type job_id: str
"""
lo, hi = 0, len(self._jobs)
timestamp = float('inf') if timestamp is None else timestamp
while lo < hi:
mid = (lo + hi) // 2
mid_job, mid_timestamp = self._jobs[mid]
mid_timestamp = float('inf') if mid_timestamp is None else mid_timestamp
if mid_timestamp > timestamp:
hi = mid
elif mid_timestamp < timestamp:
lo = mid + 1
elif mid_job.id > job_id:
hi = mid
elif mid_job.id < job_id:
lo = mid + 1
else:
return mid
return lo
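# --- Illustrative sketch (not part of the original module) ---
# _get_job_index() above bisects a list kept sorted by (timestamp, job id),
# with a None timestamp ordering last (treated as infinity). The same
# invariant on plain tuples, without constructing Job objects:
if __name__ == '__main__':
    from bisect import bisect_left
    inf = float('inf')
    demo_entries = sorted(
        [(5.0, 'b'), (5.0, 'a'), (None, 'z'), (1.0, 'c')],
        key=lambda e: (inf if e[0] is None else e[0], e[1]))
    print(demo_entries)  # [(1.0, 'c'), (5.0, 'a'), (5.0, 'b'), (None, 'z')]
    # Insertion point for a new job with timestamp 5.0 and id 'ab':
    demo_keys = [(inf if t is None else t, j) for t, j in demo_entries]
    print(bisect_left(demo_keys, (5.0, 'ab')))  # 2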
|
redhat-openstack/python-openstackclient | refs/heads/master-patches | functional/tests/compute/v2/test_server.py | 1 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import time
from tempest.lib.common.utils import data_utils
from functional.common import test
from tempest.lib import exceptions
class ServerTests(test.TestCase):
"""Functional tests for openstack server commands."""
@classmethod
def get_flavor(cls):
# NOTE(rtheis): Get m1.tiny flavor since functional tests may
# create other flavors.
raw_output = cls.openstack('flavor show m1.tiny -c id -f value')
return raw_output.strip('\n')
@classmethod
def get_image(cls):
# NOTE(rtheis): Get public images since functional tests may
# create private images.
raw_output = cls.openstack('image list --public -f value -c ID')
ray = raw_output.split('\n')
idx = int(len(ray) / 2)
return ray[idx]
@classmethod
def get_network(cls):
try:
# NOTE(rtheis): Get private network since functional tests may
# create other networks.
raw_output = cls.openstack('network show private -c id -f value')
except exceptions.CommandFailed:
return ''
return ' --nic net-id=' + raw_output.strip('\n')
def server_create(self, name=None):
"""Create server. Add cleanup."""
name = name or data_utils.rand_uuid()
opts = self.get_show_opts(self.FIELDS)
flavor = self.get_flavor()
image = self.get_image()
network = self.get_network()
raw_output = self.openstack('--debug server create --flavor ' +
flavor +
' --image ' + image + network + ' ' +
name + opts)
if not raw_output:
self.fail('Server has not been created!')
self.addCleanup(self.server_delete, name)
def server_list(self, params=[]):
"""List servers."""
opts = self.get_list_opts(params)
return self.openstack('server list' + opts)
def server_delete(self, name):
"""Delete server by name."""
self.openstack('server delete ' + name)
def setUp(self):
"""Set necessary variables and create server."""
super(ServerTests, self).setUp()
self.NAME = data_utils.rand_name('TestServer')
self.OTHER_NAME = data_utils.rand_name('TestServer')
self.HEADERS = ['"Name"']
self.FIELDS = ['name']
self.IP_POOL = 'public'
self.server_create(self.NAME)
def test_server_rename(self):
"""Test server rename command.
Test steps:
1) Boot server in setUp
2) Rename server
3) Check output
4) Rename server back to original name
"""
raw_output = self.openstack('server set --name ' + self.OTHER_NAME +
' ' + self.NAME)
self.assertOutput("", raw_output)
self.assertNotIn(self.NAME, self.server_list(['Name']))
self.assertIn(self.OTHER_NAME, self.server_list(['Name']))
self.openstack('server set --name ' + self.NAME + ' ' +
self.OTHER_NAME)
def test_server_list(self):
"""Test server list command.
Test steps:
1) Boot server in setUp
2) List servers
3) Check output
"""
opts = self.get_list_opts(self.HEADERS)
raw_output = self.openstack('server list' + opts)
self.assertIn(self.NAME, raw_output)
def test_server_show(self):
"""Test server show command.
Test steps:
1) Boot server in setUp
2) Show server
3) Check output
"""
opts = self.get_show_opts(self.FIELDS)
raw_output = self.openstack('server show ' + self.NAME + opts)
self.assertEqual(self.NAME + "\n", raw_output)
def test_server_metadata(self):
"""Test command to set server metadata.
Test steps:
1) Boot server in setUp
2) Set properties for server
3) Check server properties in server show output
4) Unset properties for server
5) Check server properties in server show output
"""
self.wait_for_status("ACTIVE")
# metadata
raw_output = self.openstack(
'server set --property a=b --property c=d ' + self.NAME)
opts = self.get_show_opts(["name", "properties"])
raw_output = self.openstack('server show ' + self.NAME + opts)
self.assertEqual(self.NAME + "\na='b', c='d'\n", raw_output)
raw_output = self.openstack(
'server unset --property a ' + self.NAME)
opts = self.get_show_opts(["name", "properties"])
raw_output = self.openstack('server show ' + self.NAME + opts)
self.assertEqual(self.NAME + "\nc='d'\n", raw_output)
def test_server_suspend_resume(self):
"""Test server suspend and resume commands.
Test steps:
1) Boot server in setUp
2) Suspend server
3) Check for SUSPENDED server status
4) Resume server
5) Check for ACTIVE server status
"""
self.wait_for_status("ACTIVE")
# suspend
raw_output = self.openstack('server suspend ' + self.NAME)
self.assertEqual("", raw_output)
self.wait_for_status("SUSPENDED")
# resume
raw_output = self.openstack('server resume ' + self.NAME)
self.assertEqual("", raw_output)
self.wait_for_status("ACTIVE")
def test_server_lock_unlock(self):
"""Test server lock and unlock commands.
Test steps:
1) Boot server in setUp
2) Lock server
3) Check output
4) Unlock server
5) Check output
"""
self.wait_for_status("ACTIVE")
# lock
raw_output = self.openstack('server lock ' + self.NAME)
self.assertEqual("", raw_output)
# unlock
raw_output = self.openstack('server unlock ' + self.NAME)
self.assertEqual("", raw_output)
def test_server_pause_unpause(self):
"""Test server pause and unpause commands.
Test steps:
1) Boot server in setUp
2) Pause server
3) Check for PAUSED server status
4) Unpause server
5) Check for ACTIVE server status
"""
self.wait_for_status("ACTIVE")
# pause
raw_output = self.openstack('server pause ' + self.NAME)
self.assertEqual("", raw_output)
self.wait_for_status("PAUSED")
# unpause
raw_output = self.openstack('server unpause ' + self.NAME)
self.assertEqual("", raw_output)
self.wait_for_status("ACTIVE")
def test_server_rescue_unrescue(self):
"""Test server rescue and unrescue commands.
Test steps:
1) Boot server in setUp
2) Rescue server
3) Check for RESCUE server status
4) Unrescue server
5) Check for ACTIVE server status
"""
self.wait_for_status("ACTIVE")
# rescue
opts = self.get_show_opts(["adminPass"])
raw_output = self.openstack('server rescue ' + self.NAME + opts)
self.assertNotEqual("", raw_output)
self.wait_for_status("RESCUE")
# unrescue
raw_output = self.openstack('server unrescue ' + self.NAME)
self.assertEqual("", raw_output)
self.wait_for_status("ACTIVE")
def test_server_attach_detach_floating_ip(self):
"""Test commands to attach and detach floating IP for server.
Test steps:
1) Boot server in setUp
2) Create floating IP
3) Add floating IP to server
4) Check for floating IP in server show output
5) Remove floating IP from server
6) Check that floating IP is not in server show output
7) Delete floating IP
8) Check output
"""
self.wait_for_status("ACTIVE")
# attach ip
opts = self.get_show_opts(["id", "floating_ip_address"])
raw_output = self.openstack('ip floating create ' +
self.IP_POOL +
opts)
ip, ipid, rol = tuple(raw_output.split('\n'))
self.assertNotEqual("", ipid)
self.assertNotEqual("", ip)
raw_output = self.openstack('ip floating add ' + ip + ' ' + self.NAME)
self.assertEqual("", raw_output)
raw_output = self.openstack('server show ' + self.NAME)
self.assertIn(ip, raw_output)
# detach ip
raw_output = self.openstack('ip floating remove ' + ip + ' ' +
self.NAME)
self.assertEqual("", raw_output)
raw_output = self.openstack('server show ' + self.NAME)
self.assertNotIn(ip, raw_output)
raw_output = self.openstack('ip floating delete ' + ipid)
self.assertEqual("", raw_output)
def test_server_reboot(self):
"""Test server reboot command.
Test steps:
1) Boot server in setUp
2) Reboot server
3) Check for ACTIVE server status
"""
self.wait_for_status("ACTIVE")
# reboot
raw_output = self.openstack('server reboot ' + self.NAME)
self.assertEqual("", raw_output)
self.wait_for_status("ACTIVE")
def wait_for_status(self, expected_status='ACTIVE', wait=900, interval=30):
"""Wait until server reaches expected status."""
# TODO(thowe): Add a server wait command to osc
failures = ['ERROR']
total_sleep = 0
opts = self.get_show_opts(['status'])
while total_sleep < wait:
status = self.openstack('server show ' + self.NAME + opts)
status = status.rstrip()
print('Waiting for {} current status: {}'.format(expected_status,
status))
if status == expected_status:
break
self.assertNotIn(status, failures)
time.sleep(interval)
total_sleep += interval
status = self.openstack('server show ' + self.NAME + opts)
status = status.rstrip()
self.assertEqual(status, expected_status)
# give it a little bit more time
time.sleep(5)
|
IvanGavran/scrapy | refs/heads/master | scrapy/logformatter.py | 52 | import os
import logging
from twisted.python.failure import Failure
SCRAPEDMSG = u"Scraped from %(src)s" + os.linesep + "%(item)s"
DROPPEDMSG = u"Dropped: %(exception)s" + os.linesep + "%(item)s"
CRAWLEDMSG = u"Crawled (%(status)s) %(request)s (referer: %(referer)s)%(flags)s"
class LogFormatter(object):
"""Class for generating log messages for different actions.
All methods must return a dictionary listing the parameters `level`, `msg`
and `args` which are going to be used for constructing the log message when
calling logging.log.
Dictionary keys for the method outputs:
* `level` should be the log level for that action, you can use those
from the python logging library: logging.DEBUG, logging.INFO,
logging.WARNING, logging.ERROR and logging.CRITICAL.
* `msg` should be a string that can contain different formatting
placeholders. This string, formatted with the provided `args`, is going
to be the log message for that action.
* `args` should be a tuple or dict with the formatting placeholders for
`msg`. The final log message is computed as output['msg'] %
output['args'].
"""
def crawled(self, request, response, spider):
flags = ' %s' % str(response.flags) if response.flags else ''
return {
'level': logging.DEBUG,
'msg': CRAWLEDMSG,
'args': {
'status': response.status,
'request': request,
'referer': request.headers.get('Referer'),
'flags': flags,
}
}
def scraped(self, item, response, spider):
src = response.getErrorMessage() if isinstance(response, Failure) else response
return {
'level': logging.DEBUG,
'msg': SCRAPEDMSG,
'args': {
'src': src,
'item': item,
}
}
def dropped(self, item, exception, response, spider):
return {
'level': logging.WARNING,
'msg': DROPPEDMSG,
'args': {
'exception': exception,
'item': item,
}
}
@classmethod
def from_crawler(cls, crawler):
return cls()
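# --- Illustrative sketch (not part of the original module) ---
# The dicts returned above follow the "output['msg'] % output['args']"
# contract from the class docstring. Rendering CRAWLEDMSG by hand with
# made-up values:
if __name__ == '__main__':
    demo_args = {
        'status': 200,
        'request': '<GET http://example.com>',
        'referer': None,
        'flags': '',
    }
    print(CRAWLEDMSG % demo_args)  # Crawled (200) <GET http://example.com> (referer: None)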
|
yukoba/sympy | refs/heads/master | sympy/utilities/codegen.py | 30 | """
module for generating C, C++, Fortran77, Fortran90 and Octave/Matlab routines
that evaluate sympy expressions. This module is work in progress. Only the
milestones with a '+' character in the list below have been completed.
--- How is sympy.utilities.codegen different from sympy.printing.ccode? ---
We considered the idea to extend the printing routines for sympy functions in
such a way that it prints complete compilable code, but this leads to a few
unsurmountable issues that can only be tackled with dedicated code generator:
- For C, one needs both a code and a header file, while the printing routines
generate just one string. This code generator can be extended to support
.pyf files for f2py.
- SymPy functions are not concerned with programming-technical issues, such
as input, output and input-output arguments. Other examples are contiguous
or non-contiguous arrays, including headers of other libraries such as gsl
or others.
- It is highly interesting to evaluate several sympy functions in one C
routine, eventually sharing common intermediate results with the help
of the cse routine. This is more than just printing.
- From the programming perspective, expressions with constants should be
evaluated in the code generator as much as possible. This is different
for printing.
--- Basic assumptions ---
* A generic Routine data structure describes the routine that must be
translated into C/Fortran/... code. This data structure covers all
features present in one or more of the supported languages.
* Descendants from the CodeGen class transform multiple Routine instances
into compilable code. Each derived class translates into a specific
language.
* In many cases, one wants a simple workflow. The friendly functions in the
last part are a simple api on top of the Routine/CodeGen stuff. They are
easier to use, but are less powerful.
--- Milestones ---
+ First working version with scalar input arguments, generating C code,
tests
+ Friendly functions that are easier to use than the rigorous
Routine/CodeGen workflow.
+ Integer and Real numbers as input and output
+ Output arguments
+ InputOutput arguments
+ Sort input/output arguments properly
+ Contiguous array arguments (numpy matrices)
+ Also generate .pyf code for f2py (in autowrap module)
+ Isolate constants and evaluate them beforehand in double precision
+ Fortran 90
+ Octave/Matlab
- Common Subexpression Elimination
- User defined comments in the generated code
- Optional extra include lines for libraries/objects that can eval special
functions
- Test other C compilers and libraries: gcc, tcc, libtcc, gcc+gsl, ...
- Contiguous array arguments (sympy matrices)
- Non-contiguous array arguments (sympy matrices)
- ccode must raise an error when it encounters something that can not be
translated into c. ccode(integrate(sin(x)/x, x)) does not make sense.
- Complex numbers as input and output
- A default complex datatype
- Include extra information in the header: date, user, hostname, sha1
hash, ...
- Fortran 77
- C++
- Python
- ...
"""
from __future__ import print_function, division
import os
import textwrap
from sympy import __version__ as sympy_version
from sympy.core import Symbol, S, Expr, Tuple, Equality, Function
from sympy.core.compatibility import is_sequence, StringIO, string_types
from sympy.printing.codeprinter import AssignmentError
from sympy.printing.ccode import ccode, CCodePrinter
from sympy.printing.fcode import fcode, FCodePrinter
from sympy.printing.octave import octave_code, OctaveCodePrinter
from sympy.tensor import Idx, Indexed, IndexedBase
from sympy.matrices import (MatrixSymbol, ImmutableMatrix, MatrixBase,
MatrixExpr, MatrixSlice)
__all__ = [
# description of routines
"Routine", "DataType", "default_datatypes", "get_default_datatype",
"Argument", "InputArgument", "Result",
# routines -> code
"CodeGen", "CCodeGen", "FCodeGen", "OctaveCodeGen",
# friendly functions
"codegen", "make_routine",
]
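# --- Illustrative sketch (not part of the original module) ---
# The "friendly" entry point mentioned in the module docstring is codegen(),
# defined near the end of this file, so a runnable example is shown here in
# doctest form rather than as top-level code:
#
#     >>> from sympy import symbols
#     >>> from sympy.utilities.codegen import codegen
#     >>> x, y = symbols('x y')
#     >>> [(c_name, c_code), (h_name, c_header)] = codegen(('f', x + y), 'C', 'demo')
#     >>> c_name
#     'demo.c'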
#
# Description of routines
#
class Routine(object):
"""Generic description of evaluation routine for set of expressions.
A CodeGen class can translate instances of this class into code in a
particular language. The routine specification covers all the features
present in these languages. The CodeGen part must raise an exception
when certain features are not present in the target language. For
example, multiple return values are possible in Python, but not in C or
Fortran. Another example: Fortran and Python support complex numbers,
while C does not.
"""
def __init__(self, name, arguments, results, local_vars, global_vars):
"""Initialize a Routine instance.
Parameters
==========
name : string
Name of the routine.
arguments : list of Arguments
These are things that appear in arguments of a routine, often
appearing on the right-hand side of a function call. These are
commonly InputArguments but in some languages, they can also be
OutputArguments or InOutArguments (e.g., pass-by-reference in C
code).
results : list of Results
These are the return values of the routine, often appearing on
the left-hand side of a function call. The difference between
Results and OutputArguments and when you should use each is
language-specific.
local_vars : list of Symbols
These are used internally by the routine.
global_vars : list of Symbols
Variables which will not be passed into the function.
"""
# extract all input symbols and all symbols appearing in an expression
input_symbols = set([])
symbols = set([])
for arg in arguments:
if isinstance(arg, OutputArgument):
symbols.update(arg.expr.free_symbols)
elif isinstance(arg, InputArgument):
input_symbols.add(arg.name)
elif isinstance(arg, InOutArgument):
input_symbols.add(arg.name)
symbols.update(arg.expr.free_symbols)
else:
raise ValueError("Unknown Routine argument: %s" % arg)
for r in results:
if not isinstance(r, Result):
raise ValueError("Unknown Routine result: %s" % r)
symbols.update(r.expr.free_symbols)
# Check that all symbols in the expressions are covered by
# InputArguments/InOutArguments---subset because user could
# specify additional (unused) InputArguments or local_vars.
notcovered = symbols.difference(
input_symbols.union(local_vars).union(global_vars))
if notcovered != set([]):
raise ValueError("Symbols needed for output are not in input " +
", ".join([str(x) for x in notcovered]))
self.name = name
self.arguments = arguments
self.results = results
self.local_vars = local_vars
self.global_vars = global_vars
@property
def variables(self):
"""Returns a set of all variables possibly used in the routine.
For routines with unnamed return values, the dummies that may or
may not be used will be included in the set.
"""
v = set(self.local_vars)
for arg in self.arguments:
v.add(arg.name)
for res in self.results:
v.add(res.result_var)
return v
@property
def result_variables(self):
"""Returns a list of OutputArgument, InOutArgument and Result.
If return values are present, they are at the end of the list.
"""
args = [arg for arg in self.arguments if isinstance(
arg, (OutputArgument, InOutArgument))]
args.extend(self.results)
return args
class DataType(object):
"""Holds strings for a certain datatype in different languages."""
def __init__(self, cname, fname, pyname, octname):
self.cname = cname
self.fname = fname
self.pyname = pyname
self.octname = octname
default_datatypes = {
"int": DataType("int", "INTEGER*4", "int", ""),
"float": DataType("double", "REAL*8", "float", "")
}
def get_default_datatype(expr):
"""Derives an appropriate datatype based on the expression."""
if expr.is_integer:
return default_datatypes["int"]
elif isinstance(expr, MatrixBase):
for element in expr:
if not element.is_integer:
return default_datatypes["float"]
return default_datatypes["int"]
else:
return default_datatypes["float"]
class Variable(object):
"""Represents a typed variable."""
def __init__(self, name, datatype=None, dimensions=None, precision=None):
"""Return a new variable.
Parameters
==========
name : Symbol or MatrixSymbol
datatype : optional
When not given, the data type will be guessed based on the
assumptions on the symbol argument.
dimensions : sequence containing tuples, optional
If present, the argument is interpreted as an array, where this
sequence of tuples specifies (lower, upper) bounds for each
index of the array.
precision : int, optional
Controls the precision of floating point constants.
"""
if not isinstance(name, (Symbol, MatrixSymbol)):
raise TypeError("The first argument must be a sympy symbol.")
if datatype is None:
datatype = get_default_datatype(name)
elif not isinstance(datatype, DataType):
raise TypeError("The (optional) `datatype' argument must be an "
"instance of the DataType class.")
if dimensions and not isinstance(dimensions, (tuple, list)):
raise TypeError(
"The dimension argument must be a sequence of tuples")
self._name = name
self._datatype = {
'C': datatype.cname,
'FORTRAN': datatype.fname,
'OCTAVE': datatype.octname,
'PYTHON': datatype.pyname
}
self.dimensions = dimensions
self.precision = precision
@property
def name(self):
return self._name
def get_datatype(self, language):
"""Returns the datatype string for the requested language.
Examples
========
>>> from sympy import Symbol
>>> from sympy.utilities.codegen import Variable
>>> x = Variable(Symbol('x'))
>>> x.get_datatype('c')
'double'
>>> x.get_datatype('fortran')
'REAL*8'
"""
try:
return self._datatype[language.upper()]
except KeyError:
raise CodeGenError("Has datatypes for languages: %s" %
", ".join(self._datatype))
class Argument(Variable):
"""An abstract Argument data structure: a name and a data type.
This structure is refined in the descendants below.
"""
pass
class InputArgument(Argument):
pass
class ResultBase(object):
"""Base class for all "outgoing" information from a routine.
Objects of this class stores a sympy expression, and a sympy object
representing a result variable that will be used in the generated code
only if necessary.
"""
def __init__(self, expr, result_var):
self.expr = expr
self.result_var = result_var
class OutputArgument(Argument, ResultBase):
"""OutputArgument are always initialized in the routine."""
def __init__(self, name, result_var, expr, datatype=None, dimensions=None, precision=None):
"""Return a new variable.
Parameters
==========
name : Symbol, MatrixSymbol
The name of this variable. When used for code generation, this
might appear, for example, in the prototype of function in the
argument list.
result_var : Symbol, Indexed
Something that can be used to assign a value to this variable.
Typically the same as `name` but for Indexed this should be e.g.,
"y[i]" whereas `name` should be the Symbol "y".
expr : object
The expression that should be output, typically a SymPy
expression.
datatype : optional
When not given, the data type will be guessed based on the
assumptions on the symbol argument.
dimensions : sequence containing tuples, optional
If present, the argument is interpreted as an array, where this
sequence of tuples specifies (lower, upper) bounds for each
index of the array.
precision : int, optional
Controls the precision of floating point constants.
"""
Argument.__init__(self, name, datatype, dimensions, precision)
ResultBase.__init__(self, expr, result_var)
class InOutArgument(Argument, ResultBase):
"""InOutArgument are never initialized in the routine."""
def __init__(self, name, result_var, expr, datatype=None, dimensions=None, precision=None):
if not datatype:
datatype = get_default_datatype(expr)
Argument.__init__(self, name, datatype, dimensions, precision)
ResultBase.__init__(self, expr, result_var)
__init__.__doc__ = OutputArgument.__init__.__doc__
class Result(Variable, ResultBase):
"""An expression for a return value.
The name result is used to avoid conflicts with the reserved word
"return" in the python language. It is also shorter than ReturnValue.
These may or may not need a name in the destination (e.g., "return(x*y)"
might return a value without ever naming it).
"""
def __init__(self, expr, name=None, result_var=None, datatype=None,
dimensions=None, precision=None):
"""Initialize a return value.
Parameters
==========
expr : SymPy expression
name : Symbol, MatrixSymbol, optional
The name of this return variable. When used for code generation,
this might appear, for example, in the prototype of function in a
list of return values. A dummy name is generated if omitted.
result_var : Symbol, Indexed, optional
Something that can be used to assign a value to this variable.
Typically the same as `name` but for Indexed this should be e.g.,
"y[i]" whereas `name` should be the Symbol "y". Defaults to
`name` if omitted.
datatype : optional
When not given, the data type will be guessed based on the
assumptions on the symbol argument.
dimensions : sequence containing tuples, optional
If present, this variable is interpreted as an array,
where this sequence of tuples specifies (lower, upper)
bounds for each index of the array.
precision : int, optional
Controls the precision of floating point constants.
"""
if not isinstance(expr, (Expr, MatrixBase, MatrixExpr)):
raise TypeError("The first argument must be a sympy expression.")
if name is None:
name = 'result_%d' % abs(hash(expr))
if isinstance(name, string_types):
if isinstance(expr, (MatrixBase, MatrixExpr)):
name = MatrixSymbol(name, *expr.shape)
else:
name = Symbol(name)
if result_var is None:
result_var = name
Variable.__init__(self, name, datatype=datatype,
dimensions=dimensions, precision=precision)
ResultBase.__init__(self, expr, result_var)
#
# Transformation of routine objects into code
#
class CodeGen(object):
"""Abstract class for the code generators."""
def __init__(self, project="project"):
"""Initialize a code generator.
Derived classes will offer more options that affect the generated
code.
"""
self.project = project
def routine(self, name, expr, argument_sequence, global_vars):
"""Creates a Routine object that is appropriate for this language.
This implementation is appropriate for at least C/Fortran. Subclasses
can override this if necessary.
Here, we assume at most one return value (the l-value) which must be
scalar. Additional outputs are OutputArguments (e.g., pointers on
right-hand-side or pass-by-reference). Matrices are always returned
via OutputArguments. If ``argument_sequence`` is None, arguments will
be ordered alphabetically, but with all InputArguments first, and then
OutputArgument and InOutArguments.
"""
if is_sequence(expr) and not isinstance(expr, (MatrixBase, MatrixExpr)):
if not expr:
raise ValueError("No expression given")
expressions = Tuple(*expr)
else:
expressions = Tuple(expr)
# local variables
local_vars = set([i.label for i in expressions.atoms(Idx)])
# global variables
global_vars = set() if global_vars is None else set(global_vars)
# symbols that should be arguments
symbols = expressions.free_symbols - local_vars - global_vars
# Decide whether to use output argument or return value
return_val = []
output_args = []
for expr in expressions:
if isinstance(expr, Equality):
out_arg = expr.lhs
expr = expr.rhs
if isinstance(out_arg, Indexed):
dims = tuple([ (S.Zero, dim - 1) for dim in out_arg.shape])
symbol = out_arg.base.label
elif isinstance(out_arg, Symbol):
dims = []
symbol = out_arg
elif isinstance(out_arg, MatrixSymbol):
dims = tuple([ (S.Zero, dim - 1) for dim in out_arg.shape])
symbol = out_arg
else:
raise CodeGenError("Only Indexed, Symbol, or MatrixSymbol "
"can define output arguments.")
if expr.has(symbol):
output_args.append(
InOutArgument(symbol, out_arg, expr, dimensions=dims))
else:
output_args.append(
OutputArgument(symbol, out_arg, expr, dimensions=dims))
# avoid duplicate arguments
symbols.remove(symbol)
elif isinstance(expr, (ImmutableMatrix, MatrixSlice)):
# Create a "dummy" MatrixSymbol to use as the Output arg
out_arg = MatrixSymbol('out_%s' % abs(hash(expr)), *expr.shape)
dims = tuple([(S.Zero, dim - 1) for dim in out_arg.shape])
output_args.append(
OutputArgument(out_arg, out_arg, expr, dimensions=dims))
else:
return_val.append(Result(expr))
arg_list = []
# setup input argument list
array_symbols = {}
for array in expressions.atoms(Indexed):
array_symbols[array.base.label] = array
for array in expressions.atoms(MatrixSymbol):
array_symbols[array] = array
for symbol in sorted(symbols, key=str):
if symbol in array_symbols:
dims = []
array = array_symbols[symbol]
for dim in array.shape:
dims.append((S.Zero, dim - 1))
metadata = {'dimensions': dims}
else:
metadata = {}
arg_list.append(InputArgument(symbol, **metadata))
output_args.sort(key=lambda x: str(x.name))
arg_list.extend(output_args)
if argument_sequence is not None:
# if the user has supplied IndexedBase instances, we'll accept that
new_sequence = []
for arg in argument_sequence:
if isinstance(arg, IndexedBase):
new_sequence.append(arg.label)
else:
new_sequence.append(arg)
argument_sequence = new_sequence
missing = [x for x in arg_list if x.name not in argument_sequence]
if missing:
msg = "Argument list didn't specify: {0} "
msg = msg.format(", ".join([str(m.name) for m in missing]))
raise CodeGenArgumentListError(msg, missing)
# create redundant arguments to produce the requested sequence
name_arg_dict = dict([(x.name, x) for x in arg_list])
new_args = []
for symbol in argument_sequence:
try:
new_args.append(name_arg_dict[symbol])
except KeyError:
new_args.append(InputArgument(symbol))
arg_list = new_args
return Routine(name, arg_list, return_val, local_vars, global_vars)
def write(self, routines, prefix, to_files=False, header=True, empty=True):
"""Writes all the source code files for the given routines.
The generated source is returned as a list of (filename, contents)
tuples, or is written to files (see below). Each filename consists
of the given prefix, appended with an appropriate extension.
Parameters
==========
routines : list
A list of Routine instances to be written
prefix : string
The prefix for the output files
to_files : bool, optional
When True, the output is written to files. Otherwise, a list
of (filename, contents) tuples is returned. [default: False]
header : bool, optional
When True, a header comment is included on top of each source
file. [default: True]
empty : bool, optional
When True, empty lines are included to structure the source
files. [default: True]
"""
if to_files:
for dump_fn in self.dump_fns:
filename = "%s.%s" % (prefix, dump_fn.extension)
with open(filename, "w") as f:
dump_fn(self, routines, f, prefix, header, empty)
else:
result = []
for dump_fn in self.dump_fns:
filename = "%s.%s" % (prefix, dump_fn.extension)
contents = StringIO()
dump_fn(self, routines, contents, prefix, header, empty)
result.append((filename, contents.getvalue()))
return result
def dump_code(self, routines, f, prefix, header=True, empty=True):
"""Write the code by calling language specific methods.
The generated file contains all the definitions of the routines in
low-level code and refers to the header file if appropriate.
Parameters
==========
routines : list
A list of Routine instances.
f : file-like
Where to write the file.
prefix : string
The filename prefix, used to refer to the proper header file.
Only the basename of the prefix is used.
header : bool, optional
When True, a header comment is included on top of each source
file. [default : True]
empty : bool, optional
When True, empty lines are included to structure the source
files. [default : True]
"""
code_lines = self._preprocessor_statements(prefix)
for routine in routines:
if empty:
code_lines.append("\n")
code_lines.extend(self._get_routine_opening(routine))
code_lines.extend(self._declare_arguments(routine))
code_lines.extend(self._declare_globals(routine))
code_lines.extend(self._declare_locals(routine))
if empty:
code_lines.append("\n")
code_lines.extend(self._call_printer(routine))
if empty:
code_lines.append("\n")
code_lines.extend(self._get_routine_ending(routine))
code_lines = self._indent_code(''.join(code_lines))
if header:
code_lines = ''.join(self._get_header() + [code_lines])
if code_lines:
f.write(code_lines)
class CodeGenError(Exception):
pass
class CodeGenArgumentListError(Exception):
@property
def missing_args(self):
return self.args[1]
header_comment = """Code generated with sympy %(version)s
See http://www.sympy.org/ for more information.
This file is part of '%(project)s'
"""
class CCodeGen(CodeGen):
"""Generator for C code.
The .write() method inherited from CodeGen will output a code file and
an interface file, <prefix>.c and <prefix>.h respectively.
"""
code_extension = "c"
interface_extension = "h"
def _get_header(self):
"""Writes a common header for the generated files."""
code_lines = []
code_lines.append("/" + "*"*78 + '\n')
tmp = header_comment % {"version": sympy_version,
"project": self.project}
for line in tmp.splitlines():
code_lines.append(" *%s*\n" % line.center(76))
code_lines.append(" " + "*"*78 + "/\n")
return code_lines
def get_prototype(self, routine):
"""Returns a string for the function prototype of the routine.
        If the routine has multiple result objects, a CodeGenError is
raised.
See: http://en.wikipedia.org/wiki/Function_prototype
"""
if len(routine.results) > 1:
raise CodeGenError("C only supports a single or no return value.")
elif len(routine.results) == 1:
ctype = routine.results[0].get_datatype('C')
else:
ctype = "void"
type_args = []
for arg in routine.arguments:
name = ccode(arg.name)
if arg.dimensions or isinstance(arg, ResultBase):
type_args.append((arg.get_datatype('C'), "*%s" % name))
else:
type_args.append((arg.get_datatype('C'), name))
arguments = ", ".join([ "%s %s" % t for t in type_args])
return "%s %s(%s)" % (ctype, routine.name, arguments)
def _preprocessor_statements(self, prefix):
code_lines = []
code_lines.append("#include \"%s.h\"\n" % os.path.basename(prefix))
code_lines.append("#include <math.h>\n")
return code_lines
def _get_routine_opening(self, routine):
prototype = self.get_prototype(routine)
return ["%s {\n" % prototype]
def _declare_arguments(self, routine):
# arguments are declared in prototype
return []
def _declare_globals(self, routine):
# global variables are not explicitly declared within C functions
return []
def _declare_locals(self, routine):
# loop variables are declared in loop statement
return []
def _call_printer(self, routine):
code_lines = []
# Compose a list of symbols to be dereferenced in the function
# body. These are the arguments that were passed by a reference
# pointer, excluding arrays.
dereference = []
for arg in routine.arguments:
if isinstance(arg, ResultBase) and not arg.dimensions:
dereference.append(arg.name)
return_val = None
for result in routine.result_variables:
if isinstance(result, Result):
assign_to = routine.name + "_result"
t = result.get_datatype('c')
code_lines.append("{0} {1};\n".format(t, str(assign_to)))
return_val = assign_to
else:
assign_to = result.result_var
try:
constants, not_c, c_expr = ccode(result.expr, human=False,
assign_to=assign_to, dereference=dereference)
except AssignmentError:
assign_to = result.result_var
code_lines.append(
"%s %s;\n" % (result.get_datatype('c'), str(assign_to)))
constants, not_c, c_expr = ccode(result.expr, human=False,
assign_to=assign_to, dereference=dereference)
for name, value in sorted(constants, key=str):
code_lines.append("double const %s = %s;\n" % (name, value))
code_lines.append("%s\n" % c_expr)
if return_val:
code_lines.append(" return %s;\n" % return_val)
return code_lines
def _indent_code(self, codelines):
p = CCodePrinter()
return p.indent_code(codelines)
def _get_routine_ending(self, routine):
return ["}\n"]
def dump_c(self, routines, f, prefix, header=True, empty=True):
self.dump_code(routines, f, prefix, header, empty)
dump_c.extension = code_extension
dump_c.__doc__ = CodeGen.dump_code.__doc__
def dump_h(self, routines, f, prefix, header=True, empty=True):
"""Writes the C header file.
This file contains all the function declarations.
Parameters
==========
routines : list
A list of Routine instances.
f : file-like
Where to write the file.
prefix : string
The filename prefix, used to construct the include guards.
Only the basename of the prefix is used.
header : bool, optional
When True, a header comment is included on top of each source
file. [default : True]
empty : bool, optional
When True, empty lines are included to structure the source
files. [default : True]
"""
if header:
print(''.join(self._get_header()), file=f)
guard_name = "%s__%s__H" % (self.project.replace(
" ", "_").upper(), prefix.replace("/", "_").upper())
# include guards
if empty:
print(file=f)
print("#ifndef %s" % guard_name, file=f)
print("#define %s" % guard_name, file=f)
if empty:
print(file=f)
# declaration of the function prototypes
for routine in routines:
prototype = self.get_prototype(routine)
print("%s;" % prototype, file=f)
# end if include guards
if empty:
print(file=f)
print("#endif", file=f)
if empty:
print(file=f)
dump_h.extension = interface_extension
# This list of dump functions is used by CodeGen.write to know which dump
# functions it has to call.
dump_fns = [dump_c, dump_h]
class FCodeGen(CodeGen):
"""Generator for Fortran 95 code
The .write() method inherited from CodeGen will output a code file and
an interface file, <prefix>.f90 and <prefix>.h respectively.
"""
code_extension = "f90"
interface_extension = "h"
def __init__(self, project='project'):
CodeGen.__init__(self, project)
def _get_symbol(self, s):
"""Returns the symbol as fcode prints it."""
return fcode(s).strip()
def _get_header(self):
"""Writes a common header for the generated files."""
code_lines = []
code_lines.append("!" + "*"*78 + '\n')
tmp = header_comment % {"version": sympy_version,
"project": self.project}
for line in tmp.splitlines():
code_lines.append("!*%s*\n" % line.center(76))
code_lines.append("!" + "*"*78 + '\n')
return code_lines
def _preprocessor_statements(self, prefix):
return []
def _get_routine_opening(self, routine):
"""Returns the opening statements of the fortran routine."""
code_list = []
if len(routine.results) > 1:
raise CodeGenError(
"Fortran only supports a single or no return value.")
elif len(routine.results) == 1:
result = routine.results[0]
code_list.append(result.get_datatype('fortran'))
code_list.append("function")
else:
code_list.append("subroutine")
args = ", ".join("%s" % self._get_symbol(arg.name)
for arg in routine.arguments)
call_sig = "{0}({1})\n".format(routine.name, args)
# Fortran 95 requires all lines be less than 132 characters, so wrap
# this line before appending.
call_sig = ' &\n'.join(textwrap.wrap(call_sig,
width=60,
break_long_words=False)) + '\n'
code_list.append(call_sig)
code_list = [' '.join(code_list)]
code_list.append('implicit none\n')
return code_list
def _declare_arguments(self, routine):
# argument type declarations
code_list = []
array_list = []
scalar_list = []
for arg in routine.arguments:
if isinstance(arg, InputArgument):
typeinfo = "%s, intent(in)" % arg.get_datatype('fortran')
elif isinstance(arg, InOutArgument):
typeinfo = "%s, intent(inout)" % arg.get_datatype('fortran')
elif isinstance(arg, OutputArgument):
typeinfo = "%s, intent(out)" % arg.get_datatype('fortran')
else:
raise CodeGenError("Unkown Argument type: %s" % type(arg))
fprint = self._get_symbol
if arg.dimensions:
# fortran arrays start at 1
dimstr = ", ".join(["%s:%s" % (
fprint(dim[0] + 1), fprint(dim[1] + 1))
for dim in arg.dimensions])
typeinfo += ", dimension(%s)" % dimstr
array_list.append("%s :: %s\n" % (typeinfo, fprint(arg.name)))
else:
scalar_list.append("%s :: %s\n" % (typeinfo, fprint(arg.name)))
# scalars first, because they can be used in array declarations
code_list.extend(scalar_list)
code_list.extend(array_list)
return code_list
def _declare_globals(self, routine):
# Global variables not explicitly declared within Fortran 90 functions.
# Note: a future F77 mode may need to generate "common" blocks.
return []
def _declare_locals(self, routine):
code_list = []
for var in sorted(routine.local_vars, key=str):
typeinfo = get_default_datatype(var)
code_list.append("%s :: %s\n" % (
typeinfo.fname, self._get_symbol(var)))
return code_list
def _get_routine_ending(self, routine):
"""Returns the closing statements of the fortran routine."""
if len(routine.results) == 1:
return ["end function\n"]
else:
return ["end subroutine\n"]
def get_interface(self, routine):
"""Returns a string for the function interface.
The routine should have a single result object, which can be None.
If the routine has multiple result objects, a CodeGenError is
raised.
See: http://en.wikipedia.org/wiki/Function_prototype
"""
prototype = [ "interface\n" ]
prototype.extend(self._get_routine_opening(routine))
prototype.extend(self._declare_arguments(routine))
prototype.extend(self._get_routine_ending(routine))
prototype.append("end interface\n")
return "".join(prototype)
def _call_printer(self, routine):
declarations = []
code_lines = []
for result in routine.result_variables:
if isinstance(result, Result):
assign_to = routine.name
elif isinstance(result, (OutputArgument, InOutArgument)):
assign_to = result.result_var
constants, not_fortran, f_expr = fcode(result.expr,
assign_to=assign_to, source_format='free', human=False)
for obj, v in sorted(constants, key=str):
t = get_default_datatype(obj)
declarations.append(
"%s, parameter :: %s = %s\n" % (t.fname, obj, v))
for obj in sorted(not_fortran, key=str):
t = get_default_datatype(obj)
if isinstance(obj, Function):
name = obj.func
else:
name = obj
declarations.append("%s :: %s\n" % (t.fname, name))
code_lines.append("%s\n" % f_expr)
return declarations + code_lines
def _indent_code(self, codelines):
p = FCodePrinter({'source_format': 'free', 'human': False})
return p.indent_code(codelines)
def dump_f95(self, routines, f, prefix, header=True, empty=True):
        # check that symbols are unique when case is ignored
for r in routines:
lowercase = set([str(x).lower() for x in r.variables])
orig_case = set([str(x) for x in r.variables])
if len(lowercase) < len(orig_case):
raise CodeGenError("Fortran ignores case. Got symbols: %s" %
(", ".join([str(var) for var in r.variables])))
self.dump_code(routines, f, prefix, header, empty)
dump_f95.extension = code_extension
dump_f95.__doc__ = CodeGen.dump_code.__doc__
def dump_h(self, routines, f, prefix, header=True, empty=True):
"""Writes the interface to a header file.
This file contains all the function declarations.
Parameters
==========
routines : list
A list of Routine instances.
f : file-like
Where to write the file.
prefix : string
The filename prefix.
header : bool, optional
When True, a header comment is included on top of each source
file. [default : True]
empty : bool, optional
When True, empty lines are included to structure the source
files. [default : True]
"""
if header:
print(''.join(self._get_header()), file=f)
if empty:
print(file=f)
# declaration of the function prototypes
for routine in routines:
prototype = self.get_interface(routine)
f.write(prototype)
if empty:
print(file=f)
dump_h.extension = interface_extension
# This list of dump functions is used by CodeGen.write to know which dump
# functions it has to call.
dump_fns = [dump_f95, dump_h]
class OctaveCodeGen(CodeGen):
"""Generator for Octave code.
The .write() method inherited from CodeGen will output a code file
<prefix>.m.
Octave .m files usually contain one function. That function name should
match the filename (``prefix``). If you pass multiple ``name_expr`` pairs,
the latter ones are presumed to be private functions accessed by the
primary function.
You should only pass inputs to ``argument_sequence``: outputs are ordered
according to their order in ``name_expr``.
"""
code_extension = "m"
def routine(self, name, expr, argument_sequence, global_vars):
"""Specialized Routine creation for Octave."""
# FIXME: this is probably general enough for other high-level
        # languages, perhaps it's the C/Fortran one that is specialized!
if is_sequence(expr) and not isinstance(expr, (MatrixBase, MatrixExpr)):
if not expr:
raise ValueError("No expression given")
expressions = Tuple(*expr)
else:
expressions = Tuple(expr)
# local variables
local_vars = set([i.label for i in expressions.atoms(Idx)])
# global variables
global_vars = set() if global_vars is None else set(global_vars)
# symbols that should be arguments
symbols = expressions.free_symbols - local_vars - global_vars
# Octave supports multiple return values
return_vals = []
for (i, expr) in enumerate(expressions):
if isinstance(expr, Equality):
out_arg = expr.lhs
expr = expr.rhs
symbol = out_arg
if isinstance(out_arg, Indexed):
symbol = out_arg.base.label
if not isinstance(out_arg, (Indexed, Symbol, MatrixSymbol)):
raise CodeGenError("Only Indexed, Symbol, or MatrixSymbol "
"can define output arguments.")
return_vals.append(Result(expr, name=symbol, result_var=out_arg))
if not expr.has(symbol):
# this is a pure output: remove from the symbols list, so
# it doesn't become an input.
symbols.remove(symbol)
else:
# we have no name for this output
return_vals.append(Result(expr, name='out%d' % (i+1)))
# setup input argument list
arg_list = []
array_symbols = {}
for array in expressions.atoms(Indexed):
array_symbols[array.base.label] = array
for array in expressions.atoms(MatrixSymbol):
array_symbols[array] = array
for symbol in sorted(symbols, key=str):
arg_list.append(InputArgument(symbol))
if argument_sequence is not None:
# if the user has supplied IndexedBase instances, we'll accept that
new_sequence = []
for arg in argument_sequence:
if isinstance(arg, IndexedBase):
new_sequence.append(arg.label)
else:
new_sequence.append(arg)
argument_sequence = new_sequence
missing = [x for x in arg_list if x.name not in argument_sequence]
if missing:
msg = "Argument list didn't specify: {0} "
msg = msg.format(", ".join([str(m.name) for m in missing]))
raise CodeGenArgumentListError(msg, missing)
# create redundant arguments to produce the requested sequence
name_arg_dict = dict([(x.name, x) for x in arg_list])
new_args = []
for symbol in argument_sequence:
try:
new_args.append(name_arg_dict[symbol])
except KeyError:
new_args.append(InputArgument(symbol))
arg_list = new_args
return Routine(name, arg_list, return_vals, local_vars, global_vars)
def _get_symbol(self, s):
"""Print the symbol appropriately."""
return octave_code(s).strip()
def _get_header(self):
"""Writes a common header for the generated files."""
code_lines = []
tmp = header_comment % {"version": sympy_version,
"project": self.project}
for line in tmp.splitlines():
if line == '':
code_lines.append("%\n")
else:
code_lines.append("%% %s\n" % line)
return code_lines
def _preprocessor_statements(self, prefix):
return []
def _get_routine_opening(self, routine):
"""Returns the opening statements of the routine."""
code_list = []
code_list.append("function ")
# Outputs
outs = []
for i, result in enumerate(routine.results):
if isinstance(result, Result):
# Note: name not result_var; want `y` not `y(i)` for Indexed
s = self._get_symbol(result.name)
else:
raise CodeGenError("unexpected object in Routine results")
outs.append(s)
if len(outs) > 1:
code_list.append("[" + (", ".join(outs)) + "]")
else:
code_list.append("".join(outs))
code_list.append(" = ")
# Inputs
args = []
for i, arg in enumerate(routine.arguments):
if isinstance(arg, (OutputArgument, InOutArgument)):
raise CodeGenError("Octave: invalid argument of type %s" %
str(type(arg)))
if isinstance(arg, InputArgument):
args.append("%s" % self._get_symbol(arg.name))
args = ", ".join(args)
code_list.append("%s(%s)\n" % (routine.name, args))
code_list = [ "".join(code_list) ]
return code_list
def _declare_arguments(self, routine):
return []
def _declare_globals(self, routine):
if not routine.global_vars:
return []
s = " ".join(sorted([self._get_symbol(g) for g in routine.global_vars]))
return ["global " + s + "\n"]
def _declare_locals(self, routine):
return []
def _get_routine_ending(self, routine):
return ["end\n"]
def _call_printer(self, routine):
declarations = []
code_lines = []
for i, result in enumerate(routine.results):
if isinstance(result, Result):
assign_to = result.result_var
else:
raise CodeGenError("unexpected object in Routine results")
constants, not_supported, oct_expr = octave_code(result.expr,
assign_to=assign_to, human=False)
for obj, v in sorted(constants, key=str):
declarations.append(
" %s = %s; %% constant\n" % (obj, v))
for obj in sorted(not_supported, key=str):
if isinstance(obj, Function):
name = obj.func
else:
name = obj
declarations.append(
" %% unsupported: %s\n" % (name))
code_lines.append("%s\n" % (oct_expr))
return declarations + code_lines
def _indent_code(self, codelines):
        # Note that indenting seems to happen twice, first
        # statement-by-statement by OctaveCodePrinter then again here.
        p = OctaveCodePrinter({'human': False})
        return p.indent_code(codelines)
def dump_m(self, routines, f, prefix, header=True, empty=True, inline=True):
        # Note: this used to call self.dump_code(), but we need more
        # control over the header.
code_lines = self._preprocessor_statements(prefix)
for i, routine in enumerate(routines):
if i > 0:
if empty:
code_lines.append("\n")
code_lines.extend(self._get_routine_opening(routine))
if i == 0:
if routine.name != prefix:
raise ValueError('Octave function name should match prefix')
if header:
code_lines.append("%" + prefix.upper() +
" Autogenerated by sympy\n")
code_lines.append(''.join(self._get_header()))
code_lines.extend(self._declare_arguments(routine))
code_lines.extend(self._declare_globals(routine))
code_lines.extend(self._declare_locals(routine))
if empty:
code_lines.append("\n")
code_lines.extend(self._call_printer(routine))
if empty:
code_lines.append("\n")
code_lines.extend(self._get_routine_ending(routine))
code_lines = self._indent_code(''.join(code_lines))
if code_lines:
f.write(code_lines)
dump_m.extension = code_extension
dump_m.__doc__ = CodeGen.dump_code.__doc__
# This list of dump functions is used by CodeGen.write to know which dump
# functions it has to call.
dump_fns = [dump_m]
def get_code_generator(language, project):
CodeGenClass = {"C": CCodeGen, "F95": FCodeGen,
"OCTAVE": OctaveCodeGen}.get(language.upper())
if CodeGenClass is None:
raise ValueError("Language '%s' is not supported." % language)
return CodeGenClass(project)
#
# Friendly functions
#
def codegen(name_expr, language, prefix=None, project="project",
to_files=False, header=True, empty=True, argument_sequence=None,
global_vars=None):
"""Generate source code for expressions in a given language.
Parameters
==========
name_expr : tuple, or list of tuples
A single (name, expression) tuple or a list of (name, expression)
tuples. Each tuple corresponds to a routine. If the expression is
an equality (an instance of class Equality) the left hand side is
considered an output argument. If expression is an iterable, then
the routine will have multiple outputs.
language : string
A string that indicates the source code language. This is case
insensitive. Currently, 'C', 'F95' and 'Octave' are supported.
'Octave' generates code compatible with both Octave and Matlab.
prefix : string, optional
A prefix for the names of the files that contain the source code.
Language-dependent suffixes will be appended. If omitted, the name
of the first name_expr tuple is used.
project : string, optional
A project name, used for making unique preprocessor instructions.
[default: "project"]
to_files : bool, optional
When True, the code will be written to one or more files with the
given prefix, otherwise strings with the names and contents of
these files are returned. [default: False]
header : bool, optional
When True, a header is written on top of each source file.
[default: True]
empty : bool, optional
When True, empty lines are used to structure the code.
[default: True]
argument_sequence : iterable, optional
Sequence of arguments for the routine in a preferred order. A
CodeGenError is raised if required arguments are missing.
Redundant arguments are used without warning. If omitted,
arguments will be ordered alphabetically, but with all input
        arguments first, and then output or in-out arguments.
global_vars : iterable, optional
Sequence of global variables used by the routine. Variables
listed here will not show up as function arguments.
Examples
========
>>> from sympy.utilities.codegen import codegen
>>> from sympy.abc import x, y, z
>>> [(c_name, c_code), (h_name, c_header)] = codegen(
... ("f", x+y*z), "C", "test", header=False, empty=False)
>>> print(c_name)
test.c
>>> print(c_code)
#include "test.h"
#include <math.h>
double f(double x, double y, double z) {
double f_result;
f_result = x + y*z;
return f_result;
}
>>> print(h_name)
test.h
>>> print(c_header)
#ifndef PROJECT__TEST__H
#define PROJECT__TEST__H
double f(double x, double y, double z);
#endif
Another example using Equality objects to give named outputs. Here the
filename (prefix) is taken from the first (name, expr) pair.
>>> from sympy.abc import f, g
>>> from sympy import Eq
>>> [(c_name, c_code), (h_name, c_header)] = codegen(
... [("myfcn", x + y), ("fcn2", [Eq(f, 2*x), Eq(g, y)])],
... "C", header=False, empty=False)
>>> print(c_name)
myfcn.c
>>> print(c_code)
#include "myfcn.h"
#include <math.h>
double myfcn(double x, double y) {
double myfcn_result;
myfcn_result = x + y;
return myfcn_result;
}
void fcn2(double x, double y, double *f, double *g) {
(*f) = 2*x;
(*g) = y;
}
If the generated function(s) will be part of a larger project where various
global variables have been defined, the 'global_vars' option can be used
to remove the specified variables from the function signature
>>> from sympy.utilities.codegen import codegen
>>> from sympy.abc import x, y, z
>>> [(f_name, f_code), header] = codegen(
... ("f", x+y*z), "F95", header=False, empty=False,
... argument_sequence=(x, y), global_vars=(z,))
>>> print(f_code)
REAL*8 function f(x, y)
implicit none
REAL*8, intent(in) :: x
REAL*8, intent(in) :: y
f = x + y*z
end function
"""
# Initialize the code generator.
code_gen = get_code_generator(language, project)
if isinstance(name_expr[0], string_types):
# single tuple is given, turn it into a singleton list with a tuple.
name_expr = [name_expr]
if prefix is None:
prefix = name_expr[0][0]
# Construct Routines appropriate for this code_gen from (name, expr) pairs.
routines = []
for name, expr in name_expr:
routines.append(code_gen.routine(name, expr, argument_sequence,
global_vars))
# Write the code.
return code_gen.write(routines, prefix, to_files, header, empty)
def make_routine(name, expr, argument_sequence=None,
global_vars=None, language="F95"):
"""A factory that makes an appropriate Routine from an expression.
Parameters
==========
name : string
The name of this routine in the generated code.
expr : expression or list/tuple of expressions
A SymPy expression that the Routine instance will represent. If
given a list or tuple of expressions, the routine will be
considered to have multiple return values and/or output arguments.
argument_sequence : list or tuple, optional
List arguments for the routine in a preferred order. If omitted,
the results are language dependent, for example, alphabetical order
or in the same order as the given expressions.
global_vars : iterable, optional
Sequence of global variables used by the routine. Variables
listed here will not show up as function arguments.
language : string, optional
Specify a target language. The Routine itself should be
language-agnostic but the precise way one is created, error
checking, etc depend on the language. [default: "F95"].
A decision about whether to use output arguments or return values is made
depending on both the language and the particular mathematical expressions.
For an expression of type Equality, the left hand side is typically made
into an OutputArgument (or perhaps an InOutArgument if appropriate).
    Otherwise, typically, the calculated expression is made a return value of
the routine.
Examples
========
>>> from sympy.utilities.codegen import make_routine
>>> from sympy.abc import x, y, f, g
>>> from sympy import Eq
>>> r = make_routine('test', [Eq(f, 2*x), Eq(g, x + y)])
>>> [arg.result_var for arg in r.results]
[]
>>> [arg.name for arg in r.arguments]
[x, y, f, g]
>>> [arg.name for arg in r.result_variables]
[f, g]
>>> r.local_vars
set()
Another more complicated example with a mixture of specified and
automatically-assigned names. Also has Matrix output.
>>> from sympy import Matrix
>>> r = make_routine('fcn', [x*y, Eq(f, 1), Eq(g, x + g), Matrix([[x, 2]])])
>>> [arg.result_var for arg in r.results] # doctest: +SKIP
[result_5397460570204848505]
>>> [arg.expr for arg in r.results]
[x*y]
>>> [arg.name for arg in r.arguments] # doctest: +SKIP
[x, y, f, g, out_8598435338387848786]
We can examine the various arguments more closely:
>>> from sympy.utilities.codegen import (InputArgument, OutputArgument,
... InOutArgument)
>>> [a.name for a in r.arguments if isinstance(a, InputArgument)]
[x, y]
>>> [a.name for a in r.arguments if isinstance(a, OutputArgument)] # doctest: +SKIP
[f, out_8598435338387848786]
>>> [a.expr for a in r.arguments if isinstance(a, OutputArgument)]
[1, Matrix([[x, 2]])]
>>> [a.name for a in r.arguments if isinstance(a, InOutArgument)]
[g]
>>> [a.expr for a in r.arguments if isinstance(a, InOutArgument)]
[g + x]
"""
# initialize a new code generator
code_gen = get_code_generator(language, "nothingElseMatters")
return code_gen.routine(name, expr, argument_sequence, global_vars)
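# Minimal usage sketch: generate C source for a single expression with
# codegen(), mirroring the doctests above. The function and file names here
# are illustrative only; assumes sympy's abc symbols are importable.
if __name__ == '__main__':
    from sympy.abc import x, y
    [(c_name, c_code), (h_name, c_header)] = codegen(
        ("demo", x**2 + y), "C", "demo", header=False, empty=False)
    print(c_code)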
|
orion1024/Sick-Beard | refs/heads/master | sickbeard/clients/requests/packages/urllib3/filepost.py | 240 | # urllib3/filepost.py
# Copyright 2008-2012 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
#
# This module is part of urllib3 and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
import codecs
import mimetypes
from uuid import uuid4
from io import BytesIO
from .packages import six
from .packages.six import b
writer = codecs.lookup('utf-8')[3]
def choose_boundary():
"""
    Our embarrassingly simple replacement for mimetools.choose_boundary.
"""
return uuid4().hex
def get_content_type(filename):
return mimetypes.guess_type(filename)[0] or 'application/octet-stream'
def iter_fields(fields):
"""
Iterate over fields.
Supports list of (k, v) tuples and dicts.
"""
if isinstance(fields, dict):
return ((k, v) for k, v in six.iteritems(fields))
return ((k, v) for k, v in fields)
def encode_multipart_formdata(fields, boundary=None):
"""
Encode a dictionary of ``fields`` using the multipart/form-data MIME format.
:param fields:
Dictionary of fields or list of (key, value) or (key, value, MIME type)
field tuples. The key is treated as the field name, and the value as
the body of the form-data bytes. If the value is a tuple of two
elements, then the first element is treated as the filename of the
form-data section and a suitable MIME type is guessed based on the
filename. If the value is a tuple of three elements, then the third
element is treated as an explicit MIME type of the form-data section.
Field names and filenames must be unicode.
:param boundary:
If not specified, then a random boundary will be generated using
:func:`mimetools.choose_boundary`.
"""
body = BytesIO()
if boundary is None:
boundary = choose_boundary()
for fieldname, value in iter_fields(fields):
body.write(b('--%s\r\n' % (boundary)))
if isinstance(value, tuple):
if len(value) == 3:
filename, data, content_type = value
else:
filename, data = value
content_type = get_content_type(filename)
writer(body).write('Content-Disposition: form-data; name="%s"; '
'filename="%s"\r\n' % (fieldname, filename))
body.write(b('Content-Type: %s\r\n\r\n' %
(content_type,)))
else:
data = value
writer(body).write('Content-Disposition: form-data; name="%s"\r\n'
% (fieldname))
body.write(b'\r\n')
if isinstance(data, int):
data = str(data) # Backwards compatibility
if isinstance(data, six.text_type):
writer(body).write(data)
else:
body.write(data)
body.write(b'\r\n')
body.write(b('--%s--\r\n' % (boundary)))
content_type = b('multipart/form-data; boundary=%s' % boundary)
return body.getvalue(), content_type
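# Minimal usage sketch: encode one plain field and one (filename, data) field,
# as described in the encode_multipart_formdata docstring. The field names and
# payload are illustrative only.
if __name__ == '__main__':
    body, content_type = encode_multipart_formdata({
        'field': 'value',
        'upload': ('hello.txt', b'hello world'),
    })
    print(content_type)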
|
seeminglee/pyglet64 | refs/heads/master | pyglet/media/procedural.py | 41 | # ----------------------------------------------------------------------------
# pyglet
# Copyright (c) 2006-2008 Alex Holkner
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of pyglet nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
# $Id:$
from pyglet.media import Source, AudioFormat, AudioData
import ctypes
import os
import math
class ProceduralSource(Source):
def __init__(self, duration, sample_rate=44800, sample_size=16):
self._duration = float(duration)
self.audio_format = AudioFormat(
channels=1,
sample_size=sample_size,
sample_rate=sample_rate)
self._offset = 0
self._bytes_per_sample = sample_size >> 3
self._bytes_per_second = self._bytes_per_sample * sample_rate
self._max_offset = int(self._bytes_per_second * self._duration)
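        # Align the total length to a whole sample so a 16-bit stream never
        # ends in the middle of a sample.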
if self._bytes_per_sample == 2:
self._max_offset &= 0xfffffffe
def _get_audio_data(self, bytes):
bytes = min(bytes, self._max_offset - self._offset)
if bytes <= 0:
return None
timestamp = float(self._offset) / self._bytes_per_second
duration = float(bytes) / self._bytes_per_second
data = self._generate_data(bytes, self._offset)
self._offset += bytes
return AudioData(data,
bytes,
timestamp,
duration,
[])
def _generate_data(self, bytes, offset):
'''Generate `bytes` bytes of data.
Return data as ctypes array or string.
'''
raise NotImplementedError('abstract')
def seek(self, timestamp):
self._offset = int(timestamp * self._bytes_per_second)
# Bound within duration
self._offset = min(max(self._offset, 0), self._max_offset)
# Align to sample
if self._bytes_per_sample == 2:
self._offset &= 0xfffffffe
class Silence(ProceduralSource):
def _generate_data(self, bytes, offset):
        if self._bytes_per_sample == 1:
            # Unsigned 8-bit silence is the midpoint of the range.
            return '\x7f' * bytes
else:
return '\0' * bytes
class WhiteNoise(ProceduralSource):
def _generate_data(self, bytes, offset):
return os.urandom(bytes)
class Sine(ProceduralSource):
def __init__(self, duration, frequency=440, **kwargs):
super(Sine, self).__init__(duration, **kwargs)
self.frequency = frequency
def _generate_data(self, bytes, offset):
if self._bytes_per_sample == 1:
start = offset
samples = bytes
bias = 127
amplitude = 127
data = (ctypes.c_ubyte * samples)()
else:
start = offset >> 1
samples = bytes >> 1
bias = 0
amplitude = 32767
data = (ctypes.c_short * samples)()
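        # Phase increment per sample: 2*pi*frequency / sample_rate radians.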
step = self.frequency * (math.pi * 2) / self.audio_format.sample_rate
for i in range(samples):
data[i] = int(math.sin(step * (i + start)) * amplitude + bias)
return data
class Saw(ProceduralSource):
def __init__(self, duration, frequency=440, **kwargs):
super(Saw, self).__init__(duration, **kwargs)
self.frequency = frequency
def _generate_data(self, bytes, offset):
# XXX TODO consider offset
if self._bytes_per_sample == 1:
samples = bytes
value = 127
max = 255
min = 0
data = (ctypes.c_ubyte * samples)()
else:
samples = bytes >> 1
value = 0
max = 32767
min = -32768
data = (ctypes.c_short * samples)()
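        # Per-sample increment; the reflection at max/min below makes this a
        # triangle sweep between the two extremes at the requested frequency.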
step = (max - min) * 2 * self.frequency / self.audio_format.sample_rate
for i in range(samples):
value += step
if value > max:
value = max - (value - max)
step = -step
if value < min:
value = min - (value - min)
step = -step
data[i] = value
return data
class Square(ProceduralSource):
def __init__(self, duration, frequency=440, **kwargs):
super(Square, self).__init__(duration, **kwargs)
self.frequency = frequency
def _generate_data(self, bytes, offset):
# XXX TODO consider offset
if self._bytes_per_sample == 1:
samples = bytes
value = 0
amplitude = 255
data = (ctypes.c_ubyte * samples)()
else:
samples = bytes >> 1
value = -32768
amplitude = 65535
data = (ctypes.c_short * samples)()
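        # Samples per half period; the output level flips each time the
        # counter reaches it.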
period = self.audio_format.sample_rate / self.frequency / 2
count = 0
for i in range(samples):
count += 1
if count == period:
value = amplitude - value
count = 0
data[i] = value
return data
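# Minimal usage sketch: pull about one second of 16-bit samples from a Sine
# source. _get_audio_data() is internal to pyglet and is used here only to
# illustrate the data flow; the numbers are illustrative.
if __name__ == '__main__':
    source = Sine(1.0, frequency=440)
    data = source._get_audio_data(source.audio_format.sample_rate * 2)
    print data.length, 'bytes of sine data'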
|
mancoast/CPythonPyc_test | refs/heads/master | cpython/152_test_new.py | 8 | from test_support import verbose
import sys
import new
class Eggs:
def get_yolks(self):
return self.yolks
print 'new.module()'
m = new.module('Spam')
if verbose:
print m
m.Eggs = Eggs
sys.modules['Spam'] = m
import Spam
def get_more_yolks(self):
return self.yolks + 3
print 'new.classobj()'
C = new.classobj('Spam', (Spam.Eggs,), {'get_more_yolks': get_more_yolks})
if verbose:
print C
print 'new.instance()'
c = new.instance(C, {'yolks': 3})
if verbose:
print c
def break_yolks(self):
self.yolks = self.yolks - 2
print 'new.instancemethod()'
im = new.instancemethod(break_yolks, c, C)
if verbose:
print im
if c.get_yolks() <> 3 or c.get_more_yolks() <> 6:
print 'Broken call of hand-crafted class instance'
im()
if c.get_yolks() <> 1 or c.get_more_yolks() <> 4:
print 'Broken call of hand-crafted instance method'
codestr = '''
a = 1
b = 2
c = a + b
'''
ccode = compile(codestr, '<string>', 'exec')
g = {'c': 0, '__builtins__': __builtins__}
# this test could be more robust
print 'new.function()'
func = new.function(ccode, g)
if verbose:
print func
func()
if g['c'] <> 3:
print 'Could not create a proper function object'
# bogus test of new.code()
print 'new.code()'
d = new.code(3, 3, 3, 3, codestr, (), (), (), "<string>", "<name>", 1, "")
if verbose:
print d
|
tuxfux-hlp-notes/python-batches | refs/heads/master | archieves/batch-64/09-modules/myenv/lib/python2.7/site-packages/django/conf/locale/uk/formats.py | 565 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j E Y р.'
TIME_FORMAT = 'H:i'
DATETIME_FORMAT = 'j E Y р. H:i'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j F'
SHORT_DATE_FORMAT = 'j M Y'
# SHORT_DATETIME_FORMAT =
FIRST_DAY_OF_WEEK = 1 # Monday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
# DATE_INPUT_FORMATS =
# TIME_INPUT_FORMATS =
# DATETIME_INPUT_FORMATS =
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = ' '
# NUMBER_GROUPING =
|
clovett/MissionPlanner | refs/heads/master | Lib/urllib.py | 52 | """Open an arbitrary URL.
See the following document for more info on URLs:
"Names and Addresses, URIs, URLs, URNs, URCs", at
http://www.w3.org/pub/WWW/Addressing/Overview.html
See also the HTTP spec (from which the error codes are derived):
"HTTP - Hypertext Transfer Protocol", at
http://www.w3.org/pub/WWW/Protocols/
Related standards and specs:
- RFC1808: the "relative URL" spec. (authoritative status)
- RFC1738 - the "URL standard". (authoritative status)
- RFC1630 - the "URI spec". (informational status)
The object returned by URLopener().open(file) will differ per
protocol. All you know is that is has methods read(), readline(),
readlines(), fileno(), close() and info(). The read*(), fileno()
and close() methods work like those of open files.
The info() method returns a mimetools.Message object which can be
used to query various info about the object, if available.
(mimetools.Message objects are queried with the getheader() method.)
"""
import string
import socket
import os
import time
import sys
from urlparse import urljoin as basejoin
__all__ = ["urlopen", "URLopener", "FancyURLopener", "urlretrieve",
"urlcleanup", "quote", "quote_plus", "unquote", "unquote_plus",
"urlencode", "url2pathname", "pathname2url", "splittag",
"localhost", "thishost", "ftperrors", "basejoin", "unwrap",
"splittype", "splithost", "splituser", "splitpasswd", "splitport",
"splitnport", "splitquery", "splitattr", "splitvalue",
"getproxies"]
__version__ = '1.17' # XXX This version is not always updated :-(
MAXFTPCACHE = 10 # Trim the ftp cache beyond this size
# Helper for non-unix systems
if os.name == 'nt':
from nturl2path import url2pathname, pathname2url
elif os.name == 'riscos':
from rourl2path import url2pathname, pathname2url
else:
def url2pathname(pathname):
"""OS-specific conversion from a relative URL of the 'file' scheme
to a file system path; not recommended for general use."""
return unquote(pathname)
def pathname2url(pathname):
"""OS-specific conversion from a file system path to a relative URL
of the 'file' scheme; not recommended for general use."""
return quote(pathname)
# This really consists of two pieces:
# (1) a class which handles opening of all sorts of URLs
# (plus assorted utilities etc.)
# (2) a set of functions for parsing URLs
# XXX Should these be separated out into different modules?
# Shortcut for basic usage
_urlopener = None
def urlopen(url, data=None, proxies=None):
"""Create a file-like object for the specified URL to read from."""
from warnings import warnpy3k
warnpy3k("urllib.urlopen() has been removed in Python 3.0 in "
"favor of urllib2.urlopen()", stacklevel=2)
global _urlopener
if proxies is not None:
opener = FancyURLopener(proxies=proxies)
elif not _urlopener:
opener = FancyURLopener()
_urlopener = opener
else:
opener = _urlopener
if data is None:
return opener.open(url)
else:
return opener.open(url, data)
def urlretrieve(url, filename=None, reporthook=None, data=None):
global _urlopener
if not _urlopener:
_urlopener = FancyURLopener()
return _urlopener.retrieve(url, filename, reporthook, data)
def urlcleanup():
if _urlopener:
_urlopener.cleanup()
_safe_quoters.clear()
ftpcache.clear()
# check for SSL
try:
import ssl
except:
_have_ssl = False
else:
_have_ssl = True
# exception raised when downloaded size does not match content-length
class ContentTooShortError(IOError):
def __init__(self, message, content):
IOError.__init__(self, message)
self.content = content
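# Cache of ftpwrapper objects, keyed by (user, host, port, path); shared by
# default across URLopener instances (see open_ftp).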
ftpcache = {}
class URLopener:
"""Class to open URLs.
This is a class rather than just a subroutine because we may need
more than one set of global protocol-specific options.
Note -- this is a base class for those who don't want the
automatic handling of errors type 302 (relocated) and 401
(authorization needed)."""
__tempfiles = None
version = "Python-urllib/%s" % __version__
# Constructor
def __init__(self, proxies=None, **x509):
if proxies is None:
proxies = getproxies()
assert hasattr(proxies, 'has_key'), "proxies must be a mapping"
self.proxies = proxies
self.key_file = x509.get('key_file')
self.cert_file = x509.get('cert_file')
self.addheaders = [('User-Agent', self.version)]
self.__tempfiles = []
self.__unlink = os.unlink # See cleanup()
self.tempcache = None
# Undocumented feature: if you assign {} to tempcache,
# it is used to cache files retrieved with
# self.retrieve(). This is not enabled by default
# since it does not work for changing documents (and I
# haven't got the logic to check expiration headers
# yet).
self.ftpcache = ftpcache
# Undocumented feature: you can use a different
# ftp cache by assigning to the .ftpcache member;
# in case you want logically independent URL openers
# XXX This is not threadsafe. Bah.
def __del__(self):
self.close()
def close(self):
self.cleanup()
def cleanup(self):
# This code sometimes runs when the rest of this module
# has already been deleted, so it can't use any globals
# or import anything.
if self.__tempfiles:
for file in self.__tempfiles:
try:
self.__unlink(file)
except OSError:
pass
del self.__tempfiles[:]
if self.tempcache:
self.tempcache.clear()
def addheader(self, *args):
"""Add a header to be used by the HTTP interface only
e.g. u.addheader('Accept', 'sound/basic')"""
self.addheaders.append(args)
# External interface
def open(self, fullurl, data=None):
"""Use URLopener().open(file) instead of open(file, 'r')."""
fullurl = unwrap(toBytes(fullurl))
# percent encode url, fixing lame server errors for e.g, like space
# within url paths.
fullurl = quote(fullurl, safe="%/:=&?~#+!$,;'@()*[]|")
if self.tempcache and fullurl in self.tempcache:
filename, headers = self.tempcache[fullurl]
fp = open(filename, 'rb')
return addinfourl(fp, headers, fullurl)
urltype, url = splittype(fullurl)
if not urltype:
urltype = 'file'
if urltype in self.proxies:
proxy = self.proxies[urltype]
urltype, proxyhost = splittype(proxy)
host, selector = splithost(proxyhost)
url = (host, fullurl) # Signal special case to open_*()
else:
proxy = None
name = 'open_' + urltype
self.type = urltype
name = name.replace('-', '_')
if not hasattr(self, name):
if proxy:
return self.open_unknown_proxy(proxy, fullurl, data)
else:
return self.open_unknown(fullurl, data)
try:
if data is None:
return getattr(self, name)(url)
else:
return getattr(self, name)(url, data)
except socket.error, msg:
raise IOError, ('socket error', msg), sys.exc_info()[2]
def open_unknown(self, fullurl, data=None):
"""Overridable interface to open unknown URL type."""
type, url = splittype(fullurl)
raise IOError, ('url error', 'unknown url type', type)
def open_unknown_proxy(self, proxy, fullurl, data=None):
"""Overridable interface to open unknown URL type."""
type, url = splittype(fullurl)
raise IOError, ('url error', 'invalid proxy for %s' % type, proxy)
# External interface
def retrieve(self, url, filename=None, reporthook=None, data=None):
"""retrieve(url) returns (filename, headers) for a local object
or (tempfilename, headers) for a remote object."""
url = unwrap(toBytes(url))
if self.tempcache and url in self.tempcache:
return self.tempcache[url]
type, url1 = splittype(url)
if filename is None and (not type or type == 'file'):
try:
fp = self.open_local_file(url1)
hdrs = fp.info()
fp.close()
return url2pathname(splithost(url1)[1]), hdrs
except IOError:
pass
fp = self.open(url, data)
try:
headers = fp.info()
if filename:
tfp = open(filename, 'wb')
else:
import tempfile
garbage, path = splittype(url)
garbage, path = splithost(path or "")
path, garbage = splitquery(path or "")
path, garbage = splitattr(path or "")
suffix = os.path.splitext(path)[1]
(fd, filename) = tempfile.mkstemp(suffix)
self.__tempfiles.append(filename)
tfp = os.fdopen(fd, 'wb')
try:
result = filename, headers
if self.tempcache is not None:
self.tempcache[url] = result
bs = 1024*8
size = -1
read = 0
blocknum = 0
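            # reporthook is called as reporthook(block_number, block_size,
            # total_size); total_size is -1 when Content-Length is unknown.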
if reporthook:
if "content-length" in headers:
size = int(headers["Content-Length"])
reporthook(blocknum, bs, size)
while 1:
block = fp.read(bs)
if block == "":
break
read += len(block)
tfp.write(block)
blocknum += 1
if reporthook:
reporthook(blocknum, bs, size)
finally:
tfp.close()
finally:
fp.close()
# raise exception if actual size does not match content-length header
if size >= 0 and read < size:
raise ContentTooShortError("retrieval incomplete: got only %i out "
"of %i bytes" % (read, size), result)
return result
# Each method named open_<type> knows how to open that type of URL
def open_http(self, url, data=None):
"""Use HTTP protocol."""
import httplib
user_passwd = None
proxy_passwd= None
if isinstance(url, str):
host, selector = splithost(url)
if host:
user_passwd, host = splituser(host)
host = unquote(host)
realhost = host
else:
host, selector = url
# check whether the proxy contains authorization information
proxy_passwd, host = splituser(host)
# now we proceed with the url we want to obtain
urltype, rest = splittype(selector)
url = rest
user_passwd = None
if urltype.lower() != 'http':
realhost = None
else:
realhost, rest = splithost(rest)
if realhost:
user_passwd, realhost = splituser(realhost)
if user_passwd:
selector = "%s://%s%s" % (urltype, realhost, rest)
if proxy_bypass(realhost):
host = realhost
#print "proxy via http:", host, selector
if not host: raise IOError, ('http error', 'no host given')
if proxy_passwd:
import base64
proxy_auth = base64.b64encode(proxy_passwd).strip()
else:
proxy_auth = None
if user_passwd:
import base64
auth = base64.b64encode(user_passwd).strip()
else:
auth = None
h = httplib.HTTP(host)
if data is not None:
h.putrequest('POST', selector)
h.putheader('Content-Type', 'application/x-www-form-urlencoded')
h.putheader('Content-Length', '%d' % len(data))
else:
h.putrequest('GET', selector)
if proxy_auth: h.putheader('Proxy-Authorization', 'Basic %s' % proxy_auth)
if auth: h.putheader('Authorization', 'Basic %s' % auth)
if realhost: h.putheader('Host', realhost)
for args in self.addheaders: h.putheader(*args)
h.endheaders(data)
errcode, errmsg, headers = h.getreply()
fp = h.getfile()
if errcode == -1:
if fp: fp.close()
# something went wrong with the HTTP status line
raise IOError, ('http protocol error', 0,
'got a bad status line', None)
# According to RFC 2616, "2xx" code indicates that the client's
# request was successfully received, understood, and accepted.
if (200 <= errcode < 300):
return addinfourl(fp, headers, "http:" + url, errcode)
else:
if data is None:
return self.http_error(url, fp, errcode, errmsg, headers)
else:
return self.http_error(url, fp, errcode, errmsg, headers, data)
def http_error(self, url, fp, errcode, errmsg, headers, data=None):
"""Handle http errors.
Derived class can override this, or provide specific handlers
named http_error_DDD where DDD is the 3-digit error code."""
# First check if there's a specific handler for this error
name = 'http_error_%d' % errcode
if hasattr(self, name):
method = getattr(self, name)
if data is None:
result = method(url, fp, errcode, errmsg, headers)
else:
result = method(url, fp, errcode, errmsg, headers, data)
if result: return result
return self.http_error_default(url, fp, errcode, errmsg, headers)
def http_error_default(self, url, fp, errcode, errmsg, headers):
"""Default error handler: close the connection and raise IOError."""
void = fp.read()
fp.close()
raise IOError, ('http error', errcode, errmsg, headers)
if _have_ssl:
def open_https(self, url, data=None):
"""Use HTTPS protocol."""
import httplib
user_passwd = None
proxy_passwd = None
if isinstance(url, str):
host, selector = splithost(url)
if host:
user_passwd, host = splituser(host)
host = unquote(host)
realhost = host
else:
host, selector = url
            # here we determine whether the proxy contains authorization information
proxy_passwd, host = splituser(host)
urltype, rest = splittype(selector)
url = rest
user_passwd = None
if urltype.lower() != 'https':
realhost = None
else:
realhost, rest = splithost(rest)
if realhost:
user_passwd, realhost = splituser(realhost)
if user_passwd:
selector = "%s://%s%s" % (urltype, realhost, rest)
#print "proxy via https:", host, selector
if not host: raise IOError, ('https error', 'no host given')
if proxy_passwd:
import base64
proxy_auth = base64.b64encode(proxy_passwd).strip()
else:
proxy_auth = None
if user_passwd:
import base64
auth = base64.b64encode(user_passwd).strip()
else:
auth = None
h = httplib.HTTPS(host, 0,
key_file=self.key_file,
cert_file=self.cert_file)
if data is not None:
h.putrequest('POST', selector)
h.putheader('Content-Type',
'application/x-www-form-urlencoded')
h.putheader('Content-Length', '%d' % len(data))
else:
h.putrequest('GET', selector)
if proxy_auth: h.putheader('Proxy-Authorization', 'Basic %s' % proxy_auth)
if auth: h.putheader('Authorization', 'Basic %s' % auth)
if realhost: h.putheader('Host', realhost)
for args in self.addheaders: h.putheader(*args)
h.endheaders(data)
errcode, errmsg, headers = h.getreply()
fp = h.getfile()
if errcode == -1:
if fp: fp.close()
# something went wrong with the HTTP status line
raise IOError, ('http protocol error', 0,
'got a bad status line', None)
# According to RFC 2616, "2xx" code indicates that the client's
# request was successfully received, understood, and accepted.
if (200 <= errcode < 300):
return addinfourl(fp, headers, "https:" + url, errcode)
else:
if data is None:
return self.http_error(url, fp, errcode, errmsg, headers)
else:
return self.http_error(url, fp, errcode, errmsg, headers,
data)
def open_file(self, url):
"""Use local file or FTP depending on form of URL."""
if not isinstance(url, str):
raise IOError, ('file error', 'proxy support for file protocol currently not implemented')
if url[:2] == '//' and url[2:3] != '/' and url[2:12].lower() != 'localhost/':
return self.open_ftp(url)
else:
return self.open_local_file(url)
def open_local_file(self, url):
"""Use local file."""
import mimetypes, mimetools, email.utils
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
host, file = splithost(url)
localname = url2pathname(file)
try:
stats = os.stat(localname)
except OSError, e:
raise IOError(e.errno, e.strerror, e.filename)
size = stats.st_size
modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
mtype = mimetypes.guess_type(url)[0]
headers = mimetools.Message(StringIO(
'Content-Type: %s\nContent-Length: %d\nLast-modified: %s\n' %
(mtype or 'text/plain', size, modified)))
if not host:
urlfile = file
if file[:1] == '/':
urlfile = 'file://' + file
return addinfourl(open(localname, 'rb'),
headers, urlfile)
host, port = splitport(host)
if not port \
and socket.gethostbyname(host) in (localhost(), thishost()):
urlfile = file
if file[:1] == '/':
urlfile = 'file://' + file
return addinfourl(open(localname, 'rb'),
headers, urlfile)
raise IOError, ('local file error', 'not on local host')
def open_ftp(self, url):
"""Use FTP protocol."""
if not isinstance(url, str):
raise IOError, ('ftp error', 'proxy support for ftp protocol currently not implemented')
import mimetypes, mimetools
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
host, path = splithost(url)
if not host: raise IOError, ('ftp error', 'no host given')
host, port = splitport(host)
user, host = splituser(host)
if user: user, passwd = splitpasswd(user)
else: passwd = None
host = unquote(host)
user = user or ''
passwd = passwd or ''
host = socket.gethostbyname(host)
if not port:
import ftplib
port = ftplib.FTP_PORT
else:
port = int(port)
path, attrs = splitattr(path)
path = unquote(path)
dirs = path.split('/')
dirs, file = dirs[:-1], dirs[-1]
if dirs and not dirs[0]: dirs = dirs[1:]
if dirs and not dirs[0]: dirs[0] = '/'
key = user, host, port, '/'.join(dirs)
# XXX thread unsafe!
if len(self.ftpcache) > MAXFTPCACHE:
# Prune the cache, rather arbitrarily
for k in self.ftpcache.keys():
if k != key:
v = self.ftpcache[k]
del self.ftpcache[k]
v.close()
try:
if not key in self.ftpcache:
self.ftpcache[key] = \
ftpwrapper(user, passwd, host, port, dirs)
if not file: type = 'D'
else: type = 'I'
for attr in attrs:
attr, value = splitvalue(attr)
if attr.lower() == 'type' and \
value in ('a', 'A', 'i', 'I', 'd', 'D'):
type = value.upper()
(fp, retrlen) = self.ftpcache[key].retrfile(file, type)
mtype = mimetypes.guess_type("ftp:" + url)[0]
headers = ""
if mtype:
headers += "Content-Type: %s\n" % mtype
if retrlen is not None and retrlen >= 0:
headers += "Content-Length: %d\n" % retrlen
headers = mimetools.Message(StringIO(headers))
return addinfourl(fp, headers, "ftp:" + url)
except ftperrors(), msg:
raise IOError, ('ftp error', msg), sys.exc_info()[2]
def open_data(self, url, data=None):
"""Use "data" URL."""
if not isinstance(url, str):
raise IOError, ('data error', 'proxy support for data protocol currently not implemented')
# ignore POSTed data
#
# syntax of data URLs:
# dataurl := "data:" [ mediatype ] [ ";base64" ] "," data
# mediatype := [ type "/" subtype ] *( ";" parameter )
# data := *urlchar
# parameter := attribute "=" value
import mimetools
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
try:
[type, data] = url.split(',', 1)
except ValueError:
raise IOError, ('data error', 'bad data URL')
if not type:
type = 'text/plain;charset=US-ASCII'
semi = type.rfind(';')
if semi >= 0 and '=' not in type[semi:]:
encoding = type[semi+1:]
type = type[:semi]
else:
encoding = ''
msg = []
msg.append('Date: %s'%time.strftime('%a, %d %b %Y %H:%M:%S GMT',
time.gmtime(time.time())))
msg.append('Content-type: %s' % type)
if encoding == 'base64':
import base64
data = base64.decodestring(data)
else:
data = unquote(data)
msg.append('Content-Length: %d' % len(data))
msg.append('')
msg.append(data)
msg = '\n'.join(msg)
f = StringIO(msg)
headers = mimetools.Message(f, 0)
#f.fileno = None # needed for addinfourl
return addinfourl(f, headers, url)
class FancyURLopener(URLopener):
"""Derived class with handlers for errors we can handle (perhaps)."""
def __init__(self, *args, **kwargs):
URLopener.__init__(self, *args, **kwargs)
self.auth_cache = {}
self.tries = 0
self.maxtries = 10
def http_error_default(self, url, fp, errcode, errmsg, headers):
"""Default error handling -- don't raise an exception."""
return addinfourl(fp, headers, "http:" + url, errcode)
def http_error_302(self, url, fp, errcode, errmsg, headers, data=None):
"""Error 302 -- relocated (temporarily)."""
self.tries += 1
if self.maxtries and self.tries >= self.maxtries:
if hasattr(self, "http_error_500"):
meth = self.http_error_500
else:
meth = self.http_error_default
self.tries = 0
return meth(url, fp, 500,
"Internal Server Error: Redirect Recursion", headers)
result = self.redirect_internal(url, fp, errcode, errmsg, headers,
data)
self.tries = 0
return result
def redirect_internal(self, url, fp, errcode, errmsg, headers, data):
if 'location' in headers:
newurl = headers['location']
elif 'uri' in headers:
newurl = headers['uri']
else:
return
void = fp.read()
fp.close()
# In case the server sent a relative URL, join with original:
newurl = basejoin(self.type + ":" + url, newurl)
# For security reasons we do not allow redirects to protocols
# other than HTTP, HTTPS or FTP.
newurl_lower = newurl.lower()
if not (newurl_lower.startswith('http://') or
newurl_lower.startswith('https://') or
newurl_lower.startswith('ftp://')):
raise IOError('redirect error', errcode,
errmsg + " - Redirection to url '%s' is not allowed" %
newurl,
headers)
return self.open(newurl)
def http_error_301(self, url, fp, errcode, errmsg, headers, data=None):
"""Error 301 -- also relocated (permanently)."""
return self.http_error_302(url, fp, errcode, errmsg, headers, data)
def http_error_303(self, url, fp, errcode, errmsg, headers, data=None):
"""Error 303 -- also relocated (essentially identical to 302)."""
return self.http_error_302(url, fp, errcode, errmsg, headers, data)
def http_error_307(self, url, fp, errcode, errmsg, headers, data=None):
"""Error 307 -- relocated, but turn POST into error."""
if data is None:
return self.http_error_302(url, fp, errcode, errmsg, headers, data)
else:
return self.http_error_default(url, fp, errcode, errmsg, headers)
def http_error_401(self, url, fp, errcode, errmsg, headers, data=None):
"""Error 401 -- authentication required.
This function supports Basic authentication only."""
if not 'www-authenticate' in headers:
URLopener.http_error_default(self, url, fp,
errcode, errmsg, headers)
stuff = headers['www-authenticate']
import re
match = re.match('[ \t]*([^ \t]+)[ \t]+realm="([^"]*)"', stuff)
if not match:
URLopener.http_error_default(self, url, fp,
errcode, errmsg, headers)
scheme, realm = match.groups()
if scheme.lower() != 'basic':
URLopener.http_error_default(self, url, fp,
errcode, errmsg, headers)
name = 'retry_' + self.type + '_basic_auth'
if data is None:
return getattr(self,name)(url, realm)
else:
return getattr(self,name)(url, realm, data)
def http_error_407(self, url, fp, errcode, errmsg, headers, data=None):
"""Error 407 -- proxy authentication required.
This function supports Basic authentication only."""
if not 'proxy-authenticate' in headers:
URLopener.http_error_default(self, url, fp,
errcode, errmsg, headers)
stuff = headers['proxy-authenticate']
import re
match = re.match('[ \t]*([^ \t]+)[ \t]+realm="([^"]*)"', stuff)
if not match:
URLopener.http_error_default(self, url, fp,
errcode, errmsg, headers)
scheme, realm = match.groups()
if scheme.lower() != 'basic':
URLopener.http_error_default(self, url, fp,
errcode, errmsg, headers)
name = 'retry_proxy_' + self.type + '_basic_auth'
if data is None:
return getattr(self,name)(url, realm)
else:
return getattr(self,name)(url, realm, data)
def retry_proxy_http_basic_auth(self, url, realm, data=None):
host, selector = splithost(url)
newurl = 'http://' + host + selector
proxy = self.proxies['http']
urltype, proxyhost = splittype(proxy)
proxyhost, proxyselector = splithost(proxyhost)
i = proxyhost.find('@') + 1
proxyhost = proxyhost[i:]
user, passwd = self.get_user_passwd(proxyhost, realm, i)
if not (user or passwd): return None
proxyhost = quote(user, safe='') + ':' + quote(passwd, safe='') + '@' + proxyhost
self.proxies['http'] = 'http://' + proxyhost + proxyselector
if data is None:
return self.open(newurl)
else:
return self.open(newurl, data)
def retry_proxy_https_basic_auth(self, url, realm, data=None):
host, selector = splithost(url)
newurl = 'https://' + host + selector
proxy = self.proxies['https']
urltype, proxyhost = splittype(proxy)
proxyhost, proxyselector = splithost(proxyhost)
i = proxyhost.find('@') + 1
proxyhost = proxyhost[i:]
user, passwd = self.get_user_passwd(proxyhost, realm, i)
if not (user or passwd): return None
proxyhost = quote(user, safe='') + ':' + quote(passwd, safe='') + '@' + proxyhost
self.proxies['https'] = 'https://' + proxyhost + proxyselector
if data is None:
return self.open(newurl)
else:
return self.open(newurl, data)
def retry_http_basic_auth(self, url, realm, data=None):
host, selector = splithost(url)
i = host.find('@') + 1
host = host[i:]
user, passwd = self.get_user_passwd(host, realm, i)
if not (user or passwd): return None
host = quote(user, safe='') + ':' + quote(passwd, safe='') + '@' + host
newurl = 'http://' + host + selector
if data is None:
return self.open(newurl)
else:
return self.open(newurl, data)
def retry_https_basic_auth(self, url, realm, data=None):
host, selector = splithost(url)
i = host.find('@') + 1
host = host[i:]
user, passwd = self.get_user_passwd(host, realm, i)
if not (user or passwd): return None
host = quote(user, safe='') + ':' + quote(passwd, safe='') + '@' + host
newurl = 'https://' + host + selector
if data is None:
return self.open(newurl)
else:
return self.open(newurl, data)
def get_user_passwd(self, host, realm, clear_cache=0):
key = realm + '@' + host.lower()
if key in self.auth_cache:
if clear_cache:
del self.auth_cache[key]
else:
return self.auth_cache[key]
user, passwd = self.prompt_user_passwd(host, realm)
if user or passwd: self.auth_cache[key] = (user, passwd)
return user, passwd
def prompt_user_passwd(self, host, realm):
"""Override this in a GUI environment!"""
import getpass
try:
user = raw_input("Enter username for %s at %s: " % (realm,
host))
passwd = getpass.getpass("Enter password for %s in %s at %s: " %
(user, realm, host))
return user, passwd
except KeyboardInterrupt:
print
return None, None
# Utility functions
_localhost = None
def localhost():
"""Return the IP address of the magic hostname 'localhost'."""
global _localhost
if _localhost is None:
_localhost = socket.gethostbyname('localhost')
return _localhost
_thishost = None
def thishost():
"""Return the IP address of the current host."""
global _thishost
if _thishost is None:
_thishost = socket.gethostbyname(socket.gethostname())
return _thishost
_ftperrors = None
def ftperrors():
"""Return the set of errors raised by the FTP class."""
global _ftperrors
if _ftperrors is None:
import ftplib
_ftperrors = ftplib.all_errors
return _ftperrors
_noheaders = None
def noheaders():
"""Return an empty mimetools.Message object."""
global _noheaders
if _noheaders is None:
import mimetools
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
_noheaders = mimetools.Message(StringIO(), 0)
_noheaders.fp.close() # Recycle file descriptor
return _noheaders
# Utility classes
class ftpwrapper:
"""Class used by open_ftp() for cache of open FTP connections."""
def __init__(self, user, passwd, host, port, dirs,
timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
self.user = user
self.passwd = passwd
self.host = host
self.port = port
self.dirs = dirs
self.timeout = timeout
self.init()
def init(self):
import ftplib
self.busy = 0
self.ftp = ftplib.FTP()
self.ftp.connect(self.host, self.port, self.timeout)
self.ftp.login(self.user, self.passwd)
for dir in self.dirs:
self.ftp.cwd(dir)
def retrfile(self, file, type):
import ftplib
self.endtransfer()
if type in ('d', 'D'): cmd = 'TYPE A'; isdir = 1
else: cmd = 'TYPE ' + type; isdir = 0
try:
self.ftp.voidcmd(cmd)
except ftplib.all_errors:
self.init()
self.ftp.voidcmd(cmd)
conn = None
if file and not isdir:
# Try to retrieve as a file
try:
cmd = 'RETR ' + file
conn = self.ftp.ntransfercmd(cmd)
except ftplib.error_perm, reason:
if str(reason)[:3] != '550':
raise IOError, ('ftp error', reason), sys.exc_info()[2]
if not conn:
# Set transfer mode to ASCII!
self.ftp.voidcmd('TYPE A')
# Try a directory listing. Verify that directory exists.
if file:
pwd = self.ftp.pwd()
try:
try:
self.ftp.cwd(file)
except ftplib.error_perm, reason:
raise IOError, ('ftp error', reason), sys.exc_info()[2]
finally:
self.ftp.cwd(pwd)
cmd = 'LIST ' + file
else:
cmd = 'LIST'
conn = self.ftp.ntransfercmd(cmd)
self.busy = 1
# Pass back both a suitably decorated object and a retrieval length
return (addclosehook(conn[0].makefile('rb'),
self.endtransfer), conn[1])
def endtransfer(self):
if not self.busy:
return
self.busy = 0
try:
self.ftp.voidresp()
except ftperrors():
pass
def close(self):
self.endtransfer()
try:
self.ftp.close()
except ftperrors():
pass
class addbase:
"""Base class for addinfo and addclosehook."""
def __init__(self, fp):
self.fp = fp
self.read = self.fp.read
self.readline = self.fp.readline
if hasattr(self.fp, "readlines"): self.readlines = self.fp.readlines
if hasattr(self.fp, "fileno"):
self.fileno = self.fp.fileno
else:
self.fileno = lambda: None
if hasattr(self.fp, "__iter__"):
self.__iter__ = self.fp.__iter__
if hasattr(self.fp, "next"):
self.next = self.fp.next
def __repr__(self):
return '<%s at %r whose fp = %r>' % (self.__class__.__name__,
id(self), self.fp)
def close(self):
self.read = None
self.readline = None
self.readlines = None
self.fileno = None
if self.fp: self.fp.close()
self.fp = None
class addclosehook(addbase):
"""Class to add a close hook to an open file."""
def __init__(self, fp, closehook, *hookargs):
addbase.__init__(self, fp)
self.closehook = closehook
self.hookargs = hookargs
def close(self):
addbase.close(self)
if self.closehook:
self.closehook(*self.hookargs)
self.closehook = None
self.hookargs = None
class addinfo(addbase):
"""class to add an info() method to an open file."""
def __init__(self, fp, headers):
addbase.__init__(self, fp)
self.headers = headers
def info(self):
return self.headers
class addinfourl(addbase):
"""class to add info() and geturl() methods to an open file."""
def __init__(self, fp, headers, url, code=None):
addbase.__init__(self, fp)
self.headers = headers
self.url = url
self.code = code
def info(self):
return self.headers
def getcode(self):
return self.code
def geturl(self):
return self.url
# Utilities to parse URLs (most of these return None for missing parts):
# unwrap('<URL:type://host/path>') --> 'type://host/path'
# splittype('type:opaquestring') --> 'type', 'opaquestring'
# splithost('//host[:port]/path') --> 'host[:port]', '/path'
# splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'
# splitpasswd('user:passwd') -> 'user', 'passwd'
# splitport('host:port') --> 'host', 'port'
# splitquery('/path?query') --> '/path', 'query'
# splittag('/path#tag') --> '/path', 'tag'
# splitattr('/path;attr1=value1;attr2=value2;...') ->
# '/path', ['attr1=value1', 'attr2=value2', ...]
# splitvalue('attr=value') --> 'attr', 'value'
# unquote('abc%20def') -> 'abc def'
# quote('abc def') -> 'abc%20def')
try:
unicode
except NameError:
def _is_unicode(x):
return 0
else:
def _is_unicode(x):
return isinstance(x, unicode)
def toBytes(url):
"""toBytes(u"URL") --> 'URL'."""
# Most URL schemes require ASCII. If that changes, the conversion
# can be relaxed
if _is_unicode(url):
try:
url = url.encode("ASCII")
except UnicodeError:
raise UnicodeError("URL " + repr(url) +
" contains non-ASCII characters")
return url
def unwrap(url):
"""unwrap('<URL:type://host/path>') --> 'type://host/path'."""
url = url.strip()
if url[:1] == '<' and url[-1:] == '>':
url = url[1:-1].strip()
if url[:4] == 'URL:': url = url[4:].strip()
return url
_typeprog = None
def splittype(url):
"""splittype('type:opaquestring') --> 'type', 'opaquestring'."""
global _typeprog
if _typeprog is None:
import re
_typeprog = re.compile('^([^/:]+):')
match = _typeprog.match(url)
if match:
scheme = match.group(1)
return scheme.lower(), url[len(scheme) + 1:]
return None, url
_hostprog = None
def splithost(url):
"""splithost('//host[:port]/path') --> 'host[:port]', '/path'."""
global _hostprog
if _hostprog is None:
import re
_hostprog = re.compile('^//([^/?]*)(.*)$')
match = _hostprog.match(url)
if match:
host_port = match.group(1)
path = match.group(2)
if path and not path.startswith('/'):
path = '/' + path
return host_port, path
return None, url
_userprog = None
def splituser(host):
"""splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'."""
global _userprog
if _userprog is None:
import re
_userprog = re.compile('^(.*)@(.*)$')
match = _userprog.match(host)
if match: return match.group(1, 2)
return None, host
_passwdprog = None
def splitpasswd(user):
"""splitpasswd('user:passwd') -> 'user', 'passwd'."""
global _passwdprog
if _passwdprog is None:
import re
_passwdprog = re.compile('^([^:]*):(.*)$',re.S)
match = _passwdprog.match(user)
if match: return match.group(1, 2)
return user, None
# splittag('/path#tag') --> '/path', 'tag'
_portprog = None
def splitport(host):
"""splitport('host:port') --> 'host', 'port'."""
global _portprog
if _portprog is None:
import re
_portprog = re.compile('^(.*):([0-9]+)$')
match = _portprog.match(host)
if match: return match.group(1, 2)
return host, None
_nportprog = None
def splitnport(host, defport=-1):
"""Split host and port, returning numeric port.
Return given default port if no ':' found; defaults to -1.
Return numerical port if a valid number is found after ':'.
Return None if ':' is present but not followed by a valid number."""
global _nportprog
if _nportprog is None:
import re
_nportprog = re.compile('^(.*):(.*)$')
match = _nportprog.match(host)
if match:
host, port = match.group(1, 2)
try:
if not port: raise ValueError, "no digits"
nport = int(port)
except ValueError:
nport = None
return host, nport
return host, defport
_queryprog = None
def splitquery(url):
"""splitquery('/path?query') --> '/path', 'query'."""
global _queryprog
if _queryprog is None:
import re
_queryprog = re.compile('^(.*)\?([^?]*)$')
match = _queryprog.match(url)
if match: return match.group(1, 2)
return url, None
_tagprog = None
def splittag(url):
"""splittag('/path#tag') --> '/path', 'tag'."""
global _tagprog
if _tagprog is None:
import re
_tagprog = re.compile('^(.*)#([^#]*)$')
match = _tagprog.match(url)
if match: return match.group(1, 2)
return url, None
def splitattr(url):
"""splitattr('/path;attr1=value1;attr2=value2;...') ->
'/path', ['attr1=value1', 'attr2=value2', ...]."""
words = url.split(';')
return words[0], words[1:]
_valueprog = None
def splitvalue(attr):
"""splitvalue('attr=value') --> 'attr', 'value'."""
global _valueprog
if _valueprog is None:
import re
_valueprog = re.compile('^([^=]*)=(.*)$')
match = _valueprog.match(attr)
if match: return match.group(1, 2)
return attr, None
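# A minimal usage sketch (the helper name below is ours, added for
# illustration; it is not part of the module API) showing how the split*
# helpers above peel a URL apart layer by layer:
def _demo_split_helpers():
    scheme, rest = splittype('http://alice:pw@example.com:8080/a/b?q=1#top')
    host, path = splithost(rest)          # 'alice:pw@example.com:8080', '/a/b?q=1#top'
    userinfo, hostport = splituser(host)  # 'alice:pw', 'example.com:8080'
    user, passwd = splitpasswd(userinfo)  # 'alice', 'pw'
    hostname, port = splitport(hostport)  # 'example.com', '8080'
    path, tag = splittag(path)            # '/a/b?q=1', 'top'
    path, query = splitquery(path)        # '/a/b', 'q=1'
    return scheme, user, passwd, hostname, port, path, query, tag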
# urlparse contains a duplicate of this method to avoid a circular import. If
# you update this method, also update the copy in urlparse. This code
# duplication does not exist in Python3.
_hexdig = '0123456789ABCDEFabcdef'
_hextochr = dict((a + b, chr(int(a + b, 16)))
for a in _hexdig for b in _hexdig)
def unquote(s):
"""unquote('abc%20def') -> 'abc def'."""
res = s.split('%')
# fastpath
if len(res) == 1:
return s
s = res[0]
for item in res[1:]:
try:
s += _hextochr[item[:2]] + item[2:]
except KeyError:
s += '%' + item
except UnicodeDecodeError:
s += unichr(int(item[:2], 16)) + item[2:]
return s
def unquote_plus(s):
"""unquote('%7e/abc+def') -> '~/abc def'"""
s = s.replace('+', ' ')
return unquote(s)
always_safe = ('ABCDEFGHIJKLMNOPQRSTUVWXYZ'
'abcdefghijklmnopqrstuvwxyz'
'0123456789' '_.-')
_safe_map = {}
for i, c in zip(xrange(256), str(bytearray(xrange(256)))):
_safe_map[c] = c if (i < 128 and c in always_safe) else '%{:02X}'.format(i)
_safe_quoters = {}
def quote(s, safe='/'):
"""quote('abc def') -> 'abc%20def'
Each part of a URL, e.g. the path info, the query, etc., has a
different set of reserved characters that must be quoted.
RFC 2396 Uniform Resource Identifiers (URI): Generic Syntax lists
the following reserved characters.
reserved = ";" | "/" | "?" | ":" | "@" | "&" | "=" | "+" |
"$" | ","
Each of these characters is reserved in some component of a URL,
but not necessarily in all of them.
By default, the quote function is intended for quoting the path
section of a URL. Thus, it will not encode '/'. This character
is reserved, but in typical usage the quote function is being
called on a path where the existing slash characters are used as
reserved characters.
"""
# fastpath
if not s:
if s is None:
raise TypeError('None object cannot be quoted')
return s
cachekey = (safe, always_safe)
try:
(quoter, safe) = _safe_quoters[cachekey]
except KeyError:
safe_map = _safe_map.copy()
safe_map.update([(c, c) for c in safe])
quoter = safe_map.__getitem__
safe = always_safe + safe
_safe_quoters[cachekey] = (quoter, safe)
if not s.rstrip(safe):
return s
return ''.join(map(quoter, s))
def quote_plus(s, safe=''):
"""Quote the query fragment of a URL; replacing ' ' with '+'"""
if ' ' in s:
s = quote(s, safe + ' ')
return s.replace(' ', '+')
return quote(s, safe)
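# Quick sanity sketch (the helper name is ours, added for illustration):
# percent-encoding round-trips through quote()/unquote(), and quote_plus()
# additionally maps spaces to '+'.
def _demo_quoting():
    assert quote('abc def') == 'abc%20def'
    assert quote_plus('abc def') == 'abc+def'
    assert unquote('abc%20def') == 'abc def'
    assert unquote_plus('abc+def') == 'abc def'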
def urlencode(query, doseq=0):
"""Encode a sequence of two-element tuples or dictionary into a URL query string.
If any values in the query arg are sequences and doseq is true, each
sequence element is converted to a separate parameter.
If the query arg is a sequence of two-element tuples, the order of the
parameters in the output will match the order of parameters in the
input.
"""
if hasattr(query,"items"):
# mapping objects
query = query.items()
else:
# it's a bother at times that strings and string-like objects are
# sequences...
try:
# non-sequence items should not work with len()
# non-empty strings will fail this
if len(query) and not isinstance(query[0], tuple):
raise TypeError
# zero-length sequences of all types will get here and succeed,
# but that's a minor nit - since the original implementation
# allowed empty dicts that type of behavior probably should be
# preserved for consistency
except TypeError:
ty,va,tb = sys.exc_info()
raise TypeError, "not a valid non-string sequence or mapping object", tb
l = []
if not doseq:
# preserve old behavior
for k, v in query:
k = quote_plus(str(k))
v = quote_plus(str(v))
l.append(k + '=' + v)
else:
for k, v in query:
k = quote_plus(str(k))
if isinstance(v, str):
v = quote_plus(v)
l.append(k + '=' + v)
elif _is_unicode(v):
# is there a reasonable way to convert to ASCII?
# encode generates a string, but "replace" or "ignore"
# lose information and "strict" can raise UnicodeError
v = quote_plus(v.encode("ASCII","replace"))
l.append(k + '=' + v)
else:
try:
# is this a sufficient test for sequence-ness?
len(v)
except TypeError:
# not a sequence
v = quote_plus(str(v))
l.append(k + '=' + v)
else:
# loop over the sequence
for elt in v:
l.append(k + '=' + quote_plus(str(elt)))
return '&'.join(l)
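# Illustrative sketch (helper name is ours): with doseq=0 a list value is
# serialized with str() as a single parameter, while doseq=1 turns each
# element into its own key=value pair.
def _demo_urlencode():
    pairs = [('tag', ['a', 'b']), ('q', 'x y')]
    flat = urlencode(pairs)          # "tag=%5B%27a%27%2C+%27b%27%5D&q=x+y"
    seq = urlencode(pairs, doseq=1)  # "tag=a&tag=b&q=x+y"
    return flat, seq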
# Proxy handling
def getproxies_environment():
"""Return a dictionary of scheme -> proxy server URL mappings.
Scan the environment for variables named <scheme>_proxy;
this seems to be the standard convention. If you need a
different way, you can pass a proxies dictionary to the
[Fancy]URLopener constructor.
"""
proxies = {}
for name, value in os.environ.items():
name = name.lower()
if value and name[-6:] == '_proxy':
proxies[name[:-6]] = value
return proxies
def proxy_bypass_environment(host):
"""Test if proxies should not be used for a particular host.
Checks the environment for a variable named no_proxy, which should
be a list of DNS suffixes separated by commas, or '*' for all hosts.
"""
no_proxy = os.environ.get('no_proxy', '') or os.environ.get('NO_PROXY', '')
# '*' is special case for always bypass
if no_proxy == '*':
return 1
# strip port off host
hostonly, port = splitport(host)
# check if the host ends with any of the DNS suffixes
for name in no_proxy.split(','):
if name and (hostonly.endswith(name) or host.endswith(name)):
return 1
# otherwise, don't bypass
return 0
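# Example (illustrative): with no_proxy="localhost,.example.com" set,
# proxy_bypass_environment('api.example.com:8080') strips the port and
# matches the '.example.com' suffix, so it returns 1 (bypass the proxy).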
if sys.platform == 'darwin':
from _scproxy import _get_proxy_settings, _get_proxies
def proxy_bypass_macosx_sysconf(host):
"""
Return True iff this host shouldn't be accessed using a proxy
This function uses the MacOSX framework SystemConfiguration
to fetch the proxy information.
"""
import re
import socket
from fnmatch import fnmatch
hostonly, port = splitport(host)
def ip2num(ipAddr):
parts = ipAddr.split('.')
parts = map(int, parts)
if len(parts) != 4:
parts = (parts + [0, 0, 0, 0])[:4]
return (parts[0] << 24) | (parts[1] << 16) | (parts[2] << 8) | parts[3]
proxy_settings = _get_proxy_settings()
# Check for simple host names:
if '.' not in host:
if proxy_settings['exclude_simple']:
return True
hostIP = None
for value in proxy_settings.get('exceptions', ()):
# Items in the list are strings like these: *.local, 169.254/16
if not value: continue
m = re.match(r"(\d+(?:\.\d+)*)(/\d+)?", value)
if m is not None:
if hostIP is None:
try:
hostIP = socket.gethostbyname(hostonly)
hostIP = ip2num(hostIP)
except socket.error:
continue
base = ip2num(m.group(1))
mask = m.group(2)
if mask is None:
mask = 8 * (m.group(1).count('.') + 1)
else:
mask = int(mask[1:])
mask = 32 - mask
if (hostIP >> mask) == (base >> mask):
return True
elif fnmatch(host, value):
return True
return False
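# Example (illustrative): an 'exceptions' entry "169.254/16" yields
# base=ip2num('169.254') (i.e. 169.254.0.0) and a 16-bit prefix, so any
# host whose resolved address shares the top 16 bits is bypassed.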
def getproxies_macosx_sysconf():
"""Return a dictionary of scheme -> proxy server URL mappings.
This function uses the MacOSX framework SystemConfiguration
to fetch the proxy information.
"""
return _get_proxies()
def proxy_bypass(host):
if getproxies_environment():
return proxy_bypass_environment(host)
else:
return proxy_bypass_macosx_sysconf(host)
def getproxies():
return getproxies_environment() or getproxies_macosx_sysconf()
elif os.name == 'nt':
def getproxies_registry():
"""Return a dictionary of scheme -> proxy server URL mappings.
Win32 uses the registry to store proxies.
"""
proxies = {}
try:
import _winreg
except ImportError:
# Std module, so should be around - but you never know!
return proxies
try:
internetSettings = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER,
r'Software\Microsoft\Windows\CurrentVersion\Internet Settings')
proxyEnable = _winreg.QueryValueEx(internetSettings,
'ProxyEnable')[0]
if proxyEnable:
# Returned as Unicode, but causes problems if not converted to ASCII
proxyServer = str(_winreg.QueryValueEx(internetSettings,
'ProxyServer')[0])
if '=' in proxyServer:
# Per-protocol settings
for p in proxyServer.split(';'):
protocol, address = p.split('=', 1)
# See if address has a type:// prefix
import re
if not re.match('^([^/:]+)://', address):
address = '%s://%s' % (protocol, address)
proxies[protocol] = address
else:
# Use one setting for all protocols
if proxyServer[:5] == 'http:':
proxies['http'] = proxyServer
else:
proxies['http'] = 'http://%s' % proxyServer
proxies['https'] = 'https://%s' % proxyServer
proxies['ftp'] = 'ftp://%s' % proxyServer
internetSettings.Close()
except (WindowsError, ValueError, TypeError):
# Either registry key not found etc, or the value in an
# unexpected format.
# proxies already set up to be empty so nothing to do
pass
return proxies
def getproxies():
"""Return a dictionary of scheme -> proxy server URL mappings.
Returns settings gathered from the environment, if specified,
or the registry.
"""
return getproxies_environment() or getproxies_registry()
def proxy_bypass_registry(host):
try:
import _winreg
import re
except ImportError:
# Std modules, so should be around - but you never know!
return 0
try:
internetSettings = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER,
r'Software\Microsoft\Windows\CurrentVersion\Internet Settings')
proxyEnable = _winreg.QueryValueEx(internetSettings,
'ProxyEnable')[0]
proxyOverride = str(_winreg.QueryValueEx(internetSettings,
'ProxyOverride')[0])
# ^^^^ Returned as Unicode, but causes problems if not converted to ASCII
except WindowsError:
return 0
if not proxyEnable or not proxyOverride:
return 0
# try to make a host list from name and IP address.
rawHost, port = splitport(host)
host = [rawHost]
try:
addr = socket.gethostbyname(rawHost)
if addr != rawHost:
host.append(addr)
except socket.error:
pass
try:
fqdn = socket.getfqdn(rawHost)
if fqdn != rawHost:
host.append(fqdn)
except socket.error:
pass
# make a check value list from the registry entry: replace the
# '<local>' string by the localhost entry and the corresponding
# canonical entry.
proxyOverride = proxyOverride.split(';')
# now check if we match one of the registry values.
for test in proxyOverride:
if test == '<local>':
if '.' not in rawHost:
return 1
test = test.replace(".", r"\.") # mask dots
test = test.replace("*", r".*") # change glob sequence
test = test.replace("?", r".") # change glob char
for val in host:
# print "%s <--> %s" %( test, val )
if re.match(test, val, re.I):
return 1
return 0
def proxy_bypass(host):
"""Return a dictionary of scheme -> proxy server URL mappings.
Returns settings gathered from the environment, if specified,
or the registry.
"""
if getproxies_environment():
return proxy_bypass_environment(host)
else:
return proxy_bypass_registry(host)
else:
# By default use environment variables
getproxies = getproxies_environment
proxy_bypass = proxy_bypass_environment
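# Example (illustrative): with http_proxy="http://proxy:3128" set in the
# environment, getproxies() returns {'http': 'http://proxy:3128'} and
# URLopener-based opens route plain-HTTP requests through that proxy.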
# Test and time quote() and unquote()
def test1():
s = ''
for i in range(256): s = s + chr(i)
s = s*4
t0 = time.time()
qs = quote(s)
uqs = unquote(qs)
t1 = time.time()
if uqs != s:
print 'Wrong!'
print repr(s)
print repr(qs)
print repr(uqs)
print round(t1 - t0, 3), 'sec'
def reporthook(blocknum, blocksize, totalsize):
# Report during remote transfers
print "Block number: %d, Block size: %d, Total size: %d" % (
blocknum, blocksize, totalsize)
# Test program
def test(args=[]):
if not args:
args = [
'/etc/passwd',
'file:/etc/passwd',
'file://localhost/etc/passwd',
'ftp://ftp.gnu.org/pub/README',
'http://www.python.org/index.html',
]
if hasattr(URLopener, "open_https"):
args.append('https://synergy.as.cmu.edu/~geek/')
try:
for url in args:
print '-'*10, url, '-'*10
fn, h = urlretrieve(url, None, reporthook)
print fn
if h:
print '======'
for k in h.keys(): print k + ':', h[k]
print '======'
with open(fn, 'rb') as fp:
data = fp.read()
if '\r' in data:
table = string.maketrans("", "")
data = data.translate(table, "\r")
print data
fn, h = None, None
print '-'*40
finally:
urlcleanup()
def main():
import getopt, sys
try:
opts, args = getopt.getopt(sys.argv[1:], "th")
except getopt.error, msg:
print msg
print "Use -h for help"
return
t = 0
for o, a in opts:
if o == '-t':
t = t + 1
if o == '-h':
print "Usage: python urllib.py [-t] [url ...]"
print "-t runs self-test;",
print "otherwise, contents of urls are printed"
return
if t:
if t > 1:
test1()
test(args)
else:
if not args:
print "Use -h for help"
for url in args:
print urlopen(url).read(),
# Run test program when run as a script
if __name__ == '__main__':
main()
|
metashell/metashell | refs/heads/master | 3rd/templight/clang/utils/CIndex/completion_logger_server.py | 35 | #!/usr/bin/env python
from __future__ import absolute_import, division, print_function
import sys
from socket import *
from time import strftime
import datetime
def main():
if len(sys.argv) < 4:
print("completion_logger_server.py <listen address> <listen port> <log file>")
exit(1)
host = sys.argv[1]
port = int(sys.argv[2])
buf = 1024 * 8
addr = (host,port)
# Create socket and bind to address
UDPSock = socket(AF_INET,SOCK_DGRAM)
UDPSock.bind(addr)
print("Listing on {0}:{1} and logging to '{2}'".format(host, port, sys.argv[3]))
# Open the logging file.
f = open(sys.argv[3], "a")
# Receive messages
while 1:
data,addr = UDPSock.recvfrom(buf)
if not data:
break
else:
f.write("{ ");
f.write("\"time\": \"{0}\"".format(datetime.datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S')))
f.write(", \"sender\": \"{0}\" ".format(addr[0]))
f.write(", \"data\": ")
f.write(data)
f.write(" }\n")
f.flush()
# Close socket
UDPSock.close()
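# A minimal client sketch (added for illustration; the function and its
# defaults are ours, not part of the original script) that sends one JSON
# payload to the logger over UDP:
def send_test_message(host='127.0.0.1', port=9999, payload=b'{"x": 1}'):
    # assumes the server above is already listening on (host, port)
    sock = socket(AF_INET, SOCK_DGRAM)
    sock.sendto(payload, (host, port))
    sock.close()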
if __name__ == '__main__':
main()
|
kejbaly2/invoke | refs/heads/master | tests/_support/configs/runtime.py | 12 | from spec import eq_
from invoke import ctask
@ctask
def mytask(c):
eq_(c.hooray, 'yaml')
|
geoffret/litmus-rt | refs/heads/master | tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/EventClass.py | 4653 | # EventClass.py
#
# This is a library defining some events types classes, which could
# be used by other scripts to analyzing the perf samples.
#
# Currently there are just a few classes defined for examples,
# PerfEvent is the base class for all perf event sample, PebsEvent
# is a HW base Intel x86 PEBS event, and user could add more SW/HW
# event classes based on requirements.
import struct
# Event types, user could add more here
EVTYPE_GENERIC = 0
EVTYPE_PEBS = 1 # Basic PEBS event
EVTYPE_PEBS_LL = 2 # PEBS event with load latency info
EVTYPE_IBS = 3
#
# Currently we don't have good way to tell the event type, but by
# the size of raw buffer, raw PEBS event with load latency data's
# size is 176 bytes, while the pure PEBS event's size is 144 bytes.
#
def create_event(name, comm, dso, symbol, raw_buf):
if (len(raw_buf) == 144):
event = PebsEvent(name, comm, dso, symbol, raw_buf)
elif (len(raw_buf) == 176):
event = PebsNHM(name, comm, dso, symbol, raw_buf)
else:
event = PerfEvent(name, comm, dso, symbol, raw_buf)
return event
class PerfEvent(object):
event_num = 0
def __init__(self, name, comm, dso, symbol, raw_buf, ev_type=EVTYPE_GENERIC):
self.name = name
self.comm = comm
self.dso = dso
self.symbol = symbol
self.raw_buf = raw_buf
self.ev_type = ev_type
PerfEvent.event_num += 1
def show(self):
print "PMU event: name=%12s, symbol=%24s, comm=%8s, dso=%12s" % (self.name, self.symbol, self.comm, self.dso)
#
# Basic Intel PEBS (Precise Event-based Sampling) event, whose raw buffer
# contains the context info when that event happened: the EFLAGS and
# linear IP info, as well as all the registers.
#
class PebsEvent(PerfEvent):
pebs_num = 0
def __init__(self, name, comm, dso, symbol, raw_buf, ev_type=EVTYPE_PEBS):
tmp_buf=raw_buf[0:80]
flags, ip, ax, bx, cx, dx, si, di, bp, sp = struct.unpack('QQQQQQQQQQ', tmp_buf)
self.flags = flags
self.ip = ip
self.ax = ax
self.bx = bx
self.cx = cx
self.dx = dx
self.si = si
self.di = di
self.bp = bp
self.sp = sp
PerfEvent.__init__(self, name, comm, dso, symbol, raw_buf, ev_type)
PebsEvent.pebs_num += 1
del tmp_buf
#
# Intel Nehalem and Westmere support PEBS plus Load Latency info which lie
# in the four 64 bit words write after the PEBS data:
# Status: records the IA32_PERF_GLOBAL_STATUS register value
# DLA: Data Linear Address (EIP)
# DSE: Data Source Encoding, where the latency happens, hit or miss
# in L1/L2/L3 or IO operations
# LAT: the actual latency in cycles
#
class PebsNHM(PebsEvent):
pebs_nhm_num = 0
def __init__(self, name, comm, dso, symbol, raw_buf, ev_type=EVTYPE_PEBS_LL):
tmp_buf=raw_buf[144:176]
status, dla, dse, lat = struct.unpack('QQQQ', tmp_buf)
self.status = status
self.dla = dla
self.dse = dse
self.lat = lat
PebsEvent.__init__(self, name, comm, dso, symbol, raw_buf, ev_type)
PebsNHM.pebs_nhm_num += 1
del tmp_buf
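# A small usage sketch (added for illustration; the field values are made
# up): a 144-byte raw buffer routes through create_event() to PebsEvent,
# whose constructor unpacks the first ten 64-bit register values.
def _demo_create_event():
    raw = struct.pack('QQQQQQQQQQ', *range(10)) + '\x00' * 64  # 80 + 64 = 144 bytes
    ev = create_event("cycles", "bash", "/bin/bash", "main", raw)
    assert isinstance(ev, PebsEvent)
    assert ev.ip == 1  # second unpacked quadword
    return ev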
|
the-c0d3r/jcu-api | refs/heads/master | lib/website.py | 1 | import urllib
import re
# import requests
from classes import Classes
class Website:
def __init__(self):
self.url = "http://afm.jcu.edu.sg/JCU/InfoDisplay/DailyCourseInformation.aspx"
self.page = self.getPage()
self.classes = []
self.parse()
def getPage(self):
try:
# headers = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/57.0.2987.133 Safari/537.36'}
# r = requests.get(self.url, headers=headers)
# return r.read()
return urllib.urlopen(self.url).readlines()
except IOError:
print("Unable to connect to JCU")
exit()
def parse(self):
pattern = re.compile(r'<td class="BTsubj">(.+)</td><td class="BTclass">(.+)</td><td class="BTtime">(.+)</td><td class="BTroom">(.+)</td></tr>')
# be wary of using regular expression to parse html
# refer to this link -> https://stackoverflow.com/a/1732454/1509809
# but hey it works right now, i don't want to waste time experimenting tags and children with beautifulsoup
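# Example (illustrative) of a row this pattern matches:
#   <td class="BTsubj">CP1404</td><td class="BTclass">Lecture</td><td class="BTtime">09:00 - 11:00</td><td class="BTroom">C1-01</td></tr>
# findall() then yields ('CP1404', 'Lecture', '09:00 - 11:00', 'C1-01'),
# and the [:5] / [8:] slices below split "09:00 - 11:00" into start and end.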
for line in self.page:
if len(pattern.findall(line)) != 0:
rawdata = pattern.findall(line)[0]
clsname = rawdata[0]
clstype = rawdata[1]
clstime = [rawdata[2][:5], rawdata[2][8:]] # a list with starting and ending time
clsroom = rawdata[3]
tempcls = Classes(clsname, clstype, clstime, clsroom)
self.classes.append(tempcls)
def getClassInfo(self, subjCode):
"""
Filter the classes by subject code and return the matching class object, or None.
"""
for cls in self.classes:
if subjCode in cls.name:
return cls
return None
def getClasses(self, codes):
"""
Get all the classes for the provided list of subject codes
"""
result = []
for subjCode in codes:
temp = self.getClassInfo(subjCode.upper())
if temp:
result.append(temp)
return result
def getRoomInfo(self, roomNumber):
"""
Return a list of classes held at roomNumber.
"""
result = []
for cls in self.classes:
if cls.room.lower() == roomNumber.lower():
result.append(cls)
return result
|
2014c2g19/2014c2g19 | refs/heads/master | exts/wsgi/static/Brython2.1.0-20140419-113919/Lib/importlib/machinery.py | 106 | """The machinery of importlib: finders, loaders, hooks, etc."""
import _imp
from ._bootstrap import (SOURCE_SUFFIXES, DEBUG_BYTECODE_SUFFIXES,
OPTIMIZED_BYTECODE_SUFFIXES, BYTECODE_SUFFIXES,
EXTENSION_SUFFIXES)
from ._bootstrap import BuiltinImporter
from ._bootstrap import FrozenImporter
from ._bootstrap import WindowsRegistryFinder
from ._bootstrap import PathFinder
from ._bootstrap import FileFinder
from ._bootstrap import SourceFileLoader
from ._bootstrap import SourcelessFileLoader
from ._bootstrap import ExtensionFileLoader
def all_suffixes():
"""Returns a list of all recognized module suffixes for this process"""
return SOURCE_SUFFIXES + BYTECODE_SUFFIXES + EXTENSION_SUFFIXES
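# Example (illustrative): on CPython this typically yields suffixes such as
# '.py', '.pyc', and extension-module suffixes like '.so' or '.pyd'.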
|
gcodetogit/depot_tools | refs/heads/master | third_party/pylint/config.py | 67 | # Copyright (c) 2003-2013 LOGILAB S.A. (Paris, FRANCE).
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 2 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""utilities for Pylint configuration :
* pylintrc
* pylint.d (PYLINTHOME)
"""
from __future__ import with_statement
from __future__ import print_function
import pickle
import os
import sys
from os.path import exists, isfile, join, expanduser, abspath, dirname
# pylint home is used to save old runs results ################################
USER_HOME = expanduser('~')
if 'PYLINTHOME' in os.environ:
PYLINT_HOME = os.environ['PYLINTHOME']
if USER_HOME == '~':
USER_HOME = dirname(PYLINT_HOME)
elif USER_HOME == '~':
PYLINT_HOME = ".pylint.d"
else:
PYLINT_HOME = join(USER_HOME, '.pylint.d')
def get_pdata_path(base_name, recurs):
"""return the path of the file which should contain old search data for the
given base_name with the given option values
"""
base_name = base_name.replace(os.sep, '_')
return join(PYLINT_HOME, "%s%s%s"%(base_name, recurs, '.stats'))
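# Example (illustrative): get_pdata_path('pkg/mod', 1) maps the base name's
# path separators to underscores, giving '<PYLINT_HOME>/pkg_mod1.stats'.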
def load_results(base):
"""try to unpickle and return data from file if it exists and is not
corrupted
return an empty dictionary if it doesn't exists
"""
data_file = get_pdata_path(base, 1)
try:
with open(data_file, _PICK_LOAD) as stream:
return pickle.load(stream)
except Exception: # pylint: disable=broad-except
return {}
if sys.version_info < (3, 0):
_PICK_DUMP, _PICK_LOAD = 'w', 'r'
else:
_PICK_DUMP, _PICK_LOAD = 'wb', 'rb'
def save_results(results, base):
"""pickle results"""
if not exists(PYLINT_HOME):
try:
os.mkdir(PYLINT_HOME)
except OSError:
print('Unable to create directory %s' % PYLINT_HOME, file=sys.stderr)
data_file = get_pdata_path(base, 1)
try:
with open(data_file, _PICK_DUMP) as stream:
pickle.dump(results, stream)
except (IOError, OSError) as ex:
print('Unable to create file %s: %s' % (data_file, ex), file=sys.stderr)
# location of the configuration file ##########################################
def find_pylintrc():
"""search the pylint rc file and return its path if it find it, else None
"""
# is there a pylint rc file in the current directory ?
if exists('pylintrc'):
return abspath('pylintrc')
if isfile('__init__.py'):
curdir = abspath(os.getcwd())
while isfile(join(curdir, '__init__.py')):
curdir = abspath(join(curdir, '..'))
if isfile(join(curdir, 'pylintrc')):
return join(curdir, 'pylintrc')
if 'PYLINTRC' in os.environ and exists(os.environ['PYLINTRC']):
pylintrc = os.environ['PYLINTRC']
else:
user_home = expanduser('~')
if user_home == '~' or user_home == '/root':
pylintrc = ".pylintrc"
else:
pylintrc = join(user_home, '.pylintrc')
if not isfile(pylintrc):
pylintrc = join(user_home, '.config', 'pylintrc')
if not isfile(pylintrc):
if isfile('/etc/pylintrc'):
pylintrc = '/etc/pylintrc'
else:
pylintrc = None
return pylintrc
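# Search order (summary): ./pylintrc; then a pylintrc next to the topmost
# package enclosing the current directory; then $PYLINTRC; then
# ~/.pylintrc, ~/.config/pylintrc, and finally /etc/pylintrc.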
PYLINTRC = find_pylintrc()
ENV_HELP = '''
The following environment variables are used:
* PYLINTHOME
Path to the directory where persistent data for the run will be stored. If
not found, it defaults to ~/.pylint.d/ or .pylint.d (in the current working
directory).
* PYLINTRC
Path to the configuration file. See the documentation for the method used
to search for configuration file.
''' % globals()
# evaluation messages #########################################################
def get_note_message(note):
"""return a message according to note
note is a float < 10 (10 is the highest note)
"""
assert note <= 10, "Note is %.2f. Either you cheated, or pylint's \
broken!" % note
if note < 0:
msg = 'You have to do something quick !'
elif note < 1:
msg = 'Hey! This is really dreadful. Or maybe pylint is buggy?'
elif note < 2:
msg = "Come on! You can't be proud of this code"
elif note < 3:
msg = 'Hum... Needs work.'
elif note < 4:
msg = 'Wouldn\'t you be a bit lazy?'
elif note < 5:
msg = 'A little more work would make it acceptable.'
elif note < 6:
msg = 'Just the bare minimum. Give it a bit more polish. '
elif note < 7:
msg = 'This is okay-ish, but I\'m sure you can do better.'
elif note < 8:
msg = 'If you commit now, people should not be making nasty \
comments about you on c.l.py'
elif note < 9:
msg = 'That\'s pretty good. Good work mate.'
elif note < 10:
msg = 'So close to being perfect...'
else:
msg = 'Wow ! Now this deserves our uttermost respect.\nPlease send \
your code to python-projects@logilab.org'
return msg
|
1013553207/django | refs/heads/master | tests/admin_filters/tests.py | 86 | from __future__ import unicode_literals
import datetime
from django.contrib.admin import (
AllValuesFieldListFilter, BooleanFieldListFilter, ModelAdmin,
RelatedOnlyFieldListFilter, SimpleListFilter, site,
)
from django.contrib.admin.views.main import ChangeList
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from django.core.exceptions import ImproperlyConfigured
from django.test import RequestFactory, TestCase, override_settings
from django.utils import six
from django.utils.encoding import force_text
from .models import Book, Bookmark, Department, Employee, TaggedItem
def select_by(dictlist, key, value):
return [x for x in dictlist if x[key] == value][0]
class DecadeListFilter(SimpleListFilter):
def lookups(self, request, model_admin):
return (
('the 80s', "the 1980's"),
('the 90s', "the 1990's"),
('the 00s', "the 2000's"),
('other', "other decades"),
)
def queryset(self, request, queryset):
decade = self.value()
if decade == 'the 80s':
return queryset.filter(year__gte=1980, year__lte=1989)
if decade == 'the 90s':
return queryset.filter(year__gte=1990, year__lte=1999)
if decade == 'the 00s':
return queryset.filter(year__gte=2000, year__lte=2009)
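# (Note: lookups() supplies the (value, label) choices rendered in the admin
# sidebar, and queryset() narrows the changelist using self.value();
# returning None from queryset() leaves the queryset unfiltered.)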
class DecadeListFilterWithTitleAndParameter(DecadeListFilter):
title = 'publication decade'
parameter_name = 'publication-decade'
class DecadeListFilterWithoutTitle(DecadeListFilter):
parameter_name = 'publication-decade'
class DecadeListFilterWithoutParameter(DecadeListFilter):
title = 'publication decade'
class DecadeListFilterWithNoneReturningLookups(DecadeListFilterWithTitleAndParameter):
def lookups(self, request, model_admin):
pass
class DecadeListFilterWithFailingQueryset(DecadeListFilterWithTitleAndParameter):
def queryset(self, request, queryset):
raise 1 / 0
class DecadeListFilterWithQuerysetBasedLookups(DecadeListFilterWithTitleAndParameter):
def lookups(self, request, model_admin):
qs = model_admin.get_queryset(request)
if qs.filter(year__gte=1980, year__lte=1989).exists():
yield ('the 80s', "the 1980's")
if qs.filter(year__gte=1990, year__lte=1999).exists():
yield ('the 90s', "the 1990's")
if qs.filter(year__gte=2000, year__lte=2009).exists():
yield ('the 00s', "the 2000's")
class DecadeListFilterParameterEndsWith__In(DecadeListFilter):
title = 'publication decade'
parameter_name = 'decade__in' # Ends with '__in'
class DecadeListFilterParameterEndsWith__Isnull(DecadeListFilter):
title = 'publication decade'
parameter_name = 'decade__isnull' # Ends with '__isnull'
class DepartmentListFilterLookupWithNonStringValue(SimpleListFilter):
title = 'department'
parameter_name = 'department'
def lookups(self, request, model_admin):
return sorted({
(employee.department.id, # Intentionally not a string (Refs #19318)
employee.department.code)
for employee in model_admin.get_queryset(request).all()
})
def queryset(self, request, queryset):
if self.value():
return queryset.filter(department__id=self.value())
class DepartmentListFilterLookupWithUnderscoredParameter(DepartmentListFilterLookupWithNonStringValue):
parameter_name = 'department__whatever'
class DepartmentListFilterLookupWithDynamicValue(DecadeListFilterWithTitleAndParameter):
def lookups(self, request, model_admin):
if self.value() == 'the 80s':
return (('the 90s', "the 1990's"),)
elif self.value() == 'the 90s':
return (('the 80s', "the 1980's"),)
else:
return (('the 80s', "the 1980's"), ('the 90s', "the 1990's"),)
class CustomUserAdmin(UserAdmin):
list_filter = ('books_authored', 'books_contributed')
class BookAdmin(ModelAdmin):
list_filter = ('year', 'author', 'contributors', 'is_best_seller', 'date_registered', 'no')
ordering = ('-id',)
class BookAdminWithTupleBooleanFilter(BookAdmin):
list_filter = (
'year',
'author',
'contributors',
('is_best_seller', BooleanFieldListFilter),
'date_registered',
'no',
)
class BookAdminWithUnderscoreLookupAndTuple(BookAdmin):
list_filter = (
'year',
('author__email', AllValuesFieldListFilter),
'contributors',
'is_best_seller',
'date_registered',
'no',
)
class BookAdminWithCustomQueryset(ModelAdmin):
def __init__(self, user, *args, **kwargs):
self.user = user
super(BookAdminWithCustomQueryset, self).__init__(*args, **kwargs)
list_filter = ('year',)
def get_queryset(self, request):
return super(BookAdminWithCustomQueryset, self).get_queryset(request).filter(author=self.user)
class BookAdminRelatedOnlyFilter(ModelAdmin):
list_filter = (
'year', 'is_best_seller', 'date_registered', 'no',
('author', RelatedOnlyFieldListFilter),
('contributors', RelatedOnlyFieldListFilter),
)
ordering = ('-id',)
class DecadeFilterBookAdmin(ModelAdmin):
list_filter = ('author', DecadeListFilterWithTitleAndParameter)
ordering = ('-id',)
class DecadeFilterBookAdminWithoutTitle(ModelAdmin):
list_filter = (DecadeListFilterWithoutTitle,)
class DecadeFilterBookAdminWithoutParameter(ModelAdmin):
list_filter = (DecadeListFilterWithoutParameter,)
class DecadeFilterBookAdminWithNoneReturningLookups(ModelAdmin):
list_filter = (DecadeListFilterWithNoneReturningLookups,)
class DecadeFilterBookAdminWithFailingQueryset(ModelAdmin):
list_filter = (DecadeListFilterWithFailingQueryset,)
class DecadeFilterBookAdminWithQuerysetBasedLookups(ModelAdmin):
list_filter = (DecadeListFilterWithQuerysetBasedLookups,)
class DecadeFilterBookAdminParameterEndsWith__In(ModelAdmin):
list_filter = (DecadeListFilterParameterEndsWith__In,)
class DecadeFilterBookAdminParameterEndsWith__Isnull(ModelAdmin):
list_filter = (DecadeListFilterParameterEndsWith__Isnull,)
class EmployeeAdmin(ModelAdmin):
list_display = ['name', 'department']
list_filter = ['department']
class DepartmentFilterEmployeeAdmin(EmployeeAdmin):
list_filter = [DepartmentListFilterLookupWithNonStringValue, ]
class DepartmentFilterUnderscoredEmployeeAdmin(EmployeeAdmin):
list_filter = [DepartmentListFilterLookupWithUnderscoredParameter, ]
class DepartmentFilterDynamicValueBookAdmin(EmployeeAdmin):
list_filter = [DepartmentListFilterLookupWithDynamicValue, ]
class BookmarkAdminGenericRelation(ModelAdmin):
list_filter = ['tags__tag']
class ListFiltersTests(TestCase):
def setUp(self):
self.today = datetime.date.today()
self.tomorrow = self.today + datetime.timedelta(days=1)
self.one_week_ago = self.today - datetime.timedelta(days=7)
if self.today.month == 12:
self.next_month = self.today.replace(year=self.today.year + 1, month=1, day=1)
else:
self.next_month = self.today.replace(month=self.today.month + 1, day=1)
self.next_year = self.today.replace(year=self.today.year + 1, month=1, day=1)
self.request_factory = RequestFactory()
# Users
self.alfred = User.objects.create_user('alfred', 'alfred@example.com')
self.bob = User.objects.create_user('bob', 'bob@example.com')
self.lisa = User.objects.create_user('lisa', 'lisa@example.com')
# Books
self.djangonaut_book = Book.objects.create(
title='Djangonaut: an art of living', year=2009,
author=self.alfred, is_best_seller=True, date_registered=self.today,
)
self.bio_book = Book.objects.create(
title='Django: a biography', year=1999, author=self.alfred,
is_best_seller=False, no=207,
)
self.django_book = Book.objects.create(
title='The Django Book', year=None, author=self.bob,
is_best_seller=None, date_registered=self.today, no=103,
)
self.gipsy_book = Book.objects.create(
title='Gipsy guitar for dummies', year=2002, is_best_seller=True,
date_registered=self.one_week_ago,
)
self.gipsy_book.contributors = [self.bob, self.lisa]
self.gipsy_book.save()
# Departments
self.dev = Department.objects.create(code='DEV', description='Development')
self.design = Department.objects.create(code='DSN', description='Design')
# Employees
self.john = Employee.objects.create(name='John Blue', department=self.dev)
self.jack = Employee.objects.create(name='Jack Red', department=self.design)
def get_changelist(self, request, model, modeladmin):
return ChangeList(
request, model, modeladmin.list_display,
modeladmin.list_display_links, modeladmin.list_filter,
modeladmin.date_hierarchy, modeladmin.search_fields,
modeladmin.list_select_related, modeladmin.list_per_page,
modeladmin.list_max_show_all, modeladmin.list_editable, modeladmin,
)
def test_datefieldlistfilter(self):
modeladmin = BookAdmin(Book, site)
request = self.request_factory.get('/')
changelist = self.get_changelist(request, Book, modeladmin)
request = self.request_factory.get('/', {'date_registered__gte': self.today,
'date_registered__lt': self.tomorrow})
changelist = self.get_changelist(request, Book, modeladmin)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.django_book, self.djangonaut_book])
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][4]
self.assertEqual(force_text(filterspec.title), 'date registered')
choice = select_by(filterspec.choices(changelist), "display", "Today")
self.assertEqual(choice['selected'], True)
self.assertEqual(
choice['query_string'],
'?date_registered__gte=%s&date_registered__lt=%s' % (
self.today,
self.tomorrow,
)
)
request = self.request_factory.get('/', {'date_registered__gte': self.today.replace(day=1),
'date_registered__lt': self.next_month})
changelist = self.get_changelist(request, Book, modeladmin)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
if (self.today.year, self.today.month) == (self.one_week_ago.year, self.one_week_ago.month):
# In case one week ago is in the same month.
self.assertEqual(list(queryset), [self.gipsy_book, self.django_book, self.djangonaut_book])
else:
self.assertEqual(list(queryset), [self.django_book, self.djangonaut_book])
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][4]
self.assertEqual(force_text(filterspec.title), 'date registered')
choice = select_by(filterspec.choices(changelist), "display", "This month")
self.assertEqual(choice['selected'], True)
self.assertEqual(
choice['query_string'],
'?date_registered__gte=%s&date_registered__lt=%s' % (
self.today.replace(day=1),
self.next_month,
)
)
request = self.request_factory.get('/', {'date_registered__gte': self.today.replace(month=1, day=1),
'date_registered__lt': self.next_year})
changelist = self.get_changelist(request, Book, modeladmin)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
if self.today.year == self.one_week_ago.year:
# In case one week ago is in the same year.
self.assertEqual(list(queryset), [self.gipsy_book, self.django_book, self.djangonaut_book])
else:
self.assertEqual(list(queryset), [self.django_book, self.djangonaut_book])
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][4]
self.assertEqual(force_text(filterspec.title), 'date registered')
choice = select_by(filterspec.choices(changelist), "display", "This year")
self.assertEqual(choice['selected'], True)
self.assertEqual(
choice['query_string'],
'?date_registered__gte=%s&date_registered__lt=%s' % (
self.today.replace(month=1, day=1),
self.next_year,
)
)
request = self.request_factory.get('/', {
'date_registered__gte': str(self.one_week_ago),
'date_registered__lt': str(self.tomorrow),
})
changelist = self.get_changelist(request, Book, modeladmin)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.gipsy_book, self.django_book, self.djangonaut_book])
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][4]
self.assertEqual(force_text(filterspec.title), 'date registered')
choice = select_by(filterspec.choices(changelist), "display", "Past 7 days")
self.assertEqual(choice['selected'], True)
self.assertEqual(
choice['query_string'],
'?date_registered__gte=%s&date_registered__lt=%s' % (
str(self.one_week_ago),
str(self.tomorrow),
)
)
@override_settings(USE_TZ=True)
def test_datefieldlistfilter_with_time_zone_support(self):
# Regression for #17830
self.test_datefieldlistfilter()
def test_allvaluesfieldlistfilter(self):
modeladmin = BookAdmin(Book, site)
request = self.request_factory.get('/', {'year__isnull': 'True'})
changelist = self.get_changelist(request, Book, modeladmin)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.django_book])
# Make sure the last choice is None and is selected
filterspec = changelist.get_filters(request)[0][0]
self.assertEqual(force_text(filterspec.title), 'year')
choices = list(filterspec.choices(changelist))
self.assertEqual(choices[-1]['selected'], True)
self.assertEqual(choices[-1]['query_string'], '?year__isnull=True')
request = self.request_factory.get('/', {'year': '2002'})
changelist = self.get_changelist(request, Book, modeladmin)
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][0]
self.assertEqual(force_text(filterspec.title), 'year')
choices = list(filterspec.choices(changelist))
self.assertEqual(choices[2]['selected'], True)
self.assertEqual(choices[2]['query_string'], '?year=2002')
def test_allvaluesfieldlistfilter_custom_qs(self):
# Make sure that correct filters are returned with custom querysets
modeladmin = BookAdminWithCustomQueryset(self.alfred, Book, site)
request = self.request_factory.get('/')
changelist = self.get_changelist(request, Book, modeladmin)
filterspec = changelist.get_filters(request)[0][0]
choices = list(filterspec.choices(changelist))
# Should have 'All', 1999 and 2009 options i.e. the subset of years of
# books written by alfred (which is the filtering criteria set by
# BookAdminWithCustomQueryset.get_queryset())
self.assertEqual(3, len(choices))
self.assertEqual(choices[0]['query_string'], '?')
self.assertEqual(choices[1]['query_string'], '?year=1999')
self.assertEqual(choices[2]['query_string'], '?year=2009')
def test_relatedfieldlistfilter_foreignkey(self):
modeladmin = BookAdmin(Book, site)
request = self.request_factory.get('/')
changelist = self.get_changelist(request, Book, modeladmin)
# Make sure that all users are present in the author's list filter
filterspec = changelist.get_filters(request)[0][1]
expected = [(self.alfred.pk, 'alfred'), (self.bob.pk, 'bob'), (self.lisa.pk, 'lisa')]
self.assertEqual(sorted(filterspec.lookup_choices), sorted(expected))
request = self.request_factory.get('/', {'author__isnull': 'True'})
changelist = self.get_changelist(request, Book, modeladmin)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.gipsy_book])
# Make sure the last choice is None and is selected
filterspec = changelist.get_filters(request)[0][1]
self.assertEqual(force_text(filterspec.title), 'Verbose Author')
choices = list(filterspec.choices(changelist))
self.assertEqual(choices[-1]['selected'], True)
self.assertEqual(choices[-1]['query_string'], '?author__isnull=True')
request = self.request_factory.get('/', {'author__id__exact': self.alfred.pk})
changelist = self.get_changelist(request, Book, modeladmin)
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][1]
self.assertEqual(force_text(filterspec.title), 'Verbose Author')
# order of choices depends on User model, which has no order
choice = select_by(filterspec.choices(changelist), "display", "alfred")
self.assertEqual(choice['selected'], True)
self.assertEqual(choice['query_string'], '?author__id__exact=%d' % self.alfred.pk)
def test_relatedfieldlistfilter_manytomany(self):
modeladmin = BookAdmin(Book, site)
request = self.request_factory.get('/')
changelist = self.get_changelist(request, Book, modeladmin)
# Make sure that all users are present in the contrib's list filter
filterspec = changelist.get_filters(request)[0][2]
expected = [(self.alfred.pk, 'alfred'), (self.bob.pk, 'bob'), (self.lisa.pk, 'lisa')]
self.assertEqual(sorted(filterspec.lookup_choices), sorted(expected))
request = self.request_factory.get('/', {'contributors__isnull': 'True'})
changelist = self.get_changelist(request, Book, modeladmin)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.django_book, self.bio_book, self.djangonaut_book])
# Make sure the last choice is None and is selected
filterspec = changelist.get_filters(request)[0][2]
self.assertEqual(force_text(filterspec.title), 'Verbose Contributors')
choices = list(filterspec.choices(changelist))
self.assertEqual(choices[-1]['selected'], True)
self.assertEqual(choices[-1]['query_string'], '?contributors__isnull=True')
request = self.request_factory.get('/', {'contributors__id__exact': self.bob.pk})
changelist = self.get_changelist(request, Book, modeladmin)
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][2]
self.assertEqual(force_text(filterspec.title), 'Verbose Contributors')
choice = select_by(filterspec.choices(changelist), "display", "bob")
self.assertEqual(choice['selected'], True)
self.assertEqual(choice['query_string'], '?contributors__id__exact=%d' % self.bob.pk)
def test_relatedfieldlistfilter_reverse_relationships(self):
modeladmin = CustomUserAdmin(User, site)
# FK relationship -----
request = self.request_factory.get('/', {'books_authored__isnull': 'True'})
changelist = self.get_changelist(request, User, modeladmin)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.lisa])
# Make sure the last choice is None and is selected
filterspec = changelist.get_filters(request)[0][0]
self.assertEqual(force_text(filterspec.title), 'book')
choices = list(filterspec.choices(changelist))
self.assertEqual(choices[-1]['selected'], True)
self.assertEqual(choices[-1]['query_string'], '?books_authored__isnull=True')
request = self.request_factory.get('/', {'books_authored__id__exact': self.bio_book.pk})
changelist = self.get_changelist(request, User, modeladmin)
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][0]
self.assertEqual(force_text(filterspec.title), 'book')
choice = select_by(filterspec.choices(changelist), "display", self.bio_book.title)
self.assertEqual(choice['selected'], True)
self.assertEqual(choice['query_string'], '?books_authored__id__exact=%d' % self.bio_book.pk)
# M2M relationship -----
request = self.request_factory.get('/', {'books_contributed__isnull': 'True'})
changelist = self.get_changelist(request, User, modeladmin)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.alfred])
# Make sure the last choice is None and is selected
filterspec = changelist.get_filters(request)[0][1]
self.assertEqual(force_text(filterspec.title), 'book')
choices = list(filterspec.choices(changelist))
self.assertEqual(choices[-1]['selected'], True)
self.assertEqual(choices[-1]['query_string'], '?books_contributed__isnull=True')
request = self.request_factory.get('/', {'books_contributed__id__exact': self.django_book.pk})
changelist = self.get_changelist(request, User, modeladmin)
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][1]
self.assertEqual(force_text(filterspec.title), 'book')
choice = select_by(filterspec.choices(changelist), "display", self.django_book.title)
self.assertEqual(choice['selected'], True)
self.assertEqual(choice['query_string'], '?books_contributed__id__exact=%d' % self.django_book.pk)
# With one book, the list filter should appear because there is also a
# (None) option.
Book.objects.exclude(pk=self.djangonaut_book.pk).delete()
filterspec = changelist.get_filters(request)[0]
self.assertEqual(len(filterspec), 2)
# With no books remaining, no list filters should appear.
Book.objects.all().delete()
filterspec = changelist.get_filters(request)[0]
self.assertEqual(len(filterspec), 0)
def test_relatedonlyfieldlistfilter_foreignkey(self):
modeladmin = BookAdminRelatedOnlyFilter(Book, site)
request = self.request_factory.get('/')
changelist = self.get_changelist(request, Book, modeladmin)
# Make sure that only actual authors are present in author's list filter
filterspec = changelist.get_filters(request)[0][4]
expected = [(self.alfred.pk, 'alfred'), (self.bob.pk, 'bob')]
self.assertEqual(sorted(filterspec.lookup_choices), sorted(expected))
def test_relatedonlyfieldlistfilter_manytomany(self):
modeladmin = BookAdminRelatedOnlyFilter(Book, site)
request = self.request_factory.get('/')
changelist = self.get_changelist(request, Book, modeladmin)
# Make sure that only actual contributors are present in contrib's list filter
filterspec = changelist.get_filters(request)[0][5]
expected = [(self.bob.pk, 'bob'), (self.lisa.pk, 'lisa')]
self.assertEqual(sorted(filterspec.lookup_choices), sorted(expected))
def test_listfilter_genericrelation(self):
django_bookmark = Bookmark.objects.create(url='https://www.djangoproject.com/')
python_bookmark = Bookmark.objects.create(url='https://www.python.org/')
kernel_bookmark = Bookmark.objects.create(url='https://www.kernel.org/')
TaggedItem.objects.create(content_object=django_bookmark, tag='python')
TaggedItem.objects.create(content_object=python_bookmark, tag='python')
TaggedItem.objects.create(content_object=kernel_bookmark, tag='linux')
modeladmin = BookmarkAdminGenericRelation(Bookmark, site)
request = self.request_factory.get('/', {'tags__tag': 'python'})
changelist = self.get_changelist(request, Bookmark, modeladmin)
queryset = changelist.get_queryset(request)
expected = [python_bookmark, django_bookmark]
self.assertEqual(list(queryset), expected)
def test_booleanfieldlistfilter(self):
modeladmin = BookAdmin(Book, site)
self.verify_booleanfieldlistfilter(modeladmin)
def test_booleanfieldlistfilter_tuple(self):
modeladmin = BookAdminWithTupleBooleanFilter(Book, site)
self.verify_booleanfieldlistfilter(modeladmin)
def verify_booleanfieldlistfilter(self, modeladmin):
request = self.request_factory.get('/')
changelist = self.get_changelist(request, Book, modeladmin)
request = self.request_factory.get('/', {'is_best_seller__exact': 0})
changelist = self.get_changelist(request, Book, modeladmin)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.bio_book])
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][3]
self.assertEqual(force_text(filterspec.title), 'is best seller')
choice = select_by(filterspec.choices(changelist), "display", "No")
self.assertEqual(choice['selected'], True)
self.assertEqual(choice['query_string'], '?is_best_seller__exact=0')
request = self.request_factory.get('/', {'is_best_seller__exact': 1})
changelist = self.get_changelist(request, Book, modeladmin)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.gipsy_book, self.djangonaut_book])
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][3]
self.assertEqual(force_text(filterspec.title), 'is best seller')
choice = select_by(filterspec.choices(changelist), "display", "Yes")
self.assertEqual(choice['selected'], True)
self.assertEqual(choice['query_string'], '?is_best_seller__exact=1')
request = self.request_factory.get('/', {'is_best_seller__isnull': 'True'})
changelist = self.get_changelist(request, Book, modeladmin)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.django_book])
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][3]
self.assertEqual(force_text(filterspec.title), 'is best seller')
choice = select_by(filterspec.choices(changelist), "display", "Unknown")
self.assertEqual(choice['selected'], True)
self.assertEqual(choice['query_string'], '?is_best_seller__isnull=True')
def test_fieldlistfilter_underscorelookup_tuple(self):
"""
        Ensure ('fieldpath', ClassName) lookups pass lookup_allowed checks
        when the fieldpath value contains a double underscore.
Refs #19182
"""
modeladmin = BookAdminWithUnderscoreLookupAndTuple(Book, site)
request = self.request_factory.get('/')
changelist = self.get_changelist(request, Book, modeladmin)
request = self.request_factory.get('/', {'author__email': 'alfred@example.com'})
changelist = self.get_changelist(request, Book, modeladmin)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.bio_book, self.djangonaut_book])
def test_simplelistfilter(self):
modeladmin = DecadeFilterBookAdmin(Book, site)
# Make sure that the first option is 'All' ---------------------------
request = self.request_factory.get('/', {})
changelist = self.get_changelist(request, Book, modeladmin)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), list(Book.objects.all().order_by('-id')))
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][1]
self.assertEqual(force_text(filterspec.title), 'publication decade')
choices = list(filterspec.choices(changelist))
self.assertEqual(choices[0]['display'], 'All')
self.assertEqual(choices[0]['selected'], True)
self.assertEqual(choices[0]['query_string'], '?')
# Look for books in the 1980s ----------------------------------------
request = self.request_factory.get('/', {'publication-decade': 'the 80s'})
changelist = self.get_changelist(request, Book, modeladmin)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [])
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][1]
self.assertEqual(force_text(filterspec.title), 'publication decade')
choices = list(filterspec.choices(changelist))
self.assertEqual(choices[1]['display'], 'the 1980\'s')
self.assertEqual(choices[1]['selected'], True)
self.assertEqual(choices[1]['query_string'], '?publication-decade=the+80s')
# Look for books in the 1990s ----------------------------------------
request = self.request_factory.get('/', {'publication-decade': 'the 90s'})
changelist = self.get_changelist(request, Book, modeladmin)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.bio_book])
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][1]
self.assertEqual(force_text(filterspec.title), 'publication decade')
choices = list(filterspec.choices(changelist))
self.assertEqual(choices[2]['display'], 'the 1990\'s')
self.assertEqual(choices[2]['selected'], True)
self.assertEqual(choices[2]['query_string'], '?publication-decade=the+90s')
# Look for books in the 2000s ----------------------------------------
request = self.request_factory.get('/', {'publication-decade': 'the 00s'})
changelist = self.get_changelist(request, Book, modeladmin)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.gipsy_book, self.djangonaut_book])
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][1]
self.assertEqual(force_text(filterspec.title), 'publication decade')
choices = list(filterspec.choices(changelist))
self.assertEqual(choices[3]['display'], 'the 2000\'s')
self.assertEqual(choices[3]['selected'], True)
self.assertEqual(choices[3]['query_string'], '?publication-decade=the+00s')
# Combine multiple filters -------------------------------------------
request = self.request_factory.get('/', {'publication-decade': 'the 00s', 'author__id__exact': self.alfred.pk})
changelist = self.get_changelist(request, Book, modeladmin)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.djangonaut_book])
# Make sure the correct choices are selected
filterspec = changelist.get_filters(request)[0][1]
self.assertEqual(force_text(filterspec.title), 'publication decade')
choices = list(filterspec.choices(changelist))
self.assertEqual(choices[3]['display'], 'the 2000\'s')
self.assertEqual(choices[3]['selected'], True)
self.assertEqual(
choices[3]['query_string'],
'?author__id__exact=%s&publication-decade=the+00s' % self.alfred.pk
)
filterspec = changelist.get_filters(request)[0][0]
self.assertEqual(force_text(filterspec.title), 'Verbose Author')
choice = select_by(filterspec.choices(changelist), "display", "alfred")
self.assertEqual(choice['selected'], True)
self.assertEqual(choice['query_string'], '?author__id__exact=%s&publication-decade=the+00s' % self.alfred.pk)
def test_listfilter_without_title(self):
"""
Any filter must define a title.
"""
modeladmin = DecadeFilterBookAdminWithoutTitle(Book, site)
request = self.request_factory.get('/', {})
six.assertRaisesRegex(self, ImproperlyConfigured,
"The list filter 'DecadeListFilterWithoutTitle' does not specify a 'title'.",
self.get_changelist, request, Book, modeladmin)
def test_simplelistfilter_without_parameter(self):
"""
Any SimpleListFilter must define a parameter_name.
"""
modeladmin = DecadeFilterBookAdminWithoutParameter(Book, site)
request = self.request_factory.get('/', {})
six.assertRaisesRegex(self, ImproperlyConfigured,
"The list filter 'DecadeListFilterWithoutParameter' does not specify a 'parameter_name'.",
self.get_changelist, request, Book, modeladmin)
def test_simplelistfilter_with_none_returning_lookups(self):
"""
        A SimpleListFilter's lookups method may return None, which disables
        the filter completely.
"""
modeladmin = DecadeFilterBookAdminWithNoneReturningLookups(Book, site)
request = self.request_factory.get('/', {})
changelist = self.get_changelist(request, Book, modeladmin)
filterspec = changelist.get_filters(request)[0]
self.assertEqual(len(filterspec), 0)
def test_filter_with_failing_queryset(self):
"""
Ensure that when a filter's queryset method fails, it fails loudly and
the corresponding exception doesn't get swallowed.
Refs #17828.
"""
modeladmin = DecadeFilterBookAdminWithFailingQueryset(Book, site)
request = self.request_factory.get('/', {})
self.assertRaises(ZeroDivisionError, self.get_changelist, request, Book, modeladmin)
def test_simplelistfilter_with_queryset_based_lookups(self):
modeladmin = DecadeFilterBookAdminWithQuerysetBasedLookups(Book, site)
request = self.request_factory.get('/', {})
changelist = self.get_changelist(request, Book, modeladmin)
filterspec = changelist.get_filters(request)[0][0]
self.assertEqual(force_text(filterspec.title), 'publication decade')
choices = list(filterspec.choices(changelist))
self.assertEqual(len(choices), 3)
self.assertEqual(choices[0]['display'], 'All')
self.assertEqual(choices[0]['selected'], True)
self.assertEqual(choices[0]['query_string'], '?')
self.assertEqual(choices[1]['display'], 'the 1990\'s')
self.assertEqual(choices[1]['selected'], False)
self.assertEqual(choices[1]['query_string'], '?publication-decade=the+90s')
self.assertEqual(choices[2]['display'], 'the 2000\'s')
self.assertEqual(choices[2]['selected'], False)
self.assertEqual(choices[2]['query_string'], '?publication-decade=the+00s')
def test_two_characters_long_field(self):
"""
        Ensure that list_filter works with two-character-long field names.
Refs #16080.
"""
modeladmin = BookAdmin(Book, site)
request = self.request_factory.get('/', {'no': '207'})
changelist = self.get_changelist(request, Book, modeladmin)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.bio_book])
filterspec = changelist.get_filters(request)[0][-1]
self.assertEqual(force_text(filterspec.title), 'number')
choices = list(filterspec.choices(changelist))
self.assertEqual(choices[2]['selected'], True)
self.assertEqual(choices[2]['query_string'], '?no=207')
def test_parameter_ends_with__in__or__isnull(self):
"""
Ensure that a SimpleListFilter's parameter name is not mistaken for a
model field if it ends with '__isnull' or '__in'.
Refs #17091.
"""
# When it ends with '__in' -----------------------------------------
modeladmin = DecadeFilterBookAdminParameterEndsWith__In(Book, site)
request = self.request_factory.get('/', {'decade__in': 'the 90s'})
changelist = self.get_changelist(request, Book, modeladmin)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.bio_book])
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][0]
self.assertEqual(force_text(filterspec.title), 'publication decade')
choices = list(filterspec.choices(changelist))
self.assertEqual(choices[2]['display'], 'the 1990\'s')
self.assertEqual(choices[2]['selected'], True)
self.assertEqual(choices[2]['query_string'], '?decade__in=the+90s')
# When it ends with '__isnull' ---------------------------------------
modeladmin = DecadeFilterBookAdminParameterEndsWith__Isnull(Book, site)
request = self.request_factory.get('/', {'decade__isnull': 'the 90s'})
changelist = self.get_changelist(request, Book, modeladmin)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.bio_book])
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][0]
self.assertEqual(force_text(filterspec.title), 'publication decade')
choices = list(filterspec.choices(changelist))
self.assertEqual(choices[2]['display'], 'the 1990\'s')
self.assertEqual(choices[2]['selected'], True)
self.assertEqual(choices[2]['query_string'], '?decade__isnull=the+90s')
def test_lookup_with_non_string_value(self):
"""
        Ensure choices are given the selected class when using non-string
        values for lookups in SimpleListFilters.
Refs #19318
"""
modeladmin = DepartmentFilterEmployeeAdmin(Employee, site)
request = self.request_factory.get('/', {'department': self.john.pk})
changelist = self.get_changelist(request, Employee, modeladmin)
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.john])
filterspec = changelist.get_filters(request)[0][-1]
self.assertEqual(force_text(filterspec.title), 'department')
choices = list(filterspec.choices(changelist))
self.assertEqual(choices[1]['display'], 'DEV')
self.assertEqual(choices[1]['selected'], True)
self.assertEqual(choices[1]['query_string'], '?department=%s' % self.john.pk)
def test_lookup_with_non_string_value_underscored(self):
"""
        Ensure SimpleListFilter lookups pass lookup_allowed checks when the
        parameter_name attribute contains a double underscore.
Refs #19182
"""
modeladmin = DepartmentFilterUnderscoredEmployeeAdmin(Employee, site)
request = self.request_factory.get('/', {'department__whatever': self.john.pk})
changelist = self.get_changelist(request, Employee, modeladmin)
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.john])
filterspec = changelist.get_filters(request)[0][-1]
self.assertEqual(force_text(filterspec.title), 'department')
choices = list(filterspec.choices(changelist))
self.assertEqual(choices[1]['display'], 'DEV')
self.assertEqual(choices[1]['selected'], True)
self.assertEqual(choices[1]['query_string'], '?department__whatever=%s' % self.john.pk)
def test_fk_with_to_field(self):
"""
Ensure that a filter on a FK respects the FK's to_field attribute.
Refs #17972.
"""
modeladmin = EmployeeAdmin(Employee, site)
request = self.request_factory.get('/', {})
changelist = self.get_changelist(request, Employee, modeladmin)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.jack, self.john])
filterspec = changelist.get_filters(request)[0][-1]
self.assertEqual(force_text(filterspec.title), 'department')
choices = list(filterspec.choices(changelist))
self.assertEqual(choices[0]['display'], 'All')
self.assertEqual(choices[0]['selected'], True)
self.assertEqual(choices[0]['query_string'], '?')
self.assertEqual(choices[1]['display'], 'Development')
self.assertEqual(choices[1]['selected'], False)
self.assertEqual(choices[1]['query_string'], '?department__code__exact=DEV')
self.assertEqual(choices[2]['display'], 'Design')
self.assertEqual(choices[2]['selected'], False)
self.assertEqual(choices[2]['query_string'], '?department__code__exact=DSN')
# Filter by Department=='Development' --------------------------------
request = self.request_factory.get('/', {'department__code__exact': 'DEV'})
changelist = self.get_changelist(request, Employee, modeladmin)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.john])
filterspec = changelist.get_filters(request)[0][-1]
self.assertEqual(force_text(filterspec.title), 'department')
choices = list(filterspec.choices(changelist))
self.assertEqual(choices[0]['display'], 'All')
self.assertEqual(choices[0]['selected'], False)
self.assertEqual(choices[0]['query_string'], '?')
self.assertEqual(choices[1]['display'], 'Development')
self.assertEqual(choices[1]['selected'], True)
self.assertEqual(choices[1]['query_string'], '?department__code__exact=DEV')
self.assertEqual(choices[2]['display'], 'Design')
self.assertEqual(choices[2]['selected'], False)
self.assertEqual(choices[2]['query_string'], '?department__code__exact=DSN')
def test_lookup_with_dynamic_value(self):
"""
Ensure SimpleListFilter can access self.value() inside the lookup.
"""
modeladmin = DepartmentFilterDynamicValueBookAdmin(Book, site)
def _test_choices(request, expected_displays):
changelist = self.get_changelist(request, Book, modeladmin)
filterspec = changelist.get_filters(request)[0][0]
self.assertEqual(force_text(filterspec.title), 'publication decade')
choices = tuple(c['display'] for c in filterspec.choices(changelist))
self.assertEqual(choices, expected_displays)
_test_choices(self.request_factory.get('/', {}),
("All", "the 1980's", "the 1990's"))
_test_choices(self.request_factory.get('/', {'publication-decade': 'the 80s'}),
("All", "the 1990's"))
_test_choices(self.request_factory.get('/', {'publication-decade': 'the 90s'}),
("All", "the 1980's"))
|
sbarton272/AcousticBarcodes-Explorations | refs/heads/master | barcodes/dxfwrite/tests/test_engine.py | 1 | #!/usr/bin/env python
#coding:utf-8
# Created: 15.11.2010
# Copyright (C) 2010, Manfred Moitzi
# License: MIT License
__author__ = "mozman <mozman@gmx.at>"
import unittest
from dxfwrite.engine import DXFEngine
class TestDXFEngine(unittest.TestCase):
def test_drawing(self):
self.assertTrue(DXFEngine.drawing())
def test_layer(self):
self.assertTrue(DXFEngine.layer(name="TEST"))
def test_style(self):
self.assertTrue(DXFEngine.style(name="TEST"))
def test_linetype(self):
self.assertTrue(DXFEngine.linetype(name="TEST"))
def test_view(self):
self.assertTrue(DXFEngine.view(name="TEST"))
def test_viewport(self):
self.assertTrue(DXFEngine.vport(name="TEST"))
def test_ucs(self):
self.assertTrue(DXFEngine.ucs(name="TEST"))
def test_appid(self):
self.assertTrue(DXFEngine.appid(name="TEST"))
def test_linepattern(self):
self.assertTrue(DXFEngine.linepattern(pattern=[1, 1, 2, 1]))
def test_line(self):
self.assertTrue(DXFEngine.line(start=(0, 0), end=(1, 1)))
def test_point(self):
self.assertTrue(DXFEngine.point(point=(0, 0)))
def test_solid(self):
self.assertTrue(DXFEngine.solid(points=[(0, 0), (1, 1), (0, 1)]))
def test_trace(self):
self.assertTrue(DXFEngine.trace(points=[(0, 0), (1, 1), (0, 1)]))
def test_circle(self):
self.assertTrue(DXFEngine.circle(radius=1, center=(0, 0)))
def test_arc(self):
self.assertTrue(DXFEngine.arc(radius=1, center=(0, 0), startangle=10,
endangle=350))
def test_text(self):
self.assertTrue(DXFEngine.text(text="TEXT", insert=(0, 0), height=3.))
def test_shape(self):
self.assertTrue(DXFEngine.shape(name="TEST", insert=(0, 0)))
def test_insert(self):
self.assertTrue(DXFEngine.insert(blockname="TEST", insert=(0, 0)))
def test_attdef(self):
self.assertTrue(DXFEngine.attdef(tag="TEST", insert=(0, 0)))
def test_attrib(self):
self.assertTrue(DXFEngine.attrib(text="TEXT", insert=(0, 0)))
def test_face3d(self):
self.assertTrue(DXFEngine.face3d(points=[(0, 0), (1, 1), (0, 1)]))
def test_block(self):
self.assertTrue(DXFEngine.block(name="TEST", basepoint=(0, 0)))
def test_polyline(self):
self.assertTrue(DXFEngine.polyline(points=[(0, 0), (1, 1), (0, 1)]))
def test_polymesh(self):
self.assertTrue(DXFEngine.polymesh(nrows=10, ncols=10))
def test_polyface(self):
self.assertTrue(DXFEngine.polyface(precision=5))
def test_mtext(self):
self.assertTrue(DXFEngine.mtext("TEXT", insert=(0,0), linespacing=1.5))
def test_rectangle(self):
self.assertTrue(DXFEngine.rectangle(insert=(0,0), width=10, height=10))
def test_table(self):
self.assertTrue(DXFEngine.table(insert=(0, 0), nrows=10, ncols=10,
default_grid=True))
def test_ellipse(self):
self.assertTrue(DXFEngine.ellipse(center=(0,0), rx=3, ry=1))
def test_spline(self):
self.assertTrue(DXFEngine.spline(points=[(0,0), (2,1), (5,3)],
segments=100))
def test_bezier(self):
self.assertTrue(DXFEngine.bezier())
def test_clothoid(self):
self.assertTrue(DXFEngine.clothoid(start=(0,0), length=30, paramA=2))
def test_insert2(self):
block = DXFEngine.block('TEST')
self.assertTrue(DXFEngine.insert2(block, insert=(0,0), attribs={}))
if __name__=='__main__':
unittest.main()
|
auready/django | refs/heads/master | tests/null_fk_ordering/tests.py | 133 | from django.test import TestCase
from .models import Article, Author, Comment, Forum, Post, SystemInfo
class NullFkOrderingTests(TestCase):
def test_ordering_across_null_fk(self):
"""
Regression test for #7512
ordering across nullable Foreign Keys shouldn't exclude results
"""
author_1 = Author.objects.create(name='Tom Jones')
author_2 = Author.objects.create(name='Bob Smith')
Article.objects.create(title='No author on this article')
Article.objects.create(author=author_1, title='This article written by Tom Jones')
Article.objects.create(author=author_2, title='This article written by Bob Smith')
# We can't compare results directly (since different databases sort NULLs to
# different ends of the ordering), but we can check that all results are
# returned.
self.assertEqual(len(list(Article.objects.all())), 3)
s = SystemInfo.objects.create(system_name='System Info')
f = Forum.objects.create(system_info=s, forum_name='First forum')
p = Post.objects.create(forum=f, title='First Post')
Comment.objects.create(post=p, comment_text='My first comment')
Comment.objects.create(comment_text='My second comment')
s2 = SystemInfo.objects.create(system_name='More System Info')
f2 = Forum.objects.create(system_info=s2, forum_name='Second forum')
p2 = Post.objects.create(forum=f2, title='Second Post')
Comment.objects.create(comment_text='Another first comment')
Comment.objects.create(post=p2, comment_text='Another second comment')
# We have to test this carefully. Some databases sort NULL values before
# everything else, some sort them afterwards. So we extract the ordered list
# and check the length. Before the fix, this list was too short (some values
# were omitted).
self.assertEqual(len(list(Comment.objects.all())), 4)
|
endlessm/chromium-browser | refs/heads/master | third_party/grpc/src/examples/python/auth/test/_auth_example_test.py | 1 | # Copyright 2019 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test for gRPC Python authentication example."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import unittest
import grpc
from examples.python.auth import _credentials
from examples.python.auth import customized_auth_client
from examples.python.auth import customized_auth_server
_SERVER_ADDR_TEMPLATE = 'localhost:%d'
class AuthExampleTest(unittest.TestCase):
def test_successful_call(self):
with customized_auth_server.run_server(0) as port:
with customized_auth_client.create_client_channel(
_SERVER_ADDR_TEMPLATE % port) as channel:
customized_auth_client.send_rpc(channel)
# No unhandled exception raised, test passed!
def test_no_channel_credential(self):
with customized_auth_server.run_server(0) as port:
with grpc.insecure_channel(_SERVER_ADDR_TEMPLATE % port) as channel:
resp = customized_auth_client.send_rpc(channel)
self.assertEqual(resp.code(), grpc.StatusCode.UNAVAILABLE)
def test_no_call_credential(self):
with customized_auth_server.run_server(0) as port:
channel_credential = grpc.ssl_channel_credentials(
_credentials.ROOT_CERTIFICATE)
with grpc.secure_channel(_SERVER_ADDR_TEMPLATE % port,
channel_credential) as channel:
resp = customized_auth_client.send_rpc(channel)
self.assertEqual(resp.code(), grpc.StatusCode.UNAUTHENTICATED)
if __name__ == '__main__':
unittest.main(verbosity=2)
|
sam-tsai/django | refs/heads/master | django/contrib/gis/geos/prototypes/topology.py | 338 | """
This module houses the GEOS ctypes prototype functions for the
topological operations on geometries.
"""
from ctypes import c_double, c_int
from django.contrib.gis.geos.libgeos import GEOM_PTR, GEOSFuncFactory
from django.contrib.gis.geos.prototypes.errcheck import (
check_geom, check_minus_one, check_string,
)
from django.contrib.gis.geos.prototypes.geom import geos_char_p
class Topology(GEOSFuncFactory):
"For GEOS unary topology functions."
argtypes = [GEOM_PTR]
restype = GEOM_PTR
errcheck = staticmethod(check_geom)
# Topology Routines
geos_boundary = Topology('GEOSBoundary')
geos_buffer = Topology('GEOSBuffer', argtypes=[GEOM_PTR, c_double, c_int])
geos_centroid = Topology('GEOSGetCentroid')
geos_convexhull = Topology('GEOSConvexHull')
geos_difference = Topology('GEOSDifference', argtypes=[GEOM_PTR, GEOM_PTR])
geos_envelope = Topology('GEOSEnvelope')
geos_intersection = Topology('GEOSIntersection', argtypes=[GEOM_PTR, GEOM_PTR])
geos_linemerge = Topology('GEOSLineMerge')
geos_pointonsurface = Topology('GEOSPointOnSurface')
geos_preservesimplify = Topology('GEOSTopologyPreserveSimplify', argtypes=[GEOM_PTR, c_double])
geos_simplify = Topology('GEOSSimplify', argtypes=[GEOM_PTR, c_double])
geos_symdifference = Topology('GEOSSymDifference', argtypes=[GEOM_PTR, GEOM_PTR])
geos_union = Topology('GEOSUnion', argtypes=[GEOM_PTR, GEOM_PTR])
geos_cascaded_union = GEOSFuncFactory('GEOSUnionCascaded', argtypes=[GEOM_PTR], restype=GEOM_PTR)
# GEOSRelate returns a string, not a geometry.
geos_relate = GEOSFuncFactory(
'GEOSRelate', argtypes=[GEOM_PTR, GEOM_PTR], restype=geos_char_p, errcheck=check_string
)
# Linear referencing routines
geos_project = GEOSFuncFactory(
'GEOSProject', argtypes=[GEOM_PTR, GEOM_PTR], restype=c_double, errcheck=check_minus_one
)
geos_interpolate = Topology('GEOSInterpolate', argtypes=[GEOM_PTR, c_double])
geos_project_normalized = GEOSFuncFactory(
'GEOSProjectNormalized', argtypes=[GEOM_PTR, GEOM_PTR], restype=c_double, errcheck=check_minus_one
)
geos_interpolate_normalized = Topology('GEOSInterpolateNormalized', argtypes=[GEOM_PTR, c_double])
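# Minimal usage sketch (hypothetical; assumes g1 and g2 are valid GEOM_PTR
# handles obtained elsewhere from the GEOS geometry-creation routines):
#   merged = geos_union(g1, g2)    # GEOM_PTR for the union geometry
#   matrix = geos_relate(g1, g2)   # DE-9IM intersection-matrix string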
|
gnuhub/intellij-community | refs/heads/master | python/testData/inspections/PyBroadExceptionInspection/test.py | 83 | class ExceptionKlass(Exception):
pass
try:
function_throws_exception()
except <weak_warning descr="Too broad exception clause">Exception</weak_warning>:
pass
try:
function_throws_exception()
except <weak_warning descr="Too broad exception clause">BaseException</weak_warning>:
pass
try:
function_throws_exception()
except ExceptionKlass:
pass
try:
function_throws_exception()
<weak_warning descr="Too broad exception clause">except</weak_warning>:
pass
class Exception:
pass
try:
function_throws_exception()
except Exception:
pass
try:
doSomething()
except:
someCleanup()
raise
result = []
## PY-2698
try:
function_throws_exception()
except Exception, e:
result.append(e) |
splotz90/urh | refs/heads/master | src/urh/signalprocessing/Spectrogram.py | 1 | import math
import numpy as np
from PyQt5.QtGui import QImage
from urh import colormaps
from urh.cythonext import util
from urh.util.Logger import logger
class Spectrogram(object):
MAX_LINES_PER_VIEW = 1000
DEFAULT_FFT_WINDOW_SIZE = 1024
def __init__(self, samples: np.ndarray, window_size=DEFAULT_FFT_WINDOW_SIZE,
overlap_factor=0.5, window_function=np.hanning):
"""
:param samples: Complex samples
:param window_size: Size of DFT window
:param overlap_factor: Value between 0 (= No Overlapping) and 1 (= Full overlapping) of windows
:param window_function: Function for DFT window
"""
self.__samples = samples
self.__window_size = window_size
self.__overlap_factor = overlap_factor
self.__window_function = window_function
self.data_min, self.data_max = -140, 10
@property
def samples(self):
return self.__samples
@samples.setter
def samples(self, value):
self.__samples = value
@property
def window_size(self):
return self.__window_size
@window_size.setter
def window_size(self, value):
self.__window_size = value
@property
def overlap_factor(self):
return self.__overlap_factor
@overlap_factor.setter
def overlap_factor(self, value):
self.__overlap_factor = value
@property
def window_function(self):
return self.__window_function
@window_function.setter
def window_function(self, value):
self.__window_function = value
@property
def time_bins(self):
return int(math.ceil(len(self.samples) / self.hop_size))
@property
def freq_bins(self):
return self.window_size
@property
def hop_size(self):
"""
        The hop size determines by how many samples the window is advanced.
"""
return self.window_size - int(self.overlap_factor * self.window_size)
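    # Worked example (illustrative numbers, not from the original module):
    # with window_size=1024 and overlap_factor=0.5 the hop size is
    # 1024 - int(0.5 * 1024) = 512, so consecutive windows overlap by half.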
def stft(self, samples: np.ndarray):
"""
Perform Short-time Fourier transform to get the spectrogram for the given samples
:return: short-time Fourier transform of the given signal
"""
window = self.window_function(self.window_size)
hop_size = self.hop_size
if len(samples) < self.window_size:
samples = np.append(samples, np.zeros(self.window_size - len(samples)))
num_frames = max(1, (len(samples) - self.window_size) // hop_size + 1)
# Get frames as numpy view with stride_tricks to save RAM
# Same as: frames = [padded_samples[i*hop_size:i*hop_size+self.window_size] for i in range(num_frames)]
shape = (num_frames, self.window_size)
strides = (hop_size * samples.strides[-1], samples.strides[-1])
frames = np.lib.stride_tricks.as_strided(samples, shape=shape, strides=strides)
result = np.fft.fft(frames * window, self.window_size) / np.atleast_1d(self.window_size)
return result
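    # Minimal usage sketch (hypothetical data; assumes numpy imported as np,
    # as at the top of this module):
    #   samples = np.zeros(4096, dtype=np.complex64)
    #   spec = Spectrogram(samples)
    #   frames = spec.stft(samples)  # shape: (num_frames, window_size)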
def __calculate_spectrogram(self, samples: np.ndarray) -> np.ndarray:
# Only shift axis 1 (frequency) and not time
spectrogram = np.fft.fftshift(self.stft(samples), axes=(1,))
spectrogram = util.arr2decibel(spectrogram.astype(np.complex64))
# Flip Array so Y axis goes from negative to positive
return np.fliplr(spectrogram)
def create_spectrogram_image(self, sample_start: int=None, sample_end: int=None, step: int=None, transpose=False):
spectrogram = self.__calculate_spectrogram(self.samples[sample_start:sample_end:step])
if transpose:
spectrogram = np.flipud(spectrogram.T)
return self.create_image(spectrogram, colormaps.chosen_colormap_numpy_bgra, self.data_min, self.data_max)
def create_image_segments(self):
n_segments = max(1, self.time_bins // self.MAX_LINES_PER_VIEW)
step = self.time_bins / n_segments
step = max(1, int((step / self.hop_size) * self.hop_size ** 2))
for i in range(0, len(self.samples), step):
image = self.create_spectrogram_image(sample_start=i, sample_end=i+step)
yield image
@staticmethod
def apply_bgra_lookup(data: np.ndarray, colormap, data_min=None, data_max=None, normalize=True) -> np.ndarray:
if normalize and (data_min is None or data_max is None):
raise ValueError("Can't normalize without data min and data max")
if normalize:
normalized_values = (len(colormap) - 1) * ((data.T - data_min) / (data_max - data_min))
else:
normalized_values = data.T
return np.take(colormap, normalized_values.astype(np.int), axis=0, mode='clip')
@staticmethod
def create_image(data: np.ndarray, colormap, data_min=None, data_max=None, normalize=True) -> QImage:
"""
Create QImage from ARGB array.
The ARGB must have shape (width, height, 4) and dtype=ubyte.
NOTE: The order of values in the 3rd axis must be (blue, green, red, alpha).
:return:
"""
image_data = Spectrogram.apply_bgra_lookup(data, colormap, data_min, data_max, normalize)
if not image_data.flags['C_CONTIGUOUS']:
logger.debug("Array was not C_CONTIGUOUS. Converting it.")
image_data = np.ascontiguousarray(image_data)
try:
# QImage constructor needs inverted row/column order
image = QImage(image_data.ctypes.data, image_data.shape[1], image_data.shape[0], QImage.Format_ARGB32)
except Exception as e:
logger.error("could not create image " + str(e))
return QImage()
image.data = image_data
return image
@staticmethod
def create_colormap_image(colormap_name: str, height=100) -> QImage:
colormap = colormaps.calculate_numpy_brga_for(colormap_name)
indices = np.zeros((len(colormap), height), dtype=np.int64)
for i in np.arange(len(colormap), dtype=np.int64):
indices[i, :] = np.repeat(i, height)
return Spectrogram.create_image(indices, colormap, normalize=False)
|
elventear/ansible | refs/heads/devel | lib/ansible/galaxy/login.py | 12 | #!/usr/bin/env python
########################################################################
#
# (C) 2015, Chris Houseknecht <chouse@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
########################################################################
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import getpass
import json
from ansible.compat.six.moves.urllib.parse import quote as urlquote, urlparse
from ansible.compat.six.moves.urllib.error import HTTPError
from ansible.errors import AnsibleError, AnsibleOptionsError
from ansible.module_utils.urls import open_url
from ansible.utils.color import stringc
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
class GalaxyLogin(object):
    ''' Class to handle authenticating a user with the Galaxy API prior to performing CUD operations '''
GITHUB_AUTH = 'https://api.github.com/authorizations'
def __init__(self, galaxy, github_token=None):
self.galaxy = galaxy
self.github_username = None
self.github_password = None
if github_token is None:
self.get_credentials()
def get_credentials(self):
display.display(u'\n\n' + "We need your " + stringc("Github login",'bright cyan') +
" to identify you.", screen_only=True)
display.display("This information will " + stringc("not be sent to Galaxy",'bright cyan') +
", only to " + stringc("api.github.com.","yellow"), screen_only=True)
display.display("The password will not be displayed." + u'\n\n', screen_only=True)
display.display("Use " + stringc("--github-token",'yellow') +
" if you do not want to enter your password." + u'\n\n', screen_only=True)
try:
self.github_username = raw_input("Github Username: ")
except:
pass
try:
self.github_password = getpass.getpass("Password for %s: " % self.github_username)
except:
pass
if not self.github_username or not self.github_password:
raise AnsibleError("Invalid Github credentials. Username and password are required.")
def remove_github_token(self):
'''
If for some reason an ansible-galaxy token was left from a prior login, remove it. We cannot
retrieve the token after creation, so we are forced to create a new one.
'''
try:
tokens = json.load(open_url(self.GITHUB_AUTH, url_username=self.github_username,
url_password=self.github_password, force_basic_auth=True,))
except HTTPError as e:
res = json.load(e)
raise AnsibleError(res['message'])
for token in tokens:
if token['note'] == 'ansible-galaxy login':
display.vvvvv('removing token: %s' % token['token_last_eight'])
try:
open_url('https://api.github.com/authorizations/%d' % token['id'], url_username=self.github_username,
url_password=self.github_password, method='DELETE', force_basic_auth=True,)
except HTTPError as e:
res = json.load(e)
raise AnsibleError(res['message'])
def create_github_token(self):
'''
Create a personal authorization token with a note of 'ansible-galaxy login'
'''
self.remove_github_token()
args = json.dumps({"scopes":["public_repo"], "note":"ansible-galaxy login"})
try:
data = json.load(open_url(self.GITHUB_AUTH, url_username=self.github_username,
url_password=self.github_password, force_basic_auth=True, data=args))
except HTTPError as e:
res = json.load(e)
raise AnsibleError(res['message'])
return data['token']
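    # Minimal usage sketch (hypothetical; `galaxy` stands for the Galaxy API
    # wrapper instance supplied by the caller):
    #   login = GalaxyLogin(galaxy)        # prompts for GitHub credentials
    #   token = login.create_github_token()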
|
Baha/z3 | refs/heads/master | src/api/python/z3types.py | 22 | import ctypes, z3core
class Z3Exception(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return str(self.value)
class ContextObj(ctypes.c_void_p):
def __init__(self, context): self._as_parameter_ = context
def from_param(obj): return obj
class Config(ctypes.c_void_p):
def __init__(self, config): self._as_parameter_ = config
def from_param(obj): return obj
class Symbol(ctypes.c_void_p):
def __init__(self, symbol): self._as_parameter_ = symbol
def from_param(obj): return obj
class Sort(ctypes.c_void_p):
def __init__(self, sort): self._as_parameter_ = sort
def from_param(obj): return obj
class FuncDecl(ctypes.c_void_p):
def __init__(self, decl): self._as_parameter_ = decl
def from_param(obj): return obj
class Ast(ctypes.c_void_p):
def __init__(self, ast): self._as_parameter_ = ast
def from_param(obj): return obj
class Pattern(ctypes.c_void_p):
def __init__(self, pattern): self._as_parameter_ = pattern
def from_param(obj): return obj
class Model(ctypes.c_void_p):
def __init__(self, model): self._as_parameter_ = model
def from_param(obj): return obj
class Literals(ctypes.c_void_p):
def __init__(self, literals): self._as_parameter_ = literals
def from_param(obj): return obj
class Constructor(ctypes.c_void_p):
def __init__(self, constructor): self._as_parameter_ = constructor
def from_param(obj): return obj
class ConstructorList(ctypes.c_void_p):
def __init__(self, constructor_list): self._as_parameter_ = constructor_list
def from_param(obj): return obj
class GoalObj(ctypes.c_void_p):
def __init__(self, goal): self._as_parameter_ = goal
def from_param(obj): return obj
class TacticObj(ctypes.c_void_p):
def __init__(self, tactic): self._as_parameter_ = tactic
def from_param(obj): return obj
class ProbeObj(ctypes.c_void_p):
def __init__(self, probe): self._as_parameter_ = probe
def from_param(obj): return obj
class ApplyResultObj(ctypes.c_void_p):
def __init__(self, obj): self._as_parameter_ = obj
def from_param(obj): return obj
class StatsObj(ctypes.c_void_p):
def __init__(self, statistics): self._as_parameter_ = statistics
def from_param(obj): return obj
class SolverObj(ctypes.c_void_p):
def __init__(self, solver): self._as_parameter_ = solver
def from_param(obj): return obj
class FixedpointObj(ctypes.c_void_p):
def __init__(self, fixedpoint): self._as_parameter_ = fixedpoint
def from_param(obj): return obj
class ModelObj(ctypes.c_void_p):
def __init__(self, model): self._as_parameter_ = model
def from_param(obj): return obj
class AstVectorObj(ctypes.c_void_p):
def __init__(self, vector): self._as_parameter_ = vector
def from_param(obj): return obj
class AstMapObj(ctypes.c_void_p):
def __init__(self, ast_map): self._as_parameter_ = ast_map
def from_param(obj): return obj
class Params(ctypes.c_void_p):
def __init__(self, params): self._as_parameter_ = params
def from_param(obj): return obj
class ParamDescrs(ctypes.c_void_p):
def __init__(self, paramdescrs): self._as_parameter_ = paramdescrs
def from_param(obj): return obj
class FuncInterpObj(ctypes.c_void_p):
def __init__(self, f): self._as_parameter_ = f
def from_param(obj): return obj
class FuncEntryObj(ctypes.c_void_p):
def __init__(self, e): self._as_parameter_ = e
def from_param(obj): return obj
class RCFNumObj(ctypes.c_void_p):
def __init__(self, e): self._as_parameter_ = e
def from_param(obj): return obj
|
EDUlib/edx-platform | refs/heads/master | cms/djangoapps/contentstore/views/tests/test_library.py | 4 | """
Unit tests for contentstore.views.library
More important high-level tests are in contentstore/tests/test_libraries.py
"""
from unittest import mock
from unittest.mock import patch
import ddt
from django.conf import settings
from django.test.utils import override_settings
from django.urls import reverse
from opaque_keys.edx.locator import CourseKey, LibraryLocator
from organizations.api import get_organization_by_short_name
from organizations.exceptions import InvalidOrganizationException
from cms.djangoapps.contentstore.tests.utils import AjaxEnabledTestClient, CourseTestCase, parse_json
from cms.djangoapps.contentstore.utils import reverse_course_url, reverse_library_url
from cms.djangoapps.course_creators.views import add_user_with_status_granted as grant_course_creator_status
from common.djangoapps.student.roles import LibraryUserRole
from xmodule.modulestore.tests.factories import LibraryFactory
from ..component import get_component_templates
from ..library import get_library_creator_status
LIBRARY_REST_URL = '/library/' # URL for GET/POST requests involving libraries
def make_url_for_lib(key):
""" Get the RESTful/studio URL for testing the given library """
if isinstance(key, LibraryLocator):
key = str(key)
return LIBRARY_REST_URL + key
@ddt.ddt
@mock.patch.dict('django.conf.settings.FEATURES', {'DISABLE_COURSE_CREATION': False})
class UnitTestLibraries(CourseTestCase):
"""
Unit tests for library views
"""
def setUp(self):
super().setUp()
self.client = AjaxEnabledTestClient()
self.client.login(username=self.user.username, password=self.user_password)
######################################################
# Tests for /library/ - list and create libraries:
@mock.patch("cms.djangoapps.contentstore.views.library.LIBRARIES_ENABLED", False)
def test_library_creator_status_libraries_not_enabled(self):
_, nostaff_user = self.create_non_staff_authed_user_client()
self.assertEqual(get_library_creator_status(nostaff_user), False)
@mock.patch("cms.djangoapps.contentstore.views.library.LIBRARIES_ENABLED", True)
def test_library_creator_status_with_is_staff_user(self):
self.assertEqual(get_library_creator_status(self.user), True)
@mock.patch("cms.djangoapps.contentstore.views.library.LIBRARIES_ENABLED", True)
def test_library_creator_status_with_course_creator_role(self):
_, nostaff_user = self.create_non_staff_authed_user_client()
with mock.patch.dict('django.conf.settings.FEATURES', {"ENABLE_CREATOR_GROUP": True}):
grant_course_creator_status(self.user, nostaff_user)
self.assertEqual(get_library_creator_status(nostaff_user), True)
@mock.patch("cms.djangoapps.contentstore.views.library.LIBRARIES_ENABLED", True)
def test_library_creator_status_with_no_course_creator_role(self):
_, nostaff_user = self.create_non_staff_authed_user_client()
self.assertEqual(get_library_creator_status(nostaff_user), True)
@ddt.data(
(False, False, True),
(False, True, False),
(True, False, True),
(True, True, False),
(True, None, False),
(False, None, True)
)
@ddt.unpack
def test_library_creator_status_settings(self, disable_course, disable_library, expected_status):
"""
Ensure that the setting DISABLE_LIBRARY_CREATION overrides DISABLE_COURSE_CREATION as expected.
"""
_, nostaff_user = self.create_non_staff_authed_user_client()
with mock.patch("cms.djangoapps.contentstore.views.library.LIBRARIES_ENABLED", True):
with mock.patch.dict(
"django.conf.settings.FEATURES",
{
"DISABLE_COURSE_CREATION": disable_course,
"DISABLE_LIBRARY_CREATION": disable_library
}
):
self.assertEqual(get_library_creator_status(nostaff_user), expected_status)
@mock.patch.dict('django.conf.settings.FEATURES', {'DISABLE_COURSE_CREATION': True})
@mock.patch("cms.djangoapps.contentstore.views.library.LIBRARIES_ENABLED", True)
def test_library_creator_status_with_no_course_creator_role_and_disabled_nonstaff_course_creation(self):
"""
Ensure that `DISABLE_COURSE_CREATION` feature works with libraries as well.
"""
nostaff_client, nostaff_user = self.create_non_staff_authed_user_client()
self.assertFalse(get_library_creator_status(nostaff_user))
# To be explicit, this user can GET, but not POST
get_response = nostaff_client.get_json(LIBRARY_REST_URL)
post_response = nostaff_client.ajax_post(LIBRARY_REST_URL, {
'org': 'org', 'library': 'lib', 'display_name': "New Library",
})
self.assertEqual(get_response.status_code, 200)
self.assertEqual(post_response.status_code, 403)
@patch("cms.djangoapps.contentstore.views.library.LIBRARIES_ENABLED", False)
def test_with_libraries_disabled(self):
"""
The library URLs should return 404 if libraries are disabled.
"""
response = self.client.get_json(LIBRARY_REST_URL)
self.assertEqual(response.status_code, 404)
def test_list_libraries(self):
"""
Test that we can GET /library/ to list all libraries visible to the current user.
"""
# Create some more libraries
libraries = [LibraryFactory.create() for _ in range(3)]
lib_dict = {lib.location.library_key: lib for lib in libraries}
response = self.client.get_json(LIBRARY_REST_URL)
self.assertEqual(response.status_code, 200)
lib_list = parse_json(response)
self.assertEqual(len(lib_list), len(libraries))
for entry in lib_list:
self.assertIn("library_key", entry)
self.assertIn("display_name", entry)
key = CourseKey.from_string(entry["library_key"])
self.assertIn(key, lib_dict)
self.assertEqual(entry["display_name"], lib_dict[key].display_name)
del lib_dict[key] # To ensure no duplicates are matched
@ddt.data("delete", "put")
def test_bad_http_verb(self, verb):
"""
We should get an error if we do weird requests to /library/
"""
response = getattr(self.client, verb)(LIBRARY_REST_URL)
self.assertEqual(response.status_code, 405)
def test_create_library(self):
""" Create a library. """
response = self.client.ajax_post(LIBRARY_REST_URL, {
'org': 'org',
'library': 'lib',
'display_name': "New Library",
})
self.assertEqual(response.status_code, 200)
# That's all we check. More detailed tests are in contentstore.tests.test_libraries...
@patch.dict('django.conf.settings.FEATURES', {'ENABLE_CREATOR_GROUP': True})
def test_lib_create_permission(self):
"""
Users who are given course creator roles should be able to create libraries.
"""
self.client.logout()
ns_user, password = self.create_non_staff_user()
self.client.login(username=ns_user.username, password=password)
grant_course_creator_status(self.user, ns_user)
response = self.client.ajax_post(LIBRARY_REST_URL, {
'org': 'org', 'library': 'lib', 'display_name': "New Library",
})
self.assertEqual(response.status_code, 200)
@patch.dict('django.conf.settings.FEATURES', {'ENABLE_CREATOR_GROUP': False})
def test_lib_create_permission_no_course_creator_role_and_no_course_creator_group(self):
"""
Users who are not given course creator roles should still be able to create libraries
if ENABLE_CREATOR_GROUP is not enabled.
"""
self.client.logout()
ns_user, password = self.create_non_staff_user()
self.client.login(username=ns_user.username, password=password)
response = self.client.ajax_post(LIBRARY_REST_URL, {
'org': 'org', 'library': 'lib', 'display_name': "New Library",
})
self.assertEqual(response.status_code, 200)
@patch.dict('django.conf.settings.FEATURES', {'ENABLE_CREATOR_GROUP': True})
def test_lib_create_permission_no_course_creator_role_and_course_creator_group(self):
"""
Users who are not given course creator roles should not be able to create libraries
if ENABLE_CREATOR_GROUP is enabled.
"""
self.client.logout()
ns_user, password = self.create_non_staff_user()
self.client.login(username=ns_user.username, password=password)
response = self.client.ajax_post(LIBRARY_REST_URL, {
'org': 'org', 'library': 'lib', 'display_name': "New Library",
})
self.assertEqual(response.status_code, 403)
@ddt.data(
{},
{'org': 'org'},
{'library': 'lib'},
{'org': 'C++', 'library': 'lib', 'display_name': 'Lib with invalid characters in key'},
{'org': 'Org', 'library': 'Wh@t?', 'display_name': 'Lib with invalid characters in key'},
)
def test_create_library_invalid(self, data):
"""
Make sure we are prevented from creating libraries with invalid keys/data
"""
response = self.client.ajax_post(LIBRARY_REST_URL, data)
self.assertEqual(response.status_code, 400)
def test_no_duplicate_libraries(self):
"""
We should not be able to create multiple libraries with the same key
"""
lib = LibraryFactory.create()
lib_key = lib.location.library_key
response = self.client.ajax_post(LIBRARY_REST_URL, {
'org': lib_key.org,
'library': lib_key.library,
'display_name': "A Duplicate key, same as 'lib'",
})
self.assertIn('already a library defined', parse_json(response)['ErrMsg'])
self.assertEqual(response.status_code, 400)
@override_settings(ORGANIZATIONS_AUTOCREATE=True)
def test_library_with_unknown_organization_autocreation(self):
"""
Test that when automatic organization creation is enabled,
creating a content library with an unknown organization auto-creates
said organization.
"""
with self.assertRaises(InvalidOrganizationException):
get_organization_by_short_name("org_xyz")
response = self.client.ajax_post(LIBRARY_REST_URL, {
'org': "org_xyz",
'library': "org_test_lib",
'display_name': "This library's organization doesn't exist... yet.",
})
assert response.status_code == 200
assert get_organization_by_short_name("org_xyz")
@override_settings(ORGANIZATIONS_AUTOCREATE=False)
def test_library_with_unknown_organization_validation_error(self):
"""
Test that when automatic organization creation is disabled,
creating a content library with an unknown organization raises an error.
"""
with self.assertRaises(InvalidOrganizationException):
get_organization_by_short_name("org_xyz")
response = self.client.ajax_post(LIBRARY_REST_URL, {
'org': "org_xyz",
'library': "org_test_lib",
'display_name': "This library's organization doesn't exist!",
})
assert response.status_code == 400
assert "'org_xyz' is not a valid organization identifier" in parse_json(response)['ErrMsg']
with self.assertRaises(InvalidOrganizationException):
get_organization_by_short_name("org_xyz")
######################################################
# Tests for /library/:lib_key/ - get a specific library as JSON or HTML editing view
def test_get_lib_info(self):
"""
Test that we can get data about a library (in JSON format) using /library/:key/
"""
# Create a library
lib_key = LibraryFactory.create().location.library_key
# Re-load the library from the modulestore, explicitly including version information:
lib = self.store.get_library(lib_key, remove_version=False, remove_branch=False)
version = lib.location.library_key.version_guid
self.assertNotEqual(version, None)
response = self.client.get_json(make_url_for_lib(lib_key))
self.assertEqual(response.status_code, 200)
info = parse_json(response)
self.assertEqual(info['display_name'], lib.display_name)
self.assertEqual(info['library_id'], str(lib_key))
self.assertEqual(info['previous_version'], None)
self.assertNotEqual(info['version'], None)
self.assertNotEqual(info['version'], '')
self.assertEqual(info['version'], str(version))
def test_get_lib_edit_html(self):
"""
Test that we can get the studio view for editing a library using /library/:key/
"""
lib = LibraryFactory.create()
response = self.client.get(make_url_for_lib(lib.location.library_key))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "<html")
self.assertContains(response, lib.display_name)
@ddt.data('library-v1:Nonexistent+library', 'course-v1:Org+Course', 'course-v1:Org+Course+Run', 'invalid')
def test_invalid_keys(self, key_str):
"""
Check that various Nonexistent/invalid keys give 404 errors
"""
response = self.client.get_json(make_url_for_lib(key_str))
self.assertEqual(response.status_code, 404)
def test_bad_http_verb_with_lib_key(self):
"""
We should get an error if we do weird requests to /library/
"""
lib = LibraryFactory.create()
for verb in ("post", "delete", "put"):
response = getattr(self.client, verb)(make_url_for_lib(lib.location.library_key))
self.assertEqual(response.status_code, 405)
def test_no_access(self):
user, password = self.create_non_staff_user()
self.client.login(username=user, password=password)
lib = LibraryFactory.create()
response = self.client.get(make_url_for_lib(lib.location.library_key))
self.assertEqual(response.status_code, 403)
def test_get_component_templates(self):
"""
Verify that templates for adding discussion and advanced components to
content libraries are not provided.
"""
lib = LibraryFactory.create()
lib.advanced_modules = ['lti']
lib.save()
templates = [template['type'] for template in get_component_templates(lib, library=True)]
self.assertIn('problem', templates)
self.assertNotIn('discussion', templates)
self.assertNotIn('advanced', templates)
self.assertNotIn('openassessment', templates)
def test_advanced_problem_types(self):
"""
Verify that advanced problem types are not provided in problem component for libraries.
"""
lib = LibraryFactory.create()
lib.save()
problem_type_templates = next(
(
component['templates']
for component in get_component_templates(lib, library=True)
if component['type'] == 'problem'
),
[]
)
        # Each problem template has a category which shows whether the problem
        # is a plain 'problem' or one of the advanced problem types (e.g.
        # drag-and-drop-v2).
problem_type_categories = [problem_template['category'] for problem_template in problem_type_templates]
for advance_problem_type in settings.ADVANCED_PROBLEM_TYPES:
self.assertNotIn(advance_problem_type['component'], problem_type_categories)
def test_manage_library_users(self):
"""
Simple test that the Library "User Access" view works.
Also tests that we can use the REST API to assign a user to a library.
"""
library = LibraryFactory.create()
extra_user, _ = self.create_non_staff_user()
manage_users_url = reverse_library_url('manage_library_users', str(library.location.library_key))
response = self.client.get(manage_users_url)
self.assertEqual(response.status_code, 200)
# extra_user has not been assigned to the library so should not show up in the list:
self.assertNotContains(response, extra_user.username)
# Now add extra_user to the library:
user_details_url = reverse_course_url(
'course_team_handler',
library.location.library_key, kwargs={'email': extra_user.email}
)
edit_response = self.client.ajax_post(user_details_url, {"role": LibraryUserRole.ROLE})
self.assertIn(edit_response.status_code, (200, 204))
        # Now extra_user should appear in the list:
response = self.client.get(manage_users_url)
self.assertEqual(response.status_code, 200)
self.assertContains(response, extra_user.username)
def test_component_limits(self):
"""
Test that component limits in libraries are respected.
"""
with self.settings(MAX_BLOCKS_PER_CONTENT_LIBRARY=1):
library = LibraryFactory.create()
data = {
'parent_locator': str(library.location),
'category': 'html'
}
response = self.client.ajax_post(reverse('xblock_handler'), data)
self.assertEqual(response.status_code, 200)
# Adding another component should cause failure:
response = self.client.ajax_post(reverse('xblock_handler'), data)
self.assertEqual(response.status_code, 400)
self.assertIn('cannot have more than 1 component', parse_json(response)['error'])
|
Soya93/Extract-Refactoring | refs/heads/master | python/testData/completion/globalName.py | 83 | global foo<caret> |
scorpionis/docklet | refs/heads/master | client/venv/lib/python3.5/site-packages/setuptools/command/upload.py | 210 | import getpass
from distutils.command import upload as orig
class upload(orig.upload):
"""
Override default upload behavior to obtain password
in a variety of different ways.
"""
def finalize_options(self):
orig.upload.finalize_options(self)
        # Attempt to obtain the password, short-circuiting at the first
        # sign of success.
self.password = (
self.password or
self._load_password_from_keyring() or
self._prompt_for_password()
)
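        # Because `or` short-circuits, a password supplied on the command
        # line suppresses both the keyring lookup and the interactive prompt.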
def _load_password_from_keyring(self):
"""
Attempt to load password from keyring. Suppress Exceptions.
"""
try:
keyring = __import__('keyring')
return keyring.get_password(self.repository, self.username)
except Exception:
pass
def _prompt_for_password(self):
"""
Prompt for a password on the tty. Suppress Exceptions.
"""
try:
return getpass.getpass()
except (Exception, KeyboardInterrupt):
pass
|
mandliya/algorithms_and_ds_playground | refs/heads/master | tree_problems/closest_bst_value.py | 1 | '''
Given a non-empty binary search tree and a target value,
find the value in the BST that is closest to the target.
Note that the target value is a floating-point number.
There is exactly one value in the tree that is closest to the target.
'''
import sys
class TreeNode:
'''
A tree's node representation
'''
def __init__(self, data):
self.data = data
self.left = None
self.right = None
def closest_diff_util(node, target, min_diff, min_diff_data):
''' Util function to find the node closest to target
Params:
node -- node in the current BST tree
target -- The value to which closest node needs to be found.
min_diff -- minimum difference between target and the nodes we have iterated so far
min_diff_data -- the value of node which is closest to target so far.
'''
    if node is None:
return
# If the target itself is present in the tree.
if node.data == target:
min_diff_data[0] = target
return
if min_diff > abs(node.data - target):
min_diff = abs(node.data - target)
min_diff_data[0] = node.data
if target < node.data:
closest_diff_util(node.left, target, min_diff, min_diff_data)
else:
closest_diff_util(node.right, target, min_diff, min_diff_data)
def closest_diff(root, target):
'''Function to find the node closest to target
Params:
root -- the root node of the tree
target -- the value to which closest node needs to be found.
'''
min_diff, min_diff_data = sys.maxsize, [-1]
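    # min_diff_data is a one-element list so the recursive helper can mutate it
    # in place; rebinding a plain int inside the helper would not propagate back.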
closest_diff_util(root, target, min_diff, min_diff_data)
return min_diff_data[0]
def closest_diff_iter(root, target):
'''Function to find the node closest to target iteratively
Params:
root -- the root node of the tree
target -- the value to which closest node needs to be found.
'''
if not root:
return sys.maxsize
closest = root.data
while root:
if abs(target - closest) >= abs(target - root.data):
closest = root.data
root = root.left if target < root.data else root.right
return closest
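# Worked example: for the tree built in __main__ below and target 6.6779, the
# search visits 10 -> 5 -> 7, so both versions return 7.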
def inorder(root):
"""Print inorder traversal of the tree.
Param:
root -- root of the tree.
"""
if not root:
return
inorder(root.left)
print(root.data, end=" ")
inorder(root.right)
if __name__ == '__main__':
    root = TreeNode(10)
    root.left = TreeNode(5)
    root.right = TreeNode(15)
    root.left.left = TreeNode(2)
    root.left.right = TreeNode(7)
    root.right.left = TreeNode(12)
    root.right.right = TreeNode(16)
print("Inorder traversal of the tree:")
inorder(root)
print()
print("Closest value in the tree (recursively) to 6.6779 : ", closest_diff(root, 6.6779))
print("Closest value in the tree (iteratively) to 6.6779 : ", closest_diff_iter(root, 6.6779)) |
willusher/ansible-modules-core | refs/heads/devel | cloud/linode/linode.py | 37 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: linode
short_description: create / delete / stop / restart an instance in Linode Public Cloud
description:
- creates / deletes a Linode Public Cloud instance and optionally waits for it to be 'running'.
version_added: "1.3"
options:
state:
description:
- Indicate desired state of the resource
choices: ['present', 'active', 'started', 'absent', 'deleted', 'stopped', 'restarted']
default: present
api_key:
description:
- Linode API key
default: null
name:
description:
- Name to give the instance (alphanumeric, dashes, underscore)
      - To keep the Linode Web Console readable, the name is prefixed with the LinodeID and an underscore
default: null
type: string
linode_id:
description:
- Unique ID of a linode server
aliases: [ 'lid' ]
default: null
type: integer
plan:
description:
- plan to use for the instance (Linode plan)
default: null
type: integer
payment_term:
description:
- payment term to use for the instance (payment term in months)
default: 1
type: integer
choices: [1, 12, 24]
password:
description:
- root password to apply to a new server (auto generated if missing)
default: null
type: string
ssh_pub_key:
description:
- SSH public key applied to root user
default: null
type: string
swap:
description:
- swap size in MB
default: 512
type: integer
distribution:
description:
- distribution to use for the instance (Linode Distribution)
default: null
type: integer
datacenter:
description:
- datacenter to create an instance in (Linode Datacenter)
default: null
type: integer
wait:
description:
- wait for the instance to be in state 'running' before returning
default: "no"
choices: [ "yes", "no" ]
wait_timeout:
description:
- how long before wait gives up, in seconds
default: 300
requirements:
- "python >= 2.6"
- "linode-python"
- "pycurl"
author: "Vincent Viallet (@zbal)"
notes:
- LINODE_API_KEY env variable can be used instead
'''
EXAMPLES = '''
# Create a server
- local_action:
module: linode
api_key: 'longStringFromLinodeApi'
name: linode-test1
plan: 1
datacenter: 2
distribution: 99
password: 'superSecureRootPassword'
ssh_pub_key: 'ssh-rsa qwerty'
swap: 768
wait: yes
wait_timeout: 600
state: present
# Ensure a running server (create if missing)
- local_action:
module: linode
api_key: 'longStringFromLinodeApi'
name: linode-test1
linode_id: 12345678
plan: 1
datacenter: 2
distribution: 99
password: 'superSecureRootPassword'
ssh_pub_key: 'ssh-rsa qwerty'
swap: 768
wait: yes
wait_timeout: 600
state: present
# Delete a server
- local_action:
module: linode
api_key: 'longStringFromLinodeApi'
name: linode-test1
linode_id: 12345678
state: absent
# Stop a server
- local_action:
module: linode
api_key: 'longStringFromLinodeApi'
name: linode-test1
linode_id: 12345678
state: stopped
# Reboot a server
- local_action:
module: linode
api_key: 'longStringFromLinodeApi'
name: linode-test1
linode_id: 12345678
state: restarted
'''
import time
import os
try:
import pycurl
HAS_PYCURL = True
except ImportError:
HAS_PYCURL = False
try:
from linode import api as linode_api
HAS_LINODE = True
except ImportError:
HAS_LINODE = False
def randompass():
'''
    Generate a long random password that complies with Linode requirements
'''
# Linode API currently requires the following:
# It must contain at least two of these four character classes:
# lower case letters - upper case letters - numbers - punctuation
# we play it safe :)
import random
import string
# as of python 2.4, this reseeds the PRNG from urandom
random.seed()
lower = ''.join(random.choice(string.ascii_lowercase) for x in range(6))
upper = ''.join(random.choice(string.ascii_uppercase) for x in range(6))
number = ''.join(random.choice(string.digits) for x in range(6))
punct = ''.join(random.choice(string.punctuation) for x in range(6))
p = lower + upper + number + punct
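    # random.sample(p, len(p)) returns a shuffled permutation of all 24
    # characters, so the result always contains six from each character class.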
return ''.join(random.sample(p, len(p)))
def getInstanceDetails(api, server):
'''
Return the details of an instance, populating IPs, etc.
'''
instance = {'id': server['LINODEID'],
'name': server['LABEL'],
'public': [],
'private': []}
# Populate with ips
for ip in api.linode_ip_list(LinodeId=server['LINODEID']):
if ip['ISPUBLIC'] and 'ipv4' not in instance:
instance['ipv4'] = ip['IPADDRESS']
instance['fqdn'] = ip['RDNS_NAME']
if ip['ISPUBLIC']:
instance['public'].append({'ipv4': ip['IPADDRESS'],
'fqdn': ip['RDNS_NAME'],
'ip_id': ip['IPADDRESSID']})
else:
instance['private'].append({'ipv4': ip['IPADDRESS'],
'fqdn': ip['RDNS_NAME'],
'ip_id': ip['IPADDRESSID']})
return instance
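# Illustrative return value (all field values hypothetical):
#   {'id': 12345678, 'name': '12345678_linode-test1',
#    'ipv4': '203.0.113.10', 'fqdn': 'li1234-10.members.linode.com',
#    'public': [{'ipv4': '203.0.113.10',
#                'fqdn': 'li1234-10.members.linode.com', 'ip_id': 111}],
#    'private': []}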
def linodeServers(module, api, state, name, plan, distribution, datacenter, linode_id,
payment_term, password, ssh_pub_key, swap, wait, wait_timeout):
instances = []
changed = False
new_server = False
servers = []
disks = []
configs = []
jobs = []
# See if we can match an existing server details with the provided linode_id
if linode_id:
# For the moment we only consider linode_id as criteria for match
# Later we can use more (size, name, etc.) and update existing
servers = api.linode_list(LinodeId=linode_id)
# Attempt to fetch details about disks and configs only if servers are
# found with linode_id
if servers:
disks = api.linode_disk_list(LinodeId=linode_id)
configs = api.linode_config_list(LinodeId=linode_id)
# Act on the state
if state in ('active', 'present', 'started'):
# TODO: validate all the plan / distribution / datacenter are valid
# Multi step process/validation:
# - need linode_id (entity)
# - need disk_id for linode_id - create disk from distrib
# - need config_id for linode_id - create config (need kernel)
# Any create step triggers a job that need to be waited for.
if not servers:
for arg in ('name', 'plan', 'distribution', 'datacenter'):
if not eval(arg):
module.fail_json(msg='%s is required for active state' % arg)
# Create linode entity
new_server = True
try:
res = api.linode_create(DatacenterID=datacenter, PlanID=plan,
PaymentTerm=payment_term)
linode_id = res['LinodeID']
# Update linode Label to match name
api.linode_update(LinodeId=linode_id, Label='%s_%s' % (linode_id, name))
# Save server
servers = api.linode_list(LinodeId=linode_id)
except Exception as e:
module.fail_json(msg = '%s' % e.value[0]['ERRORMESSAGE'])
if not disks:
for arg in ('name', 'linode_id', 'distribution'):
if not eval(arg):
module.fail_json(msg='%s is required for active state' % arg)
# Create disks (1 from distrib, 1 for SWAP)
new_server = True
try:
if not password:
# Password is required on creation, if not provided generate one
password = randompass()
if not swap:
swap = 512
# Create data disk
size = servers[0]['TOTALHD'] - swap
if ssh_pub_key:
res = api.linode_disk_createfromdistribution(
LinodeId=linode_id, DistributionID=distribution,
rootPass=password, rootSSHKey=ssh_pub_key,
Label='%s data disk (lid: %s)' % (name, linode_id), Size=size)
else:
res = api.linode_disk_createfromdistribution(
LinodeId=linode_id, DistributionID=distribution, rootPass=password,
Label='%s data disk (lid: %s)' % (name, linode_id), Size=size)
jobs.append(res['JobID'])
# Create SWAP disk
res = api.linode_disk_create(LinodeId=linode_id, Type='swap',
Label='%s swap disk (lid: %s)' % (name, linode_id),
Size=swap)
jobs.append(res['JobID'])
except Exception as e:
# TODO: destroy linode ?
module.fail_json(msg = '%s' % e.value[0]['ERRORMESSAGE'])
if not configs:
for arg in ('name', 'linode_id', 'distribution'):
if not eval(arg):
module.fail_json(msg='%s is required for active state' % arg)
# Check architecture
for distrib in api.avail_distributions():
if distrib['DISTRIBUTIONID'] != distribution:
continue
arch = '32'
if distrib['IS64BIT']:
arch = '64'
break
# Get latest kernel matching arch
for kernel in api.avail_kernels():
if not kernel['LABEL'].startswith('Latest %s' % arch):
continue
kernel_id = kernel['KERNELID']
break
# Get disk list
disks_id = []
for disk in api.linode_disk_list(LinodeId=linode_id):
if disk['TYPE'] == 'ext3':
disks_id.insert(0, str(disk['DISKID']))
continue
disks_id.append(str(disk['DISKID']))
            # Pad the list to the 9 entries the Disklist parameter expects
while len(disks_id) < 9:
disks_id.append('')
disks_list = ','.join(disks_id)
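            # Illustrative result (hypothetical disk IDs): '1001,1002,,,,,,,'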
# Create config
new_server = True
try:
api.linode_config_create(LinodeId=linode_id, KernelId=kernel_id,
Disklist=disks_list, Label='%s config' % name)
configs = api.linode_config_list(LinodeId=linode_id)
except Exception as e:
module.fail_json(msg = '%s' % e.value[0]['ERRORMESSAGE'])
# Start / Ensure servers are running
for server in servers:
# Refresh server state
server = api.linode_list(LinodeId=server['LINODEID'])[0]
# Ensure existing servers are up and running, boot if necessary
if server['STATUS'] != 1:
res = api.linode_boot(LinodeId=linode_id)
jobs.append(res['JobID'])
changed = True
# wait here until the instances are up
wait_timeout = time.time() + wait_timeout
while wait and wait_timeout > time.time():
# refresh the server details
server = api.linode_list(LinodeId=server['LINODEID'])[0]
# status:
# -2: Boot failed
# 1: Running
if server['STATUS'] in (-2, 1):
break
time.sleep(5)
if wait and wait_timeout <= time.time():
# waiting took too long
module.fail_json(msg = 'Timeout waiting on %s (lid: %s)' %
(server['LABEL'], server['LINODEID']))
# Get a fresh copy of the server details
server = api.linode_list(LinodeId=server['LINODEID'])[0]
if server['STATUS'] == -2:
module.fail_json(msg = '%s (lid: %s) failed to boot' %
(server['LABEL'], server['LINODEID']))
# From now on we know the task is a success
# Build instance report
instance = getInstanceDetails(api, server)
# depending on wait flag select the status
if wait:
instance['status'] = 'Running'
else:
instance['status'] = 'Starting'
# Return the root password if this is a new box and no SSH key
# has been provided
if new_server and not ssh_pub_key:
instance['password'] = password
instances.append(instance)
    elif state in ('stopped',):
for arg in ('name', 'linode_id'):
if not eval(arg):
                module.fail_json(msg='%s is required for stopped state' % arg)
if not servers:
module.fail_json(msg = 'Server %s (lid: %s) not found' % (name, linode_id))
for server in servers:
instance = getInstanceDetails(api, server)
if server['STATUS'] != 2:
try:
res = api.linode_shutdown(LinodeId=linode_id)
except Exception as e:
module.fail_json(msg = '%s' % e.value[0]['ERRORMESSAGE'])
instance['status'] = 'Stopping'
changed = True
else:
instance['status'] = 'Stopped'
instances.append(instance)
    elif state in ('restarted',):
for arg in ('name', 'linode_id'):
if not eval(arg):
                module.fail_json(msg='%s is required for restarted state' % arg)
if not servers:
module.fail_json(msg = 'Server %s (lid: %s) not found' % (name, linode_id))
for server in servers:
instance = getInstanceDetails(api, server)
try:
res = api.linode_reboot(LinodeId=server['LINODEID'])
except Exception as e:
module.fail_json(msg = '%s' % e.value[0]['ERRORMESSAGE'])
instance['status'] = 'Restarting'
changed = True
instances.append(instance)
elif state in ('absent', 'deleted'):
for server in servers:
instance = getInstanceDetails(api, server)
try:
api.linode_delete(LinodeId=server['LINODEID'], skipChecks=True)
except Exception as e:
module.fail_json(msg = '%s' % e.value[0]['ERRORMESSAGE'])
instance['status'] = 'Deleting'
changed = True
instances.append(instance)
# Ease parsing if only 1 instance
if len(instances) == 1:
module.exit_json(changed=changed, instance=instances[0])
module.exit_json(changed=changed, instances=instances)
def main():
module = AnsibleModule(
argument_spec = dict(
state = dict(default='present', choices=['active', 'present', 'started',
'deleted', 'absent', 'stopped',
'restarted']),
api_key = dict(),
name = dict(type='str'),
plan = dict(type='int'),
distribution = dict(type='int'),
datacenter = dict(type='int'),
linode_id = dict(type='int', aliases=['lid']),
payment_term = dict(type='int', default=1, choices=[1, 12, 24]),
password = dict(type='str'),
ssh_pub_key = dict(type='str'),
swap = dict(type='int', default=512),
wait = dict(type='bool', default=True),
wait_timeout = dict(default=300),
)
)
if not HAS_PYCURL:
module.fail_json(msg='pycurl required for this module')
if not HAS_LINODE:
module.fail_json(msg='linode-python required for this module')
state = module.params.get('state')
api_key = module.params.get('api_key')
name = module.params.get('name')
plan = module.params.get('plan')
distribution = module.params.get('distribution')
datacenter = module.params.get('datacenter')
linode_id = module.params.get('linode_id')
payment_term = module.params.get('payment_term')
password = module.params.get('password')
ssh_pub_key = module.params.get('ssh_pub_key')
swap = module.params.get('swap')
wait = module.params.get('wait')
wait_timeout = int(module.params.get('wait_timeout'))
# Setup the api_key
if not api_key:
try:
api_key = os.environ['LINODE_API_KEY']
except KeyError as e:
            module.fail_json(msg = 'Unable to load %s' % e)
# setup the auth
try:
api = linode_api.Api(api_key)
api.test_echo()
except Exception as e:
module.fail_json(msg = '%s' % e.value[0]['ERRORMESSAGE'])
linodeServers(module, api, state, name, plan, distribution, datacenter, linode_id,
payment_term, password, ssh_pub_key, swap, wait, wait_timeout)
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
|
common-workflow-language/cwltool | refs/heads/main | tests/test_environment.py | 1 | """Test passing of environment variables to tools."""
import os
from abc import ABC, abstractmethod
from pathlib import Path
from typing import Any, Callable, Dict, List, Mapping, Union
import pytest
from cwltool.singularity import get_version
from .util import env_accepts_null, get_tool_env, needs_docker, needs_singularity
# None => accept anything, just require the key is present
# str => string equality
# Callable => call the function with the value - True => OK, False => fail
# TODO: maybe add regex?
Env = Mapping[str, str]
CheckerTypes = Union[None, str, Callable[[str, Env], bool]]
EnvChecks = Dict[str, CheckerTypes]
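# A minimal illustration of the three checker kinds (variable names here are
# hypothetical and not used by the tests below):
_EXAMPLE_CHECKS: EnvChecks = {
    "PRESENT_ONLY": None,  # the key must exist; any value is accepted
    "EXACT": "expected-value",  # the value must equal this string
    "PREFIX": lambda v, _env: v.startswith("/tmp"),  # arbitrary predicate
}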
def assert_envvar_matches(check: CheckerTypes, k: str, env: Mapping[str, str]) -> None:
"""Assert that the check is satisfied by the key in the env."""
if check is None:
pass
else:
v = env[k]
if isinstance(check, str):
assert v == check, f'Environment variable {k} == "{v}" != "{check}"'
else:
assert check(v, env), f'Environment variable {k}="{v}" fails check'
def assert_env_matches(
checks: EnvChecks, env: Mapping[str, str], allow_unexpected: bool = False
) -> None:
"""Assert that all checks are satisfied by the Mapping.
Optional flag `allow_unexpected` (default = False) will allow the
Mapping to contain extra keys which are not checked.
"""
e = dict(env)
for k, check in checks.items():
v = e.pop(k)
assert_envvar_matches(check, k, env)
if not allow_unexpected:
# If we have to use env4.cwl, there may be unwanted variables
# (see cwltool.env_to_stdout docstrings).
# LC_CTYPE if platform has glibc
# __CF_USER_TEXT_ENCODING on macOS
if not env_accepts_null():
e.pop("LC_CTYPE", None)
e.pop("__CF_USER_TEXT_ENCODING", None)
assert len(e) == 0, f"Unexpected environment variable(s): {', '.join(e.keys())}"
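# For example (hypothetical values):
#   assert_env_matches({"HOME": None}, {"HOME": "/root"})  # passes
#   assert_env_matches({"HOME": None}, {"HOME": "/root", "X": "1"})  # raises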
class CheckHolder(ABC):
"""Base class for check factory functions and other data required to parametrize the tests below."""
@staticmethod
@abstractmethod
def checks(tmp_prefix: str) -> EnvChecks:
"""Return a mapping from environment variable names to how to check for correctness."""
pass
# Any flags to pass to cwltool to force use of the correct container
flags: List[str]
# Does the env tool (maybe in our container) accept a `-0` flag?
env_accepts_null: bool
class NoContainer(CheckHolder):
"""No containers at all, just run in the host."""
@staticmethod
def checks(tmp_prefix: str) -> EnvChecks:
"""Create checks."""
return {
"TMPDIR": lambda v, _: v.startswith(tmp_prefix),
"HOME": lambda v, _: v.startswith(tmp_prefix),
"PATH": os.environ["PATH"],
}
flags = ["--no-container"]
env_accepts_null = env_accepts_null()
class Docker(CheckHolder):
"""Run in a Docker container."""
@staticmethod
def checks(tmp_prefix: str) -> EnvChecks:
"""Create checks."""
def HOME(v: str, env: Env) -> bool:
# Want /whatever
parts = os.path.split(v)
return len(parts) == 2 and parts[0] == "/"
return {
"HOME": HOME,
"TMPDIR": "/tmp",
"PATH": "/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin",
"HOSTNAME": None,
}
flags = ["--default-container=debian"]
env_accepts_null = True
class Singularity(CheckHolder):
"""Run in a Singularity container."""
@staticmethod
def checks(tmp_prefix: str) -> EnvChecks:
"""Create checks."""
def PWD(v: str, env: Env) -> bool:
return v == env["HOME"]
ans: EnvChecks = {
"HOME": None,
"LANG": "C",
"LD_LIBRARY_PATH": None,
"PATH": None,
"PROMPT_COMMAND": None,
"PS1": None,
"PWD": PWD,
"TMPDIR": "/tmp",
}
# Singularity variables appear to be in flux somewhat.
version = get_version().split(".")
vmajor = int(version[0])
assert vmajor == 3, "Tests only work for Singularity 3"
vminor = int(version[1])
sing_vars: EnvChecks = {
"SINGULARITY_CONTAINER": None,
"SINGULARITY_ENVIRONMENT": None,
"SINGULARITY_NAME": None,
}
if vminor < 6:
sing_vars["SINGULARITY_INIT"] = "1"
else:
sing_vars["SINGULARITY_COMMAND"] = "exec"
if vminor >= 7:
def BIND(v: str, env: Env) -> bool:
return v.startswith(tmp_prefix) and v.endswith(":/tmp")
sing_vars["SINGULARITY_BIND"] = BIND
ans.update(sing_vars)
# Singularity automatically passes some variables through, if
# they exist. This seems to be constant from 3.1 but isn't
# documented (see source /internal/pkg/util/env/clean.go).
autopass = (
"ALL_PROXY",
"FTP_PROXY",
"HTTP_PROXY",
"HTTPS_PROXY",
"NO_PROXY",
"TERM",
)
for vname in autopass:
if vname in os.environ:
ans[vname] = os.environ[vname]
return ans
flags = ["--default-container=debian", "--singularity"]
env_accepts_null = True
# CRT = container runtime
CRT_PARAMS = pytest.mark.parametrize(
"crt_params",
[
NoContainer(),
pytest.param(Docker(), marks=needs_docker),
pytest.param(Singularity(), marks=needs_singularity),
],
)
@CRT_PARAMS
def test_basic(crt_params: CheckHolder, tmp_path: Path, monkeypatch: Any) -> None:
"""Test that basic env vars (only) show up."""
tmp_prefix = str(tmp_path / "canary")
extra_env = {
"USEDVAR": "VARVAL",
"UNUSEDVAR": "VARVAL",
}
args = crt_params.flags + [f"--tmpdir-prefix={tmp_prefix}"]
env = get_tool_env(
tmp_path,
args,
extra_env=extra_env,
monkeypatch=monkeypatch,
runtime_env_accepts_null=crt_params.env_accepts_null,
)
checks = crt_params.checks(tmp_prefix)
assert_env_matches(checks, env)
@CRT_PARAMS
def test_preserve_single(
crt_params: CheckHolder, tmp_path: Path, monkeypatch: Any
) -> None:
"""Test that preserving a single env var works."""
tmp_prefix = str(tmp_path / "canary")
extra_env = {
"USEDVAR": "VARVAL",
"UNUSEDVAR": "VARVAL",
}
args = crt_params.flags + [
f"--tmpdir-prefix={tmp_prefix}",
"--preserve-environment=USEDVAR",
]
env = get_tool_env(
tmp_path,
args,
extra_env=extra_env,
monkeypatch=monkeypatch,
runtime_env_accepts_null=crt_params.env_accepts_null,
)
checks = crt_params.checks(tmp_prefix)
checks["USEDVAR"] = extra_env["USEDVAR"]
assert_env_matches(checks, env)
@CRT_PARAMS
def test_preserve_all(
crt_params: CheckHolder, tmp_path: Path, monkeypatch: Any
) -> None:
"""Test that preserving all works."""
tmp_prefix = str(tmp_path / "canary")
extra_env = {
"USEDVAR": "VARVAL",
"UNUSEDVAR": "VARVAL",
}
args = crt_params.flags + [
f"--tmpdir-prefix={tmp_prefix}",
"--preserve-entire-environment",
]
env = get_tool_env(
tmp_path,
args,
extra_env=extra_env,
monkeypatch=monkeypatch,
runtime_env_accepts_null=crt_params.env_accepts_null,
)
checks = crt_params.checks(tmp_prefix)
checks.update(extra_env)
for vname, val in env.items():
try:
assert_envvar_matches(checks[vname], vname, env)
except KeyError:
assert val == os.environ[vname]
except AssertionError:
if vname == "HOME" or vname == "TMPDIR":
# These MUST be OK
raise
            # Other variables can be overridden
assert val == os.environ[vname]
|
wlamond/scikit-learn | refs/heads/master | examples/feature_selection/plot_feature_selection_pipeline.py | 58 | """
==================
Pipeline Anova SVM
==================
Simple usage of a Pipeline that successively runs univariate feature
selection with ANOVA and then a C-SVM on the selected features.
"""
from sklearn import svm
from sklearn.datasets import samples_generator
from sklearn.feature_selection import SelectKBest, f_regression
from sklearn.pipeline import make_pipeline
from sklearn.model_selection import train_test_split
from sklearn.metrics import classification_report
print(__doc__)
# import some data to play with
X, y = samples_generator.make_classification(
n_features=20, n_informative=3, n_redundant=0, n_classes=4,
n_clusters_per_class=2)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=42)
# ANOVA SVM-C
# 1) anova filter, take 3 best ranked features
anova_filter = SelectKBest(f_regression, k=3)
# 2) svm
clf = svm.SVC(kernel='linear')
anova_svm = make_pipeline(anova_filter, clf)
anova_svm.fit(X_train, y_train)
y_pred = anova_svm.predict(X_test)
print(classification_report(y_test, y_pred))
|
google/tf-quant-finance | refs/heads/master | tf_quant_finance/__init__.py | 1 | # Lint as: python3
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""TensorFlow Quantitative Finance."""
import sys
# We need to put some imports inside a function call below, and the function
# call needs to come before the *actual* imports that populate the
# tf_quant_finance namespace. Hence, we disable this lint check throughout
# the file.
#
# pylint: disable=g-import-not-at-top
# Update this whenever we need to depend on a newer TensorFlow release.
_REQUIRED_TENSORFLOW_VERSION = "2.3" # pylint: disable=g-statement-before-imports
# Ensure Python 3 is used.
def _check_py_version():
if sys.version_info[0] < 3:
raise Exception("Please use Python 3. Python 2 is not supported.")
# Ensure TensorFlow is importable and its version is sufficiently recent. This
# needs to happen before anything else, since the imports below will try to
# import tensorflow, too.
def _ensure_tf_install(): # pylint: disable=g-statement-before-imports
"""Attempt to import tensorflow, and ensure its version is sufficient.
Raises:
ImportError: if either tensorflow is not importable or its version is
inadequate.
"""
try:
import tensorflow.compat.v2 as tf
except ImportError:
# Print more informative error message, then reraise.
print("\n\nFailed to import TensorFlow. Please note that TensorFlow is not "
"installed by default when you install TF Quant Finance library. "
"This is so that users can decide whether to install the GPU-enabled "
"TensorFlow package. To use TF Quant Finance library, please install "
"the most recent version of TensorFlow, by following instructions at "
"https://tensorflow.org/install.\n\n")
raise
import distutils.version
if (distutils.version.LooseVersion(tf.__version__) <
distutils.version.LooseVersion(_REQUIRED_TENSORFLOW_VERSION)):
raise ImportError(
"This version of TF Quant Finance library requires TensorFlow "
"version >= {required}; Detected an installation of version {present}. "
"Please upgrade TensorFlow to proceed.".format(
required=_REQUIRED_TENSORFLOW_VERSION, present=tf.__version__))
_check_py_version()
_ensure_tf_install()
from tf_quant_finance import black_scholes
from tf_quant_finance import datetime
from tf_quant_finance import experimental
from tf_quant_finance import math
from tf_quant_finance import models
from tf_quant_finance import rates
from tf_quant_finance import types
from tf_quant_finance import utils
from tensorflow.python.util.all_util import remove_undocumented # pylint: disable=g-direct-tensorflow-import
_allowed_symbols = [
"black_scholes",
"datetime",
"experimental",
"math",
"models",
"rates",
"types",
"utils",
]
remove_undocumented(__name__, _allowed_symbols)
|
mastizada/kuma | refs/heads/master | vendor/packages/translate-toolkit/translate/storage/__init__.py | 6 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2002-2005 Zuza Software Foundation
#
# This file is part of translate.
#
# translate is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# translate is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with translate; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""translate.storage is part of the translate package
It contains classes that represent various storage formats for localization
@group Primary Localisation: xliff pypo cpo pocommon po poheader poparser base factory
@group Bilingual: ts2 ts oo lisa tmx tbx wordfast qph poxliff
@group Monolingual: dtd properties ini rc ical csvl10n html php txt subtitles symbian tiki skype
@group OpenDocument Format: xml_extract odf*
@group Binary: qm mo
@group Version Control: versioncontrol
@group Placeables: placeables
@group Other file processing: directory xpi zip statsdb statistics
@group Other: benchmark tmdb xml_name
"""
|
sdlBasic/sdlbrt | refs/heads/master | win32/mingw/opt/lib/python2.7/distutils/command/install_egg_info.py | 438 | """distutils.command.install_egg_info
Implements the Distutils 'install_egg_info' command, for installing
a package's PKG-INFO metadata."""
from distutils.cmd import Command
from distutils import log, dir_util
import os, sys, re
class install_egg_info(Command):
"""Install an .egg-info file for the package"""
description = "Install package's PKG-INFO metadata as an .egg-info file"
user_options = [
('install-dir=', 'd', "directory to install to"),
]
def initialize_options(self):
self.install_dir = None
def finalize_options(self):
self.set_undefined_options('install_lib',('install_dir','install_dir'))
basename = "%s-%s-py%s.egg-info" % (
to_filename(safe_name(self.distribution.get_name())),
to_filename(safe_version(self.distribution.get_version())),
sys.version[:3]
)
self.target = os.path.join(self.install_dir, basename)
self.outputs = [self.target]
def run(self):
target = self.target
if os.path.isdir(target) and not os.path.islink(target):
dir_util.remove_tree(target, dry_run=self.dry_run)
elif os.path.exists(target):
self.execute(os.unlink,(self.target,),"Removing "+target)
elif not os.path.isdir(self.install_dir):
self.execute(os.makedirs, (self.install_dir,),
"Creating "+self.install_dir)
log.info("Writing %s", target)
if not self.dry_run:
f = open(target, 'w')
self.distribution.metadata.write_pkg_file(f)
f.close()
def get_outputs(self):
return self.outputs
# The following routines are taken from setuptools' pkg_resources module and
# can be replaced by importing them from pkg_resources once it is included
# in the stdlib.
def safe_name(name):
"""Convert an arbitrary string to a standard distribution name
Any runs of non-alphanumeric/. characters are replaced with a single '-'.
"""
return re.sub('[^A-Za-z0-9.]+', '-', name)
def safe_version(version):
"""Convert an arbitrary string to a standard version string
Spaces become dots, and all other non-alphanumeric characters become
dashes, with runs of multiple dashes condensed to a single dash.
"""
version = version.replace(' ','.')
return re.sub('[^A-Za-z0-9.]+', '-', version)
def to_filename(name):
"""Convert a project or version name to its filename-escaped form
Any '-' characters are currently replaced with '_'.
"""
return name.replace('-','_')
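# Examples of the conversions above (illustrative):
#   safe_name('foo_bar')        -> 'foo-bar'
#   safe_version('1.0 beta 2')  -> '1.0.beta.2'
#   to_filename('my-package')   -> 'my_package'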
|
argilo/nrsc-5 | refs/heads/master | hd_tx_hackrf.py | 1 | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
##################################################
# GNU Radio Python Flow Graph
# Title: Hd Tx Hackrf
# Generated: Wed Aug 9 22:01:16 2017
##################################################
from gnuradio import analog
from gnuradio import blocks
from gnuradio import digital
from gnuradio import eng_notation
from gnuradio import fft
from gnuradio import filter
from gnuradio import gr
from gnuradio.eng_option import eng_option
from gnuradio.fft import window
from gnuradio.filter import firdes
from optparse import OptionParser
import math
import osmosdr
import time
class hd_tx_hackrf(gr.top_block):
def __init__(self):
gr.top_block.__init__(self, "Hd Tx Hackrf")
##################################################
# Variables
##################################################
self.samp_rate = samp_rate = 2000000
self.freq = freq = 95.7e6
##################################################
# Blocks
##################################################
self.rational_resampler_xxx_2 = filter.rational_resampler_ccc(
interpolation=256,
decimation=243,
taps=None,
fractional_bw=None,
)
self.rational_resampler_xxx_1 = filter.rational_resampler_ccc(
interpolation=125,
decimation=49,
taps=None,
fractional_bw=None,
)
self.rational_resampler_xxx_0 = filter.rational_resampler_ccc(
interpolation=samp_rate / 200000,
decimation=1,
taps=None,
fractional_bw=None,
)
self.osmosdr_sink_0 = osmosdr.sink( args="numchan=" + str(1) + " " + '' )
self.osmosdr_sink_0.set_sample_rate(samp_rate)
self.osmosdr_sink_0.set_center_freq(freq, 0)
self.osmosdr_sink_0.set_freq_corr(0, 0)
self.osmosdr_sink_0.set_gain(0, 0)
self.osmosdr_sink_0.set_if_gain(40, 0)
self.osmosdr_sink_0.set_bb_gain(0, 0)
self.osmosdr_sink_0.set_antenna('', 0)
self.osmosdr_sink_0.set_bandwidth(1.5e6, 0)
self.low_pass_filter_0 = filter.fir_filter_ccf(1, firdes.low_pass(
1, samp_rate, 80000, 20000, firdes.WIN_HAMMING, 6.76))
self.fft_vxx_0 = fft.fft_vcc(2048, False, (window.rectangular(2048)), True, 1)
self.digital_chunks_to_symbols_xx_0 = digital.chunks_to_symbols_bc((-1-1j, -1+1j, 1-1j, 1+1j, 0), 1)
self.blocks_vector_to_stream_0 = blocks.vector_to_stream(gr.sizeof_gr_complex*1, 2048)
self.blocks_vector_source_x_0 = blocks.vector_source_c([math.sin(math.pi / 2 * i / 112) for i in range(112)] + [1] * (2048-112) + [math.cos(math.pi / 2 * i / 112) for i in range(112)], True, 1, [])
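        # This 2160-sample window (112-sample sine ramp-up, 1936-sample flat
        # top, 112-sample cosine ramp-down) appears to pulse-shape each
        # cyclically extended OFDM symbol (2160 = 2048 FFT bins + 112).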
self.blocks_stream_to_vector_0 = blocks.stream_to_vector(gr.sizeof_gr_complex*1, 2048)
self.blocks_repeat_0 = blocks.repeat(gr.sizeof_gr_complex*2048, 2)
self.blocks_multiply_xx_0 = blocks.multiply_vcc(1)
self.blocks_multiply_const_vxx_1 = blocks.multiply_const_vcc((0.1, ))
self.blocks_multiply_const_vxx_0 = blocks.multiply_const_vcc((0.001, ))
self.blocks_keep_m_in_n_0 = blocks.keep_m_in_n(gr.sizeof_gr_complex, 2160, 4096, 0)
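        # blocks_repeat_0 doubles each 2048-sample symbol to 4096 samples;
        # keeping the first 2160 of every 4096 yields the symbol plus a
        # 112-sample cyclic extension (assumed NRSC-5 symbol length).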
self.blocks_file_source_0 = blocks.file_source(gr.sizeof_char*1, 'symbols.raw', False)
self.blocks_conjugate_cc_0 = blocks.conjugate_cc()
self.blocks_add_xx_0 = blocks.add_vcc(1)
self.analog_wfm_tx_0 = analog.wfm_tx(
audio_rate=50000,
quad_rate=200000,
tau=75e-6,
max_dev=75e3,
fh=-1.0,
)
self.analog_sig_source_x_0 = analog.sig_source_f(50000, analog.GR_COS_WAVE, 1000, 0.1, 0)
##################################################
# Connections
##################################################
self.connect((self.analog_sig_source_x_0, 0), (self.analog_wfm_tx_0, 0))
self.connect((self.analog_wfm_tx_0, 0), (self.rational_resampler_xxx_0, 0))
self.connect((self.blocks_add_xx_0, 0), (self.osmosdr_sink_0, 0))
self.connect((self.blocks_conjugate_cc_0, 0), (self.rational_resampler_xxx_1, 0))
self.connect((self.blocks_file_source_0, 0), (self.digital_chunks_to_symbols_xx_0, 0))
self.connect((self.blocks_keep_m_in_n_0, 0), (self.blocks_multiply_xx_0, 1))
self.connect((self.blocks_multiply_const_vxx_0, 0), (self.blocks_add_xx_0, 0))
self.connect((self.blocks_multiply_const_vxx_1, 0), (self.blocks_add_xx_0, 1))
self.connect((self.blocks_multiply_xx_0, 0), (self.blocks_conjugate_cc_0, 0))
self.connect((self.blocks_repeat_0, 0), (self.blocks_vector_to_stream_0, 0))
self.connect((self.blocks_stream_to_vector_0, 0), (self.fft_vxx_0, 0))
self.connect((self.blocks_vector_source_x_0, 0), (self.blocks_multiply_xx_0, 0))
self.connect((self.blocks_vector_to_stream_0, 0), (self.blocks_keep_m_in_n_0, 0))
self.connect((self.digital_chunks_to_symbols_xx_0, 0), (self.blocks_stream_to_vector_0, 0))
self.connect((self.fft_vxx_0, 0), (self.blocks_repeat_0, 0))
self.connect((self.low_pass_filter_0, 0), (self.blocks_multiply_const_vxx_1, 0))
self.connect((self.rational_resampler_xxx_0, 0), (self.low_pass_filter_0, 0))
self.connect((self.rational_resampler_xxx_1, 0), (self.rational_resampler_xxx_2, 0))
self.connect((self.rational_resampler_xxx_2, 0), (self.blocks_multiply_const_vxx_0, 0))
def get_samp_rate(self):
return self.samp_rate
def set_samp_rate(self, samp_rate):
self.samp_rate = samp_rate
self.osmosdr_sink_0.set_sample_rate(self.samp_rate)
self.low_pass_filter_0.set_taps(firdes.low_pass(1, self.samp_rate, 80000, 20000, firdes.WIN_HAMMING, 6.76))
def get_freq(self):
return self.freq
def set_freq(self, freq):
self.freq = freq
self.osmosdr_sink_0.set_center_freq(self.freq, 0)
def main(top_block_cls=hd_tx_hackrf, options=None):
tb = top_block_cls()
tb.start()
try:
raw_input('Press Enter to quit: ')
except EOFError:
pass
tb.stop()
tb.wait()
if __name__ == '__main__':
main()
|