repo_name (string, 5–100 chars) | ref (string, 12–67 chars) | path (string, 4–244 chars) | copies (string, 1–8 chars) | content (string, 0–1.05M chars, nullable ⌀)
---|---|---|---|---|
TangxingZhou/linux | refs/heads/master | scripts/tracing/draw_functrace.py | 14679 | #!/usr/bin/python
"""
Copyright 2008 (c) Frederic Weisbecker <fweisbec@gmail.com>
Licensed under the terms of the GNU GPL License version 2
This script parses a trace provided by the function tracer in
kernel/trace/trace_functions.c
The resulting trace is processed into a tree to produce a more
human-readable view of the call stack, drawn as a textual but
hierarchical tree of calls. Only the functions' names and the call
times are provided.
Usage:
Be sure that you have CONFIG_FUNCTION_TRACER
# mount -t debugfs nodev /sys/kernel/debug
# echo function > /sys/kernel/debug/tracing/current_tracer
$ cat /sys/kernel/debug/tracing/trace_pipe > ~/raw_trace_func
Wait some time, but not too long: the script is a bit slow.
Break the pipe (Ctrl + Z)
$ scripts/draw_functrace.py < raw_trace_func > draw_functrace
Then you have your drawn trace in draw_functrace
"""
import sys, re
class CallTree:
""" This class provides a tree representation of the functions
call stack. If a function has no parent in the kernel (interrupt,
syscall, kernel thread...) then it is attached to a virtual parent
called ROOT.
"""
ROOT = None
def __init__(self, func, time = None, parent = None):
self._func = func
self._time = time
if parent is None:
self._parent = CallTree.ROOT
else:
self._parent = parent
self._children = []
def calls(self, func, calltime):
""" If a function calls another one, call this method to insert it
into the tree at the appropriate place.
@return: A reference to the newly created child node.
"""
child = CallTree(func, calltime, self)
self._children.append(child)
return child
def getParent(self, func):
""" Retrieve the last parent of the current node that
has the name given by func. If this function is not
on a parent, then create it as new child of root
@return: A reference to the parent.
"""
tree = self
while tree != CallTree.ROOT and tree._func != func:
tree = tree._parent
if tree == CallTree.ROOT:
child = CallTree.ROOT.calls(func, None)
return child
return tree
def __repr__(self):
return self.__toString("", True)
def __toString(self, branch, lastChild):
if self._time is not None:
s = "%s----%s (%s)\n" % (branch, self._func, self._time)
else:
s = "%s----%s\n" % (branch, self._func)
i = 0
if lastChild:
branch = branch[:-1] + " "
while i < len(self._children):
if i != len(self._children) - 1:
s += "%s" % self._children[i].__toString(branch +\
" |", False)
else:
s += "%s" % self._children[i].__toString(branch +\
" |", True)
i += 1
return s
class BrokenLineException(Exception):
"""If the last line is not complete because of the pipe breakage,
we want to stop the processing and ignore this line.
"""
pass
class CommentLineException(Exception):
""" If the line is a comment (as in the beginning of the trace file),
just ignore it.
"""
pass
def parseLine(line):
line = line.strip()
if line.startswith("#"):
raise CommentLineException
m = re.match("[^]]+?\\] +([0-9.]+): (\\w+) <-(\\w+)", line)
if m is None:
raise BrokenLineException
return (m.group(1), m.group(2), m.group(3))
def main():
CallTree.ROOT = CallTree("Root (Nowhere)", None, None)
tree = CallTree.ROOT
for line in sys.stdin:
try:
calltime, callee, caller = parseLine(line)
except BrokenLineException:
break
except CommentLineException:
continue
tree = tree.getParent(caller)
tree = tree.calls(callee, calltime)
print CallTree.ROOT
if __name__ == "__main__":
main()
|
valerauko/kcbridge | refs/heads/master | kconfig.py | 1 | #!/usr/bin/env python
#coding=utf-8
theLimit = 45 ## time limit to mark returning fleets / completed repairs
theServer = '125.6.189.135' ## default brunei
'''
### These are just minor details about the code. No need to touch them.
'''
theName = u'母港執務室'
theSoft = 'KC Bridge' |
valtech-mooc/edx-platform | refs/heads/master | common/test/acceptance/performance/test_studio_performance.py | 139 | """
Single page performance tests for Studio.
"""
from bok_choy.web_app_test import WebAppTest, with_cache
from ..pages.studio.auto_auth import AutoAuthPage
from ..pages.studio.overview import CourseOutlinePage
from nose.plugins.attrib import attr
@attr(har_mode='explicit')
class StudioPagePerformanceTest(WebAppTest):
"""
Base class to capture studio performance with HTTP Archives.
To import courses for the bok choy tests, pass the --imports_dir=<course directory> argument to the paver command
where <course directory> contains the (un-archived) courses to be imported.
"""
course_org = 'edX'
course_num = 'Open_DemoX'
course_run = 'edx_demo_course'
def setUp(self):
"""
Authenticate as staff so we can view and edit courses.
"""
super(StudioPagePerformanceTest, self).setUp()
AutoAuthPage(self.browser, staff=True).visit()
def record_visit_outline(self):
"""
Produce a HAR for loading the course outline page.
"""
course_outline_page = CourseOutlinePage(self.browser, self.course_org, self.course_num, self.course_run)
har_name = 'OutlinePage_{org}_{course}'.format(
org=self.course_org,
course=self.course_num
)
self.har_capturer.add_page(self.browser, har_name)
course_outline_page.visit()
self.har_capturer.save_har(self.browser, har_name)
def record_visit_unit(self, section_title, subsection_title, unit_title):
"""
Produce a HAR for loading a unit page.
"""
course_outline_page = CourseOutlinePage(self.browser, self.course_org, self.course_num, self.course_run).visit()
course_outline_unit = course_outline_page.section(section_title).subsection(subsection_title).expand_subsection().unit(unit_title)
har_name = 'UnitPage_{org}_{course}'.format(
org=self.course_org,
course=self.course_num
)
self.har_capturer.add_page(self.browser, har_name)
course_outline_unit.go_to()
self.har_capturer.save_har(self.browser, har_name)
class StudioJusticePerformanceTest(StudioPagePerformanceTest):
"""
Test performance on the HarvardX Justice course.
"""
course_org = 'HarvardX'
course_num = 'ER22x'
course_run = '2013_Spring'
@with_cache
def test_visit_outline(self):
"""Record visiting the Justice course outline page"""
self.record_visit_outline()
@with_cache
def test_visit_unit(self):
"""Record visiting a Justice unit page"""
self.record_visit_unit(
'Lecture 1 - Doing the Right Thing',
'Discussion Prompt: Ethics of Torture',
'Discussion Prompt: Ethics of Torture'
)
class StudioPub101PerformanceTest(StudioPagePerformanceTest):
"""
Test performance on Andy's PUB101 outline page.
"""
course_org = 'AndyA'
course_num = 'PUB101'
course_run = 'PUB101'
@with_cache
def test_visit_outline(self):
"""Record visiting the PUB101 course outline page"""
self.record_visit_outline()
@with_cache
def test_visit_unit(self):
"""Record visiting the PUB101 unit page"""
self.record_visit_unit('Released', 'Released', 'Released')
|
PatidarWeb/poedit | refs/heads/master | deps/boost/tools/build/test/chain.py | 44 | #!/usr/bin/python
# Copyright 2003 Dave Abrahams
# Copyright 2002, 2003 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# This tests that:
# 1) the 'make' rule correctly assigns types to produced targets
# 2) if 'make' creates targets of type CPP, they are correctly used.
import BoostBuild
t = BoostBuild.Tester(use_test_config=False)
# In order to correctly link this app, 'b.cpp', created by a 'make' rule, should
# be compiled.
t.write("jamroot.jam", "import gcc ;")
t.write("jamfile.jam", r'''
import os ;
if [ os.name ] = NT
{
actions create
{
echo int main() {} > $(<)
}
}
else
{
actions create
{
echo "int main() {}" > $(<)
}
}
IMPORT $(__name__) : create : : create ;
exe a : l dummy.cpp ;
# Needs to be a static lib for Windows - main() cannot appear in DLL.
static-lib l : a.cpp b.cpp ;
make b.cpp : : create ;
''')
t.write("a.cpp", "")
t.write("dummy.cpp", "// msvc needs at least one object file\n")
t.run_build_system()
t.expect_addition("bin/$toolset/debug/a.exe")
t.cleanup()
|
tximikel/kuma | refs/heads/master | kuma/wiki/kumascript.py | 10 | import base64
from collections import defaultdict
import json
import hashlib
import time
from urlparse import urljoin
from django.conf import settings
from django.contrib.sites.models import Site
from constance import config
import requests
from kuma.core.cache import memcache
from .constants import KUMASCRIPT_TIMEOUT_ERROR, TEMPLATE_TITLE_PREFIX
def should_use_rendered(doc, params, html=None):
"""
* The service isn't disabled with a timeout of 0
* The document isn't empty
* The request has *not* asked for raw source
(eg. ?raw)
* The request has *not* asked for no macro evaluation
(eg. ?nomacros)
* The request *has* asked for macro evaluation
(eg. ?raw¯os)
"""
show_raw = params.get('raw', False) is not False
no_macros = params.get('nomacros', False) is not False
force_macros = params.get('macros', False) is not False
is_template = False
if doc:
is_template = doc.is_template
html = doc.html
return (config.KUMASCRIPT_TIMEOUT > 0 and
html and
not is_template and
(force_macros or (not no_macros and not show_raw)))
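# Illustrative examples of the logic above (hypothetical query strings):
# "?raw" alone sets show_raw=True, so the raw source is served; "?raw&macros"
# also sets force_macros=True, so rendered content is used despite the raw flag.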
def post(request, content, locale=settings.LANGUAGE_CODE,
use_constance_bleach_whitelists=False):
url = settings.KUMASCRIPT_URL_TEMPLATE.format(path='')
headers = {
'X-FireLogger': '1.2',
}
env_vars = {
'url': request.build_absolute_uri('/'),
'locale': locale,
}
add_env_headers(headers, env_vars)
response = requests.post(url,
timeout=config.KUMASCRIPT_TIMEOUT,
data=content.encode('utf8'),
headers=headers)
if response:
body = process_body(response, use_constance_bleach_whitelists)
errors = process_errors(response)
return body, errors
else:
errors = KUMASCRIPT_TIMEOUT_ERROR
return content, errors
def _get_attachment_metadata_dict(attachment):
filesize = 0
try:
filesize = attachment.current_revision.file.size
except OSError:
pass
return {
'title': attachment.title,
'description': attachment.current_revision.description,
'filename': attachment.current_revision.filename(),
'size': filesize,
'author': attachment.current_revision.creator.username,
'mime': attachment.current_revision.mime_type,
'url': attachment.get_file_url(),
}
def _format_slug_for_request(slug):
"""Formats a document slug which will play nice with kumascript caching"""
# http://bugzil.la/1063580
index = slug.find(TEMPLATE_TITLE_PREFIX)
if index != -1:
slug = '%s%s' % (TEMPLATE_TITLE_PREFIX, slug[(index + len(TEMPLATE_TITLE_PREFIX)):].lower())
return slug
def get(document, cache_control, base_url, timeout=None):
"""Perform a kumascript GET request for a document locale and slug."""
if not cache_control:
# Default to the configured max-age for cache control.
max_age = config.KUMASCRIPT_MAX_AGE
cache_control = 'max-age=%s' % max_age
if not base_url:
site = Site.objects.get_current()
base_url = 'http://%s' % site.domain
if not timeout:
timeout = config.KUMASCRIPT_TIMEOUT
document_locale = document.locale
document_slug = document.slug
max_age = config.KUMASCRIPT_MAX_AGE
# 1063580 - Kumascript converts template name calls to lower case and bases
# caching keys off of that.
document_slug_for_kumascript = document_slug
if document.is_template:
document_slug_for_kumascript = _format_slug_for_request(document_slug)
body, errors = None, None
try:
url_tmpl = settings.KUMASCRIPT_URL_TEMPLATE
url = unicode(url_tmpl).format(path=u'%s/%s' %
(document_locale,
document_slug_for_kumascript))
cache_keys = build_cache_keys(document_slug, document_locale)
etag_key, modified_key, body_key, errors_key = cache_keys
headers = {
'X-FireLogger': '1.2',
'Cache-Control': cache_control,
}
# Create the file interface
files = []
for attachment in document.attachments.all():
files.append(_get_attachment_metadata_dict(attachment))
# Assemble some KumaScript env vars
# TODO: See dekiscript vars for future inspiration
# http://developer.mindtouch.com/en/docs/DekiScript/Reference/
# Wiki_Functions_and_Variables
path = document.get_absolute_url()
# TODO: Someday merge with _get_document_for_json in views.py
# where most of this is duplicated code.
env_vars = dict(
path=path,
url=urljoin(base_url, path),
id=document.pk,
revision_id=document.current_revision.pk,
locale=document.locale,
title=document.title,
files=files,
attachments=files, # Just for sake of verbiage?
slug=document.slug,
tags=list(document.tags.values_list('name', flat=True)),
review_tags=list(document.current_revision
.review_tags
.values_list('name', flat=True)),
modified=time.mktime(document.modified.timetuple()),
cache_control=cache_control,
)
add_env_headers(headers, env_vars)
# Set up for conditional GET, if we have the details cached.
cached_meta = memcache.get_many([etag_key, modified_key])
if etag_key in cached_meta:
headers['If-None-Match'] = cached_meta[etag_key]
if modified_key in cached_meta:
headers['If-Modified-Since'] = cached_meta[modified_key]
# Finally, fire off the request.
response = requests.get(url, headers=headers, timeout=timeout)
if response.status_code == 304:
# Conditional GET was a pass, so use the cached content.
result = memcache.get_many([body_key, errors_key])
body = result.get(body_key, '').decode('utf-8')
errors = result.get(errors_key, None)
elif response.status_code == 200:
body = process_body(response)
errors = process_errors(response)
# Cache the request for conditional GET, but use the max_age for
# the cache timeout here too.
headers = response.headers
memcache.set(etag_key, headers.get('etag'), timeout=max_age)
memcache.set(modified_key, headers.get('last-modified'), timeout=max_age)
memcache.set(body_key, body.encode('utf-8'), timeout=max_age)
if errors:
memcache.set(errors_key, errors, timeout=max_age)
elif response.status_code is None:
errors = KUMASCRIPT_TIMEOUT_ERROR
else:
errors = [
{
"level": "error",
"message": "Unexpected response from Kumascript service: %s" %
response.status_code,
"args": ["UnknownError"],
},
]
except Exception, exc:
# Last resort: Something went really haywire. Kumascript server died
# mid-request, or something. Try to report at least some hint.
errors = [
{
"level": "error",
"message": "Kumascript service failed unexpectedly: %s" % exc,
"args": ["UnknownError"],
},
]
return (body, errors)
def add_env_headers(headers, env_vars):
"""Encode env_vars as kumascript headers, as base64 JSON-encoded values."""
headers.update(dict(
('x-kumascript-env-%s' % k, base64.b64encode(json.dumps(v)))
for k, v in env_vars.items()
))
return headers
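# For example (values are illustrative), env_vars = {'locale': 'en-US'} becomes
# the header 'x-kumascript-env-locale': base64.b64encode(json.dumps('en-US')),
# i.e. 'ImVuLVVTIg=='.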
def process_body(response, use_constance_bleach_whitelists=False):
# We defer bleach sanitation of kumascript content all the way
# through editing, source display, and raw output. But, we still
# want sanitation, so it finally gets picked up here.
from kuma.wiki.models import Document
return Document.objects.clean_content(response.text,
use_constance_bleach_whitelists)
def process_errors(response):
"""
Attempt to decode any FireLogger-style error messages in the response
from kumascript.
"""
errors = []
try:
# Extract all the log packets from headers.
packets = defaultdict(dict)
for key, value in response.headers.items():
if not key.lower().startswith('firelogger-'):
continue
prefix, id_, seq = key.split('-', 3)
packets[id_][seq] = value
# The FireLogger spec allows for multiple "packets". But,
# kumascript only ever sends the one, so flatten all messages.
msgs = []
for contents in packets.values():
keys = sorted(contents.keys(), key=int)
encoded = '\n'.join(contents[key] for key in keys)
decoded_json = base64.decodestring(encoded)
packet = json.loads(decoded_json)
msgs.extend(packet['logs'])
if len(msgs):
errors = msgs
except Exception, exc:
errors = [
{
"level": "error",
"message": "Problem parsing errors: %s" % exc,
"args": ["ParsingError"],
},
]
return errors
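# Illustrative FireLogger headers as consumed above (the id and sequence
# numbers are made up): 'FireLogger-abc123-0' and 'FireLogger-abc123-1' carry
# base64 chunks that are joined in sequence order, decoded, and parsed as a
# JSON packet with a 'logs' list.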
def build_cache_keys(document_locale, document_slug):
"""Build the cache keys used for Kumascript"""
path_hash = hashlib.md5((u'%s/%s' % (document_locale, document_slug))
.encode('utf8'))
base_key = 'kumascript:%s:%%s' % path_hash.hexdigest()
etag_key = base_key % 'etag'
modified_key = base_key % 'modified'
body_key = base_key % 'body'
errors_key = base_key % 'errors'
return (etag_key, modified_key, body_key, errors_key)
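# Example key shapes (hash value made up): for a given document the four keys
# look like 'kumascript:0123abcd...:etag', ':modified', ':body' and ':errors',
# so related cache entries share one md5-derived prefix.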
|
piagarwal11/GDriveLinuxClient | refs/heads/master | src/watchdog-0.8.2/setup.py | 7 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2011 Yesudeep Mangalapilly <yesudeep@gmail.com>
# Copyright 2012 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import imp
import os.path
from setuptools import setup, find_packages
from setuptools.extension import Extension
from setuptools.command.build_ext import build_ext
from setuptools.command.test import test as TestCommand
from distutils.util import get_platform
SRC_DIR = 'src'
WATCHDOG_PKG_DIR = os.path.join(SRC_DIR, 'watchdog')
version = imp.load_source('version', os.path.join(WATCHDOG_PKG_DIR, 'version.py'))
ext_modules = []
if get_platform().startswith('macosx'):
ext_modules = [
Extension(
name='_watchdog_fsevents',
sources=[
'src/watchdog_fsevents.c',
],
libraries=['m'],
define_macros=[
('WATCHDOG_VERSION_STRING',
'"' + version.VERSION_STRING + '"'),
('WATCHDOG_VERSION_MAJOR', version.VERSION_MAJOR),
('WATCHDOG_VERSION_MINOR', version.VERSION_MINOR),
('WATCHDOG_VERSION_BUILD', version.VERSION_BUILD),
],
extra_link_args=[
'-framework', 'CoreFoundation',
'-framework', 'CoreServices',
],
extra_compile_args=[
'-std=c99',
'-pedantic',
'-Wall',
'-Wextra',
'-fPIC',
# required w/Xcode 5.1+ and above because of '-mno-fused-madd'
'-Wno-error=unused-command-line-argument-hard-error-in-future'
]
),
]
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = [
'--cov=' + SRC_DIR,
'--cov-report=term-missing',
'tests']
self.test_suite = True
def run_tests(self):
import pytest
errno = pytest.main(self.test_args)
sys.exit(errno)
tests_require=['pytest', 'pytest-cov', 'pytest-timeout >=0.3']
if sys.version_info < (2, 7, 0):
tests_require.append('unittest2')
install_requires = ['PyYAML >=3.10', 'argh >=0.24.1', 'pathtools >=0.1.1']
if sys.version_info < (2, 7, 0):
# argparse is merged into Python 2.7 in the Python 2x series
# and Python 3.2 in the Python 3x series.
install_requires.append('argparse >=1.1')
if any([key in sys.platform for key in ['bsd', 'darwin']]):
# Python 2.6 and below have the broken/non-existent kqueue implementations
# in the select module. This backported patch adds one from Python 2.7,
# which works.
install_requires.append('select_backport >=0.2')
with open('README.rst') as f:
readme = f.read()
with open('changelog.rst') as f:
changelog = f.read()
setup(name="watchdog",
version=version.VERSION_STRING,
description="Filesystem events monitoring",
long_description=readme + '\n\n' + changelog,
author="Yesudeep Mangalapilly",
author_email="yesudeep@gmail.com",
license="Apache License 2.0",
url="http://github.com/gorakhargosh/watchdog",
keywords=' '.join([
'python',
'filesystem',
'monitoring',
'monitor',
'FSEvents',
'kqueue',
'inotify',
'ReadDirectoryChangesW',
'polling',
'DirectorySnapshot',
]),
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: Apache Software License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX :: BSD',
'Operating System :: Microsoft :: Windows :: Windows NT/2000',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: C',
'Topic :: Software Development :: Libraries',
'Topic :: System :: Monitoring',
'Topic :: System :: Filesystems',
'Topic :: Utilities',
],
package_dir={'': SRC_DIR},
packages=find_packages(SRC_DIR),
include_package_data=True,
install_requires=install_requires,
tests_require=tests_require,
cmdclass={
'build_ext': build_ext,
'test': PyTest,
},
ext_modules=ext_modules,
entry_points={'console_scripts': [
'watchmedo = watchdog.watchmedo:main',
]},
zip_safe=False
)
|
kobejean/tensorflow | refs/heads/master | tensorflow/python/util/serialization_test.py | 32 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for serialization functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import json
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import test_util
from tensorflow.python.keras.engine import input_layer
from tensorflow.python.keras.engine import sequential
from tensorflow.python.keras.engine import training
from tensorflow.python.keras.layers import core
from tensorflow.python.platform import test
from tensorflow.python.util import serialization
class SerializationTests(test.TestCase):
def test_serialize_dense(self):
dense = core.Dense(3)
dense(constant_op.constant([[4.]]))
round_trip = json.loads(json.dumps(
dense, default=serialization.get_json_type))
self.assertEqual(3, round_trip["config"]["units"])
def test_serialize_shape(self):
round_trip = json.loads(json.dumps(
tensor_shape.TensorShape([None, 2, 3]),
default=serialization.get_json_type))
self.assertIs(round_trip[0], None)
self.assertEqual(round_trip[1], 2)
@test_util.run_in_graph_and_eager_modes
def test_serialize_sequential(self):
model = sequential.Sequential()
model.add(core.Dense(4))
model.add(core.Dense(5))
model(constant_op.constant([[1.]]))
sequential_round_trip = json.loads(
json.dumps(model, default=serialization.get_json_type))
self.assertEqual(
5, sequential_round_trip["config"]["layers"][1]["config"]["units"])
@test_util.run_in_graph_and_eager_modes
def test_serialize_model(self):
x = input_layer.Input(shape=[3])
y = core.Dense(10)(x)
model = training.Model(x, y)
model(constant_op.constant([[1., 1., 1.]]))
model_round_trip = json.loads(
json.dumps(model, default=serialization.get_json_type))
self.assertEqual(
10, model_round_trip["config"]["layers"][1]["config"]["units"])
if __name__ == "__main__":
test.main()
|
kaustubh-kabra/modified-xen | refs/heads/master | tools/xm-test/tests/memset/01_memset_basic_pos.py | 38 | #!/usr/bin/python
# Copyright (C) International Business Machines Corp., 2005
# Author: Woody Marvel <marvel@us.ibm.com>
##
## Description:
## Tests that verify mem-set output and return code
## 1) Test for xm mem-set
## create domain,
## verify domain and ls output,
## mem-set in dom0,
## verify with xm list memory change external,
## verify with xm list memory change internal,
##
## Author: Woody Marvel marvel@us.ibm.com
##
import sys
import re
import time
from XmTestLib import *
if ENABLE_HVM_SUPPORT:
SKIP("Mem-set not supported for HVM domains")
# Create a domain (default XmTestDomain, with our ramdisk)
domain = XmTestDomain()
# Start it
try:
console = domain.start()
except DomainError, e:
if verbose:
print "Failed to create test domain because:"
print e.extra
FAIL(str(e))
try:
# Make sure it's up and running before we continue
console.runCmd("ls")
except ConsoleError, e:
FAIL(str(e))
xen_mem = XenMemory(console)
origmem = xen_mem.get_mem_from_domU()
newmem = origmem - 1
# mem-set to one megabyte less than the current amount
cmd = "xm mem-set %s %i" % (domain.getName(), newmem)
status, output = traceCommand(cmd)
if status != 0:
if verbose:
print "mem-set failed:"
print output
FAIL("cmd %s returned invalid %i != 0" % (cmd, status))
for i in range(10):
mem = getDomMem(domain.getName())
if mem == newmem:
break
time.sleep(1)
# verify memory set externally
mem = getDomMem(domain.getName())
if not mem:
FAIL("Failed to get memory amount for domain %s" % domain.getName())
elif mem != newmem:
FAIL("Dom0 failed to verify %i MB; got %i MB" % newmem,mem)
# verify memory set internally
domUmem = xen_mem.get_mem_from_domU()
if domUmem != newmem:
FAIL("DomU reported incorrect memory amount: %i MB" % (domUmem))
# quiesce everything
# Close the console
domain.closeConsole()
# Stop the domain (nice shutdown)
domain.stop()
|
benrudolph/commcare-hq | refs/heads/master | corehq/apps/importer/models.py | 3 | # This file is only here so that django will recognize that
# this is a valid app and run the associated unit tests.
from couchdbkit.ext.django.schema import Document
class _(Document): pass |
blbrock/WildPlanner10-v2a | refs/heads/master | Scripts/calculate_density_tool.py | 3 | # ------------------------------------------------------------------------------
#
# Copyright 2011, 2012, 2013 Brent L. Brock and the Craighead Institute
#
# This file is part of Wild Planner.
#
# Wild Planner is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Wild Planner is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Wild Planner in the file named LICENSE.TXT. If not, see <http://www.gnu.org/licenses/>.
#
# ------------------------------------------------------------------------------
#
# calculate_density.py (version 1.0 beta)
# Created on: Mon Nov 24 2008 04:08:09 PM
#
# Written by Brent L. Brock, Landscape Ecologist, Craighead Environmental Research Institute
#
# Calculates approximate housing density that will satisfy specified wildlife conservation targets assuming a random distribution of
# houses on the landscape
#
#This module depends on functionlib.py, which contains functions required by this script
#
# Usage: calculate_density <outWorkspace> <outTable> <Number_of_Iterations_to_Run> <aExtent> <minCoreSize> <distDistance> <numHouses>
# ---------------------------------------------------------------------------
# Import system modules
import sys, os, arcgisscripting
import functionlib as fl
# Create the Geoprocessor object
gp = arcgisscripting.create()
##gp.Workspace = None
# Set the necessary product code
gp.SetProduct("ArcInfo")
# Load required toolboxes...
gp.overwriteoutput = 1
# Script arguments...
table = gp.GetParameterAsText(0)
table = str(table).rsplit("\\",1)
outWorkspace = table[0]
outTable = table[1]
i = gp.GetParameterAsText(1) # Number of iterations to run
aExtent = gp.GetParameterAsText(2)
## FIX: check for polygon topology
constraintLayer = gp.GetParameterAsText(3)
minCoreSize = gp.GetParameterAsText(4)
distDistance = gp.GetParameterAsText(5)
target = gp.GetParameterAsText(6)
outputUnits = gp.GetParameterAsText(11)
##i = "3" # Number of iterations to run
##aExtent = r"w:\gis\inputs\boundary.shp"
#### FIX: check for polygon topology
##constraintLayer = r"w:\gis\inputs\constraint.shp"
##minCoreSize = "3000000" #units in square meters
##distDistance = "500" #units in meters
##target = "50" # initial number of houses to run
# Create temporary workspaces
tWorkspace = fl.CreateTempWorkspace(outWorkspace)
sWorkspace = fl.CreateTempWorkspace(outWorkspace)
gp.ScratchWorkspace = tWorkspace
# Local variables...
area = minCoreSize.split(" ")
minCoreSize = str(fl.ConvertAreaToMeters(area[0], area[1]))
if outputUnits == "" or outputUnits == "#":
outputUnits = area[1]
gp.AddMessage("Minimum Patch Size: " + str(minCoreSize) + " square meters...")
del area
distance = distDistance.split(" ")
distDistance = str(fl.ConvertDistanceToMeters(distance[0], distance[1]))
gp.AddMessage("Disturbance Distance: " + str(distDistance) + " meters...")
del distance
cExtent = tWorkspace+"\\aExtent_copy.shp" #Temporary copy of analysis extent for manipulation
nTable = outWorkspace+"\\"+outTable #Final output table
i = int(i)
endSim = False
target = int(target)
low = target - (target * 0.05)
high = target + (target * 0.05)
currentDirectory = sys.path[0]
# If constraint layer is specified, make copy and clip to aExtent
if constraintLayer == "#" or constraintLayer == "":
constraintLayer = aExtent
else:
gp.MakeFeatureLayer_management(constraintLayer, "tempLayer", "", "", "Input_FID Input_FID VISIBLE NONE")
gp.Clip_analysis("tempLayer", aExtent, sWorkspace + "\\clip_constraint.shp", "")
constraintLayer = sWorkspace + "\\clip_constraint.shp"
##Calculate Total Area and store in variable 'tArea'##
gp.AddMessage("Caculating area of analysis extent...")
# Make copy of analysis extent layer to modify
try:
gp.Copy_management(aExtent, cExtent, "ShapeFile")
except:
a = gp.MakeFeatureLayer_management(aExtent, "aLayer", "", "", "Input_FID Input_FID VISIBLE NONE")
gp.CopyFeatures(a, cExtent)
# Add 'Area' Field and calculate Area
fl.CalcArea(cExtent)
fl.CalcArea(constraintLayer)
# Calculate total area of analysis extent
tArea = fl.CalcTotalArea(gp.ScratchWorkspace, cExtent)
gp.AddMessage("Total Analysis Area = " + str(tArea))
cArea = fl.CalcTotalArea(gp.ScratchWorkspace, constraintLayer)
gp.AddMessage("Total Buildable Area = " + str(cArea))
prcntBuildable = int(cArea / tArea * 100)  # percent of the extent that is buildable
## End Calculate Total Area ##
MAX = int(tArea/float(minCoreSize)) #maximum number of houses for simulation
MIN = 1 #minimum number of houses for simulation
# If number of houses in simulation will be very large, give user a chance to abort
if MAX > 50000:
from Tkinter import *
import tkMessageBox
root = Tk()
root.withdraw()
if tkMessageBox.askokcancel(
"Housing Density",
"The parameters entered result in a maximum number of houses of " + str(MAX) + " and may take a long time to process. Click 'Cancel' to quit or 'OK' to continue."):
pass
else:
gp.AddMessage("Simulation aborted by user. Cleaning up temporary files...")
fl.CleanFiles(sWorkspace)
fl.CleanFiles(tWorkspace)
gp.Delete_management(tWorkspace,"")
gp.Delete_management(sWorkspace,"")
sys.exit()
## Create New Table to store final results if it doesn't exist... ##
#If the nTable does not exist, create it.
gp.AddMessage("Creating output table " + nTable + "...")
if not gp.Exists(nTable):
fl.MakeTable(outWorkspace, outTable)
## # Process: Create Table...
## gp.CreateTable_management(outWorkspace, outTable, "", "")
## # Process: Add Fields...
## gp.AddField_management(nTable, "SUM_SUM_AR", "DOUBLE", "", "", "", "", "NON_NULLABLE", "NON_REQUIRED", "")
## gp.AddField_management(nTable, "MAX_SUM_AR", "DOUBLE", "", "", "", "", "NON_NULLABLE", "NON_REQUIRED", "")
## gp.AddField_management(nTable, "MIN_SUM_AR", "DOUBLE", "", "", "", "", "NON_NULLABLE", "NON_REQUIRED", "")
## gp.AddField_management(nTable, "HOUSES", "LONG", "", "", "", "", "NON_NULLABLE", "NON_REQUIRED", "")
## gp.AddField_management(nTable, "PRCNT_AREA", "DOUBLE", "18", "4", "", "", "NON_NULLABLE", "NON_REQUIRED", "")
# Calculate initial number of houses for random pattern simulations
gp.AddMessage("Calculating number of houses for first simulation run...")
numHouses = MAX / 2 # Set initial seed for number of houses
if numHouses == 0:
numHouses = 1
numHouses = str(numHouses)
### Check if target is attainable with given parameters
##if prcntBuildable < target:
## gp.AddWarning("Buildable area is only " + str(prcntBuildable) + "% and is less than the target of " + str(target))
## gp.AddWarning("Adjust target minimum possible...")
## target = prcntBuildable
## numHouses = (int(tArea/int(minCoreSize)))
# Get result of monte carlo simulation
gp.AddMessage("Begin simulation 1...")
result = fl.RunSimulation(i,gp.ScratchWorkspace, aExtent, constraintLayer, numHouses, minCoreSize, distDistance, nTable, tArea, "", "", True)
# Test whether result does not equal target +/- 5%
# If result is outside target range, use "principle of halves" to adjust number of houses
# until a solution is reached
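# Illustrative run of the halving search (numbers made up): starting at
# MAX/2 = 500 houses, each simulation halves the [MIN, MAX] interval --
# raising the house count when too much core area survives (result >= high)
# and lowering it when too little does (result < low) -- until the conserved
# percentage lands within +/-5% of the target.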
count = 1
lastSim = 0
while result < low or result >= high and not endSim:
if result >= high:
## if count > 20:
## gp.AddWarning("Could not reach a solution after 10 simulation runs. Setting number of houses to MAX for final simulation...")
## numHouses = MAX
### result = fl.RunSimulation(i,gp.ScratchWorkspace, aExtent, constraintLayer, numHouses, minCoreSize, distDistance, nTable, tArea, "", "", True)
## endSim = True
## break
##
## else:
## MIN = int(numHouses)
## numHouses = ((MAX - MIN)/2)+ MIN
## gp.AddMessage("Begin simulation " + str(count + 1) + "...")
## result = fl.RunSimulation(i,gp.ScratchWorkspace, aExtent, constraintLayer, numHouses, minCoreSize, distDistance, nTable, tArea, "", "", True)
## count = count + 1
MIN = int(numHouses)
numHouses = ((MAX - MIN)/2)+ MIN
gp.AddMessage("Begin simulation " + str(count + 1) + "...")
result = fl.RunSimulation(i,gp.ScratchWorkspace, aExtent, constraintLayer, numHouses, minCoreSize, distDistance, nTable, tArea, "", "", True)
count = count + 1
#Store descriptors for output if target cannot be reached
minmax = "maximum"
obtain = "exceeded"
# Double the maximum number of houses if numHouses is within 5% of MAX and result is > than high
if abs(MAX - numHouses) < MAX * 0.05:
MAX = MAX * 2
if result < low:
MAX = int(numHouses)
numHouses = MIN + ((MAX - MIN)/2)
gp.AddMessage("Begin simulation " + str(count + 1) + "...")
result = fl.RunSimulation(i,gp.ScratchWorkspace, aExtent, constraintLayer, numHouses, minCoreSize, distDistance, nTable, tArea, "", "", True)
count = count + 1
minmax = "minimum"
obtain = "not obtainable"
if numHouses < 2:
endSim = True
break
#Store descriptors for output if target cannot be reached
# if changing numHouses does not change result more than 1%, end simulation and report results.
if abs(result - lastSim) < 1:
endSim = True
break
lastSim = result
# Calculate density from the solution
gp.AddMessage("Calculating density...")
density = float(tArea)/int(numHouses)
density = int(round(fl.ConvertSquareMetersToOther(density, outputUnits)))
cDensity = float(cArea)/int(numHouses)
cDensity = int(round(fl.ConvertSquareMetersToOther(cDensity, outputUnits)))
gp.SetParameterAsText(7, density)
gp.SetParameterAsText(8, cDensity)
# Get cores and houses of last iteration result
gp.AddMessage("Copying shapefiles from last iteration to: " + outWorkspace)
rootName = str(outTable).split(".")
rootName = rootName[0]
try:
outShape1 = gp.Copy_management(tWorkspace + "\\random_temp.shp", outWorkspace + "\\" + rootName + "_example_points.shp", "ShapeFile")
outShape2 = gp.Copy_Management(tWorkspace + "\\xxcoresLayer.shp", outWorkspace + "\\" + rootName + "_example_cores.shp", "Shapefile")
gp.SetParameterAsText(9, outShape1)
gp.SetParameterAsText(10, outShape2)
params = gp.GetParameterInfo()
## FIX: symbology is not displaying in ArcMap
# Set the symbology of the output.
# output = the output value
# params[2] = the output parameter
#
gp.AddMessage("The current python script directory is: " + currentDirectory)
params[8].symbology = currentDirectory + "\\houses.lyr"
params[9].symbology = currentDirectory + "\\cores.lyr"
except:
gp.AddMessage("Could not copy final outputs to " + outWorkspace)
# Clean up temporary workspace
fl.CleanFiles(sWorkspace)
fl.CleanFiles(tWorkspace)
gp.Delete_management(tWorkspace,"")
gp.Delete_management(sWorkspace,"")
if not endSim:
# Display normal result in messagebox
from Tkinter import *
import tkMessageBox
root = Tk()
root.withdraw()
tkMessageBox.showinfo(
"Housing Density",
"Target: " + str(target) + "% of area conserved as core habitat\n Overall density: " + str(density) + " " + outputUnits + " per house\n Density within buildable area: " + str(cDensity) + " " + outputUnits + " per house")
else:
# Display truncated result in messagebox
from Tkinter import *
import tkMessageBox
root = Tk()
root.withdraw()
tkMessageBox.showinfo(
"Housing Density",
"The target of " + str(target) + "% " + obtain + " even at " + minmax + " buildout.\n Which resulted in the following results:\n Area conserved: " + str(int(result)) + "% of avialable habitat\n Overall density: " + str(density) + " " + outputUnits + " per house\n Density within buidable area: " + str(cDensity) + " " + outputUnits + " per house")
|
TheWebMonks/equipo | refs/heads/master | app/freelancers/migrations/0038_remove_company_social_account.py | 1 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2017-06-14 19:17
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('freelancers', '0037_auto_20170611_0024'),
]
operations = [
migrations.RemoveField(
model_name='company',
name='social_account',
),
]
|
RagBillySandstone/google-python-exercises | refs/heads/master | my_own_exercises/web_server/webserver2.py | 1 | # A web server that can run various web frameworks thanks to WSGI
import socket
import StringIO
import sys
class WSGIServer(object):
address_family = socket.AF_INET
socket_type = socket.SOCK_STREAM
request_queue_size = 1
def __init__(self, server_address):
# Create a listening socket
self.listen_socket = listen_socket = socket.socket(
self.address_family,
self.socket_type
)
# Allow reuse of the address
listen_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
# Bind
listen_socket.bind(server_address)
# Activate
listen_socket.listen(self.request_queue_size)
# Get server host name and port
host, port = self.listen_socket.getsockname()[:2]
self.server_name = socket.getfqdn(host)
self.server_port = port
# Return headers set by Web framework/application
self.headers_set = []
def set_app(self, application):
self.application = application
def serve_forever(self):
listen_socket = self.listen_socket
while True:
# New client connection
self.client_connection, client_address = listen_socket.accept()
# Handle one request and close the client connection. Then loop over to
# wait for another client connection
self.handle_one_request()
def handle_one_request(self):
self.request_data = request_data = self.client_connection.recv(1024)
# print formatted request data a la 'curl -v'
print(''.join(
'< {line}\n'.format(line=line)
for line in request_data.splitlines()
))
self.parse_request(request_data)
# construct environment dictionary using request data
env = self.get_environ()
# Call the application callable and get back a result that will
# become the HTTP response body
result = self.application(env, self.start_response)
#Construct a response and send it back to the client
self.finish_response(result)
def parse_request(self, text):
request_line = text.splitlines()[0]
request_line = request_line.rstrip('\r\n')
# Break down the request line into components
(self.request_method, # GET
self.path, # /hello
self.request_version # HTTP/1.1
) = request_line.split()
def get_environ(self):
env = {}
# required WSGI variables
env['wsgi.version'] = (1, 0)
env['wsgi.url_scheme'] = 'http'
env['wsgi.input'] = StringIO.StringIO(self.request_data)
env['wsgi.errors'] = sys.stderr
env['wsgi.multithread'] = False
env['wsgi.multiprocess'] = False
env['wsgi.run_once'] = False
# Required CGI variables
env['REQUEST_METHOD'] = self.request_method # GET
env['PATH_INFO'] = self.path # /hello
env['SERVER_NAME'] = self.server_name # localhost
env['SERVER_PORT'] = str(self.server_port) # 8888
return env
def start_response(self, status, response_headers, exc_info=None):
# Add necessary server headers
server_headers = [
('Date', 'Mon, 06 Mar 2017 01:07:38 GMT'),
('Server', 'WSGIServer 0.2'),
]
self.headers_set = [status, response_headers + server_headers]
# To adhere to WSGI specs, the start_response must return a 'write'
# callable. For simplicity's sake, we'll ignore that for now
# return self.finish_response
def finish_response(self, result):
try:
status, response_headers = self.headers_set
response = 'HTTP/1.1 {status}\r\n'.format(status=status)
for header in response_headers:
response += '{0}: {1}\r\n'.format(*header)
response += '\r\n'
for data in result:
response += data
print(''.join(
'> {line}\n'.format(line=line)
for line in response.splitlines()
))
self.client_connection.sendall(response)
finally:
self.client_connection.close()
SERVER_ADDRESS = (HOST, PORT) = '', 8888
def make_server(server_address, application):
server = WSGIServer(server_address)
server.set_app(application)
return server
if __name__ == '__main__':
if len(sys.argv) < 2:
sys.exit('Provide a WSGI application object as module:callable')
app_path = sys.argv[1]
module, application = app_path.split(':')
module = __import__(module)
application = getattr(module, application)
httpd = make_server(SERVER_ADDRESS, application)
print('WSGIServer: Serving HTTP on port {port} ...\n'.format(port=PORT))
httpd.serve_forever()
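# Usage sketch (module and app names are hypothetical):
#   $ python webserver2.py flaskapp:app
#   WSGIServer: Serving HTTP on port 8888 ...
#   $ curl http://localhost:8888/hello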
|
Ruide/angr-dev | refs/heads/master | simuvex/simuvex/engines/vex/expressions/base.py | 2 | print '... Importing simuvex/engines/vex/expressions/base.py ...'
from angr.engines.vex.expressions.base import *
|
40223138/2015cd_0505 | refs/heads/master | static/Brython3.1.1-20150328-091302/Lib/http/__init__.py | 1383 | # This directory is a Python package.
|
redhat-cip/horizon | refs/heads/master | openstack_dashboard/api/rest/cinder.py | 2 | # Copyright 2015 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""API over the cinder service.
"""
from django.views import generic
from openstack_dashboard import api
from openstack_dashboard.api.rest import utils as rest_utils
from openstack_dashboard.api.rest import urls
@urls.register
class Volumes(generic.View):
"""API for cinder volumes.
"""
url_regex = r'cinder/volumes/$'
@rest_utils.ajax()
def get(self, request):
"""Get a detailed list of volumes associated with the current user's
project.
If invoked as an admin, you may set the GET parameter "all_projects"
to 'true'.
The following parameters may be passed in the GET request:
:param search_opts: includes options such as name, status, bootable
The listing result is an object with property "items".
"""
# TODO(clu_): when v2 pagination stuff in Cinder API merges
# (https://review.openstack.org/#/c/118450), handle here accordingly
if request.GET.get('all_projects') == 'true':
result = api.cinder.volume_list(request, {'all_tenants': 1})
else:
result = api.cinder.volume_list(
request,
search_opts=rest_utils.parse_filters_kwargs(request)[0]
)
return {'items': [u.to_dict() for u in result]}
@rest_utils.ajax(data_required=True)
def post(self, request):
volume = api.cinder.volume_create(
request,
size=request.DATA['size'],
name=request.DATA['name'],
description=request.DATA['description'],
volume_type=request.DATA['volume_type'],
snapshot_id=request.DATA['snapshot_id'],
metadata=request.DATA['metadata'],
image_id=request.DATA['image_id'],
availability_zone=request.DATA['availability_zone'],
source_volid=request.DATA['source_volid']
)
return rest_utils.CreatedResponse(
'/api/cinder/volumes/%s' % volume.id,
volume.to_dict()
)
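# Illustrative POST body for the handler above (field names mirror the
# request.DATA keys; values are made up):
#   {"size": 1, "name": "vol-1", "description": "test volume",
#    "volume_type": "lvmdriver-1", "snapshot_id": null, "metadata": {},
#    "image_id": null, "availability_zone": "nova", "source_volid": null}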
@urls.register
class Volume(generic.View):
"""API for cinder volume.
"""
url_regex = r'cinder/volumes/(?P<volume_id>[^/]+)/$'
@rest_utils.ajax()
def get(self, request, volume_id):
"""Get a single volume's details with the volume id.
The following parameters may be passed in the GET request:
:param volume_id: the id of the volume
The result is a volume object.
"""
return api.cinder.volume_get(request, volume_id).to_dict()
@urls.register
class VolumeTypes(generic.View):
"""API for volume types.
"""
url_regex = r'cinder/volumetypes/$'
@rest_utils.ajax()
def get(self, request):
"""Get a list of volume types.
The listing result is an object with the property "items".
"""
result = api.cinder.volume_type_list(request)
return {'items': [api.cinder.VolumeType(u).to_dict() for u in result]}
@urls.register
class VolumeType(generic.View):
"""API for getting a volume type.
"""
url_regex = r'cinder/volumetypes/(?P<volumetype_id>[^/]+)/$'
@rest_utils.ajax()
def get(self, request, volumetype_id):
"""Get a single volume type details with the volume type id.
The following parameters may be passed in the GET request:
:param volumetype_id: the id of the volume type
If 'default' is passed as the volumetype_id then
it returns the default volumetype
The result is a volume type object.
"""
if volumetype_id == 'default':
volumetype = api.cinder.volume_type_default(request)
else:
volumetype = api.cinder.volume_type_get(request, volumetype_id)
return api.cinder.VolumeType(volumetype).to_dict()
@urls.register
class VolumeSnapshots(generic.View):
"""API for cinder volume snapshots.
"""
url_regex = r'cinder/volumesnapshots/$'
@rest_utils.ajax()
def get(self, request):
"""Get a detailed list of volume snapshots associated with the current
user's project.
The listing result is an object with property "items".
"""
result = api.cinder.volume_snapshot_list(
request,
search_opts=rest_utils.parse_filters_kwargs(request)[0]
)
return {'items': [u.to_dict() for u in result]}
@urls.register
class Extensions(generic.View):
"""API for cinder extensions.
"""
url_regex = r'cinder/extensions/$'
@rest_utils.ajax()
def get(self, request):
"""Get a list of extensions.
The listing result is an object with property "items". Each item is
an extension.
Example GET:
http://localhost/api/cinder/extensions
"""
result = api.cinder.list_extensions(request)
return {'items': [{
'alias': e.alias,
'description': e.description,
'links': e.links,
'name': e.name,
'namespace': e.namespace,
'updated': e.updated
} for e in result]}
@urls.register
class QoSSpecs(generic.View):
url_regex = r'cinder/qosspecs/$'
@rest_utils.ajax()
def get(self, request):
result = api.cinder.qos_specs_list(request)
return {'items': [u.to_dict() for u in result]}
@urls.register
class TenantAbsoluteLimits(generic.View):
url_regex = r'cinder/tenantabsolutelimits/$'
@rest_utils.ajax()
def get(self, request):
return api.cinder.tenant_absolute_limits(request)
@urls.register
class Services(generic.View):
"""API for cinder services.
"""
url_regex = r'cinder/services/$'
@rest_utils.ajax()
def get(self, request):
"""Get a list of cinder services.
Will return HTTP 501 status code if the service_list extension is
not supported.
"""
if api.base.is_service_enabled(request, 'volume') and \
api.cinder.extension_supported(request, 'Services'):
result = api.cinder.service_list(request)
return {'items': [{
'binary': u.binary,
'host': u.host,
'zone': u.zone,
'updated_at': u.updated_at,
'status': u.status,
'state': u.state,
'id': idx + 1
} for idx, u in enumerate(result)]}
else:
raise rest_utils.AjaxError(501, '')
|
amosnier/python_for_kids | refs/heads/master | course_code/12_028_tkinter_canvas.py | 1 | import tkinter
tk = tkinter.Tk()
canvas = tkinter.Canvas(tk, width = 500, height = 500)
canvas.pack()
canvas.create_line(1, 1, 500, 500)
canvas.create_rectangle(10, 10, 50, 50)
canvas.create_rectangle(70, 10, 300, 50)
canvas.create_rectangle(10, 70, 50, 500)
tk.mainloop()
|
wantee/pocolm | refs/heads/master | scripts/text_to_int.py | 2 | #!/usr/bin/env python
# We're using Python 3.x style print but want it to work in Python 2.x, so:
from __future__ import print_function
import os
import argparse
import sys
parser = argparse.ArgumentParser(description="Transforms text data into integer form "
"using a symbol table, e.g. turns line 'hello there' into "
"'134 943'. There are a couple of special cases: any "
"word not in the word-list or equal to words numbered 0, 1 or 2 "
"(normally <eps>, <s> and </s>) are treated as out-of-vocabulary "
"words (OOV) and written as symbol 3 (normally '<unk>').",
epilog="e.g. text_to_int.py words.txt < text > int_text",
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("vocab_file",
help="Filename of vocabulary file, e.g. as produced by get_vocab.py")
args = parser.parse_args()
if not os.path.exists(args.vocab_file):
sys.exit("validate_vocab.py: Expected file {0} to exist".format(args.text_dir))
word_to_index = {}
f = open(args.vocab_file, "r")
for line in f:
try:
[word, index] = line.split()
word_to_index[word] = int(index)
except:
sys.exit("validate_vocab.py: bad line {0} in vocab file {1}".format(
line[:-1], args.vocab_file))
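# Illustrative vocab file contents (word and index per line; indices 0-3 are
# the special symbols described in the --help text, other values made up):
#   <eps> 0
#   <s> 1
#   </s> 2
#   <unk> 3
#   hello 134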
num_words_total = 0
num_words_oov = 0
num_words_forbidden = 0
for line in sys.stdin:
line_ints = []
for word in line.split():
num_words_total += 1
if word in word_to_index:
index = word_to_index[word]
if index <= 2:
num_words_forbidden += 1
# the next line means that when we encounter symbols <eps>, <s>
# or </s> in the text, we treat them the same as any
# unknown-word.
line_ints.append(str(3))
else:
line_ints.append(str(index))
else:
num_words_oov += 1
line_ints.append(str(3))
print(' '.join(line_ints))
print("text_to_int.py: converted {0} words, {1}% of which were OOV".format(
num_words_total, (100.0*num_words_oov)/num_words_total), file=sys.stderr)
forbidden_words = []
if (num_words_forbidden != 0):
for (word, index) in word_to_index.items():
if index <= 2:
forbidden_words.append(word)
if index == 3:
unk_word = word
print("text_to_int.py: warning: encountered forbidden symbols ({0}) {1} times; "
"converted them to {2}".format(",".join(forbidden_words),
num_words_forbidden, unk_word),
file=sys.stderr)
|
gw0/myhdl | refs/heads/master | myhdl/_ShadowSignal.py | 1 | # This file is part of the myhdl library, a Python package for using
# Python as a Hardware Description Language.
#
# Copyright (C) 2003-2011 Jan Decaluwe
#
# The myhdl library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 2.1 of the
# License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
""" Module that provides the ShadowSignal classes
"""
from __future__ import absolute_import
import warnings
from copy import deepcopy
from myhdl._compat import long
from myhdl._Signal import _Signal
from myhdl._Waiter import _SignalWaiter, _SignalTupleWaiter
from myhdl._intbv import intbv
from myhdl._simulator import _siglist
from myhdl._bin import bin
# shadow signals
class _ShadowSignal(_Signal):
__slots__ = ('_waiter', )
def __init__(self, val):
_Signal.__init__(self, val)
# self._driven = True # set this in conversion analyzer
# remove next attribute assignment
@_Signal.next.setter
def next(self, val):
raise AttributeError("ShadowSignals are readonly")
class _SliceSignal(_ShadowSignal):
__slots__ = ('_sig', '_left', '_right')
def __init__(self, sig, left, right=None):
### XXX error checks
if right is None:
_ShadowSignal.__init__(self, sig[left])
else:
_ShadowSignal.__init__(self, sig[left:right])
self._sig = sig
self._left = left
self._right = right
if right is None:
gen = self._genfuncIndex()
else:
gen = self._genfuncSlice()
self._waiter = _SignalWaiter(gen)
def _genfuncIndex(self):
sig, index = self._sig, self._left
set_next = _Signal.next.fset
while 1:
set_next(self, sig[index])
yield sig
def _genfuncSlice(self):
sig, left, right = self._sig, self._left, self._right
set_next = _Signal.next.fset
while 1:
set_next(self, sig[left:right])
yield sig
def _setName(self, hdl):
if self._right is None:
if hdl == 'Verilog':
self._name = "%s[%s]" % (self._sig._name, self._left)
else:
self._name = "%s(%s)" % (self._sig._name, self._left)
else:
if hdl == 'Verilog':
self._name = "%s[%s-1:%s]" % (self._sig._name, self._left, self._right)
else:
self._name = "%s(%s-1 downto %s)" % (self._sig._name, self._left, self._right)
def _markRead(self):
self._read = True
self._sig._read = True
def _markUsed(self):
self._used = True
self._sig._used = True
def toVerilog(self):
if self._right is None:
return "assign %s = %s[%s];" % (self._name, self._sig._name, self._left)
else:
return "assign %s = %s[%s-1:%s];" % (self._name, self._sig._name, self._left, self._right)
def toVHDL(self):
if self._right is None:
return "%s <= %s(%s);" % (self._name, self._sig._name, self._left)
else:
return "%s <= %s(%s-1 downto %s);" % (self._name, self._sig._name, self._left, self._right)
class ConcatSignal(_ShadowSignal):
__slots__ = ('_args', '_sigargs', '_initval')
def __init__(self, *args):
assert len(args) >= 2
self._args = args
self._sigargs = sigargs = []
nrbits = 0
val = 0
for a in args:
if isinstance(a, intbv):
w = a._nrbits
v = a._val
elif isinstance(a, _Signal):
sigargs.append(a)
w = a._nrbits
if isinstance(a._val, intbv):
v = a._val._val
else:
v = a._val
elif isinstance(a, bool):
w = 1
v = a
elif isinstance(a, str):
w = len(a)
v = long(a, 2)
else:
raise TypeError("ConcatSignal: inappropriate argument type: %s" \
% type(a))
nrbits += w
val = val << w | v & (long(1) << w)-1
self._initval = val
ini = intbv(val)[nrbits:]
_ShadowSignal.__init__(self, ini)
gen = self.genfunc()
self._waiter = _SignalTupleWaiter(gen)
def genfunc(self):
set_next = _Signal.next.fset
args = self._args
sigargs = self._sigargs
nrbits = self._nrbits
newval = intbv(self._initval)[nrbits:]
while 1:
hi = nrbits
for a in args:
if isinstance(a, bool):
w = 1
else:
w = len(a)
lo = hi - w
if a in sigargs:
newval[hi:lo] = a[:]
hi = lo
set_next(self, newval)
yield sigargs
def _markRead(self):
self._read = True
for s in self._sigargs:
s._markRead()
def _markUsed(self):
self._used = True
for s in self._sigargs:
s._markUsed()
def toVHDL(self):
lines = []
ini = intbv(self._initval)[self._nrbits:]
hi = self._nrbits
for a in self._args:
if isinstance(a, bool):
w = 1
else:
w = len(a)
lo = hi - w
if w == 1:
if isinstance(a, _Signal):
if a._type == bool: # isinstance(a._type , bool): <- doesn't work
lines.append("%s(%s) <= %s;" % (self._name, lo, a._name))
else:
lines.append("%s(%s) <= %s(0);" % (self._name, lo, a._name))
else:
lines.append("%s(%s) <= '%s';" % (self._name, lo, bin(ini[lo])))
else:
if isinstance(a, _Signal):
lines.append("%s(%s-1 downto %s) <= %s;" % (self._name, hi, lo, a._name))
else:
lines.append('%s(%s-1 downto %s) <= "%s";' % (self._name, hi, lo, bin(ini[hi:lo],w)))
hi = lo
return "\n".join(lines)
def toVerilog(self):
lines = []
ini = intbv(self._initval)[self._nrbits:]
hi = self._nrbits
for a in self._args:
if isinstance(a, bool):
w = 1
else:
w = len(a)
lo = hi - w
if w == 1:
if isinstance(a, _Signal):
if a._type == bool:
lines.append("assign %s[%s] = %s;" % (self._name, lo, a._name))
else:
lines.append("assign %s[%s] = %s[0];" % (self._name, lo, a._name))
else:
lines.append("assign %s[%s] = 'b%s;" % (self._name, lo, bin(ini[lo])))
else:
if isinstance(a, _Signal):
lines.append("assign %s[%s-1:%s] = %s;" % (self._name, hi, lo, a._name))
else:
lines.append("assign %s[%s-1:%s] = 'b%s;" % (self._name, hi, lo, bin(ini[hi:lo],w)))
hi = lo
return "\n".join(lines)
# Tristate signal
class BusContentionWarning(UserWarning):
pass
warnings.filterwarnings('always', r".*", BusContentionWarning)
# def Tristate(val, delay=None):
# """ Return a new Tristate(default or delay 0) or DelayedTristate """
# if delay is not None:
# if delay < 0:
# raise TypeError("Signal: delay should be >= 0")
# return _DelayedTristate(val, delay)
# else:
# return _Tristate(val)
def TristateSignal(val):
return _TristateSignal(val)
class _TristateSignal(_ShadowSignal):
__slots__ = ('_drivers', '_orival' )
def __init__(self, val):
self._drivers = []
# construct normally to set type / size info right
_ShadowSignal.__init__(self, val)
self._orival = deepcopy(val) # keep for drivers
# reset signal values to None
self._next = self._val = self._init = None
self._waiter = _SignalTupleWaiter(self._resolve())
def driver(self):
d = _TristateDriver(self)
self._drivers.append(d)
return d
def _resolve(self):
# set_next = _ShadowSignal._set_next
senslist = self._drivers
while 1:
yield senslist
res = None
for d in senslist:
if res is None:
res = d._val
elif d._val is not None:
warnings.warn("Bus contention", category=BusContentionWarning)
res = None
break
self._next = res
_siglist.append(self)
def toVerilog(self):
lines = []
for d in self._drivers:
if d._driven:
lines.append("assign %s = %s;" % (self._name, d._name))
return "\n".join(lines)
def toVHDL(self):
lines = []
for d in self._drivers:
if d._driven:
lines.append("%s <= %s;" % (self._name, d._name))
return "\n".join(lines)
class _TristateDriver(_Signal):
__slots__ = ('_sig',)
def __init__(self, sig):
_Signal.__init__(self, sig._orival)
# reset signal values to None
self._next = self._val = self._init = None
self._sig = sig
@_Signal.next.setter
def next(self, val):
if isinstance(val, _Signal):
val = val._val
if val is None:
self._next = None
else:
# restore original value to cater for intbv handler
self._next = self._sig._orival
self._setNextVal(val)
_siglist.append(self)
|
mbauskar/omnitech-demo-frappe | refs/heads/develop | frappe/custom/doctype/custom_field/__init__.py | 1829 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
|
Yoshiofthewire/CrunchyXBMC-Legacy | refs/heads/master | script.module.cryptopy/lib/crypto/cipher/aes_cbc_test.py | 3 | #! /usr/bin/env python
# -*- coding: iso-8859-1 -*-
""" crypto.cipher.cbc_test
Tests for cbc encryption, uses AES for base algorithm
Copyright (c) 2002 by Paul A. Lambert
Read LICENSE.txt for license information.
"""
from crypto.cipher.aes_cbc import AES_CBC
from crypto.cipher.base import noPadding, padWithPadLen
import unittest
from binascii_plus import a2b_hex, b2a_hex, a2b_p
class AES_CBC_autoIV(unittest.TestCase):
def testAutoIV(self):
k = a2b_hex('2b7e151628aed2a6abf7158809cf4f3c')
alg = AES_CBC(key=k, padding=noPadding())
pt = a2b_hex('6bc1bee22e409f96e93d7e117393172a')
ct = alg.encrypt(pt)
dct = alg.decrypt(ct)
self.assertEqual( dct, pt ) # 'AES_CBC auto IV error'
def testAutoIVandPadding(self):
k = a2b_hex('2b7e151628aed2a6abf7158809cf4f3c')
alg = AES_CBC(key=k) # should default to padWithPadLen
pt = a2b_hex('6bc1bee22e409f96e93d7e117393172a')
ct = alg.encrypt(pt)
dct = alg.decrypt(ct)
self.assertEqual( dct, pt ) # 'AES_CBC auto IV and pad error'
def testNonDupIV(self):
""" Test to ensure that two instances of CBC don't get duplicate IV """
k = a2b_hex('2b7e151628aed2a6abf7158809cf4f3c')
alg1 = AES_CBC(k)
alg2 = AES_CBC(k)
pt = a2b_hex('6bc1bee22e409f96e93d7e117393172a')
ct1 = alg1.encrypt(pt)
ct2 = alg2.encrypt(pt)
        assert ct1 != ct2, 'AES_CBC dup IV error'
class AES_CBC128_TestVectors(unittest.TestCase):
""" Test AES_CBC128 algorithm using know values """
def testKnowValues(self):
""" Test using vectors from NIST """
def CBCtestVector(key,iv,pt,kct):
""" CBC test vectors using AES algorithm """
key,iv,pt,kct = a2b_hex(key),a2b_hex(iv),a2b_p(pt),a2b_p(kct)
alg = AES_CBC(key, padding=noPadding())
self.assertEqual( alg.encrypt(pt,iv=iv), kct )
self.assertEqual( alg.decrypt(iv+kct), pt )
# http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf page 34
CBCtestVector( key = '2b7e151628aed2a6abf7158809cf4f3c',
iv = '000102030405060708090a0b0c0d0e0f',
pt = '6bc1bee22e409f96e93d7e117393172a',
kct = '7649abac8119b246cee98e9b12e9197d')
# four blocks of data
CBCtestVector( key = '2b7e151628aed2a6abf7158809cf4f3c',
iv = '000102030405060708090a0b0c0d0e0f',
pt = """6bc1bee22e409f96e93d7e117393172a
ae2d8a571e03ac9c9eb76fac45af8e51
30c81c46a35ce411e5fbc1191a0a52ef
f69f2445df4f9b17ad2b417be66c3710""",
kct = """7649abac8119b246cee98e9b12e9197d
5086cb9b507219ee95db113a917678b2
73bed6b8e3c1743b7116e69e22229516
3ff1caa1681fac09120eca307586e1a7""")
# Make this test module runnable from the command prompt
if __name__ == "__main__":
unittest.main()
|
IKholopov/HackUPC2017 | refs/heads/master | hackupc/env/lib/python3.5/site-packages/django/http/cookie.py | 119 | from __future__ import unicode_literals
import sys
from django.utils import six
from django.utils.encoding import force_str
from django.utils.six.moves import http_cookies
# http://bugs.python.org/issue2193 is fixed in Python 3.3+.
_cookie_allows_colon_in_names = six.PY3
# Cookie pickling bug is fixed in Python 2.7.9 and Python 3.4.3+
# http://bugs.python.org/issue22775
cookie_pickles_properly = (
(sys.version_info[:2] == (2, 7) and sys.version_info >= (2, 7, 9)) or
sys.version_info >= (3, 4, 3)
)
if _cookie_allows_colon_in_names and cookie_pickles_properly:
SimpleCookie = http_cookies.SimpleCookie
else:
Morsel = http_cookies.Morsel
class SimpleCookie(http_cookies.SimpleCookie):
if not cookie_pickles_properly:
def __setitem__(self, key, value):
# Apply the fix from http://bugs.python.org/issue22775 where
# it's not fixed in Python itself
if isinstance(value, Morsel):
# allow assignment of constructed Morsels (e.g. for pickling)
dict.__setitem__(self, key, value)
else:
super(SimpleCookie, self).__setitem__(key, value)
if not _cookie_allows_colon_in_names:
def load(self, rawdata):
self.bad_cookies = set()
if isinstance(rawdata, six.text_type):
rawdata = force_str(rawdata)
super(SimpleCookie, self).load(rawdata)
for key in self.bad_cookies:
del self[key]
# override private __set() method:
        # (needed for using our Morsel, and for laxness with CookieError)
def _BaseCookie__set(self, key, real_value, coded_value):
key = force_str(key)
try:
M = self.get(key, Morsel())
M.set(key, real_value, coded_value)
dict.__setitem__(self, key, M)
except http_cookies.CookieError:
if not hasattr(self, 'bad_cookies'):
self.bad_cookies = set()
self.bad_cookies.add(key)
dict.__setitem__(self, key, http_cookies.Morsel())
def parse_cookie(cookie):
"""
Return a dictionary parsed from a `Cookie:` header string.
"""
cookiedict = {}
if six.PY2:
cookie = force_str(cookie)
for chunk in cookie.split(str(';')):
if str('=') in chunk:
key, val = chunk.split(str('='), 1)
else:
# Assume an empty name per
# https://bugzilla.mozilla.org/show_bug.cgi?id=169091
key, val = str(''), chunk
key, val = key.strip(), val.strip()
if key or val:
# unquote using Python's algorithm.
cookiedict[key] = http_cookies._unquote(val)
return cookiedict
|
joachimmetz/plaso | refs/heads/main | plaso/containers/reports.py | 3 | # -*- coding: utf-8 -*-
"""Report related attribute container definitions."""
from plaso.containers import interface
from plaso.containers import manager
class AnalysisReport(interface.AttributeContainer):
"""Analysis report attribute container.
Attributes:
analysis_counter (collections.Counter): counter of analysis results, for
example number of events analyzed and tagged.
event_filter (str): event filter expression that was used when the analysis
plugin was run.
filter_string (str): deprecated variant of event_filter.
plugin_name (str): name of the analysis plugin that generated the report.
report_dict (dict[str]): ???
text (str): report text.
time_compiled (int): timestamp of the date and time the report was compiled.
"""
CONTAINER_TYPE = 'analysis_report'
def __init__(self, plugin_name=None, text=None):
"""Initializes the analysis report.
Args:
plugin_name (Optional[str]): name of the analysis plugin that generated
the report.
text (Optional[str]): report text.
"""
super(AnalysisReport, self).__init__()
self.analysis_counter = None
self.event_filter = None
# TODO: filter_string is deprecated remove at some point.
self.filter_string = None
self.plugin_name = plugin_name
self.report_dict = None
# TODO: rename text to body?
self.text = text
self.time_compiled = None
def CopyToDict(self):
"""Copies the attribute container to a dictionary.
Returns:
dict[str, object]: attribute values per name.
"""
dictionary = {}
for attribute_name, attribute_value in self.GetAttributes():
if attribute_value is None:
continue
dictionary[attribute_name] = attribute_value
return dictionary
manager.AttributeContainersManager.RegisterAttributeContainer(AnalysisReport)
|
wwu-numerik/scripts | refs/heads/master | python/misc_csv/pivot.py | 1 | #!/usr/bin/env python
import csv
from itertools import izip
import sys
import os
first = sys.argv[1]
merged = '{}_merged.csv'.format(first[:-4])
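# For each CSV named on the command line: take its header row and first data
# row, tag the row with the directory it came from, and append it to one
# merged CSV; finally transpose the merged table so columns become rows.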
for i,fn in enumerate(sys.argv[1:]):
a = list(csv.reader(open(fn, "rb")))
base = os.path.dirname(fn)
a[0].append('file')
a[1].append(base)
    output = '{}_pivot.csv'.format(fn[:-4])
if i == 0:
csv.writer(open(merged, "wb")).writerow(a[0])
csv.writer(open(merged, "ab")).writerow(a[1])
a = izip(*csv.reader(open(merged, "rb")))
output = '{}_merged_pivot.csv'.format(first[:-4])
csv.writer(open(output, "wb")).writerows(a)
print(output)
print(merged) |
Kazade/NeHe-Website | refs/heads/master | google_appengine/lib/django-1.2/django/contrib/admindocs/utils.py | 314 | "Misc. utility functions/classes for admin documentation generator."
import re
from email.Parser import HeaderParser
from email.Errors import HeaderParseError
from django.utils.safestring import mark_safe
from django.core.urlresolvers import reverse
from django.utils.encoding import smart_str
try:
import docutils.core
import docutils.nodes
import docutils.parsers.rst.roles
except ImportError:
docutils_is_available = False
else:
docutils_is_available = True
def trim_docstring(docstring):
"""
Uniformly trims leading/trailing whitespace from docstrings.
Based on http://www.python.org/peps/pep-0257.html#handling-docstring-indentation
"""
if not docstring or not docstring.strip():
return ''
# Convert tabs to spaces and split into lines
lines = docstring.expandtabs().splitlines()
indent = min([len(line) - len(line.lstrip()) for line in lines if line.lstrip()])
trimmed = [lines[0].lstrip()] + [line[indent:].rstrip() for line in lines[1:]]
return "\n".join(trimmed).strip()
def parse_docstring(docstring):
"""
Parse out the parts of a docstring. Returns (title, body, metadata).
"""
docstring = trim_docstring(docstring)
parts = re.split(r'\n{2,}', docstring)
title = parts[0]
if len(parts) == 1:
body = ''
metadata = {}
else:
parser = HeaderParser()
try:
metadata = parser.parsestr(parts[-1])
except HeaderParseError:
metadata = {}
body = "\n\n".join(parts[1:])
else:
metadata = dict(metadata.items())
if metadata:
body = "\n\n".join(parts[1:-1])
else:
body = "\n\n".join(parts[1:])
return title, body, metadata
def parse_rst(text, default_reference_context, thing_being_parsed=None):
"""
Convert the string from reST to an XHTML fragment.
"""
overrides = {
'doctitle_xform' : True,
        'initial_header_level' : 3,
"default_reference_context" : default_reference_context,
"link_base" : reverse('django-admindocs-docroot').rstrip('/')
}
if thing_being_parsed:
thing_being_parsed = smart_str("<%s>" % thing_being_parsed)
parts = docutils.core.publish_parts(text, source_path=thing_being_parsed,
destination_path=None, writer_name='html',
settings_overrides=overrides)
return mark_safe(parts['fragment'])
#
# reST roles
#
ROLES = {
'model' : '%s/models/%s/',
'view' : '%s/views/%s/',
'template' : '%s/templates/%s/',
'filter' : '%s/filters/#%s',
'tag' : '%s/tags/#%s',
}
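# Each role substitutes its lowercased text into the matching URL pattern,
# e.g. :model:`auth.User` links to <link_base>/models/auth.user/.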
def create_reference_role(rolename, urlbase):
def _role(name, rawtext, text, lineno, inliner, options=None, content=None):
if options is None: options = {}
if content is None: content = []
node = docutils.nodes.reference(rawtext, text, refuri=(urlbase % (inliner.document.settings.link_base, text.lower())), **options)
return [node], []
docutils.parsers.rst.roles.register_canonical_role(rolename, _role)
def default_reference_role(name, rawtext, text, lineno, inliner, options=None, content=None):
if options is None: options = {}
if content is None: content = []
context = inliner.document.settings.default_reference_context
node = docutils.nodes.reference(rawtext, text, refuri=(ROLES[context] % (inliner.document.settings.link_base, text.lower())), **options)
return [node], []
if docutils_is_available:
docutils.parsers.rst.roles.register_canonical_role('cmsreference', default_reference_role)
docutils.parsers.rst.roles.DEFAULT_INTERPRETED_ROLE = 'cmsreference'
for name, urlbase in ROLES.items():
create_reference_role(name, urlbase)
|
duguhaotian/os | refs/heads/master | tests/integration/setup.py | 9 | from distutils.core import setup
setup(
name='RancherOSIntegrationTests',
version='0.1',
packages=[
'rancherostest'
],
license='ASL 2.0',
long_description=open('README.txt').read(),
)
|
dragon-skeleton/mcache-client | refs/heads/master | powertest/lib/results.py | 3 | #!/usr/bin/env python
#
# Copyright (c) 2010 Corey Goldberg (corey@goldb.org)
# License: GNU LGPLv3
#
# This file is part of Multi-Mechanize
import time
from collections import defaultdict
import graph
import reportwriter
def output_results(results_dir, results_file, run_time, rampup, ts_interval, user_group_configs=None):
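    # Builds the HTML report: a run summary, then per-interval response-time
    # tables and graphs for all transactions, then the same breakdown for
    # each custom timer found in the results file.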
report = reportwriter.Report(results_dir)
results = Results(results_dir + results_file, run_time)
print 'transactions: %i' % results.total_transactions
print 'errors: %i' % results.total_errors
print ''
print 'test start: %s' % results.start_datetime
print 'test finish: %s' % results.finish_datetime
print ''
report.write_line('<h1>Performance Results Report</h1>')
report.write_line('<h2>Summary</h2>')
report.write_line('<div class="summary">')
report.write_line('<b>transactions:</b> %d<br />' % results.total_transactions)
report.write_line('<b>errors:</b> %d<br />' % results.total_errors)
report.write_line('<b>run time:</b> %d secs<br />' % run_time)
report.write_line('<b>rampup:</b> %d secs<br /><br />' % rampup)
report.write_line('<b>test start:</b> %s<br />' % results.start_datetime)
report.write_line('<b>test finish:</b> %s<br /><br />' % results.finish_datetime)
report.write_line('<b>time-series interval:</b> %s secs<br /><br /><br />' % ts_interval)
if user_group_configs:
report.write_line('<b>workload configuration:</b><br /><br />')
report.write_line('<table>')
report.write_line('<tr><th>group name</th><th>threads</th><th>script name</th></tr>')
for user_group_config in user_group_configs:
report.write_line('<tr><td>%s</td><td>%d</td><td>%s</td></tr>' %
(user_group_config.name, user_group_config.num_threads, user_group_config.script_file))
report.write_line('</table>')
report.write_line('</div>')
report.write_line('<h2>All Transactions</h2>')
# all transactions - response times
trans_timer_points = [] # [elapsed, timervalue]
trans_timer_vals = []
for resp_stats in results.resp_stats_list:
t = (resp_stats.elapsed_time, resp_stats.trans_time)
trans_timer_points.append(t)
trans_timer_vals.append(resp_stats.trans_time)
graph.resp_graph_raw(trans_timer_points, 'All_Transactions_response_times.png', results_dir)
report.write_line('<h3>Transaction Response Summary (secs)</h3>')
report.write_line('<table>')
report.write_line('<tr><th>count</th><th>min</th><th>avg</th><th>80pct</th><th>90pct</th><th>95pct</th><th>max</th><th>stdev</th></tr>')
report.write_line('<tr><td>%i</td><td>%.3f</td><td>%.3f</td><td>%.3f</td><td>%.3f</td><td>%.3f</td><td>%.3f</td><td>%.3f</td></tr>' % (
results.total_transactions,
min(trans_timer_vals),
average(trans_timer_vals),
percentile(trans_timer_vals, 80),
percentile(trans_timer_vals, 90),
percentile(trans_timer_vals, 95),
max(trans_timer_vals),
standard_dev(trans_timer_vals),
))
report.write_line('</table>')
# all transactions - interval details
avg_resptime_points = {} # {intervalnumber: avg_resptime}
percentile_80_resptime_points = {} # {intervalnumber: 80pct_resptime}
percentile_90_resptime_points = {} # {intervalnumber: 90pct_resptime}
interval_secs = ts_interval
splat_series = split_series(trans_timer_points, interval_secs)
report.write_line('<h3>Interval Details (secs)</h3>')
report.write_line('<table>')
report.write_line('<tr><th>interval</th><th>count</th><th>rate</th><th>min</th><th>avg</th><th>80pct</th><th>90pct</th><th>95pct</th><th>max</th><th>stdev</th></tr>')
for i, bucket in enumerate(splat_series):
interval_start = int((i + 1) * interval_secs)
cnt = len(bucket)
if cnt == 0:
report.write_line('<tr><td>%i</td><td>0</td><td>0</td><td>N/A</td><td>N/A</td><td>N/A</td><td>N/A</td><td>N/A</td><td>N/A</td><td>N/A</td></tr>' % (i + 1))
else:
rate = cnt / float(interval_secs)
mn = min(bucket)
avg = average(bucket)
pct_80 = percentile(bucket, 80)
pct_90 = percentile(bucket, 90)
pct_95 = percentile(bucket, 95)
mx = max(bucket)
stdev = standard_dev(bucket)
report.write_line('<tr><td>%i</td><td>%i</td><td>%.2f</td><td>%.3f</td><td>%.3f</td><td>%.3f</td><td>%.3f</td><td>%.3f</td><td>%.3f</td><td>%.3f</td></tr>' % (i + 1, cnt, rate, mn, avg, pct_80, pct_90, pct_95, mx, stdev))
avg_resptime_points[interval_start] = avg
percentile_80_resptime_points[interval_start] = pct_80
percentile_90_resptime_points[interval_start] = pct_90
report.write_line('</table>')
graph.resp_graph(avg_resptime_points, percentile_80_resptime_points, percentile_90_resptime_points, 'All_Transactions_response_times_intervals.png', results_dir)
report.write_line('<h3>Graphs</h3>')
report.write_line('<h4>Response Time: %s sec time-series</h4>' % ts_interval)
report.write_line('<img src="All_Transactions_response_times_intervals.png"></img>')
report.write_line('<h4>Response Time: raw data (all points)</h4>')
report.write_line('<img src="All_Transactions_response_times.png"></img>')
report.write_line('<h4>Throughput: 5 sec time-series</h4>')
report.write_line('<img src="All_Transactions_throughput.png"></img>')
# all transactions - throughput
throughput_points = {} # {intervalnumber: numberofrequests}
interval_secs = 5.0
splat_series = split_series(trans_timer_points, interval_secs)
for i, bucket in enumerate(splat_series):
throughput_points[int((i + 1) * interval_secs)] = (len(bucket) / interval_secs)
graph.tp_graph(throughput_points, 'All_Transactions_throughput.png', results_dir)
# custom timers
for timer_name in sorted(results.uniq_timer_names):
custom_timer_vals = []
custom_timer_points = []
for resp_stats in results.resp_stats_list:
try:
val = resp_stats.custom_timers[timer_name]
custom_timer_points.append((resp_stats.elapsed_time, val))
custom_timer_vals.append(val)
except KeyError:
pass
graph.resp_graph_raw(custom_timer_points, timer_name + '_response_times.png', results_dir)
throughput_points = {} # {intervalnumber: numberofrequests}
interval_secs = 5.0
splat_series = split_series(custom_timer_points, interval_secs)
for i, bucket in enumerate(splat_series):
throughput_points[int((i + 1) * interval_secs)] = (len(bucket) / interval_secs)
graph.tp_graph(throughput_points, timer_name + '_throughput.png', results_dir)
report.write_line('<hr />')
report.write_line('<h2>Custom Timer: %s</h2>' % timer_name)
report.write_line('<h3>Timer Summary (secs)</h3>')
report.write_line('<table>')
report.write_line('<tr><th>count</th><th>min</th><th>avg</th><th>80pct</th><th>90pct</th><th>95pct</th><th>max</th><th>stdev</th></tr>')
report.write_line('<tr><td>%i</td><td>%.3f</td><td>%.3f</td><td>%.3f</td><td>%.3f</td><td>%.3f</td><td>%.3f</td><td>%.3f</td></tr>' % (
len(custom_timer_vals),
min(custom_timer_vals),
average(custom_timer_vals),
percentile(custom_timer_vals, 80),
percentile(custom_timer_vals, 90),
percentile(custom_timer_vals, 95),
max(custom_timer_vals),
standard_dev(custom_timer_vals)
))
report.write_line('</table>')
# custom timers - interval details
avg_resptime_points = {} # {intervalnumber: avg_resptime}
percentile_80_resptime_points = {} # {intervalnumber: 80pct_resptime}
percentile_90_resptime_points = {} # {intervalnumber: 90pct_resptime}
interval_secs = ts_interval
splat_series = split_series(custom_timer_points, interval_secs)
report.write_line('<h3>Interval Details (secs)</h3>')
report.write_line('<table>')
report.write_line('<tr><th>interval</th><th>count</th><th>rate</th><th>min</th><th>avg</th><th>80pct</th><th>90pct</th><th>95pct</th><th>max</th><th>stdev</th></tr>')
for i, bucket in enumerate(splat_series):
interval_start = int((i + 1) * interval_secs)
cnt = len(bucket)
if cnt == 0:
report.write_line('<tr><td>%i</td><td>0</td><td>0</td><td>N/A</td><td>N/A</td><td>N/A</td><td>N/A</td><td>N/A</td><td>N/A</td><td>N/A</td></tr>' % (i + 1))
else:
rate = cnt / float(interval_secs)
mn = min(bucket)
avg = average(bucket)
pct_80 = percentile(bucket, 80)
pct_90 = percentile(bucket, 90)
pct_95 = percentile(bucket, 95)
mx = max(bucket)
stdev = standard_dev(bucket)
report.write_line('<tr><td>%i</td><td>%i</td><td>%.2f</td><td>%.3f</td><td>%.3f</td><td>%.3f</td><td>%.3f</td><td>%.3f</td><td>%.3f</td><td>%.3f</td></tr>' % (i + 1, cnt, rate, mn, avg, pct_80, pct_90, pct_95, mx, stdev))
avg_resptime_points[interval_start] = avg
percentile_80_resptime_points[interval_start] = pct_80
percentile_90_resptime_points[interval_start] = pct_90
report.write_line('</table>')
graph.resp_graph(avg_resptime_points, percentile_80_resptime_points, percentile_90_resptime_points, timer_name + '_response_times_intervals.png', results_dir)
report.write_line('<h3>Graphs</h3>')
report.write_line('<h4>Response Time: %s sec time-series</h4>' % ts_interval)
report.write_line('<img src="%s_response_times_intervals.png"></img>' % timer_name)
report.write_line('<h4>Response Time: raw data (all points)</h4>')
report.write_line('<img src="%s_response_times.png"></img>' % timer_name)
report.write_line('<h4>Throughput: 5 sec time-series</h4>')
report.write_line('<img src="%s_throughput.png"></img>' % timer_name)
## user group times
#for user_group_name in sorted(results.uniq_user_group_names):
# ug_timer_vals = []
# for resp_stats in results.resp_stats_list:
# if resp_stats.user_group_name == user_group_name:
# ug_timer_vals.append(resp_stats.trans_time)
# print user_group_name
# print 'min: %.3f' % min(ug_timer_vals)
# print 'avg: %.3f' % average(ug_timer_vals)
# print '80pct: %.3f' % percentile(ug_timer_vals, 80)
# print '90pct: %.3f' % percentile(ug_timer_vals, 90)
# print '95pct: %.3f' % percentile(ug_timer_vals, 95)
# print 'max: %.3f' % max(ug_timer_vals)
# print ''
report.write_line('<hr />')
report.write_closing_html()
class Results(object):
def __init__(self, results_file_name, run_time):
self.results_file_name = results_file_name
self.run_time = run_time
self.total_transactions = 0
self.total_errors = 0
self.uniq_timer_names = set()
self.uniq_user_group_names = set()
self.resp_stats_list = self.__parse_file()
self.epoch_start = self.resp_stats_list[0].epoch_secs
self.epoch_finish = self.resp_stats_list[-1].epoch_secs
self.start_datetime = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(self.epoch_start))
self.finish_datetime = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(self.epoch_finish))
def __parse_file(self):
f = open(self.results_file_name, 'rb')
resp_stats_list = []
for line in f:
fields = line.strip().split(',')
request_num = int(fields[0])
elapsed_time = float(fields[1])
epoch_secs = int(fields[2])
user_group_name = fields[3]
trans_time = float(fields[4])
error = fields[5]
self.uniq_user_group_names.add(user_group_name)
custom_timers = {}
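            # The remaining fields hold the repr of the custom timers dict,
            # split apart by the CSV comma delimiter; rejoin them, strip the
            # braces, and recover name/value pairs from the quote-delimited
            # chunks (chunks containing ':' are the float values).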
timers_string = ''.join(fields[6:]).replace('{', '').replace('}', '')
splat = timers_string.split("'")[1:]
timers = []
vals = []
for x in splat:
if ':' in x:
x = float(x.replace(': ', ''))
vals.append(x)
else:
timers.append(x)
self.uniq_timer_names.add(x)
for timer, val in zip(timers, vals):
custom_timers[timer] = val
r = ResponseStats(request_num, elapsed_time, epoch_secs, user_group_name, trans_time, error, custom_timers)
if elapsed_time < self.run_time: # drop all times that appear after the last request was sent (incomplete interval)
resp_stats_list.append(r)
if error != '':
self.total_errors += 1
self.total_transactions += 1
return resp_stats_list
class ResponseStats(object):
def __init__(self, request_num, elapsed_time, epoch_secs, user_group_name, trans_time, error, custom_timers):
self.request_num = request_num
self.elapsed_time = elapsed_time
self.epoch_secs = epoch_secs
self.user_group_name = user_group_name
self.trans_time = trans_time
self.error = error
self.custom_timers = custom_timers
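# Bucket (elapsed_time, value) pairs into fixed-width time intervals measured
# from the first point's timestamp; returns one list of values per interval.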
def split_series(points, interval):
offset = points[0][0]
maxval = int((points[-1][0] - offset) // interval)
vals = defaultdict(list)
for key, value in points:
vals[(key - offset) // interval].append(value)
series = [vals[i] for i in xrange(maxval + 1)]
return series
def average(seq):
avg = (float(sum(seq)) / len(seq))
return avg
def standard_dev(seq):
avg = average(seq)
sdsq = sum([(i - avg) ** 2 for i in seq])
try:
stdev = (sdsq / (len(seq) - 1)) ** .5
except ZeroDivisionError:
stdev = 0
return stdev
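# Simple percentile: sorts the sequence in place and returns the element at
# index int(len(seq) * percentile / 100). Note the caller's list is mutated.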
def percentile(seq, percentile):
i = int(len(seq) * (percentile / 100.0))
seq.sort()
return seq[i]
if __name__ == '__main__':
output_results('./', 'results.csv', 60, 30, 10)
|
rjschof/gem5 | refs/heads/master | util/stats/profile.py | 87 | # Copyright (c) 2005 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Nathan Binkert
from orderdict import orderdict
import output
class FileData(dict):
def __init__(self, filename):
self.filename = filename
fd = file(filename)
current = []
for line in fd:
line = line.strip()
if line.startswith('>>>'):
current = []
self[line[3:]] = current
else:
current.append(line)
fd.close()
class RunData(dict):
def __init__(self, filename):
self.filename = filename
def __getattribute__(self, attr):
if attr == 'total':
total = 0.0
for value in self.itervalues():
total += value
return total
if attr == 'filedata':
return FileData(self.filename)
if attr == 'maxsymlen':
return max([ len(sym) for sym in self.iterkeys() ])
return super(RunData, self).__getattribute__(attr)
def display(self, output=None, limit=None, maxsymlen=None):
if not output:
import sys
output = sys.stdout
elif isinstance(output, str):
output = file(output, 'w')
total = float(self.total)
# swap (string,count) order so we can sort on count
symbols = [ (count,name) for name,count in self.iteritems() ]
symbols.sort(reverse=True)
if limit is not None:
symbols = symbols[:limit]
if not maxsymlen:
maxsymlen = self.maxsymlen
symbolf = "%-" + str(maxsymlen + 1) + "s %.2f%%"
for number,name in symbols:
print >>output, symbolf % (name, 100.0 * (float(number) / total))
class PCData(RunData):
def __init__(self, filename=None, categorize=None, showidle=True):
        super(PCData, self).__init__(filename)
filedata = self.filedata['PC data']
for line in filedata:
(symbol, count) = line.split()
if symbol == "0x0":
continue
count = int(count)
if categorize is not None:
category = categorize(symbol)
if category is None:
category = 'other'
elif category == 'idle' and not showidle:
continue
self[category] = count
class FuncNode(object):
def __new__(cls, filedata=None):
if filedata is None:
return super(FuncNode, cls).__new__(cls)
nodes = {}
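        # Each 'function data' line is "<id> <symbol> <count> <child id>...",
        # with ids in hex; node 0 is the root of the reconstructed call tree.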
for line in filedata['function data']:
data = line.split(' ')
node_id = long(data[0], 16)
node = FuncNode()
node.symbol = data[1]
if node.symbol == '':
node.symbol = 'unknown'
node.count = long(data[2])
node.children = [ long(child, 16) for child in data[3:] ]
nodes[node_id] = node
for node in nodes.itervalues():
children = []
for cid in node.children:
child = nodes[cid]
children.append(child)
child.parent = node
node.children = tuple(children)
if not nodes:
print filedata.filename
print nodes
return nodes[0]
def total(self):
total = self.count
for child in self.children:
total += child.total()
return total
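    # Fold the subtree rooted at this node into `dict`: a categorized node
    # claims its whole subtree (minus any deeper categorized subtrees), while
    # nodes outside every category record their own inclusive totals.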
def aggregate(self, dict, categorize, incategory):
category = None
if categorize:
category = categorize(self.symbol)
total = self.count
for child in self.children:
total += child.aggregate(dict, categorize, category or incategory)
if category:
dict[category] = dict.get(category, 0) + total
return 0
        elif not incategory:
            dict[self.symbol] = dict.get(self.symbol, 0) + total
            return total
        else:
            return total
def dump(self):
kids = [ child.symbol for child in self.children]
print '%s %d <%s>' % (self.symbol, self.count, ', '.join(kids))
for child in self.children:
child.dump()
def _dot(self, dot, threshold, categorize, total):
from pydot import Dot, Edge, Node
self.dot_node = None
value = self.total() * 100.0 / total
if value < threshold:
return
if categorize:
category = categorize(self.symbol)
if category and category != 'other':
return
label = '%s %.2f%%' % (self.symbol, value)
self.dot_node = Node(self, label=label)
dot.add_node(self.dot_node)
for child in self.children:
child._dot(dot, threshold, categorize, total)
if child.dot_node is not None:
dot.add_edge(Edge(self, child))
def _cleandot(self):
for child in self.children:
child._cleandot()
self.dot_node = None
del self.__dict__['dot_node']
def dot(self, dot, threshold=0.1, categorize=None):
self._dot(dot, threshold, categorize, self.total())
self._cleandot()
class FuncData(RunData):
def __init__(self, filename, categorize=None):
super(FuncData, self).__init__(filename)
tree = self.tree
tree.aggregate(self, categorize, incategory=False)
self.total = tree.total()
def __getattribute__(self, attr):
if attr == 'tree':
return FuncNode(self.filedata)
return super(FuncData, self).__getattribute__(attr)
def displayx(self, output=None, maxcount=None):
if output is None:
import sys
output = sys.stdout
items = [ (val,key) for key,val in self.iteritems() ]
items.sort(reverse=True)
for val,key in items:
if maxcount is not None:
if maxcount == 0:
return
maxcount -= 1
percent = val * 100.0 / self.total
print >>output, '%-30s %8s' % (key, '%3.2f%%' % percent)
class Profile(object):
# This list controls the order of values in stacked bar data output
default_categories = [ 'interrupt',
'driver',
'stack',
'buffer',
'copy',
'syscall',
'user',
'other',
'idle']
def __init__(self, datatype, categorize=None):
categories = Profile.default_categories
self.datatype = datatype
self.categorize = categorize
self.data = {}
self.categories = categories[:]
self.rcategories = categories[:]
self.rcategories.reverse()
self.cpu = 0
# Read in files
def inputdir(self, directory):
import os, os.path, re
from os.path import expanduser, join as joinpath
directory = expanduser(directory)
label_ex = re.compile(r'profile\.(.*).dat')
for root,dirs,files in os.walk(directory):
for name in files:
match = label_ex.match(name)
if not match:
continue
filename = joinpath(root, name)
prefix = os.path.commonprefix([root, directory])
dirname = root[len(prefix)+1:]
data = self.datatype(filename, self.categorize)
self.setdata(dirname, match.group(1), data)
def setdata(self, run, cpu, data):
if run not in self.data:
self.data[run] = {}
if cpu in self.data[run]:
raise AttributeError, \
'data already stored for run %s and cpu %s' % (run, cpu)
self.data[run][cpu] = data
def getdata(self, run, cpu):
try:
return self.data[run][cpu]
except KeyError:
print run, cpu
return None
def alldata(self):
for run,cpus in self.data.iteritems():
for cpu,data in cpus.iteritems():
yield run,cpu,data
def get(self, job, stat, system=None):
        if system is None and hasattr(job, 'system'):
system = job.system
if system is None:
raise AttributeError, 'The job must have a system set'
cpu = '%s.run%d' % (system, self.cpu)
data = self.getdata(str(job), cpu)
if not data:
return None
values = []
for category in self.categories:
val = float(data.get(category, 0.0))
if val < 0.0:
raise ValueError, 'value is %f' % val
values.append(val)
total = sum(values)
return [ v / total * 100.0 for v in values ]
def dump(self):
for run,cpu,data in self.alldata():
print 'run %s, cpu %s' % (run, cpu)
data.dump()
print
def write_dot(self, threshold, jobfile=None, jobs=None):
import pydot
if jobs is None:
jobs = [ job for job in jobfile.jobs() ]
for job in jobs:
cpu = '%s.run%d' % (job.system, self.cpu)
symbols = self.getdata(job.name, cpu)
if not symbols:
continue
dot = pydot.Dot()
symbols.tree.dot(dot, threshold=threshold)
dot.write(symbols.filename[:-3] + 'dot')
def write_txt(self, jobfile=None, jobs=None, limit=None):
if jobs is None:
jobs = [ job for job in jobfile.jobs() ]
for job in jobs:
cpu = '%s.run%d' % (job.system, self.cpu)
symbols = self.getdata(job.name, cpu)
if not symbols:
continue
output = file(symbols.filename[:-3] + 'txt', 'w')
symbols.display(output, limit)
def display(self, jobfile=None, jobs=None, limit=None):
if jobs is None:
jobs = [ job for job in jobfile.jobs() ]
maxsymlen = 0
thejobs = []
for job in jobs:
cpu = '%s.run%d' % (job.system, self.cpu)
symbols = self.getdata(job.name, cpu)
if symbols:
thejobs.append(job)
maxsymlen = max(maxsymlen, symbols.maxsymlen)
for job in thejobs:
cpu = '%s.run%d' % (job.system, self.cpu)
symbols = self.getdata(job.name, cpu)
print job.name
symbols.display(limit=limit, maxsymlen=maxsymlen)
print
from categories import func_categorize, pc_categorize
class PCProfile(Profile):
def __init__(self, categorize=pc_categorize):
super(PCProfile, self).__init__(PCData, categorize)
class FuncProfile(Profile):
def __init__(self, categorize=func_categorize):
super(FuncProfile, self).__init__(FuncData, categorize)
def usage(exitcode = None):
print '''\
Usage: %s [-bc] [-g <dir>] [-j <jobfile>] [-n <num>]
-c groups symbols into categories
-b dumps data for bar charts
-d generate dot output
-g <d> draw graphs and send output to <d>
-j <jobfile> specify a different jobfile (default is Test.py)
-n <n> selects number of top symbols to print (default 5)
''' % sys.argv[0]
if exitcode is not None:
sys.exit(exitcode)
if __name__ == '__main__':
import getopt, re, sys
from os.path import expanduser
from output import StatOutput
# default option values
numsyms = 10
graph = None
cpus = [ 0 ]
categorize = False
showidle = True
funcdata = True
jobfilename = 'Test.py'
dodot = False
dotfile = None
textout = False
threshold = 0.01
inputfile = None
try:
opts, args = getopt.getopt(sys.argv[1:], 'C:cdD:f:g:ij:n:pT:t')
except getopt.GetoptError:
usage(2)
for o,a in opts:
if o == '-C':
cpus = [ int(x) for x in a.split(',') ]
elif o == '-c':
categorize = True
elif o == '-D':
dotfile = a
elif o == '-d':
dodot = True
elif o == '-f':
inputfile = expanduser(a)
elif o == '-g':
graph = a
elif o == '-i':
showidle = False
elif o == '-j':
jobfilename = a
elif o == '-n':
numsyms = int(a)
elif o == '-p':
funcdata = False
elif o == '-T':
threshold = float(a)
elif o == '-t':
textout = True
if args:
print "'%s'" % args, len(args)
usage(1)
if inputfile:
catfunc = None
if categorize:
catfunc = func_categorize
data = FuncData(inputfile, categorize=catfunc)
if dodot:
import pydot
dot = pydot.Dot()
data.tree.dot(dot, threshold=threshold)
#dot.orientation = 'landscape'
#dot.ranksep='equally'
#dot.rank='samerank'
dot.write(dotfile, format='png')
else:
data.display(limit=numsyms)
else:
from jobfile import JobFile
jobfile = JobFile(jobfilename)
if funcdata:
profile = FuncProfile()
else:
profile = PCProfile()
if not categorize:
profile.categorize = None
profile.inputdir(jobfile.rootdir)
if graph:
for cpu in cpus:
profile.cpu = cpu
if funcdata:
name = 'funcstacks%d' % cpu
else:
name = 'stacks%d' % cpu
output = StatOutput(jobfile, info=profile)
output.xlabel = 'System Configuration'
output.ylabel = '% CPU utilization'
output.stat = name
output.graph(name, graph)
if dodot:
for cpu in cpus:
profile.cpu = cpu
profile.write_dot(jobfile=jobfile, threshold=threshold)
if textout:
for cpu in cpus:
profile.cpu = cpu
profile.write_txt(jobfile=jobfile)
if not graph and not textout and not dodot:
for cpu in cpus:
if not categorize:
profile.categorize = None
profile.cpu = cpu
profile.display(jobfile=jobfile, limit=numsyms)
|
FireCARES/firecares | refs/heads/develop | firecares/firestation/migrations/0039_auto_20170126_0857.py | 1 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('firestation', '0038_firedepartment_domain_name'),
]
operations = [
migrations.AlterField(
model_name='firedepartment',
name='domain_name',
field=models.CharField(max_length=255, null=True, blank=True),
),
]
|
efornal/mollys | refs/heads/master | app/migrations/0020_alter_emails_to_person.py | 1 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2017-05-09 13:53
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('app', '0019_add_emails_to_person'),
]
operations = [
migrations.AlterField(
model_name='person',
name='alternative_email',
field=models.EmailField(blank=True, max_length=254, null=True, verbose_name='email alternativo'),
),
migrations.AlterField(
model_name='person',
name='email',
field=models.EmailField(blank=True, max_length=254, null=True, verbose_name='email'),
),
]
|
GDGLima/contentbox | refs/heads/master | third_party/social/tests/backends/test_xing.py | 92 | import json
from social.p3 import urlencode
from social.tests.backends.oauth import OAuth1Test
class XingOAuth1Test(OAuth1Test):
backend_path = 'social.backends.xing.XingOAuth'
user_data_url = 'https://api.xing.com/v1/users/me.json'
expected_username = 'FooBar'
access_token_body = json.dumps({
'access_token': 'foobar',
'token_type': 'bearer',
'user_id': '123456_abcdef'
})
request_token_body = urlencode({
'oauth_token_secret': 'foobar-secret',
'oauth_token': 'foobar',
'oauth_callback_confirmed': 'true'
})
user_data_body = json.dumps({
'users': [{
'id': '123456_abcdef',
'first_name': 'Foo',
'last_name': 'Bar',
'display_name': 'Foo Bar',
'page_name': 'Foo_Bar',
'permalink': 'https://www.xing.com/profile/Foo_Bar',
'gender': 'm',
'birth_date': {
'day': 12,
'month': 8,
'year': 1963
},
'active_email': 'foo@bar.com',
'time_zone': {
'name': 'Europe/Copenhagen',
'utc_offset': 2.0
},
'premium_services': ['SEARCH', 'PRIVATEMESSAGES'],
'badges': ['PREMIUM', 'MODERATOR'],
'wants': 'Nothing',
'haves': 'Skills',
'interests': 'Foo Foo',
'organisation_member': 'ACM, GI',
'languages': {
'de': 'NATIVE',
'en': 'FLUENT',
'fr': None,
'zh': 'BASIC'
},
'private_address': {
'city': 'Foo',
'country': 'DE',
'zip_code': '20357',
'street': 'Bar',
'phone': '12|34|1234560',
'fax': '||',
'province': 'Foo',
'email': 'foo@bar.com',
'mobile_phone': '12|3456|1234567'
},
'business_address': {
'city': 'Foo',
'country': 'DE',
'zip_code': '20357',
'street': 'Bar',
'phone': '12|34|1234569',
'fax': '12|34|1234561',
'province': 'Foo',
'email': 'foo@bar.com',
'mobile_phone': '12|345|12345678'
},
'web_profiles': {
'qype': ['http://qype.de/users/foo'],
'google_plus': ['http://plus.google.com/foo'],
'blog': ['http://blog.example.org'],
'homepage': ['http://example.org', 'http://other-example.org']
},
'instant_messaging_accounts': {
'skype': 'foobar',
'googletalk': 'foobar'
},
'professional_experience': {
'primary_company': {
'name': 'XING AG',
'title': 'Softwareentwickler',
'company_size': '201-500',
'tag': None,
'url': 'http://www.xing.com',
'career_level': 'PROFESSIONAL_EXPERIENCED',
'begin_date': '2010-01',
'description': None,
'end_date': None,
'industry': 'AEROSPACE'
},
'non_primary_companies': [{
'name': 'Ninja Ltd.',
'title': 'DevOps',
'company_size': None,
'tag': 'NINJA',
'url': 'http://www.ninja-ltd.co.uk',
'career_level': None,
'begin_date': '2009-04',
'description': None,
'end_date': '2010-07',
'industry': 'ALTERNATIVE_MEDICINE'
}, {
'name': None,
'title': 'Wiss. Mitarbeiter',
'company_size': None,
'tag': 'OFFIS',
'url': 'http://www.uni.de',
'career_level': None,
'begin_date': '2007',
'description': None,
'end_date': '2008',
'industry': 'APPAREL_AND_FASHION'
}, {
'name': None,
'title': 'TEST NINJA',
'company_size': '201-500',
'tag': 'TESTCOMPANY',
'url': None,
'career_level': 'ENTRY_LEVEL',
'begin_date': '1998-12',
'description': None,
'end_date': '1999-05',
'industry': 'ARTS_AND_CRAFTS'
}],
'awards': [{
'name': 'Awesome Dude Of The Year',
'date_awarded': 2007,
'url': None
}]
},
'educational_background': {
'schools': [{
'name': 'Foo University',
'degree': 'MSc CE/CS',
'notes': None,
'subject': None,
'begin_date': '1998-08',
'end_date': '2005-02'
}],
'qualifications': ['TOEFLS', 'PADI AOWD']
},
'photo_urls': {
'large': 'http://www.xing.com/img/users/e/3/d/'
'f94ef165a.123456,1.140x185.jpg',
'mini_thumb': 'http://www.xing.com/img/users/e/3/d/'
'f94ef165a.123456,1.18x24.jpg',
'thumb': 'http://www.xing.com/img/users/e/3/d/'
'f94ef165a.123456,1.30x40.jpg',
'medium_thumb': 'http://www.xing.com/img/users/e/3/d/'
'f94ef165a.123456,1.57x75.jpg',
'maxi_thumb': 'http://www.xing.com/img/users/e/3/d/'
'f94ef165a.123456,1.70x93.jpg'
}
}]
})
def test_login(self):
self.do_login()
def test_partial_pipeline(self):
self.do_partial_pipeline()
|
BMeu/Orchard | refs/heads/master | instance/__init__.py | 1 | # -*- coding: utf-8 -*-
"""
The configuration specific to a certain instance of |projectname|.
"""
from .configuration import Configuration
__all__ = ['Configuration']
|
ayushgoel/FixGoogleContacts | refs/heads/master | phonenumbers/shortdata/region_PY.py | 1 | """Auto-generated file, do not edit by hand. PY metadata"""
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
PHONE_METADATA_PY = PhoneMetadata(id='PY', country_code=None, international_prefix=None,
general_desc=PhoneNumberDesc(national_number_pattern='1[1-4]\\d', possible_number_pattern='\\d{3}'),
fixed_line=PhoneNumberDesc(national_number_pattern='1[1-4]\\d', possible_number_pattern='\\d{3}'),
mobile=PhoneNumberDesc(national_number_pattern='1[1-4]\\d', possible_number_pattern='\\d{3}'),
toll_free=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
premium_rate=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
shared_cost=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
personal_number=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
voip=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
pager=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
uan=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
emergency=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
voicemail=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
short_code=PhoneNumberDesc(national_number_pattern='1[1-4]\\d', possible_number_pattern='\\d{3}', example_number='123'),
standard_rate=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
no_international_dialling=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
short_data=True)
|
csm0042/bob_wemo_service | refs/heads/master | bob_wemo_service/wemo.py | 1 | #!/usr/bin/python3
""" wemo.py:
"""
# Import Required Libraries (Standard, Third Party, Local) ********************
import copy
import datetime
import logging
import pywemo
from bob_wemo_service.ipv4_help import check_ipv4
# Authorship Info *************************************************************
__author__ = "Christopher Maue"
__copyright__ = "Copyright 2017, The B.O.B. Project"
__credits__ = ["Christopher Maue"]
__license__ = "GPL"
__version__ = "1.0.0"
__maintainer__ = "Christopher Maue"
__email__ = "csmaue@gmail.com"
__status__ = "Development"
# pywemo wrapper API **********************************************************
class WemoAPI(object):
""" Class and methods necessary to read items from a google calendar """
def __init__(self, logger):
# Configure loggers
self.logger = logger or logging.getLogger(__name__)
# Configure other class objects
self._wemo_known = []
self.wemo_device = None
self.wemo_port = None
self.wemo_url = str()
self.result = None
self.status = str()
self.logger.info('Performing initial scan for wemo devices on network')
self._wemo_known = pywemo.discover_devices()
for device in self._wemo_known:
self.logger.info('Found: %s', device)
@property
def wemo_known(self):
""" Return complete list of known devices """
return self._wemo_known
def search_by_name(self, name=None, addr=None):
""" Searches known device list for matching device name. If not found
performs a network discovery to attempt to find the device. If found
returns device, else returns None
"""
self.logger.debug(
'Starting search of wemo table for matching name: %s',
name
)
# Search table of previously discovered devices
for i, device in enumerate(self._wemo_known):
if name.lower() == device.name.lower():
self.logger.debug('Match found at index: %s', i)
return device
# If not found, run a device-specific discovery
self.logger.debug(
'Device %s @ %s not previously discovered. Running discovery',
name,
addr
)
self.wemo_device = self.discover(name=name, addr=addr)
# If discovered, return device, else return None
if self.wemo_device is not None:
return self.wemo_device
else:
return None
def discover(self, name=None, addr=None):
""" discovers wemo device on network based upon known IP address """
self.wemo_device = None
self.wemo_port = None
# Check if valid address was given
if check_ipv4(addr) is True:
self.logger.info(
'Attempting to discover wemo device: %s @ %s',
name,
addr
)
try:
self.wemo_port = pywemo.ouimeaux_device.probe_wemo(addr)
self.logger.debug('Device discovered at port %s', self.wemo_port)
except Exception:
self.wemo_port = None
self.logger.warning('Failed to discover port for: %s', name)
else:
self.wemo_port = None
self.logger.debug('Invalid IP address in device attributes')
# If port was found, create url for device and run discovery function
if self.wemo_port is not None:
self.wemo_url = 'http://%s:%i/setup.xml' % (addr, self.wemo_port)
self.logger.debug('Resulting URL: %s', self.wemo_url)
try:
self.wemo_device = pywemo.discovery.device_from_description(
self.wemo_url,
None)
self.logger.debug('Discovery successful for: %s', name)
# Add newly discovered device to list of known devices
self._wemo_known.append(copy.copy(self.wemo_device))
except Exception:
self.logger.warning('Discovery failed for: %s', name)
self.wemo_device = None
else:
self.logger.warning('Discovery failed for: %s', name)
self.wemo_device = None
# Return device to calling program
return self.wemo_device
def read_status(self, name=None, addr=None, last_seen=None):
""" method to send a status query message to the physical device to
request that it report its current status back to this program """
self.logger.debug(
'Querrying device status for: %s @ %s',
name,
addr
)
# Look up physical device
self.wemo_device = self.search_by_name(name=name, addr=addr)
# Perform status query
if self.wemo_device is not None:
self.status = str(self.wemo_device.get_state(force_update=True))
self.logger.debug(
'Wemo device %s found with status: %s',
name,
self.status
)
# Update last seen timestamp
last_seen = str(datetime.datetime.now())
else:
self.status = 'offline'
self.logger.debug(
'Wemo device %s discovery failed. Status set to: %s',
name,
self.status
)
# Return device status and timestamp
return self.status, last_seen
# Wemo set to on function *****************************************************
def turn_on(self, name=None, addr=None, last_seen=None):
""" Send 'turn on' command to a specific wemo device """
self.logger.debug(
'Setting device state to "on" for: %s @ %s',
name,
addr
)
# Look up physical device
self.wemo_device = self.search_by_name(name=name, addr=addr)
# Perform command
if self.wemo_device is not None:
self.wemo_device.on()
self.status = 'on'
self.logger.debug(
'"on" command sent to wemo device: %s',
self.wemo_device.name
)
# Update last seen timestamp
last_seen = str(datetime.datetime.now())
else:
self.status = 'offline'
self.logger.debug(
'Wemo device [%s] discovery failed. Status set to: %s',
name,
self.status
)
# Return device status and timestamp
return self.status, last_seen
# Wemo set to off function ****************************************************
def turn_off(self, name=None, addr=None, last_seen=None):
""" Send 'turn off' command to a specific wemo device """
self.logger.debug(
'Setting device state to "off" for: %s @ %s',
name,
addr
)
# Look up physical device
self.wemo_device = self.search_by_name(name=name, addr=addr)
# Perform command
if self.wemo_device is not None:
self.wemo_device.off()
self.status = 'off'
self.logger.debug(
'"off" command sent to wemo device [%s]',
self.wemo_device.name
)
# Update last seen timestamp
last_seen = str(datetime.datetime.now())
else:
self.status = 'offline'
self.logger.debug(
'Wemo device [%s] discovery failed. Status set to [%s]',
name,
self.status
)
# Return device status and timestamp
return self.status, last_seen
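# Minimal usage sketch (device name and address below are hypothetical):
#   api = WemoAPI(logger)
#   status, seen = api.read_status(name='lamp', addr='192.168.1.20')
#   if status == 'off':
#       api.turn_on(name='lamp', addr='192.168.1.20', last_seen=seen)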
|
ar4s/django | refs/heads/master | django/contrib/auth/tests/utils.py | 220 | from unittest import skipIf
from django.conf import settings
def skipIfCustomUser(test_func):
"""
Skip a test if a custom user model is in use.
"""
return skipIf(settings.AUTH_USER_MODEL != 'auth.User', 'Custom user model in use')(test_func)
|
JuliBakagianni/CEF-ELRC | refs/heads/master | lib/python2.7/site-packages/unidecode/x077.py | 252 | data = (
'Ming ', # 0x00
'Sheng ', # 0x01
'Shi ', # 0x02
'Yun ', # 0x03
'Mian ', # 0x04
'Pan ', # 0x05
'Fang ', # 0x06
'Miao ', # 0x07
'Dan ', # 0x08
'Mei ', # 0x09
'Mao ', # 0x0a
'Kan ', # 0x0b
'Xian ', # 0x0c
'Ou ', # 0x0d
'Shi ', # 0x0e
'Yang ', # 0x0f
'Zheng ', # 0x10
'Yao ', # 0x11
'Shen ', # 0x12
'Huo ', # 0x13
'Da ', # 0x14
'Zhen ', # 0x15
'Kuang ', # 0x16
'Ju ', # 0x17
'Shen ', # 0x18
'Chi ', # 0x19
'Sheng ', # 0x1a
'Mei ', # 0x1b
'Mo ', # 0x1c
'Zhu ', # 0x1d
'Zhen ', # 0x1e
'Zhen ', # 0x1f
'Mian ', # 0x20
'Di ', # 0x21
'Yuan ', # 0x22
'Die ', # 0x23
'Yi ', # 0x24
'Zi ', # 0x25
'Zi ', # 0x26
'Chao ', # 0x27
'Zha ', # 0x28
'Xuan ', # 0x29
'Bing ', # 0x2a
'Mi ', # 0x2b
'Long ', # 0x2c
'Sui ', # 0x2d
'Dong ', # 0x2e
'Mi ', # 0x2f
'Die ', # 0x30
'Yi ', # 0x31
'Er ', # 0x32
'Ming ', # 0x33
'Xuan ', # 0x34
'Chi ', # 0x35
'Kuang ', # 0x36
'Juan ', # 0x37
'Mou ', # 0x38
'Zhen ', # 0x39
'Tiao ', # 0x3a
'Yang ', # 0x3b
'Yan ', # 0x3c
'Mo ', # 0x3d
'Zhong ', # 0x3e
'Mai ', # 0x3f
'Zhao ', # 0x40
'Zheng ', # 0x41
'Mei ', # 0x42
'Jun ', # 0x43
'Shao ', # 0x44
'Han ', # 0x45
'Huan ', # 0x46
'Di ', # 0x47
'Cheng ', # 0x48
'Cuo ', # 0x49
'Juan ', # 0x4a
'E ', # 0x4b
'Wan ', # 0x4c
'Xian ', # 0x4d
'Xi ', # 0x4e
'Kun ', # 0x4f
'Lai ', # 0x50
'Jian ', # 0x51
'Shan ', # 0x52
'Tian ', # 0x53
'Hun ', # 0x54
'Wan ', # 0x55
'Ling ', # 0x56
'Shi ', # 0x57
'Qiong ', # 0x58
'Lie ', # 0x59
'Yai ', # 0x5a
'Jing ', # 0x5b
'Zheng ', # 0x5c
'Li ', # 0x5d
'Lai ', # 0x5e
'Sui ', # 0x5f
'Juan ', # 0x60
'Shui ', # 0x61
'Sui ', # 0x62
'Du ', # 0x63
'Bi ', # 0x64
'Bi ', # 0x65
'Mu ', # 0x66
'Hun ', # 0x67
'Ni ', # 0x68
'Lu ', # 0x69
'Yi ', # 0x6a
'Jie ', # 0x6b
'Cai ', # 0x6c
'Zhou ', # 0x6d
'Yu ', # 0x6e
'Hun ', # 0x6f
'Ma ', # 0x70
'Xia ', # 0x71
'Xing ', # 0x72
'Xi ', # 0x73
'Gun ', # 0x74
'Cai ', # 0x75
'Chun ', # 0x76
'Jian ', # 0x77
'Mei ', # 0x78
'Du ', # 0x79
'Hou ', # 0x7a
'Xuan ', # 0x7b
'Ti ', # 0x7c
'Kui ', # 0x7d
'Gao ', # 0x7e
'Rui ', # 0x7f
'Mou ', # 0x80
'Xu ', # 0x81
'Fa ', # 0x82
'Wen ', # 0x83
'Miao ', # 0x84
'Chou ', # 0x85
'Kui ', # 0x86
'Mi ', # 0x87
'Weng ', # 0x88
'Kou ', # 0x89
'Dang ', # 0x8a
'Chen ', # 0x8b
'Ke ', # 0x8c
'Sou ', # 0x8d
'Xia ', # 0x8e
'Qiong ', # 0x8f
'Mao ', # 0x90
'Ming ', # 0x91
'Man ', # 0x92
'Shui ', # 0x93
'Ze ', # 0x94
'Zhang ', # 0x95
'Yi ', # 0x96
'Diao ', # 0x97
'Ou ', # 0x98
'Mo ', # 0x99
'Shun ', # 0x9a
'Cong ', # 0x9b
'Lou ', # 0x9c
'Chi ', # 0x9d
'Man ', # 0x9e
'Piao ', # 0x9f
'Cheng ', # 0xa0
'Ji ', # 0xa1
'Meng ', # 0xa2
'[?] ', # 0xa3
'Run ', # 0xa4
'Pie ', # 0xa5
'Xi ', # 0xa6
'Qiao ', # 0xa7
'Pu ', # 0xa8
'Zhu ', # 0xa9
'Deng ', # 0xaa
'Shen ', # 0xab
'Shun ', # 0xac
'Liao ', # 0xad
'Che ', # 0xae
'Xian ', # 0xaf
'Kan ', # 0xb0
'Ye ', # 0xb1
'Xu ', # 0xb2
'Tong ', # 0xb3
'Mou ', # 0xb4
'Lin ', # 0xb5
'Kui ', # 0xb6
'Xian ', # 0xb7
'Ye ', # 0xb8
'Ai ', # 0xb9
'Hui ', # 0xba
'Zhan ', # 0xbb
'Jian ', # 0xbc
'Gu ', # 0xbd
'Zhao ', # 0xbe
'Qu ', # 0xbf
'Wei ', # 0xc0
'Chou ', # 0xc1
'Sao ', # 0xc2
'Ning ', # 0xc3
'Xun ', # 0xc4
'Yao ', # 0xc5
'Huo ', # 0xc6
'Meng ', # 0xc7
'Mian ', # 0xc8
'Bin ', # 0xc9
'Mian ', # 0xca
'Li ', # 0xcb
'Kuang ', # 0xcc
'Jue ', # 0xcd
'Xuan ', # 0xce
'Mian ', # 0xcf
'Huo ', # 0xd0
'Lu ', # 0xd1
'Meng ', # 0xd2
'Long ', # 0xd3
'Guan ', # 0xd4
'Man ', # 0xd5
'Xi ', # 0xd6
'Chu ', # 0xd7
'Tang ', # 0xd8
'Kan ', # 0xd9
'Zhu ', # 0xda
'Mao ', # 0xdb
'Jin ', # 0xdc
'Lin ', # 0xdd
'Yu ', # 0xde
'Shuo ', # 0xdf
'Ce ', # 0xe0
'Jue ', # 0xe1
'Shi ', # 0xe2
'Yi ', # 0xe3
'Shen ', # 0xe4
'Zhi ', # 0xe5
'Hou ', # 0xe6
'Shen ', # 0xe7
'Ying ', # 0xe8
'Ju ', # 0xe9
'Zhou ', # 0xea
'Jiao ', # 0xeb
'Cuo ', # 0xec
'Duan ', # 0xed
'Ai ', # 0xee
'Jiao ', # 0xef
'Zeng ', # 0xf0
'Huo ', # 0xf1
'Bai ', # 0xf2
'Shi ', # 0xf3
'Ding ', # 0xf4
'Qi ', # 0xf5
'Ji ', # 0xf6
'Zi ', # 0xf7
'Gan ', # 0xf8
'Wu ', # 0xf9
'Tuo ', # 0xfa
'Ku ', # 0xfb
'Qiang ', # 0xfc
'Xi ', # 0xfd
'Fan ', # 0xfe
'Kuang ', # 0xff
)
|
sakura-internet/saklient.python | refs/heads/master | saklient/errors/httpnotacceptableexception.py | 1 | # -*- coding:utf-8 -*-
# This code is automatically transpiled by Saklient Translator
import six
from .httpexception import HttpException
import saklient
str = six.text_type
# module saklient.errors.httpnotacceptableexception
class HttpNotAcceptableException(HttpException):
    ## The request cannot be accepted. Please check the support site and maintenance information.
## @param {int} status
# @param {str} code=None
# @param {str} message=""
def __init__(self, status, code=None, message=""):
super(HttpNotAcceptableException, self).__init__(status, code, "要求を受け付けできません。サポートサイトやメンテナンス情報をご確認ください。" if message is None or message == "" else message)
|
martynovp/edx-platform | refs/heads/master | lms/djangoapps/circuit/views.py | 93 | import json
import xml.etree.ElementTree
from django.http import Http404
from django.http import HttpResponse
from edxmako.shortcuts import render_to_response
from .models import ServerCircuit
def circuit_line(circuit):
''' Returns string for an appropriate input element for a circuit.
TODO: Rename. '''
if not circuit.isalnum():
raise Http404()
try:
sc = ServerCircuit.objects.get(name=circuit)
schematic = sc.schematic
except:
schematic = ''
circuit_line = xml.etree.ElementTree.Element('input')
circuit_line.set('type', 'hidden')
circuit_line.set('class', 'schematic')
circuit_line.set('width', '640')
circuit_line.set('height', '480')
circuit_line.set('name', 'schematic')
circuit_line.set('id', 'schematic_' + circuit)
circuit_line.set('value', schematic) # We do it this way for security -- guarantees users cannot put funny stuff in schematic.
return xml.etree.ElementTree.tostring(circuit_line)
def edit_circuit(_request, circuit):
try:
sc = ServerCircuit.objects.get(name=circuit)
except:
sc = None
if not circuit.isalnum():
raise Http404()
response = render_to_response('edit_circuit.html', {'name': circuit,
'circuit_line': circuit_line(circuit)})
response['Cache-Control'] = 'no-cache'
return response
def save_circuit(request, circuit):
if not circuit.isalnum():
raise Http404()
print dict(request.POST)
schematic = request.POST['schematic']
print schematic
try:
sc = ServerCircuit.objects.get(name=circuit)
except:
sc = ServerCircuit()
sc.name = circuit
sc.schematic = schematic
print ":", sc.schematic
sc.save()
json_str = json.dumps({'results': 'success'})
response = HttpResponse(json_str, mimetype='application/json')
response['Cache-Control'] = 'no-cache'
return response
|
somsak/youtube-dl | refs/heads/master | test/helper.py | 63 | from __future__ import unicode_literals
import errno
import io
import hashlib
import json
import os.path
import re
import types
import sys
import youtube_dl.extractor
from youtube_dl import YoutubeDL
from youtube_dl.utils import (
compat_str,
preferredencoding,
write_string,
)
def get_params(override=None):
PARAMETERS_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)),
"parameters.json")
with io.open(PARAMETERS_FILE, encoding='utf-8') as pf:
parameters = json.load(pf)
if override:
parameters.update(override)
return parameters
def try_rm(filename):
""" Remove a file if it exists """
try:
os.remove(filename)
except OSError as ose:
if ose.errno != errno.ENOENT:
raise
def report_warning(message):
'''
Print the message to stderr; it will be prefixed with 'WARNING:'.
If stderr is a tty, the 'WARNING:' prefix will be colored.
'''
if sys.stderr.isatty() and os.name != 'nt':
_msg_header = '\033[0;33mWARNING:\033[0m'
else:
_msg_header = 'WARNING:'
output = '%s %s\n' % (_msg_header, message)
if 'b' in getattr(sys.stderr, 'mode', '') or sys.version_info[0] < 3:
output = output.encode(preferredencoding())
sys.stderr.write(output)
class FakeYDL(YoutubeDL):
def __init__(self, override=None):
# Different instances of the downloader can't share the same dictionary -
# some tests set the "sublang" parameter, which would break the md5 checks.
params = get_params(override=override)
super(FakeYDL, self).__init__(params, auto_init=False)
self.result = []
def to_screen(self, s, skip_eol=None):
print(s)
def trouble(self, s, tb=None):
raise Exception(s)
def download(self, x):
self.result.append(x)
def expect_warning(self, regex):
# Silence an expected warning matching a regex
old_report_warning = self.report_warning
def report_warning(self, message):
if re.match(regex, message):
return
old_report_warning(message)
self.report_warning = types.MethodType(report_warning, self)
def gettestcases(include_onlymatching=False):
for ie in youtube_dl.extractor.gen_extractors():
for tc in ie.get_testcases(include_onlymatching):
yield tc
md5 = lambda s: hashlib.md5(s.encode('utf-8')).hexdigest()
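# expect_info_dict (below) understands several magic prefixes in expected
# string values. A usage sketch (editor's illustration; field values here
# are made up):
#
#   expect_info_dict(self, got_dict, {
#       'title': 're:^Episode [0-9]+$',       # value must match the regex
#       'id': 'startswith:yt-',               # value must start with 'yt-'
#       'description': 'contains:music',      # value must contain 'music'
#       'uploader': 'md5:0123456789abcdef0123456789abcdef',  # md5 of the value
#       'formats': 'mincount:3',              # list/dict with >= 3 items
#   })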
def expect_info_dict(self, got_dict, expected_dict):
for info_field, expected in expected_dict.items():
if isinstance(expected, compat_str) and expected.startswith('re:'):
got = got_dict.get(info_field)
match_str = expected[len('re:'):]
match_rex = re.compile(match_str)
self.assertTrue(
isinstance(got, compat_str),
'Expected a %s object, but got %s for field %s' % (
compat_str.__name__, type(got).__name__, info_field))
self.assertTrue(
match_rex.match(got),
'field %s (value: %r) should match %r' % (info_field, got, match_str))
elif isinstance(expected, compat_str) and expected.startswith('startswith:'):
got = got_dict.get(info_field)
start_str = expected[len('startswith:'):]
self.assertTrue(
isinstance(got, compat_str),
'Expected a %s object, but got %s for field %s' % (
compat_str.__name__, type(got).__name__, info_field))
self.assertTrue(
got.startswith(start_str),
'field %s (value: %r) should start with %r' % (info_field, got, start_str))
elif isinstance(expected, compat_str) and expected.startswith('contains:'):
got = got_dict.get(info_field)
contains_str = expected[len('contains:'):]
self.assertTrue(
isinstance(got, compat_str),
'Expected a %s object, but got %s for field %s' % (
compat_str.__name__, type(got).__name__, info_field))
self.assertTrue(
contains_str in got,
'field %s (value: %r) should contain %r' % (info_field, got, contains_str))
elif isinstance(expected, type):
got = got_dict.get(info_field)
self.assertTrue(isinstance(got, expected),
'Expected type %r for field %s, but got value %r of type %r' % (expected, info_field, got, type(got)))
else:
if isinstance(expected, compat_str) and expected.startswith('md5:'):
got = 'md5:' + md5(got_dict.get(info_field))
elif isinstance(expected, compat_str) and expected.startswith('mincount:'):
got = got_dict.get(info_field)
self.assertTrue(
isinstance(got, (list, dict)),
'Expected field %s to be a list or a dict, but it is of type %s' % (
info_field, type(got).__name__))
expected_num = int(expected.partition(':')[2])
assertGreaterEqual(
self, len(got), expected_num,
'Expected %d items in field %s, but only got %d' % (
expected_num, info_field, len(got)
)
)
continue
else:
got = got_dict.get(info_field)
self.assertEqual(expected, got,
'invalid value for field %s, expected %r, got %r' % (info_field, expected, got))
# Check for the presence of mandatory fields
if got_dict.get('_type') not in ('playlist', 'multi_video'):
for key in ('id', 'url', 'title', 'ext'):
self.assertTrue(got_dict.get(key), 'Missing mandatory field %s' % key)
# Check for mandatory fields that are automatically set by YoutubeDL
for key in ['webpage_url', 'extractor', 'extractor_key']:
self.assertTrue(got_dict.get(key), 'Missing field: %s' % key)
# Are checkable fields missing from the test case definition?
test_info_dict = dict((key, value if not isinstance(value, compat_str) or len(value) < 250 else 'md5:' + md5(value))
for key, value in got_dict.items()
if value and key in ('id', 'title', 'description', 'uploader', 'upload_date', 'timestamp', 'uploader_id', 'location', 'age_limit'))
missing_keys = set(test_info_dict.keys()) - set(expected_dict.keys())
if missing_keys:
def _repr(v):
if isinstance(v, compat_str):
return "'%s'" % v.replace('\\', '\\\\').replace("'", "\\'").replace('\n', '\\n')
else:
return repr(v)
info_dict_str = ''
if len(missing_keys) != len(expected_dict):
info_dict_str += ''.join(
' %s: %s,\n' % (_repr(k), _repr(v))
for k, v in test_info_dict.items() if k not in missing_keys)
if info_dict_str:
info_dict_str += '\n'
info_dict_str += ''.join(
' %s: %s,\n' % (_repr(k), _repr(test_info_dict[k]))
for k in missing_keys)
write_string(
'\n\'info_dict\': {\n' + info_dict_str + '},\n', out=sys.stderr)
self.assertFalse(
missing_keys,
'Missing keys in test definition: %s' % (
', '.join(sorted(missing_keys))))
def assertRegexpMatches(self, text, regexp, msg=None):
if hasattr(self, 'assertRegexp'):
return self.assertRegexp(text, regexp, msg)
else:
m = re.match(regexp, text)
if not m:
note = 'Regexp didn\'t match: %r not found' % (regexp)
if len(text) < 1000:
note += ' in %r' % text
if msg is None:
msg = note
else:
msg = note + ', ' + msg
self.assertTrue(m, msg)
def assertGreaterEqual(self, got, expected, msg=None):
if not (got >= expected):
if msg is None:
msg = '%r not greater than or equal to %r' % (got, expected)
self.assertTrue(got >= expected, msg)
def expect_warnings(ydl, warnings_re):
real_warning = ydl.report_warning
def _report_warning(w):
if not any(re.search(w_re, w) for w_re in warnings_re):
real_warning(w)
ydl.report_warning = _report_warning
|
rcarmo/soup-strainer | refs/heads/master | chardet/constants.py | 237 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
_debug = 0
eDetecting = 0
eFoundIt = 1
eNotMe = 2
eStart = 0
eError = 1
eItsMe = 2
SHORTCUT_THRESHOLD = 0.95
import __builtin__
if not hasattr(__builtin__, 'False'):
False = 0
True = 1
else:
False = __builtin__.False
True = __builtin__.True
|
twolfson/restructuredtext-lint | refs/heads/master | docs/sphinx/index.py | 1 | # Load in our dependencies
from docutils.parsers.rst.directives import register_directive
from sphinx.directives.code import Highlight
import restructuredtext_lint
# Load our new directive
register_directive('highlight', Highlight)
# Lint our README
errors = restructuredtext_lint.lint_file('docs/sphinx/README.rst')
print errors[0].message # Error in "highlight" directive: no content permitted.
|
damiansoriano/odoo | refs/heads/master | addons/point_of_sale/controllers/__init__.py | 382 | import main
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
shengzhou/onie | refs/heads/master | test/lib/connection.py | 6 | #
# DUT Connection classes
#
# Copyright (C) 2013 Curt Brune <curt@cumulusnetworks.com>
#
# SPDX-License-Identifier: GPL-2.0
'''
Defines a Connection object that the test fixture uses to communicate
with a DUT.
'''
#-------------------------------------------------------------------------------
#
# Imports
#
try:
import sys
import os
import re
import io
import logging
import pexpect
except ImportError, e:
raise ImportError (str(e) + "- required module not found")
class Connection(object):
'''
Base connection class
'''
proto = None
def __init__(self, dut):
self._dut = dut
self._child = None
def open(self, prompt=""):
'''
Open the DUT communication channel.
prompt -- default CLI prompt to synchronize to
'''
self._prompt = prompt
to = int(self._dut.get_config('timeout'))
logging.info("Opening connection: " + self.command)
self._child = pexpect.spawn(self.command, timeout=to)
logging.info("Logging console output: " + self._dut.args.console_log.name)
self._child.logfile = self._dut.args.console_log
self._login()
def _login(self):
'''
After open() _login() is called to provide any required chat.
'''
pass
def close(self):
'''
Close the DUT communication channel
'''
self._child.close(force=True)
def expect(self, pattern, timeout=-1):
'''
Monitor DUT communication channel, looking for a pattern.
pattern -- pattern to look for
timeout -- how many seconds to wait for pattern to show up. -1 is connection default.
'''
try:
self._child.expect(pattern, timeout=timeout)
except pexpect.EOF, e:
logging.critical("pexpect received EOF while expecting: " + pattern)
raise
except pexpect.TIMEOUT, e:
if timeout != -1:
to = timeout
else:
to = self._child.timeout
logging.critical("pexpect received TIMEOUT (%d secs) while expecting: %s" %
(to, pattern))
raise
logging.debug("before text: %s" % (self._child.before))
logging.debug("match text: %s" % (self._child.match))
logging.debug("after text: %s" % (self._child.after))
return self._child.before
def send(self, line, timeout=-1):
'''
Send line to DUT and wait for DUT prompt.
line -- string to send to DUT. A newline is added automatically.
timeout -- how many seconds to wait for prompt. -1 is connection default.
'''
self._child.sendline(line)
try:
output = self.expect(self.prompt, timeout)
except pexpect.EOF, e:
logging.critical("pexpect received EOF while sending: " + line)
sys.exit(1)
except pexpect.TIMEOUT, e:
if timeout != -1:
to = timeout
else:
to = self._child.timeout
logging.critical("pexpect received TIMEOUT (%d secs) while sending: >%s<" %
(to, line))
sys.exit(1)
# Return the output, split into lines. Also skip the first
# line as it is just an echo of the command sent.
return output.splitlines()[1:]
def sendline(self, line):
'''
Send line to DUT and return immediately.
line -- string to send to DUT. A newline is added automatically.
'''
self._child.sendline(line)
@property
def prompt(self):
'''
Return the current prompt pattern.
'''
return self._prompt
@prompt.setter
def prompt(self, pattern):
'''
Set the prompt to wait for when issuing CLI commands.
pattern -- The pattern representing the prompt.
'''
old_prompt = self._prompt
self._prompt = pattern
class SimpleTelnetConnection(Connection):
'''
Simple telnet connection that does not require a password.
'''
proto = "telnet-simple"
def __init__(self, dut):
server = dut.get_config('telnet_server')
port = dut.get_config('telnet_port')
self.command = "/usr/bin/telnet %s %s" % (server, port)
Connection.__init__(self, dut)
class AuthTelnetConnection(SimpleTelnetConnection):
'''
Authenticated telnet connection that requires a username and
password.
'''
proto = "telnet"
def _login(self):
index = self._child.expect(["login: ", pexpect.EOF, pexpect.TIMEOUT])
if index == 1:
logging.critical("pexect received EOF during telnet login")
sys.exit(1)
elif index == 2:
to = self._child.timeout
logging.critical("received TIMEOUT (%d secs) during telnet login" %
(to))
sys.exit(1)
user = self._dut.get_config('telnet_user')
self._child.sendline(user)
index = self._child.expect(["Password: ", pexpect.EOF, pexpect.TIMEOUT])
if index == 1:
logging.critical("pexect received EOF during telnet password")
sys.exit(1)
elif index == 2:
to = self._child.timeout
logging.critical("received TIMEOUT (%d secs) during telnet password" %
(to))
sys.exit(1)
pw = self._dut.get_config('telnet_pass')
self._child.sendline(pw)
class SSHConnection(Connection):
'''
Authenticated SSH connection that requires a username and password.
'''
proto = "ssh"
def __init__(self, dut):
server = dut.get_config('ssh_server')
port = dut.get_config('ssh_port')
user = dut.get_config('ssh_user')
self.command = "/usr/bin/ssh -p %s %s@%s" % (port, user, server)
Connection.__init__(self, dut)
def _login(self):
index = self._child.expect(["Password: ", pexpect.EOF, pexpect.TIMEOUT])
if index == 1:
logging.critical("pexect received EOF during ssh login")
sys.exit(1)
elif index == 2:
to = self._child.timeout
logging.critical("pexect received TIMEOUT (%d secs) during ssh login" %
(to))
sys.exit(1)
pw = self._dut.get_config('ssh_pass')
self._child.sendline(pw)
#
# Registry of available Connection classes
#
connection_protos = (SimpleTelnetConnection,
AuthTelnetConnection,
SSHConnection,)
class NoSuchConnection(RuntimeError):
pass
def find_connection(proto):
for c in connection_protos:
if c.proto == proto:
return c
raise NoSuchConnection('Connection proto not found: %s' % (proto))
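# Usage sketch (editor's illustration, not part of the original module; the
# dut object comes from the test fixture and the ONIE prompt is assumed):
#
#   conn_class = find_connection('ssh')
#   conn = conn_class(dut)
#   conn.open(prompt='ONIE:/ #')
#   lines = conn.send('onie-sysinfo -v')
#   conn.close()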
|
aioue/ansible | refs/heads/devel | hacking/tests/gen_distribution_version_testcase.py | 80 | #!/usr/bin/env python
"""
This script generates test cases for test_distribution_version.py.
To do so, it outputs the relevant files from /etc/*release, the output of platform.dist(), and the current ansible_facts regarding the distribution version.
This assumes a working ansible version in the path.
"""
import platform
import os.path
import subprocess
import json
import sys
filelist = [
'/etc/oracle-release',
'/etc/slackware-version',
'/etc/redhat-release',
'/etc/vmware-release',
'/etc/openwrt_release',
'/etc/system-release',
'/etc/alpine-release',
'/etc/release',
'/etc/arch-release',
'/etc/os-release',
'/etc/SuSE-release',
'/etc/gentoo-release',
'/etc/os-release',
'/etc/lsb-release',
'/etc/altlinux-release',
'/etc/os-release',
'/etc/coreos/update.conf',
]
fcont = {}
for f in filelist:
if os.path.exists(f):
s = os.path.getsize(f)
if s > 0 and s < 10000:
with open(f) as fh:
fcont[f] = fh.read()
dist = platform.dist()
facts = ['distribution', 'distribution_version', 'distribution_release', 'distribution_major_version', 'os_family']
try:
ansible_out = subprocess.check_output(
['ansible', 'localhost', '-m', 'setup'])
except subprocess.CalledProcessError as e:
print("ERROR: ansible run failed, output was: \n")
print(e.output)
sys.exit(e.returncode)
parsed = json.loads(ansible_out[ansible_out.index('{'):])
ansible_facts = {}
for fact in facts:
try:
ansible_facts[fact] = parsed['ansible_facts']['ansible_' + fact]
except:
ansible_facts[fact] = "N/A"
nicename = ansible_facts['distribution'] + ' ' + ansible_facts['distribution_version']
output = {
'name': nicename,
'input': fcont,
'platform.dist': dist,
'result': ansible_facts,
}
print(json.dumps(output, indent=4))
|
XXMrHyde/android_external_chromium_org | refs/heads/darkkat-4.4 | third_party/protobuf/python/google/protobuf/internal/service_reflection_test.py | 559 | #! /usr/bin/python
#
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# http://code.google.com/p/protobuf/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tests for google.protobuf.internal.service_reflection."""
__author__ = 'petar@google.com (Petar Petrov)'
import unittest
from google.protobuf import unittest_pb2
from google.protobuf import service_reflection
from google.protobuf import service
class FooUnitTest(unittest.TestCase):
def testService(self):
class MockRpcChannel(service.RpcChannel):
def CallMethod(self, method, controller, request, response, callback):
self.method = method
self.controller = controller
self.request = request
callback(response)
class MockRpcController(service.RpcController):
def SetFailed(self, msg):
self.failure_message = msg
self.callback_response = None
class MyService(unittest_pb2.TestService):
pass
self.callback_response = None
def MyCallback(response):
self.callback_response = response
rpc_controller = MockRpcController()
channel = MockRpcChannel()
srvc = MyService()
srvc.Foo(rpc_controller, unittest_pb2.FooRequest(), MyCallback)
self.assertEqual('Method Foo not implemented.',
rpc_controller.failure_message)
self.assertEqual(None, self.callback_response)
rpc_controller.failure_message = None
service_descriptor = unittest_pb2.TestService.GetDescriptor()
srvc.CallMethod(service_descriptor.methods[1], rpc_controller,
unittest_pb2.BarRequest(), MyCallback)
self.assertEqual('Method Bar not implemented.',
rpc_controller.failure_message)
self.assertEqual(None, self.callback_response)
class MyServiceImpl(unittest_pb2.TestService):
def Foo(self, rpc_controller, request, done):
self.foo_called = True
def Bar(self, rpc_controller, request, done):
self.bar_called = True
srvc = MyServiceImpl()
rpc_controller.failure_message = None
srvc.Foo(rpc_controller, unittest_pb2.FooRequest(), MyCallback)
self.assertEqual(None, rpc_controller.failure_message)
self.assertEqual(True, srvc.foo_called)
rpc_controller.failure_message = None
srvc.CallMethod(service_descriptor.methods[1], rpc_controller,
unittest_pb2.BarRequest(), MyCallback)
self.assertEqual(None, rpc_controller.failure_message)
self.assertEqual(True, srvc.bar_called)
def testServiceStub(self):
class MockRpcChannel(service.RpcChannel):
def CallMethod(self, method, controller, request,
response_class, callback):
self.method = method
self.controller = controller
self.request = request
callback(response_class())
self.callback_response = None
def MyCallback(response):
self.callback_response = response
channel = MockRpcChannel()
stub = unittest_pb2.TestService_Stub(channel)
rpc_controller = 'controller'
request = 'request'
# GetDescriptor now static, still works as instance method for compatibility
self.assertEqual(unittest_pb2.TestService_Stub.GetDescriptor(),
stub.GetDescriptor())
# Invoke method.
stub.Foo(rpc_controller, request, MyCallback)
self.assertTrue(isinstance(self.callback_response,
unittest_pb2.FooResponse))
self.assertEqual(request, channel.request)
self.assertEqual(rpc_controller, channel.controller)
self.assertEqual(stub.GetDescriptor().methods[0], channel.method)
if __name__ == '__main__':
unittest.main()
|
okuribito/GiftConcierge | refs/heads/master | dialogue_system/knowledge/__init__.py | 15 | __author__ = 'h-nakayama'
|
xLegoz/marshmallow | refs/heads/dev | tests/test_fields.py | 1 | # -*- coding: utf-8 -*-
import pytest
from marshmallow import fields, Schema, ValidationError
from marshmallow.marshalling import missing
from tests.base import ALL_FIELDS, User
class TestFieldAliases:
def test_int_is_integer(self):
assert fields.Int is fields.Integer
def test_str_is_string(self):
assert fields.Str is fields.String
def test_bool_is_boolean(self):
assert fields.Bool is fields.Boolean
def test_URL_is_Url(self): # flake8: noqa
assert fields.URL is fields.Url
class TestField:
def test_repr(self):
default = u'œ∑´'
field = fields.Field(default=default, attribute=None)
assert repr(field) == (u'<fields.Field(default={0!r}, attribute=None, '
'validate=None, required=False, '
'load_only=False, dump_only=False, '
'missing={missing}, allow_none=False, '
'error_messages={error_messages})>'
.format(default, missing=missing,
error_messages=field.error_messages))
int_field = fields.Integer(validate=lambda x: True)
assert '<fields.Integer' in repr(int_field)
def test_error_raised_if_uncallable_validator_passed(self):
with pytest.raises(ValueError):
fields.Field(validate='notcallable')
def test_custom_field_receives_attr_and_obj(self, user):
class MyField(fields.Field):
def _deserialize(self, val, attr, data):
assert attr == 'name'
assert data['foo'] == 42
return val
class MySchema(Schema):
name = MyField()
result = MySchema().load({'name': 'Monty', 'foo': 42})
assert result.data == {'name': 'Monty'}
def test_custom_field_receives_load_from_if_set(self, user):
class MyField(fields.Field):
def _deserialize(self, val, attr, data):
assert attr == 'name'
assert data['foo'] == 42
return val
class MySchema(Schema):
Name = MyField(load_from='name')
result = MySchema().load({'name': 'Monty', 'foo': 42})
assert result.data == {'Name': 'Monty'}
def test_custom_field_follows_dump_to_if_set(self, user):
class MyField(fields.Field):
def _serialize(self, val, attr, data):
assert attr == 'name'
assert data['foo'] == 42
return val
class MySchema(Schema):
name = MyField(dump_to='_NaMe')
result = MySchema().dump({'name': 'Monty', 'foo': 42})
assert result.data == {'_NaMe': 'Monty'}
class TestParentAndName:
class MySchema(Schema):
foo = fields.Field()
bar = fields.List(fields.Str())
@pytest.fixture()
def schema(self):
return self.MySchema()
def test_simple_field_parent_and_name(self, schema):
assert schema.fields['foo'].parent == schema
assert schema.fields['foo'].name == 'foo'
assert schema.fields['bar'].parent == schema
assert schema.fields['bar'].name == 'bar'
# https://github.com/marshmallow-code/marshmallow/pull/572#issuecomment-275800288
def test_unbound_field_root_returns_none(self):
field = fields.Str()
assert field.root is None
inner_field = fields.Nested(self.MySchema())
outer_field = fields.List(inner_field)
assert outer_field.root is None
assert inner_field.root is None
def test_list_field_inner_parent_and_name(self, schema):
assert schema.fields['bar'].container.parent == schema.fields['bar']
assert schema.fields['bar'].container.name == 'bar'
def test_simple_field_root(self, schema):
assert schema.fields['foo'].root == schema
assert schema.fields['bar'].root == schema
def test_list_field_inner_root(self, schema):
assert schema.fields['bar'].container.root == schema
class TestMetadata:
FIELDS_TO_TEST = [
field for field in ALL_FIELDS
if field not in [fields.FormattedString]
]
@pytest.mark.parametrize('FieldClass', FIELDS_TO_TEST)
def test_extra_metadata_may_be_added_to_field(self, FieldClass): # noqa
field = FieldClass(description='Just a normal field.')
assert field.metadata['description'] == 'Just a normal field.'
field = FieldClass(required=True, default=None, validate=lambda v: True,
description='foo', widget='select')
assert field.metadata == {'description': 'foo', 'widget': 'select'}
def test_metadata_may_be_added_to_formatted_string_field(self):
field = fields.FormattedString('hello {name}', description='a greeting')
assert field.metadata == {'description': 'a greeting'}
class TestErrorMessages:
class MyField(fields.Field):
default_error_messages = {
'custom': 'Custom error message.'
}
def test_default_error_messages_get_merged_with_parent_error_messages(self):
field = self.MyField()
assert field.error_messages['custom'] == 'Custom error message.'
assert 'required' in field.error_messages
def test_passed_error_messages_get_merged_with_parent_error_messages(self):
field = self.MyField(error_messages={'passed': 'Passed error message'})
assert field.error_messages['passed'] == 'Passed error message'
def test_fail(self):
field = self.MyField()
with pytest.raises(ValidationError) as excinfo:
field.fail('required')
assert excinfo.value.args[0] == 'Missing data for required field.'
with pytest.raises(ValidationError) as excinfo:
field.fail('null')
assert excinfo.value.args[0] == 'Field may not be null.'
with pytest.raises(ValidationError) as excinfo:
field.fail('custom')
assert excinfo.value.args[0] == 'Custom error message.'
with pytest.raises(ValidationError) as excinfo:
field.fail('validator_failed')
assert excinfo.value.args[0] == 'Invalid value.'
with pytest.raises(AssertionError) as excinfo:
field.fail('doesntexist')
assert 'doesntexist' in excinfo.value.args[0]
assert 'MyField' in excinfo.value.args[0]
|
testmana2/test | refs/heads/master | Plugins/VcsPlugins/vcsMercurial/HgNewProjectOptionsDialog.py | 1 | # -*- coding: utf-8 -*-
# Copyright (c) 2010 - 2015 Detlev Offenbach <detlev@die-offenbachs.de>
#
"""
Module implementing the Mercurial Options Dialog for a new project from the
repository.
"""
from __future__ import unicode_literals
import os
from PyQt5.QtCore import pyqtSlot, QDir
from PyQt5.QtWidgets import QDialog, QDialogButtonBox
from E5Gui.E5PathPicker import E5PathPickerModes
from .Ui_HgNewProjectOptionsDialog import Ui_HgNewProjectOptionsDialog
from .Config import ConfigHgProtocols
import Utilities
import Preferences
class HgNewProjectOptionsDialog(QDialog, Ui_HgNewProjectOptionsDialog):
"""
Class implementing the Options Dialog for a new project from the
repository.
"""
def __init__(self, vcs, parent=None):
"""
Constructor
@param vcs reference to the version control object
@param parent parent widget (QWidget)
"""
super(HgNewProjectOptionsDialog, self).__init__(parent)
self.setupUi(self)
self.vcsProjectDirPicker.setMode(E5PathPickerModes.DirectoryMode)
self.vcsUrlPicker.setMode(E5PathPickerModes.DirectoryMode)
self.protocolCombo.addItems(ConfigHgProtocols)
hd = Utilities.toNativeSeparators(QDir.homePath())
hd = os.path.join(hd, 'hgroot')
self.vcsUrlPicker.setText(hd)
self.vcs = vcs
self.localPath = hd
self.networkPath = "localhost/"
self.localProtocol = True
ipath = Preferences.getMultiProject("Workspace") or \
Utilities.getHomeDir()
self.__initPaths = [
Utilities.fromNativeSeparators(ipath),
Utilities.fromNativeSeparators(ipath) + "/",
]
self.vcsProjectDirPicker.setText(self.__initPaths[0])
self.lfNoteLabel.setVisible(self.vcs.isExtensionActive("largefiles"))
self.largeCheckBox.setVisible(self.vcs.isExtensionActive("largefiles"))
self.resize(self.width(), self.minimumSizeHint().height())
self.buttonBox.button(QDialogButtonBox.Ok).setEnabled(False)
msh = self.minimumSizeHint()
self.resize(max(self.width(), msh.width()), msh.height())
@pyqtSlot(str)
def on_vcsProjectDirPicker_textChanged(self, txt):
"""
Private slot to handle a change of the project directory.
@param txt name of the project directory (string)
"""
self.buttonBox.button(QDialogButtonBox.Ok).setEnabled(
bool(txt) and
Utilities.fromNativeSeparators(txt) not in self.__initPaths)
@pyqtSlot(str)
def on_protocolCombo_activated(self, protocol):
"""
Private slot to switch the status of the directory selection button.
@param protocol name of the selected protocol (string)
"""
self.vcsUrlPicker.setPickerEnabled(protocol == "file://")
if protocol == "file://":
self.networkPath = self.vcsUrlPicker.text()
self.vcsUrlPicker.setText(self.localPath)
self.localProtocol = True
else:
if self.localProtocol:
self.localPath = self.vcsUrlPicker.text()
self.vcsUrlPicker.setText(self.networkPath)
self.localProtocol = False
@pyqtSlot(str)
def on_vcsUrlPicker_textChanged(self, txt):
"""
Private slot to handle changes of the URL.
@param txt current text of the line edit (string)
"""
enable = "://" not in txt
self.buttonBox.button(QDialogButtonBox.Ok).setEnabled(enable)
def getData(self):
"""
Public slot to retrieve the data entered into the dialog.
@return a tuple of a string (project directory) and a dictionary
containing the data entered.
"""
scheme = self.protocolCombo.currentText()
url = self.vcsUrlPicker.text()
if scheme == "file://" and url[0] not in ["\\", "/"]:
url = "/{0}".format(url)
vcsdatadict = {
"url": '{0}{1}'.format(scheme, url),
"revision": self.vcsRevisionEdit.text(),
"largefiles": self.largeCheckBox.isChecked(),
}
return (self.vcsProjectDirPicker.text(), vcsdatadict)
|
chrisglass/xhtml2pdf | refs/heads/master | xhtml2pdf/config/httpconfig.py | 3 | '''
Created on 1 Dec. 2017
@author: luisza
'''
import ssl
class HttpConfig(dict):
"""
Configuration settings for httplib
See
- python2 : https://docs.python.org/2/library/httplib.html#httplib.HTTPSConnection
- python3 : https://docs.python.org/3.4/library/http.client.html#http.client.HTTPSConnection
available settings
- http_key_file
- http_cert_file
- http_source_address
- http_timeout
"""
def save_keys(self, name, value):
if name == 'nosslcheck':
self['context'] = ssl._create_unverified_context()
else:
self[name] = value
def is_http_config(self, name, value):
if name.startswith('--'):
name = name[2:]
elif name.startswith('-'):
name = name[1:]
if 'http_' in name:
name = name.replace("http_", '')
self.save_keys(name, value)
return True
return False
def __repr__(self):
dev = ''
for key, value in self.items():
dev += "%r = %r, " % (key, value)
return dev
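# Usage sketch (editor's illustration): option names containing "http_" are
# accepted and stored under the stripped key; anything else is rejected.
#
#   cfg = HttpConfig()
#   cfg.is_http_config('--http_timeout', 10)    # True; stores {'timeout': 10}
#   cfg.is_http_config('--http_nosslcheck', 1)  # True; stores an unverified SSL context
#   cfg.is_http_config('--quiet', True)         # False; not an http option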
httpConfig=HttpConfig() |
zjost/antsystem | refs/heads/master | functions/colMutate.py | 1 | ''' This function will reproduce a colony with mutated individuals.
It will generate a shifted alpha_weight array and assign new alpha
values to the ants based on these probabilities'''
import random
from functions.alphaWeightMutate import alphaWeightMutate
from functions.alphaAssign import alphaAssign
def colMutate(colony, alpha_min, alpha_max, max_mutation):
# Mutate the alpha_weights of the colony
alphaWeightMutate(colony.alphaWeights, max_mutation)
# Assign new alpha values based on these mutated weights
alphaAssign(colony, alpha_min, alpha_max)
return colony
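# Call sketch (editor's illustration; the colony object and the bounds come
# from the surrounding GA loop, the values here are made up):
#
#   colony = colMutate(colony, alpha_min=0.1, alpha_max=5.0, max_mutation=0.2)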
|
saurabh6790/test-frappe | refs/heads/develop | frappe/website/doctype/web_form/web_form.py | 11 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe, json
from frappe.website.website_generator import WebsiteGenerator
from frappe import _
from frappe.utils.file_manager import save_file, remove_file_by_url
from frappe.website.utils import get_comment_list
class WebForm(WebsiteGenerator):
website = frappe._dict(
template = "templates/generators/web_form.html",
condition_field = "published",
page_title_field = "title",
no_cache = 1
)
def get_context(self, context):
from frappe.templates.pages.list import get_context as get_list_context
frappe.local.form_dict.is_web_form = 1
context.params = frappe.form_dict
logged_in = frappe.session.user != "Guest"
# check permissions
if not logged_in and frappe.form_dict.name:
frappe.throw(_("You need to be logged in to access this {0}.").format(self.doc_type), frappe.PermissionError)
if frappe.form_dict.name and not has_web_form_permission(self.doc_type, frappe.form_dict.name):
frappe.throw(_("You don't have the permissions to access this document"), frappe.PermissionError)
if self.login_required and logged_in:
if self.allow_edit:
if self.allow_multiple:
if not context.params.name and not context.params.new:
frappe.form_dict.doctype = self.doc_type
get_list_context(context)
context.is_list = True
else:
name = frappe.db.get_value(self.doc_type, {"owner": frappe.session.user}, "name")
if name:
frappe.form_dict.name = name
# always render new form if login is not required or doesn't allow editing existing ones
if not self.login_required or not self.allow_edit:
frappe.form_dict.new = 1
if frappe.form_dict.name or frappe.form_dict.new:
context.layout = self.get_layout()
context.parents = [{"name": self.get_route(), "title": self.title }]
if frappe.form_dict.name:
context.doc = frappe.get_doc(self.doc_type, frappe.form_dict.name)
context.title = context.doc.get(context.doc.meta.get_title_field())
context.comment_doctype = context.doc.doctype
context.comment_docname = context.doc.name
if self.allow_comments and frappe.form_dict.name:
context.comment_list = get_comment_list(context.doc.doctype, context.doc.name)
context.types = [f.fieldtype for f in self.web_form_fields]
return context
def get_layout(self):
layout = []
for df in self.web_form_fields:
if df.fieldtype=="Section Break" or not layout:
layout.append([])
if df.fieldtype=="Column Break" or not layout[-1]:
layout[-1].append([])
if df.fieldtype not in ("Section Break", "Column Break"):
layout[-1][-1].append(df)
return layout
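# Note (editor's illustration): the layout returned above is a nested list of
# sections -> columns -> fields, e.g. [[[name_df, email_df], [phone_df]]] for
# one section split into two columns, where each *_df is a Web Form Field row.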
def get_parents(self, context):
if context.parents:
return context.parents
elif self.breadcrumbs:
return json.loads(self.breadcrumbs)
@frappe.whitelist(allow_guest=True)
def accept():
args = frappe.form_dict
files = []
web_form = frappe.get_doc("Web Form", args.web_form)
if args.doctype != web_form.doc_type:
frappe.throw(_("Invalid Request"))
elif args.name and not web_form.allow_edit:
frappe.throw(_("You are not allowed to update this Web Form Document"))
if args.name:
# update
doc = frappe.get_doc(args.doctype, args.name)
else:
# insert
doc = frappe.new_doc(args.doctype)
# set values
for fieldname, value in args.iteritems():
if fieldname not in ("web_form", "cmd", "owner"):
if value and value.startswith("{"):
try:
filedata = json.loads(value)
if "__file_attachment" in filedata:
files.append((fieldname, filedata))
continue
except ValueError:
pass
doc.set(fieldname, value)
if args.name:
if has_web_form_permission(doc.doctype, doc.name, "write"):
doc.save(ignore_permissions=True)
else:
# only if permissions are present
doc.save()
else:
# insert
if web_form.login_required and frappe.session.user=="Guest":
frappe.throw(_("You must login to submit this form"))
doc.insert(ignore_permissions = True)
# add files
if files:
for f in files:
fieldname, filedata = f
# remove earlier attached file (if it exists)
if doc.get(fieldname):
remove_file_by_url(doc.get(fieldname), doc.doctype, doc.name)
# save new file
filedoc = save_file(filedata["filename"], filedata["dataurl"],
doc.doctype, doc.name, decode=True)
# update values
doc.set(fieldname, filedoc.file_url)
doc.save()
@frappe.whitelist()
def delete(web_form, name):
web_form = frappe.get_doc("Web Form", web_form)
owner = frappe.db.get_value(web_form.doc_type, name, "owner")
if frappe.session.user == owner and web_form.allow_delete:
frappe.delete_doc(web_form.doc_type, name, ignore_permissions=True)
else:
raise frappe.PermissionError, "Not Allowed"
def has_web_form_permission(doctype, name, ptype='read'):
if frappe.session.user=="Guest":
return False
# owner matches
elif frappe.db.get_value(doctype, name, "owner")==frappe.session.user:
return True
elif frappe.has_website_permission(doctype, ptype=ptype, doc=name):
return True
else:
return False
def get_web_form_list(doctype, txt, filters, limit_start, limit_page_length=20):
from frappe.templates.pages.list import get_list
if not filters:
filters = {}
filters["owner"] = frappe.session.user
return get_list(doctype, txt, filters, limit_start, limit_page_length, ignore_permissions=True)
|
mxklabs/mxklabs-python | refs/heads/master | mxklabs/expr/valtype/bool/semantics.py | 1 | class ValtypeSemantics:
def __init__(self, ctx):
self.ctx = ctx
def is_valid_value(self, valtype, value):
if type(value) == int:
return value == 0 or value == 1
if type(value) == bool:
return True
def values(self, valtype):
yield False
yield True
def num_values(self, valtype):
return 2
def value_to_str(self, valtype, value):
return 'False' if not value else 'True'
|
ilsawa/p2pool-lit | refs/heads/master | p2pool/util/switchprotocol.py | 280 | from twisted.internet import protocol
class FirstByteSwitchProtocol(protocol.Protocol):
p = None
def dataReceived(self, data):
if self.p is None:
if not data: return
serverfactory = self.factory.first_byte_to_serverfactory.get(data[0], self.factory.default_serverfactory)
self.p = serverfactory.buildProtocol(self.transport.getPeer())
self.p.makeConnection(self.transport)
self.p.dataReceived(data)
def connectionLost(self, reason):
if self.p is not None:
self.p.connectionLost(reason)
class FirstByteSwitchFactory(protocol.ServerFactory):
protocol = FirstByteSwitchProtocol
def __init__(self, first_byte_to_serverfactory, default_serverfactory):
self.first_byte_to_serverfactory = first_byte_to_serverfactory
self.default_serverfactory = default_serverfactory
def startFactory(self):
for f in list(self.first_byte_to_serverfactory.values()) + [self.default_serverfactory]:
f.doStart()
def stopFactory(self):
for f in list(self.first_byte_to_serverfactory.values()) + [self.default_serverfactory]:
f.doStop()
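# Usage sketch (editor's illustration; the web and stratum factories are
# assumed to exist elsewhere): route connections whose first byte is 'G'
# (an HTTP GET) to a web factory and everything else to the default:
#
#   from twisted.internet import reactor
#
#   factory = FirstByteSwitchFactory({'G': web_factory}, stratum_factory)
#   reactor.listenTCP(9332, factory)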
|
kylemsguy/FPGA-Litecoin-Miner | refs/heads/master | ICARUS-LX150/MiningSoftware/pyserial-2.6/examples/setup-rfc2217_server-py2exe.py | 7 | # setup script for py2exe to create the miniterm.exe
# $Id: setup-rfc2217_server-py2exe.py 320 2009-08-07 18:22:49Z cliechti $
from distutils.core import setup
import glob, sys, py2exe, os
sys.path.append('..')
sys.argv.extend("py2exe --bundle 1".split())
setup(
name='rfc2217_server',
zipfile=None,
options = {"py2exe":
{
'dist_dir': 'bin',
'excludes': ['javax.comm'],
'compressed': 1,
}
},
console = [
"rfc2217_server.py",
],
)
|
da1z/intellij-community | refs/heads/master | python/testData/completion/percentStringDictRefKeys.py | 31 | f = "fst"
s1 = "snd"
print("first is %(<caret>)s, second is %(snd)s" % {f: 1, s1: 2}) |
alsrgv/tensorflow | refs/heads/master | tensorflow/contrib/nearest_neighbor/python/kernel_tests/hyperplane_lsh_probes_test.py | 25 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for hyperplane_lsh_probes."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.nearest_neighbor.python.ops.nearest_neighbor_ops import hyperplane_lsh_probes
from tensorflow.python.platform import test
class HyperplaneLshProbesTest(test.TestCase):
# We only test the batch functionality of the op here because the multiprobe
# tests in hyperplane_lsh_probes_test.cc already cover most of the LSH
# functionality.
def test_simple_batch(self):
with self.cached_session():
hyperplanes = np.eye(4)
points = np.array([[1.2, 0.5, -0.9, -1.0], [2.0, -3.0, 1.0, -1.5]])
product = np.dot(points, hyperplanes)
num_tables = 2
num_hyperplanes_per_table = 2
num_probes = 4
hashes, tables = hyperplane_lsh_probes(product,
num_tables,
num_hyperplanes_per_table,
num_probes)
self.assertAllEqual(hashes.eval(), [[3, 0, 2, 2], [2, 2, 0, 3]])
self.assertAllEqual(tables.eval(), [[0, 1, 0, 1], [0, 1, 1, 1]])
if __name__ == '__main__':
test.main()
|
cmelange/ansible | refs/heads/devel | lib/ansible/plugins/action/ios.py | 21 | #
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import sys
import copy
from ansible.plugins.action.normal import ActionModule as _ActionModule
from ansible.utils.path import unfrackpath
from ansible.plugins import connection_loader
from ansible.compat.six import iteritems
from ansible.module_utils.ios import ios_argument_spec
from ansible.module_utils.basic import AnsibleFallbackNotFound
from ansible.module_utils._text import to_bytes
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
class ActionModule(_ActionModule):
def run(self, tmp=None, task_vars=None):
if self._play_context.connection != 'local':
return dict(
failed=True,
msg='invalid connection specified, expected connection=local, '
'got %s' % self._play_context.connection
)
provider = self.load_provider()
pc = copy.deepcopy(self._play_context)
pc.connection = 'network_cli'
pc.network_os = 'ios'
pc.port = provider['port'] or self._play_context.port or 22
pc.remote_user = provider['username'] or self._play_context.connection_user
pc.password = provider['password'] or self._play_context.password
pc.private_key_file = provider['ssh_keyfile'] or self._play_context.private_key_file
pc.timeout = provider['timeout'] or self._play_context.timeout
pc.become = provider['authorize'] or False
pc.become_pass = provider['auth_pass']
connection = self._shared_loader_obj.connection_loader.get('persistent', pc, sys.stdin)
socket_path = self._get_socket_path(pc)
if not os.path.exists(socket_path):
# start the connection if it isn't started
rc, out, err = connection.exec_command('open_shell()')
if not rc == 0:
return {'failed': True, 'msg': 'unable to open shell', 'rc': rc}
else:
# make sure we are in the right cli context which should be
# enable mode and not config module
rc, out, err = connection.exec_command('prompt()')
if str(out).strip().endswith(')#'):
display.vvv('wrong context, sending exit to device', self._play_context.remote_addr)
connection.exec_command('exit')
task_vars['ansible_socket'] = socket_path
if self._play_context.become_method == 'enable':
self._play_context.become = False
self._play_context.become_method = None
return super(ActionModule, self).run(tmp, task_vars)
def _get_socket_path(self, play_context):
ssh = connection_loader.get('ssh', class_only=True)
cp = ssh._create_control_path(play_context.remote_addr, play_context.port, play_context.remote_user)
path = unfrackpath("$HOME/.ansible/pc")
return cp % dict(directory=path)
def load_provider(self):
provider = self._task.args.get('provider', {})
for key, value in iteritems(ios_argument_spec):
if key != 'provider' and key not in provider:
if key in self._task.args:
provider[key] = self._task.args[key]
elif 'fallback' in value:
provider[key] = self._fallback(value['fallback'])
elif key not in provider:
provider[key] = None
return provider
def _fallback(self, fallback):
strategy = fallback[0]
args = []
kwargs = {}
for item in fallback[1:]:
if isinstance(item, dict):
kwargs = item
else:
args = item
try:
return strategy(*args, **kwargs)
except AnsibleFallbackNotFound:
pass
|
armet/python-armet | refs/heads/master | tests/connectors/django/settings.py | 1 | # -*- coding: utf-8 -*-
# from __future__ import absolute_import, unicode_literals, division
from os import path
DEBUG = True
TEMPLATE_DEBUG = DEBUG
PROJECT_ROOT = path.abspath(path.dirname(__file__))
ADMINS = (
# ('Your Name', 'your_email@example.com'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
'USER': '',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
# Turn off auto slash handling.
APPEND_SLASH = False
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = []
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# URL configuration.
ROOT_URLCONF = 'tests.connectors.django.urls'
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/var/www/example.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://example.com/media/", "http://media.example.com/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/var/www/example.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://example.com/static/", "http://static.example.com/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = '_lhu2gqrh7j0+9aa_*n-fzerhsar+n$tm1nf+6i+f+$abx#$q@'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates"
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'tests.connectors.django'
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'formatters': {
'simple': {
'format': '%(levelname)s %(message)s'
},
},
'handlers': {
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'simple'
},
},
'loggers': {
'armet': {
'handlers': ['console'],
'level': 'DEBUG',
},
}
}
|
chuan9/chromium-crosswalk | refs/heads/master | tools/chrome_proxy/live_tests/chrome_proxy_measurements.py | 12 | # Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import chrome_proxy_metrics as metrics
from telemetry.core import exceptions
from telemetry.page import page_test
class ChromeProxyLatency(page_test.PageTest):
"""Chrome proxy latency measurement."""
def __init__(self, *args, **kwargs):
super(ChromeProxyLatency, self).__init__(*args, **kwargs)
self._metrics = metrics.ChromeProxyMetric()
def CustomizeBrowserOptions(self, options):
options.AppendExtraBrowserArgs('--enable-spdy-proxy-auth')
def WillNavigateToPage(self, page, tab):
tab.ClearCache(force=True)
def ValidateAndMeasurePage(self, page, tab, results):
# Wait for the load event.
tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 300)
self._metrics.AddResultsForLatency(tab, results)
class ChromeProxyDataSaving(page_test.PageTest):
"""Chrome proxy data saving measurement."""
def __init__(self, *args, **kwargs):
super(ChromeProxyDataSaving, self).__init__(*args, **kwargs)
self._metrics = metrics.ChromeProxyMetric()
def CustomizeBrowserOptions(self, options):
options.AppendExtraBrowserArgs('--enable-spdy-proxy-auth')
def WillNavigateToPage(self, page, tab):
tab.ClearCache(force=True)
self._metrics.Start(page, tab)
def ValidateAndMeasurePage(self, page, tab, results):
# Wait for the load event.
tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 300)
self._metrics.Stop(page, tab)
self._metrics.AddResultsForDataSaving(tab, results)
|
harayz/raspberry_pwn | refs/heads/master | src/pentest/voiper/sulley/impacket/structure.py | 8 | # Copyright (c) 2003-2006 CORE Security Technologies
#
# This software is provided under under a slightly modified version
# of the Apache Software License. See the accompanying LICENSE file
# for more information.
#
# $Id: structure.py,v 1.2 2006/05/23 21:19:26 gera Exp $
#
from struct import pack, unpack, calcsize
class Structure:
""" sublcasses can define commonHdr and/or structure.
each of them is an tuple of either two: (fieldName, format) or three: (fieldName, ':', class) fields.
[it can't be a dictionary, because order is important]
where format specifies how the data in the field will be converted to/from bytes (string)
class is the class to use when unpacking ':' fields.
each field can only contain one value (or an array of values for *)
i.e. struct.pack('Hl',1,2) is valid, but format specifier 'Hl' is not (you must use 2 different fields)
format specifiers:
specifiers from module pack can be used with the same format
see struct.__doc__ (pack/unpack is finally called)
x [padding byte]
c [character]
b [signed byte]
B [unsigned byte]
h [signed short]
H [unsigned short]
l [signed long]
L [unsigned long]
i [signed integer]
I [unsigned integer]
q [signed long long (quad)]
Q [unsigned long long (quad)]
s [string (array of chars), must be preceded with length in format specifier, padded with zeros]
p [pascal string (includes byte count), must be preceded with length in format specifier, padded with zeros]
f [float]
d [double]
= [native byte ordering, standard size and alignment]
@ [native byte ordering, native size and alignment]
! [network byte ordering]
< [little endian]
> [big endian]
usual printf like specifiers can be used (if started with %)
[not recommended, there is no way to unpack this]
%08x will output an 8 bytes hex
%s will output a string
%s\x00 will output a NUL terminated string
%d%d will output 2 decimal digits (against the very same specification of Structure)
...
some additional format specifiers:
: just copy the bytes from the field into the output string (input may be string, other structure, or anything responding to __str__()) (for unpacking, whatever is left is returned)
z same as :, but adds a NUL byte at the end (asciiz) (for unpacking the first NUL byte is used as terminator) [asciiz string]
u same as z, but adds two NUL bytes at the end (after padding to an even size with NULs). (same for unpacking) [unicode string]
w DCE-RPC/NDR string (it's a macro for [ '<L=(len(field)+1)/2','"\x00\x00\x00\x00','<L=(len(field)+1)/2',':' ]
?-field length of field named 'field', formatted as specified with ? ('?' may be '!H' for example). The input value overrides the real length
?1*?2 array of elements. Each formatted as '?2', the number of elements in the array is stored as specified by '?1' (?1 is optional, or can also be a constant (number), for unpacking)
'xxxx literal xxxx (field's value doesn't change the output. quotes must not be closed or escaped)
"xxxx literal xxxx (field's value doesn't change the output. quotes must not be closed or escaped)
_ will not pack the field. Accepts a third argument, which is an unpack code. See _Test_UnpackCode for an example
?=packcode will evaluate packcode in the context of the structure, and pack the result as specified by ?. Unpacking is made plain
?&fieldname "Address of field fieldname".
For packing it will simply pack the id() of fieldname. Or use 0 if fieldname doesn't exist.
For unpacking, it's used to know whether fieldname has to be unpacked or not, i.e. by adding a & field you turn another field (fieldname) into an optional field.
"""
commonHdr = ()
structure = ()
debug = 0
def __init__(self, data = None, alignment = 0):
if not hasattr(self, 'alignment'):
self.alignment = alignment
self.fields = {}
if data is not None:
self.fromString(data)
else:
self.data = None
def setAlignment(self, alignment):
self.alignment = alignment
def setData(self, data):
self.data = data
def packField(self, fieldName, format = None):
if self.debug:
print "packField( %s | %s )" % (fieldName, format)
if format is None:
format = self.formatForField(fieldName)
if self.fields.has_key(fieldName):
ans = self.pack(format, self.fields[fieldName], field = fieldName)
else:
ans = self.pack(format, None, field = fieldName)
if self.debug:
print "\tanswer %r" % ans
return ans
def getData(self):
if self.data is not None:
return self.data
data = ''
for field in self.commonHdr+self.structure:
try:
data += self.packField(field[0], field[1])
except Exception, e:
if self.fields.has_key(field[0]):
e.args += ("When packing field '%s | %s | %r' in %s" % (field[0], field[1], self[field[0]], self.__class__),)
else:
e.args += ("When packing field '%s | %s' in %s" % (field[0], field[1], self.__class__),)
raise
if self.alignment:
if len(data) % self.alignment:
data += ('\x00'*self.alignment)[:-(len(data) % self.alignment)]
#if len(data) % self.alignment: data += ('\x00'*self.alignment)[:-(len(data) % self.alignment)]
return data
def fromString(self, data):
for field in self.commonHdr+self.structure:
if self.debug:
print "fromString( %s | %s | %r )" % (field[0], field[1], data)
size = self.calcUnpackSize(field[1], data, field[0])
dataClassOrCode = str
if len(field) > 2:
dataClassOrCode = field[2]
try:
self[field[0]] = self.unpack(field[1], data[:size], dataClassOrCode = dataClassOrCode, field = field[0])
except Exception,e:
e.args += ("When unpacking field '%s | %s | %r[:%d]'" % (field[0], field[1], data, size),)
raise
size = self.calcPackSize(field[1], self[field[0]], field[0])
if self.alignment and size % self.alignment:
size += self.alignment - (size % self.alignment)
data = data[size:]
return self
def __setitem__(self, key, value):
self.fields[key] = value
self.data = None # force recompute
def __getitem__(self, key):
return self.fields[key]
def __delitem__(self, key):
del self.fields[key]
def __str__(self):
return self.getData()
def __len__(self):
# XXX: improve
return len(self.getData())
def pack(self, format, data, field = None):
if self.debug:
print " pack( %s | %r | %s)" % (format, data, field)
if field:
addressField = self.findAddressFieldFor(field)
if (addressField is not None) and (data is None):
return ''
# void specifier
if format[:1] == '_':
return ''
# quote specifier
if format[:1] == "'" or format[:1] == '"':
return format[1:]
# address specifier
two = format.split('&')
if len(two) == 2:
try:
return self.pack(two[0], data)
except:
if (self.fields.has_key(two[1])) and (self[two[1]] is not None):
return self.pack(two[0], id(self[two[1]]))
else:
return self.pack(two[0], 0)
# code specifier
two = format.split('=')
if len(two) >= 2:
try:
return self.pack(two[0], data)
except:
return self.pack(two[0], eval(two[1], {}, self.fields))
# length specifier
two = format.split('-')
if len(two) == 2:
try:
return self.pack(two[0],data)
except:
return self.pack(two[0], self.calcPackFieldSize(two[1]))
# array specifier
two = format.split('*')
if len(two) == 2:
answer = ''
for each in data:
answer += self.pack(two[1], each)
if two[0]:
if two[0].isdigit():
if int(two[0]) != len(data):
raise Exception, "Array field has a constant size, and it doesn't match the actual value"
else:
return self.pack(two[0], len(data))+answer
return answer
# "printf" string specifier
if format[:1] == '%':
# format string like specifier
return format % data
# asciiz specifier
if format[:1] == 'z':
return str(data)+'\0'
# unicode specifier
if format[:1] == 'u':
return str(data)+'\0\0' + (len(data) & 1 and '\0' or '')
# DCE-RPC/NDR string specifier
if format[:1] == 'w':
if len(data) == 0:
data = '\0\0'
elif len(data) % 2:
data += '\0'
l = pack('<L', len(data)/2)
return '%s\0\0\0\0%s%s' % (l,l,data)
if data is None:
raise Exception, "Trying to pack None"
# literal specifier
if format[:1] == ':':
return str(data)
# struct like specifier
return pack(format, data)
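    # Wire layout produced by the 'w' specifier above (an NDR
    # conformant-and-varying string): a 4-byte element count, 4 zero bytes
    # (the offset), the count repeated, then the UTF-16 data. A sketch:
    #
    #   self.pack('w', 'A\x00B\x00')
    #   # -> '\x02\x00\x00\x00' '\x00\x00\x00\x00' '\x02\x00\x00\x00' 'A\x00B\x00'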
def unpack(self, format, data, dataClassOrCode = str, field = None):
if self.debug:
print " unpack( %s | %r )" % (format, data)
if field:
addressField = self.findAddressFieldFor(field)
if addressField is not None:
if not self[addressField]:
return
# void specifier
if format[:1] == '_':
if dataClassOrCode != str:
return eval(dataClassOrCode, {}, self.fields)
# quote specifier
if format[:1] == "'" or format[:1] == '"':
answer = format[1:]
if answer != data:
raise Exception, "Unpacked data doesn't match constant value '%r' should be '%r'" % (data, answer)
return answer
# address specifier
two = format.split('&')
if len(two) == 2:
return self.unpack(two[0],data)
# code specifier
two = format.split('=')
if len(two) >= 2:
return self.unpack(two[0],data)
# length specifier
two = format.split('-')
if len(two) == 2:
return self.unpack(two[0],data)
# array specifier
two = format.split('*')
if len(two) == 2:
answer = []
sofar = 0
if two[0].isdigit():
number = int(two[0])
elif two[0]:
sofar += self.calcUnpackSize(two[0], data)
number = self.unpack(two[0], data[:sofar])
else:
number = -1
while number and sofar < len(data):
nsofar = sofar + self.calcUnpackSize(two[1],data[sofar:])
answer.append(self.unpack(two[1], data[sofar:nsofar], dataClassOrCode))
number -= 1
sofar = nsofar
return answer
# "printf" string specifier
if format[:1] == '%':
# format string like specifier
return format % data
# asciiz specifier
if format == 'z':
if data[-1] != '\x00':
raise Exception, ("%s 'z' field is not NUL terminated: %r" % (field, data))
return data[:-1] # remove trailing NUL
# unicode specifier
if format == 'u':
if data[-2:] != '\x00\x00':
raise Exception, ("%s 'u' field is not NUL-NUL terminated: %r" % (field, data))
        return data[:-2] # remove trailing double NUL
# DCE-RPC/NDR string specifier
if format == 'w':
l = unpack('<L', data[:4])[0]
return data[12:12+l*2]
# literal specifier
if format == ':':
return dataClassOrCode(data)
# struct like specifier
return unpack(format, data)[0]
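    # Round-trip sketch: unpack() mirrors pack() for each specifier, e.g.
    #   self.unpack('z', 'hola\x00')        # -> 'hola'
    #   self.unpack('<H-data', '\x03\x00')  # -> 3 (length fields unpack plain)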
def calcPackSize(self, format, data, field = None):
# # print " calcPackSize %s:%r" % (format, data)
if field:
addressField = self.findAddressFieldFor(field)
if addressField is not None:
if not self[addressField]:
return 0
# void specifier
if format[:1] == '_':
return 0
# quote specifier
if format[:1] == "'" or format[:1] == '"':
return len(format)-1
# address specifier
two = format.split('&')
if len(two) == 2:
return self.calcPackSize(two[0], data)
# code specifier
two = format.split('=')
if len(two) >= 2:
return self.calcPackSize(two[0], data)
# length specifier
two = format.split('-')
if len(two) == 2:
return self.calcPackSize(two[0], data)
# array specifier
two = format.split('*')
if len(two) == 2:
answer = 0
if two[0].isdigit():
if int(two[0]) != len(data):
raise Exception, "Array field has a constant size, and it doesn't match the actual value"
elif two[0]:
answer += self.calcPackSize(two[0], len(data))
for each in data:
answer += self.calcPackSize(two[1], each)
return answer
# "printf" string specifier
if format[:1] == '%':
# format string like specifier
return len(format % data)
# asciiz specifier
if format[:1] == 'z':
return len(data)+1
        # unicode specifier
if format[:1] == 'u':
l = len(data)
return l + (l & 1 and 3 or 2)
# DCE-RPC/NDR string specifier
if format[:1] == 'w':
l = len(data)
return 12+l+l % 2
# literal specifier
if format[:1] == ':':
return len(data)
# struct like specifier
return calcsize(format)
def calcUnpackSize(self, format, data, field = None):
if self.debug:
print " calcUnpackSize( %s | %s | %r)" % (field, format, data)
addressField = self.findAddressFieldFor(field)
if addressField is not None:
if not self[addressField]:
return 0
try:
lengthField = self.findLengthFieldFor(field)
return self[lengthField]
except:
pass
# XXX: Try to match to actual values, raise if no match
# void specifier
if format[:1] == '_':
return 0
# quote specifier
if format[:1] == "'" or format[:1] == '"':
return len(format)-1
# address specifier
two = format.split('&')
if len(two) == 2:
return self.calcUnpackSize(two[0], data)
# code specifier
two = format.split('=')
if len(two) >= 2:
return self.calcUnpackSize(two[0], data)
# length specifier
two = format.split('-')
if len(two) == 2:
return self.calcUnpackSize(two[0], data)
# array specifier
two = format.split('*')
if len(two) == 2:
answer = 0
if two[0]:
if two[0].isdigit():
number = int(two[0])
else:
answer += self.calcUnpackSize(two[0], data)
number = self.unpack(two[0], data[:answer])
while number:
number -= 1
answer += self.calcUnpackSize(two[1], data[answer:])
else:
while answer < len(data):
answer += self.calcUnpackSize(two[1], data[answer:])
return answer
# "printf" string specifier
if format[:1] == '%':
raise Exception, "Can't guess the size of a printf like specifier for unpacking"
# asciiz specifier
if format[:1] == 'z':
return data.index('\x00')+1
        # unicode specifier
if format[:1] == 'u':
l = data.index('\x00\x00')
return l + (l & 1 and 3 or 2)
# DCE-RPC/NDR string specifier
if format[:1] == 'w':
l = unpack('<L', data[:4])[0]
return 12+l*2
# literal specifier
if format[:1] == ':':
return len(data)
# struct like specifier
return calcsize(format)
def calcPackFieldSize(self, fieldName, format = None):
if format is None:
format = self.formatForField(fieldName)
return self.calcPackSize(format, self[fieldName])
def formatForField(self, fieldName):
for field in self.commonHdr+self.structure:
if field[0] == fieldName:
return field[1]
raise Exception, ("Field %s not found" % fieldName)
def findAddressFieldFor(self, fieldName):
descriptor = '&%s' % fieldName
l = len(descriptor)
for field in self.commonHdr+self.structure:
if field[1][-l:] == descriptor:
return field[0]
return None
def findLengthFieldFor(self, fieldName):
descriptor = '-%s' % fieldName
l = len(descriptor)
for field in self.commonHdr+self.structure:
if field[1][-l:] == descriptor:
return field[0]
return None
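    # Linking sketch: given
    #   structure = (('len','<H-data'), ('pData','<L&data'), ('data',':'))
    # findLengthFieldFor('data') returns 'len' and findAddressFieldFor('data')
    # returns 'pData'; that is what makes 'data' auto-sized and optional.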
def zeroValue(self, format):
two = format.split('*')
if len(two) == 2:
if two[0].isdigit():
return (self.zeroValue(two[1]),)*int(two[0])
        if format.find('*') != -1: return ()
if 's' in format: return ''
if format in ['z',':','u']: return ''
if format == 'w': return '\x00\x00'
return 0
def clear(self):
for field in self.commonHdr + self.structure:
self[field[0]] = self.zeroValue(field[1])
def dump(self, msg, indent = 0):
import types
ind = ' '*indent
print "\n%s" % (msg)
for i in self.fields.keys():
if isinstance(self[i], Structure):
self[i].dump('%s:{' % i, indent = indent + 4)
print "}"
else:
print "%s%s: {%r}" % (ind,i,self[i])
class _StructureTest:
alignment = 0
def create(self,data = None):
if data is not None:
return self.theClass(data, alignment = self.alignment)
else:
return self.theClass(alignment = self.alignment)
def run(self):
print
print "-"*70
testName = self.__class__.__name__
print "starting test: %s....." % testName
a = self.create()
self.populate(a)
a.dump("packing.....")
a_str = str(a)
print "packed: %r" % a_str
print "unpacking....."
b = self.create(a_str)
b.dump("unpacked.....")
print "repacking....."
b_str = str(b)
if b_str != a_str:
print "ERROR: original packed and repacked don't match"
print "packed: %r" % b_str
class _Test_simple(_StructureTest):
class theClass(Structure):
commonHdr = ()
structure = (
('int1', '!L'),
('len1','!L-z1'),
('arr1','B*<L'),
('z1', 'z'),
('u1','u'),
('', '"COCA'),
('len2','!H-:1'),
('', '"COCA'),
(':1', ':'),
('int3','>L'),
('code1','>L=len(arr1)*2+0x1000'),
)
def populate(self, a):
a['default'] = 'hola'
a['int1'] = 0x3131
a['int3'] = 0x45444342
a['z1'] = 'hola'
a['u1'] = 'hola'.encode('utf_16_le')
a[':1'] = ':1234:'
a['arr1'] = (0x12341234,0x88990077,0x41414141)
# a['len1'] = 0x42424242
class _Test_fixedLength(_Test_simple):
def populate(self, a):
_Test_simple.populate(self, a)
a['len1'] = 0x42424242
class _Test_simple_aligned4(_Test_simple):
alignment = 4
class _Test_nested(_StructureTest):
class theClass(Structure):
class _Inner(Structure):
structure = (('data', 'z'),)
structure = (
('nest1', ':', _Inner),
('nest2', ':', _Inner),
('int', '<L'),
)
def populate(self, a):
a['nest1'] = _Test_nested.theClass._Inner()
a['nest2'] = _Test_nested.theClass._Inner()
a['nest1']['data'] = 'hola manola'
a['nest2']['data'] = 'chau loco'
a['int'] = 0x12345678
class _Test_Optional(_StructureTest):
class theClass(Structure):
structure = (
('pName','<L&Name'),
('pList','<L&List'),
('Name','w'),
('List','<H*<L'),
)
def populate(self, a):
a['Name'] = 'Optional test'
a['List'] = (1,2,3,4)
class _Test_Optional_sparse(_Test_Optional):
def populate(self, a):
_Test_Optional.populate(self, a)
del a['Name']
class _Test_AsciiZArray(_StructureTest):
class theClass(Structure):
structure = (
('head','<L'),
('array','B*z'),
('tail','<L'),
)
def populate(self, a):
a['head'] = 0x1234
a['tail'] = 0xabcd
a['array'] = ('hola','manola','te traje')
class _Test_UnpackCode(_StructureTest):
class theClass(Structure):
structure = (
('leni','<L=len(uno)*2'),
('cuchi','_-uno','leni/2'),
('uno',':'),
('dos',':'),
)
def populate(self, a):
a['uno'] = 'soy un loco!'
a['dos'] = 'que haces fiera'
if __name__ == '__main__':
_Test_simple().run()
try:
_Test_fixedLength().run()
except:
print "cannot repack because length is bogus"
_Test_simple_aligned4().run()
_Test_nested().run()
_Test_Optional().run()
_Test_Optional_sparse().run()
_Test_AsciiZArray().run()
_Test_UnpackCode().run()
|
uehara1414/serverctl-prototype | refs/heads/master | serverctl/migrations/0008_auto_20170412_1031.py | 1 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-04-12 01:31
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('serverctl', '0007_serverhistory_data_s3_key'),
]
operations = [
migrations.AlterModelOptions(
name='serverhistory',
options={'get_latest_by': 'created_at'},
),
migrations.AddField(
model_name='gameserver',
name='ip',
field=models.CharField(default='', max_length=64),
),
]
|
nicolargo/intellij-community | refs/heads/master | python/testData/inspections/PyUnboundLocalVariableInspection/UnboundNonLocal.py | 83 | def f1():
nonlocal <warning descr="Nonlocal variable 'x' must be bound in an outer function scope">x</warning> #fail
def f2():
def g():
nonlocal <warning descr="Nonlocal variable 'x' must be bound in an outer function scope">x</warning> #fail
print(x)
x = 1
def f3():
nonlocal <warning descr="Nonlocal variable 'x' must be bound in an outer function scope">x</warning> #fail
x = 2
def f4():
x = 0
def g():
nonlocal x #pass
x = 2
return x
return g()
|
rikima/spark | refs/heads/master | python/pyspark/context.py | 1 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import print_function
import os
import shutil
import signal
import sys
import threading
import warnings
from threading import RLock
from tempfile import NamedTemporaryFile
from py4j.protocol import Py4JError
from pyspark import accumulators
from pyspark.accumulators import Accumulator
from pyspark.broadcast import Broadcast, BroadcastPickleRegistry
from pyspark.conf import SparkConf
from pyspark.files import SparkFiles
from pyspark.java_gateway import launch_gateway
from pyspark.serializers import PickleSerializer, BatchedSerializer, UTF8Deserializer, \
PairDeserializer, AutoBatchedSerializer, NoOpSerializer
from pyspark.storagelevel import StorageLevel
from pyspark.rdd import RDD, _load_from_socket, ignore_unicode_prefix
from pyspark.traceback_utils import CallSite, first_spark_call
from pyspark.status import StatusTracker
from pyspark.profiler import ProfilerCollector, BasicProfiler
if sys.version > '3':
xrange = range
__all__ = ['SparkContext']
# These are special default configs for PySpark, they will overwrite
# the default ones for Spark if they are not configured by user.
DEFAULT_CONFIGS = {
"spark.serializer.objectStreamReset": 100,
"spark.rdd.compress": True,
}
class SparkContext(object):
"""
Main entry point for Spark functionality. A SparkContext represents the
connection to a Spark cluster, and can be used to create L{RDD} and
broadcast variables on that cluster.
"""
_gateway = None
_jvm = None
_next_accum_id = 0
_active_spark_context = None
_lock = RLock()
_python_includes = None # zip and egg files that need to be added to PYTHONPATH
PACKAGE_EXTENSIONS = ('.zip', '.egg', '.jar')
def __init__(self, master=None, appName=None, sparkHome=None, pyFiles=None,
environment=None, batchSize=0, serializer=PickleSerializer(), conf=None,
gateway=None, jsc=None, profiler_cls=BasicProfiler):
"""
Create a new SparkContext. At least the master and app name should be set,
either through the named parameters here or through C{conf}.
:param master: Cluster URL to connect to
(e.g. mesos://host:port, spark://host:port, local[4]).
:param appName: A name for your job, to display on the cluster web UI.
:param sparkHome: Location where Spark is installed on cluster nodes.
:param pyFiles: Collection of .zip or .py files to send to the cluster
and add to PYTHONPATH. These can be paths on the local file
system or HDFS, HTTP, HTTPS, or FTP URLs.
:param environment: A dictionary of environment variables to set on
worker nodes.
:param batchSize: The number of Python objects represented as a single
Java object. Set 1 to disable batching, 0 to automatically choose
the batch size based on object sizes, or -1 to use an unlimited
batch size
:param serializer: The serializer for RDDs.
:param conf: A L{SparkConf} object setting Spark properties.
:param gateway: Use an existing gateway and JVM, otherwise a new JVM
will be instantiated.
:param jsc: The JavaSparkContext instance (optional).
:param profiler_cls: A class of custom Profiler used to do profiling
(default is pyspark.profiler.BasicProfiler).
>>> from pyspark.context import SparkContext
>>> sc = SparkContext('local', 'test')
>>> sc2 = SparkContext('local', 'test2') # doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
...
ValueError:...
"""
self._callsite = first_spark_call() or CallSite(None, None, None)
SparkContext._ensure_initialized(self, gateway=gateway, conf=conf)
try:
self._do_init(master, appName, sparkHome, pyFiles, environment, batchSize, serializer,
conf, jsc, profiler_cls)
except:
# If an error occurs, clean up in order to allow future SparkContext creation:
self.stop()
raise
def _do_init(self, master, appName, sparkHome, pyFiles, environment, batchSize, serializer,
conf, jsc, profiler_cls):
self.environment = environment or {}
# java gateway must have been launched at this point.
if conf is not None and conf._jconf is not None:
# conf has been initialized in JVM properly, so use conf directly. This represents the
# scenario that JVM has been launched before SparkConf is created (e.g. SparkContext is
# created and then stopped, and we create a new SparkConf and new SparkContext again)
self._conf = conf
else:
self._conf = SparkConf(_jvm=SparkContext._jvm)
if conf is not None:
for k, v in conf.getAll():
self._conf.set(k, v)
self._batchSize = batchSize # -1 represents an unlimited batch size
self._unbatched_serializer = serializer
if batchSize == 0:
self.serializer = AutoBatchedSerializer(self._unbatched_serializer)
else:
self.serializer = BatchedSerializer(self._unbatched_serializer,
batchSize)
# Set any parameters passed directly to us on the conf
if master:
self._conf.setMaster(master)
if appName:
self._conf.setAppName(appName)
if sparkHome:
self._conf.setSparkHome(sparkHome)
if environment:
for key, value in environment.items():
self._conf.setExecutorEnv(key, value)
for key, value in DEFAULT_CONFIGS.items():
self._conf.setIfMissing(key, value)
# Check that we have at least the required parameters
if not self._conf.contains("spark.master"):
raise Exception("A master URL must be set in your configuration")
if not self._conf.contains("spark.app.name"):
raise Exception("An application name must be set in your configuration")
# Read back our properties from the conf in case we loaded some of them from
# the classpath or an external config file
self.master = self._conf.get("spark.master")
self.appName = self._conf.get("spark.app.name")
self.sparkHome = self._conf.get("spark.home", None)
for (k, v) in self._conf.getAll():
if k.startswith("spark.executorEnv."):
varName = k[len("spark.executorEnv."):]
self.environment[varName] = v
self.environment["PYTHONHASHSEED"] = os.environ.get("PYTHONHASHSEED", "0")
# Create the Java SparkContext through Py4J
self._jsc = jsc or self._initialize_context(self._conf._jconf)
# Reset the SparkConf to the one actually used by the SparkContext in JVM.
self._conf = SparkConf(_jconf=self._jsc.sc().conf())
# Create a single Accumulator in Java that we'll send all our updates through;
# they will be passed back to us through a TCP server
auth_token = self._gateway.gateway_parameters.auth_token
self._accumulatorServer = accumulators._start_update_server(auth_token)
(host, port) = self._accumulatorServer.server_address
self._javaAccumulator = self._jvm.PythonAccumulatorV2(host, port, auth_token)
self._jsc.sc().register(self._javaAccumulator)
self.pythonExec = os.environ.get("PYSPARK_PYTHON", 'python')
self.pythonVer = "%d.%d" % sys.version_info[:2]
# Broadcast's __reduce__ method stores Broadcast instances here.
# This allows other code to determine which Broadcast instances have
# been pickled, so it can determine which Java broadcast objects to
# send.
self._pickled_broadcast_vars = BroadcastPickleRegistry()
SparkFiles._sc = self
root_dir = SparkFiles.getRootDirectory()
sys.path.insert(1, root_dir)
# Deploy any code dependencies specified in the constructor
self._python_includes = list()
for path in (pyFiles or []):
self.addPyFile(path)
# Deploy code dependencies set by spark-submit; these will already have been added
# with SparkContext.addFile, so we just need to add them to the PYTHONPATH
for path in self._conf.get("spark.submit.pyFiles", "").split(","):
if path != "":
(dirname, filename) = os.path.split(path)
try:
filepath = os.path.join(SparkFiles.getRootDirectory(), filename)
if not os.path.exists(filepath):
# In case of YARN with shell mode, 'spark.submit.pyFiles' files are
# not added via SparkContext.addFile. Here we check if the file exists,
# try to copy and then add it to the path. See SPARK-21945.
shutil.copyfile(path, filepath)
if filename[-4:].lower() in self.PACKAGE_EXTENSIONS:
self._python_includes.append(filename)
sys.path.insert(1, filepath)
except Exception:
warnings.warn(
"Failed to add file [%s] speficied in 'spark.submit.pyFiles' to "
"Python path:\n %s" % (path, "\n ".join(sys.path)),
RuntimeWarning)
# Create a temporary directory inside spark.local.dir:
local_dir = self._jvm.org.apache.spark.util.Utils.getLocalDir(self._jsc.sc().conf())
self._temp_dir = \
self._jvm.org.apache.spark.util.Utils.createTempDir(local_dir, "pyspark") \
.getAbsolutePath()
# profiling stats collected for each PythonRDD
if self._conf.get("spark.python.profile", "false") == "true":
dump_path = self._conf.get("spark.python.profile.dump", None)
self.profiler_collector = ProfilerCollector(profiler_cls, dump_path)
else:
self.profiler_collector = None
# create a signal handler which would be invoked on receiving SIGINT
def signal_handler(signal, frame):
self.cancelAllJobs()
raise KeyboardInterrupt()
# see http://stackoverflow.com/questions/23206787/
if isinstance(threading.current_thread(), threading._MainThread):
signal.signal(signal.SIGINT, signal_handler)
def __repr__(self):
return "<SparkContext master={master} appName={appName}>".format(
master=self.master,
appName=self.appName,
)
def _repr_html_(self):
return """
<div>
<p><b>SparkContext</b></p>
<p><a href="{sc.uiWebUrl}">Spark UI</a></p>
<dl>
<dt>Version</dt>
<dd><code>v{sc.version}</code></dd>
<dt>Master</dt>
<dd><code>{sc.master}</code></dd>
<dt>AppName</dt>
<dd><code>{sc.appName}</code></dd>
</dl>
</div>
""".format(
sc=self
)
def _initialize_context(self, jconf):
"""
Initialize SparkContext in function to allow subclass specific initialization
"""
return self._jvm.JavaSparkContext(jconf)
@classmethod
def _ensure_initialized(cls, instance=None, gateway=None, conf=None):
"""
Checks whether a SparkContext is initialized or not.
Throws error if a SparkContext is already running.
"""
with SparkContext._lock:
if not SparkContext._gateway:
SparkContext._gateway = gateway or launch_gateway(conf)
SparkContext._jvm = SparkContext._gateway.jvm
if instance:
if (SparkContext._active_spark_context and
SparkContext._active_spark_context != instance):
currentMaster = SparkContext._active_spark_context.master
currentAppName = SparkContext._active_spark_context.appName
callsite = SparkContext._active_spark_context._callsite
# Raise error if there is already a running Spark context
raise ValueError(
"Cannot run multiple SparkContexts at once; "
"existing SparkContext(app=%s, master=%s)"
" created by %s at %s:%s "
% (currentAppName, currentMaster,
callsite.function, callsite.file, callsite.linenum))
else:
SparkContext._active_spark_context = instance
def __getnewargs__(self):
# This method is called when attempting to pickle SparkContext, which is always an error:
raise Exception(
"It appears that you are attempting to reference SparkContext from a broadcast "
"variable, action, or transformation. SparkContext can only be used on the driver, "
"not in code that it run on workers. For more information, see SPARK-5063."
)
def __enter__(self):
"""
Enable 'with SparkContext(...) as sc: app(sc)' syntax.
"""
return self
def __exit__(self, type, value, trace):
"""
Enable 'with SparkContext(...) as sc: app' syntax.
Specifically stop the context on exit of the with block.
"""
self.stop()
@classmethod
def getOrCreate(cls, conf=None):
"""
Get or instantiate a SparkContext and register it as a singleton object.
:param conf: SparkConf (optional)
"""
with SparkContext._lock:
if SparkContext._active_spark_context is None:
SparkContext(conf=conf or SparkConf())
return SparkContext._active_spark_context
def setLogLevel(self, logLevel):
"""
Control our logLevel. This overrides any user-defined log settings.
Valid log levels include: ALL, DEBUG, ERROR, FATAL, INFO, OFF, TRACE, WARN
"""
self._jsc.setLogLevel(logLevel)
@classmethod
def setSystemProperty(cls, key, value):
"""
        Set a Java system property, such as spark.executor.memory. This
must be invoked before instantiating SparkContext.
"""
SparkContext._ensure_initialized()
SparkContext._jvm.java.lang.System.setProperty(key, value)
@property
def version(self):
"""
The version of Spark on which this application is running.
"""
return self._jsc.version()
@property
@ignore_unicode_prefix
def applicationId(self):
"""
A unique identifier for the Spark application.
Its format depends on the scheduler implementation.
* in case of local spark app something like 'local-1433865536131'
* in case of YARN something like 'application_1433865536131_34483'
>>> sc.applicationId # doctest: +ELLIPSIS
u'local-...'
"""
return self._jsc.sc().applicationId()
@property
def uiWebUrl(self):
"""Return the URL of the SparkUI instance started by this SparkContext"""
return self._jsc.sc().uiWebUrl().get()
@property
def startTime(self):
"""Return the epoch time when the Spark Context was started."""
return self._jsc.startTime()
@property
def defaultParallelism(self):
"""
Default level of parallelism to use when not given by user (e.g. for
reduce tasks)
"""
return self._jsc.sc().defaultParallelism()
@property
def defaultMinPartitions(self):
"""
Default min number of partitions for Hadoop RDDs when not given by user
"""
return self._jsc.sc().defaultMinPartitions()
def stop(self):
"""
Shut down the SparkContext.
"""
if getattr(self, "_jsc", None):
try:
self._jsc.stop()
except Py4JError:
# Case: SPARK-18523
warnings.warn(
'Unable to cleanly shutdown Spark JVM process.'
' It is possible that the process has crashed,'
' been killed or may also be in a zombie state.',
RuntimeWarning
)
pass
finally:
self._jsc = None
if getattr(self, "_accumulatorServer", None):
self._accumulatorServer.shutdown()
self._accumulatorServer = None
with SparkContext._lock:
SparkContext._active_spark_context = None
def emptyRDD(self):
"""
Create an RDD that has no partitions or elements.
"""
return RDD(self._jsc.emptyRDD(), self, NoOpSerializer())
def range(self, start, end=None, step=1, numSlices=None):
"""
Create a new RDD of int containing elements from `start` to `end`
(exclusive), increased by `step` every element. Can be called the same
way as python's built-in range() function. If called with a single argument,
the argument is interpreted as `end`, and `start` is set to 0.
:param start: the start value
:param end: the end value (exclusive)
:param step: the incremental step (default: 1)
:param numSlices: the number of partitions of the new RDD
:return: An RDD of int
>>> sc.range(5).collect()
[0, 1, 2, 3, 4]
>>> sc.range(2, 4).collect()
[2, 3]
>>> sc.range(1, 7, 2).collect()
[1, 3, 5]
"""
if end is None:
end = start
start = 0
return self.parallelize(xrange(start, end, step), numSlices)
def parallelize(self, c, numSlices=None):
"""
        Distribute a local Python collection to form an RDD. For performance,
        using xrange is recommended if the input represents a range.
>>> sc.parallelize([0, 2, 3, 4, 6], 5).glom().collect()
[[0], [2], [3], [4], [6]]
>>> sc.parallelize(xrange(0, 6, 2), 5).glom().collect()
[[], [0], [], [2], [4]]
"""
numSlices = int(numSlices) if numSlices is not None else self.defaultParallelism
if isinstance(c, xrange):
size = len(c)
if size == 0:
return self.parallelize([], numSlices)
step = c[1] - c[0] if size > 1 else 1
start0 = c[0]
def getStart(split):
return start0 + int((split * size / numSlices)) * step
def f(split, iterator):
return xrange(getStart(split), getStart(split + 1), step)
return self.parallelize([], numSlices).mapPartitionsWithIndex(f)
# Make sure we distribute data evenly if it's smaller than self.batchSize
if "__len__" not in dir(c):
c = list(c) # Make it a list so we can compute its length
batchSize = max(1, min(len(c) // numSlices, self._batchSize or 1024))
serializer = BatchedSerializer(self._unbatched_serializer, batchSize)
jrdd = self._serialize_to_jvm(c, numSlices, serializer)
return RDD(jrdd, self, serializer)
def _serialize_to_jvm(self, data, parallelism, serializer):
"""
Calling the Java parallelize() method with an ArrayList is too slow,
because it sends O(n) Py4J commands. As an alternative, serialized
        objects are written to a file and loaded back via readRDDFromFile().
"""
tempFile = NamedTemporaryFile(delete=False, dir=self._temp_dir)
try:
serializer.dump_stream(data, tempFile)
tempFile.close()
readRDDFromFile = self._jvm.PythonRDD.readRDDFromFile
return readRDDFromFile(self._jsc, tempFile.name, parallelism)
finally:
            # readRDDFromFile eagerly reads the file so we can delete right after.
os.unlink(tempFile.name)
def pickleFile(self, name, minPartitions=None):
"""
Load an RDD previously saved using L{RDD.saveAsPickleFile} method.
>>> tmpFile = NamedTemporaryFile(delete=True)
>>> tmpFile.close()
>>> sc.parallelize(range(10)).saveAsPickleFile(tmpFile.name, 5)
>>> sorted(sc.pickleFile(tmpFile.name, 3).collect())
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
"""
minPartitions = minPartitions or self.defaultMinPartitions
return RDD(self._jsc.objectFile(name, minPartitions), self)
@ignore_unicode_prefix
def textFile(self, name, minPartitions=None, use_unicode=True):
"""
Read a text file from HDFS, a local file system (available on all
nodes), or any Hadoop-supported file system URI, and return it as an
RDD of Strings.
If use_unicode is False, the strings will be kept as `str` (encoding
as `utf-8`), which is faster and smaller than unicode. (Added in
Spark 1.2)
>>> path = os.path.join(tempdir, "sample-text.txt")
>>> with open(path, "w") as testFile:
... _ = testFile.write("Hello world!")
>>> textFile = sc.textFile(path)
>>> textFile.collect()
[u'Hello world!']
"""
minPartitions = minPartitions or min(self.defaultParallelism, 2)
return RDD(self._jsc.textFile(name, minPartitions), self,
UTF8Deserializer(use_unicode))
@ignore_unicode_prefix
def wholeTextFiles(self, path, minPartitions=None, use_unicode=True):
"""
Read a directory of text files from HDFS, a local file system
(available on all nodes), or any Hadoop-supported file system
URI. Each file is read as a single record and returned in a
key-value pair, where the key is the path of each file, the
value is the content of each file.
If use_unicode is False, the strings will be kept as `str` (encoding
as `utf-8`), which is faster and smaller than unicode. (Added in
Spark 1.2)
For example, if you have the following files::
hdfs://a-hdfs-path/part-00000
hdfs://a-hdfs-path/part-00001
...
hdfs://a-hdfs-path/part-nnnnn
Do C{rdd = sparkContext.wholeTextFiles("hdfs://a-hdfs-path")},
then C{rdd} contains::
(a-hdfs-path/part-00000, its content)
(a-hdfs-path/part-00001, its content)
...
(a-hdfs-path/part-nnnnn, its content)
.. note:: Small files are preferred, as each file will be loaded
fully in memory.
>>> dirPath = os.path.join(tempdir, "files")
>>> os.mkdir(dirPath)
>>> with open(os.path.join(dirPath, "1.txt"), "w") as file1:
... _ = file1.write("1")
>>> with open(os.path.join(dirPath, "2.txt"), "w") as file2:
... _ = file2.write("2")
>>> textFiles = sc.wholeTextFiles(dirPath)
>>> sorted(textFiles.collect())
[(u'.../1.txt', u'1'), (u'.../2.txt', u'2')]
"""
minPartitions = minPartitions or self.defaultMinPartitions
return RDD(self._jsc.wholeTextFiles(path, minPartitions), self,
PairDeserializer(UTF8Deserializer(use_unicode), UTF8Deserializer(use_unicode)))
def binaryFiles(self, path, minPartitions=None):
"""
.. note:: Experimental
Read a directory of binary files from HDFS, a local file system
(available on all nodes), or any Hadoop-supported file system URI
as a byte array. Each file is read as a single record and returned
in a key-value pair, where the key is the path of each file, the
value is the content of each file.
        .. note:: Small files are preferred; large files are also allowable, but
            may cause poor performance.
"""
minPartitions = minPartitions or self.defaultMinPartitions
return RDD(self._jsc.binaryFiles(path, minPartitions), self,
PairDeserializer(UTF8Deserializer(), NoOpSerializer()))
def binaryRecords(self, path, recordLength):
"""
.. note:: Experimental
Load data from a flat binary file, assuming each record is a set of numbers
with the specified numerical format (see ByteBuffer), and the number of
bytes per record is constant.
:param path: Directory to the input data files
:param recordLength: The length at which to split the records
"""
return RDD(self._jsc.binaryRecords(path, recordLength), self, NoOpSerializer())
def _dictToJavaMap(self, d):
jm = self._jvm.java.util.HashMap()
if not d:
d = {}
for k, v in d.items():
jm[k] = v
return jm
def sequenceFile(self, path, keyClass=None, valueClass=None, keyConverter=None,
valueConverter=None, minSplits=None, batchSize=0):
"""
Read a Hadoop SequenceFile with arbitrary key and value Writable class from HDFS,
a local file system (available on all nodes), or any Hadoop-supported file system URI.
The mechanism is as follows:
1. A Java RDD is created from the SequenceFile or other InputFormat, and the key
and value Writable classes
2. Serialization is attempted via Pyrolite pickling
3. If this fails, the fallback is to call 'toString' on each key and value
4. C{PickleSerializer} is used to deserialize pickled objects on the Python side
        :param path: path to the SequenceFile
:param keyClass: fully qualified classname of key Writable class
(e.g. "org.apache.hadoop.io.Text")
:param valueClass: fully qualified classname of value Writable class
(e.g. "org.apache.hadoop.io.LongWritable")
:param keyConverter:
:param valueConverter:
:param minSplits: minimum splits in dataset
(default min(2, sc.defaultParallelism))
:param batchSize: The number of Python objects represented as a single
Java object. (default 0, choose batchSize automatically)
"""
minSplits = minSplits or min(self.defaultParallelism, 2)
jrdd = self._jvm.PythonRDD.sequenceFile(self._jsc, path, keyClass, valueClass,
keyConverter, valueConverter, minSplits, batchSize)
return RDD(jrdd, self)
def newAPIHadoopFile(self, path, inputFormatClass, keyClass, valueClass, keyConverter=None,
valueConverter=None, conf=None, batchSize=0):
"""
Read a 'new API' Hadoop InputFormat with arbitrary key and value class from HDFS,
a local file system (available on all nodes), or any Hadoop-supported file system URI.
The mechanism is the same as for sc.sequenceFile.
A Hadoop configuration can be passed in as a Python dict. This will be converted into a
Configuration in Java
:param path: path to Hadoop file
:param inputFormatClass: fully qualified classname of Hadoop InputFormat
(e.g. "org.apache.hadoop.mapreduce.lib.input.TextInputFormat")
:param keyClass: fully qualified classname of key Writable class
(e.g. "org.apache.hadoop.io.Text")
:param valueClass: fully qualified classname of value Writable class
(e.g. "org.apache.hadoop.io.LongWritable")
:param keyConverter: (None by default)
:param valueConverter: (None by default)
:param conf: Hadoop configuration, passed in as a dict
(None by default)
:param batchSize: The number of Python objects represented as a single
Java object. (default 0, choose batchSize automatically)
"""
jconf = self._dictToJavaMap(conf)
jrdd = self._jvm.PythonRDD.newAPIHadoopFile(self._jsc, path, inputFormatClass, keyClass,
valueClass, keyConverter, valueConverter,
jconf, batchSize)
return RDD(jrdd, self)
def newAPIHadoopRDD(self, inputFormatClass, keyClass, valueClass, keyConverter=None,
valueConverter=None, conf=None, batchSize=0):
"""
Read a 'new API' Hadoop InputFormat with arbitrary key and value class, from an arbitrary
Hadoop configuration, which is passed in as a Python dict.
This will be converted into a Configuration in Java.
The mechanism is the same as for sc.sequenceFile.
:param inputFormatClass: fully qualified classname of Hadoop InputFormat
(e.g. "org.apache.hadoop.mapreduce.lib.input.TextInputFormat")
:param keyClass: fully qualified classname of key Writable class
(e.g. "org.apache.hadoop.io.Text")
:param valueClass: fully qualified classname of value Writable class
(e.g. "org.apache.hadoop.io.LongWritable")
:param keyConverter: (None by default)
:param valueConverter: (None by default)
:param conf: Hadoop configuration, passed in as a dict
(None by default)
:param batchSize: The number of Python objects represented as a single
Java object. (default 0, choose batchSize automatically)
"""
jconf = self._dictToJavaMap(conf)
jrdd = self._jvm.PythonRDD.newAPIHadoopRDD(self._jsc, inputFormatClass, keyClass,
valueClass, keyConverter, valueConverter,
jconf, batchSize)
return RDD(jrdd, self)
def hadoopFile(self, path, inputFormatClass, keyClass, valueClass, keyConverter=None,
valueConverter=None, conf=None, batchSize=0):
"""
Read an 'old' Hadoop InputFormat with arbitrary key and value class from HDFS,
a local file system (available on all nodes), or any Hadoop-supported file system URI.
The mechanism is the same as for sc.sequenceFile.
A Hadoop configuration can be passed in as a Python dict. This will be converted into a
Configuration in Java.
:param path: path to Hadoop file
:param inputFormatClass: fully qualified classname of Hadoop InputFormat
(e.g. "org.apache.hadoop.mapred.TextInputFormat")
:param keyClass: fully qualified classname of key Writable class
(e.g. "org.apache.hadoop.io.Text")
:param valueClass: fully qualified classname of value Writable class
(e.g. "org.apache.hadoop.io.LongWritable")
:param keyConverter: (None by default)
:param valueConverter: (None by default)
:param conf: Hadoop configuration, passed in as a dict
(None by default)
:param batchSize: The number of Python objects represented as a single
Java object. (default 0, choose batchSize automatically)
"""
jconf = self._dictToJavaMap(conf)
jrdd = self._jvm.PythonRDD.hadoopFile(self._jsc, path, inputFormatClass, keyClass,
valueClass, keyConverter, valueConverter,
jconf, batchSize)
return RDD(jrdd, self)
def hadoopRDD(self, inputFormatClass, keyClass, valueClass, keyConverter=None,
valueConverter=None, conf=None, batchSize=0):
"""
Read an 'old' Hadoop InputFormat with arbitrary key and value class, from an arbitrary
Hadoop configuration, which is passed in as a Python dict.
This will be converted into a Configuration in Java.
The mechanism is the same as for sc.sequenceFile.
:param inputFormatClass: fully qualified classname of Hadoop InputFormat
(e.g. "org.apache.hadoop.mapred.TextInputFormat")
:param keyClass: fully qualified classname of key Writable class
(e.g. "org.apache.hadoop.io.Text")
:param valueClass: fully qualified classname of value Writable class
(e.g. "org.apache.hadoop.io.LongWritable")
:param keyConverter: (None by default)
:param valueConverter: (None by default)
:param conf: Hadoop configuration, passed in as a dict
(None by default)
:param batchSize: The number of Python objects represented as a single
Java object. (default 0, choose batchSize automatically)
"""
jconf = self._dictToJavaMap(conf)
jrdd = self._jvm.PythonRDD.hadoopRDD(self._jsc, inputFormatClass, keyClass,
valueClass, keyConverter, valueConverter,
jconf, batchSize)
return RDD(jrdd, self)
def _checkpointFile(self, name, input_deserializer):
jrdd = self._jsc.checkpointFile(name)
return RDD(jrdd, self, input_deserializer)
@ignore_unicode_prefix
def union(self, rdds):
"""
Build the union of a list of RDDs.
This supports unions() of RDDs with different serialized formats,
although this forces them to be reserialized using the default
serializer:
>>> path = os.path.join(tempdir, "union-text.txt")
>>> with open(path, "w") as testFile:
... _ = testFile.write("Hello")
>>> textFile = sc.textFile(path)
>>> textFile.collect()
[u'Hello']
>>> parallelized = sc.parallelize(["World!"])
>>> sorted(sc.union([textFile, parallelized]).collect())
[u'Hello', 'World!']
"""
first_jrdd_deserializer = rdds[0]._jrdd_deserializer
if any(x._jrdd_deserializer != first_jrdd_deserializer for x in rdds):
rdds = [x._reserialize() for x in rdds]
first = rdds[0]._jrdd
rest = [x._jrdd for x in rdds[1:]]
return RDD(self._jsc.union(first, rest), self, rdds[0]._jrdd_deserializer)
def broadcast(self, value):
"""
Broadcast a read-only variable to the cluster, returning a
L{Broadcast<pyspark.broadcast.Broadcast>}
object for reading it in distributed functions. The variable will
be sent to each cluster only once.
"""
return Broadcast(self, value, self._pickled_broadcast_vars)
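    # Usage sketch (assuming a live SparkContext ``sc``):
    #   b = sc.broadcast([1, 2, 3])
    #   sc.parallelize([0, 2]).map(lambda i: b.value[i]).collect()  # -> [1, 3]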
def accumulator(self, value, accum_param=None):
"""
Create an L{Accumulator} with the given initial value, using a given
L{AccumulatorParam} helper object to define how to add values of the
data type if provided. Default AccumulatorParams are used for integers
and floating-point numbers if you do not provide one. For other types,
a custom AccumulatorParam can be used.
"""
if accum_param is None:
if isinstance(value, int):
accum_param = accumulators.INT_ACCUMULATOR_PARAM
elif isinstance(value, float):
accum_param = accumulators.FLOAT_ACCUMULATOR_PARAM
elif isinstance(value, complex):
accum_param = accumulators.COMPLEX_ACCUMULATOR_PARAM
else:
raise TypeError("No default accumulator param for type %s" % type(value))
SparkContext._next_accum_id += 1
return Accumulator(SparkContext._next_accum_id - 1, value, accum_param)
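    # Usage sketch (assuming a live SparkContext ``sc``):
    #   acc = sc.accumulator(0)
    #   sc.parallelize([1, 2, 3, 4]).foreach(lambda x: acc.add(x))
    #   acc.value  # -> 10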
def addFile(self, path, recursive=False):
"""
Add a file to be downloaded with this Spark job on every node.
The C{path} passed can be either a local file, a file in HDFS
(or other Hadoop-supported filesystems), or an HTTP, HTTPS or
FTP URI.
To access the file in Spark jobs, use
L{SparkFiles.get(fileName)<pyspark.files.SparkFiles.get>} with the
filename to find its download location.
A directory can be given if the recursive option is set to True.
Currently directories are only supported for Hadoop-supported filesystems.
.. note:: A path can be added only once. Subsequent additions of the same path are ignored.
>>> from pyspark import SparkFiles
>>> path = os.path.join(tempdir, "test.txt")
>>> with open(path, "w") as testFile:
... _ = testFile.write("100")
>>> sc.addFile(path)
>>> def func(iterator):
... with open(SparkFiles.get("test.txt")) as testFile:
... fileVal = int(testFile.readline())
... return [x * fileVal for x in iterator]
>>> sc.parallelize([1, 2, 3, 4]).mapPartitions(func).collect()
[100, 200, 300, 400]
"""
self._jsc.sc().addFile(path, recursive)
def addPyFile(self, path):
"""
Add a .py or .zip dependency for all tasks to be executed on this
SparkContext in the future. The C{path} passed can be either a local
file, a file in HDFS (or other Hadoop-supported filesystems), or an
HTTP, HTTPS or FTP URI.
.. note:: A path can be added only once. Subsequent additions of the same path are ignored.
"""
self.addFile(path)
(dirname, filename) = os.path.split(path) # dirname may be directory or HDFS/S3 prefix
if filename[-4:].lower() in self.PACKAGE_EXTENSIONS:
self._python_includes.append(filename)
# for tests in local mode
sys.path.insert(1, os.path.join(SparkFiles.getRootDirectory(), filename))
if sys.version > '3':
import importlib
importlib.invalidate_caches()
def setCheckpointDir(self, dirName):
"""
Set the directory under which RDDs are going to be checkpointed. The
        directory must be an HDFS path if running on a cluster.
"""
self._jsc.sc().setCheckpointDir(dirName)
def _getJavaStorageLevel(self, storageLevel):
"""
Returns a Java StorageLevel based on a pyspark.StorageLevel.
"""
if not isinstance(storageLevel, StorageLevel):
raise Exception("storageLevel must be of type pyspark.StorageLevel")
newStorageLevel = self._jvm.org.apache.spark.storage.StorageLevel
return newStorageLevel(storageLevel.useDisk,
storageLevel.useMemory,
storageLevel.useOffHeap,
storageLevel.deserialized,
storageLevel.replication)
def setJobGroup(self, groupId, description, interruptOnCancel=False):
"""
Assigns a group ID to all the jobs started by this thread until the group ID is set to a
different value or cleared.
Often, a unit of execution in an application consists of multiple Spark actions or jobs.
Application programmers can use this method to group all those jobs together and give a
group description. Once set, the Spark web UI will associate such jobs with this group.
The application can use L{SparkContext.cancelJobGroup} to cancel all
running jobs in this group.
>>> import threading
>>> from time import sleep
>>> result = "Not Set"
>>> lock = threading.Lock()
>>> def map_func(x):
... sleep(100)
... raise Exception("Task should have been cancelled")
>>> def start_job(x):
... global result
... try:
... sc.setJobGroup("job_to_cancel", "some description")
... result = sc.parallelize(range(x)).map(map_func).collect()
... except Exception as e:
... result = "Cancelled"
... lock.release()
>>> def stop_job():
... sleep(5)
... sc.cancelJobGroup("job_to_cancel")
>>> suppress = lock.acquire()
>>> suppress = threading.Thread(target=start_job, args=(10,)).start()
>>> suppress = threading.Thread(target=stop_job).start()
>>> suppress = lock.acquire()
>>> print(result)
Cancelled
If interruptOnCancel is set to true for the job group, then job cancellation will result
in Thread.interrupt() being called on the job's executor threads. This is useful to help
ensure that the tasks are actually stopped in a timely manner, but is off by default due
to HDFS-1208, where HDFS may respond to Thread.interrupt() by marking nodes as dead.
"""
self._jsc.setJobGroup(groupId, description, interruptOnCancel)
def setLocalProperty(self, key, value):
"""
Set a local property that affects jobs submitted from this thread, such as the
Spark fair scheduler pool.
"""
self._jsc.setLocalProperty(key, value)
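    # Usage sketch (the pool name is illustrative):
    #   sc.setLocalProperty("spark.scheduler.pool", "production")
    #   ...  # jobs submitted from this thread now use that pool
    #   sc.setLocalProperty("spark.scheduler.pool", None)  # reset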
def getLocalProperty(self, key):
"""
        Get a local property set in this thread, or None if it is missing. See
L{setLocalProperty}
"""
return self._jsc.getLocalProperty(key)
def setJobDescription(self, value):
"""
        Set a human-readable description of the current job.
"""
self._jsc.setJobDescription(value)
def sparkUser(self):
"""
        Get SPARK_USER for the user who is running SparkContext.
"""
return self._jsc.sc().sparkUser()
def cancelJobGroup(self, groupId):
"""
Cancel active jobs for the specified group. See L{SparkContext.setJobGroup}
for more information.
"""
self._jsc.sc().cancelJobGroup(groupId)
def cancelAllJobs(self):
"""
Cancel all jobs that have been scheduled or are running.
"""
self._jsc.sc().cancelAllJobs()
def statusTracker(self):
"""
Return :class:`StatusTracker` object
"""
return StatusTracker(self._jsc.statusTracker())
def runJob(self, rdd, partitionFunc, partitions=None, allowLocal=False):
"""
Executes the given partitionFunc on the specified set of partitions,
returning the result as an array of elements.
If 'partitions' is not specified, this will run over all partitions.
>>> myRDD = sc.parallelize(range(6), 3)
>>> sc.runJob(myRDD, lambda part: [x * x for x in part])
[0, 1, 4, 9, 16, 25]
>>> myRDD = sc.parallelize(range(6), 3)
>>> sc.runJob(myRDD, lambda part: [x * x for x in part], [0, 2], True)
[0, 1, 16, 25]
"""
if partitions is None:
partitions = range(rdd._jrdd.partitions().size())
# Implementation note: This is implemented as a mapPartitions followed
# by runJob() in order to avoid having to pass a Python lambda into
# SparkContext#runJob.
mappedRDD = rdd.mapPartitions(partitionFunc)
sock_info = self._jvm.PythonRDD.runJob(self._jsc.sc(), mappedRDD._jrdd, partitions)
return list(_load_from_socket(sock_info, mappedRDD._jrdd_deserializer))
def show_profiles(self):
""" Print the profile stats to stdout """
if self.profiler_collector is not None:
self.profiler_collector.show_profiles()
else:
raise RuntimeError("'spark.python.profile' configuration must be set "
"to 'true' to enable Python profile.")
def dump_profiles(self, path):
""" Dump the profile stats into directory `path`
"""
if self.profiler_collector is not None:
self.profiler_collector.dump_profiles(path)
else:
raise RuntimeError("'spark.python.profile' configuration must be set "
"to 'true' to enable Python profile.")
def getConf(self):
conf = SparkConf()
conf.setAll(self._conf.getAll())
return conf
def _test():
import atexit
import doctest
import tempfile
globs = globals().copy()
globs['sc'] = SparkContext('local[4]', 'PythonTest')
globs['tempdir'] = tempfile.mkdtemp()
atexit.register(lambda: shutil.rmtree(globs['tempdir']))
(failure_count, test_count) = doctest.testmod(globs=globs, optionflags=doctest.ELLIPSIS)
globs['sc'].stop()
if failure_count:
sys.exit(-1)
if __name__ == "__main__":
_test()
|
rezeck/ros_tamiya | refs/heads/master | src/node_tamiya.py | 1 | #!/usr/bin/env python
import rospy
from math import radians, cos, sin, asin, sqrt, atan2, degrees, pi, log, tan
import pid_class
import wp_manager
from ros_tamiya.msg import Gps_msg
from ros_tamiya.msg import Imu
from std_msgs.msg import Float32
from geometry_msgs.msg import Twist
from geometry_msgs.msg import Vector3
import time
import os
robot = None
class RobotCar:
def __init__(self):
self.curr_status = 'STANDBY'
self.WAYPOINT_RADIUS = 3.0 # [m]
# Gains linear vel
#self.KP_VEL = 300.0
#self.TI_VEL = 80.0
#self.TD_VEL = 0*0.00005
# Gains angular vel
self.KP_PSI = 0.27
self.TI_PSI = 80.0
self.TD_PSI = 0*0.00007
self.MAGNETIC_DEFLECTION = 22.2
# Lat, lon
self.gps_target = [0, 0]
# -19.869394, -43.964293 close to the entrance of icex
# -19.869689, -43.964652 near
# -19.869767, -43.964812 verlab location
self.gps_pos = [0, 0]
self.wpm = wp_manager.WPManager()
self.orientation = 0.0
self.speed = 0.0
self.sat_num = 0
self.phi = 0
self.theta = 0
self.psi = 0
# Output values angular and linear
self.out_ang = 0
self.out_lin = 0
self.MAX_ANGLE = 95
self.MIN_LINEAR = -50
self.MAX_LINEAR = 300
self.pid_angle = pid_class.PID(self.KP_PSI, self.TI_PSI, self.TD_PSI, -self.MAX_ANGLE, self.MAX_ANGLE)
#self.pid_vel = pid_class.PID(self.KP_VEL, self.TI_VEL, self.TD_VEL, -(self.MAX_LINEAR), self.MAX_LINEAR)
self.vel_publisher = rospy.Publisher('cmd_vel', Vector3, queue_size=1, latch=True)
rospy.Subscriber("gps", Gps_msg, self.setGPS)
rospy.Subscriber("heading", Float32, self.setHeading)
pass
def isGPSFixed(self):
is_fixed = False
if self.gps_pos[0] != 0.0 and self.gps_pos[1] != 0.0 and self.sat_num > 3:
is_fixed = True
return is_fixed
def getGpsData(self):
return {'lat': self.gps_pos[0], 'lon': self.gps_pos[1], 'speed': self.speed, 'sat_num': self.sat_num, 'orientation': self.orientation}
def setGPS(self, data):
#print 'setGPS data:', data.latitude, data.longitude
self.gps_pos = [data.latitude, data.longitude]
self.orientation = data.orientation
self.speed = (0.514444 * data.speed) # from knots to meters/sec
self.sat_num = data.sat_num
def setHeading(self, data):
self.psi = data.data
def setIMU(self, data):
#print 'setIMU data:', data.heading
self.phi = data.gyroscope
self.theta = data.accelerometer
self.psi = data.heading
self.psi = self.psi - self.MAGNETIC_DEFLECTION
if self.psi < 0:
self.psi += 360
#self.psi = self.psi % (2.0 * math.pi)
def angularControl(self, psi_ref):
self.pid_angle.reference(0.0)
alpha = radians(self.psi) - radians(psi_ref)
        # keep the error within [-pi, pi]
        while alpha < -pi:
            alpha += (2 * pi)
        while alpha > pi:
            alpha -= (2 * pi)
return self.pid_angle.u(alpha)
def speedControl(self):
#self.pid_vel.reference(0.0)
if self.wpm.isWayPointReached(self.gps_pos[0], self.gps_pos[1]) and not self.wpm.isCompleted():
print 'WP Reached, getting next one...'
return 300
elif self.wpm.isCompleted():
print 'WP completed, stopping...'
return 300
else:
rho = self.wpm.getDistanceToTarget()
#return self.pid_vel.u(rho)
if abs(self.out_ang) > 30:
return 125
control_dist = 10
if rho > control_dist:
return 50
else:
p = 100 - (rho * 100 / control_dist)
vel = ((p/100.0) * self.MAX_LINEAR - self.MAX_LINEAR) + self.MIN_LINEAR
if vel < self.MIN_LINEAR:
vel = self.MIN_LINEAR
if vel > self.MAX_LINEAR:
vel = self.MAX_LINEAR
return vel
def calcBearingToTarget(self):
"""
Calculate the angle between two points
on the earth (specified in decimal degrees)
"""
# convert decimal degrees to radians
lat1, lon1, lat2, lon2 = map(radians,
[self.gps_pos[0], self.gps_pos[1], self.gps_target[0], self.gps_target[1]])
dLon = lon2 - lon1
dPhi = log((tan(lat2/2.0 + pi/4.0) / tan(lat1/2.0 + pi/4.0)))
if abs(dLon) > pi:
if (dLon) > 0.0:
dLon = -(2.0 * pi - dLon)
else:
dLon = (2.0 * pi + dLon)
bearing = (degrees(atan2(dLon, dPhi)) + 360.0) % 360.0
return bearing
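    # Sanity-check sketch: from (0, 0) to (0, 1) (due east) dPhi is 0, so the
    # bearing is degrees(atan2(dLon, 0)) == 90.0, matching compass convention
    # (0 = north, 90 = east).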
def haversineDistanceToTarget(self):
"""
Calculate the great circle distance between two points
on the earth (specified in decimal degrees)
"""
# convert decimal degrees to radians
        # gps_pos and gps_target are [lat, lon]
        lat1, lon1, lat2, lon2 = map(radians, [self.gps_pos[0], self.gps_pos[1], self.gps_target[0], self.gps_target[1]])
# haversine formula
dlon = lon2 - lon1
dlat = lat2 - lat1
a = sin(dlat/2)**2 + cos(lat1) * cos(lat2) * sin(dlon/2)**2
c = 2 * asin(sqrt(a))
r = 6371 # Radius of earth in kilometers. Use 3956 for miles
return (c * r) * 1000
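    # Sanity-check sketch: two fixes 0.001 deg of latitude apart should yield
    # roughly 111 m (0.001 * pi/180 * 6371 km).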
def control(self):
self.gps_target[0], self.gps_target[1] = self.wpm.getCurrentWayPoint()
self.out_ang = degrees(self.angularControl(self.calcBearingToTarget()))
self.out_lin = self.speedControl()
pass
def publish(self):
vel_msg = Vector3()
vel_msg.z = self.out_ang
vel_msg.x = self.out_lin
self.vel_publisher.publish(vel_msg)
pass
def printStatus(self):
os.system('clear')
print "Pos:", self.gps_pos
print "Target:", self.gps_target
print "GPS distance(m):", self.wpm.getDistanceToTarget()
print "Bearing to target:", self.calcBearingToTarget()
print "Curr bearing:", self.psi
print "Linear:", self.out_lin, 'realCommand:', abs(self.out_lin + 300) + 1000
print "Angular:", self.out_ang, 'realCommand:', self.out_ang + 95
pass
def init_current_node():
rospy.init_node('robotcar', anonymous=True)
robot = RobotCar()
is_active = False
while not rospy.is_shutdown():
if robot.isGPSFixed() and is_active:
robot.control()
robot.publish()
robot.printStatus()
elif not robot.isGPSFixed():
print 'Gps not fixed yet...'
print robot.getGpsData()
else:
var = raw_input("The robot is ready to go, are you sure?: y/n\n")
if var.strip() == 'y':
is_active = True
rospy.sleep(0.1)
if __name__ == '__main__':
init_current_node()
|
stephenjoe1/gaap_product_pages | refs/heads/master | node_modules/grunt-sass/node_modules/node-sass/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py | 1509 | # Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Handle version information related to Visual Stuio."""
import errno
import os
import re
import subprocess
import sys
import gyp
import glob
class VisualStudioVersion(object):
"""Information regarding a version of Visual Studio."""
def __init__(self, short_name, description,
solution_version, project_version, flat_sln, uses_vcxproj,
path, sdk_based, default_toolset=None):
self.short_name = short_name
self.description = description
self.solution_version = solution_version
self.project_version = project_version
self.flat_sln = flat_sln
self.uses_vcxproj = uses_vcxproj
self.path = path
self.sdk_based = sdk_based
self.default_toolset = default_toolset
def ShortName(self):
return self.short_name
def Description(self):
"""Get the full description of the version."""
return self.description
def SolutionVersion(self):
"""Get the version number of the sln files."""
return self.solution_version
def ProjectVersion(self):
"""Get the version number of the vcproj or vcxproj files."""
return self.project_version
def FlatSolution(self):
return self.flat_sln
def UsesVcxproj(self):
"""Returns true if this version uses a vcxproj file."""
return self.uses_vcxproj
def ProjectExtension(self):
"""Returns the file extension for the project."""
return self.uses_vcxproj and '.vcxproj' or '.vcproj'
def Path(self):
"""Returns the path to Visual Studio installation."""
return self.path
def ToolPath(self, tool):
"""Returns the path to a given compiler tool. """
return os.path.normpath(os.path.join(self.path, "VC/bin", tool))
def DefaultToolset(self):
"""Returns the msbuild toolset version that will be used in the absence
of a user override."""
return self.default_toolset
def SetupScript(self, target_arch):
"""Returns a command (with arguments) to be used to set up the
environment."""
# Check if we are running in the SDK command line environment and use
# the setup script from the SDK if so. |target_arch| should be either
# 'x86' or 'x64'.
assert target_arch in ('x86', 'x64')
sdk_dir = os.environ.get('WindowsSDKDir')
if self.sdk_based and sdk_dir:
return [os.path.normpath(os.path.join(sdk_dir, 'Bin/SetEnv.Cmd')),
'/' + target_arch]
else:
# We don't use VC/vcvarsall.bat for x86 because vcvarsall calls
# vcvars32, which it can only find if VS??COMNTOOLS is set, which it
# isn't always.
if target_arch == 'x86':
if self.short_name >= '2013' and self.short_name[-1] != 'e' and (
os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or
os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64'):
# VS2013 and later, non-Express have a x64-x86 cross that we want
# to prefer.
return [os.path.normpath(
os.path.join(self.path, 'VC/vcvarsall.bat')), 'amd64_x86']
# Otherwise, the standard x86 compiler.
return [os.path.normpath(
os.path.join(self.path, 'Common7/Tools/vsvars32.bat'))]
else:
assert target_arch == 'x64'
arg = 'x86_amd64'
# Use the 64-on-64 compiler if we're not using an express
# edition and we're running on a 64bit OS.
if self.short_name[-1] != 'e' and (
os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or
os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64'):
arg = 'amd64'
return [os.path.normpath(
os.path.join(self.path, 'VC/vcvarsall.bat')), arg]
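# Illustrative sketch (editor's note, not part of gyp): SetupScript returns
# an argv prefix that is normally joined and run through cmd.exe so the
# resulting MSVC environment can be captured. The path below is hypothetical.
#
#   vs2013 = VisualStudioVersion(
#       '2013', 'Visual Studio 2013', '13.00', '12.0', False, True,
#       r'C:\Program Files (x86)\Microsoft Visual Studio 12.0', False, 'v120')
#   setup = vs2013.SetupScript('x64')
#   # e.g. ['C:\\...\\VC\\vcvarsall.bat', 'amd64']
#   env_dump = subprocess.check_output(' '.join(setup) + ' && set', shell=True)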
def _RegistryQueryBase(sysdir, key, value):
"""Use reg.exe to read a particular key.
While ideally we might use the win32 module, we would like gyp to be
python neutral, so for instance cygwin python lacks this module.
Arguments:
sysdir: The system subdirectory to attempt to launch reg.exe from.
key: The registry key to read from.
value: The particular value to read.
Return:
stdout from reg.exe, or None for failure.
"""
# Skip if not on Windows or Python Win32 setup issue
if sys.platform not in ('win32', 'cygwin'):
return None
# Setup params to pass to and attempt to launch reg.exe
cmd = [os.path.join(os.environ.get('WINDIR', ''), sysdir, 'reg.exe'),
'query', key]
if value:
cmd.extend(['/v', value])
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
# Obtain the stdout from reg.exe, reading to the end so p.returncode is valid
# Note that the error text may be in [1] in some cases
text = p.communicate()[0]
# Check return code from reg.exe; officially 0==success and 1==error
if p.returncode:
return None
return text
def _RegistryQuery(key, value=None):
r"""Use reg.exe to read a particular key through _RegistryQueryBase.
First tries to launch from %WinDir%\Sysnative to avoid WoW64 redirection. If
that fails, it falls back to System32. Sysnative is available on Vista and
up and available on Windows Server 2003 and XP through KB patch 942589. Note
that Sysnative will always fail if using 64-bit python due to it being a
virtual directory and System32 will work correctly in the first place.
KB 942589 - http://support.microsoft.com/kb/942589/en-us.
Arguments:
key: The registry key.
value: The particular registry value to read (optional).
Return:
stdout from reg.exe, or None for failure.
"""
text = None
try:
text = _RegistryQueryBase('Sysnative', key, value)
except OSError, e:
if e.errno == errno.ENOENT:
text = _RegistryQueryBase('System32', key, value)
else:
raise
return text
def _RegistryGetValueUsingWinReg(key, value):
"""Use the _winreg module to obtain the value of a registry key.
Args:
key: The registry key.
value: The particular registry value to read.
Return:
contents of the registry key's value, or None on failure. Throws
ImportError if _winreg is unavailable.
"""
import _winreg
try:
root, subkey = key.split('\\', 1)
assert root == 'HKLM' # Only need HKLM for now.
with _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, subkey) as hkey:
return _winreg.QueryValueEx(hkey, value)[0]
except WindowsError:
return None
def _RegistryGetValue(key, value):
"""Use _winreg or reg.exe to obtain the value of a registry key.
Using _winreg is preferable because it solves an issue on some corporate
environments where access to reg.exe is locked down. However, we still need
to fallback to reg.exe for the case where the _winreg module is not available
(for example in cygwin python).
Args:
key: The registry key.
value: The particular registry value to read.
Return:
contents of the registry key's value, or None on failure.
"""
try:
return _RegistryGetValueUsingWinReg(key, value)
except ImportError:
pass
# Fallback to reg.exe if we fail to import _winreg.
text = _RegistryQuery(key, value)
if not text:
return None
# Extract value.
match = re.search(r'REG_\w+\s+([^\r]+)\r\n', text)
if not match:
return None
return match.group(1)
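# Illustrative sketch (editor's note; key and value are examples only):
#
#   install_dir = _RegistryGetValue(
#       r'HKLM\Software\Microsoft\VisualStudio\12.0', 'InstallDir')
#   # -> e.g. 'C:\\...\\Common7\\IDE\\' on success, or None when the key is
#   #    missing or both _winreg and reg.exe are unavailable.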
def _CreateVersion(name, path, sdk_based=False):
"""Sets up MSVS project generation.
Setup is based off the GYP_MSVS_VERSION environment variable or whatever is
autodetected if GYP_MSVS_VERSION is not explicitly specified. If a version is
  passed in that doesn't match a value in versions, Python will raise an error.
"""
if path:
path = os.path.normpath(path)
versions = {
'2015': VisualStudioVersion('2015',
'Visual Studio 2015',
solution_version='12.00',
project_version='14.0',
flat_sln=False,
uses_vcxproj=True,
path=path,
sdk_based=sdk_based,
default_toolset='v140'),
'2013': VisualStudioVersion('2013',
'Visual Studio 2013',
solution_version='13.00',
project_version='12.0',
flat_sln=False,
uses_vcxproj=True,
path=path,
sdk_based=sdk_based,
default_toolset='v120'),
'2013e': VisualStudioVersion('2013e',
'Visual Studio 2013',
solution_version='13.00',
project_version='12.0',
flat_sln=True,
uses_vcxproj=True,
path=path,
sdk_based=sdk_based,
default_toolset='v120'),
'2012': VisualStudioVersion('2012',
'Visual Studio 2012',
solution_version='12.00',
project_version='4.0',
flat_sln=False,
uses_vcxproj=True,
path=path,
sdk_based=sdk_based,
default_toolset='v110'),
'2012e': VisualStudioVersion('2012e',
'Visual Studio 2012',
solution_version='12.00',
project_version='4.0',
flat_sln=True,
uses_vcxproj=True,
path=path,
sdk_based=sdk_based,
default_toolset='v110'),
'2010': VisualStudioVersion('2010',
'Visual Studio 2010',
solution_version='11.00',
project_version='4.0',
flat_sln=False,
uses_vcxproj=True,
path=path,
sdk_based=sdk_based),
'2010e': VisualStudioVersion('2010e',
'Visual C++ Express 2010',
solution_version='11.00',
project_version='4.0',
flat_sln=True,
uses_vcxproj=True,
path=path,
sdk_based=sdk_based),
'2008': VisualStudioVersion('2008',
'Visual Studio 2008',
solution_version='10.00',
project_version='9.00',
flat_sln=False,
uses_vcxproj=False,
path=path,
sdk_based=sdk_based),
'2008e': VisualStudioVersion('2008e',
'Visual Studio 2008',
solution_version='10.00',
project_version='9.00',
flat_sln=True,
uses_vcxproj=False,
path=path,
sdk_based=sdk_based),
'2005': VisualStudioVersion('2005',
'Visual Studio 2005',
solution_version='9.00',
project_version='8.00',
flat_sln=False,
uses_vcxproj=False,
path=path,
sdk_based=sdk_based),
'2005e': VisualStudioVersion('2005e',
'Visual Studio 2005',
solution_version='9.00',
project_version='8.00',
flat_sln=True,
uses_vcxproj=False,
path=path,
sdk_based=sdk_based),
}
return versions[str(name)]
def _ConvertToCygpath(path):
"""Convert to cygwin path if we are using cygwin."""
if sys.platform == 'cygwin':
p = subprocess.Popen(['cygpath', path], stdout=subprocess.PIPE)
path = p.communicate()[0].strip()
return path
def _DetectVisualStudioVersions(versions_to_check, force_express):
"""Collect the list of installed visual studio versions.
Returns:
A list of visual studio versions installed in descending order of
usage preference.
Base this on the registry and a quick check if devenv.exe exists.
  Only versions 8.0 (2005) through 14.0 (2015) are considered.
Possibilities are:
2005(e) - Visual Studio 2005 (8)
2008(e) - Visual Studio 2008 (9)
2010(e) - Visual Studio 2010 (10)
2012(e) - Visual Studio 2012 (11)
2013(e) - Visual Studio 2013 (12)
2015 - Visual Studio 2015 (14)
Where (e) is e for express editions of MSVS and blank otherwise.
"""
version_to_year = {
'8.0': '2005',
'9.0': '2008',
'10.0': '2010',
'11.0': '2012',
'12.0': '2013',
'14.0': '2015',
}
versions = []
for version in versions_to_check:
# Old method of searching for which VS version is installed
# We don't use the 2010-encouraged-way because we also want to get the
# path to the binaries, which it doesn't offer.
keys = [r'HKLM\Software\Microsoft\VisualStudio\%s' % version,
r'HKLM\Software\Wow6432Node\Microsoft\VisualStudio\%s' % version,
r'HKLM\Software\Microsoft\VCExpress\%s' % version,
r'HKLM\Software\Wow6432Node\Microsoft\VCExpress\%s' % version]
for index in range(len(keys)):
path = _RegistryGetValue(keys[index], 'InstallDir')
if not path:
continue
path = _ConvertToCygpath(path)
# Check for full.
full_path = os.path.join(path, 'devenv.exe')
express_path = os.path.join(path, '*express.exe')
if not force_express and os.path.exists(full_path):
# Add this one.
versions.append(_CreateVersion(version_to_year[version],
os.path.join(path, '..', '..')))
# Check for express.
elif glob.glob(express_path):
# Add this one.
versions.append(_CreateVersion(version_to_year[version] + 'e',
os.path.join(path, '..', '..')))
# The old method above does not work when only SDK is installed.
keys = [r'HKLM\Software\Microsoft\VisualStudio\SxS\VC7',
r'HKLM\Software\Wow6432Node\Microsoft\VisualStudio\SxS\VC7']
for index in range(len(keys)):
path = _RegistryGetValue(keys[index], version)
if not path:
continue
path = _ConvertToCygpath(path)
if version != '14.0': # There is no Express edition for 2015.
versions.append(_CreateVersion(version_to_year[version] + 'e',
os.path.join(path, '..'), sdk_based=True))
return versions
def SelectVisualStudioVersion(version='auto', allow_fallback=True):
"""Select which version of Visual Studio projects to generate.
Arguments:
    version: Hook to allow caller to force a particular version (vs auto).
    allow_fallback: Whether to fall back to a default version if the
        requested version is not detected.
Returns:
An object representing a visual studio project format version.
"""
# In auto mode, check environment variable for override.
if version == 'auto':
version = os.environ.get('GYP_MSVS_VERSION', 'auto')
version_map = {
'auto': ('14.0', '12.0', '10.0', '9.0', '8.0', '11.0'),
'2005': ('8.0',),
'2005e': ('8.0',),
'2008': ('9.0',),
'2008e': ('9.0',),
'2010': ('10.0',),
'2010e': ('10.0',),
'2012': ('11.0',),
'2012e': ('11.0',),
'2013': ('12.0',),
'2013e': ('12.0',),
'2015': ('14.0',),
}
override_path = os.environ.get('GYP_MSVS_OVERRIDE_PATH')
if override_path:
msvs_version = os.environ.get('GYP_MSVS_VERSION')
if not msvs_version:
raise ValueError('GYP_MSVS_OVERRIDE_PATH requires GYP_MSVS_VERSION to be '
'set to a particular version (e.g. 2010e).')
return _CreateVersion(msvs_version, override_path, sdk_based=True)
version = str(version)
versions = _DetectVisualStudioVersions(version_map[version], 'e' in version)
if not versions:
if not allow_fallback:
raise ValueError('Could not locate Visual Studio installation.')
if version == 'auto':
# Default to 2005 if we couldn't find anything
return _CreateVersion('2005', None)
else:
return _CreateVersion(version, None)
return versions[0]
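# Typical entry point (editor's sketch, environment-dependent): generator
# code does roughly the following.
#
#   vs = SelectVisualStudioVersion(version='auto')
#   print vs.Description(), vs.ProjectExtension()
#   # e.g. "Visual Studio 2013 .vcxproj"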
|
servo/servo | refs/heads/master | tests/wpt/web-platform-tests/tools/ci/pr_preview.py | 4 | #!/usr/bin/env python
# The service provided by this script is not critical, but it shares a GitHub
# API request quota with critical services. For this reason, all requests to
# the GitHub API are preceded by a "guard" which verifies that the subsequent
# request will not deplete the shared quota.
#
# In effect, this script will fail rather than interfere with the operation of
# critical services.
import argparse
import json
import logging
import os
import time
import requests
# The ratio of "requests remaining" to "total request quota" below which this
# script should refuse to interact with the GitHub.com API
API_RATE_LIMIT_THRESHOLD = 0.2
# The GitHub Pull Request label which indicates that a Pull Request is expected
# to be actively mirrored by the preview server
LABEL = 'safe for preview'
# The number of seconds to wait between attempts to verify that a submission
# preview is available on the Pull Request preview server
POLLING_PERIOD = 15
# Pull Requests from authors with the following associations to the project
# should automatically receive previews
#
# https://developer.github.com/v4/enum/commentauthorassociation/ (equivalent
# documentation for the REST API was not available at the time of writing)
TRUSTED_AUTHOR_ASSOCIATIONS = ('COLLABORATOR', 'MEMBER', 'OWNER')
# These GitHub accounts are not associated with individuals, and the Pull
# Requests they submit rarely require a preview.
AUTOMATION_GITHUB_USERS = (
'autofoolip', 'chromium-wpt-export-bot', 'moz-wptsync-bot',
'servo-wpt-sync'
)
DEPLOYMENT_PREFIX = 'wpt-preview-'
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
def gh_request(method_name, url, body=None, media_type=None):
github_token = os.environ['GITHUB_TOKEN']
kwargs = {
'headers': {
'Authorization': 'token {}'.format(github_token),
'Accept': media_type or 'application/vnd.github.v3+json'
}
}
method = getattr(requests, method_name.lower())
if body is not None:
kwargs['json'] = body
logger.info('Issuing request: %s %s', method_name.upper(), url)
resp = method(url, **kwargs)
logger.info('Response status code: %s', resp.status_code)
# If GitHub thinks the fields are invalid, it will send a 422 back and
# include debugging information in the body. See
# https://developer.github.com/v3/#client-errors
if resp.status_code == 422:
logger.error(resp.json())
resp.raise_for_status()
if resp.status_code == 204:
return None
return resp.json()
class GitHubRateLimitException(Exception):
pass
def guard(resource):
'''Decorate a `Project` instance method which interacts with the GitHub
API, ensuring that the subsequent request will not deplete the relevant
allowance. This verification does not itself influence rate limiting:
> Accessing this endpoint does not count against your REST API rate limit.
https://developer.github.com/v3/rate_limit/
'''
def guard_decorator(func):
def wrapped(self, *args, **kwargs):
limits = gh_request('GET', '{}/rate_limit'.format(self._host))
values = limits['resources'].get(resource)
remaining = values['remaining']
limit = values['limit']
logger.info(
'Limit for "%s" resource: %s/%s', resource, remaining, limit
)
if limit and float(remaining) / limit < API_RATE_LIMIT_THRESHOLD:
raise GitHubRateLimitException(
'Exiting to avoid GitHub.com API request throttling.'
)
return func(self, *args, **kwargs)
return wrapped
return guard_decorator
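# Illustrative sketch (editor's note): `guard` is applied below as a method
# decorator, e.g.
#
#     @guard('core')
#     def create_ref(self, refspec, revision):
#         ...
#
# so every GitHub write first checks the remaining "core" allowance and
# raises GitHubRateLimitException instead of spending the shared quota.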
class Project(object):
def __init__(self, host, github_project):
self._host = host
self._github_project = github_project
@guard('core')
def create_ref(self, refspec, revision):
url = '{}/repos/{}/git/refs'.format(self._host, self._github_project)
logger.info('Creating ref "%s" (%s)', refspec, revision)
gh_request('POST', url, {
'ref': 'refs/{}'.format(refspec),
'sha': revision
})
@guard('core')
def get_ref_revision(self, refspec):
url = '{}/repos/{}/git/refs/{}'.format(
self._host, self._github_project, refspec
)
logger.info('Fetching ref "%s"', refspec)
try:
body = gh_request('GET', url)
logger.info('Ref data: %s', json.dumps(body, indent=2))
return body['object']['sha']
except requests.exceptions.HTTPError as e:
if e.response.status_code == 404:
return None
raise e
@guard('core')
def update_ref(self, refspec, revision):
url = '{}/repos/{}/git/refs/{}'.format(
self._host, self._github_project, refspec
)
logger.info('Updating ref "%s" (%s)', refspec, revision)
gh_request('PATCH', url, {'sha': revision})
@guard('core')
def delete_ref(self, refspec):
url = '{}/repos/{}/git/refs/{}'.format(
self._host, self._github_project, refspec
)
logger.info('Deleting ref "%s"', refspec)
gh_request('DELETE', url)
@guard('core')
def create_deployment(self, pull_request, revision):
url = '{}/repos/{}/deployments'.format(
self._host, self._github_project
)
# The Pull Request preview system only exposes one Deployment for a
# given Pull Request. Identifying the Deployment by the Pull Request
# number ensures that GitHub.com automatically responds to new
# Deployments by designating prior Deployments as "inactive"
environment = DEPLOYMENT_PREFIX + str(pull_request['number'])
logger.info('Creating Deployment "%s" for "%s"', environment, revision)
return gh_request('POST', url, {
'ref': revision,
'environment': environment,
'auto_merge': False,
# Pull Request previews are created regardless of GitHub Commit
# Status Checks, so Status Checks should be ignored when creating
# GitHub Deployments.
'required_contexts': []
}, 'application/vnd.github.ant-man-preview+json')
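    # For example (editor's note), pull request 1234 yields the environment
    # name "wpt-preview-1234"; add_deployment_status() and is_deployed()
    # recover the number by stripping DEPLOYMENT_PREFIX.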
@guard('core')
def get_deployment(self, revision):
url = '{}/repos/{}/deployments?sha={}'.format(
self._host, self._github_project, revision
)
deployments = gh_request('GET', url)
return deployments.pop() if len(deployments) else None
@guard('core')
def add_deployment_status(self, target, deployment, state, description=''):
if state in ('pending', 'success'):
pr_number = deployment['environment'][len(DEPLOYMENT_PREFIX):]
environment_url = '{}/{}'.format(target, pr_number)
else:
environment_url = None
url = '{}/repos/{}/deployments/{}/statuses'.format(
self._host, self._github_project, deployment['id']
)
gh_request('POST', url, {
'state': state,
'description': description,
'environment_url': environment_url
}, 'application/vnd.github.ant-man-preview+json')
def is_open(pull_request):
return not pull_request['closed_at']
def has_mirroring_label(pull_request):
for label in pull_request['labels']:
if label['name'] == LABEL:
return True
return False
def should_be_mirrored(project, pull_request):
return (
is_open(pull_request) and (
has_mirroring_label(pull_request) or (
pull_request['user']['login'] not in AUTOMATION_GITHUB_USERS and
pull_request['author_association'] in TRUSTED_AUTHOR_ASSOCIATIONS
)
)
)
def is_deployed(host, deployment):
worktree_name = deployment['environment'][len(DEPLOYMENT_PREFIX):]
url = '{}/.git/worktrees/{}/HEAD'.format(host, worktree_name)
logger.info('Issuing request: GET %s', url)
response = requests.get(url)
logger.info('Response status code: %s', response.status_code)
if response.status_code != 200:
return False
logger.info('Response text: %s', response.text.strip())
return response.text.strip() == deployment['sha']
def update_mirror_refs(project, pull_request):
'''Update the WPT refs that control mirroring of this pull request.
Two sets of refs are used to control wptpr.live's mirroring of pull
requests:
1. refs/prs-trusted-for-preview/{number}
2. refs/prs-open/{number}
wptpr.live will only mirror a pull request if both exist for the given pull
request number; otherwise the pull request is either not open or is not
    trustworthy (e.g. came from someone who doesn't have push access anyway).
This method returns the revision that is being mirrored, or None if the
pull request should not be mirrored.
'''
refspec_trusted = 'prs-trusted-for-preview/{number}'.format(
**pull_request
)
refspec_open = 'prs-open/{number}'.format(**pull_request)
revision_latest = pull_request['head']['sha']
revision_trusted = project.get_ref_revision(refspec_trusted)
revision_open = project.get_ref_revision(refspec_open)
if should_be_mirrored(project, pull_request):
logger.info('Pull Request should be mirrored')
if revision_trusted is None:
project.create_ref(refspec_trusted, revision_latest)
elif revision_trusted != revision_latest:
project.update_ref(refspec_trusted, revision_latest)
if revision_open is None:
project.create_ref(refspec_open, revision_latest)
elif revision_open != revision_latest:
project.update_ref(refspec_open, revision_latest)
return revision_latest
logger.info('Pull Request should not be mirrored')
if not has_mirroring_label(pull_request) and revision_trusted is not None:
project.delete_ref(refspec_trusted)
if revision_open is not None and not is_open(pull_request):
project.delete_ref(refspec_open)
# No revision to be deployed to wptpr.live
return None
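# Summary of the ref states handled above (editor's sketch):
#
#   prs-trusted-for-preview/N   prs-open/N   wptpr.live behaviour
#   present                     present      mirror head revision
#   absent                      (any)        not mirrored (untrusted)
#   (any)                       absent       not mirrored (closed)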
class DeploymentFailedException(Exception):
pass
def deploy(project, target, pull_request, revision, timeout):
'''Create a GitHub deployment for the given pull request and revision.
This method creates a pending GitHub deployment, waits for the
corresponding revision to be available on wptpr.live and marks the
deployment as successful. If the revision does not appear in the given
timeout, the deployment is marked as errored instead.'''
if project.get_deployment(revision) is not None:
return
deployment = project.create_deployment(pull_request, revision)
message = 'Waiting up to {} seconds for Deployment {} to be available on {}'.format(
timeout, deployment['environment'], target
)
logger.info(message)
project.add_deployment_status(target, deployment, 'pending', message)
start = time.time()
while not is_deployed(target, deployment):
if time.time() - start > timeout:
message = 'Deployment did not become available after {} seconds'.format(timeout)
project.add_deployment_status(target, deployment, 'error', message)
raise DeploymentFailedException(message)
time.sleep(POLLING_PERIOD)
result = project.add_deployment_status(target, deployment, 'success')
logger.info(json.dumps(result, indent=2))
def main(host, github_project, target, timeout):
project = Project(host, github_project)
with open(os.environ['GITHUB_EVENT_PATH']) as handle:
data = json.load(handle)
logger.info('Event data: %s', json.dumps(data, indent=2))
pull_request = data['pull_request']
logger.info('Processing Pull Request #%(number)d', pull_request)
revision_to_mirror = update_mirror_refs(project, pull_request)
if revision_to_mirror:
deploy(project, target, pull_request, revision_to_mirror, timeout)
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='''Mirror a pull request to an externally-hosted preview
system, and create a GitHub Deployment associated with the pull
request pointing at the preview.'''
)
parser.add_argument(
'--host', required=True, help='the location of the GitHub API server'
)
parser.add_argument(
'--github-project',
required=True,
help='''the GitHub organization and GitHub project name, separated by
a forward slash (e.g. "web-platform-tests/wpt")'''
)
parser.add_argument(
'--target',
required=True,
help='''the URL of the website to which submission previews are
expected to become available'''
)
parser.add_argument(
'--timeout',
type=int,
required=True,
help='''the number of seconds to wait for a submission preview to
become available before reporting a GitHub Deployment failure'''
)
values = dict(vars(parser.parse_args()))
main(**values)
|
aitkend/piers | refs/heads/master | fabfile/assets.py | 15 | #!/usr/bin/env python
"""
Commands related to the syncing assets.
"""
from glob import glob
import os
import boto
from fabric.api import prompt, task
import app_config
from fnmatch import fnmatch
import utils
ASSETS_ROOT = 'www/assets'
@task
def sync():
"""
Intelligently synchronize assets between S3 and local folder.
"""
ignore_globs = []
with open('%s/assetsignore' % ASSETS_ROOT, 'r') as f:
ignore_globs = [l.strip() for l in f]
local_paths = []
not_lowercase = []
for local_path, subdirs, filenames in os.walk(ASSETS_ROOT):
for name in filenames:
full_path = os.path.join(local_path, name)
glob_path = full_path.split(ASSETS_ROOT)[1].strip('/')
ignore = False
for ignore_glob in ignore_globs:
if fnmatch(glob_path, ignore_glob):
ignore = True
break
if ignore:
print 'Ignoring: %s' % full_path
continue
if name.lower() != name:
not_lowercase.append(full_path)
local_paths.append(full_path)
# Prevent case sensitivity differences between OSX and S3 from screwing us up
if not_lowercase:
print 'The following filenames are not lowercase, please change them before running `assets.sync`:'
for name in not_lowercase:
print ' %s' % name
return
bucket = _assets_get_bucket()
keys = bucket.list(app_config.ASSETS_SLUG)
which = None
always = False
for key in keys:
download = False
upload = False
local_path = key.name.replace(app_config.ASSETS_SLUG, ASSETS_ROOT, 1)
# Skip root key
if local_path == '%s/' % ASSETS_ROOT:
continue
print local_path
if local_path in local_paths:
# A file can only exist once, this speeds up future checks
# and provides a list of non-existing files when complete
local_paths.remove(local_path)
# We need an actual key, not a "list key"
# http://stackoverflow.com/a/18981298/24608
key = bucket.get_key(key.name)
with open(local_path, 'rb') as f:
local_md5 = key.compute_md5(f)[0]
# Hashes are different
if key.get_metadata('md5') != local_md5:
if not always:
# Ask user which file to take
which, always = _assets_confirm(local_path)
if not which:
print 'Cancelling!'
return
if which == 'remote':
download = True
elif which == 'local':
upload = True
else:
download = True
if download:
_assets_download(key, local_path)
if upload:
_assets_upload(local_path, key)
action = None
always = False
# Iterate over files that didn't exist on S3
for local_path in local_paths:
key_name = local_path.replace(ASSETS_ROOT, app_config.ASSETS_SLUG, 1)
key = bucket.get_key(key_name, validate=False)
print local_path
if not always:
action, always = _assets_upload_confirm()
if not action:
print 'Cancelling!'
return
if action == 'upload':
_assets_upload(local_path, key)
elif action == 'delete':
_assets_delete(local_path, key)
@task
def rm(path):
"""
Remove an asset from s3 and locally
"""
bucket = _assets_get_bucket()
file_list = glob(path)
found_folder = True
# Add files in folders, instead of folders themselves (S3 doesn't have folders)
while found_folder:
found_folder = False
for local_path in file_list:
if os.path.isdir(local_path):
found_folder = True
file_list.remove(local_path)
for path in os.listdir(local_path):
file_list.append(os.path.join(local_path, path))
if len(file_list) > 0:
utils.confirm("You are about to destroy %i files. Are you sure?" % len(file_list))
for local_path in file_list:
print local_path
        if os.path.isdir(local_path):
            # os.listdir() returns bare names; join them back onto the
            # directory so later processing sees valid paths.
            file_list.extend(
                [os.path.join(local_path, name) for name in os.listdir(local_path)])
            continue
key_name = local_path.replace(ASSETS_ROOT, app_config.ASSETS_SLUG, 1)
key = bucket.get_key(key_name)
_assets_delete(local_path, key)
def _assets_get_bucket():
"""
Get a reference to the assets bucket.
"""
s3 = boto.connect_s3()
return s3.get_bucket(app_config.ASSETS_S3_BUCKET['bucket_name'])
def _assets_confirm(local_path):
"""
Check with user about whether to keep local or remote file.
"""
print '--> This file has been changed locally and on S3.'
answer = prompt('Take remote [r] Take local [l] Take all remote [ra] Take all local [la] cancel', default='c')
if answer == 'r':
return ('remote', False)
elif answer == 'l':
return ('local', False)
elif answer == 'ra':
return ('remote', True)
elif answer == 'la':
return ('local', True)
return (None, False)
def _assets_upload_confirm():
print '--> This file does not exist on S3.'
answer = prompt('Upload local copy [u] Delete local copy [d] Upload all [ua] Delete all [da] cancel', default='c')
if answer == 'u':
return ('upload', False)
elif answer == 'd':
return ('delete', False)
elif answer == 'ua':
return ('upload', True)
elif answer == 'da':
return ('delete', True)
return (None, False)
def _assets_download(s3_key, local_path):
"""
Utility method to download a single asset from S3.
"""
print '--> Downloading!'
dirname = os.path.dirname(local_path)
if not (os.path.exists(dirname)):
os.makedirs(dirname)
s3_key.get_contents_to_filename(local_path)
def _assets_upload(local_path, s3_key):
"""
Utility method to upload a single asset to S3.
"""
print '--> Uploading!'
with open(local_path, 'rb') as f:
local_md5 = s3_key.compute_md5(f)[0]
s3_key.set_metadata('md5', local_md5)
s3_key.set_contents_from_filename(local_path)
def _assets_delete(local_path, s3_key):
"""
Utility method to delete assets both locally and remotely.
"""
print '--> Deleting!'
s3_key.delete()
os.remove(local_path)
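# Typical invocation (editor's sketch; exact task syntax depends on the
# project's Fabric setup):
#
#   fab assets.sync
#   fab assets.rm:"www/assets/img/*.jpg"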
|
ixcoinofficialpage/master | refs/heads/master | qa/rpc-tests/invalidblockrequest.py | 104 | #!/usr/bin/env python3
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.test_framework import ComparisonTestFramework
from test_framework.util import *
from test_framework.comptool import TestManager, TestInstance, RejectResult
from test_framework.blocktools import *
import copy
import time
'''
In this test we connect to one node over p2p, and test block requests:
1) Valid blocks should be requested and become chain tip.
2) Invalid block with duplicated transaction should be re-requested.
3) Invalid block with bad coinbase value should be rejected and not
re-requested.
'''
# Use the ComparisonTestFramework with 1 node: only use --testbinary.
class InvalidBlockRequestTest(ComparisonTestFramework):
''' Can either run this test as 1 node with expected answers, or two and compare them.
Change the "outcome" variable from each TestInstance object to only do the comparison. '''
def __init__(self):
super().__init__()
self.num_nodes = 1
def run_test(self):
test = TestManager(self, self.options.tmpdir)
test.add_all_connections(self.nodes)
self.tip = None
self.block_time = None
NetworkThread().start() # Start up network handling in another thread
test.run()
def get_tests(self):
if self.tip is None:
self.tip = int("0x" + self.nodes[0].getbestblockhash(), 0)
self.block_time = int(time.time())+1
'''
Create a new block with an anyone-can-spend coinbase
'''
height = 1
block = create_block(self.tip, create_coinbase(height), self.block_time)
self.block_time += 1
block.solve()
# Save the coinbase for later
self.block1 = block
self.tip = block.sha256
height += 1
yield TestInstance([[block, True]])
'''
Now we need that block to mature so we can spend the coinbase.
'''
test = TestInstance(sync_every_block=False)
for i in range(100):
block = create_block(self.tip, create_coinbase(height), self.block_time)
block.solve()
self.tip = block.sha256
self.block_time += 1
test.blocks_and_transactions.append([block, True])
height += 1
yield test
'''
Now we use merkle-root malleability to generate an invalid block with
same blockheader.
Manufacture a block with 3 transactions (coinbase, spend of prior
coinbase, spend of that spend). Duplicate the 3rd transaction to
leave merkle root and blockheader unchanged but invalidate the block.
'''
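        # Editor's note: appending a duplicate of the final transaction
        # exploits the merkle-tree duplication weakness (CVE-2012-2459):
        # an odd leaf is hashed with itself, so the duplicated entry leaves
        # hashMerkleRoot, and therefore the block header, unchanged.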
block2 = create_block(self.tip, create_coinbase(height), self.block_time)
self.block_time += 1
        # b'\x51' is OP_TRUE
tx1 = create_transaction(self.block1.vtx[0], 0, b'\x51', 50 * COIN)
tx2 = create_transaction(tx1, 0, b'\x51', 50 * COIN)
block2.vtx.extend([tx1, tx2])
block2.hashMerkleRoot = block2.calc_merkle_root()
block2.rehash()
block2.solve()
orig_hash = block2.sha256
block2_orig = copy.deepcopy(block2)
# Mutate block 2
block2.vtx.append(tx2)
assert_equal(block2.hashMerkleRoot, block2.calc_merkle_root())
assert_equal(orig_hash, block2.rehash())
assert(block2_orig.vtx != block2.vtx)
self.tip = block2.sha256
yield TestInstance([[block2, RejectResult(16, b'bad-txns-duplicate')], [block2_orig, True]])
height += 1
'''
Make sure that a totally screwed up block is not valid.
'''
block3 = create_block(self.tip, create_coinbase(height), self.block_time)
self.block_time += 1
block3.vtx[0].vout[0].nValue = 100 * COIN # Too high!
block3.vtx[0].sha256=None
block3.vtx[0].calc_sha256()
block3.hashMerkleRoot = block3.calc_merkle_root()
block3.rehash()
block3.solve()
yield TestInstance([[block3, RejectResult(16, b'bad-cb-amount')]])
if __name__ == '__main__':
InvalidBlockRequestTest().main()
|
xbmc/atv2 | refs/heads/atv2 | xbmc/lib/libPython/Python/Lib/test/test_textwrap.py | 7 | #
# Test suite for the textwrap module.
#
# Original tests written by Greg Ward <gward@python.net>.
# Converted to PyUnit by Peter Hansen <peter@engcorp.com>.
# Currently maintained by Greg Ward.
#
# $Id: test_textwrap.py 38573 2005-03-05 02:38:33Z gward $
#
import unittest
from test import test_support
from textwrap import TextWrapper, wrap, fill, dedent
class BaseTestCase(unittest.TestCase):
'''Parent class with utility methods for textwrap tests.'''
def show(self, textin):
if isinstance(textin, list):
result = []
for i in range(len(textin)):
result.append(" %d: %r" % (i, textin[i]))
result = '\n'.join(result)
elif isinstance(textin, basestring):
result = " %s\n" % repr(textin)
return result
def check(self, result, expect):
self.assertEquals(result, expect,
'expected:\n%s\nbut got:\n%s' % (
self.show(expect), self.show(result)))
def check_wrap(self, text, width, expect, **kwargs):
result = wrap(text, width, **kwargs)
self.check(result, expect)
def check_split(self, text, expect):
result = self.wrapper._split(text)
self.assertEquals(result, expect,
"\nexpected %r\n"
"but got %r" % (expect, result))
class WrapTestCase(BaseTestCase):
def setUp(self):
self.wrapper = TextWrapper(width=45)
def test_simple(self):
# Simple case: just words, spaces, and a bit of punctuation
text = "Hello there, how are you this fine day? I'm glad to hear it!"
self.check_wrap(text, 12,
["Hello there,",
"how are you",
"this fine",
"day? I'm",
"glad to hear",
"it!"])
self.check_wrap(text, 42,
["Hello there, how are you this fine day?",
"I'm glad to hear it!"])
self.check_wrap(text, 80, [text])
def test_whitespace(self):
# Whitespace munging and end-of-sentence detection
text = """\
This is a paragraph that already has
line breaks. But some of its lines are much longer than the others,
so it needs to be wrapped.
Some lines are \ttabbed too.
What a mess!
"""
expect = ["This is a paragraph that already has line",
"breaks. But some of its lines are much",
"longer than the others, so it needs to be",
"wrapped. Some lines are tabbed too. What a",
"mess!"]
wrapper = TextWrapper(45, fix_sentence_endings=True)
result = wrapper.wrap(text)
self.check(result, expect)
result = wrapper.fill(text)
self.check(result, '\n'.join(expect))
def test_fix_sentence_endings(self):
wrapper = TextWrapper(60, fix_sentence_endings=True)
# SF #847346: ensure that fix_sentence_endings=True does the
# right thing even on input short enough that it doesn't need to
# be wrapped.
text = "A short line. Note the single space."
expect = ["A short line. Note the single space."]
self.check(wrapper.wrap(text), expect)
# Test some of the hairy end cases that _fix_sentence_endings()
# is supposed to handle (the easy stuff is tested in
# test_whitespace() above).
text = "Well, Doctor? What do you think?"
expect = ["Well, Doctor? What do you think?"]
self.check(wrapper.wrap(text), expect)
text = "Well, Doctor?\nWhat do you think?"
self.check(wrapper.wrap(text), expect)
text = 'I say, chaps! Anyone for "tennis?"\nHmmph!'
expect = ['I say, chaps! Anyone for "tennis?" Hmmph!']
self.check(wrapper.wrap(text), expect)
wrapper.width = 20
expect = ['I say, chaps!', 'Anyone for "tennis?"', 'Hmmph!']
self.check(wrapper.wrap(text), expect)
text = 'And she said, "Go to hell!"\nCan you believe that?'
expect = ['And she said, "Go to',
'hell!" Can you',
'believe that?']
self.check(wrapper.wrap(text), expect)
wrapper.width = 60
expect = ['And she said, "Go to hell!" Can you believe that?']
self.check(wrapper.wrap(text), expect)
def test_wrap_short(self):
# Wrapping to make short lines longer
text = "This is a\nshort paragraph."
self.check_wrap(text, 20, ["This is a short",
"paragraph."])
self.check_wrap(text, 40, ["This is a short paragraph."])
def test_wrap_short_1line(self):
# Test endcases
text = "This is a short line."
self.check_wrap(text, 30, ["This is a short line."])
self.check_wrap(text, 30, ["(1) This is a short line."],
initial_indent="(1) ")
def test_hyphenated(self):
# Test breaking hyphenated words
text = ("this-is-a-useful-feature-for-"
"reformatting-posts-from-tim-peters'ly")
self.check_wrap(text, 40,
["this-is-a-useful-feature-for-",
"reformatting-posts-from-tim-peters'ly"])
self.check_wrap(text, 41,
["this-is-a-useful-feature-for-",
"reformatting-posts-from-tim-peters'ly"])
self.check_wrap(text, 42,
["this-is-a-useful-feature-for-reformatting-",
"posts-from-tim-peters'ly"])
def test_hyphenated_numbers(self):
# Test that hyphenated numbers (eg. dates) are not broken like words.
text = ("Python 1.0.0 was released on 1994-01-26. Python 1.0.1 was\n"
"released on 1994-02-15.")
self.check_wrap(text, 30, ['Python 1.0.0 was released on',
'1994-01-26. Python 1.0.1 was',
'released on 1994-02-15.'])
self.check_wrap(text, 40, ['Python 1.0.0 was released on 1994-01-26.',
'Python 1.0.1 was released on 1994-02-15.'])
text = "I do all my shopping at 7-11."
self.check_wrap(text, 25, ["I do all my shopping at",
"7-11."])
self.check_wrap(text, 27, ["I do all my shopping at",
"7-11."])
self.check_wrap(text, 29, ["I do all my shopping at 7-11."])
def test_em_dash(self):
# Test text with em-dashes
text = "Em-dashes should be written -- thus."
self.check_wrap(text, 25,
["Em-dashes should be",
"written -- thus."])
# Probe the boundaries of the properly written em-dash,
# ie. " -- ".
self.check_wrap(text, 29,
["Em-dashes should be written",
"-- thus."])
expect = ["Em-dashes should be written --",
"thus."]
self.check_wrap(text, 30, expect)
self.check_wrap(text, 35, expect)
self.check_wrap(text, 36,
["Em-dashes should be written -- thus."])
# The improperly written em-dash is handled too, because
# it's adjacent to non-whitespace on both sides.
text = "You can also do--this or even---this."
expect = ["You can also do",
"--this or even",
"---this."]
self.check_wrap(text, 15, expect)
self.check_wrap(text, 16, expect)
expect = ["You can also do--",
"this or even---",
"this."]
self.check_wrap(text, 17, expect)
self.check_wrap(text, 19, expect)
expect = ["You can also do--this or even",
"---this."]
self.check_wrap(text, 29, expect)
self.check_wrap(text, 31, expect)
expect = ["You can also do--this or even---",
"this."]
self.check_wrap(text, 32, expect)
self.check_wrap(text, 35, expect)
# All of the above behaviour could be deduced by probing the
# _split() method.
text = "Here's an -- em-dash and--here's another---and another!"
expect = ["Here's", " ", "an", " ", "--", " ", "em-", "dash", " ",
"and", "--", "here's", " ", "another", "---",
"and", " ", "another!"]
self.check_split(text, expect)
text = "and then--bam!--he was gone"
expect = ["and", " ", "then", "--", "bam!", "--",
"he", " ", "was", " ", "gone"]
self.check_split(text, expect)
def test_unix_options (self):
# Test that Unix-style command-line options are wrapped correctly.
# Both Optik (OptionParser) and Docutils rely on this behaviour!
text = "You should use the -n option, or --dry-run in its long form."
self.check_wrap(text, 20,
["You should use the",
"-n option, or --dry-",
"run in its long",
"form."])
self.check_wrap(text, 21,
["You should use the -n",
"option, or --dry-run",
"in its long form."])
expect = ["You should use the -n option, or",
"--dry-run in its long form."]
self.check_wrap(text, 32, expect)
self.check_wrap(text, 34, expect)
self.check_wrap(text, 35, expect)
self.check_wrap(text, 38, expect)
expect = ["You should use the -n option, or --dry-",
"run in its long form."]
self.check_wrap(text, 39, expect)
self.check_wrap(text, 41, expect)
expect = ["You should use the -n option, or --dry-run",
"in its long form."]
self.check_wrap(text, 42, expect)
# Again, all of the above can be deduced from _split().
text = "the -n option, or --dry-run or --dryrun"
expect = ["the", " ", "-n", " ", "option,", " ", "or", " ",
"--dry-", "run", " ", "or", " ", "--dryrun"]
self.check_split(text, expect)
def test_funky_hyphens (self):
# Screwy edge cases cooked up by David Goodger. All reported
# in SF bug #596434.
self.check_split("what the--hey!", ["what", " ", "the", "--", "hey!"])
self.check_split("what the--", ["what", " ", "the--"])
self.check_split("what the--.", ["what", " ", "the--."])
self.check_split("--text--.", ["--text--."])
# When I first read bug #596434, this is what I thought David
# was talking about. I was wrong; these have always worked
# fine. The real problem is tested in test_funky_parens()
# below...
self.check_split("--option", ["--option"])
self.check_split("--option-opt", ["--option-", "opt"])
self.check_split("foo --option-opt bar",
["foo", " ", "--option-", "opt", " ", "bar"])
def test_punct_hyphens(self):
# Oh bother, SF #965425 found another problem with hyphens --
# hyphenated words in single quotes weren't handled correctly.
# In fact, the bug is that *any* punctuation around a hyphenated
# word was handled incorrectly, except for a leading "--", which
# was special-cased for Optik and Docutils. So test a variety
# of styles of punctuation around a hyphenated word.
# (Actually this is based on an Optik bug report, #813077).
self.check_split("the 'wibble-wobble' widget",
['the', ' ', "'wibble-", "wobble'", ' ', 'widget'])
self.check_split('the "wibble-wobble" widget',
['the', ' ', '"wibble-', 'wobble"', ' ', 'widget'])
self.check_split("the (wibble-wobble) widget",
['the', ' ', "(wibble-", "wobble)", ' ', 'widget'])
self.check_split("the ['wibble-wobble'] widget",
['the', ' ', "['wibble-", "wobble']", ' ', 'widget'])
def test_funky_parens (self):
# Second part of SF bug #596434: long option strings inside
# parentheses.
self.check_split("foo (--option) bar",
["foo", " ", "(--option)", " ", "bar"])
# Related stuff -- make sure parens work in simpler contexts.
self.check_split("foo (bar) baz",
["foo", " ", "(bar)", " ", "baz"])
self.check_split("blah (ding dong), wubba",
["blah", " ", "(ding", " ", "dong),",
" ", "wubba"])
def test_initial_whitespace(self):
# SF bug #622849 reported inconsistent handling of leading
# whitespace; let's test that a bit, shall we?
text = " This is a sentence with leading whitespace."
self.check_wrap(text, 50,
[" This is a sentence with leading whitespace."])
self.check_wrap(text, 30,
[" This is a sentence with", "leading whitespace."])
def test_unicode(self):
# *Very* simple test of wrapping Unicode strings. I'm sure
# there's more to it than this, but let's at least make
# sure textwrap doesn't crash on Unicode input!
text = u"Hello there, how are you today?"
self.check_wrap(text, 50, [u"Hello there, how are you today?"])
self.check_wrap(text, 20, [u"Hello there, how are", "you today?"])
olines = self.wrapper.wrap(text)
assert isinstance(olines, list) and isinstance(olines[0], unicode)
otext = self.wrapper.fill(text)
assert isinstance(otext, unicode)
def test_split(self):
# Ensure that the standard _split() method works as advertised
# in the comments
text = "Hello there -- you goof-ball, use the -b option!"
result = self.wrapper._split(text)
self.check(result,
["Hello", " ", "there", " ", "--", " ", "you", " ", "goof-",
"ball,", " ", "use", " ", "the", " ", "-b", " ", "option!"])
def test_bad_width(self):
# Ensure that width <= 0 is caught.
text = "Whatever, it doesn't matter."
self.assertRaises(ValueError, wrap, text, 0)
self.assertRaises(ValueError, wrap, text, -1)
class LongWordTestCase (BaseTestCase):
def setUp(self):
self.wrapper = TextWrapper()
self.text = '''\
Did you say "supercalifragilisticexpialidocious?"
How *do* you spell that odd word, anyways?
'''
def test_break_long(self):
# Wrap text with long words and lots of punctuation
self.check_wrap(self.text, 30,
['Did you say "supercalifragilis',
'ticexpialidocious?" How *do*',
'you spell that odd word,',
'anyways?'])
self.check_wrap(self.text, 50,
['Did you say "supercalifragilisticexpialidocious?"',
'How *do* you spell that odd word, anyways?'])
# SF bug 797650. Prevent an infinite loop by making sure that at
# least one character gets split off on every pass.
self.check_wrap('-'*10+'hello', 10,
['----------',
' h',
' e',
' l',
' l',
' o'],
subsequent_indent = ' '*15)
def test_nobreak_long(self):
# Test with break_long_words disabled
self.wrapper.break_long_words = 0
self.wrapper.width = 30
expect = ['Did you say',
'"supercalifragilisticexpialidocious?"',
'How *do* you spell that odd',
'word, anyways?'
]
result = self.wrapper.wrap(self.text)
self.check(result, expect)
# Same thing with kwargs passed to standalone wrap() function.
result = wrap(self.text, width=30, break_long_words=0)
self.check(result, expect)
class IndentTestCases(BaseTestCase):
# called before each test method
def setUp(self):
self.text = '''\
This paragraph will be filled, first without any indentation,
and then with some (including a hanging indent).'''
def test_fill(self):
# Test the fill() method
expect = '''\
This paragraph will be filled, first
without any indentation, and then with
some (including a hanging indent).'''
result = fill(self.text, 40)
self.check(result, expect)
def test_initial_indent(self):
# Test initial_indent parameter
expect = [" This paragraph will be filled,",
"first without any indentation, and then",
"with some (including a hanging indent)."]
result = wrap(self.text, 40, initial_indent=" ")
self.check(result, expect)
expect = "\n".join(expect)
result = fill(self.text, 40, initial_indent=" ")
self.check(result, expect)
def test_subsequent_indent(self):
# Test subsequent_indent parameter
expect = '''\
* This paragraph will be filled, first
without any indentation, and then
with some (including a hanging
indent).'''
result = fill(self.text, 40,
initial_indent=" * ", subsequent_indent=" ")
self.check(result, expect)
# Despite the similar names, DedentTestCase is *not* the inverse
# of IndentTestCase!
class DedentTestCase(unittest.TestCase):
def test_dedent_nomargin(self):
# No lines indented.
text = "Hello there.\nHow are you?\nOh good, I'm glad."
self.assertEquals(dedent(text), text)
# Similar, with a blank line.
text = "Hello there.\n\nBoo!"
self.assertEquals(dedent(text), text)
# Some lines indented, but overall margin is still zero.
text = "Hello there.\n This is indented."
self.assertEquals(dedent(text), text)
# Again, add a blank line.
text = "Hello there.\n\n Boo!\n"
self.assertEquals(dedent(text), text)
def test_dedent_even(self):
# All lines indented by two spaces.
text = " Hello there.\n How are ya?\n Oh good."
expect = "Hello there.\nHow are ya?\nOh good."
self.assertEquals(dedent(text), expect)
# Same, with blank lines.
text = " Hello there.\n\n How are ya?\n Oh good.\n"
expect = "Hello there.\n\nHow are ya?\nOh good.\n"
self.assertEquals(dedent(text), expect)
# Now indent one of the blank lines.
text = " Hello there.\n \n How are ya?\n Oh good.\n"
expect = "Hello there.\n\nHow are ya?\nOh good.\n"
self.assertEquals(dedent(text), expect)
def test_dedent_uneven(self):
# Lines indented unevenly.
text = '''\
def foo():
while 1:
return foo
'''
expect = '''\
def foo():
while 1:
return foo
'''
self.assertEquals(dedent(text), expect)
# Uneven indentation with a blank line.
text = " Foo\n Bar\n\n Baz\n"
expect = "Foo\n Bar\n\n Baz\n"
self.assertEquals(dedent(text), expect)
# Uneven indentation with a whitespace-only line.
text = " Foo\n Bar\n \n Baz\n"
expect = "Foo\n Bar\n\n Baz\n"
self.assertEquals(dedent(text), expect)
def test_main():
test_support.run_unittest(WrapTestCase,
LongWordTestCase,
IndentTestCases,
DedentTestCase)
if __name__ == '__main__':
test_main()
|
Rut0/RutoApp | refs/heads/master | ruto/apps.py | 1 | from django.apps import AppConfig
class RutoConfig(AppConfig):
name = 'ruto'
|
bittner/django-allauth | refs/heads/master | allauth/socialaccount/providers/dataporten/urls.py | 6 | from allauth.socialaccount.providers.oauth2.urls import default_urlpatterns
from .provider import DataportenProvider
urlpatterns = default_urlpatterns(DataportenProvider)
|
ghchinoy/tensorflow | refs/heads/master | tensorflow/contrib/testing/python/framework/fake_summary_writer.py | 63 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Fake summary writer for unit tests."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.core.framework import summary_pb2
from tensorflow.python.framework import test_util
from tensorflow.python.summary.writer import writer
from tensorflow.python.summary.writer import writer_cache
# TODO(ptucker): Replace with mock framework.
class FakeSummaryWriter(object):
"""Fake summary writer."""
_replaced_summary_writer = None
@classmethod
def install(cls):
if cls._replaced_summary_writer:
raise ValueError('FakeSummaryWriter already installed.')
cls._replaced_summary_writer = writer.FileWriter
writer.FileWriter = FakeSummaryWriter
writer_cache.FileWriter = FakeSummaryWriter
@classmethod
def uninstall(cls):
if not cls._replaced_summary_writer:
raise ValueError('FakeSummaryWriter not installed.')
writer.FileWriter = cls._replaced_summary_writer
writer_cache.FileWriter = cls._replaced_summary_writer
cls._replaced_summary_writer = None
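  # Typical test usage (editor's sketch):
  #
  #   FakeSummaryWriter.install()
  #   try:
  #     ...  # code under test constructs writer.FileWriter(logdir)
  #     fake_writer.assert_summaries(self, expected_logdir=logdir)
  #   finally:
  #     FakeSummaryWriter.uninstall()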
def __init__(self, logdir, graph=None):
self._logdir = logdir
self._graph = graph
self._summaries = {}
self._added_graphs = []
self._added_meta_graphs = []
self._added_session_logs = []
self._added_run_metadata = {}
@property
def summaries(self):
return self._summaries
def assert_summaries(self,
test_case,
expected_logdir=None,
expected_graph=None,
expected_summaries=None,
expected_added_graphs=None,
expected_added_meta_graphs=None,
expected_session_logs=None):
"""Assert expected items have been added to summary writer."""
if expected_logdir is not None:
test_case.assertEqual(expected_logdir, self._logdir)
if expected_graph is not None:
test_case.assertTrue(expected_graph is self._graph)
expected_summaries = expected_summaries or {}
for step in expected_summaries:
test_case.assertTrue(
step in self._summaries,
msg='Missing step %s from %s.' % (step, self._summaries.keys()))
actual_simple_values = {}
for step_summary in self._summaries[step]:
for v in step_summary.value:
# Ignore global_step/sec since it's written by Supervisor in a
# separate thread, so it's non-deterministic how many get written.
if 'global_step/sec' != v.tag:
actual_simple_values[v.tag] = v.simple_value
test_case.assertEqual(expected_summaries[step], actual_simple_values)
if expected_added_graphs is not None:
test_case.assertEqual(expected_added_graphs, self._added_graphs)
if expected_added_meta_graphs is not None:
test_case.assertEqual(len(expected_added_meta_graphs),
len(self._added_meta_graphs))
for expected, actual in zip(expected_added_meta_graphs,
self._added_meta_graphs):
test_util.assert_meta_graph_protos_equal(test_case, expected, actual)
if expected_session_logs is not None:
test_case.assertEqual(expected_session_logs, self._added_session_logs)
def add_summary(self, summ, current_global_step):
"""Add summary."""
if isinstance(summ, bytes):
summary_proto = summary_pb2.Summary()
summary_proto.ParseFromString(summ)
summ = summary_proto
if current_global_step in self._summaries:
step_summaries = self._summaries[current_global_step]
else:
step_summaries = []
self._summaries[current_global_step] = step_summaries
step_summaries.append(summ)
# NOTE: Ignore global_step since its value is non-deterministic.
def add_graph(self, graph, global_step=None, graph_def=None):
"""Add graph."""
if (global_step is not None) and (global_step < 0):
raise ValueError('Invalid global_step %s.' % global_step)
if graph_def is not None:
raise ValueError('Unexpected graph_def %s.' % graph_def)
self._added_graphs.append(graph)
def add_meta_graph(self, meta_graph_def, global_step=None):
"""Add metagraph."""
if (global_step is not None) and (global_step < 0):
raise ValueError('Invalid global_step %s.' % global_step)
self._added_meta_graphs.append(meta_graph_def)
# NOTE: Ignore global_step since its value is non-deterministic.
def add_session_log(self, session_log, global_step=None):
# pylint: disable=unused-argument
self._added_session_logs.append(session_log)
def add_run_metadata(self, run_metadata, tag, global_step=None):
if (global_step is not None) and (global_step < 0):
raise ValueError('Invalid global_step %s.' % global_step)
self._added_run_metadata[tag] = run_metadata
def flush(self):
pass
def reopen(self):
pass
def close(self):
pass
|
freedesktop-unofficial-mirror/telepathy__telepathy-spec | refs/heads/master | tools/specparser.py | 1 | #
# specparser.py
#
# Reads in a spec document and generates pretty data structures from it.
#
# Copyright (C) 2009-2010 Collabora Ltd.
#
# This library is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 2.1 of the License, or (at
# your option) any later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
# for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this library; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# Authors: Davyd Madeley <davyd.madeley@collabora.co.uk>
#
import sys
import xml.dom.minidom
import xincludator
XMLNS_TP = 'http://telepathy.freedesktop.org/wiki/DbusSpec#extensions-v0'
class UnknownAccess(Exception): pass
class UnknownDirection(Exception): pass
class UnknownType(Exception): pass
class UnnamedItem(Exception): pass
class UntypedItem(Exception): pass
class UnsupportedArray(Exception): pass
class BadNameForBindings(Exception): pass
class BrokenHTML(Exception): pass
class WrongNumberOfChildren(Exception): pass
class MismatchedFlagsAndEnum(Exception): pass
class TypeMismatch(Exception): pass
class MissingVersion(Exception): pass
class DuplicateEnumValueValue(Exception): pass
class BadFlagValue(Exception): pass
class BadFlagsType(Exception): pass
class Xzibit(Exception):
def __init__(self, parent, child):
self.parent = parent
self.child = child
def __str__(self):
print """
Nested <%s>s are forbidden.
Parent:
%s...
Child:
%s...
""" % (self.parent.nodeName, self.parent.toxml()[:100],
self.child.toxml()[:100])
def getText(dom):
try:
if dom.childNodes[0].nodeType == dom.TEXT_NODE:
return dom.childNodes[0].data
else:
return ''
except IndexError:
return ''
def getChildrenByName(dom, namespace, name):
return filter(lambda n: n.nodeType == n.ELEMENT_NODE and \
n.namespaceURI == namespace and \
n.localName == name,
dom.childNodes)
def getChildrenByNameAndAttribute(dom, namespace, name, attribute, value):
return filter(lambda n: n.nodeType == n.ELEMENT_NODE and \
n.namespaceURI == namespace and \
n.localName == name and \
n.getAttribute(attribute) == value,
dom.childNodes)
def getOnlyChildByName(dom, namespace, name):
kids = getChildrenByName(dom, namespace, name)
if len(kids) == 0:
return None
if len(kids) > 1:
raise WrongNumberOfChildren(
'<%s> node should have at most one <%s xmlns="%s"/> child' %
(dom.tagName, name, namespace))
return kids[0]
def getAnnotationByName(dom, name):
kids = getChildrenByNameAndAttribute(dom, None, 'annotation', 'name', name)
if len(kids) == 0:
return None
if len(kids) > 1:
raise WrongNumberOfChildren(
'<%s> node should have at most one %s annotation' %
(dom.tagName, name))
return kids[0].getAttribute('value')
def getNamespace(n):
if n.namespaceURI is not None:
return n.namespaceURI
ancestor = n.parentNode
while ancestor is not None and ancestor.nodeType == n.ELEMENT_NODE:
if n.prefix is None:
xmlns = ancestor.getAttribute('xmlns')
else:
xmlns = ancestor.getAttribute('xmlns:%s' % n.prefix)
if xmlns is not None:
return xmlns
ancestor = ancestor.parentNode
def build_name(namespace, name):
"""Returns a name by appending `name' to the namespace of this object.
"""
return '.'.join(
filter(lambda n: n is not None and n != '',
[namespace, name.replace(' ', '')])
)
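# Editor's note, for example:
#   build_name('org.freedesktop.Telepathy', 'Channel Type')
# returns 'org.freedesktop.Telepathy.ChannelType' (spaces are stripped from
# the name and empty namespace components are dropped).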
class Base(object):
"""The base class for any type of XML node in the spec that implements the
'name' attribute.
Don't instantiate this class directly.
"""
devhelp_name = ""
def __init__(self, parent, namespace, dom):
self.short_name = name = dom.getAttribute('name')
self.namespace = namespace
self.name = build_name(namespace, name)
self.parent = parent
for child in dom.childNodes:
if (child.nodeType == dom.TEXT_NODE and
child.data.strip() != ''):
raise BrokenHTML('Text found in node %s of %s, did you mean '
'to use <tp:docstring/>? Offending text:\n\n%s' %
(self.__class__.__name__, self.parent, child.data.strip()))
elif child.nodeType == dom.ELEMENT_NODE:
if child.tagName in ('p', 'em', 'strong', 'ul', 'li', 'dl',
'a', 'tt', 'code'):
raise BrokenHTML('HTML element <%s> found in node %s of '
'%s, did you mean to use <tp:docstring/>?' %
(child.tagName, self.__class__.__name__, self.parent))
self.docstring = getOnlyChildByName(dom, XMLNS_TP, 'docstring')
self.added = getOnlyChildByName(dom, XMLNS_TP, 'added')
self.deprecated = getOnlyChildByName(dom, XMLNS_TP, 'deprecated')
if self.deprecated is None:
self.is_deprecated = (getAnnotationByName(dom, 'org.freedesktop.DBus.Deprecated') == 'true')
else:
self.is_deprecated = True
self.changed = getChildrenByName(dom, XMLNS_TP, 'changed')
self.validate()
def validate(self):
if self.short_name == '':
raise UnnamedItem("Node %s of %s has no name" % (
self.__class__.__name__, self.parent))
def check_consistency(self):
pass
def get_type_name(self):
return self.__class__.__name__
def get_spec(self):
return self.parent.get_spec()
def get_root_namespace(self):
return self.get_interface().name
def get_interface(self):
return self.parent.get_interface()
def get_anchor(self):
return "%s:%s" % (
self.get_type_name().replace(' ', '-'),
self.short_name)
def get_url(self):
return "%s#%s" % (self.get_interface().get_url(), self.get_anchor())
def _get_generic_with_ver(self, nnode, htmlclass, txt):
if nnode is None:
return ''
else:
# make a copy of this node, turn it into a HTML <div> tag
node = nnode.cloneNode(True)
node.tagName = 'div'
node.baseURI = None
node.setAttribute('class', 'annotation %s' % htmlclass)
try:
node.removeAttribute('version')
doc = self.get_spec().document
span = doc.createElement('span')
span.setAttribute('class', 'version')
text = doc.createTextNode(
txt % nnode.getAttribute('version') + ' ')
span.appendChild(text)
node.insertBefore(span, node.firstChild)
except xml.dom.NotFoundErr:
raise MissingVersion(
'%s was %s, but gives no version' % (self, htmlclass))
self._convert_to_html(node)
return node.toxml().encode('ascii', 'xmlcharrefreplace')
def get_added(self):
return self._get_generic_with_ver(self.added, 'added',
"Added in %s.")
def get_deprecated(self):
if self.deprecated is None:
if self.is_deprecated:
return '<div class="annotation deprecated no-version">Deprecated.</div>'
else:
return ''
else:
return self._get_generic_with_ver(self.deprecated, 'deprecated',
"Deprecated since %s.")
def get_changed(self):
return '\n'.join(map(lambda n:
self._get_generic_with_ver(n, 'changed', "Changed in %s."),
self.changed))
def get_docstring(self):
"""Get the docstring for this node, but do node substitution to
rewrite types, interfaces, etc. as links.
"""
if self.docstring is None:
return ''
else:
# make a copy of this node, turn it into a HTML <div> tag
node = self.docstring.cloneNode(True)
node.tagName = 'div'
node.baseURI = None
node.setAttribute('class', 'docstring')
self._convert_to_html(node)
return node.toxml().encode('ascii', 'xmlcharrefreplace')
def _convert_to_html(self, node):
spec = self.get_spec()
doc = spec.document
root_namespace = self.get_root_namespace()
# rewrite <tp:rationale>
for n in node.getElementsByTagNameNS(XMLNS_TP, 'rationale'):
nested = n.getElementsByTagNameNS(XMLNS_TP, 'rationale')
if nested:
raise Xzibit(n, nested[0])
"""
<div class='rationale'>
<h5>Rationale:</h5>
<div/> <- inner_div
</div>
"""
outer_div = doc.createElement('div')
outer_div.setAttribute('class', 'rationale')
h5 = doc.createElement('h5')
h5.appendChild(doc.createTextNode('Rationale:'))
outer_div.appendChild(h5)
inner_div = doc.createElement('div')
outer_div.appendChild(inner_div)
for rationale_body in n.childNodes:
inner_div.appendChild(rationale_body.cloneNode(True))
n.parentNode.replaceChild(outer_div, n)
# rewrite <tp:type>
for n in node.getElementsByTagNameNS(XMLNS_TP, 'type'):
t = spec.lookup_type(getText(n))
n.tagName = 'a'
n.namespaceURI = None
n.setAttribute('href', t.get_url())
# rewrite <tp:value-ref>
for n in node.getElementsByTagNameNS(XMLNS_TP, 'value-ref'):
if n.hasAttribute('type'):
type_name = n.getAttribute('type')
value_name = getText(n)
t = spec.lookup_type(type_name)
assert isinstance(t, EnumLike), ("%s is not an enum or flags type"
% type_name)
else:
type_name = getText(n)
value_name_parts = []
while type_name not in spec.types:
type_name, _, rest = type_name.rpartition('_')
value_name_parts.insert(0, rest)
if not type_name:
raise ValueError("No substrings of '%s' describe "
"a valid type." % getText(n))
value_name = '_'.join(value_name_parts)
t = spec.lookup_type(type_name)
assert isinstance(t, EnumLike), ("%s is not an enum or flags type"
% type_name)
n.tagName = 'a'
n.namespaceURI = None
n.setAttribute('href', t.get_url())
short_names = [val.short_name for val in t.values]
if value_name not in short_names:
raise ValueError("'%s' is not a valid value of '%s'. "
"Valid values are %s" %
(value_name, type_name, short_names))
# rewrite <tp:error-ref>
error_ns = spec.spec_namespace + '.Error.'
for n in node.getElementsByTagNameNS(XMLNS_TP, 'error-ref'):
try:
e = spec.errors[error_ns + getText(n)]
except KeyError:
print >> sys.stderr, """
WARNING: Error '%s' not known in error namespace '%s'
(<tp:error-ref> in %s)
""".strip() % (getText(n), error_ns[:-1], self)
continue
n.tagName = 'a'
n.namespaceURI = None
n.setAttribute('href', e.get_url())
n.setAttribute('title', error_ns + getText(n))
# rewrite <tp:member-ref>
for n in node.getElementsByTagNameNS(XMLNS_TP, 'member-ref'):
key = getText(n)
try:
o = spec.lookup(key, namespace=root_namespace)
except KeyError:
print >> sys.stderr, """
WARNING: Key '%s' not known in namespace '%s'
(<tp:member-ref> in %s)
""".strip() % (key, root_namespace, self)
continue
n.tagName = 'a'
n.namespaceURI = None
n.setAttribute('href', o.get_url())
n.setAttribute('title', o.get_title())
# rewrite <tp:dbus-ref>
for n in node.getElementsByTagNameNS(XMLNS_TP, 'dbus-ref'):
namespace = n.getAttribute('namespace')
key = getText(n)
if namespace.startswith('ofdT.') or namespace == 'ofdT':
namespace = namespace.replace('ofdT',
'org.freedesktop.Telepathy')
try:
o = spec.lookup(key, namespace=namespace)
except KeyError:
print >> sys.stderr, """
WARNING: Key '%s' not known in namespace '%s'
(<tp:dbus-ref> in %s)
""".strip() % (key, namespace, self)
continue
n.tagName = 'a'
n.namespaceURI = None
n.setAttribute('href', o.get_url())
n.setAttribute('title', o.get_title())
# rewrite <tp:token-ref>
for n in node.getElementsByTagNameNS(XMLNS_TP, 'token-ref'):
key = getText(n)
namespace = n.getAttribute('namespace')
if namespace:
if namespace.startswith('ofdT.'):
namespace = 'org.freedesktop.Telepathy.' + namespace[5:]
else:
namespace = root_namespace
try:
try:
if '/' in key:
sep = '.'
else:
sep = '/'
o = spec.lookup(namespace + sep + key, None)
except KeyError:
o = spec.lookup(key, None)
except KeyError:
print >> sys.stderr, """
WARNING: Key '%s' not known in namespace '%s'
(<tp:token-ref> in %s)
""".strip() % (key, namespace, self)
continue
n.tagName = 'a'
n.namespaceURI = None
n.setAttribute('href', o.get_url())
n.setAttribute('title', o.get_title())
# Fill in <tp:list-dbus-property-parameters/> with a linkified list of
# properties which are also connection parameters
for n in node.getElementsByTagNameNS(XMLNS_TP,
'list-dbus-property-parameters'):
n.tagName = 'ul'
n.namespaceURI = None
props = (p for interface in spec.interfaces
for p in interface.properties
if p.is_connection_parameter
)
for p in props:
link_text = doc.createTextNode(p.name)
a = doc.createElement('a')
a.setAttribute('href', p.get_url())
a.appendChild(link_text)
# FIXME: it'd be nice to include the rich type of the property
# here too.
type_text = doc.createTextNode(' (%s)' % p.dbus_type)
li = doc.createElement('li')
li.appendChild(a)
li.appendChild(type_text)
n.appendChild(li)
def get_title(self):
return '%s %s' % (self.get_type_name(), self.name)
def __repr__(self):
return '%s(%s)' % (self.__class__.__name__, self.name)
def get_index_entries(self):
context = self.parent.get_index_context()
return set([
'%s (%s in %s)' % (self.short_name, self.get_type_name(), context),
'%s %s' % (self.get_type_name(), self.name)])
def get_index_context(self):
return self.short_name
class DBusConstruct(Base):
"""Base class for signals, methods and properties."""
def __init__(self, parent, namespace, dom):
super(DBusConstruct, self).__init__(parent, namespace, dom)
self.name_for_bindings = dom.getAttributeNS(XMLNS_TP,
'name-for-bindings')
if not self.name_for_bindings:
raise BadNameForBindings('%s has no name-for-bindings'
% self)
if self.name_for_bindings.replace('_', '') != self.short_name:
raise BadNameForBindings('%s name-for-bindings = %s does not '
'match short_name = %s' % (self, self.name_for_bindings,
self.short_name))
class PossibleError(Base):
def __init__(self, parent, namespace, dom):
super(PossibleError, self).__init__(parent, namespace, dom)
def get_error(self):
spec = self.get_spec()
try:
return spec.errors[self.name]
except KeyError:
if not spec.allow_externals:
print >> sys.stderr, """
WARNING: Error not known: '%s'
(<tp:possible-error> in %s)
""".strip() % (self.name, self.parent)
return External(self.name)
def get_url(self):
return self.get_error().get_url()
def get_title(self):
return self.get_error().get_title()
def get_docstring(self):
d = super(PossibleError, self).get_docstring()
if d == '':
return self.get_error().get_docstring()
else:
return d
class Method(DBusConstruct):
devhelp_name = "function"
def __init__(self, parent, namespace, dom):
super(Method, self).__init__(parent, namespace, dom)
args = build_list(self, Arg, self.name,
dom.getElementsByTagName('arg'))
# separate arguments as input and output arguments
self.in_args = filter(lambda a: a.direction == Arg.DIRECTION_IN, args)
self.out_args = filter(lambda a: a.direction == Arg.DIRECTION_OUT, args)
for arg in args:
if arg.direction == Arg.DIRECTION_IN or \
arg.direction == Arg.DIRECTION_OUT:
continue
raise UnknownDirection("'%s' of method '%s' does not specify a suitable direction" % (arg, self))
self.possible_errors = build_list(self, PossibleError, None,
dom.getElementsByTagNameNS(XMLNS_TP, 'error'))
self.no_reply = (getAnnotationByName(dom, 'org.freedesktop.DBus.Method.NoReply') == 'true')
def get_in_args(self):
return ', '.join(map(lambda a: a.spec_name(), self.in_args))
def get_out_args(self):
if len(self.out_args) > 0:
return ', '.join(map(lambda a: a.spec_name(), self.out_args))
else:
return 'nothing'
def check_consistency(self):
for x in self.in_args:
x.check_consistency()
for x in self.out_args:
x.check_consistency()
class Typed(Base):
"""The base class for all typed nodes (i.e. Arg and Property).
Don't instantiate this class directly.
"""
def __init__(self, parent, namespace, dom):
super(Typed, self).__init__(parent, namespace, dom)
self.type = dom.getAttributeNS(XMLNS_TP, 'type')
self.dbus_type = dom.getAttribute('type')
# check we have a dbus type
if self.dbus_type == '':
raise UntypedItem("Node referred to by '%s' has no type" % dom.toxml())
def get_type(self):
return self.get_spec().lookup_type(self.type)
def get_type_url(self):
t = self.get_type()
if t is None: return ''
else: return t.get_url()
def get_type_title(self):
t = self.get_type()
if t is None: return ''
else: return t.get_title()
def check_consistency(self):
t = self.get_type()
if t is None:
if self.dbus_type not in (
# Basic types
'y', 'b', 'n', 'q', 'i', 'u', 'x', 't', 'd', 's', 'v', 'o',
'g',
# QtDBus generic support
'as', 'ay', 'av', 'a{sv}',
# telepathy-qt4 generic support
'ab', 'an', 'aq', 'ai', 'au', 'ax', 'at', 'ad', 'ao', 'ag',
):
raise TypeMismatch('%r type %s needs to be a named tp:type '
'for QtDBus interoperability'
% (self, self.dbus_type))
else:
if self.dbus_type != t.dbus_type:
raise TypeMismatch('%r type %s isn\'t tp:type %s\'s type %s'
% (self, self.dbus_type, t, t.dbus_type))
def spec_name(self):
return '%s: %s' % (self.dbus_type, self.short_name)
def __repr__(self):
return '%s(%s:%s)' % (self.__class__.__name__, self.name, self.dbus_type)
class HasEmitsChangedAnnotation(object):
EMITS_CHANGED_UNKNOWN = 0
EMITS_CHANGED_NONE = 1
EMITS_CHANGED_UPDATES = 2
EMITS_CHANGED_INVALIDATES = 3
# According to the D-Bus specification, EmitsChangedSignal defaults
# to true, but - realistically - this cannot be assumed for old specs.
# As a result, we treat the absence of the annotation as "unknown".
__MAPPING = { None: EMITS_CHANGED_UNKNOWN,
'false': EMITS_CHANGED_NONE,
'invalidates': EMITS_CHANGED_INVALIDATES,
'true': EMITS_CHANGED_UPDATES,
}
__ANNOTATION = 'org.freedesktop.DBus.Property.EmitsChangedSignal'
def _get_emits_changed(self, dom):
emits_changed = getAnnotationByName(dom, self.__ANNOTATION)
try:
return self.__MAPPING[emits_changed]
except KeyError:
print >> sys.stderr, """
WARNING: <annotation name='%s'/> has unknown value '%s'
(in %s)
""".strip() % (self.__ANNOTATION, emits_changed, self)
            return self.EMITS_CHANGED_UNKNOWN
class Property(DBusConstruct, Typed, HasEmitsChangedAnnotation):
ACCESS_READ = 1
ACCESS_WRITE = 2
ACCESS_READWRITE = ACCESS_READ | ACCESS_WRITE
def __init__(self, parent, namespace, dom):
super(Property, self).__init__(parent, namespace, dom)
access = dom.getAttribute('access')
if access == 'read':
self.access = self.ACCESS_READ
elif access == 'write':
self.access = self.ACCESS_WRITE
elif access == 'readwrite':
self.access = self.ACCESS_READWRITE
else:
raise UnknownAccess("Unknown access '%s' on %s" % (access, self))
is_cp = dom.getAttributeNS(XMLNS_TP, 'is-connection-parameter')
self.is_connection_parameter = is_cp != ''
immutable = dom.getAttributeNS(XMLNS_TP, 'immutable')
self.immutable = immutable != ''
self.sometimes_immutable = immutable == 'sometimes'
requestable = dom.getAttributeNS(XMLNS_TP, 'requestable')
self.requestable = requestable != ''
self.sometimes_requestable = requestable == 'sometimes'
self.emits_changed = self._get_emits_changed(dom)
if self.emits_changed == self.EMITS_CHANGED_UNKNOWN:
# If the <property> doesn't have the annotation, grab it from the
# interface.
self.emits_changed = parent.emits_changed
def get_access(self):
if self.access & self.ACCESS_READ and self.access & self.ACCESS_WRITE:
return 'Read/Write'
elif self.access & self.ACCESS_READ:
return 'Read only'
elif self.access & self.ACCESS_WRITE:
return 'Write only'
def get_flag_summary(self):
descriptions = []
if self.sometimes_immutable:
descriptions.append("Sometimes immutable")
elif self.immutable:
descriptions.append("Immutable")
if self.sometimes_requestable:
descriptions.append("Sometimes requestable")
elif self.requestable:
descriptions.append("Requestable")
return ', '.join(descriptions)
class AwkwardTelepathyProperty(Typed):
def __init__(self, parent, namespace, dom):
Typed.__init__(self, parent, namespace, dom)
print >> sys.stderr, """
WARNING: Old-style Telepathy properties are deprecated!
(<tp:property> in %s)
""".strip() % (parent)
def get_type_name(self):
return 'Telepathy Property'
class Arg(Typed):
DIRECTION_IN, DIRECTION_OUT, DIRECTION_UNSPECIFIED = range(3)
def __init__(self, parent, namespace, dom):
super(Arg, self).__init__(parent, namespace, dom)
direction = dom.getAttribute('direction')
if direction == 'in':
self.direction = self.DIRECTION_IN
elif direction == 'out':
self.direction = self.DIRECTION_OUT
elif direction == '':
self.direction = self.DIRECTION_UNSPECIFIED
else:
raise UnknownDirection("Unknown direction '%s' on %s" % (
direction, self.parent))
class Signal(DBusConstruct):
def __init__(self, parent, namespace, dom):
super(Signal, self).__init__(parent, namespace, dom)
self.args = build_list(self, Arg, self.name,
dom.getElementsByTagName('arg'))
for arg in self.args:
if arg.direction == Arg.DIRECTION_UNSPECIFIED:
continue
raise UnknownDirection("'%s' of signal '%s' does not specify a suitable direction" % (arg, self))
def get_args(self):
return ', '.join(map(lambda a: a.spec_name(), self.args))
class External(object):
"""External objects are objects that are referred to in another spec.
We have to attempt to look them up if at all possible.
"""
def __init__(self, name):
self.name = self.short_name = name
def get_url(self):
return None
def get_title(self):
return 'External %s' % self.name
def get_docstring(self):
return None
def __repr__(self):
return '%s(%s)' % (self.__class__.__name__, self.name)
class Interface(Base, HasEmitsChangedAnnotation):
def __init__(self, parent, namespace, dom, spec_namespace):
super(Interface, self).__init__(parent, namespace, dom)
# For code generation, the <node> provides a name to be used in
# C function names, etc.
parent = dom.parentNode
if parent.localName != 'node':
raise BadNameForBindings("%s's parent is not a <node>" % self)
node_name = parent.getAttribute('name')
if node_name[0] != '/' or not node_name[1:]:
raise BadNameForBindings("%s's parent <node> has bad name %s"
% (self, node_name))
self.name_for_bindings = node_name[1:]
# If you're writing a spec with more than one top-level namespace, you
# probably want to replace spec_namespace with a list.
if self.name.startswith(spec_namespace + "."):
self.short_name = self.name[len(spec_namespace) + 1:]
else:
self.short_name = self.name
# Bit of a hack, but... I want useful information about the current
# page to fit in a tab in Chromium. I'm prepared to be disagreed with.
self.really_short_name = (
('.'+self.short_name).replace('.Interface.', '.I.')
.replace('.Channel.', '.Chan.')
.replace('.Connection.', '.Conn.')
.replace('.Type.', '.T.')[1:]
)
self.emits_changed = self._get_emits_changed(dom)
# build lists of methods, etc., in this interface
self.methods = build_list(self, Method, self.name,
dom.getElementsByTagName('method'))
self.properties = build_list(self, Property, self.name,
dom.getElementsByTagName('property'))
self.signals = build_list(self, Signal, self.name,
dom.getElementsByTagName('signal'))
self.tpproperties = build_list(self, AwkwardTelepathyProperty,
self.name, dom.getElementsByTagNameNS(XMLNS_TP, 'property'))
hct_elems = (
dom.getElementsByTagNameNS(XMLNS_TP, 'handler-capability-token') +
dom.getElementsByTagNameNS(XMLNS_TP, 'hct'))
self.handler_capability_tokens = build_list(self,
HandlerCapabilityToken, self.name,
hct_elems)
self.contact_attributes = build_list(self, ContactAttribute, self.name,
dom.getElementsByTagNameNS(XMLNS_TP, 'contact-attribute'))
self.client_interests = build_list(self, ClientInterest, self.name,
dom.getElementsByTagNameNS(XMLNS_TP, 'client-interest'))
# build a list of types in this interface
self.types = parse_types(self, dom, self.name)
# find out if this interface causes havoc
self.causes_havoc = dom.getAttributeNS(XMLNS_TP, 'causes-havoc')
if self.causes_havoc == '': self.causes_havoc = None
# find out what we're required to also implement
self.requires = map(lambda n: n.getAttribute('interface'),
getChildrenByName(dom, XMLNS_TP, 'requires'))
def map_xor(element):
return map(lambda n: n.getAttribute('interface'),
getChildrenByName(element, XMLNS_TP, 'requires'))
self.xor_requires = map(map_xor,
getChildrenByName(dom, XMLNS_TP, 'xor-requires'))
# let's make sure there's nothing we don't know about here
self.check_for_odd_children(dom)
self.is_channel_related = self.name.startswith(spec_namespace + '.Channel')
def get_interface(self):
return self
def lookup_requires(self, r):
spec = self.get_spec()
try:
return spec.lookup(r)
except KeyError:
if not spec.allow_externals:
print >> sys.stderr, """
WARNING: Interface not known: '%s'
(<tp:requires> in %s)
""".strip() % (r, self)
return External(r)
def get_requires(self):
return map(self.lookup_requires, self.requires)
def get_xor_requires(self):
def xor_lookup(r):
return map(self.lookup_requires, r)
return map(xor_lookup, self.xor_requires)
def get_url(self):
return '%s.html' % self.name_for_bindings
def check_for_odd_children(self, dom):
expected = [
(None, 'annotation'),
(None, 'method'),
(None, 'property'),
(None, 'signal'),
(XMLNS_TP, 'property'),
(XMLNS_TP, 'handler-capability-token'),
(XMLNS_TP, 'hct'),
(XMLNS_TP, 'contact-attribute'),
(XMLNS_TP, 'client-interest'),
(XMLNS_TP, 'simple-type'),
(XMLNS_TP, 'enum'),
(XMLNS_TP, 'flags'),
(XMLNS_TP, 'mapping'),
(XMLNS_TP, 'struct'),
(XMLNS_TP, 'external-type'),
(XMLNS_TP, 'requires'),
(XMLNS_TP, 'xor-requires'),
(XMLNS_TP, 'added'),
(XMLNS_TP, 'changed'),
(XMLNS_TP, 'deprecated'),
(XMLNS_TP, 'docstring')
]
unexpected = [
x for x in dom.childNodes
if isinstance(x, xml.dom.minidom.Element) and
(x.namespaceURI, x.localName) not in expected
]
if unexpected:
print >> sys.stderr, """
WARNING: Unknown element(s): %s
(in interface '%s')
""".strip() % (', '.join([x.tagName for x in unexpected]), self.name)
class Error(Base):
def get_url(self):
return 'errors.html#%s' % self.get_anchor()
def get_root_namespace(self):
return self.namespace
class DBusList(object):
"""Stores a list of a given DBusType. Provides some basic validation to
determine whether or not the type is sane.
"""
def __init__(self, child):
self.child = child
if isinstance(child, DBusType):
self.ultimate = child
self.depth = 1
if self.child.array_name == '':
raise UnsupportedArray("Type '%s' does not support being "
"used in an array" % self.child.name)
else:
self.name = build_name(self.child.namespace,
self.child.array_name)
self.short_name = self.child.array_name
elif isinstance(child, DBusList):
self.ultimate = child.ultimate
self.depth = child.depth + 1
self.name = self.child.name + '_List'
self.short_name = self.child.short_name + '_List'
# check that our child can operate at this depth
            maxdepth = int(self.ultimate.array_depth or 1)  # unset array-depth: assume 1
if self.depth > maxdepth:
raise TypeError("Type '%s' has exceeded its maximum depth (%i)" % (self, maxdepth))
else:
raise TypeError("DBusList can contain only a DBusType or DBusList not '%s'" % child)
self.dbus_type = 'a' + self.child.dbus_type
def get_url(self):
return self.ultimate.get_url()
def get_title(self):
return "Array of %s" % self.child.get_title()
def __repr__(self):
return 'Array(%s)' % self.child
class DBusType(Base):
"""The base class for all D-Bus types referred to in the spec.
Don't instantiate this class directly.
"""
devhelp_name = "typedef"
def __init__(self, parent, namespace, dom):
super(DBusType, self).__init__(parent, namespace, dom)
self.dbus_type = dom.getAttribute('type')
self.array_name = dom.getAttribute('array-name')
self.array_depth = dom.getAttribute('array-depth')
self.name = self.short_name
def get_root_namespace(self):
return self.namespace
def get_breakdown(self):
return ''
def get_url(self):
if isinstance(self.parent, Interface):
html = self.parent.get_url()
else:
html = 'generic-types.html'
return '%s#%s' % (html, self.get_anchor())
class SimpleType(DBusType):
def get_type_name(self):
return 'Simple Type'
class ExternalType(DBusType):
def __init__(self, parent, namespace, dom):
super(ExternalType, self).__init__(parent, namespace, dom)
# FIXME: until we are able to cross reference external types to learn
# about their array names, we're just going to assume they work like
# this
self.array_name = self.short_name + '_List'
def get_type_name(self):
return 'External Type'
class StructLike(DBusType):
"""Base class for all D-Bus types that look kind of like Structs
Don't instantiate this class directly.
"""
class StructMember(Typed):
def get_root_namespace(self):
return self.parent.get_root_namespace()
def __init__(self, parent, namespace, dom):
super(StructLike, self).__init__(parent, namespace, dom)
self.members = build_list(self, StructLike.StructMember, None,
dom.getElementsByTagNameNS(XMLNS_TP, 'member'))
def get_breakdown(self):
        out = '<ul>\n'
        for member in self.members:
            # attempt to look the member up in the type system
            t = member.get_type()
            out += '<li>%s — %s' % (member.name, member.dbus_type)
            if t: out += ' (<a href="%s" title="%s">%s</a>)' % (
                t.get_url(), t.get_title(), t.short_name)
            out += '</li>\n'
            out += member.get_docstring()
        out += '</ul>\n'
        return out
class Mapping(StructLike):
def __init__(self, parent, namespace, dom):
super(Mapping, self).__init__(parent, namespace, dom)
if len(self.members) != 2:
raise WrongNumberOfChildren('%s node should have exactly two tp:members'
% dom.tagName)
# rewrite the D-Bus type
self.dbus_type = 'a{%s}' % ''.join(map(lambda m: m.dbus_type, self.members))
# not sure why tp:mapping sometimes has a type attribute, but
# make sure it's right.
t = dom.getAttribute('type')
if t and self.dbus_type != t:
raise TypeMismatch('%r reports type is %s but actual type is %s'
% (self, t, self.dbus_type))
class Struct(StructLike):
devhelp_name = "struct"
def __init__(self, parent, namespace, dom):
super(Struct, self).__init__(parent, namespace, dom)
if len(self.members) == 0:
raise WrongNumberOfChildren('%s node should have a tp:member'
% dom.tagName)
# rewrite the D-Bus type
self.dbus_type = '(%s)' % ''.join(map(lambda m: m.dbus_type, self.members))
class EnumLike(DBusType):
"""Base class for all D-Bus types that look kind of like Enums
Don't instantiate this class directly.
"""
class EnumValue(Base):
def __init__(self, parent, namespace, dom):
super(EnumLike.EnumValue, self).__init__(parent, namespace, dom)
# rewrite self.name
self.short_name = dom.getAttribute('suffix')
self.name = build_name(namespace, self.short_name)
self.value = dom.getAttribute('value')
super(EnumLike.EnumValue, self).validate()
def validate(self):
pass
def get_root_namespace(self):
return self.parent.get_root_namespace()
def get_breakdown(self):
        out = '<ul>\n'
        for value in self.values:
            out += '<li>%s (%s)</li>\n' % (value.short_name, value.value)
            out += value.get_added()
            out += value.get_changed()
            out += value.get_deprecated()
            out += value.get_docstring()
        out += '</ul>\n'
        return out
def check_for_duplicates(self):
# make sure no two values have the same value
for u in self.values:
for v in [x for x in self.values if x is not u]:
if u.value == v.value:
raise DuplicateEnumValueValue('%s %s has two values '
'with the same value: %s=%s and %s=%s' % \
(self.__class__.__name__, self.name, \
u.short_name, u.value, v.short_name, v.value))
class Enum(EnumLike):
devhelp_name = "enum"
def __init__(self, parent, namespace, dom):
super(Enum, self).__init__(parent, namespace, dom)
if self.name.endswith('Flag') or self.name.endswith('Flags'):
raise MismatchedFlagsAndEnum('%s should probably be tp:flags, '
'not tp:enum' % self.name)
if dom.getElementsByTagNameNS(XMLNS_TP, 'flag'):
raise MismatchedFlagsAndEnum('%s is a tp:enum, so it should not '
'contain tp:flag' % self.name)
self.values = build_list(self, EnumLike.EnumValue, self.name,
dom.getElementsByTagNameNS(XMLNS_TP, 'enumvalue'))
self.check_for_duplicates()
class Flags(EnumLike):
def __init__(self, parent, namespace, dom):
super(Flags, self).__init__(parent, namespace, dom)
if dom.getAttribute('type') != 'u':
raise BadFlagsType('Flags %s doesn\'t make sense to be of '
'type "%s" (only type "u" makes sense")' % (
self.name, dom.getAttribute('type')))
if dom.getElementsByTagNameNS(XMLNS_TP, 'enumvalue'):
raise MismatchedFlagsAndEnum('%s is a tp:flags, so it should not '
'contain tp:enumvalue' % self.name)
self.values = build_list(self, EnumLike.EnumValue, self.name,
dom.getElementsByTagNameNS(XMLNS_TP, 'flag'))
self.flags = self.values # in case you're looking for it
self.check_for_duplicates()
# make sure all these values are sane
for flag in self.values:
v = int(flag.value)
# positive x is a power of two if (x & (x - 1)) = 0.
if v == 0 or (v & (v - 1)) != 0:
raise BadFlagValue('Flags %s has bad value (not a power of '
'two): %s=%s' % (self.name, flag.short_name, v))
class TokenBase(Base):
devhelp_name = "macro" # it's a constant, which is near enough...
separator = '/'
def __init__(self, parent, namespace, dom):
super(TokenBase, self).__init__(parent, namespace, dom)
        items = [namespace]
        if self.short_name != '':
            items.append(self.short_name)
        self.name = self.separator.join(items)
class ContactAttribute(TokenBase, Typed):
def get_type_name(self):
return 'Contact Attribute'
class HandlerCapabilityToken(TokenBase):
def get_type_name(self):
return 'Handler Capability Token'
def __init__(self, parent, namespace, dom):
super(HandlerCapabilityToken, self).__init__(parent, namespace, dom)
is_family = dom.getAttribute('is-family')
assert is_family in ('yes', 'no', '')
self.is_family = (is_family == 'yes')
class ClientInterest(TokenBase):
def __init__(self, parent, namespace, dom):
super(ClientInterest, self).__init__(parent, namespace, dom)
self.short_name = self.name
def get_type_name(self):
return 'Client Interest'
def validate(self):
pass
class SectionBase(object):
"""A SectionBase is an abstract base class for any type of node that can
contain a <tp:section>, which means the top-level Spec object, or any
Section object.
It should not be instantiated directly.
"""
def __init__(self, dom, spec_namespace):
self.spec_namespace = spec_namespace
self.items = []
def recurse(nodes):
# iterate through the list of child nodes
for node in nodes:
if node.nodeType != node.ELEMENT_NODE: continue
if node.tagName == 'node':
# recurse into this level for interesting items
recurse(node.childNodes)
elif node.namespaceURI == XMLNS_TP and \
node.localName == 'section':
self.items.append(Section(self, None, node,
spec_namespace))
elif node.tagName == 'interface':
self.items.append(Interface(self, None, node,
spec_namespace))
recurse(dom.childNodes)
def get_index_context(self):
return self.spec_namespace
class Section(Base, SectionBase):
def __init__(self, parent, namespace, dom, spec_namespace):
Base.__init__(self, parent, namespace, dom)
SectionBase.__init__(self, dom, spec_namespace)
def get_root_namespace(self):
return None
class ErrorsSection(Section):
def validate(self):
pass
class Spec(SectionBase):
def __init__(self, dom, spec_namespace, allow_externals=False):
self.document = dom
self.spec_namespace = spec_namespace
self.short_name = spec_namespace
self.allow_externals = allow_externals
# build a dictionary of errors in this spec
try:
errorsnode = dom.getElementsByTagNameNS(XMLNS_TP, 'errors')[0]
self.errors = build_dict(self, Error,
errorsnode.getAttribute('namespace'),
errorsnode.getElementsByTagNameNS(XMLNS_TP, 'error'))
self.errors_section = ErrorsSection(self, None, errorsnode,
spec_namespace)
except IndexError:
self.errors = {}
self.errors_section = None
self.sorted_errors = sorted(self.errors.values(),
key=lambda e: e.name)
# build a list of generic types
        self.generic_types = reduce(lambda a, b: a + b,
map(lambda l: parse_types(self, l),
dom.getElementsByTagNameNS(XMLNS_TP, 'generic-types')),
[])
# create a top-level section for this Spec
SectionBase.__init__(self, dom.documentElement, spec_namespace)
# build a list of interfaces in this spec
self.interfaces = []
def recurse(items):
for item in items:
if isinstance(item, Section): recurse(item.items)
elif isinstance(item, Interface): self.interfaces.append(item)
recurse(self.items)
# build a giant dictionary of everything (interfaces, methods, signals
# and properties); also build a dictionary of types
self.everything = {}
self.types = {}
for type in self.generic_types:
self.types[type.name] = type
for interface in self.interfaces:
self.everything[interface.name] = interface
for things in [ 'methods', 'signals', 'properties',
'tpproperties', 'contact_attributes',
'handler_capability_tokens',
'client_interests' ]:
for thing in getattr(interface, things):
self.everything[thing.name] = thing
for type in interface.types:
self.types[type.name] = type
# get some extra bits for the HTML
node = dom.getElementsByTagNameNS(XMLNS_TP, 'spec')[0]
self.title = getText(getChildrenByName(node, XMLNS_TP, 'title')[0])
try:
self.version = getText(getChildrenByName(node, XMLNS_TP, 'version')[0])
except IndexError:
self.version = None
self.copyrights = map(getText,
getChildrenByName(node, XMLNS_TP, 'copyright'))
try:
license = getChildrenByName(node, XMLNS_TP, 'license')[0]
self.license = map(getText, license.getElementsByTagName('p'))
except IndexError:
self.license = []
self.check_consistency()
def check_consistency(self):
for x in self.everything.values():
x.check_consistency()
def get_spec(self):
return self
def lookup(self, name, namespace=None):
key = build_name(namespace, name)
return self.everything[key]
def lookup_type(self, type_):
if type_.endswith('[]'):
return DBusList(self.lookup_type(type_[:-2]))
if type_ == '': return None
elif type_ in self.types:
return self.types[type_]
raise UnknownType("Type '%s' is unknown" % type_)
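    # Illustrative: lookup_type('Contact_Handle[]') wraps the named type in
    # a DBusList; each trailing '[]' adds one level of array nesting.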
def __repr__(self):
return '%s(%s)' % (self.__class__.__name__, self.title)
def build_dict(parent, type_, namespace, nodes):
"""Build a dictionary of D-Bus names to Python objects representing that
name using the XML node for that item in the spec.
e.g. 'org.freedesktop.Telepathy.Channel' : Interface(Channel)
Works for any Python object inheriting from 'Base' whose XML node
implements the 'name' attribute.
"""
def build_tuple(node):
o = type_(parent, namespace, node)
        return (o.name, o)
return dict(build_tuple(n) for n in nodes)
def build_list(parent, type_, namespace, nodes):
return map(lambda node: type_(parent, namespace, node), nodes)
def parse_types(parent, dom, namespace = None):
"""Parse all of the types of type nodes mentioned in 't' from the node
'dom' and insert them into the dictionary 'd'.
"""
t = [
(SimpleType, 'simple-type'),
(Enum, 'enum'),
(Flags, 'flags'),
(Mapping, 'mapping'),
(Struct, 'struct'),
(ExternalType, 'external-type'),
]
types = []
for (type_, tagname) in t:
types += build_list(parent, type_, namespace,
dom.getElementsByTagNameNS(XMLNS_TP, tagname))
return types
def parse(filename, spec_namespace, allow_externals=False):
dom = xml.dom.minidom.parse(filename)
xincludator.xincludate(dom, filename)
spec = Spec(dom, spec_namespace, allow_externals=allow_externals)
return spec
if __name__ == '__main__':
parse(sys.argv[1])
|
be-cloud-be/horizon-addons | refs/heads/9.0 | server/addons/account_check_printing/account_journal.py | 29 | # -*- coding: utf-8 -*-
from openerp import models, fields, api, _
from openerp.exceptions import ValidationError
class AccountJournal(models.Model):
_inherit = "account.journal"
@api.one
@api.depends('outbound_payment_method_ids')
def _compute_check_printing_payment_method_selected(self):
self.check_printing_payment_method_selected = any(pm.code == 'check_printing' for pm in self.outbound_payment_method_ids)
@api.one
@api.depends('check_manual_sequencing')
def _get_check_next_number(self):
if self.check_sequence_id:
self.check_next_number = self.check_sequence_id.number_next_actual
else:
self.check_next_number = 1
@api.one
def _set_check_next_number(self):
if self.check_next_number < self.check_sequence_id.number_next_actual:
raise ValidationError(_("The last check number was %s. In order to avoid a check being rejected "
"by the bank, you can only use a greater number.") % self.check_sequence_id.number_next_actual)
if self.check_sequence_id:
self.check_sequence_id.sudo().number_next_actual = self.check_next_number
check_manual_sequencing = fields.Boolean('Manual Numbering', default=False,
help="Check this option if your pre-printed checks are not numbered.")
check_sequence_id = fields.Many2one('ir.sequence', 'Check Sequence', readonly=True, copy=False,
help="Checks numbering sequence.")
check_next_number = fields.Integer('Next Check Number', compute='_get_check_next_number', inverse='_set_check_next_number',
help="Sequence number of the next printed check.")
check_printing_payment_method_selected = fields.Boolean(compute='_compute_check_printing_payment_method_selected',
help="Technical feature used to know whether check printing was enabled as payment method.")
@api.model
def create(self, vals):
rec = super(AccountJournal, self).create(vals)
if not rec.check_sequence_id:
rec._create_check_sequence()
return rec
@api.one
def copy(self, default=None):
rec = super(AccountJournal, self).copy(default)
rec._create_check_sequence()
return rec
@api.one
def _create_check_sequence(self):
""" Create a check sequence for the journal """
self.check_sequence_id = self.env['ir.sequence'].sudo().create({
'name': self.name + _(" : Check Number Sequence"),
'implementation': 'no_gap',
'padding': 5,
'number_increment': 1,
'company_id': self.company_id.id,
})
def _default_outbound_payment_methods(self):
methods = super(AccountJournal, self)._default_outbound_payment_methods()
return methods + self.env.ref('account_check_printing.account_payment_method_check')
@api.model
def _enable_check_printing_on_bank_journals(self):
""" Enables check printing payment method and add a check sequence on bank journals.
Called upon module installation via data file.
"""
check_printing = self.env.ref('account_check_printing.account_payment_method_check')
bank_journals = self.search([('type', '=', 'bank')])
for bank_journal in bank_journals:
bank_journal._create_check_sequence()
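            # (4, id, _) is the ORM command tuple that links an existing
            # record into the many2many field without touching other links.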
bank_journal.write({
'outbound_payment_method_ids': [(4, check_printing.id, None)],
})
|
samukasmk/django_dashboard_example | refs/heads/master | smkcpanel_django/smkcpanel/views.py | 6027 | # Create your views here.
|
mmalyska/eve-wspace | refs/heads/develop | evewspace/account/nav_entries.py | 21 | from core.nav_registry import registry
registry.register('account_nav.html')
|
mrquim/mrquimrepo | refs/heads/master | repo/plugin.video.Rising.Tides/resources/modules/js2py/legecy_translators/constants.py | 31 | from string import ascii_lowercase, digits
##################################
StringName = u'PyJsConstantString%d_'
NumberName = u'PyJsConstantNumber%d_'
RegExpName = u'PyJsConstantRegExp%d_'
##################################
ALPHAS = set(ascii_lowercase + ascii_lowercase.upper())
NUMS = set(digits)
IDENTIFIER_START = ALPHAS.union(NUMS)
ESCAPE_CHARS = {'n', '0', 'b', 'f', 'r', 't', 'v', '"', "'", '\\'}
OCTAL = {'0', '1', '2', '3', '4', '5', '6', '7'}
HEX = set('0123456789abcdefABCDEF')
from utils import *
IDENTIFIER_PART = IDENTIFIER_PART.union({'.'})
def _is_cancelled(source, n):
cancelled = False
k = 0
while True:
k+=1
if source[n-k]!='\\':
break
cancelled = not cancelled
return cancelled
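# Illustrative: a quote preceded by two backslashes is NOT cancelled (they
# escape each other), while one preceded by a single backslash is.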
def _ensure_regexp(source, n): #<- this function has to be improved
    '''Returns True if a regexp literal starts at n, False otherwise
       (i.e. when the '/' is actually a division operator).'''
markers = '(+~"\'=[%:?!*^|&-,;/\\'
k = 0
while True:
k+=1
if n-k<0:
return True
char = source[n-k]
if char in markers:
return True
if char!=' ' and char!='\n':
break
return False
def parse_num(source, start, charset):
"""Returns a first index>=start of chat not in charset"""
while start<len(source) and source[start] in charset:
start+=1
return start
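# Illustrative: parse_num('123abc', 0, NUMS) == 3, the index of the first
# character outside the charset.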
def parse_exponent(source, start):
"""returns end of exponential, raises SyntaxError if failed"""
if not source[start] in ['e', 'E']:
if source[start] in IDENTIFIER_PART:
raise SyntaxError('Invalid number literal!')
return start
start += 1
if source[start] in ['-', '+']:
start += 1
FOUND = False
# we need at least one dig after exponent
while source[start] in NUMS:
FOUND = True
start+=1
if not FOUND or source[start] in IDENTIFIER_PART:
raise SyntaxError('Invalid number literal!')
return start
def remove_constants(source):
'''Replaces Strings and Regexp literals in the source code with
identifiers and *removes comments*. Identifier is of the format:
PyJsStringConst(String const number)_ - for Strings
PyJsRegExpConst(RegExp const number)_ - for RegExps
Returns dict which relates identifier and replaced constant.
Removes single line and multiline comments from JavaScript source code
Pseudo comments (inside strings) will not be removed.
For example this line:
var x = "/*PSEUDO COMMENT*/ TEXT //ANOTHER PSEUDO COMMENT"
will be unaltered'''
source=' '+source+'\n'
comments = []
inside_comment, single_comment = False, False
inside_single, inside_double = False, False
inside_regexp = False
regexp_class_count = 0
n = 0
while n < len(source):
char = source[n]
if char=='"' and not (inside_comment or inside_single or inside_regexp):
if not _is_cancelled(source, n):
if inside_double:
inside_double[1] = n+1
comments.append(inside_double)
inside_double = False
else:
inside_double = [n, None, 0]
elif char=="'" and not (inside_comment or inside_double or inside_regexp):
if not _is_cancelled(source, n):
if inside_single:
inside_single[1] = n+1
comments.append(inside_single)
inside_single = False
else:
inside_single = [n, None, 0]
elif (inside_single or inside_double):
if char in LINE_TERMINATOR:
if _is_cancelled(source, n):
if char==CR and source[n+1]==LF:
n+=1
n+=1
continue
else:
raise SyntaxError('Invalid string literal. Line terminators must be escaped!')
else:
if inside_comment:
if single_comment:
if char in LINE_TERMINATOR:
inside_comment[1] = n
comments.append(inside_comment)
inside_comment = False
single_comment = False
else: # Multiline
if char=='/' and source[n-1]=='*':
inside_comment[1] = n+1
comments.append(inside_comment)
inside_comment = False
elif inside_regexp:
if not quiting_regexp:
if char in LINE_TERMINATOR:
raise SyntaxError('Invalid regexp literal. Line terminators cant appear!')
if _is_cancelled(source, n):
n+=1
continue
if char=='[':
regexp_class_count += 1
elif char==']':
regexp_class_count = max(regexp_class_count-1, 0)
elif char=='/' and not regexp_class_count:
quiting_regexp = True
else:
if char not in IDENTIFIER_START:
inside_regexp[1] = n
comments.append(inside_regexp)
inside_regexp = False
elif char=='/' and source[n-1]=='/':
single_comment = True
inside_comment = [n-1, None, 1]
elif char=='*' and source[n-1]=='/':
inside_comment = [n-1, None, 1]
elif char=='/' and source[n+1] not in ('/', '*'):
if not _ensure_regexp(source, n): #<- improve this one
n+=1
continue #Probably just a division
quiting_regexp = False
inside_regexp = [n, None, 2]
elif not (inside_comment or inside_regexp):
if (char in NUMS and source[n-1] not in IDENTIFIER_PART) or char=='.':
if char=='.':
k = parse_num(source,n+1, NUMS)
if k==n+1: # just a stupid dot...
n+=1
continue
k = parse_exponent(source, k)
elif char=='0' and source[n+1] in ['x', 'X']: #Hex number probably
k = parse_num(source, n+2, HEX)
if k==n+2 or source[k] in IDENTIFIER_PART:
raise SyntaxError('Invalid hex literal!')
else: #int or exp or flot or exp flot
k = parse_num(source, n+1, NUMS)
if source[k]=='.':
k = parse_num(source, k+1, NUMS)
k = parse_exponent(source, k)
comments.append((n, k, 3))
n = k
continue
n+=1
res = ''
start = 0
count = 0
constants = {}
for end, next_start, typ in comments:
res += source[start:end]
start = next_start
if typ==0: # String
name = StringName
elif typ==1: # comment
continue
elif typ==2: # regexp
name = RegExpName
elif typ==3: # number
name = NumberName
else:
raise RuntimeError()
res += ' '+name % count+' '
constants[name % count] = source[end: next_start]
count += 1
res+=source[start:]
# remove this stupid white space
for e in WHITE:
res = res.replace(e, ' ')
res = res.replace(CR+LF, '\n')
for e in LINE_TERMINATOR:
res = res.replace(e, '\n')
return res.strip(), constants
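# Illustrative: remove_constants('var x = "hi" // note') returns roughly
# ('var x =  PyJsConstantString0_', {'PyJsConstantString0_': '"hi"'}) -- the
# literal is lifted into the dict and the comment is dropped (exact
# whitespace may differ).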
def recover_constants(py_source, replacements): #now has n^2 complexity. improve to n
    '''Converts identifiers representing Js constants back into PyJs constants,
    e.g. PyJsConstantNumber1_ whose recorded value is 5 becomes Js(5).'''
for identifier, value in replacements.iteritems():
if identifier.startswith('PyJsConstantRegExp'):
py_source = py_source.replace(identifier, 'JsRegExp(%s)'%repr(value))
elif identifier.startswith('PyJsConstantString'):
py_source = py_source.replace(identifier, 'Js(u%s)' % unify_string_literals(value))
else:
py_source = py_source.replace(identifier, 'Js(%s)'%value)
return py_source
def unify_string_literals(js_string):
"""this function parses the string just like javascript
for example literal '\d' in JavaScript would be interpreted
as 'd' - backslash would be ignored and in Pyhon this
would be interpreted as '\\d' This function fixes this problem."""
n = 0
res = ''
limit = len(js_string)
while n < limit:
char = js_string[n]
if char=='\\':
new, n = do_escape(js_string, n)
res += new
else:
res += char
n += 1
return res
def unify_regexp_literals(js):
pass
def do_escape(source, n):
"""Its actually quite complicated to cover every case :)
http://www.javascriptkit.com/jsref/escapesequence.shtml"""
if not n+1 < len(source):
        return '', n + 1 # not possible here; callers unpack a (text, index) pair
if source[n+1] in LINE_TERMINATOR:
if source[n+1]==CR and n+2<len(source) and source[n+2]==LF:
return source[n:n+3], n+3
return source[n:n+2], n+2
if source[n+1] in ESCAPE_CHARS:
return source[n:n+2], n+2
if source[n+1]in ['x', 'u']:
char, length = ('u', 4) if source[n+1]=='u' else ('x', 2)
n+=2
end = parse_num(source, n, HEX)
if end-n < length:
raise SyntaxError('Invalid escape sequence!')
#if length==4:
# return unichr(int(source[n:n+4], 16)), n+4 # <- this was a very bad way of solving this problem :)
return source[n-2:n+length], n+length
if source[n+1] in OCTAL:
n += 1
end = parse_num(source, n, OCTAL)
end = min(end, n+3) # cant be longer than 3
# now the max allowed is 377 ( in octal) and 255 in decimal
max_num = 255
num = 0
len_parsed = 0
for e in source[n:end]:
cand = 8*num + int(e)
if cand > max_num:
break
num = cand
len_parsed += 1
# we have to return in a different form because python may want to parse more...
# for example '\777' will be parsed by python as a whole while js will use only \77
return '\\' + hex(num)[1:], n + len_parsed
return source[n+1], n+2
#####TEST######
if __name__=='__main__':
test = ('''
''')
t, d = remove_constants(test)
print t, d |
cman131/MakeFriendsYouCanCallByNicknamesEvenWhenYouAreOld | refs/heads/master | server/app/tabletop_generator.py | 1 | from PIL import Image
from io import BytesIO
from app import tabletop_entity, config
import json, glob, os, requests, base64
class TableTopGenerator:
@staticmethod
def getWeissImages(cards):
baseurl = 'http://ws-tcg.com/en/cardlist/cardimages/'
for card in cards:
isGif = False
isPng = False
baseImageName = card.number.replace('/EN', 'EN').replace('/', '_')
imagenamelwr = baseImageName.lower()
imagenameupr = imagenamelwr.upper()
if imagenamelwr[-1] in ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i']:
imagenameupr = imagenameupr[:-1] + imagenamelwr[-1]
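            # The card site's file naming is inconsistent, so build every
            # plausible combination of prefix ('ws_'/'WS_'), letter case and
            # rarity suffix (TD/U/R/C/CC/CR/RR/RRR/PR) and probe each URL
            # until one resolves.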
imagenames = [baseImageName, 'ws_' + baseImageName, 'WS_' + baseImageName,
imagenamelwr, 'ws_' + imagenamelwr, imagenameupr, 'WS_' + imagenameupr,
imagenamelwr + '_td', 'ws_' + imagenamelwr + '_td', imagenameupr + '_TD', 'WS_' + imagenameupr + '_TD',
imagenamelwr + 'td', 'ws_' + imagenamelwr + 'td', imagenameupr + 'TD', 'WS_' + imagenameupr + 'TD',
imagenamelwr + 'u', 'ws_' + imagenamelwr + 'u', imagenameupr + 'U', 'WS_' + imagenameupr + 'U',
imagenamelwr + 'r', 'ws_' + imagenamelwr + 'r', imagenameupr + 'R', 'WS_' + imagenameupr + 'R',
imagenamelwr + 'cc', 'ws_' + imagenamelwr + 'cc', imagenameupr + 'CC', 'WS_' + imagenameupr + 'CC',
imagenamelwr + 'c', 'ws_' + imagenamelwr + 'c', imagenameupr + 'C', 'WS_' + imagenameupr + 'C',
imagenamelwr + 'cr', 'ws_' + imagenamelwr + 'cr', imagenameupr + 'CR', 'WS_' + imagenameupr + 'CR',
imagenamelwr + 'rr', 'ws_' + imagenamelwr + 'rr', imagenameupr + 'RR', 'WS_' + imagenameupr + 'RR',
imagenamelwr + 'rrplus', 'ws_' + imagenamelwr + 'rrplus', imagenameupr + 'RRPlus', 'WS_' + imagenameupr + 'RRPlus',
imagenamelwr + 'rrr', 'ws_' + imagenamelwr + 'rrr', imagenameupr + 'RRR', 'WS_' + imagenameupr + 'RRR',
imagenamelwr + 'pr', 'ws_' + imagenamelwr + 'pr', imagenameupr + 'PR', 'WS_' + imagenameupr + 'PR']
otherNames = [x.replace('-', '_') for x in imagenames]
imagenames += otherNames
imagenames = [baseurl + x for x in imagenames]
imgResponse = requests.get(baseurl + imagenamelwr + '.jpg')
#PNG
for imagename in imagenames:
if imgResponse.status_code != 404:
break
imgResponse = requests.get(imagename + '.png')
isPng = True
isGif = False
# JPEG
for imagename in imagenames:
if imgResponse.status_code != 404:
break
imgResponse = requests.get(imagename + '.jpg')
isPng = False
isGif = False
#GIF
for imagename in imagenames:
if imgResponse.status_code != 404:
break
imgResponse = requests.get(imagename + '.gif')
isGif = True
isPng = False
if imgResponse.status_code != 404:
img = Image.open(BytesIO(imgResponse.content))
imgSaveName = 'app/images/' + imagenamelwr + '.jpg'
if isGif:
bg = Image.new("RGB", img.size, (255,255,255))
bg.paste(img, (0, 0))
bg.save(imgSaveName, 'JPEG')
elif isPng:
bg = Image.new('RGBA',img.size,(255,255,255,255))
bg.paste(img, (0, 0))
bg.save(imgSaveName, 'JPEG')
else:
img.save(imgSaveName, 'JPEG')
card.imageurl = imgSaveName
card.save(card.id)
print(imgSaveName + ' retrieved')
@staticmethod
def generateTableTopJson(name, description, cards, cardBackKey='magic', local=False):
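        # Tabletop Simulator deck sheets are a 10x7 grid (70 slots); the
        # last slot is conventionally reserved for the hidden-card image,
        # hence chunks of at most 69 cards per sheet.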
cardSets = [cards[x:x+69] for x in range(0, len(cards), 69)]
tableTopObject = tabletop_entity.TableTopObjectState(name, description)
for cardset in cardSets:
size = (409, 585)
images = []
tableCards = []
for card in cardset:
img = None
if not local:
imgResponse = requests.get(card["imageUrl"])
img = Image.open(BytesIO(imgResponse.content))
else:
img = Image.open(card['imageUrl'])
for i in range(card["count"]):
images.append(img.resize(size, Image.ANTIALIAS))
tableCards.append(card)
dimensions = (size[0]*10, size[1]*7)
#creates a new empty image, RGB mode, and size 4096 by 4096.
newImg = Image.new('RGB', dimensions)
cur = 0
for i in range(0,dimensions[1],size[1]):
if(cur >= len(images)):
break
for j in range(0,dimensions[0],size[0]):
if (cur >= len(images)):
break
#paste the image at location j,i:
newImg.paste(images[cur], (j,i))
cur += 1
#newImg.save("grid"+str(curIndex+1)+".jpg", "JPEG")
temp = BytesIO()
newImg.save(temp, 'JPEG')
temp.seek(0)
b64image = base64.b64encode(temp.read())
# data to send with the POST request
payload = {
'image': b64image,
'title': 'apiTest'
}
imgUrl = requests.post(
"https://api.imgur.com/3/image",
headers={
'Authorization': 'Client-ID ' + config.IMGUR_CLIENT_ID
},
data=payload
)
response = imgUrl.json()
if not response['success']:
raise Exception("Image upload failed")
tableTopObject.addDeck(response['data']['link'], tableCards, cardBackKey)
return tabletop_entity.TableTopSave([tableTopObject]).getJson()
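    # Illustrative call (assumes card dicts carry 'imageUrl' and 'count'
    # keys, and that 'magic' is a card-back key known to tabletop_entity):
    #   TableTopGenerator.generateTableTopJson(
    #       'My Deck', 'demo', [{'imageUrl': 'app/images/c1.jpg', 'count': 3}],
    #       local=True)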
|
merry-bits/DCache | refs/heads/master | client/src/dcache_client/zmq_client.py | 1 | from enum import Enum
from enum import unique
from logging import getLogger
# noinspection PyUnresolvedReferences
from zmq import Context, REQ, RCVTIMEO, SNDTIMEO
_LOG = getLogger(__name__)
class Cache:
_VERSION = b"1"
_GET = b"get"
_SET = b"set"
ENCODING = "utf-8"
IO_TIMEOUT = 5 * 1000 # milliseconds
@unique
class Error(Enum):
NO_ERROR = b"0"
TOO_BIG = b"1"
TIMEOUT = b"2"
UNKNOWN_REQUEST = b"998"
VERSION_NOT_SUPPORTED = b"999"
def __init__(self, server_api_address, context=None):
if context is None:
context = Context()
_LOG.debug("Created new context.")
self._context = context
self._api_socket = self._context.socket(REQ)
self._api_socket.setsockopt(RCVTIMEO, self.IO_TIMEOUT)
self._api_socket.setsockopt(SNDTIMEO, self.IO_TIMEOUT)
self._api_socket.connect(server_api_address)
_LOG.debug(f"Connected to: {server_api_address}")
def _make_request(self, *data):
self._api_socket.send_multipart(data)
response = self._api_socket.recv_multipart()
return response
def get(self, key):
"""
:type key: str
:rtype: str
"""
key = key.encode(Cache.ENCODING)
response = self._make_request(Cache._VERSION, Cache._GET, key)
error = Cache.Error(response[0])
value = response[1] if error == Cache.Error.NO_ERROR else b""
value = value.decode(Cache.ENCODING)
return value
def set(self, key, value):
"""
:type key: str
:type value: str
:rtype: Cache.Error
"""
key = key.encode(Cache.ENCODING)
value = (value or "").encode(Cache.ENCODING)
error = self._make_request(Cache._VERSION, Cache._SET, key, value)
return Cache.Error(error[0])
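# Minimal usage sketch (assumes a dcache server is listening at the address):
#
#   cache = Cache("tcp://127.0.0.1:5555")
#   if cache.set("greeting", "hello") == Cache.Error.NO_ERROR:
#       print(cache.get("greeting"))  # -> 'hello'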
|
leth/nose2 | refs/heads/master | nose2/tests/unit/test_testid_plugin.py | 18 | """Test testid plugin."""
import os.path
import pickle
from six import StringIO
from nose2 import session
from nose2.events import ReportTestEvent
from nose2.plugins import testid
from nose2.tests._common import (FakeStartTestEvent, FakeLoadFromNameEvent,
FakeLoadFromNamesEvent, TestCase)
class UnitTestTestId(TestCase):
"""Test class TestId.
Tests are carried out in a temporary directory, since TestId stores state
to file. The temporary directory is removed after testing.
"""
tags = ['unit']
_RUN_IN_TEMP = True
def setUp(self):
super(UnitTestTestId, self).setUp()
self.stream = StringIO()
self.session = session.Session()
self.plugin = testid.TestId(session=self.session)
def test___init__(self):
"""Test the __init__ method."""
plug = self.plugin
# Test attributes
        for name, exp_val in [
                ('configSection', 'testid'),
                ('commandLineSwitch',
                    ('I', 'with-id', 'Add test ids to output')),
                ('idfile', os.path.abspath('.noseids')),
                ('ids', {}),
                ('tests', {}),
                ('id', 0)]:
try:
val = getattr(plug, name)
except AttributeError:
self.fail(
'TestId instance doesn\'t have attribute %s' % (name,))
self.assertEqual(val, exp_val, 'Attribute %s should have value '
'\'%s\', but has value %s' % (name, exp_val, val))
def test_start_test(self):
"""Test reportStartTest method."""
self.session.verbosity = 2
event = ReportTestEvent(FakeStartTestEvent(self), self.stream)
plug = self.plugin
plug.reportStartTest(event)
self.assertEqual(plug.id, 1)
test_id = self.id()
self.assertEqual(plug.ids, {1: test_id})
self.assertEqual(plug.tests, {test_id: 1})
self.assertEqual(self.stream.getvalue(), '#1 ')
def test_start_test_twice(self):
"""Test calling reportStartTest twice."""
self.session.verbosity = 2
event = ReportTestEvent(FakeStartTestEvent(self), self.stream)
plug = self.plugin
plug.reportStartTest(event)
plug.reportStartTest(event)
self.assertEqual(plug.id, 1)
test_id = self.id()
self.assertEqual(plug.ids, {1: test_id})
self.assertEqual(plug.tests, {test_id: 1})
self.assertEqual(self.stream.getvalue(), '#1 #1 ')
def test_stop_test_run(self):
"""Test stopTestRun method."""
plug = self.plugin
plug.reportStartTest(
ReportTestEvent(FakeStartTestEvent(self), self.stream))
plug.stopTestRun(None)
fh = open(plug.idfile, 'rb')
try:
data = pickle.load(fh)
finally:
fh.close()
self.assertEqual(data, {'ids': plug.ids, 'tests': plug.tests})
def test_load_tests_from_name(self):
"""Test loadTestsFromName method."""
plug = self.plugin
# By first starting/stopping a test, an ID is assigned by the plugin
plug.reportStartTest(
ReportTestEvent(FakeStartTestEvent(self), self.stream))
plug.stopTestRun(None)
event = FakeLoadFromNameEvent('1')
plug.loadTestsFromName(event)
# The numeric ID should be translated to this test's ID
self.assertEqual(event.name, self.id())
def test_load_tests_from_name_no_ids(self):
"""Test calling loadTestsFromName when no IDs have been saved."""
plug = self.plugin
event = FakeLoadFromNameEvent('1')
plug.loadTestsFromName(event)
# The event's name should be unchanged, since no IDs should be mapped
self.assertEqual(event.name, '1')
def test_load_tests_from_names(self):
"""Test loadTestsFromNames method."""
plug = self.plugin
# By first starting/stopping a test, an ID is assigned by the plugin
plug.reportStartTest(
ReportTestEvent(FakeStartTestEvent(self), self.stream))
plug.stopTestRun(None)
event = FakeLoadFromNamesEvent(['1', '2'])
plug.loadTestsFromNames(event)
name1, name2 = event.names
# The first numeric ID should be translated to this test's ID
self.assertEqual(name1, self.id())
# The second one should not have a match
self.assertEqual(name2, '2')
|
machinaut/gym | refs/heads/master | gym/envs/algorithmic/algorithmic_env.py | 3 | from gym import Env
from gym.spaces import Discrete, Tuple
from gym.utils import colorize, seeding
import numpy as np
from six import StringIO
import sys
import math
hash_base = None
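# ha() folds a 1-D or 2-D tape position (a numpy int array) into a single
# scalar key for the sparse content dict; hash_base is (re)bound per
# environment in AlgorithmicEnv.__init__.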
def ha(array):
return (hash_base * (array + 5)).sum()
class AlgorithmicEnv(Env):
metadata = {'render.modes': ['human', 'ansi']}
def __init__(self, inp_dim=1, base=10, chars=False):
global hash_base
hash_base = 50 ** np.arange(inp_dim)
self.base = base
self.last = 10
self.total_reward = 0
self.sum_reward = 0
AlgorithmicEnv.sum_rewards = []
self.chars = chars
self.inp_dim = inp_dim
AlgorithmicEnv.current_length = 2
tape_control = []
self.action_space = Tuple(([Discrete(2 * self.inp_dim), Discrete(2), Discrete(self.base)]))
self.observation_space = Discrete(self.base + 1)
self._seed()
self.reset()
def _seed(self, seed=None):
self.np_random, seed = seeding.np_random(seed)
return [seed]
def _get_obs(self, pos=None):
if pos is None:
pos = self.x
assert isinstance(pos, np.ndarray) and pos.shape[0] == self.inp_dim
if ha(pos) not in self.content:
self.content[ha(pos)] = self.base
return self.content[ha(pos)]
def _get_str_obs(self, pos=None):
ret = self._get_obs(pos)
if ret == self.base:
return " "
else:
if self.chars:
return chr(ret + ord('A'))
return str(ret)
def _get_str_target(self, pos=None):
if pos not in self.target:
return " "
else:
ret = self.target[pos]
if self.chars:
return chr(ret + ord('A'))
return str(ret)
def _render_observation(self):
x = self.x
if self.inp_dim == 1:
x_str = "Observation Tape : "
for i in range(-2, self.total_len + 2):
if i == x:
x_str += colorize(self._get_str_obs(np.array([i])), 'green', highlight=True)
else:
x_str += self._get_str_obs(np.array([i]))
x_str += "\n"
return x_str
elif self.inp_dim == 2:
label = "Observation Grid : "
x_str = ""
for j in range(-1, 3):
if j != -1:
x_str += " " * len(label)
for i in range(-2, self.total_len + 2):
if i == x[0] and j == x[1]:
x_str += colorize(self._get_str_obs(np.array([i, j])), 'green', highlight=True)
else:
x_str += self._get_str_obs(np.array([i, j]))
x_str += "\n"
x_str = label + x_str
return x_str
else:
assert False
def _render(self, mode='human', close=False):
if close:
# Nothing interesting to close
return
outfile = StringIO() if mode == 'ansi' else sys.stdout
inp = "Total length of input instance: %d, step: %d\n" % (self.total_len, self.time)
outfile.write(inp)
x, y, action = self.x, self.y, self.last_action
if action is not None:
inp_act, out_act, pred = action
outfile.write("=" * (len(inp) - 1) + "\n")
y_str = "Output Tape : "
target_str = "Targets : "
if action is not None:
if self.chars:
pred_str = chr(pred + ord('A'))
else:
pred_str = str(pred)
x_str = self._render_observation()
max_len = int(self.total_reward) + 1
for i in range(-2, max_len):
if i not in self.target:
y_str += " "
continue
target_str += self._get_str_target(i)
if i < y - 1:
y_str += self._get_str_target(i)
elif i == (y - 1):
if action is not None and out_act == 1:
if pred == self.target[i]:
y_str += colorize(pred_str, 'green', highlight=True)
else:
y_str += colorize(pred_str, 'red', highlight=True)
else:
y_str += self._get_str_target(i)
outfile.write(x_str)
outfile.write(y_str + "\n")
outfile.write(target_str + "\n\n")
if action is not None:
outfile.write("Current reward : %.3f\n" % self.reward)
outfile.write("Cumulative reward : %.3f\n" % self.sum_reward)
move = ""
if inp_act == 0:
move = "left"
elif inp_act == 1:
move = "right"
elif inp_act == 2:
move += "up"
elif inp_act == 3:
move += "down"
outfile.write("Action : Tuple(move over input: %s,\n" % move)
if out_act == 1:
out_act = "True"
else:
out_act = "False"
outfile.write(" write to the output tape: %s,\n" % out_act)
outfile.write(" prediction: %s)\n" % pred_str)
else:
outfile.write("\n" * 5)
return outfile
def _step(self, action):
self.last_action = action
inp_act, out_act, pred = action
done = False
reward = 0.0
        self.time += 1
        # We are outside the sample.
        if self.y not in self.target:
reward = -10.0
done = True
else:
if out_act == 1:
if pred == self.target[self.y]:
reward = 1.0
else:
reward = -0.5
done = True
self.y += 1
if self.y not in self.target:
done = True
if inp_act == 0:
self.x[0] -= 1
elif inp_act == 1:
self.x[0] += 1
elif inp_act == 2:
self.x[1] -= 1
elif inp_act == 3:
self.x[1] += 1
if self.time > self.total_len + self.total_reward + 4:
reward = -1.0
done = True
obs = self._get_obs()
self.reward = reward
self.sum_reward += reward
return (obs, reward, done, {})
def _reset(self):
self.last_action = None
self.x = np.zeros(self.inp_dim).astype(np.int)
self.y = 0
AlgorithmicEnv.sum_rewards.append(self.sum_reward - self.total_reward)
AlgorithmicEnv.sum_rewards = AlgorithmicEnv.sum_rewards[-self.last:]
if len(AlgorithmicEnv.sum_rewards) == self.last and \
min(AlgorithmicEnv.sum_rewards) >= -1.0 and \
AlgorithmicEnv.current_length < 30:
AlgorithmicEnv.current_length += 1
AlgorithmicEnv.sum_rewards = []
self.sum_reward = 0.0
self.time = 0
self.total_len = self.np_random.randint(3) + AlgorithmicEnv.current_length
self.set_data()
return self._get_obs()
|
luiseduardohdbackup/odoo | refs/heads/8.0 | addons/website_crm/__openerp__.py | 321 | {
'name': 'Contact Form',
'category': 'Website',
'website': 'https://www.odoo.com/page/website-builder',
'summary': 'Create Leads From Contact Form',
'version': '1.0',
'description': """
OpenERP Contact Form
====================
""",
'author': 'OpenERP SA',
'depends': ['website_partner', 'crm'],
'data': [
'data/website_crm_data.xml',
'views/website_crm.xml',
],
'installable': True,
'auto_install': False,
}
|
TeachAtTUM/edx-platform | refs/heads/master | common/djangoapps/entitlements/api/v1/urls.py | 4 | from django.conf.urls import url, include
from rest_framework.routers import DefaultRouter
from .views import EntitlementViewSet, EntitlementEnrollmentViewSet
router = DefaultRouter()
router.register(r'entitlements', EntitlementViewSet, base_name='entitlements')
ENROLLMENTS_VIEW = EntitlementEnrollmentViewSet.as_view({
'post': 'create',
'delete': 'destroy',
})
urlpatterns = [
url(r'', include(router.urls)),
url(
r'entitlements/(?P<uuid>{regex})/enrollments$'.format(regex=EntitlementViewSet.ENTITLEMENT_UUID4_REGEX),
ENROLLMENTS_VIEW,
name='enrollments'
)
]
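# Resulting URL space (sketch): the router exposes the standard ViewSet routes
# under 'entitlements/', and ENROLLMENTS_VIEW adds
# 'entitlements/<uuid>/enrollments', accepting POST (create) and DELETE (destroy).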
|
wdaher/zulip | refs/heads/master | zerver/management/commands/create_realm.py | 115 | from __future__ import absolute_import
from optparse import make_option
from django.conf import settings
from django.core.management.base import BaseCommand
from zerver.lib.actions import do_create_realm, set_default_streams
from zerver.models import RealmAlias
if not settings.VOYAGER:
from zilencer.models import Deployment
import re
import sys
class Command(BaseCommand):
help = """Create a realm for the specified domain.
Usage: python manage.py create_realm --domain=foo.com --name='Foo, Inc.'"""
option_list = BaseCommand.option_list + (
make_option('-o', '--open-realm',
dest='open_realm',
action="store_true",
default=False,
help='Make this an open realm.'),
make_option('-d', '--domain',
dest='domain',
type='str',
help='The domain for the realm.'),
make_option('-n', '--name',
dest='name',
type='str',
help='The user-visible name for the realm.'),
make_option('--deployment',
dest='deployment_id',
type='int',
default=None,
help='Optionally, the ID of the deployment you want to associate the realm with.'),
)
def validate_domain(self, domain):
# Domains can't contain whitespace if they are to be used in memcached
# keys.
if re.search("\s", domain):
raise ValueError("Domains can't contain whitespace")
# Domains must look like domains, ie have the structure of
# <subdomain(s)>.<tld>. One reason for this is that bots need
# to have valid looking emails.
if len(domain.split(".")) < 2:
raise ValueError("Domains must contain a '.'")
if RealmAlias.objects.filter(domain=domain).count() > 0:
raise ValueError("Cannot create a new realm that is already an alias for an existing realm")
def handle(self, *args, **options):
if options["domain"] is None or options["name"] is None:
print >>sys.stderr, "\033[1;31mPlease provide both a domain and name.\033[0m\n"
self.print_help("python manage.py", "create_realm")
exit(1)
if options["open_realm"] and options["deployment_id"] is not None:
print >>sys.stderr, "\033[1;31mExternal deployments cannot be open realms.\033[0m\n"
self.print_help("python manage.py", "create_realm")
exit(1)
if options["deployment_id"] is not None and settings.VOYAGER:
print >>sys.stderr, "\033[1;31mExternal deployments are not supported on voyager deployments.\033[0m\n"
exit(1)
domain = options["domain"]
name = options["name"]
self.validate_domain(domain)
realm, created = do_create_realm(
domain, name, restricted_to_domain=not options["open_realm"])
if created:
print domain, "created."
if options["deployment_id"] is not None:
deployment = Deployment.objects.get(id=options["deployment_id"])
deployment.realms.add(realm)
deployment.save()
print "Added to deployment", str(deployment.id)
elif settings.ZULIP_COM:
deployment = Deployment.objects.get(base_site_url="https://zulip.com/")
deployment.realms.add(realm)
deployment.save()
# In the else case, we are not using the Deployments feature.
set_default_streams(realm, ["social", "engineering"])
print "\033[1;36mDefault streams set to social,engineering,zulip!\033[0m"
else:
print domain, "already exists."
|
zzeleznick/zDjango | refs/heads/master | venv/lib/python2.7/site-packages/django/conf/locale/da/formats.py | 315 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j. F Y'
TIME_FORMAT = 'H:i'
DATETIME_FORMAT = 'j. F Y H:i'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j. F'
SHORT_DATE_FORMAT = 'd.m.Y'
SHORT_DATETIME_FORMAT = 'd.m.Y H:i'
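# For example, 25 October 2006 14:30 renders as '25. oktober 2006 14:30' with
# DATETIME_FORMAT and as '25.10.2006 14:30' with SHORT_DATETIME_FORMAT (month
# names come from the active Danish locale).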
FIRST_DAY_OF_WEEK = 1
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = (
'%d.%m.%Y', # '25.10.2006'
)
DATETIME_INPUT_FORMATS = (
'%d.%m.%Y %H:%M:%S', # '25.10.2006 14:30:59'
'%d.%m.%Y %H:%M:%S.%f', # '25.10.2006 14:30:59.000200'
'%d.%m.%Y %H:%M', # '25.10.2006 14:30'
)
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
NUMBER_GROUPING = 3
|
repotvsupertuga/tvsupertuga.repository | refs/heads/master | script.module.livestreamer/lib/livestreamer/plugins/letontv.py | 34 | import re
from livestreamer.plugin import Plugin
from livestreamer.plugin.api import http, validate
from livestreamer.stream import RTMPStream
PLAYER_URL = "http://leton.tv/player.php"
SWF_URL = "http://files.leton.tv/jwplayer.flash.swf"
_url_re = re.compile("""
    https?://(\w+\.)?leton\.tv
(?:
/player\.php\?.*streampage=
)?
(?:
/broadcast/
)?
(?P<streampage>[^/?&]+)
""", re.VERBOSE)
_js_var_re = re.compile("var (?P<var>\w+)\s?=\s?'?(?P<value>[^;']+)'?;")
_rtmp_re = re.compile("/(?P<app>[^/]+)/(?P<playpath>.+)")
def _parse_server_ip(values):
octets = [
values["a"] / values["f"],
values["b"] / values["f"],
values["c"] / values["f"],
values["d"] / values["f"],
]
return ".".join(str(int(octet)) for octet in octets)
_schema = validate.Schema(
validate.transform(_js_var_re.findall),
validate.transform(dict),
{
"a": validate.transform(int),
"b": validate.transform(int),
"c": validate.transform(int),
"d": validate.transform(int),
"f": validate.transform(int),
"v_part": validate.text,
},
validate.union({
"server_ip": validate.transform(_parse_server_ip),
"path": validate.all(
validate.get("v_part"),
validate.transform(_rtmp_re.findall),
validate.get(0)
)
})
)
class LetOnTV(Plugin):
@classmethod
def can_handle_url(self, url):
return _url_re.match(url)
def _get_streams(self):
match = _url_re.match(self.url)
info = http.get(PLAYER_URL, params=match.groupdict(), schema=_schema)
if not info["path"]:
return
app, playpath = info["path"]
stream = RTMPStream(self.session, {
"rtmp": "rtmp://{0}/{1}".format(info["server_ip"], app),
"playpath": playpath,
"pageUrl": self.url,
"swfUrl": SWF_URL,
"live": True
})
return dict(live=stream)
__plugin__ = LetOnTV
|
denfromufa/PTVS | refs/heads/master | Python/Tests/TestData/VirtualEnv/env/Lib/encodings/iso8859_6.py | 93 | """ Python Character Mapping Codec iso8859_6 generated from 'MAPPINGS/ISO8859/8859-6.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_table)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='iso8859-6',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
### Decoding Table
decoding_table = (
u'\x00' # 0x00 -> NULL
u'\x01' # 0x01 -> START OF HEADING
u'\x02' # 0x02 -> START OF TEXT
u'\x03' # 0x03 -> END OF TEXT
u'\x04' # 0x04 -> END OF TRANSMISSION
u'\x05' # 0x05 -> ENQUIRY
u'\x06' # 0x06 -> ACKNOWLEDGE
u'\x07' # 0x07 -> BELL
u'\x08' # 0x08 -> BACKSPACE
u'\t' # 0x09 -> HORIZONTAL TABULATION
u'\n' # 0x0A -> LINE FEED
u'\x0b' # 0x0B -> VERTICAL TABULATION
u'\x0c' # 0x0C -> FORM FEED
u'\r' # 0x0D -> CARRIAGE RETURN
u'\x0e' # 0x0E -> SHIFT OUT
u'\x0f' # 0x0F -> SHIFT IN
u'\x10' # 0x10 -> DATA LINK ESCAPE
u'\x11' # 0x11 -> DEVICE CONTROL ONE
u'\x12' # 0x12 -> DEVICE CONTROL TWO
u'\x13' # 0x13 -> DEVICE CONTROL THREE
u'\x14' # 0x14 -> DEVICE CONTROL FOUR
u'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
u'\x16' # 0x16 -> SYNCHRONOUS IDLE
u'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
u'\x18' # 0x18 -> CANCEL
u'\x19' # 0x19 -> END OF MEDIUM
u'\x1a' # 0x1A -> SUBSTITUTE
u'\x1b' # 0x1B -> ESCAPE
u'\x1c' # 0x1C -> FILE SEPARATOR
u'\x1d' # 0x1D -> GROUP SEPARATOR
u'\x1e' # 0x1E -> RECORD SEPARATOR
u'\x1f' # 0x1F -> UNIT SEPARATOR
u' ' # 0x20 -> SPACE
u'!' # 0x21 -> EXCLAMATION MARK
u'"' # 0x22 -> QUOTATION MARK
u'#' # 0x23 -> NUMBER SIGN
u'$' # 0x24 -> DOLLAR SIGN
u'%' # 0x25 -> PERCENT SIGN
u'&' # 0x26 -> AMPERSAND
u"'" # 0x27 -> APOSTROPHE
u'(' # 0x28 -> LEFT PARENTHESIS
u')' # 0x29 -> RIGHT PARENTHESIS
u'*' # 0x2A -> ASTERISK
u'+' # 0x2B -> PLUS SIGN
u',' # 0x2C -> COMMA
u'-' # 0x2D -> HYPHEN-MINUS
u'.' # 0x2E -> FULL STOP
u'/' # 0x2F -> SOLIDUS
u'0' # 0x30 -> DIGIT ZERO
u'1' # 0x31 -> DIGIT ONE
u'2' # 0x32 -> DIGIT TWO
u'3' # 0x33 -> DIGIT THREE
u'4' # 0x34 -> DIGIT FOUR
u'5' # 0x35 -> DIGIT FIVE
u'6' # 0x36 -> DIGIT SIX
u'7' # 0x37 -> DIGIT SEVEN
u'8' # 0x38 -> DIGIT EIGHT
u'9' # 0x39 -> DIGIT NINE
u':' # 0x3A -> COLON
u';' # 0x3B -> SEMICOLON
u'<' # 0x3C -> LESS-THAN SIGN
u'=' # 0x3D -> EQUALS SIGN
u'>' # 0x3E -> GREATER-THAN SIGN
u'?' # 0x3F -> QUESTION MARK
u'@' # 0x40 -> COMMERCIAL AT
u'A' # 0x41 -> LATIN CAPITAL LETTER A
u'B' # 0x42 -> LATIN CAPITAL LETTER B
u'C' # 0x43 -> LATIN CAPITAL LETTER C
u'D' # 0x44 -> LATIN CAPITAL LETTER D
u'E' # 0x45 -> LATIN CAPITAL LETTER E
u'F' # 0x46 -> LATIN CAPITAL LETTER F
u'G' # 0x47 -> LATIN CAPITAL LETTER G
u'H' # 0x48 -> LATIN CAPITAL LETTER H
u'I' # 0x49 -> LATIN CAPITAL LETTER I
u'J' # 0x4A -> LATIN CAPITAL LETTER J
u'K' # 0x4B -> LATIN CAPITAL LETTER K
u'L' # 0x4C -> LATIN CAPITAL LETTER L
u'M' # 0x4D -> LATIN CAPITAL LETTER M
u'N' # 0x4E -> LATIN CAPITAL LETTER N
u'O' # 0x4F -> LATIN CAPITAL LETTER O
u'P' # 0x50 -> LATIN CAPITAL LETTER P
u'Q' # 0x51 -> LATIN CAPITAL LETTER Q
u'R' # 0x52 -> LATIN CAPITAL LETTER R
u'S' # 0x53 -> LATIN CAPITAL LETTER S
u'T' # 0x54 -> LATIN CAPITAL LETTER T
u'U' # 0x55 -> LATIN CAPITAL LETTER U
u'V' # 0x56 -> LATIN CAPITAL LETTER V
u'W' # 0x57 -> LATIN CAPITAL LETTER W
u'X' # 0x58 -> LATIN CAPITAL LETTER X
u'Y' # 0x59 -> LATIN CAPITAL LETTER Y
u'Z' # 0x5A -> LATIN CAPITAL LETTER Z
u'[' # 0x5B -> LEFT SQUARE BRACKET
u'\\' # 0x5C -> REVERSE SOLIDUS
u']' # 0x5D -> RIGHT SQUARE BRACKET
u'^' # 0x5E -> CIRCUMFLEX ACCENT
u'_' # 0x5F -> LOW LINE
u'`' # 0x60 -> GRAVE ACCENT
u'a' # 0x61 -> LATIN SMALL LETTER A
u'b' # 0x62 -> LATIN SMALL LETTER B
u'c' # 0x63 -> LATIN SMALL LETTER C
u'd' # 0x64 -> LATIN SMALL LETTER D
u'e' # 0x65 -> LATIN SMALL LETTER E
u'f' # 0x66 -> LATIN SMALL LETTER F
u'g' # 0x67 -> LATIN SMALL LETTER G
u'h' # 0x68 -> LATIN SMALL LETTER H
u'i' # 0x69 -> LATIN SMALL LETTER I
u'j' # 0x6A -> LATIN SMALL LETTER J
u'k' # 0x6B -> LATIN SMALL LETTER K
u'l' # 0x6C -> LATIN SMALL LETTER L
u'm' # 0x6D -> LATIN SMALL LETTER M
u'n' # 0x6E -> LATIN SMALL LETTER N
u'o' # 0x6F -> LATIN SMALL LETTER O
u'p' # 0x70 -> LATIN SMALL LETTER P
u'q' # 0x71 -> LATIN SMALL LETTER Q
u'r' # 0x72 -> LATIN SMALL LETTER R
u's' # 0x73 -> LATIN SMALL LETTER S
u't' # 0x74 -> LATIN SMALL LETTER T
u'u' # 0x75 -> LATIN SMALL LETTER U
u'v' # 0x76 -> LATIN SMALL LETTER V
u'w' # 0x77 -> LATIN SMALL LETTER W
u'x' # 0x78 -> LATIN SMALL LETTER X
u'y' # 0x79 -> LATIN SMALL LETTER Y
u'z' # 0x7A -> LATIN SMALL LETTER Z
u'{' # 0x7B -> LEFT CURLY BRACKET
u'|' # 0x7C -> VERTICAL LINE
u'}' # 0x7D -> RIGHT CURLY BRACKET
u'~' # 0x7E -> TILDE
u'\x7f' # 0x7F -> DELETE
u'\x80' # 0x80 -> <control>
u'\x81' # 0x81 -> <control>
u'\x82' # 0x82 -> <control>
u'\x83' # 0x83 -> <control>
u'\x84' # 0x84 -> <control>
u'\x85' # 0x85 -> <control>
u'\x86' # 0x86 -> <control>
u'\x87' # 0x87 -> <control>
u'\x88' # 0x88 -> <control>
u'\x89' # 0x89 -> <control>
u'\x8a' # 0x8A -> <control>
u'\x8b' # 0x8B -> <control>
u'\x8c' # 0x8C -> <control>
u'\x8d' # 0x8D -> <control>
u'\x8e' # 0x8E -> <control>
u'\x8f' # 0x8F -> <control>
u'\x90' # 0x90 -> <control>
u'\x91' # 0x91 -> <control>
u'\x92' # 0x92 -> <control>
u'\x93' # 0x93 -> <control>
u'\x94' # 0x94 -> <control>
u'\x95' # 0x95 -> <control>
u'\x96' # 0x96 -> <control>
u'\x97' # 0x97 -> <control>
u'\x98' # 0x98 -> <control>
u'\x99' # 0x99 -> <control>
u'\x9a' # 0x9A -> <control>
u'\x9b' # 0x9B -> <control>
u'\x9c' # 0x9C -> <control>
u'\x9d' # 0x9D -> <control>
u'\x9e' # 0x9E -> <control>
u'\x9f' # 0x9F -> <control>
u'\xa0' # 0xA0 -> NO-BREAK SPACE
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\xa4' # 0xA4 -> CURRENCY SIGN
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\u060c' # 0xAC -> ARABIC COMMA
u'\xad' # 0xAD -> SOFT HYPHEN
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\u061b' # 0xBB -> ARABIC SEMICOLON
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\u061f' # 0xBF -> ARABIC QUESTION MARK
u'\ufffe'
u'\u0621' # 0xC1 -> ARABIC LETTER HAMZA
u'\u0622' # 0xC2 -> ARABIC LETTER ALEF WITH MADDA ABOVE
u'\u0623' # 0xC3 -> ARABIC LETTER ALEF WITH HAMZA ABOVE
u'\u0624' # 0xC4 -> ARABIC LETTER WAW WITH HAMZA ABOVE
u'\u0625' # 0xC5 -> ARABIC LETTER ALEF WITH HAMZA BELOW
u'\u0626' # 0xC6 -> ARABIC LETTER YEH WITH HAMZA ABOVE
u'\u0627' # 0xC7 -> ARABIC LETTER ALEF
u'\u0628' # 0xC8 -> ARABIC LETTER BEH
u'\u0629' # 0xC9 -> ARABIC LETTER TEH MARBUTA
u'\u062a' # 0xCA -> ARABIC LETTER TEH
u'\u062b' # 0xCB -> ARABIC LETTER THEH
u'\u062c' # 0xCC -> ARABIC LETTER JEEM
u'\u062d' # 0xCD -> ARABIC LETTER HAH
u'\u062e' # 0xCE -> ARABIC LETTER KHAH
u'\u062f' # 0xCF -> ARABIC LETTER DAL
u'\u0630' # 0xD0 -> ARABIC LETTER THAL
u'\u0631' # 0xD1 -> ARABIC LETTER REH
u'\u0632' # 0xD2 -> ARABIC LETTER ZAIN
u'\u0633' # 0xD3 -> ARABIC LETTER SEEN
u'\u0634' # 0xD4 -> ARABIC LETTER SHEEN
u'\u0635' # 0xD5 -> ARABIC LETTER SAD
u'\u0636' # 0xD6 -> ARABIC LETTER DAD
u'\u0637' # 0xD7 -> ARABIC LETTER TAH
u'\u0638' # 0xD8 -> ARABIC LETTER ZAH
u'\u0639' # 0xD9 -> ARABIC LETTER AIN
u'\u063a' # 0xDA -> ARABIC LETTER GHAIN
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\u0640' # 0xE0 -> ARABIC TATWEEL
u'\u0641' # 0xE1 -> ARABIC LETTER FEH
u'\u0642' # 0xE2 -> ARABIC LETTER QAF
u'\u0643' # 0xE3 -> ARABIC LETTER KAF
u'\u0644' # 0xE4 -> ARABIC LETTER LAM
u'\u0645' # 0xE5 -> ARABIC LETTER MEEM
u'\u0646' # 0xE6 -> ARABIC LETTER NOON
u'\u0647' # 0xE7 -> ARABIC LETTER HEH
u'\u0648' # 0xE8 -> ARABIC LETTER WAW
u'\u0649' # 0xE9 -> ARABIC LETTER ALEF MAKSURA
u'\u064a' # 0xEA -> ARABIC LETTER YEH
u'\u064b' # 0xEB -> ARABIC FATHATAN
u'\u064c' # 0xEC -> ARABIC DAMMATAN
u'\u064d' # 0xED -> ARABIC KASRATAN
u'\u064e' # 0xEE -> ARABIC FATHA
u'\u064f' # 0xEF -> ARABIC DAMMA
u'\u0650' # 0xF0 -> ARABIC KASRA
u'\u0651' # 0xF1 -> ARABIC SHADDA
u'\u0652' # 0xF2 -> ARABIC SUKUN
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
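# Round-trip sketch once the codec is registered:
#   u'\u0641'.encode('iso8859-6') == '\xe1' # ARABIC LETTER FEH
#   '\xe1'.decode('iso8859-6') == u'\u0641'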
|
betzw/mbed-os | refs/heads/master | tools/host_tests/udp_link_layer_auto.py | 124 | """
mbed SDK
Copyright (c) 2011-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
"""
How to use:
make.py -m LPC1768 -t ARM -d E:\ -n NET_14
udp_link_layer_auto.py -p COM20 -d E:\ -t 10
"""
import re
import uuid
import socket
import thread
from sys import stdout
from time import time, sleep
from host_test import DefaultTest
from SocketServer import BaseRequestHandler, UDPServer
# Received datagrams (with time)
dict_udp_recv_datagrams = dict()
# Sent datagrams (with time)
dict_udp_sent_datagrams = dict()
class UDPEchoClient_Handler(BaseRequestHandler):
def handle(self):
""" One handle per connection
"""
_data, _socket = self.request
# Process received datagram
data_str = repr(_data)[1:-1]
dict_udp_recv_datagrams[data_str] = time()
def udp_packet_recv(threadName, server_ip, server_port):
""" This function will receive packet stream from mbed device
"""
server = UDPServer((server_ip, server_port), UDPEchoClient_Handler)
print "[UDP_COUNTER] Listening for connections... %s:%d"% (server_ip, server_port)
server.serve_forever()
class UDPEchoServerTest(DefaultTest):
ECHO_SERVER_ADDRESS = "" # UDP IP of datagram bursts
ECHO_PORT = 0 # UDP port for datagram bursts
CONTROL_PORT = 23 # TCP port used to get stats from mbed device, e.g. counters
s = None # Socket
TEST_PACKET_COUNT = 1000 # how many packets should be send
TEST_STRESS_FACTOR = 0.001 # stress factor: 10 ms
PACKET_SATURATION_RATIO = 29.9 # Acceptable packet transmission in %
PATTERN_SERVER_IP = "Server IP Address is (\d+).(\d+).(\d+).(\d+):(\d+)"
re_detect_server_ip = re.compile(PATTERN_SERVER_IP)
def get_control_data(self, command="stat\n"):
BUFFER_SIZE = 256
try:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((self.ECHO_SERVER_ADDRESS, self.CONTROL_PORT))
        except Exception, e:
            # Without a control connection there is nothing to query
            return None
s.send(command)
data = s.recv(BUFFER_SIZE)
s.close()
return data
def test(self):
serial_ip_msg = self.mbed.serial_readline()
if serial_ip_msg is None:
return self.RESULT_IO_SERIAL
stdout.write(serial_ip_msg)
stdout.flush()
# Searching for IP address and port prompted by server
m = self.re_detect_server_ip.search(serial_ip_msg)
if m and len(m.groups()):
self.ECHO_SERVER_ADDRESS = ".".join(m.groups()[:4])
self.ECHO_PORT = int(m.groups()[4]) # must be integer for socket.connect method
self.notify("HOST: UDP Server found at: " + self.ECHO_SERVER_ADDRESS + ":" + str(self.ECHO_PORT))
# Open client socket to burst datagrams to UDP server in mbed
try:
self.s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
except Exception, e:
self.s = None
self.notify("HOST: Error: %s"% e)
return self.RESULT_ERROR
# UDP replied receiver works in background to get echoed datagrams
SERVER_IP = str(socket.gethostbyname(socket.getfqdn()))
SERVER_PORT = self.ECHO_PORT + 1
thread.start_new_thread(udp_packet_recv, ("Thread-udp-recv", SERVER_IP, SERVER_PORT))
sleep(0.5)
# Burst part
for no in range(self.TEST_PACKET_COUNT):
TEST_STRING = str(uuid.uuid4())
payload = str(no) + "__" + TEST_STRING
self.s.sendto(payload, (self.ECHO_SERVER_ADDRESS, self.ECHO_PORT))
dict_udp_sent_datagrams[payload] = time()
sleep(self.TEST_STRESS_FACTOR)
if self.s is not None:
self.s.close()
# Wait 5 seconds for packets to come
result = True
self.notify("HOST: Test Summary:")
for d in range(5):
sleep(1.0)
summary_datagram_success = (float(len(dict_udp_recv_datagrams)) / float(self.TEST_PACKET_COUNT)) * 100.0
self.notify("HOST: Datagrams received after +%d sec: %.3f%% (%d / %d), stress=%.3f ms"% (d,
summary_datagram_success,
len(dict_udp_recv_datagrams),
self.TEST_PACKET_COUNT,
self.TEST_STRESS_FACTOR))
result = result and (summary_datagram_success >= self.PACKET_SATURATION_RATIO)
stdout.flush()
# Getting control data from test
self.notify("...")
self.notify("HOST: Mbed Summary:")
mbed_stats = self.get_control_data()
self.notify(mbed_stats)
return self.RESULT_SUCCESS if result else self.RESULT_FAILURE
if __name__ == '__main__':
UDPEchoServerTest().run()
|
Gustry/QGIS | refs/heads/master | python/core/auto_additions/qgsproviderconnectionmodel.py | 32 | # The following has been generated automatically from src/core/qgsproviderconnectionmodel.h
QgsProviderConnectionModel.Role.baseClass = QgsProviderConnectionModel
|
sdeepanshu02/microblog | refs/heads/master | flask/Lib/site-packages/pbr/hooks/commands.py | 46 | # Copyright 2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from setuptools.command import easy_install
from pbr.hooks import base
from pbr import options
from pbr import packaging
class CommandsConfig(base.BaseConfig):
section = 'global'
def __init__(self, config):
super(CommandsConfig, self).__init__(config)
self.commands = self.config.get('commands', "")
def save(self):
self.config['commands'] = self.commands
super(CommandsConfig, self).save()
def add_command(self, command):
self.commands = "%s\n%s" % (self.commands, command)
def hook(self):
self.add_command('pbr.packaging.LocalEggInfo')
self.add_command('pbr.packaging.LocalSDist')
self.add_command('pbr.packaging.LocalInstallScripts')
self.add_command('pbr.packaging.LocalDevelop')
self.add_command('pbr.packaging.LocalRPMVersion')
if os.name != 'nt':
easy_install.get_script_args = packaging.override_get_script_args
if packaging.have_sphinx():
self.add_command('pbr.builddoc.LocalBuildDoc')
self.add_command('pbr.builddoc.LocalBuildLatex')
if os.path.exists('.testr.conf') and packaging.have_testr():
# There is a .testr.conf file. We want to use it.
self.add_command('pbr.packaging.TestrTest')
elif self.config.get('nosetests', False) and packaging.have_nose():
# We seem to still have nose configured
self.add_command('pbr.packaging.NoseTest')
use_egg = options.get_boolean_option(
self.pbr_config, 'use-egg', 'PBR_USE_EGG')
# We always want non-egg install unless explicitly requested
if 'manpages' in self.pbr_config or not use_egg:
self.add_command('pbr.packaging.LocalInstall')
else:
self.add_command('pbr.packaging.InstallWithGit')
|
marcelocure/django | refs/heads/master | tests/template_tests/syntax_tests/test_url.py | 108 | # coding: utf-8
from django.core.urlresolvers import NoReverseMatch, resolve
from django.template import RequestContext, TemplateSyntaxError
from django.test import (
RequestFactory, SimpleTestCase, ignore_warnings, override_settings,
)
from django.utils.deprecation import RemovedInDjango110Warning
from ..utils import setup
@override_settings(ROOT_URLCONF='template_tests.urls')
class UrlTagTests(SimpleTestCase):
# Successes
@ignore_warnings(category=RemovedInDjango110Warning)
@setup({'url01': '{% url "template_tests.views.client" client.id %}'})
def test_url01(self):
output = self.engine.render_to_string('url01', {'client': {'id': 1}})
self.assertEqual(output, '/client/1/')
@ignore_warnings(category=RemovedInDjango110Warning)
@setup({'url02': '{% url "template_tests.views.client_action" id=client.id action="update" %}'})
def test_url02(self):
output = self.engine.render_to_string('url02', {'client': {'id': 1}})
self.assertEqual(output, '/client/1/update/')
@ignore_warnings(category=RemovedInDjango110Warning)
@setup({'url02a': '{% url "template_tests.views.client_action" client.id "update" %}'})
def test_url02a(self):
output = self.engine.render_to_string('url02a', {'client': {'id': 1}})
self.assertEqual(output, '/client/1/update/')
@ignore_warnings(category=RemovedInDjango110Warning)
@setup({'url02b': "{% url 'template_tests.views.client_action' id=client.id action='update' %}"})
def test_url02b(self):
output = self.engine.render_to_string('url02b', {'client': {'id': 1}})
self.assertEqual(output, '/client/1/update/')
@ignore_warnings(category=RemovedInDjango110Warning)
@setup({'url02c': "{% url 'template_tests.views.client_action' client.id 'update' %}"})
def test_url02c(self):
output = self.engine.render_to_string('url02c', {'client': {'id': 1}})
self.assertEqual(output, '/client/1/update/')
@ignore_warnings(category=RemovedInDjango110Warning)
@setup({'url03': '{% url "template_tests.views.index" %}'})
def test_url03(self):
output = self.engine.render_to_string('url03')
self.assertEqual(output, '/')
@setup({'url04': '{% url "named.client" client.id %}'})
def test_url04(self):
output = self.engine.render_to_string('url04', {'client': {'id': 1}})
self.assertEqual(output, '/named-client/1/')
@setup({'url05': '{% url "метка_оператора" v %}'})
def test_url05(self):
output = self.engine.render_to_string('url05', {'v': 'Ω'})
self.assertEqual(output, '/%D0%AE%D0%BD%D0%B8%D0%BA%D0%BE%D0%B4/%CE%A9/')
@setup({'url06': '{% url "метка_оператора_2" tag=v %}'})
def test_url06(self):
output = self.engine.render_to_string('url06', {'v': 'Ω'})
self.assertEqual(output, '/%D0%AE%D0%BD%D0%B8%D0%BA%D0%BE%D0%B4/%CE%A9/')
@ignore_warnings(category=RemovedInDjango110Warning)
@setup({'url07': '{% url "template_tests.views.client2" tag=v %}'})
def test_url07(self):
output = self.engine.render_to_string('url07', {'v': 'Ω'})
self.assertEqual(output, '/%D0%AE%D0%BD%D0%B8%D0%BA%D0%BE%D0%B4/%CE%A9/')
@setup({'url08': '{% url "метка_оператора" v %}'})
def test_url08(self):
output = self.engine.render_to_string('url08', {'v': 'Ω'})
self.assertEqual(output, '/%D0%AE%D0%BD%D0%B8%D0%BA%D0%BE%D0%B4/%CE%A9/')
@setup({'url09': '{% url "метка_оператора_2" tag=v %}'})
def test_url09(self):
output = self.engine.render_to_string('url09', {'v': 'Ω'})
self.assertEqual(output, '/%D0%AE%D0%BD%D0%B8%D0%BA%D0%BE%D0%B4/%CE%A9/')
@ignore_warnings(category=RemovedInDjango110Warning)
@setup({'url10': '{% url "template_tests.views.client_action" id=client.id action="two words" %}'})
def test_url10(self):
output = self.engine.render_to_string('url10', {'client': {'id': 1}})
self.assertEqual(output, '/client/1/two%20words/')
@ignore_warnings(category=RemovedInDjango110Warning)
@setup({'url11': '{% url "template_tests.views.client_action" id=client.id action="==" %}'})
def test_url11(self):
output = self.engine.render_to_string('url11', {'client': {'id': 1}})
self.assertEqual(output, '/client/1/==/')
@setup({'url12': '{% url "template_tests.views.client_action" '
'id=client.id action="!$&\'()*+,;=~:@," %}'})
@ignore_warnings(category=RemovedInDjango110Warning)
def test_url12(self):
output = self.engine.render_to_string('url12', {'client': {'id': 1}})
self.assertEqual(output, '/client/1/!$&'()*+,;=~:@,/')
@ignore_warnings(category=RemovedInDjango110Warning)
@setup({'url13': '{% url "template_tests.views.client_action" '
'id=client.id action=arg|join:"-" %}'})
def test_url13(self):
output = self.engine.render_to_string('url13', {'client': {'id': 1}, 'arg': ['a', 'b']})
self.assertEqual(output, '/client/1/a-b/')
@ignore_warnings(category=RemovedInDjango110Warning)
@setup({'url14': '{% url "template_tests.views.client_action" client.id arg|join:"-" %}'})
def test_url14(self):
output = self.engine.render_to_string('url14', {'client': {'id': 1}, 'arg': ['a', 'b']})
self.assertEqual(output, '/client/1/a-b/')
@ignore_warnings(category=RemovedInDjango110Warning)
@setup({'url15': '{% url "template_tests.views.client_action" 12 "test" %}'})
def test_url15(self):
output = self.engine.render_to_string('url15')
self.assertEqual(output, '/client/12/test/')
@ignore_warnings(category=RemovedInDjango110Warning)
@setup({'url18': '{% url "template_tests.views.client" "1,2" %}'})
def test_url18(self):
output = self.engine.render_to_string('url18')
self.assertEqual(output, '/client/1,2/')
@ignore_warnings(category=RemovedInDjango110Warning)
@setup({'url19': '{% url named_url client.id %}'})
def test_url19(self):
output = self.engine.render_to_string('url19', {'client': {'id': 1}, 'named_url': 'template_tests.views.client'})
self.assertEqual(output, '/client/1/')
@setup({'url20': '{% url url_name_in_var client.id %}'})
def test_url20(self):
output = self.engine.render_to_string('url20', {'client': {'id': 1}, 'url_name_in_var': 'named.client'})
self.assertEqual(output, '/named-client/1/')
@setup({'url21': '{% autoescape off %}'
'{% url "template_tests.views.client_action" '
'id=client.id action="!$&\'()*+,;=~:@," %}'
'{% endautoescape %}'})
@ignore_warnings(category=RemovedInDjango110Warning)
def test_url21(self):
output = self.engine.render_to_string('url21', {'client': {'id': 1}})
self.assertEqual(output, '/client/1/!$&\'()*+,;=~:@,/')
# Failures
@setup({'url-fail01': '{% url %}'})
def test_url_fail01(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.get_template('url-fail01')
@setup({'url-fail02': '{% url "no_such_view" %}'})
def test_url_fail02(self):
with self.assertRaises(NoReverseMatch):
self.engine.render_to_string('url-fail02')
@ignore_warnings(category=RemovedInDjango110Warning)
@setup({'url-fail03': '{% url "template_tests.views.client" %}'})
def test_url_fail03(self):
with self.assertRaises(NoReverseMatch):
self.engine.render_to_string('url-fail03')
@setup({'url-fail04': '{% url "view" id, %}'})
def test_url_fail04(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.get_template('url-fail04')
@setup({'url-fail05': '{% url "view" id= %}'})
def test_url_fail05(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.get_template('url-fail05')
@setup({'url-fail06': '{% url "view" a.id=id %}'})
def test_url_fail06(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.get_template('url-fail06')
@setup({'url-fail07': '{% url "view" a.id!id %}'})
def test_url_fail07(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.get_template('url-fail07')
@setup({'url-fail08': '{% url "view" id="unterminatedstring %}'})
def test_url_fail08(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.get_template('url-fail08')
@setup({'url-fail09': '{% url "view" id=", %}'})
def test_url_fail09(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.get_template('url-fail09')
@setup({'url-fail11': '{% url named_url %}'})
def test_url_fail11(self):
with self.assertRaises(NoReverseMatch):
self.engine.render_to_string('url-fail11')
@setup({'url-fail12': '{% url named_url %}'})
def test_url_fail12(self):
with self.assertRaises(NoReverseMatch):
self.engine.render_to_string('url-fail12', {'named_url': 'no_such_view'})
@ignore_warnings(category=RemovedInDjango110Warning)
@setup({'url-fail13': '{% url named_url %}'})
def test_url_fail13(self):
with self.assertRaises(NoReverseMatch):
self.engine.render_to_string('url-fail13', {'named_url': 'template_tests.views.client'})
@setup({'url-fail14': '{% url named_url id, %}'})
def test_url_fail14(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.render_to_string('url-fail14', {'named_url': 'view'})
@setup({'url-fail15': '{% url named_url id= %}'})
def test_url_fail15(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.render_to_string('url-fail15', {'named_url': 'view'})
@setup({'url-fail16': '{% url named_url a.id=id %}'})
def test_url_fail16(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.render_to_string('url-fail16', {'named_url': 'view'})
@setup({'url-fail17': '{% url named_url a.id!id %}'})
def test_url_fail17(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.render_to_string('url-fail17', {'named_url': 'view'})
@setup({'url-fail18': '{% url named_url id="unterminatedstring %}'})
def test_url_fail18(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.render_to_string('url-fail18', {'named_url': 'view'})
@setup({'url-fail19': '{% url named_url id=", %}'})
def test_url_fail19(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.render_to_string('url-fail19', {'named_url': 'view'})
# {% url ... as var %}
@ignore_warnings(category=RemovedInDjango110Warning)
@setup({'url-asvar01': '{% url "template_tests.views.index" as url %}'})
def test_url_asvar01(self):
output = self.engine.render_to_string('url-asvar01')
self.assertEqual(output, '')
@ignore_warnings(category=RemovedInDjango110Warning)
@setup({'url-asvar02': '{% url "template_tests.views.index" as url %}{{ url }}'})
def test_url_asvar02(self):
output = self.engine.render_to_string('url-asvar02')
self.assertEqual(output, '/')
@setup({'url-asvar03': '{% url "no_such_view" as url %}{{ url }}'})
def test_url_asvar03(self):
output = self.engine.render_to_string('url-asvar03')
self.assertEqual(output, '')
@setup({'url-namespace01': '{% url "app:named.client" 42 %}'})
def test_url_namespace01(self):
request = RequestFactory().get('/')
request.resolver_match = resolve('/ns1/')
template = self.engine.get_template('url-namespace01')
context = RequestContext(request)
output = template.render(context)
self.assertEqual(output, '/ns1/named-client/42/')
@setup({'url-namespace02': '{% url "app:named.client" 42 %}'})
def test_url_namespace02(self):
request = RequestFactory().get('/')
request.resolver_match = resolve('/ns2/')
template = self.engine.get_template('url-namespace02')
context = RequestContext(request)
output = template.render(context)
self.assertEqual(output, '/ns2/named-client/42/')
@setup({'url-namespace03': '{% url "app:named.client" 42 %}'})
def test_url_namespace03(self):
request = RequestFactory().get('/')
template = self.engine.get_template('url-namespace03')
context = RequestContext(request)
output = template.render(context)
self.assertEqual(output, '/ns2/named-client/42/')
|
silverlogic/djangorestframework-expander | refs/heads/master | setup.py | 1 | from setuptools import setup
setup(
name='djangorestframework-expander',
version='0.2.3',
description=('A serializer mixin for Django REST Framework to expand object representations inline'),
author='Ryan Pineo',
author_email='ryanpineo@gmail.com',
license='MIT',
url='https://github.com/silverlogic/djangorestframework-expander',
packages=['expander'],
install_requires=['djangorestframework'],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
]
)
|
EddyK69/domoticz | refs/heads/development | plugins/AwoxSMP/plugin.py | 23 | # Awox SmartPlug Plugin
#
# Author: zaraki673, 2017
#
"""
<plugin key="AwoxSMP" name="Awox SmartPlug" author="zaraki673" version="1.0.0">
<params>
<param field="Address" label="MAC Address" width="150px" required="true"/>
<param field="Mode6" label="Debug" width="75px">
<options>
<option label="True" value="Debug"/>
<option label="False" value="Normal" default="true" />
</options>
</param>
</params>
</plugin>
"""
import Domoticz
import binascii
import struct
import lib.pySmartPlugSmpB16
from bluepy import btle
START_OF_MESSAGE = b'\x0f'
END_OF_MESSAGE = b'\xff\xff'
SMPstate = 0
SMPconso = 0
class BasePlugin:
enabled = False
pluginState = "Not Ready"
sessionCookie = ""
privateKey = b""
socketOn = "FALSE"
def __init__(self):
return
def onStart(self):
global SMPstate, SMPconso
if Parameters["Mode6"] == "Debug":
Domoticz.Debugging(1)
if (len(Devices) == 0):
Domoticz.Device(Name="Status", Unit=1, Type=17, Switchtype=0).Create()
Domoticz.Device(Name="Conso", Unit=2, TypeName="Usage").Create()
Domoticz.Log("Devices created.")
else:
if (1 in Devices): SMPstate = Devices[1].nValue
if (2 in Devices): SMPconso = Devices[2].nValue
DumpConfigToLog()
Domoticz.Log("Plugin is started.")
Domoticz.Heartbeat(20)
def onStop(self):
Domoticz.Log("Plugin is stopping.")
def onConnect(self, Status, Description):
return
def onMessage(self, Data, Status, Extra):
return
def onCommand(self, Unit, Command, Level, Hue):
Domoticz.Debug("onCommand called for Unit " + str(Unit) + ": Parameter '" + str(Command) + "', Level: " + str(Level))
Command = Command.strip()
action, sep, params = Command.partition(' ')
action = action.capitalize()
if (action == 'On'):
try:
plug = SmartPlug(Parameters["Address"])
plug.on()
UpdateDevice(1,1,'On')
plug.disconnect()
except btle.BTLEException as err:
Domoticz.Log('error when setting plug %s on (code %d)' % (Parameters["Address"], err.code))
elif (action == 'Off'):
try:
plug = SmartPlug(Parameters["Address"])
plug.off()
UpdateDevice(1,0,'Off')
plug.disconnect()
except btle.BTLEException as err:
Domoticz.Log('error when setting plug %s on (code %d)' % (Parameters["Address"], err.code))
return True
def onDisconnect(self):
return
def onHeartbeat(self):
global SMPstate, SMPconso
try:
plug = SmartPlug(Parameters["Address"])
(SMPstate, SMPconso) = plug.status_request()
plug.disconnect()
SMPstate = 'on' if SMPstate else 'off'
Domoticz.Log('plug state = %s' % SMPstate)
if (SMPstate == 'off'): UpdateDevice(1,0,'Off')
else: UpdateDevice(1,1,'On')
Domoticz.Log('plug power = %d W' % SMPconso)
UpdateDevice(2,0,str(SMPconso))
except btle.BTLEException as err:
Domoticz.Log('error when requesting stat to plug %s (code %d)' % (Parameters["Address"], err.code))
return True
def SetSocketSettings(self, power):
return
def GetSocketSettings(self):
return
def genericPOST(self, commandName):
return
global _plugin
_plugin = BasePlugin()
def onStart():
global _plugin
_plugin.onStart()
def onStop():
global _plugin
_plugin.onStop()
def onConnect(Status, Description):
global _plugin
_plugin.onConnect(Status, Description)
def onMessage(Data, Status, Extra):
global _plugin
_plugin.onMessage(Data, Status, Extra)
def onCommand(Unit, Command, Level, Hue):
global _plugin
_plugin.onCommand(Unit, Command, Level, Hue)
def onDisconnect():
global _plugin
_plugin.onDisconnect()
def onHeartbeat():
global _plugin
_plugin.onHeartbeat()
# xml built in parser threw import error on expat so just do it manually
def extractTagValue(tagName, XML):
startPos = XML.find(tagName)
endPos = XML.find(tagName, startPos+1)
    if ((startPos == -1) or (endPos == -1)):
        Domoticz.Error("'"+tagName+"' not found in supplied XML")
        return None
    return XML[startPos+len(tagName)+1:endPos-2]
# Generic helper functions
def DumpConfigToLog():
for x in Parameters:
if Parameters[x] != "":
Domoticz.Debug( "'" + x + "':'" + str(Parameters[x]) + "'")
Domoticz.Debug("Device count: " + str(len(Devices)))
for x in Devices:
Domoticz.Debug("Device: " + str(x) + " - " + str(Devices[x]))
Domoticz.Debug("Device ID: '" + str(Devices[x].ID) + "'")
Domoticz.Debug("Device Name: '" + Devices[x].Name + "'")
Domoticz.Debug("Device nValue: " + str(Devices[x].nValue))
Domoticz.Debug("Device sValue: '" + Devices[x].sValue + "'")
Domoticz.Debug("Device LastLevel: " + str(Devices[x].LastLevel))
return
def UpdateDevice(Unit, nValue, sValue):
# Make sure that the Domoticz device still exists (they can be deleted) before updating it
if (Unit in Devices):
if (Devices[Unit].nValue != nValue) or (Devices[Unit].sValue != sValue):
Devices[Unit].Update(nValue, str(sValue))
Domoticz.Log("Update "+str(nValue)+":'"+str(sValue)+"' ("+Devices[Unit].Name+")")
return
class SmartPlug(btle.Peripheral):
def __init__(self, addr):
btle.Peripheral.__init__(self, addr)
self.delegate = NotificationDelegate()
self.setDelegate(self.delegate)
self.plug_svc = self.getServiceByUUID('0000fff0-0000-1000-8000-00805f9b34fb')
self.plug_cmd_ch = self.plug_svc.getCharacteristics('0000fff3-0000-1000-8000-00805f9b34fb')[0]
def on(self):
self.delegate.chg_is_ok = False
self.plug_cmd_ch.write(self.get_buffer(binascii.unhexlify('0300010000')))
self.wait_data(0.5)
return self.delegate.chg_is_ok
def off(self):
self.delegate.chg_is_ok = False
self.plug_cmd_ch.write(self.get_buffer(binascii.unhexlify('0300000000')))
self.wait_data(0.5)
return self.delegate.chg_is_ok
def status_request(self):
self.plug_cmd_ch.write(self.get_buffer(binascii.unhexlify('04000000')))
self.wait_data(2.0)
return self.delegate.state, self.delegate.power
def program_request(self):
self.plug_cmd_ch.write(self.get_buffer(binascii.unhexlify('07000000')))
self.wait_data(2.0)
return self.delegate.programs
def calculate_checksum(self, message):
return (sum(bytearray(message)) + 1) & 0xff
def get_buffer(self, message):
return START_OF_MESSAGE + struct.pack("b",len(message) + 1) + message + struct.pack("b",self.calculate_checksum(message)) + END_OF_MESSAGE
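    # Worked example (illustrative): for the "on" message 0300010000 the
    # length byte is 0x06 (len + 1), the checksum is
    # (0x03 + 0x00 + 0x01 + 0x00 + 0x00 + 1) & 0xff == 0x05, so the frame is
    # b'\x0f\x06\x03\x00\x01\x00\x00\x05\xff\xff'.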
def wait_data(self, timeout):
self.delegate.need_data = True
while self.delegate.need_data and self.waitForNotifications(timeout):
pass
class NotificationDelegate(btle.DefaultDelegate):
def __init__(self):
btle.DefaultDelegate.__init__(self)
self.state = False
self.power = 0
self.chg_is_ok = False
self.programs = []
self._buffer = b''
self.need_data = True
def handleNotification(self, cHandle, data):
        # 0x0f appears to mark the beginning of a buffer, so start a new one here
if data[:1] == START_OF_MESSAGE:
self._buffer = data
else:
self._buffer = self._buffer + data
if self._buffer[-2:] == END_OF_MESSAGE:
self.handle_data(self._buffer)
self._buffer = b''
self.need_data = False
def handle_data(self, bytes_data):
# it's a state change confirm notification ?
if bytes_data[0:3] == b'\x0f\x04\x03':
self.chg_is_ok = True
# it's a state/power notification ?
if bytes_data[0:3] == b'\x0f\x0f\x04':
(state, dummy, power) = struct.unpack_from(">?BI", bytes_data, offset=4)
self.state = state
self.power = power / 1000
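            # Example (hypothetical payload): state=True with a raw power
            # field of 15000 reports the plug as on at 15000 / 1000 = 15 W.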
# it's a 0x0a notif ?
if bytes_data[0:3] == b'\x0f\x33\x0a':
print ("0A notif %s" % bytes_data)
# it's a programs notif ?
if bytes_data[0:3] == b'\x0f\x71\x07' :
program_offset = 4
self.programs = []
while program_offset + 21 < len(bytes_data):
(present, name, flags, start_hour, start_minute, end_hour, end_minute) = struct.unpack_from(">?16sbbbbb", bytes_data, program_offset)
#TODO interpret flags (day of program ?)
if present:
self.programs.append({ "name" : name.decode('iso-8859-1').strip('\0'), "flags":flags, "start":"{0:02d}:{1:02d}".format(start_hour, start_minute), "end":"{0:02d}:{1:02d}".format(end_hour, end_minute)})
program_offset += 22
|
koofr/thrift | refs/heads/master | lib/py/src/server/TProcessPoolServer.py | 50 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import logging
logger = logging.getLogger(__name__)
from multiprocessing import Process, Value, Condition, reduction
from TServer import TServer
from thrift.transport.TTransport import TTransportException
class TProcessPoolServer(TServer):
"""Server with a fixed size pool of worker subprocesses to service requests
Note that if you need shared state between the handlers - it's up to you!
Written by Dvir Volk, doat.com
"""
def __init__(self, *args):
TServer.__init__(self, *args)
self.numWorkers = 10
self.workers = []
self.isRunning = Value('b', False)
self.stopCondition = Condition()
self.postForkCallback = None
def setPostForkCallback(self, callback):
if not callable(callback):
raise TypeError("This is not a callback!")
self.postForkCallback = callback
def setNumWorkers(self, num):
"""Set the number of worker threads that should be created"""
self.numWorkers = num
def workerProcess(self):
"""Loop getting clients from the shared queue and process them"""
if self.postForkCallback:
self.postForkCallback()
while self.isRunning.value:
try:
client = self.serverTransport.accept()
if not client:
continue
self.serveClient(client)
except (KeyboardInterrupt, SystemExit):
return 0
except Exception as x:
logger.exception(x)
def serveClient(self, client):
"""Process input/output from a client for as long as possible"""
itrans = self.inputTransportFactory.getTransport(client)
otrans = self.outputTransportFactory.getTransport(client)
iprot = self.inputProtocolFactory.getProtocol(itrans)
oprot = self.outputProtocolFactory.getProtocol(otrans)
try:
while True:
self.processor.process(iprot, oprot)
except TTransportException as tx:
pass
except Exception as x:
logger.exception(x)
itrans.close()
otrans.close()
def serve(self):
"""Start workers and put into queue"""
# this is a shared state that can tell the workers to exit when False
self.isRunning.value = True
# first bind and listen to the port
self.serverTransport.listen()
# fork the children
for i in range(self.numWorkers):
try:
w = Process(target=self.workerProcess)
w.daemon = True
w.start()
self.workers.append(w)
except Exception as x:
logger.exception(x)
# wait until the condition is set by stop()
while True:
self.stopCondition.acquire()
try:
self.stopCondition.wait()
break
except (SystemExit, KeyboardInterrupt):
break
except Exception as x:
logger.exception(x)
self.isRunning.value = False
def stop(self):
self.isRunning.value = False
self.stopCondition.acquire()
self.stopCondition.notify()
self.stopCondition.release()
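# Minimal usage sketch (names are illustrative; assumes a Thrift-generated
# service with a Processor plus the standard transport/protocol factories):
#
#   from thrift.transport import TSocket, TTransport
#   from thrift.protocol import TBinaryProtocol
#
#   processor = MyService.Processor(MyHandler())
#   transport = TSocket.TServerSocket(port=9090)
#   tfactory = TTransport.TBufferedTransportFactory()
#   pfactory = TBinaryProtocol.TBinaryProtocolFactory()
#   server = TProcessPoolServer(processor, transport, tfactory, pfactory)
#   server.setNumWorkers(4)
#   server.serve()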
|
natanielruiz/android-yolo | refs/heads/master | jni-build/jni/include/tensorflow/contrib/distributions/python/ops/operator_pd_identity.py | 3 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Identity operator in `R^k`."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.distributions.python.ops import operator_pd
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import control_flow_ops
class OperatorPDIdentity(operator_pd.OperatorPDBase):
"""Identity operator in `R^k`: `Ax = x`.
This provides an efficient implementation of the identity as an `OperatorPD`.
Storage, solves, and matmul are all `O(1)`, independent of batch size.
In order to be a drop-in replacement for other operators, shape and dtype
of arguments (e.g. to `matmul`) are checked statically as though this operator
was an instantiated matrix.
Dynamic shape checks of arguments are not done since that could impede
performance.
"""
def __init__(self, shape, dtype, verify_pd=True, name='OperatorPDIdentity'):
"""Initialize an `OperatorPDIdentity`.
Args:
shape: `int32` rank 1 `Tensor` of length at least 2, and with the last
two entries equal (since this is a square matrix).
dtype: Data type of the matrix that this operator represents.
verify_pd: `Boolean`, if `True`, asserts are added to the initialization
args to ensure they define this operator as a square (batch) matrix.
name: Name to prepend to `Ops`.
"""
# Grab static shape if available now.
with ops.name_scope(name):
with ops.op_scope([shape], 'init'):
self._dtype = dtypes.as_dtype(dtype)
self._verify_pd = verify_pd
self._name = name
# Store the static shape (if possible) right now before adding the
# asserts, since the asserts prevent .constant_value from working.
shape = ops.convert_to_tensor(shape, name='shape')
self._get_shape = tensor_shape.TensorShape(
tensor_util.constant_value(shape))
self._shape_arg = self._check_shape(shape)
def _check_shape(self, shape):
"""Check that the init arg `shape` defines a valid operator."""
shape = ops.convert_to_tensor(shape, name='shape')
if not self._verify_pd:
return shape
# Further checks are equivalent to verification that this is positive
# definite. Why? Because the further checks simply check that this is a
# square matrix, and combining the fact that this is square (and thus maps
# a vector space R^k onto itself), with the behavior of .matmul(), this must
# be the identity operator.
rank = array_ops.size(shape)
assert_matrix = check_ops.assert_less_equal(2, rank)
with ops.control_dependencies([assert_matrix]):
last_dim = array_ops.gather(shape, rank - 1)
second_to_last_dim = array_ops.gather(shape, rank - 2)
assert_square = check_ops.assert_equal(last_dim, second_to_last_dim)
return control_flow_ops.with_dependencies([assert_matrix, assert_square],
shape)
def _check_x(self, x):
"""Static check that the argument `x` is proper `shape`, `dtype`."""
# x is a typical argument e.g. to matmul or solve. In both cases, x should
# have the same type/shape since this is a square matrix. These checks are
    # usually not needed since we usually have some tensor backing this
# distribution, and the calls to tf.matmul do a shape/type check.
#
# Static checks only for efficiency, the identity should be fast.
#
# Why check at all? Because we want this operator to be swappable for a
# real Operator.
if self.dtype != x.dtype:
raise TypeError(
'Expected argument "x" to have same dtype as this operator (%s). '
'Found: %s' % (self.dtype, x.dtype))
x_shape = x.get_shape()
self_shape = self.get_shape()
found_msg = (
'Found: operator.shape = %s, x.shape = %s' % (self_shape, x_shape))
if x_shape.ndims is not None and self_shape.ndims is not None:
if x_shape.ndims != self_shape.ndims:
raise ValueError(
'Expected argument "x" to have same tensor rank as this operator. '
+ found_msg)
if x_shape.is_fully_defined() and self_shape.is_fully_defined():
if x_shape[-2] != self_shape[-1]:
raise ValueError(
'Incompatible shapes for matrix-matrix operation. ' + found_msg)
@property
def name(self):
"""String name identifying this `Operator`."""
return self._name
@property
def verify_pd(self):
"""Whether to verify that this `Operator` is positive definite."""
return self._verify_pd
@property
def dtype(self):
"""Data type of matrix elements of `A`."""
return self._dtype
def _add_to_tensor(self, mat):
# Add to a tensor in O(k) time!
mat_diag = array_ops.batch_matrix_diag_part(mat)
new_diag = constant_op.constant(1, dtype=self.dtype) + mat_diag
return array_ops.batch_matrix_set_diag(mat, new_diag)
def _inv_quadratic_form_on_vectors(self, x):
self._check_x(x)
return self._iqfov_via_sqrt_solve(x)
@property
def inputs(self):
"""List of tensors that were provided as initialization inputs."""
return [self._shape]
def get_shape(self):
"""Static `TensorShape` of entire operator.
If this operator represents the batch matrix `A` with
`A.shape = [N1,...,Nn, k, k]`, then this returns
`TensorShape([N1,...,Nn, k, k])`
Returns:
`TensorShape`, statically determined, may be undefined.
"""
return self._get_shape
def _shape(self):
return self._shape_arg
def _det(self):
det = array_ops.ones(self.batch_shape(), dtype=self.dtype)
det.set_shape(self.get_batch_shape())
return det
def _batch_log_det(self):
log_det = array_ops.zeros(self.batch_shape(), dtype=self.dtype)
log_det.set_shape(self.get_batch_shape())
return log_det
def _batch_sqrt_log_det(self):
s_log_det = array_ops.zeros(self.batch_shape(), dtype=self.dtype)
s_log_det.set_shape(self.get_batch_shape())
return s_log_det
def _batch_matmul(self, x, transpose_x=False):
if transpose_x:
x = array_ops.batch_matrix_transpose(x)
self._check_x(x)
return x
def _batch_sqrt_matmul(self, x, transpose_x=False):
return self._batch_matmul(x, transpose_x=transpose_x)
def _batch_solve(self, rhs):
self._check_x(rhs)
return rhs
def _batch_sqrt_solve(self, rhs):
self._check_x(rhs)
return rhs
def _to_dense(self):
diag = array_ops.ones(self.vector_shape(), dtype=self.dtype)
dense = array_ops.batch_matrix_diag(diag)
dense.set_shape(self.get_shape())
return dense
def _sqrt_to_dense(self):
return self.to_dense()
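# Minimal usage sketch (illustrative):
#
#   operator = OperatorPDIdentity(shape=[2, 2], dtype=dtypes.float32)
#   x = array_ops.ones([2, 1], dtype=dtypes.float32)
#   y = operator.matmul(x)  # identity operator: y == x, computed in O(1)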
|
ghber/My-Django-Nonrel | refs/heads/master | django/utils/unittest/case.py | 151 | """Test case implementation"""
import sys
import difflib
import pprint
import re
import unittest
import warnings
from django.utils.unittest import result
from django.utils.unittest.util import\
safe_repr, safe_str, strclass,\
unorderable_list_difference
from django.utils.unittest.compatibility import wraps
__unittest = True
DIFF_OMITTED = ('\nDiff is %s characters long. '
'Set self.maxDiff to None to see it.')
class SkipTest(Exception):
"""
Raise this exception in a test to skip it.
Usually you can use TestResult.skip() or one of the skipping decorators
instead of raising this directly.
"""
class _ExpectedFailure(Exception):
"""
Raise this when a test is expected to fail.
This is an implementation detail.
"""
def __init__(self, exc_info):
# can't use super because Python 2.4 exceptions are old style
Exception.__init__(self)
self.exc_info = exc_info
class _UnexpectedSuccess(Exception):
"""
The test was supposed to fail, but it didn't!
"""
def _id(obj):
return obj
def skip(reason):
"""
Unconditionally skip a test.
"""
def decorator(test_item):
if not (isinstance(test_item, type) and issubclass(test_item, TestCase)):
@wraps(test_item)
def skip_wrapper(*args, **kwargs):
raise SkipTest(reason)
test_item = skip_wrapper
test_item.__unittest_skip__ = True
test_item.__unittest_skip_why__ = reason
return test_item
return decorator
def skipIf(condition, reason):
"""
Skip a test if the condition is true.
"""
if condition:
return skip(reason)
return _id
def skipUnless(condition, reason):
"""
Skip a test unless the condition is true.
"""
if not condition:
return skip(reason)
return _id
def expectedFailure(func):
@wraps(func)
def wrapper(*args, **kwargs):
try:
func(*args, **kwargs)
except Exception:
raise _ExpectedFailure(sys.exc_info())
raise _UnexpectedSuccess
return wrapper
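# Hypothetical usage sketch for the decorators above (MyTests and the test
# bodies are illustrative only; `sys` is already imported in this module):
#
#   class MyTests(TestCase):
#       @skip("demonstrates unconditional skipping")
#       def test_nothing(self):
#           self.fail("never runs")
#
#       @skipIf(sys.version_info < (2, 6), "requires Python 2.6+")
#       def test_new_feature(self):
#           pass
#
#       @expectedFailure
#       def test_known_bug(self):
#           self.assertEqual(1, 0)  # recorded as an expected failure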
class _AssertRaisesContext(object):
"""A context manager used to implement TestCase.assertRaises* methods."""
def __init__(self, expected, test_case, expected_regexp=None):
self.expected = expected
self.failureException = test_case.failureException
self.expected_regexp = expected_regexp
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, tb):
if exc_type is None:
try:
exc_name = self.expected.__name__
except AttributeError:
exc_name = str(self.expected)
raise self.failureException(
"%s not raised" % (exc_name,))
if not issubclass(exc_type, self.expected):
# let unexpected exceptions pass through
return False
self.exception = exc_value # store for later retrieval
if self.expected_regexp is None:
return True
expected_regexp = self.expected_regexp
if isinstance(expected_regexp, basestring):
expected_regexp = re.compile(expected_regexp)
if not expected_regexp.search(str(exc_value)):
raise self.failureException('"%s" does not match "%s"' %
(expected_regexp.pattern, str(exc_value)))
return True
class _TypeEqualityDict(object):
def __init__(self, testcase):
self.testcase = testcase
self._store = {}
def __setitem__(self, key, value):
self._store[key] = value
def __getitem__(self, key):
value = self._store[key]
if isinstance(value, basestring):
return getattr(self.testcase, value)
return value
def get(self, key, default=None):
if key in self._store:
return self[key]
return default
class TestCase(unittest.TestCase):
"""A class whose instances are single test cases.
By default, the test code itself should be placed in a method named
'runTest'.
If the fixture may be used for many test cases, create as
many test methods as are needed. When instantiating such a TestCase
subclass, specify in the constructor arguments the name of the test method
that the instance is to execute.
Test authors should subclass TestCase for their own tests. Construction
and deconstruction of the test's environment ('fixture') can be
implemented by overriding the 'setUp' and 'tearDown' methods respectively.
If it is necessary to override the __init__ method, the base class
__init__ method must always be called. It is important that subclasses
should not change the signature of their __init__ method, since instances
of the classes are instantiated automatically by parts of the framework
in order to be run.
"""
# This attribute determines which exception will be raised when
# the instance's assertion methods fail; test methods raising this
# exception will be deemed to have 'failed' rather than 'errored'
failureException = AssertionError
# This attribute sets the maximum length of a diff in failure messages
# by assert methods using difflib. It is looked up as an instance attribute
# so can be configured by individual tests if required.
maxDiff = 80*8
# This attribute determines whether long messages (including repr of
# objects used in assert methods) will be printed on failure in *addition*
# to any explicit message passed.
longMessage = True
# Attribute used by TestSuite for classSetUp
_classSetupFailed = False
def __init__(self, methodName='runTest'):
"""Create an instance of the class that will use the named test
method when executed. Raises a ValueError if the instance does
not have a method with the specified name.
"""
self._testMethodName = methodName
self._resultForDoCleanups = None
try:
testMethod = getattr(self, methodName)
except AttributeError:
raise ValueError("no such test method in %s: %s" % \
(self.__class__, methodName))
self._testMethodDoc = testMethod.__doc__
self._cleanups = []
# Map types to custom assertEqual functions that will compare
# instances of said type in more detail to generate a more useful
# error message.
self._type_equality_funcs = _TypeEqualityDict(self)
self.addTypeEqualityFunc(dict, 'assertDictEqual')
self.addTypeEqualityFunc(list, 'assertListEqual')
self.addTypeEqualityFunc(tuple, 'assertTupleEqual')
self.addTypeEqualityFunc(set, 'assertSetEqual')
self.addTypeEqualityFunc(frozenset, 'assertSetEqual')
self.addTypeEqualityFunc(unicode, 'assertMultiLineEqual')
def addTypeEqualityFunc(self, typeobj, function):
"""Add a type specific assertEqual style function to compare a type.
This method is for use by TestCase subclasses that need to register
their own type equality functions to provide nicer error messages.
Args:
typeobj: The data type to call this function on when both values
are of the same type in assertEqual().
function: The callable taking two arguments and an optional
msg= argument that raises self.failureException with a
useful error message when the two arguments are not equal.
"""
self._type_equality_funcs[typeobj] = function
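    # Illustrative sketch (hypothetical Point type, not part of this
    # module): a subclass can register a bound method so that assertEqual
    # dispatches to it whenever both arguments are Points.
    #
    #   class PointTest(TestCase):
    #       def setUp(self):
    #           self.addTypeEqualityFunc(Point, self.assertPointEqual)
    #       def assertPointEqual(self, first, second, msg=None):
    #           if (first.x, first.y) != (second.x, second.y):
    #               self.fail(self._formatMessage(msg, 'Points differ'))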
def addCleanup(self, function, *args, **kwargs):
"""Add a function, with arguments, to be called when the test is
completed. Functions added are called on a LIFO basis and are
called after tearDown on test failure or success.
Cleanup items are called even if setUp fails (unlike tearDown)."""
self._cleanups.append((function, args, kwargs))
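    # Usage sketch (tempfile/shutil imports assumed): cleanups run
    # last-in, first-out after tearDown, and run even when setUp raises.
    #
    #   def setUp(self):
    #       self.tmpdir = tempfile.mkdtemp()
    #       self.addCleanup(shutil.rmtree, self.tmpdir)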
def setUp(self):
"Hook method for setting up the test fixture before exercising it."
@classmethod
def setUpClass(cls):
"Hook method for setting up class fixture before running tests in the class."
@classmethod
def tearDownClass(cls):
"Hook method for deconstructing the class fixture after running all tests in the class."
def tearDown(self):
"Hook method for deconstructing the test fixture after testing it."
def countTestCases(self):
return 1
def defaultTestResult(self):
return result.TestResult()
def shortDescription(self):
"""Returns a one-line description of the test, or None if no
description has been provided.
The default implementation of this method returns the first line of
the specified test method's docstring.
"""
doc = self._testMethodDoc
return doc and doc.split("\n")[0].strip() or None
def id(self):
return "%s.%s" % (strclass(self.__class__), self._testMethodName)
def __eq__(self, other):
if type(self) is not type(other):
return NotImplemented
return self._testMethodName == other._testMethodName
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash((type(self), self._testMethodName))
def __str__(self):
return "%s (%s)" % (self._testMethodName, strclass(self.__class__))
def __repr__(self):
return "<%s testMethod=%s>" % \
(strclass(self.__class__), self._testMethodName)
def _addSkip(self, result, reason):
addSkip = getattr(result, 'addSkip', None)
if addSkip is not None:
addSkip(self, reason)
else:
warnings.warn("Use of a TestResult without an addSkip method is deprecated",
DeprecationWarning, 2)
result.addSuccess(self)
def run(self, result=None):
orig_result = result
if result is None:
result = self.defaultTestResult()
startTestRun = getattr(result, 'startTestRun', None)
if startTestRun is not None:
startTestRun()
self._resultForDoCleanups = result
result.startTest(self)
testMethod = getattr(self, self._testMethodName)
if (getattr(self.__class__, "__unittest_skip__", False) or
getattr(testMethod, "__unittest_skip__", False)):
# If the class or method was skipped.
try:
skip_why = (getattr(self.__class__, '__unittest_skip_why__', '')
or getattr(testMethod, '__unittest_skip_why__', ''))
self._addSkip(result, skip_why)
finally:
result.stopTest(self)
return
try:
success = False
try:
self.setUp()
except SkipTest, e:
self._addSkip(result, str(e))
except Exception:
result.addError(self, sys.exc_info())
else:
try:
testMethod()
except self.failureException:
result.addFailure(self, sys.exc_info())
except _ExpectedFailure, e:
addExpectedFailure = getattr(result, 'addExpectedFailure', None)
if addExpectedFailure is not None:
addExpectedFailure(self, e.exc_info)
else:
warnings.warn("Use of a TestResult without an addExpectedFailure method is deprecated",
DeprecationWarning)
result.addSuccess(self)
except _UnexpectedSuccess:
addUnexpectedSuccess = getattr(result, 'addUnexpectedSuccess', None)
if addUnexpectedSuccess is not None:
addUnexpectedSuccess(self)
else:
warnings.warn("Use of a TestResult without an addUnexpectedSuccess method is deprecated",
DeprecationWarning)
result.addFailure(self, sys.exc_info())
except SkipTest, e:
self._addSkip(result, str(e))
except Exception:
result.addError(self, sys.exc_info())
else:
success = True
try:
self.tearDown()
except Exception:
result.addError(self, sys.exc_info())
success = False
cleanUpSuccess = self.doCleanups()
success = success and cleanUpSuccess
if success:
result.addSuccess(self)
finally:
result.stopTest(self)
if orig_result is None:
stopTestRun = getattr(result, 'stopTestRun', None)
if stopTestRun is not None:
stopTestRun()
def doCleanups(self):
"""Execute all cleanup functions. Normally called for you after
tearDown."""
result = self._resultForDoCleanups
ok = True
while self._cleanups:
function, args, kwargs = self._cleanups.pop(-1)
try:
function(*args, **kwargs)
except Exception:
ok = False
result.addError(self, sys.exc_info())
return ok
def __call__(self, *args, **kwds):
return self.run(*args, **kwds)
def debug(self):
"""Run the test without collecting errors in a TestResult"""
self.setUp()
getattr(self, self._testMethodName)()
self.tearDown()
while self._cleanups:
function, args, kwargs = self._cleanups.pop(-1)
function(*args, **kwargs)
def skipTest(self, reason):
"""Skip this test."""
raise SkipTest(reason)
def fail(self, msg=None):
"""Fail immediately, with the given message."""
raise self.failureException(msg)
def assertFalse(self, expr, msg=None):
"Fail the test if the expression is true."
if expr:
msg = self._formatMessage(msg, "%s is not False" % safe_repr(expr))
raise self.failureException(msg)
def assertTrue(self, expr, msg=None):
"""Fail the test unless the expression is true."""
if not expr:
msg = self._formatMessage(msg, "%s is not True" % safe_repr(expr))
raise self.failureException(msg)
def _formatMessage(self, msg, standardMsg):
"""Honour the longMessage attribute when generating failure messages.
If longMessage is False this means:
* Use only an explicit message if it is provided
* Otherwise use the standard message for the assert
If longMessage is True:
* Use the standard message
        * If an explicit message is provided, append ' : ' and the explicit message
"""
if not self.longMessage:
return msg or standardMsg
if msg is None:
return standardMsg
try:
return '%s : %s' % (standardMsg, msg)
except UnicodeDecodeError:
return '%s : %s' % (safe_str(standardMsg), safe_str(msg))
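    # Behaviour example (values illustrative): with longMessage True, a
    # failing assertEqual(1, 2, 'context') raises "1 != 2 : context";
    # with longMessage False the explicit message wins, so the same call
    # raises just "context".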
def assertRaises(self, excClass, callableObj=None, *args, **kwargs):
"""Fail unless an exception of class excClass is thrown
by callableObj when invoked with arguments args and keyword
arguments kwargs. If a different type of exception is
thrown, it will not be caught, and the test case will be
deemed to have suffered an error, exactly as for an
unexpected exception.
If called with callableObj omitted or None, will return a
context object used like this::
with self.assertRaises(SomeException):
do_something()
The context manager keeps a reference to the exception as
the 'exception' attribute. This allows you to inspect the
exception after the assertion::
with self.assertRaises(SomeException) as cm:
do_something()
the_exception = cm.exception
self.assertEqual(the_exception.error_code, 3)
"""
if callableObj is None:
return _AssertRaisesContext(excClass, self)
try:
callableObj(*args, **kwargs)
except excClass:
return
if hasattr(excClass,'__name__'):
excName = excClass.__name__
else:
excName = str(excClass)
raise self.failureException("%s not raised" % excName)
def _getAssertEqualityFunc(self, first, second):
"""Get a detailed comparison function for the types of the two args.
Returns: A callable accepting (first, second, msg=None) that will
raise a failure exception if first != second with a useful human
readable error message for those types.
"""
#
# NOTE(gregory.p.smith): I considered isinstance(first, type(second))
# and vice versa. I opted for the conservative approach in case
# subclasses are not intended to be compared in detail to their super
# class instances using a type equality func. This means testing
# subtypes won't automagically use the detailed comparison. Callers
# should use their type specific assertSpamEqual method to compare
# subclasses if the detailed comparison is desired and appropriate.
# See the discussion in http://bugs.python.org/issue2578.
#
if type(first) is type(second):
asserter = self._type_equality_funcs.get(type(first))
if asserter is not None:
return asserter
return self._baseAssertEqual
def _baseAssertEqual(self, first, second, msg=None):
"""The default assertEqual implementation, not type specific."""
if not first == second:
standardMsg = '%s != %s' % (safe_repr(first), safe_repr(second))
msg = self._formatMessage(msg, standardMsg)
raise self.failureException(msg)
def assertEqual(self, first, second, msg=None):
"""Fail if the two objects are unequal as determined by the '=='
operator.
"""
assertion_func = self._getAssertEqualityFunc(first, second)
assertion_func(first, second, msg=msg)
def assertNotEqual(self, first, second, msg=None):
"""Fail if the two objects are equal as determined by the '=='
operator.
"""
if not first != second:
msg = self._formatMessage(msg, '%s == %s' % (safe_repr(first),
safe_repr(second)))
raise self.failureException(msg)
def assertAlmostEqual(self, first, second, places=None, msg=None, delta=None):
"""Fail if the two objects are unequal as determined by their
difference rounded to the given number of decimal places
        (default 7) and comparing to zero, or by comparing that the
        difference between the two objects is more than the given delta.
        Note that decimal places (from zero) are usually not the same
        as significant digits (measured from the most significant digit).
If the two objects compare equal then they will automatically
compare almost equal.
"""
if first == second:
# shortcut
return
if delta is not None and places is not None:
raise TypeError("specify delta or places not both")
if delta is not None:
if abs(first - second) <= delta:
return
standardMsg = '%s != %s within %s delta' % (safe_repr(first),
safe_repr(second),
safe_repr(delta))
else:
if places is None:
places = 7
if round(abs(second-first), places) == 0:
return
standardMsg = '%s != %s within %r places' % (safe_repr(first),
safe_repr(second),
places)
msg = self._formatMessage(msg, standardMsg)
raise self.failureException(msg)
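    # Worked examples of the two modes above (values illustrative):
    #
    #   self.assertAlmostEqual(1.00000001, 1.0)       # passes: round(1e-8, 7) == 0
    #   self.assertAlmostEqual(1.004, 1.0, places=2)  # passes: round(0.004, 2) == 0
    #   self.assertAlmostEqual(1.05, 1.0, delta=0.1)  # passes: abs diff <= 0.1
    #   self.assertAlmostEqual(1.0, 2.0, places=2, delta=1.5)  # TypeError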
def assertNotAlmostEqual(self, first, second, places=None, msg=None, delta=None):
"""Fail if the two objects are equal as determined by their
difference rounded to the given number of decimal places
        (default 7) and comparing to zero, or by comparing that the
        difference between the two objects is less than the given delta.
        Note that decimal places (from zero) are usually not the same
        as significant digits (measured from the most significant digit).
Objects that are equal automatically fail.
"""
if delta is not None and places is not None:
raise TypeError("specify delta or places not both")
if delta is not None:
if not (first == second) and abs(first - second) > delta:
return
standardMsg = '%s == %s within %s delta' % (safe_repr(first),
safe_repr(second),
safe_repr(delta))
else:
if places is None:
places = 7
if not (first == second) and round(abs(second-first), places) != 0:
return
standardMsg = '%s == %s within %r places' % (safe_repr(first),
safe_repr(second),
places)
msg = self._formatMessage(msg, standardMsg)
raise self.failureException(msg)
# Synonyms for assertion methods
# The plurals are undocumented. Keep them that way to discourage use.
# Do not add more. Do not remove.
# Going through a deprecation cycle on these would annoy many people.
assertEquals = assertEqual
assertNotEquals = assertNotEqual
assertAlmostEquals = assertAlmostEqual
assertNotAlmostEquals = assertNotAlmostEqual
assert_ = assertTrue
# These fail* assertion method names are pending deprecation and will
# be a DeprecationWarning in 3.2; http://bugs.python.org/issue2578
def _deprecate(original_func):
def deprecated_func(*args, **kwargs):
warnings.warn(
('Please use %s instead.' % original_func.__name__),
PendingDeprecationWarning, 2)
return original_func(*args, **kwargs)
return deprecated_func
failUnlessEqual = _deprecate(assertEqual)
failIfEqual = _deprecate(assertNotEqual)
failUnlessAlmostEqual = _deprecate(assertAlmostEqual)
failIfAlmostEqual = _deprecate(assertNotAlmostEqual)
failUnless = _deprecate(assertTrue)
failUnlessRaises = _deprecate(assertRaises)
failIf = _deprecate(assertFalse)
def assertSequenceEqual(self, seq1, seq2,
msg=None, seq_type=None, max_diff=80*8):
"""An equality assertion for ordered sequences (like lists and tuples).
For the purposes of this function, a valid ordered sequence type is one
which can be indexed, has a length, and has an equality operator.
Args:
seq1: The first sequence to compare.
seq2: The second sequence to compare.
seq_type: The expected datatype of the sequences, or None if no
datatype should be enforced.
msg: Optional message to use on failure instead of a list of
differences.
            max_diff: Maximum size of the diff; larger diffs are not shown
"""
if seq_type is not None:
seq_type_name = seq_type.__name__
if not isinstance(seq1, seq_type):
raise self.failureException('First sequence is not a %s: %s'
% (seq_type_name, safe_repr(seq1)))
if not isinstance(seq2, seq_type):
raise self.failureException('Second sequence is not a %s: %s'
% (seq_type_name, safe_repr(seq2)))
else:
seq_type_name = "sequence"
differing = None
try:
len1 = len(seq1)
except (TypeError, NotImplementedError):
differing = 'First %s has no length. Non-sequence?' % (
seq_type_name)
if differing is None:
try:
len2 = len(seq2)
except (TypeError, NotImplementedError):
differing = 'Second %s has no length. Non-sequence?' % (
seq_type_name)
if differing is None:
if seq1 == seq2:
return
seq1_repr = repr(seq1)
seq2_repr = repr(seq2)
if len(seq1_repr) > 30:
seq1_repr = seq1_repr[:30] + '...'
if len(seq2_repr) > 30:
seq2_repr = seq2_repr[:30] + '...'
elements = (seq_type_name.capitalize(), seq1_repr, seq2_repr)
differing = '%ss differ: %s != %s\n' % elements
for i in xrange(min(len1, len2)):
try:
item1 = seq1[i]
except (TypeError, IndexError, NotImplementedError):
differing += ('\nUnable to index element %d of first %s\n' %
(i, seq_type_name))
break
try:
item2 = seq2[i]
except (TypeError, IndexError, NotImplementedError):
differing += ('\nUnable to index element %d of second %s\n' %
(i, seq_type_name))
break
if item1 != item2:
differing += ('\nFirst differing element %d:\n%s\n%s\n' %
(i, item1, item2))
break
else:
if (len1 == len2 and seq_type is None and
type(seq1) != type(seq2)):
# The sequences are the same, but have differing types.
return
if len1 > len2:
differing += ('\nFirst %s contains %d additional '
'elements.\n' % (seq_type_name, len1 - len2))
try:
differing += ('First extra element %d:\n%s\n' %
(len2, seq1[len2]))
except (TypeError, IndexError, NotImplementedError):
differing += ('Unable to index element %d '
'of first %s\n' % (len2, seq_type_name))
elif len1 < len2:
differing += ('\nSecond %s contains %d additional '
'elements.\n' % (seq_type_name, len2 - len1))
try:
differing += ('First extra element %d:\n%s\n' %
(len1, seq2[len1]))
except (TypeError, IndexError, NotImplementedError):
differing += ('Unable to index element %d '
'of second %s\n' % (len1, seq_type_name))
standardMsg = differing
diffMsg = '\n' + '\n'.join(
difflib.ndiff(pprint.pformat(seq1).splitlines(),
pprint.pformat(seq2).splitlines()))
standardMsg = self._truncateMessage(standardMsg, diffMsg)
msg = self._formatMessage(msg, standardMsg)
self.fail(msg)
def _truncateMessage(self, message, diff):
max_diff = self.maxDiff
if max_diff is None or len(diff) <= max_diff:
return message + diff
return message + (DIFF_OMITTED % len(diff))
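    # Behaviour sketch: with the default maxDiff of 640 characters (80*8),
    # a long ndiff is replaced by the DIFF_OMITTED notice. An individual
    # test can opt back into full diffs (big_list_a/big_list_b are
    # illustrative names):
    #
    #   def test_big_structures(self):
    #       self.maxDiff = None  # show diffs of any length in this test
    #       self.assertEqual(big_list_a, big_list_b)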
def assertListEqual(self, list1, list2, msg=None):
"""A list-specific equality assertion.
Args:
list1: The first list to compare.
list2: The second list to compare.
msg: Optional message to use on failure instead of a list of
differences.
"""
self.assertSequenceEqual(list1, list2, msg, seq_type=list)
def assertTupleEqual(self, tuple1, tuple2, msg=None):
"""A tuple-specific equality assertion.
Args:
tuple1: The first tuple to compare.
tuple2: The second tuple to compare.
msg: Optional message to use on failure instead of a list of
differences.
"""
self.assertSequenceEqual(tuple1, tuple2, msg, seq_type=tuple)
def assertSetEqual(self, set1, set2, msg=None):
"""A set-specific equality assertion.
Args:
set1: The first set to compare.
set2: The second set to compare.
msg: Optional message to use on failure instead of a list of
differences.
        assertSetEqual uses duck typing to support different types of sets,
        and is optimized for sets specifically (parameters must support a
        difference method).
"""
try:
difference1 = set1.difference(set2)
except TypeError, e:
self.fail('invalid type when attempting set difference: %s' % e)
except AttributeError, e:
self.fail('first argument does not support set difference: %s' % e)
try:
difference2 = set2.difference(set1)
except TypeError, e:
self.fail('invalid type when attempting set difference: %s' % e)
except AttributeError, e:
self.fail('second argument does not support set difference: %s' % e)
if not (difference1 or difference2):
return
lines = []
if difference1:
lines.append('Items in the first set but not the second:')
for item in difference1:
lines.append(repr(item))
if difference2:
lines.append('Items in the second set but not the first:')
for item in difference2:
lines.append(repr(item))
standardMsg = '\n'.join(lines)
self.fail(self._formatMessage(msg, standardMsg))
def assertIn(self, member, container, msg=None):
"""Just like self.assertTrue(a in b), but with a nicer default message."""
if member not in container:
standardMsg = '%s not found in %s' % (safe_repr(member),
safe_repr(container))
self.fail(self._formatMessage(msg, standardMsg))
def assertNotIn(self, member, container, msg=None):
"""Just like self.assertTrue(a not in b), but with a nicer default message."""
if member in container:
standardMsg = '%s unexpectedly found in %s' % (safe_repr(member),
safe_repr(container))
self.fail(self._formatMessage(msg, standardMsg))
def assertIs(self, expr1, expr2, msg=None):
"""Just like self.assertTrue(a is b), but with a nicer default message."""
if expr1 is not expr2:
standardMsg = '%s is not %s' % (safe_repr(expr1), safe_repr(expr2))
self.fail(self._formatMessage(msg, standardMsg))
def assertIsNot(self, expr1, expr2, msg=None):
"""Just like self.assertTrue(a is not b), but with a nicer default message."""
if expr1 is expr2:
standardMsg = 'unexpectedly identical: %s' % (safe_repr(expr1),)
self.fail(self._formatMessage(msg, standardMsg))
def assertDictEqual(self, d1, d2, msg=None):
self.assertTrue(isinstance(d1, dict), 'First argument is not a dictionary')
self.assertTrue(isinstance(d2, dict), 'Second argument is not a dictionary')
if d1 != d2:
standardMsg = '%s != %s' % (safe_repr(d1, True), safe_repr(d2, True))
diff = ('\n' + '\n'.join(difflib.ndiff(
pprint.pformat(d1).splitlines(),
pprint.pformat(d2).splitlines())))
standardMsg = self._truncateMessage(standardMsg, diff)
self.fail(self._formatMessage(msg, standardMsg))
def assertDictContainsSubset(self, expected, actual, msg=None):
"""Checks whether actual is a superset of expected."""
missing = []
mismatched = []
for key, value in expected.iteritems():
if key not in actual:
missing.append(key)
elif value != actual[key]:
mismatched.append('%s, expected: %s, actual: %s' %
(safe_repr(key), safe_repr(value),
safe_repr(actual[key])))
if not (missing or mismatched):
return
standardMsg = ''
if missing:
standardMsg = 'Missing: %s' % ','.join(safe_repr(m) for m in
missing)
if mismatched:
if standardMsg:
standardMsg += '; '
standardMsg += 'Mismatched values: %s' % ','.join(mismatched)
self.fail(self._formatMessage(msg, standardMsg))
def assertItemsEqual(self, expected_seq, actual_seq, msg=None):
"""An unordered sequence specific comparison. It asserts that
expected_seq and actual_seq contain the same elements. It is
the equivalent of::
self.assertEqual(sorted(expected_seq), sorted(actual_seq))
Raises with an error message listing which elements of expected_seq
are missing from actual_seq and vice versa if any.
Asserts that each element has the same count in both sequences.
Example:
- [0, 1, 1] and [1, 0, 1] compare equal.
- [0, 0, 1] and [0, 1] compare unequal.
"""
try:
expected = sorted(expected_seq)
actual = sorted(actual_seq)
except TypeError:
# Unsortable items (example: set(), complex(), ...)
expected = list(expected_seq)
actual = list(actual_seq)
missing, unexpected = unorderable_list_difference(
expected, actual, ignore_duplicate=False
)
else:
return self.assertSequenceEqual(expected, actual, msg=msg)
errors = []
if missing:
errors.append('Expected, but missing:\n %s' %
safe_repr(missing))
if unexpected:
errors.append('Unexpected, but present:\n %s' %
safe_repr(unexpected))
if errors:
standardMsg = '\n'.join(errors)
self.fail(self._formatMessage(msg, standardMsg))
def assertMultiLineEqual(self, first, second, msg=None):
"""Assert that two multi-line strings are equal."""
self.assertTrue(isinstance(first, basestring), (
'First argument is not a string'))
self.assertTrue(isinstance(second, basestring), (
'Second argument is not a string'))
if first != second:
standardMsg = '%s != %s' % (safe_repr(first, True), safe_repr(second, True))
diff = '\n' + ''.join(difflib.ndiff(first.splitlines(True),
second.splitlines(True)))
standardMsg = self._truncateMessage(standardMsg, diff)
self.fail(self._formatMessage(msg, standardMsg))
def assertLess(self, a, b, msg=None):
"""Just like self.assertTrue(a < b), but with a nicer default message."""
if not a < b:
standardMsg = '%s not less than %s' % (safe_repr(a), safe_repr(b))
self.fail(self._formatMessage(msg, standardMsg))
def assertLessEqual(self, a, b, msg=None):
"""Just like self.assertTrue(a <= b), but with a nicer default message."""
if not a <= b:
standardMsg = '%s not less than or equal to %s' % (safe_repr(a), safe_repr(b))
self.fail(self._formatMessage(msg, standardMsg))
def assertGreater(self, a, b, msg=None):
"""Just like self.assertTrue(a > b), but with a nicer default message."""
if not a > b:
standardMsg = '%s not greater than %s' % (safe_repr(a), safe_repr(b))
self.fail(self._formatMessage(msg, standardMsg))
def assertGreaterEqual(self, a, b, msg=None):
"""Just like self.assertTrue(a >= b), but with a nicer default message."""
if not a >= b:
standardMsg = '%s not greater than or equal to %s' % (safe_repr(a), safe_repr(b))
self.fail(self._formatMessage(msg, standardMsg))
def assertIsNone(self, obj, msg=None):
"""Same as self.assertTrue(obj is None), with a nicer default message."""
if obj is not None:
standardMsg = '%s is not None' % (safe_repr(obj),)
self.fail(self._formatMessage(msg, standardMsg))
def assertIsNotNone(self, obj, msg=None):
"""Included for symmetry with assertIsNone."""
if obj is None:
standardMsg = 'unexpectedly None'
self.fail(self._formatMessage(msg, standardMsg))
def assertIsInstance(self, obj, cls, msg=None):
"""Same as self.assertTrue(isinstance(obj, cls)), with a nicer
default message."""
if not isinstance(obj, cls):
standardMsg = '%s is not an instance of %r' % (safe_repr(obj), cls)
self.fail(self._formatMessage(msg, standardMsg))
def assertNotIsInstance(self, obj, cls, msg=None):
"""Included for symmetry with assertIsInstance."""
if isinstance(obj, cls):
standardMsg = '%s is an instance of %r' % (safe_repr(obj), cls)
self.fail(self._formatMessage(msg, standardMsg))
def assertRaisesRegexp(self, expected_exception, expected_regexp,
callable_obj=None, *args, **kwargs):
"""Asserts that the message in a raised exception matches a regexp.
Args:
expected_exception: Exception class expected to be raised.
expected_regexp: Regexp (re pattern object or string) expected
to be found in error message.
callable_obj: Function to be called.
args: Extra args.
kwargs: Extra kwargs.
"""
if callable_obj is None:
return _AssertRaisesContext(expected_exception, self, expected_regexp)
try:
callable_obj(*args, **kwargs)
except expected_exception, exc_value:
if isinstance(expected_regexp, basestring):
expected_regexp = re.compile(expected_regexp)
if not expected_regexp.search(str(exc_value)):
raise self.failureException('"%s" does not match "%s"' %
(expected_regexp.pattern, str(exc_value)))
else:
if hasattr(expected_exception, '__name__'):
excName = expected_exception.__name__
else:
excName = str(expected_exception)
raise self.failureException, "%s not raised" % excName
def assertRegexpMatches(self, text, expected_regexp, msg=None):
"""Fail the test unless the text matches the regular expression."""
if isinstance(expected_regexp, basestring):
expected_regexp = re.compile(expected_regexp)
if not expected_regexp.search(text):
msg = msg or "Regexp didn't match"
msg = '%s: %r not found in %r' % (msg, expected_regexp.pattern, text)
raise self.failureException(msg)
def assertNotRegexpMatches(self, text, unexpected_regexp, msg=None):
"""Fail the test if the text matches the regular expression."""
if isinstance(unexpected_regexp, basestring):
unexpected_regexp = re.compile(unexpected_regexp)
match = unexpected_regexp.search(text)
if match:
msg = msg or "Regexp matched"
msg = '%s: %r matches %r in %r' % (msg,
text[match.start():match.end()],
unexpected_regexp.pattern,
text)
raise self.failureException(msg)
class FunctionTestCase(TestCase):
"""A test case that wraps a test function.
This is useful for slipping pre-existing test functions into the
unittest framework. Optionally, set-up and tidy-up functions can be
supplied. As with TestCase, the tidy-up ('tearDown') function will
always be called if the set-up ('setUp') function ran successfully.
"""
def __init__(self, testFunc, setUp=None, tearDown=None, description=None):
super(FunctionTestCase, self).__init__()
self._setUpFunc = setUp
self._tearDownFunc = tearDown
self._testFunc = testFunc
self._description = description
def setUp(self):
if self._setUpFunc is not None:
self._setUpFunc()
def tearDown(self):
if self._tearDownFunc is not None:
self._tearDownFunc()
def runTest(self):
self._testFunc()
def id(self):
return self._testFunc.__name__
def __eq__(self, other):
if not isinstance(other, self.__class__):
return NotImplemented
return self._setUpFunc == other._setUpFunc and \
self._tearDownFunc == other._tearDownFunc and \
self._testFunc == other._testFunc and \
self._description == other._description
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash((type(self), self._setUpFunc, self._tearDownFunc,
self._testFunc, self._description))
def __str__(self):
return "%s (%s)" % (strclass(self.__class__),
self._testFunc.__name__)
def __repr__(self):
return "<%s testFunc=%s>" % (strclass(self.__class__),
self._testFunc)
def shortDescription(self):
if self._description is not None:
return self._description
doc = self._testFunc.__doc__
return doc and doc.split("\n")[0].strip() or None
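# Hypothetical usage of FunctionTestCase (check_widget, make_widget,
# open_db and close_db are illustrative names):
#
#   def check_widget():
#       assert make_widget() is not None
#
#   case = FunctionTestCase(check_widget, setUp=open_db, tearDown=close_db,
#                           description="widget smoke test")
#   unittest.TextTestRunner().run(case)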
|
Tejal011089/huntercamp_erpnext | refs/heads/develop | erpnext/patches/v4_2/set_item_has_batch.py | 92 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
frappe.db.sql("update tabItem set has_batch_no = 0 where ifnull(has_batch_no, '') = ''")
frappe.db.sql("update tabItem set has_serial_no = 0 where ifnull(has_serial_no, '') = ''")
item_list = frappe.db.sql("""select name, has_batch_no, has_serial_no from tabItem
where is_stock_item = 1""", as_dict=1)
sle_count = get_sle_count()
sle_with_batch = get_sle_with_batch()
sle_with_serial = get_sle_with_serial()
batch_items = get_items_with_batch()
serialized_items = get_items_with_serial()
for d in item_list:
if d.has_batch_no == 1:
if d.name not in batch_items and sle_count.get(d.name) and sle_count.get(d.name) != sle_with_batch.get(d.name):
frappe.db.set_value("Item", d.name, "has_batch_no", 0)
else:
if d.name in batch_items or (sle_count.get(d.name) and sle_count.get(d.name) == sle_with_batch.get(d.name)):
frappe.db.set_value("Item", d.name, "has_batch_no", 1)
if d.has_serial_no == 1:
if d.name not in serialized_items and sle_count.get(d.name) and sle_count.get(d.name) != sle_with_serial.get(d.name):
frappe.db.set_value("Item", d.name, "has_serial_no", 0)
else:
if d.name in serialized_items or (sle_count.get(d.name) and sle_count.get(d.name) == sle_with_serial.get(d.name)):
frappe.db.set_value("Item", d.name, "has_serial_no", 1)
def get_sle_count():
sle_count = {}
for d in frappe.db.sql("""select item_code, count(name) as cnt from `tabStock Ledger Entry` group by item_code""", as_dict=1):
sle_count.setdefault(d.item_code, d.cnt)
return sle_count
def get_sle_with_batch():
sle_with_batch = {}
for d in frappe.db.sql("""select item_code, count(name) as cnt from `tabStock Ledger Entry`
where ifnull(batch_no, '') != '' group by item_code""", as_dict=1):
sle_with_batch.setdefault(d.item_code, d.cnt)
return sle_with_batch
def get_sle_with_serial():
sle_with_serial = {}
for d in frappe.db.sql("""select item_code, count(name) as cnt from `tabStock Ledger Entry`
where ifnull(serial_no, '') != '' group by item_code""", as_dict=1):
sle_with_serial.setdefault(d.item_code, d.cnt)
return sle_with_serial
def get_items_with_batch():
return frappe.db.sql_list("select item from tabBatch")
def get_items_with_serial():
return frappe.db.sql_list("select item_code from `tabSerial No`")
|
NullSoldier/django | refs/heads/master | tests/check_framework/test_model_field_deprecation.py | 322 | from django.core import checks
from django.db import models
from django.test import SimpleTestCase
from .tests import IsolateModelsMixin
class TestDeprecatedField(IsolateModelsMixin, SimpleTestCase):
def test_default_details(self):
class MyField(models.Field):
system_check_deprecated_details = {}
class Model(models.Model):
name = MyField()
model = Model()
self.assertEqual(model.check(), [
checks.Warning(
msg='MyField has been deprecated.',
hint=None,
obj=Model._meta.get_field('name'),
id='fields.WXXX',
)
])
def test_user_specified_details(self):
class MyField(models.Field):
system_check_deprecated_details = {
'msg': 'This field is deprecated and will be removed soon.',
'hint': 'Use something else.',
'id': 'fields.W999',
}
class Model(models.Model):
name = MyField()
model = Model()
self.assertEqual(model.check(), [
checks.Warning(
msg='This field is deprecated and will be removed soon.',
hint='Use something else.',
obj=Model._meta.get_field('name'),
id='fields.W999',
)
])
class TestRemovedField(IsolateModelsMixin, SimpleTestCase):
def test_default_details(self):
class MyField(models.Field):
system_check_removed_details = {}
class Model(models.Model):
name = MyField()
model = Model()
self.assertEqual(model.check(), [
checks.Error(
msg='MyField has been removed except for support in historical migrations.',
hint=None,
obj=Model._meta.get_field('name'),
id='fields.EXXX',
)
])
def test_user_specified_details(self):
class MyField(models.Field):
system_check_removed_details = {
'msg': 'Support for this field is gone.',
'hint': 'Use something else.',
'id': 'fields.E999',
}
class Model(models.Model):
name = MyField()
model = Model()
self.assertEqual(model.check(), [
checks.Error(
msg='Support for this field is gone.',
hint='Use something else.',
obj=Model._meta.get_field('name'),
id='fields.E999',
)
])
|
djangorussia/django-1.3-branch | refs/heads/master | tests/regressiontests/sites_framework/models.py | 133 | from django.contrib.sites.managers import CurrentSiteManager
from django.contrib.sites.models import Site
from django.db import models
class AbstractArticle(models.Model):
title = models.CharField(max_length=50)
objects = models.Manager()
on_site = CurrentSiteManager()
class Meta:
abstract = True
def __unicode__(self):
return self.title
class SyndicatedArticle(AbstractArticle):
sites = models.ManyToManyField(Site)
class ExclusiveArticle(AbstractArticle):
site = models.ForeignKey(Site)
class CustomArticle(AbstractArticle):
places_this_article_should_appear = models.ForeignKey(Site)
objects = models.Manager()
on_site = CurrentSiteManager("places_this_article_should_appear")
class InvalidArticle(AbstractArticle):
site = models.ForeignKey(Site)
objects = models.Manager()
on_site = CurrentSiteManager("places_this_article_should_appear")
class ConfusedArticle(AbstractArticle):
site = models.IntegerField()
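# Illustrative queries (not part of the original file): CurrentSiteManager
# filters on the current settings.SITE_ID, so assuming SITE_ID = 1:
#
#   ExclusiveArticle.objects.all()  # every article, regardless of site
#   ExclusiveArticle.on_site.all()  # only articles where site_id == 1
#   CustomArticle.on_site.all()     # same filter, applied through the
#                                   # renamed FK passed to CurrentSiteManager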
|
mkieszek/odoo | refs/heads/master | addons/marketing_campaign/__init__.py | 47 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import marketing_campaign
import report
|
hatwar/focal-erpnext | refs/heads/develop | erpnext/setup/page/setup_wizard/setup_wizard.py | 4 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe, json
from frappe.utils import cstr, flt, getdate
from frappe import _
from frappe.utils.file_manager import save_file
from frappe.translate import set_default_language, get_dict, get_lang_dict, send_translations
from frappe.country_info import get_country_info
from frappe.utils.nestedset import get_root_of
from default_website import website_maker
import install_fixtures
@frappe.whitelist()
def setup_account(args=None):
try:
frappe.clear_cache()
if frappe.db.sql("select name from tabCompany"):
frappe.throw(_("Setup Already Complete!!"))
if not args:
args = frappe.local.form_dict
if isinstance(args, basestring):
args = json.loads(args)
args = frappe._dict(args)
if args.language != "english":
set_default_language(args.language)
install_fixtures.install(args.get("country"))
update_user_name(args)
frappe.local.message_log = []
create_fiscal_year_and_company(args)
frappe.local.message_log = []
set_defaults(args)
frappe.local.message_log = []
create_territories()
frappe.local.message_log = []
create_price_lists(args)
frappe.local.message_log = []
create_feed_and_todo()
frappe.local.message_log = []
create_email_digest()
frappe.local.message_log = []
create_letter_head(args)
frappe.local.message_log = []
create_taxes(args)
frappe.local.message_log = []
create_items(args)
frappe.local.message_log = []
create_customers(args)
frappe.local.message_log = []
create_suppliers(args)
frappe.local.message_log = []
frappe.db.set_default('desktop:home_page', 'desktop')
website_maker(args.company_name.strip(), args.company_tagline, args.name)
create_logo(args)
frappe.clear_cache()
frappe.db.commit()
except:
if args:
traceback = frappe.get_traceback()
for hook in frappe.get_hooks("setup_wizard_exception"):
frappe.get_attr(hook)(traceback, args)
raise
else:
for hook in frappe.get_hooks("setup_wizard_success"):
frappe.get_attr(hook)(args)
def update_user_name(args):
if args.get("email"):
args['name'] = args.get("email")
frappe.flags.mute_emails = True
frappe.get_doc({
"doctype":"User",
"email": args.get("email"),
"first_name": args.get("first_name"),
"last_name": args.get("last_name")
}).insert()
frappe.flags.mute_emails = False
from frappe.auth import _update_password
_update_password(args.get("email"), args.get("password"))
else:
args['name'] = frappe.session.user
# Update User
if not args.get('last_name') or args.get('last_name')=='None':
args['last_name'] = None
frappe.db.sql("""update `tabUser` SET first_name=%(first_name)s,
last_name=%(last_name)s WHERE name=%(name)s""", args)
if args.get("attach_user"):
attach_user = args.get("attach_user").split(",")
if len(attach_user)==3:
filename, filetype, content = attach_user
fileurl = save_file(filename, content, "User", args.get("name"), decode=True).file_url
frappe.db.set_value("User", args.get("name"), "user_image", fileurl)
add_all_roles_to(args.get("name"))
def create_fiscal_year_and_company(args):
curr_fiscal_year = get_fy_details(args.get('fy_start_date'), args.get('fy_end_date'))
frappe.get_doc({
"doctype":"Fiscal Year",
'year': curr_fiscal_year,
'year_start_date': args.get('fy_start_date'),
'year_end_date': args.get('fy_end_date'),
}).insert()
# Company
frappe.get_doc({
"doctype":"Company",
'domain': args.get("industry"),
'company_name':args.get('company_name').strip(),
'abbr':args.get('company_abbr'),
'default_currency':args.get('currency'),
'country': args.get('country'),
		'chart_of_accounts': args.get('chart_of_accounts'),
}).insert()
args["curr_fiscal_year"] = curr_fiscal_year
def create_price_lists(args):
for pl_type, pl_name in (("Selling", _("Standard Selling")), ("Buying", _("Standard Buying"))):
frappe.get_doc({
"doctype": "Price List",
"price_list_name": pl_name,
"enabled": 1,
"buying": 1 if pl_type == "Buying" else 0,
"selling": 1 if pl_type == "Selling" else 0,
"currency": args["currency"],
"valid_for_territories": [{
"territory": get_root_of("Territory")
}]
}).insert()
def set_defaults(args):
# enable default currency
frappe.db.set_value("Currency", args.get("currency"), "enabled", 1)
global_defaults = frappe.get_doc("Global Defaults", "Global Defaults")
global_defaults.update({
'current_fiscal_year': args.curr_fiscal_year,
'default_currency': args.get('currency'),
'default_company':args.get('company_name').strip(),
"country": args.get("country"),
})
global_defaults.save()
number_format = get_country_info(args.get("country")).get("number_format", "#,###.##")
# replace these as float number formats, as they have 0 precision
# and are currency number formats and not for floats
if number_format=="#.###":
number_format = "#.###,##"
elif number_format=="#,###":
number_format = "#,###.##"
system_settings = frappe.get_doc("System Settings", "System Settings")
system_settings.update({
"language": args.get("language"),
"time_zone": args.get("timezone"),
"float_precision": 3,
'date_format': frappe.db.get_value("Country", args.get("country"), "date_format"),
'number_format': number_format,
'enable_scheduler': 1
})
system_settings.save()
accounts_settings = frappe.get_doc("Accounts Settings")
accounts_settings.auto_accounting_for_stock = 1
accounts_settings.save()
stock_settings = frappe.get_doc("Stock Settings")
stock_settings.item_naming_by = "Item Code"
stock_settings.valuation_method = "FIFO"
stock_settings.stock_uom = _("Nos")
stock_settings.auto_indent = 1
stock_settings.save()
selling_settings = frappe.get_doc("Selling Settings")
selling_settings.cust_master_name = "Customer Name"
selling_settings.so_required = "No"
selling_settings.dn_required = "No"
selling_settings.save()
buying_settings = frappe.get_doc("Buying Settings")
buying_settings.supp_master_name = "Supplier Name"
buying_settings.po_required = "No"
buying_settings.pr_required = "No"
buying_settings.maintain_same_rate = 1
buying_settings.save()
notification_control = frappe.get_doc("Notification Control")
notification_control.quotation = 1
notification_control.sales_invoice = 1
notification_control.purchase_order = 1
notification_control.save()
hr_settings = frappe.get_doc("HR Settings")
hr_settings.emp_created_by = "Naming Series"
hr_settings.save()
def create_feed_and_todo():
"""update activty feed and create todo for creation of item, customer, vendor"""
from erpnext.home import make_feed
make_feed('Comment', 'ToDo', '', frappe.session['user'],
		'ERPNext Setup Complete!', '#6B24B3')
def create_email_digest():
from frappe.utils.user import get_system_managers
system_managers = get_system_managers(only_name=True)
if not system_managers:
return
companies = frappe.db.sql_list("select name FROM `tabCompany`")
for company in companies:
if not frappe.db.exists("Email Digest", "Default Weekly Digest - " + company):
edigest = frappe.get_doc({
"doctype": "Email Digest",
"name": "Default Weekly Digest - " + company,
"company": company,
"frequency": "Weekly",
"recipient_list": "\n".join(system_managers)
})
for fieldname in edigest.meta.get("fields", {"fieldtype": "Check"}):
if fieldname != "scheduler_errors":
edigest.set(fieldname, 1)
edigest.insert()
# scheduler errors digest
if companies:
edigest = frappe.new_doc("Email Digest")
edigest.update({
"name": "Scheduler Errors",
"company": companies[0],
"frequency": "Daily",
"recipient_list": "\n".join(system_managers),
"scheduler_errors": 1,
"enabled": 1
})
edigest.insert()
def get_fy_details(fy_start_date, fy_end_date):
start_year = getdate(fy_start_date).year
if start_year == getdate(fy_end_date).year:
fy = cstr(start_year)
else:
fy = cstr(start_year) + '-' + cstr(start_year + 1)
return fy
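# Worked example of the labelling above (dates illustrative): a fiscal year
# from 2014-04-01 to 2015-03-31 is labelled "2014-2015", while a calendar
# year from 2014-01-01 to 2014-12-31 is labelled simply "2014".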
def create_taxes(args):
for i in xrange(1,6):
if args.get("tax_" + str(i)):
# replace % in case someone also enters the % symbol
tax_rate = (args.get("tax_rate_" + str(i)) or "").replace("%", "")
try:
frappe.get_doc({
"doctype":"Account",
"company": args.get("company_name").strip(),
"parent_account": _("Duties and Taxes") + " - " + args.get("company_abbr"),
"account_name": args.get("tax_" + str(i)),
"group_or_ledger": "Ledger",
"report_type": "Balance Sheet",
"account_type": "Tax",
"tax_rate": flt(tax_rate) if tax_rate else None
}).insert()
except frappe.NameError, e:
if e.args[2][0]==1062:
pass
else:
raise
def create_items(args):
for i in xrange(1,6):
item = args.get("item_" + str(i))
if item:
item_group = args.get("item_group_" + str(i))
is_sales_item = args.get("is_sales_item_" + str(i))
is_purchase_item = args.get("is_purchase_item_" + str(i))
is_stock_item = item_group!=_("Services")
default_warehouse = ""
if is_stock_item:
if is_sales_item:
default_warehouse = _("Finished Goods") + " - " + args.get("company_abbr")
else:
default_warehouse = _("Stores") + " - " + args.get("company_abbr")
frappe.get_doc({
"doctype":"Item",
"item_code": item,
"item_name": item,
"description": item,
"is_sales_item": "Yes" if is_sales_item else "No",
"is_purchase_item": "Yes" if is_purchase_item else "No",
"show_in_website": 1,
"is_stock_item": is_stock_item and "Yes" or "No",
"item_group": item_group,
"stock_uom": args.get("item_uom_" + str(i)),
"default_warehouse": default_warehouse
}).insert()
if args.get("item_img_" + str(i)):
item_image = args.get("item_img_" + str(i)).split(",")
if len(item_image)==3:
filename, filetype, content = item_image
fileurl = save_file(filename, content, "Item", item, decode=True).file_url
frappe.db.set_value("Item", item, "image", fileurl)
def create_customers(args):
for i in xrange(1,6):
customer = args.get("customer_" + str(i))
if customer:
frappe.get_doc({
"doctype":"Customer",
"customer_name": customer,
"customer_type": "Company",
"customer_group": _("Commercial"),
"territory": args.get("country"),
"company": args.get("company_name").strip()
}).insert()
if args.get("customer_contact_" + str(i)):
contact = args.get("customer_contact_" + str(i)).split(" ")
frappe.get_doc({
"doctype":"Contact",
"customer": customer,
"first_name":contact[0],
"last_name": len(contact) > 1 and contact[1] or ""
}).insert()
def create_suppliers(args):
for i in xrange(1,6):
supplier = args.get("supplier_" + str(i))
if supplier:
frappe.get_doc({
"doctype":"Supplier",
"supplier_name": supplier,
"supplier_type": _("Local"),
"company": args.get("company_name").strip()
}).insert()
if args.get("supplier_contact_" + str(i)):
contact = args.get("supplier_contact_" + str(i)).split(" ")
frappe.get_doc({
"doctype":"Contact",
"supplier": supplier,
"first_name":contact[0],
"last_name": len(contact) > 1 and contact[1] or ""
}).insert()
def create_letter_head(args):
if args.get("attach_letterhead"):
frappe.get_doc({
"doctype":"Letter Head",
"letter_head_name": _("Standard"),
"is_default": 1
}).insert()
attach_letterhead = args.get("attach_letterhead").split(",")
if len(attach_letterhead)==3:
filename, filetype, content = attach_letterhead
fileurl = save_file(filename, content, "Letter Head", _("Standard"), decode=True).file_url
frappe.db.set_value("Letter Head", _("Standard"), "content", "<img src='%s' style='max-width: 100%%;'>" % fileurl)
def create_logo(args):
if args.get("attach_logo"):
attach_logo = args.get("attach_logo").split(",")
if len(attach_logo)==3:
filename, filetype, content = attach_logo
fileurl = save_file(filename, content, "Website Settings", "Website Settings",
decode=True).file_url
frappe.db.set_value("Website Settings", "Website Settings", "banner_html",
"<img src='%s' style='max-width: 100%%;'>" % fileurl)
def add_all_roles_to(name):
user = frappe.get_doc("User", name)
for role in frappe.db.sql("""select name from tabRole"""):
if role[0] not in ["Administrator", "Guest", "All", "Customer", "Supplier", "Partner", "Employee"]:
d = user.append("user_roles")
d.role = role[0]
user.save()
def create_territories():
"""create two default territories, one for home country and one named Rest of the World"""
from frappe.utils.nestedset import get_root_of
country = frappe.db.get_default("country")
root_territory = get_root_of("Territory")
for name in (country, _("Rest Of The World")):
if name and not frappe.db.exists("Territory", name):
frappe.get_doc({
"doctype": "Territory",
"territory_name": name.replace("'", ""),
"parent_territory": root_territory,
"is_group": "No"
}).insert()
@frappe.whitelist()
def load_messages(language):
frappe.clear_cache()
lang = get_lang_dict()[language]
frappe.local.lang = lang
m = get_dict("page", "setup-wizard")
m.update(get_dict("boot"))
send_translations(m)
return lang
|