repo_name: stringlengths (5-100)
path: stringlengths (4-294)
copies: stringlengths (1-5)
size: stringlengths (4-7)
content: stringlengths (666-1M)
license: stringclasses (15 values)
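The fields above describe a code-corpus table with one row per source file; the rows follow below. As a minimal sketch of how such a table might be loaded and filtered, assuming a local Parquet copy of the rows (the file name here is hypothetical):

# A hedged sketch: "code_corpus.parquet" is a hypothetical local copy of the rows below.
from datasets import load_dataset

ds = load_dataset("parquet", data_files="code_corpus.parquet", split="train")

# Keep only small, MIT-licensed files; "size" is stored as a string, hence int().
small_mit = ds.filter(lambda row: row["license"] == "mit" and int(row["size"]) < 10000)
print(small_mit[0]["repo_name"], small_mit[0]["path"])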
rsouflaki/RallyRequirements
MURequirement.py
1
1127
class MURequirement:
    """A MU requirement"""

    def __init__(self, id, description, dateAdded):
        self.id = id
        self.description = description
        self.dateAdded = dateAdded
        #self.tests = tests
        self.tests = []
        self.projects = []

    # A project name is required to build MUProjectData, since
    # printRequirement() reads project.projectName back out.
    def validForProject(self, projectName, storyId):
        self.projects.append(MUProjectData(self.id, projectName, storyId))

    def printRequirement(self):
        print "Requirement id is: " + self.id + ", Description: " + self.description + ", Date added: " + str(self.dateAdded)
        print "->Valid for " + str(len(self.projects)) + " projects:"
        for project in self.projects:
            print " " + project.projectName + ", with User Story:" + project.storyId
        print "->with " + str(len(self.tests)) + " tests:"
        for test in self.tests:
            print " Requirement id: " + self.id + ", Test id: " + test.id


class MUProjectData:
    """A relationship between a requirement and a project"""

    def __init__(self, reqId, projectName, storyId):
        self.reqId = reqId
        self.storyId = storyId
        self.projectName = projectName
mit
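A brief usage sketch of the corrected MURequirement class above; the requirement id, project name, and story id are made up for illustration, and the class's Python 2 print statements are kept as-is:

from datetime import date

req = MURequirement('REQ-1', 'Login must time out after 15 minutes', date.today())
req.validForProject('ProjectX', 'US1234')  # hypothetical project name and story id
req.printRequirement()  # prints the requirement, its projects, and its tests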
Teino1978-Corp/pre-commit
pre_commit/commands/run.py
4
5763
from __future__ import print_function from __future__ import unicode_literals import logging import os import sys from pre_commit import color from pre_commit import git from pre_commit.logging_handler import LoggingHandler from pre_commit.output import get_hook_message from pre_commit.output import sys_stdout_write_wrapper from pre_commit.staged_files_only import staged_files_only from pre_commit.util import cmd_output from pre_commit.util import noop_context logger = logging.getLogger('pre_commit') def _get_skips(environ): skips = environ.get('SKIP', '') return set(skip.strip() for skip in skips.split(',') if skip.strip()) def _hook_msg_start(hook, verbose): return '{0}{1}'.format( '[{0}] '.format(hook['id']) if verbose else '', hook['name'], ) def _print_no_files_skipped(hook, write, args): write(get_hook_message( _hook_msg_start(hook, args.verbose), postfix='(no files to check) ', end_msg='Skipped', end_color=color.TURQUOISE, use_color=args.color, )) def _print_user_skipped(hook, write, args): write(get_hook_message( _hook_msg_start(hook, args.verbose), end_msg='Skipped', end_color=color.YELLOW, use_color=args.color, )) def get_changed_files(new, old): return cmd_output( 'git', 'diff', '--name-only', '{0}..{1}'.format(old, new), )[1].splitlines() def get_filenames(args, include_expr, exclude_expr): if args.origin and args.source: getter = git.get_files_matching( lambda: get_changed_files(args.origin, args.source), ) elif args.files: getter = git.get_files_matching(lambda: args.files) elif args.all_files: getter = git.get_all_files_matching elif git.is_in_merge_conflict(): getter = git.get_conflicted_files_matching else: getter = git.get_staged_files_matching return getter(include_expr, exclude_expr) def _run_single_hook(hook, repo, args, write, skips=frozenset()): filenames = get_filenames(args, hook['files'], hook['exclude']) if hook['id'] in skips: _print_user_skipped(hook, write, args) return 0 elif not filenames: _print_no_files_skipped(hook, write, args) return 0 # Print the hook and the dots first in case the hook takes hella long to # run. write(get_hook_message(_hook_msg_start(hook, args.verbose), end_len=6)) sys.stdout.flush() retcode, stdout, stderr = repo.run_hook(hook, filenames) if retcode != hook['expected_return_value']: retcode = 1 print_color = color.RED pass_fail = 'Failed' else: retcode = 0 print_color = color.GREEN pass_fail = 'Passed' write(color.format_color(pass_fail, print_color, args.color) + '\n') if (stdout or stderr) and (retcode or args.verbose): write('hookid: {0}\n'.format(hook['id'])) write('\n') for output in (stdout, stderr): assert type(output) is bytes, type(output) if output.strip(): write(output.strip() + b'\n') write('\n') return retcode def _run_hooks(repo_hooks, args, write, environ): """Actually run the hooks.""" skips = _get_skips(environ) retval = 0 for repo, hook in repo_hooks: retval |= _run_single_hook(hook, repo, args, write, skips) return retval def get_repo_hooks(runner): for repo in runner.repositories: for _, hook in repo.hooks: yield (repo, hook) def _has_unmerged_paths(runner): _, stdout, _ = runner.cmd_runner.run(['git', 'ls-files', '--unmerged']) return bool(stdout.strip()) def _has_unstaged_config(runner): retcode, _, _ = runner.cmd_runner.run( ('git', 'diff', '--exit-code', runner.config_file_path), retcode=None, ) # be explicit, other git errors don't mean it has an unstaged config. 
return retcode == 1 def run(runner, args, write=sys_stdout_write_wrapper, environ=os.environ): no_stash = args.no_stash or args.all_files or bool(args.files) # Set up our logging handler logger.addHandler(LoggingHandler(args.color, write=write)) logger.setLevel(logging.INFO) # Check if we have unresolved merge conflict files and fail fast. if _has_unmerged_paths(runner): logger.error('Unmerged files. Resolve before committing.') return 1 if bool(args.source) != bool(args.origin): logger.error('Specify both --origin and --source.') return 1 if _has_unstaged_config(runner) and not no_stash: if args.allow_unstaged_config: logger.warn( 'You have an unstaged config file and have specified the ' '--allow-unstaged-config option.\n' 'Note that your config will be stashed before the config is ' 'parsed unless --no-stash is specified.', ) else: logger.error( 'Your .pre-commit-config.yaml is unstaged.\n' '`git add .pre-commit-config.yaml` to fix this.\n' 'Run pre-commit with --allow-unstaged-config to silence this.' ) return 1 if no_stash: ctx = noop_context() else: ctx = staged_files_only(runner.cmd_runner) with ctx: repo_hooks = list(get_repo_hooks(runner)) if args.hook: repo_hooks = [ (repo, hook) for repo, hook in repo_hooks if hook['id'] == args.hook ] if not repo_hooks: write('No hook with id `{0}`\n'.format(args.hook)) return 1 return _run_hooks(repo_hooks, args, write, environ)
mit
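The _get_skips helper in run.py above reads the SKIP environment variable as a comma-separated list of hook ids, which _run_single_hook then consults. The same parsing as a standalone sketch (the hook ids are illustrative):

import os

def get_skips(environ=os.environ):
    # 'SKIP=flake8, my-hook' yields {'flake8', 'my-hook'}; blank entries are dropped.
    skips = environ.get('SKIP', '')
    return set(skip.strip() for skip in skips.split(',') if skip.strip())

print(get_skips({'SKIP': 'flake8, ,my-hook'}))  # set(['flake8', 'my-hook'])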
datalogics/scons
test/DVIPDF/makeindex.py
2
1957
#!/usr/bin/env python # # __COPYRIGHT__ # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. # __revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" import TestSCons test = TestSCons.TestSCons() dvipdf = test.where_is('dvipdf') tex = test.where_is('tex') if not dvipdf or not tex: test.skip_test('Could not find dvipdf or tex; skipping test(s).\n') test.write('SConstruct', """ import os env = Environment(ENV = { 'PATH' : os.environ['PATH'] }) dvipdf = env.Dictionary('DVIPDF') env.PDF(target = 'foo.pdf', source = env.DVI(target = 'foo.dvi', source = 'foo.tex')) """) test.write('foo.tex', r""" \documentclass{article} \usepackage{makeidx} \makeindex \begin{document} \section{Test 1} I would like to \index{index} this. \section{test 2} I'll index \index{this} as well. \printindex \end{document} """) test.run(arguments = 'foo.pdf', stderr = None) test.must_exist(test.workpath('foo.pdf')) test.pass_test()
mit
sravanimantha/yaml-cpp.old-api
test/gmock-1.7.0/gtest/xcode/Scripts/versiongenerate.py
3088
4536
#!/usr/bin/env python # # Copyright 2008, Google Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """A script to prepare version information for use in the gtest Info.plist file. This script extracts the version information from the configure.ac file and uses it to generate a header file containing the same information. The #defines in this header file will be included during the generation of the Info.plist of the framework, giving the correct value to the version shown in the Finder. This script makes the following assumptions (these are faults of the script, not problems with Autoconf): 1. The AC_INIT macro will be contained within the first 1024 characters of configure.ac 2. The version string will be 3 integers separated by periods and will be surrounded by square brackets, "[" and "]" (e.g. [1.0.1]). The first segment represents the major version, the second represents the minor version and the third represents the fix version. 3. No ")" character exists between the opening "(" and closing ")" of AC_INIT, including in comments and character strings. """ import sys import re # Read the command line arguments (input and output directories) if (len(sys.argv) < 3): print "Usage: versiongenerate.py input_dir output_dir" sys.exit(1) else: input_dir = sys.argv[1] output_dir = sys.argv[2] # Read the first 1024 characters of the configure.ac file config_file = open("%s/configure.ac" % input_dir, 'r') buffer_size = 1024 opening_string = config_file.read(buffer_size) config_file.close() # Extract the version string from the AC_INIT macro # The following version_expression means: # Extract three integers separated by periods and surrounded by square # brackets (e.g. "[1.0.1]") between "AC_INIT(" and ")". Do not be greedy # (*? is the non-greedy flag) since that would pull in everything between # the first "(" and the last ")" in the file.
version_expression = re.compile(r"AC_INIT\(.*?\[(\d+)\.(\d+)\.(\d+)\].*?\)", re.DOTALL) version_values = version_expression.search(opening_string) major_version = version_values.group(1) minor_version = version_values.group(2) fix_version = version_values.group(3) # Write the version information to a header file to be included in the # Info.plist file. file_data = """// // DO NOT MODIFY THIS FILE (but you can delete it) // // This file is autogenerated by the versiongenerate.py script. This script // is executed in a "Run Script" build phase when creating gtest.framework. This // header file is not used during compilation of C-source. Rather, it simply // defines some version strings for substitution in the Info.plist. Because of // this, we are not restricted to C-syntax nor are we using include guards. // #define GTEST_VERSIONINFO_SHORT %s.%s #define GTEST_VERSIONINFO_LONG %s.%s.%s """ % (major_version, minor_version, major_version, minor_version, fix_version) version_file = open("%s/Version.h" % output_dir, 'w') version_file.write(file_data) version_file.close()
mit
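A quick demonstration of the AC_INIT extraction above against a made-up configure.ac fragment (the project name and address are placeholders):

import re

sample = 'AC_INIT([Sample Project], [1.7.0], [bugs@example.com])'
version_expression = re.compile(r"AC_INIT\(.*?\[(\d+)\.(\d+)\.(\d+)\].*?\)", re.DOTALL)
match = version_expression.search(sample)
print(match.group(1), match.group(2), match.group(3))  # 1 7 0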
kxliugang/edx-platform
openedx/core/lib/api/permissions.py
74
3045
from django.conf import settings from rest_framework import permissions from django.http import Http404 from student.roles import CourseStaffRole class ApiKeyHeaderPermission(permissions.BasePermission): def has_permission(self, request, view): """ Check for permissions by matching the configured API key and header If settings.DEBUG is True and settings.EDX_API_KEY is not set or None, then allow the request. Otherwise, allow the request if and only if settings.EDX_API_KEY is set and the X-Edx-Api-Key HTTP header is present in the request and matches the setting. """ api_key = getattr(settings, "EDX_API_KEY", None) return ( (settings.DEBUG and api_key is None) or (api_key is not None and request.META.get("HTTP_X_EDX_API_KEY") == api_key) ) class ApiKeyHeaderPermissionIsAuthenticated(ApiKeyHeaderPermission, permissions.IsAuthenticated): """ Allow someone to access the view if they have the API key OR they are authenticated. See ApiKeyHeaderPermission for more information on how the API key portion is implemented. """ def has_permission(self, request, view): #TODO We can optimize this later on when we know which of these methods is used more often. api_permissions = ApiKeyHeaderPermission.has_permission(self, request, view) is_authenticated_permissions = permissions.IsAuthenticated.has_permission(self, request, view) return api_permissions or is_authenticated_permissions class IsUserInUrl(permissions.BasePermission): """ Permission that checks to see if the request user matches the user in the URL. """ def has_permission(self, request, view): """ Returns true if the current request is by the user themselves. Note: a 404 is returned for non-staff instead of a 403. This is to prevent users from being able to detect the existence of accounts. """ url_username = request.parser_context.get('kwargs', {}).get('username', '') if request.user.username.lower() != url_username.lower(): if request.user.is_staff: return False # staff gets 403 raise Http404() return True class IsUserInUrlOrStaff(IsUserInUrl): """ Permission that checks to see if the request user matches the user in the URL or has is_staff access. """ def has_permission(self, request, view): if request.user.is_staff: return True return super(IsUserInUrlOrStaff, self).has_permission(request, view) class IsStaffOrReadOnly(permissions.BasePermission): """Permission that checks to see if the user is global or course staff, permitting only read-only access if they are not. """ def has_object_permission(self, request, view, obj): return (request.user.is_staff or CourseStaffRole(obj.course_id).has_user(request.user) or request.method in permissions.SAFE_METHODS)
agpl-3.0
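A hedged sketch of attaching one of the permission classes above to a Django REST Framework view; the view itself is hypothetical, while the import path follows the file's location in the record:

from rest_framework.views import APIView
from rest_framework.response import Response

from openedx.core.lib.api.permissions import ApiKeyHeaderPermissionIsAuthenticated

class AccountView(APIView):  # hypothetical view, for illustration only
    # Allow callers presenting a matching X-Edx-Api-Key header OR authenticated users.
    permission_classes = (ApiKeyHeaderPermissionIsAuthenticated,)

    def get(self, request):
        return Response({'username': request.user.username})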
ehsan/airmozilla
airmozilla/suggest/tests/test_helpers.py
12
2414
from nose.tools import eq_ from django.test import TestCase from django.contrib.auth.models import User from django.utils import timezone from funfactory.urlresolvers import reverse from airmozilla.suggest.helpers import ( next_url, state_description, truncate_url ) from airmozilla.main.models import SuggestedEvent, Event, Location class TestStateHelpers(TestCase): def setUp(self): self.user = User.objects.create_user('nigel', 'n@live.in', 'secret') def test_get_getting_state(self): event = SuggestedEvent.objects.create( user=self.user, title='Cheese!', slug='cheese' ) url = next_url(event) eq_(url, reverse('suggest:description', args=(event.pk,))) description = state_description(event) eq_(description, 'Description not entered') event.description = 'Some description' event.save() url = next_url(event) eq_(url, reverse('suggest:details', args=(event.pk,))) description = state_description(event) eq_(description, 'Details missing') event.start_time = timezone.now() event.location = Location.objects.create( name='Mountain View', timezone='US/Pacific', ) event.privacy = Event.PRIVACY_PUBLIC event.save() url = next_url(event) eq_(url, reverse('suggest:placeholder', args=(event.pk,))) description = state_description(event) eq_(description, 'No image') event.placeholder_img = 'some/path.png' event.save() url = next_url(event) eq_(url, reverse('suggest:summary', args=(event.pk,))) description = state_description(event) eq_(description, 'Not yet submitted') event.submitted = timezone.now() event.save() url = next_url(event) eq_(url, reverse('suggest:summary', args=(event.pk,))) description = state_description(event) eq_(description, 'Submitted') class TestTruncateURL(TestCase): def test_truncate_short(self): url = 'http://www.peterbe.com' result = truncate_url(url, 30) eq_(result, url) assert len(result) <= 30 def test_truncate_long(self): url = 'http://www.peterbe.com' result = truncate_url(url, 20) expect = url[:10] + u'\u2026' + url[-10:] eq_(result, expect)
bsd-3-clause
arborh/tensorflow
tensorflow/python/ops/gradient_checker_test.py
9
11694
# Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for tf.compat.v1.test.compute_gradient and tf.compute_gradient_error.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np from tensorflow.python.framework import constant_op from tensorflow.python.framework import dtypes from tensorflow.python.framework import ops from tensorflow.python.framework import test_util from tensorflow.python.ops import array_ops from tensorflow.python.ops import gradient_checker from tensorflow.python.ops import math_ops from tensorflow.python.ops import nn_ops import tensorflow.python.ops.nn_grad # pylint: disable=unused-import from tensorflow.python.platform import test from tensorflow.python.platform import tf_logging @ops.RegisterGradient("BadGrad") def _bad_grad(unused_op, grad): """A gradient that returns the wrong shape.""" return array_ops.transpose(grad) @ops.RegisterGradient("NaNGrad") def _nan_grad(unused_op, grad): """A gradient that returns NaN.""" return np.nan * grad class GradientCheckerTest(test.TestCase): @test_util.run_deprecated_v1 def testAddSimple(self): np.random.seed(1) # Fix seed to avoid flakiness with self.session(use_gpu=False): # a test case for Add operation size = (2, 3) x1 = constant_op.constant(2.0, shape=size, name="x1") x2 = constant_op.constant(3.0, shape=size, name="x2") y = math_ops.add(x1, x2, name="y") # checking gradients for x1 error = gradient_checker.compute_gradient_error(x1, size, y, size) tf_logging.info("x1 error = %f", error) self.assertLess(error, 1e-4) @test_util.run_deprecated_v1 def testAddSimpleGPU(self): np.random.seed(2) # Fix seed to avoid flakiness with self.session(use_gpu=True): # a test case for Add operation size = (2, 3) x1 = constant_op.constant(2.0, shape=size, name="x1") x2 = constant_op.constant(3.0, shape=size, name="x2") y = math_ops.add(x1, x2, name="y") # checking gradients for x1 error = gradient_checker.compute_gradient_error(x1, size, y, size) tf_logging.info("x1 error = %f", error) self.assertLess(error, 1e-4) @test_util.run_deprecated_v1 def testAddCustomized(self): np.random.seed(3) # Fix seed to avoid flakiness with self.cached_session(): # a test case for Add operation size = (2, 3) x1 = constant_op.constant( 2.0, shape=size, dtype=dtypes.float64, name="x1") x2 = constant_op.constant( 3.0, shape=size, dtype=dtypes.float64, name="x2") y = math_ops.add(x1, x2, name="y") # checking gradients for x2 using a special init_value and delta x_init_value = np.asarray(np.arange(6, dtype=np.float64).reshape(2, 3)) error = gradient_checker.compute_gradient_error( x2, size, y, size, x_init_value=x_init_value, delta=1e-2) tf_logging.info("x2 error = %f", error) self.assertLess(error, 1e-10) @test_util.run_deprecated_v1 def testGather(self): np.random.seed(4) # Fix seed to avoid flakiness with self.cached_session(): p_shape = (4, 2) p_size =
8 index_values = [1, 3] y_shape = [2, 2] params = constant_op.constant( np.arange(p_size).astype(np.float), shape=p_shape, name="p") indices = constant_op.constant(index_values, name="i") y = array_ops.gather(params, indices, name="y") error = gradient_checker.compute_gradient_error(params, p_shape, y, y_shape) tf_logging.info("gather error = %f", error) self.assertLess(error, 1e-4) @test_util.run_deprecated_v1 def testNestedGather(self): np.random.seed(5) # Fix seed to avoid flakiness with self.cached_session(): p_shape = (8, 2) p_size = 16 index_values = [1, 3, 5, 6] index_values2 = [0, 2] y2_shape = [2, 2] params = constant_op.constant( np.arange(p_size).astype(np.float), shape=p_shape, name="p") indices = constant_op.constant(index_values, name="i") y = array_ops.gather(params, indices, name="y") indices2 = constant_op.constant(index_values2, name="i2") y2 = array_ops.gather(y, indices2, name="y2") error = gradient_checker.compute_gradient_error(params, p_shape, y2, y2_shape) tf_logging.info("nested gather error = %f", error) self.assertLess(error, 1e-4) @test_util.run_deprecated_v1 def testComplexMul(self): with self.cached_session(): size = () c = constant_op.constant(5 + 7j, dtype=dtypes.complex64) x = constant_op.constant(11 - 13j, dtype=dtypes.complex64) y = c * x analytical, numerical = gradient_checker.compute_gradient(x, size, y, size) correct = np.array([[5, 7], [-7, 5]]) self.assertAllEqual(correct, analytical) self.assertAllClose(correct, numerical, rtol=1e-4) self.assertLess( gradient_checker.compute_gradient_error(x, size, y, size), 2e-4) @test_util.run_deprecated_v1 def testComplexConj(self): with self.cached_session(): size = () x = constant_op.constant(11 - 13j, dtype=dtypes.complex64) y = math_ops.conj(x) analytical, numerical = gradient_checker.compute_gradient(x, size, y, size) correct = np.array([[1, 0], [0, -1]]) self.assertAllEqual(correct, analytical) self.assertAllClose(correct, numerical, rtol=2e-5) self.assertLess( gradient_checker.compute_gradient_error(x, size, y, size), 2e-5) @test_util.run_deprecated_v1 def testEmptySucceeds(self): with self.cached_session(): x = array_ops.placeholder(dtypes.float32) y = array_ops.identity(x) for grad in gradient_checker.compute_gradient(x, (0, 3), y, (0, 3)): self.assertEqual(grad.shape, (0, 0)) error = gradient_checker.compute_gradient_error(x, (0, 3), y, (0, 3)) self.assertEqual(error, 0) def testEmptyFails(self): with ops.Graph().as_default() as g: with self.session(graph=g): x = array_ops.placeholder(dtypes.float32) with g.gradient_override_map({"Identity": "BadGrad"}): y = array_ops.identity(x) bad = r"Empty gradient has wrong shape: expected \(0, 3\), got \(3, 0\)" with self.assertRaisesRegexp(ValueError, bad): gradient_checker.compute_gradient(x, (0, 3), y, (0, 3)) with self.assertRaisesRegexp(ValueError, bad): gradient_checker.compute_gradient_error(x, (0, 3), y, (0, 3)) def testNaNGradFails(self): with ops.Graph().as_default() as g: with self.session(graph=g): x = array_ops.placeholder(dtypes.float32) with g.gradient_override_map({"Identity": "NaNGrad"}): y = array_ops.identity(x) error = gradient_checker.compute_gradient_error(x, (), y, ()) # Typical test would assert error < max_err, so assert this test would # raise AssertionError, since NaN is not < 1.0. with self.assertRaisesRegexp(AssertionError, "False is not true"): self.assertTrue(error < 1.0) class MiniMNISTTest(test.TestCase): # Gradient checker for MNIST. 
def _BuildAndTestMiniMNIST(self, param_index, tag): # Fix seed to avoid occasional flakiness np.random.seed(6) # Hyperparameters batch = 3 inputs = 16 features = 32 classes = 10 # Define the parameters inp_data = np.random.random_sample(inputs * batch) hidden_weight_data = np.random.randn(inputs * features) / np.sqrt(inputs) hidden_bias_data = np.random.random_sample(features) sm_weight_data = np.random.randn(features * classes) / np.sqrt(features) sm_bias_data = np.random.random_sample(classes) # special care for labels since they need to be normalized per batch label_data = np.random.random(batch * classes).reshape((batch, classes)) s = label_data.sum(axis=1) label_data /= s[:, None] with self.session(use_gpu=True): # We treat the inputs as "parameters" here inp = constant_op.constant( inp_data.tolist(), shape=[batch, inputs], dtype=dtypes.float64, name="inp") hidden_weight = constant_op.constant( hidden_weight_data.tolist(), shape=[inputs, features], dtype=dtypes.float64, name="hidden_weight") hidden_bias = constant_op.constant( hidden_bias_data.tolist(), shape=[features], dtype=dtypes.float64, name="hidden_bias") softmax_weight = constant_op.constant( sm_weight_data.tolist(), shape=[features, classes], dtype=dtypes.float64, name="softmax_weight") softmax_bias = constant_op.constant( sm_bias_data.tolist(), shape=[classes], dtype=dtypes.float64, name="softmax_bias") # List all the parameters so that we can test them one at a time all_params = [ inp, hidden_weight, hidden_bias, softmax_weight, softmax_bias ] param_sizes = [ [batch, inputs], # inp [inputs, features], # hidden_weight, [features], # hidden_bias [features, classes], # softmax_weight, [classes] ] # softmax_bias # Now, build MNIST features = nn_ops.relu( nn_ops.xw_plus_b(inp, hidden_weight, hidden_bias), name="features") logits = nn_ops.xw_plus_b( features, softmax_weight, softmax_bias, name="logits") labels = constant_op.constant( label_data.tolist(), shape=[batch, classes], dtype=dtypes.float64, name="labels") cost = nn_ops.softmax_cross_entropy_with_logits( labels=labels, logits=logits, name="cost") # Test the gradients. err = gradient_checker.compute_gradient_error( all_params[param_index], param_sizes[param_index], cost, [batch], delta=1e-5) tf_logging.info("Mini MNIST: %s gradient error = %g", tag, err) return err @test_util.run_deprecated_v1 def testInputGradient(self): self.assertLess(self._BuildAndTestMiniMNIST(0, "input"), 1e-8) @test_util.run_deprecated_v1 def testHiddenWeightGradient(self): self.assertLess(self._BuildAndTestMiniMNIST(1, "hidden_weight"), 1e-8) @test_util.run_deprecated_v1 def testHiddenBiasGradient(self): self.assertLess(self._BuildAndTestMiniMNIST(2, "hidden_bias"), 1e-8) @test_util.run_deprecated_v1 def testSoftmaxWeightGradient(self): self.assertLess(self._BuildAndTestMiniMNIST(3, "softmax_weight"), 1e-8) @test_util.run_deprecated_v1 def testSoftmaxBiasGradient(self): self.assertLess(self._BuildAndTestMiniMNIST(4, "softmax_bias"), 1e-8) if __name__ == "__main__": test.main()
apache-2.0
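The tests above compare analytic gradients against numeric ones via compute_gradient_error. As a minimal numpy sketch of the central-difference estimate underlying such checks (not TensorFlow's actual implementation):

import numpy as np

def numeric_grad(f, x, delta=1e-2):
    # Central difference (f(x + d) - f(x - d)) / (2 * delta), one component at a time.
    grad = np.zeros_like(x)
    for i in range(x.size):
        d = np.zeros_like(x)
        d.flat[i] = delta
        grad.flat[i] = (f(x + d) - f(x - d)) / (2 * delta)
    return grad

f = lambda x: (x ** 2).sum()
x = np.arange(6, dtype=np.float64).reshape(2, 3)
print(np.max(np.abs(numeric_grad(f, x) - 2 * x)))  # ~0: central differences are exact for a quadratic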
dawangjiaowolaixunshan/runtime
deps/v8/test/test262/testcfg.py
5
5659
# Copyright 2012 the V8 project authors. All rights reserved. # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following # disclaimer in the documentation and/or other materials provided # with the distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived # from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. import hashlib import os import shutil import sys import tarfile from testrunner.local import testsuite from testrunner.local import utils from testrunner.objects import testcase TEST_262_ARCHIVE_REVISION = "fbba29f" # This is the r365 revision. TEST_262_ARCHIVE_MD5 = "e1ff0db438cc12de8fb6da80621b4ef6" TEST_262_URL = "https://github.com/tc39/test262/tarball/%s" TEST_262_HARNESS = ["sta.js", "testBuiltInObject.js", "testIntl.js"] class Test262TestSuite(testsuite.TestSuite): def __init__(self, name, root): super(Test262TestSuite, self).__init__(name, root) self.testroot = os.path.join(root, "data", "test", "suite") self.harness = [os.path.join(self.root, "data", "test", "harness", f) for f in TEST_262_HARNESS] self.harness += [os.path.join(self.root, "harness-adapt.js")] def CommonTestName(self, testcase): return testcase.path.split(os.path.sep)[-1] def ListTests(self, context): tests = [] for dirname, dirs, files in os.walk(self.testroot): for dotted in [x for x in dirs if x.startswith(".")]: dirs.remove(dotted) if context.noi18n and "intl402" in dirs: dirs.remove("intl402") dirs.sort() files.sort() for filename in files: if filename.endswith(".js"): testname = os.path.join(dirname[len(self.testroot) + 1:], filename[:-3]) case = testcase.TestCase(self, testname) tests.append(case) return tests def GetFlagsForTestCase(self, testcase, context): return (testcase.flags + context.mode_flags + self.harness + [os.path.join(self.testroot, testcase.path + ".js")]) def GetSourceForTest(self, testcase): filename = os.path.join(self.testroot, testcase.path + ".js") with open(filename) as f: return f.read() def IsNegativeTest(self, testcase): return "@negative" in self.GetSourceForTest(testcase) def IsFailureOutput(self, output, testpath): if output.exit_code != 0: return True return "FAILED!" 
in output.stdout def DownloadData(self): revision = TEST_262_ARCHIVE_REVISION archive_url = TEST_262_URL % revision archive_name = os.path.join(self.root, "tc39-test262-%s.tar.gz" % revision) directory_name = os.path.join(self.root, "data") directory_old_name = os.path.join(self.root, "data.old") # Clobber if the test is in an outdated state, i.e. if there are any other # archive files present. archive_files = [f for f in os.listdir(self.root) if f.startswith("tc39-test262-")] if (len(archive_files) > 1 or os.path.basename(archive_name) not in archive_files): print "Clobber outdated test archives ..." for f in archive_files: os.remove(os.path.join(self.root, f)) if not os.path.exists(archive_name): print "Downloading test data from %s ..." % archive_url utils.URLRetrieve(archive_url, archive_name) if os.path.exists(directory_name): if os.path.exists(directory_old_name): shutil.rmtree(directory_old_name) os.rename(directory_name, directory_old_name) if not os.path.exists(directory_name): print "Extracting test262-%s.tar.gz ..." % revision md5 = hashlib.md5() with open(archive_name, "rb") as f: for chunk in iter(lambda: f.read(8192), ""): md5.update(chunk) print "MD5 hash is %s" % md5.hexdigest() if md5.hexdigest() != TEST_262_ARCHIVE_MD5: os.remove(archive_name) print "MD5 expected %s" % TEST_262_ARCHIVE_MD5 raise Exception("Hash mismatch of test data file") archive = tarfile.open(archive_name, "r:gz") if sys.platform in ("win32", "cygwin"): # Magic incantation to allow longer path names on Windows. archive.extractall(u"\\\\?\\%s" % self.root) else: archive.extractall(self.root) os.rename(os.path.join(self.root, "tc39-test262-%s" % revision), directory_name) def GetSuite(name, root): return Test262TestSuite(name, root)
apache-2.0
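DownloadData above verifies the test262 archive with a chunked MD5 read, so the tarball never has to fit in memory at once; the same pattern as a standalone helper:

import hashlib

def file_md5(path, chunk_size=8192):
    # Hash the file incrementally, chunk_size bytes at a time.
    md5 = hashlib.md5()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(chunk_size), b''):
            md5.update(chunk)
    return md5.hexdigest()

# e.g. file_md5('tc39-test262-fbba29f.tar.gz') should equal TEST_262_ARCHIVE_MD5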
wunderlins/learning
python/zodb/lib/linux64/ZODB/tests/testDemoStorage.py
2
8297
############################################################################## # # Copyright (c) 2001, 2002 Zope Foundation and Contributors. # All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE. # ############################################################################## from ZODB.DB import DB from ZODB.tests import ( BasicStorage, HistoryStorage, IteratorStorage, MTStorage, PackableStorage, RevisionStorage, StorageTestBase, Synchronization, ) import os if os.environ.get('USE_ZOPE_TESTING_DOCTEST'): from zope.testing import doctest else: import doctest import random import re import transaction import unittest import ZODB.DemoStorage import ZODB.tests.hexstorage import ZODB.tests.util import ZODB.utils from zope.testing import renormalizing class DemoStorageTests( StorageTestBase.StorageTestBase, BasicStorage.BasicStorage, HistoryStorage.HistoryStorage, IteratorStorage.ExtendedIteratorStorage, IteratorStorage.IteratorStorage, MTStorage.MTStorage, PackableStorage.PackableStorage, RevisionStorage.RevisionStorage, Synchronization.SynchronizedStorage, ): def setUp(self): StorageTestBase.StorageTestBase.setUp(self) self._storage = ZODB.DemoStorage.DemoStorage() def checkOversizeNote(self): # This base class test checks for the common case where a storage # doesn't support huge transaction metadata. This storage doesn't # have this limit, so we inhibit this test here. pass def checkLoadDelegation(self): # Minimal test of loadEX w/o version -- ironically db = DB(self._storage) # creates object 0. :) s2 = ZODB.DemoStorage.DemoStorage(base=self._storage) self.assertEqual(s2.load(ZODB.utils.z64, ''), self._storage.load(ZODB.utils.z64, '')) def checkLengthAndBool(self): self.assertEqual(len(self._storage), 0) self.assertTrue(not self._storage) db = DB(self._storage) # creates object 0.
:) self.assertEqual(len(self._storage), 1) self.assertTrue(self._storage) conn = db.open() for i in range(10): conn.root()[i] = conn.root().__class__() transaction.commit() self.assertEqual(len(self._storage), 11) self.assertTrue(self._storage) def checkLoadBeforeUndo(self): pass # we don't support undo yet checkUndoZombie = checkLoadBeforeUndo class DemoStorageHexTests(DemoStorageTests): def setUp(self): StorageTestBase.StorageTestBase.setUp(self) self._storage = ZODB.tests.hexstorage.HexStorage( ZODB.DemoStorage.DemoStorage()) class DemoStorageWrappedBase(DemoStorageTests): def setUp(self): StorageTestBase.StorageTestBase.setUp(self) self._base = self._makeBaseStorage() self._storage = ZODB.DemoStorage.DemoStorage(base=self._base) def tearDown(self): self._base.close() StorageTestBase.StorageTestBase.tearDown(self) def _makeBaseStorage(self): raise NotImplementedError def checkPackOnlyOneObject(self): pass # Wrapping demo storages don't do gc def checkPackWithMultiDatabaseReferences(self): pass # we never do gc checkPackAllRevisions = checkPackWithMultiDatabaseReferences class DemoStorageWrappedAroundMappingStorage(DemoStorageWrappedBase): def _makeBaseStorage(self): from ZODB.MappingStorage import MappingStorage return MappingStorage() class DemoStorageWrappedAroundFileStorage(DemoStorageWrappedBase): def _makeBaseStorage(self): from ZODB.FileStorage import FileStorage return FileStorage('FileStorageTests.fs') class DemoStorageWrappedAroundHexMappingStorage(DemoStorageWrappedBase): def _makeBaseStorage(self): from ZODB.MappingStorage import MappingStorage return ZODB.tests.hexstorage.HexStorage(MappingStorage()) def setUp(test): random.seed(0) ZODB.tests.util.setUp(test) def testSomeDelegation(): r""" >>> import six >>> class S: ... def __init__(self, name): ... self.name = name ... def registerDB(self, db): ... six.print_(self.name, db) ... def close(self): ... six.print_(self.name, 'closed') ... sortKey = getSize = __len__ = history = getTid = None ... tpc_finish = tpc_vote = tpc_transaction = None ... _lock_acquire = _lock_release = lambda self: None ... getName = lambda self: 'S' ... isReadOnly = tpc_transaction = None ... supportsUndo = undo = undoLog = undoInfo = None ... supportsTransactionalUndo = None ... def new_oid(self): ... return '\0' * 8 ... def tpc_begin(self, t, tid, status): ... six.print_('begin', tid, status) ... def tpc_abort(self, t): ... pass >>> from ZODB.DemoStorage import DemoStorage >>> storage = DemoStorage(base=S(1), changes=S(2)) >>> storage.registerDB(1) 2 1 >>> storage.close() 1 closed 2 closed >>> storage.tpc_begin(1, 2, 3) begin 2 3 >>> storage.tpc_abort(1) """ def blob_pos_key_error_with_non_blob_base(): """ >>> storage = ZODB.DemoStorage.DemoStorage() >>> storage.loadBlob(ZODB.utils.p64(1), ZODB.utils.p64(1)) Traceback (most recent call last): ... POSKeyError: 0x01 >>> storage.openCommittedBlobFile(ZODB.utils.p64(1), ZODB.utils.p64(1)) Traceback (most recent call last): ... POSKeyError: 0x01 """ def load_before_base_storage_current(): """ Here we'll exercise that DemoStorage's loadBefore method works properly when deferring to a record that is current in the base storage. 
>>> import time >>> import transaction >>> import ZODB.DB >>> import ZODB.DemoStorage >>> import ZODB.MappingStorage >>> import ZODB.utils >>> base = ZODB.MappingStorage.MappingStorage() >>> basedb = ZODB.DB(base) >>> conn = basedb.open() >>> conn.root()['foo'] = 'bar' >>> transaction.commit() >>> conn.close() >>> storage = ZODB.DemoStorage.DemoStorage(base=base) >>> db = ZODB.DB(storage) >>> conn = db.open() >>> conn.root()['foo'] = 'baz' >>> time.sleep(.1) # Windows has a low-resolution clock >>> transaction.commit() >>> oid = ZODB.utils.z64 >>> base_current = storage.base.load(oid) >>> tid = ZODB.utils.p64(ZODB.utils.u64(base_current[1]) + 1) >>> base_record = storage.base.loadBefore(oid, tid) >>> base_record[-1] is None True >>> base_current == base_record[:2] True >>> t = storage.loadBefore(oid, tid) The data and tid are the values from the base storage, but the next tid is from changes. >>> t[:2] == base_record[:2] True >>> t[-1] == storage.changes.load(oid)[1] True >>> conn.close() >>> db.close() >>> base.close() """ def test_suite(): suite = unittest.TestSuite(( doctest.DocTestSuite( setUp=setUp, tearDown=ZODB.tests.util.tearDown, checker=ZODB.tests.util.checker ), doctest.DocFileSuite( '../DemoStorage.test', setUp=setUp, tearDown=ZODB.tests.util.tearDown, checker=ZODB.tests.util.checker, ), )) suite.addTest(unittest.makeSuite(DemoStorageTests, 'check')) suite.addTest(unittest.makeSuite(DemoStorageHexTests, 'check')) suite.addTest(unittest.makeSuite(DemoStorageWrappedAroundFileStorage, 'check')) suite.addTest(unittest.makeSuite(DemoStorageWrappedAroundMappingStorage, 'check')) suite.addTest(unittest.makeSuite(DemoStorageWrappedAroundHexMappingStorage, 'check')) return suite
gpl-2.0
bradhowes/pyslimp3
server/KeyProcessor.py
1
7084
# # Copyright (C) 2009, 2010 Brad Howes. # # This file is part of Pyslimp3. # # Pyslimp3 is free software; you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation; either version 3, or (at your option) any later version. # # Pyslimp3 is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # Pyslimp3; see the file COPYING. If not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, # USA. # from time import time import traceback # # Key Constants # kDigit0 = '0' kDigit1 = '1' kDigit2 = '2' kDigit3 = '3' kDigit4 = '4' kDigit5 = '5' kDigit6 = '6' kDigit7 = '7' kDigit8 = '8' kDigit9 = '9' kArrowUp = 'arrowUp' kArrowDown = 'arrowDown' kArrowLeft = 'arrowLeft' kArrowRight = 'arrowRight' kRewind = 'rewind' kFastForward = 'fastForward' kChannelUp = 'channelUp' kChannelDown = 'channelDown' kDisplay = 'display' kGuide = 'guide' kMenuHome = 'menuHome' kMute = 'mute' kOK = 'ok' kPause = 'pause' kPIP = 'pip' kPlay = 'play' kPower = 'power' kRecord = 'record' kRepeat = 'repeat' kSleep = 'sleep' kStop = 'stop' kVolumeDown = 'volumeDown' kVolumeUp = 'volumeUp' # # Key Event Modifiers # kModFirst = 'ModFirst' # Key seen for the first time kModHeld = 'ModHeld' # Key still down after kHoldPressThreshold kModRepeat = 'ModRepeat' # Key repeated kModRelease = 'ModRelease' # Key released kModReleaseHeld = 'ModReleaseHeld' # Held key released # # Create a keyCode made up of a key and a modifier # def makeKeyCode( key, mod ): return key + '.' + mod # # Create a list of keyCode values, one for each provided modifier # def makeKeyCodes( key, mods ): if mods is None: mods = ( kModFirst, ) elif type( mods ) not in ( list, tuple ): mods = ( mods, ) return map( lambda a: key + '.' + a, mods ) # # Converter of raw remote key events into keyCode values. The keyCode values # are then handed to a notifier object via its processKeyCode() method for # processing. A keyCode is a pairing of a key constant value (eg. kDigit0) and # an event modifier (eg. kModFirst). The event modifiers describe where in time # the key constant event took place: # # kModFirst - the initial sighting of the key value # kModRepeat - the key value is still held down # kModHeld - the key value is pressed after kHoldPressThreshold seconds # kModRelease - the key is no longer down # kModReleaseHeld - a key that was down for kHoldPressThreshold is up # # Note that only one 'release' event is issued, kModRelease or kModReleaseHeld, # depending on whether kModHeld was issued. # class KeyProcessor( object ): # # How often to check for a key release. This should be long enough to # eliminate false positives but short enough to stop repeated key events. # This is also how frequently a held-key will repeat an action. # # kReleaseCheckThreshold = 0.256 # seconds kReleaseCheckThreshold = 0.100 # seconds # # Number of calls to checkForRelease() before a button reports being held # down. Since checkForRelease() runs every kReleaseCheckThreshold, a button # must be held down for kHoldPressThreshold * kReleaseCheckThreshold # seconds before it begins repeating. 
# kHoldPressCount = 4 # # Amount of time that must pass so that an incoming key message with the # same key code as the last message is treated as a separate key press, and # not a held key event. # kUniqueKeyPressTimeDelta = 0.800 def __init__( self, timerManager, notifier ): self.timerManager = timerManager self.notifier = notifier self.releaseTimer = None self.lastTimeStamp = 0 self.lastKey = None self.reset() # # Reset the key processor to a known state. If there is an active # releaseTimer, we set the silenced flag but leave everything else alone, # and let the checkForRelease() method clean up for us. This is done so # that screen changes caused by remote commands won't inherit key release # or repeat events. # def reset( self ): if self.releaseTimer: self.releaseTimer.fire() self.releaseTimer = None self.emittedHeldKey = False self.downCount = 0 # # Process a new raw key event from a remote controller. Depending on what # has taken place in the recent past, it may invoke notify() one or # more times. # def process( self, timeStamp, key ): timeStamp = timeStamp / 100000.0 delta = timeStamp - self.lastTimeStamp self.lastTimeStamp = timeStamp # # Same key but only if no gap in updates # if key == self.lastKey and delta < self.kUniqueKeyPressTimeDelta: self.pendingRelease = False return # # If we have an active releaseTimer, manually fire it. Make sure # that it will think that the previous key was released. # self.pendingRelease = True self.reset() # # New key press. # self.lastKey = key self.downCount = 0 self.startReleaseTimer() self.notify( kModFirst ) # # Start a timer that will invoke checkForRelease() after # kReleaseCheckThreshold seconds. # def startReleaseTimer( self ): self.pendingRelease = True if self.releaseTimer: self.releaseTimer.deactivate() self.releaseTimer = self.timerManager.addTimer( self.kReleaseCheckThreshold, self.checkForRelease ) # # Check if a release event has occurred and if so notify the notifier. # def checkForRelease( self ): # # If an event was received since we last checked, try again. # if not self.pendingRelease: # # Keep track of how many times we've been called for this key # event, and emit 'held' and repeat messages when enough time has # passed. # downCount = self.downCount + 1 self.downCount = downCount if downCount == self.kHoldPressCount: self.notify( kModHeld ) elif downCount > self.kHoldPressCount: self.notify( kModRepeat ) self.startReleaseTimer() return # # Key has been released. # if self.emittedHeldKey: self.notify( kModReleaseHeld ) else: self.notify( kModRelease ) self.reset() # # Notify the notifier object that a new keyCode event has taken place. # def notify( self, modifier ): keyCode = makeKeyCode( self.lastKey, modifier ) self.notifier.processKeyCode( keyCode )
gpl-3.0
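A quick illustration of the keyCode helpers in KeyProcessor.py above, assuming the module's constants are imported:

# makeKeyCode pairs a key constant with an event modifier.
print(makeKeyCode(kPlay, kModFirst))  # 'play.ModFirst'

# makeKeyCodes expands one key over several modifiers.
print(list(makeKeyCodes(kArrowUp, (kModFirst, kModRepeat))))
# ['arrowUp.ModFirst', 'arrowUp.ModRepeat']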
mo-g/iris
lib/iris/tests/test_mapping.py
11
8113
# (C) British Crown Copyright 2010 - 2015, Met Office # # This file is part of Iris. # # Iris is free software: you can redistribute it and/or modify it under # the terms of the GNU Lesser General Public License as published by the # Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Iris is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public License # along with Iris. If not, see <http://www.gnu.org/licenses/>. """ Tests map creation. """ from __future__ import (absolute_import, division, print_function) from six.moves import (filter, input, map, range, zip) # noqa # import iris tests first so that some things can be initialised before # importing anything else import iris.tests as tests import numpy as np import numpy.testing as np_testing import cartopy.crs as ccrs import iris import iris.coord_systems import iris.cube import iris.tests.stock # Run tests in no graphics mode if matplotlib is not available. if tests.MPL_AVAILABLE: import matplotlib.pyplot as plt import iris.plot as iplt # A specific cartopy Globe matching the iris RotatedGeogCS default. _DEFAULT_GLOBE = ccrs.Globe(semimajor_axis=6371229.0, semiminor_axis=6371229.0, ellipse=None) @tests.skip_plot class TestBasic(tests.GraphicsTest): cube = iris.tests.stock.realistic_4d() def test_contourf(self): cube = self.cube[0, 0] iplt.contourf(cube) self.check_graphic() def test_pcolor(self): cube = self.cube[0, 0] iplt.pcolor(cube) self.check_graphic() def test_unmappable(self): cube = self.cube[0, 0] cube.coord('grid_longitude').standard_name = None iplt.contourf(cube) self.check_graphic() def test_default_projection_and_extent(self): self.assertEqual(iplt.default_projection(self.cube), ccrs.RotatedPole(357.5 - 180, 37.5, globe=_DEFAULT_GLOBE)) np_testing.assert_array_almost_equal( iplt.default_projection_extent(self.cube), (3.59579163e+02, 3.59669159e+02, -1.28250003e-01, -3.82499993e-02), decimal=3) @tests.skip_data @tests.skip_plot class TestUnmappable(tests.GraphicsTest): def setUp(self): src_cube = iris.tests.stock.global_pp() # Make a cube that can't be located on the globe. cube = iris.cube.Cube(src_cube.data) cube.add_dim_coord( iris.coords.DimCoord(np.arange(96, dtype=np.float32) * 100, long_name='x', units='m'), 1) cube.add_dim_coord( iris.coords.DimCoord(np.arange(73, dtype=np.float32) * 100, long_name='y', units='m'), 0) cube.standard_name = 'air_temperature' cube.units = 'K' cube.assert_valid() self.cube = cube def test_simple(self): iplt.contourf(self.cube) self.check_graphic() @tests.skip_data @tests.skip_plot class TestMappingSubRegion(tests.GraphicsTest): def setUp(self): cube_path = tests.get_data_path( ('PP', 'aPProt1', 'rotatedMHtimecube.pp')) cube = iris.load_cube(cube_path)[0] # make the data smaller to speed things up. 
self.cube = cube[::10, ::10] def test_simple(self): # First sub-plot plt.subplot(221) plt.title('Default') iplt.contourf(self.cube) plt.gca().coastlines() # Second sub-plot plt.subplot(222, projection=ccrs.Mollweide(central_longitude=120)) plt.title('Mollweide') iplt.contourf(self.cube) plt.gca().coastlines() # Third sub-plot (the projection part is redundant, but a useful # test nonetheless) ax = plt.subplot(223, projection=iplt.default_projection(self.cube)) plt.title('Native') iplt.contour(self.cube) ax.coastlines() # Fourth sub-plot ax = plt.subplot(2, 2, 4, projection=ccrs.PlateCarree()) plt.title('PlateCarree') iplt.contourf(self.cube) ax.coastlines() self.check_graphic() def test_default_projection_and_extent(self): self.assertEqual(iplt.default_projection(self.cube), ccrs.RotatedPole(357.5 - 180, 37.5, globe=_DEFAULT_GLOBE)) np_testing.assert_array_almost_equal( iplt.default_projection_extent(self.cube), (313.01998901, 391.11999512, -22.48999977, 24.80999947)) @tests.skip_data @tests.skip_plot class TestLowLevel(tests.GraphicsTest): def setUp(self): self.cube = iris.tests.stock.global_pp() self.few = 4 self.few_levels = list(range(280, 300, 5)) self.many_levels = np.linspace( self.cube.data.min(), self.cube.data.max(), 40) def test_simple(self): iplt.contour(self.cube) self.check_graphic() def test_params(self): iplt.contourf(self.cube, self.few) self.check_graphic() iplt.contourf(self.cube, self.few_levels) self.check_graphic() iplt.contourf(self.cube, self.many_levels) self.check_graphic() def test_keywords(self): iplt.contourf(self.cube, levels=self.few_levels) self.check_graphic() iplt.contourf(self.cube, levels=self.many_levels, alpha=0.5) self.check_graphic() @tests.skip_data @tests.skip_plot class TestBoundedCube(tests.GraphicsTest): def setUp(self): self.cube = iris.tests.stock.global_pp() # Add some bounds to this data (this will actually make the bounds # invalid as they will straddle the north pole and overlap on the # dateline, but that doesn't matter for this test.) self.cube.coord('latitude').guess_bounds() self.cube.coord('longitude').guess_bounds() def test_pcolormesh(self): # pcolormesh can only be drawn in native coordinates (or more # specifically, in coordinates that don't wrap). plt.axes(projection=ccrs.PlateCarree(central_longitude=180)) iplt.pcolormesh(self.cube) self.check_graphic() def test_grid(self): iplt.outline(self.cube) self.check_graphic() def test_default_projection_and_extent(self): self.assertEqual(iplt.default_projection(self.cube), ccrs.PlateCarree()) np_testing.assert_array_almost_equal( iplt.default_projection_extent(self.cube), [0., 360., -89.99995422, 89.99998474]) np_testing.assert_array_almost_equal( iplt.default_projection_extent( self.cube, mode=iris.coords.BOUND_MODE), [-1.875046, 358.124954, -91.24995422, 91.24998474]) @tests.skip_data @tests.skip_plot class TestLimitedAreaCube(tests.GraphicsTest): def setUp(self): cube_path = tests.get_data_path(('PP', 'aPProt1', 'rotated.pp')) self.cube = iris.load_cube(cube_path)[::20, ::20] self.cube.coord('grid_latitude').guess_bounds() self.cube.coord('grid_longitude').guess_bounds() def test_pcolormesh(self): iplt.pcolormesh(self.cube) self.check_graphic() def test_grid(self): iplt.pcolormesh(self.cube, facecolors='none', edgecolors='blue') # the result is a graphic which has coloured edges.
This is an mpl bug, # see https://github.com/matplotlib/matplotlib/issues/1302 self.check_graphic() def test_outline(self): iplt.outline(self.cube) self.check_graphic() def test_scatter(self): iplt.points(self.cube) plt.gca().coastlines() self.check_graphic() if __name__ == "__main__": tests.main()
gpl-3.0
ApuliaSoftware/odoo
addons/account/report/account_invoice_report.py
224
12489
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from openerp import tools import openerp.addons.decimal_precision as dp from openerp.osv import fields,osv class account_invoice_report(osv.osv): _name = "account.invoice.report" _description = "Invoices Statistics" _auto = False _rec_name = 'date' def _compute_amounts_in_user_currency(self, cr, uid, ids, field_names, args, context=None): """Compute the amounts in the currency of the user """ if context is None: context={} currency_obj = self.pool.get('res.currency') currency_rate_obj = self.pool.get('res.currency.rate') user = self.pool.get('res.users').browse(cr, uid, uid, context=context) user_currency_id = user.company_id.currency_id.id currency_rate_id = currency_rate_obj.search( cr, uid, [ ('rate', '=', 1), '|', ('currency_id.company_id', '=', user.company_id.id), ('currency_id.company_id', '=', False) ], limit=1, context=context)[0] base_currency_id = currency_rate_obj.browse(cr, uid, currency_rate_id, context=context).currency_id.id res = {} ctx = context.copy() for item in self.browse(cr, uid, ids, context=context): ctx['date'] = item.date price_total = currency_obj.compute(cr, uid, base_currency_id, user_currency_id, item.price_total, context=ctx) price_average = currency_obj.compute(cr, uid, base_currency_id, user_currency_id, item.price_average, context=ctx) residual = currency_obj.compute(cr, uid, base_currency_id, user_currency_id, item.residual, context=ctx) res[item.id] = { 'user_currency_price_total': price_total, 'user_currency_price_average': price_average, 'user_currency_residual': residual, } return res _columns = { 'date': fields.date('Date', readonly=True), 'product_id': fields.many2one('product.product', 'Product', readonly=True), 'product_qty':fields.float('Product Quantity', readonly=True), 'uom_name': fields.char('Reference Unit of Measure', size=128, readonly=True), 'payment_term': fields.many2one('account.payment.term', 'Payment Term', readonly=True), 'period_id': fields.many2one('account.period', 'Force Period', domain=[('state','<>','done')], readonly=True), 'fiscal_position': fields.many2one('account.fiscal.position', 'Fiscal Position', readonly=True), 'currency_id': fields.many2one('res.currency', 'Currency', readonly=True), 'categ_id': fields.many2one('product.category','Category of Product', readonly=True), 'journal_id': fields.many2one('account.journal', 'Journal', readonly=True), 'partner_id': fields.many2one('res.partner', 'Partner', readonly=True), 'commercial_partner_id': fields.many2one('res.partner', 'Partner Company', help="Commercial Entity"), 'company_id': fields.many2one('res.company', 'Company', readonly=True), 'user_id': 
fields.many2one('res.users', 'Salesperson', readonly=True), 'price_total': fields.float('Total Without Tax', readonly=True), 'user_currency_price_total': fields.function(_compute_amounts_in_user_currency, string="Total Without Tax", type='float', digits_compute=dp.get_precision('Account'), multi="_compute_amounts"), 'price_average': fields.float('Average Price', readonly=True, group_operator="avg"), 'user_currency_price_average': fields.function(_compute_amounts_in_user_currency, string="Average Price", type='float', digits_compute=dp.get_precision('Account'), multi="_compute_amounts"), 'currency_rate': fields.float('Currency Rate', readonly=True), 'nbr': fields.integer('# of Invoices', readonly=True), # TDE FIXME master: rename into nbr_lines 'type': fields.selection([ ('out_invoice','Customer Invoice'), ('in_invoice','Supplier Invoice'), ('out_refund','Customer Refund'), ('in_refund','Supplier Refund'), ],'Type', readonly=True), 'state': fields.selection([ ('draft','Draft'), ('proforma','Pro-forma'), ('proforma2','Pro-forma'), ('open','Open'), ('paid','Done'), ('cancel','Cancelled') ], 'Invoice Status', readonly=True), 'date_due': fields.date('Due Date', readonly=True), 'account_id': fields.many2one('account.account', 'Account',readonly=True), 'account_line_id': fields.many2one('account.account', 'Account Line',readonly=True), 'partner_bank_id': fields.many2one('res.partner.bank', 'Bank Account',readonly=True), 'residual': fields.float('Total Residual', readonly=True), 'user_currency_residual': fields.function(_compute_amounts_in_user_currency, string="Total Residual", type='float', digits_compute=dp.get_precision('Account'), multi="_compute_amounts"), 'country_id': fields.many2one('res.country', 'Country of the Partner Company'), } _order = 'date desc' _depends = { 'account.invoice': [ 'account_id', 'amount_total', 'commercial_partner_id', 'company_id', 'currency_id', 'date_due', 'date_invoice', 'fiscal_position', 'journal_id', 'partner_bank_id', 'partner_id', 'payment_term', 'period_id', 'residual', 'state', 'type', 'user_id', ], 'account.invoice.line': [ 'account_id', 'invoice_id', 'price_subtotal', 'product_id', 'quantity', 'uos_id', ], 'product.product': ['product_tmpl_id'], 'product.template': ['categ_id'], 'product.uom': ['category_id', 'factor', 'name', 'uom_type'], 'res.currency.rate': ['currency_id', 'name'], 'res.partner': ['country_id'], } def _select(self): select_str = """ SELECT sub.id, sub.date, sub.product_id, sub.partner_id, sub.country_id, sub.payment_term, sub.period_id, sub.uom_name, sub.currency_id, sub.journal_id, sub.fiscal_position, sub.user_id, sub.company_id, sub.nbr, sub.type, sub.state, sub.categ_id, sub.date_due, sub.account_id, sub.account_line_id, sub.partner_bank_id, sub.product_qty, sub.price_total / cr.rate as price_total, sub.price_average /cr.rate as price_average, cr.rate as currency_rate, sub.residual / cr.rate as residual, sub.commercial_partner_id as commercial_partner_id """ return select_str def _sub_select(self): select_str = """ SELECT min(ail.id) AS id, ai.date_invoice AS date, ail.product_id, ai.partner_id, ai.payment_term, ai.period_id, u2.name AS uom_name, ai.currency_id, ai.journal_id, ai.fiscal_position, ai.user_id, ai.company_id, count(ail.*) AS nbr, ai.type, ai.state, pt.categ_id, ai.date_due, ai.account_id, ail.account_id AS account_line_id, ai.partner_bank_id, SUM(CASE WHEN ai.type::text = ANY (ARRAY['out_refund'::character varying::text, 'in_invoice'::character varying::text]) THEN (- ail.quantity) / u.factor * u2.factor ELSE 
ail.quantity / u.factor * u2.factor END) AS product_qty, SUM(CASE WHEN ai.type::text = ANY (ARRAY['out_refund'::character varying::text, 'in_invoice'::character varying::text]) THEN - ail.price_subtotal ELSE ail.price_subtotal END) AS price_total, CASE WHEN ai.type::text = ANY (ARRAY['out_refund'::character varying::text, 'in_invoice'::character varying::text]) THEN SUM(- ail.price_subtotal) ELSE SUM(ail.price_subtotal) END / CASE WHEN SUM(ail.quantity / u.factor * u2.factor) <> 0::numeric THEN CASE WHEN ai.type::text = ANY (ARRAY['out_refund'::character varying::text, 'in_invoice'::character varying::text]) THEN SUM((- ail.quantity) / u.factor * u2.factor) ELSE SUM(ail.quantity / u.factor * u2.factor) END ELSE 1::numeric END AS price_average, CASE WHEN ai.type::text = ANY (ARRAY['out_refund'::character varying::text, 'in_invoice'::character varying::text]) THEN - ai.residual ELSE ai.residual END / (SELECT count(*) FROM account_invoice_line l where invoice_id = ai.id) * count(*) AS residual, ai.commercial_partner_id as commercial_partner_id, partner.country_id """ return select_str def _from(self): from_str = """ FROM account_invoice_line ail JOIN account_invoice ai ON ai.id = ail.invoice_id JOIN res_partner partner ON ai.commercial_partner_id = partner.id LEFT JOIN product_product pr ON pr.id = ail.product_id left JOIN product_template pt ON pt.id = pr.product_tmpl_id LEFT JOIN product_uom u ON u.id = ail.uos_id LEFT JOIN product_uom u2 ON u2.id = pt.uom_id """ return from_str def _group_by(self): group_by_str = """ GROUP BY ail.product_id, ai.date_invoice, ai.id, ai.partner_id, ai.payment_term, ai.period_id, u2.name, u2.id, ai.currency_id, ai.journal_id, ai.fiscal_position, ai.user_id, ai.company_id, ai.type, ai.state, pt.categ_id, ai.date_due, ai.account_id, ail.account_id, ai.partner_bank_id, ai.residual, ai.amount_total, ai.commercial_partner_id, partner.country_id """ return group_by_str def init(self, cr): # self._table = account_invoice_report tools.drop_view_if_exists(cr, self._table) cr.execute("""CREATE or REPLACE VIEW %s as ( WITH currency_rate (currency_id, rate, date_start, date_end) AS ( SELECT r.currency_id, r.rate, r.name AS date_start, (SELECT name FROM res_currency_rate r2 WHERE r2.name > r.name AND r2.currency_id = r.currency_id ORDER BY r2.name ASC LIMIT 1) AS date_end FROM res_currency_rate r ) %s FROM ( %s %s %s ) AS sub JOIN currency_rate cr ON (cr.currency_id = sub.currency_id AND cr.date_start <= COALESCE(sub.date, NOW()) AND (cr.date_end IS NULL OR cr.date_end > COALESCE(sub.date, NOW()))) )""" % ( self._table, self._select(), self._sub_select(), self._from(), self._group_by())) # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
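The init() method in account_invoice_report above derives a per-date currency rate by pairing each res_currency_rate row with the next row's date (the currency_rate CTE: rate valid from date_start until date_end, open-ended for the latest). A minimal, dependency-free sketch of that interval lookup; the names rates and rate_for are illustrative, not part of the addon:

    # Each stored rate is valid from its own date until the next rate's
    # date; the most recent rate is open-ended, mirroring the CTE above.
    from datetime import date

    def rate_for(rates, day):
        """rates: list of (start_date, rate) tuples sorted by start_date."""
        current = None
        for start, rate in rates:
            if start <= day:
                current = rate   # this rate's interval covers `day` so far
            else:
                break            # next rate starts after `day`; stop
        return current

    rates = [(date(2014, 1, 1), 1.0), (date(2014, 6, 1), 1.1)]
    assert rate_for(rates, date(2014, 3, 15)) == 1.0
    assert rate_for(rates, date(2014, 7, 1)) == 1.1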
pedja1/aNmap
dSploit/jni/nmap/zenmap/zenmapGUI/BugReport.py
4
12022
#!/usr/bin/env python # -*- coding: utf-8 -*- # ***********************IMPORTANT NMAP LICENSE TERMS************************ # * * # * The Nmap Security Scanner is (C) 1996-2013 Insecure.Com LLC. Nmap is * # * also a registered trademark of Insecure.Com LLC. This program is free * # * software; you may redistribute and/or modify it under the terms of the * # * GNU General Public License as published by the Free Software * # * Foundation; Version 2 ("GPL"), BUT ONLY WITH ALL OF THE CLARIFICATIONS * # * AND EXCEPTIONS DESCRIBED HEREIN. This guarantees your right to use, * # * modify, and redistribute this software under certain conditions. If * # * you wish to embed Nmap technology into proprietary software, we sell * # * alternative licenses (contact sales@nmap.com). Dozens of software * # * vendors already license Nmap technology such as host discovery, port * # * scanning, OS detection, version detection, and the Nmap Scripting * # * Engine. * # * * # * Note that the GPL places important restrictions on "derivative works", * # * yet it does not provide a detailed definition of that term. To avoid * # * misunderstandings, we interpret that term as broadly as copyright law * # * allows. For example, we consider an application to constitute a * # * derivative work for the purpose of this license if it does any of the * # * following with any software or content covered by this license * # * ("Covered Software"): * # * * # * o Integrates source code from Covered Software. * # * * # * o Reads or includes copyrighted data files, such as Nmap's nmap-os-db * # * or nmap-service-probes. * # * * # * o Is designed specifically to execute Covered Software and parse the * # * results (as opposed to typical shell or execution-menu apps, which will * # * execute anything you tell them to). * # * * # * o Includes Covered Software in a proprietary executable installer. The * # * installers produced by InstallShield are an example of this. Including * # * Nmap with other software in compressed or archival form does not * # * trigger this provision, provided appropriate open source decompression * # * or de-archiving software is widely available for no charge. For the * # * purposes of this license, an installer is considered to include Covered * # * Software even if it actually retrieves a copy of Covered Software from * # * another source during runtime (such as by downloading it from the * # * Internet). * # * * # * o Links (statically or dynamically) to a library which does any of the * # * above. * # * * # * o Executes a helper program, module, or script to do any of the above. * # * * # * This list is not exclusive, but is meant to clarify our interpretation * # * of derived works with some common examples. Other people may interpret * # * the plain GPL differently, so we consider this a special exception to * # * the GPL that we apply to Covered Software. Works which meet any of * # * these conditions must conform to all of the terms of this license, * # * particularly including the GPL Section 3 requirements of providing * # * source code and allowing free redistribution of the work as a whole. * # * * # * As another special exception to the GPL terms, Insecure.Com LLC grants * # * permission to link the code of this program with any version of the * # * OpenSSL library which is distributed under a license identical to that * # * listed in the included docs/licenses/OpenSSL.txt file, and distribute * # * linked combinations including the two. 
* # * * # * Any redistribution of Covered Software, including any derived works, * # * must obey and carry forward all of the terms of this license, including * # * obeying all GPL rules and restrictions. For example, source code of * # * the whole work must be provided and free redistribution must be * # * allowed. All GPL references to "this License", are to be treated as * # * including the terms and conditions of this license text as well. * # * * # * Because this license imposes special exceptions to the GPL, Covered * # * Work may not be combined (even as part of a larger work) with plain GPL * # * software. The terms, conditions, and exceptions of this license must * # * be included as well. This license is incompatible with some other open * # * source licenses as well. In some cases we can relicense portions of * # * Nmap or grant special permissions to use it in other open source * # * software. Please contact fyodor@nmap.org with any such requests. * # * Similarly, we don't incorporate incompatible open source software into * # * Covered Software without special permission from the copyright holders. * # * * # * If you have any questions about the licensing restrictions on using * # * Nmap in other works, are happy to help. As mentioned above, we also * # * offer alternative license to integrate Nmap into proprietary * # * applications and appliances. These contracts have been sold to dozens * # * of software vendors, and generally include a perpetual license as well * # * as providing for priority support and updates. They also fund the * # * continued development of Nmap. Please email sales@nmap.com for further * # * information. * # * * # * If you have received a written license agreement or contract for * # * Covered Software stating terms other than these, you may choose to use * # * and redistribute Covered Software under those terms instead of these. * # * * # * Source is provided to this software because we believe users have a * # * right to know exactly what a program is going to do before they run it. * # * This also allows you to audit the software for security holes (none * # * have been found so far). * # * * # * Source code also allows you to port Nmap to new platforms, fix bugs, * # * and add new features. You are highly encouraged to send your changes * # * to the dev@nmap.org mailing list for possible incorporation into the * # * main distribution. By sending these changes to Fyodor or one of the * # * Insecure.Org development mailing lists, or checking them into the Nmap * # * source code repository, it is understood (unless you specify otherwise) * # * that you are offering the Nmap Project (Insecure.Com LLC) the * # * unlimited, non-exclusive right to reuse, modify, and relicense the * # * code. Nmap will always be available Open Source, but this is important * # * because the inability to relicense code has caused devastating problems * # * for other Free Software projects (such as KDE and NASM). We also * # * occasionally relicense the code to third parties as discussed above. * # * If you wish to specify special license conditions of your * # * contributions, just say so when you send them. * # * * # * This program is distributed in the hope that it will be useful, but * # * WITHOUT ANY WARRANTY; without even the implied warranty of * # * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the Nmap * # * license file for more details (it's in a COPYING file included with * # * Nmap, and also available from https://svn.nmap.org/nmap/COPYING * # * * # ***************************************************************************/ import gtk from zenmapGUI.higwidgets.higboxes import HIGVBox from zenmapCore.Name import APP_DISPLAY_NAME, NMAP_DISPLAY_NAME, NMAP_WEB_SITE import zenmapCore.I18N # For escaping text in marked-up labels. from xml.sax.saxutils import escape class BugReport(gtk.Window, object): def __init__(self): gtk.Window.__init__(self) self.set_title(_('How to Report a Bug')) self.set_position(gtk.WIN_POS_CENTER_ALWAYS) self._create_widgets() self._pack_widgets() self._connect_widgets() def _create_widgets(self): self.vbox = HIGVBox() self.button_box = gtk.HButtonBox() self.text = gtk.Label() self.btn_ok = gtk.Button(stock=gtk.STOCK_OK) def _pack_widgets(self): self.vbox.set_border_width(6) self.text.set_line_wrap(True) self.text.set_markup(_("""\ <big><b>How to report a bug</b></big> Like their author, %(nmap)s and %(app)s aren't perfect. But you can help \ make it better by sending bug reports or even writing patches. If \ %(nmap)s doesn't behave the way you expect, first upgrade to the latest \ version available from <b>%(nmap_web)s</b>. If the problem persists, do \ some research to determine whether it has already been discovered and \ addressed. Try Googling the error message or browsing the nmap-dev \ archives at http://seclists.org/. Read the full manual page as well. If \ nothing comes of this, mail a bug report to \ <b>&lt;dev@nmap.org&gt;</b>. Please include everything you have \ learned about the problem, as well as what version of Nmap you are \ running and what operating system version it is running on. Problem \ reports and %(nmap)s usage questions sent to dev@nmap.org are \ far more likely to be answered than those sent to Fyodor directly. Code patches to fix bugs are even better than bug reports. Basic \ instructions for creating patch files with your changes are available at \ http://nmap.org/data/HACKING. Patches may be sent to nmap-dev \ (recommended) or to Fyodor directly. """) % { "app": escape(APP_DISPLAY_NAME), "nmap": escape(NMAP_DISPLAY_NAME), "nmap_web": escape(NMAP_WEB_SITE) }) self.vbox.add(self.text) self.button_box.set_layout(gtk.BUTTONBOX_END) self.button_box.pack_start(self.btn_ok) self.vbox._pack_noexpand_nofill(self.button_box) self.add(self.vbox) def _connect_widgets(self): self.btn_ok.connect("clicked", self.close) self.connect("delete-event", self.close) def close(self, widget=None, event=None): self.destroy() if __name__ == "__main__": w = BugReport() w.show_all() w.connect("delete-event", lambda x, y: gtk.main_quit()) gtk.main()
gpl-3.0
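The BugReport window above follows the usual PyGTK create/pack/connect structure (_create_widgets, _pack_widgets, _connect_widgets). A minimal sketch of the same pattern, assuming PyGTK 2 on Python 2 is available; the widgets and labels are illustrative:

    import gtk

    # create
    win = gtk.Window()
    win.set_title("Example")
    label = gtk.Label("Hello")
    button = gtk.Button(stock=gtk.STOCK_OK)

    # pack
    vbox = gtk.VBox()
    vbox.pack_start(label)
    vbox.pack_start(button, expand=False)
    win.add(vbox)

    # connect
    button.connect("clicked", lambda w: win.destroy())
    win.connect("destroy", lambda w: gtk.main_quit())

    win.show_all()
    gtk.main()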
hoosteeno/kuma
vendor/packages/babel/messages/catalog.py
34
30946
# -*- coding: utf-8 -*- """ babel.messages.catalog ~~~~~~~~~~~~~~~~~~~~~~ Data structures for message catalogs. :copyright: (c) 2013 by the Babel Team. :license: BSD, see LICENSE for more details. """ import re import time from cgi import parse_header from datetime import datetime, time as time_ from difflib import get_close_matches from email import message_from_string from copy import copy from babel import __version__ as VERSION from babel.core import Locale from babel.dates import format_datetime from babel.messages.plurals import get_plural from babel.util import odict, distinct, LOCALTZ, FixedOffsetTimezone from babel._compat import string_types, number_types, PY2, cmp __all__ = ['Message', 'Catalog', 'TranslationError'] PYTHON_FORMAT = re.compile(r'''(?x) \% (?:\(([\w]*)\))? ( [-#0\ +]?(?:\*|[\d]+)? (?:\.(?:\*|[\d]+))? [hlL]? ) ([diouxXeEfFgGcrs%]) ''') def _parse_datetime_header(value): match = re.match(r'^(?P<datetime>.*?)(?P<tzoffset>[+-]\d{4})?$', value) tt = time.strptime(match.group('datetime'), '%Y-%m-%d %H:%M') ts = time.mktime(tt) dt = datetime.fromtimestamp(ts) # Separate the offset into a sign component, hours, and # minutes tzoffset = match.group('tzoffset') if tzoffset is not None: plus_minus_s, rest = tzoffset[0], tzoffset[1:] hours_offset_s, mins_offset_s = rest[:2], rest[2:] # Make them all integers plus_minus = int(plus_minus_s + '1') hours_offset = int(hours_offset_s) mins_offset = int(mins_offset_s) # Calculate net offset net_mins_offset = hours_offset * 60 net_mins_offset += mins_offset net_mins_offset *= plus_minus # Create an offset object tzoffset = FixedOffsetTimezone(net_mins_offset) # Store the offset in a datetime object dt = dt.replace(tzinfo=tzoffset) return dt class Message(object): """Representation of a single message in a catalog.""" def __init__(self, id, string=u'', locations=(), flags=(), auto_comments=(), user_comments=(), previous_id=(), lineno=None, context=None): """Create the message object. 
:param id: the message ID, or a ``(singular, plural)`` tuple for pluralizable messages :param string: the translated message string, or a ``(singular, plural)`` tuple for pluralizable messages :param locations: a sequence of ``(filenname, lineno)`` tuples :param flags: a set or sequence of flags :param auto_comments: a sequence of automatic comments for the message :param user_comments: a sequence of user comments for the message :param previous_id: the previous message ID, or a ``(singular, plural)`` tuple for pluralizable messages :param lineno: the line number on which the msgid line was found in the PO file, if any :param context: the message context """ self.id = id #: The message ID if not string and self.pluralizable: string = (u'', u'') self.string = string #: The message translation self.locations = list(distinct(locations)) self.flags = set(flags) if id and self.python_format: self.flags.add('python-format') else: self.flags.discard('python-format') self.auto_comments = list(distinct(auto_comments)) self.user_comments = list(distinct(user_comments)) if isinstance(previous_id, string_types): self.previous_id = [previous_id] else: self.previous_id = list(previous_id) self.lineno = lineno self.context = context def __repr__(self): return '<%s %r (flags: %r)>' % (type(self).__name__, self.id, list(self.flags)) def __cmp__(self, obj): """Compare Messages, taking into account plural ids""" def values_to_compare(): if isinstance(obj, Message): plural = self.pluralizable obj_plural = obj.pluralizable if plural and obj_plural: return self.id[0], obj.id[0] elif plural: return self.id[0], obj.id elif obj_plural: return self.id, obj.id[0] return self.id, obj.id this, other = values_to_compare() return cmp(this, other) def __gt__(self, other): return self.__cmp__(other) > 0 def __lt__(self, other): return self.__cmp__(other) < 0 def __ge__(self, other): return self.__cmp__(other) >= 0 def __le__(self, other): return self.__cmp__(other) <= 0 def __eq__(self, other): return self.__cmp__(other) == 0 def __ne__(self, other): return self.__cmp__(other) != 0 def clone(self): return Message(*map(copy, (self.id, self.string, self.locations, self.flags, self.auto_comments, self.user_comments, self.previous_id, self.lineno, self.context))) def check(self, catalog=None): """Run various validation checks on the message. Some validations are only performed if the catalog is provided. This method returns a sequence of `TranslationError` objects. :rtype: ``iterator`` :param catalog: A catalog instance that is passed to the checkers :see: `Catalog.check` for a way to perform checks for all messages in a catalog. """ from babel.messages.checkers import checkers errors = [] for checker in checkers: try: checker(catalog, self) except TranslationError as e: errors.append(e) return errors @property def fuzzy(self): """Whether the translation is fuzzy. >>> Message('foo').fuzzy False >>> msg = Message('foo', 'foo', flags=['fuzzy']) >>> msg.fuzzy True >>> msg <Message 'foo' (flags: ['fuzzy'])> :type: `bool`""" return 'fuzzy' in self.flags @property def pluralizable(self): """Whether the message is plurizable. >>> Message('foo').pluralizable False >>> Message(('foo', 'bar')).pluralizable True :type: `bool`""" return isinstance(self.id, (list, tuple)) @property def python_format(self): """Whether the message contains Python-style parameters. 
>>> Message('foo %(name)s bar').python_format True >>> Message(('foo %(name)s', 'foo %(name)s')).python_format True :type: `bool`""" ids = self.id if not isinstance(ids, (list, tuple)): ids = [ids] return any(PYTHON_FORMAT.search(id) for id in ids) class TranslationError(Exception): """Exception thrown by translation checkers when invalid message translations are encountered.""" DEFAULT_HEADER = u"""\ # Translations template for PROJECT. # Copyright (C) YEAR ORGANIZATION # This file is distributed under the same license as the PROJECT project. # FIRST AUTHOR <EMAIL@ADDRESS>, YEAR. #""" if PY2: def _parse_header(header_string): # message_from_string only works for str, not for unicode headers = message_from_string(header_string.encode('utf8')) decoded_headers = {} for name, value in headers.items(): name = name.decode('utf8') value = value.decode('utf8') decoded_headers[name] = value return decoded_headers else: _parse_header = message_from_string class Catalog(object): """Representation of a message catalog.""" def __init__(self, locale=None, domain=None, header_comment=DEFAULT_HEADER, project=None, version=None, copyright_holder=None, msgid_bugs_address=None, creation_date=None, revision_date=None, last_translator=None, language_team=None, charset=None, fuzzy=True): """Initialize the catalog object. :param locale: the locale identifier or `Locale` object, or `None` if the catalog is not bound to a locale (which basically means it's a template) :param domain: the message domain :param header_comment: the header comment as string, or `None` for the default header :param project: the project's name :param version: the project's version :param copyright_holder: the copyright holder of the catalog :param msgid_bugs_address: the email address or URL to submit bug reports to :param creation_date: the date the catalog was created :param revision_date: the date the catalog was revised :param last_translator: the name and email of the last translator :param language_team: the name and email of the language team :param charset: the encoding to use in the output (defaults to utf-8) :param fuzzy: the fuzzy bit on the catalog header """ self.domain = domain #: The message domain if locale: locale = Locale.parse(locale) self.locale = locale #: The locale or `None` self._header_comment = header_comment self._messages = odict() self.project = project or 'PROJECT' #: The project name self.version = version or 'VERSION' #: The project version self.copyright_holder = copyright_holder or 'ORGANIZATION' self.msgid_bugs_address = msgid_bugs_address or 'EMAIL@ADDRESS' self.last_translator = last_translator or 'FULL NAME <EMAIL@ADDRESS>' """Name and email address of the last translator.""" self.language_team = language_team or 'LANGUAGE <LL@li.org>' """Name and email address of the language team.""" self.charset = charset or 'utf-8' if creation_date is None: creation_date = datetime.now(LOCALTZ) elif isinstance(creation_date, datetime) and not creation_date.tzinfo: creation_date = creation_date.replace(tzinfo=LOCALTZ) self.creation_date = creation_date #: Creation date of the template if revision_date is None: revision_date = 'YEAR-MO-DA HO:MI+ZONE' elif isinstance(revision_date, datetime) and not revision_date.tzinfo: revision_date = revision_date.replace(tzinfo=LOCALTZ) self.revision_date = revision_date #: Last revision date of the catalog self.fuzzy = fuzzy #: Catalog header fuzzy bit (`True` or `False`) self.obsolete = odict() #: Dictionary of obsolete messages self._num_plurals = None self._plural_expr = 
None def _get_header_comment(self): comment = self._header_comment year = datetime.now(LOCALTZ).strftime('%Y') if hasattr(self.revision_date, 'strftime'): year = self.revision_date.strftime('%Y') comment = comment.replace('PROJECT', self.project) \ .replace('VERSION', self.version) \ .replace('YEAR', year) \ .replace('ORGANIZATION', self.copyright_holder) if self.locale: comment = comment.replace('Translations template', '%s translations' % self.locale.english_name) return comment def _set_header_comment(self, string): self._header_comment = string header_comment = property(_get_header_comment, _set_header_comment, doc="""\ The header comment for the catalog. >>> catalog = Catalog(project='Foobar', version='1.0', ... copyright_holder='Foo Company') >>> print catalog.header_comment #doctest: +ELLIPSIS # Translations template for Foobar. # Copyright (C) ... Foo Company # This file is distributed under the same license as the Foobar project. # FIRST AUTHOR <EMAIL@ADDRESS>, .... # The header can also be set from a string. Any known upper-case variables will be replaced when the header is retrieved again: >>> catalog = Catalog(project='Foobar', version='1.0', ... copyright_holder='Foo Company') >>> catalog.header_comment = '''\\ ... # The POT for my really cool PROJECT project. ... # Copyright (C) 1990-2003 ORGANIZATION ... # This file is distributed under the same license as the PROJECT ... # project. ... #''' >>> print catalog.header_comment # The POT for my really cool Foobar project. # Copyright (C) 1990-2003 Foo Company # This file is distributed under the same license as the Foobar # project. # :type: `unicode` """) def _get_mime_headers(self): headers = [] headers.append(('Project-Id-Version', '%s %s' % (self.project, self.version))) headers.append(('Report-Msgid-Bugs-To', self.msgid_bugs_address)) headers.append(('POT-Creation-Date', format_datetime(self.creation_date, 'yyyy-MM-dd HH:mmZ', locale='en'))) if isinstance(self.revision_date, (datetime, time_) + number_types): headers.append(('PO-Revision-Date', format_datetime(self.revision_date, 'yyyy-MM-dd HH:mmZ', locale='en'))) else: headers.append(('PO-Revision-Date', self.revision_date)) headers.append(('Last-Translator', self.last_translator)) if (self.locale is not None) and ('LANGUAGE' in self.language_team): headers.append(('Language-Team', self.language_team.replace('LANGUAGE', str(self.locale)))) else: headers.append(('Language-Team', self.language_team)) if self.locale is not None: headers.append(('Plural-Forms', self.plural_forms)) headers.append(('MIME-Version', '1.0')) headers.append(('Content-Type', 'text/plain; charset=%s' % self.charset)) headers.append(('Content-Transfer-Encoding', '8bit')) headers.append(('Generated-By', 'Babel %s\n' % VERSION)) return headers def _set_mime_headers(self, headers): for name, value in headers: name = name.lower() if name == 'project-id-version': parts = value.split(' ') self.project = u' '.join(parts[:-1]) self.version = parts[-1] elif name == 'report-msgid-bugs-to': self.msgid_bugs_address = value elif name == 'last-translator': self.last_translator = value elif name == 'language-team': self.language_team = value elif name == 'content-type': mimetype, params = parse_header(value) if 'charset' in params: self.charset = params['charset'].lower() elif name == 'plural-forms': _, params = parse_header(' ;' + value) self._num_plurals = int(params.get('nplurals', 2)) self._plural_expr = params.get('plural', '(n != 1)') elif name == 'pot-creation-date': self.creation_date = 
_parse_datetime_header(value) elif name == 'po-revision-date': # Keep the value if it's not the default one if 'YEAR' not in value: self.revision_date = _parse_datetime_header(value) mime_headers = property(_get_mime_headers, _set_mime_headers, doc="""\ The MIME headers of the catalog, used for the special ``msgid ""`` entry. The behavior of this property changes slightly depending on whether a locale is set or not, the latter indicating that the catalog is actually a template for actual translations. Here's an example of the output for such a catalog template: >>> from babel.dates import UTC >>> created = datetime(1990, 4, 1, 15, 30, tzinfo=UTC) >>> catalog = Catalog(project='Foobar', version='1.0', ... creation_date=created) >>> for name, value in catalog.mime_headers: ... print '%s: %s' % (name, value) Project-Id-Version: Foobar 1.0 Report-Msgid-Bugs-To: EMAIL@ADDRESS POT-Creation-Date: 1990-04-01 15:30+0000 PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE Last-Translator: FULL NAME <EMAIL@ADDRESS> Language-Team: LANGUAGE <LL@li.org> MIME-Version: 1.0 Content-Type: text/plain; charset=utf-8 Content-Transfer-Encoding: 8bit Generated-By: Babel ... And here's an example of the output when the locale is set: >>> revised = datetime(1990, 8, 3, 12, 0, tzinfo=UTC) >>> catalog = Catalog(locale='de_DE', project='Foobar', version='1.0', ... creation_date=created, revision_date=revised, ... last_translator='John Doe <jd@example.com>', ... language_team='de_DE <de@example.com>') >>> for name, value in catalog.mime_headers: ... print '%s: %s' % (name, value) Project-Id-Version: Foobar 1.0 Report-Msgid-Bugs-To: EMAIL@ADDRESS POT-Creation-Date: 1990-04-01 15:30+0000 PO-Revision-Date: 1990-08-03 12:00+0000 Last-Translator: John Doe <jd@example.com> Language-Team: de_DE <de@example.com> Plural-Forms: nplurals=2; plural=(n != 1) MIME-Version: 1.0 Content-Type: text/plain; charset=utf-8 Content-Transfer-Encoding: 8bit Generated-By: Babel ... :type: `list` """) @property def num_plurals(self): """The number of plurals used by the catalog or locale. >>> Catalog(locale='en').num_plurals 2 >>> Catalog(locale='ga').num_plurals 3 :type: `int`""" if self._num_plurals is None: num = 2 if self.locale: num = get_plural(self.locale)[0] self._num_plurals = num return self._num_plurals @property def plural_expr(self): """The plural expression used by the catalog or locale. >>> Catalog(locale='en').plural_expr '(n != 1)' >>> Catalog(locale='ga').plural_expr '(n==1 ? 0 : n==2 ? 1 : 2)' :type: `string_types`""" if self._plural_expr is None: expr = '(n != 1)' if self.locale: expr = get_plural(self.locale)[1] self._plural_expr = expr return self._plural_expr @property def plural_forms(self): """Return the plural forms declaration for the locale. >>> Catalog(locale='en').plural_forms 'nplurals=2; plural=(n != 1)' >>> Catalog(locale='pt_BR').plural_forms 'nplurals=2; plural=(n > 1)' :type: `str`""" return 'nplurals=%s; plural=%s' % (self.num_plurals, self.plural_expr) def __contains__(self, id): """Return whether the catalog has a message with the specified ID.""" return self._key_for(id) in self._messages def __len__(self): """The number of messages in the catalog. This does not include the special ``msgid ""`` entry.""" return len(self._messages) def __iter__(self): """Iterates through all the entries in the catalog, in the order they were added, yielding a `Message` object for every entry. 
:rtype: ``iterator``""" buf = [] for name, value in self.mime_headers: buf.append('%s: %s' % (name, value)) flags = set() if self.fuzzy: flags |= set(['fuzzy']) yield Message(u'', '\n'.join(buf), flags=flags) for key in self._messages: yield self._messages[key] def __repr__(self): locale = '' if self.locale: locale = ' %s' % self.locale return '<%s %r%s>' % (type(self).__name__, self.domain, locale) def __delitem__(self, id): """Delete the message with the specified ID.""" self.delete(id) def __getitem__(self, id): """Return the message with the specified ID. :param id: the message ID """ return self.get(id) def __setitem__(self, id, message): """Add or update the message with the specified ID. >>> catalog = Catalog() >>> catalog[u'foo'] = Message(u'foo') >>> catalog[u'foo'] <Message u'foo' (flags: [])> If a message with that ID is already in the catalog, it is updated to include the locations and flags of the new message. >>> catalog = Catalog() >>> catalog[u'foo'] = Message(u'foo', locations=[('main.py', 1)]) >>> catalog[u'foo'].locations [('main.py', 1)] >>> catalog[u'foo'] = Message(u'foo', locations=[('utils.py', 5)]) >>> catalog[u'foo'].locations [('main.py', 1), ('utils.py', 5)] :param id: the message ID :param message: the `Message` object """ assert isinstance(message, Message), 'expected a Message object' key = self._key_for(id, message.context) current = self._messages.get(key) if current: if message.pluralizable and not current.pluralizable: # The new message adds pluralization current.id = message.id current.string = message.string current.locations = list(distinct(current.locations + message.locations)) current.auto_comments = list(distinct(current.auto_comments + message.auto_comments)) current.user_comments = list(distinct(current.user_comments + message.user_comments)) current.flags |= message.flags message = current elif id == '': # special treatment for the header message self.mime_headers = _parse_header(message.string).items() self.header_comment = '\n'.join([('# %s' % c).rstrip() for c in message.user_comments]) self.fuzzy = message.fuzzy else: if isinstance(id, (list, tuple)): assert isinstance(message.string, (list, tuple)), \ 'Expected sequence but got %s' % type(message.string) self._messages[key] = message def add(self, id, string=None, locations=(), flags=(), auto_comments=(), user_comments=(), previous_id=(), lineno=None, context=None): """Add or update the message with the specified ID. >>> catalog = Catalog() >>> catalog.add(u'foo') <Message ...> >>> catalog[u'foo'] <Message u'foo' (flags: [])> This method simply constructs a `Message` object with the given arguments and invokes `__setitem__` with that object. 
:param id: the message ID, or a ``(singular, plural)`` tuple for pluralizable messages :param string: the translated message string, or a ``(singular, plural)`` tuple for pluralizable messages :param locations: a sequence of ``(filenname, lineno)`` tuples :param flags: a set or sequence of flags :param auto_comments: a sequence of automatic comments :param user_comments: a sequence of user comments :param previous_id: the previous message ID, or a ``(singular, plural)`` tuple for pluralizable messages :param lineno: the line number on which the msgid line was found in the PO file, if any :param context: the message context """ message = Message(id, string, list(locations), flags, auto_comments, user_comments, previous_id, lineno=lineno, context=context) self[id] = message return message def check(self): """Run various validation checks on the translations in the catalog. For every message which fails validation, this method yield a ``(message, errors)`` tuple, where ``message`` is the `Message` object and ``errors`` is a sequence of `TranslationError` objects. :rtype: ``iterator`` """ for message in self._messages.values(): errors = message.check(catalog=self) if errors: yield message, errors def get(self, id, context=None): """Return the message with the specified ID and context. :param id: the message ID :param context: the message context, or ``None`` for no context """ return self._messages.get(self._key_for(id, context)) def delete(self, id, context=None): """Delete the message with the specified ID and context. :param id: the message ID :param context: the message context, or ``None`` for no context """ key = self._key_for(id, context) if key in self._messages: del self._messages[key] def update(self, template, no_fuzzy_matching=False): """Update the catalog based on the given template catalog. >>> from babel.messages import Catalog >>> template = Catalog() >>> template.add('green', locations=[('main.py', 99)]) <Message ...> >>> template.add('blue', locations=[('main.py', 100)]) <Message ...> >>> template.add(('salad', 'salads'), locations=[('util.py', 42)]) <Message ...> >>> catalog = Catalog(locale='de_DE') >>> catalog.add('blue', u'blau', locations=[('main.py', 98)]) <Message ...> >>> catalog.add('head', u'Kopf', locations=[('util.py', 33)]) <Message ...> >>> catalog.add(('salad', 'salads'), (u'Salat', u'Salate'), ... 
locations=[('util.py', 38)]) <Message ...> >>> catalog.update(template) >>> len(catalog) 3 >>> msg1 = catalog['green'] >>> msg1.string >>> msg1.locations [('main.py', 99)] >>> msg2 = catalog['blue'] >>> msg2.string u'blau' >>> msg2.locations [('main.py', 100)] >>> msg3 = catalog['salad'] >>> msg3.string (u'Salat', u'Salate') >>> msg3.locations [('util.py', 42)] Messages that are in the catalog but not in the template are removed from the main collection, but can still be accessed via the `obsolete` member: >>> 'head' in catalog False >>> catalog.obsolete.values() [<Message 'head' (flags: [])>] :param template: the reference catalog, usually read from a POT file :param no_fuzzy_matching: whether to use fuzzy matching of message IDs """ messages = self._messages remaining = messages.copy() self._messages = odict() # Prepare for fuzzy matching fuzzy_candidates = [] if not no_fuzzy_matching: fuzzy_candidates = dict([ (self._key_for(msgid), messages[msgid].context) for msgid in messages if msgid and messages[msgid].string ]) fuzzy_matches = set() def _merge(message, oldkey, newkey): message = message.clone() fuzzy = False if oldkey != newkey: fuzzy = True fuzzy_matches.add(oldkey) oldmsg = messages.get(oldkey) if isinstance(oldmsg.id, string_types): message.previous_id = [oldmsg.id] else: message.previous_id = list(oldmsg.id) else: oldmsg = remaining.pop(oldkey, None) message.string = oldmsg.string if isinstance(message.id, (list, tuple)): if not isinstance(message.string, (list, tuple)): fuzzy = True message.string = tuple( [message.string] + ([u''] * (len(message.id) - 1)) ) elif len(message.string) != self.num_plurals: fuzzy = True message.string = tuple(message.string[:len(oldmsg.string)]) elif isinstance(message.string, (list, tuple)): fuzzy = True message.string = message.string[0] message.flags |= oldmsg.flags if fuzzy: message.flags |= set([u'fuzzy']) self[message.id] = message for message in template: if message.id: key = self._key_for(message.id, message.context) if key in messages: _merge(message, key, key) else: if no_fuzzy_matching is False: # do some fuzzy matching with difflib if isinstance(key, tuple): matchkey = key[0] # just the msgid, no context else: matchkey = key matches = get_close_matches(matchkey.lower().strip(), fuzzy_candidates.keys(), 1) if matches: newkey = matches[0] newctxt = fuzzy_candidates[newkey] if newctxt is not None: newkey = newkey, newctxt _merge(message, newkey, key) continue self[message.id] = message for msgid in remaining: if no_fuzzy_matching or msgid not in fuzzy_matches: self.obsolete[msgid] = remaining[msgid] # Make updated catalog's POT-Creation-Date equal to the template # used to update the catalog self.creation_date = template.creation_date def _key_for(self, id, context=None): """The key for a message is just the singular ID even for pluralizable messages, but is a ``(msgid, msgctxt)`` tuple for context-specific messages. """ key = id if isinstance(key, (list, tuple)): key = id[0] if context is not None: key = (key, context) return key
mpl-2.0
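A short usage sketch of the Catalog.update() behaviour documented above, assuming Babel is installed; it mirrors the doctest in the update() docstring (messages missing from the template move to catalog.obsolete, existing translations are kept):

    from babel.messages import Catalog

    template = Catalog()                    # a POT-style template (no locale)
    template.add('green', locations=[('main.py', 99)])
    template.add(('salad', 'salads'), locations=[('util.py', 42)])

    catalog = Catalog(locale='de_DE')       # an actual translation catalog
    catalog.add('green', u'gruen', locations=[('main.py', 1)])
    catalog.add('head', u'Kopf', locations=[('util.py', 33)])

    catalog.update(template)                # sync with the template
    print('green' in catalog)               # True: kept, translation preserved
    print('head' in catalog)                # False: not in template anymore
    print(list(catalog.obsolete.values()))  # [<Message 'head' ...>]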
sszlm/MissionPlanner
Lib/site-packages/scipy/integrate/ode.py
55
25735
# Authors: Pearu Peterson, Pauli Virtanen, John Travers """ First-order ODE integrators. User-friendly interface to various numerical integrators for solving a system of first order ODEs with prescribed initial conditions:: d y(t)[i] --------- = f(t,y(t))[i], d t y(t=0)[i] = y0[i], where:: i = 0, ..., len(y0) - 1 class ode --------- A generic interface class to numeric integrators. It has the following methods:: integrator = ode(f,jac=None) integrator = integrator.set_integrator(name,**params) integrator = integrator.set_initial_value(y0,t0=0.0) integrator = integrator.set_f_params(*args) integrator = integrator.set_jac_params(*args) y1 = integrator.integrate(t1,step=0,relax=0) flag = integrator.successful() class complex_ode ----------------- This class has the same generic interface as ode, except it can handle complex f, y and Jacobians by transparently translating them into the equivalent real valued system. It supports the real valued solvers (i.e not zvode) and is an alternative to ode with the zvode solver, sometimes performing better. """ integrator_info = \ """ Available integrators --------------------- vode ~~~~ Real-valued Variable-coefficient Ordinary Differential Equation solver, with fixed-leading-coefficient implementation. It provides implicit Adams method (for non-stiff problems) and a method based on backward differentiation formulas (BDF) (for stiff problems). Source: http://www.netlib.org/ode/vode.f This integrator accepts the following parameters in set_integrator() method of the ode class: - atol : float or sequence absolute tolerance for solution - rtol : float or sequence relative tolerance for solution - lband : None or int - rband : None or int Jacobian band width, jac[i,j] != 0 for i-lband <= j <= i+rband. Setting these requires your jac routine to return the jacobian in packed format, jac_packed[i-j+lband, j] = jac[i,j]. - method: 'adams' or 'bdf' Which solver to use, Adams (non-stiff) or BDF (stiff) - with_jacobian : bool Whether to use the jacobian - nsteps : int Maximum number of (internally defined) steps allowed during one call to the solver. - first_step : float - min_step : float - max_step : float Limits for the step sizes used by the integrator. - order : int Maximum order used by the integrator, order <= 12 for Adams, <= 5 for BDF. zvode ~~~~~ Complex-valued Variable-coefficient Ordinary Differential Equation solver, with fixed-leading-coefficient implementation. It provides implicit Adams method (for non-stiff problems) and a method based on backward differentiation formulas (BDF) (for stiff problems). Source: http://www.netlib.org/ode/zvode.f This integrator accepts the same parameters in set_integrator() as the "vode" solver. :Note: When using ZVODE for a stiff system, it should only be used for the case in which the function f is analytic, that is, when each f(i) is an analytic function of each y(j). Analyticity means that the partial derivative df(i)/dy(j) is a unique complex number, and this fact is critical in the way ZVODE solves the dense or banded linear systems that arise in the stiff case. For a complex stiff ODE system in which f is not analytic, ZVODE is likely to have convergence failures, and for this problem one should instead use DVODE on the equivalent real system (in the real and imaginary parts of y). dopri5 ~~~~~~ Numerical solution of a system of first order ordinary differential equations y'=f(x,y). this is an explicit runge-kutta method of order (4)5 due to Dormand & Prince (with stepsize control and dense output). Authors: E. 
Hairer and G. Wanner Universite de Geneve, Dept. de Mathematiques CH-1211 Geneve 24, Switzerland e-mail: ernst.hairer@math.unige.ch gerhard.wanner@math.unige.ch This code is described in: E. Hairer, S.P. Norsett and G. Wanner, Solving Ordinary Differential Equations i. Nonstiff Problems. 2nd edition. Springer Series in Computational Mathematics, Springer-Verlag (1993) This integrator accepts the following parameters in set_integrator() method of the ode class: - atol : float or sequence absolute tolerance for solution - rtol : float or sequence relative tolerance for solution - nsteps : int Maximum number of (internally defined) steps allowed during one call to the solver. - first_step : float - max_step : float - safety : float Safety factor on new step selection (default 0.9) - ifactor : float - dfactor : float Maximum factor to increase/decrease step sixe by in one step - beta : float Beta parameter for stabilised step size control. dop853 ~~~~~~ Numerical solution of a system of first 0rder ordinary differential equations y'=f(x,y). this is an explicit runge-kutta method of order 8(5,3) due to Dormand & Prince (with stepsize control and dense output). Options and references the same as dopri5. """ if __doc__: __doc__ += integrator_info # XXX: Integrators must have: # =========================== # cvode - C version of vode and vodpk with many improvements. # Get it from http://www.netlib.org/ode/cvode.tar.gz # To wrap cvode to Python, one must write extension module by # hand. Its interface is too much 'advanced C' that using f2py # would be too complicated (or impossible). # # How to define a new integrator: # =============================== # # class myodeint(IntegratorBase): # # runner = <odeint function> or None # # def __init__(self,...): # required # <initialize> # # def reset(self,n,has_jac): # optional # # n - the size of the problem (number of equations) # # has_jac - whether user has supplied its own routine for Jacobian # <allocate memory,initialize further> # # def run(self,f,jac,y0,t0,t1,f_params,jac_params): # required # # this method is called to integrate from t=t0 to t=t1 # # with initial condition y0. f and jac are user-supplied functions # # that define the problem. f_params,jac_params are additional # # arguments # # to these functions. # <calculate y1> # if <calculation was unsuccesful>: # self.success = 0 # return t1,y1 # # # In addition, one can define step() and run_relax() methods (they # # take the same arguments as run()) if the integrator can support # # these features (see IntegratorBase doc strings). # # if myodeint.runner: # IntegratorBase.integrator_classes.append(myodeint) __all__ = ['ode', 'complex_ode'] __version__ = "$Id$" __docformat__ = "restructuredtext en" import re import warnings from numpy import asarray, array, zeros, int32, isscalar, real, imag import vode as _vode import _dop #------------------------------------------------------------------------------ # User interface #------------------------------------------------------------------------------ class ode(object): """\ A generic interface class to numeric integrators. 
See also -------- odeint : an integrator with a simpler interface based on lsoda from ODEPACK quad : for finding the area under a curve Examples -------- A problem to integrate and the corresponding jacobian: >>> from scipy import eye >>> from scipy.integrate import ode >>> >>> y0, t0 = [1.0j, 2.0], 0 >>> >>> def f(t, y, arg1): >>> return [1j*arg1*y[0] + y[1], -arg1*y[1]**2] >>> def jac(t, y, arg1): >>> return [[1j*arg1, 1], [0, -arg1*2*y[1]]] The integration: >>> r = ode(f, jac).set_integrator('zvode', method='bdf', with_jacobian=True) >>> r.set_initial_value(y0, t0).set_f_params(2.0).set_jac_params(2.0) >>> t1 = 10 >>> dt = 1 >>> while r.successful() and r.t < t1: >>> r.integrate(r.t+dt) >>> print r.t, r.y """ if __doc__: __doc__ += integrator_info def __init__(self, f, jac=None): """ Define equation y' = f(y,t) where (optional) jac = df/dy. Parameters ---------- f : f(t, y, *f_args) Rhs of the equation. t is a scalar, y.shape == (n,). f_args is set by calling set_f_params(*args) jac : jac(t, y, *jac_args) Jacobian of the rhs, jac[i,j] = d f[i] / d y[j] jac_args is set by calling set_f_params(*args) """ self.stiff = 0 self.f = f self.jac = jac self.f_params = () self.jac_params = () self.y = [] def set_initial_value(self, y, t=0.0): """Set initial conditions y(t) = y.""" if isscalar(y): y = [y] n_prev = len(self.y) if not n_prev: self.set_integrator('') # find first available integrator self.y = asarray(y, self._integrator.scalar) self.t = t self._integrator.reset(len(self.y),self.jac is not None) return self def set_integrator(self, name, **integrator_params): """ Set integrator by name. Parameters ---------- name : str Name of the integrator. integrator_params : Additional parameters for the integrator. """ integrator = find_integrator(name) if integrator is None: # FIXME: this really should be raise an exception. Will that break # any code? warnings.warn('No integrator name match with %r or is not ' 'available.' % name) else: self._integrator = integrator(**integrator_params) if not len(self.y): self.t = 0.0 self.y = array([0.0], self._integrator.scalar) self._integrator.reset(len(self.y),self.jac is not None) return self def integrate(self, t, step=0, relax=0): """Find y=y(t), set y as an initial condition, and return y.""" if step and self._integrator.supports_step: mth = self._integrator.step elif relax and self._integrator.supports_run_relax: mth = self._integrator.run_relax else: mth = self._integrator.run self.y,self.t = mth(self.f,self.jac or (lambda :None), self.y,self.t,t, self.f_params,self.jac_params) return self.y def successful(self): """Check if integration was successful.""" try: self._integrator except AttributeError: self.set_integrator('') return self._integrator.success==1 def set_f_params(self,*args): """Set extra parameters for user-supplied function f.""" self.f_params = args return self def set_jac_params(self,*args): """Set extra parameters for user-supplied function jac.""" self.jac_params = args return self class complex_ode(ode): """ A wrapper of ode for complex systems. For usage examples, see `ode`. """ def __init__(self, f, jac=None): """ Define equation y' = f(y,t), where y and f can be complex. Parameters ---------- f : f(t, y, *f_args) Rhs of the equation. t is a scalar, y.shape == (n,). 
f_args is set by calling set_f_params(*args) jac : jac(t, y, *jac_args) Jacobian of the rhs, jac[i,j] = d f[i] / d y[j] jac_args is set by calling set_f_params(*args) """ self.cf = f self.cjac = jac if jac is not None: ode.__init__(self, self._wrap, self._wrap_jac) else: ode.__init__(self, self._wrap, None) def _wrap(self, t, y, *f_args): f = self.cf(*((t, y[::2] + 1j*y[1::2]) + f_args)) self.tmp[::2] = real(f) self.tmp[1::2] = imag(f) return self.tmp def _wrap_jac(self, t, y, *jac_args): jac = self.cjac(*((t, y[::2] + 1j*y[1::2]) + jac_args)) self.jac_tmp[1::2,1::2] = self.jac_tmp[::2,::2] = real(jac) self.jac_tmp[1::2,::2] = imag(jac) self.jac_tmp[::2,1::2] = -self.jac_tmp[1::2,::2] return self.jac_tmp def set_integrator(self, name, **integrator_params): """ Set integrator by name. Parameters ---------- name : str Name of the integrator integrator_params : Additional parameters for the integrator. """ if name == 'zvode': raise ValueError("zvode should be used with ode, not zode") return ode.set_integrator(self, name, **integrator_params) def set_initial_value(self, y, t=0.0): """Set initial conditions y(t) = y.""" y = asarray(y) self.tmp = zeros(y.size*2, 'float') self.tmp[::2] = real(y) self.tmp[1::2] = imag(y) if self.cjac is not None: self.jac_tmp = zeros((y.size*2, y.size*2), 'float') return ode.set_initial_value(self, self.tmp, t) def integrate(self, t, step=0, relax=0): """Find y=y(t), set y as an initial condition, and return y.""" y = ode.integrate(self, t, step, relax) return y[::2] + 1j*y[1::2] #------------------------------------------------------------------------------ # ODE integrators #------------------------------------------------------------------------------ def find_integrator(name): for cl in IntegratorBase.integrator_classes: if re.match(name,cl.__name__,re.I): return cl return None class IntegratorBase(object): runner = None # runner is None => integrator is not available success = None # success==1 if integrator was called successfully supports_run_relax = None supports_step = None integrator_classes = [] scalar = float def reset(self,n,has_jac): """Prepare integrator for call: allocate memory, set flags, etc. n - number of equations. has_jac - if user has supplied function for evaluating Jacobian. """ def run(self,f,jac,y0,t0,t1,f_params,jac_params): """Integrate from t=t0 to t=t1 using y0 as an initial condition. Return 2-tuple (y1,t1) where y1 is the result and t=t1 defines the stoppage coordinate of the result. """ raise NotImplementedError('all integrators must define ' 'run(f,jac,t0,t1,y0,f_params,jac_params)') def step(self,f,jac,y0,t0,t1,f_params,jac_params): """Make one integration step and return (y1,t1).""" raise NotImplementedError('%s does not support step() method' % self.__class__.__name__) def run_relax(self,f,jac,y0,t0,t1,f_params,jac_params): """Integrate from t=t0 to t>=t1 and return (y1,t).""" raise NotImplementedError('%s does not support run_relax() method' % self.__class__.__name__) #XXX: __str__ method for getting visual state of the integrator class vode(IntegratorBase): runner = getattr(_vode,'dvode',None) messages = {-1:'Excess work done on this call. (Perhaps wrong MF.)', -2:'Excess accuracy requested. (Tolerances too small.)', -3:'Illegal input detected. (See printed message.)', -4:'Repeated error test failures. (Check all input.)', -5:'Repeated convergence failures. (Perhaps bad' ' Jacobian supplied or wrong choice of MF or tolerances.)', -6:'Error weight became zero during problem. 
(Solution' ' component i vanished, and ATOL or ATOL(i) = 0.)' } supports_run_relax = 1 supports_step = 1 def __init__(self, method = 'adams', with_jacobian = 0, rtol=1e-6,atol=1e-12, lband=None,uband=None, order = 12, nsteps = 500, max_step = 0.0, # corresponds to infinite min_step = 0.0, first_step = 0.0, # determined by solver ): if re.match(method,r'adams',re.I): self.meth = 1 elif re.match(method,r'bdf',re.I): self.meth = 2 else: raise ValueError('Unknown integration method %s' % method) self.with_jacobian = with_jacobian self.rtol = rtol self.atol = atol self.mu = uband self.ml = lband self.order = order self.nsteps = nsteps self.max_step = max_step self.min_step = min_step self.first_step = first_step self.success = 1 def reset(self,n,has_jac): # Calculate parameters for Fortran subroutine dvode. if has_jac: if self.mu is None and self.ml is None: miter = 1 else: if self.mu is None: self.mu = 0 if self.ml is None: self.ml = 0 miter = 4 else: if self.mu is None and self.ml is None: if self.with_jacobian: miter = 2 else: miter = 0 else: if self.mu is None: self.mu = 0 if self.ml is None: self.ml = 0 if self.ml==self.mu==0: miter = 3 else: miter = 5 mf = 10*self.meth + miter if mf==10: lrw = 20 + 16*n elif mf in [11,12]: lrw = 22 + 16*n + 2*n*n elif mf == 13: lrw = 22 + 17*n elif mf in [14,15]: lrw = 22 + 18*n + (3*self.ml+2*self.mu)*n elif mf == 20: lrw = 20 + 9*n elif mf in [21,22]: lrw = 22 + 9*n + 2*n*n elif mf == 23: lrw = 22 + 10*n elif mf in [24,25]: lrw = 22 + 11*n + (3*self.ml+2*self.mu)*n else: raise ValueError('Unexpected mf=%s' % mf) if miter in [0,3]: liw = 30 else: liw = 30 + n rwork = zeros((lrw,), float) rwork[4] = self.first_step rwork[5] = self.max_step rwork[6] = self.min_step self.rwork = rwork iwork = zeros((liw,), int32) if self.ml is not None: iwork[0] = self.ml if self.mu is not None: iwork[1] = self.mu iwork[4] = self.order iwork[5] = self.nsteps iwork[6] = 2 # mxhnil self.iwork = iwork self.call_args = [self.rtol,self.atol,1,1,self.rwork,self.iwork,mf] self.success = 1 def run(self,*args): y1,t,istate = self.runner(*(args[:5]+tuple(self.call_args)+args[5:])) if istate <0: warnings.warn('vode: ' + self.messages.get(istate,'Unexpected istate=%s'%istate)) self.success = 0 else: self.call_args[3] = 2 # upgrade istate from 1 to 2 return y1,t def step(self,*args): itask = self.call_args[2] self.call_args[2] = 2 r = self.run(*args) self.call_args[2] = itask return r def run_relax(self,*args): itask = self.call_args[2] self.call_args[2] = 3 r = self.run(*args) self.call_args[2] = itask return r if vode.runner is not None: IntegratorBase.integrator_classes.append(vode) class zvode(vode): runner = getattr(_vode,'zvode',None) supports_run_relax = 1 supports_step = 1 scalar = complex def reset(self, n, has_jac): # Calculate parameters for Fortran subroutine dvode. 
if has_jac: if self.mu is None and self.ml is None: miter = 1 else: if self.mu is None: self.mu = 0 if self.ml is None: self.ml = 0 miter = 4 else: if self.mu is None and self.ml is None: if self.with_jacobian: miter = 2 else: miter = 0 else: if self.mu is None: self.mu = 0 if self.ml is None: self.ml = 0 if self.ml==self.mu==0: miter = 3 else: miter = 5 mf = 10*self.meth + miter if mf in (10,): lzw = 15*n elif mf in (11, 12): lzw = 15*n + 2*n**2 elif mf in (-11, -12): lzw = 15*n + n**2 elif mf in (13,): lzw = 16*n elif mf in (14,15): lzw = 17*n + (3*self.ml + 2*self.mu)*n elif mf in (-14,-15): lzw = 16*n + (2*self.ml + self.mu)*n elif mf in (20,): lzw = 8*n elif mf in (21, 22): lzw = 8*n + 2*n**2 elif mf in (-21,-22): lzw = 8*n + n**2 elif mf in (23,): lzw = 9*n elif mf in (24, 25): lzw = 10*n + (3*self.ml + 2*self.mu)*n elif mf in (-24, -25): lzw = 9*n + (2*self.ml + self.mu)*n lrw = 20 + n if miter in (0, 3): liw = 30 else: liw = 30 + n zwork = zeros((lzw,), complex) self.zwork = zwork rwork = zeros((lrw,), float) rwork[4] = self.first_step rwork[5] = self.max_step rwork[6] = self.min_step self.rwork = rwork iwork = zeros((liw,), int32) if self.ml is not None: iwork[0] = self.ml if self.mu is not None: iwork[1] = self.mu iwork[4] = self.order iwork[5] = self.nsteps iwork[6] = 2 # mxhnil self.iwork = iwork self.call_args = [self.rtol,self.atol,1,1, self.zwork,self.rwork,self.iwork,mf] self.success = 1 def run(self,*args): y1,t,istate = self.runner(*(args[:5]+tuple(self.call_args)+args[5:])) if istate < 0: warnings.warn('zvode: ' + self.messages.get(istate, 'Unexpected istate=%s'%istate)) self.success = 0 else: self.call_args[3] = 2 # upgrade istate from 1 to 2 return y1, t if zvode.runner is not None: IntegratorBase.integrator_classes.append(zvode) class dopri5(IntegratorBase): runner = getattr(_dop,'dopri5',None) name = 'dopri5' messages = { 1 : 'computation successful', 2 : 'comput. 
successful (interrupted by solout)', -1 : 'input is not consistent', -2 : 'larger nmax is needed', -3 : 'step size becomes too small', -4 : 'problem is probably stiff (interrupted)', } def __init__(self, rtol=1e-6,atol=1e-12, nsteps = 500, max_step = 0.0, first_step = 0.0, # determined by solver safety = 0.9, ifactor = 10.0, dfactor = 0.2, beta = 0.0, method = None ): self.rtol = rtol self.atol = atol self.nsteps = nsteps self.max_step = max_step self.first_step = first_step self.safety = safety self.ifactor = ifactor self.dfactor = dfactor self.beta = beta self.success = 1 def reset(self,n,has_jac): work = zeros((8*n+21,), float) work[1] = self.safety work[2] = self.dfactor work[3] = self.ifactor work[4] = self.beta work[5] = self.max_step work[6] = self.first_step self.work = work iwork = zeros((21,), int32) iwork[0] = self.nsteps self.iwork = iwork self.call_args = [self.rtol,self.atol,self._solout,self.work,self.iwork] self.success = 1 def run(self,f,jac,y0,t0,t1,f_params,jac_params): x,y,iwork,idid = self.runner(*((f,t0,y0,t1) + tuple(self.call_args))) if idid < 0: warnings.warn(self.name + ': ' + self.messages.get(idid, 'Unexpected idid=%s'%idid)) self.success = 0 return y,x def _solout(self, *args): # dummy solout function pass if dopri5.runner is not None: IntegratorBase.integrator_classes.append(dopri5) class dop853(dopri5): runner = getattr(_dop,'dop853',None) name = 'dop853' def __init__(self, rtol=1e-6,atol=1e-12, nsteps = 500, max_step = 0.0, first_step = 0.0, # determined by solver safety = 0.9, ifactor = 6.0, dfactor = 0.3, beta = 0.0, method = None ): self.rtol = rtol self.atol = atol self.nsteps = nsteps self.max_step = max_step self.first_step = first_step self.safety = safety self.ifactor = ifactor self.dfactor = dfactor self.beta = beta self.success = 1 def reset(self,n,has_jac): work = zeros((11*n+21,), float) work[1] = self.safety work[2] = self.dfactor work[3] = self.ifactor work[4] = self.beta work[5] = self.max_step work[6] = self.first_step self.work = work iwork = zeros((21,), int32) iwork[0] = self.nsteps self.iwork = iwork self.call_args = [self.rtol,self.atol,self._solout,self.work,self.iwork] self.success = 1 if dop853.runner is not None: IntegratorBase.integrator_classes.append(dop853)
gpl-3.0
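The integrator classes in the record above are normally reached through SciPy's public wrapper rather than instantiated directly. A minimal usage sketch, assuming the documented scipy.integrate.ode interface (the right-hand side, tolerances, and step size here are illustrative):

from scipy.integrate import ode

def rhs(t, y):
    # dy/dt = 1j*y, a simple complex-valued ODE that exercises zvode
    return 1j * y

solver = ode(rhs)
solver.set_integrator('zvode', method='bdf', rtol=1e-8, atol=1e-10)
solver.set_initial_value(1.0 + 0.0j, 0.0)
while solver.successful() and solver.t < 1.0:
    solver.integrate(solver.t + 0.1)
print(solver.t, solver.y)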
PuZZleDucK/pixelated-user-agent
service/pixelated/extensions/requests_urllib3.py
10
3360
#
# Copyright (c) 2014 ThoughtWorks, Inc.
#
# Pixelated is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Pixelated is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Pixelated. If not, see <http://www.gnu.org/licenses/>.
import requests

if requests.__version__ == '2.0.0':
    try:
        import requests.packages.urllib3.connectionpool
        from socket import error as SocketError, timeout as SocketTimeout
        from requests.packages.urllib3.packages.ssl_match_hostname import CertificateError, match_hostname
        import socket
        import ssl

        from requests.packages.urllib3.exceptions import (
            ClosedPoolError,
            ConnectTimeoutError,
            EmptyPoolError,
            HostChangedError,
            MaxRetryError,
            SSLError,
            ReadTimeoutError,
            ProxyError,
        )
        from requests.packages.urllib3.util import (
            assert_fingerprint,
            get_host,
            is_connection_dropped,
            resolve_cert_reqs,
            resolve_ssl_version,
            ssl_wrap_socket,
            Timeout,
        )

        def patched_connect(self):
            # Add certificate verification
            try:
                sock = socket.create_connection(address=(self.host, self.port), timeout=self.timeout)
            except SocketTimeout:
                raise ConnectTimeoutError(
                    self, "Connection to %s timed out. (connect timeout=%s)" %
                    (self.host, self.timeout))

            resolved_cert_reqs = resolve_cert_reqs(self.cert_reqs)
            resolved_ssl_version = resolve_ssl_version(self.ssl_version)

            if self._tunnel_host:
                self.sock = sock
                # Calls self._set_hostport(), so self.host is
                # self._tunnel_host below.
                self._tunnel()

            # Wrap socket using verification with the root certs in
            # trusted_root_certs
            self.sock = ssl_wrap_socket(sock, self.key_file, self.cert_file,
                                        cert_reqs=resolved_cert_reqs,
                                        ca_certs=self.ca_certs,
                                        server_hostname=self.host,
                                        ssl_version=resolved_ssl_version)

            if self.assert_fingerprint:
                assert_fingerprint(self.sock.getpeercert(binary_form=True),
                                   self.assert_fingerprint)
            elif resolved_cert_reqs != ssl.CERT_NONE \
                    and self.assert_hostname is not False:
                match_hostname(self.sock.getpeercert(),
                               self.assert_hostname or self.host)

        requests.packages.urllib3.connectionpool.VerifiedHTTPSConnection.connect = patched_connect
    except ImportError:
        pass  # The patch is specific for the debian package. Ignore it if it can't be found
agpl-3.0
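The record above illustrates a version-guarded monkey patch: replace a method on a third-party class only when the exact known-bad release is installed, and fail open otherwise. A minimal sketch of the same pattern with a hypothetical target library (thirdparty, Client, and fixed_request are placeholders, not a real API):

import thirdparty  # hypothetical library with a bug in release 1.2.3

if thirdparty.__version__ == '1.2.3':  # patch only the known-bad release
    try:
        from thirdparty.client import Client

        def fixed_request(self, *args, **kwargs):
            # corrected behavior would go here; delegate to the original for the rest
            return Client._original_request(self, *args, **kwargs)

        Client._original_request = Client.request
        Client.request = fixed_request
    except ImportError:
        pass  # a repackaged build may lay modules out differently; skip the patch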
tobi-wan-kenobi/bumblebee-status
bumblebee_status/modules/contrib/spotify.py
1
6553
"""Displays the current song being played and allows pausing, skipping ahead, and skipping back. Requires the following library: * python-dbus Parameters: * spotify.format: Format string (defaults to '{artist} - {title}') Available values are: {album}, {title}, {artist}, {trackNumber} * spotify.layout: Comma-separated list to change order of widgets (defaults to song, previous, pause, next) Widget names are: spotify.song, spotify.prev, spotify.pause, spotify.next * spotify.concise_controls: When enabled, allows spotify to be controlled from just the spotify.song widget. Concise controls are: Left Click: Toggle Pause; Wheel Up: Next; Wheel Down; Previous. * spotify.bus_name: String (defaults to `spotify`) Available values: spotify, spotifyd contributed by `yvesh <https://github.com/yvesh>`_ - many thanks! added controls by `LtPeriwinkle <https://github.com/LtPeriwinkle>`_ - many thanks! fixed icons and layout parameter by `gkeep <https://github.com/gkeep>`_ - many thanks! """ import sys import dbus import core.module import core.widget import core.input import core.decorators import util.format import logging class Module(core.module.Module): def __init__(self, config, theme): super().__init__(config, theme, []) self.background = True self.__bus_name = self.parameter("bus_name", "spotify") self.__layout = util.format.aslist( self.parameter( "layout", "spotify.song,spotify.prev,spotify.pause,spotify.next", ) ) self.__bus = dbus.SessionBus() self.__song = "" self.__pause = "" self.__format = self.parameter("format", "{artist} - {title}") if self.__bus_name == "spotifyd": self.__cmd = "dbus-send --session --type=method_call --dest=org.mpris.MediaPlayer2.spotifyd \ /org/mpris/MediaPlayer2 org.mpris.MediaPlayer2.Player." else: self.__cmd = "dbus-send --session --type=method_call --dest=org.mpris.MediaPlayer2.spotify \ /org/mpris/MediaPlayer2 org.mpris.MediaPlayer2.Player." widget_map = {} for widget_name in self.__layout: widget = self.add_widget(name=widget_name) if widget_name == "spotify.prev": widget_map[widget] = { "button": core.input.LEFT_MOUSE, "cmd": self.__cmd + "Previous", } widget.set("state", "prev") elif widget_name == "spotify.pause": widget_map[widget] = { "button": core.input.LEFT_MOUSE, "cmd": self.__cmd + "PlayPause", } elif widget_name == "spotify.next": widget_map[widget] = { "button": core.input.LEFT_MOUSE, "cmd": self.__cmd + "Next", } widget.set("state", "next") elif widget_name == "spotify.song": if util.format.asbool(self.parameter("concise_controls", "false")): widget_map[widget] = [ { "button": core.input.LEFT_MOUSE, "cmd": self.__cmd + "PlayPause", }, { "button": core.input.WHEEL_UP, "cmd": self.__cmd + "Next", }, { "button": core.input.WHEEL_DOWN, "cmd": self.__cmd + "Previous", } ] else: raise KeyError( "The spotify module does not have a {widget_name!r} widget".format( widget_name=widget_name ) ) # is there any reason the inputs can't be directly registered above? 
for widget, callback_options in widget_map.items(): if isinstance(callback_options, dict): core.input.register(widget, **callback_options) elif isinstance(callback_options, list): # used by concise_controls for opts in callback_options: core.input.register(widget, **opts) def hidden(self): return self.string_song == "" def __get_song(self): bus = self.__bus if self.__bus_name == "spotifyd": spotify = bus.get_object( "org.mpris.MediaPlayer2.spotifyd", "/org/mpris/MediaPlayer2" ) else: spotify = bus.get_object( "org.mpris.MediaPlayer2.spotify", "/org/mpris/MediaPlayer2" ) spotify_iface = dbus.Interface(spotify, "org.freedesktop.DBus.Properties") props = spotify_iface.Get("org.mpris.MediaPlayer2.Player", "Metadata") self.__song = self.__format.format( album=str(props.get("xesam:album")), title=str(props.get("xesam:title")), artist=",".join(props.get("xesam:artist")), trackNumber=str(props.get("xesam:trackNumber")), ) def update(self): try: self.__get_song() if self.__bus_name == "spotifyd": bus = self.__bus.get_object( "org.mpris.MediaPlayer2.spotifyd", "/org/mpris/MediaPlayer2" ) else: bus = self.__bus.get_object( "org.mpris.MediaPlayer2.spotify", "/org/mpris/MediaPlayer2" ) for widget in self.widgets(): if widget.name == "spotify.pause": playback_status = str( dbus.Interface( bus, "org.freedesktop.DBus.Properties", ).Get("org.mpris.MediaPlayer2.Player", "PlaybackStatus") ) if playback_status == "Playing": widget.set("state", "playing") else: widget.set("state", "paused") elif widget.name == "spotify.song": widget.set("state", "song") widget.full_text(self.__song) except Exception as e: logging.exception(e) self.__song = "" @property def string_song(self): if sys.version_info.major < 3: return unicode(self.__song) return str(self.__song)
mit
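The module above talks to Spotify through the standard MPRIS D-Bus interface, and the same three calls work against any MPRIS-compliant player. A minimal standalone sketch, assuming python-dbus is installed and a player is running (the bus name is the only player-specific part):

import dbus

bus = dbus.SessionBus()
player = bus.get_object("org.mpris.MediaPlayer2.spotify", "/org/mpris/MediaPlayer2")
props = dbus.Interface(player, "org.freedesktop.DBus.Properties")

metadata = props.Get("org.mpris.MediaPlayer2.Player", "Metadata")
status = props.Get("org.mpris.MediaPlayer2.Player", "PlaybackStatus")
print("%s - %s [%s]" % (",".join(metadata.get("xesam:artist", [])),
                        metadata.get("xesam:title", ""), status))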
dagwieers/ansible
lib/ansible/modules/network/fortios/fortios_webfilter_content_header.py
24
9059
#!/usr/bin/python from __future__ import (absolute_import, division, print_function) # Copyright 2018 Fortinet, Inc. # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <https://www.gnu.org/licenses/>. # # the lib use python logging can get it if the following is set in your # Ansible config. __metaclass__ = type ANSIBLE_METADATA = {'status': ['preview'], 'supported_by': 'community', 'metadata_version': '1.1'} DOCUMENTATION = ''' --- module: fortios_webfilter_content_header short_description: Configure content types used by Web filter. description: - This module is able to configure a FortiGate or FortiOS by allowing the user to configure webfilter feature and content_header category. Examples includes all options and need to be adjusted to datasources before usage. Tested with FOS v6.0.2 version_added: "2.8" author: - Miguel Angel Munoz (@mamunozgonzalez) - Nicolas Thomas (@thomnico) notes: - Requires fortiosapi library developed by Fortinet - Run as a local_action in your playbook requirements: - fortiosapi>=0.9.8 options: host: description: - FortiOS or FortiGate ip address. required: true username: description: - FortiOS or FortiGate username. required: true password: description: - FortiOS or FortiGate password. default: "" vdom: description: - Virtual domain, among those defined previously. A vdom is a virtual instance of the FortiGate that can be configured and used as a different unit. default: root https: description: - Indicates if the requests towards FortiGate must use HTTPS protocol type: bool default: false webfilter_content_header: description: - Configure content types used by Web filter. default: null suboptions: state: description: - Indicates whether to create or remove the object choices: - present - absent comment: description: - Optional comments. entries: description: - Configure content types used by web filter. suboptions: action: description: - Action to take for this content type. choices: - block - allow - exempt category: description: - Categories that this content type applies to. pattern: description: - Content type (regular expression). required: true id: description: - ID. required: true name: description: - Name of table. ''' EXAMPLES = ''' - hosts: localhost vars: host: "192.168.122.40" username: "admin" password: "" vdom: "root" tasks: - name: Configure content types used by Web filter. fortios_webfilter_content_header: host: "{{ host }}" username: "{{ username }}" password: "{{ password }}" vdom: "{{ vdom }}" webfilter_content_header: state: "present" comment: "Optional comments." 
entries: - action: "block" category: "<your_own_value>" pattern: "<your_own_value>" id: "8" name: "default_name_9" ''' RETURN = ''' build: description: Build number of the fortigate image returned: always type: str sample: '1547' http_method: description: Last method used to provision the content into FortiGate returned: always type: str sample: 'PUT' http_status: description: Last result given by FortiGate on last operation applied returned: always type: str sample: "200" mkey: description: Master key (id) used in the last call to FortiGate returned: success type: str sample: "key1" name: description: Name of the table used to fulfill the request returned: always type: str sample: "urlfilter" path: description: Path of the table used to fulfill the request returned: always type: str sample: "webfilter" revision: description: Internal revision number returned: always type: str sample: "17.0.2.10658" serial: description: Serial number of the unit returned: always type: str sample: "FGVMEVYYQT3AB5352" status: description: Indication of the operation's result returned: always type: str sample: "success" vdom: description: Virtual domain used returned: always type: str sample: "root" version: description: Version of the FortiGate returned: always type: str sample: "v5.6.3" ''' from ansible.module_utils.basic import AnsibleModule fos = None def login(data): host = data['host'] username = data['username'] password = data['password'] fos.debug('on') if 'https' in data and not data['https']: fos.https('off') else: fos.https('on') fos.login(host, username, password) def filter_webfilter_content_header_data(json): option_list = ['comment', 'entries', 'id', 'name'] dictionary = {} for attribute in option_list: if attribute in json: dictionary[attribute] = json[attribute] return dictionary def webfilter_content_header(data, fos): vdom = data['vdom'] webfilter_content_header_data = data['webfilter_content_header'] filtered_data = filter_webfilter_content_header_data( webfilter_content_header_data) if webfilter_content_header_data['state'] == "present": return fos.set('webfilter', 'content-header', data=filtered_data, vdom=vdom) elif webfilter_content_header_data['state'] == "absent": return fos.delete('webfilter', 'content-header', mkey=filtered_data['id'], vdom=vdom) def fortios_webfilter(data, fos): login(data) methodlist = ['webfilter_content_header'] for method in methodlist: if data[method]: resp = eval(method)(data, fos) break fos.logout() return not resp['status'] == "success", resp['status'] == "success", resp def main(): fields = { "host": {"required": True, "type": "str"}, "username": {"required": True, "type": "str"}, "password": {"required": False, "type": "str", "no_log": True}, "vdom": {"required": False, "type": "str", "default": "root"}, "https": {"required": False, "type": "bool", "default": "False"}, "webfilter_content_header": { "required": False, "type": "dict", "options": { "state": {"required": True, "type": "str", "choices": ["present", "absent"]}, "comment": {"required": False, "type": "str"}, "entries": {"required": False, "type": "list", "options": { "action": {"required": False, "type": "str", "choices": ["block", "allow", "exempt"]}, "category": {"required": False, "type": "str"}, "pattern": {"required": True, "type": "str"} }}, "id": {"required": True, "type": "int"}, "name": {"required": False, "type": "str"} } } } module = AnsibleModule(argument_spec=fields, supports_check_mode=False) try: from fortiosapi import FortiOSAPI except ImportError: 
module.fail_json(msg="fortiosapi module is required") global fos fos = FortiOSAPI() is_error, has_changed, result = fortios_webfilter(module.params, fos) if not is_error: module.exit_json(changed=has_changed, meta=result) else: module.fail_json(msg="Error in repo", meta=result) if __name__ == '__main__': main()
gpl-3.0
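A pattern worth noting in the module above is whitelist-filtering user input down to the keys the backend API accepts, as filter_webfilter_content_header_data does. A minimal standalone sketch of that helper (the option names are illustrative):

def filter_payload(data, allowed=('comment', 'entries', 'id', 'name')):
    # Keep only the keys the remote API understands; drop everything else.
    return {key: data[key] for key in allowed if key in data}

payload = filter_payload({'id': 7, 'comment': 'demo', 'state': 'present'})
print(payload)  # {'comment': 'demo', 'id': 7} -- 'state' is control data and is dropped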
Bolton-and-Menk-GIS/photos-to-map
PhotosToMap/photomapper/gpsimage/PIL/XpmImagePlugin.py
40
3081
#
# The Python Imaging Library.
# $Id$
#
# XPM File handling
#
# History:
# 1996-12-29 fl   Created
# 2001-02-17 fl   Use 're' instead of 'regex' (Python 2.1) (0.7)
#
# Copyright (c) Secret Labs AB 1997-2001.
# Copyright (c) Fredrik Lundh 1996-2001.
#
# See the README file for information on usage and redistribution.
#

__version__ = "0.2"

import re, string
import Image, ImageFile, ImagePalette

# XPM header
xpm_head = re.compile("\"([0-9]*) ([0-9]*) ([0-9]*) ([0-9]*)")


def _accept(prefix):
    return prefix[:9] == "/* XPM */"

##
# Image plugin for X11 pixel maps.

class XpmImageFile(ImageFile.ImageFile):

    format = "XPM"
    format_description = "X11 Pixel Map"

    def _open(self):

        if not _accept(self.fp.read(9)):
            raise SyntaxError, "not an XPM file"

        # skip forward to next string
        while 1:
            s = self.fp.readline()
            if not s:
                raise SyntaxError, "broken XPM file"
            m = xpm_head.match(s)
            if m:
                break

        self.size = int(m.group(1)), int(m.group(2))

        pal = int(m.group(3))
        bpp = int(m.group(4))

        if pal > 256 or bpp != 1:
            raise ValueError, "cannot read this XPM file"

        #
        # load palette description

        palette = ["\0\0\0"] * 256

        for i in range(pal):

            s = self.fp.readline()
            if s[-2:] == '\r\n':
                s = s[:-2]
            elif s[-1:] in '\r\n':
                s = s[:-1]

            c = ord(s[1])
            s = string.split(s[2:-2])

            for i in range(0, len(s), 2):

                if s[i] == "c":

                    # process colour key
                    rgb = s[i+1]
                    if rgb == "None":
                        self.info["transparency"] = c
                    elif rgb[0] == "#":
                        # FIXME: handle colour names (see ImagePalette.py)
                        rgb = string.atoi(rgb[1:], 16)
                        palette[c] = chr((rgb >> 16) & 255) +\
                                     chr((rgb >> 8) & 255) +\
                                     chr(rgb & 255)
                    else:
                        # unknown colour
                        raise ValueError, "cannot read this XPM file"
                    break

            else:
                # missing colour key
                raise ValueError, "cannot read this XPM file"

        self.mode = "P"
        self.palette = ImagePalette.raw("RGB", string.join(palette, ""))

        self.tile = [("raw", (0, 0)+self.size, self.fp.tell(), ("P", 0, 1))]

    def load_read(self, bytes):

        #
        # load all image data in one chunk

        xsize, ysize = self.size

        s = [None] * ysize

        for i in range(ysize):
            s[i] = string.ljust(self.fp.readline()[1:xsize+1], xsize)

        self.fp = None

        return string.join(s, "")

#
# Registry

Image.register_open("XPM", XpmImageFile, _accept)
Image.register_extension("XPM", ".xpm")
Image.register_mime("XPM", "image/xpm")
gpl-3.0
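Once a file plugin like the one above is registered, it is reached implicitly through Image.open, which sniffs the header via the _accept hook. A sketch of that flow under the same Python 2-era PIL layout the record targets ('chart.xpm' is a placeholder path, and the plugin module is assumed to be importable alongside Image):

import Image            # classic PIL namespace, as used by the plugin above
import XpmImagePlugin   # importing the module runs the register_* calls

im = Image.open("chart.xpm")       # dispatched to XpmImageFile via _accept()
print im.format, im.size, im.mode  # "XPM", (width, height), "P"
im.convert("RGB").save("chart.png")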
Sixshaman/networkx
networkx/algorithms/isomorphism/tests/test_isomorphism.py
99
1183
#!/usr/bin/env python
from nose.tools import *
import networkx as nx
from networkx.algorithms import isomorphism as iso

class TestIsomorph:

    def setUp(self):
        self.G1 = nx.Graph()
        self.G2 = nx.Graph()
        self.G3 = nx.Graph()
        self.G4 = nx.Graph()
        self.G1.add_edges_from([[1, 2], [1, 3], [1, 5], [2, 3]])
        self.G2.add_edges_from([[10, 20], [20, 30], [10, 30], [10, 50]])
        self.G3.add_edges_from([[1, 2], [1, 3], [1, 5], [2, 5]])
        self.G4.add_edges_from([[1, 2], [1, 3], [1, 5], [2, 4]])

    def test_could_be_isomorphic(self):
        assert_true(iso.could_be_isomorphic(self.G1, self.G2))
        assert_true(iso.could_be_isomorphic(self.G1, self.G3))
        assert_false(iso.could_be_isomorphic(self.G1, self.G4))
        assert_true(iso.could_be_isomorphic(self.G3, self.G2))

    def test_fast_could_be_isomorphic(self):
        assert_true(iso.fast_could_be_isomorphic(self.G3, self.G2))

    def test_faster_could_be_isomorphic(self):
        assert_true(iso.faster_could_be_isomorphic(self.G3, self.G2))

    def test_is_isomorphic(self):
        assert_true(iso.is_isomorphic(self.G1, self.G2))
        assert_false(iso.is_isomorphic(self.G1, self.G4))
bsd-3-clause
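The tests above lean on networkx's cheap/exact split: could_be_isomorphic only compares degree-based invariants and can report True for non-isomorphic graphs, while is_isomorphic runs the exact (worst-case exponential) matcher. A small sketch of the distinction, reusing two graphs from the record:

import networkx as nx
from networkx.algorithms import isomorphism as iso

g1 = nx.Graph([(1, 2), (1, 3), (1, 5), (2, 3)])  # triangle plus a pendant node
g3 = nx.Graph([(1, 2), (1, 3), (1, 5), (2, 5)])  # same shape, relabeled

print(iso.could_be_isomorphic(g1, g3))  # True -- fast fingerprint check only
print(iso.is_isomorphic(g1, g3))        # True -- the exact matcher confirms it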
kkdd/arangodb
3rdParty/V8-4.3.61/third_party/python_26/Lib/encodings/cp1006.py
593
13824
""" Python Character Mapping Codec cp1006 generated from 'MAPPINGS/VENDORS/MISC/CP1006.TXT' with gencodec.py. """#" import codecs ### Codec APIs class Codec(codecs.Codec): def encode(self,input,errors='strict'): return codecs.charmap_encode(input,errors,encoding_table) def decode(self,input,errors='strict'): return codecs.charmap_decode(input,errors,decoding_table) class IncrementalEncoder(codecs.IncrementalEncoder): def encode(self, input, final=False): return codecs.charmap_encode(input,self.errors,encoding_table)[0] class IncrementalDecoder(codecs.IncrementalDecoder): def decode(self, input, final=False): return codecs.charmap_decode(input,self.errors,decoding_table)[0] class StreamWriter(Codec,codecs.StreamWriter): pass class StreamReader(Codec,codecs.StreamReader): pass ### encodings module API def getregentry(): return codecs.CodecInfo( name='cp1006', encode=Codec().encode, decode=Codec().decode, incrementalencoder=IncrementalEncoder, incrementaldecoder=IncrementalDecoder, streamreader=StreamReader, streamwriter=StreamWriter, ) ### Decoding Table decoding_table = ( u'\x00' # 0x00 -> NULL u'\x01' # 0x01 -> START OF HEADING u'\x02' # 0x02 -> START OF TEXT u'\x03' # 0x03 -> END OF TEXT u'\x04' # 0x04 -> END OF TRANSMISSION u'\x05' # 0x05 -> ENQUIRY u'\x06' # 0x06 -> ACKNOWLEDGE u'\x07' # 0x07 -> BELL u'\x08' # 0x08 -> BACKSPACE u'\t' # 0x09 -> HORIZONTAL TABULATION u'\n' # 0x0A -> LINE FEED u'\x0b' # 0x0B -> VERTICAL TABULATION u'\x0c' # 0x0C -> FORM FEED u'\r' # 0x0D -> CARRIAGE RETURN u'\x0e' # 0x0E -> SHIFT OUT u'\x0f' # 0x0F -> SHIFT IN u'\x10' # 0x10 -> DATA LINK ESCAPE u'\x11' # 0x11 -> DEVICE CONTROL ONE u'\x12' # 0x12 -> DEVICE CONTROL TWO u'\x13' # 0x13 -> DEVICE CONTROL THREE u'\x14' # 0x14 -> DEVICE CONTROL FOUR u'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE u'\x16' # 0x16 -> SYNCHRONOUS IDLE u'\x17' # 0x17 -> END OF TRANSMISSION BLOCK u'\x18' # 0x18 -> CANCEL u'\x19' # 0x19 -> END OF MEDIUM u'\x1a' # 0x1A -> SUBSTITUTE u'\x1b' # 0x1B -> ESCAPE u'\x1c' # 0x1C -> FILE SEPARATOR u'\x1d' # 0x1D -> GROUP SEPARATOR u'\x1e' # 0x1E -> RECORD SEPARATOR u'\x1f' # 0x1F -> UNIT SEPARATOR u' ' # 0x20 -> SPACE u'!' # 0x21 -> EXCLAMATION MARK u'"' # 0x22 -> QUOTATION MARK u'#' # 0x23 -> NUMBER SIGN u'$' # 0x24 -> DOLLAR SIGN u'%' # 0x25 -> PERCENT SIGN u'&' # 0x26 -> AMPERSAND u"'" # 0x27 -> APOSTROPHE u'(' # 0x28 -> LEFT PARENTHESIS u')' # 0x29 -> RIGHT PARENTHESIS u'*' # 0x2A -> ASTERISK u'+' # 0x2B -> PLUS SIGN u',' # 0x2C -> COMMA u'-' # 0x2D -> HYPHEN-MINUS u'.' # 0x2E -> FULL STOP u'/' # 0x2F -> SOLIDUS u'0' # 0x30 -> DIGIT ZERO u'1' # 0x31 -> DIGIT ONE u'2' # 0x32 -> DIGIT TWO u'3' # 0x33 -> DIGIT THREE u'4' # 0x34 -> DIGIT FOUR u'5' # 0x35 -> DIGIT FIVE u'6' # 0x36 -> DIGIT SIX u'7' # 0x37 -> DIGIT SEVEN u'8' # 0x38 -> DIGIT EIGHT u'9' # 0x39 -> DIGIT NINE u':' # 0x3A -> COLON u';' # 0x3B -> SEMICOLON u'<' # 0x3C -> LESS-THAN SIGN u'=' # 0x3D -> EQUALS SIGN u'>' # 0x3E -> GREATER-THAN SIGN u'?' 
# 0x3F -> QUESTION MARK u'@' # 0x40 -> COMMERCIAL AT u'A' # 0x41 -> LATIN CAPITAL LETTER A u'B' # 0x42 -> LATIN CAPITAL LETTER B u'C' # 0x43 -> LATIN CAPITAL LETTER C u'D' # 0x44 -> LATIN CAPITAL LETTER D u'E' # 0x45 -> LATIN CAPITAL LETTER E u'F' # 0x46 -> LATIN CAPITAL LETTER F u'G' # 0x47 -> LATIN CAPITAL LETTER G u'H' # 0x48 -> LATIN CAPITAL LETTER H u'I' # 0x49 -> LATIN CAPITAL LETTER I u'J' # 0x4A -> LATIN CAPITAL LETTER J u'K' # 0x4B -> LATIN CAPITAL LETTER K u'L' # 0x4C -> LATIN CAPITAL LETTER L u'M' # 0x4D -> LATIN CAPITAL LETTER M u'N' # 0x4E -> LATIN CAPITAL LETTER N u'O' # 0x4F -> LATIN CAPITAL LETTER O u'P' # 0x50 -> LATIN CAPITAL LETTER P u'Q' # 0x51 -> LATIN CAPITAL LETTER Q u'R' # 0x52 -> LATIN CAPITAL LETTER R u'S' # 0x53 -> LATIN CAPITAL LETTER S u'T' # 0x54 -> LATIN CAPITAL LETTER T u'U' # 0x55 -> LATIN CAPITAL LETTER U u'V' # 0x56 -> LATIN CAPITAL LETTER V u'W' # 0x57 -> LATIN CAPITAL LETTER W u'X' # 0x58 -> LATIN CAPITAL LETTER X u'Y' # 0x59 -> LATIN CAPITAL LETTER Y u'Z' # 0x5A -> LATIN CAPITAL LETTER Z u'[' # 0x5B -> LEFT SQUARE BRACKET u'\\' # 0x5C -> REVERSE SOLIDUS u']' # 0x5D -> RIGHT SQUARE BRACKET u'^' # 0x5E -> CIRCUMFLEX ACCENT u'_' # 0x5F -> LOW LINE u'`' # 0x60 -> GRAVE ACCENT u'a' # 0x61 -> LATIN SMALL LETTER A u'b' # 0x62 -> LATIN SMALL LETTER B u'c' # 0x63 -> LATIN SMALL LETTER C u'd' # 0x64 -> LATIN SMALL LETTER D u'e' # 0x65 -> LATIN SMALL LETTER E u'f' # 0x66 -> LATIN SMALL LETTER F u'g' # 0x67 -> LATIN SMALL LETTER G u'h' # 0x68 -> LATIN SMALL LETTER H u'i' # 0x69 -> LATIN SMALL LETTER I u'j' # 0x6A -> LATIN SMALL LETTER J u'k' # 0x6B -> LATIN SMALL LETTER K u'l' # 0x6C -> LATIN SMALL LETTER L u'm' # 0x6D -> LATIN SMALL LETTER M u'n' # 0x6E -> LATIN SMALL LETTER N u'o' # 0x6F -> LATIN SMALL LETTER O u'p' # 0x70 -> LATIN SMALL LETTER P u'q' # 0x71 -> LATIN SMALL LETTER Q u'r' # 0x72 -> LATIN SMALL LETTER R u's' # 0x73 -> LATIN SMALL LETTER S u't' # 0x74 -> LATIN SMALL LETTER T u'u' # 0x75 -> LATIN SMALL LETTER U u'v' # 0x76 -> LATIN SMALL LETTER V u'w' # 0x77 -> LATIN SMALL LETTER W u'x' # 0x78 -> LATIN SMALL LETTER X u'y' # 0x79 -> LATIN SMALL LETTER Y u'z' # 0x7A -> LATIN SMALL LETTER Z u'{' # 0x7B -> LEFT CURLY BRACKET u'|' # 0x7C -> VERTICAL LINE u'}' # 0x7D -> RIGHT CURLY BRACKET u'~' # 0x7E -> TILDE u'\x7f' # 0x7F -> DELETE u'\x80' # 0x80 -> <control> u'\x81' # 0x81 -> <control> u'\x82' # 0x82 -> <control> u'\x83' # 0x83 -> <control> u'\x84' # 0x84 -> <control> u'\x85' # 0x85 -> <control> u'\x86' # 0x86 -> <control> u'\x87' # 0x87 -> <control> u'\x88' # 0x88 -> <control> u'\x89' # 0x89 -> <control> u'\x8a' # 0x8A -> <control> u'\x8b' # 0x8B -> <control> u'\x8c' # 0x8C -> <control> u'\x8d' # 0x8D -> <control> u'\x8e' # 0x8E -> <control> u'\x8f' # 0x8F -> <control> u'\x90' # 0x90 -> <control> u'\x91' # 0x91 -> <control> u'\x92' # 0x92 -> <control> u'\x93' # 0x93 -> <control> u'\x94' # 0x94 -> <control> u'\x95' # 0x95 -> <control> u'\x96' # 0x96 -> <control> u'\x97' # 0x97 -> <control> u'\x98' # 0x98 -> <control> u'\x99' # 0x99 -> <control> u'\x9a' # 0x9A -> <control> u'\x9b' # 0x9B -> <control> u'\x9c' # 0x9C -> <control> u'\x9d' # 0x9D -> <control> u'\x9e' # 0x9E -> <control> u'\x9f' # 0x9F -> <control> u'\xa0' # 0xA0 -> NO-BREAK SPACE u'\u06f0' # 0xA1 -> EXTENDED ARABIC-INDIC DIGIT ZERO u'\u06f1' # 0xA2 -> EXTENDED ARABIC-INDIC DIGIT ONE u'\u06f2' # 0xA3 -> EXTENDED ARABIC-INDIC DIGIT TWO u'\u06f3' # 0xA4 -> EXTENDED ARABIC-INDIC DIGIT THREE u'\u06f4' # 0xA5 -> EXTENDED ARABIC-INDIC DIGIT FOUR u'\u06f5' # 0xA6 -> EXTENDED ARABIC-INDIC DIGIT FIVE 
u'\u06f6' # 0xA7 -> EXTENDED ARABIC-INDIC DIGIT SIX u'\u06f7' # 0xA8 -> EXTENDED ARABIC-INDIC DIGIT SEVEN u'\u06f8' # 0xA9 -> EXTENDED ARABIC-INDIC DIGIT EIGHT u'\u06f9' # 0xAA -> EXTENDED ARABIC-INDIC DIGIT NINE u'\u060c' # 0xAB -> ARABIC COMMA u'\u061b' # 0xAC -> ARABIC SEMICOLON u'\xad' # 0xAD -> SOFT HYPHEN u'\u061f' # 0xAE -> ARABIC QUESTION MARK u'\ufe81' # 0xAF -> ARABIC LETTER ALEF WITH MADDA ABOVE ISOLATED FORM u'\ufe8d' # 0xB0 -> ARABIC LETTER ALEF ISOLATED FORM u'\ufe8e' # 0xB1 -> ARABIC LETTER ALEF FINAL FORM u'\ufe8e' # 0xB2 -> ARABIC LETTER ALEF FINAL FORM u'\ufe8f' # 0xB3 -> ARABIC LETTER BEH ISOLATED FORM u'\ufe91' # 0xB4 -> ARABIC LETTER BEH INITIAL FORM u'\ufb56' # 0xB5 -> ARABIC LETTER PEH ISOLATED FORM u'\ufb58' # 0xB6 -> ARABIC LETTER PEH INITIAL FORM u'\ufe93' # 0xB7 -> ARABIC LETTER TEH MARBUTA ISOLATED FORM u'\ufe95' # 0xB8 -> ARABIC LETTER TEH ISOLATED FORM u'\ufe97' # 0xB9 -> ARABIC LETTER TEH INITIAL FORM u'\ufb66' # 0xBA -> ARABIC LETTER TTEH ISOLATED FORM u'\ufb68' # 0xBB -> ARABIC LETTER TTEH INITIAL FORM u'\ufe99' # 0xBC -> ARABIC LETTER THEH ISOLATED FORM u'\ufe9b' # 0xBD -> ARABIC LETTER THEH INITIAL FORM u'\ufe9d' # 0xBE -> ARABIC LETTER JEEM ISOLATED FORM u'\ufe9f' # 0xBF -> ARABIC LETTER JEEM INITIAL FORM u'\ufb7a' # 0xC0 -> ARABIC LETTER TCHEH ISOLATED FORM u'\ufb7c' # 0xC1 -> ARABIC LETTER TCHEH INITIAL FORM u'\ufea1' # 0xC2 -> ARABIC LETTER HAH ISOLATED FORM u'\ufea3' # 0xC3 -> ARABIC LETTER HAH INITIAL FORM u'\ufea5' # 0xC4 -> ARABIC LETTER KHAH ISOLATED FORM u'\ufea7' # 0xC5 -> ARABIC LETTER KHAH INITIAL FORM u'\ufea9' # 0xC6 -> ARABIC LETTER DAL ISOLATED FORM u'\ufb84' # 0xC7 -> ARABIC LETTER DAHAL ISOLATED FORMN u'\ufeab' # 0xC8 -> ARABIC LETTER THAL ISOLATED FORM u'\ufead' # 0xC9 -> ARABIC LETTER REH ISOLATED FORM u'\ufb8c' # 0xCA -> ARABIC LETTER RREH ISOLATED FORM u'\ufeaf' # 0xCB -> ARABIC LETTER ZAIN ISOLATED FORM u'\ufb8a' # 0xCC -> ARABIC LETTER JEH ISOLATED FORM u'\ufeb1' # 0xCD -> ARABIC LETTER SEEN ISOLATED FORM u'\ufeb3' # 0xCE -> ARABIC LETTER SEEN INITIAL FORM u'\ufeb5' # 0xCF -> ARABIC LETTER SHEEN ISOLATED FORM u'\ufeb7' # 0xD0 -> ARABIC LETTER SHEEN INITIAL FORM u'\ufeb9' # 0xD1 -> ARABIC LETTER SAD ISOLATED FORM u'\ufebb' # 0xD2 -> ARABIC LETTER SAD INITIAL FORM u'\ufebd' # 0xD3 -> ARABIC LETTER DAD ISOLATED FORM u'\ufebf' # 0xD4 -> ARABIC LETTER DAD INITIAL FORM u'\ufec1' # 0xD5 -> ARABIC LETTER TAH ISOLATED FORM u'\ufec5' # 0xD6 -> ARABIC LETTER ZAH ISOLATED FORM u'\ufec9' # 0xD7 -> ARABIC LETTER AIN ISOLATED FORM u'\ufeca' # 0xD8 -> ARABIC LETTER AIN FINAL FORM u'\ufecb' # 0xD9 -> ARABIC LETTER AIN INITIAL FORM u'\ufecc' # 0xDA -> ARABIC LETTER AIN MEDIAL FORM u'\ufecd' # 0xDB -> ARABIC LETTER GHAIN ISOLATED FORM u'\ufece' # 0xDC -> ARABIC LETTER GHAIN FINAL FORM u'\ufecf' # 0xDD -> ARABIC LETTER GHAIN INITIAL FORM u'\ufed0' # 0xDE -> ARABIC LETTER GHAIN MEDIAL FORM u'\ufed1' # 0xDF -> ARABIC LETTER FEH ISOLATED FORM u'\ufed3' # 0xE0 -> ARABIC LETTER FEH INITIAL FORM u'\ufed5' # 0xE1 -> ARABIC LETTER QAF ISOLATED FORM u'\ufed7' # 0xE2 -> ARABIC LETTER QAF INITIAL FORM u'\ufed9' # 0xE3 -> ARABIC LETTER KAF ISOLATED FORM u'\ufedb' # 0xE4 -> ARABIC LETTER KAF INITIAL FORM u'\ufb92' # 0xE5 -> ARABIC LETTER GAF ISOLATED FORM u'\ufb94' # 0xE6 -> ARABIC LETTER GAF INITIAL FORM u'\ufedd' # 0xE7 -> ARABIC LETTER LAM ISOLATED FORM u'\ufedf' # 0xE8 -> ARABIC LETTER LAM INITIAL FORM u'\ufee0' # 0xE9 -> ARABIC LETTER LAM MEDIAL FORM u'\ufee1' # 0xEA -> ARABIC LETTER MEEM ISOLATED FORM u'\ufee3' # 0xEB -> ARABIC LETTER MEEM INITIAL FORM 
u'\ufb9e' # 0xEC -> ARABIC LETTER NOON GHUNNA ISOLATED FORM u'\ufee5' # 0xED -> ARABIC LETTER NOON ISOLATED FORM u'\ufee7' # 0xEE -> ARABIC LETTER NOON INITIAL FORM u'\ufe85' # 0xEF -> ARABIC LETTER WAW WITH HAMZA ABOVE ISOLATED FORM u'\ufeed' # 0xF0 -> ARABIC LETTER WAW ISOLATED FORM u'\ufba6' # 0xF1 -> ARABIC LETTER HEH GOAL ISOLATED FORM u'\ufba8' # 0xF2 -> ARABIC LETTER HEH GOAL INITIAL FORM u'\ufba9' # 0xF3 -> ARABIC LETTER HEH GOAL MEDIAL FORM u'\ufbaa' # 0xF4 -> ARABIC LETTER HEH DOACHASHMEE ISOLATED FORM u'\ufe80' # 0xF5 -> ARABIC LETTER HAMZA ISOLATED FORM u'\ufe89' # 0xF6 -> ARABIC LETTER YEH WITH HAMZA ABOVE ISOLATED FORM u'\ufe8a' # 0xF7 -> ARABIC LETTER YEH WITH HAMZA ABOVE FINAL FORM u'\ufe8b' # 0xF8 -> ARABIC LETTER YEH WITH HAMZA ABOVE INITIAL FORM u'\ufef1' # 0xF9 -> ARABIC LETTER YEH ISOLATED FORM u'\ufef2' # 0xFA -> ARABIC LETTER YEH FINAL FORM u'\ufef3' # 0xFB -> ARABIC LETTER YEH INITIAL FORM u'\ufbb0' # 0xFC -> ARABIC LETTER YEH BARREE WITH HAMZA ABOVE ISOLATED FORM u'\ufbae' # 0xFD -> ARABIC LETTER YEH BARREE ISOLATED FORM u'\ufe7c' # 0xFE -> ARABIC SHADDA ISOLATED FORM u'\ufe7d' # 0xFF -> ARABIC SHADDA MEDIAL FORM ) ### Encoding table encoding_table=codecs.charmap_build(decoding_table)
apache-2.0
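Generated charmap codecs like the one above are table lookups in both directions: decoding_table is indexed by byte value, and encoding_table is built from it by codecs.charmap_build. Since cp1006 ships with CPython, a quick round-trip sketch (byte values taken from the table in the record):

data = b'\xab\xae'              # ARABIC COMMA, ARABIC QUESTION MARK in cp1006
text = data.decode('cp1006')    # charmap_decode: one table lookup per byte
print(repr(text))               # the two Arabic code points U+060C and U+061F
assert text.encode('cp1006') == data  # encoding_table inverts the mapping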
NamelessRom/android_kernel_lge_omap4
tools/perf/scripts/python/netdev-times.py
11271
15048
# Display a process of packets and processed time. # It helps us to investigate networking or network device. # # options # tx: show only tx chart # rx: show only rx chart # dev=: show only thing related to specified device # debug: work with debug mode. It shows buffer status. import os import sys sys.path.append(os.environ['PERF_EXEC_PATH'] + \ '/scripts/python/Perf-Trace-Util/lib/Perf/Trace') from perf_trace_context import * from Core import * from Util import * all_event_list = []; # insert all tracepoint event related with this script irq_dic = {}; # key is cpu and value is a list which stacks irqs # which raise NET_RX softirq net_rx_dic = {}; # key is cpu and value include time of NET_RX softirq-entry # and a list which stacks receive receive_hunk_list = []; # a list which include a sequence of receive events rx_skb_list = []; # received packet list for matching # skb_copy_datagram_iovec buffer_budget = 65536; # the budget of rx_skb_list, tx_queue_list and # tx_xmit_list of_count_rx_skb_list = 0; # overflow count tx_queue_list = []; # list of packets which pass through dev_queue_xmit of_count_tx_queue_list = 0; # overflow count tx_xmit_list = []; # list of packets which pass through dev_hard_start_xmit of_count_tx_xmit_list = 0; # overflow count tx_free_list = []; # list of packets which is freed # options show_tx = 0; show_rx = 0; dev = 0; # store a name of device specified by option "dev=" debug = 0; # indices of event_info tuple EINFO_IDX_NAME= 0 EINFO_IDX_CONTEXT=1 EINFO_IDX_CPU= 2 EINFO_IDX_TIME= 3 EINFO_IDX_PID= 4 EINFO_IDX_COMM= 5 # Calculate a time interval(msec) from src(nsec) to dst(nsec) def diff_msec(src, dst): return (dst - src) / 1000000.0 # Display a process of transmitting a packet def print_transmit(hunk): if dev != 0 and hunk['dev'].find(dev) < 0: return print "%7s %5d %6d.%06dsec %12.3fmsec %12.3fmsec" % \ (hunk['dev'], hunk['len'], nsecs_secs(hunk['queue_t']), nsecs_nsecs(hunk['queue_t'])/1000, diff_msec(hunk['queue_t'], hunk['xmit_t']), diff_msec(hunk['xmit_t'], hunk['free_t'])) # Format for displaying rx packet processing PF_IRQ_ENTRY= " irq_entry(+%.3fmsec irq=%d:%s)" PF_SOFT_ENTRY=" softirq_entry(+%.3fmsec)" PF_NAPI_POLL= " napi_poll_exit(+%.3fmsec %s)" PF_JOINT= " |" PF_WJOINT= " | |" PF_NET_RECV= " |---netif_receive_skb(+%.3fmsec skb=%x len=%d)" PF_NET_RX= " |---netif_rx(+%.3fmsec skb=%x)" PF_CPY_DGRAM= " | skb_copy_datagram_iovec(+%.3fmsec %d:%s)" PF_KFREE_SKB= " | kfree_skb(+%.3fmsec location=%x)" PF_CONS_SKB= " | consume_skb(+%.3fmsec)" # Display a process of received packets and interrputs associated with # a NET_RX softirq def print_receive(hunk): show_hunk = 0 irq_list = hunk['irq_list'] cpu = irq_list[0]['cpu'] base_t = irq_list[0]['irq_ent_t'] # check if this hunk should be showed if dev != 0: for i in range(len(irq_list)): if irq_list[i]['name'].find(dev) >= 0: show_hunk = 1 break else: show_hunk = 1 if show_hunk == 0: return print "%d.%06dsec cpu=%d" % \ (nsecs_secs(base_t), nsecs_nsecs(base_t)/1000, cpu) for i in range(len(irq_list)): print PF_IRQ_ENTRY % \ (diff_msec(base_t, irq_list[i]['irq_ent_t']), irq_list[i]['irq'], irq_list[i]['name']) print PF_JOINT irq_event_list = irq_list[i]['event_list'] for j in range(len(irq_event_list)): irq_event = irq_event_list[j] if irq_event['event'] == 'netif_rx': print PF_NET_RX % \ (diff_msec(base_t, irq_event['time']), irq_event['skbaddr']) print PF_JOINT print PF_SOFT_ENTRY % \ diff_msec(base_t, hunk['sirq_ent_t']) print PF_JOINT event_list = hunk['event_list'] for i in range(len(event_list)): event = 
event_list[i] if event['event_name'] == 'napi_poll': print PF_NAPI_POLL % \ (diff_msec(base_t, event['event_t']), event['dev']) if i == len(event_list) - 1: print "" else: print PF_JOINT else: print PF_NET_RECV % \ (diff_msec(base_t, event['event_t']), event['skbaddr'], event['len']) if 'comm' in event.keys(): print PF_WJOINT print PF_CPY_DGRAM % \ (diff_msec(base_t, event['comm_t']), event['pid'], event['comm']) elif 'handle' in event.keys(): print PF_WJOINT if event['handle'] == "kfree_skb": print PF_KFREE_SKB % \ (diff_msec(base_t, event['comm_t']), event['location']) elif event['handle'] == "consume_skb": print PF_CONS_SKB % \ diff_msec(base_t, event['comm_t']) print PF_JOINT def trace_begin(): global show_tx global show_rx global dev global debug for i in range(len(sys.argv)): if i == 0: continue arg = sys.argv[i] if arg == 'tx': show_tx = 1 elif arg =='rx': show_rx = 1 elif arg.find('dev=',0, 4) >= 0: dev = arg[4:] elif arg == 'debug': debug = 1 if show_tx == 0 and show_rx == 0: show_tx = 1 show_rx = 1 def trace_end(): # order all events in time all_event_list.sort(lambda a,b :cmp(a[EINFO_IDX_TIME], b[EINFO_IDX_TIME])) # process all events for i in range(len(all_event_list)): event_info = all_event_list[i] name = event_info[EINFO_IDX_NAME] if name == 'irq__softirq_exit': handle_irq_softirq_exit(event_info) elif name == 'irq__softirq_entry': handle_irq_softirq_entry(event_info) elif name == 'irq__softirq_raise': handle_irq_softirq_raise(event_info) elif name == 'irq__irq_handler_entry': handle_irq_handler_entry(event_info) elif name == 'irq__irq_handler_exit': handle_irq_handler_exit(event_info) elif name == 'napi__napi_poll': handle_napi_poll(event_info) elif name == 'net__netif_receive_skb': handle_netif_receive_skb(event_info) elif name == 'net__netif_rx': handle_netif_rx(event_info) elif name == 'skb__skb_copy_datagram_iovec': handle_skb_copy_datagram_iovec(event_info) elif name == 'net__net_dev_queue': handle_net_dev_queue(event_info) elif name == 'net__net_dev_xmit': handle_net_dev_xmit(event_info) elif name == 'skb__kfree_skb': handle_kfree_skb(event_info) elif name == 'skb__consume_skb': handle_consume_skb(event_info) # display receive hunks if show_rx: for i in range(len(receive_hunk_list)): print_receive(receive_hunk_list[i]) # display transmit hunks if show_tx: print " dev len Qdisc " \ " netdevice free" for i in range(len(tx_free_list)): print_transmit(tx_free_list[i]) if debug: print "debug buffer status" print "----------------------------" print "xmit Qdisc:remain:%d overflow:%d" % \ (len(tx_queue_list), of_count_tx_queue_list) print "xmit netdevice:remain:%d overflow:%d" % \ (len(tx_xmit_list), of_count_tx_xmit_list) print "receive:remain:%d overflow:%d" % \ (len(rx_skb_list), of_count_rx_skb_list) # called from perf, when it finds a correspoinding event def irq__softirq_entry(name, context, cpu, sec, nsec, pid, comm, vec): if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX": return event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec) all_event_list.append(event_info) def irq__softirq_exit(name, context, cpu, sec, nsec, pid, comm, vec): if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX": return event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec) all_event_list.append(event_info) def irq__softirq_raise(name, context, cpu, sec, nsec, pid, comm, vec): if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX": return event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec) all_event_list.append(event_info) def 
irq__irq_handler_entry(name, context, cpu, sec, nsec, pid, comm, irq, irq_name): event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, irq, irq_name) all_event_list.append(event_info) def irq__irq_handler_exit(name, context, cpu, sec, nsec, pid, comm, irq, ret): event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, irq, ret) all_event_list.append(event_info) def napi__napi_poll(name, context, cpu, sec, nsec, pid, comm, napi, dev_name): event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, napi, dev_name) all_event_list.append(event_info) def net__netif_receive_skb(name, context, cpu, sec, nsec, pid, comm, skbaddr, skblen, dev_name): event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, skbaddr, skblen, dev_name) all_event_list.append(event_info) def net__netif_rx(name, context, cpu, sec, nsec, pid, comm, skbaddr, skblen, dev_name): event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, skbaddr, skblen, dev_name) all_event_list.append(event_info) def net__net_dev_queue(name, context, cpu, sec, nsec, pid, comm, skbaddr, skblen, dev_name): event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, skbaddr, skblen, dev_name) all_event_list.append(event_info) def net__net_dev_xmit(name, context, cpu, sec, nsec, pid, comm, skbaddr, skblen, rc, dev_name): event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, skbaddr, skblen, rc ,dev_name) all_event_list.append(event_info) def skb__kfree_skb(name, context, cpu, sec, nsec, pid, comm, skbaddr, protocol, location): event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, skbaddr, protocol, location) all_event_list.append(event_info) def skb__consume_skb(name, context, cpu, sec, nsec, pid, comm, skbaddr): event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, skbaddr) all_event_list.append(event_info) def skb__skb_copy_datagram_iovec(name, context, cpu, sec, nsec, pid, comm, skbaddr, skblen): event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, skbaddr, skblen) all_event_list.append(event_info) def handle_irq_handler_entry(event_info): (name, context, cpu, time, pid, comm, irq, irq_name) = event_info if cpu not in irq_dic.keys(): irq_dic[cpu] = [] irq_record = {'irq':irq, 'name':irq_name, 'cpu':cpu, 'irq_ent_t':time} irq_dic[cpu].append(irq_record) def handle_irq_handler_exit(event_info): (name, context, cpu, time, pid, comm, irq, ret) = event_info if cpu not in irq_dic.keys(): return irq_record = irq_dic[cpu].pop() if irq != irq_record['irq']: return irq_record.update({'irq_ext_t':time}) # if an irq doesn't include NET_RX softirq, drop. 
if 'event_list' in irq_record.keys(): irq_dic[cpu].append(irq_record) def handle_irq_softirq_raise(event_info): (name, context, cpu, time, pid, comm, vec) = event_info if cpu not in irq_dic.keys() \ or len(irq_dic[cpu]) == 0: return irq_record = irq_dic[cpu].pop() if 'event_list' in irq_record.keys(): irq_event_list = irq_record['event_list'] else: irq_event_list = [] irq_event_list.append({'time':time, 'event':'sirq_raise'}) irq_record.update({'event_list':irq_event_list}) irq_dic[cpu].append(irq_record) def handle_irq_softirq_entry(event_info): (name, context, cpu, time, pid, comm, vec) = event_info net_rx_dic[cpu] = {'sirq_ent_t':time, 'event_list':[]} def handle_irq_softirq_exit(event_info): (name, context, cpu, time, pid, comm, vec) = event_info irq_list = [] event_list = 0 if cpu in irq_dic.keys(): irq_list = irq_dic[cpu] del irq_dic[cpu] if cpu in net_rx_dic.keys(): sirq_ent_t = net_rx_dic[cpu]['sirq_ent_t'] event_list = net_rx_dic[cpu]['event_list'] del net_rx_dic[cpu] if irq_list == [] or event_list == 0: return rec_data = {'sirq_ent_t':sirq_ent_t, 'sirq_ext_t':time, 'irq_list':irq_list, 'event_list':event_list} # merge information realted to a NET_RX softirq receive_hunk_list.append(rec_data) def handle_napi_poll(event_info): (name, context, cpu, time, pid, comm, napi, dev_name) = event_info if cpu in net_rx_dic.keys(): event_list = net_rx_dic[cpu]['event_list'] rec_data = {'event_name':'napi_poll', 'dev':dev_name, 'event_t':time} event_list.append(rec_data) def handle_netif_rx(event_info): (name, context, cpu, time, pid, comm, skbaddr, skblen, dev_name) = event_info if cpu not in irq_dic.keys() \ or len(irq_dic[cpu]) == 0: return irq_record = irq_dic[cpu].pop() if 'event_list' in irq_record.keys(): irq_event_list = irq_record['event_list'] else: irq_event_list = [] irq_event_list.append({'time':time, 'event':'netif_rx', 'skbaddr':skbaddr, 'skblen':skblen, 'dev_name':dev_name}) irq_record.update({'event_list':irq_event_list}) irq_dic[cpu].append(irq_record) def handle_netif_receive_skb(event_info): global of_count_rx_skb_list (name, context, cpu, time, pid, comm, skbaddr, skblen, dev_name) = event_info if cpu in net_rx_dic.keys(): rec_data = {'event_name':'netif_receive_skb', 'event_t':time, 'skbaddr':skbaddr, 'len':skblen} event_list = net_rx_dic[cpu]['event_list'] event_list.append(rec_data) rx_skb_list.insert(0, rec_data) if len(rx_skb_list) > buffer_budget: rx_skb_list.pop() of_count_rx_skb_list += 1 def handle_net_dev_queue(event_info): global of_count_tx_queue_list (name, context, cpu, time, pid, comm, skbaddr, skblen, dev_name) = event_info skb = {'dev':dev_name, 'skbaddr':skbaddr, 'len':skblen, 'queue_t':time} tx_queue_list.insert(0, skb) if len(tx_queue_list) > buffer_budget: tx_queue_list.pop() of_count_tx_queue_list += 1 def handle_net_dev_xmit(event_info): global of_count_tx_xmit_list (name, context, cpu, time, pid, comm, skbaddr, skblen, rc, dev_name) = event_info if rc == 0: # NETDEV_TX_OK for i in range(len(tx_queue_list)): skb = tx_queue_list[i] if skb['skbaddr'] == skbaddr: skb['xmit_t'] = time tx_xmit_list.insert(0, skb) del tx_queue_list[i] if len(tx_xmit_list) > buffer_budget: tx_xmit_list.pop() of_count_tx_xmit_list += 1 return def handle_kfree_skb(event_info): (name, context, cpu, time, pid, comm, skbaddr, protocol, location) = event_info for i in range(len(tx_queue_list)): skb = tx_queue_list[i] if skb['skbaddr'] == skbaddr: del tx_queue_list[i] return for i in range(len(tx_xmit_list)): skb = tx_xmit_list[i] if skb['skbaddr'] == skbaddr: skb['free_t'] = 
time tx_free_list.append(skb) del tx_xmit_list[i] return for i in range(len(rx_skb_list)): rec_data = rx_skb_list[i] if rec_data['skbaddr'] == skbaddr: rec_data.update({'handle':"kfree_skb", 'comm':comm, 'pid':pid, 'comm_t':time}) del rx_skb_list[i] return def handle_consume_skb(event_info): (name, context, cpu, time, pid, comm, skbaddr) = event_info for i in range(len(tx_xmit_list)): skb = tx_xmit_list[i] if skb['skbaddr'] == skbaddr: skb['free_t'] = time tx_free_list.append(skb) del tx_xmit_list[i] return def handle_skb_copy_datagram_iovec(event_info): (name, context, cpu, time, pid, comm, skbaddr, skblen) = event_info for i in range(len(rx_skb_list)): rec_data = rx_skb_list[i] if skbaddr == rec_data['skbaddr']: rec_data.update({'handle':"skb_copy_datagram_iovec", 'comm':comm, 'pid':pid, 'comm_t':time}) del rx_skb_list[i] return
gpl-2.0
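The core trick in the script above is to flatten every tracepoint into a tuple ending in a nanosecond timestamp, sort everything by time, and then replay the events through per-name handlers. A minimal sketch of that replay skeleton (the event names and timestamps are illustrative):

def diff_msec(src, dst):
    # timestamps are in nanoseconds; report intervals in milliseconds
    return (dst - src) / 1000000.0

events = [('tx_queue', 2000000), ('rx_irq', 1000000), ('tx_xmit', 5000000)]
events.sort(key=lambda e: e[1])   # replay strictly in time order

base = events[0][1]
for name, t in events:
    print('%-8s +%.3f msec' % (name, diff_msec(base, t)))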
jbaayen/sympy
sympy/plotting/plot_window.py
15
4511
from pyglet.gl import *
from managed_window import ManagedWindow
from plot_camera import PlotCamera
from plot_controller import PlotController

from time import clock

class PlotWindow(ManagedWindow):

    def __init__(self, plot, **kwargs):
        """
        Named Arguments
        ===============

        antialiasing = True
            True OR False
        ortho = False
            True OR False
        invert_mouse_zoom = False
            True OR False
        """
        self.plot = plot

        self.camera = None
        self._calculating = False

        self.antialiasing = kwargs.pop('antialiasing', True)
        self.ortho = kwargs.pop('ortho', False)
        self.invert_mouse_zoom = kwargs.pop('invert_mouse_zoom', False)
        self.linewidth = kwargs.pop('linewidth', 1.5)
        self.title = kwargs.setdefault('caption', "SymPy Plot")
        self.last_caption_update = 0
        self.caption_update_interval = 0.2
        self.drawing_first_object = True

        super(PlotWindow, self).__init__(**kwargs)

    def setup(self):
        self.camera = PlotCamera(self, ortho=self.ortho)
        self.controller = PlotController(self, invert_mouse_zoom=self.invert_mouse_zoom)
        self.push_handlers(self.controller)

        glClearColor(1.0, 1.0, 1.0, 0.0)
        #glClearColor(0.95, 0.95, 0.95, 0.0)
        glClearDepth(1.0)

        glDepthFunc(GL_LESS)
        glEnable(GL_DEPTH_TEST)

        glEnable(GL_LINE_SMOOTH)
        glShadeModel(GL_SMOOTH)
        glLineWidth(self.linewidth)

        glEnable(GL_BLEND)
        glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)

        if self.antialiasing:
            glHint(GL_LINE_SMOOTH_HINT, GL_NICEST)
            glHint(GL_POLYGON_SMOOTH_HINT, GL_NICEST)
            #glHint(GL_LINE_SMOOTH_HINT, GL_DONT_CARE)
            #glHint(GL_POLYGON_SMOOTH_HINT, GL_DONT_CARE)

        self.camera.setup_projection()

    def on_resize(self, w, h):
        super(PlotWindow, self).on_resize(w, h)
        if self.camera is not None:
            self.camera.setup_projection()

    def update(self, dt):
        self.controller.update(dt)

    def draw(self):
        self.plot._render_lock.acquire()
        self.camera.apply_transformation()

        calc_verts_pos, calc_verts_len = 0, 0
        calc_cverts_pos, calc_cverts_len = 0, 0

        should_update_caption = (clock() - self.last_caption_update >
                                 self.caption_update_interval)

        if len(self.plot._functions.values()) == 0:
            self.drawing_first_object = True

        for r in self.plot._functions.itervalues():
            if self.drawing_first_object:
                self.camera.set_rot_preset(r.default_rot_preset)
                self.drawing_first_object = False

            glPushMatrix()
            r._draw()
            glPopMatrix()

            # might as well do this while we are
            # iterating and have the lock rather
            # than locking and iterating twice
            # per frame:

            if should_update_caption:
                try:
                    if r.calculating_verts:
                        calc_verts_pos += r.calculating_verts_pos
                        calc_verts_len += r.calculating_verts_len
                    if r.calculating_cverts:
                        calc_cverts_pos += r.calculating_cverts_pos
                        calc_cverts_len += r.calculating_cverts_len
                except:
                    pass

        for r in self.plot._pobjects:
            glPushMatrix()
            r._draw()
            glPopMatrix()

        if should_update_caption:
            self.update_caption(calc_verts_pos, calc_verts_len,
                                calc_cverts_pos, calc_cverts_len)
            self.last_caption_update = clock()

        if self.plot._screenshot:
            self.plot._screenshot._execute_saving()

        self.plot._render_lock.release()

    def update_caption(self, calc_verts_pos, calc_verts_len,
                       calc_cverts_pos, calc_cverts_len):
        caption = self.title

        if calc_verts_len or calc_cverts_len:
            caption += " (calculating"
            if calc_verts_len > 0:
                p = (calc_verts_pos / calc_verts_len) * 100
                caption += " vertices %i%%" % (p)
            if calc_cverts_len > 0:
                p = (calc_cverts_pos / calc_cverts_len) * 100
                caption += " colors %i%%" % (p)
            caption += ")"

        if self.caption != caption:
            self.set_caption(caption)
bsd-3-clause
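One reusable idea in the window above is throttling: the caption is rebuilt at most once per caption_update_interval seconds, no matter how often draw runs. The same timestamp guard works outside a render loop; a small standalone sketch (the interval and loop are illustrative):

import time

last_update = 0.0
update_interval = 0.15  # seconds; the record above uses 0.2

def should_refresh(now):
    global last_update
    if now - last_update > update_interval:
        last_update = now
        return True   # caller performs the expensive refresh
    return False      # too soon since the last refresh; skip

for _ in range(3):
    print(should_refresh(time.time()))  # True, False, True with 0.1 s steps
    time.sleep(0.1)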
aferr/TimingCompartments
tests/long/se/50.vortex/test.py
21
1739
# Copyright (c) 2006-2007 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Korey Sewell

m5.util.addToPath('../configs/common')
from cpu2000 import vortex

workload = vortex(isa, opsys, 'smred')
root.system.cpu.workload = workload.makeLiveProcess()
bsd-3-clause
tanwald/passpy
pass.py
1
3781
#!/usr/bin/env python

import gi
import logging
import sys
from ConfigParser import ConfigParser

gi.require_version('Gtk', '3.0')
from gi.repository import Gtk, Gio, GLib

from keychain.keychain import Keychain
from gui.window import Window
from os import path

################################################################################
# GLOBAL LOGGER
################################################################################

logger = logging.getLogger('PassPy')
logger.setLevel(logging.INFO)
logFormat = '%(asctime)s [%(levelname)s] %(name)s: %(message)s'
logfilePath = path.join(path.dirname(path.abspath(__file__)), 'pass.log')
logfile = logging.FileHandler(logfilePath, mode='w')
logfile.setLevel(logging.INFO)
logfile.setFormatter(logging.Formatter(logFormat))
console = logging.StreamHandler()
console.setFormatter(logging.Formatter(logFormat))
logger.addHandler(logfile)
logger.addHandler(console)

################################################################################
# PASSPY GTK APPLICATION
################################################################################

class PassPy(Gtk.Application):
    def __init__(self, config):
        Gtk.Application.__init__(
            self,
            flags=Gio.ApplicationFlags.HANDLES_COMMAND_LINE
        )
        self.config = config
        self.APPLICATION_NAME = 'PASSPY'
        self.KEYCHAIN_PATH = config.get('keychain', 'path')
        self.VAULT = config.get('keychain', 'vault')
        self.LISTED = config.get('entries', 'listed').split(';')
        self.EXCLUDED = config.get('entries', 'excluded').split(';')
        self.TRANSLATE = {k: v for k, v in config.items('translate')}
        self.TRANSLATE['_'] = ' '  # ConfigParser strips whitespaces
        self.WIN_WIDTH = int(config.get('defaults', 'window.width'))
        self.WIN_HEIGHT = int(config.get('defaults', 'window.height'))
        self.ICON = PassPy.getAbsPath('resources/logo.desktop.png')
        self.add_main_option(
            'debug', ord('d'),
            GLib.OptionFlags.NONE, GLib.OptionArg.NONE,
            'Launch in debug mode', None
        )
        self.keychain = Keychain(
            self.KEYCHAIN_PATH,
            self.VAULT,
            {
                'listed': self.LISTED,
                'excluded': self.EXCLUDED,
                'translate': self.TRANSLATE
            }
        )
        self.bundle = {k: v for k, v in config.items('bundle')}
        self.locked = True

    def do_activate(self):
        self.window = Window(self)
        self.window.show_all()

    def do_command_line(self, argv):
        options = argv.get_options_dict()
        if options.contains('debug'):
            logger.setLevel(logging.DEBUG)
            logger.info('Launched in debug mode')
        self.activate()
        return 0

    def unlock(self, password):
        if self.keychain.unlock(password):
            self.locked = False
        return not self.locked

    def getItems(self, type=None, name=None):
        return self.keychain.getItems(type=type, name=name)

    def getCategories(self):
        return self.keychain.getCategories()

    @staticmethod
    def getAbsPath(relPath):
        return path.join(path.dirname(path.abspath(__file__)), relPath)

################################################################################
# MAIN
################################################################################

if __name__ == '__main__':
    config = ConfigParser()
    config.optionxform = str
    config.read(PassPy.getAbsPath('pass.cfg'))
    app = PassPy(config)
    logger.info('Started')
    exitcode = app.run(sys.argv)
    logger.info('Finished with exit code {}'.format(exitcode))
    sys.exit(exitcode)
gpl-3.0
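The logging block in the record above wires one logger to two handlers so every message lands both in a file and on the console. A condensed standalone sketch of that setup (logger and file names are illustrative):

import logging

fmt = logging.Formatter('%(asctime)s [%(levelname)s] %(name)s: %(message)s')

logger = logging.getLogger('demo')
logger.setLevel(logging.INFO)

for handler in (logging.FileHandler('demo.log', mode='w'),
                logging.StreamHandler()):
    handler.setFormatter(fmt)  # same format on both destinations
    logger.addHandler(handler)

logger.info('goes to demo.log and to the console')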
hsagulo/mx
node_modules/grunt-sass/node_modules/node-sass/node_modules/node-gyp/gyp/pylib/gyp/flock_tool.py
1835
1748
#!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""These functions are executed via gyp-flock-tool when using the Makefile
generator.  Used on systems that don't have a built-in flock."""

import fcntl
import os
import struct
import subprocess
import sys


def main(args):
  executor = FlockTool()
  executor.Dispatch(args)


class FlockTool(object):
  """This class emulates the 'flock' command."""
  def Dispatch(self, args):
    """Dispatches a string command to a method."""
    if len(args) < 1:
      raise Exception("Not enough arguments")

    method = "Exec%s" % self._CommandifyName(args[0])
    getattr(self, method)(*args[1:])

  def _CommandifyName(self, name_string):
    """Transforms a tool name like copy-info-plist to CopyInfoPlist"""
    return name_string.title().replace('-', '')

  def ExecFlock(self, lockfile, *cmd_list):
    """Emulates the most basic behavior of Linux's flock(1)."""
    # Rely on exception handling to report errors.
    # Note that the stock python on SunOS has a bug
    # where fcntl.flock(fd, LOCK_EX) always fails
    # with EBADF, that's why we use this F_SETLK
    # hack instead.
    fd = os.open(lockfile, os.O_WRONLY|os.O_NOCTTY|os.O_CREAT, 0666)
    if sys.platform.startswith('aix'):
      # Python on AIX is compiled with LARGEFILE support, which changes the
      # struct size.
      op = struct.pack('hhIllqq', fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0)
    else:
      op = struct.pack('hhllhhl', fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0)
    fcntl.fcntl(fd, fcntl.F_SETLK, op)
    return subprocess.call(cmd_list)


if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
mit
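On Unix platforms without the SunOS quirk the tool above works around, the same lock-then-run behavior is much simpler with fcntl.flock. A minimal sketch of the straightforward variant ('build.lock' and the command are placeholders; Unix only):

import fcntl
import subprocess

with open('build.lock', 'w') as lockfile:
    fcntl.flock(lockfile, fcntl.LOCK_EX)  # blocks until the lock is ours
    rc = subprocess.call(['echo', 'critical section'])
# the lock is released when the file object is closed
print(rc)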
windyuuy/opera
chromium/src/third_party/jinja2/meta.py
406
4144
# -*- coding: utf-8 -*-
"""
    jinja2.meta
    ~~~~~~~~~~~

    This module implements various functions that expose information
    about templates that might be interesting for various kinds of
    applications.

    :copyright: (c) 2010 by the Jinja Team, see AUTHORS for more details.
    :license: BSD, see LICENSE for more details.
"""
from jinja2 import nodes
from jinja2.compiler import CodeGenerator


class TrackingCodeGenerator(CodeGenerator):
    """We abuse the code generator for introspection."""

    def __init__(self, environment):
        CodeGenerator.__init__(self, environment, '<introspection>',
                               '<introspection>')
        self.undeclared_identifiers = set()

    def write(self, x):
        """Don't write."""

    def pull_locals(self, frame):
        """Remember all undeclared identifiers."""
        self.undeclared_identifiers.update(frame.identifiers.undeclared)


def find_undeclared_variables(ast):
    """Returns a set of all variables in the AST that will be looked up from
    the context at runtime.  Because at compile time it's not known which
    variables will be used depending on the path the execution takes at
    runtime, all variables are returned.

    >>> from jinja2 import Environment, meta
    >>> env = Environment()
    >>> ast = env.parse('{% set foo = 42 %}{{ bar + foo }}')
    >>> meta.find_undeclared_variables(ast)
    set(['bar'])

    .. admonition:: Implementation

       Internally the code generator is used for finding undeclared
       variables.  This is good to know because the code generator might
       raise a :exc:`TemplateAssertionError` during compilation and as a
       matter of fact this function can currently raise that exception as
       well.
    """
    codegen = TrackingCodeGenerator(ast.environment)
    codegen.visit(ast)
    return codegen.undeclared_identifiers


def find_referenced_templates(ast):
    """Finds all the referenced templates from the AST.  This will return an
    iterator over all the hardcoded template extensions, inclusions and
    imports.  If dynamic inheritance or inclusion is used, `None` will be
    yielded.

    >>> from jinja2 import Environment, meta
    >>> env = Environment()
    >>> ast = env.parse('{% extends "layout.html" %}{% include helper %}')
    >>> list(meta.find_referenced_templates(ast))
    ['layout.html', None]

    This function is useful for dependency tracking.  For example if you want
    to rebuild parts of the website after a layout template has changed.
    """
    for node in ast.find_all((nodes.Extends, nodes.FromImport, nodes.Import,
                              nodes.Include)):
        if not isinstance(node.template, nodes.Const):
            # a tuple with some non consts in there
            if isinstance(node.template, (nodes.Tuple, nodes.List)):
                for template_name in node.template.items:
                    # something const, only yield the strings and ignore
                    # non-string consts that really just make no sense
                    if isinstance(template_name, nodes.Const):
                        if isinstance(template_name.value, basestring):
                            yield template_name.value
                    # something dynamic in there
                    else:
                        yield None
            # something dynamic we don't know about here
            else:
                yield None
            continue
        # constant is a basestring, direct template name
        if isinstance(node.template.value, basestring):
            yield node.template.value
        # a tuple or list (latter *should* not happen) made of consts,
        # yield the consts that are strings.  We could warn here for
        # non string values
        elif isinstance(node, nodes.Include) and \
             isinstance(node.template.value, (tuple, list)):
            for template_name in node.template.value:
                if isinstance(template_name, basestring):
                    yield template_name
        # something else we don't care about, we could warn here
        else:
            yield None
bsd-3-clause
rockneurotiko/django
tests/string_lookup/tests.py
290
2573
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.test import TestCase from .models import Article, Bar, Base, Child, Foo, Whiz class StringLookupTests(TestCase): def test_string_form_referencing(self): """ Regression test for #1661 and #1662 Check that string form referencing of models works, both as pre and post reference, on all RelatedField types. """ f1 = Foo(name="Foo1") f1.save() f2 = Foo(name="Foo2") f2.save() w1 = Whiz(name="Whiz1") w1.save() b1 = Bar(name="Bar1", normal=f1, fwd=w1, back=f2) b1.save() self.assertEqual(b1.normal, f1) self.assertEqual(b1.fwd, w1) self.assertEqual(b1.back, f2) base1 = Base(name="Base1") base1.save() child1 = Child(name="Child1", parent=base1) child1.save() self.assertEqual(child1.parent, base1) def test_unicode_chars_in_queries(self): """ Regression tests for #3937 make sure we can use unicode characters in queries. If these tests fail on MySQL, it's a problem with the test setup. A properly configured UTF-8 database can handle this. """ fx = Foo(name='Bjorn', friend='François') fx.save() self.assertEqual(Foo.objects.get(friend__contains='\xe7'), fx) # We can also do the above query using UTF-8 strings. self.assertEqual(Foo.objects.get(friend__contains=b'\xc3\xa7'), fx) def test_queries_on_textfields(self): """ Regression tests for #5087 make sure we can perform queries on TextFields. """ a = Article(name='Test', text='The quick brown fox jumps over the lazy dog.') a.save() self.assertEqual(Article.objects.get(text__exact='The quick brown fox jumps over the lazy dog.'), a) self.assertEqual(Article.objects.get(text__contains='quick brown fox'), a) def test_ipaddress_on_postgresql(self): """ Regression test for #708 "like" queries on IP address fields require casting with HOST() (on PostgreSQL). """ a = Article(name='IP test', text='The body', submitted_from='192.0.2.100') a.save() self.assertEqual(repr(Article.objects.filter(submitted_from__contains='192.0.2')), repr([a])) # Test that the searches do not match the subnet mask (/32 in this case) self.assertEqual(Article.objects.filter(submitted_from__contains='32').count(), 0)
bsd-3-clause
bewing/napalm-base
napalm_base/clitools/helpers.py
2
3676
# -*- coding: utf-8 -*-
'''
NAPALM CLI Tools: helpers
=========================

Defines helpers for the CLI tools.
'''

from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals

# stdlib
import ast
import sys
import logging
import getpass
import argparse
import warnings


def warning():
    warnings.simplefilter('always', DeprecationWarning)
    warnings.warn("This tool has been deprecated, please use `napalm` instead\n",
                  DeprecationWarning)


def build_help(connect_test=False, validate=False, configure=False, napalm_cli=False):
    parser = argparse.ArgumentParser(
        description='Command line tool to handle configuration on devices using NAPALM. '
                    'The script will print the diff on the screen.',
        epilog='Automate all the things!!!'
    )
    parser.add_argument(
        dest='hostname',
        action='store',
        help='Host where you want to deploy the configuration.'
    )
    parser.add_argument(
        '--user', '-u',
        dest='user',
        action='store',
        default=getpass.getuser(),
        help='User for authenticating to the host. Default: user running the script.'
    )
    parser.add_argument(
        '--password', '-p',
        dest='password',
        action='store',
        help='Password for authenticating to the host. '
             'If you do not provide a password in the CLI you will be prompted.',
    )
    parser.add_argument(
        '--vendor', '-v',
        dest='vendor',
        action='store',
        required=True,
        help='Host Operating System.'
    )
    parser.add_argument(
        '--optional_args', '-o',
        dest='optional_args',
        action='store',
        help='String with comma separated key=value pairs passed via optional_args to the driver.',
    )
    parser.add_argument(
        '--debug',
        dest='debug',
        action='store_true',
        help='Enables debug mode; more verbosity.'
    )

    if configure:
        parser.add_argument(
            '--strategy', '-s',
            dest='strategy',
            action='store',
            choices=['replace', 'merge'],
            default='replace',
            help='Strategy to use to deploy configuration. Default: replace.'
        )
        parser.add_argument(
            dest='config_file',
            action='store',
            help='File containing the configuration you want to deploy.'
        )
        parser.add_argument(
            '--dry-run', '-d',
            dest='dry_run',
            action='store_true',
            default=None,
            help='Only returns diff, it does not deploy the configuration.',
        )
    elif validate:
        parser.add_argument(
            '--validation_file', '-f',
            dest='validation_file',
            action='store',
            help="Validation file containing the resources' desired states."
        )

    args = parser.parse_args()

    if args.password is None:
        password = getpass.getpass('Enter password: ')
        setattr(args, 'password', password)

    return args


def configure_logging(logger, debug):
    if debug:
        logger.setLevel(logging.DEBUG)
    else:
        logger.setLevel(logging.INFO)
    ch = logging.StreamHandler(sys.stdout)
    formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    ch.setFormatter(formatter)
    logger.addHandler(ch)
    return logger


def parse_optional_args(optional_args):
    if optional_args is not None:
        return {x.split('=')[0]: ast.literal_eval(x.split('=')[1])
                for x in optional_args.split(',')}
    return {}
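A minimal usage sketch (added for illustration, not in the original module): parse_optional_args decodes each value with ast.literal_eval, so numbers and quoted strings round-trip into a dict suitable for a driver's optional_args.

    from napalm_base.clitools.helpers import parse_optional_args

    # Values are parsed with ast.literal_eval: ints stay ints, quoted strings stay strings.
    opts = parse_optional_args("port=2201,secret='s3cret'")
    assert opts == {'port': 2201, 'secret': 's3cret'}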
apache-2.0
kalahbrown/HueBigSQL
desktop/core/ext-py/markdown/markdown/commandline.py
126
3534
""" COMMAND-LINE SPECIFIC STUFF ============================================================================= The rest of the code is specifically for handling the case where Python Markdown is called from the command line. """ import markdown import sys import logging from logging import DEBUG, INFO, WARN, ERROR, CRITICAL EXECUTABLE_NAME_FOR_USAGE = "python markdown.py" """ The name used in the usage statement displayed for python versions < 2.3. (With python 2.3 and higher the usage statement is generated by optparse and uses the actual name of the executable called.) """ OPTPARSE_WARNING = """ Python 2.3 or higher required for advanced command line options. For lower versions of Python use: %s INPUT_FILE > OUTPUT_FILE """ % EXECUTABLE_NAME_FOR_USAGE def parse_options(): """ Define and parse `optparse` options for command-line usage. """ try: optparse = __import__("optparse") except: if len(sys.argv) == 2: return {'input': sys.argv[1], 'output': None, 'safe': False, 'extensions': [], 'encoding': None }, CRITICAL else: print OPTPARSE_WARNING return None, None parser = optparse.OptionParser(usage="%prog INPUTFILE [options]") parser.add_option("-f", "--file", dest="filename", default=sys.stdout, help="write output to OUTPUT_FILE", metavar="OUTPUT_FILE") parser.add_option("-e", "--encoding", dest="encoding", help="encoding for input and output files",) parser.add_option("-q", "--quiet", default = CRITICAL, action="store_const", const=CRITICAL+10, dest="verbose", help="suppress all messages") parser.add_option("-v", "--verbose", action="store_const", const=INFO, dest="verbose", help="print info messages") parser.add_option("-s", "--safe", dest="safe", default=False, metavar="SAFE_MODE", help="safe mode ('replace', 'remove' or 'escape' user's HTML tag)") parser.add_option("-o", "--output_format", dest="output_format", default='xhtml1', metavar="OUTPUT_FORMAT", help="Format of output. One of 'xhtml1' (default) or 'html4'.") parser.add_option("--noisy", action="store_const", const=DEBUG, dest="verbose", help="print debug messages") parser.add_option("-x", "--extension", action="append", dest="extensions", help = "load extension EXTENSION", metavar="EXTENSION") (options, args) = parser.parse_args() if not len(args) == 1: parser.print_help() return None, None else: input_file = args[0] if not options.extensions: options.extensions = [] return {'input': input_file, 'output': options.filename, 'safe_mode': options.safe, 'extensions': options.extensions, 'encoding': options.encoding, 'output_format': options.output_format}, options.verbose def run(): """Run Markdown from the command line.""" # Parse options and adjust logging level if necessary options, logging_level = parse_options() if not options: sys.exit(0) if logging_level: logging.getLogger('MARKDOWN').setLevel(logging_level) # Run markdown.markdownFromFile(**options)
apache-2.0
nextgis/NextGIS_QGIS_open
python/plugins/processing/ui/ui_DlgHelpEdition.py
6
4336
# -*- coding: utf-8 -*- # Form implementation generated from reading ui file 'DlgHelpEdition.ui' # # Created: Fri Nov 21 13:25:47 2014 # by: PyQt4 UI code generator 4.11.1 # # WARNING! All changes made in this file will be lost! from PyQt4 import QtCore, QtGui try: _fromUtf8 = QtCore.QString.fromUtf8 except AttributeError: def _fromUtf8(s): return s try: _encoding = QtGui.QApplication.UnicodeUTF8 def _translate(context, text, disambig): return QtGui.QApplication.translate(context, text, disambig, _encoding) except AttributeError: def _translate(context, text, disambig): return QtGui.QApplication.translate(context, text, disambig) class Ui_DlgHelpEdition(object): def setupUi(self, DlgHelpEdition): DlgHelpEdition.setObjectName(_fromUtf8("DlgHelpEdition")) DlgHelpEdition.resize(600, 460) self.verticalLayout_3 = QtGui.QVBoxLayout(DlgHelpEdition) self.verticalLayout_3.setSpacing(6) self.verticalLayout_3.setMargin(9) self.verticalLayout_3.setObjectName(_fromUtf8("verticalLayout_3")) self.splitter_2 = QtGui.QSplitter(DlgHelpEdition) self.splitter_2.setOrientation(QtCore.Qt.Vertical) self.splitter_2.setObjectName(_fromUtf8("splitter_2")) self.webView = QtWebKit.QWebView(self.splitter_2) self.webView.setUrl(QtCore.QUrl(_fromUtf8("about:blank"))) self.webView.setObjectName(_fromUtf8("webView")) self.splitter = QtGui.QSplitter(self.splitter_2) self.splitter.setOrientation(QtCore.Qt.Horizontal) self.splitter.setObjectName(_fromUtf8("splitter")) self.layoutWidget = QtGui.QWidget(self.splitter) self.layoutWidget.setObjectName(_fromUtf8("layoutWidget")) self.verticalLayout = QtGui.QVBoxLayout(self.layoutWidget) self.verticalLayout.setSpacing(2) self.verticalLayout.setMargin(0) self.verticalLayout.setObjectName(_fromUtf8("verticalLayout")) self.label = QtGui.QLabel(self.layoutWidget) self.label.setObjectName(_fromUtf8("label")) self.verticalLayout.addWidget(self.label) self.tree = QtGui.QTreeWidget(self.layoutWidget) self.tree.setMinimumSize(QtCore.QSize(0, 200)) self.tree.setAlternatingRowColors(True) self.tree.setObjectName(_fromUtf8("tree")) self.tree.headerItem().setText(0, _fromUtf8("1")) self.tree.header().setVisible(False) self.verticalLayout.addWidget(self.tree) self.layoutWidget1 = QtGui.QWidget(self.splitter) self.layoutWidget1.setObjectName(_fromUtf8("layoutWidget1")) self.verticalLayout_2 = QtGui.QVBoxLayout(self.layoutWidget1) self.verticalLayout_2.setSpacing(2) self.verticalLayout_2.setMargin(0) self.verticalLayout_2.setObjectName(_fromUtf8("verticalLayout_2")) self.lblDescription = QtGui.QLabel(self.layoutWidget1) self.lblDescription.setObjectName(_fromUtf8("lblDescription")) self.verticalLayout_2.addWidget(self.lblDescription) self.text = QtGui.QTextEdit(self.layoutWidget1) self.text.setMinimumSize(QtCore.QSize(0, 200)) self.text.setObjectName(_fromUtf8("text")) self.verticalLayout_2.addWidget(self.text) self.verticalLayout_3.addWidget(self.splitter_2) self.buttonBox = QtGui.QDialogButtonBox(DlgHelpEdition) self.buttonBox.setOrientation(QtCore.Qt.Horizontal) self.buttonBox.setStandardButtons(QtGui.QDialogButtonBox.Cancel|QtGui.QDialogButtonBox.Ok) self.buttonBox.setObjectName(_fromUtf8("buttonBox")) self.verticalLayout_3.addWidget(self.buttonBox) self.retranslateUi(DlgHelpEdition) QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("accepted()")), DlgHelpEdition.accept) QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("rejected()")), DlgHelpEdition.reject) QtCore.QMetaObject.connectSlotsByName(DlgHelpEdition) def retranslateUi(self, DlgHelpEdition): 
DlgHelpEdition.setWindowTitle(_translate("DlgHelpEdition", "Help editor", None)) self.label.setText(_translate("DlgHelpEdition", "Select element to edit", None)) self.lblDescription.setText(_translate("DlgHelpEdition", "Element description", None)) from PyQt4 import QtWebKit
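The standard way to consume a pyuic-generated class like this one (a sketch, not part of the generated file, which would be overwritten on regeneration anyway):

    import sys
    from PyQt4 import QtGui

    app = QtGui.QApplication(sys.argv)
    dialog = QtGui.QDialog()
    ui = Ui_DlgHelpEdition()
    ui.setupUi(dialog)  # builds the widget tree onto the plain QDialog
    dialog.show()
    sys.exit(app.exec_())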
gpl-2.0
askulkarni2/fabric
fabric/colors.py
57
1149
""" .. versionadded:: 0.9.2 Functions for wrapping strings in ANSI color codes. Each function within this module returns the input string ``text``, wrapped with ANSI color codes for the appropriate color. For example, to print some text as green on supporting terminals:: from fabric.colors import green print(green("This text is green!")) Because these functions simply return modified strings, you can nest them:: from fabric.colors import red, green print(red("This sentence is red, except for " + \ green("these words, which are green") + ".")) If ``bold`` is set to ``True``, the ANSI flag for bolding will be flipped on for that particular invocation, which usually shows up as a bold or brighter version of the original color on most terminals. """ def _wrap_with(code): def inner(text, bold=False): c = code if bold: c = "1;%s" % c return "\033[%sm%s\033[0m" % (c, text) return inner red = _wrap_with('31') green = _wrap_with('32') yellow = _wrap_with('33') blue = _wrap_with('34') magenta = _wrap_with('35') cyan = _wrap_with('36') white = _wrap_with('37')
bsd-2-clause
stonegithubs/odoo
addons/payment_buckaroo/controllers/main.py
325
1270
# -*- coding: utf-8 -*-

try:
    import simplejson as json
except ImportError:
    import json
import logging
import pprint
import werkzeug

from openerp import http, SUPERUSER_ID
from openerp.http import request

_logger = logging.getLogger(__name__)


class BuckarooController(http.Controller):
    _return_url = '/payment/buckaroo/return'
    _cancel_url = '/payment/buckaroo/cancel'
    _exception_url = '/payment/buckaroo/error'
    _reject_url = '/payment/buckaroo/reject'

    @http.route([
        '/payment/buckaroo/return',
        '/payment/buckaroo/cancel',
        '/payment/buckaroo/error',
        '/payment/buckaroo/reject',
    ], type='http', auth='none')
    def buckaroo_return(self, **post):
        """Process the feedback Buckaroo posts back on its return, cancel, error and reject URLs."""
        _logger.info('Buckaroo: entering form_feedback with post data %s', pprint.pformat(post))  # debug
        request.registry['payment.transaction'].form_feedback(
            request.cr, SUPERUSER_ID, post, 'buckaroo', context=request.context)
        return_url = post.pop('return_url', '')
        if not return_url:
            # ADD_RETURNDATA arrives as a single-quoted JSON blob; normalize
            # the quotes before decoding it.
            data = post.pop('ADD_RETURNDATA', '{}').replace("'", "\"")
            custom = json.loads(data)
            return_url = custom.pop('return_url', '/')
        return werkzeug.utils.redirect(return_url)
agpl-3.0
ossobv/exactonline
exactonline/api/manager.py
1
3444
# vim: set ts=8 sw=4 sts=4 et ai tw=79: """ Base manager class for resource helpers. This file is part of the Exact Online REST API Library in Python (EORALP), licensed under the LGPLv3+. Copyright (C) 2015-2018 Walter Doekes, OSSO B.V. """ from ..exceptions import MultipleObjectsReturned, ObjectDoesNotExist from ..http import binquote from ..resource import DELETE, GET, POST, PUT # Python23 compatibility helpers try: unicode # python2 only except NameError: to_binstr = (lambda x: x.encode('utf-8')) # unistr-to-binstr to_unistr = str # nonstr-to-unistr else: to_binstr = str to_unistr = unicode # noqa: non-str-to-unistr class Manager(object): """ Inherit from this when you're creating a property on the ExactApi, like this: class Relations(Manager): resource = 'some/resource' class ExactApi(...): relations = Relations.as_property() storage = SomeStorage() api = ExactApi(storage=storage) api.relations.all() """ resource = None # set this in your subclass @classmethod def as_property(cls): @property def cached_getter(exactapi): propname = '_prop_%s' % (cls.__name__,) if not hasattr(exactapi, propname): setattr(exactapi, propname, cls(exactapi)) return getattr(exactapi, propname) return cached_getter def __init__(self, api): self._api = api # == GET / get one / get many == def all(self): # Select all without filtering. return self.filter() def get(self, **kwargs): assert 'top' not in kwargs ret = self.filter(top=2, **kwargs) if not ret: raise ObjectDoesNotExist() if len(ret) > 1: raise MultipleObjectsReturned() return ret[0] def filter(self, **kwargs): # kwargs = {'filter': "EntryDate+gt+datetime'2014-01-01'", 'top': 5} args = [] for key, value in kwargs.items(): args.append('$%s=%s' % ( key, binquote(to_unistr(value)))) if args: args = ('?' + '&'.join(args)) else: args = '' ret = self._api.restv1(GET(self.resource + args)) return ret # == POST / create == def create(self, element_dict): ret = self._api.restv1(POST(str(self.resource), element_dict)) return ret # == DELETE / remove == def delete(self, remote_guid): remote_id = self._remote_guid(remote_guid) uri = '%s(%s)' % (self.resource, remote_id) ret = self._api.restv1(DELETE(str(uri))) return ret # == PUT / update == def update(self, remote_guid, element_dict): remote_id = self._remote_guid(remote_guid) uri = '%s(%s)' % (self.resource, remote_id) ret = self._api.restv1(PUT(str(uri), element_dict)) return ret # == helpers == def _filter_append(self, kwargs, extra_filter): if 'filter' in kwargs: kwargs['filter'] = u'(%s) and %s' % (kwargs['filter'], extra_filter) else: kwargs['filter'] = extra_filter def _remote_datetime(self, remote_datetime): return remote_datetime.strftime("datetime'%Y-%m-%d'") def _remote_guid(self, remote_guid): return "guid'%s'" % (remote_guid.replace("'", "''"),)
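A concrete sketch of the pattern the Manager docstring describes (the resource path and filter expression are illustrative): a subclass pins `resource`, `as_property()` caches one manager per API instance, and `filter()` turns kwargs into `$key=value` query arguments.

    class Relations(Manager):
        resource = 'crm/Accounts'  # hypothetical resource path

    class ExactApi(object):
        relations = Relations.as_property()  # one cached manager per API instance

    # Assuming an ExactApi wired up with storage and restv1():
    # api.relations.filter(filter="Name eq 'ACME'", top=5)
    # issues roughly: GET crm/Accounts?$filter=Name%20eq%20%27ACME%27&$top=5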
lgpl-3.0
openstack/os-win
os_win/utils/hostutils.py
1
17843
# Copyright 2013 Cloudbase Solutions Srl # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import socket from oslo_log import log as logging from os_win._i18n import _ from os_win import _utils from os_win import constants from os_win import exceptions from os_win.utils import baseutils from os_win.utils.winapi import libs as w_lib kernel32 = w_lib.get_shared_lib_handle(w_lib.KERNEL32) LOG = logging.getLogger(__name__) class HostUtils(baseutils.BaseUtilsVirt): _windows_version = None _MSVM_PROCESSOR = 'Msvm_Processor' _MSVM_MEMORY = 'Msvm_Memory' _MSVM_NUMA_NODE = 'Msvm_NumaNode' _CENTRAL_PROCESSOR = 'Central Processor' _HOST_FORCED_REBOOT = 6 _HOST_FORCED_SHUTDOWN = 12 _DEFAULT_VM_GENERATION = constants.IMAGE_PROP_VM_GEN_1 FEATURE_RDS_VIRTUALIZATION = 322 FEATURE_MPIO = 57 _wmi_cimv2_namespace = '//./root/cimv2' _wmi_standard_cimv2_namespace = '//./root/StandardCimv2' def __init__(self, host='.'): super(HostUtils, self).__init__(host) self._conn_cimv2 = self._get_wmi_conn(self._wmi_cimv2_namespace, privileges=["Shutdown"]) self._conn_scimv2 = self._get_wmi_conn( self._wmi_standard_cimv2_namespace) self._netutils_prop = None @property def _netutils(self): if not self._netutils_prop: # NOTE(claudiub): we're importing utilsfactory here in order to # avoid circular dependencies. from os_win import utilsfactory self._netutils_prop = utilsfactory.get_networkutils() return self._netutils_prop def get_cpus_info(self): """Returns dictionary containing information about the host's CPUs.""" # NOTE(abalutoiu): Specifying exactly the fields that we need # improves the speed of the query. The LoadPercentage field # is the load capacity of each processor averaged to the last # second, which is time wasted. cpus = self._conn_cimv2.query( "SELECT Architecture, Name, Manufacturer, MaxClockSpeed, " "NumberOfCores, NumberOfLogicalProcessors FROM Win32_Processor " "WHERE ProcessorType = 3") cpus_list = [] for cpu in cpus: cpu_info = {'Architecture': cpu.Architecture, 'Name': cpu.Name, 'Manufacturer': cpu.Manufacturer, 'MaxClockSpeed': cpu.MaxClockSpeed, 'NumberOfCores': cpu.NumberOfCores, 'NumberOfLogicalProcessors': cpu.NumberOfLogicalProcessors} cpus_list.append(cpu_info) return cpus_list def is_cpu_feature_present(self, feature_key): """Checks if the host's CPUs have the given feature.""" return kernel32.IsProcessorFeaturePresent(feature_key) def get_memory_info(self): """Returns a tuple with total visible memory and free physical memory. The returned values are expressed in KB. """ mem_info = self._conn_cimv2.query("SELECT TotalVisibleMemorySize, " "FreePhysicalMemory " "FROM win32_operatingsystem")[0] return (int(mem_info.TotalVisibleMemorySize), int(mem_info.FreePhysicalMemory)) # TODO(atuvenie) This method should be removed once all the callers have # changed to use the get_disk_capacity method from diskutils. def get_volume_info(self, drive): """Returns a tuple with total size and free space of the given drive. Returned values are expressed in bytes. 
:param drive: the drive letter of the logical disk whose information is required. """ logical_disk = self._conn_cimv2.query("SELECT Size, FreeSpace " "FROM win32_logicaldisk " "WHERE DeviceID='%s'" % drive)[0] return (int(logical_disk.Size), int(logical_disk.FreeSpace)) def check_min_windows_version(self, major, minor, build=0): """Compares the host's kernel version with the given version. :returns: True if the host's kernel version is higher or equal to the given version. """ version_str = self.get_windows_version() return list(map(int, version_str.split('.'))) >= [major, minor, build] def get_windows_version(self): """Returns a string representing the host's kernel version.""" if not HostUtils._windows_version: Win32_OperatingSystem = self._conn_cimv2.Win32_OperatingSystem()[0] HostUtils._windows_version = Win32_OperatingSystem.Version return HostUtils._windows_version def get_local_ips(self): """Returns the list of locally assigned IPs.""" hostname = socket.gethostname() return _utils.get_ips(hostname) def get_host_tick_count64(self): """Returns host uptime in milliseconds.""" return kernel32.GetTickCount64() def host_power_action(self, action): win32_os = self._conn_cimv2.Win32_OperatingSystem()[0] if action == constants.HOST_POWER_ACTION_SHUTDOWN: win32_os.Win32Shutdown(self._HOST_FORCED_SHUTDOWN) elif action == constants.HOST_POWER_ACTION_REBOOT: win32_os.Win32Shutdown(self._HOST_FORCED_REBOOT) else: raise NotImplementedError( _("Host %(action)s is not supported by the Hyper-V driver") % {"action": action}) def get_supported_vm_types(self): """Get the supported Hyper-V VM generations. Hyper-V Generation 2 VMs are supported in Windows 8.1, Windows Server / Hyper-V Server 2012 R2 or newer. :returns: array of supported VM generations (ex. ['hyperv-gen1']) """ if self.check_min_windows_version(6, 3): return [constants.IMAGE_PROP_VM_GEN_1, constants.IMAGE_PROP_VM_GEN_2] else: return [constants.IMAGE_PROP_VM_GEN_1] def get_default_vm_generation(self): return self._DEFAULT_VM_GENERATION def check_server_feature(self, feature_id): """Checks if the given feature exists on the host.""" return len(self._conn_cimv2.Win32_ServerFeature(ID=feature_id)) > 0 def get_nic_sriov_vfs(self): """Get host's NIC SR-IOV VFs. This method will ignore the vSwitches which do not have SR-IOV enabled, or which are poorly configured (the NIC does not support SR-IOV). :returns: a list of dictionaries, containing the following fields: - 'vswitch_name': the vSwtch name. - 'total_vfs': the vSwitch's maximum number of VFs. (> 0) - 'used_vfs': the vSwitch's number of used VFs. (<= 'total_vfs') """ # TODO(claudiub): We have added a different method that returns all # of the offloading capabilities available, including SR-IOV. # Remove this method in S. vfs = [] # NOTE(claudiub): A vSwitch will have to be configured to enable # SR-IOV, otherwise its IOVPreferred flag will be False. vswitch_sds = self._conn.Msvm_VirtualEthernetSwitchSettingData( IOVPreferred=True) for vswitch_sd in vswitch_sds: hw_offload = self._conn.Msvm_EthernetSwitchHardwareOffloadData( SystemName=vswitch_sd.VirtualSystemIdentifier)[0] if not hw_offload.IovVfCapacity: LOG.warning("VSwitch %s has SR-IOV enabled, but it is not " "supported by the NIC or by the OS.", vswitch_sd.ElementName) continue nic_name = self._netutils.get_vswitch_external_network_name( vswitch_sd.ElementName) if not nic_name: # NOTE(claudiub): This can happen if the vSwitch is not # external. 
LOG.warning("VSwitch %s is not external.", vswitch_sd.ElementName) continue nic = self._conn_scimv2.MSFT_NetAdapter( InterfaceDescription=nic_name)[0] vfs.append({ 'vswitch_name': vswitch_sd.ElementName, 'device_id': nic.PnPDeviceID, 'total_vfs': hw_offload.IovVfCapacity, 'used_vfs': hw_offload.IovVfUsage, }) return vfs def get_nic_hardware_offload_info(self): """Get host's NIC hardware offload information. Hyper-V offers a few different hardware offloading options for VMs and their vNICs, depending on the vSwitches' NICs hardware resources and capabilities. These resources are managed and assigned automatically by Hyper-V. These resources are: VFs, IOV queue pairs, VMQs, IPsec security association offloads. :returns: a list of dictionaries, containing the following fields: - 'vswitch_name': the switch name. - 'device_id': the switch's physical NIC's PnP device ID. - 'total_vfs': the switch's maximum number of VFs. (>= 0) - 'used_vfs': the switch's number of used VFs. (<= 'total_vfs') - 'total_iov_queue_pairs': the switch's maximum number of IOV queue pairs. (>= 'total_vfs') - 'used_iov_queue_pairs': the switch's number of used IOV queue pairs (<= 'total_iov_queue_pairs') - 'total_vmqs': the switch's maximum number of VMQs. (>= 0) - 'used_vmqs': the switch's number of used VMQs. (<= 'total_vmqs') - 'total_ipsecsa': the maximum number of IPsec SA offloads supported by the switch. (>= 0) - 'used_ipsecsa': the switch's number of IPsec SA offloads currently in use. (<= 'total_ipsecsa') """ hw_offload_data = [] vswitch_sds = self._conn.Msvm_VirtualEthernetSwitchSettingData() hw_offload_sds = self._conn.Msvm_EthernetSwitchHardwareOffloadData() for vswitch_sd in vswitch_sds: hw_offload = [ s for s in hw_offload_sds if s.SystemName == vswitch_sd.VirtualSystemIdentifier][0] vswitch_offload_data = self._get_nic_hw_offload_info( vswitch_sd, hw_offload) if vswitch_offload_data: hw_offload_data.append(vswitch_offload_data) return hw_offload_data def _get_nic_hw_offload_info(self, vswitch_sd, hw_offload_sd): nic_name = self._netutils.get_vswitch_external_network_name( vswitch_sd.ElementName) if not nic_name: # NOTE(claudiub): This can happen if the vSwitch is not # external. LOG.warning("VSwitch %s is not external.", vswitch_sd.ElementName) return # check if the vSwitch is misconfigured. if vswitch_sd.IOVPreferred and not hw_offload_sd.IovVfCapacity: LOG.warning("VSwitch %s has SR-IOV enabled, but it is not " "supported by the NIC or by the OS.", vswitch_sd.ElementName) nic = self._conn_scimv2.MSFT_NetAdapter( InterfaceDescription=nic_name)[0] return { 'vswitch_name': vswitch_sd.ElementName, 'device_id': nic.PnPDeviceID, 'total_vfs': hw_offload_sd.IovVfCapacity, 'used_vfs': hw_offload_sd.IovVfUsage, 'total_iov_queue_pairs': hw_offload_sd.IovQueuePairCapacity, 'used_iov_queue_pairs': hw_offload_sd.IovQueuePairUsage, 'total_vmqs': hw_offload_sd.VmqCapacity, 'used_vmqs': hw_offload_sd.VmqUsage, 'total_ipsecsa': hw_offload_sd.IPsecSACapacity, 'used_ipsecsa': hw_offload_sd.IPsecSAUsage, } def get_numa_nodes(self): """Returns the host's list of NUMA nodes. :returns: list of dictionaries containing information about each host NUMA node. Each host has at least one NUMA node. """ numa_nodes = self._conn.Msvm_NumaNode() nodes_info = [] system_memory = self._conn.Msvm_Memory(['NumberOfBlocks']) processors = self._conn.Msvm_Processor(['DeviceID']) for node in numa_nodes: # Due to a bug in vmms, getting Msvm_Processor for the numa # node associators resulted in a vmms crash. 
# As an alternative to using associators we have to manually get # the related Msvm_Processor classes. # Msvm_HostedDependency is the association class between # Msvm_NumaNode and Msvm_Processor. We need to use this class to # relate the two because using associators on Msvm_Processor # will also result in a crash. numa_assoc = self._conn.Msvm_HostedDependency( Antecedent=node.path_()) numa_node_assoc = [item.Dependent for item in numa_assoc] memory_info = self._get_numa_memory_info(numa_node_assoc, system_memory) if not memory_info: LOG.warning("Could not find memory information for NUMA " "node. Skipping node measurements.") continue cpu_info = self._get_numa_cpu_info(numa_node_assoc, processors) if not cpu_info: LOG.warning("Could not find CPU information for NUMA " "node. Skipping node measurements.") continue node_info = { # NodeID has the format: Microsoft:PhysicalNode\<NODE_ID> 'id': node.NodeID.split('\\')[-1], # memory block size is 1MB. 'memory': memory_info.NumberOfBlocks, 'memory_usage': node.CurrentlyConsumableMemoryBlocks, # DeviceID has the format: Microsoft:UUID\0\<DEV_ID> 'cpuset': set([c.DeviceID.split('\\')[-1] for c in cpu_info]), # cpu_usage can be set, each CPU has a "LoadPercentage" 'cpu_usage': 0, } nodes_info.append(node_info) return nodes_info def _get_numa_memory_info(self, numa_node_assoc, system_memory): memory_info = [] paths = [x.path_().upper() for x in numa_node_assoc] for memory in system_memory: if memory.path_().upper() in paths: memory_info.append(memory) if memory_info: return memory_info[0] def _get_numa_cpu_info(self, numa_node_assoc, processors): cpu_info = [] paths = [x.path_().upper() for x in numa_node_assoc] for proc in processors: if proc.path_().upper() in paths: cpu_info.append(proc) return cpu_info def get_remotefx_gpu_info(self): """Returns information about the GPUs used for RemoteFX. :returns: list with dictionaries containing information about each GPU used for RemoteFX. """ gpus = [] all_gpus = self._conn.Msvm_Physical3dGraphicsProcessor( EnabledForVirtualization=True) for gpu in all_gpus: gpus.append({'name': gpu.Name, 'driver_version': gpu.DriverVersion, 'total_video_ram': gpu.TotalVideoMemory, 'available_video_ram': gpu.AvailableVideoMemory, 'directx_version': gpu.DirectXVersion}) return gpus def verify_host_remotefx_capability(self): """Validates that the host supports RemoteFX. :raises exceptions.HyperVRemoteFXException: if the host has no GPU that supports DirectX 11, or SLAT. """ synth_3d_video_pool = self._conn.Msvm_Synth3dVideoPool()[0] if not synth_3d_video_pool.IsGpuCapable: raise exceptions.HyperVRemoteFXException( _("To enable RemoteFX on Hyper-V at least one GPU supporting " "DirectX 11 is required.")) if not synth_3d_video_pool.IsSlatCapable: raise exceptions.HyperVRemoteFXException( _("To enable RemoteFX on Hyper-V it is required that the host " "GPUs support SLAT.")) def is_host_guarded(self): """Checks if the host is guarded. :returns: False, only Windows / Hyper-V Server 2016 or newer can be guarded. """ return False def supports_nested_virtualization(self): """Checks if the host supports nested virtualization. :returns: False, only Windows / Hyper-V Server 2016 or newer supports nested virtualization. """ return False def get_pci_passthrough_devices(self): """Get host PCI devices path. Discrete device assignment is supported only on Windows / Hyper-V Server 2016 or newer. :returns: a list of the assignable PCI devices. """ return []
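A small sketch of the comparison check_min_windows_version relies on (the version string is hypothetical): the kernel version is compared as a list of ints, so components are ordered numerically rather than lexically.

    version_str = '6.3.9600'  # e.g. Win32_OperatingSystem.Version on 2012 R2
    assert list(map(int, version_str.split('.'))) >= [6, 2, 0]
    assert not (list(map(int, version_str.split('.'))) >= [10, 0, 0])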
apache-2.0
loris0/Cuivre
cuivre/bundle.py
1
4154
"""Convergence notary bundle type.""" # Copyright (c) 2011-2013 Loris Cuoghi <loris.cuoghi@gmail.com> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import cuivre.client.types import json BUNDLE_FIELD_VERSION = 'version' BUNDLE_FIELD_HOSTS = 'hosts' BUNDLE_FIELD_HOST = 'host' BUNDLE_FIELD_HTTP_PORT = 'http_port' BUNDLE_FIELD_SSL_PORT = 'ssl_port' BUNDLE_FIELD_CERTIFICATE = 'certificate' BUNDLE_FIELD_NAME = 'name' BUNDLE_FIELD_REGION = 'region' BUNDLE_FIELD_BUNDLE_LOCATION = 'bundle_location' BUNDLE_VERSION_1 = 1 class NotaryBundleException(cuivre.CuivreException): def __init__(self, msg): cuivre.CuivreException.__init__(self) class BrokenBundle(NotaryBundleException): def __init__(self, msg): NotaryBundleException.__init__(self, msg) class NotaryBundle(object): """Abstraction of the notary bundle file formats.""" def __init__(self, text=None, file_path=None): self.version = None self.notary_entity = None if text is not None: self.parse(text) elif file_path is not None: self.parse_file(file_path) def get_dict(self): """Compile a python dictionary from the bundle's information.""" d = { BUNDLE_FIELD_VERSION : self.version, BUNDLE_FIELD_HOSTS : [], BUNDLE_FIELD_NAME : self.notary_entity.name, BUNDLE_FIELD_BUNDLE_LOCATION : self.notary_entity.bundle_location } for host in self.notary_entity.hosts.itervalues(): host_dict = { BUNDLE_FIELD_HOST : host.host, BUNDLE_FIELD_HTTP_PORT : host.http_port, BUNDLE_FIELD_SSL_PORT : host.ssl_port, BUNDLE_FIELD_CERTIFICATE : host.pem_cert } d['hosts'].append(host_dict) return d def get_json_string(self): """Compile a JSON string of the bundle.""" d = self.get_dict() j = json.dumps(d) return j def parse(self, text): """Parse a Convergence notary bundle contained in a string.""" d = json.loads(text) if int(d[BUNDLE_FIELD_VERSION]) == BUNDLE_VERSION_1: self.parse_version_1(d) else: raise NotaryBundleParseError( 'Could not identify the notary bundle\'s version number.') def parse_file(self, file_path): """Parse a Convergence notary bundle file.""" with open(file_path, 'r') as f: s = f.read() self.parse(s) def parse_version_1(self, json_dict): """Parse a Convergence notary bundle contained in a string, version 1.""" self.version = BUNDLE_VERSION_1 if BUNDLE_FIELD_NAME not in json_dict.iterkeys(): raise BrokenBundle() # In version 1 of the bundle format, specifying a region is optional. region = json_dict.get(BUNDLE_FIELD_REGION, None) if region is not None and len(region.strip()) < 1: region = None notary_entity = cuivre.client.types.NotaryEntityData( 0, json_dict[BUNDLE_FIELD_NAME], region, json_dict[BUNDLE_FIELD_BUNDLE_LOCATION] ) self.notary_entity = notary_entity notary_host_tmp_id = 0 for host in json_dict[BUNDLE_FIELD_HOSTS]: public_key_pem = cuivre.client.utils. 
\ extract_public_key_from_certificate(host[BUNDLE_FIELD_CERTIFICATE]) self.notary_entity.add_notary_host( notary_host_tmp_id, host[BUNDLE_FIELD_HOST], host[BUNDLE_FIELD_HTTP_PORT], host[BUNDLE_FIELD_SSL_PORT], host[BUNDLE_FIELD_CERTIFICATE], public_key_pem ) notary_host_tmp_id += 1
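An illustrative version-1 bundle (all field values are made up) matching the schema parse_version_1 expects:

    import json

    bundle_json = json.dumps({
        'version': 1,
        'name': 'Example Notary',
        'bundle_location': 'https://notary.example.org/example.notary',
        'hosts': [{
            'host': 'notary.example.org',
            'http_port': 80,
            'ssl_port': 443,
            'certificate': '-----BEGIN CERTIFICATE-----\n...',  # placeholder PEM
        }]
    })
    # bundle = NotaryBundle(text=bundle_json)
    # bundle.get_json_string() would serialize it back via get_dict().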
gpl-3.0
sujoykroy/motion-picture
editor/MotionPicture/gui_utils/__init__.py
1
1739
from .helper_dialogs import TextInputDialog, YesNoDialog from .name_value_combo_box import NameValueComboBox from .file_op import FileOp, FileSelect from .misc_prop_boxes import CommonShapePropBox from .misc_prop_boxes import RectangleShapePropBox from .misc_prop_boxes import OvalShapePropBox from .misc_prop_boxes import RingShapePropBox from .misc_prop_boxes import MultiShapePropBox from .misc_prop_boxes import TextShapePropBox from .misc_prop_boxes import ImageShapePropBox from .misc_prop_boxes import ImageSeqShapePropBox from .misc_prop_boxes import AudioShapePropBox from .misc_prop_boxes import VideoShapePropBox from .misc_prop_boxes import CameraShapePropBox from .misc_prop_boxes import ThreeDShapePropBox from .misc_prop_boxes import DocumentShapePropBox from .misc_prop_boxes import CurvePointGroupShapePropBox from .misc_prop_boxes import CustomShapePropBox from .misc_prop_boxes import CustomPropsBox from .misc_prop_boxes import CurveJoinerShapePropBox from .misc_prop_boxes import MimicShapePropBox from .misc_prop_boxes import CurveShapePropBox from .curve_smooth_prop_box import CurveSmoothPropBox from .shape_form_prop_box import ShapeFormPropBox from .multi_shape_internal_prop_box import MultiShapeInternalPropBox from .time_slice_prop_box import TimeSlicePropBox from .menu_builder import MenuBar from .recent_files_manager import RecentFilesManager from .file_list_preview import FileListPreview from .array_viewer import ArrayViewer from .meter_bar import MeterBar from .multi_shape_tree_view import MultiShapeTreeView from .prop_grid import PropGrid from .image_combo_box import ImageComboBox from .point_group_shape_list_box import PointGroupShapeListBox from .interior_pose_box import InteriorPoseBox
gpl-3.0
kevinmel2000/brython
www/src/Lib/test/unittests/test_kqueue.py
27
6772
""" Tests for kqueue wrapper. """ import errno import os import select import socket import sys import time import unittest from test import support if not hasattr(select, "kqueue"): raise unittest.SkipTest("test works only on BSD") class TestKQueue(unittest.TestCase): def test_create_queue(self): kq = select.kqueue() self.assertTrue(kq.fileno() > 0, kq.fileno()) self.assertTrue(not kq.closed) kq.close() self.assertTrue(kq.closed) self.assertRaises(ValueError, kq.fileno) def test_create_event(self): from operator import lt, le, gt, ge fd = os.open(os.devnull, os.O_WRONLY) self.addCleanup(os.close, fd) ev = select.kevent(fd) other = select.kevent(1000) self.assertEqual(ev.ident, fd) self.assertEqual(ev.filter, select.KQ_FILTER_READ) self.assertEqual(ev.flags, select.KQ_EV_ADD) self.assertEqual(ev.fflags, 0) self.assertEqual(ev.data, 0) self.assertEqual(ev.udata, 0) self.assertEqual(ev, ev) self.assertNotEqual(ev, other) self.assertTrue(ev < other) self.assertTrue(other >= ev) for op in lt, le, gt, ge: self.assertRaises(TypeError, op, ev, None) self.assertRaises(TypeError, op, ev, 1) self.assertRaises(TypeError, op, ev, "ev") ev = select.kevent(fd, select.KQ_FILTER_WRITE) self.assertEqual(ev.ident, fd) self.assertEqual(ev.filter, select.KQ_FILTER_WRITE) self.assertEqual(ev.flags, select.KQ_EV_ADD) self.assertEqual(ev.fflags, 0) self.assertEqual(ev.data, 0) self.assertEqual(ev.udata, 0) self.assertEqual(ev, ev) self.assertNotEqual(ev, other) ev = select.kevent(fd, select.KQ_FILTER_WRITE, select.KQ_EV_ONESHOT) self.assertEqual(ev.ident, fd) self.assertEqual(ev.filter, select.KQ_FILTER_WRITE) self.assertEqual(ev.flags, select.KQ_EV_ONESHOT) self.assertEqual(ev.fflags, 0) self.assertEqual(ev.data, 0) self.assertEqual(ev.udata, 0) self.assertEqual(ev, ev) self.assertNotEqual(ev, other) ev = select.kevent(1, 2, 3, 4, 5, 6) self.assertEqual(ev.ident, 1) self.assertEqual(ev.filter, 2) self.assertEqual(ev.flags, 3) self.assertEqual(ev.fflags, 4) self.assertEqual(ev.data, 5) self.assertEqual(ev.udata, 6) self.assertEqual(ev, ev) self.assertNotEqual(ev, other) bignum = 0x7fff ev = select.kevent(bignum, 1, 2, 3, bignum - 1, bignum) self.assertEqual(ev.ident, bignum) self.assertEqual(ev.filter, 1) self.assertEqual(ev.flags, 2) self.assertEqual(ev.fflags, 3) self.assertEqual(ev.data, bignum - 1) self.assertEqual(ev.udata, bignum) self.assertEqual(ev, ev) self.assertNotEqual(ev, other) def test_queue_event(self): serverSocket = socket.socket() serverSocket.bind(('127.0.0.1', 0)) serverSocket.listen(1) client = socket.socket() client.setblocking(False) try: client.connect(('127.0.0.1', serverSocket.getsockname()[1])) except socket.error as e: self.assertEqual(e.args[0], errno.EINPROGRESS) else: #raise AssertionError("Connect should have raised EINPROGRESS") pass # FreeBSD doesn't raise an exception here server, addr = serverSocket.accept() kq = select.kqueue() kq2 = select.kqueue.fromfd(kq.fileno()) ev = select.kevent(server.fileno(), select.KQ_FILTER_WRITE, select.KQ_EV_ADD | select.KQ_EV_ENABLE) kq.control([ev], 0) ev = select.kevent(server.fileno(), select.KQ_FILTER_READ, select.KQ_EV_ADD | select.KQ_EV_ENABLE) kq.control([ev], 0) ev = select.kevent(client.fileno(), select.KQ_FILTER_WRITE, select.KQ_EV_ADD | select.KQ_EV_ENABLE) kq2.control([ev], 0) ev = select.kevent(client.fileno(), select.KQ_FILTER_READ, select.KQ_EV_ADD | select.KQ_EV_ENABLE) kq2.control([ev], 0) events = kq.control(None, 4, 1) events = set((e.ident, e.filter) for e in events) self.assertEqual(events, set([ (client.fileno(), 
select.KQ_FILTER_WRITE), (server.fileno(), select.KQ_FILTER_WRITE)])) client.send(b"Hello!") server.send(b"world!!!") # We may need to call it several times for i in range(10): events = kq.control(None, 4, 1) if len(events) == 4: break time.sleep(1.0) else: self.fail('timeout waiting for event notifications') events = set((e.ident, e.filter) for e in events) self.assertEqual(events, set([ (client.fileno(), select.KQ_FILTER_WRITE), (client.fileno(), select.KQ_FILTER_READ), (server.fileno(), select.KQ_FILTER_WRITE), (server.fileno(), select.KQ_FILTER_READ)])) # Remove completely client, and server read part ev = select.kevent(client.fileno(), select.KQ_FILTER_WRITE, select.KQ_EV_DELETE) kq.control([ev], 0) ev = select.kevent(client.fileno(), select.KQ_FILTER_READ, select.KQ_EV_DELETE) kq.control([ev], 0) ev = select.kevent(server.fileno(), select.KQ_FILTER_READ, select.KQ_EV_DELETE) kq.control([ev], 0, 0) events = kq.control([], 4, 0.99) events = set((e.ident, e.filter) for e in events) self.assertEqual(events, set([ (server.fileno(), select.KQ_FILTER_WRITE)])) client.close() server.close() serverSocket.close() def testPair(self): kq = select.kqueue() a, b = socket.socketpair() a.send(b'foo') event1 = select.kevent(a, select.KQ_FILTER_READ, select.KQ_EV_ADD | select.KQ_EV_ENABLE) event2 = select.kevent(b, select.KQ_FILTER_READ, select.KQ_EV_ADD | select.KQ_EV_ENABLE) r = kq.control([event1, event2], 1, 1) self.assertTrue(r) self.assertFalse(r[0].flags & select.KQ_EV_ERROR) self.assertEqual(b.recv(r[0].data), b'foo') a.close() b.close() kq.close() def test_main(): support.run_unittest(TestKQueue) if __name__ == "__main__": test_main()
bsd-3-clause
aishee/sqlalchemy-migrate
migrate/tests/versioning/test_version.py
32
5951
#!/usr/bin/env python # -*- coding: utf-8 -*- from migrate.exceptions import * from migrate.versioning.version import * from migrate.tests import fixture class TestVerNum(fixture.Base): def test_invalid(self): """Disallow invalid version numbers""" versions = ('-1', -1, 'Thirteen', '') for version in versions: self.assertRaises(ValueError, VerNum, version) def test_str(self): """Test str and repr version numbers""" self.assertEqual(str(VerNum(2)), '2') self.assertEqual(repr(VerNum(2)), '<VerNum(2)>') def test_is(self): """Two version with the same number should be equal""" a = VerNum(1) b = VerNum(1) self.assert_(a is b) self.assertEqual(VerNum(VerNum(2)), VerNum(2)) def test_add(self): self.assertEqual(VerNum(1) + VerNum(1), VerNum(2)) self.assertEqual(VerNum(1) + 1, 2) self.assertEqual(VerNum(1) + 1, '2') self.assert_(isinstance(VerNum(1) + 1, VerNum)) def test_sub(self): self.assertEqual(VerNum(1) - 1, 0) self.assert_(isinstance(VerNum(1) - 1, VerNum)) self.assertRaises(ValueError, lambda: VerNum(0) - 1) def test_eq(self): """Two versions are equal""" self.assertEqual(VerNum(1), VerNum('1')) self.assertEqual(VerNum(1), 1) self.assertEqual(VerNum(1), '1') self.assertNotEqual(VerNum(1), 2) def test_ne(self): self.assert_(VerNum(1) != 2) self.assertFalse(VerNum(1) != 1) def test_lt(self): self.assertFalse(VerNum(1) < 1) self.assert_(VerNum(1) < 2) self.assertFalse(VerNum(2) < 1) def test_le(self): self.assert_(VerNum(1) <= 1) self.assert_(VerNum(1) <= 2) self.assertFalse(VerNum(2) <= 1) def test_gt(self): self.assertFalse(VerNum(1) > 1) self.assertFalse(VerNum(1) > 2) self.assert_(VerNum(2) > 1) def test_ge(self): self.assert_(VerNum(1) >= 1) self.assert_(VerNum(2) >= 1) self.assertFalse(VerNum(1) >= 2) class TestVersion(fixture.Pathed): def setUp(self): super(TestVersion, self).setUp() def test_str_to_filename(self): self.assertEquals(str_to_filename(''), '') self.assertEquals(str_to_filename('__'), '_') self.assertEquals(str_to_filename('a'), 'a') self.assertEquals(str_to_filename('Abc Def'), 'Abc_Def') self.assertEquals(str_to_filename('Abc "D" Ef'), 'Abc_D_Ef') self.assertEquals(str_to_filename("Abc's Stuff"), 'Abc_s_Stuff') self.assertEquals(str_to_filename("a b"), 'a_b') self.assertEquals(str_to_filename("a.b to c"), 'a_b_to_c') def test_collection(self): """Let's see how we handle versions collection""" coll = Collection(self.temp_usable_dir) coll.create_new_python_version("foo bar") coll.create_new_sql_version("postgres", "foo bar") coll.create_new_sql_version("sqlite", "foo bar") coll.create_new_python_version("") self.assertEqual(coll.latest, 4) self.assertEqual(len(coll.versions), 4) self.assertEqual(coll.version(4), coll.version(coll.latest)) coll2 = Collection(self.temp_usable_dir) self.assertEqual(coll.versions, coll2.versions) Collection.clear() def test_old_repository(self): open(os.path.join(self.temp_usable_dir, '1'), 'w') self.assertRaises(Exception, Collection, self.temp_usable_dir) #TODO: def test_collection_unicode(self): # pass def test_create_new_python_version(self): coll = Collection(self.temp_usable_dir) coll.create_new_python_version("'") ver = coll.version() self.assert_(ver.script().source()) def test_create_new_sql_version(self): coll = Collection(self.temp_usable_dir) coll.create_new_sql_version("sqlite", "foo bar") ver = coll.version() ver_up = ver.script('sqlite', 'upgrade') ver_down = ver.script('sqlite', 'downgrade') ver_up.source() ver_down.source() def test_selection(self): """Verify right sql script is selected""" # Create empty directory. 
path = self.tmp_repos() os.mkdir(path) # Create files -- files must be present or you'll get an exception later. python_file = '001_initial_.py' sqlite_upgrade_file = '001_sqlite_upgrade.sql' default_upgrade_file = '001_default_upgrade.sql' for file_ in [sqlite_upgrade_file, default_upgrade_file, python_file]: filepath = '%s/%s' % (path, file_) open(filepath, 'w').close() ver = Version(1, path, [sqlite_upgrade_file]) self.assertEquals(os.path.basename(ver.script('sqlite', 'upgrade').path), sqlite_upgrade_file) ver = Version(1, path, [default_upgrade_file]) self.assertEquals(os.path.basename(ver.script('default', 'upgrade').path), default_upgrade_file) ver = Version(1, path, [sqlite_upgrade_file, default_upgrade_file]) self.assertEquals(os.path.basename(ver.script('sqlite', 'upgrade').path), sqlite_upgrade_file) ver = Version(1, path, [sqlite_upgrade_file, default_upgrade_file, python_file]) self.assertEquals(os.path.basename(ver.script('postgres', 'upgrade').path), default_upgrade_file) ver = Version(1, path, [sqlite_upgrade_file, python_file]) self.assertEquals(os.path.basename(ver.script('postgres', 'upgrade').path), python_file) def test_bad_version(self): ver = Version(1, self.temp_usable_dir, []) self.assertRaises(ScriptError, ver.add_script, '123.sql') pyscript = os.path.join(self.temp_usable_dir, 'bla.py') open(pyscript, 'w') ver.add_script(pyscript) self.assertRaises(ScriptError, ver.add_script, 'bla.py')
mit
gwct/core
legacy-scripts/count_pos.py
1
2952
#!/usr/bin/python
#############################################################################
# Script to count the total number of positions in a fasta file or a directory full of fasta files.
#
# Usage: python count_pos.py [input file or directory] [1,0]
#
# The script first checks if the input is a file or directory. If it is a file it will just count the
# positions in that file and display. If it is a directory it will count the number of positions in
# all files and print the sum. If the second parameter is set to 1, it will also print the number of
# positions in each file separately.
#
# Dependencies: core
#
# Gregg Thomas, Summer 2015
#############################################################################

import sys, os
sys.path.append(os.path.dirname(os.path.realpath(__file__)) + "/corelib/")
import core

# An input file or directory is required; the display flag is optional.
if len(sys.argv) not in [2,3]:
    print("Usage:\t$ count_pos.py [input directory or filename] [1,0 to display individual file counts or not]");
    sys.exit();

ins = sys.argv[1];
disp_file = 0;

if len(sys.argv) > 2:
    disp_file = sys.argv[2];
    if disp_file not in ["0","1"]:
        print("Not printing file counts.");
        disp_file = 0;
    disp_file = int(disp_file);

print("=======================================================================");
print("\t\t\t" + core.getDateTime());
print("Counting the total number of positions (AAs or NTs) in:\t" + ins);

if os.path.isfile(ins):
    if disp_file == 1:
        print("----------");
        print("Sequence\tLength");

    inseqs = core.fastaGetDict(ins);
    tot_pos = 0;

    for seq in inseqs:
        if disp_file == 1:
            print(seq + "\t" + str(len(inseqs[seq])));
        tot_pos = tot_pos + len(inseqs[seq]);

    print("----------");
    print("Total sequences:\t" + str(len(inseqs)));
    print("Total positions:\t" + str(tot_pos));
    print("=======================================================================");

else:
    if not ins.endswith("/"):
        ins = ins + "/";
    filelist = os.listdir(ins);
    tot_pos = 0;
    numlines = len(filelist);
    numbars = 0;
    donepercent = [];
    i = 0;

    for each in filelist:
        if disp_file == 0:
            numbars, donepercent = core.loadingBar(i, numlines, donepercent, numbars);
        elif disp_file == 1:
            print("----------");
            print(each);
            print("Sequence\tLength");
        i = i + 1;

        if each.find(".fa") == -1:
            continue;

        specpos = 0;
        infilename = ins + each;
        inseqs = core.fastaGetDict(infilename);

        for seq in inseqs:
            tot_pos = tot_pos + len(inseqs[seq]);
            if disp_file == 1:
                specpos = specpos + len(inseqs[seq]);
                print(seq + "\t" + str(len(inseqs[seq])));

        if disp_file == 1:
            print("Total\t" + str(specpos));

    if disp_file == 0:
        pstring = "100.0% complete.";
        sys.stderr.write('\b' * len(pstring) + pstring);
    elif disp_file == 1:
        print("----------");

    print("\n" + core.getTime() + " Done!");
    print("-----");
    print("Total residues:\t" + str(tot_pos));
    print("=======================================================================");
gpl-3.0
renzon/pswdless
backend/test/base.py
31
2338
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import json
import unittest

from google.appengine.api import files
from google.appengine.ext import testbed
import webapp2
from webapp2_extras import i18n

from config.template import render

# workaround for i18n. without this test will not run
app = webapp2.WSGIApplication(
    [webapp2.Route('/', None, name='upload_handler')])

request = webapp2.Request({'SERVER_NAME': 'test',
                           'SERVER_PORT': 80,
                           'wsgi.url_scheme': 'http'})
request.app = app
app.set_globals(app=app, request=request)

i18n.default_config['default_locale'] = 'en_US'
i18n.default_config['default_timezone'] = 'UTC'
# End of workaround


class GAETestCase(unittest.TestCase):
    def setUp(self):
        self.testbed = testbed.Testbed()
        self.testbed.setup_env(app_id="_")
        self.testbed.activate()
        self.testbed.init_datastore_v3_stub()
        self.testbed.init_user_stub()
        self.testbed.init_urlfetch_stub()
        self.testbed.init_memcache_stub()
        self.testbed.init_mail_stub()
        self.testbed.init_taskqueue_stub()

    def tearDown(self):
        self.testbed.deactivate()

    def assert_can_render(self, template_response):
        """
        Asserts that a template can be rendered. It raises an Exception otherwise
        :param template_response: a TemplateResponse instance
        :return:
        """
        render(template_response.template_path, template_response.context)

    def assert_can_serialize_as_json(self, json_response):
        """
        Asserts that a json_response contains json serializable data. It raises an Exception otherwise
        :param json_response: a JsonResponse or JsonUnsecureResponse instance
        :return:
        """
        json.dumps(json_response.context)


class BlobstoreTestCase(GAETestCase):
    def setUp(self):
        GAETestCase.setUp(self)
        self.testbed.init_blobstore_stub()
        self.testbed.init_files_stub()

    def save_blob(self, blobdata='blobdata'):
        file_name = files.blobstore.create(mime_type='application/octet-stream')
        with files.open(file_name, 'a') as f:
            f.write(blobdata)
        files.finalize(file_name)
        blob_key = files.blobstore.get_blob_key(file_name)
        return blob_key
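A minimal sketch of how these base classes are meant to be used (the handler module and test names are hypothetical):

    import index  # hypothetical module under test
    from base import GAETestCase

    class IndexTests(GAETestCase):
        def test_index_renders(self):
            # hypothetical handler returning a TemplateResponse-like object
            response = index.index()
            self.assert_can_render(response)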
gpl-2.0
gorjuce/odoo
addons/analytic_contract_hr_expense/analytic_contract_hr_expense.py
223
7860
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from openerp.osv import fields, osv from openerp.tools.translate import _ from openerp.addons.decimal_precision import decimal_precision as dp class account_analytic_account(osv.osv): _name = "account.analytic.account" _inherit = "account.analytic.account" def _get_total_estimation(self, account): tot_est = super(account_analytic_account, self)._get_total_estimation(account) if account.charge_expenses: tot_est += account.est_expenses return tot_est def _get_total_invoiced(self, account): total_invoiced = super(account_analytic_account, self)._get_total_invoiced(account) if account.charge_expenses: total_invoiced += account.expense_invoiced return total_invoiced def _get_total_remaining(self, account): total_remaining = super(account_analytic_account, self)._get_total_remaining(account) if account.charge_expenses: total_remaining += account.remaining_expense return total_remaining def _get_total_toinvoice(self, account): total_toinvoice = super(account_analytic_account, self)._get_total_toinvoice(account) if account.charge_expenses: total_toinvoice += account.expense_to_invoice return total_toinvoice def _remaining_expnse_calc(self, cr, uid, ids, name, arg, context=None): res = {} for account in self.browse(cr, uid, ids, context=context): if account.est_expenses != 0: res[account.id] = max(account.est_expenses - account.expense_invoiced, account.expense_to_invoice) else: res[account.id]=0.0 return res def _expense_to_invoice_calc(self, cr, uid, ids, name, arg, context=None): res = {} #We don't want consolidation for each of these fields because those complex computation is resource-greedy. for account in self.pool.get('account.analytic.account').browse(cr, uid, ids, context=context): cr.execute(""" SELECT product_id, sum(amount), user_id, to_invoice, sum(unit_amount), product_uom_id, line.name FROM account_analytic_line line LEFT JOIN account_analytic_journal journal ON (journal.id = line.journal_id) WHERE account_id = %s AND journal.type = 'purchase' AND invoice_id IS NULL AND to_invoice IS NOT NULL GROUP BY product_id, user_id, to_invoice, product_uom_id, line.name""", (account.id,)) res[account.id] = 0.0 for product_id, total_amount, user_id, factor_id, qty, uom, line_name in cr.fetchall(): #the amount to reinvoice is the real cost. 
We don't use the pricelist total_amount = -total_amount factor = self.pool.get('hr_timesheet_invoice.factor').browse(cr, uid, factor_id, context=context) res[account.id] += total_amount * (100 - factor.factor or 0.0) / 100.0 return res def _expense_invoiced_calc(self, cr, uid, ids, name, arg, context=None): lines_obj = self.pool.get('account.analytic.line') res = {} for account in self.browse(cr, uid, ids, context=context): res[account.id] = 0.0 line_ids = lines_obj.search(cr, uid, [('account_id','=', account.id), ('invoice_id','!=',False), ('to_invoice','!=', False), ('journal_id.type', '=', 'purchase')], context=context) #Put invoices in separate array in order not to calculate them double invoices = [] for line in lines_obj.browse(cr, uid, line_ids, context=context): if line.invoice_id not in invoices: invoices.append(line.invoice_id) for invoice in invoices: res[account.id] += invoice.amount_untaxed return res def _ca_invoiced_calc(self, cr, uid, ids, name, arg, context=None): result = super(account_analytic_account, self)._ca_invoiced_calc(cr, uid, ids, name, arg, context=context) for acc in self.browse(cr, uid, result.keys(), context=context): result[acc.id] = result[acc.id] - (acc.expense_invoiced or 0.0) return result _columns = { 'charge_expenses' : fields.boolean('Charge Expenses'), 'expense_invoiced' : fields.function(_expense_invoiced_calc, type="float"), 'expense_to_invoice' : fields.function(_expense_to_invoice_calc, type='float'), 'remaining_expense' : fields.function(_remaining_expnse_calc, type="float"), 'est_expenses': fields.float('Estimation of Expenses to Invoice'), 'ca_invoiced': fields.function(_ca_invoiced_calc, type='float', string='Invoiced Amount', help="Total customer invoiced amount for this account.", digits_compute=dp.get_precision('Account')), } def on_change_template(self, cr, uid, ids, template_id, date_start=False, context=None): res = super(account_analytic_account, self).on_change_template(cr, uid, ids, template_id, date_start=date_start, context=context) if template_id and 'value' in res: template = self.browse(cr, uid, template_id, context=context) res['value']['charge_expenses'] = template.charge_expenses res['value']['est_expenses'] = template.est_expenses return res def open_hr_expense(self, cr, uid, ids, context=None): mod_obj = self.pool.get('ir.model.data') act_obj = self.pool.get('ir.actions.act_window') dummy, act_window_id = mod_obj.get_object_reference(cr, uid, 'hr_expense', 'expense_all') result = act_obj.read(cr, uid, [act_window_id], context=context)[0] line_ids = self.pool.get('hr.expense.line').search(cr,uid,[('analytic_account', 'in', ids)]) result['domain'] = [('line_ids', 'in', line_ids)] names = [account.name for account in self.browse(cr, uid, ids, context=context)] result['name'] = _('Expenses of %s') % ','.join(names) result['context'] = {'analytic_account': ids[0]} result['view_type'] = 'form' return result def hr_to_invoice_expense(self, cr, uid, ids, context=None): domain = [('invoice_id','=',False),('to_invoice','!=',False), ('journal_id.type', '=', 'purchase'), ('account_id', 'in', ids)] names = [record.name for record in self.browse(cr, uid, ids, context=context)] name = _('Expenses to Invoice of %s') % ','.join(names) return { 'type': 'ir.actions.act_window', 'name': name, 'view_type': 'form', 'view_mode': 'tree,form', 'domain' : domain, 'res_model': 'account.analytic.line', 'nodestroy': True, } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
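The two computed-expense formulas in the file above reduce to simple arithmetic once the ORM plumbing is stripped away: the amount to reinvoice discounts the (sign-flipped) purchase cost by the invoicing factor, and the remaining expense is the larger of the uninvoiced estimate and what is still to invoice. A minimal sketch of just that arithmetic, with made-up figures and no OpenERP dependency (the sign flip assumes, as the code suggests, that purchase-journal lines carry negative amounts):

def expense_to_invoice(raw_cost, factor):
    # purchase-journal analytic lines are assumed negative, hence the flip
    total_amount = -raw_cost
    return total_amount * (100 - (factor or 0.0)) / 100.0

def remaining_expense(est_expenses, invoiced, to_invoice):
    if est_expenses != 0:
        return max(est_expenses - invoiced, to_invoice)
    return 0.0

assert expense_to_invoice(-200.0, 10.0) == 180.0      # 10% is kept back
assert remaining_expense(500.0, 300.0, 250.0) == 250.0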
ClovisIRex/Snake-django
env/lib/python3.6/site-packages/django/contrib/auth/urls.py
105
1140
# The views used below are normally mapped in django.contrib.admin.urls.py # This URLs file is used to provide a reliable view deployment for test purposes. # It is also provided as a convenience to those who want to deploy these URLs # elsewhere. from django.conf.urls import url from django.contrib.auth import views urlpatterns = [ url(r'^login/$', views.LoginView.as_view(), name='login'), url(r'^logout/$', views.LogoutView.as_view(), name='logout'), url(r'^password_change/$', views.PasswordChangeView.as_view(), name='password_change'), url(r'^password_change/done/$', views.PasswordChangeDoneView.as_view(), name='password_change_done'), url(r'^password_reset/$', views.PasswordResetView.as_view(), name='password_reset'), url(r'^password_reset/done/$', views.PasswordResetDoneView.as_view(), name='password_reset_done'), url(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$', views.PasswordResetConfirmView.as_view(), name='password_reset_confirm'), url(r'^reset/done/$', views.PasswordResetCompleteView.as_view(), name='password_reset_complete'), ]
mit
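These auth patterns are intended to be included from a project urlconf rather than served standalone; a typical mount point looks like the sketch below (the 'accounts/' prefix is a convention, not a requirement):

from django.conf.urls import include, url

urlpatterns = [
    # exposes login/, logout/, password_change/, password_reset/, etc.
    url(r'^accounts/', include('django.contrib.auth.urls')),
]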
robjordan/sitefinder
src/sitefinder_project/settings/production.py
1
1817
# In production set the environment variable like this: # DJANGO_SETTINGS_MODULE=sitefinder_project.settings.production from .base import * # NOQA import logging.config # For security and performance reasons, DEBUG is turned off DEBUG = False TEMPLATE_DEBUG = False # Must mention ALLOWED_HOSTS in production! # ALLOWED_HOSTS = ["sitefinder_project.com"] # Cache the templates in memory for speed-up loaders = [ ('django.template.loaders.cached.Loader', [ 'django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader', ]), ] TEMPLATES[0]['OPTIONS'].update({"loaders": loaders}) TEMPLATES[0].update({"APP_DIRS": False}) # Define STATIC_ROOT for the collectstatic command STATIC_ROOT = join(BASE_DIR, '..', 'site', 'static') # Log everything to the logs directory at the top LOGFILE_ROOT = join(dirname(BASE_DIR), 'logs') # Reset logging LOGGING_CONFIG = None LOGGING = { 'version': 1, 'disable_existing_loggers': False, 'formatters': { 'verbose': { 'format': "[%(asctime)s] %(levelname)s [%(pathname)s:%(lineno)s] %(message)s", 'datefmt': "%d/%b/%Y %H:%M:%S" }, 'simple': { 'format': '%(levelname)s %(message)s' }, }, 'handlers': { 'proj_log_file': { 'level': 'DEBUG', 'class': 'logging.FileHandler', 'filename': join(LOGFILE_ROOT, 'project.log'), 'formatter': 'verbose' }, 'console': { 'level': 'DEBUG', 'class': 'logging.StreamHandler', 'formatter': 'simple' } }, 'loggers': { 'project': { 'handlers': ['proj_log_file'], 'level': 'DEBUG', }, } } logging.config.dictConfig(LOGGING)
mit
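With the dictConfig above in place, any logger in the 'project' namespace writes to logs/project.log via the 'proj_log_file' handler. A small usage sketch (the module and message are illustrative, not part of the project):

import logging

# child loggers such as 'project.importer' propagate up to 'project'
logger = logging.getLogger('project.importer')

def import_sites(path):
    logger.debug("importing sites from %s", path)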
jellis18/libstempo
libstempo/utils.py
2
2332
"""Utility functions for noise models.""" import numpy as np def quantize_fast(times, flags, dt=1.0): isort = np.argsort(times) bucket_ref = [times[isort[0]]] bucket_ind = [[isort[0]]] for i in isort[1:]: if times[i] - bucket_ref[-1] < dt and flags[i] != "": bucket_ind[-1].append(i) else: bucket_ref.append(times[i]) bucket_ind.append([i]) # only keep epochs with 2 or more TOAs bucket_ind = [ind for ind in bucket_ind if len(ind) >= 2] avetoas = np.array([np.mean(times[l]) for l in bucket_ind], "d") U = np.zeros((len(times), len(bucket_ind)), "d") for i, l in enumerate(bucket_ind): U[l, i] = 1 return avetoas, U def create_fourier_design_matrix(t, nmodes, freq=False, Tspan=None, logf=False, fmin=None, fmax=None): """ Construct fourier design matrix from eq 11 of Lentati et al, 2013 :param t: vector of time series in seconds :param nmodes: number of fourier coefficients to use :param freq: option to output frequencies :param Tspan: option to some other Tspan :param logf: use log frequency spacing :param fmin: lower sampling frequency :param fmax: upper sampling frequency :return: F: fourier design matrix :return: f: Sampling frequencies (if freq=True) """ N = len(t) F = np.zeros((N, 2 * nmodes)) if Tspan is not None: T = Tspan else: T = t.max() - t.min() # define sampling frequencies if fmin is not None and fmax is not None: f = np.linspace(fmin, fmax, nmodes) else: f = np.linspace(1 / T, nmodes / T, nmodes) if logf: f = np.logspace(np.log10(1 / T), np.log10(nmodes / T), nmodes) Ffreqs = np.zeros(2 * nmodes) Ffreqs[0::2] = f Ffreqs[1::2] = f F[:, ::2] = np.sin(2 * np.pi * t[:, None] * f[None, :]) F[:, 1::2] = np.cos(2 * np.pi * t[:, None] * f[None, :]) if freq: return F, Ffreqs else: return F def powerlaw(f, log10_A=-16, gamma=5): """Power-law PSD. :param f: Sampling frequencies :param log10_A: log10 of red noise Amplitude [GW units] :param gamma: Spectral index of red noise process """ fyr = 1 / 3.16e7 return (10 ** log10_A) ** 2 / 12.0 / np.pi ** 2 * fyr ** (gamma - 3) * f ** (-gamma)
mit
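Both helpers are easy to exercise on synthetic TOAs; a quick sketch with toy numbers (the import path follows the file's location in the package):

import numpy as np
from libstempo.utils import quantize_fast, create_fourier_design_matrix

# three TOAs within 1 s form one epoch; the lone fourth TOA is dropped,
# since only epochs with two or more TOAs are kept
times = np.array([0.0, 0.2, 0.4, 10.0])
flags = np.array(["L", "L", "L", "L"])

avetoas, U = quantize_fast(times, flags, dt=1.0)
print(avetoas)      # [0.2]
print(U.shape)      # (4, 1)

F, f = create_fourier_design_matrix(times, nmodes=2, freq=True)
print(F.shape)      # (4, 4): one sine and one cosine column per frequency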
anurag-ks/eden
modules/templates/MCOP/controllers.py
19
7385
# -*- coding: utf-8 -*- from gluon import current from gluon.html import * from s3 import FS, S3CustomController THEME = "MCOP" # ============================================================================= class index(S3CustomController): """ Custom Home Page """ def __call__(self): response = current.response s3 = response.s3 s3db = current.s3db output = {} self._view(THEME, "index.html") # Map # Enable Layers by default callback = '''S3.gis.show_map() var layer,layers=S3.gis.maps.default_map.layers for(var i=0,len=layers.length;i<len;i++){ layer=layers[i] layer_name=layer.name if((layer_name=='Alerts')||(layer_name=='Incidents')||(layer_name=='Tasks')){layer.setVisibility(true)}}''' gis = current.gis #config = gis.get_config() #config.zoom = 8 map = gis.show_map(width=600, height=600, callback=callback, catalogue_layers=True, collapsed=True, save=False, ) output["map"] = map # Alerts Data List resource = s3db.resource("cms_post") # Only show Alerts #resource.add_filter(FS("series_id$name").belongs(["Alert"])) #resource.add_filter(FS("post.series_id") != None) # Only show Open Alerts resource.add_filter(FS("expired") == False) # Only show Alerts which are linked to Open Incidents or not linked to any Incident resource.add_filter((FS("incident.closed") == False) | (FS("incident.id") == None)) list_id = "cms_post_datalist" list_fields = [#"series_id", "location_id", "date", "body", "created_by", "created_by$organisation_id", "document.file", "event_post.event_id", "event_post.incident_id", ] # Order with most recent Alert first orderby = "cms_post.date desc" datalist, numrows, ids = resource.datalist(fields = list_fields, #start = None, limit = 5, list_id = list_id, orderby = orderby, layout = s3db.cms_post_list_layout ) ajax_url = URL(c="cms", f="post", args="datalist.dl", vars={"list_id": list_id}) output[list_id] = datalist.html(ajaxurl = ajax_url, pagesize = 5 ) # Incidents Data List resource = s3db.resource("event_incident") # Only show Open Incidents resource.add_filter(FS("closed") == False) list_id = "event_incident_datalist" list_fields = ["name", "location_id", "zero_hour", "modified_by", "organisation_id", "comments", ] # Order with most recent Incident first orderby = "event_incident.zero_hour desc" datalist, numrows, ids = resource.datalist(fields = list_fields, #start = None, limit = 5, list_id = list_id, orderby = orderby, layout = s3db.event_incident_list_layout ) ajax_url = URL(c="event", f="incident", args="datalist.dl", vars={"list_id": list_id}) output[list_id] = datalist.html(ajaxurl = ajax_url, pagesize = 5 ) # Tasks Data List resource = s3db.resource("project_task") # Only show Active Tasks active_statuses = s3db.project_task_active_statuses resource.add_filter(FS("status").belongs(active_statuses)) # Only show Tasks which are linked to Open Incidents or not linked to any Incident resource.add_filter((FS("incident.incident_id$closed") == False) | (FS("incident.id") == None)) list_id = "project_task_datalist" list_fields = ["name", "description", "comments", "location_id", "priority", "status", "date_due", "pe_id", "task_project.project_id", #"organisation_id$logo", "modified_by", "source_url" ] # Order with most urgent Task first orderby = "project_task.date_due asc" datalist, numrows, ids = resource.datalist(fields = list_fields, #start = None, limit = 5, list_id = list_id, orderby = orderby, layout = s3db.project_task_list_layout ) ajax_url = URL(c="project", f="task", args="datalist.dl", vars={"list_id": list_id}) output[list_id] = datalist.html(ajaxurl = 
ajax_url, pagesize = 5 ) # MCOP RSS News Feed #s3.external_stylesheets.append("//www.google.com/uds/solutions/dynamicfeed/gfdynamicfeedcontrol.css") s3.scripts.append("//www.google.com/jsapi?key=notsupplied-wizard") s3.scripts.append("//www.google.com/uds/solutions/dynamicfeed/gfdynamicfeedcontrol.js") # feedCycleTime: milliseconds before feed is reloaded (5 minutes) s3.js_global.append( """ function LoadDynamicFeedControl(){ var feeds=[{title:'News', url:'http://psmcop.org/?feed=rss2'}] var options={ feedCycleTime:300000, numResults:5, stacked:true, horizontal:false, } new GFdynamicFeedControl(feeds,'feed-control',options); } google.load('feeds','1'); google.setOnLoadCallback(LoadDynamicFeedControl); """ ) # Data Buttons # Description of available data from s3db.cms import S3CMS resource_content = S3CMS.resource_content for item in response.menu: item["cms"] = resource_content(module = item["c"], resource = item["f"]) return output # ============================================================================= class about(S3CustomController): """ Custom Home Page """ def __call__(self): output = {} self._view(THEME, "about.html") return output # END =========================================================================
mit
suto/infernal-twin
build/reportlab/build/lib.linux-i686-2.7/reportlab/lib/colors.py
31
35959
#Copyright ReportLab Europe Ltd. 2000-2012
#see license.txt for license details
#history http://www.reportlab.co.uk/cgi-bin/viewcvs.cgi/public/reportlab/trunk/reportlab/lib/colors.py
__version__=''' $Id$ '''
__doc__='''Defines standard colour-handling classes and colour names.

We define standard classes to hold colours in two models: RGB and CMYK.
These can be constructed from several popular formats. We also include

- pre-built colour objects for the HTML standard colours
- pre-built colours used in ReportLab's branding
- various conversion and construction functions

These tests are here because doctest cannot find them otherwise.
>>> toColor('rgb(128,0,0)')==toColor('rgb(50%,0%,0%)')
True
>>> toColor('rgb(50%,0%,0%)')!=Color(0.5,0,0,1)
True
>>> toColor('hsl(0,100%,50%)')==toColor('rgb(255,0,0)')
True
>>> toColor('hsl(-120,100%,50%)')==toColor('rgb(0,0,255)')
True
>>> toColor('hsl(120,100%,50%)')==toColor('rgb(0,255,0)')
True
>>> toColor('rgba( 255,0,0,0.5)')==Color(1,0,0,0.5)
True
>>> toColor('cmyk(1,0,0,0 )')==CMYKColor(1,0,0,0)
True
>>> toColor('pcmyk( 100 , 0 , 0 , 0 )')==PCMYKColor(100,0,0,0)
True
>>> toColor('cmyka(1,0,0,0,0.5)')==CMYKColor(1,0,0,0,alpha=0.5)
True
>>> toColor('pcmyka(100,0,0,0,0.5)')==PCMYKColor(100,0,0,0,alpha=0.5)
True
>>> toColor('pcmyka(100,0,0,0)')
Traceback (most recent call last):
    ....
ValueError: css color 'pcmyka(100,0,0,0)' has wrong number of components
'''
import math, re, functools
from reportlab import isPy3
from reportlab.lib.rl_accel import fp_str
from reportlab.lib.utils import asNative, isStr
import collections

class Color:
    """This class is used to represent color. Components red, green, blue
    are in the range 0 (dark) to 1 (full intensity)."""

    def __init__(self, red=0, green=0, blue=0, alpha=1):
        "Initialize with red, green, blue in range [0-1]."
self.red = red self.green = green self.blue = blue self.alpha = alpha def __repr__(self): return "Color(%s)" % fp_str(*(self.red, self.green, self.blue,self.alpha)).replace(' ',',') @property def __key__(self): '''simple comparison by component; cmyk != color ever >>> cmp(Color(0,0,0),None) -1 >>> cmp(Color(0,0,0),black) 0 >>> cmp(Color(0,0,0),CMYKColor(0,0,0,1)),Color(0,0,0).rgba()==CMYKColor(0,0,0,1).rgba() (1, True) ''' return self.red, self.green, self.blue, self.alpha def __hash__(self): return hash(self.__key__) def __comparable__(self,other): return not isinstance(other,CMYKColor) and isinstance(other,Color) def __lt__(self,other): if not self.__comparable__(other): return True try: return self.__key__ < other.__key__ except: pass return True def __eq__(self,other): if not self.__comparable__(other): return False try: return self.__key__ == other.__key__ except: return False def rgb(self): "Returns a three-tuple of components" return (self.red, self.green, self.blue) def rgba(self): "Returns a four-tuple of components" return (self.red, self.green, self.blue, self.alpha) def bitmap_rgb(self): return tuple([int(x*255)&255 for x in self.rgb()]) def bitmap_rgba(self): return tuple([int(x*255)&255 for x in self.rgba()]) def hexval(self): return '0x%02x%02x%02x' % self.bitmap_rgb() def hexvala(self): return '0x%02x%02x%02x%02x' % self.bitmap_rgba() def int_rgb(self): v = self.bitmap_rgb() return v[0]<<16|v[1]<<8|v[2] def int_rgba(self): v = self.bitmap_rgba() return int((v[0]<<24|v[1]<<16|v[2]<<8|v[3])&0xffffff) _cKwds='red green blue alpha'.split() def cKwds(self): for k in self._cKwds: yield k,getattr(self,k) cKwds=property(cKwds) def clone(self,**kwds): '''copy then change values in kwds''' D = dict([kv for kv in self.cKwds]) D.update(kwds) return self.__class__(**D) def _lookupName(self,D={}): if not D: for n,v in getAllNamedColors().items(): if not isinstance(v,CMYKColor): t = v.red,v.green,v.blue if t in D: n = n+'/'+D[t] D[t] = n t = self.red,self.green,self.blue return t in D and D[t] or None @property def normalizedAlpha(self): return self.alpha if isPy3: Color = functools.total_ordering(Color) class CMYKColor(Color): """This represents colors using the CMYK (cyan, magenta, yellow, black) model commonly used in professional printing. This is implemented as a derived class so that renderers which only know about RGB "see it" as an RGB color through its 'red','green' and 'blue' attributes, according to an approximate function. The RGB approximation is worked out when the object in constructed, so the color attributes should not be changed afterwards. Extra attributes may be attached to the class to support specific ink models, and renderers may look for these.""" _scale = 1.0 def __init__(self, cyan=0, magenta=0, yellow=0, black=0, spotName=None, density=1, knockout=None, alpha=1): """ Initialize with four colors in range [0-1]. the optional spotName, density & knockout may be of use to specific renderers. spotName is intended for use as an identifier to the renderer not client programs. density is used to modify the overall amount of ink. knockout is a renderer dependent option that determines whether the applied colour knocksout (removes) existing colour; None means use the global default. """ self.cyan = cyan self.magenta = magenta self.yellow = yellow self.black = black self.spotName = spotName self.density = max(min(density,1),0) # force into right range self.knockout = knockout self.alpha = alpha # now work out the RGB approximation. 
override self.red, self.green, self.blue = cmyk2rgb( (cyan, magenta, yellow, black) ) if density<1: #density adjustment of rgb approximants, effectively mix with white r, g, b = self.red, self.green, self.blue r = density*(r-1)+1 g = density*(g-1)+1 b = density*(b-1)+1 self.red, self.green, self.blue = (r,g,b) def __repr__(self): return "%s(%s%s%s%s%s)" % (self.__class__.__name__, fp_str(self.cyan, self.magenta, self.yellow, self.black).replace(' ',','), (self.spotName and (',spotName='+repr(self.spotName)) or ''), (self.density!=1 and (',density='+fp_str(self.density)) or ''), (self.knockout is not None and (',knockout=%d' % self.knockout) or ''), (self.alpha is not None and (',alpha=%s' % self.alpha) or ''), ) def fader(self, n, reverse=False): '''return n colors based on density fade *NB* note this dosen't reach density zero''' scale = self._scale dd = scale/float(n) L = [self.clone(density=scale - i*dd) for i in range(n)] if reverse: L.reverse() return L @property def __key__(self): """obvious way to compare colours Comparing across the two color models is of limited use. >>> cmp(CMYKColor(0,0,0,1),None) -1 >>> cmp(CMYKColor(0,0,0,1),_CMYK_black) 0 >>> cmp(PCMYKColor(0,0,0,100),_CMYK_black) 0 >>> cmp(CMYKColor(0,0,0,1),Color(0,0,1)),Color(0,0,0).rgba()==CMYKColor(0,0,0,1).rgba() (-1, True) """ return self.cyan, self.magenta, self.yellow, self.black, self.density, self.spotName, self.alpha def __comparable__(self,other): return isinstance(other,CMYKColor) def cmyk(self): "Returns a tuple of four color components - syntactic sugar" return (self.cyan, self.magenta, self.yellow, self.black) def cmyka(self): "Returns a tuple of five color components - syntactic sugar" return (self.cyan, self.magenta, self.yellow, self.black, self.alpha) def _density_str(self): return fp_str(self.density) _cKwds='cyan magenta yellow black density alpha spotName knockout'.split() def _lookupName(self,D={}): if not D: for n,v in getAllNamedColors().items(): if isinstance(v,CMYKColor): t = v.cyan,v.magenta,v.yellow,v.black if t in D: n = n+'/'+D[t] D[t] = n t = self.cyan,self.magenta,self.yellow,self.black return t in D and D[t] or None @property def normalizedAlpha(self): return self.alpha*self._scale class PCMYKColor(CMYKColor): '''100 based CMYKColor with density and a spotName; just like Rimas uses''' _scale = 100. def __init__(self,cyan,magenta,yellow,black,density=100,spotName=None,knockout=None,alpha=100): CMYKColor.__init__(self,cyan/100.,magenta/100.,yellow/100.,black/100.,spotName,density/100.,knockout=knockout,alpha=alpha/100.) def __repr__(self): return "%s(%s%s%s%s%s)" % (self.__class__.__name__, fp_str(self.cyan*100, self.magenta*100, self.yellow*100, self.black*100).replace(' ',','), (self.spotName and (',spotName='+repr(self.spotName)) or ''), (self.density!=1 and (',density='+fp_str(self.density*100)) or ''), (self.knockout is not None and (',knockout=%d' % self.knockout) or ''), (self.alpha is not None and (',alpha=%s' % (fp_str(self.alpha*100))) or ''), ) def cKwds(self): K=self._cKwds S=K[:6] for k in self._cKwds: v=getattr(self,k) if k in S: v*=100 yield k,v cKwds=property(cKwds) class CMYKColorSep(CMYKColor): '''special case color for making separating pdfs''' _scale = 1. 
def __init__(self, cyan=0, magenta=0, yellow=0, black=0, spotName=None, density=1,alpha=1): CMYKColor.__init__(self,cyan,magenta,yellow,black,spotName,density,knockout=None,alpha=alpha) _cKwds='cyan magenta yellow black density alpha spotName'.split() class PCMYKColorSep(PCMYKColor,CMYKColorSep): '''special case color for making separating pdfs''' _scale = 100. def __init__(self, cyan=0, magenta=0, yellow=0, black=0, spotName=None, density=100, alpha=100): PCMYKColor.__init__(self,cyan,magenta,yellow,black,density,spotName,knockout=None,alpha=alpha) _cKwds='cyan magenta yellow black density alpha spotName'.split() def cmyk2rgb(cmyk,density=1): "Convert from a CMYK color tuple to an RGB color tuple" c,m,y,k = cmyk # From the Adobe Postscript Ref. Manual 2nd ed. r = 1.0 - min(1.0, c + k) g = 1.0 - min(1.0, m + k) b = 1.0 - min(1.0, y + k) return (r,g,b) def rgb2cmyk(r,g,b): '''one way to get cmyk from rgb''' c = 1 - r m = 1 - g y = 1 - b k = min(c,m,y) c = min(1,max(0,c-k)) m = min(1,max(0,m-k)) y = min(1,max(0,y-k)) k = min(1,max(0,k)) return (c,m,y,k) def color2bw(colorRGB): "Transform an RGB color to a black and white equivalent." col = colorRGB r, g, b, a = col.red, col.green, col.blue, col.alpha n = (r + g + b) / 3.0 bwColorRGB = Color(n, n, n, a) return bwColorRGB def HexColor(val, htmlOnly=False, hasAlpha=False): """This function converts a hex string, or an actual integer number, into the corresponding color. E.g., in "#AABBCC" or 0xAABBCC, AA is the red, BB is the green, and CC is the blue (00-FF). An alpha value can also be given in the form #AABBCCDD or 0xAABBCCDD where DD is the alpha value if hasAlpha is True. For completeness I assume that #aabbcc or 0xaabbcc are hex numbers otherwise a pure integer is converted as decimal rgb. If htmlOnly is true, only the #aabbcc form is allowed. >>> HexColor('#ffffff') Color(1,1,1,1) >>> HexColor('#FFFFFF') Color(1,1,1,1) >>> HexColor('0xffffff') Color(1,1,1,1) >>> HexColor('16777215') Color(1,1,1,1) An '0x' or '#' prefix is required for hex (as opposed to decimal): >>> HexColor('ffffff') Traceback (most recent call last): ValueError: invalid literal for int() with base 10: 'ffffff' >>> HexColor('#FFFFFF', htmlOnly=True) Color(1,1,1,1) >>> HexColor('0xffffff', htmlOnly=True) Traceback (most recent call last): ValueError: not a hex string >>> HexColor('16777215', htmlOnly=True) Traceback (most recent call last): ValueError: not a hex string """ #" for emacs if isStr(val): val = asNative(val) b = 10 if val[:1] == '#': val = val[1:] b = 16 if len(val) == 8: alpha = True else: if htmlOnly: raise ValueError('not a hex string') if val[:2].lower() == '0x': b = 16 val = val[2:] if len(val) == 8: alpha = True val = int(val,b) if hasAlpha: return Color(((val>>24)&0xFF)/255.0,((val>>16)&0xFF)/255.0,((val>>8)&0xFF)/255.0,(val&0xFF)/255.0) return Color(((val>>16)&0xFF)/255.0,((val>>8)&0xFF)/255.0,(val&0xFF)/255.0) def linearlyInterpolatedColor(c0, c1, x0, x1, x): """ Linearly interpolates colors. Can handle RGB, CMYK and PCMYK colors - give ValueError if colours aren't the same. Doesn't currently handle 'Spot Color Interpolation'. """ if c0.__class__ != c1.__class__: raise ValueError("Color classes must be the same for interpolation!\nGot %r and %r'"%(c0,c1)) if x1<x0: x0,x1,c0,c1 = x1,x0,c1,c0 # normalized so x1>x0 if x<x0-1e-8 or x>x1+1e-8: # fudge factor for numerical problems raise ValueError("Can't interpolate: x=%f is not between %f and %f!" 
% (x,x0,x1)) if x<=x0: return c0 elif x>=x1: return c1 cname = c0.__class__.__name__ dx = float(x1-x0) x = x-x0 if cname == 'Color': # RGB r = c0.red+x*(c1.red - c0.red)/dx g = c0.green+x*(c1.green- c0.green)/dx b = c0.blue+x*(c1.blue - c0.blue)/dx a = c0.alpha+x*(c1.alpha - c0.alpha)/dx return Color(r,g,b,alpha=a) elif cname == 'CMYKColor': if cmykDistance(c0,c1)<1e-8: #colors same do density and preserve spotName if any assert c0.spotName == c1.spotName, "Identical cmyk, but different spotName" c = c0.cyan m = c0.magenta y = c0.yellow k = c0.black d = c0.density+x*(c1.density - c0.density)/dx a = c0.alpha+x*(c1.alpha - c0.alpha)/dx return CMYKColor(c,m,y,k, density=d, spotName=c0.spotName, alpha=a) elif cmykDistance(c0,_CMYK_white)<1e-8: #special c0 is white c = c1.cyan m = c1.magenta y = c1.yellow k = c1.black d = x*c1.density/dx a = x*c1.alpha/dx return CMYKColor(c,m,y,k, density=d, spotName=c1.spotName, alpha=a) elif cmykDistance(c1,_CMYK_white)<1e-8: #special c1 is white c = c0.cyan m = c0.magenta y = c0.yellow k = c0.black d = x*c0.density/dx d = c0.density*(1-x/dx) a = c0.alpha*(1-x/dx) return PCMYKColor(c,m,y,k, density=d, spotName=c0.spotName, alpha=a) else: c = c0.cyan+x*(c1.cyan - c0.cyan)/dx m = c0.magenta+x*(c1.magenta - c0.magenta)/dx y = c0.yellow+x*(c1.yellow - c0.yellow)/dx k = c0.black+x*(c1.black - c0.black)/dx d = c0.density+x*(c1.density - c0.density)/dx a = c0.alpha+x*(c1.alpha - c0.alpha)/dx return CMYKColor(c,m,y,k, density=d, alpha=a) elif cname == 'PCMYKColor': if cmykDistance(c0,c1)<1e-8: #colors same do density and preserve spotName if any assert c0.spotName == c1.spotName, "Identical cmyk, but different spotName" c = c0.cyan m = c0.magenta y = c0.yellow k = c0.black d = c0.density+x*(c1.density - c0.density)/dx a = c0.alpha+x*(c1.alpha - c0.alpha)/dx return PCMYKColor(c*100,m*100,y*100,k*100, density=d*100, spotName=c0.spotName, alpha=100*a) elif cmykDistance(c0,_CMYK_white)<1e-8: #special c0 is white c = c1.cyan m = c1.magenta y = c1.yellow k = c1.black d = x*c1.density/dx a = x*c1.alpha/dx return PCMYKColor(c*100,m*100,y*100,k*100, density=d*100, spotName=c1.spotName, alpha=a*100) elif cmykDistance(c1,_CMYK_white)<1e-8: #special c1 is white c = c0.cyan m = c0.magenta y = c0.yellow k = c0.black d = x*c0.density/dx d = c0.density*(1-x/dx) a = c0.alpha*(1-x/dx) return PCMYKColor(c*100,m*100,y*100,k*100, density=d*100, spotName=c0.spotName, alpha=a*100) else: c = c0.cyan+x*(c1.cyan - c0.cyan)/dx m = c0.magenta+x*(c1.magenta - c0.magenta)/dx y = c0.yellow+x*(c1.yellow - c0.yellow)/dx k = c0.black+x*(c1.black - c0.black)/dx d = c0.density+x*(c1.density - c0.density)/dx a = c0.alpha+x*(c1.alpha - c0.alpha)/dx return PCMYKColor(c*100,m*100,y*100,k*100, density=d*100, alpha=a*100) else: raise ValueError("Can't interpolate: Unknown color class %s!" 
% cname) def obj_R_G_B(c): '''attempt to convert an object to (red,green,blue)''' if isinstance(c,Color): return c.red,c.green,c.blue elif isinstance(c,(tuple,list)): if len(c)==3: return tuple(c) elif len(c)==4: return toColor(c).rgb() else: raise ValueError('obj_R_G_B(%r) bad argument' % (c)) # special case -- indicates no drawing should be done # this is a hangover from PIDDLE - suggest we ditch it since it is not used anywhere transparent = Color(0,0,0,alpha=0) _CMYK_white=CMYKColor(0,0,0,0) _PCMYK_white=PCMYKColor(0,0,0,0) _CMYK_black=CMYKColor(0,0,0,1) _PCMYK_black=PCMYKColor(0,0,0,100) # Special colors ReportLabBlueOLD = HexColor(0x4e5688) ReportLabBlue = HexColor(0x00337f) ReportLabBluePCMYK = PCMYKColor(100,65,0,30,spotName='Pantone 288U') ReportLabLightBlue = HexColor(0xb7b9d3) ReportLabFidBlue=HexColor(0x3366cc) ReportLabFidRed=HexColor(0xcc0033) ReportLabGreen = HexColor(0x336600) ReportLabLightGreen = HexColor(0x339933) # color constants -- mostly from HTML standard aliceblue = HexColor(0xF0F8FF) antiquewhite = HexColor(0xFAEBD7) aqua = HexColor(0x00FFFF) aquamarine = HexColor(0x7FFFD4) azure = HexColor(0xF0FFFF) beige = HexColor(0xF5F5DC) bisque = HexColor(0xFFE4C4) black = HexColor(0x000000) blanchedalmond = HexColor(0xFFEBCD) blue = HexColor(0x0000FF) blueviolet = HexColor(0x8A2BE2) brown = HexColor(0xA52A2A) burlywood = HexColor(0xDEB887) cadetblue = HexColor(0x5F9EA0) chartreuse = HexColor(0x7FFF00) chocolate = HexColor(0xD2691E) coral = HexColor(0xFF7F50) cornflowerblue = cornflower = HexColor(0x6495ED) cornsilk = HexColor(0xFFF8DC) crimson = HexColor(0xDC143C) cyan = HexColor(0x00FFFF) darkblue = HexColor(0x00008B) darkcyan = HexColor(0x008B8B) darkgoldenrod = HexColor(0xB8860B) darkgray = HexColor(0xA9A9A9) darkgrey = darkgray darkgreen = HexColor(0x006400) darkkhaki = HexColor(0xBDB76B) darkmagenta = HexColor(0x8B008B) darkolivegreen = HexColor(0x556B2F) darkorange = HexColor(0xFF8C00) darkorchid = HexColor(0x9932CC) darkred = HexColor(0x8B0000) darksalmon = HexColor(0xE9967A) darkseagreen = HexColor(0x8FBC8B) darkslateblue = HexColor(0x483D8B) darkslategray = HexColor(0x2F4F4F) darkslategrey = darkslategray darkturquoise = HexColor(0x00CED1) darkviolet = HexColor(0x9400D3) deeppink = HexColor(0xFF1493) deepskyblue = HexColor(0x00BFFF) dimgray = HexColor(0x696969) dimgrey = dimgray dodgerblue = HexColor(0x1E90FF) firebrick = HexColor(0xB22222) floralwhite = HexColor(0xFFFAF0) forestgreen = HexColor(0x228B22) fuchsia = HexColor(0xFF00FF) gainsboro = HexColor(0xDCDCDC) ghostwhite = HexColor(0xF8F8FF) gold = HexColor(0xFFD700) goldenrod = HexColor(0xDAA520) gray = HexColor(0x808080) grey = gray green = HexColor(0x008000) greenyellow = HexColor(0xADFF2F) honeydew = HexColor(0xF0FFF0) hotpink = HexColor(0xFF69B4) indianred = HexColor(0xCD5C5C) indigo = HexColor(0x4B0082) ivory = HexColor(0xFFFFF0) khaki = HexColor(0xF0E68C) lavender = HexColor(0xE6E6FA) lavenderblush = HexColor(0xFFF0F5) lawngreen = HexColor(0x7CFC00) lemonchiffon = HexColor(0xFFFACD) lightblue = HexColor(0xADD8E6) lightcoral = HexColor(0xF08080) lightcyan = HexColor(0xE0FFFF) lightgoldenrodyellow = HexColor(0xFAFAD2) lightgreen = HexColor(0x90EE90) lightgrey = HexColor(0xD3D3D3) lightpink = HexColor(0xFFB6C1) lightsalmon = HexColor(0xFFA07A) lightseagreen = HexColor(0x20B2AA) lightskyblue = HexColor(0x87CEFA) lightslategray = HexColor(0x778899) lightslategrey = lightslategray lightsteelblue = HexColor(0xB0C4DE) lightyellow = HexColor(0xFFFFE0) lime = HexColor(0x00FF00) limegreen = HexColor(0x32CD32) linen 
= HexColor(0xFAF0E6) magenta = HexColor(0xFF00FF) maroon = HexColor(0x800000) mediumaquamarine = HexColor(0x66CDAA) mediumblue = HexColor(0x0000CD) mediumorchid = HexColor(0xBA55D3) mediumpurple = HexColor(0x9370DB) mediumseagreen = HexColor(0x3CB371) mediumslateblue = HexColor(0x7B68EE) mediumspringgreen = HexColor(0x00FA9A) mediumturquoise = HexColor(0x48D1CC) mediumvioletred = HexColor(0xC71585) midnightblue = HexColor(0x191970) mintcream = HexColor(0xF5FFFA) mistyrose = HexColor(0xFFE4E1) moccasin = HexColor(0xFFE4B5) navajowhite = HexColor(0xFFDEAD) navy = HexColor(0x000080) oldlace = HexColor(0xFDF5E6) olive = HexColor(0x808000) olivedrab = HexColor(0x6B8E23) orange = HexColor(0xFFA500) orangered = HexColor(0xFF4500) orchid = HexColor(0xDA70D6) palegoldenrod = HexColor(0xEEE8AA) palegreen = HexColor(0x98FB98) paleturquoise = HexColor(0xAFEEEE) palevioletred = HexColor(0xDB7093) papayawhip = HexColor(0xFFEFD5) peachpuff = HexColor(0xFFDAB9) peru = HexColor(0xCD853F) pink = HexColor(0xFFC0CB) plum = HexColor(0xDDA0DD) powderblue = HexColor(0xB0E0E6) purple = HexColor(0x800080) red = HexColor(0xFF0000) rosybrown = HexColor(0xBC8F8F) royalblue = HexColor(0x4169E1) saddlebrown = HexColor(0x8B4513) salmon = HexColor(0xFA8072) sandybrown = HexColor(0xF4A460) seagreen = HexColor(0x2E8B57) seashell = HexColor(0xFFF5EE) sienna = HexColor(0xA0522D) silver = HexColor(0xC0C0C0) skyblue = HexColor(0x87CEEB) slateblue = HexColor(0x6A5ACD) slategray = HexColor(0x708090) slategrey = slategray snow = HexColor(0xFFFAFA) springgreen = HexColor(0x00FF7F) steelblue = HexColor(0x4682B4) tan = HexColor(0xD2B48C) teal = HexColor(0x008080) thistle = HexColor(0xD8BFD8) tomato = HexColor(0xFF6347) turquoise = HexColor(0x40E0D0) violet = HexColor(0xEE82EE) wheat = HexColor(0xF5DEB3) white = HexColor(0xFFFFFF) whitesmoke = HexColor(0xF5F5F5) yellow = HexColor(0xFFFF00) yellowgreen = HexColor(0x9ACD32) fidblue=HexColor(0x3366cc) fidred=HexColor(0xcc0033) fidlightblue=HexColor("#d6e0f5") ColorType=type(black) ################################################################ # # Helper functions for dealing with colors. These tell you # which are predefined, so you can print color charts; # and can give the nearest match to an arbitrary color object # ################################################################# def colorDistance(col1, col2): """Returns a number between 0 and root(3) stating how similar two colours are - distance in r,g,b, space. Only used to find names for things.""" return math.sqrt( (col1.red - col2.red)**2 + (col1.green - col2.green)**2 + (col1.blue - col2.blue)**2 ) def cmykDistance(col1, col2): """Returns a number between 0 and root(4) stating how similar two colours are - distance in r,g,b, space. Only used to find names for things.""" return math.sqrt( (col1.cyan - col2.cyan)**2 + (col1.magenta - col2.magenta)**2 + (col1.yellow - col2.yellow)**2 + (col1.black - col2.black)**2 ) _namedColors = None def getAllNamedColors(): #returns a dictionary of all the named ones in the module # uses a singleton for efficiency global _namedColors if _namedColors is not None: return _namedColors from reportlab.lib import colors _namedColors = {} for name, value in colors.__dict__.items(): if isinstance(value, Color): _namedColors[name] = value return _namedColors def describe(aColor,mode=0): '''finds nearest colour match to aColor. 
mode=0 print a string desription mode=1 return a string description mode=2 return (distance, colorName) ''' namedColors = getAllNamedColors() closest = (10, None, None) #big number, name, color for name, color in namedColors.items(): distance = colorDistance(aColor, color) if distance < closest[0]: closest = (distance, name, color) if mode<=1: s = 'best match is %s, distance %0.4f' % (closest[1], closest[0]) if mode==0: print(s) else: return s elif mode==2: return (closest[1], closest[0]) else: raise ValueError("Illegal value for mode "+str(mode)) def hue2rgb(m1, m2, h): if h<0: h += 1 if h>1: h -= 1 if h*6<1: return m1+(m2-m1)*h*6 if h*2<1: return m2 if h*3<2: return m1+(m2-m1)*(4-6*h) return m1 def hsl2rgb(h, s, l): if l<=0.5: m2 = l*(s+1) else: m2 = l+s-l*s m1 = l*2-m2 return hue2rgb(m1, m2, h+1./3),hue2rgb(m1, m2, h),hue2rgb(m1, m2, h-1./3) import re _re_css = re.compile(r'^\s*(pcmyk|cmyk|rgb|hsl)(a|)\s*\(\s*([^)]*)\)\s*$') class cssParse: def pcVal(self,v): v = v.strip() try: c=eval(v[:-1]) if not isinstance(c,(float,int)): raise ValueError c=min(100,max(0,c))/100. except: raise ValueError('bad percentage argument value %r in css color %r' % (v,self.s)) return c def rgbPcVal(self,v): return int(self.pcVal(v)*255+0.5)/255. def rgbVal(self,v): v = v.strip() try: c=eval(v[:]) if not isinstance(c,int): raise ValueError return int(min(255,max(0,c)))/255. except: raise ValueError('bad argument value %r in css color %r' % (v,self.s)) def hueVal(self,v): v = v.strip() try: c=eval(v[:]) if not isinstance(c,(int,float)): raise ValueError return ((c%360+360)%360)/360. except: raise ValueError('bad hue argument value %r in css color %r' % (v,self.s)) def alphaVal(self,v,c=1,n='alpha'): try: a = eval(v.strip()) if not isinstance(a,(int,float)): raise ValueError return min(c,max(0,a)) except: raise ValueError('bad %s argument value %r in css color %r' % (n,v,self.s)) _n_c = dict(pcmyk=(4,100,True,False),cmyk=(4,1,True,False),hsl=(3,1,False,True),rgb=(3,1,False,False)) def __call__(self,s): n = _re_css.match(s) if not n: return self.s = s b,c,cmyk,hsl = self._n_c[n.group(1)] ha = n.group(2) n = n.group(3).split(',') #strip parens and split on comma if len(n)!=(b+(ha and 1 or 0)): raise ValueError('css color %r has wrong number of components' % s) if ha: n,a = n[:b],self.alphaVal(n[b],c) else: a = c if cmyk: C = self.alphaVal(n[0],c,'cyan') M = self.alphaVal(n[1],c,'magenta') Y = self.alphaVal(n[2],c,'yellow') K = self.alphaVal(n[3],c,'black') return (c>1 and PCMYKColor or CMYKColor)(C,M,Y,K,alpha=a) else: if hsl: R,G,B= hsl2rgb(self.hueVal(n[0]),self.pcVal(n[1]),self.pcVal(n[2])) else: R,G,B = list(map('%' in n[0] and self.rgbPcVal or self.rgbVal,n)) return Color(R,G,B,a) cssParse=cssParse() class toColor: def __init__(self): self.extraColorsNS = {} #used for overriding/adding to existing color names #make case insensitive if that's your wish def setExtraColorsNameSpace(self,NS): self.extraColorsNS = NS def __call__(self,arg,default=None): '''try to map an arbitrary arg to a color instance ''' if isinstance(arg,Color): return arg if isinstance(arg,(tuple,list)): assert 3<=len(arg)<=4, 'Can only convert 3 and 4 sequences to color' assert 0<=min(arg) and max(arg)<=1 return len(arg)==3 and Color(arg[0],arg[1],arg[2]) or CMYKColor(arg[0],arg[1],arg[2],arg[3]) elif isStr(arg): arg = asNative(arg) C = cssParse(arg) if C: return C if arg in self.extraColorsNS: return self.extraColorsNS[arg] C = getAllNamedColors() s = arg.lower() if s in C: return C[s] try: return toColor(eval(arg)) except: pass try: 
return HexColor(arg) except: if default is None: raise ValueError('Invalid color value %r' % arg) return default toColor = toColor() def toColorOrNone(arg,default=None): '''as above but allows None as a legal value''' if arg is None: return None else: return toColor(arg, default) def setColors(**kw): UNDEF = [] progress = 1 assigned = {} while kw and progress: progress = 0 for k, v in kw.items(): if isinstance(v,(tuple,list)): c = list(map(lambda x,UNDEF=UNDEF: toColor(x,UNDEF),v)) if isinstance(v,tuple): c = tuple(c) ok = UNDEF not in c else: c = toColor(v,UNDEF) ok = c is not UNDEF if ok: assigned[k] = c del kw[k] progress = 1 if kw: raise ValueError("Can't convert\n%s" % str(kw)) getAllNamedColors() for k, c in assigned.items(): globals()[k] = c if isinstance(c,Color): _namedColors[k] = c def Whiter(c,f): '''given a color combine with white as c*f w*(1-f) 0<=f<=1''' c = toColor(c) if isinstance(c,CMYKColorSep): c = c.clone() if isinstance(c,PCMYKColorSep): c.__class__ = PCMYKColor else: c.__class__ = CMYKColor if isinstance(c,PCMYKColor): w = _PCMYK_white elif isinstance(c,CMYKColor): w = _CMYK_white else: w = white return linearlyInterpolatedColor(w, c, 0, 1, f) def Blacker(c,f): '''given a color combine with black as c*f+b*(1-f) 0<=f<=1''' c = toColor(c) if isinstance(c,CMYKColorSep): c = c.clone() if isinstance(c,PCMYKColorSep): c.__class__ = PCMYKColor else: c.__class__ = CMYKColor if isinstance(c,PCMYKColor): b = _PCMYK_black elif isinstance(c,CMYKColor): b = _CMYK_black else: b = black return linearlyInterpolatedColor(b, c, 0, 1, f) def fade(aSpotColor, percentages): """Waters down spot colors and returns a list of new ones e.g fade(myColor, [100,80,60,40,20]) returns a list of five colors """ out = [] for percent in percentages: frac = percent * 0.01 #assume they give us numbers from 0 to 100 newCyan = frac * aSpotColor.cyan newMagenta = frac * aSpotColor.magenta newYellow = frac * aSpotColor.yellow newBlack = frac * aSpotColor.black newDensity = frac * aSpotColor.density newSpot = CMYKColor( newCyan, newMagenta, newYellow, newBlack, spotName = aSpotColor.spotName, density = newDensity) out.append(newSpot) return out def _enforceError(kind,c,tc): if isinstance(tc,Color): xtra = tc._lookupName() xtra = xtra and '(%s)'%xtra or '' else: xtra = '' raise ValueError('Non %s color %r%s' % (kind,c,xtra)) def _enforceSEP(c): '''pure separating colors only, this makes black a problem''' tc = toColor(c) if not isinstance(tc,CMYKColorSep): _enforceError('separating',c,tc) return tc def _enforceSEP_BLACK(c): '''separating + blacks only''' tc = toColor(c) if not isinstance(tc,CMYKColorSep): if isinstance(tc,Color) and tc.red==tc.blue==tc.green: #ahahahah it's a grey tc = _CMYK_black.clone(density=1-tc.red) elif not (isinstance(tc,CMYKColor) and tc.cyan==tc.magenta==tc.yellow==0): #ie some shade of grey _enforceError('separating or black',c,tc) return tc def _enforceSEP_CMYK(c): '''separating or cmyk only''' tc = toColor(c) if not isinstance(tc,CMYKColorSep): if isinstance(tc,Color) and tc.red==tc.blue==tc.green: #ahahahah it's a grey tc = _CMYK_black.clone(density=1-tc.red) elif not isinstance(tc,CMYKColor): _enforceError('separating or CMYK',c,tc) return tc def _enforceCMYK(c): '''cmyk outputs only (rgb greys converted)''' tc = toColor(c) if not isinstance(tc,CMYKColor): if isinstance(tc,Color) and tc.red==tc.blue==tc.green: #ahahahah it's a grey tc = _CMYK_black.clone(black=1-tc.red,alpha=tc.alpha) else: _enforceError('CMYK',c,tc) elif isinstance(tc,CMYKColorSep): tc = tc.clone() 
tc.__class__ = CMYKColor return tc def _enforceRGB(c): tc = toColor(c) if isinstance(tc,CMYKColor): if tc.cyan==tc.magenta==tc.yellow==0: #ahahahah it's grey v = 1-tc.black*tc.density tc = Color(v,v,v,alpha=tc.alpha) else: _enforceError('RGB',c,tc) return tc def _chooseEnforceColorSpace(enforceColorSpace): if enforceColorSpace is not None and not isinstance(enforceColorSpace, collections.Callable): if isinstance(enforceColorSpace,str): enforceColorSpace=enforceColorSpace.upper() if enforceColorSpace=='CMYK': enforceColorSpace = _enforceCMYK elif enforceColorSpace=='RGB': enforceColorSpace = _enforceRGB elif enforceColorSpace=='SEP': enforceColorSpace = _enforceSEP elif enforceColorSpace=='SEP_BLACK': enforceColorSpace = _enforceSEP_BLACK elif enforceColorSpace=='SEP_CMYK': enforceColorSpace = _enforceSEP_CMYK else: raise ValueError('Invalid value for Canvas argument enforceColorSpace=%r' % enforceColorSpace) return enforceColorSpace if __name__ == "__main__": import doctest doctest.testmod()
gpl-3.0
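The helpers defined in this module compose without ceremony; a short illustrative session against the module as shipped:

from reportlab.lib import colors

c = colors.toColor('rgb(50%,0%,0%)')        # Color(.5,0,0,1)
mid = colors.linearlyInterpolatedColor(colors.white, colors.black, 0, 1, 0.5)
print(mid)                                  # mid grey: Color(.5,.5,.5,1)
print(colors.colorDistance(c, colors.maroon))   # ~0.002 -- maroon is #800000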
fyndsi/Django-facebook
docs/docs_env/Lib/encodings/iso2022_jp_2004.py
816
1073
# # iso2022_jp_2004.py: Python Unicode Codec for ISO2022_JP_2004 # # Written by Hye-Shik Chang <perky@FreeBSD.org> # import _codecs_iso2022, codecs import _multibytecodec as mbc codec = _codecs_iso2022.getcodec('iso2022_jp_2004') class Codec(codecs.Codec): encode = codec.encode decode = codec.decode class IncrementalEncoder(mbc.MultibyteIncrementalEncoder, codecs.IncrementalEncoder): codec = codec class IncrementalDecoder(mbc.MultibyteIncrementalDecoder, codecs.IncrementalDecoder): codec = codec class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader): codec = codec class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter): codec = codec def getregentry(): return codecs.CodecInfo( name='iso2022_jp_2004', encode=Codec().encode, decode=Codec().decode, incrementalencoder=IncrementalEncoder, incrementaldecoder=IncrementalDecoder, streamreader=StreamReader, streamwriter=StreamWriter, )
bsd-3-clause
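Since this file only registers codec plumbing, everyday use goes through the standard codecs machinery by name; a small round-trip sketch (the sample text is arbitrary kanji):

import codecs

text = u'\u65e5\u672c\u8a9e'                 # "Japanese" written in kanji
data = text.encode('iso2022_jp_2004')        # 7-bit ISO-2022 escape sequences
assert codecs.decode(data, 'iso2022_jp_2004') == text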
TeutoNet-Netzdienste/ansible
v2/ansible/utils/hashing.py
14
3090
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.

# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import os

# errors.AnsibleError is raised in secure_hash() below
from ansible import errors

# Note, sha1 is the only hash algorithm compatible with python2.4 and with
# FIPS-140 mode (as of 11-2014)
try:
    from hashlib import sha1 as sha1
except ImportError:
    from sha import sha as sha1

# Backwards compat only
try:
    from hashlib import md5 as _md5
except ImportError:
    try:
        from md5 import md5 as _md5
    except ImportError:
        # Assume we're running in FIPS mode here
        _md5 = None


def secure_hash_s(data, hash_func=sha1):
    ''' Return a secure hash hex digest of data. '''

    digest = hash_func()
    try:
        if not isinstance(data, basestring):
            data = "%s" % data
        digest.update(data)
    except UnicodeEncodeError:
        digest.update(data.encode('utf-8'))
    return digest.hexdigest()


def secure_hash(filename, hash_func=sha1):
    ''' Return a secure hash hex digest of local file, None if file is not present or a directory. '''

    if not os.path.exists(filename) or os.path.isdir(filename):
        return None
    digest = hash_func()
    blocksize = 64 * 1024
    try:
        infile = open(filename, 'rb')
        block = infile.read(blocksize)
        while block:
            digest.update(block)
            block = infile.read(blocksize)
        infile.close()
    except IOError, e:
        raise errors.AnsibleError("error while accessing the file %s, error was: %s" % (filename, e))
    return digest.hexdigest()

# The checksum algorithm must match with the algorithm in ShellModule.checksum() method
checksum = secure_hash
checksum_s = secure_hash_s

# Backwards compat functions. Some modules include md5s in their return values
# Continue to support that for now. As of ansible-1.8, all of those modules
# should also return "checksum" (sha1 for now)
# Do not use md5 unless it is needed for:
# 1) Optional backwards compatibility
# 2) Compliance with a third party protocol
#
# MD5 will not work on systems which are FIPS-140-2 compliant.

def md5s(data):
    if not _md5:
        raise ValueError('MD5 not available. Possibly running in FIPS mode')
    return secure_hash_s(data, _md5)


def md5(filename):
    if not _md5:
        raise ValueError('MD5 not available. Possibly running in FIPS mode')
    return secure_hash(filename, _md5)
gpl-3.0
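The module-level aliases make the common cases one-liners; for instance (the import path follows the file's location in the source tree):

from ansible.utils.hashing import checksum, checksum_s

print(checksum_s("ansible"))    # sha1 hex digest of the string
print(checksum("/etc/hosts"))   # sha1 of the file, or None if it is absent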
codypiersall/mlab
src/mlab/mlabraw.py
2
2298
#!/usr/bin/env python
''' A quick and a bit less dirty hack to wrap matlabpipe/matlabcom as if they were mlabraw.

Author: Dani Valevski <daniva@gmail.com>
        Yauhen Yakimovich <eugeny.yakimovitch@gmail.com>
License: MIT
'''
import platform

is_win = platform.system() == 'Windows'
if is_win:
    from matlabcom import MatlabCom as MatlabConnection
    from matlabcom import MatlabError as error
    from matlabcom import discover_location, find_available_releases
    from matlabcom import WindowsMatlabReleaseNotFound as MatlabReleaseNotFound
else:
    from matlabpipe import MatlabPipe as MatlabConnection
    from matlabpipe import MatlabError as error
    from matlabpipe import discover_location, find_available_releases
    from matlabpipe import UnixMatlabReleaseNotFound as MatlabReleaseNotFound
try:
    import settings
except ImportError:
    class settings:
        MATLAB_PATH = 'guess'
import traceback

_MATLAB_RELEASE = 'latest'


def set_release(matlab_release):
    global _MATLAB_RELEASE
    _MATLAB_RELEASE = matlab_release


def open():
    '''Opens a MATLAB session, over the DCOM+ protocol on Windows and over a
    pipe to the matlab executable elsewhere. The executable is located via
    settings.MATLAB_PATH, or via the release selected with set_release().
    '''
    if is_win:
        ret = MatlabConnection()
        ret.open()
        return ret
    else:
        if settings.MATLAB_PATH != 'guess':
            matlab_path = settings.MATLAB_PATH + '/bin/matlab'
        elif _MATLAB_RELEASE != 'latest':
            matlab_path = discover_location(_MATLAB_RELEASE)
        else:
            # The latest release is resolved in __init__.py, i.e. at a higher logical level
            raise MatlabReleaseNotFound('Please select a matlab release or set its location.')
        try:
            ret = MatlabConnection(matlab_path)
            ret.open()
        except Exception:
            #traceback.print_exc(file=sys.stderr)
            raise MatlabReleaseNotFound('Could not open matlab, is it in %s?' % matlab_path)
        return ret


def close(matlab):
    matlab.close()


def eval(matlab, exp, log=False):
    if log or is_win:
        matlab.eval(exp)
    else:
        matlab.eval(exp, print_expression=False, on_new_output=None)
    return ''


def get(matlab, var_name):
    return matlab.get(var_name)


def put(matlab, var_name, val):
    matlab.put({var_name: val})
mit
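Put together, the wrapper reproduces mlabraw's minimal open/eval/put/get surface; a hypothetical session (requires a local MATLAB installation the wrapper can discover, and the returned value's exact type depends on the backend):

import mlabraw

session = mlabraw.open()
mlabraw.put(session, 'x', 2.0)
mlabraw.eval(session, 'y = x * 21;')
print(mlabraw.get(session, 'y'))    # 42.0
mlabraw.close(session)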