repo_name
stringlengths
5
100
path
stringlengths
4
254
copies
stringlengths
1
5
size
stringlengths
4
7
content
stringlengths
733
1M
license
stringclasses
15 values
hash
int64
-9,223,351,895,964,839,000
9,223,354,783B
line_mean
float64
3.5
100
line_max
int64
15
1k
alpha_frac
float64
0.25
0.96
autogenerated
bool
1 class
ratio
float64
1.5
8.15
config_test
bool
2 classes
has_no_keywords
bool
2 classes
few_assignments
bool
1 class
nyov/python-daemon
test/test_pidfile.py
1
16395
# -*- coding: utf-8 -*- # # test/test_pidfile.py # Part of ‘python-daemon’, an implementation of PEP 3143. # # Copyright © 2008–2016 Ben Finney <ben+python@benfinney.id.au> # # This is free software: you may copy, modify, and/or distribute this work # under the terms of the Apache License, version 2.0 as published by the # Apache Software Foundation. # No warranty expressed or implied. See the file ‘LICENSE.ASF-2’ for details. """ Unit test for ‘pidfile’ module. """ from __future__ import (absolute_import, unicode_literals) try: # Python 3 standard library. import builtins except ImportError: # Python 2 standard library. import __builtin__ as builtins import os import itertools import tempfile import io import errno import functools import mock import lockfile from . import scaffold import daemon.pidfile class FakeFileDescriptorStringIO(io.StringIO, object): """ A StringIO class that fakes a file descriptor. """ _fileno_generator = itertools.count() def __init__(self, *args, **kwargs): self._fileno = next(self._fileno_generator) super(FakeFileDescriptorStringIO, self).__init__(*args, **kwargs) def fileno(self): return self._fileno def __enter__(self): return self def __exit__(self, exc_type, exc_val, exc_tb): pass try: FileNotFoundError PermissionError except NameError: # Python 2 uses IOError. FileNotFoundError = functools.partial(IOError, errno.ENOENT) PermissionError = functools.partial(IOError, errno.EPERM) def make_pidlockfile_scenarios(): """ Make a collection of scenarios for testing `PIDLockFile` instances. :return: A collection of scenarios for tests involving `PIDLockfFile` instances. The collection is a mapping from scenario name to a dictionary of scenario attributes. 
""" fake_current_pid = 235 fake_other_pid = 8642 fake_pidfile_path = tempfile.mktemp() fake_pidfile_empty = FakeFileDescriptorStringIO() fake_pidfile_current_pid = FakeFileDescriptorStringIO( "{pid:d}\n".format(pid=fake_current_pid)) fake_pidfile_other_pid = FakeFileDescriptorStringIO( "{pid:d}\n".format(pid=fake_other_pid)) fake_pidfile_bogus = FakeFileDescriptorStringIO( "b0gUs") scenarios = { 'simple': {}, 'not-exist': { 'open_func_name': 'fake_open_nonexist', 'os_open_func_name': 'fake_os_open_nonexist', }, 'not-exist-write-denied': { 'open_func_name': 'fake_open_nonexist', 'os_open_func_name': 'fake_os_open_nonexist', }, 'not-exist-write-busy': { 'open_func_name': 'fake_open_nonexist', 'os_open_func_name': 'fake_os_open_nonexist', }, 'exist-read-denied': { 'open_func_name': 'fake_open_read_denied', 'os_open_func_name': 'fake_os_open_read_denied', }, 'exist-locked-read-denied': { 'locking_pid': fake_other_pid, 'open_func_name': 'fake_open_read_denied', 'os_open_func_name': 'fake_os_open_read_denied', }, 'exist-empty': {}, 'exist-invalid': { 'pidfile': fake_pidfile_bogus, }, 'exist-current-pid': { 'pidfile': fake_pidfile_current_pid, 'pidfile_pid': fake_current_pid, }, 'exist-current-pid-locked': { 'pidfile': fake_pidfile_current_pid, 'pidfile_pid': fake_current_pid, 'locking_pid': fake_current_pid, }, 'exist-other-pid': { 'pidfile': fake_pidfile_other_pid, 'pidfile_pid': fake_other_pid, }, 'exist-other-pid-locked': { 'pidfile': fake_pidfile_other_pid, 'pidfile_pid': fake_other_pid, 'locking_pid': fake_other_pid, }, } for scenario in scenarios.values(): scenario['pid'] = fake_current_pid scenario['pidfile_path'] = fake_pidfile_path if 'pidfile' not in scenario: scenario['pidfile'] = fake_pidfile_empty if 'pidfile_pid' not in scenario: scenario['pidfile_pid'] = None if 'locking_pid' not in scenario: scenario['locking_pid'] = None if 'open_func_name' not in scenario: scenario['open_func_name'] = 'fake_open_okay' if 'os_open_func_name' not in scenario: 
scenario['os_open_func_name'] = 'fake_os_open_okay' return scenarios def setup_pidfile_fixtures(testcase): """ Set up common fixtures for PID file test cases. :param testcase: A `TestCase` instance to decorate. Decorate the `testcase` with attributes to be fixtures for tests involving `PIDLockFile` instances. """ scenarios = make_pidlockfile_scenarios() testcase.pidlockfile_scenarios = scenarios def get_scenario_option(testcase, key, default=None): value = default try: value = testcase.scenario[key] except (NameError, TypeError, AttributeError, KeyError): pass return value func_patcher_os_getpid = mock.patch.object( os, "getpid", return_value=scenarios['simple']['pid']) func_patcher_os_getpid.start() testcase.addCleanup(func_patcher_os_getpid.stop) def make_fake_open_funcs(testcase): def fake_open_nonexist(filename, mode, buffering): if mode.startswith('r'): error = FileNotFoundError( "No such file {filename!r}".format( filename=filename)) raise error else: result = testcase.scenario['pidfile'] return result def fake_open_read_denied(filename, mode, buffering): if mode.startswith('r'): error = PermissionError( "Read denied on {filename!r}".format( filename=filename)) raise error else: result = testcase.scenario['pidfile'] return result def fake_open_okay(filename, mode, buffering): result = testcase.scenario['pidfile'] return result def fake_os_open_nonexist(filename, flags, mode): if (flags & os.O_CREAT): result = testcase.scenario['pidfile'].fileno() else: error = FileNotFoundError( "No such file {filename!r}".format( filename=filename)) raise error return result def fake_os_open_read_denied(filename, flags, mode): if (flags & os.O_CREAT): result = testcase.scenario['pidfile'].fileno() else: error = PermissionError( "Read denied on {filename!r}".format( filename=filename)) raise error return result def fake_os_open_okay(filename, flags, mode): result = testcase.scenario['pidfile'].fileno() return result funcs = dict( (name, obj) for (name, obj) in vars().items() 
if callable(obj)) return funcs testcase.fake_pidfile_open_funcs = make_fake_open_funcs(testcase) def fake_open(filename, mode='rt', buffering=None): scenario_path = get_scenario_option(testcase, 'pidfile_path') if filename == scenario_path: func_name = testcase.scenario['open_func_name'] fake_open_func = testcase.fake_pidfile_open_funcs[func_name] result = fake_open_func(filename, mode, buffering) else: result = FakeFileDescriptorStringIO() return result mock_open = mock.mock_open() mock_open.side_effect = fake_open func_patcher_builtin_open = mock.patch.object( builtins, "open", new=mock_open) func_patcher_builtin_open.start() testcase.addCleanup(func_patcher_builtin_open.stop) def fake_os_open(filename, flags, mode=None): scenario_path = get_scenario_option(testcase, 'pidfile_path') if filename == scenario_path: func_name = testcase.scenario['os_open_func_name'] fake_os_open_func = testcase.fake_pidfile_open_funcs[func_name] result = fake_os_open_func(filename, flags, mode) else: result = FakeFileDescriptorStringIO().fileno() return result mock_os_open = mock.MagicMock(side_effect=fake_os_open) func_patcher_os_open = mock.patch.object( os, "open", new=mock_os_open) func_patcher_os_open.start() testcase.addCleanup(func_patcher_os_open.stop) def fake_os_fdopen(fd, mode='rt', buffering=None): scenario_pidfile = get_scenario_option( testcase, 'pidfile', FakeFileDescriptorStringIO()) if fd == testcase.scenario['pidfile'].fileno(): result = testcase.scenario['pidfile'] else: raise OSError(errno.EBADF, "Bad file descriptor") return result mock_os_fdopen = mock.MagicMock(side_effect=fake_os_fdopen) func_patcher_os_fdopen = mock.patch.object( os, "fdopen", new=mock_os_fdopen) func_patcher_os_fdopen.start() testcase.addCleanup(func_patcher_os_fdopen.stop) def make_lockfile_method_fakes(scenario): """ Make common fake methods for lockfile class. :param scenario: A scenario for testing with PIDLockFile. 
:return: A mapping from normal function name to the corresponding fake function. Each fake function behaves appropriately for the specified `scenario`. """ def fake_func_read_pid(): return scenario['pidfile_pid'] def fake_func_is_locked(): return (scenario['locking_pid'] is not None) def fake_func_i_am_locking(): return ( scenario['locking_pid'] == scenario['pid']) def fake_func_acquire(timeout=None): if scenario['locking_pid'] is not None: raise lockfile.AlreadyLocked() scenario['locking_pid'] = scenario['pid'] def fake_func_release(): if scenario['locking_pid'] is None: raise lockfile.NotLocked() if scenario['locking_pid'] != scenario['pid']: raise lockfile.NotMyLock() scenario['locking_pid'] = None def fake_func_break_lock(): scenario['locking_pid'] = None fake_methods = dict( ( func_name.replace('fake_func_', ''), mock.MagicMock(side_effect=fake_func)) for (func_name, fake_func) in vars().items() if func_name.startswith('fake_func_')) return fake_methods def apply_lockfile_method_mocks(mock_lockfile, testcase, scenario): """ Apply common fake methods to mock lockfile class. :param mock_lockfile: An object providing the `LockFile` interface. :param testcase: The `TestCase` instance providing the context for the patch. :param scenario: The `PIDLockFile` test scenario to use. Mock the `LockFile` methods of `mock_lockfile`, by applying fake methods customised for `scenario`. The mock is does by a patch within the context of `testcase`. """ fake_methods = dict( (func_name, fake_func) for (func_name, fake_func) in make_lockfile_method_fakes(scenario).items() if func_name not in ['read_pid']) for (func_name, fake_func) in fake_methods.items(): func_patcher = mock.patch.object( mock_lockfile, func_name, new=fake_func) func_patcher.start() testcase.addCleanup(func_patcher.stop) def setup_pidlockfile_fixtures(testcase, scenario_name=None): """ Set up common fixtures for PIDLockFile test cases. :param testcase: The `TestCase` instance to decorate. 
:param scenario_name: The name of the `PIDLockFile` scenario to use. Decorate the `testcase` with attributes that are fixtures for test cases involving `PIDLockFile` instances.` """ setup_pidfile_fixtures(testcase) for func_name in [ 'write_pid_to_pidfile', 'remove_existing_pidfile', ]: func_patcher = mock.patch.object(lockfile.pidlockfile, func_name) func_patcher.start() testcase.addCleanup(func_patcher.stop) class TimeoutPIDLockFile_TestCase(scaffold.TestCase): """ Test cases for ‘TimeoutPIDLockFile’ class. """ def setUp(self): """ Set up test fixtures. """ super(TimeoutPIDLockFile_TestCase, self).setUp() pidlockfile_scenarios = make_pidlockfile_scenarios() self.pidlockfile_scenario = pidlockfile_scenarios['simple'] pidfile_path = self.pidlockfile_scenario['pidfile_path'] for func_name in ['__init__', 'acquire']: func_patcher = mock.patch.object( lockfile.pidlockfile.PIDLockFile, func_name) func_patcher.start() self.addCleanup(func_patcher.stop) self.scenario = { 'pidfile_path': self.pidlockfile_scenario['pidfile_path'], 'acquire_timeout': self.getUniqueInteger(), } self.test_kwargs = dict( path=self.scenario['pidfile_path'], acquire_timeout=self.scenario['acquire_timeout'], ) self.test_instance = daemon.pidfile.TimeoutPIDLockFile( **self.test_kwargs) def test_inherits_from_pidlockfile(self): """ Should inherit from PIDLockFile. """ instance = self.test_instance self.assertIsInstance(instance, lockfile.pidlockfile.PIDLockFile) def test_init_has_expected_signature(self): """ Should have expected signature for ‘__init__’. """ def test_func(self, path, acquire_timeout=None, *args, **kwargs): pass test_func.__name__ = str('__init__') self.assertFunctionSignatureMatch( test_func, daemon.pidfile.TimeoutPIDLockFile.__init__) def test_has_specified_acquire_timeout(self): """ Should have specified ‘acquire_timeout’ value. 
""" instance = self.test_instance expected_timeout = self.test_kwargs['acquire_timeout'] self.assertEqual(expected_timeout, instance.acquire_timeout) @mock.patch.object( lockfile.pidlockfile.PIDLockFile, "__init__", autospec=True) def test_calls_superclass_init(self, mock_init): """ Should call the superclass ‘__init__’. """ expected_path = self.test_kwargs['path'] instance = daemon.pidfile.TimeoutPIDLockFile(**self.test_kwargs) mock_init.assert_called_with(instance, expected_path) @mock.patch.object( lockfile.pidlockfile.PIDLockFile, "acquire", autospec=True) def test_acquire_uses_specified_timeout(self, mock_func_acquire): """ Should call the superclass ‘acquire’ with specified timeout. """ instance = self.test_instance test_timeout = self.getUniqueInteger() expected_timeout = test_timeout instance.acquire(test_timeout) mock_func_acquire.assert_called_with(instance, expected_timeout) @mock.patch.object( lockfile.pidlockfile.PIDLockFile, "acquire", autospec=True) def test_acquire_uses_stored_timeout_by_default(self, mock_func_acquire): """ Should call superclass ‘acquire’ with stored timeout by default. """ instance = self.test_instance test_timeout = self.test_kwargs['acquire_timeout'] expected_timeout = test_timeout instance.acquire() mock_func_acquire.assert_called_with(instance, expected_timeout) # Local variables: # coding: utf-8 # mode: python # End: # vim: fileencoding=utf-8 filetype=python :
gpl-3.0
4,227,802,240,090,852,000
33.652542
80
0.594033
false
4.185261
true
false
false
zamattiac/SHARE
providers/gov/clinicaltrials/harvester.py
1
2090
import time import logging from furl import furl from lxml import etree from share import Harvester logger = logging.getLogger(__name__) class ClinicalTrialsHarvester(Harvester): url = 'https://clinicaltrials.gov/ct2/results' def do_harvest(self, start_date, end_date): end_date = end_date.date() start_date = start_date.date() return self.fetch_records(furl(self.url).set(query_params={ 'displayxml': 'true', 'lup_s': start_date.strftime('%m/%d/%Y'), 'lup_e': end_date.strftime('%m/%d/%Y') }).url) def fetch_records(self, url): resp = self.requests.get(url) resp_xml = etree.XML(resp.content) num_records = int(resp_xml.xpath('//search_results/@count')[0]) if num_records > 0: # create a new URL to request all results url = furl(url).add(query_params={ 'count': num_records }).url all_records_resp = self.requests.get(url) all_records_doc = etree.XML(all_records_resp.content) # retrieve the URLs for each document to make requests for their full content record_urls = [ furl(record.xpath('url/node()')[0]).set(query_params={ 'displayxml': 'true' }).url for record in all_records_doc.xpath('//clinical_study') ] logger.info("There are {} record urls to harvest - this may take a while...".format(len(record_urls))) for url in record_urls: try: record_resp = self.requests.get(url) except self.requests.exceptions.ConnectionError as e: logger.warning('Connection error: {}, wait a bit...'.format(e)) time.sleep(30) record_resp = self.requests.get(url) doc = etree.XML(record_resp.content) record = etree.tostring(doc) doc_id = doc.xpath('//nct_id/node()')[0] yield (doc_id, record)
apache-2.0
2,951,035,884,026,858,500
33.262295
114
0.548804
false
3.87037
false
false
false
peterheim1/robbie
bin/tf_head_tracker.py
1
13024
#!/usr/bin/env python """ tf_head_tracker.py - Version 1.0 2011-08-01 Move the head to track a PointStamped target on the /target_point topic. Created for the Pi Robot Project: http://www.pirobot.org Copyright (c) 2011 Patrick Goebel. All rights reserved. This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details at: http://www.gnu.org/licenses/gpl.html """ import roslib; roslib.load_manifest('robbie') import rospy import tf from std_msgs.msg import Float64 from dynamixel_controllers.srv import * from geometry_msgs.msg import PointStamped, Point from sensor_msgs.msg import JointState, RegionOfInterest, CameraInfo from math import radians, sqrt import sys """ A speed of exactly 0 has a special meaning for Dynamixel servos--namely, "move as fast as you can". This can have some very undesirable consequences since it is the complete opposite of what 0 normally means. So we define a very small speed value to represent zero speed. """ ZERO_SPEED = 0.0001 class tfTracker(): def __init__(self): rospy.init_node('tf_head_tracker') rospy.on_shutdown(self.shutdown) """ How fast should we update the servos? """ self.rate = rospy.get_param('~rate', 10) r = rospy.Rate(self.rate) """ Joint speeds are given in radians per second """ self.default_joint_speed = rospy.get_param('~default_joint_speed', 0.3) self.max_joint_speed = rospy.get_param('~max_joint_speed', 0.5) """ How far ahead or behind the target (in radians) should we aim for? 
""" self.lead_target_angle = rospy.get_param('~lead_target_angle', 0.5) """ How long (in seconds) should we permit the target to be lost before re-centering the servos? """ self.target_timeout = 3.0 self.target_lost = False self.servos_centered = False """ Remap these in the launch file or command line if necessary """ self.camera_link = 'head_cam_link' self.head_pan_joint = 'head_pan_joint' self.head_tilt_joint = 'head_tilt_joint' self.head_pan_link = 'head_pan_link' self.head_tilt_link = 'head_tilt_link' self.dynamixels = rospy.get_param('dynamixels', '') """ The pan/tilt thresholds indicate how far (in meters) the ROI needs to be off-center before we make a movement. """ self.pan_threshold = int(rospy.get_param('~pan_threshold', 0.01)) self.tilt_threshold = int(rospy.get_param('~tilt_threshold', 0.01)) """ The k_pan and k_tilt parameter determine how responsive the servo movements are. If these are set too high, oscillation can result. """ self.k_pan = rospy.get_param('~k_pan', 1.5) self.k_tilt = rospy.get_param('~k_tilt', 1.5) """ Set limits on how far we can pan or tilt """ self.max_pan = rospy.get_param('~max_pan', radians(145)) self.min_pan = rospy.get_param('~min_pan', radians(-145)) self.max_tilt = rospy.get_param('~max_tilt', radians(90)) self.min_tilt = rospy.get_param('~min_tilt', radians(-90)) self.servo_speed = dict() self.servo_position = dict() self.torque_enable = dict() """ Connect to the set_speed and torque_enable services for each servo. Also define a position publisher for each servo. 
""" for name in sorted(self.dynamixels): try: controller = name # The set_speed services set_speed_service = '/' + controller + '/set_speed' rospy.wait_for_service(set_speed_service) self.servo_speed[name] = rospy.ServiceProxy(set_speed_service, SetSpeed, persistent=True) # Initialize the servo speed to the default_joint_speed self.servo_speed[name](self.default_joint_speed) # Torque enable/disable control for each servo torque_service = '/' + controller + '/torque_enable' rospy.wait_for_service(torque_service) self.torque_enable[name] = rospy.ServiceProxy(torque_service, TorqueEnable) # Start each servo in the disabled state so we can move them by hand self.torque_enable[name](False) # The position controllers self.servo_position[name] = rospy.Publisher('/' + controller + '/command', Float64) except: rospy.loginfo("Can't contact servo services!") rospy.loginfo("TF Tracker node started. Centering servos...") self.pan_position = 0 self.tilt_position = 0 self.pan_speed = ZERO_SPEED self.tilt_speed = ZERO_SPEED self.last_tilt_speed = 0 self.last_pan_speed = 0 """ Use a counter to detect when we have lost the target """ self.tracking_seq = 0 self.last_tracking_seq = -1 self.target_lost_count = 0 self.max_target_lost_count = self.rate * 5 """ Center the pan and tilt servos at the start. 
""" self.center_head_servos() """ Initialize tf listener """ self.tf = tf.TransformListener() """ Make sure we can see the camera and head pan links """ self.tf.waitForTransform(self.camera_link, self.head_pan_link, rospy.Time(), rospy.Duration(5.0)) """ Wait also for the joint_states topic so we can track our own joint positions """ rospy.wait_for_message('joint_states', JointState) self.joint_state = JointState() rospy.Subscriber('joint_states', JointState, self.update_joint_state) """ Subscribe to the target point topic """ #rospy.Subscriber('target_point', PointStamped, self.update_head_position) rospy.Subscriber('roi', RegionOfInterest, self.update_head_position) rospy.Subscriber('head_cam/rgb/camera_info', CameraInfo, self.getCameraInfo) while not rospy.is_shutdown(): if self.last_tracking_seq == self.tracking_seq: self.pan_speed = ZERO_SPEED self.tilt_speed = ZERO_SPEED self.target_lost_count += 1 else: self.last_tracking_seq = self.tracking_seq self.target_lost_count = 0 if self.target_lost_count > self.max_target_lost_count: self.center_head_servos() else: try: """ Only update the pan speed if it differs from the last value """ if self.last_pan_speed != self.pan_speed: self.servo_speed[self.head_pan_joint](self.pan_speed) self.last_pan_speed = self.pan_speed self.servo_position[self.head_pan_joint].publish(self.pan_position) except: """ If we run into any exceptions, mometarily stop the head movement by setting the target pan position to the current position. 
""" try: current_pan_position = self.joint_state.position[self.joint_state.name.index(self.head_pan_joint)] self.servo_position[self.head_pan_joint].publish(current_pan_position) rospy.loginfo("Servo SetSpeed Exception!") rospy.loginfo(sys.exc_info()) except: pass try: """ Only update the tilt speed if it differs from the last value """ if self.last_tilt_speed != self.tilt_speed: self.servo_speed[self.head_tilt_joint](self.tilt_speed) self.last_tilt_speed = self.tilt_speed self.servo_position[self.head_tilt_joint].publish(self.tilt_position) except: """ If we run into any exceptions, mometarily stop the head movement by setting the target tilt position to the current position. """ try: current_tilt_position = self.joint_state.position[self.joint_state.name.index(self.head_tilt_joint)] self.servo_position[self.head_tilt_joint].publish(current_tilt_position) rospy.loginfo("Servo SetSpeed Exception!") rospy.loginfo(sys.exc_info()) except: pass r.sleep() def center_head_servos(self): try: self.servo_speed[self.head_pan_joint](self.default_joint_speed) self.servo_speed[self.head_tilt_joint](self.default_joint_speed) for i in range(3): self.servo_position[self.head_pan_joint].publish(0) self.servo_position[self.head_tilt_joint].publish(0) rospy.sleep(1) except: pass def update_joint_state(self, msg): self.joint_state = msg def update_head_position(self, msg): """ We increment a tracking counter upon receiving a target message so we can use the counter to determine when we have lost the target. """ self.tracking_seq += 1 """ Check to see if we have lost the ROI. 
""" if msg.width == 0 or msg.height == 0 or msg.width > self.image_width / 2 or \ msg.height > self.image_height / 2: self.center_head_servos() return# mod up to here """ Compute the center of the ROI """ COG_x = msg.x_offset + msg.width / 2 - self.image_width / 2 COG_y = msg.y_offset + msg.height / 2 - self.image_height / 2 """ Pan the camera only if the displacement of the target point exceeds the threshold """ if abs(COG_x) > self.pan_threshold: """ Set the pan speed proportion to the horizontal displacement of the target """ #self.pan_speed = trunc(min(self.max_joint_speed, max(ZERO_SPEED, self.k_pan * abs(COG_x))), 2) """ Set the target position ahead or behind the current position """ try: current_pan = self.joint_state.position[self.joint_state.name.index(self.head_pan_joint)] except: return if COG_x > 0: self.pan_position = max(self.min_pan, current_pan - self.lead_target_angle) else: self.pan_position = min(self.max_pan, current_pan + self.lead_target_angle) else: self.pan_speed = ZERO_SPEED """ Tilt the camera only if the displacement of the target point exceeds the threshold """ if abs(COG_y) > self.tilt_threshold: """ Set the pan speed proportion to the vertical displacement of the target """ #self.tilt_speed = trunc(min(self.max_joint_speed, max(ZERO_SPEED, self.k_tilt * abs(COG_y))), 2) """ Set the target position ahead or behind the current position """ try: current_tilt = self.joint_state.position[self.joint_state.name.index(self.head_tilt_joint)] except: return if COG_y < 0: self.tilt_position = max(self.min_tilt, current_tilt - self.lead_target_angle) else: self.tilt_position = min(self.max_tilt, current_tilt + self.lead_target_angle) else: self.tilt_speed = ZERO_SPEED def getCameraInfo(self, msg): self.image_width = msg.width self.image_height = msg.height def shutdown(self): rospy.loginfo("Shutting down frame tracking node...") self.center_head_servos() # Relax all servos to give them a rest. 
for servo in self.dynamixels: self.torque_enable[servo](False) def trunc(f, n): '''Truncates/pads a float f to n decimal places without rounding''' slen = len('%.*f' % (n, f)) return float(str(f)[:slen]) if __name__ == '__main__': try: tracker = tfTracker() rospy.spin() except rospy.ROSInterruptException: rospy.loginfo("TF tracking node is shut down.")
gpl-3.0
2,277,179,604,030,267,000
43.29932
124
0.575937
false
4.104633
false
false
false
naturalis/imgpheno
docs/conf.py
1
10568
# -*- coding: utf-8 -*- # # imgpheno documentation build configuration file, created by # sphinx-quickstart on Thu Sep 11 15:25:40 2014. # # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import sys import os try: import sphinx_rtd_theme SPHINX_RTD_THEME = True except: SPHINX_RTD_THEME = False # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. #sys.path.insert(0, os.path.abspath('.')) # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.viewcode', ] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'imgpheno' copyright = u'2014, Naturalis Biodiversity Center' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = '0.1' # The full version, including alpha/beta/rc tags. release = '0.1.0' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. 
#language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build'] # The reST default role (used for this markup: `text`) to use for all # documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. #keep_warnings = False # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. if SPHINX_RTD_THEME: html_theme = 'sphinx_rtd_theme' else: html_theme = 'default' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. if SPHINX_RTD_THEME: html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] else: html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # "<project> v<release> documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. 
#html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. #html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a <link> tag referring to it. 
The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'imgphenodoc' # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. #'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ ('index', 'imgpheno.tex', u'imgpheno Documentation', u'Naturalis Biodiversity Center', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'imgpheno', u'imgpheno Documentation', [u'Naturalis Biodiversity Center'], 1) ] # If true, show URL addresses after external links. #man_show_urls = False # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. 
List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ('index', 'imgpheno', u'imgpheno Documentation', u'Author', 'imgpheno', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. #texinfo_appendices = [] # If false, no module index is generated. #texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. #texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. #texinfo_no_detailmenu = False # -- Options for Epub output ---------------------------------------------- # Bibliographic Dublin Core info. epub_title = u'imgpheno' epub_author = u'Author' epub_publisher = u'Author' epub_copyright = u'2014, Naturalis Biodiversity Center' # The basename for the epub file. It defaults to the project name. #epub_basename = u'imgpheno' # The HTML theme for the epub output. Since the default themes are not optimized # for small screen space, using the same theme for HTML and epub output is # usually not wise. This defaults to 'epub', a theme designed to save visual # space. #epub_theme = 'epub' # The language of the text. It defaults to the language option # or en if the language is not set. #epub_language = '' # The scheme of the identifier. Typical schemes are ISBN or URL. #epub_scheme = '' # The unique identifier of the text. This can be a ISBN number # or the project homepage. #epub_identifier = '' # A unique identification for the text. #epub_uid = '' # A tuple containing the cover image and cover page html template filenames. #epub_cover = () # A sequence of (type, uri, title) tuples for the guide element of content.opf. #epub_guide = () # HTML files that should be inserted before the pages created by sphinx. # The format is a list of tuples containing the path and title. #epub_pre_files = [] # HTML files shat should be inserted after the pages created by sphinx. 
# The format is a list of tuples containing the path and title. #epub_post_files = [] # A list of files that should not be packed into the epub file. epub_exclude_files = ['search.html'] # The depth of the table of contents in toc.ncx. #epub_tocdepth = 3 # Allow duplicate toc entries. #epub_tocdup = True # Choose between 'default' and 'includehidden'. #epub_tocscope = 'default' # Fix unsupported image types using the PIL. #epub_fix_images = False # Scale large images. #epub_max_image_width = 0 # How to display URL addresses: 'footnote', 'no', or 'inline'. #epub_show_urls = 'inline' # If false, no index is generated. #epub_use_index = True
mit
-8,311,749,641,249,516,000
29.810496
80
0.707702
false
3.629121
true
false
false
kwameboame/newsdex
news/utils/twitter_utils.py
1
5325
# coding=utf-8 import json import logging import time from datetime import datetime, timedelta from django.utils import timezone from tweepy import StreamListener, OAuthHandler, Stream, API from news.models import TwitterUser, Tweet, TwitterAPISetting from news.models.twitter import FilterKeyword, FilterLocation, TwitterStream from news.utils.common import chunks __author__ = 'ilov3' logger = logging.getLogger(__name__) def authenticate(api_settings=None): if not api_settings: try: api_settings = TwitterAPISetting.objects.get() except TwitterAPISetting.MultipleObjectsReturned: logger.error('You have more than one twitter API settings! Go to admin page, and fix the problem.') raise Exception() except TwitterAPISetting.DoesNotExist: logger.error('You haven\'t got any twitter API settings! Go to admin page, and add one.') raise Exception() auth = OAuthHandler(api_settings.consumer_key, api_settings.consumer_secret) auth.set_access_token(api_settings.access_token, api_settings.access_token_secret) return auth class SaveListener(StreamListener): def __init__(self, stream, api=None): self.stream = stream super().__init__(api) def save_tweet(self, tweet): dt_format = '%a %b %d %X %z %Y' data = { 'text': tweet['text'], 'created_time': datetime.strptime(tweet['created_at'], dt_format).strftime('%Y-%m-%d %H:%M:%S'), 'tweet_id': tweet['id_str'], 'coordinates': tweet.get('coordinates', None), 'retweet_count': tweet.get('retweet_count', None), 'user': TwitterUser.objects.get(user_id=tweet['user']['id_str']), 'stream': self.stream, } Tweet.objects.get_or_create(tweet_id=data['tweet_id'], defaults=data) @staticmethod def save_twitter_user(user): data = { 'user_id': user['id_str'], 'name': user['name'], 'location': user.get('location'), 'description': user.get('description'), } TwitterUser.objects.get_or_create(user_id=data['user_id'], defaults=data) @staticmethod def is_retweet(tweet): if 'retweeted_status' in tweet: logger.debug('Retweet found: %s' % tweet['text']) return True 
return False def process_retweet(self, retweet): logger.debug('Getting original tweet from retweet') original_tweet = retweet['retweeted_status'] self.save_twitter_user(original_tweet['user']) self.save_tweet(original_tweet) def on_data(self, data): try: tweet = json.loads(data) logger.debug('%s %s:%s' % (tweet['created_at'], tweet['user']['name'], tweet['text'])) if not self.is_retweet(tweet): self.save_twitter_user(tweet['user']) self.save_tweet(tweet) else: self.process_retweet(tweet) return True except Exception as e: logger.error(e) time.sleep(2) def on_error(self, status): logger.error('Error: status code %s' % status) def subscribe_on_stream(task_id, api_settings=None, keyword=None, location=None): logger.debug('Starting parse twitter stream on keyword/location: "%s"' % (keyword or location)) assert not (keyword and location), logger.error('Error: can\'t fetch by keyword and location in the same time!') assert keyword or location, logger.error('Nor keyword or location param is given') auth = authenticate(api_settings) if keyword: filter_keyword, created = FilterKeyword.objects.get_or_create(keyword=keyword) stream_obj = TwitterStream.objects.create(filter_keyword=filter_keyword, celery_task_id=task_id) l = SaveListener(stream=stream_obj) stream = Stream(auth, l) stream.filter(track=[keyword]) if location: filter_location, created = FilterLocation.objects.get_or_create(west_limit=location[0], south_limit=location[1], east_limit=location[2], north_limit=location[3]) stream_obj = TwitterStream.objects.create(filter_location=filter_location, celery_task_id=task_id) l = SaveListener(stream=stream_obj) stream = Stream(auth, l) stream.filter(locations=location) def count_retweets(): auth = authenticate() api = API(auth) week_ago = timezone.now().replace() - timedelta(days=7) tweets_ids = Tweet.objects.filter(created_time__gt=week_ago).values_list('tweet_id', flat=True) logger.debug('Count retweets for %s tweets from %s' % (tweets_ids.count(), week_ago)) try: for 
chunk in chunks(tweets_ids, 100): for tweet in api.statuses_lookup(chunk): try: tweet_obj = Tweet.objects.get(tweet_id=tweet.id_str) logger.debug('Tweet %s::before - %s retweets, after - %s retweets' % (tweet_obj.tweet_id, tweet_obj.retweet_count, tweet.retweet_count)) tweet_obj.retweet_count = tweet.retweet_count tweet_obj.save() except Exception as e: logger.error(e) except Exception as e: logger.error(e) logger.debug('Finish count retweets!')
bsd-2-clause
-7,017,367,773,884,610,000
40.601563
169
0.627793
false
3.836455
false
false
false
rangermeier/flaskberry
flaskberry/models/disk.py
1
3422
# -*- coding: utf-8 -*- import subprocess import re import os import psutil MOUNTPOINTS = ["/home/media/disks/usb%s" % i for i in range(8)] class Disk(dict): def __init__(self, **args): self.mounted = False if args.has_key("uuid"): self.uuid = args["uuid"] if self.uuid_exists(): self.get_device() self.get_id() if args.has_key("dev"): self.dev = args["dev"] self.get_id() if args.has_key("partition"): self.set_partition_info(args["partition"]) self.get_id(); def set_partition_info(self, info): self.dev = info.device self.mountpoint = info.mountpoint self.type = info.fstype self.options = info.opts self.mounted = True def get_usage(self): if not self.is_mounted(): return self.usage = psutil.disk_usage(self.mountpoint) def get_id(self): blkid = subprocess.check_output(["sudo", "blkid", "-p", self.dev]) #/dev/sdb1: LABEL="Kingston" UUID="1C86-3319" VERSION="FAT32" TYPE="vfat" fields = ["label", "uuid", "version", "type"] for field in fields: regexp = '%s="(.+?)"' % field.upper() parts = re.search(regexp, blkid) if parts: self[field] = parts.groups()[0] def get_device(self): if not self.has_key("dev"): self.dev = subprocess.check_output(["sudo", "blkid", "-U", self.uuid]).rstrip() return self.dev def is_mounted(self): if not self.has_key("mounted"): df = subprocess.check_output(["df", "-hT", self.dev]).splitlines()[1] if re.search("/dev$", df): self.mounted = False else: self.mounted = True return self.mounted def is_mountable(self): mountable = False; if self.has_key("uuid") and self.has_key("type"): if not self["type"].startswith("crypto_"): if self["type"] != "swap": mountable = True return mountable def uuid_exists(self): return os.path.exists("/dev/disk/by-uuid/%s" % self.uuid) def find_mountpoint(self): # look for fstab entries with open("/etc/fstab") as fstab: regexp = re.compile("UUID=%s\s+?(/.*?)\s" % self.uuid) for line in fstab.readlines(): match = regexp.match(line) if match: return match.groups()[0] # try empty media directories mi = iter(MOUNTPOINTS) 
mountpoint = mi.next() while os.path.exists(mountpoint) and not os.listdir(mountpoint) == []: try: mountpoint.next() except StopIteration: return if not os.path.exists(mountpoint): return None return mountpoint def mount(self): mountpoint = self.find_mountpoint() if mountpoint and not self.is_mounted() and self.uuid_exists(): subprocess.call(["sudo", "/bin/mount", "/dev/disk/by-uuid/%s" % self.uuid, mountpoint]) self.mounted = True return True return False def unmount(self): if self.uuid_exists(): return subprocess.call(["sudo", "/bin/umount", "/dev/disk/by-uuid/%s" % self.uuid])
mit
7,954,179,522,211,220,000
32.54902
95
0.527177
false
3.884222
false
false
false
all-of-us/raw-data-repository
rdr_service/lib_fhir/fhirclient_1_0_6/models/imagingobjectselection.py
1
9601
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Generated from FHIR 1.0.2.7202 (http://hl7.org/fhir/StructureDefinition/ImagingObjectSelection) on 2016-06-23. # 2016, SMART Health IT. from . import domainresource class ImagingObjectSelection(domainresource.DomainResource): """ Key Object Selection. A manifest of a set of DICOM Service-Object Pair Instances (SOP Instances). The referenced SOP Instances (images or other content) are for a single patient, and may be from one or more studies. The referenced SOP Instances have been selected for a purpose, such as quality assurance, conference, or consult. Reflecting that range of purposes, typical ImagingObjectSelection resources may include all SOP Instances in a study (perhaps for sharing through a Health Information Exchange); key images from multiple studies (for reference by a referring or treating physician); a multi-frame ultrasound instance ("cine" video clip) and a set of measurements taken from that instance (for inclusion in a teaching file); and so on. """ resource_name = "ImagingObjectSelection" def __init__(self, jsondict=None, strict=True): """ Initialize all valid properties. :raises: FHIRValidationError on validation errors, unless strict is False :param dict jsondict: A JSON dictionary to use for initialization :param bool strict: If True (the default), invalid variables will raise a TypeError """ self.author = None """ Author (human or machine). Type `FHIRReference` referencing `Practitioner, Device, Organization, Patient, RelatedPerson` (represented as `dict` in JSON). """ self.authoringTime = None """ Authoring time of the selection. Type `FHIRDate` (represented as `str` in JSON). """ self.description = None """ Description text. Type `str`. """ self.patient = None """ Patient of the selected objects. Type `FHIRReference` referencing `Patient` (represented as `dict` in JSON). """ self.study = None """ Study identity of the selected instances. 
List of `ImagingObjectSelectionStudy` items (represented as `dict` in JSON). """ self.title = None """ Reason for selection. Type `CodeableConcept` (represented as `dict` in JSON). """ self.uid = None """ Instance UID. Type `str`. """ super(ImagingObjectSelection, self).__init__(jsondict=jsondict, strict=strict) def elementProperties(self): js = super(ImagingObjectSelection, self).elementProperties() js.extend([ ("author", "author", fhirreference.FHIRReference, False, None, False), ("authoringTime", "authoringTime", fhirdate.FHIRDate, False, None, False), ("description", "description", str, False, None, False), ("patient", "patient", fhirreference.FHIRReference, False, None, True), ("study", "study", ImagingObjectSelectionStudy, True, None, True), ("title", "title", codeableconcept.CodeableConcept, False, None, True), ("uid", "uid", str, False, None, True), ]) return js from . import backboneelement class ImagingObjectSelectionStudy(backboneelement.BackboneElement): """ Study identity of the selected instances. Study identity and locating information of the DICOM SOP instances in the selection. """ resource_name = "ImagingObjectSelectionStudy" def __init__(self, jsondict=None, strict=True): """ Initialize all valid properties. :raises: FHIRValidationError on validation errors, unless strict is False :param dict jsondict: A JSON dictionary to use for initialization :param bool strict: If True (the default), invalid variables will raise a TypeError """ self.imagingStudy = None """ Reference to ImagingStudy. Type `FHIRReference` referencing `ImagingStudy` (represented as `dict` in JSON). """ self.series = None """ Series identity of the selected instances. List of `ImagingObjectSelectionStudySeries` items (represented as `dict` in JSON). """ self.uid = None """ Study instance UID. Type `str`. """ self.url = None """ Retrieve study URL. Type `str`. 
""" super(ImagingObjectSelectionStudy, self).__init__(jsondict=jsondict, strict=strict) def elementProperties(self): js = super(ImagingObjectSelectionStudy, self).elementProperties() js.extend([ ("imagingStudy", "imagingStudy", fhirreference.FHIRReference, False, None, False), ("series", "series", ImagingObjectSelectionStudySeries, True, None, True), ("uid", "uid", str, False, None, True), ("url", "url", str, False, None, False), ]) return js class ImagingObjectSelectionStudySeries(backboneelement.BackboneElement): """ Series identity of the selected instances. Series identity and locating information of the DICOM SOP instances in the selection. """ resource_name = "ImagingObjectSelectionStudySeries" def __init__(self, jsondict=None, strict=True): """ Initialize all valid properties. :raises: FHIRValidationError on validation errors, unless strict is False :param dict jsondict: A JSON dictionary to use for initialization :param bool strict: If True (the default), invalid variables will raise a TypeError """ self.instance = None """ The selected instance. List of `ImagingObjectSelectionStudySeriesInstance` items (represented as `dict` in JSON). """ self.uid = None """ Series instance UID. Type `str`. """ self.url = None """ Retrieve series URL. Type `str`. """ super(ImagingObjectSelectionStudySeries, self).__init__(jsondict=jsondict, strict=strict) def elementProperties(self): js = super(ImagingObjectSelectionStudySeries, self).elementProperties() js.extend([ ("instance", "instance", ImagingObjectSelectionStudySeriesInstance, True, None, True), ("uid", "uid", str, False, None, False), ("url", "url", str, False, None, False), ]) return js class ImagingObjectSelectionStudySeriesInstance(backboneelement.BackboneElement): """ The selected instance. Identity and locating information of the selected DICOM SOP instances. """ resource_name = "ImagingObjectSelectionStudySeriesInstance" def __init__(self, jsondict=None, strict=True): """ Initialize all valid properties. 
:raises: FHIRValidationError on validation errors, unless strict is False :param dict jsondict: A JSON dictionary to use for initialization :param bool strict: If True (the default), invalid variables will raise a TypeError """ self.frames = None """ The frame set. List of `ImagingObjectSelectionStudySeriesInstanceFrames` items (represented as `dict` in JSON). """ self.sopClass = None """ SOP class UID of instance. Type `str`. """ self.uid = None """ Selected instance UID. Type `str`. """ self.url = None """ Retrieve instance URL. Type `str`. """ super(ImagingObjectSelectionStudySeriesInstance, self).__init__(jsondict=jsondict, strict=strict) def elementProperties(self): js = super(ImagingObjectSelectionStudySeriesInstance, self).elementProperties() js.extend([ ("frames", "frames", ImagingObjectSelectionStudySeriesInstanceFrames, True, None, False), ("sopClass", "sopClass", str, False, None, True), ("uid", "uid", str, False, None, True), ("url", "url", str, False, None, True), ]) return js class ImagingObjectSelectionStudySeriesInstanceFrames(backboneelement.BackboneElement): """ The frame set. Identity and location information of the frames in the selected instance. """ resource_name = "ImagingObjectSelectionStudySeriesInstanceFrames" def __init__(self, jsondict=None, strict=True): """ Initialize all valid properties. :raises: FHIRValidationError on validation errors, unless strict is False :param dict jsondict: A JSON dictionary to use for initialization :param bool strict: If True (the default), invalid variables will raise a TypeError """ self.frameNumbers = None """ Frame numbers. List of `int` items. """ self.url = None """ Retrieve frame URL. Type `str`. 
""" super(ImagingObjectSelectionStudySeriesInstanceFrames, self).__init__(jsondict=jsondict, strict=strict) def elementProperties(self): js = super(ImagingObjectSelectionStudySeriesInstanceFrames, self).elementProperties() js.extend([ ("frameNumbers", "frameNumbers", int, True, None, True), ("url", "url", str, False, None, True), ]) return js from . import codeableconcept from . import fhirdate from . import fhirreference
bsd-3-clause
9,194,539,053,608,248,000
37.404
138
0.629726
false
4.418316
false
false
false
DhrubajyotiDas/PyAbel
examples/example_all_Ominus.py
1
3594
# -*- coding: utf-8 -*- # This example compares the available inverse Abel transform methods # for the Ominus sample image # # Note it transforms only the Q0 (top-right) quadrant # using the fundamental transform code from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import numpy as np import abel import collections import matplotlib.pylab as plt from time import time # inverse Abel transform methods ----------------------------- # dictionary of method: function() transforms = { "direct": abel.direct.direct_transform, "onion": abel.dasch.onion_peeling_transform, "hansenlaw": abel.hansenlaw.hansenlaw_transform, "basex": abel.basex.basex_transform, "three_point": abel.dasch.three_point_transform, } # sort dictionary transforms = collections.OrderedDict(sorted(transforms.items())) ntrans = np.size(transforms.keys()) # number of transforms # Image: O2- VMI 1024x1024 pixel ------------------ IM = abel.tools.analytical.sample_image(n=501, name="Ominus") h, w = IM.shape # forward transform (whole image) fIM = abel.Transform(IM, direction="forward", method="hansenlaw").transform Q0, Q1, Q2, Q3 = abel.tools.symmetry.get_image_quadrants(fIM, reorient=True) Q0fresh = Q0.copy() # keep clean copy print ("quadrant shape {}".format(Q0.shape)) # process Q0 quadrant using each method -------------------- iabelQ = [] # keep inverse Abel transformed image for q, method in enumerate(transforms.keys()): Q0 = Q0fresh.copy() # top-right quadrant of O2- image print ("\n------- {:s} inverse ...".format(method)) t0 = time() # inverse Abel transform using 'method' IAQ0 = transforms[method](Q0, direction="inverse", basis_dir='bases') print (" {:.1f} sec".format(time()-t0)) iabelQ.append(IAQ0) # store for plot # polar projection and speed profile radial, speed = abel.tools.vmi.angular_integration(IAQ0, origin=(0, 0)) # normalize image intensity and speed distribution IAQ0 /= IAQ0.max() speed /= 
speed.max() # plots #121 whole image, #122 speed distributions plt.subplot(121) # method label for each quadrant annot_angle = -(45+q*90)*np.pi/180 # -ve because numpy coords from top if q > 3: annot_angle += 50*np.pi/180 # shared quadrant - move the label annot_coord = (h/2+(h*0.9)*np.cos(annot_angle)/2, w/2+(w*0.9)*np.sin(annot_angle)/2) plt.annotate(method, annot_coord, color="yellow") # plot speed distribution plt.subplot(122) plt.plot(radial, speed, label=method) # reassemble image, each quadrant a different method # for < 4 images pad using a blank quadrant blank = np.zeros(IAQ0.shape) for q in range(ntrans, 4): iabelQ.append(blank) # more than 4, split quadrant if ntrans == 5: # split last quadrant into 2 = upper and lower triangles tmp_img = np.tril(np.flipud(iabelQ[-2])) +\ np.triu(np.flipud(iabelQ[-1])) iabelQ[3] = np.flipud(tmp_img) # Fix me when > 5 images im = abel.tools.symmetry.put_image_quadrants((iabelQ[0], iabelQ[1], iabelQ[2], iabelQ[3]), original_image_shape=IM.shape) plt.subplot(121) plt.imshow(im, vmin=0, vmax=0.8) plt.subplot(122) plt.title("Ominus sample image") plt.axis(ymin=-0.05, ymax=1.1) plt.legend(loc=0, labelspacing=0.1) plt.tight_layout() plt.savefig('plot_example_all_Ominus.png', dpi=100) plt.show()
mit
-267,479,111,371,456,580
29.457627
76
0.648859
false
3.217547
false
false
false
MalloyPower/parsing-python
front-end/testsuite-python-lib/Python-3.2/Lib/calendar.py
1
22677
"""Calendar printing functions Note when comparing these calendars to the ones printed by cal(1): By default, these calendars have Monday as the first day of the week, and Sunday as the last (the European convention). Use setfirstweekday() to set the first day of the week (0=Monday, 6=Sunday).""" import sys import datetime import locale as _locale __all__ = ["IllegalMonthError", "IllegalWeekdayError", "setfirstweekday", "firstweekday", "isleap", "leapdays", "weekday", "monthrange", "monthcalendar", "prmonth", "month", "prcal", "calendar", "timegm", "month_name", "month_abbr", "day_name", "day_abbr"] # Exception raised for bad input (with string parameter for details) error = ValueError # Exceptions raised for bad input class IllegalMonthError(ValueError): def __init__(self, month): self.month = month def __str__(self): return "bad month number %r; must be 1-12" % self.month class IllegalWeekdayError(ValueError): def __init__(self, weekday): self.weekday = weekday def __str__(self): return "bad weekday number %r; must be 0 (Monday) to 6 (Sunday)" % self.weekday # Constants for months referenced later January = 1 February = 2 # Number of days per month (except for February in leap years) mdays = [0, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] # This module used to have hard-coded lists of day and month names, as # English strings. The classes following emulate a read-only version of # that, but supply localized names. Note that the values are computed # fresh on each call, in case the user changes locale between calls. class _localized_month: _months = [datetime.date(2001, i+1, 1).strftime for i in range(12)] _months.insert(0, lambda x: "") def __init__(self, format): self.format = format def __getitem__(self, i): funcs = self._months[i] if isinstance(i, slice): return [f(self.format) for f in funcs] else: return funcs(self.format) def __len__(self): return 13 class _localized_day: # January 1, 2001, was a Monday. 
_days = [datetime.date(2001, 1, i+1).strftime for i in range(7)] def __init__(self, format): self.format = format def __getitem__(self, i): funcs = self._days[i] if isinstance(i, slice): return [f(self.format) for f in funcs] else: return funcs(self.format) def __len__(self): return 7 # Full and abbreviated names of weekdays day_name = _localized_day('%A') day_abbr = _localized_day('%a') # Full and abbreviated names of months (1-based arrays!!!) month_name = _localized_month('%B') month_abbr = _localized_month('%b') # Constants for weekdays (MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY) = range(7) def isleap(year): """Return True for leap years, False for non-leap years.""" return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0) def leapdays(y1, y2): """Return number of leap years in range [y1, y2). Assume y1 <= y2.""" y1 -= 1 y2 -= 1 return (y2//4 - y1//4) - (y2//100 - y1//100) + (y2//400 - y1//400) def weekday(year, month, day): """Return weekday (0-6 ~ Mon-Sun) for year (1970-...), month (1-12), day (1-31).""" return datetime.date(year, month, day).weekday() def monthrange(year, month): """Return weekday (0-6 ~ Mon-Sun) and number of days (28-31) for year, month.""" if not 1 <= month <= 12: raise IllegalMonthError(month) day1 = weekday(year, month, 1) ndays = mdays[month] + (month == February and isleap(year)) return day1, ndays class Calendar(object): """ Base calendar class. This class doesn't do any formatting. It simply provides data to subclasses. """ def __init__(self, firstweekday=0): self.firstweekday = firstweekday # 0 = Monday, 6 = Sunday def getfirstweekday(self): return self._firstweekday % 7 def setfirstweekday(self, firstweekday): self._firstweekday = firstweekday firstweekday = property(getfirstweekday, setfirstweekday) def iterweekdays(self): """ Return a iterator for one week of weekday numbers starting with the configured first one. 
""" for i in range(self.firstweekday, self.firstweekday + 7): yield i%7 def itermonthdates(self, year, month): """ Return an iterator for one month. The iterator will yield datetime.date values and will always iterate through complete weeks, so it will yield dates outside the specified month. """ date = datetime.date(year, month, 1) # Go back to the beginning of the week days = (date.weekday() - self.firstweekday) % 7 date -= datetime.timedelta(days=days) oneday = datetime.timedelta(days=1) while True: yield date date += oneday if date.month != month and date.weekday() == self.firstweekday: break def itermonthdays2(self, year, month): """ Like itermonthdates(), but will yield (day number, weekday number) tuples. For days outside the specified month the day number is 0. """ for date in self.itermonthdates(year, month): if date.month != month: yield (0, date.weekday()) else: yield (date.day, date.weekday()) def itermonthdays(self, year, month): """ Like itermonthdates(), but will yield day numbers. For days outside the specified month the day number is 0. """ for date in self.itermonthdates(year, month): if date.month != month: yield 0 else: yield date.day def monthdatescalendar(self, year, month): """ Return a matrix (list of lists) representing a month's calendar. Each row represents a week; week entries are datetime.date values. """ dates = list(self.itermonthdates(year, month)) return [ dates[i:i+7] for i in range(0, len(dates), 7) ] def monthdays2calendar(self, year, month): """ Return a matrix representing a month's calendar. Each row represents a week; week entries are (day number, weekday number) tuples. Day numbers outside this month are zero. """ days = list(self.itermonthdays2(year, month)) return [ days[i:i+7] for i in range(0, len(days), 7) ] def monthdayscalendar(self, year, month): """ Return a matrix representing a month's calendar. Each row represents a week; days outside this month are zero. 
""" days = list(self.itermonthdays(year, month)) return [ days[i:i+7] for i in range(0, len(days), 7) ] def yeardatescalendar(self, year, width=3): """ Return the data for the specified year ready for formatting. The return value is a list of month rows. Each month row contains upto width months. Each month contains between 4 and 6 weeks and each week contains 1-7 days. Days are datetime.date objects. """ months = [ self.monthdatescalendar(year, i) for i in range(January, January+12) ] return [months[i:i+width] for i in range(0, len(months), width) ] def yeardays2calendar(self, year, width=3): """ Return the data for the specified year ready for formatting (similar to yeardatescalendar()). Entries in the week lists are (day number, weekday number) tuples. Day numbers outside this month are zero. """ months = [ self.monthdays2calendar(year, i) for i in range(January, January+12) ] return [months[i:i+width] for i in range(0, len(months), width) ] def yeardayscalendar(self, year, width=3): """ Return the data for the specified year ready for formatting (similar to yeardatescalendar()). Entries in the week lists are day numbers. Day numbers outside this month are zero. """ months = [ self.monthdayscalendar(year, i) for i in range(January, January+12) ] return [months[i:i+width] for i in range(0, len(months), width) ] class TextCalendar(Calendar): """ Subclass of Calendar that outputs a calendar as a simple plain text similar to the UNIX program cal. """ def prweek(self, theweek, width): """ Print a single week (no newline). """ print(self.formatweek(theweek, width), end=' ') def formatday(self, day, weekday, width): """ Returns a formatted day. """ if day == 0: s = '' else: s = '%2i' % day # right-align single-digit days return s.center(width) def formatweek(self, theweek, width): """ Returns a single week in a string (no newline). 
""" return ' '.join(self.formatday(d, wd, width) for (d, wd) in theweek) def formatweekday(self, day, width): """ Returns a formatted week day name. """ if width >= 9: names = day_name else: names = day_abbr return names[day][:width].center(width) def formatweekheader(self, width): """ Return a header for a week. """ return ' '.join(self.formatweekday(i, width) for i in self.iterweekdays()) def formatmonthname(self, theyear, themonth, width, withyear=True): """ Return a formatted month name. """ s = month_name[themonth] if withyear: s = "%s %r" % (s, theyear) return s.center(width) def prmonth(self, theyear, themonth, w=0, l=0): """ Print a month's calendar. """ print(self.formatmonth(theyear, themonth, w, l), end=' ') def formatmonth(self, theyear, themonth, w=0, l=0): """ Return a month's calendar string (multi-line). """ w = max(2, w) l = max(1, l) s = self.formatmonthname(theyear, themonth, 7 * (w + 1) - 1) s = s.rstrip() s += '\n' * l s += self.formatweekheader(w).rstrip() s += '\n' * l for week in self.monthdays2calendar(theyear, themonth): s += self.formatweek(week, w).rstrip() s += '\n' * l return s def formatyear(self, theyear, w=2, l=1, c=6, m=3): """ Returns a year's calendar as a multi-line string. 
""" w = max(2, w) l = max(1, l) c = max(2, c) colwidth = (w + 1) * 7 - 1 v = [] a = v.append a(repr(theyear).center(colwidth*m+c*(m-1)).rstrip()) a('\n'*l) header = self.formatweekheader(w) for (i, row) in enumerate(self.yeardays2calendar(theyear, m)): # months in this row months = range(m*i+1, min(m*(i+1)+1, 13)) a('\n'*l) names = (self.formatmonthname(theyear, k, colwidth, False) for k in months) a(formatstring(names, colwidth, c).rstrip()) a('\n'*l) headers = (header for k in months) a(formatstring(headers, colwidth, c).rstrip()) a('\n'*l) # max number of weeks for this row height = max(len(cal) for cal in row) for j in range(height): weeks = [] for cal in row: if j >= len(cal): weeks.append('') else: weeks.append(self.formatweek(cal[j], w)) a(formatstring(weeks, colwidth, c).rstrip()) a('\n' * l) return ''.join(v) def pryear(self, theyear, w=0, l=0, c=6, m=3): """Print a year's calendar.""" print(self.formatyear(theyear, w, l, c, m)) class HTMLCalendar(Calendar): """ This calendar returns complete HTML pages. """ # CSS classes for the day <td>s cssclasses = ["mon", "tue", "wed", "thu", "fri", "sat", "sun"] def formatday(self, day, weekday): """ Return a day as a table cell. """ if day == 0: return '<td class="noday">&nbsp;</td>' # day outside month else: return '<td class="%s">%d</td>' % (self.cssclasses[weekday], day) def formatweek(self, theweek): """ Return a complete week as a table row. """ s = ''.join(self.formatday(d, wd) for (d, wd) in theweek) return '<tr>%s</tr>' % s def formatweekday(self, day): """ Return a weekday name as a table header. """ return '<th class="%s">%s</th>' % (self.cssclasses[day], day_abbr[day]) def formatweekheader(self): """ Return a header for a week as a table row. """ s = ''.join(self.formatweekday(i) for i in self.iterweekdays()) return '<tr>%s</tr>' % s def formatmonthname(self, theyear, themonth, withyear=True): """ Return a month name as a table row. 
""" if withyear: s = '%s %s' % (month_name[themonth], theyear) else: s = '%s' % month_name[themonth] return '<tr><th colspan="7" class="month">%s</th></tr>' % s def formatmonth(self, theyear, themonth, withyear=True): """ Return a formatted month as a table. """ v = [] a = v.append a('<table border="0" cellpadding="0" cellspacing="0" class="month">') a('\n') a(self.formatmonthname(theyear, themonth, withyear=withyear)) a('\n') a(self.formatweekheader()) a('\n') for week in self.monthdays2calendar(theyear, themonth): a(self.formatweek(week)) a('\n') a('</table>') a('\n') return ''.join(v) def formatyear(self, theyear, width=3): """ Return a formatted year as a table of tables. """ v = [] a = v.append width = max(width, 1) a('<table border="0" cellpadding="0" cellspacing="0" class="year">') a('\n') a('<tr><th colspan="%d" class="year">%s</th></tr>' % (width, theyear)) for i in range(January, January+12, width): # months in this row months = range(i, min(i+width, 13)) a('<tr>') for m in months: a('<td>') a(self.formatmonth(theyear, m, withyear=False)) a('</td>') a('</tr>') a('</table>') return ''.join(v) def formatyearpage(self, theyear, width=3, css='calendar.css', encoding=None): """ Return a formatted year as a complete HTML page. 
""" if encoding is None: encoding = sys.getdefaultencoding() v = [] a = v.append a('<?xml version="1.0" encoding="%s"?>\n' % encoding) a('<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">\n') a('<html>\n') a('<head>\n') a('<meta http-equiv="Content-Type" content="text/html; charset=%s" />\n' % encoding) if css is not None: a('<link rel="stylesheet" type="text/css" href="%s" />\n' % css) a('<title>Calendar for %d</title>\n' % theyear) a('</head>\n') a('<body>\n') a(self.formatyear(theyear, width)) a('</body>\n') a('</html>\n') return ''.join(v).encode(encoding, "xmlcharrefreplace") class different_locale: def __init__(self, locale): self.locale = locale def __enter__(self): self.oldlocale = _locale.getlocale(_locale.LC_TIME) _locale.setlocale(_locale.LC_TIME, self.locale) def __exit__(self, *args): _locale.setlocale(_locale.LC_TIME, self.oldlocale) class LocaleTextCalendar(TextCalendar): """ This class can be passed a locale name in the constructor and will return month and weekday names in the specified locale. If this locale includes an encoding all strings containing month and weekday names will be returned as unicode. """ def __init__(self, firstweekday=0, locale=None): TextCalendar.__init__(self, firstweekday) if locale is None: locale = _locale.getdefaultlocale() self.locale = locale def formatweekday(self, day, width): with different_locale(self.locale): if width >= 9: names = day_name else: names = day_abbr name = names[day] return name[:width].center(width) def formatmonthname(self, theyear, themonth, width, withyear=True): with different_locale(self.locale): s = month_name[themonth] if withyear: s = "%s %r" % (s, theyear) return s.center(width) class LocaleHTMLCalendar(HTMLCalendar): """ This class can be passed a locale name in the constructor and will return month and weekday names in the specified locale. 
If this locale includes an encoding all strings containing month and weekday names will be returned as unicode. """ def __init__(self, firstweekday=0, locale=None): HTMLCalendar.__init__(self, firstweekday) if locale is None: locale = _locale.getdefaultlocale() self.locale = locale def formatweekday(self, day): with different_locale(self.locale): s = day_abbr[day] return '<th class="%s">%s</th>' % (self.cssclasses[day], s) def formatmonthname(self, theyear, themonth, withyear=True): with different_locale(self.locale): s = month_name[themonth] if withyear: s = '%s %s' % (s, theyear) return '<tr><th colspan="7" class="month">%s</th></tr>' % s # Support for old module level interface c = TextCalendar() firstweekday = c.getfirstweekday def setfirstweekday(firstweekday): if not MONDAY <= firstweekday <= SUNDAY: raise IllegalWeekdayError(firstweekday) c.firstweekday = firstweekday monthcalendar = c.monthdayscalendar prweek = c.prweek week = c.formatweek weekheader = c.formatweekheader prmonth = c.prmonth month = c.formatmonth calendar = c.formatyear prcal = c.pryear # Spacing of month columns for multi-column year calendar _colwidth = 7*3 - 1 # Amount printed by prweek() _spacing = 6 # Number of spaces between columns def format(cols, colwidth=_colwidth, spacing=_spacing): """Prints multi-column formatting for year calendars""" print(formatstring(cols, colwidth, spacing)) def formatstring(cols, colwidth=_colwidth, spacing=_spacing): """Returns a string formatted from n strings, centered within n columns.""" spacing *= ' ' return spacing.join(c.center(colwidth) for c in cols) EPOCH = 1970 _EPOCH_ORD = datetime.date(EPOCH, 1, 1).toordinal() def timegm(tuple): """Unrelated but handy function to calculate Unix timestamp from GMT.""" year, month, day, hour, minute, second = tuple[:6] days = datetime.date(year, month, 1).toordinal() - _EPOCH_ORD + day - 1 hours = days*24 + hour minutes = hours*60 + minute seconds = minutes*60 + second return seconds def main(args): import 
optparse parser = optparse.OptionParser(usage="usage: %prog [options] [year [month]]") parser.add_option( "-w", "--width", dest="width", type="int", default=2, help="width of date column (default 2, text only)" ) parser.add_option( "-l", "--lines", dest="lines", type="int", default=1, help="number of lines for each week (default 1, text only)" ) parser.add_option( "-s", "--spacing", dest="spacing", type="int", default=6, help="spacing between months (default 6, text only)" ) parser.add_option( "-m", "--months", dest="months", type="int", default=3, help="months per row (default 3, text only)" ) parser.add_option( "-c", "--css", dest="css", default="calendar.css", help="CSS to use for page (html only)" ) parser.add_option( "-L", "--locale", dest="locale", default=None, help="locale to be used from month and weekday names" ) parser.add_option( "-e", "--encoding", dest="encoding", default=None, help="Encoding to use for output" ) parser.add_option( "-t", "--type", dest="type", default="text", choices=("text", "html"), help="output type (text or html)" ) (options, args) = parser.parse_args(args) if options.locale and not options.encoding: parser.error("if --locale is specified --encoding is required") sys.exit(1) locale = options.locale, options.encoding if options.type == "html": if options.locale: cal = LocaleHTMLCalendar(locale=locale) else: cal = HTMLCalendar() encoding = options.encoding if encoding is None: encoding = sys.getdefaultencoding() optdict = dict(encoding=encoding, css=options.css) if len(args) == 1: print(cal.formatyearpage(datetime.date.today().year, **optdict)) elif len(args) == 2: print(cal.formatyearpage(int(args[1]), **optdict)) else: parser.error("incorrect number of arguments") sys.exit(1) else: if options.locale: cal = LocaleTextCalendar(locale=locale) else: cal = TextCalendar() optdict = dict(w=options.width, l=options.lines) if len(args) != 3: optdict["c"] = options.spacing optdict["m"] = options.months if len(args) == 1: result = 
cal.formatyear(datetime.date.today().year, **optdict) elif len(args) == 2: result = cal.formatyear(int(args[1]), **optdict) elif len(args) == 3: result = cal.formatmonth(int(args[1]), int(args[2]), **optdict) else: parser.error("incorrect number of arguments") sys.exit(1) if options.encoding: result = result.encode(options.encoding) print(result) if __name__ == "__main__": main(sys.argv)
mit
-7,777,727,036,793,404,000
31.581897
124
0.570402
false
3.78076
false
false
false
tochikuji/pyPyrTools
pyrtools/blurDn.py
1
2593
import numpy from .namedFilter import namedFilter from .corrDn import corrDn def blurDn(*args): ''' RES = blurDn(IM, LEVELS, FILT) Blur and downsample an image. The blurring is done with filter kernel specified by FILT (default = 'binom5'), which can be a string (to be passed to namedFilter), a vector (applied separably as a 1D convolution kernel in X and Y), or a matrix (applied as a 2D convolution kernel). The downsampling is always by 2 in each direction. The procedure is applied recursively LEVELS times (default=1). Eero Simoncelli, 3/97. Ported to python by Rob Young 4/14 function res = blurDn(im, nlevs, filt) ''' if len(args) == 0: print("Error: image input parameter required.") return im = numpy.array(args[0]) # optional args if len(args) > 1: nlevs = args[1] else: nlevs = 1 if len(args) > 2: filt = args[2] if isinstance(filt, str): filt = namedFilter(filt) else: filt = namedFilter('binom5') if filt.shape[0] == 1 or filt.shape[1] == 1: filt = [x / sum(filt) for x in filt] else: filt = [x / sum(sum(filt)) for x in filt] filt = numpy.array(filt) if nlevs > 1: im = blurDn(im, nlevs - 1, filt) if nlevs >= 1: if len(im.shape) == 1 or im.shape[0] == 1 or im.shape[1] == 1: # 1D image if len(filt.shape) > 1 and (filt.shape[1] != 1 and filt.shape[2] != 1): # >1D filter print('Error: Cannot apply 2D filter to 1D signal') return # orient filter and image correctly if im.shape[0] == 1: if len(filt.shape) == 1 or filt.shape[1] == 1: filt = filt.T else: if filt.shape[0] == 1: filt = filt.T res = corrDn(image=im, filt=filt, step=(2, 2)) if len(im.shape) == 1 or im.shape[1] == 1: res = numpy.reshape(res, (numpy.ceil(im.shape[0] / 2.0), 1)) else: res = numpy.reshape(res, (1, numpy.ceil(im.shape[1] / 2.0))) elif len(filt.shape) == 1 or filt.shape[0] == 1 or filt.shape[1] == 1: # 2D image and 1D filter res = corrDn(image=im, filt=filt.T, step=(2, 1)) res = corrDn(image=res, filt=filt, step=(1, 2)) else: # 2D image and 2D filter res = corrDn(image=im, filt=filt, step=(2, 2)) 
else: res = im return res
mit
7,257,363,821,098,295,000
32.675325
83
0.528731
false
3.320102
false
false
false
D4N/FSM_exercise_class
sheet_3/2d_plot.py
1
2451
#-*- coding: utf-8 -*- from __future__ import division, print_function import numpy as np import matplotlib.pyplot as plt from mpl_toolkits.mplot3d import Axes3D class simulation_output(object): def __init__(self, filename): self.__filename = str(filename) self.get_data_from_file() def get_data_from_file(self): self.__data_count = sum(1 for line in open(self.__filename)) self.__time = np.zeros((self.__data_count)) self.__energy = np.zeros((self.__data_count)) with open(self.__filename, 'r') as data: first_line = data.readline() tmp = first_line.split(' ') self.__object_count = int((len(tmp) - 2)/6) self.__x = np.zeros((self.__object_count, self.__data_count)) self.__y = np.zeros((self.__object_count, self.__data_count)) self.__z = np.zeros((self.__object_count, self.__data_count)) with open(self.__filename, 'r') as data: j = 0 for line in data: tmp = line.split(' ') self.__time[j] = float(tmp[0]) self.__energy[j] = float(tmp[1]) for i in xrange(self.__object_count): self.__x[i,j] = float(tmp[2+6*i]) self.__y[i,j] = float(tmp[3+6*i]) self.__z[i,j] = float(tmp[4+6*i]) j += 1 def plot_data(self, plot_type = "xy"): if not plot_type in ["xy", "yz", "xz", "xyz", "energy"]: raise ValueError("Possible values for the plot_type are: xy, yz, xz, xyz and energy") self.fig = plt.figure() if plot_type == "xyz": self.ax = self.fig.add_subplot(111, projection='3d') else: self.ax = self.fig.add_subplot(111) if plot_type == "xy": for i in xrange(self.__object_count): self.ax.plot(self.__x[i], self.__y[i]) elif plot_type == "yz": for i in xrange(self.__object_count): self.ax.plot(self.__y[i], self.__z[i]) elif plot_type == "xz": for i in xrange(self.__object_count): self.ax.plot(self.__x[i], self.__z[i]) elif plot_type == "xyz": for i in xrange(self.__object_count): self.ax.plot(self.__x[i], self.__y[i], self.__z[i]) elif plot_type == "energy": self.ax.plot(self.__time, self.__energy) self.ax.set_xlabel(plot_type[0]) self.ax.set_ylabel(plot_type[1]) if plot_type == "xyz": 
self.ax.set_zlabel("z") elif plot_type == "energy": self.ax.set_xlabel("t") self.ax.set_ylabel(r"$E_{tot}$") if not plot_type == "xyz": plt.grid() plt.show() plt.close() if __name__ == '__main__': import sys S = simulation_output(sys.argv[1]) S.plot_data(sys.argv[2])
gpl-3.0
-4,800,749,468,022,277,000
22.796117
88
0.598939
false
2.64973
false
false
false
twilio/twilio-python
tests/integration/insights/v1/call/test_event.py
1
7025
# coding=utf-8 r""" This code was generated by \ / _ _ _| _ _ | (_)\/(_)(_|\/| |(/_ v1.0.0 / / """ from tests import IntegrationTestCase from tests.holodeck import Request from twilio.base.exceptions import TwilioException from twilio.http.response import Response class EventTestCase(IntegrationTestCase): def test_list_request(self): self.holodeck.mock(Response(500, '')) with self.assertRaises(TwilioException): self.client.insights.v1.calls("CAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \ .events.list() self.holodeck.assert_has_request(Request( 'get', 'https://insights.twilio.com/v1/Voice/CAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Events', )) def test_read_response(self): self.holodeck.mock(Response( 200, ''' { "meta": { "page": 0, "page_size": 50, "first_page_url": "https://insights.twilio.com/v1/Voice/CAaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Events?PageSize=50&Page=0", "previous_page_url": null, "next_page_url": null, "key": "events", "url": "https://insights.twilio.com/v1/Voice/CAaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Events?PageSize=50&Page=0" }, "events": [ { "timestamp": "2019-09-19T22:15:23Z", "call_sid": "CA03a02b156c6faa96c86906f7e9ad0f38", "account_sid": "AC998c10b68cbfda9f67277f7d8f4439c9", "edge": "sdk_edge", "group": "connection", "name": "error", "level": "ERROR", "sdk_edge": { "error": { "code": 31600 }, "metadata": { "client_name": "GTI9300323095d271b890c91568931321395", "location": { "lat": 37.4192, "lon": -122.0574 }, "city": "Mountain View", "country_code": "US", "country_subdivision": "California", "ip_address": "108.177.7.83", "sdk": { "type": "twilio-voice-android", "version": "4.5.1", "platform": "android", "selected_region": "gll", "os": { "name": "android", "version": "4.3" }, "device": { "model": "GT-I9300", "type": "GT-I9300", "vendor": "samsung", "arch": "armeabi-v7a" } } } }, "client_edge": null, "carrier_edge": null, "sip_edge": null } ] } ''' )) actual = self.client.insights.v1.calls("CAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \ .events.list() 
self.assertIsNotNone(actual) def test_read_deep_response(self): self.holodeck.mock(Response( 200, ''' { "meta": { "page": 10, "page_size": 5, "first_page_url": "https://insights.twilio.com/v1/Voice/CAaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Events?PageSize=5&Page=0", "previous_page_url": "https://insights.twilio.com/v1/Voice/CAaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Events?PageSize=5&Page=9&PageToken=DP10", "next_page_url": null, "key": "events", "url": "https://insights.twilio.com/v1/Voice/CAaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Events?PageSize=5&Page=10" }, "events": [ { "timestamp": "2019-09-19T22:15:23Z", "call_sid": "CA03a02b156c6faa96c86906f7e9ad0f38", "account_sid": "AC998c10b68cbfda9f67277f7d8f4439c9", "edge": "sdk_edge", "group": "connection", "name": "error", "level": "ERROR", "sdk_edge": { "error": { "code": 31600 }, "metadata": { "client_name": "GTI9300323095d271b890c91568931321395", "location": { "lat": 37.4192, "lon": -122.0574 }, "city": "Mountain View", "country_code": "US", "country_subdivision": "California", "ip_address": "108.177.7.83", "sdk": { "type": "twilio-voice-android", "version": "4.5.1", "platform": "android", "selected_region": "gll", "os": { "name": "android", "version": "4.3" }, "device": { "model": "GT-I9300", "type": "GT-I9300", "vendor": "samsung", "arch": "armeabi-v7a" } } } }, "client_edge": null, "carrier_edge": null, "sip_edge": null } ] } ''' )) actual = self.client.insights.v1.calls("CAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \ .events.list() self.assertIsNotNone(actual)
mit
-8,465,989,922,263,330,000
41.575758
155
0.34306
false
4.961158
true
false
false
hlzz/dotfiles
graphics/VTK-7.0.0/Wrapping/Python/vtk/wx/wxVTKRenderWindowInteractor.py
1
25228
""" A VTK RenderWindowInteractor widget for wxPython. Find wxPython info at http://wxPython.org Created by Prabhu Ramachandran, April 2002 Based on wxVTKRenderWindow.py Fixes and updates by Charl P. Botha 2003-2008 Updated to new wx namespace and some cleaning up by Andrea Gavana, December 2006 """ """ Please see the example at the end of this file. ---------------------------------------- Creation: wxVTKRenderWindowInteractor(parent, ID, stereo=0, [wx keywords]): You should create a wx.App(False) or some other wx.App subclass before creating the window. Behaviour: Uses __getattr__ to make the wxVTKRenderWindowInteractor behave just like a vtkGenericRenderWindowInteractor. ---------------------------------------- """ # import usual libraries import math, os, sys import wx import vtk # a few configuration items, see what works best on your system # Use GLCanvas as base class instead of wx.Window. # This is sometimes necessary under wxGTK or the image is blank. # (in wxWindows 2.3.1 and earlier, the GLCanvas had scroll bars) baseClass = wx.Window if wx.Platform == "__WXGTK__": import wx.glcanvas baseClass = wx.glcanvas.GLCanvas # Keep capturing mouse after mouse is dragged out of window # (in wxGTK 2.3.2 there is a bug that keeps this from working, # but it is only relevant in wxGTK if there are multiple windows) _useCapture = (wx.Platform == "__WXMSW__") # end of configuration items class EventTimer(wx.Timer): """Simple wx.Timer class. """ def __init__(self, iren): """Default class constructor. @param iren: current render window """ wx.Timer.__init__(self) self.iren = iren def Notify(self): """ The timer has expired. """ self.iren.TimerEvent() class wxVTKRenderWindowInteractor(baseClass): """ A wxRenderWindow for wxPython. Use GetRenderWindow() to get the vtkRenderWindow. Create with the keyword stereo=1 in order to generate a stereo-capable window. 
""" # class variable that can also be used to request instances that use # stereo; this is overridden by the stereo=1/0 parameter. If you set # it to True, the NEXT instantiated object will attempt to allocate a # stereo visual. E.g.: # wxVTKRenderWindowInteractor.USE_STEREO = True # myRWI = wxVTKRenderWindowInteractor(parent, -1) USE_STEREO = False def __init__(self, parent, ID, *args, **kw): """Default class constructor. @param parent: parent window @param ID: window id @param **kw: wxPython keywords (position, size, style) plus the 'stereo' keyword """ # private attributes self.__RenderWhenDisabled = 0 # First do special handling of some keywords: # stereo, position, size, width, height, style try: stereo = bool(kw['stereo']) del kw['stereo'] except KeyError: stereo = False try: position = kw['position'] del kw['position'] except KeyError: position = wx.DefaultPosition try: size = kw['size'] del kw['size'] except KeyError: try: size = parent.GetSize() except AttributeError: size = wx.DefaultSize # wx.WANTS_CHARS says to give us e.g. TAB # wx.NO_FULL_REPAINT_ON_RESIZE cuts down resize flicker under GTK style = wx.WANTS_CHARS | wx.NO_FULL_REPAINT_ON_RESIZE try: style = style | kw['style'] del kw['style'] except KeyError: pass # the enclosing frame must be shown under GTK or the windows # don't connect together properly if wx.Platform != '__WXMSW__': l = [] p = parent while p: # make a list of all parents l.append(p) p = p.GetParent() l.reverse() # sort list into descending order for p in l: p.Show(1) if baseClass.__name__ == 'GLCanvas': # code added by cpbotha to enable stereo and double # buffering correctly where the user requests this; remember # that the glXContext in this case is NOT allocated by VTK, # but by WX, hence all of this. 
# Initialize GLCanvas with correct attriblist attribList = [wx.glcanvas.WX_GL_RGBA, wx.glcanvas.WX_GL_MIN_RED, 1, wx.glcanvas.WX_GL_MIN_GREEN, 1, wx.glcanvas.WX_GL_MIN_BLUE, 1, wx.glcanvas.WX_GL_DEPTH_SIZE, 16, wx.glcanvas.WX_GL_DOUBLEBUFFER] if stereo: attribList.append(wx.glcanvas.WX_GL_STEREO) try: baseClass.__init__(self, parent, ID, pos=position, size=size, style=style, attribList=attribList) except wx.PyAssertionError: # visual couldn't be allocated, so we go back to default baseClass.__init__(self, parent, ID, pos=position, size=size, style=style) if stereo: # and make sure everyone knows that the stereo # visual wasn't set. stereo = 0 else: baseClass.__init__(self, parent, ID, pos=position, size=size, style=style) # create the RenderWindow and initialize it self._Iren = vtk.vtkGenericRenderWindowInteractor() self._Iren.SetRenderWindow( vtk.vtkRenderWindow() ) self._Iren.AddObserver('CreateTimerEvent', self.CreateTimer) self._Iren.AddObserver('DestroyTimerEvent', self.DestroyTimer) self._Iren.GetRenderWindow().AddObserver('CursorChangedEvent', self.CursorChangedEvent) try: self._Iren.GetRenderWindow().SetSize(size.width, size.height) except AttributeError: self._Iren.GetRenderWindow().SetSize(size[0], size[1]) if stereo: self._Iren.GetRenderWindow().StereoCapableWindowOn() self._Iren.GetRenderWindow().SetStereoTypeToCrystalEyes() self.__handle = None self.BindEvents() # with this, we can make sure that the reparenting logic in # Render() isn't called before the first OnPaint() has # successfully been run (and set up the VTK/WX display links) self.__has_painted = False # set when we have captured the mouse. self._own_mouse = False # used to store WHICH mouse button led to mouse capture self._mouse_capture_button = 0 # A mapping for cursor changes. 
self._cursor_map = {0: wx.CURSOR_ARROW, # VTK_CURSOR_DEFAULT 1: wx.CURSOR_ARROW, # VTK_CURSOR_ARROW 2: wx.CURSOR_SIZENESW, # VTK_CURSOR_SIZENE 3: wx.CURSOR_SIZENWSE, # VTK_CURSOR_SIZENWSE 4: wx.CURSOR_SIZENESW, # VTK_CURSOR_SIZESW 5: wx.CURSOR_SIZENWSE, # VTK_CURSOR_SIZESE 6: wx.CURSOR_SIZENS, # VTK_CURSOR_SIZENS 7: wx.CURSOR_SIZEWE, # VTK_CURSOR_SIZEWE 8: wx.CURSOR_SIZING, # VTK_CURSOR_SIZEALL 9: wx.CURSOR_HAND, # VTK_CURSOR_HAND 10: wx.CURSOR_CROSS, # VTK_CURSOR_CROSSHAIR } def BindEvents(self): """Binds all the necessary events for navigation, sizing, drawing. """ # refresh window by doing a Render self.Bind(wx.EVT_PAINT, self.OnPaint) # turn off background erase to reduce flicker self.Bind(wx.EVT_ERASE_BACKGROUND, lambda e: None) # Bind the events to the event converters self.Bind(wx.EVT_RIGHT_DOWN, self.OnButtonDown) self.Bind(wx.EVT_LEFT_DOWN, self.OnButtonDown) self.Bind(wx.EVT_MIDDLE_DOWN, self.OnButtonDown) self.Bind(wx.EVT_RIGHT_UP, self.OnButtonUp) self.Bind(wx.EVT_LEFT_UP, self.OnButtonUp) self.Bind(wx.EVT_MIDDLE_UP, self.OnButtonUp) self.Bind(wx.EVT_MOUSEWHEEL, self.OnMouseWheel) self.Bind(wx.EVT_MOTION, self.OnMotion) self.Bind(wx.EVT_ENTER_WINDOW, self.OnEnter) self.Bind(wx.EVT_LEAVE_WINDOW, self.OnLeave) # If we use EVT_KEY_DOWN instead of EVT_CHAR, capital versions # of all characters are always returned. EVT_CHAR also performs # other necessary keyboard-dependent translations. self.Bind(wx.EVT_CHAR, self.OnKeyDown) self.Bind(wx.EVT_KEY_UP, self.OnKeyUp) self.Bind(wx.EVT_SIZE, self.OnSize) # the wx 2.8.7.1 documentation states that you HAVE to handle # this event if you make use of CaptureMouse, which we do. if _useCapture and hasattr(wx, 'EVT_MOUSE_CAPTURE_LOST'): self.Bind(wx.EVT_MOUSE_CAPTURE_LOST, self.OnMouseCaptureLost) def __getattr__(self, attr): """Makes the object behave like a vtkGenericRenderWindowInteractor. 
""" if attr == '__vtk__': return lambda t=self._Iren: t elif hasattr(self._Iren, attr): return getattr(self._Iren, attr) else: raise AttributeError(self.__class__.__name__ + " has no attribute named " + attr) def CreateTimer(self, obj, evt): """ Creates a timer. """ self._timer = EventTimer(self) self._timer.Start(10, True) def DestroyTimer(self, obj, evt): """The timer is a one shot timer so will expire automatically. """ return 1 def _CursorChangedEvent(self, obj, evt): """Change the wx cursor if the renderwindow's cursor was changed. """ cur = self._cursor_map[obj.GetCurrentCursor()] c = wx.StockCursor(cur) self.SetCursor(c) def CursorChangedEvent(self, obj, evt): """Called when the CursorChangedEvent fires on the render window.""" # This indirection is needed since when the event fires, the # current cursor is not yet set so we defer this by which time # the current cursor should have been set. wx.CallAfter(self._CursorChangedEvent, obj, evt) def HideCursor(self): """Hides the cursor.""" c = wx.StockCursor(wx.CURSOR_BLANK) self.SetCursor(c) def ShowCursor(self): """Shows the cursor.""" rw = self._Iren.GetRenderWindow() cur = self._cursor_map[rw.GetCurrentCursor()] c = wx.StockCursor(cur) self.SetCursor(c) def GetDisplayId(self): """Function to get X11 Display ID from WX and return it in a format that can be used by VTK Python. We query the X11 Display with a new call that was added in wxPython 2.6.0.1. The call returns a SWIG object which we can query for the address and subsequently turn into an old-style SWIG-mangled string representation to pass to VTK. """ d = None try: d = wx.GetXDisplay() except AttributeError: # wx.GetXDisplay was added by Robin Dunn in wxPython 2.6.0.1 # if it's not available, we can't pass it. In general, # things will still work; on some setups, it'll break. pass else: # wx returns None on platforms where wx.GetXDisplay is not relevant if d: d = hex(d) # On wxPython-2.6.3.2 and above there is no leading '0x'. 
if not d.startswith('0x'): d = '0x' + d # VTK wants it as: _xxxxxxxx_p_void (SWIG pointer) d = '_%s_%s\0' % (d[2:], 'p_void') return d def OnMouseCaptureLost(self, event): """This is signalled when we lose mouse capture due to an external event, such as when a dialog box is shown. See the wx documentation. """ # the documentation seems to imply that by this time we've # already lost capture. I have to assume that we don't need # to call ReleaseMouse ourselves. if _useCapture and self._own_mouse: self._own_mouse = False def OnPaint(self,event): """Handles the wx.EVT_PAINT event for wxVTKRenderWindowInteractor. """ # wx should continue event processing after this handler. # We call this BEFORE Render(), so that if Render() raises # an exception, wx doesn't re-call OnPaint repeatedly. event.Skip() dc = wx.PaintDC(self) # make sure the RenderWindow is sized correctly self._Iren.GetRenderWindow().SetSize(self.GetSize()) # Tell the RenderWindow to render inside the wx.Window. if not self.__handle: # on relevant platforms, set the X11 Display ID d = self.GetDisplayId() if d and self.__has_painted: self._Iren.GetRenderWindow().SetDisplayId(d) # store the handle self.__handle = self.GetHandle() # and give it to VTK self._Iren.GetRenderWindow().SetWindowInfo(str(self.__handle)) # now that we've painted once, the Render() reparenting logic # is safe self.__has_painted = True self.Render() def OnSize(self,event): """Handles the wx.EVT_SIZE event for wxVTKRenderWindowInteractor. """ # event processing should continue (we call this before the # Render(), in case it raises an exception) event.Skip() try: width, height = event.GetSize() except: width = event.GetSize().width height = event.GetSize().height self._Iren.SetSize(width, height) self._Iren.ConfigureEvent() # this will check for __handle self.Render() def OnMotion(self,event): """Handles the wx.EVT_MOTION event for wxVTKRenderWindowInteractor. 
""" # event processing should continue # we call this early in case any of the VTK code raises an # exception. event.Skip() self._Iren.SetEventInformationFlipY(event.GetX(), event.GetY(), event.ControlDown(), event.ShiftDown(), chr(0), 0, None) self._Iren.MouseMoveEvent() def OnEnter(self,event): """Handles the wx.EVT_ENTER_WINDOW event for wxVTKRenderWindowInteractor. """ # event processing should continue event.Skip() self._Iren.SetEventInformationFlipY(event.GetX(), event.GetY(), event.ControlDown(), event.ShiftDown(), chr(0), 0, None) self._Iren.EnterEvent() def OnLeave(self,event): """Handles the wx.EVT_LEAVE_WINDOW event for wxVTKRenderWindowInteractor. """ # event processing should continue event.Skip() self._Iren.SetEventInformationFlipY(event.GetX(), event.GetY(), event.ControlDown(), event.ShiftDown(), chr(0), 0, None) self._Iren.LeaveEvent() def OnButtonDown(self,event): """Handles the wx.EVT_LEFT/RIGHT/MIDDLE_DOWN events for wxVTKRenderWindowInteractor. """ # allow wx event processing to continue # on wxPython 2.6.0.1, omitting this will cause problems with # the initial focus, resulting in the wxVTKRWI ignoring keypresses # until we focus elsewhere and then refocus the wxVTKRWI frame # we do it this early in case any of the following VTK code # raises an exception. 
event.Skip() ctrl, shift = event.ControlDown(), event.ShiftDown() self._Iren.SetEventInformationFlipY(event.GetX(), event.GetY(), ctrl, shift, chr(0), 0, None) button = 0 if event.RightDown(): self._Iren.RightButtonPressEvent() button = 'Right' elif event.LeftDown(): self._Iren.LeftButtonPressEvent() button = 'Left' elif event.MiddleDown(): self._Iren.MiddleButtonPressEvent() button = 'Middle' # save the button and capture mouse until the button is released # we only capture the mouse if it hasn't already been captured if _useCapture and not self._own_mouse: self._own_mouse = True self._mouse_capture_button = button self.CaptureMouse() def OnButtonUp(self,event): """Handles the wx.EVT_LEFT/RIGHT/MIDDLE_UP events for wxVTKRenderWindowInteractor. """ # event processing should continue event.Skip() button = 0 if event.RightUp(): button = 'Right' elif event.LeftUp(): button = 'Left' elif event.MiddleUp(): button = 'Middle' # if the same button is released that captured the mouse, and # we have the mouse, release it. # (we need to get rid of this as soon as possible; if we don't # and one of the event handlers raises an exception, mouse # is never released.) if _useCapture and self._own_mouse and \ button==self._mouse_capture_button: self.ReleaseMouse() self._own_mouse = False ctrl, shift = event.ControlDown(), event.ShiftDown() self._Iren.SetEventInformationFlipY(event.GetX(), event.GetY(), ctrl, shift, chr(0), 0, None) if button == 'Right': self._Iren.RightButtonReleaseEvent() elif button == 'Left': self._Iren.LeftButtonReleaseEvent() elif button == 'Middle': self._Iren.MiddleButtonReleaseEvent() def OnMouseWheel(self,event): """Handles the wx.EVT_MOUSEWHEEL event for wxVTKRenderWindowInteractor. 
""" # event processing should continue event.Skip() ctrl, shift = event.ControlDown(), event.ShiftDown() self._Iren.SetEventInformationFlipY(event.GetX(), event.GetY(), ctrl, shift, chr(0), 0, None) if event.GetWheelRotation() > 0: self._Iren.MouseWheelForwardEvent() else: self._Iren.MouseWheelBackwardEvent() def OnKeyDown(self,event): """Handles the wx.EVT_KEY_DOWN event for wxVTKRenderWindowInteractor. """ # event processing should continue event.Skip() ctrl, shift = event.ControlDown(), event.ShiftDown() keycode, keysym = event.GetKeyCode(), None key = chr(0) if keycode < 256: key = chr(keycode) # wxPython 2.6.0.1 does not return a valid event.Get{X,Y}() # for this event, so we use the cached position. (x,y)= self._Iren.GetEventPosition() self._Iren.SetEventInformation(x, y, ctrl, shift, key, 0, keysym) self._Iren.KeyPressEvent() self._Iren.CharEvent() def OnKeyUp(self,event): """Handles the wx.EVT_KEY_UP event for wxVTKRenderWindowInteractor. """ # event processing should continue event.Skip() ctrl, shift = event.ControlDown(), event.ShiftDown() keycode, keysym = event.GetKeyCode(), None key = chr(0) if keycode < 256: key = chr(keycode) self._Iren.SetEventInformationFlipY(event.GetX(), event.GetY(), ctrl, shift, key, 0, keysym) self._Iren.KeyReleaseEvent() def GetRenderWindow(self): """Returns the render window (vtkRenderWindow). """ return self._Iren.GetRenderWindow() def Render(self): """Actually renders the VTK scene on screen. 
""" RenderAllowed = 1 if not self.__RenderWhenDisabled: # the user doesn't want us to render when the toplevel frame # is disabled - first find the top level parent topParent = wx.GetTopLevelParent(self) if topParent: # if it exists, check whether it's enabled # if it's not enabeld, RenderAllowed will be false RenderAllowed = topParent.IsEnabled() if RenderAllowed: if self.__handle and self.__handle == self.GetHandle(): self._Iren.GetRenderWindow().Render() elif self.GetHandle() and self.__has_painted: # this means the user has reparented us; let's adapt to the # new situation by doing the WindowRemap dance self._Iren.GetRenderWindow().SetNextWindowInfo( str(self.GetHandle())) # make sure the DisplayId is also set correctly d = self.GetDisplayId() if d: self._Iren.GetRenderWindow().SetDisplayId(d) # do the actual remap with the new parent information self._Iren.GetRenderWindow().WindowRemap() # store the new situation self.__handle = self.GetHandle() self._Iren.GetRenderWindow().Render() def SetRenderWhenDisabled(self, newValue): """Change value of __RenderWhenDisabled ivar. If __RenderWhenDisabled is false (the default), this widget will not call Render() on the RenderWindow if the top level frame (i.e. the containing frame) has been disabled. This prevents recursive rendering during wx.SafeYield() calls. wx.SafeYield() can be called during the ProgressMethod() callback of a VTK object to have progress bars and other GUI elements updated - it does this by disabling all windows (disallowing user-input to prevent re-entrancy of code) and then handling all outstanding GUI events. However, this often triggers an OnPaint() method for wxVTKRWIs, resulting in a Render(), resulting in Update() being called whilst still in progress. 
""" self.__RenderWhenDisabled = bool(newValue) #-------------------------------------------------------------------- def wxVTKRenderWindowInteractorConeExample(): """Like it says, just a simple example """ # every wx app needs an app app = wx.App(False) # create the top-level frame, sizer and wxVTKRWI frame = wx.Frame(None, -1, "wxVTKRenderWindowInteractor", size=(400,400)) widget = wxVTKRenderWindowInteractor(frame, -1) sizer = wx.BoxSizer(wx.VERTICAL) sizer.Add(widget, 1, wx.EXPAND) frame.SetSizer(sizer) frame.Layout() # It would be more correct (API-wise) to call widget.Initialize() and # widget.Start() here, but Initialize() calls RenderWindow.Render(). # That Render() call will get through before we can setup the # RenderWindow() to render via the wxWidgets-created context; this # causes flashing on some platforms and downright breaks things on # other platforms. Instead, we call widget.Enable(). This means # that the RWI::Initialized ivar is not set, but in THIS SPECIFIC CASE, # that doesn't matter. widget.Enable(1) widget.AddObserver("ExitEvent", lambda o,e,f=frame: f.Close()) ren = vtk.vtkRenderer() widget.GetRenderWindow().AddRenderer(ren) cone = vtk.vtkConeSource() cone.SetResolution(8) coneMapper = vtk.vtkPolyDataMapper() coneMapper.SetInputConnection(cone.GetOutputPort()) coneActor = vtk.vtkActor() coneActor.SetMapper(coneMapper) ren.AddActor(coneActor) # show the window frame.Show() app.MainLoop() if __name__ == "__main__": wxVTKRenderWindowInteractorConeExample()
bsd-3-clause
-2,148,267,250,161,116,000
33.733711
79
0.561915
false
4.216614
false
false
false
openfisca/legislation-ipp-to-code
ipp_tax_benefit_tables_to_openfisca_parameters.py
1
24949
#! /usr/bin/env python # -*- coding: utf-8 -*- # OpenFisca -- A versatile microsimulation software # By: OpenFisca Team <contact@openfisca.fr> # # Copyright (C) 2011, 2012, 2013, 2014, 2015 OpenFisca Team # https://github.com/openfisca # # This file is part of OpenFisca. # # OpenFisca is free software; you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # OpenFisca is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. """Extract parameters from IPP's tax benefit tables. Note: Currently this script requires an XLS version of the tables. XLSX file must be converted to XLS before use. 
IPP = Institut des politiques publiques http://www.ipp.eu/en/tools/ipp-tax-and-benefit-tables/ http://www.ipp.eu/fr/outils/baremes-ipp/ """ import argparse import collections import datetime import itertools import logging import os import re import sys import textwrap from biryani import baseconv, custom_conv, datetimeconv, states from biryani import strings import xlrd app_name = os.path.splitext(os.path.basename(__file__))[0] baremes = [ # u'Chomage', # u'Impot Revenu', # u'Marche du travail', u'prelevements sociaux', # u'Prestations', # u'Taxation indirecte', # u'Taxation du capital', # u'Taxes locales', ] conv = custom_conv(baseconv, datetimeconv, states) forbiden_sheets = { # u'Impot Revenu': (u'Barème IGR',), u'prelevements sociaux': ( u'ASSIETTE PU', u'AUBRYI', # u'AUBRYII', u'CNRACL', u'FILLON', ), # u'Taxation indirecte': (u'TVA par produit',), } french_date_re = re.compile(ur'(?P<day>0?[1-9]|[12]\d|3[01])/(?P<month>0?[1-9]|1[0-2])/(?P<year>[12]\d{3})$') log = logging.getLogger(app_name) N_ = lambda message: message parameters = [] year_re = re.compile(ur'[12]\d{3}$') def input_to_french_date(value, state = None): if value is None: return None, None if state is None: state = conv.default_state match = french_date_re.match(value) if match is None: return value, state._(u'Invalid french date') return datetime.date(int(match.group('year')), int(match.group('month')), int(match.group('day'))), None cell_to_date = conv.condition( conv.test_isinstance(int), conv.pipe( conv.test_between(1914, 2020), conv.function(lambda year: datetime.date(year, 1, 1)), ), conv.pipe( conv.test_isinstance(basestring), conv.first_match( conv.pipe( conv.test(lambda date: year_re.match(date), error = 'Not a valid year'), conv.function(lambda year: datetime.date(year, 1, 1)), ), input_to_french_date, conv.iso8601_input_to_date, ), ), ) # currency_converter = conv.first_match( # conv.pipe( # conv.test_isinstance(basestring), # conv.cleanup_line, # conv.test_none(), # ), # conv.pipe( # 
conv.test_isinstance(tuple), # conv.test(lambda couple: len(couple) == 2, error = N_(u"Invalid couple length")), # conv.struct( # ( # conv.pipe( # conv.test_isinstance((float, int)), # conv.not_none, # ), # conv.pipe( # conv.test_isinstance(basestring), # conv.test_in([ # u'%', # u'EUR', # u'FRF', # ]), # ), # ), # ), # ), # ) currency_or_number_converter = conv.first_match( conv.test_isinstance(float), conv.test_isinstance(int), conv.pipe( conv.test_isinstance(basestring), conv.cleanup_line, conv.test_none(), ), conv.pipe( conv.test_isinstance(tuple), conv.test(lambda couple: len(couple) == 2, error = N_(u"Invalid couple length")), conv.struct( ( conv.pipe( conv.test_isinstance((float, int)), conv.not_none, ), conv.pipe( conv.test_isinstance(basestring), conv.test_in([ u'%', u'EUR', u'FRF', ]), ), ), ), ), ) def rename_keys(new_key_by_old_key): def rename_keys_converter(value, state = None): if value is None: return value, None renamed_value = value.__class__() for item_key, item_value in value.iteritems(): renamed_value[new_key_by_old_key.get(item_key, item_key)] = item_value return renamed_value, None return rename_keys_converter values_row_converter = conv.pipe( rename_keys({ u"Date d'effet": u"Date d'entrée en vigueur", u"Note": u"Notes", u"Publication au JO": u"Parution au JO", u"Publication JO": u"Parution au JO", u"Publication JO": u"Parution au JO", u"Référence": u"Références législatives", u"Référence législative": u"Références législatives", u"Références législatives (taux d'appel)": u"Références législatives", u"Références législatives (taux de cotisation)": u"Références législatives", u"Références législatives ou BOI": u"Références législatives", u"Remarques": u"Notes", }), conv.struct( collections.OrderedDict(( (u"Date d'entrée en vigueur", conv.pipe( conv.test_isinstance(basestring), conv.iso8601_input_to_date, conv.not_none, )), (u"Références législatives", conv.pipe( conv.test_isinstance(basestring), conv.cleanup_line, )), (u"Parution au JO", 
conv.pipe( conv.test_isinstance(basestring), conv.iso8601_input_to_date, conv.date_to_iso8601_str, )), (u"Notes", conv.pipe( conv.test_isinstance(basestring), conv.cleanup_line, )), (None, conv.pipe( conv.test_isinstance(basestring), conv.cleanup_line, conv.test_none(), )), )), default = currency_or_number_converter, ), ) def escape_xml(value): if value is None: return value if isinstance(value, str): return value.decode('utf-8') if not isinstance(value, unicode): value = unicode(value) return value.replace('&', '&amp;').replace('<', '&lt;').replace('>', '&gt;').replace('"', '&quot;') def get_hyperlink(sheet, row_index, column_index): return sheet.hyperlink_map.get((row_index, column_index)) def get_unmerged_cell_coordinates(row_index, column_index, merged_cells_tree): unmerged_cell_coordinates = merged_cells_tree.get(row_index, {}).get(column_index) if unmerged_cell_coordinates is None: return row_index, column_index return unmerged_cell_coordinates def main(): parser = argparse.ArgumentParser() parser.add_argument('-d', '--dir', default = 'Baremes_IPP_2015', help = 'path of IPP XLS directory') parser.add_argument('-v', '--verbose', action = 'store_true', default = False, help = "increase output verbosity") args = parser.parse_args() # args.dir = path logging.basicConfig(level = logging.DEBUG if args.verbose else logging.WARNING, stream = sys.stdout) root_node = dict( children = [], name = "root", text = textwrap.dedent(u"""\ Ce document présente l'ensemble de la législation permettant le calcul des contributions sociales, taxes sur les salaires et cotisations sociales. Il s'agit des barèmes bruts de la législation utilisés dans le micro-simulateur de l'IPP, TAXIPP. Les sources législatives (texte de loi, numéro du décret ou arrêté) ainsi que la date de publication au Journal Officiel de la République française (JORF) sont systématiquement indiquées. La première ligne du fichier (masquée) indique le nom des paramètres dans TAXIPP. 
Citer cette source : Barèmes IPP: prélèvements sociaux, Institut des politiques publiques, avril 2014. Auteurs : Antoine Bozio, Julien Grenet, Malka Guillot, Laura Khoury et Marianne Tenand Contacts : marianne.tenand@ipp.eu; antoine.bozio@ipp.eu; malka.guillot@ipp.eu Licence : Licence ouverte / Open Licence """).split(u'\n'), title = u"Barème IPP", type = u'NODE', ) for bareme in baremes: xls_path = os.path.join(args.dir.decode('utf-8'), u"Baremes IPP - {0}.xls".format(bareme)) if not os.path.exists(xls_path): log.warning("Skipping file {} that doesn't exist: {}".format(bareme, xls_path)) continue log.info(u'Parsing file {}'.format(bareme)) book = xlrd.open_workbook(filename = xls_path, formatting_info = True) sheet_names = [ sheet_name for sheet_name in book.sheet_names() if not sheet_name.startswith((u'Abréviations', u'Outline')) and sheet_name not in forbiden_sheets.get( bareme, []) ] sheet_title_by_name = {} for sheet_name in sheet_names: log.info(u' Parsing sheet {}'.format(sheet_name)) sheet = book.sheet_by_name(sheet_name) # Extract coordinates of merged cells. merged_cells_tree = {} for row_low, row_high, column_low, column_high in sheet.merged_cells: for row_index in range(row_low, row_high): cell_coordinates_by_merged_column_index = merged_cells_tree.setdefault( row_index, {}) for column_index in range(column_low, column_high): cell_coordinates_by_merged_column_index[column_index] = (row_low, column_low) if sheet_name.startswith(u'Sommaire'): # Associate the titles of the sheets to their Excel names. 
for row_index in range(sheet.nrows): linked_sheet_number = transform_xls_cell_to_json(book, sheet, merged_cells_tree, row_index, 2) if isinstance(linked_sheet_number, int): linked_sheet_title = transform_xls_cell_to_str(book, sheet, merged_cells_tree, row_index, 3) if linked_sheet_title is not None: hyperlink = get_hyperlink(sheet, row_index, 3) if hyperlink is not None and hyperlink.type == u'workbook': linked_sheet_name = hyperlink.textmark.split(u'!', 1)[0].strip(u'"').strip(u"'") sheet_title_by_name[linked_sheet_name] = linked_sheet_title continue descriptions_rows = [] labels_rows = [] notes_rows = [] state = 'taxipp_names' taxipp_names_row = None values_rows = [] for row_index in range(sheet.nrows): columns_count = len(sheet.row_values(row_index)) if state == 'taxipp_names': taxipp_names_row = [ taxipp_name for taxipp_name in ( transform_xls_cell_to_str(book, sheet, merged_cells_tree, row_index, column_index) for column_index in range(columns_count) ) ] state = 'labels' continue if state == 'labels': first_cell_value = transform_xls_cell_to_json(book, sheet, merged_cells_tree, row_index, 0) date_or_year, error = conv.pipe( conv.test_isinstance((int, basestring)), cell_to_date, conv.not_none, )(first_cell_value, state = conv.default_state) if error is not None: # First cell of row is not a date => Assume it is a label. labels_rows.append([ transform_xls_cell_to_str(book, sheet, merged_cells_tree, row_index, column_index) for column_index in range(columns_count) ]) continue state = 'values' if state == 'values': first_cell_value = transform_xls_cell_to_json(book, sheet, merged_cells_tree, row_index, 0) if first_cell_value is None or isinstance(first_cell_value, (int, basestring)): date_or_year, error = cell_to_date(first_cell_value, state = conv.default_state) if error is None: # First cell of row is a valid date or year. 
values_row = [ transform_xls_cell_to_json(book, sheet, merged_cells_tree, row_index, column_index) for column_index in range(columns_count) ] if date_or_year is not None: assert date_or_year.year < 2601, 'Invalid date {} in {} at row {}'.format(date_or_year, sheet_name, row_index + 1) values_rows.append(values_row) continue if all(value in (None, u'') for value in values_row): # If first cell is empty and all other cells in line are also empty, ignore this line. continue # First cell has no date and other cells in row are not empty => Assume it is a note. state = 'notes' if state == 'notes': first_cell_value = transform_xls_cell_to_json(book, sheet, merged_cells_tree, row_index, 0) if isinstance(first_cell_value, basestring) and first_cell_value.strip().lower() == 'notes': notes_rows.append([ transform_xls_cell_to_str(book, sheet, merged_cells_tree, row_index, column_index) for column_index in range(columns_count) ]) continue state = 'description' assert state == 'description' descriptions_rows.append([ transform_xls_cell_to_str(book, sheet, merged_cells_tree, row_index, column_index) for column_index in range(columns_count) ]) text_lines = [] for row in notes_rows: text_lines.append(u' | '.join( cell for cell in row if cell )) if text_lines: text_lines.append(None) for row in descriptions_rows: text_lines.append(u' | '.join( cell for cell in row if cell )) sheet_title = sheet_title_by_name.get(sheet_name) if sheet_title is None: log.warning(u"Missing title for sheet {} in summary".format(sheet_name)) continue labels = [] for labels_row in labels_rows: for column_index, label in enumerate(labels_row): if not label: continue while column_index >= len(labels): labels.append([]) labels_column = labels[column_index] if not labels_column or labels_column[-1] != label: labels_column.append(label) labels = [ tuple(labels_column1) if len(labels_column1) > 1 else labels_column1[0] for labels_column1 in labels ] cell_by_label_rows = [] for value_row in values_rows: 
cell_by_label = collections.OrderedDict(itertools.izip(labels, value_row)) cell_by_label, errors = values_row_converter(cell_by_label, state = conv.default_state) assert errors is None, "Errors in {}:\n{}".format(cell_by_label, errors) cell_by_label_rows.append(cell_by_label) sheet_node = dict( children = [], name = strings.slugify(sheet_name, separator = u'_'), text = text_lines, title = sheet_title, type = u'NODE', ) root_node['children'].append(sheet_node) for taxipp_name, labels_column in zip(taxipp_names_row, labels): if not taxipp_name or taxipp_name in (u'date',): continue variable_node = dict( children = [], name = strings.slugify(taxipp_name, separator = u'_'), title = u' - '.join(labels_column) if isinstance(labels_column, tuple) else labels_column, type = u'CODE', ) sheet_node['children'].append(variable_node) for cell_by_label in cell_by_label_rows: amount_and_unit = cell_by_label[labels_column] variable_node['children'].append(dict( law_reference = cell_by_label[u'Références législatives'], notes = cell_by_label[u'Notes'], publication_date = cell_by_label[u"Parution au JO"], start_date = cell_by_label[u"Date d'entrée en vigueur"], type = u'VALUE', unit = amount_and_unit[1] if isinstance(amount_and_unit, tuple) else None, value = amount_and_unit[0] if isinstance(amount_and_unit, tuple) else amount_and_unit, )) # dates = [ # conv.check(cell_to_date)( # row[1] if bareme == u'Impot Revenu' else row[0], # state = conv.default_state, # ) # for row in values_rows # ] # for column_index, taxipp_name in enumerate(taxipp_names_row): # if taxipp_name and strings.slugify(taxipp_name) not in ( # 'date', # 'date-ir', # 'date-rev', # 'note', # 'notes', # 'ref-leg', # ): # vector = [ # transform_cell_value(date, row[column_index]) # for date, row in zip(dates, values_rows) # ] # vector = [ # cell if not isinstance(cell, basestring) or cell == u'nc' else '-' # for cell in vector # ] # # vector_by_taxipp_name[taxipp_name] = pd.Series(vector, index = dates) # 
vector_by_taxipp_name[taxipp_name] = vector # print_node(root_node) return 0 def print_node(node, indent = 0): attributes = node.copy() children = attributes.pop('children', None) text = attributes.pop('text', None) if text: while text and not (text[0] and text[0].strip()): del text[0] while text and not (text[-1] and text[-1].strip()): del text[-1] type = attributes.pop('type') print u'{}<{}{}{}>'.format( u' ' * indent, type, u''.join( u' {}="{}"'.format(name, escape_xml(value)) for name, value in sorted(attributes.iteritems()) if value is not None ), u'' if children or text else u'/', ).encode('utf-8') if text: for line in text: if line and line.strip(): print u'{}{}'.format(u' ' * (indent + 1), escape_xml(line)).encode('utf-8') else: print if children or text: for child in children: print_node(child, indent = indent + 1) print u'{}</{}>'.format(u' ' * indent, type).encode('utf-8') def transform_cell_value(date, cell_value): if isinstance(cell_value, tuple): value, currency = cell_value if currency == u'FRF': if date < datetime.date(1960, 1, 1): return round(value / (100 * 6.55957), 2) return round(value / 6.55957, 2) return value return cell_value def transform_xls_cell_to_json(book, sheet, merged_cells_tree, row_index, column_index): """Convert an XLS cell (type & value) to an unicode string. 
Code taken from http://code.activestate.com/recipes/546518-simple-conversion-of-excel-files-into-csv-and-yaml/ Type Codes: EMPTY 0 TEXT 1 a Unicode string NUMBER 2 float DATE 3 float BOOLEAN 4 int; 1 means TRUE, 0 means FALSE ERROR 5 """ unmerged_cell_coordinates = merged_cells_tree.get(row_index, {}).get(column_index) if unmerged_cell_coordinates is None: unmerged_row_index = row_index unmerged_column_index = column_index else: unmerged_row_index, unmerged_column_index = unmerged_cell_coordinates type = sheet.row_types(unmerged_row_index)[unmerged_column_index] value = sheet.row_values(unmerged_row_index)[unmerged_column_index] if type == 0: value = None elif type == 1: if not value: value = None elif type == 2: # NUMBER value_int = int(value) if value_int == value: value = value_int xf_index = sheet.cell_xf_index(row_index, column_index) xf = book.xf_list[xf_index] # Get an XF object. format_key = xf.format_key format = book.format_map[format_key] # Get a Format object. format_str = format.format_str # This is the "number format string". 
if format_str in ( u'0', u'General', u'GENERAL', u'_-* #,##0\ _€_-;\-* #,##0\ _€_-;_-* \-??\ _€_-;_-@_-', ) or format_str.endswith(u'0.00'): return value if u'€' in format_str: return (value, u'EUR') if u'FRF' in format_str or ur'\F\R\F' in format_str: return (value, u'FRF') assert format_str.endswith(u'%'), 'Unexpected format "{}" for value: {}'.format(format_str, value) return (value, u'%') elif type == 3: # DATE y, m, d, hh, mm, ss = xlrd.xldate_as_tuple(value, book.datemode) date = u'{0:04d}-{1:02d}-{2:02d}'.format(y, m, d) if any(n != 0 for n in (y, m, d)) else None value = u'T'.join( fragment for fragment in ( date, (u'{0:02d}:{1:02d}:{2:02d}'.format(hh, mm, ss) if any(n != 0 for n in (hh, mm, ss)) or date is None else None), ) if fragment is not None ) elif type == 4: value = bool(value) elif type == 5: # ERROR value = xlrd.error_text_from_code[value] # elif type == 6: # TODO # else: # assert False, str((type, value)) return value def transform_xls_cell_to_str(book, sheet, merged_cells_tree, row_index, column_index): cell = transform_xls_cell_to_json(book, sheet, merged_cells_tree, row_index, column_index) assert cell is None or isinstance(cell, basestring), u'Expected a string. Got: {}'.format(cell).encode('utf-8') return cell if __name__ == "__main__": sys.exit(main())
agpl-3.0
6,170,964,241,538,797,000
38.871795
120
0.521463
false
3.857364
true
false
false
vecnet/om
website/apps/ts_om/views/ScenarioListView.py
1
1416
# -*- coding: utf-8 -*- # # This file is part of the VecNet OpenMalaria Portal. # For copyright and licensing information about this package, see the # NOTICE.txt and LICENSE.txt files in its top-level directory; they are # available at https://github.com/vecnet/om # # This Source Code Form is subject to the terms of the Mozilla Public # License (MPL), version 2.0. If a copy of the MPL was not distributed # with this file, You can obtain one at http://mozilla.org/MPL/2.0/. from django.utils.decorators import method_decorator from django.views.decorators.csrf import ensure_csrf_cookie from django.views.generic import ListView from website.apps.ts_om.models import Scenario as ScenarioModel class ScenarioListView(ListView): template_name = 'ts_om/list.html' paginate_by = 10 model = ScenarioModel # ensure_csrf_cookie is to send CSRF cookie with this view - to ensure that DeleteView is working properly @method_decorator(ensure_csrf_cookie) def dispatch(self, request, *args, **kwargs): return super(ScenarioListView, self).dispatch(request, *args, **kwargs) def get_queryset(self): scenarios = ScenarioModel.objects.filter(user=self.request.user, deleted=False).order_by('-last_modified') return scenarios def get_context_data(self, **kwargs): context = super(ScenarioListView, self).get_context_data(**kwargs) return context
mpl-2.0
7,419,997,059,402,621,000
38.333333
114
0.731638
false
3.847826
false
false
false
FilipeMaia/h5proxy
h5proxy/serializer.py
1
5488
import numpy import h5py import cPickle as pickle class Serializer(object): def __init__(self, parent, socket = None): self._parent = parent self._socket = socket if(socket): import threading self.lock = threading.Lock() else: # Use an internal server is there's no socket self._server = Server(None) def call(self, data): if(self._socket): with self.lock: self.send(data) return self.recv() else: if(data['func'] == 'attrs'): ret, _ = self._serialize(self._server.handleRPC(data),[],data['fileName'],data['path']) return self._deserialize(ret) else: ret, _ = self._serialize(self._server.handleRPC(data),[],None,None) return self._deserialize(ret) def recv(self): data = pickle.loads(self._socket.recv()) ret = self._deserialize(data) return ret def _deserialize(self, data): if(isinstance(data, dict)): if('className' in data): if(data['className'] == "Dataset"): data = Dataset(self._parent, data['fileName'], data['path']) elif(data['className'] == "Group"): data = Group(self._parent, data['fileName'], data['path']) elif(data['className'] == "Attributes"): data = Attributes(self._parent, data['fileName'], data['path']) elif(data['className'] == "SoftLink"): data = h5py.SoftLink(data['path']) elif(data['className'] == "ExternalLink"): data = h5py.ExternalLink(data['fileName'],data['path']) elif(data['className'] == "exception"): exc_type = data['exc_type'] exc_value = data['exc_value'] raise exc_type(exc_value) elif(data['className'] == "ndarray" and self._socket): d = self._socket.recv() data = numpy.frombuffer(buffer(d), dtype=data['dtype']).reshape(data['shape']) elif(data['className'] == "File"): pass else: raise RuntimeError('Unknown class: %s' % data['className']) else: # We need to sort to be able to receive any possible arrays # in the correct order for k in sorted(data.keys()): data[k] = self._deserialize(data[k]) elif isinstance(data, list) or isinstance(data, tuple): ldata = [None]*len(data) for i in range(len(data)): ldata[i] = self._deserialize(data[i]) data = 
type(data)(ldata) return data def send(self,data, fileName = None, path = None): data, arrays = self._serialize(data, [], fileName, path) flags = 0 if(len(arrays)): import zmq flags = zmq.SNDMORE self._socket.send(pickle.dumps(data), flags) for i in range(len(arrays)): # When sending the last array change the flag back if(i == len(arrays) -1): flags = 0 self._socket.send(arrays[i], flags) def _serialize(self, data, arrays, fileName, path): if type(data) is h5py.Dataset: data = dict( className = "Dataset", fileName = data.file.filename, path = data.name ) elif type(data) is h5py.Group: data = dict( className = "Group", fileName = data.file.filename, path = data.name ) elif type(data) is h5py.AttributeManager: data = dict( className = "Attributes", fileName = fileName, path = path, ) elif type(data) is h5py.File: data = dict( className = "File", fileName = data.file.filename, path = '' ) elif type(data) is h5proxy.ExternalLink: data = dict( className = "ExternalLink", fileName = data.filename, path = data.path ) elif type(data) is h5proxy.SoftLink: data = dict( className = "SoftLink", path = data.path ) elif isinstance(data, numpy.ndarray) and self._socket: arrays.append(data) data = dict( className = "ndarray", dtype = data.dtype, shape = data.shape ) elif isinstance(data, dict): # We need to sort to be able to receive any possible arrays # in the correct order for k in sorted(data.keys()): data[k], arrays = self._serialize(data[k], arrays, fileName, path) elif isinstance(data, list) or isinstance(data, tuple): ldata = [None]*len(data) for i in range(len(data)): ldata[i], arrays = self._serialize(data[i], arrays, fileName, path) data = type(data)(ldata) return data, arrays from .h5proxy import Dataset,Group,File,Attributes, SoftLink, ExternalLink import h5proxy from .server import Server
bsd-2-clause
-8,297,233,515,295,991,000
36.589041
103
0.493258
false
4.447326
false
false
false
rssalessio/PythonVRFT
test/test_vrft.py
1
3518
# test_vrft.py - Unittest for VRFT # # Code author: [Alessio Russo - alessior@kth.se] # Last update: 10th January 2021, by alessior@kth.se # # Copyright (c) [2017-2021] Alessio Russo [alessior@kth.se]. All rights reserved. # This file is part of PythonVRFT. # PythonVRFT is free software: you can redistribute it and/or modify # it under the terms of the MIT License. You should have received a copy of # the MIT License along with PythonVRFT. # If not, see <https://opensource.org/licenses/MIT>. # from unittest import TestCase import numpy as np import scipy.signal as scipysig from vrft.iddata import * from vrft.vrft_algo import * from vrft.extended_tf import ExtendedTF class TestVRFT(TestCase): def test_vrft(self): t_start = 0 t_step = 1e-2 t_ends = [10, 10 + t_step] expected_theta = np.array([1.93220784, -1.05808206, 1.26623764, 0.0088772]) expected_loss = 0.00064687904235295 for t_end in t_ends: t = np.arange(t_start, t_end, t_step) u = np.ones(len(t)).tolist() num = [0.1] den = [1, -0.9] sys = scipysig.TransferFunction(num, den, dt=t_step) t, y = scipysig.dlsim(sys, u, t) y = y[:,0] data = iddata(y,u,t_step,[0]) refModel = ExtendedTF([0.2], [1, -0.8], dt=t_step) prefilter = refModel * (1-refModel) control = [ExtendedTF([1], [1,0], dt=t_step), ExtendedTF([1], [1,0,0], dt=t_step), ExtendedTF([1], [1,0,0,0], dt=t_step), ExtendedTF([1, 0], [1,1], dt=t_step)] theta1, _, loss1, _ = compute_vrft(data, refModel, control, prefilter) theta2, _, loss2, _ = compute_vrft([data], refModel, control, prefilter) theta3, _, loss3, _ = compute_vrft([data, data], refModel, control, prefilter) self.assertTrue(np.isclose(loss1, loss2)) self.assertTrue(np.isclose(loss1, loss3)) self.assertTrue(np.linalg.norm(theta1-theta2)<1e-15) self.assertTrue(np.linalg.norm(theta1-theta3)<1e-15) self.assertTrue(np.linalg.norm(theta1-expected_theta, np.infty) < 1e-5) self.assertTrue(abs(expected_loss - loss1) < 1e-5) def test_iv(self): t_start = 0 t_step = 1e-2 t_ends = [10, 10 + t_step] for t_end in 
t_ends: t = np.arange(t_start, t_end, t_step) u = np.ones(len(t)).tolist() num = [0.1] den = [1, -0.9] sys = scipysig.TransferFunction(num, den, dt=t_step) _, y = scipysig.dlsim(sys, u, t) y = y.flatten() + 1e-2 * np.random.normal(size=t.size) data1 = iddata(y,u,t_step,[0]) _, y = scipysig.dlsim(sys, u, t) y = y.flatten() + 1e-2 * np.random.normal(size=t.size) data2 = iddata(y,u,t_step,[0]) refModel = ExtendedTF([0.2], [1, -0.8], dt=t_step) prefilter = refModel * (1-refModel) control = [ExtendedTF([1], [1,0], dt=t_step), ExtendedTF([1], [1,0,0], dt=t_step), ExtendedTF([1], [1,0,0,0], dt=t_step), ExtendedTF([1, 0], [1,1], dt=t_step)] with self.assertRaises(ValueError): compute_vrft(data1, refModel, control, prefilter, iv=True) compute_vrft([data1, data2], refModel, control, prefilter, iv=True)
gpl-3.0
-6,444,991,884,711,760,000
36.425532
90
0.545765
false
3.043253
true
false
false
vmonaco/single-hashing
single_hash.py
1
2647
''' Created on Nov 20, 2012 @author: vinnie ''' from utils import * def in1d_running(q, A): ''' j where q[k] in A for 0 <= k <= j This is the maximum index j where q[0:j] is in A ''' j = 0 while j < len(q) and q[j] in A: j += 1 return j def s_A(Q, A): ''' s(A) = {(i,j) | q[i,k] in A for 0 <= k <= j} The set of all coordinates where Q[i,0:k] is in A for 0 <= k <= j, where j is defined by the ind1d_running function above ''' return [(i, k) for i in A for k in range(in1d_running(Q[i], A))] def P(Q, A, m): ''' Given the single hashing scheme defined by matrix Q, compute the probably that the first |A| slots are occupied by the slots in A ''' if len(A) == 0: return 0 elif len(A) == 1: return 1.0 / m else: return (1.0 / m) * sum([P(Q, tuple(a for a in A if a != Q[i][j]), m) for (i, j) in s_A(Q, A)]) def P_map(Q): ''' Compute P(A) for each n-combination in [0,1,2...m) for 0 <= n < m Also compute P( [0,1,2...m] ). Only one combination is needed, this should always be equal to 1.0 ''' m = len(Q) m_range = range(m) p = {A: P(Q, A, m) for A in generate_A(m_range)} return p def delta_prime(Q): ''' The average number of spaces probed for each insertion by the time the table is full. 
This is the best measure for the efficiency of a single hashing scheme ''' m = len(Q) m_range = [row[0] for row in Q] set_A = generate_A(m_range) return (1.0 / (m ** 2)) * sum(P(Q, A, m) * len(s_A(Q, A)) for A in set_A) def d_prime(Q, n): ''' The average number of probes needed to insert the nth element into a table with single hashing scheme Q ''' m = len(Q) m_range = [row[0] for row in Q] assert n <= m set_A = [A for A in generate_A(m_range) if len(A) == n - 1] return (1.0 / m) * sum(P(Q, A, m) * len(s_A(Q, A)) for A in set_A) def search_random(m, N): from operator import itemgetter import matplotlib.pyplot as plt import random random.seed(1234) score_Q = [(delta_prime(Q), Q) for Q in [random_Q(m) for _ in range(N)]] min_score, min_Q = min(score_Q, key=itemgetter(0)) max_score, max_Q = max(score_Q, key=itemgetter(0)) print('Best score:', min_score, min_Q) print('Worst score:', max_score, max_Q) plt.hist(list(zip(*score_Q))[0], bins=100, normed=True) plt.xlabel('Probes per insertion') plt.ylabel('Density') plt.savefig('m%d_scores.png' % m) return if __name__ == '__main__': search_random(5, 10000)
mit
-4,057,426,931,684,197,400
24.451923
79
0.553457
false
2.870933
false
false
false
nudomarinero/mltier1
test/test_extinction.py
1
3308
""" Test the extinction module """ from __future__ import print_function import sys import os import unittest import numpy as np import requests.exceptions from astropy import units as u import numpy.testing as npt # Append the module to test sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..")) from extinction import (query, f99_extinction, get_filter_extinction, FILTER_URIS) # Expected data response_gal_0_0 = {'EBV_SFD': 99.69757} response_equ_0_0 = {'EBV_SFD': 0.03182} response_equ_array = {'EBV_SFD': [0.03182, 0.03301]} class TestQueryExtinction(unittest.TestCase): """ Test the query of extiction data """ def test_query_position_gal_0_0(self): self.assertEqual(query(0, 0, coordsys="gal"), response_gal_0_0) def test_query_position_equ_0_0(self): self.assertEqual(query(0, 0), response_equ_0_0) def test_query_equ_array(self): self.assertEqual(query([0, 1], [0, 1]), response_equ_array) def test_query_equ_out_limits(self): with self.assertRaises(requests.exceptions.HTTPError): query(100, 380, verbose=False) def test_query_out_of_size(self): #with self.assertRaises(requests.exceptions.HTTPError): #print(query(list(np.zeros(50000)), list(np.zeros(50000)))) pass class TestExtinctionCurve(unittest.TestCase): """ Test the computing of the extinction curve from Fitzpatrick 99 """ def test_fir_wavelenghts(self): self.assertEqual(f99_extinction(500*u.micron), [0.0010772042713472958]) def test_normal_wavelenghts(self): self.assertEqual(f99_extinction(1*u.micron), [1.16611075588672]) def test_normal_wavelenghts_change_units(self): npt.assert_array_max_ulp(f99_extinction(10000*u.Angstrom), np.array(1.16611075588672), dtype="float32") def test_normal_wavelenghts_array(self): npt.assert_array_max_ulp(f99_extinction([1, 1]*u.micron), np.array([1.16611075588672, 1.16611075588672]), dtype="float32") class TestFilterExtinction(unittest.TestCase): """ Test the retrieval and computing of the extinction associated to the main filters used. 
""" def test_PanSTARRS_g(self): self.assertEqual(get_filter_extinction(FILTER_URIS["g"]), 3.6121011749827514) def test_PanSTARRS_r(self): self.assertEqual(get_filter_extinction(FILTER_URIS["r"]), 2.5687511251039137) def test_PanSTARRS_i(self): self.assertEqual(get_filter_extinction(FILTER_URIS["i"]), 1.897167710862949) def test_PanSTARRS_z(self): self.assertEqual(get_filter_extinction(FILTER_URIS["z"]), 1.4948335405125801) def test_PanSTARRS_y(self): self.assertEqual(get_filter_extinction(FILTER_URIS["y"]), 1.2478667172854474) def test_WISE_W1(self): self.assertEqual(get_filter_extinction(FILTER_URIS["W1"]), 0.19562893570345422) def test_WISE_W2(self): self.assertEqual(get_filter_extinction(FILTER_URIS["W2"]), 0.13438419437135862) def test_WISE_W3(self): self.assertEqual(get_filter_extinction(FILTER_URIS["W3"]), 0.046003159224496736) def test_WISE_W4(self): self.assertEqual(get_filter_extinction(FILTER_URIS["W4"]), 0.024851094687942197) if __name__ == '__main__': unittest.main()
gpl-3.0
-3,343,834,670,868,604,000
33.458333
130
0.682285
false
3.018248
true
false
false
DailyActie/Surrogate-Model
01-codes/scipy-master/scipy/optimize/cobyla.py
1
9922
""" Interface to Constrained Optimization By Linear Approximation Functions --------- .. autosummary:: :toctree: generated/ fmin_cobyla """ from __future__ import division, print_function, absolute_import import numpy as np from scipy._lib.six import callable from scipy.optimize import _cobyla from .optimize import OptimizeResult, _check_unknown_options try: from itertools import izip except ImportError: izip = zip __all__ = ['fmin_cobyla'] def fmin_cobyla(func, x0, cons, args=(), consargs=None, rhobeg=1.0, rhoend=1e-4, iprint=1, maxfun=1000, disp=None, catol=2e-4): """ Minimize a function using the Constrained Optimization BY Linear Approximation (COBYLA) method. This method wraps a FORTRAN implementation of the algorithm. Parameters ---------- func : callable Function to minimize. In the form func(x, \\*args). x0 : ndarray Initial guess. cons : sequence Constraint functions; must all be ``>=0`` (a single function if only 1 constraint). Each function takes the parameters `x` as its first argument, and it can return either a single number or an array or list of numbers. args : tuple, optional Extra arguments to pass to function. consargs : tuple, optional Extra arguments to pass to constraint functions (default of None means use same extra arguments as those passed to func). Use ``()`` for no extra arguments. rhobeg : float, optional Reasonable initial changes to the variables. rhoend : float, optional Final accuracy in the optimization (not precisely guaranteed). This is a lower bound on the size of the trust region. iprint : {0, 1, 2, 3}, optional Controls the frequency of output; 0 implies no output. Deprecated. disp : {0, 1, 2, 3}, optional Over-rides the iprint interface. Preferred. maxfun : int, optional Maximum number of function evaluations. catol : float, optional Absolute tolerance for constraint violations. Returns ------- x : ndarray The argument that minimises `f`. 
See also -------- minimize: Interface to minimization algorithms for multivariate functions. See the 'COBYLA' `method` in particular. Notes ----- This algorithm is based on linear approximations to the objective function and each constraint. We briefly describe the algorithm. Suppose the function is being minimized over k variables. At the jth iteration the algorithm has k+1 points v_1, ..., v_(k+1), an approximate solution x_j, and a radius RHO_j. (i.e. linear plus a constant) approximations to the objective function and constraint functions such that their function values agree with the linear approximation on the k+1 points v_1,.., v_(k+1). This gives a linear program to solve (where the linear approximations of the constraint functions are constrained to be non-negative). However the linear approximations are likely only good approximations near the current simplex, so the linear program is given the further requirement that the solution, which will become x_(j+1), must be within RHO_j from x_j. RHO_j only decreases, never increases. The initial RHO_j is rhobeg and the final RHO_j is rhoend. In this way COBYLA's iterations behave like a trust region algorithm. Additionally, the linear program may be inconsistent, or the approximation may give poor improvement. For details about how these issues are resolved, as well as how the points v_i are updated, refer to the source code or the references below. References ---------- Powell M.J.D. (1994), "A direct search optimization method that models the objective and constraint functions by linear interpolation.", in Advances in Optimization and Numerical Analysis, eds. S. Gomez and J-P Hennart, Kluwer Academic (Dordrecht), pp. 51-67 Powell M.J.D. (1998), "Direct search algorithms for optimization calculations", Acta Numerica 7, 287-336 Powell M.J.D. 
(2007), "A view of algorithms for optimization without derivatives", Cambridge University Technical Report DAMTP 2007/NA03 Examples -------- Minimize the objective function f(x,y) = x*y subject to the constraints x**2 + y**2 < 1 and y > 0:: >>> def objective(x): ... return x[0]*x[1] ... >>> def constr1(x): ... return 1 - (x[0]**2 + x[1]**2) ... >>> def constr2(x): ... return x[1] ... >>> from scipy.optimize import fmin_cobyla >>> fmin_cobyla(objective, [0.0, 0.1], [constr1, constr2], rhoend=1e-7) array([-0.70710685, 0.70710671]) The exact solution is (-sqrt(2)/2, sqrt(2)/2). """ err = "cons must be a sequence of callable functions or a single" \ " callable function." try: len(cons) except TypeError: if callable(cons): cons = [cons] else: raise TypeError(err) else: for thisfunc in cons: if not callable(thisfunc): raise TypeError(err) if consargs is None: consargs = args # build constraints con = tuple({'type': 'ineq', 'fun': c, 'args': consargs} for c in cons) # options if disp is not None: iprint = disp opts = {'rhobeg': rhobeg, 'tol': rhoend, 'iprint': iprint, 'disp': iprint != 0, 'maxiter': maxfun, 'catol': catol} sol = _minimize_cobyla(func, x0, args, constraints=con, **opts) if iprint > 0 and not sol['success']: print("COBYLA failed to find a solution: %s" % (sol.message,)) return sol['x'] def _minimize_cobyla(fun, x0, args=(), constraints=(), rhobeg=1.0, tol=1e-4, iprint=1, maxiter=1000, disp=False, catol=2e-4, **unknown_options): """ Minimize a scalar function of one or more variables using the Constrained Optimization BY Linear Approximation (COBYLA) algorithm. Options ------- rhobeg : float Reasonable initial changes to the variables. tol : float Final accuracy in the optimization (not precisely guaranteed). This is a lower bound on the size of the trust region. disp : bool Set to True to print convergence messages. If False, `verbosity` is ignored as set to 0. maxiter : int Maximum number of function evaluations. 
catol : float Tolerance (absolute) for constraint violations """ _check_unknown_options(unknown_options) maxfun = maxiter rhoend = tol if not disp: iprint = 0 # check constraints if isinstance(constraints, dict): constraints = (constraints,) for ic, con in enumerate(constraints): # check type try: ctype = con['type'].lower() except KeyError: raise KeyError('Constraint %d has no type defined.' % ic) except TypeError: raise TypeError('Constraints must be defined using a ' 'dictionary.') except AttributeError: raise TypeError("Constraint's type must be a string.") else: if ctype != 'ineq': raise ValueError("Constraints of type '%s' not handled by " "COBYLA." % con['type']) # check function if 'fun' not in con: raise KeyError('Constraint %d has no function defined.' % ic) # check extra arguments if 'args' not in con: con['args'] = () # m is the total number of constraint values # it takes into account that some constraints may be vector-valued cons_lengths = [] for c in constraints: f = c['fun'](x0, *c['args']) try: cons_length = len(f) except TypeError: cons_length = 1 cons_lengths.append(cons_length) m = sum(cons_lengths) def calcfc(x, con): f = fun(x, *args) i = 0 for size, c in izip(cons_lengths, constraints): con[i: i + size] = c['fun'](x, *c['args']) i += size return f info = np.zeros(4, np.float64) xopt, info = _cobyla.minimize(calcfc, m=m, x=np.copy(x0), rhobeg=rhobeg, rhoend=rhoend, iprint=iprint, maxfun=maxfun, dinfo=info) if info[3] > catol: # Check constraint violation info[0] = 4 return OptimizeResult(x=xopt, status=int(info[0]), success=info[0] == 1, message={1: 'Optimization terminated successfully.', 2: 'Maximum number of function evaluations has ' 'been exceeded.', 3: 'Rounding errors are becoming damaging in ' 'COBYLA subroutine.', 4: 'Did not converge to a solution satisfying ' 'the constraints. See `maxcv` for magnitude ' 'of violation.' 
}.get(info[0], 'Unknown exit status.'), nfev=int(info[1]), fun=info[2], maxcv=info[3]) if __name__ == '__main__': from math import sqrt def fun(x): return x[0] * x[1] def cons(x): return 1 - x[0] ** 2 - x[1] ** 2 x = fmin_cobyla(fun, [1., 1.], cons, iprint=3, disp=1) print('\nTheoretical solution: %e, %e' % (1. / sqrt(2.), -1. / sqrt(2.)))
mit
5,685,683,423,823,020,000
32.52027
83
0.584056
false
4.101695
false
false
false
futurecolors/gopython3
gopython3/core/rest.py
1
2372
from django.db import transaction from rest_framework import viewsets, routers, status, mixins from rest_framework.decorators import api_view, action from rest_framework.generics import RetrieveAPIView, ListAPIView from rest_framework.response import Response from rest_framework.reverse import reverse from rest_framework_extensions.mixins import DetailSerializerMixin from .serializers import JobSerializer, PackageSerializer, JobDetailSerialzier from .models import Job, Spec, TASK_STATUS class JobViewSet(DetailSerializerMixin, mixins.CreateModelMixin, viewsets.ReadOnlyModelViewSet): model = Job serializer_class = JobSerializer serializer_detail_class = JobDetailSerialzier def create(self, request, *args, **kwargs): try: with transaction.atomic(): job = Job.objects.create_from_requirements(request.DATA['requirements']) job.start() serializer = self.get_serializer(job) headers = self.get_success_headers(serializer.data) except Exception as e: return Response({'requirements': 'Bad requirements. %s' % e}, status=status.HTTP_400_BAD_REQUEST) else: return Response(serializer.data, status=status.HTTP_201_CREATED, headers=headers) @action() def restart(self, request, pk=None): """ Restart existing job """ job = self.get_object() if job.status in (TASK_STATUS.error, TASK_STATUS.success): job.start() return Response({'message': 'Job #%s has been restarted' % pk}, status=status.HTTP_202_ACCEPTED) else: return Response({'message': 'Job #%s was not restarted. It is %s.' 
% (pk, job.status)}, status=status.HTTP_400_BAD_REQUEST) class PackageListView(ListAPIView): model = Spec serializer_class = PackageSerializer class PackageView(RetrieveAPIView): model = Spec serializer_class = PackageSerializer lookup_field = 'code' @api_view(('GET',)) def api_root(request, format=None): return Response({ 'jobs': reverse('job-list', request=request, format=format), 'packages': reverse('spec-list', request=request, format=format) }) router = routers.SimpleRouter() router.include_format_suffixes = False router.register(r'jobs', JobViewSet)
mit
-1,590,789,759,737,583,600
36.0625
135
0.676644
false
4.243292
false
false
false
Horace1117/MKTCloud
openstack_dashboard/dashboards/project/dashboard.py
1
1184
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright 2012 Nebula, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from django.utils.translation import ugettext_lazy as _ import horizon class NetworkPanels(horizon.PanelGroup): slug = "network" name = _("") panels = ('networks', 'routers', 'loadbalancers', 'network_topology',) class Project(horizon.Dashboard): name = _("Project") slug = "project" panels = ('overview','instances','volumes','images','routers','images_and_snapshots','access_and_security','containers') default_panel = 'overview' horizon.register(Project)
apache-2.0
7,982,516,707,399,958,000
31
124
0.684122
false
4.054795
false
false
false
Detailscool/YHSpider
BillboardAnalysis/bill/spiders/billtoprap.py
1
1396
#!/usr/bin/python # -*- coding:utf-8 -*- # billtoprap.py # Created by HenryLee on 2017/9/14. # Copyright © 2017年. All rights reserved. # Description : from bill.items import BillItem from scrapy import Spider, Request from bs4 import BeautifulSoup class BillSpider(Spider): name = 'billtoprap_spider' allowed_ulrs = ['http://www.billboard.com/charts'] # start_urls = ['http://www.billboard.com/charts/year-end/2014/hot-rap-songs'] start_urls = ['http://www.billboard.com/charts/year-end/' + str(i) + '/hot-rap-songs' for i in range(2006, 2017)] def parse(self, response): artist_selectors = response.xpath('//a[@class="ye-chart__item-subtitle-link"]') year = response.xpath('.//div[@class="ye-chart__year-nav"]/text()').extract()[2].strip('\n') for selector in artist_selectors: parent = selector.xpath("ancestor::div[@class='ye-chart__item-text']")[0] artist = selector.xpath('text()').extract_first() name = parent.xpath('h1[@class="ye-chart__item-title"]')[0].xpath('text()').extract_first().strip() ranking = parent.xpath('div[@class="ye-chart__item-rank"]')[0].xpath('text()').extract_first() item = BillItem() item['ranking'] = ranking item['name'] = name item['artists'] = artist item['year'] = year yield item
mit
-5,613,891,393,085,021,000
38.8
117
0.608758
false
3.364734
false
false
false
DecipherOne/Troglodyte
Trog Build Dependencies/Python26/Lib/idlelib/MultiCall.py
1
17470
""" MultiCall - a class which inherits its methods from a Tkinter widget (Text, for example), but enables multiple calls of functions per virtual event - all matching events will be called, not only the most specific one. This is done by wrapping the event functions - event_add, event_delete and event_info. MultiCall recognizes only a subset of legal event sequences. Sequences which are not recognized are treated by the original Tk handling mechanism. A more-specific event will be called before a less-specific event. The recognized sequences are complete one-event sequences (no emacs-style Ctrl-X Ctrl-C, no shortcuts like <3>), for all types of events. Key/Button Press/Release events can have modifiers. The recognized modifiers are Shift, Control, Option and Command for Mac, and Control, Alt, Shift, Meta/M for other platforms. For all events which were handled by MultiCall, a new member is added to the event instance passed to the binded functions - mc_type. This is one of the event type constants defined in this module (such as MC_KEYPRESS). For Key/Button events (which are handled by MultiCall and may receive modifiers), another member is added - mc_state. This member gives the state of the recognized modifiers, as a combination of the modifier constants also defined in this module (for example, MC_SHIFT). Using these members is absolutely portable. The order by which events are called is defined by these rules: 1. A more-specific event will be called before a less-specific event. 2. A recently-binded event will be called before a previously-binded event, unless this conflicts with the first rule. Each function will be called at most once for each event. 
""" import sys import string import re import Tkinter from idlelib import macosxSupport # the event type constants, which define the meaning of mc_type MC_KEYPRESS=0; MC_KEYRELEASE=1; MC_BUTTONPRESS=2; MC_BUTTONRELEASE=3; MC_ACTIVATE=4; MC_CIRCULATE=5; MC_COLORMAP=6; MC_CONFIGURE=7; MC_DEACTIVATE=8; MC_DESTROY=9; MC_ENTER=10; MC_EXPOSE=11; MC_FOCUSIN=12; MC_FOCUSOUT=13; MC_GRAVITY=14; MC_LEAVE=15; MC_MAP=16; MC_MOTION=17; MC_MOUSEWHEEL=18; MC_PROPERTY=19; MC_REPARENT=20; MC_UNMAP=21; MC_VISIBILITY=22; # the modifier state constants, which define the meaning of mc_state MC_SHIFT = 1<<0; MC_CONTROL = 1<<2; MC_ALT = 1<<3; MC_META = 1<<5 MC_OPTION = 1<<6; MC_COMMAND = 1<<7 # define the list of modifiers, to be used in complex event types. if macosxSupport.runningAsOSXApp(): _modifiers = (("Shift",), ("Control",), ("Option",), ("Command",)) _modifier_masks = (MC_SHIFT, MC_CONTROL, MC_OPTION, MC_COMMAND) else: _modifiers = (("Control",), ("Alt",), ("Shift",), ("Meta", "M")) _modifier_masks = (MC_CONTROL, MC_ALT, MC_SHIFT, MC_META) # a dictionary to map a modifier name into its number _modifier_names = dict([(name, number) for number in range(len(_modifiers)) for name in _modifiers[number]]) # A binder is a class which binds functions to one type of event. It has two # methods: bind and unbind, which get a function and a parsed sequence, as # returned by _parse_sequence(). There are two types of binders: # _SimpleBinder handles event types with no modifiers and no detail. # No Python functions are called when no events are binded. # _ComplexBinder handles event types with modifiers and a detail. # A Python function is called each time an event is generated. 
class _SimpleBinder: def __init__(self, type, widget, widgetinst): self.type = type self.sequence = '<'+_types[type][0]+'>' self.widget = widget self.widgetinst = widgetinst self.bindedfuncs = [] self.handlerid = None def bind(self, triplet, func): if not self.handlerid: def handler(event, l = self.bindedfuncs, mc_type = self.type): event.mc_type = mc_type wascalled = {} for i in range(len(l)-1, -1, -1): func = l[i] if func not in wascalled: wascalled[func] = True r = func(event) if r: return r self.handlerid = self.widget.bind(self.widgetinst, self.sequence, handler) self.bindedfuncs.append(func) def unbind(self, triplet, func): self.bindedfuncs.remove(func) if not self.bindedfuncs: self.widget.unbind(self.widgetinst, self.sequence, self.handlerid) self.handlerid = None def __del__(self): if self.handlerid: self.widget.unbind(self.widgetinst, self.sequence, self.handlerid) # An int in range(1 << len(_modifiers)) represents a combination of modifiers # (if the least significent bit is on, _modifiers[0] is on, and so on). # _state_subsets gives for each combination of modifiers, or *state*, # a list of the states which are a subset of it. This list is ordered by the # number of modifiers is the state - the most specific state comes first. _states = range(1 << len(_modifiers)) _state_names = [''.join(m[0]+'-' for i, m in enumerate(_modifiers) if (1 << i) & s) for s in _states] def expand_substates(states): '''For each item of states return a list containing all combinations of that item with individual bits reset, sorted by the number of set bits. 
''' def nbits(n): "number of bits set in n base 2" nb = 0 while n: n, rem = divmod(n, 2) nb += rem return nb statelist = [] for state in states: substates = list(set(state & x for x in states)) substates.sort(key=nbits, reverse=True) statelist.append(substates) return statelist _state_subsets = expand_substates(_states) # _state_codes gives for each state, the portable code to be passed as mc_state _state_codes = [] for s in _states: r = 0 for i in range(len(_modifiers)): if (1 << i) & s: r |= _modifier_masks[i] _state_codes.append(r) class _ComplexBinder: # This class binds many functions, and only unbinds them when it is deleted. # self.handlerids is the list of seqs and ids of binded handler functions. # The binded functions sit in a dictionary of lists of lists, which maps # a detail (or None) and a state into a list of functions. # When a new detail is discovered, handlers for all the possible states # are binded. def __create_handler(self, lists, mc_type, mc_state): def handler(event, lists = lists, mc_type = mc_type, mc_state = mc_state, ishandlerrunning = self.ishandlerrunning, doafterhandler = self.doafterhandler): ishandlerrunning[:] = [True] event.mc_type = mc_type event.mc_state = mc_state wascalled = {} r = None for l in lists: for i in range(len(l)-1, -1, -1): func = l[i] if func not in wascalled: wascalled[func] = True r = l[i](event) if r: break if r: break ishandlerrunning[:] = [] # Call all functions in doafterhandler and remove them from list while doafterhandler: doafterhandler.pop()() if r: return r return handler def __init__(self, type, widget, widgetinst): self.type = type self.typename = _types[type][0] self.widget = widget self.widgetinst = widgetinst self.bindedfuncs = {None: [[] for s in _states]} self.handlerids = [] # we don't want to change the lists of functions while a handler is # running - it will mess up the loop and anyway, we usually want the # change to happen from the next event. 
So we have a list of functions # for the handler to run after it finishes calling the binded functions. # It calls them only once. # ishandlerrunning is a list. An empty one means no, otherwise - yes. # this is done so that it would be mutable. self.ishandlerrunning = [] self.doafterhandler = [] for s in _states: lists = [self.bindedfuncs[None][i] for i in _state_subsets[s]] handler = self.__create_handler(lists, type, _state_codes[s]) seq = '<'+_state_names[s]+self.typename+'>' self.handlerids.append((seq, self.widget.bind(self.widgetinst, seq, handler))) def bind(self, triplet, func): if not self.bindedfuncs.has_key(triplet[2]): self.bindedfuncs[triplet[2]] = [[] for s in _states] for s in _states: lists = [ self.bindedfuncs[detail][i] for detail in (triplet[2], None) for i in _state_subsets[s] ] handler = self.__create_handler(lists, self.type, _state_codes[s]) seq = "<%s%s-%s>"% (_state_names[s], self.typename, triplet[2]) self.handlerids.append((seq, self.widget.bind(self.widgetinst, seq, handler))) doit = lambda: self.bindedfuncs[triplet[2]][triplet[0]].append(func) if not self.ishandlerrunning: doit() else: self.doafterhandler.append(doit) def unbind(self, triplet, func): doit = lambda: self.bindedfuncs[triplet[2]][triplet[0]].remove(func) if not self.ishandlerrunning: doit() else: self.doafterhandler.append(doit) def __del__(self): for seq, id in self.handlerids: self.widget.unbind(self.widgetinst, seq, id) # define the list of event types to be handled by MultiEvent. the order is # compatible with the definition of event type constants. _types = ( ("KeyPress", "Key"), ("KeyRelease",), ("ButtonPress", "Button"), ("ButtonRelease",), ("Activate",), ("Circulate",), ("Colormap",), ("Configure",), ("Deactivate",), ("Destroy",), ("Enter",), ("Expose",), ("FocusIn",), ("FocusOut",), ("Gravity",), ("Leave",), ("Map",), ("Motion",), ("MouseWheel",), ("Property",), ("Reparent",), ("Unmap",), ("Visibility",), ) # which binder should be used for every event type? 
_binder_classes = (_ComplexBinder,) * 4 + (_SimpleBinder,) * (len(_types)-4) # A dictionary to map a type name into its number _type_names = dict([(name, number) for number in range(len(_types)) for name in _types[number]]) _keysym_re = re.compile(r"^\w+$") _button_re = re.compile(r"^[1-5]$") def _parse_sequence(sequence): """Get a string which should describe an event sequence. If it is successfully parsed as one, return a tuple containing the state (as an int), the event type (as an index of _types), and the detail - None if none, or a string if there is one. If the parsing is unsuccessful, return None. """ if not sequence or sequence[0] != '<' or sequence[-1] != '>': return None words = string.split(sequence[1:-1], '-') modifiers = 0 while words and words[0] in _modifier_names: modifiers |= 1 << _modifier_names[words[0]] del words[0] if words and words[0] in _type_names: type = _type_names[words[0]] del words[0] else: return None if _binder_classes[type] is _SimpleBinder: if modifiers or words: return None else: detail = None else: # _ComplexBinder if type in [_type_names[s] for s in ("KeyPress", "KeyRelease")]: type_re = _keysym_re else: type_re = _button_re if not words: detail = None elif len(words) == 1 and type_re.match(words[0]): detail = words[0] else: return None return modifiers, type, detail def _triplet_to_sequence(triplet): if triplet[2]: return '<'+_state_names[triplet[0]]+_types[triplet[1]][0]+'-'+ \ triplet[2]+'>' else: return '<'+_state_names[triplet[0]]+_types[triplet[1]][0]+'>' _multicall_dict = {} def MultiCallCreator(widget): """Return a MultiCall class which inherits its methods from the given widget class (for example, Tkinter.Text). This is used instead of a templating mechanism. 
""" if widget in _multicall_dict: return _multicall_dict[widget] class MultiCall (widget): assert issubclass(widget, Tkinter.Misc) def __init__(self, *args, **kwargs): widget.__init__(self, *args, **kwargs) # a dictionary which maps a virtual event to a tuple with: # 0. the function binded # 1. a list of triplets - the sequences it is binded to self.__eventinfo = {} self.__binders = [_binder_classes[i](i, widget, self) for i in range(len(_types))] def bind(self, sequence=None, func=None, add=None): #print "bind(%s, %s, %s) called." % (sequence, func, add) if type(sequence) is str and len(sequence) > 2 and \ sequence[:2] == "<<" and sequence[-2:] == ">>": if sequence in self.__eventinfo: ei = self.__eventinfo[sequence] if ei[0] is not None: for triplet in ei[1]: self.__binders[triplet[1]].unbind(triplet, ei[0]) ei[0] = func if ei[0] is not None: for triplet in ei[1]: self.__binders[triplet[1]].bind(triplet, func) else: self.__eventinfo[sequence] = [func, []] return widget.bind(self, sequence, func, add) def unbind(self, sequence, funcid=None): if type(sequence) is str and len(sequence) > 2 and \ sequence[:2] == "<<" and sequence[-2:] == ">>" and \ sequence in self.__eventinfo: func, triplets = self.__eventinfo[sequence] if func is not None: for triplet in triplets: self.__binders[triplet[1]].unbind(triplet, func) self.__eventinfo[sequence][0] = None return widget.unbind(self, sequence, funcid) def event_add(self, virtual, *sequences): #print "event_add(%s,%s) was called"%(repr(virtual),repr(sequences)) if virtual not in self.__eventinfo: self.__eventinfo[virtual] = [None, []] func, triplets = self.__eventinfo[virtual] for seq in sequences: triplet = _parse_sequence(seq) if triplet is None: #print >> sys.stderr, "Seq. 
%s was added by Tkinter."%seq widget.event_add(self, virtual, seq) else: if func is not None: self.__binders[triplet[1]].bind(triplet, func) triplets.append(triplet) def event_delete(self, virtual, *sequences): if virtual not in self.__eventinfo: return func, triplets = self.__eventinfo[virtual] for seq in sequences: triplet = _parse_sequence(seq) if triplet is None: #print >> sys.stderr, "Seq. %s was deleted by Tkinter."%seq widget.event_delete(self, virtual, seq) else: if func is not None: self.__binders[triplet[1]].unbind(triplet, func) triplets.remove(triplet) def event_info(self, virtual=None): if virtual is None or virtual not in self.__eventinfo: return widget.event_info(self, virtual) else: return tuple(map(_triplet_to_sequence, self.__eventinfo[virtual][1])) + \ widget.event_info(self, virtual) def __del__(self): for virtual in self.__eventinfo: func, triplets = self.__eventinfo[virtual] if func: for triplet in triplets: self.__binders[triplet[1]].unbind(triplet, func) _multicall_dict[widget] = MultiCall return MultiCall if __name__ == "__main__": # Test root = Tkinter.Tk() text = MultiCallCreator(Tkinter.Text)(root) text.pack() def bindseq(seq, n=[0]): def handler(event): print seq text.bind("<<handler%d>>"%n[0], handler) text.event_add("<<handler%d>>"%n[0], seq) n[0] += 1 bindseq("<Key>") bindseq("<Control-Key>") bindseq("<Alt-Key-a>") bindseq("<Control-Key-a>") bindseq("<Alt-Control-Key-a>") bindseq("<Key-b>") bindseq("<Control-Button-1>") bindseq("<Alt-Button-1>") bindseq("<FocusOut>") bindseq("<Enter>") bindseq("<Leave>") root.mainloop()
mit
1,858,231,030,308,648,000
40.398104
80
0.574814
false
3.932028
false
false
false
HewlettPackard/python-hpOneView
hpOneView/oneview_client.py
1
39340
# -*- coding: utf-8 -*- ### # (C) Copyright (2012-2018) Hewlett Packard Enterprise Development LP # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. ### """ This module implements a common client for HPE OneView REST API. 
""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from future import standard_library standard_library.install_aliases() import json import os from hpOneView.connection import connection from hpOneView.image_streamer.image_streamer_client import ImageStreamerClient from hpOneView.resources.security.certificate_authority import CertificateAuthority from hpOneView.resources.servers.connections import Connections from hpOneView.resources.networking.fc_networks import FcNetworks from hpOneView.resources.networking.fcoe_networks import FcoeNetworks from hpOneView.resources.networking.ethernet_networks import EthernetNetworks from hpOneView.resources.networking.connection_templates import ConnectionTemplates from hpOneView.resources.networking.fabrics import Fabrics from hpOneView.resources.networking.network_sets import NetworkSets from hpOneView.resources.data_services.metric_streaming import MetricStreaming from hpOneView.resources.networking.switches import Switches from hpOneView.resources.networking.switch_types import SwitchTypes from hpOneView.resources.activity.tasks import Tasks from hpOneView.resources.settings.restores import Restores from hpOneView.resources.settings.scopes import Scopes from hpOneView.resources.settings.licenses import Licenses from hpOneView.resources.servers.enclosures import Enclosures from hpOneView.resources.servers.logical_enclosures import LogicalEnclosures from hpOneView.resources.servers.enclosure_groups import EnclosureGroups from hpOneView.resources.servers.server_hardware import ServerHardware from hpOneView.resources.servers.server_hardware_types import ServerHardwareTypes from hpOneView.resources.servers.id_pools_ranges import IdPoolsRanges from hpOneView.resources.servers.id_pools_ipv4_ranges import IdPoolsIpv4Ranges from hpOneView.resources.servers.id_pools_ipv4_subnets import IdPoolsIpv4Subnets from 
hpOneView.resources.servers.id_pools import IdPools from hpOneView.resources.networking.interconnects import Interconnects from hpOneView.resources.networking.interconnect_types import InterconnectTypes from hpOneView.resources.networking.interconnect_link_topologies import InterconnectLinkTopologies from hpOneView.resources.networking.sas_interconnect_types import SasInterconnectTypes from hpOneView.resources.networking.internal_link_sets import InternalLinkSets from hpOneView.resources.uncategorized.unmanaged_devices import UnmanagedDevices from hpOneView.resources.networking.logical_downlinks import LogicalDownlinks from hpOneView.resources.facilities.power_devices import PowerDevices from hpOneView.resources.facilities.racks import Racks from hpOneView.resources.facilities.datacenters import Datacenters from hpOneView.resources.fc_sans.managed_sans import ManagedSANs from hpOneView.resources.fc_sans.san_managers import SanManagers from hpOneView.resources.fc_sans.endpoints import Endpoints from hpOneView.resources.networking.logical_interconnects import LogicalInterconnects from hpOneView.resources.networking.logical_interconnect_groups import LogicalInterconnectGroups from hpOneView.resources.networking.sas_logical_interconnects import SasLogicalInterconnects from hpOneView.resources.networking.logical_switch_groups import LogicalSwitchGroups from hpOneView.resources.networking.logical_switches import LogicalSwitches from hpOneView.resources.networking.sas_interconnects import SasInterconnects from hpOneView.resources.servers.server_profiles import ServerProfiles from hpOneView.resources.servers.server_profile_templates import ServerProfileTemplate from hpOneView.resources.storage.sas_logical_jbods import SasLogicalJbods from hpOneView.resources.storage.storage_systems import StorageSystems from hpOneView.resources.storage.storage_pools import StoragePools from hpOneView.resources.storage.storage_volume_templates import StorageVolumeTemplates from 
hpOneView.resources.storage.storage_volume_attachments import StorageVolumeAttachments from hpOneView.resources.storage.drive_enclosures import DriveEnclosures from hpOneView.resources.settings.firmware_drivers import FirmwareDrivers from hpOneView.resources.settings.firmware_bundles import FirmwareBundles from hpOneView.resources.settings.backups import Backups from hpOneView.resources.storage.volumes import Volumes from hpOneView.resources.storage.sas_logical_jbod_attachments import SasLogicalJbodAttachments from hpOneView.resources.networking.uplink_sets import UplinkSets from hpOneView.resources.servers.migratable_vc_domains import MigratableVcDomains from hpOneView.resources.networking.sas_logical_interconnect_groups import SasLogicalInterconnectGroups from hpOneView.resources.search.index_resources import IndexResources from hpOneView.resources.search.labels import Labels from hpOneView.resources.activity.alerts import Alerts from hpOneView.resources.activity.events import Events from hpOneView.resources.uncategorized.os_deployment_plans import OsDeploymentPlans from hpOneView.resources.uncategorized.os_deployment_servers import OsDeploymentServers from hpOneView.resources.security.certificate_rabbitmq import CertificateRabbitMQ from hpOneView.resources.security.login_details import LoginDetails from hpOneView.resources.security.roles import Roles from hpOneView.resources.security.users import Users from hpOneView.resources.settings.appliance_device_read_community import ApplianceDeviceReadCommunity from hpOneView.resources.settings.appliance_device_snmp_v1_trap_destinations import ApplianceDeviceSNMPv1TrapDestinations from hpOneView.resources.settings.appliance_device_snmp_v3_trap_destinations import ApplianceDeviceSNMPv3TrapDestinations from hpOneView.resources.settings.appliance_device_snmp_v3_users import ApplianceDeviceSNMPv3Users from hpOneView.resources.settings.appliance_node_information import ApplianceNodeInformation from 
hpOneView.resources.settings.appliance_time_and_locale_configuration import ApplianceTimeAndLocaleConfiguration from hpOneView.resources.settings.versions import Versions ONEVIEW_CLIENT_INVALID_PROXY = 'Invalid Proxy format' class OneViewClient(object): DEFAULT_API_VERSION = 300 def __init__(self, config): self.__connection = connection(config["ip"], config.get('api_version', self.DEFAULT_API_VERSION), config.get('ssl_certificate', False), config.get('timeout')) self.__image_streamer_ip = config.get("image_streamer_ip") self.__set_proxy(config) self.__connection.login(config["credentials"]) self.__certificate_authority = None self.__connections = None self.__connection_templates = None self.__fc_networks = None self.__fcoe_networks = None self.__ethernet_networks = None self.__fabrics = None self.__network_sets = None self.__switches = None self.__switch_types = None self.__tasks = None self.__scopes = None self.__enclosures = None self.__logical_enclosures = None self.__enclosure_groups = None self.__metric_streaming = None self.__server_hardware = None self.__server_hardware_types = None self.__id_pools_vsn_ranges = None self.__id_pools_vmac_ranges = None self.__id_pools_vwwn_ranges = None self.__id_pools_ipv4_ranges = None self.__id_pools_ipv4_subnets = None self.__id_pools = None self.__interconnects = None self.__interconnect_types = None self.__interconnect_link_topologies = None self.__sas_interconnect_types = None self.__internal_link_sets = None self.__power_devices = None self.__unmanaged_devices = None self.__racks = None self.__roles = None self.__datacenters = None self.__san_managers = None self.__endpoints = None self.__logical_interconnects = None self.__sas_logical_interconnects = None self.__logical_interconnect_groups = None self.__logical_switch_groups = None self.__logical_switches = None self.__logical_downlinks = None self.__restores = None self.__server_profiles = None self.__server_profile_templates = None self.__sas_logical_jbods = None 
self.__storage_systems = None self.__storage_pools = None self.__storage_volume_templates = None self.__storage_volume_attachments = None self.__firmware_drivers = None self.__firmware_bundles = None self.__uplink_sets = None self.__volumes = None self.__sas_logical_jbod_attachments = None self.__managed_sans = None self.__migratable_vc_domains = None self.__sas_interconnects = None self.__index_resources = None self.__labels = None self.__sas_logical_interconnect_groups = None self.__alerts = None self.__events = None self.__drive_enclures = None self.__os_deployment_plans = None self.__os_deployment_servers = None self.__certificate_rabbitmq = None self.__users = None self.__appliance_device_read_community = None self.__appliance_device_snmp_v1_trap_destinations = None self.__appliance_device_snmp_v3_trap_destinations = None self.__appliance_device_snmp_v3_users = None self.__appliance_time_and_locale_configuration = None self.__appliance_node_information = None self.__versions = None self.__backups = None self.__login_details = None self.__licenses = None @classmethod def from_json_file(cls, file_name): """ Construct OneViewClient using a json file. Args: file_name: json full path. Returns: OneViewClient: """ with open(file_name) as json_data: config = json.load(json_data) return cls(config) @classmethod def from_environment_variables(cls): """ Construct OneViewClient using environment variables. Allowed variables: ONEVIEWSDK_IP (required), ONEVIEWSDK_USERNAME (required), ONEVIEWSDK_PASSWORD (required), ONEVIEWSDK_AUTH_LOGIN_DOMAIN, ONEVIEWSDK_API_VERSION, ONEVIEWSDK_IMAGE_STREAMER_IP, ONEVIEWSDK_SESSIONID, ONEVIEWSDK_SSL_CERTIFICATE, ONEVIEWSDK_CONNECTION_TIMEOUT and ONEVIEWSDK_PROXY. 
Returns: OneViewClient: """ ip = os.environ.get('ONEVIEWSDK_IP', '') image_streamer_ip = os.environ.get('ONEVIEWSDK_IMAGE_STREAMER_IP', '') api_version = int(os.environ.get('ONEVIEWSDK_API_VERSION', OneViewClient.DEFAULT_API_VERSION)) ssl_certificate = os.environ.get('ONEVIEWSDK_SSL_CERTIFICATE', '') username = os.environ.get('ONEVIEWSDK_USERNAME', '') auth_login_domain = os.environ.get('ONEVIEWSDK_AUTH_LOGIN_DOMAIN', '') password = os.environ.get('ONEVIEWSDK_PASSWORD', '') proxy = os.environ.get('ONEVIEWSDK_PROXY', '') sessionID = os.environ.get('ONEVIEWSDK_SESSIONID', '') timeout = os.environ.get('ONEVIEWSDK_CONNECTION_TIMEOUT') config = dict(ip=ip, image_streamer_ip=image_streamer_ip, api_version=api_version, ssl_certificate=ssl_certificate, credentials=dict(userName=username, authLoginDomain=auth_login_domain, password=password, sessionID=sessionID), proxy=proxy, timeout=timeout) return cls(config) def __set_proxy(self, config): """ Set proxy if needed Args: config: Config dict """ if "proxy" in config and config["proxy"]: proxy = config["proxy"] splitted = proxy.split(':') if len(splitted) != 2: raise ValueError(ONEVIEW_CLIENT_INVALID_PROXY) proxy_host = splitted[0] proxy_port = int(splitted[1]) self.__connection.set_proxy(proxy_host, proxy_port) @property def api_version(self): """ Gets the OneView API Version. Returns: int: API Version. """ return self.__connection._apiVersion @property def connection(self): """ Gets the underlying HPE OneView connection used by the OneViewClient. Returns: connection: """ return self.__connection def create_image_streamer_client(self): """ Create the Image Streamer API Client. Returns: ImageStreamerClient: """ image_streamer = ImageStreamerClient(self.__image_streamer_ip, self.__connection.get_session_id(), self.__connection._apiVersion, self.__connection._sslBundle) return image_streamer @property def certificate_authority(self): """ Gets the Certificate Authority API client. 
Returns: CertificateAuthority: """ if not self.__certificate_authority: self.__certificate_authority = CertificateAuthority(self.__connection) return self.__certificate_authority @property def connections(self): """ Gets the Connections API client. Returns: Connections: """ if not self.__connections: self.__connections = Connections( self.__connection) return self.__connections @property def connection_templates(self): """ Gets the ConnectionTemplates API client. Returns: ConnectionTemplates: """ if not self.__connection_templates: self.__connection_templates = ConnectionTemplates( self.__connection) return self.__connection_templates @property def fc_networks(self): """ Gets the FcNetworks API client. Returns: FcNetworks: """ if not self.__fc_networks: self.__fc_networks = FcNetworks(self.__connection) return self.__fc_networks @property def fcoe_networks(self): """ Gets the FcoeNetworks API client. Returns: FcoeNetworks: """ if not self.__fcoe_networks: self.__fcoe_networks = FcoeNetworks(self.__connection) return self.__fcoe_networks @property def ethernet_networks(self): """ Gets the EthernetNetworks API client. Returns: EthernetNetworks: """ if not self.__ethernet_networks: self.__ethernet_networks = EthernetNetworks(self.__connection) return self.__ethernet_networks @property def fabrics(self): """ Gets the Fabrics API client. Returns: Fabrics: """ if not self.__fabrics: self.__fabrics = Fabrics(self.__connection) return self.__fabrics @property def restores(self): """ Gets the Restores API client. Returns: Restores: """ if not self.__restores: self.__restores = Restores(self.__connection) return self.__restores @property def scopes(self): """ Gets the Scopes API client. Returns: Scopes: """ if not self.__scopes: self.__scopes = Scopes(self.__connection) return self.__scopes @property def datacenters(self): """ Gets the Datacenters API client. 
Returns: Datacenters: """ if not self.__datacenters: self.__datacenters = Datacenters(self.__connection) return self.__datacenters @property def network_sets(self): """ Gets the NetworkSets API client. Returns: NetworkSets: """ if not self.__network_sets: self.__network_sets = NetworkSets(self.__connection) return self.__network_sets @property def server_hardware(self): """ Gets the ServerHardware API client. Returns: ServerHardware: """ if not self.__server_hardware: self.__server_hardware = ServerHardware(self.__connection) return self.__server_hardware @property def server_hardware_types(self): """ Gets the ServerHardwareTypes API client. Returns: ServerHardwareTypes: """ if not self.__server_hardware_types: self.__server_hardware_types = ServerHardwareTypes( self.__connection) return self.__server_hardware_types @property def id_pools_vsn_ranges(self): """ Gets the IdPoolsRanges API Client for VSN Ranges. Returns: IdPoolsRanges: """ if not self.__id_pools_vsn_ranges: self.__id_pools_vsn_ranges = IdPoolsRanges('vsn', self.__connection) return self.__id_pools_vsn_ranges @property def id_pools_vmac_ranges(self): """ Gets the IdPoolsRanges API Client for VMAC Ranges. Returns: IdPoolsRanges: """ if not self.__id_pools_vmac_ranges: self.__id_pools_vmac_ranges = IdPoolsRanges('vmac', self.__connection) return self.__id_pools_vmac_ranges @property def id_pools_vwwn_ranges(self): """ Gets the IdPoolsRanges API Client for VWWN Ranges. Returns: IdPoolsRanges: """ if not self.__id_pools_vwwn_ranges: self.__id_pools_vwwn_ranges = IdPoolsRanges('vwwn', self.__connection) return self.__id_pools_vwwn_ranges @property def id_pools_ipv4_ranges(self): """ Gets the IdPoolsIpv4Ranges API client. Returns: IdPoolsIpv4Ranges: """ if not self.__id_pools_ipv4_ranges: self.__id_pools_ipv4_ranges = IdPoolsIpv4Ranges(self.__connection) return self.__id_pools_ipv4_ranges @property def id_pools_ipv4_subnets(self): """ Gets the IdPoolsIpv4Subnets API client. 
Returns: IdPoolsIpv4Subnets: """ if not self.__id_pools_ipv4_subnets: self.__id_pools_ipv4_subnets = IdPoolsIpv4Subnets(self.__connection) return self.__id_pools_ipv4_subnets @property def id_pools(self): """ Gets the IdPools API client. Returns: IdPools: """ if not self.__id_pools: self.__id_pools = IdPools(self.__connection) return self.__id_pools @property def switches(self): """ Gets the Switches API client. Returns: Switches: """ if not self.__switches: self.__switches = Switches(self.__connection) return self.__switches @property def roles(self): """ Gets the Roles API client. Returns: Roles: """ if not self.__roles: self.__roles = Roles(self.__connection) return self.__roles @property def switch_types(self): """ Gets the SwitchTypes API client. Returns: SwitchTypes: """ if not self.__switch_types: self.__switch_types = SwitchTypes(self.__connection) return self.__switch_types @property def logical_switch_groups(self): """ Gets the LogicalSwitchGroups API client. Returns: LogicalSwitchGroups: """ if not self.__logical_switch_groups: self.__logical_switch_groups = LogicalSwitchGroups(self.__connection) return self.__logical_switch_groups @property def logical_switches(self): """ Gets the LogicalSwitches API client. Returns: LogicalSwitches: """ if not self.__logical_switches: self.__logical_switches = LogicalSwitches(self.__connection) return self.__logical_switches @property def tasks(self): """ Gets the Tasks API client. Returns: Tasks: """ if not self.__tasks: self.__tasks = Tasks(self.__connection) return self.__tasks @property def enclosure_groups(self): """ Gets the EnclosureGroups API client. Returns: EnclosureGroups: """ if not self.__enclosure_groups: self.__enclosure_groups = EnclosureGroups(self.__connection) return self.__enclosure_groups @property def enclosures(self): """ Gets the Enclosures API client. 
Returns: Enclosures: """ if not self.__enclosures: self.__enclosures = Enclosures(self.__connection) return self.__enclosures @property def logical_enclosures(self): """ Gets the LogicalEnclosures API client. Returns: LogicalEnclosures: """ if not self.__logical_enclosures: self.__logical_enclosures = LogicalEnclosures(self.__connection) return self.__logical_enclosures @property def metric_streaming(self): """ Gets the MetricStreaming API client. Returns: MetricStreaming: """ if not self.__metric_streaming: self.__metric_streaming = MetricStreaming(self.__connection) return self.__metric_streaming @property def interconnects(self): """ Gets the Interconnects API client. Returns: Interconnects: """ if not self.__interconnects: self.__interconnects = Interconnects(self.__connection) return self.__interconnects @property def interconnect_types(self): """ Gets the InterconnectTypes API client. Returns: InterconnectTypes: """ if not self.__interconnect_types: self.__interconnect_types = InterconnectTypes(self.__connection) return self.__interconnect_types @property def interconnect_link_topologies(self): """ Gets the InterconnectLinkTopologies API client. Returns: InterconnectLinkTopologies: """ if not self.__interconnect_link_topologies: self.__interconnect_link_topologies = InterconnectLinkTopologies(self.__connection) return self.__interconnect_link_topologies @property def sas_interconnect_types(self): """ Gets the SasInterconnectTypes API client. Returns: SasInterconnectTypes: """ if not self.__sas_interconnect_types: self.__sas_interconnect_types = SasInterconnectTypes(self.__connection) return self.__sas_interconnect_types @property def internal_link_sets(self): """ Gets the InternalLinkSets API client. Returns: InternalLinkSets: """ if not self.__internal_link_sets: self.__internal_link_sets = InternalLinkSets(self.__connection) return self.__internal_link_sets @property def logical_interconnect_groups(self): """ Gets the LogicalInterconnectGroups API client. 
Returns: LogicalInterconnectGroups: """ if not self.__logical_interconnect_groups: self.__logical_interconnect_groups = LogicalInterconnectGroups( self.__connection) return self.__logical_interconnect_groups @property def logical_interconnects(self): """ Gets the LogicalInterconnects API client. Returns: LogicalInterconnects: """ if not self.__logical_interconnects: self.__logical_interconnects = LogicalInterconnects( self.__connection) return self.__logical_interconnects @property def sas_logical_interconnects(self): """ Gets the SasLogicalInterconnects API client. Returns: SasLogicalInterconnects: """ if not self.__sas_logical_interconnects: self.__sas_logical_interconnects = SasLogicalInterconnects(self.__connection) return self.__sas_logical_interconnects @property def logical_downlinks(self): """ Gets the LogicalDownlinks API client. Returns: LogicalDownlinks: """ if not self.__logical_downlinks: self.__logical_downlinks = LogicalDownlinks( self.__connection) return self.__logical_downlinks @property def power_devices(self): """ Gets the PowerDevices API client. Returns: PowerDevices: """ if not self.__power_devices: self.__power_devices = PowerDevices(self.__connection) return self.__power_devices @property def unmanaged_devices(self): """ Gets the Unmanaged Devices API client. Returns: UnmanagedDevices: """ if not self.__unmanaged_devices: self.__unmanaged_devices = UnmanagedDevices(self.__connection) return self.__unmanaged_devices @property def racks(self): """ Gets the Racks API client. Returns: Racks: """ if not self.__racks: self.__racks = Racks(self.__connection) return self.__racks @property def san_managers(self): """ Gets the SanManagers API client. Returns: SanManagers: """ if not self.__san_managers: self.__san_managers = SanManagers(self.__connection) return self.__san_managers @property def endpoints(self): """ Gets the Endpoints API client. 
Returns: Endpoints: """ if not self.__endpoints: self.__endpoints = Endpoints(self.__connection) return self.__endpoints @property def server_profiles(self): """ Gets the ServerProfiles API client. Returns: ServerProfiles: """ if not self.__server_profiles: self.__server_profiles = ServerProfiles(self.__connection) return self.__server_profiles @property def server_profile_templates(self): """ Gets the ServerProfileTemplate API client. Returns: ServerProfileTemplate: """ if not self.__server_profile_templates: self.__server_profile_templates = ServerProfileTemplate(self.__connection) return self.__server_profile_templates @property def storage_systems(self): """ Gets the StorageSystems API client. Returns: StorageSystems: """ if not self.__storage_systems: self.__storage_systems = StorageSystems(self.__connection) return self.__storage_systems @property def storage_pools(self): """ Gets the StoragePools API client. Returns: StoragePools: """ if not self.__storage_pools: self.__storage_pools = StoragePools(self.__connection) return self.__storage_pools @property def storage_volume_templates(self): """ Gets the StorageVolumeTemplates API client. Returns: StorageVolumeTemplates: """ if not self.__storage_volume_templates: self.__storage_volume_templates = StorageVolumeTemplates(self.__connection) return self.__storage_volume_templates @property def storage_volume_attachments(self): """ Gets the StorageVolumeAttachments API client. Returns: StorageVolumeAttachments: """ if not self.__storage_volume_attachments: self.__storage_volume_attachments = StorageVolumeAttachments(self.__connection) return self.__storage_volume_attachments @property def firmware_drivers(self): """ Gets the FirmwareDrivers API client. Returns: FirmwareDrivers: """ if not self.__firmware_drivers: self.__firmware_drivers = FirmwareDrivers(self.__connection) return self.__firmware_drivers @property def firmware_bundles(self): """ Gets the FirmwareBundles API client. 
Returns: FirmwareBundles: """ if not self.__firmware_bundles: self.__firmware_bundles = FirmwareBundles(self.__connection) return self.__firmware_bundles @property def uplink_sets(self): """ Gets the UplinkSets API client. Returns: UplinkSets: """ if not self.__uplink_sets: self.__uplink_sets = UplinkSets(self.__connection) return self.__uplink_sets @property def volumes(self): """ Gets the Volumes API client. Returns: Volumes: """ if not self.__volumes: self.__volumes = Volumes(self.__connection) return self.__volumes @property def sas_logical_jbod_attachments(self): """ Gets the SAS Logical JBOD Attachments client. Returns: SasLogicalJbodAttachments: """ if not self.__sas_logical_jbod_attachments: self.__sas_logical_jbod_attachments = SasLogicalJbodAttachments(self.__connection) return self.__sas_logical_jbod_attachments @property def managed_sans(self): """ Gets the Managed SANs API client. Returns: ManagedSANs: """ if not self.__managed_sans: self.__managed_sans = ManagedSANs(self.__connection) return self.__managed_sans @property def migratable_vc_domains(self): """ Gets the VC Migration Manager API client. Returns: MigratableVcDomains: """ if not self.__migratable_vc_domains: self.__migratable_vc_domains = MigratableVcDomains(self.__connection) return self.__migratable_vc_domains @property def sas_interconnects(self): """ Gets the SAS Interconnects API client. Returns: SasInterconnects: """ if not self.__sas_interconnects: self.__sas_interconnects = SasInterconnects(self.__connection) return self.__sas_interconnects @property def sas_logical_interconnect_groups(self): """ Gets the SasLogicalInterconnectGroups API client. Returns: SasLogicalInterconnectGroups: """ if not self.__sas_logical_interconnect_groups: self.__sas_logical_interconnect_groups = SasLogicalInterconnectGroups(self.__connection) return self.__sas_logical_interconnect_groups @property def drive_enclosures(self): """ Gets the Drive Enclosures API client. 
Returns: DriveEnclosures: """ if not self.__drive_enclures: self.__drive_enclures = DriveEnclosures(self.__connection) return self.__drive_enclures @property def sas_logical_jbods(self): """ Gets the SAS Logical JBODs API client. Returns: SasLogicalJbod: """ if not self.__sas_logical_jbods: self.__sas_logical_jbods = SasLogicalJbods(self.__connection) return self.__sas_logical_jbods @property def labels(self): """ Gets the Labels API client. Returns: Labels: """ if not self.__labels: self.__labels = Labels(self.__connection) return self.__labels @property def index_resources(self): """ Gets the Index Resources API client. Returns: IndexResources: """ if not self.__index_resources: self.__index_resources = IndexResources(self.__connection) return self.__index_resources @property def alerts(self): """ Gets the Alerts API client. Returns: Alerts: """ if not self.__alerts: self.__alerts = Alerts(self.__connection) return self.__alerts @property def events(self): """ Gets the Events API client. Returns: Events: """ if not self.__events: self.__events = Events(self.__connection) return self.__events @property def os_deployment_plans(self): """ Gets the Os Deployment Plans API client. Returns: OsDeploymentPlans: """ if not self.__os_deployment_plans: self.__os_deployment_plans = OsDeploymentPlans(self.__connection) return self.__os_deployment_plans @property def os_deployment_servers(self): """ Gets the Os Deployment Servers API client. Returns: OsDeploymentServers: """ if not self.__os_deployment_servers: self.__os_deployment_servers = OsDeploymentServers(self.__connection) return self.__os_deployment_servers @property def certificate_rabbitmq(self): """ Gets the Certificate RabbitMQ API client. Returns: CertificateRabbitMQ: """ if not self.__certificate_rabbitmq: self.__certificate_rabbitmq = CertificateRabbitMQ(self.__connection) return self.__certificate_rabbitmq @property def users(self): """ Gets the Users API client. 
Returns: Users: """ if not self.__users: self.__users = Users(self.__connection) return self.__users @property def appliance_device_read_community(self): """ Gets the ApplianceDeviceReadCommunity API client. Returns: ApplianceDeviceReadCommunity: """ if not self.__appliance_device_read_community: self.__appliance_device_read_community = ApplianceDeviceReadCommunity(self.__connection) return self.__appliance_device_read_community @property def appliance_device_snmp_v1_trap_destinations(self): """ Gets the ApplianceDeviceSNMPv1TrapDestinations API client. Returns: ApplianceDeviceSNMPv1TrapDestinations: """ if not self.__appliance_device_snmp_v1_trap_destinations: self.__appliance_device_snmp_v1_trap_destinations = ApplianceDeviceSNMPv1TrapDestinations(self.__connection) return self.__appliance_device_snmp_v1_trap_destinations @property def appliance_device_snmp_v3_trap_destinations(self): """ Gets the ApplianceDeviceSNMPv3TrapDestinations API client. Returns: ApplianceDeviceSNMPv3TrapDestinations: """ if not self.__appliance_device_snmp_v3_trap_destinations: self.__appliance_device_snmp_v3_trap_destinations = ApplianceDeviceSNMPv3TrapDestinations(self.__connection) return self.__appliance_device_snmp_v3_trap_destinations @property def appliance_device_snmp_v3_users(self): """ Gets the ApplianceDeviceSNMPv3Users API client. Returns: ApplianceDeviceSNMPv3Users: """ if not self.__appliance_device_snmp_v3_users: self.__appliance_device_snmp_v3_users = ApplianceDeviceSNMPv3Users(self.__connection) return self.__appliance_device_snmp_v3_users @property def appliance_node_information(self): """ Gets the ApplianceNodeInformation API client. Returns: ApplianceNodeInformation: """ if not self.__appliance_node_information: self.__appliance_node_information = ApplianceNodeInformation(self.__connection) return self.__appliance_node_information @property def appliance_time_and_locale_configuration(self): """ Gets the ApplianceTimeAndLocaleConfiguration API client. 
Returns: ApplianceTimeAndLocaleConfiguration: """ if not self.__appliance_time_and_locale_configuration: self.__appliance_time_and_locale_configuration = ApplianceTimeAndLocaleConfiguration(self.__connection) return self.__appliance_time_and_locale_configuration @property def versions(self): """ Gets the Version API client. Returns: Version: """ if not self.__versions: self.__versions = Versions(self.__connection) return self.__versions @property def backups(self): """ Gets the Backup API client. Returns: Backups: """ if not self.__backups: self.__backups = Backups(self.__connection) return self.__backups @property def login_details(self): """ Gets the login details Returns: List of login details """ if not self.__login_details: self.__login_details = LoginDetails(self.__connection) return self.__login_details @property def licenses(self): """ Gets all the licenses Returns: List of licenses """ if not self.__licenses: self.__licenses = Licenses(self.__connection) return self.__licenses
mit
2,875,821,582,522,807,000
30.598394
143
0.627173
false
4.40982
true
false
false
gpfreitas/bokeh
bokeh/_json_encoder.py
1
2903
from __future__ import absolute_import

import json
import logging
import datetime as dt
import calendar
import decimal

from .util.serialization import transform_series, transform_array

import numpy as np

try:
    import pandas as pd
    is_pandas = True
except ImportError:
    is_pandas = False

try:
    from dateutil.relativedelta import relativedelta
    is_dateutil = True
except ImportError:
    is_dateutil = False

log = logging.getLogger(__name__)


class BokehJSONEncoder(json.JSONEncoder):
    """JSON encoder that also understands the Bokeh, NumPy, pandas,
    datetime and dateutil objects that appear in plot documents.

    Dates and times are serialized as milliseconds since the UNIX epoch,
    which is the representation BokehJS expects on the client side.
    """

    def transform_python_types(self, obj):
        """handle special scalars, default to default json encoder
        """
        # Pandas Timestamp
        # FIX: use the public ``pd.Timestamp`` name; the private
        # ``pd.tslib.Timestamp`` alias was removed in pandas 0.24.
        if is_pandas and isinstance(obj, pd.Timestamp):
            return obj.value / 10**6.0  # nanosecond to millisecond
        # NumPy scalars.
        # FIX: the ``np.float``/``np.int`` builtin aliases were deprecated in
        # NumPy 1.20 and removed in 1.24; the abstract scalar types
        # ``np.floating``/``np.integer`` match the same set of objects.
        elif np.issubdtype(type(obj), np.floating):
            return float(obj)
        elif np.issubdtype(type(obj), np.integer):
            return int(obj)
        elif np.issubdtype(type(obj), np.bool_):
            return bool(obj)
        # Datetime
        # datetime is a subclass of date.
        elif isinstance(obj, dt.datetime):
            return calendar.timegm(obj.timetuple()) * 1000. + obj.microsecond / 1000.
        # Date
        elif isinstance(obj, dt.date):
            return calendar.timegm(obj.timetuple()) * 1000.
        # Numpy datetime64
        elif isinstance(obj, np.datetime64):
            # NOTE: timezone-suffixed datetime64 strings ('...Z') are
            # deprecated in NumPy; the naive form denotes the same epoch.
            epoch_delta = obj - np.datetime64('1970-01-01T00:00:00')
            return (epoch_delta / np.timedelta64(1, 'ms'))
        # Time
        elif isinstance(obj, dt.time):
            return (obj.hour * 3600 + obj.minute * 60 + obj.second) * 1000 + obj.microsecond / 1000.
        elif is_dateutil and isinstance(obj, relativedelta):
            return dict(years=obj.years,
                        months=obj.months,
                        days=obj.days,
                        hours=obj.hours,
                        minutes=obj.minutes,
                        seconds=obj.seconds,
                        microseconds=obj.microseconds)
        # Decimal
        elif isinstance(obj, decimal.Decimal):
            return float(obj)
        else:
            return super(BokehJSONEncoder, self).default(obj)

    def default(self, obj):
        """Dispatch array-like and Bokeh model types, falling back to
        :meth:`transform_python_types` for scalars.
        """
        #argh! local import!
        from .plot_object import PlotObject
        from .properties import HasProps
        from .colors import Color
        ## array types
        if is_pandas and isinstance(obj, (pd.Series, pd.Index)):
            return transform_series(obj)
        elif isinstance(obj, np.ndarray):
            return transform_array(obj)
        elif isinstance(obj, PlotObject):
            return obj.ref
        elif isinstance(obj, HasProps):
            return obj.changed_properties_with_values()
        elif isinstance(obj, Color):
            return obj.to_css()
        else:
            return self.transform_python_types(obj)


def serialize_json(obj, encoder=BokehJSONEncoder, **kwargs):
    """Serialize ``obj`` to a JSON string with :class:`BokehJSONEncoder`.

    ``allow_nan=False`` turns non-finite floats into an error instead of
    emitting invalid JSON tokens.
    """
    return json.dumps(obj, cls=encoder, allow_nan=False, **kwargs)
bsd-3-clause
7,690,189,662,388,368,000
34.402439
100
0.630038
false
4.082982
false
false
false
RuthAngus/chronometer
chronometer/fit_dispersion.py
1
2000
import numpy as np from action_age_evolution import calc_dispersion import emcee import corner import matplotlib.pyplot as plt plotpar = {'axes.labelsize': 18, 'font.size': 10, 'legend.fontsize': 15, 'xtick.labelsize': 18, 'ytick.labelsize': 18, 'text.usetex': True} plt.rcParams.update(plotpar) def lnprob(pars, x, y, yerr): sz0, t1, beta, hsz = pars model = calc_dispersion([np.exp(sz0), np.exp(t1), beta, np.exp(hsz)], x) return sum(-.5*((model - y)/yerr)**2) + lnprior(pars) def lnprior(pars): lnsz0, lnt1, beta, lnhsz = pars if -20 < lnsz0 < 20 and -20 < lnt1 < 20 and -100 < beta < 100 \ and -20 < lnhsz < 20: return 0. else: return -np.inf if __name__ == "__main__": time = np.linspace(0, 14, 100) sz0 = 50. sr0 = 50. t1 = .1 tm = 10. beta = .33 R0 = 1. Rc = 1. hsz = 9. hsr = 9. solar_radius = 8. hr = 2.68/solar_radius # Today sr = 34. sz = 25.1 zpar_init = np.array([np.log(sz0), np.log(t1), beta, np.log(hsz)]) rpar_init = np.array([np.log(sr0), np.log(t1), beta, np.log(hsz)]) sigma_z = calc_dispersion([sz0 + 5, t1, beta + .2, hsz], time) sigma_r = calc_dispersion([sr0 + 5, t1, beta + .2, hsz], time) print(lnprob(zpar_init, time, sigma_z, sigma_z*.1)) x, y, yerr = time, sigma_z, sigma_z*.1 ndim, nwalkers, nsteps = len(zpar_init), 24, 10000 p0 = [1e-4*np.random.rand(ndim) + zpar_init for i in range(nwalkers)] sampler = emcee.EnsembleSampler(nwalkers, ndim, lnprob, args=[x, y, yerr]) pos, _, _ = sampler.run_mcmc(p0, 500) sampler.reset() sampler.run_mcmc(pos, nsteps) flat = np.reshape(sampler.chain, (nwalkers*nsteps, ndim)) # flat[:, :2] = np.exp(flat[:, :2]) # flat[:, 3:] = np.exp(flat[:, 3:]) labels = ["$\ln \sigma_{z0}$", "$t_1$", "$\\beta$", "$\sigma_{Hz}$"] fig = corner.corner(flat, labels=labels) fig.savefig("zcorner")
mit
5,217,928,310,203,504,000
27.985507
78
0.5585
false
2.628121
false
false
false
mozilla/universal-search-recommendation
recommendation/mozlog/middleware.py
1
2336
import json import re import time from flask import current_app, request IS_PROTOCOL = r'^[^\s]+\:\S' IS_HOSTNAME = r'^[^\s]+\.\S' LOG_PATH_BLACKLIST = [ '/favicon.ico', '/__heartbeat__', '/__lbheartbeat__', '/nginx_status', '/robots.txt', '/images' ] def request_timer(): """ before_request middleware that attaches the processing start time to the request object, for later performance assessment. """ request.start_time = time.time() def request_summary(response): """ after_request middleware that generates and logs a mozlog-formatted log about the request. Read more: https://github.com/mozilla/universal-search/blob/master/docs/metrics.md https://github.com/mozilla-services/Dockerflow/blob/master/docs/mozlog.md """ request.finish_time = time.time() response.direct_passthrough = False if request.path in LOG_PATH_BLACKLIST: return response log = {} query = request.args.get('q') log['agent'] = request.headers.get('User-Agent') log['errno'] = 0 if response.status_code < 400 else response.status_code log['lang'] = request.headers.get('Accept-Language') log['method'] = request.method log['path'] = request.path log['t'] = (request.finish_time - request.start_time) * 1000 # in ms if query: data = response.get_data(as_text=True) try: body = json.loads(data) except json.decoder.JSONDecodeError: body = {} query = query.lower() log['predicates.query_length'] = len(query) > 20 log['predicates.is_protocol'] = (re.match(IS_PROTOCOL, query) is not None) log['predicates.is_hostname'] = (re.match(IS_HOSTNAME, query) is not None) if not any([log['predicates.query_length'], log['predicates.is_protocol'], log['predicates.is_hostname']]): log['query'] = query if query else None log['status_code'] = response.status_code classifiers = body.get('enhancements') log['classifiers'] = (list(classifiers.keys()) if classifiers else []) current_app.logger.info('', extra=log) return response
mpl-2.0
9,072,272,571,820,170,000
28.948718
78
0.587329
false
3.952623
false
false
false
Gargamel1989/Seasoning-old
Seasoning/authentication/views/account_views.py
1
6951
from django.contrib.auth.decorators import login_required
from django.contrib.auth import get_user_model
from authentication.forms import AccountSettingsForm, DeleteAccountForm,\
    CheckActiveAuthenticationForm
from authentication.models import NewEmail, User
from django.contrib import messages
from django.contrib.sites.models import RequestSite
from django.shortcuts import render, redirect
from django.http.response import Http404
from django.views.decorators.debug import sensitive_post_parameters
from django.contrib.auth.views import login as django_login, logout
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.forms import PasswordChangeForm, SetPasswordForm
from django.core.paginator import Paginator, PageNotAnInteger, EmptyPage
from django.db.models.aggregates import Avg, Count


def login(request):
    """
    Delegate to Django's built-in login view, using a custom template and a
    form that also rejects inactive accounts.
    """
    return django_login(request, template_name='authentication/login.html',
                        authentication_form=CheckActiveAuthenticationForm)


@login_required
def account_settings(request, user_id=None):
    """
    Display a user's profile page with their recipes, paginated 9 per page.

    With no ``user_id`` (or the requester's own id) the requester's own
    profile is shown; otherwise the profile of the given user.
    """
    viewing_self = False
    try:
        if user_id is None or user_id == request.user.id:
            user = get_user_model().objects.prefetch_related('recipes').get(id=request.user.id)
            viewing_self = True
        else:
            user = get_user_model().objects.prefetch_related('recipes').get(id=user_id)
    except get_user_model().DoesNotExist:
        raise Http404

    recipes_list = user.recipes.all().order_by('-rating')
    try:
        averages = user.recipes.all().aggregate(Avg('footprint'), Avg('rating'))
        most_used_veganism = max(user.recipes.values('veganism').annotate(dcount=Count('veganism')),
                                 key=lambda i: i['dcount'])['veganism']
    except ValueError:
        # max() raises ValueError when the user has no recipes at all.
        averages = {'footprint__avg': None, 'rating__avg': None}
        most_used_veganism = None

    # Split the result by 9
    paginator = Paginator(recipes_list, 9)

    page = request.GET.get('page')
    try:
        recipes = paginator.page(page)
    except PageNotAnInteger:
        # Non-numeric page parameter: fall back to the first page.
        recipes = paginator.page(1)
    except EmptyPage:
        # Page out of range: fall back to the last page.
        recipes = paginator.page(paginator.num_pages)

    if request.is_ajax():
        # AJAX pagination only needs the recipe summaries fragment.
        return render(request, 'includes/recipe_summaries.html',
                      {'recipes': recipes})

    # NOTE(review): 'average_fp' is 4*None when the user has no recipes —
    # presumably the template guards against that; verify.
    return render(request, 'authentication/account_settings.html',
                  {'viewed_user': user,
                   'viewing_other': not viewing_self,
                   'recipes': recipes,
                   'average_fp': 4*averages['footprint__avg'],
                   'average_rating': averages['rating__avg'],
                   'most_used_veganism': most_used_veganism})


@login_required
def account_settings_profile(request):
    """
    Allow a user to change his account settings

    If the user has changed his email address, an activation email will be sent
    to this new address. The new address will not be activated until the link
    in this email has been clicked.

    If the user has an alternate email that should be activated, this will also
    be displayed on this page.
    """
    context = {}
    user = get_user_model().objects.get(id=request.user.id)

    if request.method == "POST":
        form = AccountSettingsForm(request.POST, request.FILES, instance=user)
        if form.is_valid():
            if form.new_email is not None:
                # Send an activation email to the new email
                NewEmail.objects.create_inactive_email(user, form.new_email, RequestSite(request))
                messages.add_message(request, messages.INFO,
                                     _('An email has been sent to the new email address provided by you. Please follow the instructions '
                                       'in this email to complete the changing of your email address.'))
            # New email address has been replaced by old email address in the form, so it will not be saved until activated
            form.save()
            user = get_user_model().objects.get(id=request.user.id)
    else:
        form = AccountSettingsForm(instance=user)

    try:
        # Show the not-yet-activated address, if the user requested a change.
        new_email = NewEmail.objects.get(user=request.user)
        context['new_email'] = new_email.email
    except NewEmail.DoesNotExist:
        pass

    context['form'] = form
    context['user'] = user
    return render(request, 'authentication/account_settings_profile.html', context)


@login_required
def account_settings_social(request):
    """Render the social account settings page (template only)."""
    return render(request, 'authentication/account_settings_social.html')


@login_required
def account_settings_privacy(request):
    """Render the privacy settings page (template only)."""
    return render(request, 'authentication/account_settings_privacy.html')


@login_required
def change_email(request, activation_key):
    """
    This checks if the given activation key belongs to the current users new,
    inactive email address. If so, this new email address is activated, and
    the users old email address is deleted.
    """
    activated = NewEmail.objects.activate_email(request.user, activation_key)
    if activated:
        messages.add_message(request, messages.INFO,
                             _('Your email address has been successfully changed.'))
        return redirect(account_settings)
    # Unknown or stale activation key.
    raise Http404


@sensitive_post_parameters()
@login_required
def change_password(request,
                    template_name='authentication/password_change_form.html',
                    password_change_form=PasswordChangeForm):
    """
    Provides a form where the users password can be changed.
    """
    if request.user.password == '!':
        # User has no usable password yet (e.g. social signup): set one
        # without asking for the old password.
        password_change_form = SetPasswordForm
    if request.method == "POST":
        form = password_change_form(user=request.user, data=request.POST)
        if form.is_valid():
            form.save()
            messages.add_message(request, messages.INFO,
                                 _('Your password has been successfully changed.'))
            return redirect(account_settings)
    form = password_change_form(user=request.user)
    return render(request, template_name, {'form': form})


@login_required
def account_delete(request):
    """
    Provides a method for deleting the users account
    """
    if request.method == 'POST':
        form = DeleteAccountForm(request.POST)
        if form.is_valid():
            # Log the user out before removing the account record.
            user = User.objects.get(pk=request.user.id)
            logout(request)
            user.delete()
            return redirect('/')
    else:
        form = DeleteAccountForm()
    return render(request, 'authentication/account_delete.html', {'form': form})
gpl-3.0
177,147,796,472,188,670
40.907407
161
0.625522
false
4.413333
false
false
false
javiercantero/streamlink
src/streamlink/plugins/mlgtv.py
1
3963
import re

from streamlink.exceptions import NoStreamsError
from streamlink.plugin import Plugin
from streamlink.plugin.api import http
from streamlink.plugin.api import validate
from streamlink.plugin.api.utils import parse_json
from streamlink.stream import HDSStream
from streamlink.stream import HLSStream


class MLGTV(Plugin):
    """Streamlink Plugin for Livestreams on mlg.tv / majorleaguegaming.com"""

    # Player embed endpoint; {0} is the channel id.
    PLAYER_EMBED_URL = "http://player2.majorleaguegaming.com/api/v2/player/embed/live/?ch={0}"
    # Channel metadata endpoint; {0} is the numeric mlg_channel_id.
    CHANNEL_API = "https://www.majorleaguegaming.com/api/channel/{0}"

    _player_config_re = re.compile(r"var playerConfig = (.+);")
    _player_embed_re = re.compile(r"""https?://player2\.majorleaguegaming\.com/api/v2/player/embed/live/\?ch=(?P<channel_id>[^"']+)""")
    _site_data_re = re.compile(r"window\.siteData = (?P<data>.+);")
    _stream_id_re = re.compile(r"<meta content='.+/([\w_-]+).+' property='og:video'>")
    _url_re = re.compile(r"http(s)?://(\w+\.)?(majorleaguegaming\.com|mlg\.tv)")

    # Extracts the list of {streamUrl, abr} dicts from the playerConfig JSON.
    _player_config_schema = validate.Schema(
        {
            "media": {
                "streams": [{
                    "streamUrl": validate.text,
                    "abr": validate.text
                }]
            }
        },
        validate.get("media", {}),
        validate.get("streams")
    )
    # Extracts the channel slug from the channel API response.
    _site_data_schema = validate.Schema(
        {
            "status_code": 200,
            "status_text": "OK",
            "data": {
                "slug": validate.text,
            }
        },
        validate.get("data", {}),
        validate.get("slug")
    )

    @classmethod
    def can_handle_url(cls, url):
        """Return a truthy match object when this plugin handles ``url``."""
        return cls._url_re.match(url)

    def _find_channel_id(self, text):
        """Scrape a channel id from a page body.

        Tries, in order: the og:video meta tag, the window.siteData JSON
        (resolved through the channel API), and a player-embed URL found in
        the page.  Returns None when nothing matches.
        """
        match = self._stream_id_re.search(text)
        if match:
            return match.group(1)

        match = self._site_data_re.search(text)
        if match:
            r_json = parse_json(match.group("data"))
            if r_json:
                mlg_channel_id = r_json.get("mlg_channel_id")
                if mlg_channel_id:
                    # Resolve the numeric id to a channel slug via the API.
                    res = http.get(self.CHANNEL_API.format(mlg_channel_id))
                    channel_id = http.json(res, schema=self._site_data_schema)
                    return channel_id

        match = self._player_embed_re.search(text)
        if match:
            return match.group("channel_id")

    def _find_stream_id(self, text):
        """Parse the playerConfig blob and return its stream list, or None."""
        match = self._player_config_re.search(text)
        if match:
            stream_id = parse_json(match.group(1),
                                   schema=self._player_config_schema)
            return stream_id

    def _get_streams(self):
        """Yield (quality, stream) pairs for the channel behind self.url."""
        match = self._player_embed_re.match(self.url)
        if match:
            # URL is already a player embed; the channel id is in the URL.
            channel_id = match.group("channel_id")
        else:
            try:
                res = http.get(self.url)
            except Exception as e:
                raise NoStreamsError(self.url)
            channel_id = self._find_channel_id(res.text)

        if not channel_id:
            return
        self.logger.info("Channel ID: {0}".format(channel_id))

        res = http.get(self.PLAYER_EMBED_URL.format(channel_id))
        items = self._find_stream_id(res.text)
        if not items:
            return

        # a/b record whether the HLS/HDS variants (respectively) failed to
        # open, so a combined warning can be emitted at the end.
        a = b = False
        for stream in items:
            if stream["abr"] == "hls":
                try:
                    for s in HLSStream.parse_variant_playlist(self.session, stream["streamUrl"]).items():
                        yield s
                except IOError:
                    a = True
            elif stream["abr"] == "hds":
                try:
                    for s in HDSStream.parse_manifest(self.session, stream["streamUrl"]).items():
                        yield s
                except IOError:
                    b = True

        if a and b:
            self.logger.warning("Could not open the stream, perhaps the channel is offline")


__plugin__ = MLGTV
bsd-2-clause
-9,079,036,932,772,741,000
32.302521
135
0.53621
false
3.756398
false
false
false
pythoneasyway/python-class
class8.py
1
1656
#!/usr/bin/python #: Title : class8.py #: Date : #: Author : pythoneasyway@gmail.com #: Description : Class number 8 #: - exercises with lists #: - adding steps into circle() to change the shape #: Version : 1.0 # define 2 lists even_list = list() odd_list = list() # we'll use the numbers from 1 to 1000 for i in range(1,1001): # % is modulo, which is here the remainder of the division of number by 2 if i % 2 == 0: # add the even number to the list even_list.append(i) else: # add the odd number to the list odd_list.append(i) print "the odd numbers are ", odd_list print "the even numbers are ", even_list # import everything from the file colors_lib from colors_lib import * # print out the color_list defined in the previous imported module print color_list # total of colors print "the total of colors is", len(color_list) import turtle as t t.showturtle() total = len(color_list) index = 1 t.up() t.goto(0,-350) t.down() for i in color_list: t.color(i) if index <100: # create first triangle t.circle(index, steps = 3) elif index<200: # create the square t.circle(index, steps = 4) elif index<250: # creae the pentagon t.circle(index, steps = 5) elif index<300: # create the hexagon t.circle(index, steps = 6) elif index<350: # last circle t.circle(index) else: # change the background t.bgcolor(i) # print in the title the color's name and the number of the color. t.title(i+" "+str(index)) t.speed(0) index = index +1 # finish t.done()
mit
3,465,932,965,838,154,000
23
77
0.618961
false
3.272727
false
false
false
TacticalGoat/reddit
WeeklyUnsolved/weeklyflairmanagers.py
2
2215
import bot
import datetime
import praw
import warnings
warnings.filterwarnings('ignore')

print('logging in')
r=bot.oG()

# Submissions must carry this flair to be considered unsolved.
FLAIR_UNSOLVED = 'unsolved'
# Thresholds: at most this many top-level / total comments to count as
# "needing attention".
MAX_ROOT_COMMENTS = 1
MAX_TOTAL_COMMENTS = 24
IGNORE_DELETED_AUTHORS = True
# strftime pattern for the output filename.
SAVE_TO_TXT = 'results_%Y%b%d.txt'
# Only consider submissions between 1 and 7 days old (seconds).
MINIMUM_AGE = 60 * 60 * 24
MAXIMUM_AGE = 7 * 60 * 60 * 24

now = datetime.datetime.now(datetime.timezone.utc)
nowstamp = now.timestamp()
outfile = now.strftime(SAVE_TO_TXT)

print('getting new')
subreddit = r.get_subreddit('excel')
new = subreddit.get_new(limit=1000)
results = []
# /new is ordered newest-first; after 10 consecutive too-old posts we can
# stop scanning entirely.
old_in_a_row = 0
for submissionindex, submission in enumerate(new):
    print('Checked %d submissions\r' % (submissionindex), end='')
    age = nowstamp - submission.created_utc
    if age < MINIMUM_AGE:
        continue
    if age > MAXIMUM_AGE:
        old_in_a_row += 1
        if old_in_a_row >= 10:
            break
        continue
    old_in_a_row = 0
    if submission.link_flair_text != FLAIR_UNSOLVED:
        continue
    if IGNORE_DELETED_AUTHORS and submission.author is None:
        continue

    # make sure to perform this part AS LATE AS POSSIBLE to avoid
    # api calls.
    submission.replace_more_comments(limit=None, threshold=1)
    roots = submission.comments[:]
    total = praw.helpers.flatten_tree(submission.comments)
    # Cache the comment lists on the submission for the report loop below.
    submission.croots = roots
    submission.ctotal = total
    if len(total) > MAX_TOTAL_COMMENTS:
        continue
    # only counts roots
    if len(roots) > MAX_ROOT_COMMENTS:
        continue
    results.append(submission)
print()

results.sort(key=lambda s: (s.created_utc, s.num_comments))
# Replace each submission object in-place with its markdown table row.
for (submissionindex, submission) in enumerate(results):
    author = '/u/'+submission.author.name if submission.author else '[deleted]'
    timeformat = datetime.datetime.utcfromtimestamp(submission.created_utc)
    timeformat = timeformat.strftime('%d %b %Y %H:%M:%S')
    formatted = '[%s](%s) | %s | %s | %d' % (submission.title, submission.short_link, author, timeformat, len(submission.ctotal))
    results[submissionindex] = formatted

# Assemble a reddit-markdown table and write it to the dated output file.
table = 'title | author | time | comments\n'
table += ':- | :- | -: | -:\n'
table += '\n'.join(results)

outfile = open(outfile, 'w')
print(table, file=outfile)
outfile.close()
mit
-1,803,044,578,515,429,000
28.144737
129
0.671783
false
3.30597
false
false
false
gdsfactory/gdsfactory
pp/components/extend_ports_list.py
1
1197
from typing import Any, Dict, List, Optional from pp.cell import cell from pp.component import Component from pp.components.straight_heater import straight_with_heater from pp.port import Port, auto_rename_ports from pp.types import ComponentOrFactory @cell def extend_ports_list( ports: List[Port], extension_factory: ComponentOrFactory = straight_with_heater, extension_settings: Optional[Dict[str, Any]] = None, extension_port_name: str = "W0", ) -> Component: """Returns a component with extension to list of ports.""" c = Component() extension_settings = extension_settings or {} extension = ( extension_factory(**extension_settings) if callable(extension_factory) else extension_factory ) for i, port in enumerate(ports): extension_ref = c << extension extension_ref.connect(extension_port_name, port) for port_name, port in extension_ref.ports.items(): c.add_port(f"{i}_{port_name}", port=port) auto_rename_ports(c) return c if __name__ == "__main__": import pp c = pp.c.mmi1x2() cr = extend_ports_list(ports=c.get_ports_list()) c.add_ref(cr) c.show()
mit
2,180,702,165,455,955,500
26.204545
65
0.663325
false
3.638298
false
false
false
tksn/phoneauto
phoneauto/scriptgenerator/scriptgenerator_ui.py
1
26121
# -*- coding: utf-8 -*- """scriptgenerator GUI :copyright: (c) 2015 by tksn :license: MIT """ # pylint: disable=invalid-name # pylint: disable=too-many-instance-attributes # pylint: disable=too-few-public-methods from __future__ import unicode_literals, print_function import contextlib import logging import math import platform import tkinter import tkinter.font from tkinter import ttk import time from PIL import Image, ImageTk, ImageDraw, ImageFont from phoneauto.scriptgenerator.exception import ( UiInconsitencyError, UiObjectNotFound) from phoneauto.scriptgenerator.screenrecord import Screenrecord def get_filedialog(): # pragma: no cover """Returns filedialog module object Returns appropriate filedialog module depending on sys.version. The reason doing this is because python.future's tkinter.filedialog is alias to FileDialog, not to tkFileDialog. """ import sys if sys.version_info.major >= 3: import tkinter.filedialog return tkinter.filedialog else: import tkFileDialog return tkFileDialog @contextlib.contextmanager def display_wait(root_window): """Displays wait icon while context is alive""" root_window.config(cursor='wait') root_window.update() yield root_window.config(cursor='') class ScriptGeneratorUI(object): """Automation script generator UI""" _SCR_REFRESH_INTERVAL = 100 _HVIEW_REFRESH_INTERVAL = 3 _HVIEW_REFRESH_INTERVAL_AFTER_SCR_REFRESH = 1 _MOUSE_MOVE_THRESH = 20 _CLICKCIRCLE_RADIUS = 5 def __init__(self, screen_size=(480, 800), platform_sys=None, timeouts=None): """Initialization Args: scale (float): magnification scale which is used when screenshot is displayed in this UI """ self.logger = logging.getLogger(__name__) self.logger.info('initialization start') self._controller = None self._scale = None self._screenshot = None self._mouse_action = None self.hierarchy_view_timestamp = 0 timeouts = timeouts or {} self._wait_timeouts = {} default_timeouts = { 'idle': 5000, 'update': 1000, 'exists': 5000, 'gone': 5000} for name, default_value in 
default_timeouts.items(): self._wait_timeouts[name] = timeouts.get(name, default_value) self._hold_timer_id = None self._root = None self._platform = platform_sys or platform.system() self._screenrecord = Screenrecord( width=screen_size[0], height=screen_size[1]) self._build_ui() self.logger.info('initialization end') def run(self, controller): """Launches UI and enter the event loop Args: controller (object): scriptgenerator object """ self._controller = controller self._enable_ui() try: self._root.mainloop() finally: if self._screenrecord: self._screenrecord.join() self._screenrecord = None def _build_ui(self): """Creates UI components and builds up application UI""" from tkinter import N, W, E, S self._root = tkinter.Tk() self._root.title('phoneauto-scriptgenerator') mainframe = ttk.Frame(self._root, name='mainframe') mainframe.grid(row=0, column=0, sticky=(N, W, E, S)) canvas = self._create_canvas(mainframe) canvas.grid(row=1, column=0, columnspan=3, sticky=(N, W, E, S)) back_button = ttk.Button( mainframe, text='Back', name='back_button') back_button.grid(row=2, column=0, sticky=(N, W, E, S)) home_button = ttk.Button( mainframe, text='Home', name='home_button') home_button.grid(row=2, column=1, sticky=(N, W, E, S)) recent_button = ttk.Button( mainframe, text='Recent Apps', name='recent_button') recent_button.grid(row=2, column=2, sticky=(N, W, E, S)) sidebar = ttk.Frame(self._root, name='sidebar') sidebar.grid(row=0, column=1, sticky=(N, W, E, S)) self._build_sidebar(sidebar) self._root.update() def _create_canvas(self, parent): """Displays placeholder (Initializing message) screen before actual screenshot is aquired """ from tkinter import NW screencap = self._screenrecord.capture_oneshot() placeholder_tk = ImageTk.PhotoImage(screencap) canvas = tkinter.Canvas(parent, width=screencap.width, height=screencap.height, name='canvas') image_id = canvas.create_image(0, 0, anchor=NW, image=placeholder_tk) text = 'Initializing' text_x, text_y = screencap.width / 2, 
screencap.height / 2 text_id = canvas.create_text( text_x, text_y, text=text, fill='white', font=('Courier', 32), tag='init_text') bgrect_id = canvas.create_rectangle( canvas.bbox(text_id), fill='black', tag='init_text_bg') canvas.tag_lower(bgrect_id, text_id) self._screenshot = {'image': placeholder_tk, 'id': image_id, 'size': screencap.size} return canvas @staticmethod def _build_sidebar(sidebar): """Constructs side panel""" def button(master, widget_options, pack_options=None): """Creates a button""" pack_options = pack_options or {'fill': tkinter.X} btn = ttk.Button(master, **widget_options) btn.pack(**pack_options) def label(master, widget_options, pack_options=None): """Creates a label""" pack_options = (pack_options or {'fill': tkinter.X, 'anchor': tkinter.NW}) btn = ttk.Label(master, **widget_options) btn.pack(**pack_options) def separator(master, widget_options, pack_options=None): """Creates a separator""" pack_options = pack_options or {'fill': tkinter.X, 'pady': 5} sep = ttk.Separator(master, **widget_options) sep.pack(**pack_options) button(sidebar, {'name': 'refresh_button', 'text': 'Refresh'}) button(sidebar, {'name': 'screenshot_button', 'text': 'Screenshot'}) separator(sidebar, {'orient': tkinter.HORIZONTAL}) button(sidebar, {'name': 'power_button', 'text': 'Power'}) button(sidebar, {'name': 'notification_button', 'text': 'Notification'}) button(sidebar, {'name': 'quicksettings_button', 'text': 'QuickSettings'}) button(sidebar, {'name': 'volume_up_button', 'text': 'Volume Up'}) button(sidebar, {'name': 'volume_down_button', 'text': 'Volume Down'}) label(sidebar, {'text': 'Orientation:'}) frm = ttk.Frame(sidebar, name='orientation_frame') def orient_button(name, text): """Orientation button""" button(frm, {'name': name, 'text': text, 'width': 2}, {'side': tkinter.LEFT}) orient_button('orientation_natural', 'N') orient_button('orientation_left', 'L') orient_button('orientation_right', 'R') orient_button('orientation_upsidedown', 'U') 
orient_button('orientation_unfreeze', 'Z') frm.pack() separator(sidebar, {'orient': tkinter.HORIZONTAL}) label(sidebar, {'text': 'Insert line to script:'}) button(sidebar, {'name': 'ins_screenshot_cap', 'text': 'screenshot capture'}) button(sidebar, {'name': 'ins_wait_idle', 'text': 'wait.idle'}) button(sidebar, {'name': 'ins_wait_update', 'text': 'wait.update'}) separator(sidebar, {'orient': tkinter.HORIZONTAL}) text = tkinter.Text(sidebar, width=30, name='infotext') text.pack(padx=3, pady=2) def _enable_ui(self): """2nd phase initialization - activate UI""" self._bind_commands_to_widgets() self._acquire_hierarchy_view() self._set_screen_scale() self._screenrecord.start() self._kick_video_update() self._refresh_screen() canvas = self._root.nametowidget('mainframe.canvas') canvas.delete('init_text') canvas.delete('init_text_bg') def _bind_commands_to_widgets(self): """Initialization after controller became available""" def bind_custom_command(widget_name, command): self._root.nametowidget(widget_name).config(command=command) def bind_command(widget_name, command_name, **command_kwargs): bind_custom_command(widget_name, self.__get_command_wrap(command_name, **command_kwargs)) bind_command('mainframe.back_button', 'press_key', key_name='BACK') bind_command('mainframe.home_button', 'press_key', key_name='HOME') bind_command('mainframe.recent_button', 'press_key', key_name='APP_SWITCH') bind_custom_command('sidebar.refresh_button', lambda _: self._acquire_hierarchy_view()) bind_custom_command('sidebar.screenshot_button', self._take_screenshot) bind_command('sidebar.power_button', 'press_key', key_name='POWER') bind_command('sidebar.notification_button', 'open_notification') bind_command('sidebar.quicksettings_button', 'open_quick_settings') bind_command('sidebar.volume_up_button', 'press_key', key_name='VOLUME_UP') bind_command('sidebar.volume_down_button', 'press_key', key_name='VOLUME_DOWN') bind_command('sidebar.orientation_frame.orientation_natural', 
'set_orientation', orientation='natural') bind_command('sidebar.orientation_frame.orientation_left', 'set_orientation', orientation='left') bind_command('sidebar.orientation_frame.orientation_right', 'set_orientation', orientation='right') bind_command( 'sidebar.orientation_frame.orientation_upsidedown', 'set_orientation', orientation='upsidedown') bind_command('sidebar.orientation_frame.orientation_unfreeze', 'set_orientation', orientation='unfreeze') bind_command('sidebar.ins_screenshot_cap', 'insert_screenshot_capture') bind_command('sidebar.ins_wait_idle', 'insert_wait', for_what='idle', timeout=self._wait_timeouts['idle']) bind_command('sidebar.ins_wait_update', 'insert_wait', for_what='update', timeout=self._wait_timeouts['update']) canvas = self._root.nametowidget('mainframe.canvas') canvas.bind('<Motion>', self._on_mouse_motion) canvas.bind('<Leave>', self._on_mouse_leave) canvas.bind('<Button-1>', self._on_mouse_left_down) canvas.bind('<ButtonRelease-1>', self._on_mouse_left_up) canvas.bind('<B1-Motion>', self._on_mouse_b1motion) rbutton_events = ( ('<Button-2>', '<ButtonRelease-2>', '<B2-Motion>') if self._platform == 'Darwin' else ('<Button-3>', '<ButtonRelease-3>', '<B3-Motion>')) canvas.bind(rbutton_events[0], self._on_mouse_right_down) canvas.bind(rbutton_events[1], self._on_mouse_right_up) canvas.bind(rbutton_events[2], self._on_mouse_b1motion) def _kick_video_update(self): """Workaround: Some movements on the device's screen are needed in order to pull up first few frames from the device.. 
""" self._screenrecord.kick() def _refresh_hierarchy_view(self, screen_refreshed): if self._controller is None: return interval = (self._HVIEW_REFRESH_INTERVAL_AFTER_SCR_REFRESH if screen_refreshed else self._HVIEW_REFRESH_INTERVAL) hierarchy_view_age = time.time() - self.hierarchy_view_timestamp if hierarchy_view_age > interval: self._acquire_hierarchy_view() def _refresh_screen(self): from tkinter import NW frame = None while not self._screenrecord.queue.empty(): frame = self._screenrecord.queue.get_nowait() hierarchy_view_age = time.time() - self.hierarchy_view_timestamp if frame: disp_frame = ImageTk.PhotoImage(frame) canvas = self._root.nametowidget('mainframe.canvas') canvas.delete(self._screenshot['id']) canvas.config(width=self._screenrecord.width, height=self._screenrecord.height) all_other_items = canvas.find_all() image_id = canvas.create_image(0, 0, anchor=NW, image=disp_frame) if all_other_items: canvas.tag_lower(image_id, all_other_items[0]) self._screenshot = {'image': disp_frame, 'id': image_id} self._refresh_hierarchy_view(frame) self._root.after(self._SCR_REFRESH_INTERVAL, self._refresh_screen) def _acquire_hierarchy_view(self): """Acquires screenshot from the device, and place it on the UI's canvas Returns: Tkinter.Canvas: canvas object """ self._controller.execute('update_view_dump') self.hierarchy_view_timestamp = time.time() def _set_screen_scale(self): """Sets screen scale information""" self._scale = self._screenrecord.get_scale() def _descale(self, coord): """Converts a coordinate from canvas-coordinats to device-screen-coorinates Args: coord (tuple): coordinats (x, y) """ return int(coord[0] / self._scale[0]), int(coord[1] / self._scale[1]) def _on_mouse_leave(self, event): """Callback for mouse leave event Args: event (object): event information which is passed by Tk framework """ canvas = self._root.nametowidget('mainframe.canvas') canvas.delete('object_rect') def _on_mouse_motion(self, event): """Callback for mouse motion event Args: 
event (object): event information which is passed by Tk framework """ canvas = self._root.nametowidget('mainframe.canvas') canvas.delete('object_rect') text = self._root.nametowidget('sidebar.infotext') text.delete(1.0, tkinter.END) command_args = {'start': self._descale((event.x, event.y))} obj_info = self._controller.execute( 'get_hierarchy_view_object_info', command_args) if obj_info: bounds = obj_info['visibleBounds'] def scale(coord): """Scale coordinates from actual screen -> view""" return ( int(coord[0] * self._scale[0]), int(coord[1] * self._scale[1])) xy0 = scale((bounds['left'], bounds['top'])) xy1 = scale((bounds['right'], bounds['bottom'])) canvas.create_rectangle( xy0[0], xy0[1], xy1[0], xy1[1], outline='red', width=2, tag='object_rect') for k, v in obj_info.items(): v = v or '-' text.insert(tkinter.END, '{0}: {1}\n'.format(k, v)) def _on_mouse_b1motion(self, event): """Callback for left-button motion event Args: event (object): event information which is passed by Tk framework """ self._mouse_action['current'] = event.x, event.y self._draw_mouse_action() def _mouse_moved(self): """Queries if mouse is moved""" xS, yS = self._mouse_action['start'] xC, yC = self._mouse_action['current'] return math.hypot(xC - xS, yC - yS) > self._MOUSE_MOVE_THRESH def _draw_mouse_action(self, erase=False): """Draws mouse action (swipe, drag, etc) on the screen""" canvas = self._root.nametowidget('mainframe.canvas') canvas.delete('down_point') canvas.delete('move_line') if erase: return xS, yS = self._mouse_action['start'] xC, yC = self._mouse_action['current'] color = ('blue' if self._mouse_action['left_or_right'] == 'left' else 'yellow') fill = color canvas.create_line(xS, yS, xC, yC, fill=color, width=2, tag='move_line') def oval_coords(radius): """Returns oval coordinates""" tl = tuple(p - radius for p in (xS, yS)) br = tuple(p + radius for p in (xS, yS)) return (tl[0], tl[1], br[0], br[1]) canvas.create_oval(*oval_coords(self._CLICKCIRCLE_RADIUS), outline=color, 
fill=fill, tag='down_point') def _on_mouse_left_down(self, event): """Callback for mouse left-button-down event Args: event (object): event information which is passed by Tk framework """ x, y = event.x, event.y self._mouse_action = { 'start': (x, y), 'current': (x, y), 'left_or_right': 'left' } self._draw_mouse_action() def _on_mouse_left_up(self, event): """Callback for left-button-up event Args: event (object): Event information which is passed by Tk framework """ cur = event.x, event.y self._mouse_action['current'] = cur if self._mouse_moved(): self._left_2point_action_menu(cur) else: self._left_1point_action_menu(cur) self._draw_mouse_action(erase=True) def _on_mouse_right_down(self, event): """Callback for mouse right-button-down event Args: event (object): event information which is passed by Tk framework """ x, y = event.x, event.y self._mouse_action = { 'start': (x, y), 'current': (x, y), 'left_or_right': 'right' } self._draw_mouse_action() def _on_mouse_right_up(self, event): """Callback for right-button-up event Args: event (object): Event information which is passed by Tk framework """ cur = event.x, event.y self._mouse_action['current'] = cur if self._mouse_moved(): self._right_2point_action_menu(cur) else: self._right_1point_action_menu(cur) self._draw_mouse_action(erase=True) def __get_command_wrap(self, command_name, **aditional_args): """Returns wrapped controller command""" command_args = dict(aditional_args) if self._mouse_action: command_args['start'] = self._descale(self._mouse_action['start']) command_args['end'] = self._descale(self._mouse_action['current']) def command_wrap(): """controller command execution""" try: with display_wait(self._root): retval = self._controller.execute( command_name, command_args) return retval except (UiObjectNotFound, UiInconsitencyError): self._acquire_hierarchy_view() return command_wrap def _left_1point_action_menu(self, position): """Displays 1-point left-click menu""" menu = tkinter.Menu(self._root, 
name='menu') menu.add_command( label='Click(xy)', command=self.__get_command_wrap('click_xy')) menu.add_command( label='Long click(xy)', command=self.__get_command_wrap('long_click_xy')) menu.post(*position) def _left_2point_action_menu(self, position): """Displays 2-points left-click menu""" menu = tkinter.Menu(self._root, name='menu') menu.add_command( label='Swipe(xy -> xy)', command=self.__get_command_wrap('swipe_xy_to_xy', options={'steps': 10})) menu.add_command( label='Drag(xy -> xy)', command=self.__get_command_wrap('drag_xy_to_xy')) menu.add_command( label='Drag(object -> xy)', command=self.__get_command_wrap('drag_object_to_xy')) menu.add_command( label='Fling', command=self.__get_command_wrap('fling')) menu.add_command( label='Scroll', command=self.__get_command_wrap('scroll')) menu.post(*position) def _right_1point_action_menu(self, position): """Displays 1-point right-click menu""" menu = tkinter.Menu(self._root, name='menu') menu.add_command( label='Click(object)', command=self.__get_command_wrap('click_object')) menu.add_command( label='Click(object) and wait', command=self.__get_command_wrap( 'click_object', wait=self._wait_timeouts['update'])) menu.add_command( label='Long click(object)', command=self.__get_command_wrap('long_click_object')) menu.add_command( label='Clear text', command=self.__get_command_wrap('clear_text')) menu.add_command( label='Enter text', command=lambda: self._text_action( 'enter_text', lambda text: {'text': text})) menu.add_command(label='Pinch in', command=lambda: self._pinch('In')) menu.add_command(label='Pinch out', command=lambda: self._pinch('Out')) menu.add_separator() menu.add_command( label='Insert wait-exists', command=self.__get_command_wrap( 'insert_wait_object', for_what='exists', timeout=self._wait_timeouts['exists'])) menu.add_command( label='Insert wait-gone', command=self.__get_command_wrap( 'insert_wait_object', for_what='gone', timeout=self._wait_timeouts['gone'])) menu.post(*position) def 
_right_2point_action_menu(self, position): """Displays 2-points right-click menu""" menu = tkinter.Menu(self._root, name='menu') menu.add_command( label='Swipe(object + direction)', command=self.__get_command_wrap('swipe_object_with_direction')) menu.add_command( label='Drag(object -> object)', command=self.__get_command_wrap('drag_object_to_object')) menu.add_command( label='Fling to end', command=self.__get_command_wrap('fling_to_end')) menu.add_command( label='Scroll to end', command=self.__get_command_wrap('scroll_to_end')) menu.add_command( label='Scroll to text', command=lambda: self._text_action( 'scroll_to', lambda text: {'options': {'text': text}})) menu.post(*position) def _text_action(self, command_name, command_kwargs_gen): """Callback for Enter text event""" from tkinter import NW # Create a dialog on the canvas canvas = self._root.nametowidget('mainframe.canvas') top = tkinter.Toplevel(canvas, name='textentrywindow') # Place a TextEntry on the dialog entry = ttk.Entry(top, name='textentry') entry.grid(row=0, column=0, sticky=NW) def on_ok(): """Callback for ok-click""" text = entry.get() top.destroy() self._root.after( 0, self.__get_command_wrap(command_name, **command_kwargs_gen(text))) # Place a OK button on the dialog ok_button = ttk.Button(top, text='OK', command=on_ok, name='ok_button') ok_button.grid(row=0, column=1, sticky=NW) canvas.wait_window(top) def _pinch(self, in_or_out): """Pinch-in/out event handler implementation""" from tkinter import NW, SE, StringVar # Create a dialog on the canvas canvas = self._root.nametowidget('mainframe.canvas') top = tkinter.Toplevel(canvas, name='pinchwindow') # Place a TextEntry on the dialog pinch_label_text = 'Pinch {0}:'.format(in_or_out) lebel0 = ttk.Label(top, text=pinch_label_text, name='pinchlabel') lebel0.grid(row=0, column=0, sticky=NW) slider = ttk.Scale(top, value=1.0, name='pinchinslider') slider.grid(row=0, column=1, sticky=NW) lebel1 = ttk.Label(top, text='Steps:', name='steplabel') 
lebel1.grid(row=1, column=0, sticky=NW) stepsStr = StringVar(value='10') entry = ttk.Entry(top, textvariable=stepsStr, name='steps') entry.grid(row=1, column=1, sticky=NW) def on_ok(): """Callback for ok-click""" percent = int(slider.get() * 100) steps = int(stepsStr.get()) top.destroy() self._root.after(0, self.__get_command_wrap( 'pinch', in_or_out=in_or_out, options={ 'percent': percent, 'steps': steps })) # Place a OK button on the dialog ok_button = ttk.Button(top, text='OK', command=on_ok, name='ok_button') ok_button.grid(row=0, column=2, rowspan=2, sticky=(NW, SE)) canvas.wait_window(top) def _take_screenshot(self): """Callback for Take Screenshot""" filename = get_filedialog().asksaveasfilename(defaultextension='.png') if not filename: return with display_wait(self._root): scr = self._controller.execute('get_screenshot') scr.save(filename)
mit
-2,263,941,709,933,397,800
37.526549
79
0.567015
false
3.984289
false
false
false
b3orn/mania
mania/compiler.py
1
3056
# -*- coding: utf-8 -*- ''' mania.compiler ~~~~~~~~~~~~~~ :copyright: (c) 2014 by Björn Schulz. :license: MIT, see LICENSE for more details. ''' from __future__ import absolute_import import logging import io import mania.types import mania.instructions logger = logging.getLogger(__name__) class Placeholder(object): def __init__(self, instruction): self.instruction = instruction class Builder(object): def __init__(self, name, entry_point): self.name = name self.entry_point = entry_point self.constants = [name] self.instructions = [] @property def module(self): return mania.types.Module( name=self.name, entry_point=self.entry_point, constants=self.constants, instructions=self.instructions ) def constant(self, value): if value in self.constants: return self.constants.index(value) self.constants.append(value) return len(self.constants) - 1 def index(self): return len(self.instructions) def add(self, instruction): index = self.index() self.instructions.append(instruction) return index def replace(self, index, instruction): self.instructions[index] = instruction class Compiler(object): def __init__(self, name=None): self.name = name or mania.types.Symbol('') self.builder = Builder(self.name, 0) def compile(self, code): raise NotImplementedError('"eval" needs to be implemented in subclasses') class SimpleCompiler(Compiler): def compile(self, module): for element in module: self.compile_any(element) self.builder.add(mania.instructions.Eval()) self.builder.add(mania.instructions.Exit()) return self.builder.module def compile_any(self, code): if isinstance(code, mania.types.Pair): self.compile_pair(code) elif isinstance(code, mania.types.Quoted): self.compile_quoted(code) elif isinstance(code, mania.types.Quasiquoted): self.compile_quasiquoted(code) elif isinstance(code, mania.types.Unquoted): self.compile_unquoted(code) else: self.compile_constant(code) def compile_pair(self, code): self.compile_any(code.head) self.compile_any(code.tail) 
self.builder.add(mania.instructions.BuildPair()) def compile_quoted(self, code): self.compile_any(code.value) self.builder.add(mania.instructions.BuildQuoted()) def compile_quasiquoted(self, code): self.compile_any(code.value) self.builder.add(mania.instructions.BuildQuasiquoted()) def compile_unquoted(self, code): self.compile_any(code.value) self.builder.add(mania.instructions.BuildUnquoted()) def compile_constant(self, code): index = self.builder.constant(code) self.builder.add(mania.instructions.LoadConstant(index))
mit
-7,002,074,155,497,473,000
22.867188
81
0.629787
false
3.993464
false
false
false
brunosantos/Bsan-kodi-repo
plugin.video.sparrowtv/resources/lib/net.py
1
10147
''' common XBMC Module Copyright (C) 2011 t0mm0 This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. ''' import cookielib import gzip import re import StringIO import urllib import urllib2 import socket # Set Global timeout - Useful for slow connections and Putlocker. socket.setdefaulttimeout(60) class HeadRequest(urllib2.Request): '''A Request class that sends HEAD requests''' def get_method(self): return 'HEAD' class Net: ''' This class wraps :mod:`urllib2` and provides an easy way to make http requests while taking care of cookies, proxies, gzip compression and character encoding. Example:: from addon.common.net import Net net = Net() response = net.http_GET('http://xbmc.org') print response.content ''' _cj = cookielib.LWPCookieJar() _proxy = None _user_agent = 'Mozilla/5.0 (X11; Linux i686) AppleWebKit/535.1 ' + \ '(KHTML, like Gecko) Chrome/13.0.782.99 Safari/535.1' _http_debug = False def __init__(self, cookie_file='', proxy='', user_agent='', http_debug=False): ''' Kwargs: cookie_file (str): Full path to a file to be used to load and save cookies to. proxy (str): Proxy setting (eg. ``'http://user:pass@example.com:1234'``) user_agent (str): String to use as the User Agent header. If not supplied the class will use a default user agent (chrome) http_debug (bool): Set ``True`` to have HTTP header info written to the XBMC log for all requests. 
''' if cookie_file: self.set_cookies(cookie_file) if proxy: self.set_proxy(proxy) if user_agent: self.set_user_agent(user_agent) self._http_debug = http_debug self._update_opener() def set_cookies(self, cookie_file): ''' Set the cookie file and try to load cookies from it if it exists. Args: cookie_file (str): Full path to a file to be used to load and save cookies to. ''' try: self._cj.load(cookie_file, ignore_discard=True) self._update_opener() return True except: return False def get_cookies(self): '''Returns A dictionary containing all cookie information by domain.''' return self._cj._cookies def save_cookies(self, cookie_file): ''' Saves cookies to a file. Args: cookie_file (str): Full path to a file to save cookies to. ''' self._cj.save(cookie_file, ignore_discard=True) def set_proxy(self, proxy): ''' Args: proxy (str): Proxy setting (eg. ``'http://user:pass@example.com:1234'``) ''' self._proxy = proxy self._update_opener() def get_proxy(self): '''Returns string containing proxy details.''' return self._proxy def set_user_agent(self, user_agent): ''' Args: user_agent (str): String to use as the User Agent header. ''' self._user_agent = user_agent def get_user_agent(self): '''Returns user agent string.''' return self._user_agent def _update_opener(self): ''' Builds and installs a new opener to be used by all future calls to :func:`urllib2.urlopen`. ''' if self._http_debug: http = urllib2.HTTPHandler(debuglevel=1) else: http = urllib2.HTTPHandler() if self._proxy: opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self._cj), urllib2.ProxyHandler({'http': self._proxy}), urllib2.HTTPBasicAuthHandler(), http) else: opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self._cj), urllib2.HTTPBasicAuthHandler(), http) urllib2.install_opener(opener) def http_GET(self, url, headers={}, compression=True): ''' Perform an HTTP GET request. Args: url (str): The URL to GET. 
Kwargs: headers (dict): A dictionary describing any headers you would like to add to the request. (eg. ``{'X-Test': 'testing'}``) compression (bool): If ``True`` (default), try to use gzip compression. Returns: An :class:`HttpResponse` object containing headers and other meta-information about the page and the page content. ''' return self._fetch(url, headers=headers, compression=compression) def http_POST(self, url, form_data, headers={}, compression=True): ''' Perform an HTTP POST request. Args: url (str): The URL to POST. form_data (dict): A dictionary of form data to POST. Kwargs: headers (dict): A dictionary describing any headers you would like to add to the request. (eg. ``{'X-Test': 'testing'}``) compression (bool): If ``True`` (default), try to use gzip compression. Returns: An :class:`HttpResponse` object containing headers and other meta-information about the page and the page content. ''' return self._fetch(url, form_data, headers=headers, compression=compression) def http_HEAD(self, url, headers={}): ''' Perform an HTTP HEAD request. Args: url (str): The URL to GET. Kwargs: headers (dict): A dictionary describing any headers you would like to add to the request. (eg. ``{'X-Test': 'testing'}``) Returns: An :class:`HttpResponse` object containing headers and other meta-information about the page. ''' req = HeadRequest(url) req.add_header('User-Agent', self._user_agent) for k, v in headers.items(): req.add_header(k, v) response = urllib2.urlopen(req) return HttpResponse(response) def _fetch(self, url, form_data={}, headers={}, compression=True): ''' Perform an HTTP GET or POST request. Args: url (str): The URL to GET or POST. form_data (dict): A dictionary of form data to POST. If empty, the request will be a GET, if it contains form data it will be a POST. Kwargs: headers (dict): A dictionary describing any headers you would like to add to the request. (eg. ``{'X-Test': 'testing'}``) compression (bool): If ``True`` (default), try to use gzip compression. 
Returns: An :class:`HttpResponse` object containing headers and other meta-information about the page and the page content. ''' encoding = '' req = urllib2.Request(url) if form_data: form_data = urllib.urlencode(form_data) req = urllib2.Request(url, form_data) req.add_header('User-Agent', self._user_agent) for k, v in headers.items(): req.add_header(k, v) if compression: req.add_header('Accept-Encoding', 'gzip') response = urllib2.urlopen(req) return HttpResponse(response) class HttpResponse: ''' This class represents a resoponse from an HTTP request. The content is examined and every attempt is made to properly encode it to Unicode. .. seealso:: :meth:`Net.http_GET`, :meth:`Net.http_HEAD` and :meth:`Net.http_POST` ''' content = '' '''Unicode encoded string containing the body of the reposne.''' def __init__(self, response): ''' Args: response (:class:`mimetools.Message`): The object returned by a call to :func:`urllib2.urlopen`. ''' self._response = response html = response.read() try: if response.headers['content-encoding'].lower() == 'gzip': html = gzip.GzipFile(fileobj=StringIO.StringIO(html)).read() except: pass try: content_type = response.headers['content-type'] if 'charset=' in content_type: encoding = content_type.split('charset=')[-1] except: pass r = re.search('<meta\s+http-equiv="Content-Type"\s+content="(?:.+?);' + '\s+charset=(.+?)"', html, re.IGNORECASE) if r: encoding = r.group(1) try: html = unicode(html, encoding) except: pass self.content = html def get_headers(self): '''Returns a List of headers returned by the server.''' return self._response.info().headers def get_url(self): ''' Return the URL of the resource retrieved, commonly used to determine if a redirect was followed. ''' return self._response.geturl()
gpl-2.0
-6,202,957,328,608,819,000
31.315287
82
0.550212
false
4.558401
true
false
false
hbenniou/trunk
doc/sphinx/tutorial/04-periodic-simple-shear.py
1
4853
# encoding: utf-8 # script for periodic simple shear test, with periodic boundary # first compresses to attain some isotropic stress (checkStress), # then loads in shear (checkDistorsion) # # the initial packing is either regular (hexagonal), with empty bands along the boundary, # or periodic random cloud of spheres # # material friction angle is initially set to zero, so that the resulting packing is dense # (sphere rearrangement is easier if there is no friction) # # setup the periodic boundary O.periodic=True O.cell.refSize=(2,2,2) from yade import pack,plot # the "if 0:" block will be never executed, therefore the "else:" block will be # to use cloud instead of regular packing, change to "if 1:" or something similar if 0: # create cloud of spheres and insert them into the simulation # we give corners, mean radius, radius variation sp=pack.SpherePack() sp.makeCloud((0,0,0),(2,2,2),rMean=.1,rRelFuzz=.6,periodic=True) # insert the packing into the simulation sp.toSimulation(color=(0,0,1)) # pure blue else: # in this case, add dense packing O.bodies.append( pack.regularHexa(pack.inAlignedBox((0,0,0),(2,2,2)),radius=.1,gap=0,color=(0,0,1)) ) # create "dense" packing by setting friction to zero initially O.materials[0].frictionAngle=0 # simulation loop (will be run at every step) O.engines=[ ForceResetter(), InsertionSortCollider([Bo1_Sphere_Aabb()]), InteractionLoop( [Ig2_Sphere_Sphere_L3Geom()], [Ip2_FrictMat_FrictMat_FrictPhys()], [Law2_L3Geom_FrictPhys_ElPerfPl()] ), NewtonIntegrator(damping=.4), # run checkStress function (defined below) every second # the label is arbitrary, and is used later to refer to this engine PyRunner(command='checkStress()',realPeriod=1,label='checker'), # record data for plotting every 100 steps; addData function is defined below PyRunner(command='addData()',iterPeriod=100) ] # set the integration timestep to be 1/2 of the "critical" timestep O.dt=.5*PWaveTimeStep() # prescribe isotropic normal deformation (constant strain rate) # of 
the periodic cell O.cell.velGrad=Matrix3(-.1,0,0, 0,-.1,0, 0,0,-.1) # when to stop the isotropic compression (used inside checkStress) limitMeanStress=-5e5 # called every second by the PyRunner engine def checkStress(): # stress tensor as the sum of normal and shear contributions # Matrix3.Zero is the intial value for sum(...) stress=sum(normalShearStressTensors(),Matrix3.Zero) print 'mean stress',stress.trace()/3. # if mean stress is below (bigger in absolute value) limitMeanStress, start shearing if stress.trace()/3.<limitMeanStress: # apply constant-rate distorsion on the periodic cell O.cell.velGrad=Matrix3(0,0,.1, 0,0,0, 0,0,0) # change the function called by the checker engine # (checkStress will not be called anymore) checker.command='checkDistorsion()' # block rotations of particles to increase tanPhi, if desired # disabled by default if 0: for b in O.bodies: # block X,Y,Z rotations, translations are free b.state.blockedDOFs='XYZ' # stop rotations if any, as blockedDOFs block accelerations really b.state.angVel=(0,0,0) # set friction angle back to non-zero value # tangensOfFrictionAngle is computed by the Ip2_* functor from material # for future contacts change material (there is only one material for all particles) O.materials[0].frictionAngle=.5 # radians # for existing contacts, set contact friction directly for i in O.interactions: i.phys.tangensOfFrictionAngle=tan(.5) # called from the 'checker' engine periodically, during the shear phase def checkDistorsion(): # if the distorsion value is >.3, exit; otherwise do nothing if abs(O.cell.trsf[0,2])>.5: # save data from addData(...) 
before exiting into file # use O.tags['id'] to distinguish individual runs of the same simulation plot.saveDataTxt(O.tags['id']+'.txt') # exit the program #import sys #sys.exit(0) # no error (0) O.pause() # called periodically to store data history def addData(): # get the stress tensor (as 3x3 matrix) stress=sum(normalShearStressTensors(),Matrix3.Zero) # give names to values we are interested in and save them plot.addData(exz=O.cell.trsf[0,2],szz=stress[2,2],sxz=stress[0,2],tanPhi=stress[0,2]/stress[2,2],i=O.iter) # color particles based on rotation amount for b in O.bodies: # rot() gives rotation vector between reference and current position b.shape.color=scalarOnColorScale(b.state.rot().norm(),0,pi/2.) # define what to plot (3 plots in total) ## exz(i), [left y axis, separate by None:] szz(i), sxz(i) ## szz(exz), sxz(exz) ## tanPhi(i) # note the space in 'i ' so that it does not overwrite the 'i' entry plot.plots={'i':('exz',None,'szz','sxz'),'exz':('szz','sxz'),'i ':('tanPhi',)} # better show rotation of particles Gl1_Sphere.stripes=True # open the plot on the screen plot.plot() O.saveTmp()
gpl-2.0
1,826,319,611,159,597,000
36.330769
107
0.732743
false
3.018035
false
false
false
tshi04/machine-learning-codes
GAN-tf-ff/dc_gen.py
1
2717
import re import math import numpy as np import tensorflow as tf import tensorflow.contrib as tc from utils import * class generator(object): def __init__(self, data_name='MNIST'): self.data_name = data_name def __call__(self, input_data, img_shape, reuse=False, name='generator'): if self.data_name == 'MNIST': self.img_shape = img_shape [fh, fw, fd] = img_shape [fh2, fw2, fd2] = [int(fh/2), int(fw/2), 64] [fh4, fw4, fd4] = [int(fh2/2), int(fw2/2), 128] [batch_size, in_shape] = np.array(input_data.shape, dtype='int').tolist() with tf.variable_scope(name) as self.gs: if reuse: self.gs.reuse_variables() h_fc1, w_fc1, b_fc1 = linear(input_data, fh4*fw4*fd4, name='dfc1') dconv1 = tf.reshape(h_fc1, [-1, fh4, fw4, fd4]) dconv1 = leakyrelu(dconv1, name='dconv1') dconv2, w_dconv2, b_dconv2 = dconv2d(dconv1, [batch_size, fh2, fw2, fd2], name='dconv2') dconv2 = leakyrelu(dconv2) dconv3, w_dconv3, b_dconv3 = dconv2d(dconv2, [batch_size, fh, fw, fd], name='dconv3') dconv3 = tf.nn.tanh(dconv3) return dconv3 if self.data_name == 'CIFAR-100': self.img_shape = img_shape [fh, fw, fd] = img_shape [fh2, fw2, fd2] = [int(fh/2), int(fw/2), 64] [fh4, fw4, fd4] = [int(fh2/2), int(fw2/2), fd2*2] [fh8, fw8, fd8] = [int(fh4/2), int(fw4/2), fd4*2] [batch_size, in_shape] = np.array(input_data.shape, dtype='int').tolist() with tf.variable_scope(name) as self.gs: if reuse: self.gs.reuse_variables() h_fc1, w_fc1, b_fc1 = linear(input_data, fh8*fw8*fd8, name='dfc1') dconv1 = tf.reshape(h_fc1, [-1, fh8, fw8, fd8]) dconv1 = leakyrelu(batch_norm(dconv1, name='dc1'), name='dconv1') dconv2, w_dconv2, b_dconv2 = dconv2d(dconv1, [batch_size, fh4, fw4, fd4], name='dconv2') dconv2 = leakyrelu(batch_norm(dconv2, name='dc2')) dconv3, w_dconv3, b_dconv3 = dconv2d(dconv2, [batch_size, fh2, fw2, fd2], name='dconv3') dconv3 = leakyrelu(batch_norm(dconv3, name='dc3')) dconv4, w_dconv4, b_dconv4 = dconv2d(dconv3, [batch_size, fh, fw, fd], name='dconv4') dconv4 = tf.nn.tanh(dconv4) return dconv4 @property def 
vars(self): return tf.contrib.framework.get_variables(self.gs)
gpl-3.0
7,319,559,567,595,346,000
35.716216
104
0.517483
false
2.915236
false
false
false
tmhorne/celtx
extensions/python/dom/nsdom/domcompile.py
1
4935
# ***** BEGIN LICENSE BLOCK ***** # Version: MPL 1.1/GPL 2.0/LGPL 2.1 # # The contents of this file are subject to the Mozilla Public License Version # 1.1 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # http://www.mozilla.org/MPL/ # # Software distributed under the License is distributed on an "AS IS" basis, # WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License # for the specific language governing rights and limitations under the # License. # # The Original Code is mozilla.org code # # The Initial Developer of the Original Code is mozilla.org. # Portions created by the Initial Developer are Copyright (C) 2005 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Hammond: initial author # # Alternatively, the contents of this file may be used under the terms of # either the GNU General Public License Version 2 or later (the "GPL"), or # the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), # in which case the provisions of the GPL or the LGPL are applicable instead # of those above. If you wish to allow use of your version of this file only # under the terms of either the GPL or the LGPL, and not to allow others to # use your version of this file under the terms of the MPL, indicate your # decision by deleting the provisions above and replace them with the notice # and other provisions required by the GPL or the LGPL. If you do not delete # the provisions above, a recipient may use your version of this file under # the terms of any one of the MPL, the GPL or the LGPL. # # ***** END LICENSE BLOCK ***** # A utility for compiling Python code, using features not available via # the builtin compile. # # (a) It is not possible to compile the body of a Python function, without the # function declaration. ie, 'return None' will always give a syntax error when # passed to compile. 
# (b) It is very tricky to compile code with the line-number starting at # anything other than zero. # # Both of these are solved by this module, which uses the 'compiler' module # XXX - sad side-effect is that Unicode is not correctly supported - # PyCF_SOURCE_IS_UTF8 is not exposed via compiler (in 2.3 at least) # On the upside here, all 'src' params are unicode today, so expansion here # requires no interface changes. from compiler import parse, syntax, compile from compiler.pycodegen import ModuleCodeGenerator import compiler.ast import new def _fix_src(src): # windows first - \r\n -> \n, then for mac, remaining \r -> \n # Trailing whitespace can cause problems - make sure a final '\n' exists. return src.replace("\r\n", "\n").replace("\r", "\n") + "\n" # from compiler.misc.set_filename - but we also adjust lineno attributes. def set_filename_and_offset(filename, offset, tree): """Set the filename attribute to filename on every node in tree""" worklist = [tree] while worklist: node = worklist.pop(0) node.filename = filename if node.lineno is not None: node.lineno += offset worklist.extend(node.getChildNodes()) def parse_function(src, func_name, arg_names, defaults=[]): tree = parse(src, "exec") defaults = [compiler.ast.Const(d) for d in defaults] # Insert a Stmt with function object. try: decs = compiler.ast.Decorators([]) except AttributeError: # 2.3 has no such concept (and different args!) func = compiler.ast.Function(func_name, arg_names, defaults, 0, None, tree.node) else: # 2.4 and later func = compiler.ast.Function(decs, func_name, arg_names, defaults, 0, None, tree.node) stmt = compiler.ast.Stmt((func,)) tree.node = stmt syntax.check(tree) return tree def compile_function(src, filename, func_name, arg_names, defaults=[], # more args to come... 
lineno=0): assert filename, "filename is required" try: tree = parse_function(_fix_src(src), func_name, arg_names, defaults) except SyntaxError, err: err.lineno += lineno err.filename = filename raise SyntaxError, err set_filename_and_offset(filename, lineno, tree) gen = ModuleCodeGenerator(tree) return gen.getCode() # And a 'standard' compile, but with the filename offset feature. def compile(src, filename, mode='exec', flags=None, dont_inherit=None, lineno=0): if flags is not None or dont_inherit is not None or mode != 'exec': raise RuntimeError, "not implemented yet" try: tree = parse(_fix_src(src), mode) except SyntaxError, err: err.lineno += lineno err.filename = filename raise SyntaxError, err set_filename_and_offset(filename, lineno, tree) gen = ModuleCodeGenerator(tree) return gen.getCode()
mpl-2.0
78,361,717,112,699,170
39.45082
83
0.684904
false
3.913561
false
false
false
x5zone/Mynote
bet365/test.py
1
5052
#!/usr/bin/python # coding:utf-8 import gzip import re import time import redis from urllib2 import urlopen import urlparse import bs4 r = redis.Redis(host='localhost',port=6379,db=0) def save2redis(match_id,key,value): r.hset(match_id,key,value) def getfromredis(match_id): return r.hgetall(match_id) def main(): #for match_id in range(86000,86001): for match_id in range(66481,99999): if match_id % 3 == 0: time.sleep(0.1) if get_pool_result(match_id): get_fb_match_hhad(match_id) def get_pool_result(match_id): base_url = "http://info.sporttery.cn/football/pool_result.php?id=" base_url = "%s%d" % (base_url,match_id) print base_url html = urlopen(base_url).read() html = html.decode('gbk') #print dir(html),type(html) soup = bs4.BeautifulSoup(html, "html.parser") match_begin = re.sub(r'[^- :0-9]', "", soup.find_all('span',class_='Centers')[0].contents[0]).strip() if match_begin == '': return False match_begin = time.mktime(time.strptime(match_begin,"%Y-%m-%d %H:%M")) if time.time()-7200 < match_begin: print "last match_id %d" % match_id exit() #return False #比赛结果 for i,tag in enumerate(soup.find_all('tr',class_='Tr3 Tr_normal')): if i == 0: continue for j,x in enumerate(tag.find_all('td')): if j != 4: continue bifen = re.sub(r'[^:0-9]', "", str(x)) if bifen == "": return False else: save2redis(match_id,'result', bifen) print match_id, " : ", bifen break return True #总进球数 """ keys = [] for i,tag in enumerate(soup.find_all('tr',class_='Tr3 Tr_normal bg')): if i != 2: continue for j,x in enumerate(tag.find_all('td')): print j,type(x),x.contents[0] if j == 0: keys.append('scoresnum_time') else: keys.append(x.contents[0]) """ def get_fb_match_hhad(match_id): """ 获取竞彩胜平负赔率,彩民支持率,让球胜平负赔率数据 match_id,彩民投票支持率胜,平,负,误差值,竞彩胜平负固定赔率胜,平,负,胜率,平率,负率,发布日期,发布时间 """ base_url = "http://info.sporttery.cn/football/info/fb_match_hhad.php?m=" base_url = "%s%d" % (base_url,match_id) print base_url html = urlopen(base_url).read() html = html.decode('gbk') soup = bs4.BeautifulSoup(html, 
"html.parser") tag = soup.find_all('div',class_='floatR font12') odds = [] """ for x in tag: for i,y in enumerate(x): if i == 0: continue if y.__class__ == bs4.element.Tag: for z in y: print re.sub(r'\D', "", z) else: print re.sub(r'\D', "", y) if i == 4: break # 让球赔率忽略 for i,tag in enumerate(soup.find_all('tr',class_='Tr3')): print "odds:",i for j,x in enumerate(tag.find_all('td')): if x.__class__ == bs4.element.Tag: for y in x: print re.sub(r'[^.:0-9]',"",str(y)) else: print re.sub(r'[^.:0-9]',"",x) if j == 5: break #if i == 6: #break """ # 99家赔率 for i,tag in enumerate(soup.find_all('tr',class_='Tr33')): #print "odds:",i key = 'null' for j,x in enumerate(tag.find_all('td')): #print j,x if j == 1: key = x.contents[0] if type(key) == bs4.element.Tag: key = key.contents[0] if type(key) == bs4.element.Tag: key = key.contents[0] if j < 2: continue #print j,x if x.__class__ == bs4.element.Tag: for y in x: if type(y) == bs4.element.Tag: y = y.contents[0] value = re.sub(r'[^.:0-9]',"",y) print key+str(j),value save2redis(match_id,key+str(j), value) break else: value = re.sub(r'[^.:0-9]',"",x) print key+str(j),value save2redis(match_id,key+str(j), value) if (i<3 and j == 10) or j == 13: break if __name__ == '__main__': main() exit() for i in (86000,87000,87001,87002): get_pool_result(i) #num = get_fb_match_hhad(i) #print "results:",num exit() #print html soup = bs4.BeautifulSoup(html, "html.parser") baijia = soup.find("",{"class":"Tr33"}) exp1 = re.compile("(?isu)<tr[^>]*>(.*?)</tr>") h = re.findall(r'<td[^>]*><a[^>]*>(.*?)</a></td>', baijia, re.I|re.M) print h #for dd in soup.select('.searchResult tr') if dd.contents[1].name != 'th':
unlicense
2,712,315,846,516,340,700
30.779221
105
0.483449
false
3.032218
false
false
false
avian2/ec3k
ec3k.py
1
12641
"""Software receiver for EnergyCount 3000 Copyright (C) 2015 Tomaz Solc <tomaz.solc@tablix.org> This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. """ from gnuradio import digital from gnuradio import gr, blocks, filter, analog import itertools import math import os.path import osmosdr import select import signal import subprocess import tempfile import threading import time def which(program): for path in os.environ["PATH"].split(os.pathsep): fpath = os.path.join(path, program) if os.path.isfile(fpath) and os.access(fpath, os.X_OK): return fpath return None class InvalidPacket(Exception): pass class EnergyCount3KState: """EnergyCount 3000 transmitter state. 
This object contains fields contained in a single radio packet: id -- 16-bit ID of the device time_total -- time in seconds since last reset time_on -- time in seconds since last reset with non-zero device power energy -- total energy in Ws (watt-seconds) power_current -- current device power in watts power_max -- maximum device power in watts (reset at unknown intervals) reset_counter -- total number of transmitter resets device_on_flag -- true if device is currently drawing non-zero power timestamp -- UNIX timestamp of the packet reception (not accurate) """ CRC = 0xf0b8 def __init__(self, hex_bytes): bits = self._get_bits(hex_bytes) bits = [ not bit for bit in bits ] bits = self._descrambler([18, 17, 13, 12, 1], bits) bits = [ not bit for bit in bits ] bits = self._bit_unstuff(bits) bits = self._bit_shuffle(bits) nibbles = self._get_nibbles(bits) self._check_crc(nibbles) self._decode_packet(nibbles) def _get_bits(self, hex_bytes): """Unpacks hex printed data into individual bits""" bits = [] for hex_byte in hex_bytes: i = int(hex_byte, 16) for n in xrange(8): bits.append(bool((i<<n) & 0x80)) return bits def _get_nibbles(self, bits): """Shift bits into bytes, MSB first""" nibbles = [0] * (len(bits) / 4) for n, bit in enumerate(bits): nibbles[n/4] |= (int(bit) << (3-n%4)) return nibbles def _bit_shuffle(self, bits): """Weird bit shuffling operation required""" nbits = [] # first, invert byte bit order args = [iter(bits)] * 8 for bit_group in itertools.izip_longest(fillvalue=False, *args): nbits += reversed(bit_group) return nbits def _descrambler(self, taps, bits): """Multiplicative, self-synchronizing scrambler""" nbits = [] state = [ False ] * max(taps) for bit in bits: out = bit for tap in taps: out = out ^ state[tap-1] nbits.append(out) state = [ bit ] + state[:-1] return nbits def _bit_unstuff(self, bits): """Bit stuffing reversal. 6 consecutive 1s serve as a packet start/stop condition. 
In the packet, one zero is stuffed after 5 consecutive 1s """ nbits = [] start = False cnt = 0 for n, bit in enumerate(bits): if bit: cnt += 1 if start: nbits.append(bit) else: if cnt < 5: if start: nbits.append(bit) elif cnt == 5: pass elif cnt == 6: start = not start else: raise InvalidPacket("Wrong bit stuffing: %d concecutive ones" % cnt) cnt = 0 return nbits def _crc_ccitt_update(self, crc, data): assert data >= 0 assert data < 0x100 assert crc >= 0 assert crc <= 0x10000 data ^= crc & 0xff data ^= (data << 4) & 0xff return ((data << 8) | (crc >> 8)) ^ (data >> 4) ^ (data << 3) def _check_crc(self, nibbles): if len(nibbles) != 84: raise InvalidPacket("Wrong length: %d" % len(nibbles)) crc = 0xffff for i in xrange(0, 82, 2): crc = self._crc_ccitt_update(crc, nibbles[i] * 0x10 + nibbles[i+1]) if crc != self.CRC: raise InvalidPacket("CRC mismatch: %d != %d" % (crc, self.CRC)) def _unpack_int(self, nibbles): i = 0 for nibble in nibbles: i = (i * 0x10) + nibble return i def _decode_packet(self, nibbles): start_mark = self._unpack_int( nibbles[ 0: 1]) if start_mark != 0x9: raise InvalidPacket("Unknown start mark: 0x%x (please report this)" % (start_mark,)) self.id = self._unpack_int( nibbles[ 1: 5]) time_total_low = nibbles[ 5: 9] pad_1 = self._unpack_int( nibbles[ 9:13]) time_on_low = nibbles[13:17] pad_2 = self._unpack_int( nibbles[17:24]) energy_low = nibbles[24:31] self.power_current = self._unpack_int( nibbles[31:35]) / 10.0 self.power_max = self._unpack_int( nibbles[35:39]) / 10.0 # unknown? 
(seems to be used for internal calculations) self.energy_2 = self._unpack_int( nibbles[39:45]) # nibbles[45:59] time_total_high = nibbles[59:62] pad_3 = self._unpack_int( nibbles[62:67]) energy_high = nibbles[67:71] time_on_high = nibbles[71:74] self.reset_counter = self._unpack_int( nibbles[74:76]) flags = self._unpack_int( nibbles[76:77]) pad_4 = self._unpack_int( nibbles[77:78]) # crc = self._unpack_int( nibbles[78:82]) # We don't really care about the end mark, or whether it got # corrupted, since it's not covered by the CRC check. #end_mark = self._unpack_int( nibbles[82:84]) #if end_mark != 0x7e: # raise InvalidPacket("Invalid end mark: %d" % (end_mark,)) if pad_1 != 0: raise InvalidPacket("Padding 1 not zero: 0x%x (please report this)" % (pad_1,)) if pad_2 != 0: raise InvalidPacket("Padding 2 not zero: 0x%x (please report this)" % (pad_2,)) if pad_3 != 0: raise InvalidPacket("Padding 3 not zero: 0x%x (please report this)" % (pad_3,)) if pad_4 != 0: raise InvalidPacket("Padding 4 not zero: 0x%x (please report this)" % (pad_4,)) self.timestamp = time.time() self.time_total = self._unpack_int(time_total_high + time_total_low) self.time_on = self._unpack_int(time_on_high + time_on_low) self.energy = self._unpack_int(energy_high + energy_low) if flags == 0x8: self.device_on_flag = True elif flags == 0x0: self.device_on_flag = False else: raise InvalidPacket("Unknown flag value: 0x%x (please report this)" % (flags,)) # Set properties for compatibility with older ec3k module versions self.uptime = self.time_total self.since_reset = self.time_on self.energy_1 = self.energy self.current_power = self.power_current self.max_power = self.power_max def __str__(self): if self.device_on_flag: flag = '*' else: flag = ' ' return ("id : %04x\n" "time total : %d seconds\n" "time on %s : %d seconds\n" "energy %s : %d Ws\n" "power current : %.1f W\n" "power max : %.1f W\n" "reset counter : %d") % ( self.id, self.time_total, flag, self.time_on, flag, self.energy, 
self.power_current, self.power_max, self.reset_counter) class EnergyCount3K: """Object representing EnergyCount 3000 receiver""" def __init__(self, id=None, callback=None, freq=868.402e6, device=0, osmosdr_args=None): """Create a new EnergyCount3K object Takes the following optional keyword arguments: id -- ID of the device to monitor callback -- callable to call for each received packet freq -- central frequency of the channel on which to listen for updates (default is known to work for European devices) device -- rtl-sdr device to use osmosdr_args -- any additional OsmoSDR arguments (e.g. "offset_tune=1") If ID is None, then packets for all devices will be received. callback should be a function of a callable object that takes one EnergyCount3KState object as its argument. """ self.id = id self.callback = callback self.freq = freq self.device = device self.osmosdr_args = osmosdr_args self.want_stop = True self.state = None self.noise_level = -90 def start(self): """Start the receiver""" assert self.want_stop self.want_stop = False self.threads = [] self._start_capture() capture_thread = threading.Thread(target=self._capture_thread) capture_thread.start() self.threads.append(capture_thread) self._setup_top_block() self.tb.start() def stop(self): """Stop the receiver and clean up""" assert not self.want_stop self.want_stop = True for thread in self.threads: thread.join() self.tb.stop() self.tb.wait() self._clean_capture() def get(self): """Get the last received state Returns data from the last received packet as a EnergyCount3KState object. 
""" return self.state def _log(self, msg): """Override this method to capture debug information""" pass def _start_capture(self): self.tempdir = tempfile.mkdtemp() self.pipe = os.path.join(self.tempdir, "ec3k.pipe") os.mkfifo(self.pipe) self.capture_process = None try: for program in ["capture", "capture.py"]: fpath = which(program) if fpath is not None: self.capture_process = subprocess.Popen( [fpath, "-f", self.pipe], bufsize=1, stdout=subprocess.PIPE) return raise Exception("Can't find capture binary in PATH") except: self._clean_capture() raise def _clean_capture(self): if self.capture_process: self.capture_process.send_signal(signal.SIGTERM) self.capture_process.wait() self.capture_process = None os.unlink(self.pipe) os.rmdir(self.tempdir) def _capture_thread(self): while not self.want_stop: rlist, wlist, xlist = select.select([self.capture_process.stdout], [], [], 1) if rlist: line = rlist[0].readline() fields = line.split() if fields and (fields[0] == 'data'): self._log("Decoding packet") try: state = EnergyCount3KState(fields[1:]) except InvalidPacket, e: self._log("Invalid packet: %s" % (e,)) continue if (not self.id) or (state.id == self.id): self.state = state if self.callback: self.callback(self.state) def _noise_probe_thread(self): while not self.want_stop: power = self.noise_probe.level() self.noise_level = 10 * math.log10(max(1e-9, power)) self._log("Current noise level: %.1f dB" % (self.noise_level,)) self.squelch.set_threshold(self.noise_level+7.0) time.sleep(1.0) def _setup_top_block(self): self.tb = gr.top_block() samp_rate = 96000 oversample = 10 # Radio receiver, initial downsampling args = "rtl=%d,buffers=16" % (self.device,) if self.osmosdr_args: args += ",%s" % (self.osmosdr_args,) osmosdr_source = osmosdr.source(args=args) osmosdr_source.set_sample_rate(samp_rate*oversample) osmosdr_source.set_center_freq(self.freq, 0) osmosdr_source.set_freq_corr(0, 0) osmosdr_source.set_gain_mode(True, 0) osmosdr_source.set_gain(0, 0) taps = 
filter.firdes.low_pass(1, samp_rate*oversample, 90e3, 8e3, filter.firdes.WIN_HAMMING, 6.76) low_pass_filter = filter.fir_filter_ccf(oversample, taps) self.tb.connect((osmosdr_source, 0), (low_pass_filter, 0)) # Squelch self.noise_probe = analog.probe_avg_mag_sqrd_c(0, 1.0/samp_rate/1e2) self.squelch = analog.simple_squelch_cc(self.noise_level, 1) noise_probe_thread = threading.Thread(target=self._noise_probe_thread) noise_probe_thread.start() self.threads.append(noise_probe_thread) self.tb.connect((low_pass_filter, 0), (self.noise_probe, 0)) self.tb.connect((low_pass_filter, 0), (self.squelch, 0)) # FM demodulation quadrature_demod = analog.quadrature_demod_cf(1) self.tb.connect((self.squelch, 0), (quadrature_demod, 0)) # Binary slicing, transformation into capture-compatible format add_offset = blocks.add_const_vff((-1e-3, )) binary_slicer = digital.binary_slicer_fb() char_to_float = blocks.char_to_float(1, 1) multiply_const = blocks.multiply_const_vff((255, )) float_to_uchar = blocks.float_to_uchar() pipe_sink = blocks.file_sink(gr.sizeof_char*1, self.pipe) pipe_sink.set_unbuffered(False) self.tb.connect((quadrature_demod, 0), (add_offset, 0)) self.tb.connect((add_offset, 0), (binary_slicer, 0)) self.tb.connect((binary_slicer, 0), (char_to_float, 0)) self.tb.connect((char_to_float, 0), (multiply_const, 0)) self.tb.connect((multiply_const, 0), (float_to_uchar, 0)) self.tb.connect((float_to_uchar, 0), (pipe_sink, 0))
gpl-3.0
8,677,844,192,735,593,000
26.126609
89
0.665849
false
2.827964
false
false
false
deepmind/dm_alchemy
dm_alchemy/symbolic_alchemy_wrapper.py
1
7573
# Lint as: python3 # Copyright 2020 DeepMind Technologies Limited. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================ """Wrapper for a 3d alchemy to keep a symbolic alchemy in sync.""" from dm_alchemy import get_meta_data from dm_alchemy import symbolic_alchemy from dm_alchemy.types import event_unpacking from dm_alchemy.types import stones_and_potions from dm_alchemy.types import unity_python_conversion from dm_alchemy.types import utils import dm_env def _add_to_obs(obs, to_add, name): if isinstance(obs, tuple): return obs + (to_add,) if isinstance(obs, dict): obs[name] = to_add return obs if isinstance(obs, list): return obs + [to_add] # If it is not already a tuple, dict or list, then make it a tuple. 
return obs, to_add class SymbolicAlchemyWrapper(dm_env.Environment): """Take a 3d alchemy environment and keep a symbolic env in sync with it.""" def __init__( self, env3d, level_name, see_chemistries=None, see_symbolic_observation=False): self.env3d = env3d value_coefficients, value_offset, _, bonus, _ = get_meta_data.to_meta_data( level_name) reward_weights = stones_and_potions.RewardWeights( coefficients=value_coefficients, offset=value_offset, bonus=bonus) self.env_symbolic = symbolic_alchemy.SymbolicAlchemy( chemistry_gen=lambda: self.chemistry, reward_weights=reward_weights, items_gen=lambda unused_trial_number: self.items, num_trials=10, see_chemistries=see_chemistries, observe_used=True, ) self.items = utils.TrialItems(stones=[], potions=[]) self._perceived_stones = [] self._perceived_potions = [] self.chemistry = None self.see_symbolic_observation = see_symbolic_observation self._trial_in_progress = False self._trial_has_started = False def process_step_events(self, events): for event in events: if 'TrialEnded' in event.name: self._trial_has_started = False self.items = utils.TrialItems(stones=[], potions=[]) self._perceived_stones = [] self._perceived_potions = [] elif 'TrialStarted' in event.name: self._trial_has_started = True # At this point we should have all stones and potions and the chemistry. 
aligned_stones = [ stones_and_potions.align(stone, self.chemistry.rotation) for stone, _ in self._perceived_stones] latent_stones = [self.chemistry.stone_map.apply(stone) for stone in aligned_stones] stones = [ stones_and_potions.Stone(i, stone.latent_coords) for (_, i), stone in zip(self._perceived_stones, latent_stones)] latent_potions = [self.chemistry.potion_map.apply(potion) for potion, _ in self._perceived_potions] potions = [ stones_and_potions.Potion(i, potion.latent_dim, potion.latent_dir) for (_, i), potion in zip(self._perceived_potions, latent_potions)] self.items = utils.TrialItems(stones=stones, potions=potions) # When we get an event saying that the new trial has started in the 3d # version it should be safe to end the previous trial in the symbolic # version. if self._trial_in_progress: self.env_symbolic.end_trial() if self.env_symbolic.is_last_step(): self.env_symbolic.reset() # Once the first trial is started there is always a trial in progress # from then on. self._trial_in_progress = True elif 'PotionUsed' in event.name: potion_inst_id, stone_inst_id = event_unpacking.unpack_potion_used( event) stone_ind = self.env_symbolic.game_state.get_stone_ind( stone_inst=stone_inst_id) potion_ind = self.env_symbolic.game_state.get_potion_ind( potion_inst=potion_inst_id) # Take an action putting the stone in the potion. self.env_symbolic.step_slot_based_action(utils.SlotBasedAction( stone_ind=stone_ind, potion_ind=potion_ind)) elif 'StoneUsed' in event.name: stone_inst_id = event_unpacking.unpack_stone_used(event) stone_ind = self.env_symbolic.game_state.get_stone_ind( stone_inst=stone_inst_id) # Take an action putting the stone in the cauldron. 
self.env_symbolic.step_slot_based_action(utils.SlotBasedAction( stone_ind=stone_ind, cauldron=True)) elif 'ChemistryCreated' in event.name: chem, rot = event_unpacking.unpack_chemistry_and_rotation(event) self.chemistry = unity_python_conversion.from_unity_chemistry(chem, rot) else: potions = event_unpacking.get_potions([event]) stones = event_unpacking.get_stones([event]) if (potions or stones) and self._trial_has_started: self.items = utils.TrialItems(stones=[], potions=[]) self._perceived_stones = [] self._perceived_potions = [] self._trial_has_started = False self._perceived_potions.extend(potions) self._perceived_stones.extend(stones) def step(self, action) -> dm_env.TimeStep: timestep = self.env3d.step(action) # If a symbolic action has occurred take the action in the symbolic # environment. self.process_step_events(self.env3d.events()) return self.add_observations(timestep) def reset(self) -> dm_env.TimeStep: timestep = self.env3d.reset() self.items = utils.TrialItems(stones=[], potions=[]) self._perceived_stones = [] self._perceived_potions = [] self._trial_has_started = False self.process_step_events(self.env3d.events()) return self.add_observations(timestep) def add_observations(self, timestep): new_observation = timestep.observation symbolic_observation = self.env_symbolic.observation() if self.see_symbolic_observation: new_observation = _add_to_obs( new_observation, symbolic_observation['symbolic_obs'], 'symbolic_obs') for name in self.env_symbolic.see_chemistries.keys(): new_observation = _add_to_obs( new_observation, symbolic_observation[name], name) return dm_env.TimeStep( step_type=timestep.step_type, reward=timestep.reward, discount=timestep.discount, observation=new_observation) def observation_spec(self): obs_spec = self.env3d.observation_spec() if self.see_symbolic_observation: symbolic_obs = self.env_symbolic.observation_spec()['symbolic_obs'] obs_spec = _add_to_obs(obs_spec, symbolic_obs, 'symbolic_obs') for name in 
self.env_symbolic.see_chemistries.keys(): chem_obs_spec = self.env_symbolic.observation_spec()[name] obs_spec = _add_to_obs(obs_spec, chem_obs_spec, name) return obs_spec def action_spec(self): return self.env3d.action_spec() # Forward other attribute lookups to the 3d environment. def __getattr__(self, name): return getattr(self.env3d, name)
apache-2.0
2,939,894,921,043,146,000
41.785311
80
0.663938
false
3.552064
false
false
false
Alcheri/Plugins
MyPing/__init__.py
1
1307
### # Copyright (c) 2020, Barry Suridge # All rights reserved. # # ### """ MyPing: An alternative to Supybot's Ping function. """ import sys # Python 3 ONLY!! if sys.version_info[0] < 3: raise RuntimeError("This plugin requires Python 3.") import supybot import supybot.world as world # Use this for the version of this plugin. You may wish to put a CVS keyword # in here if you're keeping the plugin in CVS or some similar system. __version__ = '1.0.1' # XXX Replace this with an appropriate author or supybot.Author instance. __author__ = 'Barry Suridge' # This is a dictionary mapping supybot.Author instances to lists of # contributions. __contributors__ = {} # This is a url where the most recent plugin package can be downloaded. __url__ = 'https://github.com/Alcheri/Plugins.git' from . import config from . import plugin if sys.version_info >= (3, 4): from importlib import reload else: from imp import reload # In case we're being reloaded. reload(config) reload(plugin) # Add more reloads here if you add third-party modules and want them to be # reloaded when this plugin is reloaded. Don't forget to import them as well! if world.testing: from . import test Class = plugin.Class configure = config.configure # vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:
bsd-3-clause
5,813,308,800,761,745,000
23.660377
78
0.72456
false
3.457672
false
false
false
miguelfrde/openautomata
openautomata/regex.py
1
3734
from automata import * from collections import defaultdict OR = '|' CLOSURE = '*' POS_CLOSURE = '+' WILD_CARD = '.' SYMBOLS = (')', '(', OR, CLOSURE, POS_CLOSURE) def balanced_parenthesis(txt): count = 0 for c in txt: if c == '(': count += 1 if c == ')': count -= 1 if count < 0: return False return count == 0 class RegularExpression: def __init__(self, regex_str): if not balanced_parenthesis(regex_str): raise Exception("Parenthesis not balanced.") self.regex = '(' + regex_str + ')' self.nfa = None self.dfa = DFA.from_nfa(self.__get_nfa()) self.dfa.minimize() def __get_nfa(self): "Regular Expression to NFA" alphabet = set(c for c in self.regex if c not in SYMBOLS) nfa = NFA(alphabet) nfa.set_initial(0) nfa.add_final(len(self.regex) - 1) stack = list() N = len(self.regex) for i, c in enumerate(self.regex): ind = i if c in alphabet: nfa.add_transition(i, i + 1, c) elif c == '(': nfa.add_transition(i, i + 1, EPSILON) stack.append(i) elif c == ')': nfa.add_transition(i, i + 1, EPSILON) ind = stack.pop() tmplist = list() # Adds a transition between every or and the closing parenthesis while self.regex[ind] == OR: tmplist.append(ind) nfa.add_transition(ind, i, EPSILON) ind = stack.pop() # Adds a transition between the opening parenthesis and every or for n in tmplist: nfa.add_transition(ind, n + 1, EPSILON) elif c == OR: stack.append(i) elif c in (CLOSURE, POS_CLOSURE): nfa.add_transition(i, i + 1, EPSILON) if i < N - 1 and self.regex[i + 1] in (CLOSURE, POS_CLOSURE): if self.regex[i + 1] == CLOSURE: nfa.add_transition(ind, i + 1, EPSILON) nfa.add_transition(i + 1, ind, EPSILON) nfa.states.remove(N) nfa.transition = defaultdict(set, [(k, v) for k, v in nfa.transition.iteritems() if N not in v]) return nfa def __str__(self): return self.regex[1:-1] def matches(self, text): "Match the regular expression against the text" state = self.dfa.initial_state for i, letter in enumerate(text): try: state = self.dfa.get_transition(state, letter) except SymbolNotInAlphabetError: 
return (False, i) result = any(map(lambda s: s in state, (f for f in self.dfa.final_states))) return (result, len(text)) def search(self, text): "Search for all matches of a regular expression in a text" current_states = list() result = list() for i, c in enumerate(text): current_states.append((i, {self.dfa.initial_state})) new_states = list() for c in set([WILD_CARD, c]): if c not in self.dfa.alphabet: continue for initial, s in current_states: t = self.dfa.get_transition(s, c) if not t: continue new_states.append((initial, t)) if self.dfa.contains_final(t): yield (initial, i, text[initial:i+1]) current_states = new_states if __name__ == '__main__': r = RegularExpression("a.e") print list(r.search("ade"))
mit
-8,601,334,346,309,791,000
34.226415
88
0.502678
false
3.783181
false
false
false
openlawlibrary/pygls
tests/lsp/test_code_action.py
1
4987
############################################################################ # Copyright(c) Open Law Library. All rights reserved. # # See ThirdPartyNotices.txt in the project root for additional notices. # # # # Licensed under the Apache License, Version 2.0 (the "License") # # you may not use this file except in compliance with the License. # # You may obtain a copy of the License at # # # # http: // www.apache.org/licenses/LICENSE-2.0 # # # # Unless required by applicable law or agreed to in writing, software # # distributed under the License is distributed on an "AS IS" BASIS, # # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # # See the License for the specific language governing permissions and # # limitations under the License. # ############################################################################ import unittest from typing import List, Optional, Union from pygls.lsp.methods import CODE_ACTION from pygls.lsp.types import (CodeAction, CodeActionContext, CodeActionKind, CodeActionOptions, CodeActionParams, Command, Diagnostic, Position, Range, TextDocumentIdentifier) from ..conftest import CALL_TIMEOUT, ClientServer class TestCodeAction(unittest.TestCase): @classmethod def setUpClass(cls): cls.client_server = ClientServer() cls.client, cls.server = cls.client_server @cls.server.feature( CODE_ACTION, CodeActionOptions(code_action_kinds=[CodeActionKind.Refactor]) ) def f(params: CodeActionParams) -> Optional[List[Union[Command, CodeAction]]]: if params.text_document.uri == 'file://return.list': return [ CodeAction(title='action1'), CodeAction(title='action2', kind=CodeActionKind.Refactor), Command(title='cmd1', command='cmd1', arguments=[1, 'two']), ] else: return None cls.client_server.start() @classmethod def tearDownClass(cls): cls.client_server.stop() def test_capabilities(self): capabilities = self.server.server_capabilities assert capabilities.code_action_provider assert 
capabilities.code_action_provider.code_action_kinds == [CodeActionKind.Refactor] def test_code_action_return_list(self): response = self.client.lsp.send_request( CODE_ACTION, CodeActionParams( text_document=TextDocumentIdentifier(uri='file://return.list'), range=Range( start=Position(line=0, character=0), end=Position(line=1, character=1), ), context=CodeActionContext( diagnostics=[ Diagnostic( range=Range( start=Position(line=0, character=0), end=Position(line=1, character=1), ), message='diagnostic' ) ], only=[CodeActionKind.Refactor] ) ) ).result(timeout=CALL_TIMEOUT) assert response[0]['title'] == 'action1' assert response[1]['title'] == 'action2' assert response[1]['kind'] == CodeActionKind.Refactor assert response[2]['title'] == 'cmd1' assert response[2]['command'] == 'cmd1' assert response[2]['arguments'] == [1, 'two'] def test_code_action_return_none(self): response = self.client.lsp.send_request( CODE_ACTION, CodeActionParams( text_document=TextDocumentIdentifier(uri='file://return.none'), range=Range( start=Position(line=0, character=0), end=Position(line=1, character=1), ), context=CodeActionContext( diagnostics=[ Diagnostic( range=Range( start=Position(line=0, character=0), end=Position(line=1, character=1), ), message='diagnostic', ) ], only=[CodeActionKind.Refactor], ) ) ).result(timeout=CALL_TIMEOUT) assert response is None if __name__ == '__main__': unittest.main()
apache-2.0
7,890,839,024,633,215,000
40.907563
95
0.487868
false
5.052685
true
false
false
santoshphilip/pyclearsky
tests/test_clearskyrad.py
1
7459
# Copyright (c) 2013 Santosh Philip # ======================================================================= # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. # ======================================================================= """py.test for clearskyrad.py""" from pyclearsky import clearskyrad from pyclearsky.pytest_helpers import almostequal from datetime import datetime def test_func(): """py.test for func""" result = clearskyrad.func(1, 2) assert result def test_degcos(): """py.test for degcos""" data = ((60, 0.5), # deg, thecos ) for deg, thecos in data: result = clearskyrad.degcos(deg) assert almostequal(result, thecos) is True def test_degsin(): """py.test for degcos""" data = ((30, 0.5), # deg, thesin ) for deg, thesin in data: result = clearskyrad.degsin(deg) assert almostequal(result, thesin) is True def test_ETradiation(): """py.test for ETradiation""" data = ( (21, 1410), # daynum, radiation (52, 1397), # daynum, radiation (80, 1378), # daynum, radiation (111, 1354), # daynum, radiation (141, 1334), # daynum, radiation (172, 1323), # daynum, radiation (202, 1324), # daynum, radiation (233, 1336), # daynum, radiation (264, 1357), # daynum, radiation (294, 1380), # daynum, radiation (325, 1400), # daynum, radiation (355, 1411), # daynum, radiation ) for daynum, radiation in data: result = clearskyrad.ETradiation(daynum=daynum) assert almostequal(result, radiation, places=0) is True from datetime import datetime as dt data = ( (dt(2013, 1, 21), 1410), # thedate, radiation (dt(2013, 2, 21), 1397), # thedate, radiation (dt(2013, 3, 21), 1378), # thedate, radiation (dt(2013, 4, 21), 1354), # thedate, radiations (dt(2013, 5, 21), 1334), # thedate, radiation (dt(2013, 6, 21), 1323), # thedate, radiation (dt(2013, 7, 21), 1324), # thedate, radiation (dt(2013, 8, 21), 1336), # thedate, radiation (dt(2013, 9, 
21), 1357), # thedate, radiation (dt(2013, 10, 21), 1380), # thedate, radiation (dt(2013, 11, 21), 1400), # thedate, radiation (dt(2013, 12, 21), 1411), # thedate, radiation ) for thedate, radiation in data: result = clearskyrad.ETradiation(thedate=thedate) # print result, radiation assert almostequal(result, radiation, places=0) is True def test_airmass(): """py.test for airmass""" data = ( (30, 1.9942928525), # alt, theairmass (45, 1.412595252), # alt, theairmass (60, 1.1539922334), # alt, theairmass (90, 0.9997119919), # alt, theairmass ) for alt, theairmass in data: result = clearskyrad.airmass(alt) assert almostequal(result, theairmass) def test_tau(): """py.test for tau""" data = ( (""" - Displaying Monthly Design Conditions "Climate Design Data 2009 ASHRAE Handbook" - Monthly Optical Sky Depth Beam (taub) and Diffuse (taud) Jan Feb Mar Apr May Jun Jul Aug Sep Oct Nov Dec taub (beam) 0.289 0.29 0.325 0.351 0.377 0.37 0.362 0.352 0.343 0.323 0.302 0.289 taud (diffuse) 2.641 2.681 2.392 2.292 2.224 2.361 2.489 2.561 2.539 2.534 2.618 2.633 taub = Clear Sky Optical Depth for Beam Irradiance taud = Clear Sky Optical Depth for Diffuse Irradiance """, # noqa: E112, E501, W191, E101 -> copied from weather file and triggering these flake8 warnings [0.289, 0.29, 0.325, 0.351, 0.377, 0.37, 0.362, 0.352, 0.343, 0.323, 0.302, 0.289], [2.641, 2.681, 2.392, 2.292, 2.224, 2.361, 2.489, 2.561, 2.539, 2.534, 2.618, 2.633] ), # txt, taub, taud ) for txt, taub, taud in data: from six import StringIO fhandle = StringIO(txt) result = clearskyrad.tau(fhandle) assert result == (taub, taud) def test_getab(): """py.test for getab""" data = ( (0.289, 2.641, 0.652079204), # taub, taud, theab ) for taub, taud, theab in data: result = clearskyrad.getab(taub, taud) assert almostequal(result, theab) def test_getad(): """py.test for getad""" data = ( # (0.289, 2.641, -0.335194893), # taub, taud, thead (0.556, 1.779, 0.310), # taub, taud, thead ) for taub, taud, thead in data: result = 
clearskyrad.getad(taub, taud) assert almostequal(result, thead, 3) def test_directnormal_inner(): """py.test for directnormal_inner""" data = ( (1409.962705, 0.289, 0.999711992, 0.652079204, 1056.136599), # E0, taub, m, ab, Eb ) for E0, taub, m, ab, Eb in data: result = clearskyrad.directnormal_inner(E0, taub, m, ab) assert almostequal(result, Eb, places=6) def test_diffhoriz_inner(): """py.test for diffhoriz_inner""" data = ( (1409.962705, 2.641, 0.999711992, -0.335194893, 100.490533,), # E0, taud, m, ad, Ed ) for E0, taud, m, ad, Ed in data: result = clearskyrad.diffhoriz_inner(E0, taud, m, ad) assert almostequal(result, Ed, places=6) def test_directnormal(): """py.test for directnormal""" data = ( (0.289, 2.641, 90, 21, 1056.136599), # taub, taud, alt, daynum, Eb ) for taub, taud, alt, daynum, Eb in data: result = clearskyrad.directnormal(taub, taud, alt, daynum) assert almostequal(result, Eb, places=5) def test_diffusehorizontal(): """py.test for diffusehorizontal""" data = ( (0.289, 2.641, 90, 21, 100.528187871), # taub, taud, alt, daynum, Eb ) for taub, taud, alt, daynum, Eb in data: result = clearskyrad.diffusehorizontal(taub, taud, alt, daynum=daynum) assert almostequal(result, Eb) def test_weatherdata(): """py.test with real weather data""" # from USA_AZ_Phoenix.722780_TMY2.ddy # datetime = month, date, hour = 1, 24, 13 # ET Horizontal Radiation = 852 # ET Direct Normal radiation = 1412 # Direct Normal Radiation = 969 # Diffuse horizontal radiation = 71 # Total Sky cover = 0 # Opaque sky cover = 0 # from USA_AZ_Phoenix.722780_TMY2.stat # Jan taub = 0.306 # Jan taud = 2.534 # Feb taub = 0.317 # Feb taud = 2.463 # from <http://www.esrl.noaa.gov/gmd/grad/solcalc/azel.html> # solar azimuth = 185.8 degrees # solar altitude = 37.36 degrees # Eb = 969 taub = 0.306 taud = 2.534 alt = 37.36 daynum = 24 result = clearskyrad.directnormal(taub, taud, alt, daynum=daynum) # assert result == Eb Ed = 71 result = clearskyrad.diffusehorizontal(taub, taud, alt, 
daynum=daynum) # assert result == Ed # other days # 1975, 2, 16, 14 # Eb = 816 taub = 0.317 taud = 2.463 alt = 40.67 daynum = 24 thedate = datetime(1975, 2, 16) result = clearskyrad.directnormal(taub, taud, alt, thedate=thedate) # assert result == Eb Ed = 98.3801828381502 result = clearskyrad.diffusehorizontal(taub, taud, alt, thedate=thedate) assert result == Ed
mpl-2.0
3,839,150,458,369,474,000
32.299107
101
0.578764
false
2.906859
true
false
false
SimFre/EasyMoney
EasyMoney.py
1
2677
#!/usr/local/bin/python3 import sys from PyQt5 import QtCore, QtGui, QtWidgets from ImportBase import ImportBase from DbConnection import DbConnection from Ui_MainWindow import Ui_MainWindow from Control_MainWindow import Control_MainWindow if __name__ == '__main__': #dbFile = "/Users/laban/Documents/Ekonomi/Transactions.db" dbFile = "/home/laban/Documents/Ekonomi/Transactions.db" with DbConnection(dbFile, False) as db: print("Database:", dbFile) # Initiate data class ib = ImportBase(db) app = QtWidgets.QApplication(sys.argv) window = QtWidgets.QMainWindow() ui = Ui_MainWindow() ui.setupUi(window) ctl = Control_MainWindow(ui, ib) window.show() app.exec_() # SAS Eurobonus Mastercard # inputFilename = "/Users/laban/Documents/Ekonomi/SAS Eurobonus Mastercard/" # card = "Fredriksson Simon (nnnnnn******nnnn)" # ib.importFile(inputFilename + "Kontoutdrag-201405.xlsx", card) # ib.importFile(inputFilename + "Kontoutdrag-201406.xlsx", card) # ib.importFile(inputFilename + "Kontoutdrag-201407.xlsx", card) # ib.importFile(inputFilename + "Kontoutdrag-201408.xlsx", card) # ib.importFile(inputFilename + "Kontoutdrag-201409.xlsx", card) # ib.importFile(inputFilename + "Kontoutdrag-201410.xlsx", card) # ib.importFile(inputFilename + "Kontoutdrag-201411.xlsx", card) # ib.importFile(inputFilename + "Kontoutdrag-201412.xlsx", card) # ib.importFile(inputFilename + "Kontoutdrag-201501.xlsx", card) # ib.importFile(inputFilename + "Kontoutdrag-201502.xlsx", card) # Diners # inputFilename = "/Users/laban/Documents/Ekonomi/Diners Club/" # card = "Diners Club" # ib.importFile(inputFilename + "Diners20140618.xls", card) # ib.importFile(inputFilename + "Diners20140721.xls", card) # ib.importFile(inputFilename + "Diners20140819.xls", card) # ib.importFile(inputFilename + "Diners20140918.xls", card) # ib.importFile(inputFilename + "Diners20141021.xls", card) # ib.importFile(inputFilename + "Diners20141118.xls", card) # ib.importFile(inputFilename + "Diners20141218.xls", card) # 
ib.importFile(inputFilename + "Diners20150120.xls", card) # ib.importFile(inputFilename + "Diners20150217.xls", card) # Swedbank # inputFilename = "/Users/laban/Documents/Ekonomi/Swedbank/Swedbank_20140530-20150304.txt" # card = "Privatkonto (nnnnn.nnnnnnnnnn)" # codepage = "utf8" # ib.importFile(inputFilename, card, codepage)
gpl-2.0
3,995,125,351,226,500,000
44.372881
98
0.661188
false
3.260658
false
true
false
Arcanemagus/SickRage
sickbeard/auto_postprocessor.py
1
1341
# coding=utf-8 # Author: Nic Wolfe <nic@wolfeden.ca> # URL: https://sick-rage.github.io # # This file is part of SickRage. # # SickRage is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # SickRage is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with SickRage. If not, see <http://www.gnu.org/licenses/>. from __future__ import unicode_literals import threading import sickbeard class PostProcessor(object): def __init__(self): self.lock = threading.Lock() self.amActive = False def run(self, force=False): """ Runs the postprocessor :param force: Forces postprocessing run :return: Returns when done without a return state/code """ self.amActive = True sickbeard.postProcessorTaskScheduler.action.add_item(sickbeard.TV_DOWNLOAD_DIR, force=force) self.amActive = False def __del__(self): pass
gpl-3.0
7,391,872,668,848,582,000
29.477273
100
0.700969
false
3.932551
false
false
false
alberts/check_mk
modules/inventory.py
1
11923
#!/usr/bin/python # -*- encoding: utf-8; py-indent-offset: 4 -*- # +------------------------------------------------------------------+ # | ____ _ _ __ __ _ __ | # | / ___| |__ ___ ___| | __ | \/ | |/ / | # | | | | '_ \ / _ \/ __| |/ / | |\/| | ' / | # | | |___| | | | __/ (__| < | | | | . \ | # | \____|_| |_|\___|\___|_|\_\___|_| |_|_|\_\ | # | | # | Copyright Mathias Kettner 2014 mk@mathias-kettner.de | # +------------------------------------------------------------------+ # # This file is part of Check_MK. # The official homepage is at http://mathias-kettner.de/check_mk. # # check_mk is free software; you can redistribute it and/or modify it # under the terms of the GNU General Public License as published by # the Free Software Foundation in version 2. check_mk is distributed # in the hope that it will be useful, but WITHOUT ANY WARRANTY; with- # out even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. See the GNU General Public License for more de- # ails. You should have received a copy of the GNU General Public # License along with GNU Make; see the file COPYING. If not, write # to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, # Boston, MA 02110-1301 USA. import gzip inventory_output_dir = var_dir + "/inventory" inventory_archive_dir = var_dir + "/inventory_archive" inventory_pprint_output = True # .--Plugins-------------------------------------------------------------. # | ____ _ _ | # | | _ \| |_ _ __ _(_)_ __ ___ | # | | |_) | | | | |/ _` | | '_ \/ __| | # | | __/| | |_| | (_| | | | | \__ \ | # | |_| |_|\__,_|\__, |_|_| |_|___/ | # | |___/ | # +----------------------------------------------------------------------+ # | Code for reading the inventory plugins, help functions that are | # | called by the plugins. 
# '----------------------------------------------------------------------' # Plugins register here inv_info = {} # Inventory plugins inv_export = {} # Inventory export hooks # Read all inventory plugins right now filelist = glob.glob(inventory_dir + "/*") filelist.sort() # read local checks *after* shipped ones! if local_inventory_dir: local_files = glob.glob(local_inventory_dir + "/*") local_files.sort() filelist += local_files # read include files always first, but still in the sorted # order with local ones last (possibly overriding variables) filelist = [ f for f in filelist if f.endswith(".include") ] + \ [ f for f in filelist if not f.endswith(".include") ] for f in filelist: if not f.endswith("~"): # ignore emacs-like backup files try: execfile(f) except Exception, e: sys.stderr.write("Error in inventory plugin file %s: %s\n" % (f, e)) if opt_debug: raise sys.exit(5) # Function for accessing the inventory tree of the current host # Example: path = "software.packages:17." # The path must end with : or . # -> software is a dict # -> packages is a list def inv_tree(path): global g_inv_tree node = g_inv_tree current_what = "." current_path = "" while path: parts = re.split("[:.]", path) name = parts[0] what = path[len(name)] path = path[1 + len(name):] current_path += what + name if current_what == '.': # node is a dict if name not in node: if what == '.': node[name] = {} else: node[name] = [] node = node[name] else: # node is a list try: index = int(name) except: raise MKGeneralException("Cannot convert index %s of path %s into int" % (name, current_path)) if type(node) != list: raise MKGeneralException("Path %s is exptected to by of type list, but is dict" % current_path) if index < 0 or index >= len(node): raise MKGeneralException("Index %d not existing in list node %s" % (index, current_path)) node = node[index] current_what = what return node # Removes empty nodes from a (sub)-tree. 
Returns # True if the tree itself is empty def inv_cleanup_tree(tree): if type(tree) == dict: for key, value in tree.items(): if inv_cleanup_tree(value): del tree[key] return not tree elif type(tree) == list: to_delete = [] for nr, entry in enumerate(tree): if inv_cleanup_tree(entry): to_delete.append(nr) for nr in to_delete[::-1]: del tree[nr] return not tree else: return False # cannot clean non-container nodes #. # .--Inventory-----------------------------------------------------------. # | ___ _ | # | |_ _|_ ____ _____ _ __ | |_ ___ _ __ _ _ | # | | || '_ \ \ / / _ \ '_ \| __/ _ \| '__| | | | | # | | || | | \ V / __/ | | | || (_) | | | |_| | | # | |___|_| |_|\_/ \___|_| |_|\__\___/|_| \__, | | # | |___/ | # +----------------------------------------------------------------------+ # | Code for doing the actual inventory | # '----------------------------------------------------------------------' def do_inv(hostnames): if not os.path.exists(inventory_output_dir): os.makedirs(inventory_output_dir) if not os.path.exists(inventory_archive_dir): os.makedirs(inventory_archive_dir) # No hosts specified: do all hosts and force caching if hostnames == None: hostnames = all_active_realhosts() global opt_use_cachefile opt_use_cachefile = True errors = [] for hostname in hostnames: try: verbose("Doing HW/SW-Inventory for %s..." 
% hostname) do_inv_for(hostname) verbose("..OK\n") except Exception, e: if opt_debug: raise verbose("Failed: %s\n" % e) errors.append("Failed to inventorize %s: %s" % (hostname, e)) if errors: raise MKGeneralException("\n".join(errors)) def do_inv_check(hostname): try: inv_tree, old_timestamp = do_inv_for(hostname) num_entries = count_nodes(g_inv_tree) if not num_entries: sys.stdout.write("WARN - Found no data\n") sys.exit(1) infotext = "found %d entries" % num_entries state = 0 if old_timestamp: path = inventory_archive_dir + "/" + hostname + "/%d" % old_timestamp old_tree = eval(file(path).read()) if inv_tree.get("software") != old_tree.get("software"): infotext += ", software changes" if opt_inv_sw_changes: state = opt_inv_sw_changes infotext += state_markers[opt_inv_sw_changes] if inv_tree.get("hardware") != old_tree.get("hardware"): infotext += ", hardware changes" if state == 2 or opt_inv_hw_changes == 2: state = 2 else: state = max(state, opt_inv_sw_changes) if opt_inv_hw_changes: infotext += state_markers[opt_inv_hw_changes] sys.stdout.write(core_state_names[state] + " - " + infotext + "\n") sys.exit(state) except Exception, e: if opt_debug: raise sys.stdout.write("WARN - Inventory failed: %s\n" % e) sys.exit(1) def count_nodes(tree): if type(tree) == dict: return len(tree) + sum([count_nodes(v) for v in tree.values()]) elif type(tree) == list: return len(tree) + sum([count_nodes(v) for v in tree]) elif tree == None: return 0 else: return 1 def do_inv_for(hostname): try: ipaddress = lookup_ipaddress(hostname) except: raise MKGeneralException("Cannot resolve hostname '%s'." 
% hostname) global g_inv_tree g_inv_tree = {} for secname, plugin in inv_info.items(): try: info = get_realhost_info(hostname, ipaddress, secname, 999999999999, ignore_check_interval = True) except Exception, e: if str(e): raise # Otherwise simply ignore missing agent section continue if not info: # section not present (None or []) # Note: this also excludes existing sections without info.. continue if opt_verbose: sys.stdout.write(tty_green + tty_bold + secname + " " + tty_normal) sys.stdout.flush() plugin["inv_function"](info) # Remove empty paths inv_cleanup_tree(g_inv_tree) old_timestamp = save_inv_tree(hostname) if opt_verbose: sys.stdout.write("..%s%s%d%s entries" % (tty_bold, tty_yellow, count_nodes(g_inv_tree), tty_normal)) sys.stdout.flush() run_inv_export_hooks(hostname, g_inv_tree) return g_inv_tree, old_timestamp # Returns the time stamp of the previous inventory with different # outcome or None. def save_inv_tree(hostname): if not os.path.exists(inventory_output_dir): os.makedirs(inventory_output_dir) old_time = None if inventory_pprint_output: r = pprint.pformat(g_inv_tree) else: r = repr(g_inv_tree) path = inventory_output_dir + "/" + hostname if g_inv_tree: old_tree = None if os.path.exists(path): try: old_tree = eval(file(path).read()) except: pass if old_tree != g_inv_tree: if old_tree: verbose("..changed") old_time = os.stat(path).st_mtime arcdir = "%s/%s" % (inventory_archive_dir, hostname) if not os.path.exists(arcdir): os.makedirs(arcdir) os.rename(path, arcdir + ("/%d" % old_time)) else: verbose("..new") file(path, "w").write(r + "\n") gzip.open(path + ".gz", "w").write(r + "\n") # Inform Livestatus about the latest inventory update file(inventory_output_dir + "/.last", "w") else: verbose("..unchanged") else: if os.path.exists(path): # Remove empty inventory files. 
Important for host inventory icon os.remove(path) if os.path.exists(path + ".gz"): os.remove(path + ".gz") return old_time def run_inv_export_hooks(hostname, tree): for hookname, ruleset in inv_exports.items(): entries = host_extra_conf(hostname, ruleset) if entries: if opt_verbose: sys.stdout.write(", running %s%s%s%s..." % (tty_blue, tty_bold, hookname, tty_normal)) sys.stdout.flush() params = entries[0] try: inv_export[hookname]["export_function"](hostname, params, tree) except Exception, e: if opt_debug: raise raise MKGeneralException("Failed to execute export hook %s: %s" % ( hookname, e))
gpl-2.0
6,273,460,965,785,690,000
34.171091
111
0.474713
false
3.900229
false
false
false
zhaofengli/refill
backend/refill/models/context.py
1
6166
from concurrent.futures import ThreadPoolExecutor from importlib import import_module from ..utils import Utils from uuid import uuid1 import mwparserfromhell import celery.utils.log import logging import re class Context: def __init__(self): """Initialize the context Note: This does not depend on Celery. If no Celery task is attached, Celery-related methods are noop. """ self._task = None self._page = None self.preferences = {} self.changes = [] self.errors = [] self.transforms = [] self.transformMetadata = {} self.currentTransform = None self.currentTransformIndex = 0 self.wikicode = None self.origWikicode = '' self.uuid = str(uuid1()) self.executor = ThreadPoolExecutor(max_workers=10) self.getLogger = logging.getLogger self.logging = self.getLogger('refill') def attachTask(self, task): """Attach a Celery Task object """ self._task = task self.getLogger = celery.utils.log.get_logger self.logging = self.getLogger('refill') def attachPage(self, page): """Attach a pywikibot page """ self._page = page def setPreferences(self, preferences): """Set user preferences """ self.preferences = preferences def getPreference(self, preference: str, default: str = None): """Get user preference """ return self.preferences.get(preference, default) def applyTransforms(self, wikicode: str): """Apply scheduled transforms on the wikicode """ self.wikicode = mwparserfromhell.parse(Utils.protectMarkers(wikicode, self.uuid)) self.origWikicode = wikicode for index, transform in enumerate(self.transforms): self.currentTransform = transform self.currentTransformIndex = index self._updateState() transform.apply(self.wikicode) def getResult(self): """Get the final result as Celery metadata """ return self._generateTaskMetadata() def getPage(self): """Get the associated pywikibot Page object """ if self._page: return self._page return False def getDateFormat(self): """Get the preferred date format of the page """ page = self.getPage() if not page: return False lang = page.site.lang 
userPreference = self.getPreference('dateFormat', {}).get(lang, False) if not self.wikicode: return userPreference if lang == 'en': try: hint = next(self.wikicode.ifilter_templates( recursive=False, matches=lambda e: re.match(r'^(U|u)se (mdy|dmy) dates$', str(e.name)), )) except StopIteration: return userPreference return 'mdy' if 'mdy' in str(hint.name) else 'dmy' return userPreference def reportProgress(self, state: str, percentage: float, metadata: dict): """Report progress of the current transform """ name = self.currentTransform.__class__.__name__ self.transformMetadata[name] = { 'state': state, 'percentage': percentage, 'metadata': metadata, } self._updateState() def reportChange(self, change: dict): """Report a change to the wikicode by the current transform """ change['transform'] = self.currentTransform.__class__.__name__ self.changes.append(change) return len(self.changes) - 1 def reportError(self, error: dict): """Report an error encountered during the current transform """ error['transform'] = self.currentTransform.__class__.__name__ self.errors.append(error) return len(self.errors) - 1 def _updateState(self): """Actually send our state to Celery """ if self._task: self._task.update_state(state='PROGRESS', meta=self._generateTaskMetadata()) def _generateTaskMetadata(self): """Generate task metadata for Celery """ # Generate percentage name = self.currentTransform.__class__.__name__ ind = self.currentTransformIndex if name in self.transformMetadata and \ 'percentage' in self.transformMetadata[name]: ind += self.transformMetadata[name]['percentage'] percentage = ind / len(self.transforms) # Generate partial wikicode wikicode = str(self.wikicode) if self.wikicode else '' # Generate wiki page information if self._page: site = self._page.site family = site.family wikipage = { 'fam': family.name, 'code': site.code, 'lang': site.lang, 'page': self._page.title(), 'upage': self._page.title(underscore=True), 'domain': site.hostname(), 'path': site.path(), 
'protocol': site.protocol(), 'editTime': self._page.editTime().totimestampformat(), 'startTime': site.getcurrenttimestamp(), } else: wikipage = {} cleanWikicode = Utils.unprotectMarkers(Utils.unmarkWikicode(wikicode), self.uuid) markedWikicode = Utils.unprotectMarkers(wikicode, self.uuid) return { 'overall': { 'percentage': percentage, 'currentTransform': self.currentTransform.__class__.__name__, 'currentTransformIndex': self.currentTransformIndex, 'totalTransforms': len(self.transforms), }, 'transforms': self.transformMetadata, 'changes': self.changes, 'errors': self.errors, 'wikicode': cleanWikicode, 'markedWikicode': markedWikicode, 'origWikicode': self.origWikicode, 'wikipage': wikipage, }
bsd-2-clause
8,985,644,120,945,626,000
31.452632
90
0.572494
false
4.550554
false
false
false
e-koch/VLA_Lband
14B-088/HI/imaging/imaging_tests/HI_testing_channel_clean.py
1
2992
import sys import numpy as np import os from tasks import clean, feather ''' Cleans an MS with a single channel given a mask and a model ''' default("clean") major, minor, revision = casadef.casa_version.split('.') casa_version = 100 * int(major) + 10 * int(minor) + int(revision) vis = sys.argv[-6] model = sys.argv[-5] mask = sys.argv[-4] use_all_fields = True if sys.argv[-3] == "T" else False use_multiscale = True if sys.argv[-2] == "T" else False use_tclean = True if sys.argv[-1] == "T" else False if model == "None": model = None if mask == "None": mask = None if use_tclean: if casa_version < 450: raise Warning("tclean only works for CASA versions above 4.5.") if use_all_fields: field = 'M33*' else: # Drop M33_3, the incorrect pointing. field = ",".join(["M33_{}".format(i) for i in range(1, 15) if i not in [3, 7]]) + ", M33_7_center" if use_multiscale: multiscale = [0, 4, 8, 20, 40, 80] # Different progression based on # pixels within a beam # multiscale = list(np.array([0, 1, 3, 9, 27, 81]) * 4) # multiscale = list(np.array([0, 2, 5]) * 4) else: multiscale = [] out_root = "{0}.CASAVer_{1}.Model_{2}.Mask_{3}.AllFields_{4}.MScale_{5}" \ ".Tclean_{6}".format(vis[:-3], casa_version, "T" if model is not None else "F", "T" if mask is not None else "F", "T" if use_all_fields else "F", "T" if use_multiscale else "F", "T" if use_tclean else "F") if use_tclean: from tasks import tclean tclean(vis=vis, imagename=out_root + '.clean', field=field, restfreq='1420.40575177MHz', specmode='cube', nchan=1, start=1, width=1, cell='3arcsec', scales=multiscale, niter=200000, threshold="1.8mJy/bm", gain=0.1, imsize=[2560, 2560], gridder='mosaic', weighting='natural', veltype='radio', pblimit=0.2, interpolation='linear', startmodel=model, usemask='user', mask=mask, phasecenter='J2000 01h33m50.904 +30d39m35.79', ) else: clean(vis=vis, imagename=out_root + '.clean', field=field, restfreq='1420.40575177MHz', mode='channel', width=1, nchan=1, start=1, cell='3arcsec', 
multiscale=multiscale, threshold='1.8mJy/beam', imagermode='mosaic', gain=0.1, imsize=[2560, 2560], weighting='natural', robust=0.0, niter=200000, pbcor=True, minpb=0.2, interpolation='linear', usescratch=False, phasecenter='J2000 01h33m50.904 +30d39m35.79', veltype='radio', modelimage=model, mask=mask) # Run feathering with the model if model is not None: if os.path.exists(out_root + ".clean.image"): feather(imagename=out_root + ".clean.image.feathered", highres=out_root + ".clean.image", lowres=model)
mit
-1,046,448,077,323,866,400
34.2
79
0.573529
false
3.126437
false
false
false
44px/redash
redash/settings/__init__.py
1
12942
import os

from funcy import distinct, remove

from .helpers import fix_assets_path, array_from_string, parse_boolean, int_or_none, set_from_string


def all_settings():
    """Return every setting defined at this module's top level.

    A "setting" is any global that is not callable, not a module, and not a
    dunder name -- i.e. the plain constant assignments below.

    :return: dict mapping setting name to its current value.
    """
    from types import ModuleType

    settings = {}
    # `.items()` exists on both Python 2 and 3 (unlike the original
    # `.iteritems()`, which is Python 2 only) and yields the same pairs.
    for name, item in globals().items():
        if not callable(item) and not name.startswith("__") and not isinstance(item, ModuleType):
            settings[name] = item

    return settings


REDIS_URL = os.environ.get('REDASH_REDIS_URL', os.environ.get('REDIS_URL', "redis://localhost:6379/0"))

PROXIES_COUNT = int(os.environ.get('REDASH_PROXIES_COUNT', "1"))

STATSD_HOST = os.environ.get('REDASH_STATSD_HOST', "127.0.0.1")
STATSD_PORT = int(os.environ.get('REDASH_STATSD_PORT', "8125"))
STATSD_PREFIX = os.environ.get('REDASH_STATSD_PREFIX', "redash")
STATSD_USE_TAGS = parse_boolean(os.environ.get('REDASH_STATSD_USE_TAGS', "false"))

# Connection settings for Redash's own database (where we store the queries, results, etc)
SQLALCHEMY_DATABASE_URI = os.environ.get("REDASH_DATABASE_URL", os.environ.get('DATABASE_URL', "postgresql:///postgres"))
SQLALCHEMY_MAX_OVERFLOW = int_or_none(os.environ.get("SQLALCHEMY_MAX_OVERFLOW"))
SQLALCHEMY_POOL_SIZE = int_or_none(os.environ.get("SQLALCHEMY_POOL_SIZE"))
SQLALCHEMY_DISABLE_POOL = parse_boolean(os.environ.get("SQLALCHEMY_DISABLE_POOL", "false"))
SQLALCHEMY_TRACK_MODIFICATIONS = False
SQLALCHEMY_ECHO = False

# Celery related settings
CELERY_BROKER = os.environ.get("REDASH_CELERY_BROKER", REDIS_URL)
CELERY_RESULT_BACKEND = os.environ.get(
    "REDASH_CELERY_RESULT_BACKEND",
    os.environ.get("REDASH_CELERY_BACKEND", CELERY_BROKER))
CELERY_RESULT_EXPIRES = int(os.environ.get(
    "REDASH_CELERY_RESULT_EXPIRES",
    os.environ.get("REDASH_CELERY_TASK_RESULT_EXPIRES", 3600 * 4)))

# The following enables periodic job (every 5 minutes) of removing unused query results.
QUERY_RESULTS_CLEANUP_ENABLED = parse_boolean(os.environ.get("REDASH_QUERY_RESULTS_CLEANUP_ENABLED", "true"))
QUERY_RESULTS_CLEANUP_COUNT = int(os.environ.get("REDASH_QUERY_RESULTS_CLEANUP_COUNT", "100"))
QUERY_RESULTS_CLEANUP_MAX_AGE = int(os.environ.get("REDASH_QUERY_RESULTS_CLEANUP_MAX_AGE", "7"))

SCHEMAS_REFRESH_SCHEDULE = int(os.environ.get("REDASH_SCHEMAS_REFRESH_SCHEDULE", 30))

# Authentication settings.
AUTH_TYPE = os.environ.get("REDASH_AUTH_TYPE", "api_key")
ENFORCE_HTTPS = parse_boolean(os.environ.get("REDASH_ENFORCE_HTTPS", "false"))
INVITATION_TOKEN_MAX_AGE = int(os.environ.get("REDASH_INVITATION_TOKEN_MAX_AGE", 60 * 60 * 24 * 7))

MULTI_ORG = parse_boolean(os.environ.get("REDASH_MULTI_ORG", "false"))

# Google OAuth login is enabled automatically once both credentials are set.
GOOGLE_CLIENT_ID = os.environ.get("REDASH_GOOGLE_CLIENT_ID", "")
GOOGLE_CLIENT_SECRET = os.environ.get("REDASH_GOOGLE_CLIENT_SECRET", "")
GOOGLE_OAUTH_ENABLED = bool(GOOGLE_CLIENT_ID and GOOGLE_CLIENT_SECRET)

# Enables the use of an externally-provided and trusted remote user via an HTTP
# header. The "user" must be an email address.
#
# By default the trusted header is X-Forwarded-Remote-User. You can change
# this by setting REDASH_REMOTE_USER_HEADER.
#
# Enabling this authentication method is *potentially dangerous*, and it is
# your responsibility to ensure that only a trusted frontend (usually on the
# same server) can talk to the redash backend server, otherwise people will be
# able to login as anyone they want by directly talking to the redash backend.
# You must *also* ensure that any special header in the original request is
# removed or always overwritten by your frontend, otherwise your frontend may
# pass it through to the backend unchanged.
#
# Note that redash will only check the remote user once, upon the first need
# for a login, and then set a cookie which keeps the user logged in. Dropping
# the remote user header after subsequent requests won't automatically log the
# user out. Doing so could be done with further work, but usually it's
# unnecessary.
#
# If you also set the organization setting auth_password_login_enabled to false,
# then your authentication will be seamless. Otherwise a link will be presented
# on the login page to trigger remote user auth.
REMOTE_USER_LOGIN_ENABLED = parse_boolean(os.environ.get("REDASH_REMOTE_USER_LOGIN_ENABLED", "false"))
REMOTE_USER_HEADER = os.environ.get("REDASH_REMOTE_USER_HEADER", "X-Forwarded-Remote-User")

# If the organization setting auth_password_login_enabled is not false, then users will still be
# able to login through Redash instead of the LDAP server
LDAP_LOGIN_ENABLED = parse_boolean(os.environ.get('REDASH_LDAP_LOGIN_ENABLED', 'false'))
# The LDAP directory address (ex. ldap://10.0.10.1:389)
LDAP_HOST_URL = os.environ.get('REDASH_LDAP_URL', None)
# The DN & password used to connect to LDAP to determine the identity of the user being authenticated.
# For AD this should be "org\\user".
LDAP_BIND_DN = os.environ.get('REDASH_LDAP_BIND_DN', None)
LDAP_BIND_DN_PASSWORD = os.environ.get('REDASH_LDAP_BIND_DN_PASSWORD', '')
# AD/LDAP email and display name keys
LDAP_DISPLAY_NAME_KEY = os.environ.get('REDASH_LDAP_DISPLAY_NAME_KEY', 'displayName')
LDAP_EMAIL_KEY = os.environ.get('REDASH_LDAP_EMAIL_KEY', "mail")
# Prompt that should be shown above username/email field.
LDAP_CUSTOM_USERNAME_PROMPT = os.environ.get('REDASH_LDAP_CUSTOM_USERNAME_PROMPT', 'LDAP/AD/SSO username:')
# LDAP Search DN TEMPLATE (for AD this should be "(sAMAccountName=%(username)s)"")
LDAP_SEARCH_TEMPLATE = os.environ.get('REDASH_LDAP_SEARCH_TEMPLATE', '(cn=%(username)s)')
# The schema to bind to (ex. cn=users,dc=ORG,dc=local)
LDAP_SEARCH_DN = os.environ.get('REDASH_LDAP_SEARCH_DN', os.environ.get('REDASH_SEARCH_DN'))

STATIC_ASSETS_PATH = fix_assets_path(os.environ.get("REDASH_STATIC_ASSETS_PATH", "../client/dist/"))

JOB_EXPIRY_TIME = int(os.environ.get("REDASH_JOB_EXPIRY_TIME", 3600 * 12))
# NOTE(review): the default COOKIE_SECRET below is a well-known value;
# deployments should always override it via REDASH_COOKIE_SECRET.
COOKIE_SECRET = os.environ.get("REDASH_COOKIE_SECRET", "c292a0a3aa32397cdb050e233733900f")
SESSION_COOKIE_SECURE = parse_boolean(os.environ.get("REDASH_SESSION_COOKIE_SECURE") or str(ENFORCE_HTTPS))

# Logging settings.
LOG_LEVEL = os.environ.get("REDASH_LOG_LEVEL", "INFO")
LOG_STDOUT = parse_boolean(os.environ.get('REDASH_LOG_STDOUT', 'false'))
LOG_PREFIX = os.environ.get('REDASH_LOG_PREFIX', '')
LOG_FORMAT = os.environ.get('REDASH_LOG_FORMAT', LOG_PREFIX + '[%(asctime)s][PID:%(process)d][%(levelname)s][%(name)s] %(message)s')
CELERYD_WORKER_LOG_FORMAT = os.environ.get(
    "REDASH_CELERYD_WORKER_LOG_FORMAT",
    os.environ.get('REDASH_CELERYD_LOG_FORMAT',
                   LOG_PREFIX + '[%(asctime)s][PID:%(process)d][%(levelname)s][%(processName)s] %(message)s'))
CELERYD_WORKER_TASK_LOG_FORMAT = os.environ.get(
    "REDASH_CELERYD_WORKER_TASK_LOG_FORMAT",
    os.environ.get('REDASH_CELERYD_TASK_LOG_FORMAT',
                   (LOG_PREFIX + '[%(asctime)s][PID:%(process)d][%(levelname)s][%(processName)s] '
                    'task_name=%(task_name)s '
                    'task_id=%(task_id)s %(message)s')))

# Mail settings:
MAIL_SERVER = os.environ.get('REDASH_MAIL_SERVER', 'localhost')
MAIL_PORT = int(os.environ.get('REDASH_MAIL_PORT', 25))
MAIL_USE_TLS = parse_boolean(os.environ.get('REDASH_MAIL_USE_TLS', 'false'))
MAIL_USE_SSL = parse_boolean(os.environ.get('REDASH_MAIL_USE_SSL', 'false'))
MAIL_USERNAME = os.environ.get('REDASH_MAIL_USERNAME', None)
MAIL_PASSWORD = os.environ.get('REDASH_MAIL_PASSWORD', None)
MAIL_DEFAULT_SENDER = os.environ.get('REDASH_MAIL_DEFAULT_SENDER', None)
MAIL_MAX_EMAILS = os.environ.get('REDASH_MAIL_MAX_EMAILS', None)
MAIL_ASCII_ATTACHMENTS = parse_boolean(os.environ.get('REDASH_MAIL_ASCII_ATTACHMENTS', 'false'))

HOST = os.environ.get('REDASH_HOST', '')

ALERTS_DEFAULT_MAIL_SUBJECT_TEMPLATE = os.environ.get('REDASH_ALERTS_DEFAULT_MAIL_SUBJECT_TEMPLATE', "({state}) {alert_name}")

# How many requests are allowed per IP to the login page before
# being throttled?
# See https://flask-limiter.readthedocs.io/en/stable/#rate-limit-string-notation
THROTTLE_LOGIN_PATTERN = os.environ.get('REDASH_THROTTLE_LOGIN_PATTERN', '50/hour')
LIMITER_STORAGE = os.environ.get("REDASH_LIMITER_STORAGE", REDIS_URL)

# CORS settings for the Query Result API (and possbily future external APIs).
# In most cases all you need to do is set REDASH_CORS_ACCESS_CONTROL_ALLOW_ORIGIN
# to the calling domain (or domains in a comma separated list).
ACCESS_CONTROL_ALLOW_ORIGIN = set_from_string(os.environ.get("REDASH_CORS_ACCESS_CONTROL_ALLOW_ORIGIN", ""))
ACCESS_CONTROL_ALLOW_CREDENTIALS = parse_boolean(os.environ.get("REDASH_CORS_ACCESS_CONTROL_ALLOW_CREDENTIALS", "false"))
ACCESS_CONTROL_REQUEST_METHOD = os.environ.get("REDASH_CORS_ACCESS_CONTROL_REQUEST_METHOD", "GET, POST, PUT")
ACCESS_CONTROL_ALLOW_HEADERS = os.environ.get("REDASH_CORS_ACCESS_CONTROL_ALLOW_HEADERS", "Content-Type")

# Query Runners
default_query_runners = [
    'redash.query_runner.athena',
    'redash.query_runner.big_query',
    'redash.query_runner.google_spreadsheets',
    'redash.query_runner.graphite',
    'redash.query_runner.mongodb',
    'redash.query_runner.mysql',
    'redash.query_runner.pg',
    'redash.query_runner.url',
    'redash.query_runner.influx_db',
    'redash.query_runner.elasticsearch',
    'redash.query_runner.presto',
    'redash.query_runner.hive_ds',
    'redash.query_runner.impala_ds',
    'redash.query_runner.vertica',
    'redash.query_runner.clickhouse',
    'redash.query_runner.yandex_metrica',
    'redash.query_runner.treasuredata',
    'redash.query_runner.sqlite',
    'redash.query_runner.dynamodb_sql',
    'redash.query_runner.mssql',
    'redash.query_runner.memsql_ds',
    'redash.query_runner.mapd',
    'redash.query_runner.jql',
    'redash.query_runner.google_analytics',
    'redash.query_runner.axibase_tsd',
    'redash.query_runner.salesforce',
    'redash.query_runner.query_results',
    'redash.query_runner.prometheus',
    'redash.query_runner.qubole'
]

# Final runner list = (enabled + additional, deduplicated) minus disabled.
enabled_query_runners = array_from_string(os.environ.get("REDASH_ENABLED_QUERY_RUNNERS", ",".join(default_query_runners)))
additional_query_runners = array_from_string(os.environ.get("REDASH_ADDITIONAL_QUERY_RUNNERS", ""))
disabled_query_runners = array_from_string(os.environ.get("REDASH_DISABLED_QUERY_RUNNERS", ""))

QUERY_RUNNERS = remove(set(disabled_query_runners), distinct(enabled_query_runners + additional_query_runners))

ADHOC_QUERY_TIME_LIMIT = int_or_none(os.environ.get('REDASH_ADHOC_QUERY_TIME_LIMIT', None))

# Destinations
default_destinations = [
    'redash.destinations.email',
    'redash.destinations.slack',
    'redash.destinations.webhook',
    'redash.destinations.hipchat',
    'redash.destinations.mattermost',
    'redash.destinations.chatwork',
]

enabled_destinations = array_from_string(os.environ.get("REDASH_ENABLED_DESTINATIONS", ",".join(default_destinations)))
additional_destinations = array_from_string(os.environ.get("REDASH_ADDITIONAL_DESTINATIONS", ""))

DESTINATIONS = distinct(enabled_destinations + additional_destinations)

EVENT_REPORTING_WEBHOOKS = array_from_string(os.environ.get("REDASH_EVENT_REPORTING_WEBHOOKS", ""))

# Support for Sentry (http://getsentry.com/). Just set your Sentry DSN to enable it:
SENTRY_DSN = os.environ.get("REDASH_SENTRY_DSN", "")

# Client side toggles:
ALLOW_SCRIPTS_IN_USER_INPUT = parse_boolean(os.environ.get("REDASH_ALLOW_SCRIPTS_IN_USER_INPUT", "false"))
DATE_FORMAT = os.environ.get("REDASH_DATE_FORMAT", "DD/MM/YY")
# NOTE(review): `map()` returns a lazy iterator on Python 3 (a list on
# Python 2); confirm consumers expect that before porting this module.
DASHBOARD_REFRESH_INTERVALS = map(int, array_from_string(os.environ.get("REDASH_DASHBOARD_REFRESH_INTERVALS", "60,300,600,1800,3600,43200,86400")))
QUERY_REFRESH_INTERVALS = map(int, array_from_string(os.environ.get("REDASH_QUERY_REFRESH_INTERVALS", "60, 300, 600, 900, 1800, 3600, 7200, 10800, 14400, 18000, 21600, 25200, 28800, 32400, 36000, 39600, 43200, 86400, 604800, 1209600, 2592000")))

# Features:
VERSION_CHECK = parse_boolean(os.environ.get("REDASH_VERSION_CHECK", "true"))
FEATURE_DISABLE_REFRESH_QUERIES = parse_boolean(os.environ.get("REDASH_FEATURE_DISABLE_REFRESH_QUERIES", "false"))
FEATURE_SHOW_QUERY_RESULTS_COUNT = parse_boolean(os.environ.get("REDASH_FEATURE_SHOW_QUERY_RESULTS_COUNT", "true"))
FEATURE_SHOW_PERMISSIONS_CONTROL = parse_boolean(os.environ.get("REDASH_FEATURE_SHOW_PERMISSIONS_CONTROL", "false"))
FEATURE_ALLOW_CUSTOM_JS_VISUALIZATIONS = parse_boolean(os.environ.get("REDASH_FEATURE_ALLOW_CUSTOM_JS_VISUALIZATIONS", "false"))
FEATURE_DUMB_RECENTS = parse_boolean(os.environ.get("REDASH_FEATURE_DUMB_RECENTS", "false"))
FEATURE_AUTO_PUBLISH_NAMED_QUERIES = parse_boolean(os.environ.get("REDASH_FEATURE_AUTO_PUBLISH_NAMED_QUERIES", "true"))

# BigQuery
BIGQUERY_HTTP_TIMEOUT = int(os.environ.get("REDASH_BIGQUERY_HTTP_TIMEOUT", "600"))

# Enhance schema fetching
SCHEMA_RUN_TABLE_SIZE_CALCULATIONS = parse_boolean(os.environ.get("REDASH_SCHEMA_RUN_TABLE_SIZE_CALCULATIONS", "false"))

# Allow Parameters in Embeds
# WARNING: With this option enabled, Redash reads query parameters from the request URL (risk of SQL injection!)
ALLOW_PARAMETERS_IN_EMBEDS = parse_boolean(os.environ.get("REDASH_ALLOW_PARAMETERS_IN_EMBEDS", "false"))
bsd-2-clause
8,744,156,869,932,731,000
53.378151
245
0.738217
false
3.043028
false
false
false
our-city-app/oca-backend
src/rogerthat/web_client/pages/web_client.py
1
2973
# -*- coding: utf-8 -*- # Copyright 2020 Green Valley NV # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # @@license_version:1.5@@ import threading from datetime import datetime import webapp2 from dateutil.relativedelta import relativedelta from webapp2 import Request, Response from rogerthat.templates import get_language_from_request from rogerthat.web_client.models import COOKIE_KEY, WebClientSession, SESSION_EXPIRE_TIME class CurrentRequest(threading.local): def __init__(self): self.session = None # type: WebClientSession def set_session(self, session): self.session = session def get_session(self): return self.session _current_request = CurrentRequest() del CurrentRequest def get_current_web_session(): # type: () -> WebClientSession return _current_request.get_session() class WebRequestHandler(webapp2.RequestHandler): session = None # type: WebClientSession def get(self, *args, **kwargs): session = handle_web_request(self.request, self.response) _current_request.set_session(session) self.response.set_cookie(COOKIE_KEY, str(session.id), max_age=SESSION_EXPIRE_TIME, httponly=True) def get_language(self): session = get_current_web_session() return session.language if session else get_language_from_request(self.request) def handle_web_request(request, response): # type: (Request, Response) -> WebClientSession cookie = request.cookies.get(COOKIE_KEY) now = datetime.now() web_session = None should_save = False if cookie: try: session_id = long(cookie) web_session = 
WebClientSession.create_key(session_id).get() # Only update the session once per day if web_session and now > (web_session.last_use_date + relativedelta(days=1)): web_session.last_use_date = now should_save = True except ValueError: # Cookie is not an integer/long pass language = get_language_from_request(request) if not web_session: web_session = WebClientSession(last_use_date=now, language=language) should_save = True if web_session.language != language: web_session.language = language should_save = True if should_save: web_session.put() response.set_cookie(COOKIE_KEY, str(web_session.id), max_age=SESSION_EXPIRE_TIME, httponly=True) return web_session
apache-2.0
-7,659,937,712,498,835,000
32.404494
105
0.690885
false
3.906702
false
false
false
quantmind/lux
tests/mail/test_smtp.py
1
1139
from lux.utils import test
from lux.ext.smtp import EmailBackend


class SmtpTest(test.AppTestCase):
    """Exercise the SMTP email backend configured by the ``tests.mail`` app."""
    config_file = 'tests.mail'

    @classmethod
    def beforeAll(cls):
        # Route send_mails through the real sender so messages get recorded.
        mail_backend = cls.app.email_backend
        mail_backend.send_mails = mail_backend._send_mails

    def test_backend(self):
        # The configured backend must be the SMTP implementation.
        self.assertIsInstance(self.app.email_backend, EmailBackend)

    def test_send_mail(self):
        count = self.app.email_backend.send_mail(
            to='pippo@foo.com',
            subject='Hello!',
            message='This is a test message')
        self.assertEqual(count, 1)

    def test_send_html_mail(self):
        mail_backend = self.app.email_backend
        count = mail_backend.send_mail(
            to='pippo@foo.com',
            subject='Hello!',
            html_message='<p>This is a test</p>')
        self.assertEqual(count, 1)
        # Inspect the recorded message: (sender, recipients, payload).
        envelope, _ = mail_backend.sent.pop()
        self.assertEqual(envelope[1][0], 'pippo@foo.com')
        decoded = envelope[2].decode('utf-8')
        self.assertTrue('<p>This is a test</p>' in decoded)
bsd-3-clause
-7,785,915,141,844,986,000
33.515152
70
0.56453
false
3.900685
true
false
false
saklar13/Meowth
project/models.py
1
7499
from enum import IntEnum
import datetime

from project.bl.utils import Resource
from project.extensions import db
from project.lib.orm.types import TypeEnum, GUID
from sqlalchemy.ext.orderinglist import ordering_list
from sqlalchemy.ext.associationproxy import association_proxy
from project.lib.orm.conditions import ConditionDeleted, ConditionHidden


class Vacancy(db.Model):
    """A job vacancy, with soft-hide/soft-delete flags and visit counting."""
    __tablename__ = 'vacancies'

    id = db.Column(db.Integer, primary_key=True)
    title = db.Column(db.String(100), nullable=False)
    short_description = db.Column(db.String(300), nullable=False)
    text = db.Column(db.Text(), nullable=False)
    category_id = db.Column(db.Integer, db.ForeignKey('categories.id'))
    category = db.relationship('Category', backref=db.backref('vacancies'))
    # Slug used in the vacancy's URL; must be unique.
    name_in_url = db.Column(db.String(50), nullable=False, unique=True)
    visits = db.Column(db.Integer, nullable=False, default=0)
    salary = db.Column(db.String(50))
    description = db.Column(db.String(200))  # for search spider
    keywords = db.Column(db.String(1000))
    city_id = db.Column(db.Integer, db.ForeignKey('cities.id'))
    city = db.relationship('City', backref=db.backref('vacancies'))
    is_hidden = db.Column(db.Boolean, nullable=False, default=False)
    is_deleted = db.Column(db.Boolean, nullable=False, default=False)
    updated_at = db.Column(db.DateTime, default=datetime.datetime.now,
                           onupdate=datetime.datetime.now)
    user_id = db.Column(db.Integer, db.ForeignKey('users.id'))
    who_updated = db.relationship('User')
    condition_is_hidden = ConditionHidden()
    condition_is_deleted = ConditionDeleted()

    bl = Resource("bl.vacancy")

    def __repr__(self):
        return "[{}] {}".format(self.__class__.__name__, self.title)


class Category(db.Model):
    """Vacancy category (lookup table)."""
    __tablename__ = 'categories'

    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(50), nullable=False, unique=True)

    bl = Resource('bl.category')

    def __str__(self):
        return self.name

    def __repr__(self):
        return "[{}] {}".format(self.__class__.__name__, self.name)


class User(db.Model):
    """Back-office user with a staff/superuser role."""
    __tablename__ = 'users'

    # noinspection PyTypeChecker
    ROLE = IntEnum('Role', {
        'staff': 0,
        'superuser': 1,
    })

    id = db.Column(db.Integer, primary_key=True)
    login = db.Column(db.String(30), unique=True, nullable=False)
    password = db.Column(db.String(100), nullable=False)
    name = db.Column(db.String(30))
    surname = db.Column(db.String(30))
    email = db.Column(db.String(320), nullable=False, unique=True)
    role = db.Column(TypeEnum(ROLE), nullable=False, default=ROLE.staff)

    bl = Resource('bl.user')

    def __repr__(self):
        return '{} ({})'.format(self.login, self.get_full_name())

    def get_full_name(self):
        # Either part may be missing; strip so a lone name has no stray space.
        return '{} {}'.format(self.name or '', self.surname or '').strip()

    def is_superuser(self):
        return self.role == self.ROLE.superuser


class City(db.Model):
    """City a vacancy belongs to (lookup table)."""
    __tablename__ = 'cities'

    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(20), nullable=False, unique=True)

    bl = Resource('bl.city')

    def __str__(self):
        return self.name

    def __repr__(self):
        return "[{}] {}".format(self.__class__.__name__, self.name)


class BlockPageAssociation(db.Model):
    """Ordered many-to-many link between pages and page blocks."""
    __tablename__ = 'block_page_associations'

    page_id = db.Column(
        db.Integer,
        db.ForeignKey('pages.id'),
        primary_key=True
    )
    block_id = db.Column(
        db.Integer,
        db.ForeignKey('pageblocks.id'),
        primary_key=True
    )
    # Maintained by ordering_list on Page._blocks.
    position = db.Column(db.Integer)
    block = db.relationship(
        'PageBlock',
    )


class PageChunk(db.Model):
    """A named fragment of text that templates can include by name."""
    __tablename__ = 'pagechunks'

    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.Text, unique=True, nullable=False)  # use in template
    title = db.Column(db.Text, unique=True, nullable=False)
    text = db.Column(db.Text)

    bl = Resource('bl.pagechunk')


class PageBlock(db.Model):
    """A content block (text plus optional image) placed on pages."""
    __tablename__ = 'pageblocks'

    # noinspection PyTypeChecker
    TYPE = IntEnum(
        'Block_type',
        {
            'img_left': 0,
            'img_right': 1,
            'no_img': 2,
        },
    )

    id = db.Column(db.Integer, primary_key=True)
    block_type = db.Column(
        TypeEnum(TYPE),
        default=TYPE.img_left,
        nullable=False
    )
    # header
    title = db.Column(db.VARCHAR(128), nullable=True)
    text = db.Column(db.Text)
    # used for mainpage
    short_description = db.Column(db.VARCHAR(256), nullable=True)
    image = db.Column(db.Text, nullable=True)

    bl = Resource('bl.pageblock')

    def __str__(self):
        return '%s: %s' % (self.title, self.text or self.short_description)


class Page(db.Model):
    """A site page of a fixed type, composed of ordered PageBlocks."""
    __tablename__ = 'pages'

    # noinspection PyTypeChecker
    TYPE = IntEnum('Page_type', {
        'PROJECTS': 1,
        'ABOUT': 2,
        'CONTACTS': 3,
        'MAINPAGE': 4,
    })

    id = db.Column(db.Integer, primary_key=True)
    type = db.Column(TypeEnum(TYPE), unique=True, nullable=False)
    title = db.Column(db.VARCHAR(128))
    # Association objects kept sorted by `position`; deleting a page removes
    # its association rows via the delete-orphan cascade.
    _blocks = db.relationship(
        "BlockPageAssociation",
        order_by='BlockPageAssociation.position',
        collection_class=ordering_list('position'),
        cascade='save-update, merge, delete, delete-orphan',
    )
    # Proxy so callers work with PageBlock objects directly.
    blocks = association_proxy(
        '_blocks',
        'block',
        creator=lambda _pb: BlockPageAssociation(block=_pb)
    )

    bl = Resource('bl.page')

    def __str__(self):
        # NOTE(review): `self.url` is not defined on this model in this file;
        # presumably provided elsewhere (property/mixin) -- confirm.
        return '%s (%s)' % (self.title, self.url)


class Token(db.Model):
    """An authentication token issued to a user."""
    __tablename__ = 'tokens'

    id = db.Column(db.Integer, primary_key=True)
    user_id = db.Column(db.Integer, db.ForeignKey('users.id'))
    user = db.relationship(
        'User',
    )
    token = db.Column(db.String, nullable=False)

    bl = Resource('bl.token')


class MailTemplate(db.Model):
    """An editable e-mail template (subject + HTML body) per mail kind."""
    __tablename__ = 'mailtemplates'

    # noinspection PyTypeChecker
    MAIL = IntEnum('Mail', {
        'CV': 0,
        'REPLY': 1,
    })

    id = db.Column(db.Integer, primary_key=True)
    mail = db.Column(TypeEnum(MAIL), nullable=False)
    title = db.Column(db.String, nullable=False)
    subject = db.Column(db.String(79), nullable=False)
    html = db.Column(db.Text, nullable=False)
    help_msg = db.Column(db.Text)
    updated_at = db.Column(db.Date, onupdate=datetime.datetime.now,
                           default=datetime.datetime.now)

    bl = Resource('bl.mailtemplate')

    user_id = db.Column(db.Integer, db.ForeignKey('users.id'))
    who_updated = db.relationship('User')

    def __repr__(self):
        return str(self.title)


class UploadedImage(db.Model):
    """An uploaded image file, identified by (name GUID, extension, category)."""
    __tablename__ = 'uploaded_images'

    IMG_CATEGORY = IntEnum('ImageCategory', {
        'other': 0,
        'gallery': 1,
    })

    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(GUID, nullable=False)
    ext = db.Column(db.VARCHAR, nullable=False)
    img_category = db.Column(
        TypeEnum(IMG_CATEGORY),
        default=IMG_CATEGORY.other,
        nullable=False,
    )
    title = db.Column(db.VARCHAR(32))
    description = db.Column(db.VARCHAR(128))

    __table_args__ = (
        db.UniqueConstraint(
            'name',
            'ext',
            'img_category',
        ),
    )

    bl = Resource('bl.uploadedimage')


def init_db():
    # WARNING: destructive -- drops every table before recreating the schema.
    db.drop_all()
    db.create_all()
bsd-3-clause
8,629,560,758,626,716,000
27.622137
77
0.616349
false
3.465342
false
false
false
0sw4l/villas-de-san-pablo
apps/habilidades_blandas/migrations/0001_initial.py
1
1800
# -*- coding: utf-8 -*- # Generated by Django 1.11.1 on 2017-06-21 18:47 from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ ('personas', '0001_initial'), ] operations = [ migrations.CreateModel( name='Capacitacion', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('nombre', models.CharField(max_length=50)), ], options={ 'verbose_name': 'Capacitacion', 'verbose_name_plural': 'Capacitaciones', }, ), migrations.CreateModel( name='HabilidadBlanda', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('estado_certificado', models.CharField(choices=[('entregado', 'entregado'), ('en proceso', 'en proceso'), ('pendiente', 'pendiente')], max_length=30)), ('tipo_alerta', models.CharField(choices=[('baja', 'baja'), ('media', 'media'), ('alta', 'alta')], max_length=30, verbose_name='alertas')), ('test', models.BooleanField(default=False, verbose_name='Test de habilidades blandas')), ('observaciones', models.CharField(blank=True, max_length=100, null=True)), ('capacitacion', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='habilidades_blandas.Capacitacion')), ('persona', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='item_e', to='personas.Persona')), ], ), ]
mit
489,150,653,691,920,600
42.902439
168
0.587778
false
3.805497
false
false
false