repo_name (stringlengths 6-100) | path (stringlengths 4-294) | copies (stringlengths 1-5) | size (stringlengths 4-6) | content (stringlengths 606-896k) | license (stringclasses 15 values) | var_hash (int64 -9,223,186,179,200,150,000 to 9,223,291,175B) | doc_hash (int64 -9,223,304,365,658,930,000 to 9,223,309,051B) | line_mean (float64 3.5 to 99.8) | line_max (int64 13 to 999) | alpha_frac (float64 0.25 to 0.97) | autogenerated (bool, 1 class) |
---|---|---|---|---|---|---|---|---|---|---|---|
mgit-at/ansible | lib/ansible/modules/network/onyx/onyx_magp.py | 66 | 7830 | #!/usr/bin/python
#
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
---
module: onyx_magp
version_added: "2.5"
author: "Samer Deeb (@samerd)"
short_description: Manage MAGP protocol on Mellanox ONYX network devices
description:
- This module provides declarative management of MAGP protocol on vlan
interface of Mellanox ONYX network devices.
notes:
- Tested on ONYX 3.6.4000
options:
magp_id:
description:
- "MAGP instance number 1-255"
required: true
interface:
description:
- VLAN Interface name.
required: true
state:
description:
- MAGP state.
default: present
choices: ['present', 'absent', 'enabled', 'disabled']
router_ip:
description:
- MAGP router IP address.
router_mac:
description:
- MAGP router MAC address.
"""
EXAMPLES = """
- name: run add vlan interface with magp
onyx_magp:
magp_id: 103
router_ip: 192.168.8.2
router_mac: AA:1B:2C:3D:4E:5F
interface: Vlan 1002
"""
RETURN = """
commands:
description: The list of configuration mode commands to send to the device.
returned: always
type: list
sample:
- interface vlan 234 magp 103
- exit
- interface vlan 234 magp 103 ip virtual-router address 1.2.3.4
"""
import re
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.onyx.onyx import BaseOnyxModule
from ansible.module_utils.network.onyx.onyx import show_cmd
class OnyxMagpModule(BaseOnyxModule):
IF_VLAN_REGEX = re.compile(r"^Vlan (\d+)$")
@classmethod
def _get_element_spec(cls):
return dict(
magp_id=dict(type='int', required=True),
state=dict(default='present',
choices=['present', 'absent', 'enabled', 'disabled']),
interface=dict(required=True),
router_ip=dict(),
router_mac=dict(),
)
def init_module(self):
""" Ansible module initialization
"""
element_spec = self._get_element_spec()
argument_spec = dict()
argument_spec.update(element_spec)
self._module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True)
def validate_magp_id(self, value):
if value and not 1 <= int(value) <= 255:
self._module.fail_json(msg='magp id must be between 1 and 255')
def get_required_config(self):
module_params = self._module.params
interface = module_params['interface']
match = self.IF_VLAN_REGEX.match(interface)
vlan_id = 0
if match:
vlan_id = int(match.group(1))
else:
self._module.fail_json(
msg='Invalid interface name: should be "Vlan <vlan_id>"')
self._required_config = dict(
magp_id=module_params['magp_id'],
state=module_params['state'],
vlan_id=vlan_id,
router_ip=module_params['router_ip'],
router_mac=module_params['router_mac'])
self.validate_param_values(self._required_config)
@classmethod
def get_magp_id(cls, item):
header = cls.get_config_attr(item, "header")
return int(header.split()[1])
def _create_magp_instance_data(self, magp_id, item):
vlan_id = int(self.get_config_attr(item, "Interface vlan"))
state = self.get_config_attr(item, "Admin state").lower()
return dict(
magp_id=magp_id,
state=state,
vlan_id=vlan_id,
router_ip=self.get_config_attr(item, "Virtual IP"),
router_mac=self.get_config_attr(item, "Virtual MAC"))
def _update_magp_data(self, magp_data):
for magp_item in magp_data:
magp_id = self.get_magp_id(magp_item)
inst_data = self._create_magp_instance_data(magp_id, magp_item)
self._current_config[magp_id] = inst_data
def _get_magp_config(self):
cmd = "show magp"
return show_cmd(self._module, cmd, json_fmt=True, fail_on_error=False)
def load_current_config(self):
# called in base class in run function
self._current_config = dict()
magp_data = self._get_magp_config()
if magp_data:
self._update_magp_data(magp_data)
def _generate_no_magp_commands(self):
req_vlan_id = self._required_config['vlan_id']
req_magp_id = self._required_config['magp_id']
curr_magp_data = self._current_config.get(req_magp_id)
if not curr_magp_data:
return
        curr_vlan_id = curr_magp_data.get('vlan_id')
if curr_vlan_id == req_vlan_id:
cmd = 'interface vlan %s no magp %s' % (req_vlan_id, req_magp_id)
self._commands.append(cmd)
def _generate_magp_commands(self, req_state):
req_vlan_id = self._required_config['vlan_id']
req_magp_id = self._required_config['magp_id']
curr_magp_data = self._current_config.get(req_magp_id, dict())
curr_vlan_id = curr_magp_data.get('vlan_id')
magp_prefix = 'interface vlan %s magp %s' % (req_vlan_id, req_magp_id)
create_new_magp = False
if curr_vlan_id != req_vlan_id:
if curr_vlan_id:
cmd = 'interface vlan %s no magp %s' % (
curr_vlan_id, req_magp_id)
self._commands.append(cmd)
create_new_magp = True
self._commands.append(magp_prefix)
self._commands.append('exit')
req_router_ip = self._required_config['router_ip']
curr_router_ip = curr_magp_data.get('router_ip')
if req_router_ip:
if curr_router_ip != req_router_ip or create_new_magp:
cmd = '%s ip virtual-router address %s' % (
magp_prefix, req_router_ip)
self._commands.append(cmd)
else:
if curr_router_ip and curr_router_ip != '0.0.0.0':
cmd = '%s no ip virtual-router address' % magp_prefix
self._commands.append(cmd)
req_router_mac = self._required_config['router_mac']
curr_router_mac = curr_magp_data.get('router_mac')
if curr_router_mac:
curr_router_mac = curr_router_mac.lower()
if req_router_mac:
req_router_mac = req_router_mac.lower()
if curr_router_mac != req_router_mac or create_new_magp:
cmd = '%s ip virtual-router mac-address %s' % (
magp_prefix, req_router_mac)
self._commands.append(cmd)
else:
if curr_router_mac and curr_router_mac != '00:00:00:00:00:00':
cmd = '%s no ip virtual-router mac-address' % magp_prefix
self._commands.append(cmd)
if req_state in ('enabled', 'disabled'):
curr_state = curr_magp_data.get('state', 'enabled')
if curr_state != req_state:
if req_state == 'enabled':
suffix = 'no shutdown'
else:
suffix = 'shutdown'
cmd = '%s %s' % (magp_prefix, suffix)
self._commands.append(cmd)
def generate_commands(self):
req_state = self._required_config['state']
if req_state == 'absent':
return self._generate_no_magp_commands()
return self._generate_magp_commands(req_state)
def main():
""" main entry point for module execution
"""
OnyxMagpModule.main()
if __name__ == '__main__':
main()
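# Illustrative note (not part of the original module): generate_commands()
# diffs the requested state against the parsed "show magp" output, so a
# playbook run that already matches the device configuration appends no
# commands, while changing router_ip in the example above to 192.168.8.3
# would produce, for instance:
#     interface vlan 1002 magp 103 ip virtual-router address 192.168.8.3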
| gpl-3.0 | -843,410,199,158,102,000 | -230,030,497,900,502,000 | 33.493392 | 92 | 0.582503 | false |
pshen/ansible | test/runner/lib/cloud/aws.py | 58 | 3311 | """AWS plugin for integration tests."""
from __future__ import absolute_import, print_function
import os
from lib.util import (
ApplicationError,
display,
is_shippable,
)
from lib.cloud import (
CloudProvider,
CloudEnvironment,
)
from lib.core_ci import (
AnsibleCoreCI,
)
class AwsCloudProvider(CloudProvider):
"""AWS cloud provider plugin. Sets up cloud resources before delegation."""
def filter(self, targets, exclude):
"""Filter out the cloud tests when the necessary config and resources are not available.
:type targets: tuple[TestTarget]
:type exclude: list[str]
"""
if os.path.isfile(self.config_static_path):
return
aci = self._create_ansible_core_ci()
if os.path.isfile(aci.ci_key):
return
if is_shippable():
return
super(AwsCloudProvider, self).filter(targets, exclude)
def setup(self):
"""Setup the cloud resource before delegation and register a cleanup callback."""
super(AwsCloudProvider, self).setup()
aws_config_path = os.path.expanduser('~/.aws')
if os.path.exists(aws_config_path) and not self.args.docker and not self.args.remote:
raise ApplicationError('Rename "%s" or use the --docker or --remote option to isolate tests.' % aws_config_path)
if not self._use_static_config():
self._setup_dynamic()
def _setup_dynamic(self):
"""Request AWS credentials through the Ansible Core CI service."""
display.info('Provisioning %s cloud environment.' % self.platform, verbosity=1)
config = self._read_config_template()
aci = self._create_ansible_core_ci()
response = aci.start()
if not self.args.explain:
credentials = response['aws']['credentials']
values = dict(
ACCESS_KEY=credentials['access_key'],
SECRET_KEY=credentials['secret_key'],
SECURITY_TOKEN=credentials['session_token'],
)
config = self._populate_config_template(config, values)
self._write_config(config)
def _create_ansible_core_ci(self):
"""
:rtype: AnsibleCoreCI
"""
return AnsibleCoreCI(self.args, 'aws', 'sts', persist=False, stage=self.args.remote_stage)
class AwsCloudEnvironment(CloudEnvironment):
"""AWS cloud environment plugin. Updates integration test environment after delegation."""
def configure_environment(self, env, cmd):
"""
:type env: dict[str, str]
:type cmd: list[str]
"""
cmd.append('-e')
cmd.append('@%s' % self.config_path)
cmd.append('-e')
cmd.append('resource_prefix=%s' % self.resource_prefix)
def on_failure(self, target, tries):
"""
:type target: TestTarget
:type tries: int
"""
if not tries and self.managed:
display.notice('If %s failed due to permissions, the IAM test policy may need to be updated. '
'For help, consult @mattclay or @gundalow on GitHub or #ansible-devel on IRC.' % target.name)
@property
def inventory_hosts(self):
"""
:rtype: str | None
"""
return 'amazon'
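# Illustrative note (not part of the original plugin): configure_environment()
# above effectively extends the ansible-playbook invocation used by the
# integration tests with
#     -e @<config_path> -e resource_prefix=<resource_prefix>
# so the written credentials file and a unique resource prefix reach the plays.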
| gpl-3.0 | 8,588,095,173,000,436,000 | -4,894,962,827,250,990,000 | 28.828829 | 124 | 0.605859 | false |
kamalx/edx-platform | common/djangoapps/student/roles.py | 30 | 11451 | """
Classes used to model the roles used in the courseware. Each role is responsible for checking membership,
adding users, removing users, and listing members
"""
from abc import ABCMeta, abstractmethod
from django.contrib.auth.models import User
import logging
from student.models import CourseAccessRole
from xmodule_django.models import CourseKeyField
log = logging.getLogger(__name__)
# A list of registered access roles.
REGISTERED_ACCESS_ROLES = {}
def register_access_role(cls):
"""
Decorator that allows access roles to be registered within the roles module and referenced by their
string values.
Assumes that the decorated class has a "ROLE" attribute, defining its type.
"""
try:
role_name = getattr(cls, 'ROLE')
REGISTERED_ACCESS_ROLES[role_name] = cls
except AttributeError:
log.exception(u"Unable to register Access Role with attribute 'ROLE'.")
return cls
class RoleCache(object):
"""
A cache of the CourseAccessRoles held by a particular user
"""
def __init__(self, user):
self._roles = set(
CourseAccessRole.objects.filter(user=user).all()
)
def has_role(self, role, course_id, org):
"""
Return whether this RoleCache contains a role with the specified role, course_id, and org
"""
return any(
access_role.role == role and
access_role.course_id == course_id and
access_role.org == org
for access_role in self._roles
)
class AccessRole(object):
"""
Object representing a role with particular access to a resource
"""
__metaclass__ = ABCMeta
@abstractmethod
def has_user(self, user): # pylint: disable=unused-argument
"""
Return whether the supplied django user has access to this role.
"""
return False
@abstractmethod
def add_users(self, *users):
"""
Add the role to the supplied django users.
"""
pass
@abstractmethod
def remove_users(self, *users):
"""
Remove the role from the supplied django users.
"""
pass
@abstractmethod
def users_with_role(self):
"""
Return a django QuerySet for all of the users with this role
"""
return User.objects.none()
class GlobalStaff(AccessRole):
"""
The global staff role
"""
def has_user(self, user):
return user.is_staff
def add_users(self, *users):
for user in users:
if (user.is_authenticated() and user.is_active):
user.is_staff = True
user.save()
def remove_users(self, *users):
for user in users:
# don't check is_authenticated nor is_active on purpose
user.is_staff = False
user.save()
def users_with_role(self):
raise Exception("This operation is un-indexed, and shouldn't be used")
class RoleBase(AccessRole):
"""
Roles by type (e.g., instructor, beta_user) and optionally org, course_key
"""
def __init__(self, role_name, org='', course_key=None):
"""
Create role from required role_name w/ optional org and course_key. You may just provide a role
name if it's a global role (not constrained to an org or course). Provide org if constrained to
an org. Provide org and course if constrained to a course. Although, you should use the subclasses
for all of these.
"""
super(RoleBase, self).__init__()
self.org = org
self.course_key = course_key
self._role_name = role_name
def has_user(self, user):
"""
Return whether the supplied django user has access to this role.
"""
if not (user.is_authenticated() and user.is_active):
return False
# pylint: disable=protected-access
if not hasattr(user, '_roles'):
# Cache a list of tuples identifying the particular roles that a user has
# Stored as tuples, rather than django models, to make it cheaper to construct objects for comparison
user._roles = RoleCache(user)
return user._roles.has_role(self._role_name, self.course_key, self.org)
def add_users(self, *users):
"""
Add the supplied django users to this role.
"""
# silently ignores anonymous and inactive users so that any that are
# legit get updated.
from student.models import CourseAccessRole
for user in users:
            if user.is_authenticated() and user.is_active and not self.has_user(user):
entry = CourseAccessRole(user=user, role=self._role_name, course_id=self.course_key, org=self.org)
entry.save()
if hasattr(user, '_roles'):
del user._roles
def remove_users(self, *users):
"""
Remove the supplied django users from this role.
"""
entries = CourseAccessRole.objects.filter(
user__in=users, role=self._role_name, org=self.org, course_id=self.course_key
)
entries.delete()
for user in users:
if hasattr(user, '_roles'):
del user._roles
def users_with_role(self):
"""
Return a django QuerySet for all of the users with this role
"""
# Org roles don't query by CourseKey, so use CourseKeyField.Empty for that query
if self.course_key is None:
self.course_key = CourseKeyField.Empty
entries = User.objects.filter(
courseaccessrole__role=self._role_name,
courseaccessrole__org=self.org,
courseaccessrole__course_id=self.course_key
)
return entries
class CourseRole(RoleBase):
"""
A named role in a particular course
"""
def __init__(self, role, course_key):
"""
Args:
course_key (CourseKey)
"""
super(CourseRole, self).__init__(role, course_key.org, course_key)
@classmethod
    def course_group_already_exists(cls, course_key):
return CourseAccessRole.objects.filter(org=course_key.org, course_id=course_key).exists()
class OrgRole(RoleBase):
"""
A named role in a particular org independent of course
"""
def __init__(self, role, org):
super(OrgRole, self).__init__(role, org)
@register_access_role
class CourseStaffRole(CourseRole):
"""A Staff member of a course"""
ROLE = 'staff'
def __init__(self, *args, **kwargs):
super(CourseStaffRole, self).__init__(self.ROLE, *args, **kwargs)
@register_access_role
class CourseInstructorRole(CourseRole):
"""A course Instructor"""
ROLE = 'instructor'
def __init__(self, *args, **kwargs):
super(CourseInstructorRole, self).__init__(self.ROLE, *args, **kwargs)
@register_access_role
class CourseFinanceAdminRole(CourseRole):
"""A course staff member with privileges to review financial data."""
ROLE = 'finance_admin'
def __init__(self, *args, **kwargs):
super(CourseFinanceAdminRole, self).__init__(self.ROLE, *args, **kwargs)
@register_access_role
class CourseSalesAdminRole(CourseRole):
"""A course staff member with privileges to perform sales operations. """
ROLE = 'sales_admin'
def __init__(self, *args, **kwargs):
super(CourseSalesAdminRole, self).__init__(self.ROLE, *args, **kwargs)
@register_access_role
class CourseBetaTesterRole(CourseRole):
"""A course Beta Tester"""
ROLE = 'beta_testers'
def __init__(self, *args, **kwargs):
super(CourseBetaTesterRole, self).__init__(self.ROLE, *args, **kwargs)
@register_access_role
class LibraryUserRole(CourseRole):
"""
A user who can view a library and import content from it, but not edit it.
Used in Studio only.
"""
ROLE = 'library_user'
def __init__(self, *args, **kwargs):
super(LibraryUserRole, self).__init__(self.ROLE, *args, **kwargs)
class CourseCcxCoachRole(CourseRole):
"""A CCX Coach"""
ROLE = 'ccx_coach'
def __init__(self, *args, **kwargs):
super(CourseCcxCoachRole, self).__init__(self.ROLE, *args, **kwargs)
class OrgStaffRole(OrgRole):
"""An organization staff member"""
def __init__(self, *args, **kwargs):
super(OrgStaffRole, self).__init__('staff', *args, **kwargs)
class OrgInstructorRole(OrgRole):
"""An organization instructor"""
def __init__(self, *args, **kwargs):
super(OrgInstructorRole, self).__init__('instructor', *args, **kwargs)
class OrgLibraryUserRole(OrgRole):
"""
A user who can view any libraries in an org and import content from them, but not edit them.
Used in Studio only.
"""
ROLE = LibraryUserRole.ROLE
def __init__(self, *args, **kwargs):
super(OrgLibraryUserRole, self).__init__(self.ROLE, *args, **kwargs)
@register_access_role
class CourseCreatorRole(RoleBase):
"""
This is the group of people who have permission to create new courses (we may want to eventually
make this an org based role).
"""
ROLE = "course_creator_group"
def __init__(self, *args, **kwargs):
super(CourseCreatorRole, self).__init__(self.ROLE, *args, **kwargs)
class UserBasedRole(object):
"""
Backward mapping: given a user, manipulate the courses and roles
"""
def __init__(self, user, role):
"""
Create a UserBasedRole accessor: for a given user and role (e.g., "instructor")
"""
self.user = user
self.role = role
def has_course(self, course_key):
"""
Return whether the role's user has the configured role access to the passed course
"""
if not (self.user.is_authenticated() and self.user.is_active):
return False
# pylint: disable=protected-access
if not hasattr(self.user, '_roles'):
self.user._roles = RoleCache(self.user)
return self.user._roles.has_role(self.role, course_key, course_key.org)
def add_course(self, *course_keys):
"""
Grant this object's user the object's role for the supplied courses
"""
        if self.user.is_authenticated() and self.user.is_active:
for course_key in course_keys:
entry = CourseAccessRole(user=self.user, role=self.role, course_id=course_key, org=course_key.org)
entry.save()
if hasattr(self.user, '_roles'):
del self.user._roles
else:
raise ValueError("user is not active. Cannot grant access to courses")
def remove_courses(self, *course_keys):
"""
Remove the supplied courses from this user's configured role.
"""
entries = CourseAccessRole.objects.filter(user=self.user, role=self.role, course_id__in=course_keys)
entries.delete()
if hasattr(self.user, '_roles'):
del self.user._roles
def courses_with_role(self):
"""
Return a django QuerySet for all of the courses with this user x role. You can access
any of these properties on each result record:
* user (will be self.user--thus uninteresting)
* org
* course_id
* role (will be self.role--thus uninteresting)
"""
return CourseAccessRole.objects.filter(role=self.role, user=self.user)
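def _example_grant_and_check(user, course_key):
    """
    Illustrative sketch, not part of the original module: grant the course
    staff role to ``user`` for ``course_key`` (a CourseKey instance), verify
    it from the role side, then list the user's staff courses from the user
    side. The function name is hypothetical.
    """
    CourseStaffRole(course_key).add_users(user)
    assert CourseStaffRole(course_key).has_user(user)
    return UserBasedRole(user, CourseStaffRole.ROLE).courses_with_role()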
| agpl-3.0 | 7,294,773,134,185,642,000 | 2,938,873,392,439,274,500 | 30.116848 | 114 | 0.617413 | false |
mattcongy/itshop | docker-images/taigav2/taiga-back/taiga/events/events.py | 1 | 3483 | # -*- coding: utf-8 -*-
# Copyright (C) 2014-2016 Andrey Antukh <niwi@niwi.nz>
# Copyright (C) 2014-2016 Jesús Espino <jespinog@gmail.com>
# Copyright (C) 2014-2016 David Barragán <bameda@dbarragan.com>
# Copyright (C) 2014-2016 Alejandro Alonso <alejandro.alonso@kaleidos.net>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import collections
from django.db import connection
from taiga.base.utils import json
from taiga.base.utils.db import get_typename_for_model_instance
from . import middleware as mw
from . import backends
# The complete list of content types
# of allowed models for change events
watched_types = set([
"userstories.userstory",
"issues.issue",
"tasks.task",
"wiki.wiki_page",
"milestones.milestone",
])
def emit_event(data:dict, routing_key:str, *,
sessionid:str=None, channel:str="events",
on_commit:bool=True):
if not sessionid:
sessionid = mw.get_current_session_id()
data = {"session_id": sessionid,
"data": data}
backend = backends.get_events_backend()
def backend_emit_event():
backend.emit_event(message=json.dumps(data), routing_key=routing_key, channel=channel)
if on_commit:
connection.on_commit(backend_emit_event)
else:
backend_emit_event()
def emit_event_for_model(obj, *, type:str="change", channel:str="events",
content_type:str=None, sessionid:str=None):
"""
Sends a model change event.
"""
if obj._importing:
return None
assert type in set(["create", "change", "delete"])
assert hasattr(obj, "project_id")
if not content_type:
content_type = get_typename_for_model_instance(obj)
projectid = getattr(obj, "project_id")
pk = getattr(obj, "pk", None)
app_name, model_name = content_type.split(".", 1)
routing_key = "changes.project.{0}.{1}".format(projectid, app_name)
data = {"type": type,
"matches": content_type,
"pk": pk}
return emit_event(routing_key=routing_key,
channel=channel,
sessionid=sessionid,
data=data)
def emit_event_for_ids(ids, content_type:str, projectid:int, *,
type:str="change", channel:str="events", sessionid:str=None):
assert type in set(["create", "change", "delete"])
assert isinstance(ids, collections.Iterable)
assert content_type, "'content_type' parameter is mandatory"
app_name, model_name = content_type.split(".", 1)
routing_key = "changes.project.{0}.{1}".format(projectid, app_name)
data = {"type": type,
"matches": content_type,
"pk": ids}
return emit_event(routing_key=routing_key,
channel=channel,
sessionid=sessionid,
data=data)
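def _example_emit_userstory_change(user_story):
    """
    Illustrative sketch, not part of the original module: emit a change event
    for a user story instance; per emit_event_for_model() above, the routing
    key resolves to "changes.project.<project_id>.userstories". The function
    name is hypothetical.
    """
    return emit_event_for_model(user_story, type="change")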
| mit | -8,769,309,758,276,039,000 | -8,699,667,271,333,267,000 | 31.53271 | 94 | 0.644642 | false |
kivymd/KivyMD | kivymd/uix/banner.py | 1 | 11889 | """
Components/Banner
=================
.. seealso::
`Material Design spec, Banner <https://material.io/components/banners>`_
.. rubric:: A banner displays a prominent message and related optional actions.
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/banner.png
:align: center
Usage
=====
.. code-block:: python
from kivy.lang import Builder
from kivy.factory import Factory
from kivymd.app import MDApp
Builder.load_string('''
<ExampleBanner@Screen>
MDBanner:
id: banner
text: ["One line string text example without actions."]
# The widget that is under the banner.
# It will be shifted down to the height of the banner.
over_widget: screen
vertical_pad: toolbar.height
MDToolbar:
id: toolbar
title: "Example Banners"
elevation: 10
pos_hint: {'top': 1}
BoxLayout:
id: screen
orientation: "vertical"
size_hint_y: None
height: Window.height - toolbar.height
OneLineListItem:
text: "Banner without actions"
on_release: banner.show()
Widget:
''')
class Test(MDApp):
def build(self):
return Factory.ExampleBanner()
Test().run()
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/banner-example-1.gif
:align: center
.. rubric:: Banner type.
By default, the banner is of the type ``'one-line'``:
.. code-block:: kv
MDBanner:
text: ["One line string text example without actions."]
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/banner-one-line.png
:align: center
To use a two-line banner, specify the ``'two-line'`` :attr:`MDBanner.type` for the banner
and pass the list of two lines to the :attr:`MDBanner.text` parameter:
.. code-block:: kv
MDBanner:
type: "two-line"
text:
["One line string text example without actions.", "This is the second line of the banner message."]
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/banner-two-line.png
:align: center
Similarly, create a three-line banner:
.. code-block:: kv
MDBanner:
type: "three-line"
text:
["One line string text example without actions.", "This is the second line of the banner message.", "and this is the third line of the banner message."]
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/banner-three-line.png
:align: center
To add buttons to any type of banner,
use the :attr:`MDBanner.left_action` and :attr:`MDBanner.right_action` parameters,
which should take a list ['Button name', function]:
.. code-block:: kv
MDBanner:
text: ["One line string text example without actions."]
left_action: ["CANCEL", lambda x: None]
Or two buttons:
.. code-block:: kv
MDBanner:
text: ["One line string text example without actions."]
left_action: ["CANCEL", lambda x: None]
right_action: ["CLOSE", lambda x: None]
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/banner-actions.png
:align: center
If you want to use the icon on the left in the banner,
add the prefix `'-icon'` to the banner type:
.. code-block:: kv
MDBanner:
type: "one-line-icon"
icon: f"{images_path}/kivymd.png"
text: ["One line string text example without actions."]
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/banner-icon.png
:align: center
.. Note:: `See full example <https://github.com/kivymd/KivyMD/wiki/Components-Banner>`_
"""
__all__ = ("MDBanner",)
from kivy.animation import Animation
from kivy.clock import Clock
from kivy.lang import Builder
from kivy.metrics import dp
from kivy.properties import (
ListProperty,
NumericProperty,
ObjectProperty,
OptionProperty,
StringProperty,
)
from kivy.uix.widget import Widget
from kivymd.uix.button import MDFlatButton
from kivymd.uix.card import MDCard
from kivymd.uix.list import (
OneLineAvatarListItem,
OneLineListItem,
ThreeLineAvatarListItem,
ThreeLineListItem,
TwoLineAvatarListItem,
TwoLineListItem,
)
Builder.load_string(
"""
#:import Window kivy.core.window.Window
#:import Clock kivy.clock.Clock
<ThreeLineIconBanner>
text: root.text_message[0]
secondary_text: root.text_message[1]
tertiary_text: root.text_message[2]
divider: None
_no_ripple_effect: True
ImageLeftWidget:
source: root.icon
<TwoLineIconBanner>
text: root.text_message[0]
secondary_text: root.text_message[1]
divider: None
_no_ripple_effect: True
ImageLeftWidget:
source: root.icon
<OneLineIconBanner>
text: root.text_message[0]
divider: None
_no_ripple_effect: True
ImageLeftWidget:
source: root.icon
<ThreeLineBanner>
text: root.text_message[0]
secondary_text: root.text_message[1]
tertiary_text: root.text_message[2]
divider: None
_no_ripple_effect: True
<TwoLineBanner>
text: root.text_message[0]
secondary_text: root.text_message[1]
divider: None
_no_ripple_effect: True
<OneLineBanner>
text: root.text_message[0]
divider: None
_no_ripple_effect: True
<MDBanner>
size_hint_y: None
height: self.minimum_height
banner_y: 0
orientation: "vertical"
y: Window.height - self.banner_y
canvas:
Color:
rgba: 0, 0, 0, 0
Rectangle:
pos: self.pos
size: self.size
BoxLayout:
id: container_message
size_hint_y: None
height: self.minimum_height
BoxLayout:
size_hint: None, None
size: self.minimum_size
pos_hint: {"right": 1}
padding: 0, 0, "8dp", "8dp"
spacing: "8dp"
BoxLayout:
id: left_action_box
size_hint: None, None
size: self.minimum_size
BoxLayout:
id: right_action_box
size_hint: None, None
size: self.minimum_size
"""
)
class MDBanner(MDCard):
vertical_pad = NumericProperty(dp(68))
"""
Indent the banner at the top of the screen.
:attr:`vertical_pad` is an :class:`~kivy.properties.NumericProperty`
and defaults to `dp(68)`.
"""
opening_transition = StringProperty("in_quad")
"""
The name of the animation transition.
:attr:`opening_transition` is an :class:`~kivy.properties.StringProperty`
and defaults to `'in_quad'`.
"""
icon = StringProperty("data/logo/kivy-icon-128.png")
"""Icon banner.
:attr:`icon` is an :class:`~kivy.properties.StringProperty`
and defaults to `'data/logo/kivy-icon-128.png'`.
"""
over_widget = ObjectProperty()
"""
The widget that is under the banner.
It will be shifted down to the height of the banner.
:attr:`over_widget` is an :class:`~kivy.properties.ObjectProperty`
and defaults to `None`.
"""
text = ListProperty()
"""List of lines for banner text.
Must contain no more than three lines for a
`'one-line'`, `'two-line'` and `'three-line'` banner, respectively.
:attr:`text` is an :class:`~kivy.properties.ListProperty`
and defaults to `[]`.
"""
left_action = ListProperty()
"""The action of banner.
To add one action, make a list [`'name_action'`, callback]
where `'name_action'` is a string that corresponds to an action name and
``callback`` is the function called on a touch release event.
:attr:`left_action` is an :class:`~kivy.properties.ListProperty`
and defaults to `[]`.
"""
right_action = ListProperty()
"""Works the same way as :attr:`left_action`.
:attr:`right_action` is an :class:`~kivy.properties.ListProperty`
and defaults to `[]`.
"""
type = OptionProperty(
"one-line",
options=[
"one-line",
"two-line",
"three-line",
"one-line-icon",
"two-line-icon",
"three-line-icon",
],
allownone=True,
)
"""Banner type. . Available options are: (`"one-line"`, `"two-line"`,
`"three-line"`, `"one-line-icon"`, `"two-line-icon"`, `"three-line-icon"`).
:attr:`type` is an :class:`~kivy.properties.OptionProperty`
and defaults to `'one-line'`.
"""
_type_message = None
_progress = False
def add_actions_buttons(self, box, data):
if data:
name_action_button, function_action_button = data
action_button = MDFlatButton(
text=f"[b]{name_action_button}[/b]",
theme_text_color="Custom",
text_color=self.theme_cls.primary_color,
on_release=function_action_button,
)
action_button.markup = True
box.add_widget(action_button)
def set_left_action(self):
self.add_actions_buttons(self.ids.left_action_box, self.left_action)
def set_right_action(self):
self.add_actions_buttons(self.ids.right_action_box, self.right_action)
def set_type_banner(self):
self._type_message = {
"three-line-icon": ThreeLineIconBanner,
"two-line-icon": TwoLineIconBanner,
"one-line-icon": OneLineIconBanner,
"three-line": ThreeLineBanner,
"two-line": TwoLineBanner,
"one-line": OneLineBanner,
}[self.type]
def add_banner_to_container(self):
self.ids.container_message.add_widget(
self._type_message(text_message=self.text, icon=self.icon)
)
def show(self):
def show(interval):
self.set_type_banner()
self.set_left_action()
self.set_right_action()
self.add_banner_to_container()
Clock.schedule_once(self.animation_display_banner, 0.1)
if self._progress:
return
self._progress = True
if self.ids.container_message.children:
self.hide()
Clock.schedule_once(show, 0.7)
def animation_display_banner(self, i):
Animation(
banner_y=self.height + self.vertical_pad,
d=0.15,
t=self.opening_transition,
).start(self)
anim = Animation(
y=self.over_widget.y - self.height,
d=0.15,
t=self.opening_transition,
)
anim.bind(on_complete=self._reset_progress)
anim.start(self.over_widget)
def hide(self):
def hide(interval):
anim = Animation(banner_y=0, d=0.15)
anim.bind(on_complete=self._remove_banner)
anim.start(self)
Animation(y=self.over_widget.y + self.height, d=0.15).start(
self.over_widget
)
Clock.schedule_once(hide, 0.5)
def _remove_banner(self, *args):
self.ids.container_message.clear_widgets()
self.ids.left_action_box.clear_widgets()
self.ids.right_action_box.clear_widgets()
def _reset_progress(self, *args):
self._progress = False
class BaseBanner(Widget):
text_message = ListProperty(["", "", ""])
icon = StringProperty()
def on_touch_down(self, touch):
self.parent.parent.hide()
class ThreeLineIconBanner(ThreeLineAvatarListItem, BaseBanner):
pass
class TwoLineIconBanner(TwoLineAvatarListItem, BaseBanner):
pass
class OneLineIconBanner(OneLineAvatarListItem, BaseBanner):
pass
class ThreeLineBanner(ThreeLineListItem, BaseBanner):
pass
class TwoLineBanner(TwoLineListItem, BaseBanner):
pass
class OneLineBanner(OneLineListItem, BaseBanner):
pass
| mit | -1,750,471,527,671,242,800 | -3,692,761,362,416,632,000 | 25.12967 | 164 | 0.61948 | false |
HadiOfBBG/pegasusrises | gdata/tlslite/integration/HTTPTLSConnection.py | 271 | 6668 | """TLS Lite + httplib."""
import socket
import httplib
from gdata.tlslite.TLSConnection import TLSConnection
from gdata.tlslite.integration.ClientHelper import ClientHelper
class HTTPBaseTLSConnection(httplib.HTTPConnection):
"""This abstract class provides a framework for adding TLS support
to httplib."""
default_port = 443
def __init__(self, host, port=None, strict=None):
if strict == None:
#Python 2.2 doesn't support strict
httplib.HTTPConnection.__init__(self, host, port)
else:
httplib.HTTPConnection.__init__(self, host, port, strict)
def connect(self):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
if hasattr(sock, 'settimeout'):
sock.settimeout(10)
sock.connect((self.host, self.port))
#Use a TLSConnection to emulate a socket
self.sock = TLSConnection(sock)
#When httplib closes this, close the socket
self.sock.closeSocket = True
self._handshake(self.sock)
def _handshake(self, tlsConnection):
"""Called to perform some sort of handshake.
This method must be overridden in a subclass to do some type of
handshake. This method will be called after the socket has
been connected but before any data has been sent. If this
method does not raise an exception, the TLS connection will be
considered valid.
This method may (or may not) be called every time an HTTP
request is performed, depending on whether the underlying HTTP
connection is persistent.
@type tlsConnection: L{tlslite.TLSConnection.TLSConnection}
@param tlsConnection: The connection to perform the handshake
on.
"""
raise NotImplementedError()
class HTTPTLSConnection(HTTPBaseTLSConnection, ClientHelper):
"""This class extends L{HTTPBaseTLSConnection} to support the
common types of handshaking."""
def __init__(self, host, port=None,
username=None, password=None, sharedKey=None,
certChain=None, privateKey=None,
cryptoID=None, protocol=None,
x509Fingerprint=None,
x509TrustList=None, x509CommonName=None,
settings = None):
"""Create a new HTTPTLSConnection.
For client authentication, use one of these argument
combinations:
- username, password (SRP)
- username, sharedKey (shared-key)
- certChain, privateKey (certificate)
For server authentication, you can either rely on the
implicit mutual authentication performed by SRP or
shared-keys, or you can do certificate-based server
authentication with one of these argument combinations:
- cryptoID[, protocol] (requires cryptoIDlib)
- x509Fingerprint
- x509TrustList[, x509CommonName] (requires cryptlib_py)
Certificate-based server authentication is compatible with
SRP or certificate-based client authentication. It is
not compatible with shared-keys.
The constructor does not perform the TLS handshake itself, but
simply stores these arguments for later. The handshake is
performed only when this class needs to connect with the
server. Thus you should be prepared to handle TLS-specific
exceptions when calling methods inherited from
L{httplib.HTTPConnection} such as request(), connect(), and
send(). See the client handshake functions in
L{tlslite.TLSConnection.TLSConnection} for details on which
exceptions might be raised.
@type host: str
@param host: Server to connect to.
@type port: int
@param port: Port to connect to.
@type username: str
@param username: SRP or shared-key username. Requires the
'password' or 'sharedKey' argument.
@type password: str
@param password: SRP password for mutual authentication.
Requires the 'username' argument.
@type sharedKey: str
@param sharedKey: Shared key for mutual authentication.
Requires the 'username' argument.
@type certChain: L{tlslite.X509CertChain.X509CertChain} or
L{cryptoIDlib.CertChain.CertChain}
@param certChain: Certificate chain for client authentication.
Requires the 'privateKey' argument. Excludes the SRP or
shared-key related arguments.
@type privateKey: L{tlslite.utils.RSAKey.RSAKey}
@param privateKey: Private key for client authentication.
Requires the 'certChain' argument. Excludes the SRP or
shared-key related arguments.
@type cryptoID: str
@param cryptoID: cryptoID for server authentication. Mutually
exclusive with the 'x509...' arguments.
@type protocol: str
@param protocol: cryptoID protocol URI for server
authentication. Requires the 'cryptoID' argument.
@type x509Fingerprint: str
@param x509Fingerprint: Hex-encoded X.509 fingerprint for
server authentication. Mutually exclusive with the 'cryptoID'
and 'x509TrustList' arguments.
@type x509TrustList: list of L{tlslite.X509.X509}
@param x509TrustList: A list of trusted root certificates. The
other party must present a certificate chain which extends to
one of these root certificates. The cryptlib_py module must be
installed to use this parameter. Mutually exclusive with the
'cryptoID' and 'x509Fingerprint' arguments.
@type x509CommonName: str
@param x509CommonName: The end-entity certificate's 'CN' field
must match this value. For a web server, this is typically a
server name such as 'www.amazon.com'. Mutually exclusive with
the 'cryptoID' and 'x509Fingerprint' arguments. Requires the
'x509TrustList' argument.
@type settings: L{tlslite.HandshakeSettings.HandshakeSettings}
@param settings: Various settings which can be used to control
the ciphersuites, certificate types, and SSL/TLS versions
offered by the client.
"""
HTTPBaseTLSConnection.__init__(self, host, port)
ClientHelper.__init__(self,
username, password, sharedKey,
certChain, privateKey,
cryptoID, protocol,
x509Fingerprint,
x509TrustList, x509CommonName,
settings)
def _handshake(self, tlsConnection):
ClientHelper._handshake(self, tlsConnection)
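def _example_srp_get(host):
    """
    Illustrative sketch, not part of the original module: fetch "/" over TLS
    using the SRP username/password combination documented above. The host
    and credentials are placeholders, and the function name is hypothetical.
    """
    conn = HTTPTLSConnection(host, 443, username="alice", password="secret")
    conn.request("GET", "/")
    return conn.getresponse()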
| apache-2.0 | 2,302,011,665,272,650,500 | -6,577,620,190,261,861,000 | 38.455621 | 71 | 0.662268 | false |
sfagmenos/ker | tools/perf/util/setup.py | 766 | 1540 | #!/usr/bin/python2
from distutils.core import setup, Extension
from os import getenv
from distutils.command.build_ext import build_ext as _build_ext
from distutils.command.install_lib import install_lib as _install_lib
class build_ext(_build_ext):
def finalize_options(self):
_build_ext.finalize_options(self)
self.build_lib = build_lib
self.build_temp = build_tmp
class install_lib(_install_lib):
def finalize_options(self):
_install_lib.finalize_options(self)
self.build_dir = build_lib
cflags = getenv('CFLAGS', '').split()
# switch off several checks (need to be at the end of cflags list)
cflags += ['-fno-strict-aliasing', '-Wno-write-strings', '-Wno-unused-parameter' ]
build_lib = getenv('PYTHON_EXTBUILD_LIB')
build_tmp = getenv('PYTHON_EXTBUILD_TMP')
libtraceevent = getenv('LIBTRACEEVENT')
libapikfs = getenv('LIBAPI')
ext_sources = [f.strip() for f in file('util/python-ext-sources')
if len(f.strip()) > 0 and f[0] != '#']
perf = Extension('perf',
sources = ext_sources,
include_dirs = ['util/include'],
extra_compile_args = cflags,
extra_objects = [libtraceevent, libapikfs],
)
setup(name='perf',
version='0.1',
description='Interface with the Linux profiling infrastructure',
author='Arnaldo Carvalho de Melo',
author_email='acme@redhat.com',
license='GPLv2',
url='http://perf.wiki.kernel.org',
ext_modules=[perf],
cmdclass={'build_ext': build_ext, 'install_lib': install_lib})
| gpl-2.0 | 2,289,801,645,684,477,700 | -4,207,667,862,009,479,000 | 31.083333 | 82 | 0.666234 | false |
mpattyn/fumiste | prototypePython/steamapi/requests/adapters.py | 10 | 14863 | # -*- coding: utf-8 -*-
"""
requests.adapters
~~~~~~~~~~~~~~~~~
This module contains the transport adapters that Requests uses to define
and maintain connections.
"""
import socket
from .models import Response
from .packages.urllib3.poolmanager import PoolManager, proxy_from_url
from .packages.urllib3.response import HTTPResponse
from .packages.urllib3.util import Timeout as TimeoutSauce
from .compat import urlparse, basestring, urldefrag, unquote
from .utils import (DEFAULT_CA_BUNDLE_PATH, get_encoding_from_headers,
except_on_missing_scheme, get_auth_from_url)
from .structures import CaseInsensitiveDict
from .packages.urllib3.exceptions import MaxRetryError
from .packages.urllib3.exceptions import TimeoutError
from .packages.urllib3.exceptions import SSLError as _SSLError
from .packages.urllib3.exceptions import HTTPError as _HTTPError
from .packages.urllib3.exceptions import ProxyError as _ProxyError
from .cookies import extract_cookies_to_jar
from .exceptions import ConnectionError, Timeout, SSLError, ProxyError
from .auth import _basic_auth_str
DEFAULT_POOLBLOCK = False
DEFAULT_POOLSIZE = 10
DEFAULT_RETRIES = 0
class BaseAdapter(object):
"""The Base Transport Adapter"""
def __init__(self):
super(BaseAdapter, self).__init__()
def send(self):
raise NotImplementedError
def close(self):
raise NotImplementedError
class HTTPAdapter(BaseAdapter):
"""The built-in HTTP Adapter for urllib3.
Provides a general-case interface for Requests sessions to contact HTTP and
HTTPS urls by implementing the Transport Adapter interface. This class will
usually be created by the :class:`Session <Session>` class under the
covers.
:param pool_connections: The number of urllib3 connection pools to cache.
:param pool_maxsize: The maximum number of connections to save in the pool.
:param int max_retries: The maximum number of retries each connection
should attempt. Note, this applies only to failed connections and
timeouts, never to requests where the server returns a response.
:param pool_block: Whether the connection pool should block for connections.
Usage::
>>> import requests
>>> s = requests.Session()
>>> a = requests.adapters.HTTPAdapter(max_retries=3)
>>> s.mount('http://', a)
"""
__attrs__ = ['max_retries', 'config', '_pool_connections', '_pool_maxsize',
'_pool_block']
def __init__(self, pool_connections=DEFAULT_POOLSIZE,
pool_maxsize=DEFAULT_POOLSIZE, max_retries=DEFAULT_RETRIES,
pool_block=DEFAULT_POOLBLOCK):
self.max_retries = max_retries
self.config = {}
self.proxy_manager = {}
super(HTTPAdapter, self).__init__()
self._pool_connections = pool_connections
self._pool_maxsize = pool_maxsize
self._pool_block = pool_block
self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block)
def __getstate__(self):
return dict((attr, getattr(self, attr, None)) for attr in
self.__attrs__)
def __setstate__(self, state):
# Can't handle by adding 'proxy_manager' to self.__attrs__ because
# because self.poolmanager uses a lambda function, which isn't pickleable.
self.proxy_manager = {}
self.config = {}
for attr, value in state.items():
setattr(self, attr, value)
self.init_poolmanager(self._pool_connections, self._pool_maxsize,
block=self._pool_block)
def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK):
"""Initializes a urllib3 PoolManager. This method should not be called
from user code, and is only exposed for use when subclassing the
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
:param connections: The number of urllib3 connection pools to cache.
:param maxsize: The maximum number of connections to save in the pool.
:param block: Block when no free connections are available.
"""
# save these values for pickling
self._pool_connections = connections
self._pool_maxsize = maxsize
self._pool_block = block
self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize,
block=block)
def cert_verify(self, conn, url, verify, cert):
"""Verify a SSL certificate. This method should not be called from user
code, and is only exposed for use when subclassing the
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
:param conn: The urllib3 connection object associated with the cert.
:param url: The requested URL.
:param verify: Whether we should actually verify the certificate.
:param cert: The SSL certificate to verify.
"""
if url.lower().startswith('https') and verify:
cert_loc = None
# Allow self-specified cert location.
if verify is not True:
cert_loc = verify
if not cert_loc:
cert_loc = DEFAULT_CA_BUNDLE_PATH
if not cert_loc:
raise Exception("Could not find a suitable SSL CA certificate bundle.")
conn.cert_reqs = 'CERT_REQUIRED'
conn.ca_certs = cert_loc
else:
conn.cert_reqs = 'CERT_NONE'
conn.ca_certs = None
if cert:
if not isinstance(cert, basestring):
conn.cert_file = cert[0]
conn.key_file = cert[1]
else:
conn.cert_file = cert
def build_response(self, req, resp):
"""Builds a :class:`Response <requests.Response>` object from a urllib3
response. This should not be called from user code, and is only exposed
for use when subclassing the
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`
:param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response.
:param resp: The urllib3 response object.
"""
response = Response()
# Fallback to None if there's no status_code, for whatever reason.
response.status_code = getattr(resp, 'status', None)
# Make headers case-insensitive.
response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {}))
# Set encoding.
response.encoding = get_encoding_from_headers(response.headers)
response.raw = resp
response.reason = response.raw.reason
if isinstance(req.url, bytes):
response.url = req.url.decode('utf-8')
else:
response.url = req.url
# Add new cookies from the server.
extract_cookies_to_jar(response.cookies, req, resp)
# Give the Response some context.
response.request = req
response.connection = self
return response
def get_connection(self, url, proxies=None):
"""Returns a urllib3 connection for the given URL. This should not be
called from user code, and is only exposed for use when subclassing the
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
:param url: The URL to connect to.
:param proxies: (optional) A Requests-style dictionary of proxies used on this request.
"""
proxies = proxies or {}
proxy = proxies.get(urlparse(url.lower()).scheme)
if proxy:
except_on_missing_scheme(proxy)
proxy_headers = self.proxy_headers(proxy)
            if proxy not in self.proxy_manager:
self.proxy_manager[proxy] = proxy_from_url(
proxy,
proxy_headers=proxy_headers,
num_pools=self._pool_connections,
maxsize=self._pool_maxsize,
block=self._pool_block)
conn = self.proxy_manager[proxy].connection_from_url(url)
else:
# Only scheme should be lower case
parsed = urlparse(url)
url = parsed.geturl()
conn = self.poolmanager.connection_from_url(url)
return conn
def close(self):
"""Disposes of any internal state.
Currently, this just closes the PoolManager, which closes pooled
connections.
"""
self.poolmanager.clear()
def request_url(self, request, proxies):
"""Obtain the url to use when making the final request.
If the message is being sent through a HTTP proxy, the full URL has to
be used. Otherwise, we should only use the path portion of the URL.
This should not be called from user code, and is only exposed for use
when subclassing the
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
:param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
:param proxies: A dictionary of schemes to proxy URLs.
"""
proxies = proxies or {}
scheme = urlparse(request.url).scheme
proxy = proxies.get(scheme)
if proxy and scheme != 'https':
url, _ = urldefrag(request.url)
else:
url = request.path_url
return url
def add_headers(self, request, **kwargs):
"""Add any headers needed by the connection. As of v2.0 this does
nothing by default, but is left for overriding by users that subclass
the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
This should not be called from user code, and is only exposed for use
when subclassing the
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
:param request: The :class:`PreparedRequest <PreparedRequest>` to add headers to.
:param kwargs: The keyword arguments from the call to send().
"""
pass
def proxy_headers(self, proxy):
"""Returns a dictionary of the headers to add to any request sent
through a proxy. This works with urllib3 magic to ensure that they are
correctly sent to the proxy, rather than in a tunnelled request if
CONNECT is being used.
This should not be called from user code, and is only exposed for use
when subclassing the
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
:param proxies: The url of the proxy being used for this request.
:param kwargs: Optional additional keyword arguments.
"""
headers = {}
username, password = get_auth_from_url(proxy)
if username and password:
# Proxy auth usernames and passwords will be urlencoded, we need
# to decode them.
username = unquote(username)
password = unquote(password)
headers['Proxy-Authorization'] = _basic_auth_str(username,
password)
return headers
def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):
"""Sends PreparedRequest object. Returns Response object.
:param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
:param stream: (optional) Whether to stream the request content.
:param timeout: (optional) The timeout on the request.
:param verify: (optional) Whether to verify SSL certificates.
:param cert: (optional) Any user-provided SSL certificate to be trusted.
:param proxies: (optional) The proxies dictionary to apply to the request.
"""
conn = self.get_connection(request.url, proxies)
self.cert_verify(conn, request.url, verify, cert)
url = self.request_url(request, proxies)
self.add_headers(request)
chunked = not (request.body is None or 'Content-Length' in request.headers)
if stream:
timeout = TimeoutSauce(connect=timeout)
else:
timeout = TimeoutSauce(connect=timeout, read=timeout)
try:
if not chunked:
resp = conn.urlopen(
method=request.method,
url=url,
body=request.body,
headers=request.headers,
redirect=False,
assert_same_host=False,
preload_content=False,
decode_content=False,
retries=self.max_retries,
timeout=timeout
)
# Send the request.
else:
if hasattr(conn, 'proxy_pool'):
conn = conn.proxy_pool
low_conn = conn._get_conn(timeout=timeout)
try:
low_conn.putrequest(request.method,
url,
skip_accept_encoding=True)
for header, value in request.headers.items():
low_conn.putheader(header, value)
low_conn.endheaders()
for i in request.body:
low_conn.send(hex(len(i))[2:].encode('utf-8'))
low_conn.send(b'\r\n')
low_conn.send(i)
low_conn.send(b'\r\n')
low_conn.send(b'0\r\n\r\n')
r = low_conn.getresponse()
resp = HTTPResponse.from_httplib(
r,
pool=conn,
connection=low_conn,
preload_content=False,
decode_content=False
)
except:
# If we hit any problems here, clean up the connection.
# Then, reraise so that we can handle the actual exception.
low_conn.close()
raise
else:
# All is well, return the connection to the pool.
conn._put_conn(low_conn)
except socket.error as sockerr:
raise ConnectionError(sockerr)
except MaxRetryError as e:
raise ConnectionError(e)
except _ProxyError as e:
raise ProxyError(e)
except (_SSLError, _HTTPError) as e:
if isinstance(e, _SSLError):
raise SSLError(e)
elif isinstance(e, TimeoutError):
raise Timeout(e)
else:
raise
r = self.build_response(request, resp)
if not stream:
r.content
return r
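class _ExampleSmallPoolAdapter(HTTPAdapter):
    """Illustrative sketch, not part of the original module: the hooks above
    are explicitly exposed for subclassing, so a transport adapter that forces
    a single-connection pool only needs to override init_poolmanager(). The
    class name is hypothetical."""
    def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK):
        super(_ExampleSmallPoolAdapter, self).init_poolmanager(1, 1, block)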
| mit | 5,800,067,306,527,971,000 | 8,980,940,017,846,666,000 | 36.1575 | 97 | 0.592209 | false |
shesselba/linux-berlin | tools/perf/scripts/python/futex-contention.py | 1997 | 1508 | # futex contention
# (c) 2010, Arnaldo Carvalho de Melo <acme@redhat.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Translation of:
#
# http://sourceware.org/systemtap/wiki/WSFutexContention
#
# to perf python scripting.
#
# Measures futex contention
import os, sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + '/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from Util import *
process_names = {}
thread_thislock = {}
thread_blocktime = {}
lock_waits = {} # long-lived stats on (tid,lock) blockage elapsed time
process_names = {} # long-lived pid-to-execname mapping
def syscalls__sys_enter_futex(event, ctxt, cpu, s, ns, tid, comm, callchain,
nr, uaddr, op, val, utime, uaddr2, val3):
cmd = op & FUTEX_CMD_MASK
if cmd != FUTEX_WAIT:
return # we don't care about originators of WAKE events
process_names[tid] = comm
thread_thislock[tid] = uaddr
thread_blocktime[tid] = nsecs(s, ns)
def syscalls__sys_exit_futex(event, ctxt, cpu, s, ns, tid, comm, callchain,
nr, ret):
if thread_blocktime.has_key(tid):
elapsed = nsecs(s, ns) - thread_blocktime[tid]
add_stats(lock_waits, (tid, thread_thislock[tid]), elapsed)
del thread_blocktime[tid]
del thread_thislock[tid]
def trace_begin():
print "Press control+C to stop and show the summary"
def trace_end():
for (tid, lock) in lock_waits:
min, max, avg, count = lock_waits[tid, lock]
print "%s[%d] lock %x contended %d times, %d avg ns" % \
(process_names[tid], tid, lock, count, avg)
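# Illustrative note, not part of the original script: like the other bundled
# perf python scripts, it is normally driven through its record/report
# wrappers, roughly "perf script record futex-contention" followed by
# "perf script report futex-contention"; the exact invocation depends on the
# perf build and is an assumption here, not taken from this file.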
| gpl-2.0 | 1,174,811,497,020,820,700 | -318,638,663,691,767,230 | 29.16 | 96 | 0.690318 | false |
devs1991/test_edx_docmode | venv/lib/python2.7/site-packages/testfixtures/tests/test_roundcomparison.py | 2 | 5182 | # Copyright (c) 2014 Simplistix Ltd
# See license.txt for license details.
from decimal import Decimal
from testfixtures import RoundComparison as R, compare, ShouldRaise
from unittest import TestCase
from ..compat import PY2, PY3
class Tests(TestCase):
def test_equal_yes_rhs(self):
self.assertTrue(0.123457 == R(0.123456, 5))
def test_equal_yes_lhs(self):
self.assertTrue(R(0.123456, 5) == 0.123457)
def test_equal_no_rhs(self):
self.assertFalse(0.123453 == R(0.123456, 5))
def test_equal_no_lhs(self):
self.assertFalse(R(0.123456, 5) == 0.123453)
def test_not_equal_yes_rhs(self):
self.assertFalse(0.123457 != R(0.123456, 5))
def test_not_equal_yes_lhs(self):
self.assertFalse(R(0.123456, 5) != 0.123457)
def test_not_equal_no_rhs(self):
self.assertTrue(0.123453 != R(0.123456, 5))
def test_not_equal_no_lhs(self):
self.assertTrue(R(0.123456, 5) != 0.123453)
def test_equal_in_sequence_rhs(self):
self.assertEqual((1, 2, 0.123457),
(1, 2, R(0.123456, 5)))
def test_equal_in_sequence_lhs(self):
self.assertEqual((1, 2, R(0.123456, 5)),
(1, 2, 0.123457))
def test_not_equal_in_sequence_rhs(self):
self.assertNotEqual((1, 2, 0.1236),
(1, 2, R(0.123456, 5)))
def test_not_equal_in_sequence_lhs(self):
self.assertNotEqual((1, 2, R(0.123456, 5)),
(1, 2, 0.1236))
def test_not_numeric_rhs(self):
with ShouldRaise(TypeError):
'abc' == R(0.123456, 5)
def test_not_numeric_lhs(self):
with ShouldRaise(TypeError):
R(0.123456, 5) == 'abc'
def test_repr(self):
compare('<R:0.12346 to 5 digits>',
repr(R(0.123456, 5)))
def test_str(self):
compare('<R:0.12346 to 5 digits>',
repr(R(0.123456, 5)))
def test_str_negative(self):
if PY3:
expected = '<R:123500 to -2 digits>'
else:
expected = '<R:123500.0 to -2 digits>'
compare(expected, repr(R(123456, -2)))
TYPE_ERROR_DECIMAL = TypeError(
"Cannot compare <R:0.12346 to 5 digits> with <class 'decimal.Decimal'>"
)
def test_equal_yes_decimal_to_float_rhs(self):
with ShouldRaise(self.TYPE_ERROR_DECIMAL, unless=PY2):
self.assertTrue(Decimal("0.123457") == R(0.123456, 5))
def test_equal_yes_decimal_to_float_lhs(self):
with ShouldRaise(self.TYPE_ERROR_DECIMAL, unless=PY2):
self.assertTrue(R(0.123456, 5) == Decimal("0.123457"))
def test_equal_no_decimal_to_float_rhs(self):
with ShouldRaise(self.TYPE_ERROR_DECIMAL, unless=PY2):
self.assertFalse(Decimal("0.123453") == R(0.123456, 5))
def test_equal_no_decimal_to_float_lhs(self):
with ShouldRaise(self.TYPE_ERROR_DECIMAL, unless=PY2):
self.assertFalse(R(0.123456, 5) == Decimal("0.123453"))
TYPE_ERROR_FLOAT = TypeError(
"Cannot compare <R:0.12346 to 5 digits> with <class 'float'>"
)
def test_equal_yes_float_to_decimal_rhs(self):
with ShouldRaise(self.TYPE_ERROR_FLOAT, unless=PY2):
self.assertTrue(0.123457 == R(Decimal("0.123456"), 5))
def test_equal_yes_float_to_decimal_lhs(self):
with ShouldRaise(self.TYPE_ERROR_FLOAT, unless=PY2):
self.assertTrue(R(Decimal("0.123456"), 5) == 0.123457)
def test_equal_no_float_to_decimal_rhs(self):
with ShouldRaise(self.TYPE_ERROR_FLOAT, unless=PY2):
self.assertFalse(0.123453 == R(Decimal("0.123456"), 5))
def test_equal_no_float_to_decimal_lhs(self):
with ShouldRaise(self.TYPE_ERROR_FLOAT, unless=PY2):
self.assertFalse(R(Decimal("0.123456"), 5) == 0.123453)
def test_integer_float(self):
with ShouldRaise(TypeError, unless=PY2):
1 == R(1.000001, 5)
def test_float_integer(self):
with ShouldRaise(TypeError, unless=PY2):
R(1.000001, 5) == 1
def test_equal_yes_integer_other_rhs(self):
self.assertTrue(10 == R(11, -1))
def test_equal_yes_integer_lhs(self):
self.assertTrue(R(11, -1) == 10)
def test_equal_no_integer_rhs(self):
self.assertFalse(10 == R(16, -1))
def test_equal_no_integer_lhs(self):
self.assertFalse(R(16, -1) == 10)
def test_equal_integer_zero_precision(self):
self.assertTrue(1 == R(1, 0))
def test_equal_yes_negative_precision(self):
self.assertTrue(149.123 == R(101.123, -2))
def test_equal_no_negative_precision(self):
self.assertFalse(149.123 == R(150.001, -2))
def test_decimal_yes_rhs(self):
self.assertTrue(Decimal('0.123457') == R(Decimal('0.123456'), 5))
def test_decimal_yes_lhs(self):
self.assertTrue(R(Decimal('0.123456'), 5) == Decimal('0.123457'))
def test_decimal_no_rhs(self):
self.assertFalse(Decimal('0.123453') == R(Decimal('0.123456'), 5))
def test_decimal_no_lhs(self):
self.assertFalse(R(Decimal('0.123456'), 5) == Decimal('0.123453'))
| agpl-3.0 | 4,199,761,178,725,058,000 | -6,623,047,996,357,978,000 | 32.432258 | 79 | 0.594944 | false |
40223210/w16b_test | static/Brython3.1.1-20150328-091302/Lib/collections/__init__.py | 625 | 25849 | #__all__ = ['deque', 'defaultdict', 'Counter']
from _collections import deque, defaultdict
#from itertools import repeat as _repeat, chain as _chain, starmap as _starmap
__all__ = ['deque', 'defaultdict', 'namedtuple', 'UserDict', 'UserList',
'UserString', 'Counter', 'OrderedDict']
# For bootstrapping reasons, the collection ABCs are defined in _abcoll.py.
# They should however be considered an integral part of collections.py.
# fixme brython.. there is an issue with _abcoll
#from _abcoll import *
#from _abcoll import Set
from _abcoll import MutableMapping
#import _abcoll
#__all__ += _abcoll.__all__
from collections.abc import *
import collections.abc
__all__ += collections.abc.__all__
from _collections import deque, defaultdict, namedtuple
from operator import itemgetter as _itemgetter
from keyword import iskeyword as _iskeyword
import sys as _sys
import heapq as _heapq
#fixme brython
#from weakref import proxy as _proxy
from itertools import repeat as _repeat, chain as _chain, starmap as _starmap
from reprlib import recursive_repr as _recursive_repr
class Set(set):
pass
class Sequence(list):
pass
def _proxy(obj):
return obj
################################################################################
### OrderedDict
################################################################################
class _Link(object):
__slots__ = 'prev', 'next', 'key', '__weakref__'
class OrderedDict(dict):
'Dictionary that remembers insertion order'
# An inherited dict maps keys to values.
# The inherited dict provides __getitem__, __len__, __contains__, and get.
# The remaining methods are order-aware.
# Big-O running times for all methods are the same as regular dictionaries.
# The internal self.__map dict maps keys to links in a doubly linked list.
# The circular doubly linked list starts and ends with a sentinel element.
# The sentinel element never gets deleted (this simplifies the algorithm).
# The sentinel is in self.__hardroot with a weakref proxy in self.__root.
# The prev links are weakref proxies (to prevent circular references).
# Individual links are kept alive by the hard reference in self.__map.
# Those hard references disappear when a key is deleted from an OrderedDict.
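    # Illustrative sketch of the structure described above: after
    #     od = OrderedDict(); od['a'] = 1; od['b'] = 2
    # the circular list looks like
    #     root <-> link('a') <-> link('b') <-> root
    # so __iter__ simply follows .next pointers from root until it returns to
    # root, yielding keys in insertion order.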
def __init__(self, *args, **kwds):
'''Initialize an ordered dictionary. The signature is the same as
regular dictionaries, but keyword arguments are not recommended because
their insertion order is arbitrary.
'''
if len(args) > 1:
raise TypeError('expected at most 1 arguments, got %d' % len(args))
try:
self.__root
except AttributeError:
self.__hardroot = _Link()
self.__root = root = _proxy(self.__hardroot)
root.prev = root.next = root
self.__map = {}
self.__update(*args, **kwds)
def __setitem__(self, key, value,
dict_setitem=dict.__setitem__, proxy=_proxy, Link=_Link):
'od.__setitem__(i, y) <==> od[i]=y'
# Setting a new item creates a new link at the end of the linked list,
# and the inherited dictionary is updated with the new key/value pair.
if key not in self:
self.__map[key] = link = Link()
root = self.__root
last = root.prev
link.prev, link.next, link.key = last, root, key
last.next = link
root.prev = proxy(link)
dict_setitem(self, key, value)
def __delitem__(self, key, dict_delitem=dict.__delitem__):
'od.__delitem__(y) <==> del od[y]'
# Deleting an existing item uses self.__map to find the link which gets
# removed by updating the links in the predecessor and successor nodes.
dict_delitem(self, key)
link = self.__map.pop(key)
link_prev = link.prev
link_next = link.next
link_prev.next = link_next
link_next.prev = link_prev
def __iter__(self):
'od.__iter__() <==> iter(od)'
# Traverse the linked list in order.
root = self.__root
curr = root.next
while curr is not root:
yield curr.key
curr = curr.next
def __reversed__(self):
'od.__reversed__() <==> reversed(od)'
# Traverse the linked list in reverse order.
root = self.__root
curr = root.prev
while curr is not root:
yield curr.key
curr = curr.prev
def clear(self):
'od.clear() -> None. Remove all items from od.'
root = self.__root
root.prev = root.next = root
self.__map.clear()
dict.clear(self)
def popitem(self, last=True):
'''od.popitem() -> (k, v), return and remove a (key, value) pair.
Pairs are returned in LIFO order if last is true or FIFO order if false.
'''
if not self:
raise KeyError('dictionary is empty')
root = self.__root
if last:
link = root.prev
link_prev = link.prev
link_prev.next = root
root.prev = link_prev
else:
link = root.next
link_next = link.next
root.next = link_next
link_next.prev = root
key = link.key
del self.__map[key]
value = dict.pop(self, key)
return key, value
def move_to_end(self, key, last=True):
'''Move an existing element to the end (or beginning if last==False).
Raises KeyError if the element does not exist.
When last=True, acts like a fast version of self[key]=self.pop(key).
'''
link = self.__map[key]
link_prev = link.prev
link_next = link.next
link_prev.next = link_next
link_next.prev = link_prev
root = self.__root
if last:
last = root.prev
link.prev = last
link.next = root
last.next = root.prev = link
else:
first = root.next
link.prev = root
link.next = first
root.next = first.prev = link
def __sizeof__(self):
sizeof = _sys.getsizeof
n = len(self) + 1 # number of links including root
size = sizeof(self.__dict__) # instance dictionary
size += sizeof(self.__map) * 2 # internal dict and inherited dict
size += sizeof(self.__hardroot) * n # link objects
size += sizeof(self.__root) * n # proxy objects
return size
#fixme brython.. Issue with _abcoll, which contains MutableMapping
update = __update = MutableMapping.update
keys = MutableMapping.keys
values = MutableMapping.values
items = MutableMapping.items
__ne__ = MutableMapping.__ne__
__marker = object()
def pop(self, key, default=__marker):
'''od.pop(k[,d]) -> v, remove specified key and return the corresponding
value. If key is not found, d is returned if given, otherwise KeyError
is raised.
'''
if key in self:
result = self[key]
del self[key]
return result
if default is self.__marker:
raise KeyError(key)
return default
def setdefault(self, key, default=None):
'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
if key in self:
return self[key]
self[key] = default
return default
#fixme, brython issue
#@_recursive_repr()
def __repr__(self):
'od.__repr__() <==> repr(od)'
if not self:
return '%s()' % (self.__class__.__name__,)
return '%s(%r)' % (self.__class__.__name__, list(self.items()))
def __reduce__(self):
'Return state information for pickling'
items = [[k, self[k]] for k in self]
inst_dict = vars(self).copy()
for k in vars(OrderedDict()):
inst_dict.pop(k, None)
if inst_dict:
return (self.__class__, (items,), inst_dict)
return self.__class__, (items,)
def copy(self):
'od.copy() -> a shallow copy of od'
return self.__class__(self)
@classmethod
def fromkeys(cls, iterable, value=None):
'''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S.
If not specified, the value defaults to None.
'''
self = cls()
for key in iterable:
self[key] = value
return self
def __eq__(self, other):
'''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive
while comparison to a regular mapping is order-insensitive.
'''
if isinstance(other, OrderedDict):
return len(self)==len(other) and \
all(p==q for p, q in zip(self.items(), other.items()))
return dict.__eq__(self, other)
########################################################################
### Counter
########################################################################
def _count_elements(mapping, iterable):
'Tally elements from the iterable.'
mapping_get = mapping.get
for elem in iterable:
mapping[elem] = mapping_get(elem, 0) + 1
#try: # Load C helper function if available
# from _collections import _count_elements
#except ImportError:
# pass
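# Minimal usage sketch for the pure-Python fallback above (the C helper import
# is commented out here, so this is what Counter.update() uses to tally an
# iterable):
#
#     d = {}
#     _count_elements(d, 'abracadabra')
#     # d == {'a': 5, 'b': 2, 'r': 2, 'c': 1, 'd': 1}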
class Counter(dict):
'''Dict subclass for counting hashable items. Sometimes called a bag
or multiset. Elements are stored as dictionary keys and their counts
are stored as dictionary values.
>>> c = Counter('abcdeabcdabcaba') # count elements from a string
>>> c.most_common(3) # three most common elements
[('a', 5), ('b', 4), ('c', 3)]
>>> sorted(c) # list all unique elements
['a', 'b', 'c', 'd', 'e']
>>> ''.join(sorted(c.elements())) # list elements with repetitions
'aaaaabbbbcccdde'
>>> sum(c.values()) # total of all counts
15
>>> c['a'] # count of letter 'a'
5
>>> for elem in 'shazam': # update counts from an iterable
... c[elem] += 1 # by adding 1 to each element's count
>>> c['a'] # now there are seven 'a'
7
>>> del c['b'] # remove all 'b'
>>> c['b'] # now there are zero 'b'
0
>>> d = Counter('simsalabim') # make another counter
>>> c.update(d) # add in the second counter
>>> c['a'] # now there are nine 'a'
9
>>> c.clear() # empty the counter
>>> c
Counter()
Note: If a count is set to zero or reduced to zero, it will remain
in the counter until the entry is deleted or the counter is cleared:
>>> c = Counter('aaabbc')
>>> c['b'] -= 2 # reduce the count of 'b' by two
>>> c.most_common() # 'b' is still in, but its count is zero
[('a', 3), ('c', 1), ('b', 0)]
'''
# References:
# http://en.wikipedia.org/wiki/Multiset
# http://www.gnu.org/software/smalltalk/manual-base/html_node/Bag.html
# http://www.demo2s.com/Tutorial/Cpp/0380__set-multiset/Catalog0380__set-multiset.htm
# http://code.activestate.com/recipes/259174/
# Knuth, TAOCP Vol. II section 4.6.3
def __init__(self, iterable=None, **kwds):
'''Create a new, empty Counter object. And if given, count elements
from an input iterable. Or, initialize the count from another mapping
of elements to their counts.
>>> c = Counter() # a new, empty counter
>>> c = Counter('gallahad') # a new counter from an iterable
>>> c = Counter({'a': 4, 'b': 2}) # a new counter from a mapping
>>> c = Counter(a=4, b=2) # a new counter from keyword args
'''
#super().__init__() #BE modified since super not supported
dict.__init__(self)
self.update(iterable, **kwds)
def __missing__(self, key):
'The count of elements not in the Counter is zero.'
# Needed so that self[missing_item] does not raise KeyError
return 0
def most_common(self, n=None):
'''List the n most common elements and their counts from the most
common to the least. If n is None, then list all element counts.
>>> Counter('abcdeabcdabcaba').most_common(3)
[('a', 5), ('b', 4), ('c', 3)]
'''
# Emulate Bag.sortedByCount from Smalltalk
if n is None:
return sorted(self.items(), key=_itemgetter(1), reverse=True)
return _heapq.nlargest(n, self.items(), key=_itemgetter(1))
def elements(self):
'''Iterator over elements repeating each as many times as its count.
>>> c = Counter('ABCABC')
>>> sorted(c.elements())
['A', 'A', 'B', 'B', 'C', 'C']
# Knuth's example for prime factors of 1836: 2**2 * 3**3 * 17**1
>>> prime_factors = Counter({2: 2, 3: 3, 17: 1})
>>> product = 1
>>> for factor in prime_factors.elements(): # loop over factors
... product *= factor # and multiply them
>>> product
1836
Note, if an element's count has been set to zero or is a negative
number, elements() will ignore it.
'''
# Emulate Bag.do from Smalltalk and Multiset.begin from C++.
return _chain.from_iterable(_starmap(_repeat, self.items()))
# Override dict methods where necessary
@classmethod
def fromkeys(cls, iterable, v=None):
# There is no equivalent method for counters because setting v=1
# means that no element can have a count greater than one.
raise NotImplementedError(
'Counter.fromkeys() is undefined. Use Counter(iterable) instead.')
def update(self, iterable=None, **kwds):
'''Like dict.update() but add counts instead of replacing them.
Source can be an iterable, a dictionary, or another Counter instance.
>>> c = Counter('which')
>>> c.update('witch') # add elements from another iterable
>>> d = Counter('watch')
>>> c.update(d) # add elements from another counter
>>> c['h'] # four 'h' in which, witch, and watch
4
'''
# The regular dict.update() operation makes no sense here because the
        # replace behavior results in some of the original untouched counts
        # being mixed-in with all of the other counts for a mishmash that
        # doesn't have a straightforward interpretation in most counting
# contexts. Instead, we implement straight-addition. Both the inputs
# and outputs are allowed to contain zero and negative counts.
if iterable is not None:
if isinstance(iterable, Mapping):
if self:
self_get = self.get
for elem, count in iterable.items():
self[elem] = count + self_get(elem, 0)
else:
super().update(iterable) # fast path when counter is empty
else:
_count_elements(self, iterable)
if kwds:
self.update(kwds)
def subtract(self, iterable=None, **kwds):
'''Like dict.update() but subtracts counts instead of replacing them.
Counts can be reduced below zero. Both the inputs and outputs are
allowed to contain zero and negative counts.
Source can be an iterable, a dictionary, or another Counter instance.
>>> c = Counter('which')
>>> c.subtract('witch') # subtract elements from another iterable
>>> c.subtract(Counter('watch')) # subtract elements from another counter
>>> c['h'] # 2 in which, minus 1 in witch, minus 1 in watch
0
>>> c['w'] # 1 in which, minus 1 in witch, minus 1 in watch
-1
'''
if iterable is not None:
self_get = self.get
if isinstance(iterable, Mapping):
for elem, count in iterable.items():
self[elem] = self_get(elem, 0) - count
else:
for elem in iterable:
self[elem] = self_get(elem, 0) - 1
if kwds:
self.subtract(kwds)
def copy(self):
'Return a shallow copy.'
return self.__class__(self)
def __reduce__(self):
return self.__class__, (dict(self),)
def __delitem__(self, elem):
'Like dict.__delitem__() but does not raise KeyError for missing values.'
if elem in self:
super().__delitem__(elem)
def __repr__(self):
if not self:
return '%s()' % self.__class__.__name__
try:
items = ', '.join(map('%r: %r'.__mod__, self.most_common()))
return '%s({%s})' % (self.__class__.__name__, items)
except TypeError:
# handle case where values are not orderable
return '{0}({1!r})'.format(self.__class__.__name__, dict(self))
# Multiset-style mathematical operations discussed in:
# Knuth TAOCP Volume II section 4.6.3 exercise 19
# and at http://en.wikipedia.org/wiki/Multiset
#
# Outputs guaranteed to only include positive counts.
#
# To strip negative and zero counts, add-in an empty counter:
# c += Counter()
def __add__(self, other):
'''Add counts from two counters.
>>> Counter('abbb') + Counter('bcc')
Counter({'b': 4, 'c': 2, 'a': 1})
'''
if not isinstance(other, Counter):
return NotImplemented
result = Counter()
for elem, count in self.items():
newcount = count + other[elem]
if newcount > 0:
result[elem] = newcount
for elem, count in other.items():
if elem not in self and count > 0:
result[elem] = count
return result
def __sub__(self, other):
''' Subtract count, but keep only results with positive counts.
>>> Counter('abbbc') - Counter('bccd')
Counter({'b': 2, 'a': 1})
'''
if not isinstance(other, Counter):
return NotImplemented
result = Counter()
for elem, count in self.items():
newcount = count - other[elem]
if newcount > 0:
result[elem] = newcount
for elem, count in other.items():
if elem not in self and count < 0:
result[elem] = 0 - count
return result
def __or__(self, other):
        '''Union is the maximum of the values in either of the input counters.
>>> Counter('abbb') | Counter('bcc')
Counter({'b': 3, 'c': 2, 'a': 1})
'''
if not isinstance(other, Counter):
return NotImplemented
result = Counter()
for elem, count in self.items():
other_count = other[elem]
newcount = other_count if count < other_count else count
if newcount > 0:
result[elem] = newcount
for elem, count in other.items():
if elem not in self and count > 0:
result[elem] = count
return result
def __and__(self, other):
''' Intersection is the minimum of corresponding counts.
>>> Counter('abbb') & Counter('bcc')
Counter({'b': 1})
'''
if not isinstance(other, Counter):
return NotImplemented
result = Counter()
for elem, count in self.items():
other_count = other[elem]
newcount = count if count < other_count else other_count
if newcount > 0:
result[elem] = newcount
return result
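# Illustrative sketch of the "strip negative and zero counts" idiom mentioned
# above, assuming only the documented behaviour of the binary operators:
#
#     c = Counter(a=2, b=-4, c=0)
#     c + Counter()        # Counter({'a': 2}) -- zero/negative entries dropped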
########################################################################
### ChainMap (helper for configparser)
########################################################################
class ChainMap(MutableMapping):
''' A ChainMap groups multiple dicts (or other mappings) together
to create a single, updateable view.
The underlying mappings are stored in a list. That list is public and can
    be accessed or updated using the *maps* attribute.  There is no other state.
Lookups search the underlying mappings successively until a key is found.
In contrast, writes, updates, and deletions only operate on the first
mapping.
'''
def __init__(self, *maps):
'''Initialize a ChainMap by setting *maps* to the given mappings.
If no mappings are provided, a single empty dictionary is used.
'''
self.maps = list(maps) or [{}] # always at least one map
def __missing__(self, key):
raise KeyError(key)
def __getitem__(self, key):
for mapping in self.maps:
try:
return mapping[key] # can't use 'key in mapping' with defaultdict
except KeyError:
pass
return self.__missing__(key) # support subclasses that define __missing__
def get(self, key, default=None):
return self[key] if key in self else default
def __len__(self):
return len(set().union(*self.maps)) # reuses stored hash values if possible
def __iter__(self):
return iter(set().union(*self.maps))
def __contains__(self, key):
return any(key in m for m in self.maps)
def __bool__(self):
return any(self.maps)
#fixme, brython
#@_recursive_repr()
def __repr__(self):
return '{0.__class__.__name__}({1})'.format(
self, ', '.join(map(repr, self.maps)))
def __repr__(self):
return ','.join(str(_map) for _map in self.maps)
@classmethod
def fromkeys(cls, iterable, *args):
'Create a ChainMap with a single dict created from the iterable.'
return cls(dict.fromkeys(iterable, *args))
def copy(self):
'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]'
return self.__class__(self.maps[0].copy(), *self.maps[1:])
__copy__ = copy
def new_child(self): # like Django's Context.push()
'New ChainMap with a new dict followed by all previous maps.'
return self.__class__({}, *self.maps)
@property
def parents(self): # like Django's Context.pop()
'New ChainMap from maps[1:].'
return self.__class__(*self.maps[1:])
def __setitem__(self, key, value):
self.maps[0][key] = value
def __delitem__(self, key):
try:
del self.maps[0][key]
except KeyError:
raise KeyError('Key not found in the first mapping: {!r}'.format(key))
def popitem(self):
        'Remove and return an item pair from maps[0]. Raise KeyError if maps[0] is empty.'
try:
return self.maps[0].popitem()
except KeyError:
raise KeyError('No keys found in the first mapping.')
def pop(self, key, *args):
'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].'
try:
return self.maps[0].pop(key, *args)
except KeyError:
#raise KeyError('Key not found in the first mapping: {!r}'.format(key))
raise KeyError('Key not found in the first mapping: %s' % key)
def clear(self):
'Clear maps[0], leaving maps[1:] intact.'
self.maps[0].clear()
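# Minimal usage sketch, assuming only the behaviour documented in the class
# docstring (lookups search the chain, writes go to the first mapping):
#
#     defaults  = {'color': 'red', 'user': 'guest'}
#     overrides = {'user': 'admin'}
#     cm = ChainMap(overrides, defaults)
#     cm['user']            # 'admin' -- found in the first mapping
#     cm['color']           # 'red'   -- falls through to the next mapping
#     cm['color'] = 'blue'  # writes only touch maps[0] (overrides)
#     defaults['color']     # still 'red'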
################################################################################
### UserDict
################################################################################
class UserDict(MutableMapping):
# Start by filling-out the abstract methods
def __init__(self, dict=None, **kwargs):
self.data = {}
if dict is not None:
self.update(dict)
if len(kwargs):
self.update(kwargs)
def __len__(self): return len(self.data)
def __getitem__(self, key):
if key in self.data:
return self.data[key]
if hasattr(self.__class__, "__missing__"):
return self.__class__.__missing__(self, key)
raise KeyError(key)
def __setitem__(self, key, item): self.data[key] = item
def __delitem__(self, key): del self.data[key]
def __iter__(self):
return iter(self.data)
# Modify __contains__ to work correctly when __missing__ is present
def __contains__(self, key):
return key in self.data
# Now, add the methods in dicts but not in MutableMapping
def __repr__(self): return repr(self.data)
def copy(self):
if self.__class__ is UserDict:
return UserDict(self.data.copy())
import copy
data = self.data
try:
self.data = {}
c = copy.copy(self)
finally:
self.data = data
c.update(self)
return c
@classmethod
def fromkeys(cls, iterable, value=None):
d = cls()
for key in iterable:
d[key] = value
return d
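# Illustrative sketch: because __getitem__ above checks for a __missing__
# hook, a UserDict subclass can supply defaults much like defaultdict does
# (ZeroDict is a hypothetical subclass, shown only as an example):
#
#     class ZeroDict(UserDict):
#         def __missing__(self, key):
#             return 0
#
#     z = ZeroDict()
#     z['absent']          # 0, and 'absent' is not inserted into z.data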
################################################################################
### UserList
################################################################################
################################################################################
### UserString
################################################################################
| gpl-3.0 | 4,652,953,645,605,477,000 | -7,439,167,285,095,059,000 | 34.555708 | 97 | 0.53681 | false |
woodpecker1/phantomjs | src/qt/qtwebkit/Tools/Scripts/webkitpy/port/server_process_unittest.py | 121 | 5514 | # Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import sys
import time
import unittest2 as unittest
from webkitpy.port.factory import PortFactory
from webkitpy.port import server_process
from webkitpy.common.system.systemhost import SystemHost
from webkitpy.common.system.systemhost_mock import MockSystemHost
from webkitpy.common.system.outputcapture import OutputCapture
class TrivialMockPort(object):
def __init__(self):
self.host = MockSystemHost()
self.host.executive.kill_process = lambda x: None
def results_directory(self):
return "/mock-results"
def check_for_leaks(self, process_name, process_pid):
pass
def process_kill_time(self):
return 1
class MockFile(object):
def __init__(self, server_process):
self._server_process = server_process
self.closed = False
def fileno(self):
return 1
def write(self, line):
self._server_process.broken_pipes.append(self)
raise IOError
def close(self):
self.closed = True
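# Note on the mock above: MockFile.write() always raises IOError and records
# the file in server_process.broken_pipes, which is how test_broken_pipe below
# simulates writing to a child process whose pipe has closed.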
class MockProc(object):
def __init__(self, server_process):
self.stdin = MockFile(server_process)
self.stdout = MockFile(server_process)
self.stderr = MockFile(server_process)
self.pid = 1
def poll(self):
return 1
def wait(self):
return 0
class FakeServerProcess(server_process.ServerProcess):
def _start(self):
self._proc = MockProc(self)
self.stdin = self._proc.stdin
self.stdout = self._proc.stdout
self.stderr = self._proc.stderr
self._pid = self._proc.pid
self.broken_pipes = []
class TestServerProcess(unittest.TestCase):
def test_basic(self):
cmd = [sys.executable, '-c', 'import sys; import time; time.sleep(0.02); print "stdout"; sys.stdout.flush(); print >>sys.stderr, "stderr"']
host = SystemHost()
factory = PortFactory(host)
port = factory.get()
now = time.time()
proc = server_process.ServerProcess(port, 'python', cmd)
proc.write('')
self.assertEqual(proc.poll(), None)
self.assertFalse(proc.has_crashed())
# check that doing a read after an expired deadline returns
# nothing immediately.
line = proc.read_stdout_line(now - 1)
self.assertEqual(line, None)
# FIXME: This part appears to be flaky. line should always be non-None.
# FIXME: https://bugs.webkit.org/show_bug.cgi?id=88280
line = proc.read_stdout_line(now + 1.0)
if line:
self.assertEqual(line.strip(), "stdout")
line = proc.read_stderr_line(now + 1.0)
if line:
self.assertEqual(line.strip(), "stderr")
proc.stop(0)
def test_cleanup(self):
port_obj = TrivialMockPort()
server_process = FakeServerProcess(port_obj=port_obj, name="test", cmd=["test"])
server_process._start()
server_process.stop()
self.assertTrue(server_process.stdin.closed)
self.assertTrue(server_process.stdout.closed)
self.assertTrue(server_process.stderr.closed)
def test_broken_pipe(self):
port_obj = TrivialMockPort()
port_obj.host.platform.os_name = 'win'
server_process = FakeServerProcess(port_obj=port_obj, name="test", cmd=["test"])
server_process.write("should break")
self.assertTrue(server_process.has_crashed())
self.assertIsNotNone(server_process.pid())
self.assertIsNone(server_process._proc)
self.assertEqual(server_process.broken_pipes, [server_process.stdin])
port_obj.host.platform.os_name = 'mac'
server_process = FakeServerProcess(port_obj=port_obj, name="test", cmd=["test"])
server_process.write("should break")
self.assertTrue(server_process.has_crashed())
self.assertIsNone(server_process._proc)
self.assertEqual(server_process.broken_pipes, [server_process.stdin])
| bsd-3-clause | 8,628,579,739,072,046,000 | -5,129,836,375,445,347,000 | 35.276316 | 147 | 0.68317 | false |
itkvideo/ITK | Modules/Nonunit/IntegratedTest/test/itkCurvatureFlowTestPython2.py | 41 | 3475 | #==========================================================================
#
# Copyright Insight Software Consortium
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0.txt
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#==========================================================================*/
from InsightToolkit import *
import itktesting
import sys
import os
import shutil
basename = os.path.basename( sys.argv[0] )
name = os.path.splitext( basename )[0]
dir = "Algorithms"
testInput = itktesting.ITK_TEST_INPUT
testOutput = itktesting.ITK_TEST_OUTPUT
baseLine = itktesting.ITK_TEST_BASELINE
reader = itkImageFileReaderF2_New()
reader.SetFileName( testInput+"/cthead1.png")
cf = itkCurvatureFlowImageFilterF2F2_New()
cf.SetInput( reader.GetOutput() )
cf.SetTimeStep( 0.25 )
cf.SetNumberOfIterations( 10 )
cfss = itkShiftScaleImageFilterF2US2_New()
cfss.SetInput( cf.GetOutput() )
cfss.SetShift( 0.7 )
cfss.SetScale( 0.9 )
valid = itkImageFileReaderUS2_New()
valid.SetFileName( baseLine+"/"+dir+"/"+name+".png")
diff = itkDifferenceImageFilterUS2_New()
diff.SetValidInput( valid.GetOutput() )
diff.SetTestInput( cfss.GetOutput() )
diff.SetToleranceRadius( 1 )
diff.SetDifferenceThreshold( 0 )
diff.Update()
meanDiff = diff.GetMeanDifference()
totalDiff = diff.GetTotalDifference()
print "MeanDifference = ", meanDiff
print "TotalDifference = ", totalDiff
print "<DartMeasurement name=\"MeanDifference\" type=\"numeric/double\">",meanDiff,"</DartMeasurement>"
print "<DartMeasurement name=\"TotalDifference\" type=\"numeric/double\">",totalDiff,"</DartMeasurement>"
if ( meanDiff > 0.1 ) :
convert = itkCastImageFilterUS2UC2_New()
rescale = itkRescaleIntensityImageFilterUS2UC2_New()
rescale.SetInput( diff.GetOutput() )
rescale.SetOutputMinimum( 0 )
rescale.SetOutputMaximum( 255 )
io = itkPNGImageIO_New()
io.SetUseCompression( 1 )
io.SetCompressionLevel( 9 )
writer = itkImageFileWriterUC2_New()
writer.SetImageIO( io.GetPointer() )
writer.SetInput( convert.GetOutput() )
writer.SetFileName( testOutput+"/"+name+".test.png" )
convert.SetInput( cfss.GetOutput() )
writer.Write()
writer.SetFileName( testOutput+"/"+name+".diff.png" )
writer.SetInput( rescale.GetOutput() )
writer.Write()
shutil.copyfile( baseLine+"/"+dir+"/"+name+".png", testOutput+"/"+name+".valid.png" )
print "<DartMeasurementFile name=\"TestImage\" type=\"image/png\">"+testOutput+"/"+name+".test.png</DartMeasurementFile>"
print "<DartMeasurementFile name=\"DifferenceImage\" type=\"image/png\">"+testOutput+"/"+name+".diff.png</DartMeasurementFile>"
print "<DartMeasurementFile name=\"ValidImage\" type=\"image/png\">"+testOutput+"/"+name+".valid.png</DartMeasurementFile>"
print "<DartMeasurement name=\"DifferenceShift\" type=\"numeric/double\">",rescale.GetShift(),"</DartMeasurement>"
print "<DartMeasurement name=\"DifferenceScale\" type=\"numeric/double\">",rescale.GetScale(),"</DartMeasurement>"
# return 1
#return 0
| apache-2.0 | -1,759,630,087,037,905,700 | -7,801,538,441,589,399,000 | 30.306306 | 129 | 0.697266 | false |
csieg/ardupilot | libraries/AP_Math/tools/geodesic_grid/geodesic_grid.py | 89 | 8697 | #!/usr/bin/python
# Copyright (C) 2016 Intel Corporation. All rights reserved.
#
# This file is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This file is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function
import argparse
import numpy as np
import sys
import icosahedron as ico
import grid
def print_code_gen_notice():
print("/* This was generated with")
print(" * libraries/AP_Math/tools/geodesic_grid/geodesic_grid.py */")
def header_neighbor_umbrella(index):
t = ico.triangles[0]
a, b, c = t
triangle, edge = (
( t, ( a, b)),
( t, ( b, c)),
( t, ( c, a)),
(-t, (-a, -b)),
(-t, (-b, -c)),
(-t, (-c, -a)),
)[index]
return ico.neighbor_umbrella(triangle, edge), edge
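# Note: header_neighbor_umbrella(index) maps an index in [0, 6) to one edge of
# icosahedron triangle 0 (or of its opposite -t for indexes 3-5) and returns
# the corresponding ico.neighbor_umbrella(triangle, edge) together with that
# edge; it mirrors the _neighbor_umbrellas table generated for
# AP_GeodesicGrid.h by --neighbor-umbrella-gen below.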
parser = argparse.ArgumentParser(
description="""
Utility script for helping to understand concepts used by AP_GeodesicGrid as
well as for aiding its development.
When passing a vertex as argument to one of the options, the valid values for
the coordinates are 0, -1, 1, g and -g, where g is the golden ratio.
""",
)
parser.add_argument(
'-p', '--plot',
action='store_true',
help="""
Plot results when applicable.
""",
)
parser.add_argument(
'-b', '--plot-subtriangles',
action='store_true',
help="""
Plot subtriangles as well. This implies -p.
""",
)
parser.add_argument(
'--icosahedron',
action='store_true',
help='Get the icosahedron triangles.',
)
parser.add_argument(
'-t', '--triangle',
action='append',
type=int,
nargs='+',
metavar='INDEX',
help="""
Get the icosahedron triangle at INDEX.
""",
)
parser.add_argument(
'-s', '--section',
action='append',
type=int,
nargs='+',
help="""
Get the grid section SECTION. If --plot is passed, then --plot-subtriangles is
implied.
""",
)
parser.add_argument(
'-u', '--umbrella',
action='append',
nargs=3,
metavar=('X', 'Y', 'Z'),
help="""
Get the umbrella with pivot denoted by (X, Y, Z). The pivot must be one of the
icosahedron's vertices.
""",
)
parser.add_argument(
'-n', '--neighbor-umbrella',
action='append',
nargs='+',
metavar='INDEX',
help="""
Get the neighbor umbrella at INDEX as described by _neighbor_umbrellas in
AP_GeodesicGrid.h. The special value "all" for INDEX is also accepted, which
will make it ignore other indexes passed and get all neighbor umbrellas for
that member.
""",
)
parser.add_argument(
'--neighbor-umbrella-gen',
action='store_true',
help="""
Generate C++ code for the initialization of the member _neighbor_umbrellas
described in AP_GeodesicGrid.h.
""",
)
parser.add_argument(
'--inverses-gen',
action='store_true',
help="""
Generate C++ code for the initialization of members _inverses and _mid_inverses
declared in AP_GeodesicGrid.h.
""")
args = parser.parse_args()
if args.plot_subtriangles:
args.plot = True
if args.plot:
import plot
polygons_to_plot = []
if args.triangle:
indexes = []
for l in args.triangle:
indexes += l
for i in indexes:
if 0 > i or i >= len(ico.triangles):
print(
'Triangle index must be in the range [0,%d)' % len(ico.triangles),
file=sys.stderr,
)
sys.exit(1)
print(ico.triangles[i])
if args.plot:
plot.polygon(ico.triangles[i])
if args.section:
sections = []
for l in args.section:
sections += l
for s in sections:
if 0 > s or s >= 4 * len(ico.triangles):
print(
                'Section must be in the range [0,%d)' % (4 * len(ico.triangles)),
file=sys.stderr,
)
sys.exit(1)
print(grid.section_triangle(s))
if args.plot:
args.plot_subtriangles = True
plot.sections(sections)
if args.umbrella:
for pivot in args.umbrella:
for i, x in enumerate(pivot):
if x == 'g':
x = ico.g
elif x == '-g':
x = -ico.g
else:
try:
x = int(x)
if x not in (0, -1, 1):
raise ValueError()
except ValueError:
print(
'umbrella: invalid pivot coordinate: %s' % str(x),
file=sys.stderr,
)
sys.exit(1)
pivot[i] = x
pivot = ico.Vertex(*pivot)
if pivot not in ico.vertices:
print(
'umbrella: invalid pivot:', pivot,
file=sys.stderr,
)
sys.exit(1)
u = ico.umbrella(pivot)
print("Components of the umbrella of %s:" % str(pivot))
for c in u.components:
print(" %s" % str(c))
if args.plot:
plot.polygons(u.components)
if args.neighbor_umbrella:
indexes = []
for l in args.neighbor_umbrella:
indexes += l
if 'all' in indexes:
indexes = range(6)
else:
for i, arg in enumerate(indexes):
try:
arg = int(arg)
if arg not in range(6):
raise ValueError()
except ValueError:
print(
'neighbor_umbrella: invalid index %s' % str(arg),
file=sys.stderr,
)
sys.exit(1)
indexes[i] = arg
for i in indexes:
u, order_edge = header_neighbor_umbrella(i)
print("Header umbrella %d:" % i)
print(" Pivot:", u.pivot)
for i in range(5):
print(" Vertex %d:" % i, u.vertex(i, order_edge))
for i in range(5):
print(" Component %d:" % i, u.component(i, order_edge))
if args.plot:
plot.polygons(u.components)
if args.neighbor_umbrella_gen:
print("Header neighbor umbrellas code generation:")
print_code_gen_notice()
print("const struct AP_GeodesicGrid::neighbor_umbrella")
print("AP_GeodesicGrid::_neighbor_umbrellas[3]{")
for i in range(6):
u, order_edge = header_neighbor_umbrella(i)
components = tuple(
ico.triangles.index(u.component(i, order_edge)) for i in range(5)
)
def vi_cj(i, j):
v = u.vertex(i, order_edge)
t = u.component(j, order_edge)
return t.index(v)
vi_cj_values = tuple(
vi_cj(a, b) for a, b in ((0, 0), (1, 1), (2, 1), (4, 4), (0, 4))
)
print(" {{%s}, %s}," % (
", ".join("%2d" % i for i in components),
", ".join(str(i) for i in vi_cj_values),
))
print("};")
if args.inverses_gen:
print("Header inverses code generation:")
print_code_gen_notice()
print("const Matrix3f AP_GeodesicGrid::_inverses[10]{")
for i in range(10):
a, b, c = ico.triangles[i]
m = np.matrix((
(a.x, b.x, c.x),
(a.y, b.y, c.y),
(a.z, b.z, c.z),
)).getI()
print(" {{%9.6ff, %9.6ff, %9.6ff}," % (m[0,0], m[0,1], m[0,2]))
print(" {%9.6ff, %9.6ff, %9.6ff}," % (m[1,0], m[1,1], m[1,2]))
print(" {%9.6ff, %9.6ff, %9.6ff}}," % (m[2,0], m[2,1], m[2,2]))
print("};")
print()
print_code_gen_notice()
print("const Matrix3f AP_GeodesicGrid::_mid_inverses[10]{")
for i in range(10):
a, b, c = ico.triangles[i]
ma, mb, mc = .5 * (a + b), .5 * (b + c), .5 * (c + a)
m = np.matrix((
(ma.x, mb.x, mc.x),
(ma.y, mb.y, mc.y),
(ma.z, mb.z, mc.z),
)).getI()
print(" {{%9.6ff, %9.6ff, %9.6ff}," % (m[0,0], m[0,1], m[0,2]))
print(" {%9.6ff, %9.6ff, %9.6ff}," % (m[1,0], m[1,1], m[1,2]))
print(" {%9.6ff, %9.6ff, %9.6ff}}," % (m[2,0], m[2,1], m[2,2]))
print("};")
if args.icosahedron:
print('Icosahedron:')
for i, t in enumerate(ico.triangles):
print(' %s' % str(t))
if args.plot:
plot.polygons(ico.triangles)
if args.plot:
plot.show(subtriangles=args.plot_subtriangles)
| gpl-3.0 | -4,566,905,074,265,532,000 | 7,514,800,005,527,789,000 | 26.178125 | 82 | 0.538577 | false |
chfoo/wpull | wpull/application/plugin_test.py | 1 | 1250 | import unittest
from wpull.application.plugin import WpullPlugin, hook, event, InterfaceRegistry, \
event_interface, PluginClientFunctionInfo, PluginFunctionCategory
class MockPlugin(WpullPlugin):
@hook('hook_thing')
def my_hook_callback(self):
pass
@event('event_thing')
def my_event_callback(self, data):
pass
def unrelated_function(self):
pass
class TestPlugin(unittest.TestCase):
def test_plugin_function_discovery(self):
plugin = MockPlugin()
funcs = list(plugin.get_plugin_functions())
self.assertEqual(2, len(funcs))
self.assertIn(
PluginClientFunctionInfo(
plugin.my_event_callback, 'event_thing',
PluginFunctionCategory.event),
funcs)
self.assertIn(
PluginClientFunctionInfo(
plugin.my_hook_callback, 'hook_thing',
PluginFunctionCategory.hook),
funcs)
def test_plugin_interface_registry(self):
registry = InterfaceRegistry()
@event_interface('test_event', registry)
def event_callback(data):
pass
self.assertEqual(1, len(registry))
self.assertIn('test_event', registry)
| gpl-3.0 | -2,295,059,068,205,931,000 | 4,105,152,030,257,508,000 | 26.777778 | 83 | 0.6264 | false |
Rademade/taiga-back | taiga/base/api/templatetags/api.py | 3 | 9932 | # Copyright (C) 2014-2016 Andrey Antukh <niwi@niwi.nz>
# Copyright (C) 2014-2016 Jesús Espino <jespinog@gmail.com>
# Copyright (C) 2014-2016 David Barragán <bameda@dbarragan.com>
# Copyright (C) 2014-2016 Alejandro Alonso <alejandro.alonso@kaleidos.net>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# The code is partially taken (and modified) from django rest framework
# that is licensed under the following terms:
#
# Copyright (c) 2011-2014, Tom Christie
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# Redistributions in binary form must reproduce the above copyright notice, this
# list of conditions and the following disclaimer in the documentation and/or
# other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from django import template
from django.core.urlresolvers import reverse, NoReverseMatch
from django.contrib.staticfiles.templatetags.staticfiles import StaticFilesNode
from django.http import QueryDict
from django.utils.encoding import iri_to_uri
from django.utils.html import escape
from django.utils.safestring import SafeData, mark_safe
from django.utils import six
from django.utils.encoding import force_text
from django.utils.html import smart_urlquote
from urllib import parse as urlparse
import re
register = template.Library()
@register.tag("static")
def do_static(parser, token):
return StaticFilesNode.handle_token(parser, token)
def replace_query_param(url, key, val):
"""
Given a URL and a key/val pair, set or replace an item in the query
parameters of the URL, and return the new URL.
"""
(scheme, netloc, path, query, fragment) = urlparse.urlsplit(url)
query_dict = QueryDict(query).copy()
query_dict[key] = val
query = query_dict.urlencode()
return urlparse.urlunsplit((scheme, netloc, path, query, fragment))
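# Illustrative sketch (hypothetical URL; the relative order of parameters in
# the output may differ, since QueryDict.urlencode() does not guarantee it):
#
#     replace_query_param('/api/v1/projects?page=2&order_by=name', 'page', 5)
#     # -> '/api/v1/projects?page=5&order_by=name'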
# Regex for adding classes to html snippets
class_re = re.compile(r'(?<=class=["\'])(.*)(?=["\'])')
# And the template tags themselves...
@register.simple_tag
def optional_login(request):
"""
Include a login snippet if REST framework's login view is in the URLconf.
"""
try:
login_url = reverse("api:login")
except NoReverseMatch:
return ""
snippet = "<a href='%s?next=%s'>Log in</a>" % (login_url, request.path)
return snippet
@register.simple_tag
def optional_logout(request):
"""
Include a logout snippet if REST framework's logout view is in the URLconf.
"""
try:
logout_url = reverse("api:logout")
except NoReverseMatch:
return ""
snippet = "<a href='%s?next=%s'>Log out</a>" % (logout_url, request.path)
return snippet
@register.simple_tag
def add_query_param(request, key, val):
"""
Add a query parameter to the current request url, and return the new url.
"""
iri = request.get_full_path()
uri = iri_to_uri(iri)
return replace_query_param(uri, key, val)
@register.filter
def add_class(value, css_class):
"""
http://stackoverflow.com/questions/4124220/django-adding-css-classes-when-rendering-form-fields-in-a-template
Inserts classes into template variables that contain HTML tags,
useful for modifying forms without needing to change the Form objects.
Usage:
{{ field.label_tag|add_class:"control-label" }}
In the case of REST Framework, the filter is used to add Bootstrap-specific
classes to the forms.
"""
html = six.text_type(value)
match = class_re.search(html)
if match:
m = re.search(r"^%s$|^%s\s|\s%s\s|\s%s$" % (css_class, css_class,
css_class, css_class),
match.group(1))
if not m:
return mark_safe(class_re.sub(match.group(1) + " " + css_class,
html))
else:
return mark_safe(html.replace(">", ' class="%s">' % css_class, 1))
return value
# Bunch of stuff cloned from urlize
TRAILING_PUNCTUATION = [".", ",", ":", ";", ".)", "\"", "'"]
WRAPPING_PUNCTUATION = [("(", ")"), ("<", ">"), ("[", "]"), ("<", ">"),
("\"", "\""), ("'", "'")]
word_split_re = re.compile(r"(\s+)")
simple_url_re = re.compile(r"^https?://\[?\w", re.IGNORECASE)
simple_url_2_re = re.compile(r"^www\.|^(?!http)\w[^@]+\.(com|edu|gov|int|mil|net|org)$", re.IGNORECASE)
simple_email_re = re.compile(r"^\S+@\S+\.\S+$")
def smart_urlquote_wrapper(matched_url):
"""
Simple wrapper for smart_urlquote. ValueError("Invalid IPv6 URL") can
be raised here, see issue #1386
"""
try:
return smart_urlquote(matched_url)
except ValueError:
return None
@register.filter
def urlize_quoted_links(text, trim_url_limit=None, nofollow=True, autoescape=True):
"""
Converts any URLs in text into clickable links.
Works on http://, https://, www. links, and also on links ending in one of
the original seven gTLDs (.com, .edu, .gov, .int, .mil, .net, and .org).
Links can have trailing punctuation (periods, commas, close-parens) and
    leading punctuation (opening parens) and it'll still do the right thing.
If trim_url_limit is not None, the URLs in link text longer than this limit
    will be truncated to trim_url_limit-3 characters and appended with an ellipsis.
If nofollow is True, the URLs in link text will get a rel="nofollow"
attribute.
If autoescape is True, the link text and URLs will get autoescaped.
"""
trim_url = lambda x, limit=trim_url_limit: limit is not None and (len(x) > limit and ("%s..." % x[:max(0, limit - 3)])) or x
safe_input = isinstance(text, SafeData)
words = word_split_re.split(force_text(text))
for i, word in enumerate(words):
if "." in word or "@" in word or ":" in word:
# Deal with punctuation.
lead, middle, trail = "", word, ""
for punctuation in TRAILING_PUNCTUATION:
if middle.endswith(punctuation):
middle = middle[:-len(punctuation)]
trail = punctuation + trail
for opening, closing in WRAPPING_PUNCTUATION:
if middle.startswith(opening):
middle = middle[len(opening):]
lead = lead + opening
                # Keep parentheses at the end only if they're balanced.
if (middle.endswith(closing)
and middle.count(closing) == middle.count(opening) + 1):
middle = middle[:-len(closing)]
trail = closing + trail
# Make URL we want to point to.
url = None
nofollow_attr = ' rel="nofollow"' if nofollow else ""
if simple_url_re.match(middle):
url = smart_urlquote_wrapper(middle)
elif simple_url_2_re.match(middle):
url = smart_urlquote_wrapper("http://%s" % middle)
elif not ":" in middle and simple_email_re.match(middle):
local, domain = middle.rsplit("@", 1)
try:
domain = domain.encode("idna").decode("ascii")
except UnicodeError:
continue
url = "mailto:%s@%s" % (local, domain)
nofollow_attr = ""
# Make link.
if url:
trimmed = trim_url(middle)
if autoescape and not safe_input:
lead, trail = escape(lead), escape(trail)
url, trimmed = escape(url), escape(trimmed)
middle = '<a href="%s"%s>%s</a>' % (url, nofollow_attr, trimmed)
words[i] = mark_safe("%s%s%s" % (lead, middle, trail))
else:
if safe_input:
words[i] = mark_safe(word)
elif autoescape:
words[i] = escape(word)
elif safe_input:
words[i] = mark_safe(word)
elif autoescape:
words[i] = escape(word)
return "".join(words)
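# Illustrative sketch, assuming the default arguments (nofollow and autoescape
# enabled, no trim limit):
#
#     urlize_quoted_links('see www.example.com for details')
#     # -> 'see <a href="http://www.example.com" rel="nofollow">'
#     #    'www.example.com</a> for details'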
@register.filter
def break_long_headers(header):
"""
Breaks headers longer than 160 characters (~page length)
    when possible (i.e. when they are comma-separated)
"""
if len(header) > 160 and "," in header:
header = mark_safe("<br> " + ", <br>".join(header.split(",")))
return header
| agpl-3.0 | 9,119,751,141,974,895,000 | -6,104,010,327,361,614,000 | 37.638132 | 128 | 0.63575 | false |
facebookresearch/fastText | python/doc/examples/bin_to_vec.py | 1 | 1120 | #!/usr/bin/env python
# Copyright (c) 2017-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division, absolute_import, print_function
from fasttext import load_model
import argparse
import errno
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description=("Print fasttext .vec file to stdout from .bin file")
)
parser.add_argument(
"model",
help="Model to use",
)
args = parser.parse_args()
f = load_model(args.model)
words = f.get_words()
print(str(len(words)) + " " + str(f.get_dimension()))
for w in words:
v = f.get_word_vector(w)
vstr = ""
for vi in v:
vstr += " " + str(vi)
try:
print(w + vstr)
except IOError as e:
if e.errno == errno.EPIPE:
pass
| mit | 4,604,653,004,951,029,000 | 5,541,701,695,421,304,000 | 26.317073 | 73 | 0.611607 | false |
haeusser/tensorflow | tensorflow/python/kernel_tests/dynamic_stitch_op_test.py | 77 | 6501 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.ops.data_flow_ops.dynamic_stitch."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.ops import data_flow_ops
from tensorflow.python.ops import gradients_impl
import tensorflow.python.ops.data_flow_grad # pylint: disable=unused-import
from tensorflow.python.platform import test
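# Background for the tests below: dynamic_stitch interleaves several tensors
# into one, placing data[m][i] at position indices[m][i] of the result. For
# example, indices [0, 4, 7] / [1, 6, 2, 3, 5] with data [0, 40, 70] /
# [10, 60, 20, 30, 50] stitch to [0, 10, 20, 30, 40, 50, 60, 70], which is
# exactly what testSimpleOneDimensional asserts.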
class DynamicStitchTest(test.TestCase):
def testScalar(self):
with self.test_session():
indices = [constant_op.constant(0), constant_op.constant(1)]
data = [constant_op.constant(40), constant_op.constant(60)]
for step in -1, 1:
stitched_t = data_flow_ops.dynamic_stitch(indices[::step], data)
stitched_val = stitched_t.eval()
self.assertAllEqual([40, 60][::step], stitched_val)
# Dimension 0 is determined by the max index in indices, so we
# can only infer that the output is a vector of some unknown
# length.
self.assertEqual([None], stitched_t.get_shape().as_list())
def testSimpleOneDimensional(self):
with self.test_session():
indices = [
constant_op.constant([0, 4, 7]), constant_op.constant([1, 6, 2, 3, 5])
]
data = [
constant_op.constant([0, 40, 70]),
constant_op.constant([10, 60, 20, 30, 50])
]
stitched_t = data_flow_ops.dynamic_stitch(indices, data)
stitched_val = stitched_t.eval()
self.assertAllEqual([0, 10, 20, 30, 40, 50, 60, 70], stitched_val)
# Dimension 0 is determined by the max index in indices, so we
# can only infer that the output is a vector of some unknown
# length.
self.assertEqual([None], stitched_t.get_shape().as_list())
def testOneListOneDimensional(self):
with self.test_session():
indices = [constant_op.constant([1, 6, 2, 3, 5, 0, 4, 7])]
data = [constant_op.constant([10, 60, 20, 30, 50, 0, 40, 70])]
stitched_t = data_flow_ops.dynamic_stitch(indices, data)
stitched_val = stitched_t.eval()
self.assertAllEqual([0, 10, 20, 30, 40, 50, 60, 70], stitched_val)
# Dimension 0 is determined by the max index in indices, so we
# can only infer that the output is a vector of some unknown
# length.
self.assertEqual([None], stitched_t.get_shape().as_list())
def testSimpleTwoDimensional(self):
with self.test_session():
indices = [
constant_op.constant([0, 4, 7]), constant_op.constant([1, 6]),
constant_op.constant([2, 3, 5])
]
data = [
constant_op.constant([[0, 1], [40, 41], [70, 71]]),
constant_op.constant([[10, 11], [60, 61]]),
constant_op.constant([[20, 21], [30, 31], [50, 51]])
]
stitched_t = data_flow_ops.dynamic_stitch(indices, data)
stitched_val = stitched_t.eval()
self.assertAllEqual([[0, 1], [10, 11], [20, 21], [30, 31], [40, 41],
[50, 51], [60, 61], [70, 71]], stitched_val)
# Dimension 0 is determined by the max index in indices, so we
# can only infer that the output is a matrix with 2 columns and
# some unknown number of rows.
self.assertEqual([None, 2], stitched_t.get_shape().as_list())
def testHigherRank(self):
with self.test_session() as sess:
indices = [
constant_op.constant(6), constant_op.constant([4, 1]),
constant_op.constant([[5, 2], [0, 3]])
]
data = [
constant_op.constant([61, 62]),
constant_op.constant([[41, 42], [11, 12]]),
constant_op.constant([[[51, 52], [21, 22]], [[1, 2], [31, 32]]])
]
stitched_t = data_flow_ops.dynamic_stitch(indices, data)
stitched_val = stitched_t.eval()
correct = 10 * np.arange(7)[:, None] + [1, 2]
self.assertAllEqual(correct, stitched_val)
self.assertEqual([None, 2], stitched_t.get_shape().as_list())
# Test gradients
stitched_grad = 7 * stitched_val
grads = gradients_impl.gradients(stitched_t, indices + data,
stitched_grad)
self.assertEqual(grads[:3], [None] * 3) # Indices have no gradients
for datum, grad in zip(data, sess.run(grads[3:])):
self.assertAllEqual(7 * datum.eval(), grad)
def testErrorIndicesMultiDimensional(self):
indices = [
constant_op.constant([0, 4, 7]), constant_op.constant([[1, 6, 2, 3, 5]])
]
data = [
constant_op.constant([[0, 40, 70]]),
constant_op.constant([10, 60, 20, 30, 50])
]
with self.assertRaises(ValueError):
data_flow_ops.dynamic_stitch(indices, data)
def testErrorDataNumDimsMismatch(self):
indices = [
constant_op.constant([0, 4, 7]), constant_op.constant([1, 6, 2, 3, 5])
]
data = [
constant_op.constant([0, 40, 70]),
constant_op.constant([[10, 60, 20, 30, 50]])
]
with self.assertRaises(ValueError):
data_flow_ops.dynamic_stitch(indices, data)
def testErrorDataDimSizeMismatch(self):
indices = [
constant_op.constant([0, 4, 5]), constant_op.constant([1, 6, 2, 3])
]
data = [
constant_op.constant([[0], [40], [70]]),
constant_op.constant([[10, 11], [60, 61], [20, 21], [30, 31]])
]
with self.assertRaises(ValueError):
data_flow_ops.dynamic_stitch(indices, data)
def testErrorDataAndIndicesSizeMismatch(self):
indices = [
constant_op.constant([0, 4, 7]), constant_op.constant([1, 6, 2, 3, 5])
]
data = [
constant_op.constant([0, 40, 70]),
constant_op.constant([10, 60, 20, 30])
]
with self.assertRaises(ValueError):
data_flow_ops.dynamic_stitch(indices, data)
if __name__ == "__main__":
test.main()
| apache-2.0 | 8,149,713,229,172,692,000 | -3,428,370,183,257,032,700 | 38.640244 | 80 | 0.611752 | false |
tersmitten/ansible | test/units/modules/storage/netapp/test_na_ontap_qos_policy_group.py | 45 | 12789 | # (c) 2018, NetApp, Inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
''' unit test template for ONTAP Ansible module '''
from __future__ import print_function
import json
import pytest
from units.compat import unittest
from units.compat.mock import patch, Mock
from ansible.module_utils import basic
from ansible.module_utils._text import to_bytes
import ansible.module_utils.netapp as netapp_utils
from ansible.modules.storage.netapp.na_ontap_qos_policy_group \
import NetAppOntapQosPolicyGroup as qos_policy_group_module # module under test
if not netapp_utils.has_netapp_lib():
pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
def set_module_args(args):
"""prepare arguments so that they will be picked up during module creation"""
args = json.dumps({'ANSIBLE_MODULE_ARGS': args})
basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access
class AnsibleExitJson(Exception):
"""Exception class to be raised by module.exit_json and caught by the test case"""
pass
class AnsibleFailJson(Exception):
"""Exception class to be raised by module.fail_json and caught by the test case"""
pass
def exit_json(*args, **kwargs): # pylint: disable=unused-argument
"""function to patch over exit_json; package return data into an exception"""
if 'changed' not in kwargs:
kwargs['changed'] = False
raise AnsibleExitJson(kwargs)
def fail_json(*args, **kwargs): # pylint: disable=unused-argument
"""function to patch over fail_json; package return data into an exception"""
kwargs['failed'] = True
raise AnsibleFailJson(kwargs)
class MockONTAPConnection(object):
''' mock server connection to ONTAP host '''
def __init__(self, kind=None, data=None):
''' save arguments '''
self.kind = kind
self.params = data
self.xml_in = None
self.xml_out = None
def invoke_successfully(self, xml, enable_tunneling): # pylint: disable=unused-argument
''' mock invoke_successfully returning xml data '''
self.xml_in = xml
if self.kind == 'policy':
xml = self.build_policy_group_info(self.params)
if self.kind == 'error':
error = netapp_utils.zapi.NaApiError('test', 'error')
raise error
self.xml_out = xml
return xml
@staticmethod
def build_policy_group_info(vol_details):
        ''' build xml data for qos-policy-group-info '''
xml = netapp_utils.zapi.NaElement('xml')
attributes = {
'num-records': 1,
'attributes-list': {
'qos-policy-group-info': {
'is-shared': 'true',
'max-throughput': '800KB/s,800IOPS',
'min-throughput': '100IOPS',
'num-workloads': 0,
'pgid': 8690,
'policy-group': vol_details['name'],
'vserver': vol_details['vserver']
}
}
}
xml.translate_struct(attributes)
return xml
class TestMyModule(unittest.TestCase):
''' a group of related Unit Tests '''
def setUp(self):
self.mock_module_helper = patch.multiple(basic.AnsibleModule,
exit_json=exit_json,
fail_json=fail_json)
self.mock_module_helper.start()
self.addCleanup(self.mock_module_helper.stop)
self.mock_policy_group = {
'name': 'policy_1',
'vserver': 'policy_vserver',
'max_throughput': '800KB/s,800IOPS',
'min_throughput': '100IOPS'
}
def mock_args(self):
return {
'name': self.mock_policy_group['name'],
'vserver': self.mock_policy_group['vserver'],
'max_throughput': '800KB/s,800IOPS',
'min_throughput': '100IOPS',
'hostname': 'test',
'username': 'test_user',
'password': 'test_pass!',
'https': 'False'
}
def get_policy_group_mock_object(self, kind=None):
"""
        Helper method to return an na_ontap_qos_policy_group object
        :param kind: passes this param to MockONTAPConnection()
        :return: na_ontap_qos_policy_group object
"""
policy_obj = qos_policy_group_module()
policy_obj.asup_log_for_cserver = Mock(return_value=None)
policy_obj.cluster = Mock()
policy_obj.cluster.invoke_successfully = Mock()
if kind is None:
policy_obj.server = MockONTAPConnection()
else:
policy_obj.server = MockONTAPConnection(kind=kind, data=self.mock_policy_group)
return policy_obj
def test_module_fail_when_required_args_missing(self):
''' required arguments are reported as errors '''
with pytest.raises(AnsibleFailJson) as exc:
set_module_args({})
qos_policy_group_module()
print('Info: %s' % exc.value.args[0]['msg'])
def test_get_nonexistent_policy(self):
''' Test if get_policy_group returns None for non-existent policy_group '''
set_module_args(self.mock_args())
result = self.get_policy_group_mock_object().get_policy_group()
assert result is None
def test_get_existing_policy_group(self):
''' Test if get_policy_group returns details for existing policy_group '''
set_module_args(self.mock_args())
result = self.get_policy_group_mock_object('policy').get_policy_group()
assert result['name'] == self.mock_policy_group['name']
assert result['vserver'] == self.mock_policy_group['vserver']
def test_create_error_missing_param(self):
''' Test if create throws an error if name is not specified'''
data = self.mock_args()
del data['name']
set_module_args(data)
with pytest.raises(AnsibleFailJson) as exc:
self.get_policy_group_mock_object('policy').create_policy_group()
msg = 'missing required arguments: name'
assert exc.value.args[0]['msg'] == msg
def test_successful_create(self):
''' Test successful create '''
data = self.mock_args()
set_module_args(data)
with pytest.raises(AnsibleExitJson) as exc:
self.get_policy_group_mock_object().apply()
assert exc.value.args[0]['changed']
def test_create_idempotency(self):
''' Test create idempotency '''
set_module_args(self.mock_args())
obj = self.get_policy_group_mock_object('policy')
with pytest.raises(AnsibleExitJson) as exc:
obj.apply()
assert not exc.value.args[0]['changed']
@patch('ansible.modules.storage.netapp.na_ontap_qos_policy_group.NetAppOntapQosPolicyGroup.get_policy_group')
def test_create_error(self, get_policy_group):
''' Test create error '''
set_module_args(self.mock_args())
get_policy_group.side_effect = [
None
]
with pytest.raises(AnsibleFailJson) as exc:
self.get_policy_group_mock_object('error').apply()
assert exc.value.args[0]['msg'] == 'Error creating qos policy group policy_1: NetApp API failed. Reason - test:error'
def test_successful_delete(self):
        ''' Test delete existing qos policy group '''
data = self.mock_args()
data['state'] = 'absent'
set_module_args(data)
with pytest.raises(AnsibleExitJson) as exc:
self.get_policy_group_mock_object('policy').apply()
assert exc.value.args[0]['changed']
def test_delete_idempotency(self):
''' Test delete idempotency '''
data = self.mock_args()
data['state'] = 'absent'
set_module_args(data)
with pytest.raises(AnsibleExitJson) as exc:
self.get_policy_group_mock_object().apply()
assert not exc.value.args[0]['changed']
@patch('ansible.modules.storage.netapp.na_ontap_qos_policy_group.NetAppOntapQosPolicyGroup.get_policy_group')
def test_delete_error(self, get_policy_group):
        ''' Test delete error '''
data = self.mock_args()
data['state'] = 'absent'
set_module_args(data)
current = {
'max_throughput': '800KB/s,800IOPS',
'min_throughput': '100IOPS',
'name': 'policy_1',
'vserver': 'policy_vserver'
}
get_policy_group.side_effect = [
current
]
with pytest.raises(AnsibleFailJson) as exc:
self.get_policy_group_mock_object('error').apply()
assert exc.value.args[0]['msg'] == 'Error deleting qos policy group policy_1: NetApp API failed. Reason - test:error'
def test_successful_modify_max_throughput(self):
''' Test successful modify max throughput '''
data = self.mock_args()
data['max_throughput'] = '900KB/s,800iops'
set_module_args(data)
with pytest.raises(AnsibleExitJson) as exc:
self.get_policy_group_mock_object('policy').apply()
assert exc.value.args[0]['changed']
def test_modify_max_throughput_idempotency(self):
''' Test modify idempotency '''
data = self.mock_args()
set_module_args(data)
with pytest.raises(AnsibleExitJson) as exc:
self.get_policy_group_mock_object('policy').apply()
assert not exc.value.args[0]['changed']
@patch('ansible.modules.storage.netapp.na_ontap_qos_policy_group.NetAppOntapQosPolicyGroup.get_policy_group')
def test_modify_error(self, get_policy_group):
        ''' Test modify error '''
data = self.mock_args()
data['max_throughput'] = '900KB/s,900IOPS'
set_module_args(data)
current = {
'max_throughput': '800KB/s,800IOPS',
'min_throughput': '100IOPS',
'name': 'policy_1',
'vserver': 'policy_vserver'
}
get_policy_group.side_effect = [
current
]
with pytest.raises(AnsibleFailJson) as exc:
self.get_policy_group_mock_object('error').apply()
assert exc.value.args[0]['msg'] == 'Error modifying qos policy group policy_1: NetApp API failed. Reason - test:error'
@patch('ansible.modules.storage.netapp.na_ontap_qos_policy_group.NetAppOntapQosPolicyGroup.get_policy_group')
def test_rename(self, get_policy_group):
        ''' Test rename '''
data = self.mock_args()
data['name'] = 'policy_2'
data['from_name'] = 'policy_1'
set_module_args(data)
current = {
'max_throughput': '800KB/s,800IOPS',
'min_throughput': '100IOPS',
'name': 'policy_1',
'vserver': 'policy_vserver'
}
get_policy_group.side_effect = [
None,
current
]
with pytest.raises(AnsibleExitJson) as exc:
self.get_policy_group_mock_object('policy').apply()
assert exc.value.args[0]['changed']
@patch('ansible.modules.storage.netapp.na_ontap_qos_policy_group.NetAppOntapQosPolicyGroup.get_policy_group')
def test_rename_idempotency(self, get_policy_group):
''' Test rename idempotency '''
data = self.mock_args()
data['name'] = 'policy_1'
data['from_name'] = 'policy_1'
current = {
'max_throughput': '800KB/s,800IOPS',
'min_throughput': '100IOPS',
'name': 'policy_1',
'vserver': 'policy_vserver'
}
get_policy_group.side_effect = [
current,
current
]
set_module_args(data)
with pytest.raises(AnsibleExitJson) as exc:
self.get_policy_group_mock_object('policy').apply()
assert not exc.value.args[0]['changed']
@patch('ansible.modules.storage.netapp.na_ontap_qos_policy_group.NetAppOntapQosPolicyGroup.get_policy_group')
def test_rename_error(self, get_policy_group):
        ''' Test rename error '''
data = self.mock_args()
data['from_name'] = 'policy_1'
data['name'] = 'policy_2'
set_module_args(data)
current = {
'is_shared': 'true',
'max_throughput': '800KB/s,800IOPS',
'min_throughput': '100IOPS',
'name': 'policy_1',
'vserver': 'policy_vserver'
}
get_policy_group.side_effect = [
None,
current
]
with pytest.raises(AnsibleFailJson) as exc:
self.get_policy_group_mock_object('error').apply()
assert exc.value.args[0]['msg'] == 'Error renaming qos policy group policy_1: NetApp API failed. Reason - test:error'
| gpl-3.0 | 6,178,372,628,997,769,000 | 5,656,228,933,346,635,000 | 37.637462 | 126 | 0.597154 | false |
whausen/part | src/adhocracy/lib/text/mdx_showmore.py | 1 | 2762 | """Showmore extension for Markdown.
To hide something with [more...], surround the corresponding text with triple
parentheses, e.g. (((text_to_be_hidden))).
In order to show the text, you have to include the following Javascript in your
code, which depends on the availability of jquery.
$('.showmore').each(function () {
var self = $(this);
self.find('.showmore_morelink').bind('click', function (event) {
self.find('.showmore_collapsed').css('display', 'none');
self.find('.showmore_uncollapsed').css('display', 'inline');
});
self.find('.showmore_lesslink').bind('click', function (event) {
self.find('.showmore_collapsed').css('display', 'inline');
self.find('.showmore_uncollapsed').css('display', 'none');
});
});
Additionally, you have to add the following to your css code:
.showmore, .showmore_content {
display: inline;
}
.showmore_uncollapsed {
display: none;
}
"""
import re
import markdown
from pylons.i18n import _
SHOWMORE_RE = re.compile(r'\({3,}(?P<text>.*?)\){3,}',
re.MULTILINE | re.DOTALL)
MORE_STRING = u'show more'
LESS_STRING = u'show less'
PRE_HTML = u'''
<div class="showmore">
<span class="showmore_collapsed">
<span> </span>
<a class="showmore_morelink" href="#">[%s]</a>
<span> </span>
</span>
<div class="showmore_uncollapsed">
<div class="showmore_content">
'''
POST_HTML = u'''
</div>
<span> </span>
<a class="showmore_lesslink" href="#">[%s]</a>
<span> </span>
</div>
</div>
'''
class ShowmoreExtension(markdown.Extension):
""" Showmore Extension for Python-Markdown. """
def extendMarkdown(self, md, md_globals):
md.registerExtension(self)
md.preprocessors.add('showmore', ShowmorePreprocessor(md),
'>normalize_whitespace')
class ShowmorePreprocessor(markdown.preprocessors.Preprocessor):
def run(self, lines):
text = "\n".join(lines)
while 1:
m = SHOWMORE_RE.search(text)
if m:
text = '%s%s%s%s%s' % (
text[:m.start()],
self.markdown.htmlStash.store(PRE_HTML % _(MORE_STRING),
safe=True),
m.group('text'),
self.markdown.htmlStash.store(POST_HTML % _(LESS_STRING),
safe=True),
text[m.end():])
else:
break
return text.split("\n")
def makeExtension(configs=None):
return ShowmoreExtension(configs=configs)
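# A minimal usage sketch (added for illustration, not part of the original
# extension). It exercises the SHOWMORE_RE pattern directly; the sample text
# is hypothetical, and how the extension gets registered with python-markdown
# depends on the markdown version shipped with adhocracy.
if __name__ == "__main__":
    sample = "visible intro (((hidden details))) outro"
    match = SHOWMORE_RE.search(sample)
    print(match.group("text"))  # -> "hidden details"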
| agpl-3.0 | -2,377,708,392,046,185,000 | -1,280,791,210,351,445 | 28.698925 | 79 | 0.543085 | false |
bobthekingofegypt/servo | tests/wpt/web-platform-tests/tools/html5lib/html5lib/treewalkers/etree.py | 658 | 4613 | from __future__ import absolute_import, division, unicode_literals
try:
from collections import OrderedDict
except ImportError:
try:
from ordereddict import OrderedDict
except ImportError:
OrderedDict = dict
import gettext
_ = gettext.gettext
import re
from six import text_type
from . import _base
from ..utils import moduleFactoryFactory
tag_regexp = re.compile("{([^}]*)}(.*)")
def getETreeBuilder(ElementTreeImplementation):
ElementTree = ElementTreeImplementation
ElementTreeCommentType = ElementTree.Comment("asd").tag
class TreeWalker(_base.NonRecursiveTreeWalker):
"""Given the particular ElementTree representation, this implementation,
to avoid using recursion, returns "nodes" as tuples with the following
content:
1. The current element
2. The index of the element relative to its parent
3. A stack of ancestor elements
4. A flag "text", "tail" or None to indicate if the current node is a
text node; either the text or tail of the current element (1)
"""
def getNodeDetails(self, node):
if isinstance(node, tuple): # It might be the root Element
elt, key, parents, flag = node
if flag in ("text", "tail"):
return _base.TEXT, getattr(elt, flag)
else:
node = elt
if not(hasattr(node, "tag")):
node = node.getroot()
if node.tag in ("DOCUMENT_ROOT", "DOCUMENT_FRAGMENT"):
return (_base.DOCUMENT,)
elif node.tag == "<!DOCTYPE>":
return (_base.DOCTYPE, node.text,
node.get("publicId"), node.get("systemId"))
elif node.tag == ElementTreeCommentType:
return _base.COMMENT, node.text
else:
assert type(node.tag) == text_type, type(node.tag)
# This is assumed to be an ordinary element
match = tag_regexp.match(node.tag)
if match:
namespace, tag = match.groups()
else:
namespace = None
tag = node.tag
attrs = OrderedDict()
for name, value in list(node.attrib.items()):
match = tag_regexp.match(name)
if match:
attrs[(match.group(1), match.group(2))] = value
else:
attrs[(None, name)] = value
return (_base.ELEMENT, namespace, tag,
attrs, len(node) or node.text)
def getFirstChild(self, node):
if isinstance(node, tuple):
element, key, parents, flag = node
else:
element, key, parents, flag = node, None, [], None
if flag in ("text", "tail"):
return None
else:
if element.text:
return element, key, parents, "text"
elif len(element):
parents.append(element)
return element[0], 0, parents, None
else:
return None
def getNextSibling(self, node):
if isinstance(node, tuple):
element, key, parents, flag = node
else:
return None
if flag == "text":
if len(element):
parents.append(element)
return element[0], 0, parents, None
else:
return None
else:
if element.tail and flag != "tail":
return element, key, parents, "tail"
elif key < len(parents[-1]) - 1:
return parents[-1][key + 1], key + 1, parents, None
else:
return None
def getParentNode(self, node):
if isinstance(node, tuple):
element, key, parents, flag = node
else:
return None
if flag == "text":
if not parents:
return element
else:
return element, key, parents, None
else:
parent = parents.pop()
if not parents:
return parent
else:
return parent, list(parents[-1]).index(parent), parents, None
return locals()
getETreeModule = moduleFactoryFactory(getETreeBuilder)
| mpl-2.0 | -1,960,500,214,069,361,400 | -3,154,913,931,057,146,000 | 32.427536 | 81 | 0.502276 | false |
dcsquared13/Diamond | src/collectors/interrupt/test/testsoft.py | 31 | 3042 | #!/usr/bin/python
# coding=utf-8
##########################################################################
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
from diamond.collector import Collector
from soft import SoftInterruptCollector
##########################################################################
class TestSoftInterruptCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('SoftInterruptCollector', {
'interval': 1
})
self.collector = SoftInterruptCollector(config, None)
def test_import(self):
self.assertTrue(SoftInterruptCollector)
@patch('__builtin__.open')
@patch('os.access', Mock(return_value=True))
@patch.object(Collector, 'publish')
def test_should_open_proc_stat(self, publish_mock, open_mock):
open_mock.return_value = StringIO('')
self.collector.collect()
open_mock.assert_called_once_with('/proc/stat', 'r')
@patch.object(Collector, 'publish')
def test_should_work_with_synthetic_data(self, publish_mock):
patch_open = patch('__builtin__.open', Mock(return_value=StringIO(
'softirq 0 0 0 0 0 0 0 0 0 0 0'
)))
patch_open.start()
self.collector.collect()
patch_open.stop()
self.assertPublishedMany(publish_mock, {})
patch_open = patch('__builtin__.open', Mock(return_value=StringIO(
'softirq 55 1 2 3 4 5 6 7 8 9 10'
)))
patch_open.start()
self.collector.collect()
patch_open.stop()
self.assertPublishedMany(publish_mock, {
'total': 55.0,
'0': 1,
'1': 2,
'2': 3,
'3': 4,
'4': 5,
'5': 6,
'6': 7,
'7': 8,
'8': 9,
'9': 10,
})
@patch.object(Collector, 'publish')
def test_should_work_with_real_data(self, publish_mock):
SoftInterruptCollector.PROC = self.getFixturePath('proc_stat_1')
self.collector.collect()
self.assertPublishedMany(publish_mock, {})
SoftInterruptCollector.PROC = self.getFixturePath('proc_stat_2')
self.collector.collect()
metrics = {
'total': 4971,
'0': 0,
'1': 1729,
'2': 2,
'3': 240,
'4': 31,
'5': 0,
'6': 0,
'7': 1480,
'8': 0,
'9': 1489,
}
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
##########################################################################
if __name__ == "__main__":
unittest.main()
| mit | 326,179,207,738,004,860 | -8,472,218,736,676,975,000 | 27.429907 | 74 | 0.515122 | false |
chdecultot/erpnext | erpnext/patches/v11_0/change_healthcare_desktop_icons.py | 4 | 2450 | import frappe
from frappe import _
change_icons_map = [
{
"module_name": "Patient",
"color": "#6BE273",
"icon": "fa fa-user",
"doctype": "Patient",
"type": "link",
"link": "List/Patient",
"label": _("Patient")
},
{
"module_name": "Patient Encounter",
"color": "#2ecc71",
"icon": "fa fa-stethoscope",
"doctype": "Patient Encounter",
"type": "link",
"link": "List/Patient Encounter",
"label": _("Patient Encounter"),
},
{
"module_name": "Healthcare Practitioner",
"color": "#2ecc71",
"icon": "fa fa-user-md",
"doctype": "Healthcare Practitioner",
"type": "link",
"link": "List/Healthcare Practitioner",
"label": _("Healthcare Practitioner")
},
{
"module_name": "Patient Appointment",
"color": "#934F92",
"icon": "fa fa-calendar-plus-o",
"doctype": "Patient Appointment",
"type": "link",
"link": "List/Patient Appointment",
"label": _("Patient Appointment")
},
{
"module_name": "Lab Test",
"color": "#7578f6",
"icon": "octicon octicon-beaker",
"doctype": "Lab Test",
"type": "link",
"link": "List/Lab Test",
"label": _("Lab Test")
}
]
def execute():
change_healthcare_desktop_icons()
def change_healthcare_desktop_icons():
doctypes = ["patient", "patient_encounter", "healthcare_practitioner",
"patient_appointment", "lab_test"]
for doctype in doctypes:
frappe.reload_doc("healthcare", "doctype", doctype)
for spec in change_icons_map:
frappe.db.sql("""
delete from `tabDesktop Icon`
where _doctype = '{0}'
""".format(spec['doctype']))
desktop_icon = frappe.new_doc("Desktop Icon")
desktop_icon.hidden = 1
desktop_icon.standard = 1
desktop_icon.icon = spec['icon']
desktop_icon.color = spec['color']
desktop_icon.module_name = spec['module_name']
desktop_icon.label = spec['label']
desktop_icon.app = "erpnext"
desktop_icon.type = spec['type']
desktop_icon._doctype = spec['doctype']
desktop_icon.link = spec['link']
desktop_icon.save(ignore_permissions=True)
frappe.db.sql("""
delete from `tabDesktop Icon`
where module_name = 'Healthcare' and type = 'module'
""")
desktop_icon = frappe.new_doc("Desktop Icon")
desktop_icon.hidden = 1
desktop_icon.standard = 1
desktop_icon.icon = "fa fa-heartbeat"
desktop_icon.color = "#FF888B"
desktop_icon.module_name = "Healthcare"
desktop_icon.label = _("Healthcare")
desktop_icon.app = "erpnext"
desktop_icon.type = 'module'
desktop_icon.save(ignore_permissions=True)
| gpl-3.0 | -6,027,214,040,024,782,000 | 3,825,073,006,543,346,700 | 25.344086 | 71 | 0.650204 | false |
jszymon/pacal | pacal/__init__.py | 1 | 3420 | """PaCal, the probabilistic calculator."""
from __future__ import print_function
from . import params
import numpy as _np
from pylab import show
_np.seterr(all="ignore")
from .utils import Inf
from .distr import DiscreteDistr
from .distr import exp, log, atan, min, max, sqrt, sign, sin, cos, tan, tanh, sigmoid
from .standard_distr import FunDistr
from .standard_distr import NormalDistr
from .standard_distr import UniformDistr
from .standard_distr import TrapezoidalDistr
from .standard_distr import CauchyDistr
from .standard_distr import ChiSquareDistr
from .standard_distr import ExponentialDistr
from .standard_distr import GammaDistr
from .standard_distr import BetaDistr
from .standard_distr import ParetoDistr
from .standard_distr import LevyDistr
from .standard_distr import LaplaceDistr
from .standard_distr import StudentTDistr
from .standard_distr import SemicircleDistr
from .standard_distr import FDistr
from .standard_distr import WeibullDistr
from .standard_distr import GumbelDistr
from .standard_distr import FrechetDistr
from .standard_distr import LogLogisticDistr
from .standard_distr import MollifierDistr
from .standard_distr import OneDistr
from .standard_distr import ZeroDistr
from .standard_distr import BinomialDistr
from .standard_distr import BernoulliDistr
from .standard_distr import PoissonDistr
from .standard_distr import MixDistr
from .distr import CondGtDistr
from .distr import CondLtDistr
from .distr import ConstDistr
from .distr import Gt
from .distr import Lt
from .distr import Between
from .stats.noncentral_distr import NoncentralTDistr
from .stats.noncentral_distr import NoncentralChiSquareDistr
from .stats.noncentral_distr import NoncentralBetaDistr
from .stats.noncentral_distr import NoncentralFDistr
from .stats.iid_ops import iid_sum, iid_prod, iid_max, iid_min, iid_average, iid_average_geom
from .stats.iid_ops import iid_order_stat, iid_median
from .stats.distr_est import LoglikelihoodEstimator
# dependent variables
from .depvars.copulas import PiCopula, FrankCopula, ClaytonCopula, GumbelCopula
from .depvars.nddistr import NDNormalDistr, IJthOrderStatsNDDistr
from .depvars.models import TwoVarsModel, Model
def _pickle_method(method):
obj = method.__self__
cls = method.__self__.__class__
func_name = method.__func__.__name__
#print "pickle>>>", func_name, obj, cls
if func_name.startswith('__') and not func_name.endswith('__'):
#deal with mangled names
cls_name = cls.__name__.lstrip('_')
func_name = '_%s%s' % (cls_name, func_name)
return _unpickle_method, (func_name, obj, cls)
def _unpickle_method(func_name, obj, cls):
#print "upickle>>>", func_name, obj, cls
if obj and func_name in obj.__dict__:
cls, obj = obj, None # if func_name is classmethod
for cls in cls.__mro__:
try:
func = cls.__dict__[func_name]
except KeyError:
pass
else:
break
return func.__get__(obj, cls)
if params.general.parallel:
# make ufuncs picklable
import types
try:
import copyreg
#copy_reg.pickle(_np.ufunc, _pickle_ufunc, _unpickle_ufunc)
copyreg.pickle(types.MethodType, _pickle_method, _unpickle_method)
except:
import copy_reg
#copy_reg.pickle(_np.ufunc, _pickle_ufunc, _unpickle_ufunc)
copy_reg.pickle(types.MethodType, _pickle_method, _unpickle_method)
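# Illustration (added comment, not part of the original module): with the
# reducer registered above, bound methods become picklable, which the parallel
# code paths rely on when shipping work to worker processes, e.g.
#   import pickle
#   pickle.dumps(some_distr.pdf)   # some_distr is a hypothetical distribution object
# would fail on Python 2 without this registration.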
| gpl-3.0 | 7,221,591,864,244,547,000 | -5,258,193,248,483,478,000 | 32.203883 | 93 | 0.738304 | false |
arhik/nupic | src/nupic/frameworks/opf/opfhelpers.py | 39 | 3728 | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
# This file contains utility functions that may be imported
# by clients of the framework. Functions that are used only by
# the prediction framework should be in opfutils.py
#
# TODO: Rename as helpers.py once we're ready to replace the legacy
# helpers.py
import imp
import os
import expdescriptionapi
def loadExperiment(path):
"""Loads the experiment description file from the path.
Args:
path: The path to a directory containing a description.py file or the file
itself.
Returns:
(config, control)
"""
if not os.path.isdir(path):
path = os.path.dirname(path)
descriptionPyModule = loadExperimentDescriptionScriptFromDir(path)
expIface = getExperimentDescriptionInterfaceFromModule(descriptionPyModule)
return expIface.getModelDescription(), expIface.getModelControl()
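# Illustrative call (added comment, not part of the original module; the
# directory path is hypothetical):
#   modelDescription, modelControl = loadExperiment("/path/to/my_experiment")
# where the directory is expected to contain a description.py file.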
def loadExperimentDescriptionScriptFromDir(experimentDir):
""" Loads the experiment description python script from the given experiment
directory.
experimentDir: experiment directory path
Returns: module of the loaded experiment description scripts
"""
descriptionScriptPath = os.path.join(experimentDir, "description.py")
module = _loadDescriptionFile(descriptionScriptPath)
return module
def getExperimentDescriptionInterfaceFromModule(module):
"""
module: imported description.py module
Returns: An expdescriptionapi.DescriptionIface-based instance that
represents the experiment description
"""
result = module.descriptionInterface
assert isinstance(result, expdescriptionapi.DescriptionIface), \
"expected DescriptionIface-based instance, but got %s" % type(result)
return result
g_descriptionImportCount = 0
def _loadDescriptionFile(descriptionPyPath):
"""Loads a description file and returns it as a module.
descriptionPyPath: path of description.py file to load
"""
global g_descriptionImportCount
if not os.path.isfile(descriptionPyPath):
raise RuntimeError(("Experiment description file %s does not exist or " + \
"is not a file") % (descriptionPyPath,))
mod = imp.load_source("pf_description%d" % g_descriptionImportCount,
descriptionPyPath)
g_descriptionImportCount += 1
if not hasattr(mod, "descriptionInterface"):
raise RuntimeError("Experiment description file %s does not define %s" % \
(descriptionPyPath, "descriptionInterface"))
if not isinstance(mod.descriptionInterface, expdescriptionapi.DescriptionIface):
raise RuntimeError(("Experiment description file %s defines %s but it " + \
"is not DescriptionIface-based") % \
                       (descriptionPyPath, "descriptionInterface"))
return mod
| agpl-3.0 | 945,352,190,241,631,500 | 8,503,486,054,650,098,000 | 34.846154 | 82 | 0.704667 | false |
yvaucher/vertical-ngo | logistic_requisition/wizard/assign_line.py | 3 | 1508 | # -*- coding: utf-8 -*-
#
#
# Copyright 2013-2014 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
from openerp import models, fields, api
class LogisticsRequisitionLineAssign(models.TransientModel):
_name = 'logistic.requisition.line.assign'
_description = 'Assign a logistic requisition line'
logistic_user_id = fields.Many2one(
'res.users',
'Logistics Specialist',
required=True,
help="Logistics Specialist in charge of the "
"Logistics Requisition Line")
@api.multi
def assign(self):
line_ids = self.env.context.get('active_ids')
if not line_ids:
return
lines = self.env['logistic.requisition.line'].browse(line_ids)
lines.write({'logistic_user_id': self.logistic_user_id.id})
return {'type': 'ir.actions.act_window_close'}
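    # Illustration (added comment, not part of the original wizard): the ids of
    # the requisition lines to update are read from the context, so a call such
    # as
    #   wizard.with_context(active_ids=[7, 8]).assign()
    # would set logistic_user_id on lines 7 and 8 (the ids are hypothetical).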
| agpl-3.0 | 365,275,806,104,345,700 | -4,829,417,783,181,398,000 | 35.780488 | 77 | 0.681698 | false |
omnirom/android_external_chromium-org | tools/code_coverage/croc_scan.py | 178 | 4383 | # Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Crocodile source scanners."""
import re
class Scanner(object):
"""Generic source scanner."""
def __init__(self):
"""Constructor."""
self.re_token = re.compile('#')
self.comment_to_eol = ['#']
self.comment_start = None
self.comment_end = None
def ScanLines(self, lines):
"""Scans the lines for executable statements.
Args:
lines: Iterator returning source lines.
Returns:
An array of line numbers which are executable.
"""
exe_lines = []
lineno = 0
in_string = None
in_comment = None
comment_index = None
for line in lines:
lineno += 1
in_string_at_start = in_string
for t in self.re_token.finditer(line):
tokenstr = t.groups()[0]
if in_comment:
# Inside a multi-line comment, so look for end token
if tokenstr == in_comment:
in_comment = None
# Replace comment with spaces
line = (line[:comment_index]
+ ' ' * (t.end(0) - comment_index)
+ line[t.end(0):])
elif in_string:
# Inside a string, so look for end token
if tokenstr == in_string:
in_string = None
elif tokenstr in self.comment_to_eol:
# Single-line comment, so truncate line at start of token
line = line[:t.start(0)]
break
elif tokenstr == self.comment_start:
# Multi-line comment start - end token is comment_end
in_comment = self.comment_end
comment_index = t.start(0)
else:
# Starting a string - end token is same as start
in_string = tokenstr
# If still in comment at end of line, remove comment
if in_comment:
line = line[:comment_index]
        # Next line, delete from the beginning
comment_index = 0
# If line-sans-comments is not empty, claim it may be executable
if line.strip() or in_string_at_start:
exe_lines.append(lineno)
# Return executable lines
return exe_lines
def Scan(self, filename):
"""Reads the file and scans its lines.
Args:
filename: Path to file to scan.
Returns:
An array of line numbers which are executable.
"""
# TODO: All manner of error checking
f = None
try:
f = open(filename, 'rt')
return self.ScanLines(f)
finally:
if f:
f.close()
class PythonScanner(Scanner):
"""Python source scanner."""
def __init__(self):
"""Constructor."""
Scanner.__init__(self)
# TODO: This breaks for strings ending in more than 2 backslashes. Need
# a pattern which counts only an odd number of backslashes, so the last
# one thus escapes the quote.
self.re_token = re.compile(r'(#|\'\'\'|"""|(?<!(?<!\\)\\)["\'])')
self.comment_to_eol = ['#']
self.comment_start = None
self.comment_end = None
class CppScanner(Scanner):
"""C / C++ / ObjC / ObjC++ source scanner."""
def __init__(self):
"""Constructor."""
Scanner.__init__(self)
# TODO: This breaks for strings ending in more than 2 backslashes. Need
# a pattern which counts only an odd number of backslashes, so the last
# one thus escapes the quote.
self.re_token = re.compile(r'(^\s*#|//|/\*|\*/|(?<!(?<!\\)\\)["\'])')
# TODO: Treat '\' at EOL as a token, and handle it as continuing the
# previous line. That is, if in a comment-to-eol, this line is a comment
# too.
# Note that we treat # at beginning of line as a comment, so that we ignore
# preprocessor definitions
self.comment_to_eol = ['//', '#']
self.comment_start = '/*'
self.comment_end = '*/'
def ScanFile(filename, language):
"""Scans a file for executable lines.
Args:
filename: Path to file to scan.
language: Language for file ('C', 'C++', 'python', 'ObjC', 'ObjC++')
Returns:
A list of executable lines, or an empty list if the file was not a handled
language.
"""
if language == 'python':
return PythonScanner().Scan(filename)
elif language in ['C', 'C++', 'ObjC', 'ObjC++']:
return CppScanner().Scan(filename)
# Something we don't handle
return []
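# Illustrative usage (added comment, not part of the original module; the file
# name is hypothetical):
#   executable_lines = ScanFile('src/foo.cc', 'C++')
# which returns the line numbers considered executable, or [] for an unhandled
# language.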
| bsd-3-clause | -179,373,250,171,204,260 | -48,192,449,662,778,430 | 25.72561 | 79 | 0.591832 | false |
arborh/tensorflow | tensorflow/python/training/session_manager.py | 4 | 22050 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Training helper that checkpoints models and creates session."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import time
import numpy as np
from tensorflow.python.client import session
from tensorflow.python.distribute import distribution_strategy_context
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training import checkpoint_management
from tensorflow.python.util.tf_export import tf_export
def _maybe_name(obj):
"""Returns object name if it has one, or a message otherwise.
  This is useful for names that appear in error messages.
Args:
obj: Object to get the name of.
Returns:
name, "None", or a "no name" message.
"""
if obj is None:
return "None"
elif hasattr(obj, "name"):
return obj.name
else:
return "<no name for %s>" % type(obj)
@tf_export(v1=["train.SessionManager"])
class SessionManager(object):
"""Training helper that restores from checkpoint and creates session.
This class is a small wrapper that takes care of session creation and
checkpoint recovery. It also provides functions that to facilitate
coordination among multiple training threads or processes.
* Checkpointing trained variables as the training progresses.
* Initializing variables on startup, restoring them from the most recent
checkpoint after a crash, or wait for checkpoints to become available.
### Usage:
```python
with tf.Graph().as_default():
...add operations to the graph...
# Create a SessionManager that will checkpoint the model in '/tmp/mydir'.
sm = SessionManager()
sess = sm.prepare_session(master, init_op, saver, checkpoint_dir)
# Use the session to train the graph.
while True:
sess.run(<my_train_op>)
```
`prepare_session()` initializes or restores a model. It requires `init_op`
and `saver` as an argument.
A second process could wait for the model to be ready by doing the following:
```python
with tf.Graph().as_default():
...add operations to the graph...
# Create a SessionManager that will wait for the model to become ready.
sm = SessionManager()
sess = sm.wait_for_session(master)
# Use the session to train the graph.
while True:
sess.run(<my_train_op>)
```
`wait_for_session()` waits for a model to be initialized by other processes.
"""
def __init__(self,
local_init_op=None,
ready_op=None,
ready_for_local_init_op=None,
graph=None,
recovery_wait_secs=30,
local_init_run_options=None,
local_init_feed_dict=None):
"""Creates a SessionManager.
    The `local_init_op` is an `Operation` that is always run after a new session
    is created. If `None`, this step is skipped.
The `ready_op` is an `Operation` used to check if the model is ready. The
model is considered ready if that operation returns an empty 1D string
tensor. If the operation returns a non empty 1D string tensor, the elements
are concatenated and used to indicate to the user why the model is not
ready.
The `ready_for_local_init_op` is an `Operation` used to check if the model
is ready to run local_init_op. The model is considered ready if that
operation returns an empty 1D string tensor. If the operation returns a non
empty 1D string tensor, the elements are concatenated and used to indicate
to the user why the model is not ready.
If `ready_op` is `None`, the model is not checked for readiness.
`recovery_wait_secs` is the number of seconds between checks that
the model is ready. It is used by processes to wait for a model to
be initialized or restored. Defaults to 30 seconds.
Args:
local_init_op: An `Operation` run immediately after session creation.
Usually used to initialize tables and local variables.
ready_op: An `Operation` to check if the model is initialized.
ready_for_local_init_op: An `Operation` to check if the model is ready
to run local_init_op.
graph: The `Graph` that the model will use.
recovery_wait_secs: Seconds between checks for the model to be ready.
local_init_run_options: RunOptions to be passed to session.run when
executing the local_init_op.
local_init_feed_dict: Optional session feed dictionary to use when running
the local_init_op.
Raises:
ValueError: If ready_for_local_init_op is not None but local_init_op is
None
"""
# Sets default values of arguments.
if graph is None:
graph = ops.get_default_graph()
self._local_init_op = local_init_op
self._ready_op = ready_op
self._ready_for_local_init_op = ready_for_local_init_op
self._graph = graph
self._recovery_wait_secs = recovery_wait_secs
self._target = None
self._local_init_run_options = local_init_run_options
self._local_init_feed_dict = local_init_feed_dict
if ready_for_local_init_op is not None and local_init_op is None:
raise ValueError("If you pass a ready_for_local_init_op "
"you must also pass a local_init_op "
", ready_for_local_init_op [%s]" %
ready_for_local_init_op)
def _restore_checkpoint(self,
master,
saver=None,
checkpoint_dir=None,
checkpoint_filename_with_path=None,
wait_for_checkpoint=False,
max_wait_secs=7200,
config=None):
"""Creates a `Session`, and tries to restore a checkpoint.
Args:
master: `String` representation of the TensorFlow master to use.
saver: A `Saver` object used to restore a model.
checkpoint_dir: Path to the checkpoint files. The latest checkpoint in the
dir will be used to restore.
checkpoint_filename_with_path: Full file name path to the checkpoint file.
wait_for_checkpoint: Whether to wait for checkpoint to become available.
max_wait_secs: Maximum time to wait for checkpoints to become available.
config: Optional `ConfigProto` proto used to configure the session.
Returns:
A pair (sess, is_restored) where 'is_restored' is `True` if
the session could be restored, `False` otherwise.
Raises:
ValueError: If both checkpoint_dir and checkpoint_filename_with_path are
set.
"""
self._target = master
    # This is required so that we initialize the TPU device before
# restoring from checkpoint since we'll be placing variables on the device
# and TPUInitialize wipes out the memory of the device.
strategy = distribution_strategy_context.get_strategy()
if strategy and hasattr(strategy.extended,
"_experimental_initialize_system"):
strategy.extended._experimental_initialize_system() # pylint: disable=protected-access
sess = session.Session(self._target, graph=self._graph, config=config)
if checkpoint_dir and checkpoint_filename_with_path:
raise ValueError("Can not provide both checkpoint_dir and "
"checkpoint_filename_with_path.")
# If either saver or checkpoint_* is not specified, cannot restore. Just
# return.
if not saver or not (checkpoint_dir or checkpoint_filename_with_path):
return sess, False
if checkpoint_filename_with_path:
saver.restore(sess, checkpoint_filename_with_path)
return sess, True
# Waits up until max_wait_secs for checkpoint to become available.
wait_time = 0
ckpt = checkpoint_management.get_checkpoint_state(checkpoint_dir)
while not ckpt or not ckpt.model_checkpoint_path:
if wait_for_checkpoint and wait_time < max_wait_secs:
logging.info("Waiting for checkpoint to be available.")
time.sleep(self._recovery_wait_secs)
wait_time += self._recovery_wait_secs
ckpt = checkpoint_management.get_checkpoint_state(checkpoint_dir)
else:
return sess, False
# Loads the checkpoint.
saver.restore(sess, ckpt.model_checkpoint_path)
saver.recover_last_checkpoints(ckpt.all_model_checkpoint_paths)
return sess, True
def prepare_session(self,
master,
init_op=None,
saver=None,
checkpoint_dir=None,
checkpoint_filename_with_path=None,
wait_for_checkpoint=False,
max_wait_secs=7200,
config=None,
init_feed_dict=None,
init_fn=None):
"""Creates a `Session`. Makes sure the model is ready to be used.
Creates a `Session` on 'master'. If a `saver` object is passed in, and
`checkpoint_dir` points to a directory containing valid checkpoint
files, then it will try to recover the model from checkpoint. If
no checkpoint files are available, and `wait_for_checkpoint` is
`True`, then the process would check every `recovery_wait_secs`,
up to `max_wait_secs`, for recovery to succeed.
If the model cannot be recovered successfully then it is initialized by
running the `init_op` and calling `init_fn` if they are provided.
The `local_init_op` is also run after init_op and init_fn, regardless of
whether the model was recovered successfully, but only if
`ready_for_local_init_op` passes.
If the model is recovered from a checkpoint it is assumed that all
global variables have been initialized, in particular neither `init_op`
nor `init_fn` will be executed.
It is an error if the model cannot be recovered and no `init_op`
or `init_fn` or `local_init_op` are passed.
Args:
master: `String` representation of the TensorFlow master to use.
init_op: Optional `Operation` used to initialize the model.
saver: A `Saver` object used to restore a model.
checkpoint_dir: Path to the checkpoint files. The latest checkpoint in the
dir will be used to restore.
checkpoint_filename_with_path: Full file name path to the checkpoint file.
wait_for_checkpoint: Whether to wait for checkpoint to become available.
max_wait_secs: Maximum time to wait for checkpoints to become available.
config: Optional `ConfigProto` proto used to configure the session.
init_feed_dict: Optional dictionary that maps `Tensor` objects to feed
values. This feed dictionary is passed to the session `run()` call when
running the init op.
init_fn: Optional callable used to initialize the model. Called after the
optional `init_op` is called. The callable must accept one argument,
the session being initialized.
Returns:
A `Session` object that can be used to drive the model.
Raises:
RuntimeError: If the model cannot be initialized or recovered.
ValueError: If both checkpoint_dir and checkpoint_filename_with_path are
set.
"""
sess, is_loaded_from_checkpoint = self._restore_checkpoint(
master,
saver,
checkpoint_dir=checkpoint_dir,
checkpoint_filename_with_path=checkpoint_filename_with_path,
wait_for_checkpoint=wait_for_checkpoint,
max_wait_secs=max_wait_secs,
config=config)
if not is_loaded_from_checkpoint:
if init_op is None and not init_fn and self._local_init_op is None:
raise RuntimeError("Model is not initialized and no init_op or "
"init_fn or local_init_op was given")
if init_op is not None:
sess.run(init_op, feed_dict=init_feed_dict)
if init_fn:
init_fn(sess)
local_init_success, msg = self._try_run_local_init_op(sess)
if not local_init_success:
raise RuntimeError(
"Init operations did not make model ready for local_init. "
"Init op: %s, init fn: %s, error: %s" % (_maybe_name(init_op),
init_fn,
msg))
is_ready, msg = self._model_ready(sess)
if not is_ready:
raise RuntimeError(
"Init operations did not make model ready. "
"Init op: %s, init fn: %s, local_init_op: %s, error: %s" %
(_maybe_name(init_op), init_fn, self._local_init_op, msg))
return sess
def recover_session(self,
master,
saver=None,
checkpoint_dir=None,
checkpoint_filename_with_path=None,
wait_for_checkpoint=False,
max_wait_secs=7200,
config=None):
"""Creates a `Session`, recovering if possible.
Creates a new session on 'master'. If the session is not initialized
and can be recovered from a checkpoint, recover it.
Args:
master: `String` representation of the TensorFlow master to use.
saver: A `Saver` object used to restore a model.
checkpoint_dir: Path to the checkpoint files. The latest checkpoint in the
dir will be used to restore.
checkpoint_filename_with_path: Full file name path to the checkpoint file.
wait_for_checkpoint: Whether to wait for checkpoint to become available.
max_wait_secs: Maximum time to wait for checkpoints to become available.
config: Optional `ConfigProto` proto used to configure the session.
Returns:
A pair (sess, initialized) where 'initialized' is `True` if
the session could be recovered and initialized, `False` otherwise.
Raises:
ValueError: If both checkpoint_dir and checkpoint_filename_with_path are
set.
"""
sess, is_loaded_from_checkpoint = self._restore_checkpoint(
master,
saver,
checkpoint_dir=checkpoint_dir,
checkpoint_filename_with_path=checkpoint_filename_with_path,
wait_for_checkpoint=wait_for_checkpoint,
max_wait_secs=max_wait_secs,
config=config)
# Always try to run local_init_op
local_init_success, msg = self._try_run_local_init_op(sess)
if not is_loaded_from_checkpoint:
# Do not need to run checks for readiness
return sess, False
restoring_file = checkpoint_dir or checkpoint_filename_with_path
if not local_init_success:
logging.info(
"Restoring model from %s did not make model ready for local init:"
" %s", restoring_file, msg)
return sess, False
is_ready, msg = self._model_ready(sess)
if not is_ready:
logging.info("Restoring model from %s did not make model ready: %s",
restoring_file, msg)
return sess, False
logging.info("Restored model from %s", restoring_file)
return sess, is_loaded_from_checkpoint
def wait_for_session(self, master, config=None, max_wait_secs=float("Inf")):
"""Creates a new `Session` and waits for model to be ready.
Creates a new `Session` on 'master'. Waits for the model to be
initialized or recovered from a checkpoint. It's expected that
another thread or process will make the model ready, and that this
is intended to be used by threads/processes that participate in a
distributed training configuration where a different thread/process
is responsible for initializing or recovering the model being trained.
NB: The amount of time this method waits for the session is bounded
by max_wait_secs. By default, this function will wait indefinitely.
Args:
master: `String` representation of the TensorFlow master to use.
config: Optional ConfigProto proto used to configure the session.
max_wait_secs: Maximum time to wait for the session to become available.
Returns:
A `Session`. May be None if the operation exceeds the timeout
specified by config.operation_timeout_in_ms.
Raises:
tf.DeadlineExceededError: if the session is not available after
max_wait_secs.
"""
self._target = master
if max_wait_secs is None:
max_wait_secs = float("Inf")
timer = _CountDownTimer(max_wait_secs)
while True:
sess = session.Session(self._target, graph=self._graph, config=config)
not_ready_msg = None
not_ready_local_msg = None
local_init_success, not_ready_local_msg = self._try_run_local_init_op(
sess)
if local_init_success:
# Successful if local_init_op is None, or ready_for_local_init_op passes
is_ready, not_ready_msg = self._model_ready(sess)
if is_ready:
return sess
self._safe_close(sess)
# Do we have enough time left to try again?
      remaining_secs_after_wait = (
          timer.secs_remaining() - self._recovery_wait_secs)
      if remaining_secs_after_wait < 0:
raise errors.DeadlineExceededError(
None, None,
"Session was not ready after waiting %d secs." % (max_wait_secs,))
logging.info("Waiting for model to be ready. "
"Ready_for_local_init_op: %s, ready: %s",
not_ready_local_msg, not_ready_msg)
time.sleep(self._recovery_wait_secs)
def _safe_close(self, sess):
"""Closes a session without raising an exception.
Just like sess.close() but ignores exceptions.
Args:
sess: A `Session`.
"""
# pylint: disable=broad-except
try:
sess.close()
except Exception:
# Intentionally not logging to avoid user complaints that
# they get cryptic errors. We really do not care that Close
# fails.
pass
# pylint: enable=broad-except
def _model_ready(self, sess):
"""Checks if the model is ready or not.
Args:
sess: A `Session`.
Returns:
A tuple (is_ready, msg), where is_ready is True if ready and False
otherwise, and msg is `None` if the model is ready, a `String` with the
reason why it is not ready otherwise.
"""
return _ready(self._ready_op, sess, "Model not ready")
def _model_ready_for_local_init(self, sess):
"""Checks if the model is ready to run local_init_op.
Args:
sess: A `Session`.
Returns:
A tuple (is_ready, msg), where is_ready is True if ready to run
local_init_op and False otherwise, and msg is `None` if the model is
ready to run local_init_op, a `String` with the reason why it is not ready
otherwise.
"""
return _ready(self._ready_for_local_init_op, sess,
"Model not ready for local init")
def _try_run_local_init_op(self, sess):
"""Tries to run _local_init_op, if not None, and is ready for local init.
Args:
sess: A `Session`.
Returns:
A tuple (is_successful, msg), where is_successful is True if
_local_init_op is None, or we ran _local_init_op, and False otherwise;
and msg is a `String` with the reason why the model was not ready to run
local init.
"""
if self._local_init_op is not None:
is_ready_for_local_init, msg = self._model_ready_for_local_init(sess)
if is_ready_for_local_init:
logging.info("Running local_init_op.")
sess.run(self._local_init_op, feed_dict=self._local_init_feed_dict,
options=self._local_init_run_options)
logging.info("Done running local_init_op.")
return True, None
else:
return False, msg
return True, None
def _ready(op, sess, msg):
"""Checks if the model is ready or not, as determined by op.
Args:
op: An op, either _ready_op or _ready_for_local_init_op, which defines the
readiness of the model.
sess: A `Session`.
msg: A message to log to warning if not ready
Returns:
A tuple (is_ready, msg), where is_ready is True if ready and False
otherwise, and msg is `None` if the model is ready, a `String` with the
reason why it is not ready otherwise.
"""
if op is None:
return True, None
else:
try:
ready_value = sess.run(op)
# The model is considered ready if ready_op returns an empty 1-D tensor.
# Also compare to `None` and dtype being int32 for backward
# compatibility.
if (ready_value is None or ready_value.dtype == np.int32 or
ready_value.size == 0):
return True, None
else:
# TODO(sherrym): If a custom ready_op returns other types of tensor,
# or strings other than variable names, this message could be
# confusing.
non_initialized_varnames = ", ".join(
[i.decode("utf-8") for i in ready_value])
return False, "Variables not initialized: " + non_initialized_varnames
except errors.FailedPreconditionError as e:
if "uninitialized" not in str(e):
logging.warning("%s : error [%s]", msg, str(e))
raise e
return False, str(e)
class _CountDownTimer(object):
def __init__(self, duration_secs):
self._start_time_secs = time.time()
self._duration_secs = duration_secs
def secs_remaining(self):
diff = self._duration_secs - (time.time() - self._start_time_secs)
return max(0, diff)
| apache-2.0 | 5,686,560,652,814,320,000 | 2,815,097,770,678,838,300 | 38.304813 | 93 | 0.653651 | false |
andreparames/odoo | addons/hr_timesheet_invoice/report/report_analytic.py | 299 | 5164 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields,osv
from openerp import tools
from openerp.addons.decimal_precision import decimal_precision as dp
class report_analytic_account_close(osv.osv):
_name = "report.analytic.account.close"
_description = "Analytic account to close"
_auto = False
_columns = {
'name': fields.many2one('account.analytic.account', 'Analytic account', readonly=True),
'state': fields.char('Status', readonly=True),
'partner_id': fields.many2one('res.partner', 'Partner', readonly=True),
'quantity': fields.float('Quantity', readonly=True),
'quantity_max': fields.float('Max. Quantity', readonly=True),
'balance': fields.float('Balance', readonly=True),
'date_deadline': fields.date('Deadline', readonly=True),
}
def init(self, cr):
tools.drop_view_if_exists(cr, 'report_analytic_account_close')
cr.execute("""
create or replace view report_analytic_account_close as (
select
a.id as id,
a.id as name,
a.state as state,
sum(l.unit_amount) as quantity,
sum(l.amount) as balance,
a.partner_id as partner_id,
a.quantity_max as quantity_max,
a.date as date_deadline
from
account_analytic_line l
right join
account_analytic_account a on (l.account_id=a.id)
group by
a.id,a.state, a.quantity_max,a.date,a.partner_id
having
(a.quantity_max>0 and (sum(l.unit_amount)>=a.quantity_max)) or
a.date <= current_date
)""")
class report_account_analytic_line_to_invoice(osv.osv):
_name = "report.account.analytic.line.to.invoice"
_description = "Analytic lines to invoice report"
_auto = False
_columns = {
'name': fields.char('Year', required=False, readonly=True),
'product_id':fields.many2one('product.product', 'Product', readonly=True),
'account_id':fields.many2one('account.analytic.account', 'Analytic account', readonly=True),
'product_uom_id':fields.many2one('product.uom', 'Unit of Measure', readonly=True),
'unit_amount': fields.float('Units', readonly=True),
'sale_price': fields.float('Sale price', readonly=True, digits_compute=dp.get_precision('Product Price')),
'amount': fields.float('Amount', readonly=True, digits_compute=dp.get_precision('Account')),
'month':fields.selection([('01','January'), ('02','February'), ('03','March'), ('04','April'), ('05','May'), ('06','June'),
('07','July'), ('08','August'), ('09','September'), ('10','October'), ('11','November'), ('12','December')],'Month', readonly=True),
}
_order = 'name desc, product_id asc, account_id asc'
def init(self, cr):
tools.drop_view_if_exists(cr, 'report_account_analytic_line_to_invoice')
cr.execute("""
CREATE OR REPLACE VIEW report_account_analytic_line_to_invoice AS (
SELECT
DISTINCT(to_char(l.date,'MM')) as month,
to_char(l.date, 'YYYY') as name,
MIN(l.id) AS id,
l.product_id,
l.account_id,
SUM(l.amount) AS amount,
SUM(l.unit_amount*t.list_price) AS sale_price,
SUM(l.unit_amount) AS unit_amount,
l.product_uom_id
FROM
account_analytic_line l
left join
product_product p on (l.product_id=p.id)
left join
product_template t on (p.product_tmpl_id=t.id)
WHERE
(invoice_id IS NULL) and (to_invoice IS NOT NULL)
GROUP BY
to_char(l.date, 'YYYY'), to_char(l.date,'MM'), product_id, product_uom_id, account_id
)
""")
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 7,511,067,192,795,093,000 | -8,210,517,344,149,626,000 | 46.376147 | 166 | 0.556158 | false |
xtiankisutsa/MARA_Framework | tools/androwarn/androwarn/core/core.py | 1 | 16646 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# This file is part of Androwarn.
#
# Copyright (C) 2012, Thomas Debize <tdebize at mail.com>
# All rights reserved.
#
# Androwarn is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Androwarn is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Androwarn. If not, see <http://www.gnu.org/licenses/>.
# Global imports
import re, logging
# Androguard imports
from androguard.core.analysis.analysis import *
# Androwarn modules import
from androwarn.constants.api_constants import *
# Constants
ERROR_VALUE_NOT_FOUND = 'N/A'
ERROR_CONSTANT_NAME_NOT_FOUND = 'N/A'
CONST_STRING = 'const-string'
CONST = 'const'
MOVE = 'move'
MOVE_RESULT = 'move-result'
APUT = 'aput'
INVOKE = 'invoke'
INVOKE_NO_REGISTER = 'invoke-no-register'
INVOKE_2_REGISTERS = 'invoke-2-registers'
NEW_INSTANCE = 'new-instance'
# Logger
log = logging.getLogger('log')
# Instruction matcher
def match_current_instruction(current_instruction, registers_found) :
"""
@param current_instruction : the current instruction to be analyzed
@param registers_found : a dictionary of registers recovered so far
@rtype : the instruction name from the constants above, the local register number and its value, an updated version of the registers_found
"""
p_const = re.compile('^const(?:\/4|\/16|\/high16|-wide(?:\/16|\/32)|-wide\/high16|)? v([0-9]+), \#\+?(-?[0-9]+(?:\.[0-9]+)?)$')
p_const_string = re.compile("^const-string(?:||-jumbo) v([0-9]+), '(.*)'$")
p_move = re.compile('^move(?:|\/from16|-wide(?:\/from16|\/16)|-object(?:|\/from16|\/16))? v([0-9]+), (v[0-9]+)$')
p_move_result = re.compile('^move(?:-result(?:|-wide|-object)|-exception)? v([0-9]+)$')
p_aput = re.compile('^aput(?:-wide|-object|-boolean|-byte|-char|-short|) v([0-9]+), v([0-9]+), v([0-9]+)$')
p_invoke = re.compile('^invoke-(?:static|virtual|direct|super|interface|interface-range|virtual-quick|super-quick) v([0-9]+), (L(?:.*);->.*)$')
p_invoke_2_registers = re.compile('^invoke-(?:static|virtual|direct|super|interface|interface-range|virtual-quick|super-quick) v([0-9]+), v([0-9]+), (L(?:.*);->.*)$')
p_invoke_no_register = re.compile('^invoke-(?:static|virtual|direct|super|interface|interface-range|virtual-quick|super-quick) (L(?:.*);->.*)$')
p_new_instance = re.compile('^new-instance v([0-9]+), (L(?:.*);)$')
# String concat
current_instruction = "%s %s" % (current_instruction.get_name(), current_instruction.get_output())
# Returned values init
instruction_name = ''
local_register_number = -1
local_register_value = -1
if p_const_string.match(current_instruction) :
#print p_const_string.match(current_instruction).groups()
instruction_name = CONST_STRING
register_number = p_const_string.match(current_instruction).groups()[0]
register_value = p_const_string.match(current_instruction).groups()[1]
if not(register_number in registers_found) :
registers_found[register_number] = register_value
else :
old_string = registers_found[register_number]
new_string = "%s %s" % (str(register_value), str(old_string))
registers_found[register_number] = new_string
local_register_number = register_number
local_register_value = register_value
if p_const.match(current_instruction) :
#print p_const.match(current_instruction).groups()
instruction_name = CONST
register_number = p_const.match(current_instruction).groups()[0]
register_value = p_const.match(current_instruction).groups()[1]
if not(register_number in registers_found) :
registers_found[register_number] = register_value
local_register_number = register_number
local_register_value = register_value
if p_move.match(current_instruction) :
#print p_move.match(current_instruction).groups()
instruction_name = MOVE
register_number = p_move.match(current_instruction).groups()[0]
register_value = p_move.match(current_instruction).groups()[1]
if not(register_number in registers_found) :
registers_found[register_number] = register_value
local_register_number = register_number
local_register_value = register_value
if p_move_result.match(current_instruction) :
#print p_move_result.match(current_instruction).groups()
instruction_name = MOVE_RESULT
register_number = p_move_result.match(current_instruction).groups()[0]
register_value = ''
if not(register_number in registers_found) :
registers_found[register_number] = register_value
local_register_number = register_number
local_register_value = register_value
#print "number returned %s" % local_register_number
#print "value returned %s" % local_register_value
if p_invoke.match(current_instruction) :
#print p_invoke.match(current_instruction).groups()
instruction_name = INVOKE
register_number = p_invoke.match(current_instruction).groups()[0]
register_value = p_invoke.match(current_instruction).groups()[1]
if not(register_number in registers_found) :
registers_found[register_number] = register_value
local_register_number = register_number
local_register_value = register_value
if p_invoke_no_register.match(current_instruction) :
#print p_invoke.match(current_instruction).groups()
instruction_name = INVOKE_NO_REGISTER
register_number = ''
register_value = p_invoke_no_register.match(current_instruction).groups()[0]
local_register_number = register_number
local_register_value = register_value
if p_invoke_2_registers.match(current_instruction) :
#print p_invoke.match(current_instruction).groups()
instruction_name = INVOKE_NO_REGISTER
register_number = p_invoke_2_registers.match(current_instruction).groups()[0]
register_value = p_invoke_2_registers.match(current_instruction).groups()[1]
local_register_number = register_number
local_register_value = register_value
if p_new_instance.match(current_instruction) :
#print p_new_instance.match(current_instruction).groups()
instruction_name = NEW_INSTANCE
register_number = p_new_instance.match(current_instruction).groups()[0]
register_value = p_new_instance.match(current_instruction).groups()[1]
if not(register_number in registers_found) :
registers_found[register_number] = register_value
local_register_number = register_number
local_register_value = register_value
if p_aput.match(current_instruction) :
#print p_aput.match(current_instruction).groups()
instruction_name = APUT
register_object_reference = p_aput.match(current_instruction).groups()[0]
register_array_reference = p_aput.match(current_instruction).groups()[1]
register_element_index = p_aput.match(current_instruction).groups()[2]
local_register_number = register_object_reference
local_register_value = register_array_reference
return instruction_name, local_register_number, local_register_value, registers_found
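# Illustrative sketch (not part of the original Androwarn module): the matcher
# above only needs an object exposing get_name() and get_output(), like the
# Androguard instruction objects it normally receives. The stub and the sample
# values below are made up purely to show a single const-string being parsed.
class _ExampleInstruction:
    def __init__(self, name, output):
        self._name = name
        self._output = output
    def get_name(self):
        return self._name
    def get_output(self):
        return self._output

def _example_match_const_string():
    instr = _ExampleInstruction('const-string', "v5, 'http://example.com'")
    name, number, value, registers = match_current_instruction(instr, {})
    # name == CONST_STRING, number == '5', value == 'http://example.com'
    # registers == {'5': 'http://example.com'}
    return name, number, value, registers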
# Backtrace registers #
def find_call_index_in_code_list(index_to_find, instruction_list):
"""
@param index_to_find : index of the method call
    @param instruction_list : instruction list of the parent method called
@rtype : the index of the method call in the instruction listing
"""
idx = 0
for i in instruction_list :
if index_to_find <= idx :
#print "[+] code offset found at the index " + str(instruction_list.index(i))
return instruction_list.index(i)
else :
idx += i.get_length()
# in case of failure, return an inconsistent value
return ERROR_INDEX_NOT_FOUND
def backtrace_registers_before_call(x, method, index_to_find) :
"""
@param x : a VMAnalysis instance
    @param method : the method instance whose bytecode is analyzed
@param index_to_find : index of the matching method
@rtype : an ordered list of dictionaries of each register content [{ 'register #': 'value' }, { 'register #': 'value' } ...]
"""
registers = {}
code = method.get_code()
#code.show()
bc = code.get_bc()
instruction_list = [ i for i in bc.get_instructions() ]
found_index = find_call_index_in_code_list(index_to_find, instruction_list)
if (found_index < 0) :
log.error("The call index in the code list can not be found")
return 0
else :
# Initialize the returned list of dictionaries
registers_final = []
# Initialize the harvesting dictionary
registers_found = {}
# List the register indexes related to the method call
relevant_registers = relevant_registers_for_the_method(instruction_list[found_index])
#print relevant_registers
i = int(found_index) - 1 # start index
while ((all_relevant_registers_filled(registers_found,relevant_registers) != True) and (i >= 0)) :
#current_instruction = instruction_list[i].show_buff(0)
#print current_instruction
current_instruction = instruction_list[i]
instruction_name, local_register_number, local_register_value, registers_found = match_current_instruction(current_instruction, registers_found)
if cmp(instruction_name, APUT) == 0:
try :
list_index_to_be_changed = relevant_registers.index(str(local_register_value))
#print "index_to_be_changed %s" % list_index_to_be_changed
                    del(relevant_registers[int(list_index_to_be_changed)])
relevant_registers.insert(list_index_to_be_changed, local_register_number)
log.debug("New relevant_registers %s" % relevant_registers)
except :
log.debug("'%s' does not exist anymore in the relevant_registers list" % local_register_value)
if (cmp(instruction_name, MOVE_RESULT) == 0) and (local_register_number in relevant_registers):
try:
#past_instruction = instruction_list[i-1].show_buff(0)
#print past_instruction
past_instruction = instruction_list[i-1]
p_instruction_name, p_local_register_number, p_local_register_value, registers_found = match_current_instruction(past_instruction, registers_found)
if cmp(p_instruction_name, INVOKE_NO_REGISTER) == 0 :
registers_found[local_register_number] = p_local_register_value
else:
list_index_to_be_changed = relevant_registers.index(str(local_register_number))
del(relevant_registers[int(list_index_to_be_changed)])
relevant_registers.insert(list_index_to_be_changed, p_local_register_number)
log.debug("New relevant_registers %s" % relevant_registers)
except:
log.debug("'%s' does not exist anymore in the relevant_registers list" % local_register_value)
i = i - 1
#log.info('Registers found during the analysis %s' % registers_found)
final_answer = all_relevant_registers_filled(registers_found,relevant_registers)
log.debug("Are all relevant registers filled ? %s" % str(final_answer))
for i in relevant_registers :
try:
register_number = i
#print register_number
register_value = registers_found[i]
#print register_value
temp_dict = { register_number : register_value }
registers_final.append(temp_dict)
except KeyError:
registers_final = []
log.debug("KeyError exception : The value of the register # %s could not be found for the relevant registers %s" % (register_number, relevant_registers))
break
return registers_final
def extract_register_index_out_splitted_values(registers_raw_list_splitted) :
"""
@param : registers_raw_list_splitted : a list of registers still containing the 'v' prefix [' v1 ', ' v2 ' ...]
@rtype : an ordered list of register indexes ['1', '2' ...]
"""
relevant_registers = []
# Trim the values
registers_raw_list_splitted[:] = (value.strip() for value in registers_raw_list_splitted if len(value) > 0)
for value in registers_raw_list_splitted :
# Remove that 'v'
p_register_index_out_of_split = re.compile('^v([0-9]+)$')
if p_register_index_out_of_split.match(value) :
#print p_register_index_out_of_split.match(value).groups()
register_index = p_register_index_out_of_split.match(value).groups()[0]
relevant_registers.append(register_index)
else :
relevant_registers.append('N/A')
return relevant_registers
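# Illustrative sketch (not part of the original module): the helper above trims
# raw 'vN' operands down to bare register indexes, and anything that is not a
# register comes back as 'N/A'. The operand strings below are made up.
def _example_extract_register_indexes():
    return extract_register_index_out_splitted_values([' v1 ', ' v19 ', ' p0 '])
    # -> ['1', '19', 'N/A']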
def relevant_registers_for_the_method(instruction) :
"""
    @param instruction : the invoke instruction to inspect
@rtype : an ordered list of register indexes related to that method call
"""
relevant_registers = []
current_instruction_name = instruction.get_name()
current_instruction = instruction.show_buff(0)
p_invoke_name = re.compile('^invoke-(?:static|virtual|direct|super|interface|interface-range|virtual-quick|super-quick)$')
p_invoke_range_name = re.compile('^invoke-(?:static|virtual|direct|super|interface|interface-range|virtual-quick|super-quick)(?:\/range)$')
if p_invoke_name.match(current_instruction_name) :
p_invoke_registers = re.compile('(v[0-9]+),')
if p_invoke_registers.findall(current_instruction) :
registers_raw_list_splitted = p_invoke_registers.findall(current_instruction)
relevant_registers = extract_register_index_out_splitted_values(registers_raw_list_splitted)
if p_invoke_range_name.match(current_instruction_name) :
        # We're facing an implicit range declaration, for instance "invoke v19..v20"
p_invoke_registers_range = re.compile('^v([0-9]+) ... v([0-9]+), L.*$')
if p_invoke_registers_range.match(current_instruction) :
register_start_number = p_invoke_registers_range.match(current_instruction).groups()[0]
register_end_number = p_invoke_registers_range.match(current_instruction).groups()[1]
if int(register_start_number) > int(register_end_number) :
log.error("invoke-kind/range incoherent: # of the start register is lower than the end one")
else :
                relevant_registers = [ str(i) for i in xrange(int(register_start_number), int(register_end_number)+1)]
# +1 because range does not provide the higher boundary value
return relevant_registers
def all_relevant_registers_filled(registers, relevant_registers) :
"""
@param registers : a dictionary of each register content { 'register #': 'value' }
@param relevant_registers : an ordered list of register indexes related to that method call
@rtype : True if all the relevant_registers are filled, False if not
"""
answer = True
for i in relevant_registers :
# assert a False answer for null registers from the "move-result" instruction
if not(i in registers) or (i in registers and len(registers[i]) < 1) :
answer = False
return answer
def get_register_value(index, registers) :
"""
@param index : integer value of the index
@param registers : an ordered list of register indexes related to that method call
@rtype : a value casted in string
"""
# Index - 1, list starts at index 0
if index <= len(registers) :
dict = registers[index]
return dict.values()[0]
else :
return ERROR_VALUE_NOT_FOUND
def get_constants_name_from_value(constant_dict, value) :
"""
@param constant_dict : constant dictionary to consider
@param value : value's constant name to retrieve
@rtype : a string
"""
try:
return constant_dict[value]
except KeyError:
log.error("The constant name corresponding to the value '%s' can not be found in the dictionary '%s'" % (value, constant_dict))
return ERROR_CONSTANT_NAME_NOT_FOUND
def data_flow_analysis(tab, result, x) :
"""
@param tab : structural analysis results tab
@param result : current iteration
@param x : a VMAnalysis instance
@rtype : an ordered list of dictionaries of each register content [{ 'register #': 'value' }, { 'register #': 'value' } ...]
"""
method = tab[result].get_method()
method_call_index_to_find = tab[result].get_idx()
registers = backtrace_registers_before_call(x, method, method_call_index_to_find)
#log.info("Class '%s' - Method '%s' - register state before call %s" % (tab[result].get_class_name(),tab[result].get_name(), registers))
class_str = "Class '%s'" % tab[result].get_class_name()
method_str = "Method '%s'" % tab[result].get_name()
regs_str = "Register state before call %s" % registers
formatted_str = "{0:50}- {1:35}- {2:30}".format(class_str,method_str, regs_str)
log.info(formatted_str)
return registers
#########################
| lgpl-3.0 | 2,029,867,343,228,669,700 | 272,022,381,116,945,820 | 34.568376 | 168 | 0.70191 | false |
Mhynlo/SickRage | lib/github/Label.py | 72 | 4251 | # -*- coding: utf-8 -*-
# ########################## Copyrights and license ############################
# #
# Copyright 2012 Vincent Jacques <vincent@vincent-jacques.net> #
# Copyright 2012 Zearin <zearin@gonk.net> #
# Copyright 2013 AKFish <akfish@gmail.com> #
# Copyright 2013 Vincent Jacques <vincent@vincent-jacques.net> #
# Copyright 2013 martinqt <m.ki2@laposte.net> #
# #
# This file is part of PyGithub. http://jacquev6.github.com/PyGithub/ #
# #
# PyGithub is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Lesser General Public License as published by the Free #
# Software Foundation, either version 3 of the License, or (at your option) #
# any later version. #
# #
# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY #
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS #
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more #
# details. #
# #
# You should have received a copy of the GNU Lesser General Public License #
# along with PyGithub. If not, see <http://www.gnu.org/licenses/>. #
# #
# ##############################################################################
import urllib
import github.GithubObject
class Label(github.GithubObject.CompletableGithubObject):
"""
This class represents Labels. The reference can be found here http://developer.github.com/v3/issues/labels/
"""
@property
def color(self):
"""
:type: string
"""
self._completeIfNotSet(self._color)
return self._color.value
@property
def name(self):
"""
:type: string
"""
self._completeIfNotSet(self._name)
return self._name.value
@property
def url(self):
"""
:type: string
"""
self._completeIfNotSet(self._url)
return self._url.value
def delete(self):
"""
:calls: `DELETE /repos/:owner/:repo/labels/:name <http://developer.github.com/v3/issues/labels>`_
:rtype: None
"""
headers, data = self._requester.requestJsonAndCheck(
"DELETE",
self.url
)
def edit(self, name, color):
"""
:calls: `PATCH /repos/:owner/:repo/labels/:name <http://developer.github.com/v3/issues/labels>`_
:param name: string
:param color: string
:rtype: None
"""
assert isinstance(name, (str, unicode)), name
assert isinstance(color, (str, unicode)), color
post_parameters = {
"name": name,
"color": color,
}
headers, data = self._requester.requestJsonAndCheck(
"PATCH",
self.url,
input=post_parameters
)
self._useAttributes(data)
@property
def _identity(self):
return urllib.quote(self.name)
def _initAttributes(self):
self._color = github.GithubObject.NotSet
self._name = github.GithubObject.NotSet
self._url = github.GithubObject.NotSet
def _useAttributes(self, attributes):
if "color" in attributes: # pragma no branch
self._color = self._makeStringAttribute(attributes["color"])
if "name" in attributes: # pragma no branch
self._name = self._makeStringAttribute(attributes["name"])
if "url" in attributes: # pragma no branch
self._url = self._makeStringAttribute(attributes["url"])
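# Illustrative sketch (not part of PyGithub itself): a typical way a Label ends
# up being used. `repo` is assumed to be a github.Repository.Repository obtained
# elsewhere, e.g. Github(token).get_repo("owner/name"); the label name and color
# below are made up.
def _example_rename_label(repo):
    label = repo.get_label("bug") # GET /repos/:owner/:repo/labels/bug
    label.edit("confirmed-bug", "e11d21") # PATCH with the new name and color
    return label.url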
| gpl-3.0 | 1,634,408,082,040,062,500 | -4,479,837,245,360,565,000 | 38.728972 | 111 | 0.490238 | false |
florentchandelier/zipline | tests/data/bundles/test_csvdir.py | 1 | 5092 | from __future__ import division
import numpy as np
import pandas as pd
from zipline.utils.calendars import get_calendar
from zipline.data.bundles import ingest, load, bundles
from zipline.testing import test_resource_path
from zipline.testing.fixtures import ZiplineTestCase
from zipline.testing.predicates import assert_equal
from zipline.utils.functional import apply
class CSVDIRBundleTestCase(ZiplineTestCase):
symbols = 'AAPL', 'IBM', 'KO', 'MSFT'
asset_start = pd.Timestamp('2012-01-03', tz='utc')
asset_end = pd.Timestamp('2014-12-31', tz='utc')
bundle = bundles['csvdir']
calendar = get_calendar(bundle.calendar_name)
start_date = calendar.first_session
end_date = calendar.last_session
api_key = 'ayylmao'
columns = 'open', 'high', 'low', 'close', 'volume'
def _expected_data(self, asset_finder):
sids = {
symbol: asset_finder.lookup_symbol(
symbol,
self.asset_start,
).sid
for symbol in self.symbols
}
def per_symbol(symbol):
df = pd.read_csv(
test_resource_path('csvdir_samples', 'csvdir',
'daily', symbol + '.csv.gz'),
parse_dates=['date'],
index_col='date',
usecols=[
'open',
'high',
'low',
'close',
'volume',
'date',
'dividend',
'split',
],
na_values=['NA'],
)
df['sid'] = sids[symbol]
return df
all_ = pd.concat(map(per_symbol, self.symbols)).set_index(
'sid',
append=True,
).unstack()
# fancy list comprehension with statements
@list
@apply
def pricing():
for column in self.columns:
vs = all_[column].values
if column == 'volume':
vs = np.nan_to_num(vs)
yield vs
adjustments = [[5572, 5576, 5595, 5634, 5639, 5659, 5698, 5699,
5701, 5702, 5722, 5760, 5764, 5774, 5821, 5822,
5829, 5845, 5884, 5885, 5888, 5908, 5947, 5948,
5951, 5972, 6011, 6020, 6026, 6073, 6080, 6096,
6135, 6136, 6139, 6157, 6160, 6198, 6199, 6207,
6223, 6263, 6271, 6277],
[5572, 5576, 5595, 5634, 5639, 5659, 5698, 5699,
5701, 5702, 5722, 5760, 5764, 5774, 5821, 5822,
5829, 5845, 5884, 5885, 5888, 5908, 5947, 5948,
5951, 5972, 6011, 6020, 6026, 6073, 6080, 6096,
6135, 6136, 6139, 6157, 6160, 6198, 6199, 6207,
6223, 6263, 6271, 6277],
[5572, 5576, 5595, 5634, 5639, 5659, 5698, 5699,
5701, 5702, 5722, 5760, 5764, 5774, 5821, 5822,
5829, 5845, 5884, 5885, 5888, 5908, 5947, 5948,
5951, 5972, 6011, 6020, 6026, 6073, 6080, 6096,
6135, 6136, 6139, 6157, 6160, 6198, 6199, 6207,
6223, 6263, 6271, 6277],
[5572, 5576, 5595, 5634, 5639, 5659, 5698, 5699,
5701, 5702, 5722, 5760, 5764, 5774, 5821, 5822,
5829, 5845, 5884, 5885, 5888, 5908, 5947, 5948,
5951, 5972, 6011, 6020, 6026, 6073, 6080, 6096,
6135, 6136, 6139, 6157, 6160, 6198, 6199, 6207,
6223, 6263, 6271, 6277],
[5701, 6157]]
return pricing, adjustments
def test_bundle(self):
environ = {
'CSVDIR': test_resource_path('csvdir_samples', 'csvdir')
}
ingest('csvdir', environ=environ)
bundle = load('csvdir', environ=environ)
sids = 0, 1, 2, 3
assert_equal(set(bundle.asset_finder.sids), set(sids))
for equity in bundle.asset_finder.retrieve_all(sids):
assert_equal(equity.start_date, self.asset_start, msg=equity)
assert_equal(equity.end_date, self.asset_end, msg=equity)
sessions = self.calendar.all_sessions
actual = bundle.equity_daily_bar_reader.load_raw_arrays(
self.columns,
sessions[sessions.get_loc(self.asset_start, 'bfill')],
sessions[sessions.get_loc(self.asset_end, 'ffill')],
sids,
)
expected_pricing, expected_adjustments = self._expected_data(
bundle.asset_finder,
)
assert_equal(actual, expected_pricing, array_decimal=2)
adjustments_for_cols = bundle.adjustment_reader.load_adjustments(
self.columns,
sessions,
pd.Index(sids),
)
assert_equal([sorted(adj.keys()) for adj in adjustments_for_cols],
expected_adjustments)
| apache-2.0 | -3,258,116,218,685,862,000 | -2,736,115,250,906,752,000 | 37.870229 | 74 | 0.503928 | false |
rafaelolg/django-guardian | guardian/migrations/0004_auto__del_field_groupobjectpermission_object_id__del_unique_groupobjec.py | 31 | 8186 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'GroupObjectPermission.object_pk'
db.alter_column('guardian_groupobjectpermission', 'object_pk', self.gf('django.db.models.fields.CharField')(max_length=255))
# Changing field 'UserObjectPermission.object_pk'
db.alter_column('guardian_userobjectpermission', 'object_pk', self.gf('django.db.models.fields.CharField')(max_length=255))
# Removing unique constraint on 'UserObjectPermission', fields ['object_id', 'user', 'content_type', 'permission']
db.delete_unique('guardian_userobjectpermission', ['object_id', 'user_id', 'content_type_id', 'permission_id'])
# Removing unique constraint on 'GroupObjectPermission', fields ['group', 'object_id', 'content_type', 'permission']
db.delete_unique('guardian_groupobjectpermission', ['group_id', 'object_id', 'content_type_id', 'permission_id'])
# Deleting field 'GroupObjectPermission.object_id'
db.delete_column('guardian_groupobjectpermission', 'object_id')
# Adding unique constraint on 'GroupObjectPermission', fields ['object_pk', 'group', 'content_type', 'permission']
db.create_unique('guardian_groupobjectpermission', ['object_pk', 'group_id', 'content_type_id', 'permission_id'])
# Deleting field 'UserObjectPermission.object_id'
db.delete_column('guardian_userobjectpermission', 'object_id')
# Adding unique constraint on 'UserObjectPermission', fields ['object_pk', 'user', 'content_type', 'permission']
db.create_unique('guardian_userobjectpermission', ['object_pk', 'user_id', 'content_type_id', 'permission_id'])
def backwards(self, orm):
# Changing field 'GroupObjectPermission.object_pk'
db.alter_column('guardian_groupobjectpermission', 'object_pk', self.gf('django.db.models.fields.TextField')())
# Changing field 'UserObjectPermission.object_pk'
db.alter_column('guardian_userobjectpermission', 'object_pk', self.gf('django.db.models.fields.TextField')())
# Removing unique constraint on 'UserObjectPermission', fields ['object_pk', 'user', 'content_type', 'permission']
db.delete_unique('guardian_userobjectpermission', ['object_pk', 'user_id', 'content_type_id', 'permission_id'])
# Removing unique constraint on 'GroupObjectPermission', fields ['object_pk', 'group', 'content_type', 'permission']
db.delete_unique('guardian_groupobjectpermission', ['object_pk', 'group_id', 'content_type_id', 'permission_id'])
# We cannot add back in field 'GroupObjectPermission.object_id'
raise RuntimeError(
"Cannot reverse this migration. 'GroupObjectPermission.object_id' and its values cannot be restored.")
# Adding unique constraint on 'GroupObjectPermission', fields ['group', 'object_id', 'content_type', 'permission']
db.create_unique('guardian_groupobjectpermission', ['group_id', 'object_id', 'content_type_id', 'permission_id'])
# We cannot add back in field 'UserObjectPermission.object_id'
raise RuntimeError(
"Cannot reverse this migration. 'UserObjectPermission.object_id' and its values cannot be restored.")
# Adding unique constraint on 'UserObjectPermission', fields ['object_id', 'user', 'content_type', 'permission']
db.create_unique('guardian_userobjectpermission', ['object_id', 'user_id', 'content_type_id', 'permission_id'])
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'guardian.groupobjectpermission': {
'Meta': {'unique_together': "(['group', 'permission', 'content_type', 'object_pk'],)", 'object_name': 'GroupObjectPermission'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_pk': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'permission': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.Permission']"})
},
'guardian.userobjectpermission': {
'Meta': {'unique_together': "(['user', 'permission', 'content_type', 'object_pk'],)", 'object_name': 'UserObjectPermission'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_pk': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'permission': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.Permission']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
}
}
complete_apps = ['guardian']
| bsd-2-clause | 1,887,078,911,292,637,200 | -7,678,844,809,811,644,000 | 67.216667 | 182 | 0.614464 | false |
lunafeng/django | django/db/backends/postgresql/operations.py | 207 | 10109 | from __future__ import unicode_literals
from psycopg2.extras import Inet
from django.conf import settings
from django.db.backends.base.operations import BaseDatabaseOperations
class DatabaseOperations(BaseDatabaseOperations):
def unification_cast_sql(self, output_field):
internal_type = output_field.get_internal_type()
if internal_type in ("GenericIPAddressField", "IPAddressField", "TimeField", "UUIDField"):
# PostgreSQL will resolve a union as type 'text' if input types are
# 'unknown'.
# http://www.postgresql.org/docs/9.4/static/typeconv-union-case.html
# These fields cannot be implicitly cast back in the default
# PostgreSQL configuration so we need to explicitly cast them.
# We must also remove components of the type within brackets:
# varchar(255) -> varchar.
return 'CAST(%%s AS %s)' % output_field.db_type(self.connection).split('(')[0]
return '%s'
def date_extract_sql(self, lookup_type, field_name):
# http://www.postgresql.org/docs/current/static/functions-datetime.html#FUNCTIONS-DATETIME-EXTRACT
if lookup_type == 'week_day':
# For consistency across backends, we return Sunday=1, Saturday=7.
return "EXTRACT('dow' FROM %s) + 1" % field_name
else:
return "EXTRACT('%s' FROM %s)" % (lookup_type, field_name)
def date_trunc_sql(self, lookup_type, field_name):
# http://www.postgresql.org/docs/current/static/functions-datetime.html#FUNCTIONS-DATETIME-TRUNC
return "DATE_TRUNC('%s', %s)" % (lookup_type, field_name)
def _convert_field_to_tz(self, field_name, tzname):
if settings.USE_TZ:
field_name = "%s AT TIME ZONE %%s" % field_name
params = [tzname]
else:
params = []
return field_name, params
def datetime_cast_date_sql(self, field_name, tzname):
field_name, params = self._convert_field_to_tz(field_name, tzname)
sql = '(%s)::date' % field_name
return sql, params
def datetime_extract_sql(self, lookup_type, field_name, tzname):
field_name, params = self._convert_field_to_tz(field_name, tzname)
sql = self.date_extract_sql(lookup_type, field_name)
return sql, params
def datetime_trunc_sql(self, lookup_type, field_name, tzname):
field_name, params = self._convert_field_to_tz(field_name, tzname)
# http://www.postgresql.org/docs/current/static/functions-datetime.html#FUNCTIONS-DATETIME-TRUNC
sql = "DATE_TRUNC('%s', %s)" % (lookup_type, field_name)
return sql, params
def deferrable_sql(self):
return " DEFERRABLE INITIALLY DEFERRED"
def lookup_cast(self, lookup_type, internal_type=None):
lookup = '%s'
# Cast text lookups to text to allow things like filter(x__contains=4)
if lookup_type in ('iexact', 'contains', 'icontains', 'startswith',
'istartswith', 'endswith', 'iendswith', 'regex', 'iregex'):
if internal_type in ('IPAddressField', 'GenericIPAddressField'):
lookup = "HOST(%s)"
else:
lookup = "%s::text"
# Use UPPER(x) for case-insensitive lookups; it's faster.
if lookup_type in ('iexact', 'icontains', 'istartswith', 'iendswith'):
lookup = 'UPPER(%s)' % lookup
return lookup
def last_insert_id(self, cursor, table_name, pk_name):
# Use pg_get_serial_sequence to get the underlying sequence name
# from the table name and column name (available since PostgreSQL 8)
cursor.execute("SELECT CURRVAL(pg_get_serial_sequence('%s','%s'))" % (
self.quote_name(table_name), pk_name))
return cursor.fetchone()[0]
def no_limit_value(self):
return None
def prepare_sql_script(self, sql):
return [sql]
def quote_name(self, name):
if name.startswith('"') and name.endswith('"'):
return name # Quoting once is enough.
return '"%s"' % name
def set_time_zone_sql(self):
return "SET TIME ZONE %s"
def sql_flush(self, style, tables, sequences, allow_cascade=False):
if tables:
# Perform a single SQL 'TRUNCATE x, y, z...;' statement. It allows
# us to truncate tables referenced by a foreign key in any other
# table.
tables_sql = ', '.join(
style.SQL_FIELD(self.quote_name(table)) for table in tables)
if allow_cascade:
sql = ['%s %s %s;' % (
style.SQL_KEYWORD('TRUNCATE'),
tables_sql,
style.SQL_KEYWORD('CASCADE'),
)]
else:
sql = ['%s %s;' % (
style.SQL_KEYWORD('TRUNCATE'),
tables_sql,
)]
sql.extend(self.sequence_reset_by_name_sql(style, sequences))
return sql
else:
return []
def sequence_reset_by_name_sql(self, style, sequences):
# 'ALTER SEQUENCE sequence_name RESTART WITH 1;'... style SQL statements
# to reset sequence indices
sql = []
for sequence_info in sequences:
table_name = sequence_info['table']
column_name = sequence_info['column']
if not (column_name and len(column_name) > 0):
# This will be the case if it's an m2m using an autogenerated
# intermediate table (see BaseDatabaseIntrospection.sequence_list)
column_name = 'id'
sql.append("%s setval(pg_get_serial_sequence('%s','%s'), 1, false);" %
(style.SQL_KEYWORD('SELECT'),
style.SQL_TABLE(self.quote_name(table_name)),
style.SQL_FIELD(column_name))
)
return sql
def tablespace_sql(self, tablespace, inline=False):
if inline:
return "USING INDEX TABLESPACE %s" % self.quote_name(tablespace)
else:
return "TABLESPACE %s" % self.quote_name(tablespace)
def sequence_reset_sql(self, style, model_list):
from django.db import models
output = []
qn = self.quote_name
for model in model_list:
# Use `coalesce` to set the sequence for each model to the max pk value if there are records,
# or 1 if there are none. Set the `is_called` property (the third argument to `setval`) to true
# if there are records (as the max pk value is already in use), otherwise set it to false.
# Use pg_get_serial_sequence to get the underlying sequence name from the table name
# and column name (available since PostgreSQL 8)
for f in model._meta.local_fields:
if isinstance(f, models.AutoField):
output.append(
"%s setval(pg_get_serial_sequence('%s','%s'), "
"coalesce(max(%s), 1), max(%s) %s null) %s %s;" % (
style.SQL_KEYWORD('SELECT'),
style.SQL_TABLE(qn(model._meta.db_table)),
style.SQL_FIELD(f.column),
style.SQL_FIELD(qn(f.column)),
style.SQL_FIELD(qn(f.column)),
style.SQL_KEYWORD('IS NOT'),
style.SQL_KEYWORD('FROM'),
style.SQL_TABLE(qn(model._meta.db_table)),
)
)
break # Only one AutoField is allowed per model, so don't bother continuing.
for f in model._meta.many_to_many:
if not f.remote_field.through:
output.append(
"%s setval(pg_get_serial_sequence('%s','%s'), "
"coalesce(max(%s), 1), max(%s) %s null) %s %s;" % (
style.SQL_KEYWORD('SELECT'),
style.SQL_TABLE(qn(f.m2m_db_table())),
style.SQL_FIELD('id'),
style.SQL_FIELD(qn('id')),
style.SQL_FIELD(qn('id')),
style.SQL_KEYWORD('IS NOT'),
style.SQL_KEYWORD('FROM'),
style.SQL_TABLE(qn(f.m2m_db_table()))
)
)
return output
def prep_for_iexact_query(self, x):
return x
def max_name_length(self):
"""
Returns the maximum length of an identifier.
Note that the maximum length of an identifier is 63 by default, but can
be changed by recompiling PostgreSQL after editing the NAMEDATALEN
macro in src/include/pg_config_manual.h .
This implementation simply returns 63, but can easily be overridden by a
custom database backend that inherits most of its behavior from this one.
"""
return 63
def distinct_sql(self, fields):
if fields:
return 'DISTINCT ON (%s)' % ', '.join(fields)
else:
return 'DISTINCT'
def last_executed_query(self, cursor, sql, params):
# http://initd.org/psycopg/docs/cursor.html#cursor.query
# The query attribute is a Psycopg extension to the DB API 2.0.
if cursor.query is not None:
return cursor.query.decode('utf-8')
return None
def return_insert_id(self):
return "RETURNING %s", ()
def bulk_insert_sql(self, fields, num_values):
items_sql = "(%s)" % ", ".join(["%s"] * len(fields))
return "VALUES " + ", ".join([items_sql] * num_values)
def adapt_datefield_value(self, value):
return value
def adapt_datetimefield_value(self, value):
return value
def adapt_timefield_value(self, value):
return value
def adapt_ipaddressfield_value(self, value):
if value:
return Inet(value)
return None
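# Illustrative sketch (not part of Django): the methods above that only build SQL
# text can be exercised without a live PostgreSQL connection, which makes their
# output easy to inspect. The column and identifier names below are made up.
def _example_generated_sql():
    ops = DatabaseOperations(connection=None)
    trunc = ops.date_trunc_sql('month', '"pub_date"') # DATE_TRUNC('month', "pub_date")
    cast = ops.lookup_cast('icontains') # UPPER(%s::text)
    quoted = ops.quote_name('select') # "select"
    return trunc, cast, quoted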
| bsd-3-clause | -265,947,179,402,991,970 | 1,649,690,846,013,895,200 | 41.120833 | 107 | 0.559205 | false |
fxia22/ASM_xf | PythonD/site_python/twisted/protocols/oscar.py | 2 | 43247 | # Twisted, the Framework of Your Internet
# Copyright (C) 2001 Matthew W. Lefkowitz
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of version 2.1 of the GNU Lesser General Public
# License as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""An implementation of the OSCAR protocol, which AIM and ICQ use to communcate.
This module is unstable.
Maintainer: U{Paul Swartz<mailto:z3p@twistedmatrix.com>}
"""
from __future__ import nested_scopes
from twisted.internet import reactor, main, defer, protocol
from twisted.python import log
import struct
import md5
import string
import socket
import random
import time
import types
import re
def logPacketData(data):
lines = len(data)/16
if lines*16 != len(data): lines=lines+1
for i in range(lines):
d = tuple(data[16*i:16*i+16])
hex = map(lambda x: "%02X"%ord(x),d)
text = map(lambda x: (len(repr(x))>3 and '.') or x, d)
log.msg(' '.join(hex)+ ' '*3*(16-len(d)) +''.join(text))
log.msg('')
def SNAC(fam,sub,id,data,flags=[0,0]):
header="!HHBBL"
head=struct.pack(header,fam,sub,
flags[0],flags[1],
id)
return head+str(data)
def readSNAC(data):
header="!HHBBL"
head=list(struct.unpack(header,data[:10]))
return head+[data[10:]]
def TLV(type,value):
header="!HH"
head=struct.pack(header,type,len(value))
return head+str(value)
def readTLVs(data,count=None):
header="!HH"
dict={}
while data and len(dict)!=count:
head=struct.unpack(header,data[:4])
dict[head[0]]=data[4:4+head[1]]
data=data[4+head[1]:]
if not count:
return dict
return dict,data
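# Illustrative sketch (not part of the original module): how the framing helpers
# above fit together. The family/subtype, request id and TLV types below are
# made up and only demonstrate the pack/unpack round trip.
def _example_snac_roundtrip():
    payload = TLV(0x01, 'screenname') + TLV(0x16, '\x00\x01')
    packet = SNAC(0x17, 0x06, 0x0001, payload)
    family, subtype, flag1, flag2, reqid, data = readSNAC(packet)
    tlvs = readTLVs(data) # {1: 'screenname', 22: '\x00\x01'}
    return family, subtype, reqid, tlvs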
def encryptPasswordMD5(password,key):
m=md5.new()
m.update(key)
m.update(md5.new(password).digest())
m.update("AOL Instant Messenger (SM)")
return m.digest()
def encryptPasswordICQ(password):
key=[0xF3,0x26,0x81,0xC4,0x39,0x86,0xDB,0x92,0x71,0xA3,0xB9,0xE6,0x53,0x7A,0x95,0x7C]
bytes=map(ord,password)
r=""
for i in range(len(bytes)):
r=r+chr(bytes[i]^key[i%len(key)])
return r
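# Illustrative sketch (not part of the original module): the two password
# obfuscation helpers above. The challenge key would normally come from the
# authorization server; the literal values here are made up.
def _example_password_obfuscation():
    aim_digest = encryptPasswordMD5('secret', 'challenge-key') # 16-byte MD5 digest
    icq_bytes = encryptPasswordICQ('secret') # XOR against the fixed ICQ key
    return aim_digest, icq_bytes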
def dehtml(text):
text=string.replace(text,"<br>","\n")
text=string.replace(text,"<BR>","\n")
text=string.replace(text,"<Br>","\n") # XXX make this a regexp
text=string.replace(text,"<bR>","\n")
text=re.sub('<.*?>','',text)
text=string.replace(text,'>','>')
text=string.replace(text,'<','<')
text=string.replace(text,'&','&')
text=string.replace(text,' ',' ')
text=string.replace(text,'"','"')
return text
def html(text):
text=string.replace(text,'"','"')
text=string.replace(text,'&','&')
text=string.replace(text,'<','<')
text=string.replace(text,'>','>')
text=string.replace(text,"\n","<br>")
return '<html><body bgcolor="white"><font color="black">%s</font></body></html>'%text
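# Illustrative sketch (not part of the original module): html() wraps outgoing
# text in minimal AIM markup, and dehtml() strips that markup (plus entities and
# <br> tags) from incoming messages, so plain text survives a round trip.
def _example_html_roundtrip():
    text = '1 < 2 & 3 > 0\nsecond line'
    return dehtml(html(text)) == text # True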
class OSCARUser:
def __init__(self, name, warn, tlvs):
self.name = name
self.warning = warn
self.flags = []
self.caps = []
for k,v in tlvs.items():
if k == 1: # user flags
v=struct.unpack('!H',v)[0]
for o, f in [(1,'trial'),
(2,'unknown bit 2'),
(4,'aol'),
(8,'unknown bit 4'),
(16,'aim'),
(32,'away'),
(1024,'activebuddy')]:
if v&o: self.flags.append(f)
elif k == 2: # member since date
self.memberSince = struct.unpack('!L',v)[0]
elif k == 3: # on-since
self.onSince = struct.unpack('!L',v)[0]
elif k == 4: # idle time
self.idleTime = struct.unpack('!H',v)[0]
elif k == 5: # unknown
pass
elif k == 6: # icq online status
if v[2] == '\x00':
self.icqStatus = 'online'
elif v[2] == '\x01':
self.icqStatus = 'away'
else:
self.icqStatus = 'unknown'
elif k == 10: # icq ip address
self.icqIPaddy = socket.inet_ntoa(v)
elif k == 12: # icq random stuff
self.icqRandom = v
elif k == 13: # capabilities
caps=[]
while v:
c=v[:16]
if c==CAP_ICON: caps.append("icon")
elif c==CAP_IMAGE: caps.append("image")
elif c==CAP_VOICE: caps.append("voice")
elif c==CAP_CHAT: caps.append("chat")
elif c==CAP_GET_FILE: caps.append("getfile")
elif c==CAP_SEND_FILE: caps.append("sendfile")
elif c==CAP_SEND_LIST: caps.append("sendlist")
elif c==CAP_GAMES: caps.append("games")
else: caps.append(("unknown",c))
v=v[16:]
caps.sort()
self.caps=caps
elif k == 14: pass
elif k == 15: # session length (aim)
self.sessionLength = struct.unpack('!L',v)[0]
elif k == 16: # session length (aol)
self.sessionLength = struct.unpack('!L',v)[0]
elif k == 30: # no idea
pass
else:
log.msg("unknown tlv for user %s\nt: %s\nv: %s"%(self.name,k,repr(v)))
def __str__(self):
s = '<OSCARUser %s' % self.name
o = []
if self.warning!=0: o.append('warning level %s'%self.warning)
if hasattr(self, 'flags'): o.append('flags %s'%self.flags)
if hasattr(self, 'sessionLength'): o.append('online for %i minutes' % (self.sessionLength/60,))
if hasattr(self, 'idleTime'): o.append('idle for %i minutes' % self.idleTime)
if self.caps: o.append('caps %s'%self.caps)
if o:
s=s+', '+', '.join(o)
s=s+'>'
return s
class SSIGroup:
def __init__(self, name, tlvs = {}):
self.name = name
#self.tlvs = []
#self.userIDs = []
self.usersToID = {}
self.users = []
#if not tlvs.has_key(0xC8): return
#buddyIDs = tlvs[0xC8]
#while buddyIDs:
# bid = struct.unpack('!H',buddyIDs[:2])[0]
# buddyIDs = buddyIDs[2:]
# self.users.append(bid)
def findIDFor(self, user):
return self.usersToID[user]
def addUser(self, buddyID, user):
self.usersToID[user] = buddyID
self.users.append(user)
user.group = self
def oscarRep(self, groupID, buddyID):
tlvData = TLV(0xc8, reduce(lambda x,y:x+y, [struct.pack('!H',self.usersToID[x]) for x in self.users]))
return struct.pack('!H', len(self.name)) + self.name + \
struct.pack('!HH', groupID, buddyID) + '\000\001' + tlvData
class SSIBuddy:
def __init__(self, name, tlvs = {}):
self.name = name
self.tlvs = tlvs
for k,v in tlvs.items():
if k == 0x013c: # buddy comment
self.buddyComment = v
elif k == 0x013d: # buddy alerts
actionFlag = ord(v[0])
whenFlag = ord(v[1])
self.alertActions = []
self.alertWhen = []
if actionFlag&1:
self.alertActions.append('popup')
if actionFlag&2:
self.alertActions.append('sound')
if whenFlag&1:
self.alertWhen.append('online')
if whenFlag&2:
self.alertWhen.append('unidle')
if whenFlag&4:
self.alertWhen.append('unaway')
elif k == 0x013e:
self.alertSound = v
def oscarRep(self, groupID, buddyID):
tlvData = reduce(lambda x,y: x+y, map(lambda (k,v):TLV(k,v), self.tlvs.items())) or '\000\000'
return struct.pack('!H', len(self.name)) + self.name + \
struct.pack('!HH', groupID, buddyID) + '\000\000' + tlvData
class OscarConnection(protocol.Protocol):
def connectionMade(self):
self.state=""
self.seqnum=0
self.buf=''
self.stopKeepAliveID = None
self.setKeepAlive(4*60) # 4 minutes
def connectionLost(self, reason):
log.msg("Connection Lost! %s" % self)
self.stopKeepAlive()
# def connectionFailed(self):
# log.msg("Connection Failed! %s" % self)
# self.stopKeepAlive()
def sendFLAP(self,data,channel = 0x02):
header="!cBHH"
self.seqnum=(self.seqnum+1)%0xFFFF
seqnum=self.seqnum
head=struct.pack(header,'*', channel,
seqnum, len(data))
self.transport.write(head+str(data))
# if isinstance(self, ChatService):
# logPacketData(head+str(data))
def readFlap(self):
header="!cBHH"
if len(self.buf)<6: return
flap=struct.unpack(header,self.buf[:6])
if len(self.buf)<6+flap[3]: return
data,self.buf=self.buf[6:6+flap[3]],self.buf[6+flap[3]:]
return [flap[1],data]
def dataReceived(self,data):
# if isinstance(self, ChatService):
# logPacketData(data)
self.buf=self.buf+data
flap=self.readFlap()
while flap:
func=getattr(self,"oscar_%s"%self.state,None)
if not func:
log.msg("no func for state: %s" % self.state)
state=func(flap)
if state:
self.state=state
flap=self.readFlap()
def setKeepAlive(self,t):
self.keepAliveDelay=t
self.stopKeepAlive()
self.stopKeepAliveID = reactor.callLater(t, self.sendKeepAlive)
def sendKeepAlive(self):
self.sendFLAP("",0x05)
self.stopKeepAliveID = reactor.callLater(self.keepAliveDelay, self.sendKeepAlive)
def stopKeepAlive(self):
if self.stopKeepAliveID:
self.stopKeepAliveID.cancel()
self.stopKeepAliveID = None
def disconnect(self):
"""
send the disconnect flap, and sever the connection
"""
self.sendFLAP('', 0x04)
def f(reason): pass
self.connectionLost = f
self.transport.loseConnection()
class SNACBased(OscarConnection):
snacFamilies = {
# family : (version, toolID, toolVersion)
}
def __init__(self,cookie):
self.cookie=cookie
self.lastID=0
self.supportedFamilies = ()
self.requestCallbacks={} # request id:Deferred
def sendSNAC(self,fam,sub,data,flags=[0,0]):
"""
send a snac and wait for the response by returning a Deferred.
"""
reqid=self.lastID
self.lastID=reqid+1
d = defer.Deferred()
d.reqid = reqid
#d.addErrback(self._ebDeferredError,fam,sub,data) # XXX for testing
self.requestCallbacks[reqid] = d
self.sendFLAP(SNAC(fam,sub,reqid,data))
return d
def _ebDeferredError(self, error, fam, sub, data):
log.msg('ERROR IN DEFERRED %s' % error)
log.msg('on sending of message, family 0x%02x, subtype 0x%02x' % (fam, sub))
log.msg('data: %s' % repr(data))
def sendSNACnr(self,fam,sub,data,flags=[0,0]):
"""
send a snac, but don't bother adding a deferred, we don't care.
"""
self.sendFLAP(SNAC(fam,sub,0x10000*fam+sub,data))
def oscar_(self,data):
self.sendFLAP("\000\000\000\001"+TLV(6,self.cookie), 0x01)
return "Data"
def oscar_Data(self,data):
snac=readSNAC(data[1])
if self.requestCallbacks.has_key(snac[4]):
d = self.requestCallbacks[snac[4]]
del self.requestCallbacks[snac[4]]
if snac[1]!=1:
d.callback(snac)
else:
d.errback(snac)
return
func=getattr(self,'oscar_%02X_%02X'%(snac[0],snac[1]),None)
if not func:
self.oscar_unknown(snac)
else:
func(snac[2:])
return "Data"
def oscar_unknown(self,snac):
log.msg("unknown for %s" % self)
log.msg(snac)
def oscar_01_03(self, snac):
numFamilies = len(snac[3])/2
self.supportedFamilies = struct.unpack("!"+str(numFamilies)+'H', snac[3])
d = ''
for fam in self.supportedFamilies:
if self.snacFamilies.has_key(fam):
d=d+struct.pack('!2H',fam,self.snacFamilies[fam][0])
self.sendSNACnr(0x01,0x17, d)
def oscar_01_0A(self,snac):
"""
change of rate information.
"""
# this can be parsed, maybe we can even work it in
pass
def oscar_01_18(self,snac):
"""
host versions, in the same format as we sent
"""
self.sendSNACnr(0x01,0x06,"") #pass
def clientReady(self):
"""
called when the client is ready to be online
"""
d = ''
for fam in self.supportedFamilies:
if self.snacFamilies.has_key(fam):
version, toolID, toolVersion = self.snacFamilies[fam]
d = d + struct.pack('!4H',fam,version,toolID,toolVersion)
self.sendSNACnr(0x01,0x02,d)
class BOSConnection(SNACBased):
snacFamilies = {
0x01:(3, 0x0110, 0x059b),
0x13:(3, 0x0110, 0x059b),
0x02:(1, 0x0110, 0x059b),
0x03:(1, 0x0110, 0x059b),
0x04:(1, 0x0110, 0x059b),
0x06:(1, 0x0110, 0x059b),
0x08:(1, 0x0104, 0x0001),
0x09:(1, 0x0110, 0x059b),
0x0a:(1, 0x0110, 0x059b),
0x0b:(1, 0x0104, 0x0001),
0x0c:(1, 0x0104, 0x0001)
}
capabilities = None
def __init__(self,username,cookie):
SNACBased.__init__(self,cookie)
self.username=username
self.profile = None
self.awayMessage = None
self.services = {}
if not self.capabilities:
self.capabilities = [CAP_CHAT]
def parseUser(self,data,count=None):
l=ord(data[0])
name=data[1:1+l]
warn,foo=struct.unpack("!HH",data[1+l:5+l])
warn=int(warn/10)
tlvs=data[5+l:]
if count:
tlvs,rest = readTLVs(tlvs,foo)
else:
tlvs,rest = readTLVs(tlvs), None
u = OSCARUser(name, warn, tlvs)
if rest == None:
return u
else:
return u, rest
def oscar_01_05(self, snac, d = None):
"""
data for a new service connection
d might be a deferred to be called back when the service is ready
"""
tlvs = readTLVs(snac[3][2:])
service = struct.unpack('!H',tlvs[0x0d])[0]
ip = tlvs[5]
cookie = tlvs[6]
#c = serviceClasses[service](self, cookie, d)
c = protocol.ClientCreator(reactor, serviceClasses[service], self, cookie, d)
def addService(x):
self.services[service] = x
c.connectTCP(ip, 5190).addCallback(addService)
#self.services[service] = c
def oscar_01_07(self,snac):
"""
        rate parameters
"""
self.sendSNACnr(0x01,0x08,"\x00\x01\x00\x02\x00\x03\x00\x04\x00\x05") # ack
self.initDone()
self.sendSNACnr(0x13,0x02,'') # SSI rights info
self.sendSNACnr(0x02,0x02,'') # location rights info
self.sendSNACnr(0x03,0x02,'') # buddy list rights
self.sendSNACnr(0x04,0x04,'') # ICBM parms
self.sendSNACnr(0x09,0x02,'') # BOS rights
def oscar_01_10(self,snac):
"""
we've been warned
"""
skip = struct.unpack('!H',snac[3][:2])[0]
newLevel = struct.unpack('!H',snac[3][2+skip:4+skip])[0]/10
if len(snac[3])>4+skip:
by = self.parseUser(snac[3][4+skip:])
else:
by = None
self.receiveWarning(newLevel, by)
def oscar_01_13(self,snac):
"""
MOTD
"""
pass # we don't care for now
def oscar_02_03(self, snac):
"""
location rights response
"""
tlvs = readTLVs(snac[3])
self.maxProfileLength = tlvs[1]
def oscar_03_03(self, snac):
"""
buddy list rights response
"""
tlvs = readTLVs(snac[3])
self.maxBuddies = tlvs[1]
self.maxWatchers = tlvs[2]
def oscar_03_0B(self, snac):
"""
buddy update
"""
self.updateBuddy(self.parseUser(snac[3]))
def oscar_03_0C(self, snac):
"""
buddy offline
"""
self.offlineBuddy(self.parseUser(snac[3]))
# def oscar_04_03(self, snac):
def oscar_04_05(self, snac):
"""
ICBM parms response
"""
self.sendSNACnr(0x04,0x02,'\x00\x00\x00\x00\x00\x0b\x1f@\x03\xe7\x03\xe7\x00\x00\x00\x00') # IM rights
def oscar_04_07(self, snac):
"""
ICBM message (instant message)
"""
data = snac[3]
cookie, data = data[:8], data[8:]
channel = struct.unpack('!H',data[:2])[0]
data = data[2:]
user, data = self.parseUser(data, 1)
tlvs = readTLVs(data)
if channel == 1: # message
flags = []
multiparts = []
for k, v in tlvs.items():
if k == 2:
while v:
v = v[2:] # skip bad data
messageLength, charSet, charSubSet = struct.unpack('!3H', v[:6])
messageLength -= 4
message = [v[6:6+messageLength]]
if charSet == 0:
pass # don't add anything special
elif charSet == 2:
message.append('unicode')
elif charSet == 3:
message.append('iso-8859-1')
elif charSet == 0xffff:
message.append('none')
if charSubSet == 0xb:
message.append('macintosh')
if messageLength > 0: multiparts.append(tuple(message))
v = v[6+messageLength:]
elif k == 3:
flags.append('acknowledge')
elif k == 4:
flags.append('auto')
elif k == 6:
flags.append('offline')
elif k == 8:
iconLength, foo, iconSum, iconStamp = struct.unpack('!LHHL',v)
if iconLength:
flags.append('icon')
flags.append((iconLength, iconSum, iconStamp))
elif k == 9:
flags.append('buddyrequest')
elif k == 0xb: # unknown
pass
elif k == 0x17:
flags.append('extradata')
flags.append(v)
else:
log.msg('unknown TLV for incoming IM, %04x, %s' % (k,repr(v)))
# unknown tlv for user SNewdorf
# t: 29
# v: '\x00\x00\x00\x05\x02\x01\xd2\x04r\x00\x01\x01\x10/\x8c\x8b\x8a\x1e\x94*\xbc\x80}\x8d\xc4;\x1dEM'
# XXX what is this?
self.receiveMessage(user, multiparts, flags)
        elif channel == 2: # rendezvous
status = struct.unpack('!H',tlvs[5][:2])[0]
requestClass = tlvs[5][10:26]
moreTLVs = readTLVs(tlvs[5][26:])
if requestClass == CAP_CHAT: # a chat request
exchange = struct.unpack('!H',moreTLVs[10001][:2])[0]
name = moreTLVs[10001][3:-2]
instance = struct.unpack('!H',moreTLVs[10001][-2:])[0]
if not self.services.has_key(SERVICE_CHATNAV):
self.connectService(SERVICE_CHATNAV,1).addCallback(lambda x: self.services[SERVICE_CHATNAV].getChatInfo(exchange, name, instance).\
addCallback(self._cbGetChatInfoForInvite, user, moreTLVs[12]))
else:
self.services[SERVICE_CHATNAV].getChatInfo(exchange, name, instance).\
addCallback(self._cbGetChatInfoForInvite, user, moreTLVs[12])
elif requestClass == CAP_SEND_FILE:
if moreTLVs.has_key(11): # cancel
log.msg('cancelled file request')
log.msg(status)
return # handle this later
name = moreTLVs[10001][9:-7]
desc = moreTLVs[12]
log.msg('file request from %s, %s, %s' % (user, name, desc))
self.receiveSendFileRequest(user, name, desc, cookie)
else:
                log.msg('unsupported rendezvous: %s' % requestClass)
log.msg(repr(moreTLVs))
else:
log.msg('unknown channel %02x' % channel)
log.msg(tlvs)
def _cbGetChatInfoForInvite(self, info, user, message):
apply(self.receiveChatInvite, (user,message)+info)
def oscar_09_03(self, snac):
"""
BOS rights response
"""
tlvs = readTLVs(snac[3])
self.maxPermitList = tlvs[1]
self.maxDenyList = tlvs[2]
def oscar_0B_02(self, snac):
"""
stats reporting interval
"""
self.reportingInterval = struct.unpack('!H',snac[3])[0]
def oscar_13_03(self, snac):
"""
SSI rights response
"""
#tlvs = readTLVs(snac[3])
pass # we don't know how to parse this
# methods to be called by the client, and their support methods
def requestSelfInfo(self):
"""
ask for the OSCARUser for ourselves
"""
d = defer.Deferred()
self.sendSNAC(0x01, 0x0E, '').addCallback(self._cbRequestSelfInfo, d)
return d
def _cbRequestSelfInfo(self, snac, d):
d.callback(self.parseUser(snac[5]))
def initSSI(self):
"""
this sends the rate request for family 0x13 (Server Side Information)
so we can then use it
"""
return self.sendSNAC(0x13, 0x02, '').addCallback(self._cbInitSSI)
def _cbInitSSI(self, snac, d):
return {} # don't even bother parsing this
def requestSSI(self, timestamp = 0, revision = 0):
"""
request the server side information
if the deferred gets None, it means the SSI is the same
"""
return self.sendSNAC(0x13, 0x05,
struct.pack('!LH',timestamp,revision)).addCallback(self._cbRequestSSI)
def _cbRequestSSI(self, snac, args = ()):
if snac[1] == 0x0f: # same SSI as we have
return
itemdata = snac[5][3:]
if args:
revision, groups, permit, deny, permitMode, visibility = args
else:
version, revision = struct.unpack('!BH', snac[5][:3])
groups = {}
permit = []
deny = []
permitMode = None
visibility = None
while len(itemdata)>4:
nameLength = struct.unpack('!H', itemdata[:2])[0]
name = itemdata[2:2+nameLength]
groupID, buddyID, itemType, restLength = \
struct.unpack('!4H', itemdata[2+nameLength:10+nameLength])
tlvs = readTLVs(itemdata[10+nameLength:10+nameLength+restLength])
itemdata = itemdata[10+nameLength+restLength:]
if itemType == 0: # buddies
groups[groupID].addUser(buddyID, SSIBuddy(name, tlvs))
elif itemType == 1: # group
g = SSIGroup(name, tlvs)
if groups.has_key(0): groups[0].addUser(groupID, g)
groups[groupID] = g
elif itemType == 2: # permit
permit.append(name)
elif itemType == 3: # deny
deny.append(name)
elif itemType == 4: # permit deny info
if not tlvs.has_key(0xcb):
continue # this happens with ICQ
permitMode = {1:'permitall',2:'denyall',3:'permitsome',4:'denysome',5:'permitbuddies'}[ord(tlvs[0xca])]
visibility = {'\xff\xff\xff\xff':'all','\x00\x00\x00\x04':'notaim'}[tlvs[0xcb]]
elif itemType == 5: # unknown (perhaps idle data)?
pass
else:
log.msg('%s %s %s %s %s' % (name, groupID, buddyID, itemType, tlvs))
timestamp = struct.unpack('!L',itemdata)[0]
if not timestamp: # we've got more packets coming
# which means add some deferred stuff
d = defer.Deferred()
self.requestCallbacks[snac[4]] = d
d.addCallback(self._cbRequestSSI, (revision, groups, permit, deny, permitMode, visibility))
return d
return (groups[0].users,permit,deny,permitMode,visibility,timestamp,revision)
def activateSSI(self):
"""
        activate the data stored on the server (use buddy list, permit/deny settings, etc.)
"""
self.sendSNACnr(0x13,0x07,'')
def startModifySSI(self):
"""
tell the OSCAR server to be on the lookout for SSI modifications
"""
self.sendSNACnr(0x13,0x11,'')
def addItemSSI(self, item, groupID = None, buddyID = None):
"""
add an item to the SSI server. if buddyID == 0, then this should be a group.
this gets a callback when it's finished, but you can probably ignore it.
"""
if not groupID:
groupID = item.group.group.findIDFor(item.group)
if not buddyID:
buddyID = item.group.findIDFor(item)
return self.sendSNAC(0x13,0x08, item.oscarRep(groupID, buddyID))
def modifyItemSSI(self, item, groupID = None, buddyID = None):
if not groupID:
groupID = item.group.group.findIDFor(item.group)
if not buddyID:
buddyID = item.group.findIDFor(item)
return self.sendSNAC(0x13,0x09, item.oscarRep(groupID, buddyID))
def delItemSSI(self, item, groupID = None, buddyID = None):
if not groupID:
groupID = item.group.group.findIDFor(item.group)
if not buddyID:
buddyID = item.group.findIDFor(item)
return self.sendSNAC(0x13,0x0A, item.oscarRep(groupID, buddyID))
def endModifySSI(self):
self.sendSNACnr(0x13,0x12,'')
def setProfile(self, profile):
"""
set the profile.
send None to not set a profile (different from '' for a blank one)
"""
self.profile = profile
tlvs = ''
if self.profile:
tlvs = TLV(1,'text/aolrtf; charset="us-ascii"') + \
TLV(2,self.profile)
tlvs = tlvs + TLV(5, ''.join(self.capabilities))
self.sendSNACnr(0x02, 0x04, tlvs)
def setAway(self, away = None):
"""
set the away message, or return (if away == None)
"""
self.awayMessage = away
tlvs = TLV(3,'text/aolrtf; charset="us-ascii"') + \
TLV(4,away or '')
self.sendSNACnr(0x02, 0x04, tlvs)
def setIdleTime(self, idleTime):
"""
set our idle time. don't call more than once with a non-0 idle time.
"""
self.sendSNACnr(0x01, 0x11, struct.pack('!L',idleTime))
def sendMessage(self, user, message, wantAck = 0, autoResponse = 0, offline = 0 ): \
#haveIcon = 0, ):
"""
        send a message to user (a screen name string, not an OSCARUser instance).
message can be a string, or a multipart tuple.
if wantAck, we return a Deferred that gets a callback when the message is sent.
if autoResponse, this message is an autoResponse, as if from an away message.
if offline, this is an offline message (ICQ only, I think)
"""
data = ''.join([chr(random.randrange(0, 127)) for i in range(8)]) # cookie
data = data + '\x00\x01' + chr(len(user)) + user
if not type(message) in (types.TupleType, types.ListType):
message = [[message,]]
if type(message[0][0]) == types.UnicodeType:
message[0].append('unicode')
messageData = ''
for part in message:
charSet = 0
if 'unicode' in part[1:]:
charSet = 2
elif 'iso-8859-1' in part[1:]:
charSet = 3
elif 'none' in part[1:]:
charSet = 0xffff
if 'macintosh' in part[1:]:
charSubSet = 0xb
else:
charSubSet = 0
messageData = messageData + '\x01\x01' + \
struct.pack('!3H',len(part[0])+4,charSet,charSubSet)
messageData = messageData + part[0]
data = data + TLV(2, '\x05\x01\x00\x03\x01\x01\x02'+messageData)
if wantAck:
data = data + TLV(3,'')
if autoResponse:
data = data + TLV(4,'')
if offline:
data = data + TLV(6,'')
if wantAck:
return self.sendSNAC(0x04, 0x06, data).addCallback(self._cbSendMessageAck, user, message)
self.sendSNACnr(0x04, 0x06, data)
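    # Hedged example (editor's addition): ``message`` may be a plain string or
    # a list of [text, flag...] parts, e.g. a two-part message whose second
    # part is sent as ISO-8859-1:
    #
    #     bos.sendMessage('buddy', [['hello'], ['caf\xe9', 'iso-8859-1']],
    #                     wantAck=1)
    #
    # With wantAck=1 the returned Deferred fires with (user, message) once the
    # server acknowledges the send; "bos" stands for any connected
    # BOSConnection instance.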
def _cbSendMessageAck(self, snac, user, message):
return user, message
def connectService(self, service, wantCallback = 0, extraData = ''):
"""
connect to another service
if wantCallback, we return a Deferred that gets called back when the service is online.
if extraData, append that to our request.
"""
if wantCallback:
d = defer.Deferred()
self.sendSNAC(0x01,0x04,struct.pack('!H',service) + extraData).addCallback(self._cbConnectService, d)
return d
else:
self.sendSNACnr(0x01,0x04,struct.pack('!H',service))
def _cbConnectService(self, snac, d):
d.arm()
self.oscar_01_05(snac[2:], d)
def createChat(self, shortName):
"""
create a chat room
"""
if self.services.has_key(SERVICE_CHATNAV):
return self.services[SERVICE_CHATNAV].createChat(shortName)
else:
d = defer.Deferred()
self.connectService(SERVICE_CHATNAV,1).addCallback(lambda s:d.arm() or s.createChat(shortName).chainDeferred(d))
return d
def joinChat(self, exchange, fullName, instance):
"""
join a chat room
"""
#d = defer.Deferred()
return self.connectService(0x0e, 1, TLV(0x01, struct.pack('!HB',exchange, len(fullName)) + fullName +
struct.pack('!H', instance))).addCallback(self._cbJoinChat) #, d)
#return d
def _cbJoinChat(self, chat):
del self.services[SERVICE_CHAT]
return chat
def warnUser(self, user, anon = 0):
return self.sendSNAC(0x04, 0x08, '\x00'+chr(anon)+chr(len(user))+user).addCallback(self._cbWarnUser)
def _cbWarnUser(self, snac):
oldLevel, newLevel = struct.unpack('!2H', snac[5])
return oldLevel, newLevel
def getInfo(self, user):
#if user.
return self.sendSNAC(0x02, 0x05, '\x00\x01'+chr(len(user))+user).addCallback(self._cbGetInfo)
def _cbGetInfo(self, snac):
user, rest = self.parseUser(snac[5],1)
tlvs = readTLVs(rest)
return tlvs.get(0x02,None)
def getAway(self, user):
return self.sendSNAC(0x02, 0x05, '\x00\x03'+chr(len(user))+user).addCallback(self._cbGetAway)
def _cbGetAway(self, snac):
user, rest = self.parseUser(snac[5],1)
tlvs = readTLVs(rest)
return tlvs.get(0x04,None) # return None if there is no away message
#def acceptSendFileRequest(self,
# methods to be overriden by the client
def initDone(self):
"""
called when we get the rate information, which means we should do other init. stuff.
"""
log.msg('%s initDone' % self)
pass
def updateBuddy(self, user):
"""
called when a buddy changes status, with the OSCARUser for that buddy.
"""
log.msg('%s updateBuddy %s' % (self, user))
pass
def offlineBuddy(self, user):
"""
called when a buddy goes offline
"""
log.msg('%s offlineBuddy %s' % (self, user))
pass
def receiveMessage(self, user, multiparts, flags):
"""
called when someone sends us a message
"""
pass
def receiveWarning(self, newLevel, user):
"""
called when someone warns us.
user is either None (if it was anonymous) or an OSCARUser
"""
pass
def receiveChatInvite(self, user, message, exchange, fullName, instance, shortName, inviteTime):
"""
called when someone invites us to a chat room
"""
pass
def chatReceiveMessage(self, chat, user, message):
"""
called when someone in a chatroom sends us a message in the chat
"""
pass
def chatMemberJoined(self, chat, member):
"""
called when a member joins the chat
"""
pass
def chatMemberLeft(self, chat, member):
"""
called when a member leaves the chat
"""
pass
def receiveSendFileRequest(self, user, file, description, cookie):
"""
called when someone tries to send a file to us
"""
pass
class OSCARService(SNACBased):
def __init__(self, bos, cookie, d = None):
SNACBased.__init__(self, cookie)
self.bos = bos
self.d = d
def connectionLost(self, reason):
for k,v in self.bos.services.items():
if v == self:
del self.bos.services[k]
return
def clientReady(self):
SNACBased.clientReady(self)
if self.d:
self.d.callback(self)
self.d = None
class ChatNavService(OSCARService):
snacFamilies = {
0x01:(3, 0x0010, 0x059b),
0x0d:(1, 0x0010, 0x059b)
}
def oscar_01_07(self, snac):
# rate info
self.sendSNACnr(0x01, 0x08, '\000\001\000\002\000\003\000\004\000\005')
self.sendSNACnr(0x0d, 0x02, '')
def oscar_0D_09(self, snac):
self.clientReady()
def getChatInfo(self, exchange, name, instance):
d = defer.Deferred()
self.sendSNAC(0x0d,0x04,struct.pack('!HB',exchange,len(name)) + \
name + struct.pack('!HB',instance,2)). \
addCallback(self._cbGetChatInfo, d)
return d
def _cbGetChatInfo(self, snac, d):
data = snac[5][4:]
exchange, length = struct.unpack('!HB',data[:3])
fullName = data[3:3+length]
instance = struct.unpack('!H',data[3+length:5+length])[0]
tlvs = readTLVs(data[8+length:])
shortName = tlvs[0x6a]
inviteTime = struct.unpack('!L',tlvs[0xca])[0]
info = (exchange,fullName,instance,shortName,inviteTime)
d.callback(info)
def createChat(self, shortName):
#d = defer.Deferred()
data = '\x00\x04\x06create\xff\xff\x01\x00\x03'
data = data + TLV(0xd7, 'en')
data = data + TLV(0xd6, 'us-ascii')
data = data + TLV(0xd3, shortName)
return self.sendSNAC(0x0d, 0x08, data).addCallback(self._cbCreateChat)
#return d
def _cbCreateChat(self, snac): #d):
exchange, length = struct.unpack('!HB',snac[5][4:7])
fullName = snac[5][7:7+length]
instance = struct.unpack('!H',snac[5][7+length:9+length])[0]
#d.callback((exchange, fullName, instance))
return exchange, fullName, instance
class ChatService(OSCARService):
snacFamilies = {
0x01:(3, 0x0010, 0x059b),
0x0E:(1, 0x0010, 0x059b)
}
def __init__(self,bos,cookie, d = None):
OSCARService.__init__(self,bos,cookie,d)
self.exchange = None
self.fullName = None
self.instance = None
self.name = None
self.members = None
clientReady = SNACBased.clientReady # we'll do our own callback
def oscar_01_07(self,snac):
self.sendSNAC(0x01,0x08,"\000\001\000\002\000\003\000\004\000\005")
self.clientReady()
def oscar_0E_02(self, snac):
# try: # this is EVIL
# data = snac[3][4:]
# self.exchange, length = struct.unpack('!HB',data[:3])
# self.fullName = data[3:3+length]
# self.instance = struct.unpack('!H',data[3+length:5+length])[0]
# tlvs = readTLVs(data[8+length:])
# self.name = tlvs[0xd3]
# self.d.callback(self)
# except KeyError:
data = snac[3]
self.exchange, length = struct.unpack('!HB',data[:3])
self.fullName = data[3:3+length]
self.instance = struct.unpack('!H',data[3+length:5+length])[0]
tlvs = readTLVs(data[8+length:])
self.name = tlvs[0xd3]
self.d.callback(self)
def oscar_0E_03(self,snac):
users=[]
rest=snac[3]
while rest:
user, rest = self.bos.parseUser(rest, 1)
users.append(user)
if not self.fullName:
self.members = users
else:
self.members.append(users[0])
self.bos.chatMemberJoined(self,users[0])
def oscar_0E_04(self,snac):
user=self.bos.parseUser(snac[3])
for u in self.members:
if u.name == user.name: # same person!
self.members.remove(u)
self.bos.chatMemberLeft(self,user)
def oscar_0E_06(self,snac):
data = snac[3]
user,rest=self.bos.parseUser(snac[3][14:],1)
tlvs = readTLVs(rest[8:])
message=tlvs[1]
self.bos.chatReceiveMessage(self,user,message)
def sendMessage(self,message):
tlvs=TLV(0x02,"us-ascii")+TLV(0x03,"en")+TLV(0x01,message)
self.sendSNAC(0x0e,0x05,
"\x46\x30\x38\x30\x44\x00\x63\x00\x00\x03\x00\x01\x00\x00\x00\x06\x00\x00\x00\x05"+
struct.pack("!H",len(tlvs))+
tlvs)
def leaveChat(self):
self.disconnect()
class OscarAuthenticator(OscarConnection):
BOSClass = BOSConnection
def __init__(self,username,password,deferred=None,icq=0):
self.username=username
self.password=password
self.deferred=deferred
self.icq=icq # icq mode is disabled
#if icq and self.BOSClass==BOSConnection:
# self.BOSClass=ICQConnection
def oscar_(self,flap):
if not self.icq:
self.sendFLAP("\000\000\000\001", 0x01)
self.sendFLAP(SNAC(0x17,0x06,0,
TLV(TLV_USERNAME,self.username)+
TLV(0x004B,'')))
self.state="Key"
else:
encpass=encryptPasswordICQ(self.password)
self.sendFLAP('\000\000\000\001'+
TLV(0x01,self.username)+
TLV(0x02,encpass)+
TLV(0x03,'ICQ Inc. - Product of ICQ (TM).2001b.5.18.1.3659.85')+
TLV(0x16,"\x01\x0a")+
TLV(0x17,"\x00\x05")+
TLV(0x18,"\x00\x12")+
TLV(0x19,"\000\001")+
TLV(0x1a,"\x0eK")+
TLV(0x14,"\x00\x00\x00U")+
TLV(0x0f,"en")+
TLV(0x0e,"us"),0x01)
self.state="Cookie"
def oscar_Key(self,data):
snac=readSNAC(data[1])
key=snac[5][2:]
encpass=encryptPasswordMD5(self.password,key)
self.sendFLAP(SNAC(0x17,0x02,0,
TLV(TLV_USERNAME,self.username)+
TLV(TLV_PASSWORD,encpass)+
TLV(0x004C, '')+ # unknown
TLV(TLV_CLIENTNAME,"AOL Instant Messenger (SM), version 4.8.2790/WIN32")+
TLV(0x0016,"\x01\x09")+
TLV(TLV_CLIENTMAJOR,"\000\004")+
TLV(TLV_CLIENTMINOR,"\000\010")+
TLV(0x0019,"\000\000")+
TLV(TLV_CLIENTSUB,"\x0A\xE6")+
TLV(0x0014,"\x00\x00\x00\xBB")+
TLV(TLV_LANG,"en")+
TLV(TLV_COUNTRY,"us")+
TLV(TLV_USESSI,"\001")))
return "Cookie"
def oscar_Cookie(self,data):
snac=readSNAC(data[1])
if self.icq:
i=snac[5].find("\000")
snac[5]=snac[5][i:]
tlvs=readTLVs(snac[5])
if tlvs.has_key(6):
self.cookie=tlvs[6]
server,port=string.split(tlvs[5],":")
c = protocol.ClientCreator(reactor, self.BOSClass, self.username, self.cookie)
d = c.connectTCP(server, int(port))
d.addErrback(lambda x: log.msg("Connection Failed! Reason: %s" % x))
if self.deferred:
d.chainDeferred(self.deferred)
self.disconnect()
elif tlvs.has_key(8):
errorcode=tlvs[8]
errorurl=tlvs[4]
if errorcode=='\000\030':
error="You are attempting to sign on again too soon. Please try again later."
elif errorcode=='\000\005':
error="Invalid Username or Password."
else: error=repr(errorcode)
self.error(error,errorurl)
else:
log.msg('hmm, weird tlvs for %s cookie packet' % str(self))
log.msg(tlvs)
log.msg('snac')
log.msg(str(snac))
return "None"
def oscar_None(self,data): pass
def error(self,error,url):
log.msg("ERROR! %s %s" % (error,url))
if self.deferred: self.deferred.errback((error,url))
self.transport.loseConnection()
FLAP_CHANNEL_NEW_CONNECTION = 0x01
FLAP_CHANNEL_DATA = 0x02
FLAP_CHANNEL_ERROR = 0x03
FLAP_CHANNEL_CLOSE_CONNECTION = 0x04
SERVICE_CHATNAV = 0x0d
SERVICE_CHAT = 0x0e
serviceClasses = {
SERVICE_CHATNAV:ChatNavService,
SERVICE_CHAT:ChatService
}
TLV_USERNAME = 0x0001
TLV_CLIENTNAME = 0x0003
TLV_COUNTRY = 0x000E
TLV_LANG = 0x000F
TLV_CLIENTMAJOR = 0x0017
TLV_CLIENTMINOR = 0x0018
TLV_CLIENTSUB = 0x001A
TLV_PASSWORD = 0x0025
TLV_USESSI = 0x004A
CAP_ICON = '\011F\023FL\177\021\321\202"DEST\000\000'
CAP_VOICE = '\011F\023AL\177\021\321\202"DEST\000\000'
CAP_IMAGE = '\011F\023EL\177\021\321\202"DEST\000\000'
CAP_CHAT = 't\217$ b\207\021\321\202"DEST\000\000'
CAP_GET_FILE = '\011F\023HL\177\021\321\202"DEST\000\000'
CAP_SEND_FILE = '\011F\023CL\177\021\321\202"DEST\000\000'
CAP_GAMES = '\011F\023GL\177\021\321\202"DEST\000\000'
CAP_SEND_LIST = '\011F\023KL\177\021\321\202"DEST\000\000'
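# --- Hedged usage sketch (editor's addition, not part of the protocol code).
# A minimal AIM client subclasses BOSConnection and logs in through
# OscarAuthenticator.  The login host/port and the requestSSI()/clientReady()
# call sequence follow the conventional Twisted OSCAR demo and are assumptions
# here, not values defined in this module.
#
#   class EchoClient(BOSConnection):
#       def initDone(self):
#           self.requestSSI().addCallback(self._gotSSI)
#       def _gotSSI(self, result):
#           self.activateSSI()
#           self.setProfile('hello from a sketch')
#           self.clientReady()
#       def receiveMessage(self, user, multiparts, flags):
#           self.sendMessage(user.name, 'echo: ' + multiparts[0][0])
#
#   class EchoAuthenticator(OscarAuthenticator):
#       BOSClass = EchoClient
#
#   protocol.ClientCreator(reactor, EchoAuthenticator, 'screenname',
#                          'password').connectTCP('login.oscar.aol.com', 5190)
#   reactor.run()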
| gpl-2.0 | -7,954,481,976,799,081,000 | 2,996,183,882,271,819,300 | 34.103084 | 151 | 0.542026 | false |
mollstam/UnrealPy | UnrealPyEmbed/Development/Python/2015.08.07-Python2710-x64-Source-vs2015/Python27/Source/Python-2.7.10/Lib/lib2to3/fixes/fix_intern.py | 315 | 1405 | # Copyright 2006 Georg Brandl.
# Licensed to PSF under a Contributor Agreement.
"""Fixer for intern().
intern(s) -> sys.intern(s)"""
# Local imports
from .. import pytree
from .. import fixer_base
from ..fixer_util import Name, Attr, touch_import
class FixIntern(fixer_base.BaseFix):
BM_compatible = True
order = "pre"
PATTERN = """
power< 'intern'
trailer< lpar='('
( not(arglist | argument<any '=' any>) obj=any
| obj=arglist<(not argument<any '=' any>) any ','> )
rpar=')' >
after=any*
>
"""
def transform(self, node, results):
syms = self.syms
obj = results["obj"].clone()
if obj.type == syms.arglist:
newarglist = obj.clone()
else:
newarglist = pytree.Node(syms.arglist, [obj.clone()])
after = results["after"]
if after:
after = [n.clone() for n in after]
new = pytree.Node(syms.power,
Attr(Name(u"sys"), Name(u"intern")) +
[pytree.Node(syms.trailer,
[results["lpar"].clone(),
newarglist,
results["rpar"].clone()])] + after)
new.prefix = node.prefix
touch_import(None, u'sys', node)
return new
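# --- Hedged usage sketch (editor's addition, not part of the fixer): running
# this fixer through lib2to3's refactoring machinery rewrites intern() calls
# to sys.intern() and (via touch_import) inserts the needed "import sys".
#
#   from lib2to3.refactor import RefactoringTool
#   tool = RefactoringTool(['lib2to3.fixes.fix_intern'])
#   print(tool.refactor_string(u'cached = intern(name)\n', '<demo>'))
#   # -> roughly: import sys
#   #             cached = sys.intern(name)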
| mit | -251,752,574,301,332,830 | 8,969,037,696,612,287,000 | 29.543478 | 75 | 0.485409 | false |
EricMuller/mynotes-backend | requirements/twisted/Twisted-17.1.0/build/lib.linux-x86_64-3.5/twisted/test/iosim.py | 12 | 17684 | # -*- test-case-name: twisted.test.test_amp,twisted.test.test_iosim -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Utilities and helpers for simulating a network
"""
from __future__ import absolute_import, division, print_function
import itertools
try:
from OpenSSL.SSL import Error as NativeOpenSSLError
except ImportError:
pass
from zope.interface import implementer, directlyProvides
from twisted.internet.endpoints import TCP4ClientEndpoint, TCP4ServerEndpoint
from twisted.internet.protocol import Factory, Protocol
from twisted.internet.error import ConnectionRefusedError
from twisted.python.failure import Failure
from twisted.internet import error
from twisted.internet import interfaces
from .proto_helpers import MemoryReactorClock
class TLSNegotiation:
def __init__(self, obj, connectState):
self.obj = obj
self.connectState = connectState
self.sent = False
self.readyToSend = connectState
def __repr__(self):
return 'TLSNegotiation(%r)' % (self.obj,)
def pretendToVerify(self, other, tpt):
# Set the transport problems list here? disconnections?
# hmmmmm... need some negative path tests.
if not self.obj.iosimVerify(other.obj):
tpt.disconnectReason = NativeOpenSSLError()
tpt.loseConnection()
@implementer(interfaces.IAddress)
class FakeAddress(object):
"""
The default address type for the host and peer of L{FakeTransport}
connections.
"""
@implementer(interfaces.ITransport,
interfaces.ITLSTransport)
class FakeTransport:
"""
A wrapper around a file-like object to make it behave as a Transport.
This doesn't actually stream the file to the attached protocol,
and is thus useful mainly as a utility for debugging protocols.
"""
_nextserial = staticmethod(lambda counter=itertools.count(): next(counter))
closed = 0
disconnecting = 0
disconnected = 0
disconnectReason = error.ConnectionDone("Connection done")
producer = None
streamingProducer = 0
tls = None
def __init__(self, protocol, isServer, hostAddress=None, peerAddress=None):
"""
@param protocol: This transport will deliver bytes to this protocol.
@type protocol: L{IProtocol} provider
@param isServer: C{True} if this is the accepting side of the
connection, C{False} if it is the connecting side.
@type isServer: L{bool}
@param hostAddress: The value to return from C{getHost}. L{None}
results in a new L{FakeAddress} being created to use as the value.
@type hostAddress: L{IAddress} provider or L{None}
@param peerAddress: The value to return from C{getPeer}. L{None}
results in a new L{FakeAddress} being created to use as the value.
@type peerAddress: L{IAddress} provider or L{None}
"""
self.protocol = protocol
self.isServer = isServer
self.stream = []
self.serial = self._nextserial()
if hostAddress is None:
hostAddress = FakeAddress()
self.hostAddress = hostAddress
if peerAddress is None:
peerAddress = FakeAddress()
self.peerAddress = peerAddress
def __repr__(self):
return 'FakeTransport<%s,%s,%s>' % (
self.isServer and 'S' or 'C', self.serial,
self.protocol.__class__.__name__)
def write(self, data):
if self.tls is not None:
self.tlsbuf.append(data)
else:
self.stream.append(data)
def _checkProducer(self):
# Cheating; this is called at "idle" times to allow producers to be
# found and dealt with
if self.producer:
self.producer.resumeProducing()
def registerProducer(self, producer, streaming):
"""
From abstract.FileDescriptor
"""
self.producer = producer
self.streamingProducer = streaming
if not streaming:
producer.resumeProducing()
def unregisterProducer(self):
self.producer = None
def stopConsuming(self):
self.unregisterProducer()
self.loseConnection()
def writeSequence(self, iovec):
self.write(b"".join(iovec))
def loseConnection(self):
self.disconnecting = True
def abortConnection(self):
"""
For the time being, this is the same as loseConnection; no buffered
data will be lost.
"""
self.disconnecting = True
def reportDisconnect(self):
if self.tls is not None:
# We were in the middle of negotiating! Must have been a TLS
# problem.
err = NativeOpenSSLError()
else:
err = self.disconnectReason
self.protocol.connectionLost(Failure(err))
def logPrefix(self):
"""
Identify this transport/event source to the logging system.
"""
return "iosim"
def getPeer(self):
return self.peerAddress
def getHost(self):
return self.hostAddress
def resumeProducing(self):
# Never sends data anyways
pass
def pauseProducing(self):
# Never sends data anyways
pass
def stopProducing(self):
self.loseConnection()
def startTLS(self, contextFactory, beNormal=True):
# Nothing's using this feature yet, but startTLS has an undocumented
# second argument which defaults to true; if set to False, servers will
# behave like clients and clients will behave like servers.
connectState = self.isServer ^ beNormal
self.tls = TLSNegotiation(contextFactory, connectState)
self.tlsbuf = []
def getOutBuffer(self):
"""
Get the pending writes from this transport, clearing them from the
pending buffer.
@return: the bytes written with C{transport.write}
@rtype: L{bytes}
"""
S = self.stream
if S:
self.stream = []
return b''.join(S)
elif self.tls is not None:
if self.tls.readyToSend:
# Only _send_ the TLS negotiation "packet" if I'm ready to.
self.tls.sent = True
return self.tls
else:
return None
else:
return None
def bufferReceived(self, buf):
if isinstance(buf, TLSNegotiation):
assert self.tls is not None # By the time you're receiving a
# negotiation, you have to have called
# startTLS already.
if self.tls.sent:
self.tls.pretendToVerify(buf, self)
self.tls = None # We're done with the handshake if we've gotten
# this far... although maybe it failed...?
# TLS started! Unbuffer...
b, self.tlsbuf = self.tlsbuf, None
self.writeSequence(b)
directlyProvides(self, interfaces.ISSLTransport)
else:
# We haven't sent our own TLS negotiation: time to do that!
self.tls.readyToSend = True
else:
self.protocol.dataReceived(buf)
def makeFakeClient(clientProtocol):
"""
Create and return a new in-memory transport hooked up to the given protocol.
@param clientProtocol: The client protocol to use.
@type clientProtocol: L{IProtocol} provider
@return: The transport.
@rtype: L{FakeTransport}
"""
return FakeTransport(clientProtocol, isServer=False)
def makeFakeServer(serverProtocol):
"""
Create and return a new in-memory transport hooked up to the given protocol.
@param serverProtocol: The server protocol to use.
@type serverProtocol: L{IProtocol} provider
@return: The transport.
@rtype: L{FakeTransport}
"""
return FakeTransport(serverProtocol, isServer=True)
class IOPump:
"""
Utility to pump data between clients and servers for protocol testing.
Perhaps this is a utility worthy of being in protocol.py?
"""
def __init__(self, client, server, clientIO, serverIO, debug):
self.client = client
self.server = server
self.clientIO = clientIO
self.serverIO = serverIO
self.debug = debug
def flush(self, debug=False):
"""
Pump until there is no more input or output.
Returns whether any data was moved.
"""
result = False
for x in range(1000):
if self.pump(debug):
result = True
else:
break
else:
assert 0, "Too long"
return result
def pump(self, debug=False):
"""
Move data back and forth.
Returns whether any data was moved.
"""
if self.debug or debug:
print('-- GLUG --')
sData = self.serverIO.getOutBuffer()
cData = self.clientIO.getOutBuffer()
self.clientIO._checkProducer()
self.serverIO._checkProducer()
if self.debug or debug:
print('.')
# XXX slightly buggy in the face of incremental output
if cData:
print('C: ' + repr(cData))
if sData:
print('S: ' + repr(sData))
if cData:
self.serverIO.bufferReceived(cData)
if sData:
self.clientIO.bufferReceived(sData)
if cData or sData:
return True
if (self.serverIO.disconnecting and
not self.serverIO.disconnected):
if self.debug or debug:
print('* C')
self.serverIO.disconnected = True
self.clientIO.disconnecting = True
self.clientIO.reportDisconnect()
return True
if self.clientIO.disconnecting and not self.clientIO.disconnected:
if self.debug or debug:
print('* S')
self.clientIO.disconnected = True
self.serverIO.disconnecting = True
self.serverIO.reportDisconnect()
return True
return False
def connect(serverProtocol, serverTransport, clientProtocol, clientTransport,
debug=False, greet=True):
"""
Create a new L{IOPump} connecting two protocols.
@param serverProtocol: The protocol to use on the accepting side of the
connection.
@type serverProtocol: L{IProtocol} provider
@param serverTransport: The transport to associate with C{serverProtocol}.
@type serverTransport: L{FakeTransport}
@param clientProtocol: The protocol to use on the initiating side of the
connection.
@type clientProtocol: L{IProtocol} provider
@param clientTransport: The transport to associate with C{clientProtocol}.
@type clientTransport: L{FakeTransport}
@param debug: A flag indicating whether to log information about what the
L{IOPump} is doing.
@type debug: L{bool}
@param greet: Should the L{IOPump} be L{flushed <IOPump.flush>} once before
returning to put the protocols into their post-handshake or
post-server-greeting state?
@type greet: L{bool}
@return: An L{IOPump} which connects C{serverProtocol} and
C{clientProtocol} and delivers bytes between them when it is pumped.
@rtype: L{IOPump}
"""
serverProtocol.makeConnection(serverTransport)
clientProtocol.makeConnection(clientTransport)
pump = IOPump(
clientProtocol, serverProtocol, clientTransport, serverTransport, debug
)
if greet:
# Kick off server greeting, etc
pump.flush()
return pump
def connectedServerAndClient(ServerClass, ClientClass,
clientTransportFactory=makeFakeClient,
serverTransportFactory=makeFakeServer,
debug=False, greet=True):
"""
Connect a given server and client class to each other.
@param ServerClass: a callable that produces the server-side protocol.
@type ServerClass: 0-argument callable returning L{IProtocol} provider.
@param ClientClass: like C{ServerClass} but for the other side of the
connection.
@type ClientClass: 0-argument callable returning L{IProtocol} provider.
@param clientTransportFactory: a callable that produces the transport which
will be attached to the protocol returned from C{ClientClass}.
@type clientTransportFactory: callable taking (L{IProtocol}) and returning
L{FakeTransport}
@param serverTransportFactory: a callable that produces the transport which
will be attached to the protocol returned from C{ServerClass}.
@type serverTransportFactory: callable taking (L{IProtocol}) and returning
L{FakeTransport}
@param debug: Should this dump an escaped version of all traffic on this
connection to stdout for inspection?
@type debug: L{bool}
@param greet: Should the L{IOPump} be L{flushed <IOPump.flush>} once before
returning to put the protocols into their post-handshake or
post-server-greeting state?
@type greet: L{bool}
@return: the client protocol, the server protocol, and an L{IOPump} which,
when its C{pump} and C{flush} methods are called, will move data
between the created client and server protocol instances.
@rtype: 3-L{tuple} of L{IProtocol}, L{IProtocol}, L{IOPump}
"""
c = ClientClass()
s = ServerClass()
cio = clientTransportFactory(c)
sio = serverTransportFactory(s)
return c, s, connect(s, sio, c, cio, debug, greet)
def _factoriesShouldConnect(clientInfo, serverInfo):
"""
Should the client and server described by the arguments be connected to
each other, i.e. do their port numbers match?
@param clientInfo: the args for connectTCP
@type clientInfo: L{tuple}
@param serverInfo: the args for listenTCP
@type serverInfo: L{tuple}
@return: If they do match, return factories for the client and server that
should connect; otherwise return L{None}, indicating they shouldn't be
connected.
@rtype: L{None} or 2-L{tuple} of (L{ClientFactory},
L{IProtocolFactory})
"""
(clientHost, clientPort, clientFactory, clientTimeout,
clientBindAddress) = clientInfo
(serverPort, serverFactory, serverBacklog,
serverInterface) = serverInfo
if serverPort == clientPort:
return clientFactory, serverFactory
else:
return None
class ConnectionCompleter(object):
"""
A L{ConnectionCompleter} can cause synthetic TCP connections established by
L{MemoryReactor.connectTCP} and L{MemoryReactor.listenTCP} to succeed or
fail.
"""
def __init__(self, memoryReactor):
"""
Create a L{ConnectionCompleter} from a L{MemoryReactor}.
@param memoryReactor: The reactor to attach to.
@type memoryReactor: L{MemoryReactor}
"""
self._reactor = memoryReactor
def succeedOnce(self, debug=False):
"""
Complete a single TCP connection established on this
L{ConnectionCompleter}'s L{MemoryReactor}.
@param debug: A flag; whether to dump output from the established
connection to stdout.
@type debug: L{bool}
@return: a pump for the connection, or L{None} if no connection could
be established.
@rtype: L{IOPump} or L{None}
"""
memoryReactor = self._reactor
for clientIdx, clientInfo in enumerate(memoryReactor.tcpClients):
for serverInfo in memoryReactor.tcpServers:
factories = _factoriesShouldConnect(clientInfo, serverInfo)
if factories:
memoryReactor.tcpClients.remove(clientInfo)
memoryReactor.connectors.pop(clientIdx)
clientFactory, serverFactory = factories
clientProtocol = clientFactory.buildProtocol(None)
serverProtocol = serverFactory.buildProtocol(None)
serverTransport = makeFakeServer(serverProtocol)
clientTransport = makeFakeClient(clientProtocol)
return connect(serverProtocol, serverTransport,
clientProtocol, clientTransport,
debug)
def failOnce(self, reason=Failure(ConnectionRefusedError())):
"""
Fail a single TCP connection established on this
L{ConnectionCompleter}'s L{MemoryReactor}.
@param reason: the reason to provide that the connection failed.
@type reason: L{Failure}
"""
self._reactor.tcpClients.pop(0)[2].clientConnectionFailed(
self._reactor.connectors.pop(0), reason
)
def connectableEndpoint(debug=False):
"""
Create an endpoint that can be fired on demand.
@param debug: A flag; whether to dump output from the established
connection to stdout.
@type debug: L{bool}
@return: A client endpoint, and an object that will cause one of the
L{Deferred}s returned by that client endpoint.
@rtype: 2-L{tuple} of (L{IStreamClientEndpoint}, L{ConnectionCompleter})
"""
reactor = MemoryReactorClock()
clientEndpoint = TCP4ClientEndpoint(reactor, "0.0.0.0", 4321)
serverEndpoint = TCP4ServerEndpoint(reactor, 4321)
serverEndpoint.listen(Factory.forProtocol(Protocol))
return clientEndpoint, ConnectionCompleter(reactor)
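# --- Hedged usage sketch (editor's addition): pumping bytes between two
# in-memory protocols with connectedServerAndClient.  Everything referenced
# below is defined or imported in this module.
#
#   class Echo(Protocol):
#       def dataReceived(self, data):
#           self.transport.write(data)
#
#   class Collector(Protocol):
#       def connectionMade(self):
#           self.received = b''
#       def dataReceived(self, data):
#           self.received += data
#
#   client, server, pump = connectedServerAndClient(Echo, Collector)
#   client.transport.write(b'ping')
#   pump.flush()
#   assert client.received == b'ping'   # echoed back by the in-memory server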
| mit | 580,646,678,271,824,500 | 8,012,286,798,948,177,000 | 30.9783 | 80 | 0.634246 | false |
longmen21/edx-platform | common/djangoapps/third_party_auth/admin.py | 14 | 5780 | # -*- coding: utf-8 -*-
"""
Admin site configuration for third party authentication
"""
from django import forms
from django.contrib import admin
from config_models.admin import ConfigurationModelAdmin, KeyedConfigurationModelAdmin
from .models import (
OAuth2ProviderConfig,
SAMLProviderConfig,
SAMLConfiguration,
SAMLProviderData,
LTIProviderConfig,
ProviderApiPermissions,
_PSA_OAUTH2_BACKENDS,
_PSA_SAML_BACKENDS
)
from .tasks import fetch_saml_metadata
from third_party_auth.provider import Registry
class OAuth2ProviderConfigForm(forms.ModelForm):
""" Django Admin form class for OAuth2ProviderConfig """
backend_name = forms.ChoiceField(choices=((name, name) for name in _PSA_OAUTH2_BACKENDS))
class OAuth2ProviderConfigAdmin(KeyedConfigurationModelAdmin):
""" Django Admin class for OAuth2ProviderConfig """
form = OAuth2ProviderConfigForm
def get_list_display(self, request):
""" Don't show every single field in the admin change list """
return (
'name', 'enabled', 'backend_name', 'secondary', 'skip_registration_form',
'skip_email_verification', 'change_date', 'changed_by', 'edit_link',
)
admin.site.register(OAuth2ProviderConfig, OAuth2ProviderConfigAdmin)
class SAMLProviderConfigForm(forms.ModelForm):
""" Django Admin form class for SAMLProviderConfig """
backend_name = forms.ChoiceField(choices=((name, name) for name in _PSA_SAML_BACKENDS))
class SAMLProviderConfigAdmin(KeyedConfigurationModelAdmin):
""" Django Admin class for SAMLProviderConfig """
form = SAMLProviderConfigForm
def get_list_display(self, request):
""" Don't show every single field in the admin change list """
return (
'name', 'enabled', 'backend_name', 'entity_id', 'metadata_source',
'has_data', 'icon_class', 'icon_image', 'change_date',
'changed_by', 'edit_link'
)
def has_data(self, inst):
""" Do we have cached metadata for this SAML provider? """
if not inst.is_active:
return None # N/A
data = SAMLProviderData.current(inst.entity_id)
return bool(data and data.is_valid())
has_data.short_description = u'Metadata Ready'
has_data.boolean = True
def save_model(self, request, obj, form, change):
"""
Post save: Queue an asynchronous metadata fetch to update SAMLProviderData.
We only want to do this for manual edits done using the admin interface.
Note: This only works if the celery worker and the app worker are using the
same 'configuration' cache.
"""
super(SAMLProviderConfigAdmin, self).save_model(request, obj, form, change)
fetch_saml_metadata.apply_async((), countdown=2)
admin.site.register(SAMLProviderConfig, SAMLProviderConfigAdmin)
class SAMLConfigurationAdmin(ConfigurationModelAdmin):
""" Django Admin class for SAMLConfiguration """
def get_list_display(self, request):
""" Shorten the public/private keys in the change view """
return (
'change_date', 'changed_by', 'enabled', 'entity_id',
'org_info_str', 'key_summary',
)
def key_summary(self, inst):
""" Short summary of the key pairs configured """
public_key = inst.get_setting('SP_PUBLIC_CERT')
private_key = inst.get_setting('SP_PRIVATE_KEY')
if not public_key or not private_key:
return u'<em>Key pair incomplete/missing</em>'
pub1, pub2 = public_key[0:10], public_key[-10:]
priv1, priv2 = private_key[0:10], private_key[-10:]
return u'Public: {}…{}<br>Private: {}…{}'.format(pub1, pub2, priv1, priv2)
key_summary.allow_tags = True
admin.site.register(SAMLConfiguration, SAMLConfigurationAdmin)
class SAMLProviderDataAdmin(admin.ModelAdmin):
""" Django Admin class for SAMLProviderData (Read Only) """
list_display = ('entity_id', 'is_valid', 'fetched_at', 'expires_at', 'sso_url')
readonly_fields = ('is_valid', )
def get_readonly_fields(self, request, obj=None):
if obj: # editing an existing object
return self.model._meta.get_all_field_names() # pylint: disable=protected-access
return self.readonly_fields
admin.site.register(SAMLProviderData, SAMLProviderDataAdmin)
class LTIProviderConfigAdmin(KeyedConfigurationModelAdmin):
""" Django Admin class for LTIProviderConfig """
exclude = (
'icon_class',
'icon_image',
'secondary',
)
def get_list_display(self, request):
""" Don't show every single field in the admin change list """
return (
'name',
'enabled',
'lti_consumer_key',
'lti_max_timestamp_age',
'change_date',
'changed_by',
'edit_link',
)
admin.site.register(LTIProviderConfig, LTIProviderConfigAdmin)
class ApiPermissionsAdminForm(forms.ModelForm):
""" Django admin form for ApiPermissions model """
class Meta(object):
model = ProviderApiPermissions
fields = ['client', 'provider_id']
provider_id = forms.ChoiceField(choices=[], required=True)
def __init__(self, *args, **kwargs):
super(ApiPermissionsAdminForm, self).__init__(*args, **kwargs)
self.fields['provider_id'].choices = (
(provider.provider_id, "{} ({})".format(provider.name, provider.provider_id))
for provider in Registry.enabled()
)
class ApiPermissionsAdmin(admin.ModelAdmin):
""" Django Admin class for ApiPermissions """
list_display = ('client', 'provider_id')
form = ApiPermissionsAdminForm
admin.site.register(ProviderApiPermissions, ApiPermissionsAdmin)
| agpl-3.0 | -3,763,696,914,612,866,600 | -7,701,499,428,500,476,000 | 34.219512 | 93 | 0.661011 | false |
milankl/swm | calc/misc/c_diss_plot.py | 1 | 3966 | from __future__ import print_function
path = '/home/mkloewer/python/swm/'
import os; os.chdir(path) # change working directory
import numpy as np
from scipy import sparse
import time as tictoc
from netCDF4 import Dataset
import glob
import matplotlib.pyplot as plt
# OPTIONS
runfolder = [2,3]
## read data
for r,i in zip(runfolder,range(len(runfolder))):
runpath = path+'data/run%04i' % r
if i == 0:
u = np.load(runpath+'/u_sub.npy')
v = np.load(runpath+'/v_sub.npy')
h = np.load(runpath+'/h_sub.npy')
time = np.load(runpath+'/t_sub.npy')
print('run %i read.' % r)
else:
u = np.concatenate((u,np.load(runpath+'/u_sub.npy')))
v = np.concatenate((v,np.load(runpath+'/v_sub.npy')))
h = np.concatenate((h,np.load(runpath+'/h_sub.npy')))
time = np.hstack((time,np.load(runpath+'/t_sub.npy')))
print('run %i read.' % r)
t = time / 3600. / 24. # in days
## read param
global param
param = np.load(runpath+'/param.npy').all()
param['dat_type'] = np.float32
# import functions
exec(open(path+'swm_param.py').read())
exec(open(path+'swm_operators.py').read())
exec(open(path+'swm_output.py').read())
param['output'] = 0
set_grad_mat()
set_interp_mat()
set_lapl_mat()
set_coriolis()
tlen = len(time)
## create ouputfolder
try:
os.mkdir(runpath+'/analysis')
except:
pass
## reshape u,v
u = u.reshape((tlen,param['Nu'])).T
v = v.reshape((tlen,param['Nv'])).T
h = h.reshape((tlen,param['NT'])).T
print('Reshape done.')
##
dudx = Gux.dot(u)
dudy = Guy.dot(u)
dvdx = Gvx.dot(v)
dvdy = Gvy.dot(v)
n = 2
D = np.sqrt((dudx - dvdy)**2 + IqT.dot((dudy + dvdx)**2))
Ro = (D.T/f_T)
Rom = Ro.mean(axis=0)
c = (1/(1+Ro)**n).mean(axis=0)
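# Editor's note (assumption): D above is the magnitude of the horizontal
# deformation rate, Ro = D/f a deformation-based Rossby number, and
# c = 1/(1+Ro)**n the resulting dissipation scaling factor; it is compared
# below with Ro2, the estimate built from the advective and Coriolis terms.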
# ROSSBY NUMBER FROM ADVECTIVE VS. CORIOLIS TERMS (MEAN)
u_T = IuT.dot(u)
v_T = IvT.dot(v)
print('u,v interpolation done.')
#advective term
adv_u = u_T*Gux.dot(u) + v_T*IqT.dot(Guy.dot(u))
adv_v = u_T*IqT.dot(Gvx.dot(v)) + v_T*Gvy.dot(v)
del u_T,v_T
adv_term = np.sqrt(adv_u**2 + adv_v**2)
del adv_u, adv_v
print('Advection term done.')
#coriolis term
cor_term = (f_T*np.sqrt(IuT.dot(u**2) + IvT.dot(v**2)).T).T
print('Coriolis term done.')
Ro2 = adv_term / cor_term
c2 = (1/(1+Ro2)**n).mean(axis=1)
Ro2m = Ro2.mean(axis=1)
##
levs1 = np.linspace(0,.2,21)
levs2 = np.linspace(0.5,1,21)
fig,axs = plt.subplots(2,3,sharex=True,sharey=True,figsize=(9,5.5))
plt.tight_layout(rect=[-.02,-.03,1.12,.97],w_pad=0.1)
axs[0,0].contourf(param['x_T'],param['y_T'],h2mat(Ro2m),levs1)
axs[0,1].contourf(param['x_T'],param['y_T'],h2mat(Rom),levs1,extend='max')
m1 = axs[0,2].contourf(param['x_T'],param['y_T'],h2mat(Ro[-1,:]),levs1,extend='max')
plt.colorbar(m1,ax=(axs[0,0],axs[0,1],axs[0,2]),ticks=np.arange(0,.22,.04))
axs[1,0].contourf(param['x_T'],param['y_T'],h2mat(c2),levs2)
m21 = axs[1,0].contour(param['x_T'],param['y_T'],h2mat(c2),[0.8],linewidths=0.7)
axs[1,1].contourf(param['x_T'],param['y_T'],h2mat(c),levs2)
m2 = axs[1,2].contourf(param['x_T'],param['y_T'],h2mat(1/(1+Ro[-1,:])**n),levs2,extend='min')
axs[1,2].contour(param['x_T'],param['y_T'],h2mat(1/(1+Ro[-1,:])**n),[0.8],linewidths=0.7)
m22 = axs[1,1].contour(param['x_T'],param['y_T'],h2mat(c),[0.8],linewidths=0.7)
plt.colorbar(m2,ax=(axs[1,0],axs[1,1],axs[1,2]),ticks=np.arange(0.5,1.05,.05))
plt.clabel(m22, inline=1, fontsize=5,fmt='%.1f')
plt.clabel(m21, inline=1, fontsize=5,fmt='%.1f')
axs[0,0].set_xticks([])
axs[0,0].set_yticks([])
axs[0,0].set_title(r'$\overline{R_o} = \overline{\frac{|(\mathbf{u} \cdot \nabla)\mathbf{u}|}{|f\mathbf{u}|}}$')
axs[0,1].set_title(r'$\overline{R_o^*} = \overline{\frac{|D|}{f}}$')
axs[0,2].set_title(r'snapshot: $R_o^*$')
axs[1,0].set_title(r'$(1+\overline{R_o})^{-2}$')
axs[1,1].set_title(r'$(1+\overline{R_o}^*)^{-2}$')
axs[1,2].set_title(r'$(1+R_o^*)^{-2}$')
axs[0,0].set_ylabel('y')
axs[1,0].set_ylabel('y')
axs[1,0].set_xlabel('x')
axs[1,1].set_xlabel('x')
plt.savefig(path+'compare/Ro_scaling.png',dpi=150)
plt.close(fig)
#plt.show()
| gpl-3.0 | -4,024,873,602,370,258,400 | -4,544,702,483,397,165,000 | 27.73913 | 112 | 0.61296 | false |
ArthurGarnier/SickRage | lib/sqlalchemy/dialects/mysql/types.py | 8 | 25137 | # mysql/types.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
import datetime
from ... import exc, util
from ... import types as sqltypes
class _NumericType(object):
"""Base for MySQL numeric types.
This is the base both for NUMERIC as well as INTEGER, hence
it's a mixin.
"""
def __init__(self, unsigned=False, zerofill=False, **kw):
self.unsigned = unsigned
self.zerofill = zerofill
super(_NumericType, self).__init__(**kw)
def __repr__(self):
return util.generic_repr(self,
to_inspect=[_NumericType, sqltypes.Numeric])
class _FloatType(_NumericType, sqltypes.Float):
def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
if isinstance(self, (REAL, DOUBLE)) and \
(
(precision is None and scale is not None) or
(precision is not None and scale is None)
):
raise exc.ArgumentError(
"You must specify both precision and scale or omit "
"both altogether.")
super(_FloatType, self).__init__(
precision=precision, asdecimal=asdecimal, **kw)
self.scale = scale
def __repr__(self):
return util.generic_repr(self, to_inspect=[_FloatType,
_NumericType,
sqltypes.Float])
class _IntegerType(_NumericType, sqltypes.Integer):
def __init__(self, display_width=None, **kw):
self.display_width = display_width
super(_IntegerType, self).__init__(**kw)
def __repr__(self):
return util.generic_repr(self, to_inspect=[_IntegerType,
_NumericType,
sqltypes.Integer])
class _StringType(sqltypes.String):
"""Base for MySQL string types."""
def __init__(self, charset=None, collation=None,
ascii=False, binary=False, unicode=False,
national=False, **kw):
self.charset = charset
# allow collate= or collation=
kw.setdefault('collation', kw.pop('collate', collation))
self.ascii = ascii
self.unicode = unicode
self.binary = binary
self.national = national
super(_StringType, self).__init__(**kw)
def __repr__(self):
return util.generic_repr(self,
to_inspect=[_StringType, sqltypes.String])
class _MatchType(sqltypes.Float, sqltypes.MatchType):
def __init__(self, **kw):
# TODO: float arguments?
sqltypes.Float.__init__(self)
sqltypes.MatchType.__init__(self)
class NUMERIC(_NumericType, sqltypes.NUMERIC):
"""MySQL NUMERIC type."""
__visit_name__ = 'NUMERIC'
def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
"""Construct a NUMERIC.
:param precision: Total digits in this number. If scale and precision
are both None, values are stored to limits allowed by the server.
:param scale: The number of digits after the decimal point.
:param unsigned: a boolean, optional.
:param zerofill: Optional. If true, values will be stored as strings
          left-padded with zeros. Note that this does not affect the values
returned by the underlying database API, which continue to be
numeric.
"""
super(NUMERIC, self).__init__(precision=precision,
scale=scale, asdecimal=asdecimal, **kw)
class DECIMAL(_NumericType, sqltypes.DECIMAL):
"""MySQL DECIMAL type."""
__visit_name__ = 'DECIMAL'
def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
"""Construct a DECIMAL.
:param precision: Total digits in this number. If scale and precision
are both None, values are stored to limits allowed by the server.
:param scale: The number of digits after the decimal point.
:param unsigned: a boolean, optional.
:param zerofill: Optional. If true, values will be stored as strings
          left-padded with zeros. Note that this does not affect the values
returned by the underlying database API, which continue to be
numeric.
"""
super(DECIMAL, self).__init__(precision=precision, scale=scale,
asdecimal=asdecimal, **kw)
class DOUBLE(_FloatType):
"""MySQL DOUBLE type."""
__visit_name__ = 'DOUBLE'
def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
"""Construct a DOUBLE.
.. note::
The :class:`.DOUBLE` type by default converts from float
to Decimal, using a truncation that defaults to 10 digits.
Specify either ``scale=n`` or ``decimal_return_scale=n`` in order
to change this scale, or ``asdecimal=False`` to return values
directly as Python floating points.
:param precision: Total digits in this number. If scale and precision
are both None, values are stored to limits allowed by the server.
:param scale: The number of digits after the decimal point.
:param unsigned: a boolean, optional.
:param zerofill: Optional. If true, values will be stored as strings
          left-padded with zeros. Note that this does not affect the values
returned by the underlying database API, which continue to be
numeric.
"""
super(DOUBLE, self).__init__(precision=precision, scale=scale,
asdecimal=asdecimal, **kw)
class REAL(_FloatType, sqltypes.REAL):
"""MySQL REAL type."""
__visit_name__ = 'REAL'
def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
"""Construct a REAL.
.. note::
The :class:`.REAL` type by default converts from float
to Decimal, using a truncation that defaults to 10 digits.
Specify either ``scale=n`` or ``decimal_return_scale=n`` in order
to change this scale, or ``asdecimal=False`` to return values
directly as Python floating points.
:param precision: Total digits in this number. If scale and precision
are both None, values are stored to limits allowed by the server.
:param scale: The number of digits after the decimal point.
:param unsigned: a boolean, optional.
:param zerofill: Optional. If true, values will be stored as strings
          left-padded with zeros. Note that this does not affect the values
returned by the underlying database API, which continue to be
numeric.
"""
super(REAL, self).__init__(precision=precision, scale=scale,
asdecimal=asdecimal, **kw)
class FLOAT(_FloatType, sqltypes.FLOAT):
"""MySQL FLOAT type."""
__visit_name__ = 'FLOAT'
def __init__(self, precision=None, scale=None, asdecimal=False, **kw):
"""Construct a FLOAT.
:param precision: Total digits in this number. If scale and precision
are both None, values are stored to limits allowed by the server.
:param scale: The number of digits after the decimal point.
:param unsigned: a boolean, optional.
:param zerofill: Optional. If true, values will be stored as strings
          left-padded with zeros. Note that this does not affect the values
returned by the underlying database API, which continue to be
numeric.
"""
super(FLOAT, self).__init__(precision=precision, scale=scale,
asdecimal=asdecimal, **kw)
def bind_processor(self, dialect):
return None
class INTEGER(_IntegerType, sqltypes.INTEGER):
"""MySQL INTEGER type."""
__visit_name__ = 'INTEGER'
def __init__(self, display_width=None, **kw):
"""Construct an INTEGER.
:param display_width: Optional, maximum display width for this number.
:param unsigned: a boolean, optional.
:param zerofill: Optional. If true, values will be stored as strings
left-padded with zeros. Note that this does not effect the values
returned by the underlying database API, which continue to be
numeric.
"""
super(INTEGER, self).__init__(display_width=display_width, **kw)
class BIGINT(_IntegerType, sqltypes.BIGINT):
"""MySQL BIGINTEGER type."""
__visit_name__ = 'BIGINT'
def __init__(self, display_width=None, **kw):
"""Construct a BIGINTEGER.
:param display_width: Optional, maximum display width for this number.
:param unsigned: a boolean, optional.
:param zerofill: Optional. If true, values will be stored as strings
          left-padded with zeros. Note that this does not affect the values
returned by the underlying database API, which continue to be
numeric.
"""
super(BIGINT, self).__init__(display_width=display_width, **kw)
class MEDIUMINT(_IntegerType):
"""MySQL MEDIUMINTEGER type."""
__visit_name__ = 'MEDIUMINT'
def __init__(self, display_width=None, **kw):
"""Construct a MEDIUMINTEGER
:param display_width: Optional, maximum display width for this number.
:param unsigned: a boolean, optional.
:param zerofill: Optional. If true, values will be stored as strings
          left-padded with zeros. Note that this does not affect the values
returned by the underlying database API, which continue to be
numeric.
"""
super(MEDIUMINT, self).__init__(display_width=display_width, **kw)
class TINYINT(_IntegerType):
"""MySQL TINYINT type."""
__visit_name__ = 'TINYINT'
def __init__(self, display_width=None, **kw):
"""Construct a TINYINT.
:param display_width: Optional, maximum display width for this number.
:param unsigned: a boolean, optional.
:param zerofill: Optional. If true, values will be stored as strings
          left-padded with zeros. Note that this does not affect the values
returned by the underlying database API, which continue to be
numeric.
"""
super(TINYINT, self).__init__(display_width=display_width, **kw)
class SMALLINT(_IntegerType, sqltypes.SMALLINT):
"""MySQL SMALLINTEGER type."""
__visit_name__ = 'SMALLINT'
def __init__(self, display_width=None, **kw):
"""Construct a SMALLINTEGER.
:param display_width: Optional, maximum display width for this number.
:param unsigned: a boolean, optional.
:param zerofill: Optional. If true, values will be stored as strings
          left-padded with zeros. Note that this does not affect the values
returned by the underlying database API, which continue to be
numeric.
"""
super(SMALLINT, self).__init__(display_width=display_width, **kw)
class BIT(sqltypes.TypeEngine):
"""MySQL BIT type.
This type is for MySQL 5.0.3 or greater for MyISAM, and 5.0.5 or greater
for MyISAM, MEMORY, InnoDB and BDB. For older versions, use a
MSTinyInteger() type.
"""
__visit_name__ = 'BIT'
def __init__(self, length=None):
"""Construct a BIT.
:param length: Optional, number of bits.
"""
self.length = length
def result_processor(self, dialect, coltype):
"""Convert a MySQL's 64 bit, variable length binary string to a long.
TODO: this is MySQL-db, pyodbc specific. OurSQL and mysqlconnector
already do this, so this logic should be moved to those dialects.
"""
def process(value):
if value is not None:
v = 0
for i in value:
if not isinstance(i, int):
i = ord(i) # convert byte to int on Python 2
v = v << 8 | i
return v
return value
return process
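    # Worked example (editor's addition): the processor above folds the bytes
    # most-significant first, so a BIT value returned by the driver as
    # b'\x01\x02' comes back to Python as (1 << 8) | 2 == 258.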
class TIME(sqltypes.TIME):
"""MySQL TIME type. """
__visit_name__ = 'TIME'
def __init__(self, timezone=False, fsp=None):
"""Construct a MySQL TIME type.
:param timezone: not used by the MySQL dialect.
:param fsp: fractional seconds precision value.
MySQL 5.6 supports storage of fractional seconds;
this parameter will be used when emitting DDL
for the TIME type.
.. note::
DBAPI driver support for fractional seconds may
be limited; current support includes
MySQL Connector/Python.
.. versionadded:: 0.8 The MySQL-specific TIME
type as well as fractional seconds support.
"""
super(TIME, self).__init__(timezone=timezone)
self.fsp = fsp
def result_processor(self, dialect, coltype):
time = datetime.time
def process(value):
# convert from a timedelta value
if value is not None:
microseconds = value.microseconds
seconds = value.seconds
minutes = seconds // 60
return time(minutes // 60,
minutes % 60,
seconds - minutes * 60,
microsecond=microseconds)
else:
return None
return process
class TIMESTAMP(sqltypes.TIMESTAMP):
"""MySQL TIMESTAMP type.
"""
__visit_name__ = 'TIMESTAMP'
def __init__(self, timezone=False, fsp=None):
"""Construct a MySQL TIMESTAMP type.
:param timezone: not used by the MySQL dialect.
:param fsp: fractional seconds precision value.
MySQL 5.6.4 supports storage of fractional seconds;
this parameter will be used when emitting DDL
for the TIMESTAMP type.
.. note::
DBAPI driver support for fractional seconds may
be limited; current support includes
MySQL Connector/Python.
.. versionadded:: 0.8.5 Added MySQL-specific :class:`.mysql.TIMESTAMP`
with fractional seconds support.
"""
super(TIMESTAMP, self).__init__(timezone=timezone)
self.fsp = fsp
class DATETIME(sqltypes.DATETIME):
"""MySQL DATETIME type.
"""
__visit_name__ = 'DATETIME'
def __init__(self, timezone=False, fsp=None):
"""Construct a MySQL DATETIME type.
:param timezone: not used by the MySQL dialect.
:param fsp: fractional seconds precision value.
MySQL 5.6.4 supports storage of fractional seconds;
this parameter will be used when emitting DDL
for the DATETIME type.
.. note::
DBAPI driver support for fractional seconds may
be limited; current support includes
MySQL Connector/Python.
.. versionadded:: 0.8.5 Added MySQL-specific :class:`.mysql.DATETIME`
with fractional seconds support.
"""
super(DATETIME, self).__init__(timezone=timezone)
self.fsp = fsp
class YEAR(sqltypes.TypeEngine):
"""MySQL YEAR type, for single byte storage of years 1901-2155."""
__visit_name__ = 'YEAR'
def __init__(self, display_width=None):
self.display_width = display_width
class TEXT(_StringType, sqltypes.TEXT):
"""MySQL TEXT type, for text up to 2^16 characters."""
__visit_name__ = 'TEXT'
def __init__(self, length=None, **kw):
"""Construct a TEXT.
:param length: Optional, if provided the server may optimize storage
by substituting the smallest TEXT type sufficient to store
``length`` characters.
:param charset: Optional, a column-level character set for this string
value. Takes precedence to 'ascii' or 'unicode' short-hand.
:param collation: Optional, a column-level collation for this string
value. Takes precedence to 'binary' short-hand.
:param ascii: Defaults to False: short-hand for the ``latin1``
character set, generates ASCII in schema.
:param unicode: Defaults to False: short-hand for the ``ucs2``
character set, generates UNICODE in schema.
:param national: Optional. If true, use the server's configured
national character set.
:param binary: Defaults to False: short-hand, pick the binary
collation type that matches the column's character set. Generates
BINARY in schema. This does not affect the type of data stored,
only the collation of character data.
"""
super(TEXT, self).__init__(length=length, **kw)
class TINYTEXT(_StringType):
"""MySQL TINYTEXT type, for text up to 2^8 characters."""
__visit_name__ = 'TINYTEXT'
def __init__(self, **kwargs):
"""Construct a TINYTEXT.
:param charset: Optional, a column-level character set for this string
value. Takes precedence to 'ascii' or 'unicode' short-hand.
:param collation: Optional, a column-level collation for this string
value. Takes precedence to 'binary' short-hand.
:param ascii: Defaults to False: short-hand for the ``latin1``
character set, generates ASCII in schema.
:param unicode: Defaults to False: short-hand for the ``ucs2``
character set, generates UNICODE in schema.
:param national: Optional. If true, use the server's configured
national character set.
:param binary: Defaults to False: short-hand, pick the binary
collation type that matches the column's character set. Generates
BINARY in schema. This does not affect the type of data stored,
only the collation of character data.
"""
super(TINYTEXT, self).__init__(**kwargs)
class MEDIUMTEXT(_StringType):
"""MySQL MEDIUMTEXT type, for text up to 2^24 characters."""
__visit_name__ = 'MEDIUMTEXT'
def __init__(self, **kwargs):
"""Construct a MEDIUMTEXT.
:param charset: Optional, a column-level character set for this string
value. Takes precedence to 'ascii' or 'unicode' short-hand.
:param collation: Optional, a column-level collation for this string
value. Takes precedence to 'binary' short-hand.
:param ascii: Defaults to False: short-hand for the ``latin1``
character set, generates ASCII in schema.
:param unicode: Defaults to False: short-hand for the ``ucs2``
character set, generates UNICODE in schema.
:param national: Optional. If true, use the server's configured
national character set.
:param binary: Defaults to False: short-hand, pick the binary
collation type that matches the column's character set. Generates
BINARY in schema. This does not affect the type of data stored,
only the collation of character data.
"""
super(MEDIUMTEXT, self).__init__(**kwargs)
class LONGTEXT(_StringType):
"""MySQL LONGTEXT type, for text up to 2^32 characters."""
__visit_name__ = 'LONGTEXT'
def __init__(self, **kwargs):
"""Construct a LONGTEXT.
:param charset: Optional, a column-level character set for this string
value. Takes precedence to 'ascii' or 'unicode' short-hand.
:param collation: Optional, a column-level collation for this string
value. Takes precedence to 'binary' short-hand.
:param ascii: Defaults to False: short-hand for the ``latin1``
character set, generates ASCII in schema.
:param unicode: Defaults to False: short-hand for the ``ucs2``
character set, generates UNICODE in schema.
:param national: Optional. If true, use the server's configured
national character set.
:param binary: Defaults to False: short-hand, pick the binary
collation type that matches the column's character set. Generates
BINARY in schema. This does not affect the type of data stored,
only the collation of character data.
"""
super(LONGTEXT, self).__init__(**kwargs)
class VARCHAR(_StringType, sqltypes.VARCHAR):
"""MySQL VARCHAR type, for variable-length character data."""
__visit_name__ = 'VARCHAR'
def __init__(self, length=None, **kwargs):
"""Construct a VARCHAR.
:param charset: Optional, a column-level character set for this string
value. Takes precedence to 'ascii' or 'unicode' short-hand.
:param collation: Optional, a column-level collation for this string
value. Takes precedence to 'binary' short-hand.
:param ascii: Defaults to False: short-hand for the ``latin1``
character set, generates ASCII in schema.
:param unicode: Defaults to False: short-hand for the ``ucs2``
character set, generates UNICODE in schema.
:param national: Optional. If true, use the server's configured
national character set.
:param binary: Defaults to False: short-hand, pick the binary
collation type that matches the column's character set. Generates
BINARY in schema. This does not affect the type of data stored,
only the collation of character data.
"""
super(VARCHAR, self).__init__(length=length, **kwargs)
class CHAR(_StringType, sqltypes.CHAR):
"""MySQL CHAR type, for fixed-length character data."""
__visit_name__ = 'CHAR'
def __init__(self, length=None, **kwargs):
"""Construct a CHAR.
:param length: Maximum data length, in characters.
:param binary: Optional, use the default binary collation for the
national character set. This does not affect the type of data
stored, use a BINARY type for binary data.
:param collation: Optional, request a particular collation. Must be
compatible with the national character set.
"""
super(CHAR, self).__init__(length=length, **kwargs)
@classmethod
def _adapt_string_for_cast(self, type_):
# copy the given string type into a CHAR
# for the purposes of rendering a CAST expression
type_ = sqltypes.to_instance(type_)
if isinstance(type_, sqltypes.CHAR):
return type_
elif isinstance(type_, _StringType):
return CHAR(
length=type_.length,
charset=type_.charset,
collation=type_.collation,
ascii=type_.ascii,
binary=type_.binary,
unicode=type_.unicode,
national=False # not supported in CAST
)
else:
return CHAR(length=type_.length)
class NVARCHAR(_StringType, sqltypes.NVARCHAR):
"""MySQL NVARCHAR type.
For variable-length character data in the server's configured national
character set.
"""
__visit_name__ = 'NVARCHAR'
def __init__(self, length=None, **kwargs):
"""Construct an NVARCHAR.
:param length: Maximum data length, in characters.
:param binary: Optional, use the default binary collation for the
national character set. This does not affect the type of data
stored, use a BINARY type for binary data.
:param collation: Optional, request a particular collation. Must be
compatible with the national character set.
"""
kwargs['national'] = True
super(NVARCHAR, self).__init__(length=length, **kwargs)
class NCHAR(_StringType, sqltypes.NCHAR):
"""MySQL NCHAR type.
For fixed-length character data in the server's configured national
character set.
"""
__visit_name__ = 'NCHAR'
def __init__(self, length=None, **kwargs):
"""Construct an NCHAR.
:param length: Maximum data length, in characters.
:param binary: Optional, use the default binary collation for the
national character set. This does not affect the type of data
stored, use a BINARY type for binary data.
:param collation: Optional, request a particular collation. Must be
compatible with the national character set.
"""
kwargs['national'] = True
super(NCHAR, self).__init__(length=length, **kwargs)
class TINYBLOB(sqltypes._Binary):
"""MySQL TINYBLOB type, for binary data up to 2^8 bytes."""
__visit_name__ = 'TINYBLOB'
class MEDIUMBLOB(sqltypes._Binary):
"""MySQL MEDIUMBLOB type, for binary data up to 2^24 bytes."""
__visit_name__ = 'MEDIUMBLOB'
class LONGBLOB(sqltypes._Binary):
"""MySQL LONGBLOB type, for binary data up to 2^32 bytes."""
__visit_name__ = 'LONGBLOB'
| gpl-3.0 | -1,146,118,673,099,673,300 | 512,058,449,524,169,700 | 31.815927 | 78 | 0.612842 | false |
geoffreyporto/radartec | node_modules/node-gyp/gyp/pylib/gyp/xml_fix.py | 2767 | 2174 | # Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Applies a fix to CR LF TAB handling in xml.dom.
Fixes this: http://code.google.com/p/chromium/issues/detail?id=76293
Working around this: http://bugs.python.org/issue5752
TODO(bradnelson): Consider dropping this when we drop XP support.
"""
import xml.dom.minidom
def _Replacement_write_data(writer, data, is_attrib=False):
"""Writes datachars to writer."""
data = data.replace("&", "&").replace("<", "<")
data = data.replace("\"", """).replace(">", ">")
if is_attrib:
data = data.replace(
"\r", "
").replace(
"\n", "
").replace(
"\t", "	")
writer.write(data)
def _Replacement_writexml(self, writer, indent="", addindent="", newl=""):
# indent = current indentation
# addindent = indentation to add to higher levels
# newl = newline string
writer.write(indent+"<" + self.tagName)
attrs = self._get_attributes()
a_names = attrs.keys()
a_names.sort()
for a_name in a_names:
writer.write(" %s=\"" % a_name)
_Replacement_write_data(writer, attrs[a_name].value, is_attrib=True)
writer.write("\"")
if self.childNodes:
writer.write(">%s" % newl)
for node in self.childNodes:
node.writexml(writer, indent + addindent, addindent, newl)
writer.write("%s</%s>%s" % (indent, self.tagName, newl))
else:
writer.write("/>%s" % newl)
class XmlFix(object):
"""Object to manage temporary patching of xml.dom.minidom."""
def __init__(self):
# Preserve current xml.dom.minidom functions.
self.write_data = xml.dom.minidom._write_data
self.writexml = xml.dom.minidom.Element.writexml
# Inject replacement versions of a function and a method.
xml.dom.minidom._write_data = _Replacement_write_data
xml.dom.minidom.Element.writexml = _Replacement_writexml
def Cleanup(self):
if self.write_data:
xml.dom.minidom._write_data = self.write_data
xml.dom.minidom.Element.writexml = self.writexml
self.write_data = None
def __del__(self):
self.Cleanup()
| agpl-3.0 | -7,134,656,447,709,384,000 | 3,869,739,391,297,213,000 | 30.507246 | 74 | 0.658234 | false |
apache/thrift | lib/py/src/transport/TSocket.py | 7 | 9123 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import errno
import logging
import os
import socket
import sys
from .TTransport import TTransportBase, TTransportException, TServerTransportBase
logger = logging.getLogger(__name__)
class TSocketBase(TTransportBase):
def _resolveAddr(self):
if self._unix_socket is not None:
return [(socket.AF_UNIX, socket.SOCK_STREAM, None, None,
self._unix_socket)]
else:
return socket.getaddrinfo(self.host,
self.port,
self._socket_family,
socket.SOCK_STREAM,
0,
socket.AI_PASSIVE)
def close(self):
if self.handle:
self.handle.close()
self.handle = None
class TSocket(TSocketBase):
"""Socket implementation of TTransport base."""
def __init__(self, host='localhost', port=9090, unix_socket=None,
socket_family=socket.AF_UNSPEC,
socket_keepalive=False):
"""Initialize a TSocket
@param host(str) The host to connect to.
@param port(int) The (TCP) port to connect to.
@param unix_socket(str) The filename of a unix socket to connect to.
(host and port will be ignored.)
@param socket_family(int) The socket family to use with this socket.
@param socket_keepalive(bool) enable TCP keepalive, default off.
"""
self.host = host
self.port = port
self.handle = None
self._unix_socket = unix_socket
self._timeout = None
self._socket_family = socket_family
self._socket_keepalive = socket_keepalive
def setHandle(self, h):
self.handle = h
def isOpen(self):
if self.handle is None:
return False
# this lets us cheaply see if the other end of the socket is still
# connected. if disconnected, we'll get EOF back (expressed as zero
# bytes of data) otherwise we'll get one byte or an error indicating
# we'd have to block for data.
#
# note that we're not doing this with socket.MSG_DONTWAIT because 1)
# it's linux-specific and 2) gevent-patched sockets hide EAGAIN from us
# when timeout is non-zero.
original_timeout = self.handle.gettimeout()
try:
self.handle.settimeout(0)
try:
peeked_bytes = self.handle.recv(1, socket.MSG_PEEK)
except (socket.error, OSError) as exc: # on modern python this is just BlockingIOError
if exc.errno in (errno.EWOULDBLOCK, errno.EAGAIN):
return True
return False
finally:
self.handle.settimeout(original_timeout)
# the length will be zero if we got EOF (indicating connection closed)
return len(peeked_bytes) == 1
def setTimeout(self, ms):
if ms is None:
self._timeout = None
else:
self._timeout = ms / 1000.0
if self.handle is not None:
self.handle.settimeout(self._timeout)
def _do_open(self, family, socktype):
return socket.socket(family, socktype)
@property
def _address(self):
return self._unix_socket if self._unix_socket else '%s:%d' % (self.host, self.port)
def open(self):
if self.handle:
raise TTransportException(type=TTransportException.ALREADY_OPEN, message="already open")
try:
addrs = self._resolveAddr()
except socket.gaierror as gai:
msg = 'failed to resolve sockaddr for ' + str(self._address)
logger.exception(msg)
raise TTransportException(type=TTransportException.NOT_OPEN, message=msg, inner=gai)
for family, socktype, _, _, sockaddr in addrs:
handle = self._do_open(family, socktype)
# TCP_KEEPALIVE
if self._socket_keepalive:
handle.setsockopt(socket.IPPROTO_TCP, socket.SO_KEEPALIVE, 1)
handle.settimeout(self._timeout)
try:
handle.connect(sockaddr)
self.handle = handle
return
except socket.error:
handle.close()
logger.info('Could not connect to %s', sockaddr, exc_info=True)
msg = 'Could not connect to any of %s' % list(map(lambda a: a[4],
addrs))
logger.error(msg)
raise TTransportException(type=TTransportException.NOT_OPEN, message=msg)
def read(self, sz):
try:
buff = self.handle.recv(sz)
except socket.error as e:
if (e.args[0] == errno.ECONNRESET and
(sys.platform == 'darwin' or sys.platform.startswith('freebsd'))):
# freebsd and Mach don't follow POSIX semantic of recv
# and fail with ECONNRESET if peer performed shutdown.
# See corresponding comment and code in TSocket::read()
# in lib/cpp/src/transport/TSocket.cpp.
self.close()
# Trigger the check to raise the END_OF_FILE exception below.
buff = ''
elif e.args[0] == errno.ETIMEDOUT:
raise TTransportException(type=TTransportException.TIMED_OUT, message="read timeout", inner=e)
else:
raise TTransportException(message="unexpected exception", inner=e)
if len(buff) == 0:
raise TTransportException(type=TTransportException.END_OF_FILE,
message='TSocket read 0 bytes')
return buff
def write(self, buff):
if not self.handle:
raise TTransportException(type=TTransportException.NOT_OPEN,
message='Transport not open')
sent = 0
have = len(buff)
while sent < have:
try:
plus = self.handle.send(buff)
if plus == 0:
raise TTransportException(type=TTransportException.END_OF_FILE,
message='TSocket sent 0 bytes')
sent += plus
buff = buff[plus:]
except socket.error as e:
raise TTransportException(message="unexpected exception", inner=e)
def flush(self):
pass
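# Illustrative sketch (not part of the original module): typical client-side
# use of the constructor parameters documented above. The buffered transport
# and protocol layers normally wrapped around a TSocket are omitted; this only
# exercises the raw socket transport.
#
#   sock = TSocket(host='127.0.0.1', port=9090, socket_keepalive=True)
#   sock.setTimeout(5000)      # milliseconds
#   sock.open()
#   sock.write(b'ping')
#   reply = sock.read(4096)
#   sock.close()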
class TServerSocket(TSocketBase, TServerTransportBase):
"""Socket implementation of TServerTransport base."""
def __init__(self, host=None, port=9090, unix_socket=None, socket_family=socket.AF_UNSPEC):
self.host = host
self.port = port
self._unix_socket = unix_socket
self._socket_family = socket_family
self.handle = None
self._backlog = 128
def setBacklog(self, backlog=None):
if not self.handle:
self._backlog = backlog
else:
            # We can't update the backlog when the socket is already listening,
            # since the handle has been created.
logger.warn('You have to set backlog before listen.')
def listen(self):
res0 = self._resolveAddr()
socket_family = self._socket_family == socket.AF_UNSPEC and socket.AF_INET6 or self._socket_family
for res in res0:
if res[0] is socket_family or res is res0[-1]:
break
# We need remove the old unix socket if the file exists and
# nobody is listening on it.
if self._unix_socket:
tmp = socket.socket(res[0], res[1])
try:
tmp.connect(res[4])
except socket.error as err:
eno, message = err.args
if eno == errno.ECONNREFUSED:
os.unlink(res[4])
self.handle = socket.socket(res[0], res[1])
self.handle.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
if hasattr(self.handle, 'settimeout'):
self.handle.settimeout(None)
self.handle.bind(res[4])
self.handle.listen(self._backlog)
def accept(self):
client, addr = self.handle.accept()
result = TSocket()
result.setHandle(client)
return result
| apache-2.0 | 7,609,347,374,018,214,000 | -543,199,598,142,298,900 | 37.171548 | 110 | 0.58062 | false |
h-matsuo/memTracker | lib/track.py | 1 | 8958 | #!/usr/bin/python
# coding: UTF-8
"""
Implementation of command: track
"""
__author__ = "Hiroyuki Matsuo <h-matsuo@ist.osaka-u.ac.jp>"
# ===== Configuration ==========================================================
# ----- Disk I/O tracking ------------------------------------------------------
#DEVICE_NAME = "mmcblk0"
# Read from: "/sys/block/<DEVICE_NAME>/queue/physical_block_size"
SECTOR_SIZE = 512 # [Bytes]
# Source: https://www.kernel.org/doc/Documentation/ABI/testing/procfs-diskstats
DISKSTATS_ROW = 24
DISKSTATS_COL_READ = 5
DISKSTATS_COL_WRITE = 9
# ----- Memory usage tracking --------------------------------------------------
# NOTE: Because of the nature of this experiment, "used memory" is defined
#       here as "total memory" - "unallocated (free) memory".
#       Buffer and cache areas are not taken into account.
# Reference: http://nopipi.hatenablog.com/entry/2015/09/13/181026
MEMINFO_ROW_TOTAL = 0
MEMINFO_ROW_FREE = 1
# ----- Network communications tracking ----------------------------------------
NET_DEV_ROW_WLAN0 = 2
NET_DEV_ROW_LO = 3
NET_DEV_ROW_ETH0 = 4
NET_DEV_COL_RECV = 1
NET_DEV_COL_SEND = 9
# ===== END Configuration ======================================================
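# Illustrative sketch (not part of the original script): how the constants above
# map onto the /proc files parsed below. Sample lines are abbreviated and the
# numbers will differ per machine.
#
#   /proc/diskstats, row DISKSTATS_ROW (fields after whitespace split):
#     index 5 = sectors read, index 9 = sectors written
#     read_bytes  = sectors_read    * SECTOR_SIZE
#     write_bytes = sectors_written * SECTOR_SIZE
#
#   /proc/meminfo, rows MEMINFO_ROW_TOTAL / MEMINFO_ROW_FREE:
#     MemTotal:  949448 kB
#     MemFree:   620040 kB
#     used_kilobytes = MemTotal - MemFree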
from datetime import datetime
import json
import os.path
import re
import signal
import sys
import time
from lib.utils import Utils
class TrackController:
"""
Control tracking memory usage
"""
def __init__(self):
"""
Constructor
"""
# Initialize output data
self.__tracked_data = []
# Stop flag for tracking
self.__stop_flag = False
# Default values
self.__interval = 1.0
self.__out_file = None
# self.__pid = None
# Default tracking mode
self.__mode_io = True
self.__mode_mem = True
self.__mode_net = True
# Compile regex pattern
self.__regex_pattern = re.compile(r"\s*") # Raw string to avoid the backslash plague
def setTrackingInterval(self, interval):
"""
Set tracking interval
@param interval Tracking interval
"""
self.__interval = interval
def setOutputFilename(self, filename):
"""
Set filename to write output
@param filename Filename to write output
"""
self.__out_file = filename
# def setPid(self, pid):
# """
# Set process ID to track
# @param process ID
# """
# if not os.path.exists("/proc/%d" % pid):
# sys.stderr.write("ERROR: PID %d: No such process.\n" % pid)
# sys.exit(1)
# self.__pid = pid
def setTrackingMode(self, io = False, mem = False, net = False):
"""
Set tracking mode
@param io True if track disk I/O
@param mem True if track memory usage
@param net True if track network communications
"""
self.__mode_io = io
self.__mode_mem = mem
self.__mode_net = net
def start(self):
"""
Start tracking
"""
# Initialize valiables for analyzing "/proc/diskstats"
if self.__mode_io:
total_data = self.__getIOTotalData()
self.__io_read_bytes_begin = total_data["total_read_bytes"]
self.__io_write_bytes_begin = total_data["total_write_bytes"]
# Initialize valiables for analyzing "/proc/meminfo"
if self.__mode_mem:
total_data = self.__getMemTotalData()
self.__mem_used_kilobytes_begin = total_data["used_kilobytes"]
# Initialize valiables for analyzing "/proc/net/dev"
if self.__mode_net:
total_data = self.__getNetTotalData()
self.__net_recv_total_bytes_begin = total_data["total_recv_bytes"]
self.__net_send_total_bytes_begin = total_data["total_send_bytes"]
# Start tracking
self.__track()
def stop(self):
"""
Stop tracking
"""
self.__stop_flag = True
if self.__out_file != None:
fout = open(self.__out_file, "w")
json.dump(self.__tracked_data, fout, indent = 2, separators = (",", ": "))
fout.close()
def __track(self):
"""
Track procfs repeatedly
"""
while not self.__stop_flag:
begin = datetime.today()
tracked_data = self.__getTrackedData()
if self.__out_file != None:
self.__tracked_data.append(tracked_data)
else:
print json.dumps(tracked_data, indent = 2, separators = (",", ": "))
end = datetime.today()
diff = self.__interval - (end - begin).total_seconds()
if diff < 0: diff = 0
time.sleep(diff)
def __getTrackedData(self):
"""
Get data from "/proc"
@return Tracked data
"""
data = {}
now = datetime.today()
if self.__mode_io: data_io = self.__getIOData()
if self.__mode_mem: data_mem = self.__getMemData()
if self.__mode_net: data_net = self.__getNetData()
data["date"] = Utils.formatDatetime(now)
if self.__mode_io: data["io"] = data_io
if self.__mode_mem: data["mem"] = data_mem
if self.__mode_net: data["net"] = data_net
return data
def __getIOData(self):
"""
Get disk I/O data
@return Disk I/O data
"""
total_data = self.__getIOTotalData()
return {
"read_bytes" : total_data["total_read_bytes"] - self.__io_read_bytes_begin,
"write_bytes": total_data["total_write_bytes"] - self.__io_write_bytes_begin
}
def __getMemData(self):
"""
Get memory usage data
@return Memory usage data
"""
total_data = self.__getMemTotalData()
return {
"used_kilobytes": total_data["used_kilobytes"] - self.__mem_used_kilobytes_begin
}
def __getNetData(self):
"""
Get network communications data
@return Network communications data
"""
total_data = self.__getNetTotalData()
return {
"recv_bytes": total_data["total_recv_bytes"] - self.__net_recv_total_bytes_begin,
"send_bytes": total_data["total_send_bytes"] - self.__net_send_total_bytes_begin
}
def __getIOTotalData(self):
"""
Get data from "/proc/diskstats"
@return Analyzed data
"""
fin = open("/proc/diskstats", "r")
diskstats = fin.readlines()
fin.close()
diskstats = self.__regex_pattern.split(diskstats[DISKSTATS_ROW].strip())
return {
"total_read_bytes" : int(diskstats[DISKSTATS_COL_READ]) * SECTOR_SIZE,
"total_write_bytes": int(diskstats[DISKSTATS_COL_WRITE]) * SECTOR_SIZE
}
def __getMemTotalData(self):
"""
Get data from "/proc/meminfo"
@return Analyzed data
"""
fin = open("/proc/meminfo", "r")
meminfo = fin.readlines()
fin.close()
return {
"used_kilobytes": int(meminfo[MEMINFO_ROW_TOTAL][9:-3].strip()) - int(meminfo[MEMINFO_ROW_FREE][8:-3].strip())
}
def __getNetTotalData(self):
"""
Get data from "/proc/net/dev"
@return Analyzed data
"""
fin = open("/proc/net/dev", "r")
net_dev = fin.readlines()
fin.close()
recv_bytes = 0
send_bytes = 0
for row in [NET_DEV_ROW_WLAN0, NET_DEV_ROW_LO, NET_DEV_ROW_ETH0]:
line = self.__regex_pattern.split(net_dev[row].strip())
recv_bytes += int(line[NET_DEV_COL_RECV])
send_bytes += int(line[NET_DEV_COL_SEND])
return {
"total_recv_bytes": recv_bytes,
"total_send_bytes": send_bytes
}
def SIGINTHandler(signum, frame):
"""
Signal SIGINT handler
"""
global controller
controller.stop()
def exec_track(flags):
"""
Execute command: track
@param flags Result of parsing argv
"""
# Instantiate controller
global controller
controller = TrackController()
# Set tracking interval
controller.setTrackingInterval(flags.interval)
# Set output filename
if flags.out_file != None:
controller.setOutputFilename(flags.out_file)
# Set process id to track
# if flags.pid != None:
# controller.setPid(flags.pid)
# Set tracking mode
controller.setTrackingMode(io = flags.mode_io,
mem = flags.mode_mem,
net = flags.mode_net)
# Print message
print "Start tracking..."
print 'Press "Ctrl + c" to quit.'
# Handle SIGINT
signal.signal(signal.SIGINT, SIGINTHandler)
# Start tracking
controller.start()
| mit | -7,526,723,967,100,933,000 | -8,952,595,731,969,580,000 | 28.275748 | 122 | 0.53813 | false |
broferek/ansible | test/lib/ansible_test/_data/sanity/pylint/plugins/deprecated.py | 17 | 3851 | # (c) 2018, Matt Martz <matt@sivel.net>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from distutils.version import LooseVersion
import astroid
from pylint.interfaces import IAstroidChecker
from pylint.checkers import BaseChecker
from pylint.checkers.utils import check_messages
from ansible.release import __version__ as ansible_version_raw
MSGS = {
'E9501': ("Deprecated version (%r) found in call to Display.deprecated "
"or AnsibleModule.deprecate",
"ansible-deprecated-version",
"Used when a call to Display.deprecated specifies a version "
"less than or equal to the current version of Ansible",
{'minversion': (2, 6)}),
'E9502': ("Display.deprecated call without a version",
"ansible-deprecated-no-version",
"Used when a call to Display.deprecated does not specify a "
"version",
{'minversion': (2, 6)}),
'E9503': ("Invalid deprecated version (%r) found in call to "
"Display.deprecated or AnsibleModule.deprecate",
"ansible-invalid-deprecated-version",
"Used when a call to Display.deprecated specifies an invalid "
"version number",
{'minversion': (2, 6)}),
}
ANSIBLE_VERSION = LooseVersion('.'.join(ansible_version_raw.split('.')[:3]))
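# Illustrative sketch (not part of the original plugin): the kind of calls the
# checker below is meant to flag, assuming the running Ansible version is
# already at or past the given deprecation version.
#
#   display.deprecated("old option", version="2.4")   # ansible-deprecated-version
#   module.deprecate("old behaviour")                 # ansible-deprecated-no-version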
def _get_expr_name(node):
"""Funciton to get either ``attrname`` or ``name`` from ``node.func.expr``
Created specifically for the case of ``display.deprecated`` or ``self._display.deprecated``
"""
try:
return node.func.expr.attrname
except AttributeError:
# If this fails too, we'll let it raise, the caller should catch it
return node.func.expr.name
class AnsibleDeprecatedChecker(BaseChecker):
"""Checks for Display.deprecated calls to ensure that the ``version``
has not passed or met the time for removal
"""
__implements__ = (IAstroidChecker,)
name = 'deprecated'
msgs = MSGS
@check_messages(*(MSGS.keys()))
def visit_call(self, node):
version = None
try:
if (node.func.attrname == 'deprecated' and 'display' in _get_expr_name(node) or
node.func.attrname == 'deprecate' and 'module' in _get_expr_name(node)):
if node.keywords:
for keyword in node.keywords:
if len(node.keywords) == 1 and keyword.arg is None:
# This is likely a **kwargs splat
return
if keyword.arg == 'version':
if isinstance(keyword.value.value, astroid.Name):
# This is likely a variable
return
version = keyword.value.value
if not version:
try:
version = node.args[1].value
except IndexError:
self.add_message('ansible-deprecated-no-version', node=node)
return
try:
if ANSIBLE_VERSION >= LooseVersion(str(version)):
self.add_message('ansible-deprecated-version', node=node, args=(version,))
except ValueError:
self.add_message('ansible-invalid-deprecated-version', node=node, args=(version,))
except AttributeError:
# Not the type of node we are interested in
pass
def register(linter):
"""required method to auto register this checker """
linter.register_checker(AnsibleDeprecatedChecker(linter))
| gpl-3.0 | 5,381,900,575,060,179,000 | 6,371,946,993,948,545,000 | 38.701031 | 102 | 0.57933 | false |
salamer/django | tests/extra_regress/models.py | 281 | 1401 | from __future__ import unicode_literals
import copy
import datetime
from django.contrib.auth.models import User
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class RevisionableModel(models.Model):
base = models.ForeignKey('self', models.SET_NULL, null=True)
title = models.CharField(blank=True, max_length=255)
when = models.DateTimeField(default=datetime.datetime.now)
def __str__(self):
return "%s (%s, %s)" % (self.title, self.id, self.base.id)
def save(self, *args, **kwargs):
super(RevisionableModel, self).save(*args, **kwargs)
if not self.base:
self.base = self
kwargs.pop('force_insert', None)
kwargs.pop('force_update', None)
super(RevisionableModel, self).save(*args, **kwargs)
def new_revision(self):
new_revision = copy.copy(self)
new_revision.pk = None
return new_revision
class Order(models.Model):
created_by = models.ForeignKey(User, models.CASCADE)
text = models.TextField()
@python_2_unicode_compatible
class TestObject(models.Model):
first = models.CharField(max_length=20)
second = models.CharField(max_length=20)
third = models.CharField(max_length=20)
def __str__(self):
return 'TestObject: %s,%s,%s' % (self.first, self.second, self.third)
| bsd-3-clause | -193,528,202,371,850,600 | -8,467,982,629,464,345,000 | 29.456522 | 77 | 0.668808 | false |
amar266/puppet-rjil | files/tests/ceph_health.py | 31 | 4048 | #!/usr/bin/env python
#
# Copyright (c) 2013 SWITCH http://www.switch.ch
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import os
import subprocess
import sys
__version__ = '1.0.1'
# default ceph values
CEPH_COMMAND = '/usr/bin/ceph'
# nagios exit code
STATUS_OK = 0
STATUS_WARNING = 1
STATUS_ERROR = 2
STATUS_UNKNOWN = 3
def main():
# parse args
parser = argparse.ArgumentParser(description="'ceph health' nagios plugin.")
parser.add_argument('-e','--exe', help='ceph executable [%s]' % CEPH_COMMAND)
parser.add_argument('-c','--conf', help='alternative ceph conf file')
parser.add_argument('-m','--monaddress', help='ceph monitor address[:port]')
parser.add_argument('-i','--id', help='ceph client id')
parser.add_argument('-k','--keyring', help='ceph client keyring file')
parser.add_argument('-d','--detail', help="exec 'ceph health detail'", action='store_true')
parser.add_argument('-V','--version', help='show version and exit', action='store_true')
args = parser.parse_args()
# validate args
ceph_exec = args.exe if args.exe else CEPH_COMMAND
if not os.path.exists(ceph_exec):
print "ERROR: ceph executable '%s' doesn't exist" % ceph_exec
return STATUS_UNKNOWN
if args.version:
print 'version %s' % __version__
return STATUS_OK
if args.conf and not os.path.exists(args.conf):
print "ERROR: ceph conf file '%s' doesn't exist" % args.conf
return STATUS_UNKNOWN
if args.keyring and not os.path.exists(args.keyring):
print "ERROR: keyring file '%s' doesn't exist" % args.keyring
return STATUS_UNKNOWN
# build command
ceph_health = [ceph_exec]
if args.monaddress:
ceph_health.append('-m')
ceph_health.append(args.monaddress)
if args.conf:
ceph_health.append('-c')
ceph_health.append(args.conf)
if args.id:
ceph_health.append('--id')
ceph_health.append(args.id)
if args.keyring:
ceph_health.append('--keyring')
ceph_health.append(args.keyring)
ceph_health.append('health')
if args.detail:
ceph_health.append('detail')
#print ceph_health
# exec command
p = subprocess.Popen(ceph_health,stdout=subprocess.PIPE,stderr=subprocess.PIPE)
output, err = p.communicate()
# parse output
#print "output:", output
#print "err:", err
if output:
# merge multi-lines of output in one line
one_line = output.replace('\n','; ')
if one_line.startswith('HEALTH_OK'):
#print 'HEALTH OK:', one_line[len('HEALTH_OK')+1:]
one_line= one_line[len('HEALTH_OK')+1:].strip()
if one_line:
print 'HEALTH OK:', one_line
else:
print 'HEALTH OK'
return STATUS_OK
elif one_line.startswith('HEALTH_WARN'):
print 'HEALTH WARNING:', one_line[len('HEALTH_WARN')+1:]
return STATUS_WARNING
elif one_line.startswith('HEALTH_ERR'):
print 'HEALTH ERROR:', one_line[len('HEALTH_ERR')+1:]
return STATUS_ERROR
else:
print one_line
elif err:
# read only first line of error
one_line = err.split('\n')[0]
if '-1 ' in one_line:
idx = one_line.rfind('-1 ')
print 'ERROR: %s: %s' % (ceph_exec, one_line[idx+len('-1 '):])
else:
print one_line
return STATUS_UNKNOWN
if __name__ == "__main__":
sys.exit(main())
| apache-2.0 | 3,116,361,188,034,986,500 | 6,709,370,064,245,222,000 | 31.384 | 95 | 0.617836 | false |
bruecksen/isimip | isi_mip/climatemodels/migrations/0088_attachment.py | 1 | 1304 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2018-11-26 14:51
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import isi_mip.climatemodels.models
class Migration(migrations.Migration):
dependencies = [
('climatemodels', '0087_datapublicationconfirmation_confirmed_license'),
]
operations = [
migrations.CreateModel(
name='Attachment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('attachment1', models.FileField(upload_to=isi_mip.climatemodels.models.impact_model_path)),
('attachment2', models.FileField(upload_to=isi_mip.climatemodels.models.impact_model_path)),
('attachment3', models.FileField(upload_to=isi_mip.climatemodels.models.impact_model_path)),
('attachment4', models.FileField(upload_to=isi_mip.climatemodels.models.impact_model_path)),
('attachment5', models.FileField(upload_to=isi_mip.climatemodels.models.impact_model_path)),
('impact_model', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='climatemodels.ImpactModel')),
],
),
]
| mit | -5,514,696,209,649,499,000 | 9,148,776,885,765,327,000 | 43.965517 | 132 | 0.661043 | false |
pratikmallya/hue | desktop/core/ext-py/elementtree/elementtree/XMLTreeBuilder.py | 107 | 3744 | #
# ElementTree
# $Id: XMLTreeBuilder.py 2305 2005-03-01 17:43:09Z fredrik $
#
# an XML tree builder
#
# history:
# 2001-10-20 fl created
# 2002-05-01 fl added namespace support for xmllib
# 2002-07-27 fl require expat (1.5.2 code can use SimpleXMLTreeBuilder)
# 2002-08-17 fl use tag/attribute name memo cache
# 2002-12-04 fl moved XMLTreeBuilder to the ElementTree module
#
# Copyright (c) 1999-2004 by Fredrik Lundh. All rights reserved.
#
# fredrik@pythonware.com
# http://www.pythonware.com
#
# --------------------------------------------------------------------
# The ElementTree toolkit is
#
# Copyright (c) 1999-2004 by Fredrik Lundh
#
# By obtaining, using, and/or copying this software and/or its
# associated documentation, you agree that you have read, understood,
# and will comply with the following terms and conditions:
#
# Permission to use, copy, modify, and distribute this software and
# its associated documentation for any purpose and without fee is
# hereby granted, provided that the above copyright notice appears in
# all copies, and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of
# Secret Labs AB or the author not be used in advertising or publicity
# pertaining to distribution of the software without specific, written
# prior permission.
#
# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT-
# ABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR
# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
# OF THIS SOFTWARE.
# --------------------------------------------------------------------
##
# Tools to build element trees from XML files.
##
import ElementTree
##
# (obsolete) ElementTree builder for XML source data, based on the
# <b>expat</b> parser.
# <p>
# This class is an alias for ElementTree.XMLTreeBuilder. New code
# should use that version instead.
#
# @see elementtree.ElementTree
class TreeBuilder(ElementTree.XMLTreeBuilder):
pass
##
# (experimental) An alternate builder that supports manipulation of
# new elements.
class FancyTreeBuilder(TreeBuilder):
def __init__(self, html=0):
TreeBuilder.__init__(self, html)
self._parser.StartNamespaceDeclHandler = self._start_ns
self._parser.EndNamespaceDeclHandler = self._end_ns
self.namespaces = []
def _start(self, tag, attrib_in):
elem = TreeBuilder._start(self, tag, attrib_in)
self.start(elem)
def _start_list(self, tag, attrib_in):
elem = TreeBuilder._start_list(self, tag, attrib_in)
self.start(elem)
def _end(self, tag):
elem = TreeBuilder._end(self, tag)
self.end(elem)
def _start_ns(self, prefix, value):
self.namespaces.insert(0, (prefix, value))
def _end_ns(self, prefix):
assert self.namespaces.pop(0)[0] == prefix, "implementation confused"
##
# Hook method that's called when a new element has been opened.
# May access the <b>namespaces</b> attribute.
#
# @param element The new element. The tag name and attributes are
# set, but it has no children, and the text and tail attributes
# are still empty.
def start(self, element):
pass
##
# Hook method that's called when a new element has been closed.
# May access the <b>namespaces</b> attribute.
#
# @param element The new element.
def end(self, element):
pass
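# Illustrative sketch (not part of the original module): a minimal subclass
# using the start()/end() hooks and the namespaces attribute described above.
#
#   class NamespacePrinter(FancyTreeBuilder):
#       def start(self, element):
#           print element.tag, self.namespaces
#       def end(self, element):
#           pass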
| apache-2.0 | 2,551,407,794,751,064,600 | 6,255,126,359,180,669,000 | 32.132743 | 77 | 0.68109 | false |
anthonydillon/horizon | openstack_dashboard/dashboards/admin/metering/tables.py | 13 | 3288 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.contrib.humanize.templatetags import humanize
from django.utils import text
from django.utils.translation import ugettext_lazy as _
import six
from horizon import tables
def show_date(datum):
return datum.split('T')[0]
class ModifyUsageReportParameters(tables.LinkAction):
name = "create"
verbose_name = _("Modify Usage Report Parameters")
url = "horizon:admin:metering:create"
classes = ("ajax-modal",)
icon = "edit"
class CreateCSVUsageReport(tables.LinkAction):
name = "csv"
verbose_name = _("Download CSV Summary")
url = "horizon:admin:metering:csvreport"
classes = ("btn-create",)
icon = "download"
class ReportTable(tables.DataTable):
project = tables.Column('project', verbose_name=_('Project'))
service = tables.Column('service', verbose_name=_('Service'))
meter = tables.Column('meter', verbose_name=_('Meter'))
description = tables.Column('description', verbose_name=_('Description'))
time = tables.Column('time', verbose_name=_('Day'),
filters=[show_date])
value = tables.Column('value', verbose_name=_('Value (Avg)'),
filters=[humanize.intcomma])
unit = tables.Column('unit', verbose_name=_('Unit'))
def get_object_id(self, obj):
return "%s-%s-%s" % (obj['project'], obj['service'], obj['meter'])
class Meta(object):
name = 'report_table'
verbose_name = _("Daily Usage Report")
table_actions = (ModifyUsageReportParameters, CreateCSVUsageReport)
multi_select = False
@six.python_2_unicode_compatible
class UsageTable(tables.DataTable):
service = tables.Column('service', verbose_name=_('Service'))
meter = tables.Column('meter', verbose_name=_('Meter'))
description = tables.Column('description', verbose_name=_('Description'))
time = tables.Column('time', verbose_name=_('Day'),
filters=[show_date])
value = tables.Column('value', verbose_name=_('Value (Avg)'),
filters=[humanize.intcomma])
def __init__(self, request, *args, **kwargs):
super(UsageTable, self).__init__(request, *args, **kwargs)
self.title = getattr(self, 'title', None)
def get_object_id(self, datum):
return datum['time'] + datum['meter']
# since these tables are dynamically created and named, we use title
@property
def name(self):
# slugify was introduced in Django 1.5
if hasattr(text, 'slugify'):
return text.slugify(six.text_type(self.title))
else:
return self.title
def __str__(self):
return self.title
class Meta(object):
name = 'daily'
| apache-2.0 | -8,465,111,338,091,067,000 | 2,903,885,378,776,423,400 | 34.73913 | 78 | 0.645681 | false |
HackerBaloo/SublimeOpenInTotalCommander | Open in Total Commander.py | 1 | 2176 | import os
import os.path
import subprocess
import sublime
import sublime_plugin
def is_exe(fpath):
return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
def which(program):
fpath, fname = os.path.split(program)
if fpath:
if is_exe(program):
return program
else:
for path in os.environ["PATH"].split(os.pathsep):
path = path.strip('"')
exe_file = os.path.join(path, program)
if is_exe(exe_file):
return exe_file
return None
class SelectInTotalCommanderCommand(sublime_plugin.TextCommand):
def set_exe(self, exe):
exe = which(exe)
if exe:
#print('exe: ', exe)
self.exe = exe
return True
return False
def __init__(self, view):
self.view = view
settings = sublime.load_settings("Open in Total Commander.sublime-settings")
self.args = settings.get("aruments")
env_name = settings.get("path_environment_variable")
#print('env_name: ', env_name)
variable = ''
if env_name in os.environ:
variable = os.environ[env_name]
if not self.set_exe(variable):
if not self.set_exe(settings.get("executable")):
if not self.set_exe(settings.get("executable2")):
sublime.error_message('No executable found, check Open in Total Commander.sublime-settings!')
def run(self, edit):
path = self.view.file_name()
if path is None:
sublime.error_message('No file in view')
return
#print('path: ', path)
#print('self.args: ', self.args)
args = self.args.format(**locals())
#print('args: ', args)
cmd = '{self.exe} {args}'.format(**locals())
print('cmd: ', cmd)
if os.name == 'posix':
subprocess.call([self.exe, args])
else:
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
proc = subprocess.Popen(cmd, startupinfo=startupinfo)
| mit | 4,254,017,208,917,110,300 | -7,629,363,765,588,480,000 | 30.477612 | 113 | 0.555147 | false |
batermj/algorithm-challenger | code-analysis/programming_anguage/python/source_codes/Python3.5.9/Python-3.5.9/Lib/distutils/tests/test_install_headers.py | 147 | 1264 | """Tests for distutils.command.install_headers."""
import sys
import os
import unittest
import getpass
from distutils.command.install_headers import install_headers
from distutils.tests import support
from test.support import run_unittest
class InstallHeadersTestCase(support.TempdirManager,
support.LoggingSilencer,
support.EnvironGuard,
unittest.TestCase):
def test_simple_run(self):
# we have two headers
header_list = self.mkdtemp()
header1 = os.path.join(header_list, 'header1')
header2 = os.path.join(header_list, 'header2')
self.write_file(header1)
self.write_file(header2)
headers = [header1, header2]
pkg_dir, dist = self.create_dist(headers=headers)
cmd = install_headers(dist)
self.assertEqual(cmd.get_inputs(), headers)
# let's run the command
cmd.install_dir = os.path.join(pkg_dir, 'inst')
cmd.ensure_finalized()
cmd.run()
# let's check the results
self.assertEqual(len(cmd.get_outputs()), 2)
def test_suite():
return unittest.makeSuite(InstallHeadersTestCase)
if __name__ == "__main__":
run_unittest(test_suite())
| apache-2.0 | 1,624,205,068,195,633,700 | 5,372,315,925,474,186,000 | 29.829268 | 61 | 0.628956 | false |
Medigate/cutiuta-server | cutiuta-server/env/lib/python3.4/site-packages/pip/_vendor/lockfile/linklockfile.py | 466 | 2649 | from __future__ import absolute_import
import time
import os
from . import (LockBase, LockFailed, NotLocked, NotMyLock, LockTimeout,
AlreadyLocked)
class LinkLockFile(LockBase):
"""Lock access to a file using atomic property of link(2).
>>> lock = LinkLockFile('somefile')
>>> lock = LinkLockFile('somefile', threaded=False)
"""
def acquire(self, timeout=None):
try:
open(self.unique_name, "wb").close()
except IOError:
raise LockFailed("failed to create %s" % self.unique_name)
timeout = timeout is not None and timeout or self.timeout
end_time = time.time()
if timeout is not None and timeout > 0:
end_time += timeout
while True:
# Try and create a hard link to it.
try:
os.link(self.unique_name, self.lock_file)
except OSError:
# Link creation failed. Maybe we've double-locked?
nlinks = os.stat(self.unique_name).st_nlink
if nlinks == 2:
# The original link plus the one I created == 2. We're
# good to go.
return
else:
# Otherwise the lock creation failed.
if timeout is not None and time.time() > end_time:
os.unlink(self.unique_name)
if timeout > 0:
raise LockTimeout("Timeout waiting to acquire"
" lock for %s" %
self.path)
else:
raise AlreadyLocked("%s is already locked" %
self.path)
time.sleep(timeout is not None and timeout/10 or 0.1)
else:
# Link creation succeeded. We're good to go.
return
def release(self):
if not self.is_locked():
raise NotLocked("%s is not locked" % self.path)
elif not os.path.exists(self.unique_name):
raise NotMyLock("%s is locked, but not by me" % self.path)
os.unlink(self.unique_name)
os.unlink(self.lock_file)
def is_locked(self):
return os.path.exists(self.lock_file)
def i_am_locking(self):
return (self.is_locked() and
os.path.exists(self.unique_name) and
os.stat(self.unique_name).st_nlink == 2)
def break_lock(self):
if os.path.exists(self.lock_file):
os.unlink(self.lock_file)
| gpl-3.0 | -6,922,749,615,486,215,000 | -3,559,162,733,355,953,000 | 35.287671 | 75 | 0.508116 | false |
Aasmi/scikit-learn | sklearn/feature_selection/variance_threshold.py | 238 | 2594 | # Author: Lars Buitinck <L.J.Buitinck@uva.nl>
# License: 3-clause BSD
import numpy as np
from ..base import BaseEstimator
from .base import SelectorMixin
from ..utils import check_array
from ..utils.sparsefuncs import mean_variance_axis
from ..utils.validation import check_is_fitted
class VarianceThreshold(BaseEstimator, SelectorMixin):
"""Feature selector that removes all low-variance features.
This feature selection algorithm looks only at the features (X), not the
desired outputs (y), and can thus be used for unsupervised learning.
Read more in the :ref:`User Guide <variance_threshold>`.
Parameters
----------
threshold : float, optional
Features with a training-set variance lower than this threshold will
be removed. The default is to keep all features with non-zero variance,
i.e. remove the features that have the same value in all samples.
Attributes
----------
variances_ : array, shape (n_features,)
Variances of individual features.
Examples
--------
The following dataset has integer features, two of which are the same
in every sample. These are removed with the default setting for threshold::
>>> X = [[0, 2, 0, 3], [0, 1, 4, 3], [0, 1, 1, 3]]
>>> selector = VarianceThreshold()
>>> selector.fit_transform(X)
array([[2, 0],
[1, 4],
[1, 1]])
"""
def __init__(self, threshold=0.):
self.threshold = threshold
def fit(self, X, y=None):
"""Learn empirical variances from X.
Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
Sample vectors from which to compute variances.
y : any
Ignored. This parameter exists only for compatibility with
sklearn.pipeline.Pipeline.
Returns
-------
self
"""
X = check_array(X, ('csr', 'csc'), dtype=np.float64)
if hasattr(X, "toarray"): # sparse matrix
_, self.variances_ = mean_variance_axis(X, axis=0)
else:
self.variances_ = np.var(X, axis=0)
if np.all(self.variances_ <= self.threshold):
msg = "No feature in X meets the variance threshold {0:.5f}"
if X.shape[0] == 1:
msg += " (X contains only one sample)"
raise ValueError(msg.format(self.threshold))
return self
def _get_support_mask(self):
check_is_fitted(self, 'variances_')
return self.variances_ > self.threshold
| bsd-3-clause | -5,055,543,874,576,218,000 | 1,147,733,837,307,614,200 | 30.634146 | 79 | 0.603315 | false |
XENON1T/pax | pax/trigger_plugins/FindSignals.py | 1 | 10320 | import numpy as np
import numba
from pax.trigger import TriggerPlugin
from pax.datastructure import TriggerSignal
from pax.dsputils import adc_to_pe
# Interrupts thrown by the signal finder
# Negative, since positive numbers indicate number of signals found during normal operation
SIGNAL_BUFFER_FULL = -1
SAVE_DARK_MONITOR_DATA = -2
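# Illustrative sketch (not part of the original plugin): the generator protocol
# used between process() and signal_finder() below. The generator yields
# negative interrupt codes while running and finally the number of signals
# found, e.g.:
#
#   for result in signal_finder(...):
#       if result == SIGNAL_BUFFER_FULL:        # copy the buffer out, continue
#           ...
#       elif result == SAVE_DARK_MONITOR_DATA:  # flush dark-rate counters
#           ...
#       else:                                   # result >= 0: signal count, done
#           break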
class FindSignals(TriggerPlugin):
# How often did we save the dark rate since the last full (coincidence matrix) save?
dark_monitor_saves = 0
def startup(self):
# Initialize buffer for numba signal finding routine.
# Although we're able to extend this buffer as needed, we must do so outside numba
        # and it involves copying data, so if you pick too small a buffer size you will hurt performance.
self.numba_signals_buffer = np.zeros(self.config['numba_signal_buffer_size'],
dtype=TriggerSignal.get_dtype())
# Initialize buffers for tallying pulses / coincidences
# Reason for +1 is again 'ghost' channels, see trigger.py
n_channels = self.trigger.pax_config['DEFAULT']['n_channels'] + 1
self.all_pulses_tally = np.zeros(n_channels, dtype=np.int)
self.lone_pulses_tally = np.zeros(n_channels, dtype=np.int)
self.coincidence_tally = np.zeros((n_channels, n_channels), dtype=np.int)
# Get conversion factor from ADC counts to pe for each pmt
# The 'ghost' PMT will have gain 1 always
self.gain_conversion_factors = np.array([adc_to_pe(self.trigger.pax_config['DEFAULT'], ch)
for ch in range(n_channels - 1)] +
[1])
# We must keep track of the next time to save the dark rate between batches, since a batch usually does not
# end exactly at a save time.
self.next_save_time = None
def process(self, data):
if self.next_save_time is None:
self.next_save_time = self.config['dark_rate_save_interval']
if len(data.pulses):
self.next_save_time += data.pulses['time'][0]
sigf = signal_finder(times=data.pulses,
signal_separation=self.config['signal_separation'],
signal_buffer=self.numba_signals_buffer,
next_save_time=self.next_save_time,
dark_rate_save_interval=self.config['dark_rate_save_interval'],
all_pulses_tally=self.all_pulses_tally,
lone_pulses_tally=self.lone_pulses_tally,
coincidence_tally=self.coincidence_tally,
gain_conversion_factors=self.gain_conversion_factors,
)
saved_buffers = []
for result in sigf:
if result >= 0:
n_signals_found = result
if len(saved_buffers):
self.log.debug("%d previous signal buffers were saved, concatenating and returning them." % (
len(saved_buffers)))
saved_buffers.append(self.numba_signals_buffer[:n_signals_found])
signals = np.concatenate(saved_buffers)
else:
signals = self.numba_signals_buffer[:n_signals_found]
break
elif result == SIGNAL_BUFFER_FULL:
self.log.debug("Signal buffer is full, copying it out.")
saved_buffers.append(self.numba_signals_buffer.copy())
elif result == SAVE_DARK_MONITOR_DATA:
self.save_dark_monitor_data()
self.next_save_time += self.config['dark_rate_save_interval']
else:
raise ValueError("Unknown signal finder interrupt %d!" % result)
if data.last_data:
self.save_dark_monitor_data(last_time=True)
self.log.debug("Signal finder finished on this data increment, found %d signals." % len(signals))
data.signals = signals
def save_dark_monitor_data(self, last_time=False):
# Save the PMT dark rate
self.log.debug("Saving pulse rate: %d pulses (of which %d lone pulses)" % (
self.all_pulses_tally.sum(), self.lone_pulses_tally.sum()))
self.trigger.save_monitor_data('count_of_all_pulses', self.all_pulses_tally)
self.all_pulses_tally *= 0
self.trigger.save_monitor_data('count_of_lone_pulses', self.lone_pulses_tally)
self.lone_pulses_tally *= 0
self.dark_monitor_saves += 1
if last_time or self.dark_monitor_saves == self.config['dark_monitor_full_save_every']:
# Save the full coincidence rate
self.log.debug("Saving coincidence tally matrix, total %d" % self.coincidence_tally.sum())
self.trigger.save_monitor_data('count_of_2pmt_coincidences', self.coincidence_tally)
self.dark_monitor_saves = 0
self.coincidence_tally *= 0
def signal_finder(times, signal_separation,
signal_buffer,
next_save_time, dark_rate_save_interval,
all_pulses_tally, lone_pulses_tally, coincidence_tally,
gain_conversion_factors):
"""Fill signal_buffer with signals in times. Other arguments:
- signal_separation: group pulses into signals separated by signal_separation.
- coincidence_tally: nxn matrix of zero where n is number of channels,used to store 2-pmt coincidences
(with 1-pmt, i.e. dark rate, on diagonal)
- next_save_time: next time (in ns since start of run) the dark rate should be saved
- dark_rate_save_interval: yield SAVE_DARK_MONITOR every dark_rate_save_interval
Raises "interrupts" (yield numbers) to communicate with caller.
Online RMS algorithm is Knuth/Welford: https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance
"""
# Allocate memory for some internal buffers (which we can't do in numba) we don't need outside the signal finder
n_channels = len(all_pulses_tally) # Actually this is 1 more than the number of connected channels, see above
does_channel_contribute = np.zeros(n_channels, dtype=np.int8) # Bool gives weird errors
area_per_channel = np.zeros(n_channels, dtype=np.float64)
return _signal_finder(times, signal_separation,
signal_buffer,
next_save_time, dark_rate_save_interval,
all_pulses_tally, lone_pulses_tally, coincidence_tally,
gain_conversion_factors,
area_per_channel, does_channel_contribute)
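# Illustrative sketch (not part of the original plugin): the Knuth/Welford
# online mean/RMS update used below, written out for a single list of pulse
# times. Feeding the times [0, 10, 20] gives mean 10.0 and
# RMS (200 / 3) ** 0.5 ~= 8.165.
#
#   n, mean, m2 = 0, 0.0, 0.0
#   for t in (0.0, 10.0, 20.0):
#       n += 1
#       delta = t - mean
#       mean += delta / n
#       m2 += delta * (t - mean)   # not delta**2: mean was already updated
#   rms = (m2 / n) ** 0.5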
@numba.jit()
def _signal_finder(times, signal_separation,
signal_buffer,
next_save_time, dark_rate_save_interval,
all_pulses_tally, lone_pulses_tally, coincidence_tally,
gain_conversion_factors,
area_per_channel, does_channel_contribute):
"""Numba backend for signal_finder: please see its docstring instead."""
in_signal = False
passes_test = False # Does the current time pass the signal inclusion test?
current_signal = 0 # Index of the current signal in the signal buffer
m2 = 0.0 # Temporary variable for online RMS computation
if not len(times):
yield 0 # no point looking for events. Communicate no events found, then exit.
return
for time_index, _time in enumerate(times):
t = _time.time
pmt = _time.pmt
area = _time.area * gain_conversion_factors[pmt]
# Save the dark rate. Notice it's WHILE t >= next_save_time, which ensures we save a lot of zeroes when there is
# a large gap in the data. Let's hope nobody tries to pass t = float('inf')...
while t >= next_save_time:
yield SAVE_DARK_MONITOR_DATA
next_save_time += dark_rate_save_interval
is_last_time = time_index == len(times) - 1
if not is_last_time:
# Should this time be in a signal? === Is the next time close enough?
passes_test = times[time_index+1].time - t < signal_separation
if not in_signal and passes_test:
# Start a signal. We must clear all attributes first to remove (potential) old stuff from the buffer.
in_signal = True
s = signal_buffer[current_signal]
s.left_time = t
s.right_time = 0
s.time_mean = 0
s.time_rms = 0
s.n_pulses = 0
s.n_contributing_channels = 0
s.area = 0
area_per_channel *= 0
does_channel_contribute *= 0
if in_signal: # Notice if, not elif. Work on first time in signal too.
# Update signal quantities
s = signal_buffer[current_signal]
area_per_channel[pmt] += area
does_channel_contribute[pmt] = True
s.n_pulses += 1
delta = t - s.time_mean
s.time_mean += delta / s.n_pulses
m2 += delta * (t - s.time_mean) # Notice this isn't delta**2: time_mean changed on the previous line!
if not passes_test or is_last_time:
# Signal has ended: store its quantities and move on
s.right_time = t
s.time_rms = (m2 / s.n_pulses)**0.5
s.n_contributing_channels = does_channel_contribute.sum()
s.area = area_per_channel.sum()
if s.n_contributing_channels == 2:
indices = np.nonzero(does_channel_contribute)[0]
coincidence_tally[indices[0], indices[1]] += 1
current_signal += 1
m2 = 0
in_signal = False
if current_signal == len(signal_buffer):
yield SIGNAL_BUFFER_FULL
# Caller will have copied out the signal buffer, we can start from 0 again
current_signal = 0
else:
lone_pulses_tally[pmt] += 1
all_pulses_tally[pmt] += 1
# Let caller know number of signals found, then raise StopIteration
yield current_signal
| bsd-3-clause | -8,437,612,663,708,768,000 | 5,488,828,431,019,874,000 | 46.123288 | 120 | 0.590601 | false |
oblique-labs/pyVM | rpython/rtyper/normalizecalls.py | 1 | 16966 | from rpython.annotator import model as annmodel, description
from rpython.flowspace.argument import Signature
from rpython.flowspace.model import (Variable, Constant, Block, Link,
checkgraph, FunctionGraph, SpaceOperation)
from rpython.rlib.objectmodel import ComputedIntSymbolic
from rpython.rtyper.error import TyperError
from rpython.rtyper.rmodel import getgcflavor
from rpython.tool.sourcetools import valid_identifier
from rpython.annotator.classdesc import ClassDesc
def normalize_call_familes(annotator):
for callfamily in annotator.bookkeeper.pbc_maximal_call_families.infos():
if not callfamily.modified:
assert callfamily.normalized
continue
normalize_calltable(annotator, callfamily)
callfamily.normalized = True
callfamily.modified = False
def normalize_calltable(annotator, callfamily):
"""Try to normalize all rows of a table."""
nshapes = len(callfamily.calltables)
for shape, table in callfamily.calltables.items():
for row in table:
did_something = normalize_calltable_row_signature(annotator, shape,
row)
if did_something:
assert not callfamily.normalized, "change in call family normalisation"
if nshapes != 1:
raise_call_table_too_complex_error(callfamily, annotator)
while True:
progress = False
for shape, table in callfamily.calltables.items():
for row in table:
progress |= normalize_calltable_row_annotation(annotator,
row.values())
if not progress:
return # done
assert not callfamily.normalized, "change in call family normalisation"
def raise_call_table_too_complex_error(callfamily, annotator):
msg = []
items = callfamily.calltables.items()
for i, (shape1, table1) in enumerate(items):
for shape2, table2 in items[i + 1:]:
if shape1 == shape2:
continue
row1 = table1[0]
row2 = table2[0]
problematic_function_graphs = set(row1.values()).union(set(row2.values()))
pfg = [str(graph) for graph in problematic_function_graphs]
pfg.sort()
msg.append("the following functions:")
msg.append(" %s" % ("\n ".join(pfg), ))
msg.append("are called with inconsistent numbers of arguments")
msg.append("(and/or the argument names are different, which is"
" not supported in this case)")
if shape1[0] != shape2[0]:
msg.append("sometimes with %s arguments, sometimes with %s" % (shape1[0], shape2[0]))
else:
pass # XXX better message in this case
callers = []
msg.append("the callers of these functions are:")
for tag, (caller, callee) in annotator.translator.callgraph.iteritems():
if callee not in problematic_function_graphs:
continue
if str(caller) in callers:
continue
callers.append(str(caller))
callers.sort()
for caller in callers:
msg.append(" %s" % (caller, ))
raise TyperError("\n".join(msg))
def normalize_calltable_row_signature(annotator, shape, row):
graphs = row.values()
assert graphs, "no graph??"
sig0 = graphs[0].signature
defaults0 = graphs[0].defaults
for graph in graphs[1:]:
if graph.signature != sig0:
break
if graph.defaults != defaults0:
break
else:
return False # nothing to do, all signatures already match
shape_cnt, shape_keys, shape_star = shape
assert not shape_star, "should have been removed at this stage"
# for the first 'shape_cnt' arguments we need to generalize to
# a common type
call_nbargs = shape_cnt + len(shape_keys)
did_something = False
for graph in graphs:
argnames, varargname, kwargname = graph.signature
assert not varargname, "XXX not implemented"
assert not kwargname, "XXX not implemented" # ?
inputargs_s = [annotator.binding(v) for v in graph.getargs()]
argorder = range(shape_cnt)
for key in shape_keys:
i = list(argnames).index(key)
assert i not in argorder
argorder.append(i)
need_reordering = (argorder != range(call_nbargs))
if need_reordering or len(graph.getargs()) != call_nbargs:
oldblock = graph.startblock
inlist = []
defaults = graph.defaults or ()
num_nondefaults = len(inputargs_s) - len(defaults)
defaults = [description.NODEFAULT] * num_nondefaults + list(defaults)
newdefaults = []
for j in argorder:
v = Variable(graph.getargs()[j])
annotator.setbinding(v, inputargs_s[j])
inlist.append(v)
newdefaults.append(defaults[j])
newblock = Block(inlist)
# prepare the output args of newblock:
# 1. collect the positional arguments
outlist = inlist[:shape_cnt]
# 2. add defaults and keywords
for j in range(shape_cnt, len(inputargs_s)):
try:
i = argorder.index(j)
v = inlist[i]
except ValueError:
default = defaults[j]
if default is description.NODEFAULT:
raise TyperError(
"call pattern has %d positional arguments, "
"but %r takes at least %d arguments" % (
shape_cnt, graph.name, num_nondefaults))
v = Constant(default)
outlist.append(v)
newblock.closeblock(Link(outlist, oldblock))
graph.startblock = newblock
for i in range(len(newdefaults)-1,-1,-1):
if newdefaults[i] is description.NODEFAULT:
newdefaults = newdefaults[i:]
break
graph.defaults = tuple(newdefaults)
graph.signature = Signature([argnames[j] for j in argorder],
None, None)
# finished
checkgraph(graph)
annotator.annotated[newblock] = annotator.annotated[oldblock]
did_something = True
return did_something
def normalize_calltable_row_annotation(annotator, graphs):
if len(graphs) <= 1:
return False # nothing to do
graph_bindings = {}
for graph in graphs:
graph_bindings[graph] = [annotator.binding(v)
for v in graph.getargs()]
iterbindings = graph_bindings.itervalues()
nbargs = len(iterbindings.next())
for binding in iterbindings:
assert len(binding) == nbargs
generalizedargs = []
for i in range(nbargs):
args_s = []
for graph, bindings in graph_bindings.items():
args_s.append(bindings[i])
s_value = annmodel.unionof(*args_s)
generalizedargs.append(s_value)
result_s = [annotator.binding(graph.getreturnvar())
for graph in graph_bindings]
generalizedresult = annmodel.unionof(*result_s)
conversion = False
for graph in graphs:
bindings = graph_bindings[graph]
need_conversion = (generalizedargs != bindings)
if need_conversion:
conversion = True
oldblock = graph.startblock
inlist = []
for j, s_value in enumerate(generalizedargs):
v = Variable(graph.getargs()[j])
annotator.setbinding(v, s_value)
inlist.append(v)
newblock = Block(inlist)
# prepare the output args of newblock and link
outlist = inlist[:]
newblock.closeblock(Link(outlist, oldblock))
graph.startblock = newblock
# finished
checkgraph(graph)
annotator.annotated[newblock] = annotator.annotated[oldblock]
# convert the return value too
if annotator.binding(graph.getreturnvar()) != generalizedresult:
conversion = True
annotator.setbinding(graph.getreturnvar(), generalizedresult)
return conversion
# ____________________________________________________________
def merge_classpbc_getattr_into_classdef(annotator):
# code like 'some_class.attr' will record an attribute access in the
# PBC access set of the family of classes of 'some_class'. If the classes
# have corresponding ClassDefs, they are not updated by the annotator.
# We have to do it now.
all_families = annotator.bookkeeper.classpbc_attr_families
for attrname, access_sets in all_families.items():
for access_set in access_sets.infos():
descs = access_set.descs
if len(descs) <= 1:
continue
if not isinstance(descs.iterkeys().next(), ClassDesc):
continue
classdefs = [desc.getuniqueclassdef() for desc in descs]
commonbase = classdefs[0]
for cdef in classdefs[1:]:
commonbase = commonbase.commonbase(cdef)
if commonbase is None:
raise TyperError("reading attribute %r: no common base "
"class for %r" % (attrname, descs.keys()))
extra_access_sets = commonbase.extra_access_sets
if commonbase.repr is not None:
assert access_set in extra_access_sets # minimal sanity check
continue
access_set.commonbase = commonbase
if access_set not in extra_access_sets:
counter = len(extra_access_sets)
extra_access_sets[access_set] = attrname, counter
# ____________________________________________________________
def create_class_constructors(annotator):
bk = annotator.bookkeeper
call_families = bk.pbc_maximal_call_families
for family in call_families.infos():
if len(family.descs) <= 1:
continue
descs = family.descs.keys()
if not isinstance(descs[0], ClassDesc):
continue
# Note that if classes are in the same callfamily, their __init__
# attribute must be in the same attrfamily as well.
change = descs[0].mergeattrfamilies(descs[1:], '__init__')
if hasattr(descs[0].getuniqueclassdef(), 'my_instantiate_graph'):
assert not change, "after the fact change to a family of classes" # minimal sanity check
continue
# Put __init__ into the attr family, for ClassesPBCRepr.call()
attrfamily = descs[0].getattrfamily('__init__')
inits_s = [desc.s_read_attribute('__init__') for desc in descs]
s_value = annmodel.unionof(attrfamily.s_value, *inits_s)
attrfamily.s_value = s_value
# ClassesPBCRepr.call() will also need instantiate() support
for desc in descs:
bk.needs_generic_instantiate[desc.getuniqueclassdef()] = True
# ____________________________________________________________
def create_instantiate_functions(annotator):
# build the 'instantiate() -> instance of C' functions for the vtables
needs_generic_instantiate = annotator.bookkeeper.needs_generic_instantiate
for classdef in needs_generic_instantiate:
assert getgcflavor(classdef) == 'gc' # only gc-case
create_instantiate_function(annotator, classdef)
def create_instantiate_function(annotator, classdef):
# build the graph of a function that looks like
#
# def my_instantiate():
# return instantiate(cls)
#
if hasattr(classdef, 'my_instantiate_graph'):
return
v = Variable()
block = Block([])
block.operations.append(SpaceOperation('instantiate1', [], v))
name = valid_identifier('instantiate_' + classdef.name)
graph = FunctionGraph(name, block)
block.closeblock(Link([v], graph.returnblock))
annotator.setbinding(v, annmodel.SomeInstance(classdef))
annotator.annotated[block] = graph
# force the result to be converted to a generic OBJECTPTR
generalizedresult = annmodel.SomeInstance(classdef=None)
annotator.setbinding(graph.getreturnvar(), generalizedresult)
classdef.my_instantiate_graph = graph
annotator.translator.graphs.append(graph)
# ____________________________________________________________
class TooLateForNewSubclass(Exception):
pass
class TotalOrderSymbolic(ComputedIntSymbolic):
def __init__(self, orderwitness, peers):
self.orderwitness = orderwitness
self.peers = peers
self.value = None
self._with_subclasses = None # unknown
peers.append(self)
def __cmp__(self, other):
if not isinstance(other, TotalOrderSymbolic):
return cmp(self.compute_fn(), other)
else:
return cmp(self.orderwitness, other.orderwitness)
# support for implementing int_between: (a<=b<c) with (b-a<c-a)
# see rpython.jit.metainterp.pyjitpl.opimpl_int_between
def __sub__(self, other):
return self.compute_fn() - other
def __rsub__(self, other):
return other - self.compute_fn()
def check_any_subclass_in_peer_list(self, i):
        # return True if the next peer, in order, is not the end marker
        # for this start marker, i.e. at least one subclass sits in between
assert self.peers[i] is self
return self.peers[i + 1].orderwitness != self.orderwitness + [MAX]
def number_with_subclasses(self):
# Return True or False depending on whether this is the
# subclassrange_min corresponding to a class which has subclasses
# or not. If this is called and returns False, then adding later
# new subclasses will crash in compute_fn().
if self._with_subclasses is None: # unknown so far
self.peers.sort()
i = self.peers.index(self)
self._with_subclasses = self.check_any_subclass_in_peer_list(i)
return self._with_subclasses
def compute_fn(self):
if self.value is None:
self.peers.sort()
for i, peer in enumerate(self.peers):
assert peer.value is None or peer.value == i
peer.value = i
#
if peer._with_subclasses is False:
if peer.check_any_subclass_in_peer_list(i):
raise TooLateForNewSubclass
#
assert self.value is not None
return self.value
def dump(self, annotator): # for debugging
self.peers.sort()
mapping = {}
for classdef in annotator.bookkeeper.classdefs:
if hasattr(classdef, '_unique_cdef_id'):
mapping[classdef._unique_cdef_id] = classdef
for peer in self.peers:
if peer is self:
print '==>',
else:
print ' ',
print 'value %4s --' % (peer.value,), peer.orderwitness,
if peer.orderwitness[-1] in mapping:
print mapping[peer.orderwitness[-1]]
else:
print
def assign_inheritance_ids(annotator):
# we sort the classes by lexicographic order of reversed(mro),
# which gives a nice depth-first order. The classes are turned
# into numbers in order to (1) help determinism, (2) ensure that
# new hierarchies of classes with no common base classes can be
# added later and get higher numbers.
bk = annotator.bookkeeper
try:
lst = bk._inheritance_id_symbolics
except AttributeError:
lst = bk._inheritance_id_symbolics = []
for classdef in annotator.bookkeeper.classdefs:
if not hasattr(classdef, 'minid'):
witness = [get_unique_cdef_id(cdef) for cdef in classdef.getmro()]
witness.reverse()
classdef.minid = TotalOrderSymbolic(witness, lst)
classdef.maxid = TotalOrderSymbolic(witness + [MAX], lst)
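# Illustrative example (not part of the original module): for a hierarchy
#     class A(object): pass
#     class B(A): pass
# the order witnesses are the reversed MROs, so A gets roughly [idA] and
# [idA, MAX] while B gets [idA, idB] and [idA, idB, MAX].  After lexicographic
# sorting, B.minid and B.maxid fall strictly between A.minid and A.maxid,
# which is what the int_between-style subclass checks rely on.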
MAX = 1E100
_cdef_id_counter = 0
def get_unique_cdef_id(cdef):
global _cdef_id_counter
try:
return cdef._unique_cdef_id
except AttributeError:
cdef._unique_cdef_id = _cdef_id_counter
_cdef_id_counter += 1
return cdef._unique_cdef_id
# ____________________________________________________________
def perform_normalizations(annotator):
create_class_constructors(annotator)
annotator.frozen += 1
try:
normalize_call_familes(annotator)
merge_classpbc_getattr_into_classdef(annotator)
assign_inheritance_ids(annotator)
finally:
annotator.frozen -= 1
create_instantiate_functions(annotator)
| mit | 4,137,588,253,988,607,500 | -2,160,348,067,079,881,700 | 40.279805 | 101 | 0.590357 | false |
kleientertainment/ds_mod_tools | pkg/win32/Python27/Lib/CGIHTTPServer.py | 2 | 13514 | """CGI-savvy HTTP Server.
This module builds on SimpleHTTPServer by implementing GET and POST
requests to cgi-bin scripts.
If the os.fork() function is not present (e.g. on Windows),
os.popen2() is used as a fallback, with slightly altered semantics; if
that function is not present either (e.g. on Macintosh), only Python
scripts are supported, and they are executed by the current process.
In all cases, the implementation is intentionally naive -- all
requests are executed synchronously.
SECURITY WARNING: DON'T USE THIS CODE UNLESS YOU ARE INSIDE A FIREWALL
-- it may execute arbitrary Python code or external programs.
Note that status code 200 is sent prior to execution of a CGI script, so
scripts cannot send other status codes such as 302 (redirect).
"""
__version__ = "0.4"
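# A minimal usage sketch (illustrative, not part of the original module):
# serve the current directory with CGI support on port 8000, with scripts
# expected under ./cgi-bin/:
#
#   import BaseHTTPServer
#   import CGIHTTPServer
#
#   httpd = BaseHTTPServer.HTTPServer(('', 8000),
#                                     CGIHTTPServer.CGIHTTPRequestHandler)
#   httpd.serve_forever()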
__all__ = ["CGIHTTPRequestHandler"]
import os
import sys
import urllib
import BaseHTTPServer
import SimpleHTTPServer
import select
import copy
class CGIHTTPRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
"""Complete HTTP server with GET, HEAD and POST commands.
GET and HEAD also support running CGI scripts.
The POST command is *only* implemented for CGI scripts.
"""
# Determine platform specifics
have_fork = hasattr(os, 'fork')
have_popen2 = hasattr(os, 'popen2')
have_popen3 = hasattr(os, 'popen3')
# Make rfile unbuffered -- we need to read one line and then pass
# the rest to a subprocess, so we can't use buffered input.
rbufsize = 0
def do_POST(self):
"""Serve a POST request.
This is only implemented for CGI scripts.
"""
if self.is_cgi():
self.run_cgi()
else:
self.send_error(501, "Can only POST to CGI scripts")
def send_head(self):
"""Version of send_head that support CGI scripts"""
if self.is_cgi():
return self.run_cgi()
else:
return SimpleHTTPServer.SimpleHTTPRequestHandler.send_head(self)
def is_cgi(self):
"""Test whether self.path corresponds to a CGI script.
Returns True and updates the cgi_info attribute to the tuple
(dir, rest) if self.path requires running a CGI script.
Returns False otherwise.
If any exception is raised, the caller should assume that
self.path was rejected as invalid and act accordingly.
The default implementation tests whether the normalized url
path begins with one of the strings in self.cgi_directories
(and the next character is a '/' or the end of the string).
"""
collapsed_path = _url_collapse_path(self.path)
dir_sep = collapsed_path.find('/', 1)
head, tail = collapsed_path[:dir_sep], collapsed_path[dir_sep+1:]
if head in self.cgi_directories:
self.cgi_info = head, tail
return True
return False
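    # Illustrative behaviour (not part of the original module):
    #   '/cgi-bin/test.py?x=1' -> cgi_info = ('/cgi-bin', 'test.py?x=1'), True
    #   '/static/index.html'   -> False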
cgi_directories = ['/cgi-bin', '/htbin']
def is_executable(self, path):
"""Test whether argument path is an executable file."""
return executable(path)
def is_python(self, path):
"""Test whether argument path is a Python script."""
head, tail = os.path.splitext(path)
return tail.lower() in (".py", ".pyw")
def run_cgi(self):
"""Execute a CGI script."""
path = self.path
dir, rest = self.cgi_info
i = path.find('/', len(dir) + 1)
while i >= 0:
nextdir = path[:i]
nextrest = path[i+1:]
scriptdir = self.translate_path(nextdir)
if os.path.isdir(scriptdir):
dir, rest = nextdir, nextrest
i = path.find('/', len(dir) + 1)
else:
break
# find an explicit query string, if present.
i = rest.rfind('?')
if i >= 0:
rest, query = rest[:i], rest[i+1:]
else:
query = ''
# dissect the part after the directory name into a script name &
# a possible additional path, to be stored in PATH_INFO.
i = rest.find('/')
if i >= 0:
script, rest = rest[:i], rest[i:]
else:
script, rest = rest, ''
scriptname = dir + '/' + script
scriptfile = self.translate_path(scriptname)
if not os.path.exists(scriptfile):
self.send_error(404, "No such CGI script (%r)" % scriptname)
return
if not os.path.isfile(scriptfile):
self.send_error(403, "CGI script is not a plain file (%r)" %
scriptname)
return
ispy = self.is_python(scriptname)
if not ispy:
if not (self.have_fork or self.have_popen2 or self.have_popen3):
self.send_error(403, "CGI script is not a Python script (%r)" %
scriptname)
return
if not self.is_executable(scriptfile):
self.send_error(403, "CGI script is not executable (%r)" %
scriptname)
return
# Reference: http://hoohoo.ncsa.uiuc.edu/cgi/env.html
# XXX Much of the following could be prepared ahead of time!
env = copy.deepcopy(os.environ)
env['SERVER_SOFTWARE'] = self.version_string()
env['SERVER_NAME'] = self.server.server_name
env['GATEWAY_INTERFACE'] = 'CGI/1.1'
env['SERVER_PROTOCOL'] = self.protocol_version
env['SERVER_PORT'] = str(self.server.server_port)
env['REQUEST_METHOD'] = self.command
uqrest = urllib.unquote(rest)
env['PATH_INFO'] = uqrest
env['PATH_TRANSLATED'] = self.translate_path(uqrest)
env['SCRIPT_NAME'] = scriptname
if query:
env['QUERY_STRING'] = query
host = self.address_string()
if host != self.client_address[0]:
env['REMOTE_HOST'] = host
env['REMOTE_ADDR'] = self.client_address[0]
authorization = self.headers.getheader("authorization")
if authorization:
authorization = authorization.split()
if len(authorization) == 2:
import base64, binascii
env['AUTH_TYPE'] = authorization[0]
if authorization[0].lower() == "basic":
try:
authorization = base64.decodestring(authorization[1])
except binascii.Error:
pass
else:
authorization = authorization.split(':')
if len(authorization) == 2:
env['REMOTE_USER'] = authorization[0]
# XXX REMOTE_IDENT
if self.headers.typeheader is None:
env['CONTENT_TYPE'] = self.headers.type
else:
env['CONTENT_TYPE'] = self.headers.typeheader
length = self.headers.getheader('content-length')
if length:
env['CONTENT_LENGTH'] = length
referer = self.headers.getheader('referer')
if referer:
env['HTTP_REFERER'] = referer
accept = []
for line in self.headers.getallmatchingheaders('accept'):
if line[:1] in "\t\n\r ":
accept.append(line.strip())
else:
accept = accept + line[7:].split(',')
env['HTTP_ACCEPT'] = ','.join(accept)
ua = self.headers.getheader('user-agent')
if ua:
env['HTTP_USER_AGENT'] = ua
co = filter(None, self.headers.getheaders('cookie'))
if co:
env['HTTP_COOKIE'] = ', '.join(co)
# XXX Other HTTP_* headers
# Since we're setting the env in the parent, provide empty
# values to override previously set values
for k in ('QUERY_STRING', 'REMOTE_HOST', 'CONTENT_LENGTH',
'HTTP_USER_AGENT', 'HTTP_COOKIE', 'HTTP_REFERER'):
env.setdefault(k, "")
self.send_response(200, "Script output follows")
decoded_query = query.replace('+', ' ')
if self.have_fork:
# Unix -- fork as we should
args = [script]
if '=' not in decoded_query:
args.append(decoded_query)
nobody = nobody_uid()
self.wfile.flush() # Always flush before forking
pid = os.fork()
if pid != 0:
# Parent
pid, sts = os.waitpid(pid, 0)
# throw away additional data [see bug #427345]
while select.select([self.rfile], [], [], 0)[0]:
if not self.rfile.read(1):
break
if sts:
self.log_error("CGI script exit status %#x", sts)
return
# Child
try:
try:
os.setuid(nobody)
except os.error:
pass
os.dup2(self.rfile.fileno(), 0)
os.dup2(self.wfile.fileno(), 1)
os.execve(scriptfile, args, env)
except:
self.server.handle_error(self.request, self.client_address)
os._exit(127)
else:
# Non Unix - use subprocess
import subprocess
cmdline = [scriptfile]
if self.is_python(scriptfile):
interp = sys.executable
if interp.lower().endswith("w.exe"):
# On Windows, use python.exe, not pythonw.exe
interp = interp[:-5] + interp[-4:]
cmdline = [interp, '-u'] + cmdline
if '=' not in query:
cmdline.append(query)
self.log_message("command: %s", subprocess.list2cmdline(cmdline))
try:
nbytes = int(length)
except (TypeError, ValueError):
nbytes = 0
p = subprocess.Popen(cmdline,
stdin = subprocess.PIPE,
stdout = subprocess.PIPE,
stderr = subprocess.PIPE,
env = env
)
if self.command.lower() == "post" and nbytes > 0:
data = self.rfile.read(nbytes)
else:
data = None
# throw away additional data [see bug #427345]
while select.select([self.rfile._sock], [], [], 0)[0]:
if not self.rfile._sock.recv(1):
break
stdout, stderr = p.communicate(data)
self.wfile.write(stdout)
if stderr:
self.log_error('%s', stderr)
p.stderr.close()
p.stdout.close()
status = p.returncode
if status:
self.log_error("CGI script exit status %#x", status)
else:
self.log_message("CGI script exited OK")
def _url_collapse_path(path):
"""
    Given a URL path, remove extra '/'s and '.' path elements and collapse
    any '..' references, returning the collapsed path.
    Implements something akin to RFC-2396 5.2 step 6 to parse relative paths.
    The utility of this function is limited to the is_cgi method and helps
    prevent some security attacks.
    Returns: The reconstituted path as a single string, always starting
    with a '/'.
Raises: IndexError if too many '..' occur within the path.
"""
# Similar to os.path.split(os.path.normpath(path)) but specific to URL
# path semantics rather than local operating system semantics.
path_parts = path.split('/')
head_parts = []
for part in path_parts[:-1]:
if part == '..':
head_parts.pop() # IndexError if more '..' than prior parts
elif part and part != '.':
head_parts.append( part )
if path_parts:
tail_part = path_parts.pop()
if tail_part:
if tail_part == '..':
head_parts.pop()
tail_part = ''
elif tail_part == '.':
tail_part = ''
else:
tail_part = ''
splitpath = ('/' + '/'.join(head_parts), tail_part)
collapsed_path = "/".join(splitpath)
return collapsed_path
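# Illustrative results (assumption, not part of the original module):
#   _url_collapse_path('/cgi-bin//nested/../test.py') == '/cgi-bin/test.py'
#   _url_collapse_path('/..')   # raises IndexError (too many '..' elements)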
nobody = None
def nobody_uid():
"""Internal routine to get nobody's uid"""
global nobody
if nobody:
return nobody
try:
import pwd
except ImportError:
return -1
try:
nobody = pwd.getpwnam('nobody')[2]
except KeyError:
nobody = 1 + max(map(lambda x: x[2], pwd.getpwall()))
return nobody
def executable(path):
"""Test for executable file."""
try:
st = os.stat(path)
except os.error:
return False
return st.st_mode & 0111 != 0
def test(HandlerClass = CGIHTTPRequestHandler,
ServerClass = BaseHTTPServer.HTTPServer):
SimpleHTTPServer.test(HandlerClass, ServerClass)
if __name__ == '__main__':
test()
| mit | 7,952,073,705,643,130,000 | 8,763,282,020,584,952,000 | 33.751323 | 79 | 0.533521 | false |
betatim/osf-cli | osfclient/__main__.py | 1 | 4648 | from __future__ import print_function
import sys
import six
import argparse
from textwrap import dedent
from .cli import clone, fetch, list_, remove, upload, init
from . import __version__
def main():
description = dedent("""
    osf is a command-line program to upload and download
files from osf.io.
These are common osf commands:
init Set up a .osfcli.config file
clone Copy all files from all storages of a project
fetch Fetch an individual file from a project
list List all files from all storages for a project
upload Upload a new file to an existing project
remove Remove a file from a project's storage
See 'osf <command> -h' to read about a specific command.
""")
parser = argparse.ArgumentParser(
description=description,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('-u', '--username', default=None,
help=('OSF username. Provide your password via '
'OSF_PASSWORD environment variable'))
parser.add_argument('-p', '--project', default=None,
help='OSF project ID')
parser.add_argument('-v', '--version', action='version',
version='%(prog)s {}'.format(__version__))
# dest=command stores the name of the command in a variable, this is
# used later on to retrieve the correct sub-parser
subparsers = parser.add_subparsers(dest='command')
# Clone project
clone_parser = subparsers.add_parser(
'clone', description=clone.__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter)
clone_parser.set_defaults(func=clone)
clone_parser.add_argument('output', help='Write files to this directory',
default=None, nargs='?')
def _add_subparser(name, description, aliases=[]):
options = {
'description': description,
'formatter_class': argparse.RawDescriptionHelpFormatter,
}
if six.PY3:
options['aliases'] = aliases
return subparsers.add_parser(name, **options)
init_parser = _add_subparser('init', init.__doc__)
init_parser.set_defaults(func=init)
# Fetch an individual file
fetch_parser = _add_subparser('fetch', fetch.__doc__)
fetch_parser.set_defaults(func=fetch)
fetch_parser.add_argument('-f', '--force',
help='Force overwriting of local file',
action='store_true')
fetch_parser.add_argument('remote', help='Remote path',
default=None)
fetch_parser.add_argument('local', help='Local path',
default=None, nargs='?')
# List all files in a project
    list_parser = _add_subparser('list', list_.__doc__, aliases=['ls'])
list_parser.set_defaults(func=list_)
# Upload a single file or a directory tree
upload_parser = _add_subparser('upload', upload.__doc__)
upload_parser.set_defaults(func=upload)
upload_parser.add_argument('-f', '--force',
help='Force overwriting of remote file',
action='store_true')
upload_parser.add_argument('-r', '--recursive',
help='Recursively upload entire directories',
action='store_true')
upload_parser.add_argument('source', help='Local file')
upload_parser.add_argument('destination', help='Remote file path')
# Remove a single file
remove_parser = _add_subparser('remove', remove.__doc__, aliases=['rm'])
remove_parser.set_defaults(func=remove)
remove_parser.add_argument('target', help='Remote file path')
# Python2 argparse exits with an error when no command is given
if six.PY2 and len(sys.argv) == 1:
parser.print_help()
return
args = parser.parse_args()
if 'func' in args:
# give functions a chance to influence the exit code
# this setup is so we can print usage for the sub command
# even if there was an error further down
try:
exit_code = args.func(args)
except SystemExit as e:
exit_code = e.code
if exit_code is not None:
sub_parser = subparsers.choices[args.command]
sub_parser.print_usage(file=sys.stderr)
print('{} {}: error:'.format(parser.prog, args.command),
file=sys.stderr, end=' ')
sys.exit(exit_code)
else:
parser.print_help()
if __name__ == "__main__":
main()
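# Example invocations (illustrative, not part of the original module):
#   osf -p <project_id> list
#   osf -p <project_id> fetch remote/path.txt local.txt
#   OSF_PASSWORD=... osf -u user@example.com -p <project_id> upload local.txt remote.txt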
| bsd-3-clause | -38,943,368,128,454,020 | -9,084,639,725,829,067,000 | 37.733333 | 77 | 0.598322 | false |
culot/homebooking | storage.py | 1 | 15856 | import logging as log
import sqlite3
class Database():
"Class to manage interactions with database"
def __init__(self):
self.connection = sqlite3.connect('book.db')
self.connection.row_factory = sqlite3.Row
try:
self.sanity_checks()
except Exception:
self.create_schema()
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
self.connection.close()
def sanity_checks(self):
cursor = self.connection.cursor()
cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='BOOKINGS'")
if cursor.fetchone() == None:
log.info('Missing database schema, creating it')
raise RuntimeError('Missing schema')
def create_schema(self):
cursor = self.connection.cursor()
cursor.executescript('''
CREATE TABLE ROOMS (ROOM_ID INTEGER PRIMARY KEY,
NAME TEXT UNIQUE NOT NULL);
CREATE UNIQUE INDEX IDX_ROOMS ON ROOMS(NAME);
CREATE TABLE GUESTS (GUEST_ID INTEGER PRIMARY KEY,
NICKNAME TEXT UNIQUE NOT NULL,
FIRST_NAME TEXT,
LAST_NAME TEXT);
CREATE UNIQUE INDEX IDX_GUESTS ON GUESTS(NICKNAME);
CREATE TABLE FEATURES (FEATURE_ID INTEGER PRIMARY KEY,
NAME TEXT UNIQUE NOT NULL,
DESC TEXT);
CREATE UNIQUE INDEX IDX_FEATURES ON FEATURES(NAME);
CREATE TABLE BEDS (BED_ID INTEGER PRIMARY KEY,
NAME TEXT UNIQUE NOT NULL,
CAPACITY INTEGER NOT NULL,
FEATURE_ID INTEGER,
ROOM_ID INTEGER,
FOREIGN KEY(FEATURE_ID) REFERENCES FEATURES(FEATURE_ID),
FOREIGN KEY(ROOM_ID) REFERENCES ROOMS(ROOM_ID));
CREATE UNIQUE INDEX IDX_BEDS ON BEDS(NAME);
CREATE TABLE BOOKINGS (BOOKING_ID INTEGER PRIMARY KEY,
GUEST_ID INTEGER NOT NULL,
BED_ID INTEGER NOT NULL,
DATE TEXT NOT NULL,
FOREIGN KEY(GUEST_ID) REFERENCES GUESTS(GUEST_ID),
FOREIGN KEY(BED_ID) REFERENCES BEDS(BED_ID));
CREATE UNIQUE INDEX IDX_BOOKINGS ON BOOKINGS(GUEST_ID, BED_ID, DATE);
''')
def add_room(self, name):
log.info('Adding room [%s] to the database', name)
cursor = self.connection.cursor()
cursor.execute("INSERT INTO ROOMS (NAME) VALUES (:ROOM_NAME)", {"ROOM_NAME": name})
self.connection.commit()
def add_feature(self, name, desc = None):
log.info('Adding feature [%s] to the database', name)
cursor = self.connection.cursor()
cursor.execute("INSERT INTO FEATURES (NAME, DESC) VALUES (:FEATURE_NAME,:FEATURE_DESC)",
{"FEATURE_NAME": name, "FEATURE_DESC": desc})
self.connection.commit()
def add_guest(self, nick, first_name = None, last_name = None):
log.info('Adding guest [%s] to the database', nick)
cursor = self.connection.cursor()
cursor.execute("INSERT INTO GUESTS (NICKNAME, FIRST_NAME, LAST_NAME) VALUES (:NICKNAME,:FIRST_NAME,:LAST_NAME)",
{"NICKNAME": nick, "FIRST_NAME": first_name, "LAST_NAME": last_name})
self.connection.commit()
def add_bed(self, name, capacity, room, feature = None):
log.info('Adding bed [%s] to the database', name)
        # First check that the room and feature exist and fetch the corresponding ids
try:
room_id = self._get_room_id(room)
except ValueError as e:
print str(e)
log.warn(str(e))
exit(1)
if feature:
try:
feature_id = self._get_feature_id(feature)
except ValueError as e:
print str(e)
log.warn(str(e))
exit(1)
else:
feature_id = None
cursor = self.connection.cursor()
        cursor.execute(
            "INSERT INTO BEDS (NAME,CAPACITY,FEATURE_ID,ROOM_ID) "
            "VALUES (:NAME,:CAPACITY,:FEATURE,:ROOM)",
            {"NAME": name, "CAPACITY": capacity,
             "FEATURE": feature_id, "ROOM": room_id})
self.connection.commit()
def register(self, guest, bed, date):
log.info('Registering guest [%s] for bed [%s] on [%s]')
try:
guest_id = self._get_guest_id(guest)
except ValueError as e:
print str(e)
log.warn(str(e))
exit(1)
try:
bed_id = self._get_bed_id(bed)
except ValueError as e:
print str(e)
log.warn(str(e))
exit(1)
cursor = self.connection.cursor()
cursor.execute("INSERT INTO BOOKINGS (GUEST_ID,BED_ID,DATE) VALUES (:GUEST,:BED,:DATE)",{"GUEST":guest_id,"BED":bed_id,"DATE":date})
self.connection.commit()
def unregister(self, guest, bed, date):
log.info('Unregistering guest [%s] for bed [%s] on [%s]')
try:
booking_id = self._get_booking_id(guest, bed, date)
except ValueError as e:
print str(e)
log.warn(str(e))
exit(1)
cursor = self.connection.cursor()
cursor.execute("DELETE FROM BOOKINGS WHERE BOOKING_ID = :ID",{"ID":booking_id})
self.connection.commit()
def remove_bed(self, name):
log.info('Removing bed [%s] from the database', name)
cursor = self.connection.cursor()
try:
bed_id = self._get_bed_id(name)
except ValueError as e:
print str(e)
log.warn(str(e))
exit(1)
# Now check if bookings exist for this bed, in which case they must be removed first
cursor.execute("SELECT COUNT(*) AS NB_BOOKINGS FROM BOOKINGS WHERE BED_ID = :ID",{"ID":bed_id})
resultset = cursor.fetchone()
if resultset != None:
nb_bookings = resultset["NB_BOOKINGS"]
if nb_bookings != 0:
print "Some bookings exist for this bed, please remove them first!"
log.warn('Bookings registered for bed [%s], can\'t remove it', name)
exit(1)
cursor.execute("DELETE FROM BEDS WHERE BED_ID = :ID",{"ID":bed_id})
self.connection.commit()
def remove_feature(self, name):
log.info('Removing feature [%s] from the database', name)
cursor = self.connection.cursor()
try:
feature_id = self._get_feature_id(name)
except ValueError as e:
print str(e)
log.warn(str(e))
exit(1)
# Now check if beds have this feature, in which case they must be removed first
cursor.execute("SELECT COUNT(*) AS NB_BEDS FROM BEDS WHERE FEATURE_ID = :ID",{"ID":feature_id})
resultset = cursor.fetchone()
if resultset != None:
nb_beds = resultset["NB_BEDS"]
if nb_beds != 0:
print "Some beds are registered with this feature, please remove them first!"
log.warn('Beds registered with feature [%s], can\'t remove it', name)
exit(1)
cursor.execute("DELETE FROM FEATURES WHERE FEATURE_ID = :ID",{"ID":feature_id})
self.connection.commit()
def remove_guest(self, nickname):
log.info('Removing guest [%s] from the database', nickname)
cursor = self.connection.cursor()
try:
guest_id = self._get_guest_id(nickname)
except ValueError as e:
print str(e)
log.warn(str(e))
exit(1)
# Now check if bookings exist for this guest, in which case they must be removed first
cursor.execute("SELECT COUNT(*) AS NB_BOOKINGS FROM BOOKINGS WHERE GUEST_ID = :ID",{"ID":guest_id})
resultset = cursor.fetchone()
if resultset != None:
nb_bookings = resultset["NB_BOOKINGS"]
if nb_bookings != 0:
print "Some bookings exist for this guest, please remove them first!"
log.warn('Bookings registered for guest [%s], can\'t remove it', nickname)
exit(1)
cursor.execute("DELETE FROM GUESTS WHERE GUEST_ID = :ID",{"ID":guest_id})
self.connection.commit()
def remove_room(self, name):
log.info('Removing room [%s] from the database', name)
cursor = self.connection.cursor()
try:
room_id = self._get_room_id(name)
except ValueError as e:
print str(e)
log.warn(str(e))
exit(1)
# Now check if beds are found for this room, in which case they must be removed first
cursor.execute("SELECT COUNT(*) AS NB_BEDS FROM BEDS WHERE ROOM_ID = :ID",{"ID":room_id})
resultset = cursor.fetchone()
if resultset != None:
nb_beds = resultset["NB_BEDS"]
if nb_beds != 0:
print "Some beds are registered for this room, please remove them first!"
log.warn('Beds registered for room [%s], can\'t remove it', name)
exit(1)
cursor.execute("DELETE FROM ROOMS WHERE ROOM_ID = :ID",{"ID":room_id})
self.connection.commit()
def list_room(self, name):
log.info('Listing bookings for room [%s]', name)
try:
room_id = self._get_room_id(name)
except ValueError as e:
print str(e)
log.warn(str(e))
exit(1)
query = '''
SELECT GUESTS.NICKNAME, BEDS.NAME, BOOKINGS.DATE
FROM BOOKINGS
JOIN GUESTS ON (GUESTS.GUEST_ID = BOOKINGS.GUEST_ID)
JOIN BEDS ON (BEDS.BED_ID = BOOKINGS.BED_ID)
WHERE BEDS.BED_ID IN
(SELECT BED_ID FROM BEDS WHERE ROOM_ID = :ROOM_ID)
'''
cursor = self.connection.cursor()
cursor.execute(query,{"ROOM_ID": room_id})
rows = cursor.fetchall()
for row in rows:
print "Guest [%s], Bed [%s], Date [%s]" % (row["NICKNAME"], row["NAME"], row["DATE"])
def list_bed(self, name):
log.info('Listing bookings for bed [%s]', name)
try:
bed_id = self._get_bed_id(name)
except ValueError as e:
print str(e)
log.warn(str(e))
exit(1)
query = '''
SELECT GUESTS.NICKNAME, BEDS.NAME, BOOKINGS.DATE
FROM BOOKINGS
JOIN GUESTS ON (GUESTS.GUEST_ID = BOOKINGS.GUEST_ID)
JOIN BEDS ON (BEDS.BED_ID = BOOKINGS.BED_ID)
WHERE BEDS.BED_ID = :BED_ID
'''
cursor = self.connection.cursor()
cursor.execute(query,{"BED_ID": bed_id})
rows = cursor.fetchall()
for row in rows:
print "Guest [%s], Bed [%s], Date [%s]" % (row["NICKNAME"], row["NAME"], row["DATE"])
def list_guest(self, nick):
log.info('Listing bookings for guest [%s]', nick)
try:
guest_id = self._get_guest_id(nick)
except ValueError as e:
print str(e)
log.warn(str(e))
exit(1)
query = '''
SELECT GUESTS.NICKNAME, BEDS.NAME, BOOKINGS.DATE
FROM BOOKINGS
JOIN GUESTS ON (GUESTS.GUEST_ID = BOOKINGS.GUEST_ID)
JOIN BEDS ON (BEDS.BED_ID = BOOKINGS.BED_ID)
WHERE GUESTS.GUEST_ID = :GUEST_ID
'''
cursor = self.connection.cursor()
cursor.execute(query,{"GUEST_ID": guest_id})
rows = cursor.fetchall()
for row in rows:
print "Guest [%s], Bed [%s], Date [%s]" % (row["NICKNAME"], row["NAME"], row["DATE"])
def list_date(self, date):
log.info('Listing bookings for date [%s]', date)
query = '''
SELECT GUESTS.NICKNAME, BEDS.NAME, BOOKINGS.DATE
FROM BOOKINGS
JOIN GUESTS ON (GUESTS.GUEST_ID = BOOKINGS.GUEST_ID)
JOIN BEDS ON (BEDS.BED_ID = BOOKINGS.BED_ID)
WHERE DATE = :DATE
'''
cursor = self.connection.cursor()
cursor.execute(query,{"DATE": date})
rows = cursor.fetchall()
for row in rows:
print "Guest [%s], Bed [%s], Date [%s]" % (row["NICKNAME"], row["NAME"], row["DATE"])
def search_date(self, date):
log.info('Searching availabilities for [%s]', date)
query = '''
SELECT DISTINCT BEDS.NAME
FROM BEDS
WHERE NOT EXISTS
(SELECT * FROM BOOKINGS WHERE BED_ID = BEDS.BED_ID AND DATE = :DATE)
'''
cursor = self.connection.cursor()
cursor.execute(query,{"DATE": date})
rows = cursor.fetchall()
for row in rows:
print "Bed [%s]" % row["NAME"]
def show_entity(self, entity):
print "%s:" % entity
cursor = self.connection.cursor()
query = "select * from '%s'" % entity
cursor.execute(query)
rows = cursor.fetchall()
for row in rows:
print "\t",
print row
print "\n"
def stats_number(self):
log.info('Gathering database statistics')
cursor = self.connection.cursor()
cursor.execute("select name from sqlite_master where type='table'")
rows = cursor.fetchall()
for row in rows:
table = row["NAME"]
query = "select count(*) as NUM from '%s'" % table
cursor.execute(query)
count = cursor.fetchone()
print "%s | %d" % (table, count["NUM"])
def dump(self):
for line in self.connection.iterdump():
print "%s\n" % line
def _get_room_id(self, room_name):
cursor = self.connection.cursor()
cursor.execute("SELECT ROOM_ID FROM ROOMS WHERE NAME=:NAME",{"NAME": room_name})
resultset = cursor.fetchone()
if resultset == None:
            raise ValueError('Room [%s] not found' % room_name)
return resultset["ROOM_ID"]
def _get_guest_id(self, guest_nick):
cursor = self.connection.cursor()
cursor.execute("SELECT GUEST_ID FROM GUESTS WHERE NICKNAME=:NAME",{"NAME": guest_nick})
resultset = cursor.fetchone()
if resultset == None:
raise ValueError('Guest [%s] not found' % guest_nick)
return resultset["GUEST_ID"]
def _get_bed_id(self, bed_name):
cursor = self.connection.cursor()
cursor.execute("SELECT BED_ID FROM BEDS WHERE NAME=:NAME",{"NAME": bed_name})
resultset = cursor.fetchone()
if resultset == None:
raise ValueError('Bed [%s] not found' % bed_name)
return resultset["BED_ID"]
def _get_feature_id(self, feature_name):
cursor = self.connection.cursor()
cursor.execute("SELECT FEATURE_ID FROM FEATURES WHERE NAME=:NAME",{"NAME": feature_name})
resultset = cursor.fetchone()
if resultset == None:
raise ValueError("Feature [%s] not found" % feature_name)
return resultset["FEATURE_ID"]
def _get_booking_id(self, guest, bed, date):
guest_id = self._get_guest_id(guest)
bed_id = self._get_bed_id(bed)
cursor = self.connection.cursor()
cursor.execute("SELECT BOOKING_ID FROM BOOKINGS WHERE GUEST_ID=:GUEST_ID AND BED_ID=:BED_ID AND DATE=:DATE",{"GUEST_ID":guest_id,"BED_ID":bed_id,"DATE":date})
resultset = cursor.fetchone()
if resultset == None:
raise ValueError("Booking for guest [%s] and bed [%s] on [%s] not found" % (guest,bed,date))
return resultset["BOOKING_ID"]
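# Minimal usage sketch (illustrative, not part of the original module);
# the method names and arguments follow the definitions above:
#   with Database() as db:
#       db.add_room('blue-room')
#       db.add_guest('alice')
#       db.add_bed('bunk-1', capacity=2, room='blue-room')
#       db.register('alice', 'bunk-1', '2015-08-01')
#       db.list_date('2015-08-01')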
| bsd-3-clause | 7,414,673,533,239,565,000 | 3,786,535,815,705,555,500 | 38.344913 | 187 | 0.550013 | false |
rationalAgent/edx-platform-custom | cms/djangoapps/contentstore/views/requests.py | 2 | 1047 | from django.http import HttpResponse
from mitxmako.shortcuts import render_to_string, render_to_response
__all__ = ['edge', 'event', 'landing']
# points to the temporary course landing page with log in and sign up
def landing(request, org, course, coursename):
return render_to_response('temp-course-landing.html', {})
# points to the temporary edge page
def edge(request):
return render_to_response('university_profiles/edge.html', {})
def event(request):
'''
A noop to swallow the analytics call so that cms methods don't spook and poor developers looking at
console logs don't get distracted :-)
'''
return HttpResponse(status=204)
def render_from_lms(template_name, dictionary, context=None, namespace='main'):
"""
Render a template using the LMS MAKO_TEMPLATES
"""
return render_to_string(template_name, dictionary, context, namespace="lms." + namespace)
def _xmodule_recurse(item, action):
for child in item.get_children():
_xmodule_recurse(child, action)
action(item)
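# Illustrative use of _xmodule_recurse (assumption, not part of the original
# module): collect an item and all of its descendants, children first.
#   collected = []
#   _xmodule_recurse(course_item, collected.append)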
| agpl-3.0 | -1,853,490,630,972,241,200 | -5,495,695,832,559,448,000 | 28.083333 | 103 | 0.707736 | false |
eli261/jumpserver | apps/orgs/mixins/api.py | 1 | 1880 | # -*- coding: utf-8 -*-
#
from django.shortcuts import get_object_or_404
from rest_framework.viewsets import ModelViewSet
from rest_framework_bulk import BulkModelViewSet
from common.mixins import IDInCacheFilterMixin
from ..utils import set_to_root_org
from ..models import Organization
__all__ = [
'RootOrgViewMixin', 'OrgMembershipModelViewSetMixin', 'OrgModelViewSet',
'OrgBulkModelViewSet',
]
class RootOrgViewMixin:
def dispatch(self, request, *args, **kwargs):
set_to_root_org()
return super().dispatch(request, *args, **kwargs)
class OrgModelViewSet(IDInCacheFilterMixin, ModelViewSet):
def get_queryset(self):
return super().get_queryset().all()
class OrgBulkModelViewSet(IDInCacheFilterMixin, BulkModelViewSet):
def get_queryset(self):
queryset = super().get_queryset().all()
if hasattr(self, 'action') and self.action == 'list' and \
hasattr(self, 'serializer_class') and \
hasattr(self.serializer_class, 'setup_eager_loading'):
queryset = self.serializer_class.setup_eager_loading(queryset)
return queryset
def allow_bulk_destroy(self, qs, filtered):
return False
class OrgMembershipModelViewSetMixin:
org = None
membership_class = None
lookup_field = 'user'
lookup_url_kwarg = 'user_id'
http_method_names = ['get', 'post', 'delete', 'head', 'options']
def dispatch(self, request, *args, **kwargs):
self.org = get_object_or_404(Organization, pk=kwargs.get('org_id'))
return super().dispatch(request, *args, **kwargs)
def get_serializer_context(self):
context = super().get_serializer_context()
context['org'] = self.org
return context
def get_queryset(self):
queryset = self.membership_class.objects.filter(organization=self.org)
return queryset
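# Illustrative subclass (assumption, not part of the original module; the
# serializer and membership model names below are hypothetical):
#   class OrgMembershipUsersViewSet(OrgMembershipModelViewSetMixin, ModelViewSet):
#       serializer_class = OrgMembershipUserSerializer
#       membership_class = Organization.members.through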
| gpl-2.0 | 7,896,203,303,827,761,000 | -2,105,834,852,883,638,500 | 30.864407 | 78 | 0.675 | false |
jsteemann/arangodb | 3rdParty/V8-4.3.61/third_party/python_26/Lib/lib-tk/tkColorChooser.py | 149 | 1786 | # tk common colour chooser dialogue
#
# this module provides an interface to the native color dialogue
# available in Tk 4.2 and newer.
#
# written by Fredrik Lundh, May 1997
#
# fixed initialcolor handling in August 1998
#
#
# options (all have default values):
#
# - initialcolor: colour to mark as selected when dialog is displayed
# (given as an RGB triplet or a Tk color string)
#
# - parent: which window to place the dialog on top of
#
# - title: dialog title
#
from tkCommonDialog import Dialog
#
# color chooser class
class Chooser(Dialog):
"Ask for a color"
command = "tk_chooseColor"
def _fixoptions(self):
try:
# make sure initialcolor is a tk color string
color = self.options["initialcolor"]
if isinstance(color, tuple):
# assume an RGB triplet
self.options["initialcolor"] = "#%02x%02x%02x" % color
except KeyError:
pass
def _fixresult(self, widget, result):
        # result can be several things: an empty tuple, an empty string or
        # a Tcl_Obj, so this somewhat weird check handles that
if not result or not str(result):
return None, None # canceled
# to simplify application code, the color chooser returns
# an RGB tuple together with the Tk color string
r, g, b = widget.winfo_rgb(result)
return (r/256, g/256, b/256), str(result)
#
# convenience stuff
def askcolor(color = None, **options):
"Ask for a color"
if color:
options = options.copy()
options["initialcolor"] = color
return Chooser(**options).show()
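# Example (illustrative, not part of the original module):
#   rgb, colorstring = askcolor(color="#80ff00", title="Pick a colour")
# which yields ((r, g, b), "#rrggbb"), or (None, None) if the dialog is
# cancelled.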
# --------------------------------------------------------------------
# test stuff
if __name__ == "__main__":
print "color", askcolor()
| apache-2.0 | -7,481,494,730,395,507,000 | 5,454,434,354,855,997,000 | 23.805556 | 70 | 0.606943 | false |
CyanogenMod/android_external_chromium_org_third_party_skia | platform_tools/android/bin/download_utils.py | 149 | 8464 | #!/usr/bin/python
# Copyright (c) 2012 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A library to assist automatically downloading files.
This library is used by scripts that download tarballs, zipfiles, etc. as part
of the build process.
"""
import hashlib
import http_download
import os.path
import re
import shutil
import sys
import time
import urllib2
SOURCE_STAMP = 'SOURCE_URL'
HASH_STAMP = 'SOURCE_SHA1'
# Designed to handle more general inputs than sys.platform because the platform
# name may come from the command line.
PLATFORM_COLLAPSE = {
'windows': 'windows',
'win32': 'windows',
'cygwin': 'windows',
'linux': 'linux',
'linux2': 'linux',
'linux3': 'linux',
'darwin': 'mac',
'mac': 'mac',
}
ARCH_COLLAPSE = {
'i386' : 'x86',
'i686' : 'x86',
'x86_64': 'x86',
'armv7l': 'arm',
}
class HashError(Exception):
def __init__(self, download_url, expected_hash, actual_hash):
self.download_url = download_url
self.expected_hash = expected_hash
self.actual_hash = actual_hash
def __str__(self):
return 'Got hash "%s" but expected hash "%s" for "%s"' % (
self.actual_hash, self.expected_hash, self.download_url)
def PlatformName(name=None):
if name is None:
name = sys.platform
return PLATFORM_COLLAPSE[name]
def ArchName(name=None):
if name is None:
if PlatformName() == 'windows':
# TODO(pdox): Figure out how to auto-detect 32-bit vs 64-bit Windows.
name = 'i386'
else:
import platform
name = platform.machine()
return ARCH_COLLAPSE[name]
def EnsureFileCanBeWritten(filename):
directory = os.path.dirname(filename)
if not os.path.exists(directory):
os.makedirs(directory)
def WriteData(filename, data):
EnsureFileCanBeWritten(filename)
f = open(filename, 'wb')
f.write(data)
f.close()
def WriteDataFromStream(filename, stream, chunk_size, verbose=True):
EnsureFileCanBeWritten(filename)
dst = open(filename, 'wb')
try:
while True:
data = stream.read(chunk_size)
if len(data) == 0:
break
dst.write(data)
if verbose:
# Indicate that we're still writing.
sys.stdout.write('.')
sys.stdout.flush()
finally:
if verbose:
sys.stdout.write('\n')
dst.close()
def DoesStampMatch(stampfile, expected, index):
try:
f = open(stampfile, 'r')
stamp = f.read()
f.close()
if stamp.split('\n')[index] == expected:
return "already up-to-date."
elif stamp.startswith('manual'):
return "manual override."
return False
except IOError:
return False
def WriteStamp(stampfile, data):
EnsureFileCanBeWritten(stampfile)
f = open(stampfile, 'w')
f.write(data)
f.close()
def StampIsCurrent(path, stamp_name, stamp_contents, min_time=None, index=0):
stampfile = os.path.join(path, stamp_name)
# Check if the stampfile is older than the minimum last mod time
if min_time:
try:
stamp_time = os.stat(stampfile).st_mtime
if stamp_time <= min_time:
return False
except OSError:
return False
return DoesStampMatch(stampfile, stamp_contents, index)
def WriteSourceStamp(path, url):
stampfile = os.path.join(path, SOURCE_STAMP)
WriteStamp(stampfile, url)
def WriteHashStamp(path, hash_val):
hash_stampfile = os.path.join(path, HASH_STAMP)
WriteStamp(hash_stampfile, hash_val)
def Retry(op, *args):
# Windows seems to be prone to having commands that delete files or
  # directories fail. We currently do not have a complete understanding of why,
# and as a workaround we simply retry the command a few times.
# It appears that file locks are hanging around longer than they should. This
# may be a secondary effect of processes hanging around longer than they
# should. This may be because when we kill a browser sel_ldr does not exit
# immediately, etc.
  # Virus checkers can also accidentally prevent files from being deleted, but
# that shouldn't be a problem on the bots.
if sys.platform in ('win32', 'cygwin'):
count = 0
while True:
try:
op(*args)
break
except Exception:
sys.stdout.write("FAILED: %s %s\n" % (op.__name__, repr(args)))
count += 1
if count < 5:
sys.stdout.write("RETRY: %s %s\n" % (op.__name__, repr(args)))
time.sleep(pow(2, count))
else:
# Don't mask the exception.
raise
else:
op(*args)
def MoveDirCleanly(src, dst):
RemoveDir(dst)
MoveDir(src, dst)
def MoveDir(src, dst):
Retry(shutil.move, src, dst)
def RemoveDir(path):
if os.path.exists(path):
Retry(shutil.rmtree, path)
def RemoveFile(path):
if os.path.exists(path):
Retry(os.unlink, path)
def _HashFileHandle(fh):
"""sha1 of a file like object.
Arguments:
fh: file handle like object to hash.
Returns:
sha1 as a string.
"""
hasher = hashlib.sha1()
try:
while True:
data = fh.read(4096)
if not data:
break
hasher.update(data)
finally:
fh.close()
return hasher.hexdigest()
def HashFile(filename):
"""sha1 a file on disk.
Arguments:
filename: filename to hash.
Returns:
sha1 as a string.
"""
fh = open(filename, 'rb')
return _HashFileHandle(fh)
def HashUrlByDownloading(url):
"""sha1 the data at an url.
Arguments:
url: url to download from.
Returns:
sha1 of the data at the url.
"""
try:
fh = urllib2.urlopen(url)
except:
sys.stderr.write("Failed fetching URL: %s\n" % url)
raise
return _HashFileHandle(fh)
# Attempts to get the SHA1 hash of a file given a URL by looking for
# an adjacent file with a ".sha1hash" suffix. This saves having to
# download a large tarball just to get its hash. Otherwise, we fall
# back to downloading the main file.
def HashUrl(url):
hash_url = '%s.sha1hash' % url
try:
fh = urllib2.urlopen(hash_url)
data = fh.read(100)
fh.close()
except urllib2.HTTPError, exn:
if exn.code == 404:
return HashUrlByDownloading(url)
raise
else:
if not re.match('[0-9a-f]{40}\n?$', data):
raise AssertionError('Bad SHA1 hash file: %r' % data)
return data.strip()
def SyncURL(url, filename=None, stamp_dir=None, min_time=None,
hash_val=None, keep=False, verbose=False, stamp_index=0):
"""Synchronize a destination file with a URL
if the URL does not match the URL stamp, then we must re-download it.
  Arguments:
    url: the url to compare against and to download from
    filename: the file to create on download
    stamp_dir: the directory containing the URL stamp to check against
hash_val: if set, the expected hash which must be matched
verbose: prints out status as it runs
stamp_index: index within the stamp file to check.
Returns:
True if the file is replaced
False if the file is not replaced
Exception:
HashError: if the hash does not match
"""
assert url and filename
# If we are not keeping the tarball, or we already have it, we can
# skip downloading it for this reason. If we are keeping it,
# it must exist.
if keep:
tarball_ok = os.path.isfile(filename)
else:
tarball_ok = True
# If we don't need the tarball and the stamp_file matches the url, then
# we must be up to date. If the URL differs but the recorded hash matches
# the one we'll insist the tarball has, then that's good enough too.
# TODO(mcgrathr): Download the .sha1sum file first to compare with
# the cached hash, in case --file-hash options weren't used.
if tarball_ok and stamp_dir is not None:
if StampIsCurrent(stamp_dir, SOURCE_STAMP, url, min_time):
if verbose:
print '%s is already up to date.' % filename
return False
if (hash_val is not None and
StampIsCurrent(stamp_dir, HASH_STAMP, hash_val, min_time, stamp_index)):
if verbose:
print '%s is identical to the up to date file.' % filename
return False
if verbose:
print 'Updating %s\n\tfrom %s.' % (filename, url)
EnsureFileCanBeWritten(filename)
http_download.HttpDownload(url, filename)
if hash_val:
tar_hash = HashFile(filename)
if hash_val != tar_hash:
raise HashError(actual_hash=tar_hash, expected_hash=hash_val,
download_url=url)
return True
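# Illustrative usage (assumption, not part of the original module):
#   url = 'https://example.com/toolchain.tgz'
#   if SyncURL(url, filename='out/toolchain.tgz', stamp_dir='out',
#              keep=True, verbose=True):
#     WriteSourceStamp('out', url)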
| bsd-3-clause | 4,942,500,190,869,142,000 | -4,526,286,549,577,163,000 | 25.204334 | 80 | 0.662689 | false |
ChinaMassClouds/copenstack-server | openstack/src/horizon-2014.2/openstack_dashboard/dashboards/project/databases/tests.py | 7 | 14628 | # Copyright 2013 Mirantis Inc.
# Copyright 2013 Rackspace Hosting.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from django.core.urlresolvers import reverse
from django import http
from mox import IsA # noqa
from horizon import exceptions
from openstack_dashboard import api
from openstack_dashboard.test import helpers as test
from troveclient import common
INDEX_URL = reverse('horizon:project:databases:index')
LAUNCH_URL = reverse('horizon:project:databases:launch')
DETAILS_URL = reverse('horizon:project:databases:detail', args=['id'])
class DatabaseTests(test.TestCase):
@test.create_stubs(
{api.trove: ('instance_list', 'flavor_list')})
def test_index(self):
# Mock database instances
databases = common.Paginated(self.databases.list())
api.trove.instance_list(IsA(http.HttpRequest), marker=None)\
.AndReturn(databases)
# Mock flavors
api.trove.flavor_list(IsA(http.HttpRequest))\
.AndReturn(self.flavors.list())
self.mox.ReplayAll()
res = self.client.get(INDEX_URL)
self.assertTemplateUsed(res, 'project/databases/index.html')
# Check the Host column displaying ip or hostname
self.assertContains(res, '10.0.0.3')
self.assertContains(res, 'trove.instance-2.com')
@test.create_stubs(
{api.trove: ('instance_list', 'flavor_list')})
def test_index_flavor_exception(self):
# Mock database instances
databases = common.Paginated(self.databases.list())
api.trove.instance_list(IsA(http.HttpRequest), marker=None)\
.AndReturn(databases)
# Mock flavors
api.trove.flavor_list(IsA(http.HttpRequest))\
.AndRaise(self.exceptions.trove)
self.mox.ReplayAll()
res = self.client.get(INDEX_URL)
self.assertTemplateUsed(res, 'project/databases/index.html')
self.assertMessageCount(res, error=1)
@test.create_stubs(
{api.trove: ('instance_list',)})
def test_index_list_exception(self):
# Mock database instances
api.trove.instance_list(IsA(http.HttpRequest), marker=None)\
.AndRaise(self.exceptions.trove)
self.mox.ReplayAll()
res = self.client.get(INDEX_URL)
self.assertTemplateUsed(res, 'project/databases/index.html')
self.assertMessageCount(res, error=1)
@test.create_stubs(
{api.trove: ('instance_list', 'flavor_list')})
def test_index_pagination(self):
# Mock database instances
databases = self.databases.list()
last_record = databases[1]
databases = common.Paginated(databases, next_marker="foo")
api.trove.instance_list(IsA(http.HttpRequest), marker=None)\
.AndReturn(databases)
# Mock flavors
api.trove.flavor_list(IsA(http.HttpRequest))\
.AndReturn(self.flavors.list())
self.mox.ReplayAll()
res = self.client.get(INDEX_URL)
self.assertTemplateUsed(res, 'project/databases/index.html')
self.assertContains(
res, 'marker=' + last_record.id)
@test.create_stubs(
{api.trove: ('instance_list', 'flavor_list')})
def test_index_flavor_list_exception(self):
# Mocking instances.
databases = common.Paginated(self.databases.list())
api.trove.instance_list(
IsA(http.HttpRequest),
marker=None,
).AndReturn(databases)
# Mocking flavor list with raising an exception.
api.trove.flavor_list(
IsA(http.HttpRequest),
).AndRaise(self.exceptions.trove)
self.mox.ReplayAll()
res = self.client.get(INDEX_URL)
self.assertTemplateUsed(res, 'project/databases/index.html')
self.assertMessageCount(res, error=1)
@test.create_stubs({
api.trove: ('flavor_list', 'backup_list',
'datastore_list', 'datastore_version_list')})
def test_launch_instance(self):
api.trove.flavor_list(IsA(http.HttpRequest))\
.AndReturn(self.flavors.list())
api.trove.backup_list(IsA(http.HttpRequest))\
.AndReturn(self.database_backups.list())
# Mock datastores
api.trove.datastore_list(IsA(http.HttpRequest))\
.AndReturn(self.datastores.list())
# Mock datastore versions
api.trove.datastore_version_list(IsA(http.HttpRequest),
IsA(str)).AndReturn(self.datastore_versions.list())
self.mox.ReplayAll()
res = self.client.get(LAUNCH_URL)
self.assertTemplateUsed(res, 'project/databases/launch.html')
@test.create_stubs({api.trove: ('flavor_list',)})
def test_launch_instance_exception_on_flavors(self):
trove_exception = self.exceptions.nova
api.trove.flavor_list(IsA(http.HttpRequest)).AndRaise(trove_exception)
self.mox.ReplayAll()
toSuppress = ["openstack_dashboard.dashboards.project.databases."
"workflows.create_instance",
"horizon.workflows.base"]
# Suppress expected log messages in the test output
loggers = []
for cls in toSuppress:
logger = logging.getLogger(cls)
loggers.append((logger, logger.getEffectiveLevel()))
logger.setLevel(logging.CRITICAL)
try:
with self.assertRaises(exceptions.Http302):
self.client.get(LAUNCH_URL)
finally:
# Restore the previous log levels
for (log, level) in loggers:
log.setLevel(level)
@test.create_stubs({
api.trove: ('flavor_list', 'backup_list', 'instance_create',
'datastore_list', 'datastore_version_list'),
api.neutron: ('network_list',)})
def test_create_simple_instance(self):
api.trove.flavor_list(IsA(http.HttpRequest)).AndReturn(
self.flavors.list())
api.trove.backup_list(IsA(http.HttpRequest)).AndReturn(
self.database_backups.list())
# Mock datastores
api.trove.datastore_list(IsA(http.HttpRequest))\
.AndReturn(self.datastores.list())
# Mock datastore versions
api.trove.datastore_version_list(IsA(http.HttpRequest),
IsA(str)).AndReturn(self.datastore_versions.list())
api.neutron.network_list(IsA(http.HttpRequest),
tenant_id=self.tenant.id,
shared=False).AndReturn(
self.networks.list()[:1])
api.neutron.network_list(IsA(http.HttpRequest),
shared=True).AndReturn(
self.networks.list()[1:])
nics = [{"net-id": self.networks.first().id, "v4-fixed-ip": ''}]
# Actual create database call
api.trove.instance_create(
IsA(http.HttpRequest),
IsA(unicode),
IsA(int),
IsA(unicode),
databases=None,
datastore=IsA(unicode),
datastore_version=IsA(unicode),
restore_point=None,
users=None,
nics=nics).AndReturn(self.databases.first())
self.mox.ReplayAll()
post = {
'name': "MyDB",
'volume': '1',
'flavor': 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa',
'network': self.networks.first().id,
'datastore': 'mysql,5.5',
}
res = self.client.post(LAUNCH_URL, post)
self.assertRedirectsNoFollow(res, INDEX_URL)
@test.create_stubs({
api.trove: ('flavor_list', 'backup_list', 'instance_create',
'datastore_list', 'datastore_version_list'),
api.neutron: ('network_list',)})
def test_create_simple_instance_exception(self):
trove_exception = self.exceptions.nova
api.trove.flavor_list(IsA(http.HttpRequest)).AndReturn(
self.flavors.list())
api.trove.backup_list(IsA(http.HttpRequest)).AndReturn(
self.database_backups.list())
# Mock datastores
api.trove.datastore_list(IsA(http.HttpRequest))\
.AndReturn(self.datastores.list())
# Mock datastore versions
api.trove.datastore_version_list(IsA(http.HttpRequest),
IsA(str)).AndReturn(self.datastore_versions.list())
api.neutron.network_list(IsA(http.HttpRequest),
tenant_id=self.tenant.id,
shared=False).AndReturn(
self.networks.list()[:1])
api.neutron.network_list(IsA(http.HttpRequest),
shared=True).AndReturn(
self.networks.list()[1:])
nics = [{"net-id": self.networks.first().id, "v4-fixed-ip": ''}]
# Actual create database call
api.trove.instance_create(
IsA(http.HttpRequest),
IsA(unicode),
IsA(int),
IsA(unicode),
databases=None,
datastore=IsA(unicode),
datastore_version=IsA(unicode),
restore_point=None,
users=None,
nics=nics).AndRaise(trove_exception)
self.mox.ReplayAll()
post = {
'name': "MyDB",
'volume': '1',
'flavor': 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa',
'network': self.networks.first().id,
'datastore': 'mysql,5.5',
}
res = self.client.post(LAUNCH_URL, post)
self.assertRedirectsNoFollow(res, INDEX_URL)
@test.create_stubs(
{api.trove: ('instance_get', 'flavor_get',)})
def _test_details(self, database, with_designate=False):
api.trove.instance_get(IsA(http.HttpRequest), IsA(unicode))\
.AndReturn(database)
api.trove.flavor_get(IsA(http.HttpRequest), IsA(str))\
.AndReturn(self.flavors.first())
self.mox.ReplayAll()
res = self.client.get(DETAILS_URL)
self.assertTemplateUsed(res, 'project/databases/detail.html')
if with_designate:
self.assertContains(res, database.hostname)
else:
self.assertContains(res, database.ip[0])
def test_details_with_ip(self):
database = self.databases.first()
self._test_details(database, with_designate=False)
def test_details_with_hostname(self):
database = self.databases.list()[1]
self._test_details(database, with_designate=True)
@test.create_stubs(
{api.trove: ('instance_get', 'flavor_get', 'users_list',
'user_list_access', 'user_delete')})
def test_user_delete(self):
database = self.databases.first()
user = self.database_users.first()
user_db = self.database_user_dbs.first()
database_id = database.id
# Instead of using the user's ID, the api uses the user's name. BOOO!
user_id = user.name
# views.py: DetailView.get_data
api.trove.instance_get(IsA(http.HttpRequest), IsA(unicode))\
.AndReturn(database)
api.trove.flavor_get(IsA(http.HttpRequest), IsA(str))\
.AndReturn(self.flavors.first())
# tabs.py: UserTab.get_user_data
api.trove.users_list(IsA(http.HttpRequest),
database_id).AndReturn([user])
api.trove.user_list_access(IsA(http.HttpRequest),
database_id,
user_id).AndReturn([user_db])
# tables.py: DeleteUser.delete
api.trove.user_delete(IsA(http.HttpRequest),
database_id,
user_id).AndReturn(None)
self.mox.ReplayAll()
details_url = reverse('horizon:project:databases:detail',
args=[database_id])
url = details_url + '?tab=instance_details__users_tab'
action_string = u"users__delete__%s" % user_id
form_data = {'action': action_string}
res = self.client.post(url, form_data)
self.assertRedirectsNoFollow(res, url)
@test.create_stubs({
api.trove: ('instance_get', 'instance_resize_volume'),
})
def test_resize_volume(self):
database = self.databases.first()
database_id = database.id
database_size = database.volume.get('size')
# views.py: DetailView.get_data
api.trove.instance_get(IsA(http.HttpRequest), IsA(unicode))\
.AndReturn(database)
# forms.py: ResizeVolumeForm.handle
api.trove.instance_resize_volume(IsA(http.HttpRequest),
database_id,
IsA(int)).AndReturn(None)
self.mox.ReplayAll()
url = reverse('horizon:project:databases:resize_volume',
args=[database_id])
post = {
'instance_id': database_id,
'orig_size': database_size,
'new_size': database_size + 1,
}
res = self.client.post(url, post)
self.assertNoFormErrors(res)
self.assertRedirectsNoFollow(res, INDEX_URL)
@test.create_stubs({
api.trove: ('instance_get', 'instance_resize_volume'),
})
def test_resize_volume_bad_value(self):
database = self.databases.first()
database_id = database.id
database_size = database.volume.get('size')
# views.py: DetailView.get_data
api.trove.instance_get(IsA(http.HttpRequest), IsA(unicode))\
.AndReturn(database)
self.mox.ReplayAll()
url = reverse('horizon:project:databases:resize_volume',
args=[database_id])
post = {
'instance_id': database_id,
'orig_size': database_size,
'new_size': database_size,
}
res = self.client.post(url, post)
self.assertContains(res,
"New size for volume must be greater than current size.")
| gpl-2.0 | 783,860,120,055,760,600 | -1,052,410,782,923,582,300 | 36.316327 | 78 | 0.590033 | false |
beafus/Video-Player | project/server/lib/node_modules/npm/node_modules/node-gyp/gyp/tools/pretty_vcproj.py | 2637 | 9586 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Make the format of a vcproj really pretty.
This script normalizes and sorts an xml. It also fetches all the properties
inside linked vsprops and includes them explicitly in the vcproj.
It outputs the resulting xml to stdout.
"""
__author__ = 'nsylvain (Nicolas Sylvain)'
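# Typical invocation (illustrative, not part of the original script); extra
# key=value arguments are assumed to populate REPLACEMENTS for path fixing:
#   python pretty_vcproj.py project.vcproj '$(SolutionDir)=c:\src\' > pretty.vcproj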
import os
import sys
from xml.dom.minidom import parse
from xml.dom.minidom import Node
REPLACEMENTS = dict()
ARGUMENTS = None
class CmpTuple(object):
"""Compare function between 2 tuple."""
def __call__(self, x, y):
return cmp(x[0], y[0])
class CmpNode(object):
"""Compare function between 2 xml nodes."""
def __call__(self, x, y):
def get_string(node):
node_string = "node"
node_string += node.nodeName
if node.nodeValue:
node_string += node.nodeValue
if node.attributes:
# We first sort by name, if present.
node_string += node.getAttribute("Name")
all_nodes = []
for (name, value) in node.attributes.items():
all_nodes.append((name, value))
all_nodes.sort(CmpTuple())
for (name, value) in all_nodes:
node_string += name
node_string += value
return node_string
return cmp(get_string(x), get_string(y))
def PrettyPrintNode(node, indent=0):
if node.nodeType == Node.TEXT_NODE:
if node.data.strip():
print '%s%s' % (' '*indent, node.data.strip())
return
if node.childNodes:
node.normalize()
# Get the number of attributes
attr_count = 0
if node.attributes:
attr_count = node.attributes.length
# Print the main tag
if attr_count == 0:
print '%s<%s>' % (' '*indent, node.nodeName)
else:
print '%s<%s' % (' '*indent, node.nodeName)
all_attributes = []
for (name, value) in node.attributes.items():
all_attributes.append((name, value))
all_attributes.sort(CmpTuple())
for (name, value) in all_attributes:
print '%s %s="%s"' % (' '*indent, name, value)
print '%s>' % (' '*indent)
if node.nodeValue:
print '%s %s' % (' '*indent, node.nodeValue)
for sub_node in node.childNodes:
PrettyPrintNode(sub_node, indent=indent+2)
print '%s</%s>' % (' '*indent, node.nodeName)
def FlattenFilter(node):
"""Returns a list of all the node and sub nodes."""
node_list = []
if (node.attributes and
node.getAttribute('Name') == '_excluded_files'):
# We don't add the "_excluded_files" filter.
return []
for current in node.childNodes:
if current.nodeName == 'Filter':
node_list.extend(FlattenFilter(current))
else:
node_list.append(current)
return node_list
def FixFilenames(filenames, current_directory):
new_list = []
for filename in filenames:
if filename:
for key in REPLACEMENTS:
filename = filename.replace(key, REPLACEMENTS[key])
os.chdir(current_directory)
filename = filename.strip('"\' ')
if filename.startswith('$'):
new_list.append(filename)
else:
new_list.append(os.path.abspath(filename))
return new_list
def AbsoluteNode(node):
"""Makes all the properties we know about in this node absolute."""
if node.attributes:
for (name, value) in node.attributes.items():
if name in ['InheritedPropertySheets', 'RelativePath',
'AdditionalIncludeDirectories',
'IntermediateDirectory', 'OutputDirectory',
'AdditionalLibraryDirectories']:
# We want to fix up these paths
path_list = value.split(';')
new_list = FixFilenames(path_list, os.path.dirname(ARGUMENTS[1]))
node.setAttribute(name, ';'.join(new_list))
if not value:
node.removeAttribute(name)
def CleanupVcproj(node):
"""For each sub node, we call recursively this function."""
for sub_node in node.childNodes:
AbsoluteNode(sub_node)
CleanupVcproj(sub_node)
  # Normalize the node, and remove all extraneous whitespace.
for sub_node in node.childNodes:
if sub_node.nodeType == Node.TEXT_NODE:
sub_node.data = sub_node.data.replace("\r", "")
sub_node.data = sub_node.data.replace("\n", "")
sub_node.data = sub_node.data.rstrip()
# Fix all the semicolon separated attributes to be sorted, and we also
# remove the dups.
if node.attributes:
for (name, value) in node.attributes.items():
sorted_list = sorted(value.split(';'))
unique_list = []
for i in sorted_list:
if not unique_list.count(i):
unique_list.append(i)
node.setAttribute(name, ';'.join(unique_list))
if not value:
node.removeAttribute(name)
if node.childNodes:
node.normalize()
# For each node, take a copy, and remove it from the list.
node_array = []
while node.childNodes and node.childNodes[0]:
# Take a copy of the node and remove it from the list.
current = node.childNodes[0]
node.removeChild(current)
# If the child is a filter, we want to append all its children
# to this same list.
if current.nodeName == 'Filter':
node_array.extend(FlattenFilter(current))
else:
node_array.append(current)
# Sort the list.
node_array.sort(CmpNode())
# Insert the nodes in the correct order.
for new_node in node_array:
# But don't append empty tool node.
if new_node.nodeName == 'Tool':
if new_node.attributes and new_node.attributes.length == 1:
# This one was empty.
continue
if new_node.nodeName == 'UserMacro':
continue
node.appendChild(new_node)
def GetConfiguationNodes(vcproj):
#TODO(nsylvain): Find a better way to navigate the xml.
nodes = []
for node in vcproj.childNodes:
if node.nodeName == "Configurations":
for sub_node in node.childNodes:
if sub_node.nodeName == "Configuration":
nodes.append(sub_node)
return nodes
def GetChildrenVsprops(filename):
dom = parse(filename)
if dom.documentElement.attributes:
vsprops = dom.documentElement.getAttribute('InheritedPropertySheets')
return FixFilenames(vsprops.split(';'), os.path.dirname(filename))
return []
def SeekToNode(node1, child2):
# A text node does not have properties.
if child2.nodeType == Node.TEXT_NODE:
return None
# Get the name of the current node.
current_name = child2.getAttribute("Name")
if not current_name:
# There is no name. We don't know how to merge.
return None
# Look through all the nodes to find a match.
for sub_node in node1.childNodes:
if sub_node.nodeName == child2.nodeName:
name = sub_node.getAttribute("Name")
if name == current_name:
return sub_node
# No match. We give up.
return None
def MergeAttributes(node1, node2):
# No attributes to merge?
if not node2.attributes:
return
for (name, value2) in node2.attributes.items():
# Don't merge the 'Name' attribute.
if name == 'Name':
continue
value1 = node1.getAttribute(name)
if value1:
      # The attribute exists in the main node. If it's equal, we leave it
# untouched, otherwise we concatenate it.
if value1 != value2:
node1.setAttribute(name, ';'.join([value1, value2]))
else:
      # The attribute does not exist in the main node. We append this one.
node1.setAttribute(name, value2)
    # If the attribute was a property sheet attribute, we remove it, since
    # it is useless.
if name == 'InheritedPropertySheets':
node1.removeAttribute(name)
def MergeProperties(node1, node2):
MergeAttributes(node1, node2)
for child2 in node2.childNodes:
child1 = SeekToNode(node1, child2)
if child1:
MergeProperties(child1, child2)
else:
node1.appendChild(child2.cloneNode(True))
def main(argv):
"""Main function of this vcproj prettifier."""
global ARGUMENTS
ARGUMENTS = argv
  # Check that we have at least 1 parameter.
if len(argv) < 2:
print ('Usage: %s "c:\\path\\to\\vcproj.vcproj" [key1=value1] '
'[key2=value2]' % argv[0])
return 1
# Parse the keys
for i in range(2, len(argv)):
(key, value) = argv[i].split('=')
REPLACEMENTS[key] = value
# Open the vcproj and parse the xml.
dom = parse(argv[1])
  # First thing we need to do is find the Configuration Nodes and merge them
  # with the vsprops they include.
for configuration_node in GetConfiguationNodes(dom.documentElement):
# Get the property sheets associated with this configuration.
vsprops = configuration_node.getAttribute('InheritedPropertySheets')
# Fix the filenames to be absolute.
vsprops_list = FixFilenames(vsprops.strip().split(';'),
os.path.dirname(argv[1]))
# Extend the list of vsprops with all vsprops contained in the current
# vsprops.
for current_vsprops in vsprops_list:
vsprops_list.extend(GetChildrenVsprops(current_vsprops))
# Now that we have all the vsprops, we need to merge them.
for current_vsprops in vsprops_list:
MergeProperties(configuration_node,
parse(current_vsprops).documentElement)
# Now that everything is merged, we need to cleanup the xml.
CleanupVcproj(dom.documentElement)
  # Finally, we use the pretty xml function to print the vcproj back to the
# user.
#print dom.toprettyxml(newl="\n")
PrettyPrintNode(dom.documentElement)
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv))
| mit | 8,464,638,544,049,267,000 | 478,914,150,871,371,500 | 28.136778 | 76 | 0.654913 | false |
umangv/LitHub | LitHub/fbconnect/forms.py | 1 | 2017 | # Copyright 2011 Kalamazoo College Computer Science Club
# <kzoo-cs-board@googlegroups.com>
# This file is part of LitHub.
#
# LitHub is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# LitHub is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with LitHub. If not, see <http://www.gnu.org/licenses/>.
from django import forms
from django.contrib.auth.models import User
class FBRegisterForm(forms.Form):
username = forms.CharField(max_length=30)
def clean_username(self):
username = self.cleaned_data['username']
if User.objects.filter(username=username).count():
raise forms.ValidationError("This username has already been "+\
"taken. Please try again")
return self.cleaned_data['username']
class FBRegisterVerifyForm(forms.Form):
username = forms.CharField(max_length=30)
email = forms.EmailField(max_length=75)
def clean_username(self):
username = self.cleaned_data['username']
if User.objects.filter(username=username).count():
raise forms.ValidationError("This username has already been "
"taken. Please try again")
return self.cleaned_data['username']
def clean_email(self):
"""Ensures a valid K student email id is used. """
email_parts = self.cleaned_data['email'].split('@')
if email_parts[1].lower () != "kzoo.edu":
raise forms.ValidationError("Only kzoo.edu addresses are "
"allowed!")
return self.cleaned_data['email']
| gpl-3.0 | -1,834,058,218,253,365,800 | 9,008,473,689,790,774,000 | 40.163265 | 75 | 0.672286 | false |
indefini/linux | tools/perf/scripts/python/export-to-postgresql.py | 217 | 17826 | # export-to-postgresql.py: export perf data to a postgresql database
# Copyright (c) 2014, Intel Corporation.
#
# This program is free software; you can redistribute it and/or modify it
# under the terms and conditions of the GNU General Public License,
# version 2, as published by the Free Software Foundation.
#
# This program is distributed in the hope it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
import os
import sys
import struct
import datetime
# To use this script you will need to have installed the package python-pyside, which
# provides LGPL-licensed Python bindings for Qt. You will also need the package
# libqt4-sql-psql for Qt postgresql support.
#
# The script assumes postgresql is running on the local machine and that the
# user has postgresql permissions to create databases. Examples of installing
# postgresql and adding such a user are:
#
# fedora:
#
# $ sudo yum install postgresql postgresql-server python-pyside qt-postgresql
# $ sudo su - postgres -c initdb
# $ sudo service postgresql start
# $ sudo su - postgres
# $ createuser <your user id here>
# Shall the new role be a superuser? (y/n) y
#
# ubuntu:
#
# $ sudo apt-get install postgresql
# $ sudo su - postgres
# $ createuser <your user id here>
# Shall the new role be a superuser? (y/n) y
#
# An example of using this script with Intel PT:
#
# $ perf record -e intel_pt//u ls
# $ perf script -s ~/libexec/perf-core/scripts/python/export-to-postgresql.py pt_example branches calls
# 2015-05-29 12:49:23.464364 Creating database...
# 2015-05-29 12:49:26.281717 Writing to intermediate files...
# 2015-05-29 12:49:27.190383 Copying to database...
# 2015-05-29 12:49:28.140451 Removing intermediate files...
# 2015-05-29 12:49:28.147451 Adding primary keys
# 2015-05-29 12:49:28.655683 Adding foreign keys
# 2015-05-29 12:49:29.365350 Done
#
# To browse the database, psql can be used e.g.
#
# $ psql pt_example
# pt_example=# select * from samples_view where id < 100;
# pt_example=# \d+
# pt_example=# \d+ samples_view
# pt_example=# \q
#
# An example of using the database is provided by the script
# call-graph-from-postgresql.py. Refer to that script for details.
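# As a further example, once a database has been populated with the 'calls'
# option, the longest calls can be listed with a query along these lines
# (a sketch that relies only on the 'calls' table created by this script):
#
# pt_example=# SELECT call_path_id, return_time - call_time AS duration
# pt_example-#     FROM calls ORDER BY duration DESC LIMIT 10;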
from PySide.QtSql import *
# Need to access PostgreSQL C library directly to use COPY FROM STDIN
from ctypes import *
libpq = CDLL("libpq.so.5")
PQconnectdb = libpq.PQconnectdb
PQconnectdb.restype = c_void_p
PQfinish = libpq.PQfinish
PQstatus = libpq.PQstatus
PQexec = libpq.PQexec
PQexec.restype = c_void_p
PQresultStatus = libpq.PQresultStatus
PQputCopyData = libpq.PQputCopyData
PQputCopyData.argtypes = [ c_void_p, c_void_p, c_int ]
PQputCopyEnd = libpq.PQputCopyEnd
PQputCopyEnd.argtypes = [ c_void_p, c_void_p ]
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
# These perf imports are not used at present
#from perf_trace_context import *
#from Core import *
perf_db_export_mode = True
perf_db_export_calls = False
def usage():
print >> sys.stderr, "Usage is: export-to-postgresql.py <database name> [<columns>] [<calls>]"
print >> sys.stderr, "where: columns 'all' or 'branches'"
print >> sys.stderr, " calls 'calls' => create calls table"
raise Exception("Too few arguments")
if (len(sys.argv) < 2):
usage()
dbname = sys.argv[1]
if (len(sys.argv) >= 3):
columns = sys.argv[2]
else:
columns = "all"
if columns not in ("all", "branches"):
usage()
branches = (columns == "branches")
if (len(sys.argv) >= 4):
if (sys.argv[3] == "calls"):
perf_db_export_calls = True
else:
usage()
output_dir_name = os.getcwd() + "/" + dbname + "-perf-data"
os.mkdir(output_dir_name)
def do_query(q, s):
if (q.exec_(s)):
return
raise Exception("Query failed: " + q.lastError().text())
print datetime.datetime.today(), "Creating database..."
db = QSqlDatabase.addDatabase('QPSQL')
query = QSqlQuery(db)
db.setDatabaseName('postgres')
db.open()
try:
do_query(query, 'CREATE DATABASE ' + dbname)
except:
os.rmdir(output_dir_name)
raise
query.finish()
query.clear()
db.close()
db.setDatabaseName(dbname)
db.open()
query = QSqlQuery(db)
do_query(query, 'SET client_min_messages TO WARNING')
do_query(query, 'CREATE TABLE selected_events ('
'id bigint NOT NULL,'
'name varchar(80))')
do_query(query, 'CREATE TABLE machines ('
'id bigint NOT NULL,'
'pid integer,'
'root_dir varchar(4096))')
do_query(query, 'CREATE TABLE threads ('
'id bigint NOT NULL,'
'machine_id bigint,'
'process_id bigint,'
'pid integer,'
'tid integer)')
do_query(query, 'CREATE TABLE comms ('
'id bigint NOT NULL,'
'comm varchar(16))')
do_query(query, 'CREATE TABLE comm_threads ('
'id bigint NOT NULL,'
'comm_id bigint,'
'thread_id bigint)')
do_query(query, 'CREATE TABLE dsos ('
'id bigint NOT NULL,'
'machine_id bigint,'
'short_name varchar(256),'
'long_name varchar(4096),'
'build_id varchar(64))')
do_query(query, 'CREATE TABLE symbols ('
'id bigint NOT NULL,'
'dso_id bigint,'
'sym_start bigint,'
'sym_end bigint,'
'binding integer,'
'name varchar(2048))')
do_query(query, 'CREATE TABLE branch_types ('
'id integer NOT NULL,'
'name varchar(80))')
if branches:
do_query(query, 'CREATE TABLE samples ('
'id bigint NOT NULL,'
'evsel_id bigint,'
'machine_id bigint,'
'thread_id bigint,'
'comm_id bigint,'
'dso_id bigint,'
'symbol_id bigint,'
'sym_offset bigint,'
'ip bigint,'
'time bigint,'
'cpu integer,'
'to_dso_id bigint,'
'to_symbol_id bigint,'
'to_sym_offset bigint,'
'to_ip bigint,'
'branch_type integer,'
'in_tx boolean)')
else:
do_query(query, 'CREATE TABLE samples ('
'id bigint NOT NULL,'
'evsel_id bigint,'
'machine_id bigint,'
'thread_id bigint,'
'comm_id bigint,'
'dso_id bigint,'
'symbol_id bigint,'
'sym_offset bigint,'
'ip bigint,'
'time bigint,'
'cpu integer,'
'to_dso_id bigint,'
'to_symbol_id bigint,'
'to_sym_offset bigint,'
'to_ip bigint,'
'period bigint,'
'weight bigint,'
'transaction bigint,'
'data_src bigint,'
'branch_type integer,'
'in_tx boolean)')
if perf_db_export_calls:
do_query(query, 'CREATE TABLE call_paths ('
'id bigint NOT NULL,'
'parent_id bigint,'
'symbol_id bigint,'
'ip bigint)')
do_query(query, 'CREATE TABLE calls ('
'id bigint NOT NULL,'
'thread_id bigint,'
'comm_id bigint,'
'call_path_id bigint,'
'call_time bigint,'
'return_time bigint,'
'branch_count bigint,'
'call_id bigint,'
'return_id bigint,'
'parent_call_path_id bigint,'
'flags integer)')
do_query(query, 'CREATE VIEW samples_view AS '
'SELECT '
'id,'
'time,'
'cpu,'
'(SELECT pid FROM threads WHERE id = thread_id) AS pid,'
'(SELECT tid FROM threads WHERE id = thread_id) AS tid,'
'(SELECT comm FROM comms WHERE id = comm_id) AS command,'
'(SELECT name FROM selected_events WHERE id = evsel_id) AS event,'
'to_hex(ip) AS ip_hex,'
'(SELECT name FROM symbols WHERE id = symbol_id) AS symbol,'
'sym_offset,'
'(SELECT short_name FROM dsos WHERE id = dso_id) AS dso_short_name,'
'to_hex(to_ip) AS to_ip_hex,'
'(SELECT name FROM symbols WHERE id = to_symbol_id) AS to_symbol,'
'to_sym_offset,'
'(SELECT short_name FROM dsos WHERE id = to_dso_id) AS to_dso_short_name,'
'(SELECT name FROM branch_types WHERE id = branch_type) AS branch_type_name,'
'in_tx'
' FROM samples')
file_header = struct.pack("!11sii", "PGCOPY\n\377\r\n\0", 0, 0)
file_trailer = "\377\377"
def open_output_file(file_name):
path_name = output_dir_name + "/" + file_name
file = open(path_name, "w+")
file.write(file_header)
return file
def close_output_file(file):
file.write(file_trailer)
file.close()
def copy_output_file_direct(file, table_name):
close_output_file(file)
sql = "COPY " + table_name + " FROM '" + file.name + "' (FORMAT 'binary')"
do_query(query, sql)
# Use COPY FROM STDIN because security may prevent postgres from accessing the files directly
def copy_output_file(file, table_name):
conn = PQconnectdb("dbname = " + dbname)
if (PQstatus(conn)):
raise Exception("COPY FROM STDIN PQconnectdb failed")
file.write(file_trailer)
file.seek(0)
sql = "COPY " + table_name + " FROM STDIN (FORMAT 'binary')"
res = PQexec(conn, sql)
if (PQresultStatus(res) != 4):
raise Exception("COPY FROM STDIN PQexec failed")
data = file.read(65536)
while (len(data)):
ret = PQputCopyData(conn, data, len(data))
if (ret != 1):
raise Exception("COPY FROM STDIN PQputCopyData failed, error " + str(ret))
data = file.read(65536)
ret = PQputCopyEnd(conn, None)
if (ret != 1):
raise Exception("COPY FROM STDIN PQputCopyEnd failed, error " + str(ret))
PQfinish(conn)
def remove_output_file(file):
name = file.name
file.close()
os.unlink(name)
evsel_file = open_output_file("evsel_table.bin")
machine_file = open_output_file("machine_table.bin")
thread_file = open_output_file("thread_table.bin")
comm_file = open_output_file("comm_table.bin")
comm_thread_file = open_output_file("comm_thread_table.bin")
dso_file = open_output_file("dso_table.bin")
symbol_file = open_output_file("symbol_table.bin")
branch_type_file = open_output_file("branch_type_table.bin")
sample_file = open_output_file("sample_table.bin")
if perf_db_export_calls:
call_path_file = open_output_file("call_path_table.bin")
call_file = open_output_file("call_table.bin")
def trace_begin():
print datetime.datetime.today(), "Writing to intermediate files..."
# id == 0 means unknown. It is easier to create records for them than replace the zeroes with NULLs
evsel_table(0, "unknown")
machine_table(0, 0, "unknown")
thread_table(0, 0, 0, -1, -1)
comm_table(0, "unknown")
dso_table(0, 0, "unknown", "unknown", "")
symbol_table(0, 0, 0, 0, 0, "unknown")
sample_table(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
if perf_db_export_calls:
call_path_table(0, 0, 0, 0)
unhandled_count = 0
def trace_end():
print datetime.datetime.today(), "Copying to database..."
copy_output_file(evsel_file, "selected_events")
copy_output_file(machine_file, "machines")
copy_output_file(thread_file, "threads")
copy_output_file(comm_file, "comms")
copy_output_file(comm_thread_file, "comm_threads")
copy_output_file(dso_file, "dsos")
copy_output_file(symbol_file, "symbols")
copy_output_file(branch_type_file, "branch_types")
copy_output_file(sample_file, "samples")
if perf_db_export_calls:
copy_output_file(call_path_file, "call_paths")
copy_output_file(call_file, "calls")
print datetime.datetime.today(), "Removing intermediate files..."
remove_output_file(evsel_file)
remove_output_file(machine_file)
remove_output_file(thread_file)
remove_output_file(comm_file)
remove_output_file(comm_thread_file)
remove_output_file(dso_file)
remove_output_file(symbol_file)
remove_output_file(branch_type_file)
remove_output_file(sample_file)
if perf_db_export_calls:
remove_output_file(call_path_file)
remove_output_file(call_file)
os.rmdir(output_dir_name)
print datetime.datetime.today(), "Adding primary keys"
do_query(query, 'ALTER TABLE selected_events ADD PRIMARY KEY (id)')
do_query(query, 'ALTER TABLE machines ADD PRIMARY KEY (id)')
do_query(query, 'ALTER TABLE threads ADD PRIMARY KEY (id)')
do_query(query, 'ALTER TABLE comms ADD PRIMARY KEY (id)')
do_query(query, 'ALTER TABLE comm_threads ADD PRIMARY KEY (id)')
do_query(query, 'ALTER TABLE dsos ADD PRIMARY KEY (id)')
do_query(query, 'ALTER TABLE symbols ADD PRIMARY KEY (id)')
do_query(query, 'ALTER TABLE branch_types ADD PRIMARY KEY (id)')
do_query(query, 'ALTER TABLE samples ADD PRIMARY KEY (id)')
if perf_db_export_calls:
do_query(query, 'ALTER TABLE call_paths ADD PRIMARY KEY (id)')
do_query(query, 'ALTER TABLE calls ADD PRIMARY KEY (id)')
print datetime.datetime.today(), "Adding foreign keys"
do_query(query, 'ALTER TABLE threads '
'ADD CONSTRAINT machinefk FOREIGN KEY (machine_id) REFERENCES machines (id),'
'ADD CONSTRAINT processfk FOREIGN KEY (process_id) REFERENCES threads (id)')
do_query(query, 'ALTER TABLE comm_threads '
'ADD CONSTRAINT commfk FOREIGN KEY (comm_id) REFERENCES comms (id),'
'ADD CONSTRAINT threadfk FOREIGN KEY (thread_id) REFERENCES threads (id)')
do_query(query, 'ALTER TABLE dsos '
'ADD CONSTRAINT machinefk FOREIGN KEY (machine_id) REFERENCES machines (id)')
do_query(query, 'ALTER TABLE symbols '
'ADD CONSTRAINT dsofk FOREIGN KEY (dso_id) REFERENCES dsos (id)')
do_query(query, 'ALTER TABLE samples '
'ADD CONSTRAINT evselfk FOREIGN KEY (evsel_id) REFERENCES selected_events (id),'
'ADD CONSTRAINT machinefk FOREIGN KEY (machine_id) REFERENCES machines (id),'
'ADD CONSTRAINT threadfk FOREIGN KEY (thread_id) REFERENCES threads (id),'
'ADD CONSTRAINT commfk FOREIGN KEY (comm_id) REFERENCES comms (id),'
'ADD CONSTRAINT dsofk FOREIGN KEY (dso_id) REFERENCES dsos (id),'
'ADD CONSTRAINT symbolfk FOREIGN KEY (symbol_id) REFERENCES symbols (id),'
'ADD CONSTRAINT todsofk FOREIGN KEY (to_dso_id) REFERENCES dsos (id),'
'ADD CONSTRAINT tosymbolfk FOREIGN KEY (to_symbol_id) REFERENCES symbols (id)')
if perf_db_export_calls:
do_query(query, 'ALTER TABLE call_paths '
'ADD CONSTRAINT parentfk FOREIGN KEY (parent_id) REFERENCES call_paths (id),'
'ADD CONSTRAINT symbolfk FOREIGN KEY (symbol_id) REFERENCES symbols (id)')
do_query(query, 'ALTER TABLE calls '
'ADD CONSTRAINT threadfk FOREIGN KEY (thread_id) REFERENCES threads (id),'
'ADD CONSTRAINT commfk FOREIGN KEY (comm_id) REFERENCES comms (id),'
'ADD CONSTRAINT call_pathfk FOREIGN KEY (call_path_id) REFERENCES call_paths (id),'
'ADD CONSTRAINT callfk FOREIGN KEY (call_id) REFERENCES samples (id),'
'ADD CONSTRAINT returnfk FOREIGN KEY (return_id) REFERENCES samples (id),'
'ADD CONSTRAINT parent_call_pathfk FOREIGN KEY (parent_call_path_id) REFERENCES call_paths (id)')
do_query(query, 'CREATE INDEX pcpid_idx ON calls (parent_call_path_id)')
if (unhandled_count):
print datetime.datetime.today(), "Warning: ", unhandled_count, " unhandled events"
print datetime.datetime.today(), "Done"
def trace_unhandled(event_name, context, event_fields_dict):
global unhandled_count
unhandled_count += 1
def sched__sched_switch(*x):
pass
def evsel_table(evsel_id, evsel_name, *x):
n = len(evsel_name)
fmt = "!hiqi" + str(n) + "s"
value = struct.pack(fmt, 2, 8, evsel_id, n, evsel_name)
evsel_file.write(value)
def machine_table(machine_id, pid, root_dir, *x):
n = len(root_dir)
fmt = "!hiqiii" + str(n) + "s"
value = struct.pack(fmt, 3, 8, machine_id, 4, pid, n, root_dir)
machine_file.write(value)
def thread_table(thread_id, machine_id, process_id, pid, tid, *x):
value = struct.pack("!hiqiqiqiiii", 5, 8, thread_id, 8, machine_id, 8, process_id, 4, pid, 4, tid)
thread_file.write(value)
def comm_table(comm_id, comm_str, *x):
n = len(comm_str)
fmt = "!hiqi" + str(n) + "s"
value = struct.pack(fmt, 2, 8, comm_id, n, comm_str)
comm_file.write(value)
def comm_thread_table(comm_thread_id, comm_id, thread_id, *x):
fmt = "!hiqiqiq"
value = struct.pack(fmt, 3, 8, comm_thread_id, 8, comm_id, 8, thread_id)
comm_thread_file.write(value)
def dso_table(dso_id, machine_id, short_name, long_name, build_id, *x):
n1 = len(short_name)
n2 = len(long_name)
n3 = len(build_id)
fmt = "!hiqiqi" + str(n1) + "si" + str(n2) + "si" + str(n3) + "s"
value = struct.pack(fmt, 5, 8, dso_id, 8, machine_id, n1, short_name, n2, long_name, n3, build_id)
dso_file.write(value)
def symbol_table(symbol_id, dso_id, sym_start, sym_end, binding, symbol_name, *x):
n = len(symbol_name)
fmt = "!hiqiqiqiqiii" + str(n) + "s"
value = struct.pack(fmt, 6, 8, symbol_id, 8, dso_id, 8, sym_start, 8, sym_end, 4, binding, n, symbol_name)
symbol_file.write(value)
def branch_type_table(branch_type, name, *x):
n = len(name)
fmt = "!hiii" + str(n) + "s"
value = struct.pack(fmt, 2, 4, branch_type, n, name)
branch_type_file.write(value)
def sample_table(sample_id, evsel_id, machine_id, thread_id, comm_id, dso_id, symbol_id, sym_offset, ip, time, cpu, to_dso_id, to_symbol_id, to_sym_offset, to_ip, period, weight, transaction, data_src, branch_type, in_tx, *x):
if branches:
value = struct.pack("!hiqiqiqiqiqiqiqiqiqiqiiiqiqiqiqiiiB", 17, 8, sample_id, 8, evsel_id, 8, machine_id, 8, thread_id, 8, comm_id, 8, dso_id, 8, symbol_id, 8, sym_offset, 8, ip, 8, time, 4, cpu, 8, to_dso_id, 8, to_symbol_id, 8, to_sym_offset, 8, to_ip, 4, branch_type, 1, in_tx)
else:
value = struct.pack("!hiqiqiqiqiqiqiqiqiqiqiiiqiqiqiqiqiqiqiqiiiB", 21, 8, sample_id, 8, evsel_id, 8, machine_id, 8, thread_id, 8, comm_id, 8, dso_id, 8, symbol_id, 8, sym_offset, 8, ip, 8, time, 4, cpu, 8, to_dso_id, 8, to_symbol_id, 8, to_sym_offset, 8, to_ip, 8, period, 8, weight, 8, transaction, 8, data_src, 4, branch_type, 1, in_tx)
sample_file.write(value)
def call_path_table(cp_id, parent_id, symbol_id, ip, *x):
fmt = "!hiqiqiqiq"
value = struct.pack(fmt, 4, 8, cp_id, 8, parent_id, 8, symbol_id, 8, ip)
call_path_file.write(value)
def call_return_table(cr_id, thread_id, comm_id, call_path_id, call_time, return_time, branch_count, call_id, return_id, parent_call_path_id, flags, *x):
fmt = "!hiqiqiqiqiqiqiqiqiqiqii"
value = struct.pack(fmt, 11, 8, cr_id, 8, thread_id, 8, comm_id, 8, call_path_id, 8, call_time, 8, return_time, 8, branch_count, 8, call_id, 8, return_id, 8, parent_call_path_id, 4, flags)
call_file.write(value)
| gpl-2.0 | 1,630,603,360,882,899,200 | 1,486,115,298,710,481,700 | 35.305499 | 341 | 0.680523 | false |
hyperwang/bitcoin | contrib/devtools/security-check.py | 3 | 6400 | #!/usr/bin/python2
'''
Perform basic ELF security checks on a series of executables.
Exit status will be 0 if successful, and the program will be silent.
Otherwise the exit status will be 1 and it will log which executables failed which checks.
Needs `readelf` (for ELF) and `objdump` (for PE).
'''
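# Example invocation (the binary names are illustrative):
#   contrib/devtools/security-check.py src/bitcoind src/bitcoin-cli
# A silent run with exit status 0 means every check passed; otherwise each
# offending executable is printed together with the checks it failed.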
from __future__ import division,print_function
import subprocess
import sys
import os
READELF_CMD = os.getenv('READELF', '/usr/bin/readelf')
OBJDUMP_CMD = os.getenv('OBJDUMP', '/usr/bin/objdump')
def check_ELF_PIE(executable):
'''
Check for position independent executable (PIE), allowing for address space randomization.
'''
p = subprocess.Popen([READELF_CMD, '-h', '-W', executable], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
(stdout, stderr) = p.communicate()
if p.returncode:
raise IOError('Error opening file')
ok = False
for line in stdout.split('\n'):
line = line.split()
if len(line)>=2 and line[0] == 'Type:' and line[1] == 'DYN':
ok = True
return ok
def get_ELF_program_headers(executable):
'''Return type and flags for ELF program headers'''
p = subprocess.Popen([READELF_CMD, '-l', '-W', executable], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
(stdout, stderr) = p.communicate()
if p.returncode:
raise IOError('Error opening file')
in_headers = False
count = 0
headers = []
for line in stdout.split('\n'):
if line.startswith('Program Headers:'):
in_headers = True
if line == '':
in_headers = False
if in_headers:
if count == 1: # header line
ofs_typ = line.find('Type')
ofs_offset = line.find('Offset')
ofs_flags = line.find('Flg')
ofs_align = line.find('Align')
if ofs_typ == -1 or ofs_offset == -1 or ofs_flags == -1 or ofs_align == -1:
raise ValueError('Cannot parse elfread -lW output')
elif count > 1:
typ = line[ofs_typ:ofs_offset].rstrip()
flags = line[ofs_flags:ofs_align].rstrip()
headers.append((typ, flags))
count += 1
return headers
def check_ELF_NX(executable):
'''
Check that no sections are writable and executable (including the stack)
'''
have_wx = False
have_gnu_stack = False
for (typ, flags) in get_ELF_program_headers(executable):
if typ == 'GNU_STACK':
have_gnu_stack = True
if 'W' in flags and 'E' in flags: # section is both writable and executable
have_wx = True
return have_gnu_stack and not have_wx
def check_ELF_RELRO(executable):
'''
Check for read-only relocations.
GNU_RELRO program header must exist
Dynamic section must have BIND_NOW flag
'''
have_gnu_relro = False
for (typ, flags) in get_ELF_program_headers(executable):
# Note: not checking flags == 'R': here as linkers set the permission differently
# This does not affect security: the permission flags of the GNU_RELRO program header are ignored, the PT_LOAD header determines the effective permissions.
# However, the dynamic linker need to write to this area so these are RW.
# Glibc itself takes care of mprotecting this area R after relocations are finished.
# See also http://permalink.gmane.org/gmane.comp.gnu.binutils/71347
if typ == 'GNU_RELRO':
have_gnu_relro = True
have_bindnow = False
p = subprocess.Popen([READELF_CMD, '-d', '-W', executable], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
(stdout, stderr) = p.communicate()
if p.returncode:
raise IOError('Error opening file')
for line in stdout.split('\n'):
tokens = line.split()
if len(tokens)>1 and tokens[1] == '(BIND_NOW)':
have_bindnow = True
return have_gnu_relro and have_bindnow
def check_ELF_Canary(executable):
'''
Check for use of stack canary
'''
p = subprocess.Popen([READELF_CMD, '--dyn-syms', '-W', executable], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
(stdout, stderr) = p.communicate()
if p.returncode:
raise IOError('Error opening file')
ok = False
for line in stdout.split('\n'):
if '__stack_chk_fail' in line:
ok = True
return ok
def get_PE_dll_characteristics(executable):
'''
Get PE DllCharacteristics bits
'''
p = subprocess.Popen([OBJDUMP_CMD, '-x', executable], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
(stdout, stderr) = p.communicate()
if p.returncode:
raise IOError('Error opening file')
for line in stdout.split('\n'):
tokens = line.split()
if len(tokens)>=2 and tokens[0] == 'DllCharacteristics':
return int(tokens[1],16)
return 0
def check_PE_PIE(executable):
'''PIE: DllCharacteristics bit 0x40 signifies dynamicbase (ASLR)'''
return bool(get_PE_dll_characteristics(executable) & 0x40)
def check_PE_NX(executable):
'''NX: DllCharacteristics bit 0x100 signifies nxcompat (DEP)'''
return bool(get_PE_dll_characteristics(executable) & 0x100)
CHECKS = {
'ELF': [
('PIE', check_ELF_PIE),
('NX', check_ELF_NX),
('RELRO', check_ELF_RELRO),
('Canary', check_ELF_Canary)
],
'PE': [
('PIE', check_PE_PIE),
('NX', check_PE_NX)
]
}
def identify_executable(executable):
with open(filename, 'rb') as f:
magic = f.read(4)
if magic.startswith(b'MZ'):
return 'PE'
elif magic.startswith(b'\x7fELF'):
return 'ELF'
return None
if __name__ == '__main__':
retval = 0
for filename in sys.argv[1:]:
try:
etype = identify_executable(filename)
if etype is None:
print('%s: unknown format' % filename)
retval = 1
continue
failed = []
for (name, func) in CHECKS[etype]:
if not func(filename):
failed.append(name)
if failed:
print('%s: failed %s' % (filename, ' '.join(failed)))
retval = 1
except IOError:
print('%s: cannot open' % filename)
retval = 1
exit(retval)
| mit | 4,080,665,269,826,650,000 | 8,681,343,060,185,500,000 | 34.359116 | 163 | 0.605938 | false |
praekelt/ummeli | ummeli/base/migrations/0010_auto__add_field_skill_level.py | 1 | 11741 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Skill.level'
db.add_column('base_skill', 'level', self.gf('django.db.models.fields.PositiveIntegerField')(default=0), keep_default=False)
def backwards(self, orm):
# Deleting field 'Skill.level'
db.delete_column('base_skill', 'level')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'base.article': {
'Meta': {'object_name': 'Article'},
'date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 4, 3, 11, 20, 39, 972729)', 'blank': 'True'}),
'hash_key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'source': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'text': ('django.db.models.fields.TextField', [], {})
},
'base.category': {
'Meta': {'object_name': 'Category'},
'articles': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['base.Article']", 'null': 'True', 'blank': 'True'}),
'hash_key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'province': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['base.Province']"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '45'}),
'user_submitted_job_articles': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['base.UserSubmittedJobArticle']", 'null': 'True', 'blank': 'True'})
},
'base.certificate': {
'Meta': {'object_name': 'Certificate'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'institution': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '45'}),
'year': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'blank': 'True'})
},
'base.curriculumvitae': {
'Meta': {'object_name': 'CurriculumVitae'},
'certificates': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['base.Certificate']", 'symmetrical': 'False', 'blank': 'True'}),
'connection_requests': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'connection_requests'", 'blank': 'True', 'to': "orm['auth.User']"}),
'date_of_birth': ('django.db.models.fields.CharField', [], {'max_length': '45', 'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.CharField', [], {'max_length': '45', 'null': 'True', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '45', 'null': 'True', 'blank': 'True'}),
'gender': ('django.db.models.fields.CharField', [], {'max_length': '45', 'null': 'True', 'blank': 'True'}),
'highest_grade': ('django.db.models.fields.CharField', [], {'max_length': '45', 'null': 'True', 'blank': 'True'}),
'highest_grade_year': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'blank': 'True'}),
'house_number': ('django.db.models.fields.CharField', [], {'max_length': '45', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_complete': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'languages': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['base.Language']", 'symmetrical': 'False', 'blank': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '45', 'null': 'True', 'blank': 'True'}),
'nr_of_faxes_sent': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'preferred_skill': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'profiles_preferred'", 'null': 'True', 'to': "orm['base.Skill']"}),
'references': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['base.Reference']", 'symmetrical': 'False', 'blank': 'True'}),
'school': ('django.db.models.fields.CharField', [], {'max_length': '45', 'null': 'True', 'blank': 'True'}),
'skills': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'profiles'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['base.Skill']"}),
'street_name': ('django.db.models.fields.CharField', [], {'max_length': '45', 'null': 'True', 'blank': 'True'}),
'surname': ('django.db.models.fields.CharField', [], {'max_length': '45', 'null': 'True', 'blank': 'True'}),
'telephone_number': ('django.db.models.fields.CharField', [], {'max_length': '45', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'}),
'work_experiences': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['base.WorkExperience']", 'symmetrical': 'False', 'blank': 'True'})
},
'base.language': {
'Meta': {'object_name': 'Language'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '45'}),
'read_write': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'base.province': {
'Meta': {'object_name': 'Province'},
'name': ('django.db.models.fields.CharField', [], {'max_length': '45'}),
'search_id': ('django.db.models.fields.IntegerField', [], {'primary_key': 'True'})
},
'base.reference': {
'Meta': {'object_name': 'Reference'},
'contact_no': ('django.db.models.fields.CharField', [], {'max_length': '45', 'null': 'True', 'blank': 'True'}),
'fullname': ('django.db.models.fields.CharField', [], {'max_length': '45'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'relationship': ('django.db.models.fields.CharField', [], {'max_length': '45', 'null': 'True', 'blank': 'True'})
},
'base.skill': {
'Meta': {'object_name': 'Skill'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'skill': ('django.db.models.fields.CharField', [], {'max_length': '45'})
},
'base.usersubmittedjobarticle': {
'Meta': {'object_name': 'UserSubmittedJobArticle'},
'date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'moderated': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'text': ('django.db.models.fields.TextField', [], {'default': "''"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_submitted_job_article_user'", 'to': "orm['auth.User']"})
},
'base.workexperience': {
'Meta': {'object_name': 'WorkExperience'},
'company': ('django.db.models.fields.CharField', [], {'max_length': '45', 'null': 'True', 'blank': 'True'}),
'end_year': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'start_year': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '45'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['base']
| bsd-3-clause | -7,646,395,972,671,763,000 | -6,840,609,777,714,336,000 | 76.243421 | 202 | 0.547313 | false |
okolisny/integration_tests | cfme/tests/infrastructure/test_host_drift_analysis.py | 1 | 4947 | # -*- coding: utf-8 -*-
import pytest
from cfme import test_requirements
from cfme.configure.tasks import Tasks
from cfme.fixtures import pytest_selenium as sel
from cfme.infrastructure import host as host_obj
from cfme.infrastructure.provider import InfraProvider
from cfme.web_ui import DriftGrid, toolbar as tb
from cfme.utils import error, testgen
from cfme.utils.appliance.implementations.ui import navigate_to
from cfme.utils.wait import wait_for
pytestmark = [
test_requirements.drift,
pytest.mark.tier(3)
]
def pytest_generate_tests(metafunc):
argnames, argvalues, idlist = testgen.providers_by_class(
metafunc, [InfraProvider], required_fields=['hosts'])
argnames += ['host']
new_idlist = []
new_argvalues = []
for i, argvalue_tuple in enumerate(argvalues):
args = dict(zip(argnames, argvalue_tuple))
for test_host in args['provider'].data['hosts']:
if not test_host.get('test_fleece', False):
continue
argvs = argvalues[i][:]
new_argvalues.append(argvs + [test_host])
test_id = '{}-{}'.format(args['provider'].key, test_host['type'])
new_idlist.append(test_id)
testgen.parametrize(metafunc, argnames, new_argvalues, ids=new_idlist, scope="module")
@pytest.mark.meta(blockers=[1242655])
def test_host_drift_analysis(request, setup_provider, provider, host, soft_assert):
"""Tests host drift analysis
Metadata:
test_flag: host_drift_analysis
"""
test_host = host_obj.Host(name=host['name'], provider=provider)
wait_for(lambda: test_host.exists, delay=20, num_sec=120, fail_func=sel.refresh,
message="hosts_exists")
# get drift history num
drift_num_orig = int(test_host.get_detail('Relationships', 'Drift History'))
# add credentials to host + finalizer to remove them
if not test_host.has_valid_credentials:
test_host.update(
updates={'credentials': host_obj.get_credentials_from_config(host['credentials'])},
validate_credentials=True
)
@request.addfinalizer
def test_host_remove_creds():
test_host.update(
updates={
'credentials': host_obj.Host.Credential(
principal="",
secret="",
verify_secret=""
)
}
)
# clear table
view = navigate_to(Tasks, 'AllOtherTasks')
view.delete.item_select('Delete All', handle_alert=True)
# initiate 1st analysis
test_host.run_smartstate_analysis()
# Wait for the task to finish
def is_host_analysis_finished():
""" Check if analysis is finished - if not, reload page
"""
finished = False
view = navigate_to(Tasks, 'AllOtherTasks')
host_analysis_row = view.tabs.allothertasks.table.row(
task_name="SmartState Analysis for '{}'".format(test_host.name))
if host_analysis_row.state.text == 'Finished':
finished = True
# select the row and delete the task
host_analysis_row[0].check()
view.delete.item_select('Delete', handle_alert=True)
else:
view.reload.click()
return finished
wait_for(is_host_analysis_finished, delay=5, timeout="8m")
    # wait for drift history num+1
wait_for(
lambda: int(test_host.get_detail('Relationships', 'Drift History')) == drift_num_orig + 1,
delay=20,
num_sec=120,
message="Waiting for Drift History count to increase",
fail_func=sel.refresh
)
# add a tag and a finalizer to remove it
test_host.add_tag(category='Department', tag='Accounting')
request.addfinalizer(lambda: test_host.remove_tag(category='Department', tag='Accounting'))
# initiate 2nd analysis
test_host.run_smartstate_analysis()
# Wait for the task to finish
wait_for(is_host_analysis_finished, delay=5, timeout="8m")
    # wait for drift history num+2
wait_for(
lambda: int(test_host.get_detail('Relationships', 'Drift History')) == drift_num_orig + 2,
delay=20,
num_sec=120,
message="Waiting for Drift History count to increase",
fail_func=sel.refresh
)
# check drift difference
soft_assert(not test_host.equal_drift_results('Department (1)', 'My Company Tags', 0, 1),
"Drift analysis results are equal when they shouldn't be")
# Test UI features that modify the drift grid
d_grid = DriftGrid()
# Accounting tag should not be displayed, because it was changed to True
tb.select("Attributes with same values")
with error.expected(sel.NoSuchElementException):
d_grid.get_cell('Accounting', 0)
# Accounting tag should be displayed now
tb.select("Attributes with different values")
d_grid.get_cell('Accounting', 0)
| gpl-2.0 | 7,844,373,983,777,419,000 | -1,579,688,900,991,782,100 | 33.354167 | 98 | 0.634526 | false |
BaconPancakes/valor | lib/websockets/client.py | 11 | 6563 | """
The :mod:`websockets.client` module defines a simple WebSocket client API.
"""
import asyncio
import collections.abc
import email.message
from .exceptions import InvalidHandshake
from .handshake import build_request, check_response
from .http import USER_AGENT, read_response
from .protocol import CONNECTING, OPEN, WebSocketCommonProtocol
from .uri import parse_uri
__all__ = ['connect', 'WebSocketClientProtocol']
class WebSocketClientProtocol(WebSocketCommonProtocol):
"""
Complete WebSocket client implementation as an :class:`asyncio.Protocol`.
This class inherits most of its methods from
:class:`~websockets.protocol.WebSocketCommonProtocol`.
"""
is_client = True
state = CONNECTING
@asyncio.coroutine
def handshake(self, wsuri,
origin=None, subprotocols=None, extra_headers=None):
"""
Perform the client side of the opening handshake.
If provided, ``origin`` sets the Origin HTTP header.
If provided, ``subprotocols`` is a list of supported subprotocols in
order of decreasing preference.
If provided, ``extra_headers`` sets additional HTTP request headers.
It must be a mapping or an iterable of (name, value) pairs.
"""
headers = []
set_header = lambda k, v: headers.append((k, v))
if wsuri.port == (443 if wsuri.secure else 80): # pragma: no cover
set_header('Host', wsuri.host)
else:
set_header('Host', '{}:{}'.format(wsuri.host, wsuri.port))
if origin is not None:
set_header('Origin', origin)
if subprotocols is not None:
set_header('Sec-WebSocket-Protocol', ', '.join(subprotocols))
if extra_headers is not None:
if isinstance(extra_headers, collections.abc.Mapping):
extra_headers = extra_headers.items()
for name, value in extra_headers:
set_header(name, value)
set_header('User-Agent', USER_AGENT)
key = build_request(set_header)
self.request_headers = email.message.Message()
for name, value in headers:
self.request_headers[name] = value
self.raw_request_headers = headers
# Send handshake request. Since the URI and the headers only contain
# ASCII characters, we can keep this simple.
request = ['GET %s HTTP/1.1' % wsuri.resource_name]
request.extend('{}: {}'.format(k, v) for k, v in headers)
request.append('\r\n')
request = '\r\n'.join(request).encode()
self.writer.write(request)
# Read handshake response.
try:
status_code, headers = yield from read_response(self.reader)
except ValueError as exc:
raise InvalidHandshake("Malformed HTTP message") from exc
if status_code != 101:
raise InvalidHandshake("Bad status code: {}".format(status_code))
self.response_headers = headers
self.raw_response_headers = list(headers.raw_items())
get_header = lambda k: headers.get(k, '')
check_response(get_header, key)
self.subprotocol = headers.get('Sec-WebSocket-Protocol', None)
if (self.subprotocol is not None and
self.subprotocol not in subprotocols):
raise InvalidHandshake(
"Unknown subprotocol: {}".format(self.subprotocol))
assert self.state == CONNECTING
self.state = OPEN
self.opening_handshake.set_result(True)
@asyncio.coroutine
def connect(uri, *,
klass=WebSocketClientProtocol,
timeout=10, max_size=2 ** 20, max_queue=2 ** 5,
loop=None, legacy_recv=False,
origin=None, subprotocols=None, extra_headers=None,
**kwds):
"""
This coroutine connects to a WebSocket server at a given ``uri``.
It yields a :class:`WebSocketClientProtocol` which can then be used to
send and receive messages.
:func:`connect` is a wrapper around the event loop's
:meth:`~asyncio.BaseEventLoop.create_connection` method. Extra keyword
arguments are passed to :meth:`~asyncio.BaseEventLoop.create_connection`.
For example, you can set the ``ssl`` keyword argument to a
:class:`~ssl.SSLContext` to enforce some TLS settings. When connecting to
a ``wss://`` URI, if this argument isn't provided explicitly, it's set to
``True``, which means Python's default :class:`~ssl.SSLContext` is used.
The behavior of the ``timeout``, ``max_size``, and ``max_queue`` optional
    arguments is described in the documentation of
:class:`~websockets.protocol.WebSocketCommonProtocol`.
:func:`connect` also accepts the following optional arguments:
* ``origin`` sets the Origin HTTP header
* ``subprotocols`` is a list of supported subprotocols in order of
decreasing preference
* ``extra_headers`` sets additional HTTP request headers – it can be a
mapping or an iterable of (name, value) pairs
:func:`connect` raises :exc:`~websockets.uri.InvalidURI` if ``uri`` is
invalid and :exc:`~websockets.handshake.InvalidHandshake` if the opening
handshake fails.
    On Python 3.5, :func:`connect` can be used as an asynchronous context
manager. In that case, the connection is closed when exiting the context.
"""
if loop is None:
loop = asyncio.get_event_loop()
wsuri = parse_uri(uri)
if wsuri.secure:
kwds.setdefault('ssl', True)
elif 'ssl' in kwds:
raise ValueError("connect() received a SSL context for a ws:// URI. "
"Use a wss:// URI to enable TLS.")
factory = lambda: klass(
host=wsuri.host, port=wsuri.port, secure=wsuri.secure,
timeout=timeout, max_size=max_size, max_queue=max_queue,
loop=loop, legacy_recv=legacy_recv,
)
transport, protocol = yield from loop.create_connection(
factory, wsuri.host, wsuri.port, **kwds)
try:
yield from protocol.handshake(
wsuri, origin=origin, subprotocols=subprotocols,
extra_headers=extra_headers)
except Exception:
yield from protocol.close_connection(force=True)
raise
return protocol
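# Minimal usage sketch (the URI and the message payload are illustrative only):
#
#     @asyncio.coroutine
#     def hello():
#         ws = yield from connect('ws://localhost:8765/')
#         try:
#             yield from ws.send('Hello!')
#             reply = yield from ws.recv()
#         finally:
#             yield from ws.close()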
try:
from .py35.client import Connect
except (SyntaxError, ImportError): # pragma: no cover
pass
else:
Connect.__wrapped__ = connect
# Copy over docstring to support building documentation on Python 3.5.
Connect.__doc__ = connect.__doc__
connect = Connect
| gpl-3.0 | -7,415,865,114,666,932,000 | 3,815,643,556,664,490,500 | 35.248619 | 78 | 0.644719 | false |
gumyns/home-automation | hardware_old/arm/toolchainGenerator.py | 1 | 2209 | #!/usr/bin/python
import os
import sys
import subprocess
devices = ['STM32F030x6']
template = """set(ENV{{PATH}} {gcc_dir})
set(GCC_ARM_DIRECTORY {gcc_dir})
add_definitions(-D{device})
set(CMAKE_SYSTEM_NAME Generic)
set(CMAKE_SYSTEM_PROCESSOR arm)
set(CMAKE_C_COMPILER_WORKS 1)
set(CMAKE_CXX_COMPILER_WORKS 1)
set(CMAKE_C_COMPILER arm-none-eabi-gcc)
set(CMAKE_CXX_COMPILER arm-none-eabi-g++)
set(CMAKE_ASM_COMPILER arm-none-eabi-as)
set(CMAKE_AR arm-none-eabi-ar)
set(CMAKE_RANLIB arm-none-eabi-ranlib)
set(CMAKE_LINKER arm-none-eabi-ld)
set(COMMON_FLAGS " -Wall -Wextra -nostartfiles -mcpu=cortex-m0 -mthumb -mthumb-interwork -mfloat-abi=soft -ffunction-sections -fdata-sections -g -fno-common -fmessage-length=0 --specs=nano.specs -Os -fsigned-char -ffreestanding -fomit-frame-pointer -mabi=aapcs -fno-unroll-loops -ffast-math -ftree-vectorize -fno-builtin -MMD -MP")
set(CMAKE_CXX_FLAGS_INIT "${{COMMON_FLAGS}} -std=gnu++14")
set(CMAKE_C_FLAGS_INIT "${{COMMON_FLAGS}} -std=gnu99")
set(CMAKE_ASM_FLAGS_INIT -nostdinc)
set(CMAKE_EXE_LINKER_FLAGS_INIT "-Xlinker --gc-sections -Wl,-Map,${{PROJECT_NAME}}.map,-o${{PROJECT_NAME}}.elf")
set(CMAKE_C_FLAGS "${{COMMON_FLAGS}} -std=gnu99" CACHE INTERNAL "c compiler flags")
set(CMAKE_CXX_FLAGS "${{COMMON_FLAGS}} -std=gnu++14 -fabi-version=0 -fno-exceptions -fno-rtti -fno-use-cxa-atexit -fno-threadsafe-statics" CACHE INTERNAL "cxx compiler flags")
set(CMAKE_ASM_FLAGS "${{COMMON_FLAGS}} -x assembler-with-cpp " CACHE INTERNAL "asm compiler flags")
"""
findCMD = 'find ~/opt -name "arm-none-eabi-gcc" | head -1 | rev | cut -c 18- | rev'
out = subprocess.Popen(findCMD, shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(stdout, stderr) = out.communicate()
files = stdout.decode().split()
if len(files) == 1:
directory = files[0].strip()
else:
sys.exit("GCC for arm not found...")
srcDir = os.path.join(os.path.dirname(os.path.abspath(str(sys.argv[0]))), "toolchains")
if not os.path.isdir(srcDir):
subprocess.call(["mkdir", srcDir])
for dev in devices:
with open(os.path.join(srcDir, "{0}.cmake".format(dev)), 'w') as f:
f.write(template.format(gcc_dir=directory, device=dev))
f.close()
| apache-2.0 | 4,637,635,944,676,726,000 | -1,368,766,167,540,253,700 | 40.679245 | 332 | 0.712992 | false |
CharlieGreenman/pixelatorV2_with_react | node_modules/gulp-sass/node_modules/node-sass/node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py | 2698 | 3270 | #!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Unit tests for the easy_xml.py file. """
import gyp.easy_xml as easy_xml
import unittest
import StringIO
class TestSequenceFunctions(unittest.TestCase):
def setUp(self):
self.stderr = StringIO.StringIO()
def test_EasyXml_simple(self):
self.assertEqual(
easy_xml.XmlToString(['test']),
'<?xml version="1.0" encoding="utf-8"?><test/>')
self.assertEqual(
easy_xml.XmlToString(['test'], encoding='Windows-1252'),
'<?xml version="1.0" encoding="Windows-1252"?><test/>')
def test_EasyXml_simple_with_attributes(self):
self.assertEqual(
easy_xml.XmlToString(['test2', {'a': 'value1', 'b': 'value2'}]),
'<?xml version="1.0" encoding="utf-8"?><test2 a="value1" b="value2"/>')
def test_EasyXml_escaping(self):
original = '<test>\'"\r&\nfoo'
    converted = '<test>\'"&#xD;&amp;&#xA;foo'
    converted_apos = converted.replace("'", '&apos;')
self.assertEqual(
easy_xml.XmlToString(['test3', {'a': original}, original]),
'<?xml version="1.0" encoding="utf-8"?><test3 a="%s">%s</test3>' %
(converted, converted_apos))
def test_EasyXml_pretty(self):
self.assertEqual(
easy_xml.XmlToString(
['test3',
['GrandParent',
['Parent1',
['Child']
],
['Parent2']
]
],
pretty=True),
'<?xml version="1.0" encoding="utf-8"?>\n'
'<test3>\n'
' <GrandParent>\n'
' <Parent1>\n'
' <Child/>\n'
' </Parent1>\n'
' <Parent2/>\n'
' </GrandParent>\n'
'</test3>\n')
def test_EasyXml_complex(self):
# We want to create:
target = (
'<?xml version="1.0" encoding="utf-8"?>'
'<Project>'
'<PropertyGroup Label="Globals">'
'<ProjectGuid>{D2250C20-3A94-4FB9-AF73-11BC5B73884B}</ProjectGuid>'
'<Keyword>Win32Proj</Keyword>'
'<RootNamespace>automated_ui_tests</RootNamespace>'
'</PropertyGroup>'
'<Import Project="$(VCTargetsPath)\\Microsoft.Cpp.props"/>'
'<PropertyGroup '
'Condition="\'$(Configuration)|$(Platform)\'=='
'\'Debug|Win32\'" Label="Configuration">'
'<ConfigurationType>Application</ConfigurationType>'
'<CharacterSet>Unicode</CharacterSet>'
'</PropertyGroup>'
'</Project>')
xml = easy_xml.XmlToString(
['Project',
['PropertyGroup', {'Label': 'Globals'},
['ProjectGuid', '{D2250C20-3A94-4FB9-AF73-11BC5B73884B}'],
['Keyword', 'Win32Proj'],
['RootNamespace', 'automated_ui_tests']
],
['Import', {'Project': '$(VCTargetsPath)\\Microsoft.Cpp.props'}],
['PropertyGroup',
{'Condition': "'$(Configuration)|$(Platform)'=='Debug|Win32'",
'Label': 'Configuration'},
['ConfigurationType', 'Application'],
['CharacterSet', 'Unicode']
]
])
self.assertEqual(xml, target)
if __name__ == '__main__':
unittest.main()
| mit | 8,924,617,947,330,789,000 | -5,488,665,097,748,867,000 | 30.747573 | 77 | 0.553823 | false |
ActionAdam/osmc | package/mediacenter-addon-osmc/src/script.module.osmcsetting.apfstore/resources/lib/apf_gui.py | 9 | 4895 |
# KODI modules
import xbmc
import xbmcaddon
import xbmcgui
# Standard modules
import sys
import os
import socket
import json
addonid = "script.module.osmcsetting.apfstore"
__addon__ = xbmcaddon.Addon(addonid)
__path__ = xbmc.translatePath(xbmcaddon.Addon(addonid).getAddonInfo('path'))
# Custom module path
sys.path.append(os.path.join(__path__, 'resources','lib'))
# OSMC SETTING Modules
from CompLogger import comprehensive_logger as clog
ADDONART = os.path.join(__path__, 'resources','skins', 'Default', 'media')
USERART = os.path.join(xbmc.translatePath('special://userdata/'), 'addon_data', addonid)
def log(message):
xbmc.log('OSMC APFStore gui : ' + str(message), level=xbmc.LOGDEBUG)
def lang(id):
san = __addon__.getLocalizedString(id).encode( 'utf-8', 'ignore' )
return san
class apf_GUI(xbmcgui.WindowXMLDialog):
def __init__(self, strXMLname, strFallbackPath, strDefaultName, apf_dict):
self.apf_dict = apf_dict
self.apf_order_list = []
self.action_dict = {}
def onInit(self):
self.list = self.getControl(500)
self.list.setVisible(True)
for x, y in self.apf_dict.iteritems():
# self.current_icon = '/home/kubkev/.kodi/addons/script.module.osmcsetting.apfstore/resources/skins/Default/media/osmc_logo.png'
self.list.addItem(y)
self.apf_order_list.append(x)
try:
self.getControl(50).setVisible(False)
except:
pass
self.check_action_dict()
@clog(logger=log)
def check_action_dict(self):
install = 0
removal = 0
for x, y in self.action_dict.iteritems():
if y == 'Install':
install += 1
elif y == 'Uninstall':
removal += 1
if not install and not removal:
self.getControl(6).setVisible(False)
self.getControl(61).setVisible(False)
self.getControl(62).setVisible(False)
return
if install:
self.getControl(61).setLabel(lang(32001) % install)
self.getControl(6).setVisible(True)
self.getControl(61).setVisible(True)
else:
self.getControl(61).setVisible(False)
if removal:
self.getControl(62).setLabel(lang(32002) % removal)
self.getControl(6).setVisible(True)
self.getControl(62).setVisible(True)
else:
self.getControl(62).setVisible(False)
@clog(logger=log)
def onClick(self, controlID):
if controlID == 500:
container = self.getControl(500)
sel_pos = container.getSelectedPosition()
sel_item = self.apf_dict[self.apf_order_list[sel_pos]]
xml = "APFAddonInfo_720OSMC.xml" if xbmcgui.Window(10000).getProperty("SkinHeight") == '720' else "APFAddonInfo_OSMC.xml"
self.addon_gui = addon_info_gui(xml, __path__, 'Default', sel_item=sel_item)
self.addon_gui.doModal()
ending_action = self.addon_gui.action
if ending_action == 'Install':
self.action_dict[sel_item.id] = 'Install'
elif ending_action == 'Uninstall':
self.action_dict[sel_item.id] = 'Uninstall'
elif sel_item.id in self.action_dict:
del self.action_dict[sel_item.id]
self.check_action_dict()
del self.addon_gui
log(self.action_dict)
elif controlID == 7:
self.close()
elif controlID == 6:
# send install and removal list to Update Service
action_list = ['install_' + k if v == 'Install' else 'removal_' + k for k, v in self.action_dict.iteritems()]
action_string = '|=|'.join(action_list)
self.contact_update_service(action_string)
self.close()
@clog(logger=log)
def contact_update_service(self, action_string):
address = '/var/tmp/osmc.settings.update.sockfile'
message = ('action_list', {'action': action_string})
message = json.dumps(message)
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
sock.connect(address)
sock.sendall(message)
sock.close()
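
# The socket payload built above is a JSON-encoded tuple; illustratively (addon ids assumed):
#   ["action_list", {"action": "install_some.addon.id|=|removal_other.addon.id"}]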
class addon_info_gui(xbmcgui.WindowXMLDialog):
'''
Controls
==============================
50001 Shortdesc
50002 Longdesc
50003 Version
50004 Maintainer
50005 LastUpdated
50006 Icon
50007 Name
'''
def __init__(self, strXMLname, strFallbackPath, strDefaultName, sel_item):
self.action = False
self.sel_item = sel_item
def onInit(self):
self.getControl(50001).setLabel(self.sel_item.shortdesc)
self.getControl(50002).setText(self.sel_item.longdesc)
self.getControl(50003).setLabel(self.sel_item.version)
self.getControl(50004).setLabel(self.sel_item.maintainedby)
self.getControl(50005).setLabel(self.sel_item.lastupdated)
self.getControl(50006).setImage(self.sel_item.current_icon, True)
self.getControl(50007).setLabel(self.sel_item.name)
if self.sel_item.installed:
self.getControl(6).setLabel(lang(32004))
else:
self.getControl(6).setLabel(lang(32003))
def onClick(self, controlID):
if controlID == 6:
lbl = self.getControl(6).getLabel()
if lbl == lang(32003):
self.action = 'Install'
else:
self.action = 'Uninstall'
self.close()
elif controlID == 7:
self.close()
| gpl-2.0 | -1,487,881,716,022,227,200 | -8,977,491,524,127,452,000 | 19.144033 | 131 | 0.68621 | false |
Dhivyap/ansible | lib/ansible/modules/cloud/google/gcp_filestore_instance_info.py | 3 | 7713 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2017 Google
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# ----------------------------------------------------------------------------
#
# *** AUTO GENERATED CODE *** AUTO GENERATED CODE ***
#
# ----------------------------------------------------------------------------
#
# This file is automatically generated by Magic Modules and manual
# changes will be clobbered when the file is regenerated.
#
# Please read more about how to change this file at
# https://www.github.com/GoogleCloudPlatform/magic-modules
#
# ----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function
__metaclass__ = type
################################################################################
# Documentation
################################################################################
ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ["preview"], 'supported_by': 'community'}
DOCUMENTATION = '''
---
module: gcp_filestore_instance_info
description:
- Gather info for GCP Instance
short_description: Gather info for GCP Instance
version_added: '2.9'
author: Google Inc. (@googlecloudplatform)
requirements:
- python >= 2.6
- requests >= 2.18.4
- google-auth >= 1.3.0
options:
zone:
description:
- The name of the Filestore zone of the instance.
required: true
type: str
project:
description:
- The Google Cloud Platform project to use.
type: str
auth_kind:
description:
- The type of credential used.
type: str
required: true
choices:
- application
- machineaccount
- serviceaccount
service_account_contents:
description:
- The contents of a Service Account JSON file, either in a dictionary or as a
JSON string that represents it.
type: jsonarg
service_account_file:
description:
- The path of a Service Account JSON file if serviceaccount is selected as type.
type: path
service_account_email:
description:
- An optional service account email address if machineaccount is selected and
the user does not wish to use the default email.
type: str
scopes:
description:
- Array of scopes to be used
type: list
env_type:
description:
- Specifies which Ansible environment you're running this module within.
- This should not be set unless you know what you're doing.
- This only alters the User Agent string for any API requests.
type: str
notes:
- For authentication, you can set service_account_file using the C(GCP_SERVICE_ACCOUNT_FILE)
  env variable.
- For authentication, you can set service_account_contents using the C(GCP_SERVICE_ACCOUNT_CONTENTS)
  env variable.
- For authentication, you can set service_account_email using the C(GCP_SERVICE_ACCOUNT_EMAIL)
env variable.
- For authentication, you can set auth_kind using the C(GCP_AUTH_KIND) env variable.
- For authentication, you can set scopes using the C(GCP_SCOPES) env variable.
- Environment variables values will only be used if the playbook values are not set.
- The I(service_account_email) and I(service_account_file) options are mutually exclusive.
'''
EXAMPLES = '''
- name: get info on an instance
gcp_filestore_instance_info:
zone: us-central1-b
project: test_project
auth_kind: serviceaccount
service_account_file: "/tmp/auth.pem"
'''
RETURN = '''
resources:
description: List of resources
returned: always
type: complex
contains:
name:
description:
- The resource name of the instance.
returned: success
type: str
description:
description:
- A description of the instance.
returned: success
type: str
createTime:
description:
- Creation timestamp in RFC3339 text format.
returned: success
type: str
tier:
description:
- The service tier of the instance.
returned: success
type: str
labels:
description:
- Resource labels to represent user-provided metadata.
returned: success
type: dict
fileShares:
description:
- File system shares on the instance. For this version, only a single file share
is supported.
returned: success
type: complex
contains:
name:
description:
- The name of the fileshare (16 characters or less) .
returned: success
type: str
capacityGb:
description:
- File share capacity in GiB. This must be at least 1024 GiB for the standard
tier, or 2560 GiB for the premium tier.
returned: success
type: int
networks:
description:
- VPC networks to which the instance is connected. For this version, only a
single network is supported.
returned: success
type: complex
contains:
network:
description:
- The name of the GCE VPC network to which the instance is connected.
returned: success
type: str
modes:
description:
- IP versions for which the instance has IP addresses assigned.
returned: success
type: list
reservedIpRange:
description:
- A /29 CIDR block that identifies the range of IP addresses reserved for
this instance.
returned: success
type: str
ipAddresses:
description:
- A list of IPv4 or IPv6 addresses.
returned: success
type: list
etag:
description:
- Server-specified ETag for the instance resource to prevent simultaneous updates
from overwriting each other.
returned: success
type: str
zone:
description:
- The name of the Filestore zone of the instance.
returned: success
type: str
'''
################################################################################
# Imports
################################################################################
from ansible.module_utils.gcp_utils import navigate_hash, GcpSession, GcpModule, GcpRequest
import json
################################################################################
# Main
################################################################################
def main():
module = GcpModule(argument_spec=dict(zone=dict(required=True, type='str')))
if not module.params['scopes']:
module.params['scopes'] = ['https://www.googleapis.com/auth/cloud-platform']
return_value = {'resources': fetch_list(module, collection(module))}
module.exit_json(**return_value)
def collection(module):
return "https://file.googleapis.com/v1/projects/{project}/locations/{zone}/instances".format(**module.params)
def fetch_list(module, link):
auth = GcpSession(module, 'filestore')
return auth.list(link, return_if_object, array_name='instances')
def return_if_object(module, response):
# If not found, return nothing.
if response.status_code == 404:
return None
# If no content, return nothing.
if response.status_code == 204:
return None
try:
module.raise_for_status(response)
result = response.json()
except getattr(json.decoder, 'JSONDecodeError', ValueError) as inst:
module.fail_json(msg="Invalid JSON response with error: %s" % inst)
if navigate_hash(result, ['error', 'errors']):
module.fail_json(msg=navigate_hash(result, ['error', 'errors']))
return result
if __name__ == "__main__":
main()
| gpl-3.0 | 6,129,733,651,159,034,000 | 8,122,272,490,353,736,000 | 29.975904 | 113 | 0.598081 | false |
scenarios/tensorflow | tensorflow/python/kernel_tests/ctc_loss_op_test.py | 10 | 10795 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.ctc_ops.ctc_decoder_ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.ops import ctc_ops
from tensorflow.python.ops import gradients_impl
from tensorflow.python.platform import test
def SimpleSparseTensorFrom(x):
"""Create a very simple SparseTensor with dimensions (batch, time).
Args:
x: a list of lists of type int
Returns:
    a SparseTensor<2> built from x_ix and x_val, the indices and values derived from x.
"""
x_ix = []
x_val = []
for batch_i, batch in enumerate(x):
for time, val in enumerate(batch):
x_ix.append([batch_i, time])
x_val.append(val)
x_shape = [len(x), np.asarray(x_ix).max(0)[1] + 1]
x_ix = constant_op.constant(x_ix, dtypes.int64)
x_val = constant_op.constant(x_val, dtypes.int32)
x_shape = constant_op.constant(x_shape, dtypes.int64)
return sparse_tensor.SparseTensor(x_ix, x_val, x_shape)
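
# Worked example (illustrative, not part of the original test data): for
# x = [[0, 1], [2]] the helper yields indices [[0, 0], [0, 1], [1, 0]],
# values [0, 1, 2] and dense shape [2, 2], i.e. two label sequences padded
# to the longest sequence in the batch.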
class CTCLossTest(test.TestCase):
def _testCTCLoss(self,
inputs,
seq_lens,
labels,
loss_truth,
grad_truth,
expected_err_re=None):
self.assertEquals(len(inputs), len(grad_truth))
inputs_t = constant_op.constant(inputs)
with self.test_session(use_gpu=False) as sess:
loss = ctc_ops.ctc_loss(
inputs=inputs_t, labels=labels, sequence_length=seq_lens)
grad = gradients_impl.gradients(loss, [inputs_t])[0]
self.assertShapeEqual(loss_truth, loss)
self.assertShapeEqual(grad_truth, grad)
if expected_err_re is None:
(tf_loss, tf_grad) = sess.run([loss, grad])
self.assertAllClose(tf_loss, loss_truth, atol=1e-6)
self.assertAllClose(tf_grad, grad_truth, atol=1e-6)
else:
with self.assertRaisesOpError(expected_err_re):
sess.run([loss, grad])
def testBasic(self):
"""Test two batch entries."""
# Input and ground truth from Alex Graves' implementation.
#
#### Batch entry 0 #####
# targets: 0 1 2 1 0
# outputs:
# 0 0.633766 0.221185 0.0917319 0.0129757 0.0142857 0.0260553
# 1 0.111121 0.588392 0.278779 0.0055756 0.00569609 0.010436
# 2 0.0357786 0.633813 0.321418 0.00249248 0.00272882 0.0037688
# 3 0.0663296 0.643849 0.280111 0.00283995 0.0035545 0.00331533
# 4 0.458235 0.396634 0.123377 0.00648837 0.00903441 0.00623107
# alpha:
# 0 -3.64753 -0.456075 -inf -inf -inf -inf -inf -inf -inf -inf -inf
# 1 -inf -inf -inf -0.986437 -inf -inf -inf -inf -inf -inf -inf
# 2 -inf -inf -inf -inf -inf -2.12145 -inf -inf -inf -inf -inf
# 3 -inf -inf -inf -inf -inf -inf -inf -2.56174 -inf -inf -inf
# 4 -inf -inf -inf -inf -inf -inf -inf -inf -inf -3.34211 -inf
# beta:
# 0 -inf -2.88604 -inf -inf -inf -inf -inf -inf -inf -inf -inf
# 1 -inf -inf -inf -2.35568 -inf -inf -inf -inf -inf -inf -inf
# 2 -inf -inf -inf -inf -inf -1.22066 -inf -inf -inf -inf -inf
# 3 -inf -inf -inf -inf -inf -inf -inf -0.780373 -inf -inf -inf
# 4 -inf -inf -inf -inf -inf -inf -inf -inf -inf 0 0
# prob: -3.34211
# outputDerivs:
# 0 -0.366234 0.221185 0.0917319 0.0129757 0.0142857 0.0260553
# 1 0.111121 -0.411608 0.278779 0.0055756 0.00569609 0.010436
# 2 0.0357786 0.633813 -0.678582 0.00249248 0.00272882 0.0037688
# 3 0.0663296 -0.356151 0.280111 0.00283995 0.0035545 0.00331533
# 4 -0.541765 0.396634 0.123377 0.00648837 0.00903441 0.00623107
#
#### Batch entry 1 #####
#
# targets: 0 1 1 0
# outputs:
# 0 0.30176 0.28562 0.0831517 0.0862751 0.0816851 0.161508
# 1 0.24082 0.397533 0.0557226 0.0546814 0.0557528 0.19549
# 2 0.230246 0.450868 0.0389607 0.038309 0.0391602 0.202456
# 3 0.280884 0.429522 0.0326593 0.0339046 0.0326856 0.190345
# 4 0.423286 0.315517 0.0338439 0.0393744 0.0339315 0.154046
# alpha:
# 0 -1.8232 -1.19812 -inf -inf -inf -inf -inf -inf -inf
# 1 -inf -2.19315 -2.83037 -2.1206 -inf -inf -inf -inf -inf
# 2 -inf -inf -inf -2.03268 -3.71783 -inf -inf -inf -inf
# 3 -inf -inf -inf -inf -inf -4.56292 -inf -inf -inf
# 4 -inf -inf -inf -inf -inf -inf -inf -5.42262 -inf
# beta:
# 0 -inf -4.2245 -inf -inf -inf -inf -inf -inf -inf
# 1 -inf -inf -inf -3.30202 -inf -inf -inf -inf -inf
# 2 -inf -inf -inf -inf -1.70479 -0.856738 -inf -inf -inf
# 3 -inf -inf -inf -inf -inf -0.859706 -0.859706 -0.549337 -inf
# 4 -inf -inf -inf -inf -inf -inf -inf 0 0
# prob: -5.42262
# outputDerivs:
# 0 -0.69824 0.28562 0.0831517 0.0862751 0.0816851 0.161508
# 1 0.24082 -0.602467 0.0557226 0.0546814 0.0557528 0.19549
# 2 0.230246 0.450868 0.0389607 0.038309 0.0391602 -0.797544
# 3 0.280884 -0.570478 0.0326593 0.0339046 0.0326856 0.190345
# 4 -0.576714 0.315517 0.0338439 0.0393744 0.0339315 0.154046
# max_time_steps == 7
depth = 6
# seq_len_0 == 5
targets_0 = [0, 1, 2, 1, 0]
loss_log_prob_0 = -3.34211
# dimensions are time x depth
input_prob_matrix_0 = np.asarray(
[[0.633766, 0.221185, 0.0917319, 0.0129757, 0.0142857, 0.0260553],
[0.111121, 0.588392, 0.278779, 0.0055756, 0.00569609, 0.010436],
[0.0357786, 0.633813, 0.321418, 0.00249248, 0.00272882, 0.0037688],
[0.0663296, 0.643849, 0.280111, 0.00283995, 0.0035545, 0.00331533],
[0.458235, 0.396634, 0.123377, 0.00648837, 0.00903441, 0.00623107]],
dtype=np.float32)
input_log_prob_matrix_0 = np.log(input_prob_matrix_0)
gradient_log_prob_0 = np.asarray(
[[-0.366234, 0.221185, 0.0917319, 0.0129757, 0.0142857, 0.0260553],
[0.111121, -0.411608, 0.278779, 0.0055756, 0.00569609, 0.010436],
[0.0357786, 0.633813, -0.678582, 0.00249248, 0.00272882, 0.0037688],
[0.0663296, -0.356151, 0.280111, 0.00283995, 0.0035545, 0.00331533],
[-0.541765, 0.396634, 0.123377, 0.00648837, 0.00903441, 0.00623107]],
dtype=np.float32)
# seq_len_1 == 5
targets_1 = [0, 1, 1, 0]
loss_log_prob_1 = -5.42262
# dimensions are time x depth
input_prob_matrix_1 = np.asarray(
[[0.30176, 0.28562, 0.0831517, 0.0862751, 0.0816851, 0.161508],
[0.24082, 0.397533, 0.0557226, 0.0546814, 0.0557528, 0.19549],
[0.230246, 0.450868, 0.0389607, 0.038309, 0.0391602, 0.202456],
[0.280884, 0.429522, 0.0326593, 0.0339046, 0.0326856, 0.190345],
[0.423286, 0.315517, 0.0338439, 0.0393744, 0.0339315, 0.154046]],
dtype=np.float32)
input_log_prob_matrix_1 = np.log(input_prob_matrix_1)
gradient_log_prob_1 = np.asarray(
[[-0.69824, 0.28562, 0.0831517, 0.0862751, 0.0816851, 0.161508],
[0.24082, -0.602467, 0.0557226, 0.0546814, 0.0557528, 0.19549],
[0.230246, 0.450868, 0.0389607, 0.038309, 0.0391602, -0.797544],
[0.280884, -0.570478, 0.0326593, 0.0339046, 0.0326856, 0.190345],
[-0.576714, 0.315517, 0.0338439, 0.0393744, 0.0339315, 0.154046]],
dtype=np.float32)
# len max_time_steps array of 2 x depth matrices
inputs = [
np.vstack(
[input_log_prob_matrix_0[t, :], input_log_prob_matrix_1[t, :]])
for t in range(5)
] + 2 * [np.nan * np.ones((2, depth), np.float32)]
# convert inputs into [max_time x batch_size x depth tensor] Tensor
inputs = np.asarray(inputs, dtype=np.float32)
# len batch_size array of label vectors
labels = SimpleSparseTensorFrom([targets_0, targets_1])
# batch_size length vector of sequence_lengths
seq_lens = np.array([5, 5], dtype=np.int32)
# output: batch_size length vector of negative log probabilities
loss_truth = np.array([-loss_log_prob_0, -loss_log_prob_1], np.float32)
# output: len max_time_steps array of 2 x depth matrices
grad_truth = [
np.vstack([gradient_log_prob_0[t, :], gradient_log_prob_1[t, :]])
for t in range(5)
] + 2 * [np.zeros((2, depth), np.float32)]
# convert grad_truth into [max_time x batch_size x depth] Tensor
grad_truth = np.asarray(grad_truth, dtype=np.float32)
self._testCTCLoss(inputs, seq_lens, labels, loss_truth, grad_truth)
def test_time_major(self):
"""Testing time_major param.
testing if transposing and setting time_major=False will result in the same
loss
"""
# [max_time x batch_size x depth tensor]
inputs = np.random.randn(2, 2, 3).astype(np.float32)
labels = SimpleSparseTensorFrom([[0, 1], [1, 0]])
seq_lens = np.array([2, 2], dtype=np.int32)
inputs_t = constant_op.constant(inputs)
# Transposing tensor to [batch_size x max_time x depth tensor]
inputs_t_transposed = constant_op.constant(inputs.transpose(1, 0, 2))
with self.test_session(use_gpu=False) as sess:
loss = ctc_ops.ctc_loss(
inputs=inputs_t, labels=labels, sequence_length=seq_lens)
loss_transposed = ctc_ops.ctc_loss(
inputs=inputs_t_transposed,
labels=labels,
sequence_length=seq_lens,
time_major=False)
(tf_loss, tf_loss_transposed) = sess.run([loss, loss_transposed])
self.assertAllEqual(tf_loss, tf_loss_transposed)
def testInvalidSecondGradient(self):
inputs = np.random.randn(2, 2, 3).astype(np.float32)
inputs_t = constant_op.constant(inputs)
labels = SimpleSparseTensorFrom([[0, 1], [1, 0]])
seq_lens = np.array([2, 2], dtype=np.int32)
v = [1.0]
with self.test_session(use_gpu=False):
loss = ctc_ops.ctc_loss(
inputs=inputs_t, labels=labels, sequence_length=seq_lens)
      # Taking the second gradient should fail, since it is not
# yet supported.
with self.assertRaisesRegexp(LookupError,
".*No gradient defined.*PreventGradient.*"):
_ = gradients_impl._hessian_vector_product(loss, [inputs_t], v)
if __name__ == "__main__":
test.main()
| apache-2.0 | -8,136,799,086,759,572,000 | 6,462,425,386,138,319,000 | 39.735849 | 80 | 0.628069 | false |
mateuszmalinowski/visual_turing_test-tutorial | kraino/utils/print_metrics.py | 1 | 5116 | #!/usr/bin/env python
from __future__ import print_function
"""
Selects and prints metrics.
Author: Mateusz Malinowski
Email: mmalinow@mpi-inf.mpg.de
"""
import os
from uuid import uuid4
from compute_wups import get_metric_score as wups_score
from compute_wups import get_class_metric_score as class_wups_score
from data_provider import vqa_save_results as vqa_store
from vqaEvaluation.vqaClassNormalizedEval import VQAClassNormalizedEval as VQAEval
def average_over_dictionary(mydict):
"""
Average over dictionary values.
"""
ave = sum([x for x in mydict.values()])/len(mydict)
return ave
def show_wups(gt_list, pred_list, verbose, extra_vars):
"""
In:
gt_list - ground truth list
pred_list - list of predictions
verbose - if greater than 0 the metric measures are printed out
extra_vars - not used here
Out:
list of key, value pairs (dict) such that
'value' denotes the performance number
and 'name' denotes the name of the metric
"""
acc = wups_score(gt_list, pred_list, -1) * 100.0
wups_at_09 = wups_score(gt_list, pred_list, 0.9) * 100.0
#wups_at_0 = wups_score(gt_list, pred_list, 0.0) * 100.0
wups_at_0 = -1.0
per_class_acc_tmp = class_wups_score(gt_list, pred_list, -1)
#per_class_wups_at_09_tmp = class_wups_score(gt_list, pred_list, 0.9)
per_class_wups_at_09_tmp = None
per_class_acc = {k:v*100.0 for k,v in per_class_acc_tmp.items()}
if per_class_wups_at_09_tmp is not None:
per_class_wups_at_09 = {k:v*100.0 for k,v in per_class_wups_at_09_tmp.items()}
else:
per_class_wups_at_09 = None
class_acc = average_over_dictionary(per_class_acc_tmp)*100.0
if per_class_wups_at_09_tmp is not None:
class_wups_at_09 = average_over_dictionary(per_class_wups_at_09_tmp)*100.0
else:
class_wups_at_09 = -1.0
class_wups_at_0 = -1.0
if verbose > 0:
print('METRIC: Accuracy is {0}, wups at 0.9 is {1}, wups at 0.0 is {2}'.format(
acc, wups_at_09, wups_at_0))
print('CLASS METRIC: Accuracy is {0}, wups at 0.9 is {1}, wups at 0.0 is {2}'.format(
class_acc, class_wups_at_09, class_wups_at_0))
return [{'value':acc, 'name':'accuracy'},
{'value':wups_at_09, 'name':'wups at 0.9'},
{'value':wups_at_0, 'name':'wups at 0.0'},
{'value':per_class_acc, 'name':'per class accuracy',
'idiosyncrasy':'long:muted'},
{'value':per_class_wups_at_09, 'name':'per class wups at 0.9',
'idiosyncrasy':'long:muted'},
{'value':class_acc, 'name':'class accuracy'},
{'value':class_wups_at_09, 'name':'class wups at 0.9'},
{'value':class_wups_at_0, 'name':'class wups at 0'},]
def show_vqa(gt_list, pred_list, verbose, extra_vars):
#question_id, vqa_object,
#dataset_root=None):
"""
In:
gt_list - ground truth list
pred_list - list of predictions
verbose - if greater than 0 the metric measures are printed out
extra_vars - extra variables, here are:
extra_vars['vqa'] - the vqa object
extra_vars['resfun'] - function from the results file to the vqa object
extra_vars['question_id'] - list of the question ids
Out:
list of key, value pairs (dict) such that
'value' denotes the performance number
and 'name' denotes the name of the metric
"""
# TODO: quite hacky way of creating and next reading the file
if verbose > 0:
print('dumping json file ...')
vqa_object = extra_vars['vqa_object']
results_path = '/tmp/vqa_metric_{0}.json'.format(uuid4())
#print(results_path)
vqa_store(extra_vars['question_id'], pred_list, results_path)
vqa_res = extra_vars['resfun'](results_path)
os.remove(results_path)
if verbose > 0:
print('dumping finished')
###
vqaEval = VQAEval(vqa_object, vqa_res, n=2)
vqaEval.evaluate()
acc_overall = vqaEval.accuracy['overall']
acc_yes_no = vqaEval.accuracy['perAnswerType']['yes/no']
acc_number = vqaEval.accuracy['perAnswerType']['number']
acc_other = vqaEval.accuracy['perAnswerType']['other']
acc_per_class = vqaEval.accuracy['perAnswerClass']
acc_class_normalized = vqaEval.accuracy['classNormalizedOverall']
if verbose > 0:
print('METRIC: Accuracy yes/no is {0}, other is {1}, number is {2}, overall is {3}, class normalized is {4}'.\
format(acc_yes_no, acc_other, acc_number, acc_overall, acc_class_normalized))
return [{'value':acc_overall, 'name':'overall accuracy'},
{'value':acc_yes_no, 'name':'yes/no accuracy'},
{'value':acc_number, 'name':'number accuracy'},
{'value':acc_other, 'name':'other accuracy'},
{'value':acc_class_normalized, 'name':'class accuracy'},
{'value':acc_per_class, 'name':'per answer class',
'idiosyncrasy':'long:muted'},]
select = {
'wups' : show_wups,
'vqa' : show_vqa
}
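
# Minimal dispatch sketch (caller-side names are assumptions, not defined in this module):
#   metrics = select['wups'](gt_answers, predicted_answers, verbose=1, extra_vars={})
#   for m in metrics:
#       print(m['name'], m['value'])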
| mit | 8,291,410,590,370,975,000 | 3,135,400,622,388,293,000 | 38.053435 | 118 | 0.611024 | false |
manish211/coveralls-python | tests/test_cli.py | 1 | 2270 | # coding: utf-8
import os
from mock import patch, call
import pytest
import coveralls
from coveralls.api import CoverallsException
import coveralls.cli
@patch.dict(os.environ, {'TRAVIS': 'True'}, clear=True)
@patch.object(coveralls.cli.log, 'info')
@patch.object(coveralls.Coveralls, 'wear')
def test_debug(mock_wear, mock_log):
coveralls.cli.main(argv=['debug'])
mock_wear.assert_called_with(dry_run=True)
mock_log.assert_has_calls([call("Testing coveralls-python...")])
@patch.object(coveralls.cli.log, 'info')
@patch.object(coveralls.Coveralls, 'wear')
def test_debug_no_token(mock_wear, mock_log):
coveralls.cli.main(argv=['debug'])
mock_wear.assert_called_with(dry_run=True)
mock_log.assert_has_calls([call("Testing coveralls-python...")])
@patch.object(coveralls.cli.log, 'info')
@patch.object(coveralls.Coveralls, 'wear')
@patch.dict(os.environ, {'TRAVIS': 'True'}, clear=True)
def test_real(mock_wear, mock_log):
coveralls.cli.main(argv=[])
mock_wear.assert_called_with()
mock_log.assert_has_calls([call("Submitting coverage to coveralls.io..."), call("Coverage submitted!")])
@patch.dict(os.environ, {'TRAVIS': 'True'}, clear=True)
@patch('coveralls.cli.Coveralls')
def test_rcfile(mock_coveralls):
coveralls.cli.main(argv=['--rcfile=coveragerc'])
mock_coveralls.assert_called_with(True, config_file='coveragerc')
exc = CoverallsException('bad stuff happened')
@patch.object(coveralls.cli.log, 'error')
@patch.object(coveralls.Coveralls, 'wear', side_effect=exc)
@patch.dict(os.environ, {'TRAVIS': 'True'}, clear=True)
def test_exception(mock_coveralls, mock_log):
coveralls.cli.main(argv=[])
mock_log.assert_has_calls([call(exc)])
@patch.object(coveralls.Coveralls, 'save_report')
@patch.dict(os.environ, {'TRAVIS': 'True'}, clear=True)
def test_save_report_to_file(mock_coveralls):
"""Check save_report api usage."""
coveralls.cli.main(argv=['--output=test.log'])
mock_coveralls.assert_called_with('test.log')
@patch.object(coveralls.Coveralls, 'save_report')
def test_save_report_to_file_no_token(mock_coveralls):
"""Check save_report api usage when token is not set."""
coveralls.cli.main(argv=['--output=test.log'])
mock_coveralls.assert_called_with('test.log')
| mit | -7,548,604,542,395,265,000 | -7,529,041,414,371,514,000 | 32.382353 | 108 | 0.712335 | false |
kaedroho/django | tests/i18n/test_compilation.py | 12 | 9956 | import gettext as gettext_module
import os
import stat
import unittest
from io import StringIO
from pathlib import Path
from subprocess import run
from unittest import mock
from django.core.management import (
CommandError, call_command, execute_from_command_line,
)
from django.core.management.commands.makemessages import (
Command as MakeMessagesCommand,
)
from django.core.management.utils import find_command
from django.test import SimpleTestCase, override_settings
from django.test.utils import captured_stderr, captured_stdout
from django.utils import translation
from django.utils.translation import gettext
from .utils import RunInTmpDirMixin, copytree
has_msgfmt = find_command('msgfmt')
@unittest.skipUnless(has_msgfmt, 'msgfmt is mandatory for compilation tests')
class MessageCompilationTests(RunInTmpDirMixin, SimpleTestCase):
work_subdir = 'commands'
class PoFileTests(MessageCompilationTests):
LOCALE = 'es_AR'
MO_FILE = 'locale/%s/LC_MESSAGES/django.mo' % LOCALE
def test_bom_rejection(self):
stderr = StringIO()
with self.assertRaisesMessage(CommandError, 'compilemessages generated one or more errors.'):
call_command('compilemessages', locale=[self.LOCALE], stdout=StringIO(), stderr=stderr)
self.assertIn('file has a BOM (Byte Order Mark)', stderr.getvalue())
self.assertFalse(os.path.exists(self.MO_FILE))
def test_no_write_access(self):
mo_file_en = 'locale/en/LC_MESSAGES/django.mo'
err_buffer = StringIO()
# put file in read-only mode
old_mode = os.stat(mo_file_en).st_mode
os.chmod(mo_file_en, stat.S_IREAD)
try:
with self.assertRaisesMessage(CommandError, 'compilemessages generated one or more errors.'):
call_command('compilemessages', locale=['en'], stderr=err_buffer, verbosity=0)
self.assertIn('not writable location', err_buffer.getvalue())
finally:
os.chmod(mo_file_en, old_mode)
class PoFileContentsTests(MessageCompilationTests):
# Ticket #11240
LOCALE = 'fr'
MO_FILE = 'locale/%s/LC_MESSAGES/django.mo' % LOCALE
def test_percent_symbol_in_po_file(self):
call_command('compilemessages', locale=[self.LOCALE], stdout=StringIO())
self.assertTrue(os.path.exists(self.MO_FILE))
class MultipleLocaleCompilationTests(MessageCompilationTests):
MO_FILE_HR = None
MO_FILE_FR = None
def setUp(self):
super().setUp()
localedir = os.path.join(self.test_dir, 'locale')
self.MO_FILE_HR = os.path.join(localedir, 'hr/LC_MESSAGES/django.mo')
self.MO_FILE_FR = os.path.join(localedir, 'fr/LC_MESSAGES/django.mo')
def test_one_locale(self):
with override_settings(LOCALE_PATHS=[os.path.join(self.test_dir, 'locale')]):
call_command('compilemessages', locale=['hr'], stdout=StringIO())
self.assertTrue(os.path.exists(self.MO_FILE_HR))
def test_multiple_locales(self):
with override_settings(LOCALE_PATHS=[os.path.join(self.test_dir, 'locale')]):
call_command('compilemessages', locale=['hr', 'fr'], stdout=StringIO())
self.assertTrue(os.path.exists(self.MO_FILE_HR))
self.assertTrue(os.path.exists(self.MO_FILE_FR))
class ExcludedLocaleCompilationTests(MessageCompilationTests):
work_subdir = 'exclude'
MO_FILE = 'locale/%s/LC_MESSAGES/django.mo'
def setUp(self):
super().setUp()
copytree('canned_locale', 'locale')
def test_command_help(self):
with captured_stdout(), captured_stderr():
# `call_command` bypasses the parser; by calling
# `execute_from_command_line` with the help subcommand we
# ensure that there are no issues with the parser itself.
execute_from_command_line(['django-admin', 'help', 'compilemessages'])
def test_one_locale_excluded(self):
call_command('compilemessages', exclude=['it'], stdout=StringIO())
self.assertTrue(os.path.exists(self.MO_FILE % 'en'))
self.assertTrue(os.path.exists(self.MO_FILE % 'fr'))
self.assertFalse(os.path.exists(self.MO_FILE % 'it'))
def test_multiple_locales_excluded(self):
call_command('compilemessages', exclude=['it', 'fr'], stdout=StringIO())
self.assertTrue(os.path.exists(self.MO_FILE % 'en'))
self.assertFalse(os.path.exists(self.MO_FILE % 'fr'))
self.assertFalse(os.path.exists(self.MO_FILE % 'it'))
def test_one_locale_excluded_with_locale(self):
call_command('compilemessages', locale=['en', 'fr'], exclude=['fr'], stdout=StringIO())
self.assertTrue(os.path.exists(self.MO_FILE % 'en'))
self.assertFalse(os.path.exists(self.MO_FILE % 'fr'))
self.assertFalse(os.path.exists(self.MO_FILE % 'it'))
def test_multiple_locales_excluded_with_locale(self):
call_command('compilemessages', locale=['en', 'fr', 'it'], exclude=['fr', 'it'],
stdout=StringIO())
self.assertTrue(os.path.exists(self.MO_FILE % 'en'))
self.assertFalse(os.path.exists(self.MO_FILE % 'fr'))
self.assertFalse(os.path.exists(self.MO_FILE % 'it'))
class IgnoreDirectoryCompilationTests(MessageCompilationTests):
# Reuse the exclude directory since it contains some locale fixtures.
work_subdir = 'exclude'
MO_FILE = '%s/%s/LC_MESSAGES/django.mo'
CACHE_DIR = Path('cache') / 'locale'
NESTED_DIR = Path('outdated') / 'v1' / 'locale'
def setUp(self):
super().setUp()
copytree('canned_locale', 'locale')
copytree('canned_locale', self.CACHE_DIR)
copytree('canned_locale', self.NESTED_DIR)
def assertAllExist(self, dir, langs):
self.assertTrue(all(Path(self.MO_FILE % (dir, lang)).exists() for lang in langs))
def assertNoneExist(self, dir, langs):
self.assertTrue(all(Path(self.MO_FILE % (dir, lang)).exists() is False for lang in langs))
def test_one_locale_dir_ignored(self):
call_command('compilemessages', ignore=['cache'], verbosity=0)
self.assertAllExist('locale', ['en', 'fr', 'it'])
self.assertNoneExist(self.CACHE_DIR, ['en', 'fr', 'it'])
self.assertAllExist(self.NESTED_DIR, ['en', 'fr', 'it'])
def test_multiple_locale_dirs_ignored(self):
call_command('compilemessages', ignore=['cache/locale', 'outdated'], verbosity=0)
self.assertAllExist('locale', ['en', 'fr', 'it'])
self.assertNoneExist(self.CACHE_DIR, ['en', 'fr', 'it'])
self.assertNoneExist(self.NESTED_DIR, ['en', 'fr', 'it'])
def test_ignores_based_on_pattern(self):
call_command('compilemessages', ignore=['*/locale'], verbosity=0)
self.assertAllExist('locale', ['en', 'fr', 'it'])
self.assertNoneExist(self.CACHE_DIR, ['en', 'fr', 'it'])
self.assertNoneExist(self.NESTED_DIR, ['en', 'fr', 'it'])
class CompilationErrorHandling(MessageCompilationTests):
def test_error_reported_by_msgfmt(self):
# po file contains wrong po formatting.
with self.assertRaises(CommandError):
call_command('compilemessages', locale=['ja'], verbosity=0, stderr=StringIO())
def test_msgfmt_error_including_non_ascii(self):
# po file contains invalid msgstr content (triggers non-ascii error content).
# Make sure the output of msgfmt is unaffected by the current locale.
env = os.environ.copy()
env.update({'LANG': 'C'})
with mock.patch('django.core.management.utils.run', lambda *args, **kwargs: run(*args, env=env, **kwargs)):
cmd = MakeMessagesCommand()
if cmd.gettext_version < (0, 18, 3):
self.skipTest("python-brace-format is a recent gettext addition.")
stderr = StringIO()
with self.assertRaisesMessage(CommandError, 'compilemessages generated one or more errors'):
call_command('compilemessages', locale=['ko'], stdout=StringIO(), stderr=stderr)
self.assertIn("' cannot start a field name", stderr.getvalue())
class ProjectAndAppTests(MessageCompilationTests):
LOCALE = 'ru'
PROJECT_MO_FILE = 'locale/%s/LC_MESSAGES/django.mo' % LOCALE
APP_MO_FILE = 'app_with_locale/locale/%s/LC_MESSAGES/django.mo' % LOCALE
class FuzzyTranslationTest(ProjectAndAppTests):
def setUp(self):
super().setUp()
gettext_module._translations = {} # flush cache or test will be useless
def test_nofuzzy_compiling(self):
with override_settings(LOCALE_PATHS=[os.path.join(self.test_dir, 'locale')]):
call_command('compilemessages', locale=[self.LOCALE], stdout=StringIO())
with translation.override(self.LOCALE):
self.assertEqual(gettext('Lenin'), 'Ленин')
self.assertEqual(gettext('Vodka'), 'Vodka')
def test_fuzzy_compiling(self):
with override_settings(LOCALE_PATHS=[os.path.join(self.test_dir, 'locale')]):
call_command('compilemessages', locale=[self.LOCALE], fuzzy=True, stdout=StringIO())
with translation.override(self.LOCALE):
self.assertEqual(gettext('Lenin'), 'Ленин')
self.assertEqual(gettext('Vodka'), 'Водка')
class AppCompilationTest(ProjectAndAppTests):
def test_app_locale_compiled(self):
call_command('compilemessages', locale=[self.LOCALE], stdout=StringIO())
self.assertTrue(os.path.exists(self.PROJECT_MO_FILE))
self.assertTrue(os.path.exists(self.APP_MO_FILE))
class PathLibLocaleCompilationTests(MessageCompilationTests):
work_subdir = 'exclude'
def test_locale_paths_pathlib(self):
with override_settings(LOCALE_PATHS=[Path(self.test_dir) / 'canned_locale']):
call_command('compilemessages', locale=['fr'], stdout=StringIO())
self.assertTrue(os.path.exists('canned_locale/fr/LC_MESSAGES/django.mo'))
| bsd-3-clause | -7,249,329,710,139,324,000 | 5,861,699,417,073,245,000 | 40.768908 | 115 | 0.660698 | false |
ambrosef/HLx_Examples | Acceleration/memcached/regressionSims/testgen/long/memtest_regressions_long.py | 2 | 5787 | #!/usr/bin/python
import sys

import memlib
## EDIT HERE ###################################################################
keySizes = [1,3,4,5,7,8,9,10,12,13,15,16,17,24,25,28,84,128]
#valueSizes = [1,3,4,5,8,9,10,12,13,16,17,24,28,184,208,1024]
#valueSizes = [1,3,4,5,8,9,10,12,13,16,17,24,28,184,208,1015]
valueSizes = [184,389, 792, 1015, 1862, 2469, 3285, 4397, 5850, 7932, 10174, 14473, 18043, 25836, 30859, 38623, 46827, 57482, 65520]
seq1repeat = 5
keyChars = map(chr, range(97, 123))
valueChars = map(chr, range(65, 91))
## EDIT FINISHED ###############################################################
DEBUG_SEQUENCES = False
PRINT_SEQUENCES = True
################################################################################
if DEBUG_SEQUENCES:
keySizes = [1,2,3]
valueSizes = [1,2]
def pair2kvpair(pair):
return memlib.kv_pair(pair[0], pair[1], "EFBEADDE", 42)
def seq1(keys, values, repeat):
if PRINT_SEQUENCES:
print "--- SEQUENCE 1 repeat %-3s -----------------------------------------------------" % repeat
kv_pairs = []
for key in keys:
for value in values:
kv_pairs.append( memlib.kv_pair(key, value, "EFBEADDE", 42) )
requests = []
responses = []
for kv_pair in kv_pairs:
if PRINT_SEQUENCES:
print "Set [%d -> %d]: %s -> %s" % (len(kv_pair['key']), len(kv_pair['value']), kv_pair['key'], kv_pair['value'])
requests.append( memlib.binarySetRequest( kv_pair , "00000000" ) )
responses.append( memlib.binarySetResponse( kv_pair, "00000000" ) )
for _ in range(repeat):
if PRINT_SEQUENCES:
print "Get [%d -> %d]: %s -> %s" % (len(kv_pair['key']), len(kv_pair['value']), kv_pair['key'], kv_pair['value'])
requests.append( memlib.binaryGetRequest( kv_pair , "00000000" ) )
responses.append( memlib.binaryGetResponse( kv_pair , "00000000" ) )
return (requests, responses)
def seq2(keys, values):
if PRINT_SEQUENCES:
print "--- SEQUENCE 2 -----------------------------------------------------------------"
requests = []
responses = []
for _ in range(len(values)):
# for more keys than values, duplicate use of values
values_used = values
if len(keys) > len(values):
while(len(keys) > len(values_used)):
values_used = values_used + values
values_used = values_used[0:len(keys)]
# requests
kv_pairs = map(pair2kvpair, zip(keys, values_used))
for kv_pair in kv_pairs:
if PRINT_SEQUENCES:
print "Set [%d -> %d]: %s -> %s" % (len(kv_pair['key']), len(kv_pair['value']), kv_pair['key'], kv_pair['value'])
requests.append( memlib.binarySetRequest(kv_pair, "00000000") )
responses.append( memlib.binarySetResponse(kv_pair, "00000000") )
for kv_pair in kv_pairs:
if PRINT_SEQUENCES:
print "Get [%d -> %d]: %s -> %s" % (len(kv_pair['key']), len(kv_pair['value']), kv_pair['key'], kv_pair['value'])
requests.append( memlib.binaryGetRequest(kv_pair, "00000000") )
responses.append( memlib.binaryGetResponse(kv_pair, "00000000") )
# rotation
values = values[1:] + values[0:1]
return (requests, responses)
################################################################################
if len(keySizes) > len(keyChars):
sys.exit("Error: Not enough key characters.")
if len(valueSizes) > len(valueChars):
sys.exit("Error: Not enough value characters.")
keyPairs = zip(keySizes, keyChars)
valuePairs = zip(valueSizes, valueChars)
keys = map(lambda (size, char): char * size, keyPairs)
values = map(lambda (size, char): char * size, valuePairs)
SEQ1 = seq1(keys, values, seq1repeat)
SEQ2 = seq2(keys, values)
SEQ3 = seq1(keys, values, 1)
# SEQ1
req = open("SEQ1_R12-pkt.in.long.txt", "w")
req.write( memlib.requests12Gbps(SEQ1[0]) )
req.close()
req = open("SEQ1_R1-pkt.in.long.txt", "w")
req.write( memlib.requests1Gbps(SEQ1[0]) )
req.close()
#res = open("SEQ1-pkt.out.long.txt", "w")
#res.write( memlib.responses(SEQ1[1]) )
#res.close()
res = open("SEQ1-pkt.out.long.hls.rtl.txt", "w")
res.write( memlib.responses_rtl_hls(SEQ1[1]) )
res.close()
# SEQ2
req = open("SEQ2_R12-pkt.in.long.txt", "w")
req.write( memlib.requests12Gbps(SEQ2[0]) )
req.close()
req = open("SEQ2_R1-pkt.in.long.txt", "w")
req.write( memlib.requests1Gbps(SEQ2[0]) )
req.close()
#res = open("SEQ2-pkt.out.long.txt", "w")
#res.write( memlib.responses(SEQ2[1]) )
#res.close()
res = open("SEQ2-pkt.out.long.hls.rtl.txt", "w")
res.write( memlib.responses_rtl_hls(SEQ2[1]) )
res.close()
# SEQ3
req = open("SEQ3_R12-pkt.in.long.txt", "w")
req.write( memlib.requests12Gbps(SEQ3[0]) )
req.close()
req = open("SEQ3_R1-pkt.in.long.txt", "w")
req.write( memlib.requests1Gbps(SEQ3[0]) )
req.close()
#res = open("SEQ3-pkt.out.long.txt", "w")
#res.write( memlib.responses(SEQ3[1]) )
#res.close()
res = open("SEQ3-pkt.out.long.hls.rtl.txt", "w")
res.write( memlib.responses_rtl_hls(SEQ3[1]) )
res.close()
####### Same thing for HLS outputs #######
# SEQ1
req = open("SEQ1_R12-pkt.in.long.hls.txt", "w")
req.write( memlib.requests12Gbps_hls(SEQ1[0]) )
req.close()
req = open("SEQ1_R1-pkt.in.long.hls.txt", "w")
req.write( memlib.requests1Gbps_hls(SEQ1[0]) )
req.close()
res = open("SEQ1-pkt.out.long.hls.txt", "w")
res.write( memlib.responses_hls(SEQ1[1]) )
res.close()
# SEQ2
req = open("SEQ2_R12-pkt.in.long.hls.txt", "w")
req.write( memlib.requests12Gbps_hls(SEQ2[0]) )
req.close()
req = open("SEQ2_R1-pkt.in.long.hls.txt", "w")
req.write( memlib.requests1Gbps_hls(SEQ2[0]) )
req.close()
res = open("SEQ2-pkt.out.long.hls.txt", "w")
res.write( memlib.responses_hls(SEQ2[1]) )
res.close()
# SEQ3
req = open("SEQ3_R12-pkt.in.long.hls.txt", "w")
req.write( memlib.requests12Gbps_hls(SEQ3[0]) )
req.close()
req = open("SEQ3_R1-pkt.in.long.hls.txt", "w")
req.write( memlib.requests1Gbps_hls(SEQ3[0]) )
req.close()
res = open("SEQ3-pkt.out.long.hls.txt", "w")
res.write( memlib.responses_hls(SEQ3[1]) )
res.close()
| bsd-3-clause | 5,118,999,203,537,057,000 | 4,363,845,298,306,678,300 | 32.068571 | 132 | 0.614135 | false |
Cinntax/home-assistant | homeassistant/components/channels/media_player.py | 2 | 9547 | """Support for interfacing with an instance of getchannels.com."""
import logging
import voluptuous as vol
from homeassistant.components.media_player import MediaPlayerDevice, PLATFORM_SCHEMA
from homeassistant.components.media_player.const import (
DOMAIN,
MEDIA_TYPE_CHANNEL,
MEDIA_TYPE_EPISODE,
MEDIA_TYPE_MOVIE,
MEDIA_TYPE_TVSHOW,
SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE,
SUPPORT_PLAY,
SUPPORT_PLAY_MEDIA,
SUPPORT_PREVIOUS_TRACK,
SUPPORT_SELECT_SOURCE,
SUPPORT_STOP,
SUPPORT_VOLUME_MUTE,
)
from homeassistant.const import (
ATTR_ENTITY_ID,
CONF_HOST,
CONF_NAME,
CONF_PORT,
STATE_IDLE,
STATE_PAUSED,
STATE_PLAYING,
)
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
DATA_CHANNELS = "channels"
DEFAULT_NAME = "Channels"
DEFAULT_PORT = 57000
FEATURE_SUPPORT = (
SUPPORT_PLAY
| SUPPORT_PAUSE
| SUPPORT_STOP
| SUPPORT_VOLUME_MUTE
| SUPPORT_NEXT_TRACK
| SUPPORT_PREVIOUS_TRACK
| SUPPORT_PLAY_MEDIA
| SUPPORT_SELECT_SOURCE
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
}
)
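
# An illustrative configuration.yaml entry accepted by the schema above (host value assumed):
#
#   media_player:
#     - platform: channels
#       host: 192.168.1.50
#       port: 57000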
SERVICE_SEEK_FORWARD = "channels_seek_forward"
SERVICE_SEEK_BACKWARD = "channels_seek_backward"
SERVICE_SEEK_BY = "channels_seek_by"
# Service call validation schemas
ATTR_SECONDS = "seconds"
CHANNELS_SCHEMA = vol.Schema({vol.Required(ATTR_ENTITY_ID): cv.entity_id})
CHANNELS_SEEK_BY_SCHEMA = CHANNELS_SCHEMA.extend(
{vol.Required(ATTR_SECONDS): vol.Coerce(int)}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Channels platform."""
device = ChannelsPlayer(
config.get(CONF_NAME), config.get(CONF_HOST), config.get(CONF_PORT)
)
if DATA_CHANNELS not in hass.data:
hass.data[DATA_CHANNELS] = []
add_entities([device], True)
hass.data[DATA_CHANNELS].append(device)
def service_handler(service):
"""Handle service."""
entity_id = service.data.get(ATTR_ENTITY_ID)
device = next(
(
device
for device in hass.data[DATA_CHANNELS]
if device.entity_id == entity_id
),
None,
)
if device is None:
_LOGGER.warning("Unable to find Channels with entity_id: %s", entity_id)
return
if service.service == SERVICE_SEEK_FORWARD:
device.seek_forward()
elif service.service == SERVICE_SEEK_BACKWARD:
device.seek_backward()
elif service.service == SERVICE_SEEK_BY:
seconds = service.data.get("seconds")
device.seek_by(seconds)
hass.services.register(
DOMAIN, SERVICE_SEEK_FORWARD, service_handler, schema=CHANNELS_SCHEMA
)
hass.services.register(
DOMAIN, SERVICE_SEEK_BACKWARD, service_handler, schema=CHANNELS_SCHEMA
)
hass.services.register(
DOMAIN, SERVICE_SEEK_BY, service_handler, schema=CHANNELS_SEEK_BY_SCHEMA
)
class ChannelsPlayer(MediaPlayerDevice):
"""Representation of a Channels instance."""
def __init__(self, name, host, port):
"""Initialize the Channels app."""
from pychannels import Channels
self._name = name
self._host = host
self._port = port
self.client = Channels(self._host, self._port)
self.status = None
self.muted = None
self.channel_number = None
self.channel_name = None
self.channel_image_url = None
self.now_playing_title = None
self.now_playing_episode_title = None
self.now_playing_season_number = None
self.now_playing_episode_number = None
self.now_playing_summary = None
self.now_playing_image_url = None
self.favorite_channels = []
def update_favorite_channels(self):
"""Update the favorite channels from the client."""
self.favorite_channels = self.client.favorite_channels()
def update_state(self, state_hash):
"""Update all the state properties with the passed in dictionary."""
self.status = state_hash.get("status", "stopped")
self.muted = state_hash.get("muted", False)
channel_hash = state_hash.get("channel")
np_hash = state_hash.get("now_playing")
if channel_hash:
self.channel_number = channel_hash.get("channel_number")
self.channel_name = channel_hash.get("channel_name")
self.channel_image_url = channel_hash.get("channel_image_url")
else:
self.channel_number = None
self.channel_name = None
self.channel_image_url = None
if np_hash:
self.now_playing_title = np_hash.get("title")
self.now_playing_episode_title = np_hash.get("episode_title")
self.now_playing_season_number = np_hash.get("season_number")
self.now_playing_episode_number = np_hash.get("episode_number")
self.now_playing_summary = np_hash.get("summary")
self.now_playing_image_url = np_hash.get("image_url")
else:
self.now_playing_title = None
self.now_playing_episode_title = None
self.now_playing_season_number = None
self.now_playing_episode_number = None
self.now_playing_summary = None
self.now_playing_image_url = None
@property
def name(self):
"""Return the name of the player."""
return self._name
@property
def state(self):
"""Return the state of the player."""
if self.status == "stopped":
return STATE_IDLE
if self.status == "paused":
return STATE_PAUSED
if self.status == "playing":
return STATE_PLAYING
return None
def update(self):
"""Retrieve latest state."""
self.update_favorite_channels()
self.update_state(self.client.status())
@property
def source_list(self):
"""List of favorite channels."""
sources = [channel["name"] for channel in self.favorite_channels]
return sources
@property
def is_volume_muted(self):
"""Boolean if volume is currently muted."""
return self.muted
@property
def media_content_id(self):
"""Content ID of current playing channel."""
return self.channel_number
@property
def media_content_type(self):
"""Content type of current playing media."""
return MEDIA_TYPE_CHANNEL
@property
def media_image_url(self):
"""Image url of current playing media."""
if self.now_playing_image_url:
return self.now_playing_image_url
if self.channel_image_url:
return self.channel_image_url
return "https://getchannels.com/assets/img/icon-1024.png"
@property
def media_title(self):
"""Title of current playing media."""
if self.state:
return self.now_playing_title
return None
@property
def supported_features(self):
"""Flag of media commands that are supported."""
return FEATURE_SUPPORT
def mute_volume(self, mute):
"""Mute (true) or unmute (false) player."""
if mute != self.muted:
response = self.client.toggle_muted()
self.update_state(response)
def media_stop(self):
"""Send media_stop command to player."""
self.status = "stopped"
response = self.client.stop()
self.update_state(response)
def media_play(self):
"""Send media_play command to player."""
response = self.client.resume()
self.update_state(response)
def media_pause(self):
"""Send media_pause command to player."""
response = self.client.pause()
self.update_state(response)
def media_next_track(self):
"""Seek ahead."""
response = self.client.skip_forward()
self.update_state(response)
def media_previous_track(self):
"""Seek back."""
response = self.client.skip_backward()
self.update_state(response)
def select_source(self, source):
"""Select a channel to tune to."""
for channel in self.favorite_channels:
if channel["name"] == source:
response = self.client.play_channel(channel["number"])
self.update_state(response)
break
def play_media(self, media_type, media_id, **kwargs):
"""Send the play_media command to the player."""
if media_type == MEDIA_TYPE_CHANNEL:
response = self.client.play_channel(media_id)
self.update_state(response)
elif media_type in [MEDIA_TYPE_MOVIE, MEDIA_TYPE_EPISODE, MEDIA_TYPE_TVSHOW]:
response = self.client.play_recording(media_id)
self.update_state(response)
def seek_forward(self):
"""Seek forward in the timeline."""
response = self.client.seek_forward()
self.update_state(response)
def seek_backward(self):
"""Seek backward in the timeline."""
response = self.client.seek_backward()
self.update_state(response)
def seek_by(self, seconds):
"""Seek backward in the timeline."""
response = self.client.seek(seconds)
self.update_state(response)
| apache-2.0 | 1,218,089,610,561,400,000 | 6,494,755,934,473,328,000 | 29.116719 | 85 | 0.614958 | false |
mewtaylor/django | django/contrib/gis/forms/fields.py | 504 | 4316 | from __future__ import unicode_literals
from django import forms
from django.contrib.gis.geos import GEOSException, GEOSGeometry
from django.utils.translation import ugettext_lazy as _
from .widgets import OpenLayersWidget
class GeometryField(forms.Field):
"""
This is the basic form field for a Geometry. Any textual input that is
accepted by GEOSGeometry is accepted by this form. By default,
this includes WKT, HEXEWKB, WKB (in a buffer), and GeoJSON.
"""
widget = OpenLayersWidget
geom_type = 'GEOMETRY'
default_error_messages = {
'required': _('No geometry value provided.'),
'invalid_geom': _('Invalid geometry value.'),
'invalid_geom_type': _('Invalid geometry type.'),
'transform_error': _('An error occurred when transforming the geometry '
'to the SRID of the geometry form field.'),
}
def __init__(self, **kwargs):
# Pop out attributes from the database field, or use sensible
# defaults (e.g., allow None).
self.srid = kwargs.pop('srid', None)
self.geom_type = kwargs.pop('geom_type', self.geom_type)
super(GeometryField, self).__init__(**kwargs)
self.widget.attrs['geom_type'] = self.geom_type
def to_python(self, value):
"""
Transforms the value to a Geometry object.
"""
if value in self.empty_values:
return None
if not isinstance(value, GEOSGeometry):
try:
value = GEOSGeometry(value)
except (GEOSException, ValueError, TypeError):
raise forms.ValidationError(self.error_messages['invalid_geom'], code='invalid_geom')
# Try to set the srid
if not value.srid:
try:
value.srid = self.widget.map_srid
except AttributeError:
if self.srid:
value.srid = self.srid
return value
def clean(self, value):
"""
Validates that the input value can be converted to a Geometry
object (which is returned). A ValidationError is raised if
the value cannot be instantiated as a Geometry.
"""
geom = super(GeometryField, self).clean(value)
if geom is None:
return geom
# Ensuring that the geometry is of the correct type (indicated
# using the OGC string label).
if str(geom.geom_type).upper() != self.geom_type and not self.geom_type == 'GEOMETRY':
raise forms.ValidationError(self.error_messages['invalid_geom_type'], code='invalid_geom_type')
# Transforming the geometry if the SRID was set.
if self.srid and self.srid != -1 and self.srid != geom.srid:
try:
geom.transform(self.srid)
except GEOSException:
raise forms.ValidationError(
self.error_messages['transform_error'], code='transform_error')
return geom
def has_changed(self, initial, data):
""" Compare geographic value of data with its initial value. """
try:
data = self.to_python(data)
initial = self.to_python(initial)
except forms.ValidationError:
return True
# Only do a geographic comparison if both values are available
if initial and data:
data.transform(initial.srid)
# If the initial value was not added by the browser, the geometry
# provided may be slightly different, the first time it is saved.
# The comparison is done with a very low tolerance.
return not initial.equals_exact(data, tolerance=0.000001)
else:
# Check for change of state of existence
return bool(initial) != bool(data)
class GeometryCollectionField(GeometryField):
geom_type = 'GEOMETRYCOLLECTION'
class PointField(GeometryField):
geom_type = 'POINT'
class MultiPointField(GeometryField):
geom_type = 'MULTIPOINT'
class LineStringField(GeometryField):
geom_type = 'LINESTRING'
class MultiLineStringField(GeometryField):
geom_type = 'MULTILINESTRING'
class PolygonField(GeometryField):
geom_type = 'POLYGON'
class MultiPolygonField(GeometryField):
geom_type = 'MULTIPOLYGON'
| bsd-3-clause | -647,900,961,878,998,300 | 8,851,986,984,821,578,000 | 32.71875 | 107 | 0.623262 | false |
azul-cloud/serendipity | storages/backends/symlinkorcopy.py | 19 | 2625 | import os
from django.conf import settings
from django.core.files.storage import FileSystemStorage
__doc__ = """
I needed to efficiently create a mirror of a directory tree (so that
"origin pull" CDNs can automatically pull files). The trick was that
some files could be modified, and some could be identical to the original.
Of course it doesn't make sense to store the exact same data twice on the
file system. So I created SymlinkOrCopyStorage.
SymlinkOrCopyStorage allows you to symlink a file when it's identical to
the original file and to copy the file if it's modified.
Of course, it's impossible to know if a file is modified just by looking
at the file, without knowing what the original file was.
That's what the symlinkWithin parameter is for. It accepts one or more paths
(if multiple, they should be concatenated using a colon (:)).
Files that will be saved using SymlinkOrCopyStorage are then checked on their
location: if they are within one of the symlink_within directories,
they will be symlinked, otherwise they will be copied.
The rationale is that unmodified files will exist in their original location,
e.g. /htdocs/example.com/image.jpg and modified files will be stored in
a temporary directory, e.g. /tmp/image.jpg.
"""
class SymlinkOrCopyStorage(FileSystemStorage):
"""Stores symlinks to files instead of actual files whenever possible
When a file that's being saved is currently stored in the symlink_within
directory, then symlink the file. Otherwise, copy the file.
"""
def __init__(self, location=settings.MEDIA_ROOT, base_url=settings.MEDIA_URL,
symlink_within=None):
super(SymlinkOrCopyStorage, self).__init__(location, base_url)
self.symlink_within = symlink_within.split(":")
def _save(self, name, content):
full_path_dst = self.path(name)
directory = os.path.dirname(full_path_dst)
if not os.path.exists(directory):
os.makedirs(directory)
elif not os.path.isdir(directory):
raise IOError("%s exists and is not a directory." % directory)
full_path_src = os.path.abspath(content.name)
symlinked = False
# Only symlink if the current platform supports it.
if getattr(os, "symlink", False):
for path in self.symlink_within:
if full_path_src.startswith(path):
os.symlink(full_path_src, full_path_dst)
symlinked = True
break
if not symlinked:
super(SymlinkOrCopyStorage, self)._save(name, content)
return name
| mit | -8,270,201,578,547,975,000 | -8,377,500,281,392,319,000 | 41.33871 | 82 | 0.697143 | false |
TeachAtTUM/edx-platform | openedx/core/djangoapps/theming/helpers.py | 10 | 11228 | """
Helpers for accessing comprehensive theming related variables.
This file is imported at startup. Imports of models or things which import models will break startup on Django 1.9+. If
you need models here, please import them inside the function which uses them.
"""
import os
import re
from logging import getLogger
from django.conf import settings
from microsite_configuration import microsite
from openedx.core.djangoapps.site_configuration import helpers as configuration_helpers
from openedx.core.djangoapps.theming.helpers_dirs import (
Theme,
get_project_root_name_from_settings,
get_theme_base_dirs_from_settings,
get_theme_dirs,
get_themes_unchecked
)
from openedx.core.djangoapps.request_cache.middleware import RequestCache, request_cached
logger = getLogger(__name__) # pylint: disable=invalid-name
@request_cached
def get_template_path(relative_path, **kwargs):
"""
This is a proxy function to hide microsite_configuration behind comprehensive theming.
The calculated value is cached for the lifetime of the current request.
"""
# We need to give priority to theming over microsites
    # So, we apply the microsite override only if there is no associated site theme
    # and an associated microsite is present.
if not current_request_has_associated_site_theme() and microsite.is_request_in_microsite():
relative_path = microsite.get_template_path(relative_path, **kwargs)
return relative_path
def is_request_in_themed_site():
"""
This is a proxy function to hide microsite_configuration behind comprehensive theming.
"""
# We need to give priority to theming/site-configuration over microsites
return configuration_helpers.is_site_configuration_enabled() or microsite.is_request_in_microsite()
def get_template(uri):
"""
This is a proxy function to hide microsite_configuration behind comprehensive theming.
:param uri: uri of the template
"""
# We need to give priority to theming over microsites
    # So, we apply the microsite template override only when there is no associated theme.
if not current_request_has_associated_site_theme():
return microsite.get_template(uri)
def get_template_path_with_theme(relative_path):
"""
Returns template path in current site's theme if it finds one there otherwise returns same path.
Example:
>> get_template_path_with_theme('header.html')
'/red-theme/lms/templates/header.html'
Parameters:
relative_path (str): template's path relative to the templates directory e.g. 'footer.html'
Returns:
(str): template path in current site's theme
"""
relative_path = os.path.normpath(relative_path)
theme = get_current_theme()
if not theme:
return relative_path
# strip `/` if present at the start of relative_path
template_name = re.sub(r'^/+', '', relative_path)
template_path = theme.template_path / template_name
absolute_path = theme.path / "templates" / template_name
if absolute_path.exists():
return str(template_path)
else:
return relative_path
def get_all_theme_template_dirs():
"""
Returns template directories for all the themes.
Example:
>> get_all_theme_template_dirs()
[
'/edx/app/edxapp/edx-platform/themes/red-theme/lms/templates/',
]
Returns:
(list): list of directories containing theme templates.
"""
themes = get_themes()
template_paths = list()
for theme in themes:
template_paths.extend(theme.template_dirs)
return template_paths
def get_project_root_name():
"""
Return root name for the current project
Example:
>> get_project_root_name()
'lms'
# from studio
>> get_project_root_name()
'cms'
Returns:
(str): component name of platform e.g lms, cms
"""
return get_project_root_name_from_settings(settings.PROJECT_ROOT)
def strip_site_theme_templates_path(uri):
"""
Remove site template theme path from the uri.
Example:
>> strip_site_theme_templates_path('/red-theme/lms/templates/header.html')
'header.html'
Arguments:
uri (str): template path from which to remove site theme path. e.g. '/red-theme/lms/templates/header.html'
Returns:
(str): template path with site theme path removed.
"""
theme = get_current_theme()
if not theme:
return uri
templates_path = "/".join([
theme.theme_dir_name,
get_project_root_name(),
"templates"
])
uri = re.sub(r'^/*' + templates_path + '/*', '', uri)
return uri
def get_current_request():
"""
Return current request instance.
Returns:
(HttpRequest): returns current request
"""
return RequestCache.get_current_request()
def get_current_site():
"""
Return current site.
Returns:
(django.contrib.sites.models.Site): returns current site
"""
request = get_current_request()
if not request:
return None
return getattr(request, 'site', None)
def get_current_site_theme():
"""
Return current site theme object. Returns None if theming is disabled.
Returns:
         (SiteTheme): site theme object for the current site.
"""
# Return None if theming is disabled
if not is_comprehensive_theming_enabled():
return None
request = get_current_request()
if not request:
return None
return getattr(request, 'site_theme', None)
def get_current_theme():
"""
Return current theme object. Returns None if theming is disabled.
Returns:
         (Theme): theme object for the current site.
"""
# Return None if theming is disabled
if not is_comprehensive_theming_enabled():
return None
site_theme = get_current_site_theme()
if not site_theme:
return None
try:
return Theme(
name=site_theme.theme_dir_name,
theme_dir_name=site_theme.theme_dir_name,
themes_base_dir=get_theme_base_dir(site_theme.theme_dir_name),
project_root=get_project_root_name()
)
except ValueError as error:
# Log exception message and return None, so that open source theme is used instead
logger.exception('Theme not found in any of the themes dirs. [%s]', error)
return None
def current_request_has_associated_site_theme():
"""
True if current request has an associated SiteTheme, False otherwise.
Returns:
True if current request has an associated SiteTheme, False otherwise
"""
request = get_current_request()
site_theme = getattr(request, 'site_theme', None)
return bool(site_theme and site_theme.id)
def get_theme_base_dir(theme_dir_name, suppress_error=False):
"""
Returns absolute path to the directory that contains the given theme.
Args:
theme_dir_name (str): theme directory name to get base path for
suppress_error (bool): if True function will return None if theme is not found instead of raising an error
Returns:
(str): Base directory that contains the given theme
"""
for themes_dir in get_theme_base_dirs():
if theme_dir_name in get_theme_dirs(themes_dir):
return themes_dir
if suppress_error:
return None
raise ValueError(
"Theme '{theme}' not found in any of the following themes dirs, \nTheme dirs: \n{dir}".format(
theme=theme_dir_name,
dir=get_theme_base_dirs(),
))
def theme_exists(theme_name, themes_dir=None):
"""
Returns True if a theme exists with the specified name.
"""
for theme in get_themes(themes_dir=themes_dir):
if theme.theme_dir_name == theme_name:
return True
return False
def get_themes(themes_dir=None):
"""
get a list of all themes known to the system.
Args:
themes_dir (str): (Optional) Path to themes base directory
Returns:
list of themes known to the system.
"""
if not is_comprehensive_theming_enabled():
return []
if themes_dir is None:
themes_dir = get_theme_base_dirs_unchecked()
return get_themes_unchecked(themes_dir, settings.PROJECT_ROOT)
def get_theme_base_dirs_unchecked():
"""
Return base directories that contains all the themes.
Example:
>> get_theme_base_dirs_unchecked()
['/edx/app/ecommerce/ecommerce/themes']
Returns:
(List of Paths): Base theme directory paths
"""
theme_dirs = getattr(settings, "COMPREHENSIVE_THEME_DIRS", None)
return get_theme_base_dirs_from_settings(theme_dirs)
def get_theme_base_dirs():
"""
Return base directories that contains all the themes.
Ensures comprehensive theming is enabled.
Example:
>> get_theme_base_dirs()
['/edx/app/ecommerce/ecommerce/themes']
Returns:
(List of Paths): Base theme directory paths
"""
# Return an empty list if theming is disabled
if not is_comprehensive_theming_enabled():
return []
return get_theme_base_dirs_unchecked()
def is_comprehensive_theming_enabled():
"""
Returns boolean indicating whether comprehensive theming functionality is enabled or disabled.
Example:
>> is_comprehensive_theming_enabled()
True
Returns:
(bool): True if comprehensive theming is enabled else False
"""
# We need to give priority to theming over microsites
if settings.ENABLE_COMPREHENSIVE_THEMING and current_request_has_associated_site_theme():
return True
# Disable theming for microsites
# Microsite configurations take priority over the default site theme.
if microsite.is_request_in_microsite():
return False
return settings.ENABLE_COMPREHENSIVE_THEMING
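# Illustrative summary of the precedence implemented above (not part of the original
# module):
#   1. ENABLE_COMPREHENSIVE_THEMING is True and the request has a SiteTheme -> True
#   2. otherwise, if the request is inside a microsite                      -> False
#   3. otherwise the ENABLE_COMPREHENSIVE_THEMING setting decides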
def get_config_value_from_site_or_settings(name, site=None, site_config_name=None):
"""
Given a configuration setting name, try to get it from the site configuration and then fall back on the settings.
If site_config_name is not specified then "name" is used as the key for both collections.
Args:
name (str): The name of the setting to get the value of.
site: The site that we are trying to fetch the value for.
site_config_name: The name of the setting within the site configuration.
Returns:
The value stored in the configuration.
"""
from openedx.core.djangoapps.site_configuration.models import SiteConfiguration
if site_config_name is None:
site_config_name = name
if site is None:
site = get_current_site()
site_configuration = None
if site is not None:
try:
site_configuration = getattr(site, "configuration", None)
except SiteConfiguration.DoesNotExist:
pass
value_from_settings = getattr(settings, name, None)
if site_configuration is not None:
return site_configuration.get_value(site_config_name, default=value_from_settings)
else:
return value_from_settings
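# Illustrative usage sketch (not part of the original module); the setting name,
# site configuration key and values below are hypothetical.
#
#   # settings.py:          PLATFORM_NAME = "Open edX"
#   # site configuration:   {"platform_name": "My Portal"}
#   get_config_value_from_site_or_settings(
#       'PLATFORM_NAME', site=some_site, site_config_name='platform_name')
#   # -> "My Portal" when the site has a configuration containing the key,
#   #    otherwise it falls back to the Django setting ("Open edX").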
| agpl-3.0 | 1,203,974,141,823,069,000 | -646,988,079,397,756,700 | 28.62533 | 119 | 0.667349 | false |
tcpcloud/monitoring-for-openstack | oschecks/ceilometer.py | 4 | 2011 | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Openstack Monitoring script for Sensu / Nagios
#
# Copyright © 2013-2014 eNovance <licensing@enovance.com>
#
# Author: Mehdi Abaakouk <mehdi.abaakouk@enovance.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oschecks import utils
def _check_ceilometer_api():
ceilometer = utils.Ceilometer()
ceilometer.add_argument('-w', dest='warning', type=int, default=5,
help='Warning timeout for Ceilometer APIs calls')
ceilometer.add_argument('-c', dest='critical', type=int, default=10,
help='Critical timeout for Ceilometer APIs calls')
options, client = ceilometer.setup()
elapsed, meters = utils.timeit(client.meters.list)
if not meters:
utils.critical("Unable to contact Ceilometer API.")
if elapsed > options.critical:
utils.critical("Get meters took more than %d seconds, "
"it's too long.|response_time=%d" %
(options.critical, elapsed))
elif elapsed > options.warning:
utils.warning("Get meters took more than %d seconds, "
"it's too long.|response_time=%d" %
(options.warning, elapsed))
else:
utils.ok("Get meters, Ceilometer API is working: "
"list %d meters in %d seconds.|response_time=%d" %
(len(meters), elapsed, elapsed))
def check_ceilometer_api():
utils.safe_run(_check_ceilometer_api)
| apache-2.0 | 6,036,237,922,886,502,000 | -2,196,678,004,973,906,700 | 38.411765 | 78 | 0.651244 | false |
kumar303/zamboni | mkt/site/monitors.py | 3 | 9592 | import os
import socket
import StringIO
import tempfile
import time
import traceback
from django.conf import settings
import commonware.log
import elasticsearch
import requests
from cache_nuggets.lib import memoize
from PIL import Image
from lib.crypto import packaged, receipt
from lib.crypto.packaged import SigningError as PackageSigningError
from lib.crypto.receipt import SigningError
from lib.pay_server import client
monitor_log = commonware.log.getLogger('z.monitor')
def memcache():
memcache = getattr(settings, 'CACHES', {}).get('default')
memcache_results = []
status = ''
if memcache and 'memcache' in memcache['BACKEND']:
hosts = memcache['LOCATION']
using_twemproxy = False
if not isinstance(hosts, (tuple, list)):
hosts = [hosts]
for host in hosts:
ip, port = host.split(':')
if ip == '127.0.0.1':
using_twemproxy = True
try:
s = socket.socket()
s.connect((ip, int(port)))
except Exception, e:
result = False
status = 'Failed to connect to memcached (%s): %s' % (host, e)
monitor_log.critical(status)
else:
result = True
finally:
s.close()
memcache_results.append((ip, port, result))
if (not using_twemproxy and len(hosts) > 1 and
len(memcache_results) < 2):
            # If more than one host was requested but fewer than two
            # responded, record a warning status.
            status = ('2+ memcache servers are required. '
                      '%s available') % len(memcache_results)
monitor_log.warning(status)
# If we are in debug mode, don't worry about checking for memcache.
elif settings.DEBUG:
return status, []
if not memcache_results:
status = 'Memcache is not configured'
monitor_log.info(status)
return status, memcache_results
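# Illustrative CACHES setting that the check above expects (not part of the original
# module); the hosts are hypothetical.
#
#   CACHES = {
#       'default': {
#           'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
#           'LOCATION': ['10.0.0.1:11211', '10.0.0.2:11211'],
#       }
#   }
#   # each LOCATION entry is split on ':' and probed with a plain socket connect;
#   # a '127.0.0.1' entry is treated as twemproxy, which relaxes the 2+ servers rule.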
def libraries():
# Check Libraries and versions
libraries_results = []
status = ''
try:
Image.new('RGB', (16, 16)).save(StringIO.StringIO(), 'JPEG')
libraries_results.append(('PIL+JPEG', True, 'Got it!'))
except Exception, e:
msg = "Failed to create a jpeg image: %s" % e
libraries_results.append(('PIL+JPEG', False, msg))
try:
import M2Crypto # NOQA
libraries_results.append(('M2Crypto', True, 'Got it!'))
except ImportError:
libraries_results.append(('M2Crypto', False, 'Failed to import'))
if settings.SPIDERMONKEY:
if os.access(settings.SPIDERMONKEY, os.R_OK):
libraries_results.append(('Spidermonkey is ready!', True, None))
# TODO: see if it works?
else:
msg = "You said spidermonkey was at (%s)" % settings.SPIDERMONKEY
libraries_results.append(('Spidermonkey', False, msg))
    # If settings are in debug mode and SPIDERMONKEY is empty,
    # throw this error.
elif settings.DEBUG and not settings.SPIDERMONKEY:
msg = 'SPIDERMONKEY is empty'
libraries_results.append(('Spidermonkey', True, msg))
else:
msg = "Please set SPIDERMONKEY in your settings file."
libraries_results.append(('Spidermonkey', False, msg))
missing_libs = [l for l, s, m in libraries_results if not s]
if missing_libs:
status = 'missing libs: %s' % ",".join(missing_libs)
return status, libraries_results
def elastic():
es = elasticsearch.Elasticsearch(hosts=settings.ES_HOSTS)
elastic_results = None
status = ''
try:
health = es.cluster.health()
if health['status'] == 'red':
status = 'ES is red'
elastic_results = health
except elasticsearch.ElasticsearchException:
monitor_log.exception('Failed to communicate with ES')
elastic_results = {'error': traceback.format_exc()}
status = 'traceback'
return status, elastic_results
def path():
# Check file paths / permissions
rw = (settings.TMP_PATH,
settings.NETAPP_STORAGE,
settings.UPLOADS_PATH,
settings.ADDONS_PATH,
settings.GUARDED_ADDONS_PATH,
settings.ADDON_ICONS_PATH,
settings.WEBSITE_ICONS_PATH,
settings.PREVIEWS_PATH,
settings.REVIEWER_ATTACHMENTS_PATH,)
r = [os.path.join(settings.ROOT, 'locale')]
filepaths = [(path, os.R_OK | os.W_OK, "We want read + write")
for path in rw]
filepaths += [(path, os.R_OK, "We want read") for path in r]
filepath_results = []
filepath_status = True
for path, perms, notes in filepaths:
path_exists = os.path.exists(path)
path_perms = os.access(path, perms)
filepath_status = filepath_status and path_exists and path_perms
filepath_results.append((path, path_exists, path_perms, notes))
key_exists = os.path.exists(settings.WEBAPPS_RECEIPT_KEY)
key_perms = os.access(settings.WEBAPPS_RECEIPT_KEY, os.R_OK)
filepath_status = filepath_status and key_exists and key_perms
filepath_results.append(('settings.WEBAPPS_RECEIPT_KEY',
key_exists, key_perms, 'We want read'))
    status = ''
if not filepath_status:
status = 'check main status page for broken perms'
return status, filepath_results
def redis():
# Check Redis
redis_results = [None, 'REDIS_BACKEND is not set']
status = 'REDIS_BACKEND is not set'
if getattr(settings, 'REDIS_BACKEND', False):
from caching.invalidation import get_redis_backend
status = ''
try:
redis = get_redis_backend()
redis_results = redis.info()
except Exception, e:
redis_results = None
status = ('Failed to chat with redis')
monitor_log.critical('Failed to chat with redis: (%s)' % e)
return status, redis_results
# The signer check actually asks the signing server to sign something. Doing this
# once per nagios check on every web head might be a bit much, so the memoize
# throttles it a bit by caching the result for 15 seconds.
@memoize('monitors-signer', time=15)
def receipt_signer():
destination = getattr(settings, 'SIGNING_SERVER', None)
if not destination:
return '', 'Signer is not configured.'
# Just send some test data into the signer.
now = int(time.time())
not_valid = (settings.SITE_URL + '/not-valid')
data = {'detail': not_valid, 'exp': now + 3600, 'iat': now,
'iss': settings.SITE_URL,
'product': {'storedata': 'id=1', 'url': u'http://not-valid.com'},
'nbf': now, 'typ': 'purchase-receipt',
'reissue': not_valid,
'user': {'type': 'directed-identifier',
'value': u'something-not-valid'},
'verify': not_valid
}
try:
result = receipt.sign(data)
except SigningError as err:
msg = 'Error on signing (%s): %s' % (destination, err)
return msg, msg
try:
cert, rest = receipt.crack(result)
except Exception as err:
msg = 'Error on cracking receipt (%s): %s' % (destination, err)
return msg, msg
# Check that the certs used to sign the receipts are not about to expire.
limit = now + (60 * 60 * 24) # One day.
if cert['exp'] < limit:
msg = 'Cert will expire soon (%s)' % destination
return msg, msg
cert_err_msg = 'Error on checking public cert (%s): %s'
location = cert['iss']
try:
resp = requests.get(location, timeout=5, stream=False)
except Exception as err:
msg = cert_err_msg % (location, err)
return msg, msg
if not resp.ok:
msg = cert_err_msg % (location, resp.reason)
return msg, msg
cert_json = resp.json()
if not cert_json or 'jwk' not in cert_json:
msg = cert_err_msg % (location, 'Not valid JSON/JWK')
return msg, msg
return '', 'Signer working and up to date'
# Like the receipt signer above this asks the packaged app signing
# service to sign one for us.
@memoize('monitors-package-signer', time=60)
def package_signer():
destination = getattr(settings, 'SIGNED_APPS_SERVER', None)
if not destination:
return '', 'Signer is not configured.'
app_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'nagios_check_packaged_app.zip')
signed_path = tempfile.mktemp()
try:
packaged.sign_app(app_path, signed_path, None, False)
return '', 'Package signer working'
except PackageSigningError, e:
msg = 'Error on package signing (%s): %s' % (destination, e)
return msg, msg
finally:
os.unlink(signed_path)
# Not called settings to avoid conflict with django.conf.settings.
def settings_check():
required = ['APP_PURCHASE_KEY', 'APP_PURCHASE_TYP', 'APP_PURCHASE_AUD',
'APP_PURCHASE_SECRET']
for key in required:
if not getattr(settings, key):
msg = 'Missing required value %s' % key
return msg, msg
return '', 'Required settings ok'
def solitude():
try:
res = client.api.services.request.get()
except Exception as err:
return repr(err), repr(err)
auth = res.get('authenticated', None)
if auth != 'marketplace':
msg = 'Solitude authenticated as: %s' % auth
return msg, msg
return '', 'Solitude authentication ok'
| bsd-3-clause | -8,598,135,868,731,695,000 | -4,380,222,772,516,351,500 | 32.538462 | 78 | 0.60759 | false |
C00kiie/Youtube-Mp3-telegram-bot | youtube_dl/extractor/viki.py | 29 | 13668 | # coding: utf-8
from __future__ import unicode_literals
import hashlib
import hmac
import itertools
import json
import re
import time
from .common import InfoExtractor
from ..utils import (
ExtractorError,
int_or_none,
parse_age_limit,
parse_iso8601,
sanitized_Request,
)
class VikiBaseIE(InfoExtractor):
_VALID_URL_BASE = r'https?://(?:www\.)?viki\.(?:com|net|mx|jp|fr)/'
_API_QUERY_TEMPLATE = '/v4/%sapp=%s&t=%s&site=www.viki.com'
_API_URL_TEMPLATE = 'http://api.viki.io%s&sig=%s'
_APP = '65535a'
_APP_VERSION = '2.2.5.1428709186'
_APP_SECRET = '-$iJ}@p7!G@SyU/je1bEyWg}upLu-6V6-Lg9VD(]siH,r.,m-r|ulZ,U4LC/SeR)'
_GEO_BYPASS = False
_NETRC_MACHINE = 'viki'
_token = None
_ERRORS = {
'geo': 'Sorry, this content is not available in your region.',
'upcoming': 'Sorry, this content is not yet available.',
# 'paywall': 'paywall',
}
def _prepare_call(self, path, timestamp=None, post_data=None):
path += '?' if '?' not in path else '&'
if not timestamp:
timestamp = int(time.time())
query = self._API_QUERY_TEMPLATE % (path, self._APP, timestamp)
if self._token:
query += '&token=%s' % self._token
sig = hmac.new(
self._APP_SECRET.encode('ascii'),
query.encode('ascii'),
hashlib.sha1
).hexdigest()
url = self._API_URL_TEMPLATE % (query, sig)
return sanitized_Request(
url, json.dumps(post_data).encode('utf-8')) if post_data else url
def _call_api(self, path, video_id, note, timestamp=None, post_data=None):
resp = self._download_json(
self._prepare_call(path, timestamp, post_data), video_id, note)
error = resp.get('error')
if error:
if error == 'invalid timestamp':
resp = self._download_json(
self._prepare_call(path, int(resp['current_timestamp']), post_data),
video_id, '%s (retry)' % note)
error = resp.get('error')
if error:
self._raise_error(resp['error'])
return resp
def _raise_error(self, error):
raise ExtractorError(
'%s returned error: %s' % (self.IE_NAME, error),
expected=True)
def _check_errors(self, data):
for reason, status in data.get('blocking', {}).items():
if status and reason in self._ERRORS:
message = self._ERRORS[reason]
if reason == 'geo':
self.raise_geo_restricted(msg=message)
raise ExtractorError('%s said: %s' % (
self.IE_NAME, message), expected=True)
def _real_initialize(self):
self._login()
def _login(self):
(username, password) = self._get_login_info()
if username is None:
return
login_form = {
'login_id': username,
'password': password,
}
login = self._call_api(
'sessions.json', None,
'Logging in as %s' % username, post_data=login_form)
self._token = login.get('token')
if not self._token:
self.report_warning('Unable to get session token, login has probably failed')
@staticmethod
def dict_selection(dict_obj, preferred_key, allow_fallback=True):
if preferred_key in dict_obj:
return dict_obj.get(preferred_key)
if not allow_fallback:
return
filtered_dict = list(filter(None, [dict_obj.get(k) for k in dict_obj.keys()]))
return filtered_dict[0] if filtered_dict else None
class VikiIE(VikiBaseIE):
IE_NAME = 'viki'
_VALID_URL = r'%s(?:videos|player)/(?P<id>[0-9]+v)' % VikiBaseIE._VALID_URL_BASE
_TESTS = [{
'url': 'http://www.viki.com/videos/1023585v-heirs-episode-14',
'info_dict': {
'id': '1023585v',
'ext': 'mp4',
'title': 'Heirs Episode 14',
'uploader': 'SBS',
'description': 'md5:c4b17b9626dd4b143dcc4d855ba3474e',
'upload_date': '20131121',
'age_limit': 13,
},
'skip': 'Blocked in the US',
}, {
# clip
'url': 'http://www.viki.com/videos/1067139v-the-avengers-age-of-ultron-press-conference',
'md5': '86c0b5dbd4d83a6611a79987cc7a1989',
'info_dict': {
'id': '1067139v',
'ext': 'mp4',
'title': "'The Avengers: Age of Ultron' Press Conference",
'description': 'md5:d70b2f9428f5488321bfe1db10d612ea',
'duration': 352,
'timestamp': 1430380829,
'upload_date': '20150430',
'uploader': 'Arirang TV',
'like_count': int,
'age_limit': 0,
}
}, {
'url': 'http://www.viki.com/videos/1048879v-ankhon-dekhi',
'info_dict': {
'id': '1048879v',
'ext': 'mp4',
'title': 'Ankhon Dekhi',
'duration': 6512,
'timestamp': 1408532356,
'upload_date': '20140820',
'uploader': 'Spuul',
'like_count': int,
'age_limit': 13,
},
'skip': 'Blocked in the US',
}, {
# episode
'url': 'http://www.viki.com/videos/44699v-boys-over-flowers-episode-1',
'md5': '5fa476a902e902783ac7a4d615cdbc7a',
'info_dict': {
'id': '44699v',
'ext': 'mp4',
'title': 'Boys Over Flowers - Episode 1',
'description': 'md5:b89cf50038b480b88b5b3c93589a9076',
'duration': 4204,
'timestamp': 1270496524,
'upload_date': '20100405',
'uploader': 'group8',
'like_count': int,
'age_limit': 13,
}
}, {
# youtube external
'url': 'http://www.viki.com/videos/50562v-poor-nastya-complete-episode-1',
'md5': '63f8600c1da6f01b7640eee7eca4f1da',
'info_dict': {
'id': '50562v',
'ext': 'webm',
'title': 'Poor Nastya [COMPLETE] - Episode 1',
'description': '',
'duration': 606,
'timestamp': 1274949505,
'upload_date': '20101213',
'uploader': 'ad14065n',
'uploader_id': 'ad14065n',
'like_count': int,
'age_limit': 13,
}
}, {
'url': 'http://www.viki.com/player/44699v',
'only_matching': True,
}, {
# non-English description
'url': 'http://www.viki.com/videos/158036v-love-in-magic',
'md5': '1713ae35df5a521b31f6dc40730e7c9c',
'info_dict': {
'id': '158036v',
'ext': 'mp4',
'uploader': 'I Planet Entertainment',
'upload_date': '20111122',
'timestamp': 1321985454,
'description': 'md5:44b1e46619df3a072294645c770cef36',
'title': 'Love In Magic',
'age_limit': 13,
},
}]
def _real_extract(self, url):
video_id = self._match_id(url)
video = self._call_api(
'videos/%s.json' % video_id, video_id, 'Downloading video JSON')
self._check_errors(video)
title = self.dict_selection(video.get('titles', {}), 'en', allow_fallback=False)
if not title:
title = 'Episode %d' % video.get('number') if video.get('type') == 'episode' else video.get('id') or video_id
container_titles = video.get('container', {}).get('titles', {})
container_title = self.dict_selection(container_titles, 'en')
title = '%s - %s' % (container_title, title)
description = self.dict_selection(video.get('descriptions', {}), 'en')
duration = int_or_none(video.get('duration'))
timestamp = parse_iso8601(video.get('created_at'))
uploader = video.get('author')
like_count = int_or_none(video.get('likes', {}).get('count'))
age_limit = parse_age_limit(video.get('rating'))
thumbnails = []
for thumbnail_id, thumbnail in video.get('images', {}).items():
thumbnails.append({
'id': thumbnail_id,
'url': thumbnail.get('url'),
})
subtitles = {}
for subtitle_lang, _ in video.get('subtitle_completions', {}).items():
subtitles[subtitle_lang] = [{
'ext': subtitles_format,
'url': self._prepare_call(
'videos/%s/subtitles/%s.%s' % (video_id, subtitle_lang, subtitles_format)),
} for subtitles_format in ('srt', 'vtt')]
result = {
'id': video_id,
'title': title,
'description': description,
'duration': duration,
'timestamp': timestamp,
'uploader': uploader,
'like_count': like_count,
'age_limit': age_limit,
'thumbnails': thumbnails,
'subtitles': subtitles,
}
streams = self._call_api(
'videos/%s/streams.json' % video_id, video_id,
'Downloading video streams JSON')
if 'external' in streams:
result.update({
'_type': 'url_transparent',
'url': streams['external']['url'],
})
return result
formats = []
for format_id, stream_dict in streams.items():
height = int_or_none(self._search_regex(
r'^(\d+)[pP]$', format_id, 'height', default=None))
for protocol, format_dict in stream_dict.items():
                # rtmps URLs do not seem to work
if protocol == 'rtmps':
continue
format_url = format_dict['url']
if format_id == 'm3u8':
m3u8_formats = self._extract_m3u8_formats(
format_url, video_id, 'mp4',
entry_protocol='m3u8_native',
m3u8_id='m3u8-%s' % protocol, fatal=False)
# Despite CODECS metadata in m3u8 all video-only formats
# are actually video+audio
for f in m3u8_formats:
if f.get('acodec') == 'none' and f.get('vcodec') != 'none':
f['acodec'] = None
formats.extend(m3u8_formats)
elif format_url.startswith('rtmp'):
mobj = re.search(
r'^(?P<url>rtmp://[^/]+/(?P<app>.+?))/(?P<playpath>mp4:.+)$',
format_url)
if not mobj:
continue
formats.append({
'format_id': 'rtmp-%s' % format_id,
'ext': 'flv',
'url': mobj.group('url'),
'play_path': mobj.group('playpath'),
'app': mobj.group('app'),
'page_url': url,
})
else:
formats.append({
'url': format_url,
'format_id': '%s-%s' % (format_id, protocol),
'height': height,
})
self._sort_formats(formats)
result['formats'] = formats
return result
class VikiChannelIE(VikiBaseIE):
IE_NAME = 'viki:channel'
_VALID_URL = r'%s(?:tv|news|movies|artists)/(?P<id>[0-9]+c)' % VikiBaseIE._VALID_URL_BASE
_TESTS = [{
'url': 'http://www.viki.com/tv/50c-boys-over-flowers',
'info_dict': {
'id': '50c',
'title': 'Boys Over Flowers',
'description': 'md5:ecd3cff47967fe193cff37c0bec52790',
},
'playlist_mincount': 71,
}, {
'url': 'http://www.viki.com/tv/1354c-poor-nastya-complete',
'info_dict': {
'id': '1354c',
'title': 'Poor Nastya [COMPLETE]',
'description': 'md5:05bf5471385aa8b21c18ad450e350525',
},
'playlist_count': 127,
}, {
'url': 'http://www.viki.com/news/24569c-showbiz-korea',
'only_matching': True,
}, {
'url': 'http://www.viki.com/movies/22047c-pride-and-prejudice-2005',
'only_matching': True,
}, {
'url': 'http://www.viki.com/artists/2141c-shinee',
'only_matching': True,
}]
_PER_PAGE = 25
def _real_extract(self, url):
channel_id = self._match_id(url)
channel = self._call_api(
'containers/%s.json' % channel_id, channel_id,
'Downloading channel JSON')
self._check_errors(channel)
title = self.dict_selection(channel['titles'], 'en')
description = self.dict_selection(channel['descriptions'], 'en')
entries = []
for video_type in ('episodes', 'clips', 'movies'):
for page_num in itertools.count(1):
page = self._call_api(
'containers/%s/%s.json?per_page=%d&sort=number&direction=asc&with_paging=true&page=%d'
% (channel_id, video_type, self._PER_PAGE, page_num), channel_id,
'Downloading %s JSON page #%d' % (video_type, page_num))
for video in page['response']:
video_id = video['id']
entries.append(self.url_result(
'http://www.viki.com/videos/%s' % video_id, 'Viki'))
if not page['pagination']['next']:
break
return self.playlist_result(entries, channel_id, title, description)
| mit | 5,744,556,572,335,888,000 | -4,739,361,530,131,701,000 | 34.59375 | 121 | 0.503951 | false |
mostaphaRoudsari/Honeybee | src/Honeybee_Read Annual Result I.py | 1 | 44279 | #
# Honeybee: A Plugin for Environmental Analysis (GPL) started by Mostapha Sadeghipour Roudsari
#
# This file is part of Honeybee.
#
# Copyright (c) 2013-2020, Mostapha Sadeghipour Roudsari <mostapha@ladybug.tools>
# Honeybee is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published
# by the Free Software Foundation; either version 3 of the License,
# or (at your option) any later version.
#
# Honeybee is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Honeybee; If not, see <http://www.gnu.org/licenses/>.
#
# @license GPL-3.0+ <http://spdx.org/licenses/GPL-3.0+>
"""
Read Annual Daylight Results I [Standard Daysim Results]
-
Provided by Honeybee 0.0.66
Args:
_illFilesAddress: List of .ill files
_testPoints: List of 3d Points
        occupancyFiles_: Address to a Daysim occupancy file. You can find some examples in \Daysim\occ. Use the Honeybee Occupancy Generator to generate a custom occupancy file. You can also use EnergyPlus schedules directly. If the schedule uses continuous values, any value larger than .2 will be considered occupied.
lightingControlGroups_: Daysim lighting control groups. Daysim can model up to 10 lighting control groups together. Default is > cntrlType = 4, lightingPower = 250, lightingSetpoint = 300, ballastLossFactor = 20, standbyPower = 3, delayTime = 5
_DLAIllumThresholds_: Illuminance threshold for Daylight Autonomy calculation in lux. Default is set to 300 lux.
        SHDGroupI_Sensors_: Sensors for shading group I. Use the shadingGroupSensors component to prepare the inputs
        SHDGroupII_Sensors_: Sensors for shading group II. Use the shadingGroupSensors component to prepare the inputs
_runIt: set to True to run the analysis
Returns:
DLA: Daylight Autonomy > Percentage of the time during the active occupancy hours that the test point receives more daylight than the illuminance threshold.
        UDLI_Less_100: Useful Daylight Illuminance > Percentage of time during the active occupancy hours that the test point receives less than 100 lux.
        UDLI_100_2000: Useful Daylight Illuminance > Percentage of time during the active occupancy hours that the test point receives between 100 and 2000 lux.
        UDLI_More_2000: Useful Daylight Illuminance > Percentage of time during the active occupancy hours that the test point receives more than 2000 lux.
        CDA: Continuous Daylight Autonomy > Similar to Daylight Autonomy except that, for hours when the illuminance level is less than the threshold, the point receives partial credit equal to illuminanceLevel/illuminanceThreshold.
        sDA: Spatial Daylight Autonomy > sDA is the percent of analysis points across the analysis area that meet or exceed the _DLAIllumThresholds value (set to 300 lux for LEED) for at least 50% of the analysis period. Honeybee doesn't consider the effect of dynamic blinds in calculating sDA.
        annualProfiles: A .csv file generated by Daysim that can be used as a lighting schedule for annual energy simulation
"""
ghenv.Component.Name = "Honeybee_Read Annual Result I"
ghenv.Component.NickName = 'readAnnualResultsI'
ghenv.Component.Message = 'VER 0.0.66\nJUL_07_2020'
ghenv.Component.IconDisplayMode = ghenv.Component.IconDisplayMode.application
ghenv.Component.Category = "HB-Legacy"
ghenv.Component.SubCategory = "04 | Daylight | Daylight"
#compatibleHBVersion = VER 0.0.57\nNOV_03_2015
#compatibleLBVersion = VER 0.0.59\nFEB_01_2015
try: ghenv.Component.AdditionalHelpFromDocStrings = "2"
except: pass
from System import Object
import Grasshopper.Kernel as gh
from Grasshopper import DataTree
from Grasshopper.Kernel.Data import GH_Path
import Rhino as rc
import scriptcontext as sc
import os
import subprocess
import time
import shutil
"""
def testPtsStr(self, testPoint, ptsNormal):
return '%.4f'%testPoint.X + '\t' + \
'%.4f'%testPoint.Y + '\t' + \
'%.4f'%testPoint.Z + '\t' + \
'%.4f'%ptsNormal.X + '\t' + \
'%.4f'%ptsNormal.Y + '\t' + \
'%.4f'%ptsNormal.Z + '\n'
"""
def getFilelength(fileName):
with open(fileName) as inf:
for i, l in enumerate(inf):
pass
return i + 1
def executeBatchFiles(batchFileNames, maxPRuns = None, shell = False, waitingTime = 0.2):
"""Run a number of batch files in parallel and
        wait until the end of the analysis.
    Args:
        batchFileNames: List of batch files
        maxPRuns: max number of files to be run in parallel (default = 1)
        shell: set to True if you do NOT want to see the cmd window while the analysis is running
"""
if not maxPRuns : maxPRuns = 1
maxPRuns = int(maxPRuns)
total = len(batchFileNames)
if maxPRuns < 1: maxPRuns = 1
if maxPRuns > total: maxPRuns = total
running = 0
done = False
jobs = []
pid = 0
try:
while not done:
if running < maxPRuns and pid < total:
# execute the files
jobs.append(subprocess.Popen(batchFileNames[pid].replace("\\", "/") , shell = shell))
pid+=1
time.sleep(waitingTime)
# count how many jobs are running and how many are done
running = 0
finished = 0
for job in jobs:
if job.poll() is None:
#one job is still running
running += 1
else:
finished += 1
if running == maxPRuns:
# wait for half a second
#print "waiting..."
time.sleep(waitingTime)
if finished == total:
done = True
except Exception, e:
print "Something went wrong: %s"%str(e)
def convertIllFileDaraTreeIntoSortedDictionary(illFilesAddress):
# I should move this function into Honeybee_Honeybee #BadPractice!
shadingGroupsCount = 0
shadingGroups = []
# get number of shading groups
for branch in range(illFilesAddress.BranchCount):
if illFilesAddress.Path(branch).Indices[0] not in shadingGroups:
shadingGroups.append(illFilesAddress.Path(branch).Indices[0])
shadingGroupsCount+=1
illFileSets = {}
for branch in range(illFilesAddress.BranchCount):
# sort files inside each branch if they are not sorted
fileNames = list(illFilesAddress.Branch(branch))
try:
fileNames = sorted(fileNames, key=lambda fileName: int(fileName \
.split(".")[-2] \
.strip("_down") \
.strip("_up") \
.split("_")[-1]))
except:
tmpmsg = "Can't sort .ill files based on the file names. Make sure branches are sorted correctly."
w = gh.GH_RuntimeMessageLevel.Warning
ghenv.Component.AddRuntimeMessage(w, tmpmsg)
#convert data tree to a useful dictionary
shadingGroupNumber = illFilesAddress.Path(branch).Indices[0]
if shadingGroupNumber not in illFileSets.keys():
illFileSets[shadingGroupNumber] = []
# create a separate list for each state
# the structure now is like llFileSets[shadingGroupNumber][[state 1], [state 2],..., [state n]]
illFileSets[shadingGroupNumber].append(fileNames)
return illFileSets
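# Illustrative shape of the dictionary returned above (hypothetical file names):
#
#   {
#       0: [["annual_0.ill", "annual_1.ill"]],                    # shading group 0, one state
#       1: [["annual_state_1_0.ill"], ["annual_state_2_0.ill"]],  # shading group 1, two states
#   }
#
# keys are shading group numbers; each value is a list of states, and each state holds
# the .ill files written by the separate cpu runs.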
def convertEPScheduleToDSSchedule(scheduleName, folder):
lb_preparation = sc.sticky["ladybug_Preparation"]()
HBScheduleList = sc.sticky["honeybee_ScheduleLib"].keys()
if scheduleName.upper() not in HBScheduleList:
raise ValueError("Can't find %s in EnergyPlus schedules."%(scheduleName))
heading = "# Daysim occupancy file,,,\n" + \
"# time_step 60, comment: weekdays are based on user list inputs." + \
"daylight savings time is based on user input),,\n" + \
"# month,day,time,occupancy (1=present/0=absent)\n"
readSchedules = sc.sticky["honeybee_ReadSchedules"](scheduleName, 0)
dailyValues = readSchedules.getScheduleValues()
hourlyValues = []
for values in dailyValues: hourlyValues.extend(values)
# create a temp folder inside folder will .ill files
if not os.path.isdir(folder): os.mkdir(folder)
# write the values to file
fullPath = os.path.join(folder, scheduleName.replace(" ", "_") + ".csv")
with open(fullPath, "w") as occFile:
occFile.write(heading)
for HOY, occ in enumerate(hourlyValues):
HOY += 1
d, m, t = lb_preparation.hour2Date(HOY, True)
m += 1 #month starts from 0 in Ladybug hour2Date. I should fix this at some point
t -= .5 # add half an hour to the time to be similar to daysim
if t == -.5: t = 23.5
if float(occ) >= .2: occ = 1
else: occ = 0
occLine = str(m) + "," + str(d) + "," + str(t) + "," + str(occ) + "\n"
occFile.write(occLine)
return fullPath
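# Illustrative first data lines of the Daysim occupancy .csv written above; the actual
# values depend on the EnergyPlus schedule being converted.
#
#   # month,day,time,occupancy (1=present/0=absent)
#   1,1,0.5,0
#   1,1,1.5,0
#   ...
#   1,1,8.5,1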
def main(illFilesAddress, testPts, testVecs, occFiles, lightingControlGroups, SHDGroupI_Sensors, SHDGroupII_Sensors, DLAIllumThresholds, runInBackground=False):
if sc.sticky.has_key('honeybee_release'):
try:
if not sc.sticky['honeybee_release'].isCompatible(ghenv.Component): return -1
if sc.sticky['honeybee_release'].isInputMissing(ghenv.Component): return -1
except:
warning = "You need a newer version of Honeybee to use this compoent." + \
" Use updateHoneybee component to update userObjects.\n" + \
"If you have already updated userObjects drag Honeybee_Honeybee component " + \
"into canvas and try again."
w = gh.GH_RuntimeMessageLevel.Warning
ghenv.Component.AddRuntimeMessage(w, warning)
return -1
hb_folders = sc.sticky["honeybee_folders"]
hb_RADPath = hb_folders["RADPath"]
hb_RADLibPath = hb_folders["RADLibPath"]
hb_DSPath = hb_folders["DSPath"]
hb_DSCore = hb_folders["DSCorePath"]
hb_DSLibPath = hb_folders["DSLibPath"]
else:
msg = "You should first let Honeybee to fly first..."
return msg, None
daysimHeaderKeywords = ["project_name", "project_directory", "bin_directory", "tmp_directory", "Template_File",
"place", "latitude", "longitude", "time_zone", "site_elevation", "time_step",
"wea_data_short_file", "wea_data_short_file_units", "lower_direct_threshold", "lower_diffuse_threshold",
"output_units", "sensor_file_unit", "material_file", "geometry_file",
"radiance_source_files", "sensor_file", "viewpoint_file", "AdaptiveZoneApplies", "dgp_image_x_size", "dgp_image_y_size",
"ab", "ad", "as", "ar", "aa", "lr", "st", "sj", "lw", "dj", "ds", "dr", "dp",
"occupancy", "minimum_illuminance_level", "daylight_savings_time", "shading", "electric_lighting_system",
"sensor_file_info", "daylight_autonomy_active_RGB", "electric_lighting", "direct_sunlight_file", "thermal_simulation",
"user_profile", "PNGScheduleExists" ]
# I will remove this function later and just use WriteDS class
class genDefaultLightingControl(object):
def __init__(self, sensorPts = [], cntrlType = 4, lightingPower = 250, lightingSetpoint = 300, ballastLossFactor = 20, standbyPower = 3, delayTime = 5):
self.sensorPts = sensorPts
self.lightingControlStr = self.getLightingControlStr(cntrlType, lightingPower, lightingSetpoint, ballastLossFactor, standbyPower, delayTime)
def getLightingControlStr(self, cntrlType, lightingPower = 250, lightingSetpoint = 300, ballastLossFactor = 20, standbyPower = 3, delayTime = 5):
cntrlType += 1
# manual control
lightingControlDict = {
1 : 'manualControl',
2 : 'onlyOffSensor',
3 : 'onWhenOccupied',
4 : 'dimming',
5 : 'onlyOffSensorAndDimming',
6 : 'onWithDimming'}
lightingStr = `cntrlType` + " " + lightingControlDict[cntrlType] + " " + `lightingPower` + " 1 "
if cntrlType != 1:
lightingStr += `standbyPower` + " "
if cntrlType > 3:
lightingStr += `ballastLossFactor` + " " + `lightingSetpoint` + " "
if cntrlType != 1 and cntrlType!=4:
lightingStr += `delayTime`
lightingStr += "\n"
return lightingStr
def isSensor(testPt, sensors):
for pt in sensors:
if pt==None: return False
if pt.DistanceTo(testPt) < sc.doc.ModelAbsoluteTolerance:
# this is a senor point
return True
# not a sensor
return False
msg = str.Empty
# PREPARATION/CHECKING THE INPUTS #
# number of spaces
# this component considers each branch as a separate space and will generate
# a separate heading file for each space and generate a separate set of results
numOfSpaces = testPts.BranchCount
# number of total points
numOfPts = testPts.DataCount
# set up illuminance levels for the spaces if they are not already set
if len(DLAIllumThresholds)==0: DLAIllumThresholds = [300] * numOfSpaces
# check for occupancy file
occupancyFilesFolder = os.path.join(sc.sticky["Honeybee_DefaultFolder"], "DaysimCSVOCC\\")
if len(occFiles)!=0:
for fileCount, fileName in enumerate(occFiles):
if fileName.lower().endswith(".csv"):
try:
if not os.path.isfile(fileName):
msg = "Can't find the occupancy file: " + fileName
return msg, None
except:
msg = "Occupancy file address is not valid."
return msg, None
else:
#try:
# might be an energyplus schedule
filePath = convertEPScheduleToDSSchedule(fileName, occupancyFilesFolder)
occFiles[fileCount] = filePath
else:
daysimOccFile = os.path.join(sc.sticky["Honeybee_DefaultFolder"], "DaysimCSVOCC\\userDefinedOcc_9to17.csv")
occFiles = [daysimOccFile] * numOfSpaces
if not os.path.isfile(daysimOccFile):
msg = "Can't find the default occupancy file at: " + daysimOccFile + \
"\nYou can generate an occupancy file and connect the file address to occupancyFiles_ input."
return msg, None
# separate daylighting controls for each space
class SHDGroupSensors(object):
def __init__(self, sensorsList):
self.intSensors = sensorsList[0]
self.extSensors = sensorsList[1]
lightingControls = []
SHDGroupISensors = []
SHDGroupIISensors = []
originalIllFiles = []
testPoints = []
testVectors = []
numOfPtsInEachSpace = []
# collect the data for spaces
for branchNum in range(numOfSpaces):
ptList = list(testPts.Branch(branchNum))
testPoints.append(ptList)
numOfPtsInEachSpace.append(len(ptList))
try: testVectors.append(list(testVecs.Branch(branchNum)))
except: testVectors.append([rc.Geometry.Vector3d.ZAxis] * testPts.Branch(branchNum).Count)
try: lightingControls.append(list(lightingControlGroups.Branch(branchNum)))
except: lightingControls.append([genDefaultLightingControl()])
try: SHDGroupISensors.append(SHDGroupSensors(SHDGroupI_Sensors.Branch(branchNum)))
except: SHDGroupISensors.append(None)
try: SHDGroupIISensors.append(SHDGroupSensors((SHDGroupII_Sensors.Branch(branchNum))))
except: SHDGroupIISensors.append(None)
# create a place holder for each shading group
# sort the ill files based on their names
originalIllFilesSorted = convertIllFileDaraTreeIntoSortedDictionary(illFilesAddress)
# number of points should be the same in all the illfile lists
# that's why I just try the first list of the ill files
numOfPtsInEachFile = []
for illFile in originalIllFilesSorted[0][0]:
with open(illFile, "r") as illInf:
for lineCount, line in enumerate(illInf):
if not line.startswith("#"):
numOfPtsInEachFile.append(len(line.strip().split(" ")) - 4)
break
# find the current project directory that could be differnt from the old one
projectDirectory = os.path.dirname(originalIllFilesSorted[0][0][0]) + "\\"
# print numOfPtsInEachFile
#print numOfPtsInEachSpace
# make sure the number of points inside the ill file matches the number of points
# inside the point list
if sum(numOfPtsInEachFile) != numOfPts:
msg = "Number of points in ill files: " + `sum(numOfPtsInEachFile)` + \
" doesn't match the number of points in point files: " + `numOfPts`
return msg, None
    # find the heading files and create multiple ill files for the study
heaFiles = []
filePath = os.path.dirname(originalIllFilesSorted[0][0][0])
try:
files = os.listdir(filePath)
except:
msg = "Can't find the heading files (*.hea) at " + filePath
return msg, None
for fileName in files:
if fileName.EndsWith(".hea"): heaFiles.append(fileName)
# sort heading files and pt files
try: heaFiles = sorted(heaFiles, key=lambda fileName: int(fileName.split(".")[-2].split("_")[-1]))
except: pass
# copy one of the heading files to be modified
heaFile = heaFiles[0]
with open(os.path.join(filePath, heaFile), "r") as heainf:
baseHea = heainf.readlines()
modifiedHeaBase = str.Empty
keywordsToBeRemoved = ["daylight_autonomy_active_RGB", "electric_lighting", "direct_sunlight_file", "thermal_simulation", "occupancy_profile",
"continuous_daylight_autonomy_active_RGB", "UDI_100_active_RGB", "UDI_100_2000_active_RGB", "UDI_2000_active_RGB",
"DDS_sensor_file", "DDS_file", "sensor_file_info"]
linesToBePassed = []
for lineCount, line in enumerate(baseHea):
line = line.strip()
if not lineCount in linesToBePassed:
if line.split(" ")[0] == ("sensor_file"):
modifiedHeaBase += "sensor_file [sensor_file]\n"
elif line.startswith("occupancy-file"):
modifiedHeaBase += "occupancy-file [occupancy]\n"
elif line.startswith("occupancy"):
modifiedHeaBase += "occupancy 5 [occupancy]\n"
elif line.startswith("project_name"):
projectName = line.split("project_name")[-1].strip()
modifiedHeaBase += "project_name [project_name]\n"
elif line.startswith("project_directory"):
# projectDirectory = line.split("project_directory")[-1].strip()
modifiedHeaBase += "project_directory " + projectDirectory + "\n"
elif line.startswith("tmp_directory"):
# create a place holder for the new temp file
modifiedHeaBase += "tmp_directory " + os.path.join(projectDirectory, "tmp[spaceCount]") + "\\\n"
elif line.startswith("daylight_savings_time"):
modifiedHeaBase += "daylight_savings_time 1\n"
elif line.startswith("minimum_illuminance_level"):
modifiedHeaBase += "minimum_illuminance_level [minimum_illuminance_level]\n"
elif line.split(" ")[0] == "shading":
# add the place holder for new dc and ill file names
if line.find(".ill") >= 0: line = line.replace(".ill", "[spaceCount].ill")
if line.find(".dc") >= 0: line = line.replace(".dc", "[spaceCount].dc")
shadingStr = line + "\n"
for lineC in range(lineCount + 1, len(baseHea)):
line = baseHea[lineC].strip()
if lineCount > len(baseHea) or line == str.Empty or line.startswith("=") or line.split(" ")[0] in daysimHeaderKeywords:
# good example here that I should have used the while loop instead!
break
else:
linesToBePassed.append(lineC)
# add the place holder for new dc and ill file names
if line.find(".ill") >= 0:
line = line.replace(".ill", "[spaceCount].ill")
# I'm not sure if I really need to modify the .dc files
# based on the graph on daysim page it should only look
# for the ill files and not the dc files
if line.find(".dc") >= 0:
line = line.replace(".dc", "[spaceCount].dc")
linesToBePassed.append(lineC)
shadingStr += line + "\n"
modifiedHeaBase += shadingStr
#modifiedHeaBase.append("minimum_illuminance_level [minimum_illuminance_level]\n")
elif line.split(" ")[0] == "electric_lighting_system" or line.split(" ")[0] == "user_profile":
# remove the lines related to electric lighting system as the new ones should be assigned
for lineC in range(lineCount + 1, len(baseHea)):
line = baseHea[lineC].strip()
if lineCount > len(baseHea) or line == str.Empty or line.startswith("=") or line.split(" ")[0] in daysimHeaderKeywords:
# good example here that I should have used the while loop instead!
break
else:
linesToBePassed.append(lineC)
elif line.split(" ")[0] in keywordsToBeRemoved:
pass
else:
modifiedHeaBase += line + "\n"
# clean the parts that are related to lighting control and schedule
##replace
# re-write the ill files based on the number of points in each space
# if the study is only for a single space then all the ill files should be merged
# considering the structure of .ill files and the fact that the files can be really
# huge this part can take long. It is good to consider a new name for these files so
    # in case the user has already run the study for this folder the script just uses the
# available files
# generate new files for each space
# check if the files are already generated once
# not a good idea at all - many users don't really change the file name and run several
    # studies with the same name; in that case, results would always stay the same! Oops...
firstRun = True
newIllFileNamesDict = {}
for shdGroupCounter, illFileList in originalIllFilesSorted.items():
newIllFileNamesDict[shdGroupCounter] = []
for shadingStateCount in range(len(illFileList)):
for spaceCount in range(numOfSpaces):
newIllFileName = illFileList[shadingStateCount][0].split(".ill")[0] + "_space_" + str(spaceCount) + ".ill"
newDcFileName = illFileList[shadingStateCount][0].split(".ill")[0] + "_space_" + str(spaceCount) + ".dc"
newIllFileNamesDict[shdGroupCounter].append(newIllFileName) #collect ill files to calculate sDA
#if not (os.path.isfile(newIllFileName) and os.path.isfile(newDcFileName)):
# firstRun = True
# break
# open all the available ill files and put them in the dictionary
illFilesDict = {}
newIllFilesDict = {}
if firstRun:
for shdGroupCounter, illFileList in originalIllFilesSorted.items():
for shadingStateCount, shadingStateFiles in enumerate(illFileList):
# create a place holder for new .ill files for each shading group
newIllFileNamesDict[shdGroupCounter] = []
# open all the files for this shading state into memory
for counter, illFile in enumerate(illFileList[shadingStateCount]):
illfile = open(illFile, "r")
illFilesDict[counter] = illfile #put each ill file from each cpu separate/ I don't know why I have done this
# open new ill files for each space and put them in the same directory
for spaceCount in range(numOfSpaces):
newIllFileName = illFileList[shadingStateCount][0].split(".ill")[0] + "_space_" + str(spaceCount) + ".ill"
newIllFileNamesDict[shdGroupCounter].append(newIllFileName) #collect new ill file names to calculate sDA
newIllFile = open(newIllFileName, "w")
newIllFilesDict[spaceCount] = newIllFile
# all the files will have the same length of 8760 lines for the hours of the year
for line in range(8760):
# merge the line from all the source file
mergedLine = []
for illFileKey in illFilesDict.keys():
line = illFilesDict[illFileKey].readline()
if illFileKey==0:
dateInfo = line.strip().split(" ")[:4]
mergedLine.extend(line.strip().split(" ")[4:])
# write the values to the target files
for illFileKey in newIllFilesDict.keys():
line = " ".join(dateInfo + mergedLine[sum(numOfPtsInEachSpace[:illFileKey]):sum(numOfPtsInEachSpace[:illFileKey+1])])
newIllFilesDict[illFileKey].write(line + "\n")
# close all the opened files
for illFileKey in illFilesDict.keys(): illFilesDict[illFileKey].close()
for illFileKey in newIllFilesDict.keys(): newIllFilesDict[illFileKey].close()
# print numOfPtsInEachSpace
    # write the new .dc files for each space
dcFilesDict = {}
newDcFilesDict = {}
for shdGroupCounter, illFileList in originalIllFilesSorted.items():
for shadingStateCount, shadingStateFiles in enumerate(illFileList):
#illFileDict[shaidngGroupCounter]
lenOfDCFiles = []
for counter, illFile in enumerate(shadingStateFiles):
if illFile.endswith("_up.ill"):
dcFile = illFile.replace("_up.ill", ".dc")
elif illFile.endswith("_down.ill"):
dcFile = illFile.replace("_down.ill", ".dc")
else:
dcFile = illFile.replace(".ill", ".dc")
lenOfDCFile = getFilelength(dcFile) - 6 #Daysim files has 6 lines as header
lenOfDCFiles.append(lenOfDCFile)
dcfile = open(dcFile, "r")
dcFilesDict[counter] = dcfile
# open new ill files for each space and put them in the same directory
for spaceCount in range(numOfSpaces):
newDcFileName = illFileList[shadingStateCount][0].split(".ill")[0] + "_space_" + str(spaceCount) + ".dc"
newDcFile = open(newDcFileName, "w")
newDcFilesDict[spaceCount] = newDcFile
heading = str.Empty
for line in dcFilesDict[0]:
if line.startswith("#"):
#make one instance of heading
heading += line
else:
newDcFilesDict[0].write(heading)
newDcFilesDict[0].write(line)
break
pointCount = 1
spaceCount = 0
for dcFileKey in dcFilesDict.keys():
for line in dcFilesDict[dcFileKey]:
if not line.startswith("#"):
# write the line
newDcFilesDict[spaceCount].write(line)
pointCount+=1
if pointCount == sum(numOfPtsInEachSpace[:spaceCount + 1]):
# end of the file, start a new file
spaceCount += 1
try: newDcFilesDict[spaceCount].write(heading)
except: pass
# close all the opened files
for dcFileKey in dcFilesDict.keys(): dcFilesDict[dcFileKey].close()
for dcFileKey in newDcFilesDict.keys(): newDcFilesDict[dcFileKey].close()
heaFileNames = []
# write point files and heading files
for spaceCount in range(numOfSpaces):
tmpFolder = os.path.join(projectDirectory, "tmp_space_" + str(spaceCount))
if not os.path.isdir(tmpFolder): os.mkdir(tmpFolder)
subProjectName = projectName + "_space_" + str(spaceCount)
ptsFileName = subProjectName + ".pts"
modifiedHea = modifiedHeaBase
with open(os.path.join(filePath, ptsFileName), "w") as ptsf:
for ptCount, testPoint in enumerate(testPoints[spaceCount]):
ptNormal = testVectors[spaceCount][ptCount]
ptStr = '%.4f'%testPoint.X + '\t' + \
'%.4f'%testPoint.Y + '\t' + \
'%.4f'%testPoint.Z + '\t' + \
'%.4f'%ptNormal.X + '\t' + \
'%.4f'%ptNormal.Y + '\t' + \
'%.4f'%ptNormal.Z + '\n'
ptsf.write(ptStr)
# replace some of the values
# replace sensor file with the new file
if modifiedHea.find("[sensor_file]") >= 0:
modifiedHea = modifiedHea.replace("[sensor_file]", ptsFileName)
else:
modifiedHea += "sensor_file " + ptsFileName + "\n"
# occupancy file
try:
occFileFullPath = occFiles[spaceCount]
except:
occFileFullPath = occFiles[0]
#copy occupancy file to the folder
occFileName = os.path.basename(occFileFullPath)
targetFile = os.path.join(projectDirectory, occFileName)
if not os.path.isdir(targetFile):
shutil.copy2(occFileFullPath, targetFile)
if modifiedHea.find("[occupancy]") >= 0:
modifiedHea = modifiedHea.replace("[occupancy]", occFileName)
else:
# pass
modifiedHea += "occupancy-file " + occFileName + "\n"
modifiedHea += "occupancy 5 " + occFileName + "\n"
modifiedHea = modifiedHea.replace("[project_name]", subProjectName)
# daylight saving
if modifiedHea.find("daylight_savings_time") >= 0:
pass
else:
modifiedHea += "daylight_savings_time 1\n"
# illuminance level threshold
try: illumT = DLAIllumThresholds[spaceCount]
except: illumT = DLAIllumThresholds[0]
if modifiedHea.find("[minimum_illuminance_level]") >= 0:
modifiedHea = modifiedHea.replace("[minimum_illuminance_level]", str(illumT))
else:
modifiedHea += "minimum_illuminance_level " + str(illumT)+ "\n"
# replace the file names for advanced shadings
modifiedHea = modifiedHea.replace("[spaceCount]", "_space_" + str(spaceCount))
# add user information
modifiedHea += "user_profile 1\n" + \
"active 100 1 1\n"
try:
lghtCtrls = lightingControls[spaceCount]
lightingGroupSensors = []
except:
lghtCtrls = []
if len(lghtCtrls)!=0:
modifiedHea += "\n\nelectric_lighting_system " + str(len(lghtCtrls)) + "\n"
for lightingControl in lghtCtrls:
lightingGroupSensors.append(lightingControl.sensorPts)
lightingControlDefinition = lightingControl.lightingControlStr
modifiedHea += lightingControlDefinition
# write sensor info
modifiedHea += "\nsensor_file_info "
for pt in testPoints[spaceCount]:
sensorInfo = []
# test shading group
for groupCount, shdGroupSensor in enumerate([SHDGroupISensors[spaceCount], SHDGroupIISensors[spaceCount]]):
if shdGroupSensor!=None:
if isSensor(pt, shdGroupSensor.intSensors):
sensorInfo.append('BG' + str(groupCount+1))
if isSensor(pt, shdGroupSensor.extSensors):
sensorInfo.append('BG' + str(groupCount+1) + '_Ext')
# test lighting group
for groupCount, lightingGroupSensor in enumerate(lightingGroupSensors):
if lightingGroupSensor!=[] and isSensor(pt, lightingGroupSensor):
sensorInfo.append('LG' + str(groupCount+1))
if len(sensorInfo)==0:
modifiedHea += "0 "
elif len(sensorInfo)==1:
modifiedHea += sensorInfo[0] + " "
else:
modifiedHea += ",".join(sensorInfo) + " "
# output files
modifiedHea += "\n\n############################\n" + \
"# Daylighting Result Files #\n" + \
"############################\n"
modifiedHea += "daylight_autonomy_active_RGB " + subProjectName +"_autonomy.DA\n"
modifiedHea += "continuous_daylight_autonomy_active_RGB " + subProjectName +".CDA\n"
modifiedHea += "UDI_100_active_RGB " + subProjectName +"_less_than_100.UDI\n"
modifiedHea += "UDI_100_2000_active_RGB " + subProjectName +"_100_2000.UDI\n"
modifiedHea += "UDI_2000_active_RGB " + subProjectName + "_more_than_2000.UDI\n"
modifiedHea += "occupancy_profile " + subProjectName + "_occ_profile.csv\n"
modifiedHea += "electric_lighting " + subProjectName + "_electriclighting.htm\n"
modifiedHea += "direct_sunlight_file " + subProjectName + ".dir\n"
modifiedHea += "thermal_simulation " + subProjectName + "_intgain.csv\n"
        #modifiedHea += "DDS_sensor_file " + subProjectName + ".dds\n"
#modifiedHea += "DDS_file " + subProjectName +".sen\n"
heaFileName = subProjectName + ".hea"
heaFileNames.append(heaFileName)
with open(os.path.join(filePath, heaFileName), "w") as heaf:
heaf.write(modifiedHea)
# write batch files
batchFileNames = []
pathStr = "SET RAYPATH=.;" + hb_RADLibPath + ";" + hb_DSPath + ";" + hb_DSLibPath + ";\nPATH=" + hb_RADPath + ";" + hb_DSPath + ";" + hb_DSLibPath + ";$PATH\n"
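    # Editor's note (illustrative sketch): the string above yields a batch header of
    # the form below, with the actual folders depending on the local Radiance/Daysim
    # installation:
    #   SET RAYPATH=.;<hb_RADLibPath>;<hb_DSPath>;<hb_DSLibPath>;
    #   PATH=<hb_RADPath>;<hb_DSPath>;<hb_DSLibPath>;$PATH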
for heaFileName in heaFileNames:
batchFileName = heaFileName.replace(".hea", ".bat")
batchFileNames.append(batchFileName)
with open(os.path.join(filePath, batchFileName), "w") as batchInf:
batchFileStr = ":: Daysim Result Calculation - Generated by Honeybee\n\n"
batchFileStr += pathStr
            # gen glare profile in case there are any dynamic shading systems!
if len(originalIllFilesSorted.keys())>1:
batchFileStr += ':: Glare Profile in The Case of Dynamic Shading Calculation\n' + \
'gen_directsunlight ' + os.path.join(filePath, heaFileName) + '\n'
batchFileStr += ':: Generate the result files\n' + \
'ds_el_lighting.exe ' + os.path.join(filePath, heaFileName) + '\n'
batchInf.write(batchFileStr)
# write a batch file and run the study
ncpus = int(os.environ["NUMBER_OF_PROCESSORS"])
if ncpus == 0: ncpus = 1
    #execute the batch files in parallel if there are enough CPUs!
fileNames = []
for fileName in batchFileNames:
batchFileName = os.path.join(filePath, fileName)
fileNames.append(batchFileName)
executeBatchFiles(fileNames, ncpus - 1, shell=runInBackground)
# calculate sDA
#sDADict = {}
#if len(newIllFileNamesDict.keys())!=1:
# warning = "This version of Honeybee doesn't consider dynamic blinds in sDA calculation!\n"
# w = gh.GH_RuntimeMessageLevel.Warning
# ghenv.Component.AddRuntimeMessage(w, warning)
#
#for spaceCount, spaceIllFiles in enumerate(newIllFileNamesDict[0]):
# totalOccupancyHours = 0
# sDADict[spaceCount] = 0
# try: DLAIllumThreshold = DLAIllumThresholds[spaceCount]
# except: DLAIllumThreshold = DLAIllumThresholds[0]
#
#
# # open the file to read the values
# with open(spaceIllFiles, "r") as illInf:
#
# # import occupancy profile
# try: occFile = occFiles[spaceCount]
# except: occFile = occFiles[0]
# with open(occFile, "r") as occInFile:
# occupancyLines = occInFile.readlines()
#
    #        # each line represents an hour
# for lineCount, line in enumerate(illInf):
# higherThanThreshold = 0
# # check the occupancy profile
# if int(occupancyLines[lineCount + 3].split(",")[-1]) != 0:
# totalOccupancyHours += 1
# illValues = line.split(" ")[1].strip().split(" ")
#
# # check number of points that satisfy the minimum illuminance
# for sensorCount, illuminance in enumerate(illValues):
# # print float(illuminance), DLAIllumThreshold, float(illuminance) >= DLAIllumThreshold
# if float(illuminance) >= DLAIllumThreshold:
# higherThanThreshold += 1
#
# if higherThanThreshold/len(illValues) > .5:
# sDADict[spaceCount] += 1
#
# sDADict[spaceCount] = "%.2f"%((sDADict[spaceCount]/totalOccupancyHours) * 100)
# read all the results
DLALists = []
underUDLILists = []
inRangeUDLILists = []
overUDLILists = []
CDALists = []
EPLSchLists = []
htmLists = []
resultFiles = os.listdir(projectDirectory)
for fileName in resultFiles:
if fileName.endswith(".DA"): DLALists.append(os.path.join(filePath,fileName))
elif fileName.endswith(".CDA"): CDALists.append(os.path.join(filePath,fileName))
elif fileName.endswith(".htm"): htmLists.append(os.path.join(filePath,fileName))
elif fileName.endswith("_intgain.csv"): EPLSchLists.append(os.path.join(filePath,fileName))
elif fileName.endswith("less_than_100.UDI"): underUDLILists.append(os.path.join(filePath,fileName))
elif fileName.endswith("100_2000.UDI"): inRangeUDLILists.append(os.path.join(filePath,fileName))
elif fileName.endswith("more_than_2000.UDI"): overUDLILists.append(os.path.join(filePath,fileName))
# sort the lists
try: CDALists = sorted(CDALists, key=lambda fileName: int(fileName.split(".")[-2].split("_")[-1]))
except: pass
try: DLALists = sorted(DLALists, key=lambda fileName: int(fileName.split(".")[-2].split("_")[-2]))
except: pass
try: htmLists = sorted(htmLists, key=lambda fileName: int(fileName.split(".")[-2].split("_")[-2]))
except: pass
try: EPLSchLists = sorted(EPLSchLists, key=lambda fileName: int(fileName.split(".")[-2].split("_")[-2]))
except: pass
try: underUDLILists = sorted(underUDLILists, key=lambda fileName: int(fileName.split(".")[-2].split("_")[-4]))
except: pass
try: inRangeUDLILists = sorted(inRangeUDLILists, key=lambda fileName: int(fileName.split(".")[-2].split("_")[-3]))
except: pass
try: overUDLILists = sorted(overUDLILists, key=lambda fileName: int(fileName.split(".")[-2].split("_")[-4]))
except: pass
return None, [DLALists, underUDLILists, inRangeUDLILists, overUDLILists, CDALists, EPLSchLists, htmLists]
def isAllNone(dataList):
for item in dataList.AllData():
if item!=None: return False
return True
# Throw a warning if any of the mandatory inputs is missing
if isAllNone(_illFilesAddress) or isAllNone(_testPoints):
msg = "Major input missing. Please check"
w = gh.GH_RuntimeMessageLevel.Warning
ghenv.Component.AddRuntimeMessage(w, msg)
if _runIt and not isAllNone(_illFilesAddress) and not isAllNone(_testPoints):
_testPoints.SimplifyPaths()
lightingControlGroups_.SimplifyPaths()
_illFilesAddress.SimplifyPaths()
res = main(_illFilesAddress, _testPoints, ptsVectors_, occupancyFiles_, lightingControlGroups_, SHDGroupI_Sensors_, SHDGroupII_Sensors_, _DLAIllumThresholds_, _runIt > 1)
if res!= -1:
msg, results = res
if msg!=None:
w = gh.GH_RuntimeMessageLevel.Warning
ghenv.Component.AddRuntimeMessage(w, msg)
else:
DLALists, underUDLILists, inRangeUDLILists, overUDLILists, CDALists, EPLSchLists, htmLists = results
DLA = DataTree[Object]()
UDLI_Less_100 = DataTree[Object]()
UDLI_100_2000 = DataTree[Object]()
UDLI_More_2000 = DataTree[Object]()
CDA = DataTree[Object]()
annualProfiles = DataTree[Object]()
sDA = DataTree[Object]()
htmReport = DataTree[Object]()
def readDSStandardResults(filePath):
results = []
with open(filePath, "r") as inf:
for line in inf:
if not line.startswith("#"):
results.append(float(line.split("\t")[-1]))
return results
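            # Editor's note (illustrative sketch): readDSStandardResults keeps only the
            # last tab-separated column of each non-comment line, so a Daysim result
            # line such as "2.50\t1.80\t61.42" is read back as the float 61.42.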
def getsDA(DLARes, threshold = 50):
moreThan = 0
for res in DLARes:
if res >= threshold:
moreThan += 1
return "%.2f"%((moreThan/len(DLARes)) * 100)
for branchNum in range(_testPoints.BranchCount):
p = GH_Path(branchNum)
DLARes = readDSStandardResults(DLALists[branchNum])
DLA.AddRange(DLARes, p)
UDLI_Less_100.AddRange(readDSStandardResults(underUDLILists[branchNum]), p)
UDLI_100_2000.AddRange(readDSStandardResults(inRangeUDLILists[branchNum]), p)
UDLI_More_2000.AddRange(readDSStandardResults(overUDLILists[branchNum]), p)
CDA.AddRange(readDSStandardResults(CDALists[branchNum]), p)
annualProfiles.Add(EPLSchLists[branchNum], p)
sDA.Add(getsDA(DLARes), p)
htmReport.Add(htmLists[branchNum], p)
| gpl-3.0 | -7,120,307,103,483,225,000 | -7,442,119,711,159,515,000 | 45.269592 | 320 | 0.584973 | false |
grap/account-financial-tools | __unported__/account_compute_tax_amount/account_move_line.py | 7 | 3399 | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2013 Camptocamp (http://www.camptocamp.com)
# All Right Reserved
#
# Author : Vincent Renaville (Camptocamp)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import orm, fields
import openerp.addons.decimal_precision as dp
class account_move_line(orm.Model):
_inherit = "account.move.line"
# We set the tax_amount invisible, because we recompute it in every case.
_columns = {
'tax_amount': fields.float(
'Tax/Base Amount',
digits_compute=dp.get_precision('Account'),
invisible=True,
select=True,
help="If the Tax account is a tax code account, "
"this field will contain the taxed amount. "
"If the tax account is base tax code, "
"this field will contain the basic amount (without tax)."
),
}
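    # Illustrative example (editor's note, not part of the original module): a move
    # line posted with credit=100.0 and debit=0.0 on a tax code account gets
    # tax_amount recomputed as 100.0 - 0.0 = 100.0 by the create()/write() overrides
    # below.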
def create(self, cr, uid, vals, context=None, check=True):
result = super(account_move_line, self).create(cr, uid, vals,
context=context,
check=check)
if result:
move_line = self.read(cr, uid, result,
['credit', 'debit', 'tax_code_id'],
context=context)
if move_line['tax_code_id']:
tax_amount = move_line['credit'] - move_line['debit']
self.write(cr, uid, [result],
{'tax_amount': tax_amount},
context=context)
return result
def write(self, cr, uid, ids, vals, context=None, check=True,
update_check=True):
result = super(account_move_line, self).write(
cr, uid, ids, vals,
context=context,
check=check,
update_check=update_check
)
if result:
if ('debit' in vals) or ('credit' in vals):
move_lines = self.read(cr, uid, ids,
['credit', 'debit', 'tax_code_id'],
context=context)
for move_line in move_lines:
if move_line['tax_code_id']:
tax_amount = move_line['credit'] - move_line['debit']
self.write(cr, uid,
[move_line['id']],
{'tax_amount': tax_amount},
context=context)
return result
| agpl-3.0 | -5,133,703,835,064,616,000 | 8,239,352,468,559,268,000 | 40.962963 | 78 | 0.503089 | false |
risteon/nimble | nimble/sources/datasets.py | 1 | 1222 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
import os
from ..composition import LabeledDataSource
from .images import ImageFileSource
from .labels import TransformMatrixSource
class KittiOdometrySource(LabeledDataSource):
"""Read images and ground truth poses of the Kitti dataset.
http://www.cvlibs.net/datasets/kitti/
Currently, this only reads the left image.
"""
def __init__(self, kitti_root_path, sequence, **kwargs):
self.seekable = True
self.parallel_possible = False
self.cached = False
self._sequence = sequence
self._sequence_folder = os.path.join(kitti_root_path, u"sequences", u"{:02d}".format(self._sequence),
u"image_2")
poses_file = os.path.join(kitti_root_path, u"poses", u"{:02d}.txt".format(self._sequence))
image_source = ImageFileSource(self._image_filename)
label_source = TransformMatrixSource(poses_file)
super(KittiOdometrySource, self).__init__(data_source=image_source, label_source=label_source, **kwargs)
def _image_filename(self, position):
return os.path.join(self._sequence_folder, "{:06}.png".format(position))
| mit | -3,433,399,952,411,804,700 | -4,080,582,908,361,651,700 | 34.941176 | 112 | 0.660393 | false |
daevaorn/sentry | src/sentry/migrations/0234_auto__add_savedsearchuserdefault__add_unique_savedsearchuserdefault_pr.py | 9 | 43642 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'SavedSearchUserDefault'
db.create_table('sentry_savedsearch_userdefault', (
('id', self.gf('sentry.db.models.fields.bounded.BoundedBigAutoField')(primary_key=True)),
('savedsearch', self.gf('sentry.db.models.fields.foreignkey.FlexibleForeignKey')(to=orm['sentry.SavedSearch'])),
('project', self.gf('sentry.db.models.fields.foreignkey.FlexibleForeignKey')(to=orm['sentry.Project'])),
('user', self.gf('sentry.db.models.fields.foreignkey.FlexibleForeignKey')(to=orm['sentry.User'])),
))
db.send_create_signal('sentry', ['SavedSearchUserDefault'])
# Adding unique constraint on 'SavedSearchUserDefault', fields ['project', 'user']
db.create_unique('sentry_savedsearch_userdefault', ['project_id', 'user_id'])
def backwards(self, orm):
# Removing unique constraint on 'SavedSearchUserDefault', fields ['project', 'user']
db.delete_unique('sentry_savedsearch_userdefault', ['project_id', 'user_id'])
# Deleting model 'SavedSearchUserDefault'
db.delete_table('sentry_savedsearch_userdefault')
models = {
'sentry.activity': {
'Meta': {'object_name': 'Activity'},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True'})
},
'sentry.apikey': {
'Meta': {'object_name': 'ApiKey'},
'allowed_origins': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'}),
'label': ('django.db.models.fields.CharField', [], {'default': "'Default'", 'max_length': '64', 'blank': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'key_set'", 'to': "orm['sentry.Organization']"}),
'scopes': ('django.db.models.fields.BigIntegerField', [], {'default': 'None'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'})
},
'sentry.auditlogentry': {
'Meta': {'object_name': 'AuditLogEntry'},
'actor': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'audit_actors'", 'null': 'True', 'to': "orm['sentry.User']"}),
'actor_key': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ApiKey']", 'null': 'True', 'blank': 'True'}),
'actor_label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ip_address': ('django.db.models.fields.GenericIPAddressField', [], {'max_length': '39', 'null': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'target_object': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'target_user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'audit_targets'", 'null': 'True', 'to': "orm['sentry.User']"})
},
'sentry.authidentity': {
'Meta': {'unique_together': "(('auth_provider', 'ident'), ('auth_provider', 'user'))", 'object_name': 'AuthIdentity'},
'auth_provider': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.AuthProvider']"}),
'data': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'last_synced': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_verified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.authprovider': {
'Meta': {'object_name': 'AuthProvider'},
'config': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'default_global_access': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'default_role': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '50'}),
'default_teams': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.Team']", 'symmetrical': 'False', 'blank': 'True'}),
'flags': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_sync': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']", 'unique': 'True'}),
'provider': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'sync_time': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'})
},
'sentry.broadcast': {
'Meta': {'object_name': 'Broadcast'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_expires': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2016, 1, 25, 0, 0)', 'null': 'True', 'blank': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'link': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'upstream_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'})
},
'sentry.broadcastseen': {
'Meta': {'unique_together': "(('broadcast', 'user'),)", 'object_name': 'BroadcastSeen'},
'broadcast': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Broadcast']"}),
'date_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.event': {
'Meta': {'unique_together': "(('project_id', 'event_id'),)", 'object_name': 'Event', 'db_table': "'sentry_message'", 'index_together': "(('group_id', 'datetime'),)"},
'data': ('sentry.db.models.fields.node.NodeField', [], {'null': 'True', 'blank': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'db_column': "'message_id'"}),
'group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True', 'blank': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True', 'blank': 'True'}),
'time_spent': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'null': 'True'})
},
'sentry.eventmapping': {
'Meta': {'unique_together': "(('project_id', 'event_id'),)", 'object_name': 'EventMapping'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.eventuser': {
'Meta': {'unique_together': "(('project', 'ident'), ('project', 'hash'))", 'object_name': 'EventUser', 'index_together': "(('project', 'email'), ('project', 'username'), ('project', 'ip_address'))"},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True'}),
'hash': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
'ip_address': ('django.db.models.fields.GenericIPAddressField', [], {'max_length': '39', 'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'})
},
'sentry.file': {
'Meta': {'object_name': 'File'},
'blob': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'legacy_blob'", 'null': 'True', 'to': "orm['sentry.FileBlob']"}),
'blobs': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.FileBlob']", 'through': "orm['sentry.FileBlobIndex']", 'symmetrical': 'False'}),
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True'}),
'headers': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'path': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'size': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
'sentry.fileblob': {
'Meta': {'object_name': 'FileBlob'},
'checksum': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'path': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'size': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'})
},
'sentry.fileblobindex': {
'Meta': {'unique_together': "(('file', 'blob', 'offset'),)", 'object_name': 'FileBlobIndex'},
'blob': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.FileBlob']"}),
'file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'offset': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {})
},
'sentry.group': {
'Meta': {'object_name': 'Group', 'db_table': "'sentry_groupedmessage'", 'index_together': "(('project', 'first_release'),)"},
'active_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'culprit': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'db_column': "'view'", 'blank': 'True'}),
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True', 'blank': 'True'}),
'first_release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']", 'null': 'True'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_public': ('django.db.models.fields.NullBooleanField', [], {'default': 'False', 'null': 'True', 'blank': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'level': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}),
'logger': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'num_comments': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'null': 'True'}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'resolved_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'score': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'default': '0'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'time_spent_count': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'default': '0'}),
'time_spent_total': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'default': '0'}),
'times_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '1', 'db_index': 'True'})
},
'sentry.groupassignee': {
'Meta': {'object_name': 'GroupAssignee', 'db_table': "'sentry_groupasignee'"},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'assignee_set'", 'unique': 'True', 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'assignee_set'", 'to': "orm['sentry.Project']"}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'sentry_assignee_set'", 'to': "orm['sentry.User']"})
},
'sentry.groupbookmark': {
'Meta': {'unique_together': "(('project', 'user', 'group'),)", 'object_name': 'GroupBookmark'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'bookmark_set'", 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'bookmark_set'", 'to': "orm['sentry.Project']"}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'sentry_bookmark_set'", 'to': "orm['sentry.User']"})
},
'sentry.groupemailthread': {
'Meta': {'unique_together': "(('email', 'group'), ('email', 'msgid'))", 'object_name': 'GroupEmailThread'},
'date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'groupemail_set'", 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'msgid': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'groupemail_set'", 'to': "orm['sentry.Project']"})
},
'sentry.grouphash': {
'Meta': {'unique_together': "(('project', 'hash'),)", 'object_name': 'GroupHash'},
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}),
'hash': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'})
},
'sentry.groupmeta': {
'Meta': {'unique_together': "(('group', 'key'),)", 'object_name': 'GroupMeta'},
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'value': ('django.db.models.fields.TextField', [], {})
},
'sentry.groupresolution': {
'Meta': {'object_name': 'GroupResolution'},
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'unique': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
},
'sentry.grouprulestatus': {
'Meta': {'unique_together': "(('rule', 'group'),)", 'object_name': 'GroupRuleStatus'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_active': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'rule': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Rule']"}),
'status': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'})
},
'sentry.groupseen': {
'Meta': {'unique_together': "(('user', 'group'),)", 'object_name': 'GroupSeen'},
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'db_index': 'False'})
},
'sentry.groupsnooze': {
'Meta': {'object_name': 'GroupSnooze'},
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'unique': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'until': ('django.db.models.fields.DateTimeField', [], {})
},
'sentry.grouptagkey': {
'Meta': {'unique_together': "(('project', 'group', 'key'),)", 'object_name': 'GroupTagKey'},
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'values_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
},
'sentry.grouptagvalue': {
'Meta': {'unique_together': "(('project', 'key', 'value', 'group'),)", 'object_name': 'GroupTagValue', 'db_table': "'sentry_messagefiltervalue'"},
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'grouptag'", 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'grouptag'", 'null': 'True', 'to': "orm['sentry.Project']"}),
'times_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'sentry.helppage': {
'Meta': {'object_name': 'HelpPage'},
'content': ('django.db.models.fields.TextField', [], {}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_visible': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64', 'unique': 'True', 'null': 'True'}),
'priority': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '50'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
'sentry.lostpasswordhash': {
'Meta': {'object_name': 'LostPasswordHash'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'hash': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'unique': 'True'})
},
'sentry.option': {
'Meta': {'object_name': 'Option'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'}),
'last_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {})
},
'sentry.organization': {
'Meta': {'object_name': 'Organization'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'default_role': ('django.db.models.fields.CharField', [], {'default': "'member'", 'max_length': '32'}),
'flags': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'org_memberships'", 'symmetrical': 'False', 'through': "orm['sentry.OrganizationMember']", 'to': "orm['sentry.User']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
},
'sentry.organizationaccessrequest': {
'Meta': {'unique_together': "(('team', 'member'),)", 'object_name': 'OrganizationAccessRequest'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'member': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.OrganizationMember']"}),
'team': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Team']"})
},
'sentry.organizationmember': {
'Meta': {'unique_together': "(('organization', 'user'), ('organization', 'email'))", 'object_name': 'OrganizationMember'},
'counter': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'flags': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'has_global_access': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'member_set'", 'to': "orm['sentry.Organization']"}),
'role': ('django.db.models.fields.CharField', [], {'default': "'member'", 'max_length': '32'}),
'teams': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.Team']", 'symmetrical': 'False', 'through': "orm['sentry.OrganizationMemberTeam']", 'blank': 'True'}),
'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '50', 'blank': 'True'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'sentry_orgmember_set'", 'null': 'True', 'to': "orm['sentry.User']"})
},
'sentry.organizationmemberteam': {
'Meta': {'unique_together': "(('team', 'organizationmember'),)", 'object_name': 'OrganizationMemberTeam', 'db_table': "'sentry_organizationmember_teams'"},
'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'organizationmember': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.OrganizationMember']"}),
'team': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Team']"})
},
'sentry.organizationoption': {
'Meta': {'unique_together': "(('organization', 'key'),)", 'object_name': 'OrganizationOption', 'db_table': "'sentry_organizationoptions'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {})
},
'sentry.project': {
'Meta': {'unique_together': "(('team', 'slug'), ('organization', 'slug'))", 'object_name': 'Project'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'first_event': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'null': 'True'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'team': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Team']"})
},
'sentry.projectkey': {
'Meta': {'object_name': 'ProjectKey'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'key_set'", 'to': "orm['sentry.Project']"}),
'public_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'roles': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}),
'secret_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'})
},
'sentry.projectoption': {
'Meta': {'unique_together': "(('project', 'key'),)", 'object_name': 'ProjectOption', 'db_table': "'sentry_projectoptions'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {})
},
'sentry.release': {
'Meta': {'unique_together': "(('project', 'version'),)", 'object_name': 'Release'},
'data': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_released': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_started': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'new_groups': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'owner': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True', 'blank': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'ref': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
'sentry.releasefile': {
'Meta': {'unique_together': "(('release', 'ident'),)", 'object_name': 'ReleaseFile'},
'file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'name': ('django.db.models.fields.TextField', [], {}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"})
},
'sentry.rule': {
'Meta': {'object_name': 'Rule'},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
},
'sentry.savedsearch': {
'Meta': {'unique_together': "(('project', 'name'),)", 'object_name': 'SavedSearch'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_default': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'query': ('django.db.models.fields.TextField', [], {})
},
'sentry.savedsearchuserdefault': {
'Meta': {'unique_together': "(('project', 'user'),)", 'object_name': 'SavedSearchUserDefault', 'db_table': "'sentry_savedsearch_userdefault'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'savedsearch': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.SavedSearch']"}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.tagkey': {
'Meta': {'unique_together': "(('project', 'key'),)", 'object_name': 'TagKey', 'db_table': "'sentry_filterkey'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'values_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
},
'sentry.tagvalue': {
'Meta': {'unique_together': "(('project', 'key', 'value'),)", 'object_name': 'TagValue', 'db_table': "'sentry_filtervalue'"},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True', 'blank': 'True'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'times_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'sentry.team': {
'Meta': {'unique_together': "(('organization', 'slug'),)", 'object_name': 'Team'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
},
'sentry.user': {
'Meta': {'object_name': 'User', 'db_table': "'auth_user'"},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_managed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'db_column': "'first_name'", 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'})
},
'sentry.useroption': {
'Meta': {'unique_together': "(('user', 'project', 'key'),)", 'object_name': 'UserOption'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}),
'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {})
},
'sentry.userreport': {
'Meta': {'object_name': 'UserReport', 'index_together': "(('project', 'event_id'), ('project', 'date_added'))"},
'comments': ('django.db.models.fields.TextField', [], {}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
}
}
complete_apps = ['sentry'] | bsd-3-clause | 6,563,626,246,100,796,000 | -1,510,614,198,262,186,800 | 87.346154 | 217 | 0.57447 | false |
JackWoot/E2E-Messenger | Server/passlib/handlers/sun_md5_crypt.py | 19 | 13964 | """passlib.handlers.sun_md5_crypt - Sun's Md5 Crypt, used on Solaris
.. warning::
This implementation may not reproduce
the original Solaris behavior in some border cases.
See documentation for details.
"""
#=============================================================================
# imports
#=============================================================================
# core
from hashlib import md5
import re
import logging; log = logging.getLogger(__name__)
from warnings import warn
# site
# pkg
from passlib.utils import h64, to_unicode
from passlib.utils.compat import b, bytes, byte_elem_value, irange, u, \
uascii_to_str, unicode, str_to_bascii
import passlib.utils.handlers as uh
# local
__all__ = [
"sun_md5_crypt",
]
#=============================================================================
# backend
#=============================================================================
# constant data used by alg - Hamlet act 3 scene 1 + null char
# exact bytes as in http://www.ibiblio.org/pub/docs/books/gutenberg/etext98/2ws2610.txt
# from Project Gutenberg.
MAGIC_HAMLET = b(
"To be, or not to be,--that is the question:--\n"
"Whether 'tis nobler in the mind to suffer\n"
"The slings and arrows of outrageous fortune\n"
"Or to take arms against a sea of troubles,\n"
"And by opposing end them?--To die,--to sleep,--\n"
"No more; and by a sleep to say we end\n"
"The heartache, and the thousand natural shocks\n"
"That flesh is heir to,--'tis a consummation\n"
"Devoutly to be wish'd. To die,--to sleep;--\n"
"To sleep! perchance to dream:--ay, there's the rub;\n"
"For in that sleep of death what dreams may come,\n"
"When we have shuffled off this mortal coil,\n"
"Must give us pause: there's the respect\n"
"That makes calamity of so long life;\n"
"For who would bear the whips and scorns of time,\n"
"The oppressor's wrong, the proud man's contumely,\n"
"The pangs of despis'd love, the law's delay,\n"
"The insolence of office, and the spurns\n"
"That patient merit of the unworthy takes,\n"
"When he himself might his quietus make\n"
"With a bare bodkin? who would these fardels bear,\n"
"To grunt and sweat under a weary life,\n"
"But that the dread of something after death,--\n"
"The undiscover'd country, from whose bourn\n"
"No traveller returns,--puzzles the will,\n"
"And makes us rather bear those ills we have\n"
"Than fly to others that we know not of?\n"
"Thus conscience does make cowards of us all;\n"
"And thus the native hue of resolution\n"
"Is sicklied o'er with the pale cast of thought;\n"
"And enterprises of great pith and moment,\n"
"With this regard, their currents turn awry,\n"
"And lose the name of action.--Soft you now!\n"
"The fair Ophelia!--Nymph, in thy orisons\n"
"Be all my sins remember'd.\n\x00" #<- apparently null at end of C string is included (test vector won't pass otherwise)
)
# NOTE: these sequences are pre-calculated iteration ranges used by X & Y loops w/in rounds function below
xr = irange(7)
_XY_ROUNDS = [
tuple((i,i,i+3) for i in xr), # xrounds 0
tuple((i,i+1,i+4) for i in xr), # xrounds 1
tuple((i,i+8,(i+11)&15) for i in xr), # yrounds 0
tuple((i,(i+9)&15, (i+12)&15) for i in xr), # yrounds 1
]
del xr
def raw_sun_md5_crypt(secret, rounds, salt):
"given secret & salt, return encoded sun-md5-crypt checksum"
global MAGIC_HAMLET
assert isinstance(secret, bytes)
assert isinstance(salt, bytes)
# validate rounds
if rounds <= 0:
rounds = 0
real_rounds = 4096 + rounds
# NOTE: spec seems to imply max 'rounds' is 2**32-1
# generate initial digest to start off round 0.
# NOTE: algorithm 'salt' includes full config string w/ trailing "$"
result = md5(secret + salt).digest()
assert len(result) == 16
# NOTE: many things in this function have been inlined (to speed up the loop
# as much as possible), to the point that this code barely resembles
# the algorithm as described in the docs. in particular:
#
# * all accesses to a given bit have been inlined using the formula
# rbitval(bit) = (rval((bit>>3) & 15) >> (bit & 7)) & 1
#
# * the calculation of coinflip value R has been inlined
#
# * the conditional division of coinflip value V has been inlined as
# a shift right of 0 or 1.
#
# * the i, i+3, etc iterations are precalculated in lists.
#
# * the round-based conditional division of x & y is now performed
# by choosing an appropriate precalculated list, so that it only
# calculates the 7 bits which will actually be used.
#
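    # Editor's sketch (illustrative, not used by the loop below): the un-inlined bit
    # accessor described above would read
    #
    #     def rbitval(bit):
    #         # pick the byte holding the bit, shift it down, and mask to one bit
    #         return (rval((bit >> 3) & 15) >> (bit & 7)) & 1
    #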
X_ROUNDS_0, X_ROUNDS_1, Y_ROUNDS_0, Y_ROUNDS_1 = _XY_ROUNDS
# NOTE: % appears to be *slightly* slower than &, so we prefer & if possible
round = 0
while round < real_rounds:
# convert last result byte string to list of byte-ints for easy access
rval = [ byte_elem_value(c) for c in result ].__getitem__
# build up X bit by bit
x = 0
xrounds = X_ROUNDS_1 if (rval((round>>3) & 15)>>(round & 7)) & 1 else X_ROUNDS_0
for i, ia, ib in xrounds:
a = rval(ia)
b = rval(ib)
v = rval((a >> (b % 5)) & 15) >> ((b>>(a&7)) & 1)
x |= ((rval((v>>3)&15)>>(v&7))&1) << i
# build up Y bit by bit
y = 0
yrounds = Y_ROUNDS_1 if (rval(((round+64)>>3) & 15)>>(round & 7)) & 1 else Y_ROUNDS_0
for i, ia, ib in yrounds:
a = rval(ia)
b = rval(ib)
v = rval((a >> (b % 5)) & 15) >> ((b>>(a&7)) & 1)
y |= ((rval((v>>3)&15)>>(v&7))&1) << i
        # extract x'th and y'th bit, xoring them together to yield "coin flip"
coin = ((rval(x>>3) >> (x&7)) ^ (rval(y>>3) >> (y&7))) & 1
# construct hash for this round
h = md5(result)
if coin:
h.update(MAGIC_HAMLET)
h.update(unicode(round).encode("ascii"))
result = h.digest()
round += 1
# encode output
return h64.encode_transposed_bytes(result, _chk_offsets)
# NOTE: same offsets as md5_crypt
_chk_offsets = (
12,6,0,
13,7,1,
14,8,2,
15,9,3,
5,10,4,
11,
)
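# editorial sketch (not part of the original implementation): the un-inlined
# bit lookup that the NOTE block inside raw_sun_md5_crypt() refers to.
# ``rval`` is assumed to be the digest byte-lookup callable built at the top
# of each round (``rval(i)`` returns byte ``i`` of the previous digest).
def _reference_rbitval(rval, bit):
    "reference form of the inlined expression (rval((bit>>3) & 15) >> (bit & 7)) & 1"
    return (rval((bit >> 3) & 15) >> (bit & 7)) & 1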
#=============================================================================
# handler
#=============================================================================
class sun_md5_crypt(uh.HasRounds, uh.HasSalt, uh.GenericHandler):
"""This class implements the Sun-MD5-Crypt password hash, and follows the :ref:`password-hash-api`.
It supports a variable-length salt, and a variable number of rounds.
The :meth:`~passlib.ifc.PasswordHash.encrypt` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept the following optional keywords:
:type salt: str
:param salt:
Optional salt string.
If not specified, a salt will be autogenerated (this is recommended).
If specified, it must be drawn from the regexp range ``[./0-9A-Za-z]``.
:type salt_size: int
:param salt_size:
If no salt is specified, this parameter can be used to specify
the size (in characters) of the autogenerated salt.
It currently defaults to 8.
:type rounds: int
:param rounds:
Optional number of rounds to use.
Defaults to 5500, must be between 0 and 4294963199, inclusive.
:type bare_salt: bool
:param bare_salt:
Optional flag used to enable an alternate salt digest behavior
used by some hash strings in this scheme.
This flag can be ignored by most users.
Defaults to ``False``.
(see :ref:`smc-bare-salt` for details).
:type relaxed: bool
:param relaxed:
By default, providing an invalid value for one of the other
keywords will result in a :exc:`ValueError`. If ``relaxed=True``,
and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning`
will be issued instead. Correctable errors include ``rounds``
that are too small or too large, and ``salt`` strings that are too long.
.. versionadded:: 1.6
"""
#===================================================================
# class attrs
#===================================================================
name = "sun_md5_crypt"
setting_kwds = ("salt", "rounds", "bare_salt", "salt_size")
checksum_chars = uh.HASH64_CHARS
checksum_size = 22
    # NOTE: docs (Solaris release 9u2) say max password length is 255.
# NOTE: not sure if original crypt has a salt size limit,
# all instances that have been seen use 8 chars.
default_salt_size = 8
min_salt_size = 0
max_salt_size = None
salt_chars = uh.HASH64_CHARS
default_rounds = 5500 # current passlib default
min_rounds = 0
max_rounds = 4294963199 ##2**32-1-4096
    # XXX: ^ not sure what it does if past this bound... does the 32-bit int roll over?
rounds_cost = "linear"
ident_values = (u("$md5$"), u("$md5,"))
#===================================================================
# instance attrs
#===================================================================
bare_salt = False # flag to indicate legacy hashes that lack "$$" suffix
#===================================================================
# constructor
#===================================================================
def __init__(self, bare_salt=False, **kwds):
self.bare_salt = bare_salt
super(sun_md5_crypt, self).__init__(**kwds)
#===================================================================
# internal helpers
#===================================================================
@classmethod
def identify(cls, hash):
hash = uh.to_unicode_for_identify(hash)
return hash.startswith(cls.ident_values)
@classmethod
def from_string(cls, hash):
hash = to_unicode(hash, "ascii", "hash")
#
# detect if hash specifies rounds value.
# if so, parse and validate it.
# by end, set 'rounds' to int value, and 'tail' containing salt+chk
#
if hash.startswith(u("$md5$")):
rounds = 0
salt_idx = 5
elif hash.startswith(u("$md5,rounds=")):
idx = hash.find(u("$"), 12)
if idx == -1:
raise uh.exc.MalformedHashError(cls, "unexpected end of rounds")
rstr = hash[12:idx]
try:
rounds = int(rstr)
except ValueError:
raise uh.exc.MalformedHashError(cls, "bad rounds")
if rstr != unicode(rounds):
raise uh.exc.ZeroPaddedRoundsError(cls)
if rounds == 0:
# NOTE: not sure if this is forbidden by spec or not;
# but allowing it would complicate things,
# and it should never occur anyways.
raise uh.exc.MalformedHashError(cls, "explicit zero rounds")
salt_idx = idx+1
else:
raise uh.exc.InvalidHashError(cls)
#
# salt/checksum separation is kinda weird,
# to deal cleanly with some backward-compatible workarounds
# implemented by original implementation.
#
chk_idx = hash.rfind(u("$"), salt_idx)
if chk_idx == -1:
# ''-config for $-hash
salt = hash[salt_idx:]
chk = None
bare_salt = True
elif chk_idx == len(hash)-1:
if chk_idx > salt_idx and hash[-2] == u("$"):
raise uh.exc.MalformedHashError(cls, "too many '$' separators")
# $-config for $$-hash
salt = hash[salt_idx:-1]
chk = None
bare_salt = False
elif chk_idx > 0 and hash[chk_idx-1] == u("$"):
# $$-hash
salt = hash[salt_idx:chk_idx-1]
chk = hash[chk_idx+1:]
bare_salt = False
else:
# $-hash
salt = hash[salt_idx:chk_idx]
chk = hash[chk_idx+1:]
bare_salt = True
return cls(
rounds=rounds,
salt=salt,
checksum=chk,
bare_salt=bare_salt,
)
def to_string(self, withchk=True):
ss = u('') if self.bare_salt else u('$')
rounds = self.rounds
if rounds > 0:
hash = u("$md5,rounds=%d$%s%s") % (rounds, self.salt, ss)
else:
hash = u("$md5$%s%s") % (self.salt, ss)
if withchk:
chk = self.checksum
if chk:
hash = u("%s$%s") % (hash, chk)
return uascii_to_str(hash)
#===================================================================
# primary interface
#===================================================================
# TODO: if we're on solaris, check for native crypt() support.
# this will require extra testing, to make sure native crypt
# actually behaves correctly. of particular importance:
# when using ""-config, make sure to append "$x" to string.
def _calc_checksum(self, secret):
# NOTE: no reference for how sun_md5_crypt handles unicode
if isinstance(secret, unicode):
secret = secret.encode("utf-8")
config = str_to_bascii(self.to_string(withchk=False))
return raw_sun_md5_crypt(secret, self.rounds, config).decode("ascii")
#===================================================================
# eoc
#===================================================================
#=============================================================================
# eof
#=============================================================================
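# editorial usage sketch (not part of the original source; assumes the standard
# PasswordHash interface documented in the class docstring above):
#
#   >>> from passlib.hash import sun_md5_crypt
#   >>> h = sun_md5_crypt.encrypt("password", rounds=5500)
#   >>> sun_md5_crypt.verify("password", h)
#   True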
| gpl-2.0 | 7,085,786,802,647,289,000 | -804,345,004,608,822,000 | 37.362637 | 145 | 0.528072 | false |
enotx/ebook_librarian | src/classification.py | 1 | 13177 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from PyQt5.QtCore import (QAbstractItemModel, QFile, QIODevice,
QItemSelectionModel, QModelIndex, Qt, QRect, QCoreApplication, QMetaObject)
from PyQt5 import QtWidgets
from PyQt5.uic import loadUi
from config import CLASSIFICATION_FILE_NAME
class TreeItem(object):
def __init__(self, data, parent=None):
self.parentItem = parent
self.itemData = data
self.childItems = []
def child(self, row):
return self.childItems[row]
def childCount(self):
return len(self.childItems)
def childNumber(self):
if self.parentItem != None:
return self.parentItem.childItems.index(self)
return 0
def columnCount(self):
return len(self.itemData)
def data(self, column):
return self.itemData[column]
def insertChildren(self, position, count, columns):
if position < 0 or position > len(self.childItems):
return False
for row in range(count):
data = [None for v in range(columns)]
item = TreeItem(data, self)
self.childItems.insert(position, item)
return True
def insertColumns(self, position, columns):
if position < 0 or position > len(self.itemData):
return False
for column in range(columns):
self.itemData.insert(position, None)
for child in self.childItems:
child.insertColumns(position, columns)
return True
def parent(self):
return self.parentItem
def removeChildren(self, position, count):
if position < 0 or position + count > len(self.childItems):
return False
for row in range(count):
self.childItems.pop(position)
return True
def removeColumns(self, position, columns):
if position < 0 or position + columns > len(self.itemData):
return False
for column in range(columns):
self.itemData.pop(position)
for child in self.childItems:
child.removeColumns(position, columns)
return True
def setData(self, column, value):
if column < 0 or column >= len(self.itemData):
return False
self.itemData[column] = value
return True
class TreeModel(QAbstractItemModel):
def __init__(self, headers, data, parent=None):
super(TreeModel, self).__init__(parent)
rootData = [header for header in headers]
self.rootItem = TreeItem(rootData)
self.setupModelData(data.split("\n"), self.rootItem)
def columnCount(self, parent=QModelIndex()):
return self.rootItem.columnCount()
def data(self, index, role):
if not index.isValid():
return None
if role != Qt.DisplayRole and role != Qt.EditRole:
return None
item = self.getItem(index)
return item.data(index.column())
def flags(self, index):
if not index.isValid():
return 0
return Qt.ItemIsEditable | Qt.ItemIsEnabled | Qt.ItemIsSelectable
def getItem(self, index):
if index.isValid():
item = index.internalPointer()
if item:
return item
return self.rootItem
def headerData(self, section, orientation, role=Qt.DisplayRole):
if orientation == Qt.Horizontal and role == Qt.DisplayRole:
return self.rootItem.data(section)
return None
def index(self, row, column, parent=QModelIndex()):
if parent.isValid() and parent.column() != 0:
return QModelIndex()
parentItem = self.getItem(parent)
childItem = parentItem.child(row)
if childItem:
return self.createIndex(row, column, childItem)
else:
return QModelIndex()
def insertColumns(self, position, columns, parent=QModelIndex()):
self.beginInsertColumns(parent, position, position + columns - 1)
success = self.rootItem.insertColumns(position, columns)
self.endInsertColumns()
return success
def insertRows(self, position, rows, parent=QModelIndex()):
parentItem = self.getItem(parent)
self.beginInsertRows(parent, position, position + rows - 1)
success = parentItem.insertChildren(position, rows,
self.rootItem.columnCount())
self.endInsertRows()
return success
def parent(self, index):
if not index.isValid():
return QModelIndex()
childItem = self.getItem(index)
parentItem = childItem.parent()
if parentItem == self.rootItem:
return QModelIndex()
return self.createIndex(parentItem.childNumber(), 0, parentItem)
def removeColumns(self, position, columns, parent=QModelIndex()):
self.beginRemoveColumns(parent, position, position + columns - 1)
success = self.rootItem.removeColumns(position, columns)
self.endRemoveColumns()
if self.rootItem.columnCount() == 0:
self.removeRows(0, self.rowCount())
return success
def removeRows(self, position, rows, parent=QModelIndex()):
parentItem = self.getItem(parent)
self.beginRemoveRows(parent, position, position + rows - 1)
success = parentItem.removeChildren(position, rows)
self.endRemoveRows()
return success
def rowCount(self, parent=QModelIndex()):
parentItem = self.getItem(parent)
return parentItem.childCount()
def setData(self, index, value, role=Qt.EditRole):
if role != Qt.EditRole:
return False
item = self.getItem(index)
result = item.setData(index.column(), value)
if result:
self.dataChanged.emit(index, index)
return result
def setHeaderData(self, section, orientation, value, role=Qt.EditRole):
if role != Qt.EditRole or orientation != Qt.Horizontal:
return False
result = self.rootItem.setData(section, value)
if result:
self.headerDataChanged.emit(orientation, section, section)
return result
def setupModelData(self, lines, parent):
parents = [parent]
indentations = [0]
number = 0
while number < len(lines):
position = 0
while position < len(lines[number]):
if lines[number][position] != " ":
break
position += 1
lineData = lines[number][position:].trimmed()
if lineData:
# Read the column data from the rest of the line.
columnData = [s for s in lineData.split('\t') if s]
if position > indentations[-1]:
# The last child of the current parent is now the new
# parent unless the current parent has no children.
if parents[-1].childCount() > 0:
parents.append(parents[-1].child(parents[-1].childCount() - 1))
indentations.append(position)
else:
while position < indentations[-1] and len(parents) > 0:
parents.pop()
indentations.pop()
# Append a new item to the current parent's list of children.
parent = parents[-1]
parent.insertChildren(parent.childCount(), 1,
self.rootItem.columnCount())
for column in range(len(columnData)):
parent.child(parent.childCount() -1).setData(column, columnData[column])
number += 1
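# editorial note (illustrative, not from the original source): setupModelData()
# expects a plain-text outline where each extra leading space means one level
# of nesting and columns are tab-separated, e.g.
#
#   Fiction
#    Novel
#    Poetry
#   Science
#    Physics
#
# a deeper-indented line becomes a child of the last shallower line; this is
# also the format written back by the save action (saveStatus) below.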
class MainWindow(QtWidgets.QMainWindow):
def __init__(self, parent=None):
super(MainWindow, self).__init__(parent)
loadUi(sys.path[0] + '/ui/classification.ui', self)
self.view.header().close()
headers = ["Classification"]
# try:
# except:
file = QFile(CLASSIFICATION_FILE_NAME)
file.open(QIODevice.ReadOnly)
model = TreeModel(headers, file.readAll())
file.close()
self.view.setModel(model)
for column in range(model.columnCount()):
self.view.resizeColumnToContents(column)
self.exitAction.triggered.connect(QtWidgets.QApplication.instance().quit)
self.view.selectionModel().selectionChanged.connect(self.updateActions)
self.actionsMenu.aboutToShow.connect(self.updateActions)
self.insertRowAction.triggered.connect(self.insertRow)
self.insertColumnAction.triggered.connect(self.insertColumn)
self.removeRowAction.triggered.connect(self.removeRow)
self.removeColumnAction.triggered.connect(self.removeColumn)
self.insertChildAction.triggered.connect(self.insertChild)
        self.saveAction.triggered.connect(self.saveStatus)
self.updateActions()
def insertChild(self):
index = self.view.selectionModel().currentIndex()
model = self.view.model()
if model.columnCount(index) == 0:
if not model.insertColumn(0, index):
return
if not model.insertRow(0, index):
return
for column in range(model.columnCount(index)):
child = model.index(0, column, index)
model.setData(child, "[No data]", Qt.EditRole)
if model.headerData(column, Qt.Horizontal) is None:
model.setHeaderData(column, Qt.Horizontal, "[No header]",
Qt.EditRole)
self.view.selectionModel().setCurrentIndex(model.index(0, 0, index),
QItemSelectionModel.ClearAndSelect)
self.updateActions()
def insertColumn(self):
model = self.view.model()
column = self.view.selectionModel().currentIndex().column()
changed = model.insertColumn(column + 1)
if changed:
model.setHeaderData(column + 1, Qt.Horizontal, "[No header]",
Qt.EditRole)
self.updateActions()
return changed
def insertRow(self):
index = self.view.selectionModel().currentIndex()
model = self.view.model()
if not model.insertRow(index.row()+1, index.parent()):
return
self.updateActions()
for column in range(model.columnCount(index.parent())):
child = model.index(index.row()+1, column, index.parent())
model.setData(child, "[No data]", Qt.EditRole)
def removeColumn(self):
model = self.view.model()
column = self.view.selectionModel().currentIndex().column()
changed = model.removeColumn(column)
if changed:
self.updateActions()
return changed
def removeRow(self):
index = self.view.selectionModel().currentIndex()
model = self.view.model()
if (model.removeRow(index.row(), index.parent())):
self.updateActions()
def updateActions(self):
hasSelection = not self.view.selectionModel().selection().isEmpty()
self.removeRowAction.setEnabled(hasSelection)
self.removeColumnAction.setEnabled(hasSelection)
hasCurrent = self.view.selectionModel().currentIndex().isValid()
self.insertRowAction.setEnabled(hasCurrent)
self.insertColumnAction.setEnabled(hasCurrent)
if hasCurrent:
self.view.closePersistentEditor(self.view.selectionModel().currentIndex())
row = self.view.selectionModel().currentIndex().row()
column = self.view.selectionModel().currentIndex().column()
# if self.view.selectionModel().currentIndex().parent().isValid():
# self.statusBar().showMessage("Position: (%d,%d)" % (row, column))
# else:
# self.statusBar().showMessage("Position: (%d,%d) in top level" % (row, column))
def showitems(self, item, statuslist):
statuslist.append('')
x = self.getlevel(item)
for i in range(x):
# print (' ',end = "")
statuslist[-1] += ' '
statuslist[-1] += (str(item.itemData[0]))
# print (str(item.itemData[0]))
        if item.childCount():
# print ("ok")
for i in item.childItems:
self.showitems(i, statuslist)
def getlevel(self, item):
x = 0
while item.parentItem != self.view.model().rootItem:
x += 1
item = item.parentItem
return x
    def saveStatus(self):
f = open (CLASSIFICATION_FILE_NAME, 'w')
model = self.view.model()
statuslist = []
for i in model.rootItem.childItems:
self.showitems(i,statuslist)
# print (str(i.itemData[0])[2:-1])
for i in statuslist:
if i[:2] == "b'" and i[-1] == "'":
f.write(i[2:-1]+'\n')
else:
f.write(i+'\n')
f.close()
if __name__ == '__main__':
import sys
app = QtWidgets.QApplication(sys.argv)
window = MainWindow()
window.show()
sys.exit(app.exec_())
| gpl-3.0 | 7,692,934,784,047,252,000 | -5,773,634,142,380,749,000 | 30.299287 | 96 | 0.595052 | false |
Jet-Streaming/gyp | pylib/gyp/easy_xml.py | 3 | 4960 | # Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import re
import os
def XmlToString(content, encoding='utf-8', pretty=False):
""" Writes the XML content to disk, touching the file only if it has changed.
Visual Studio files have a lot of pre-defined structures. This function makes
it easy to represent these structures as Python data structures, instead of
having to create a lot of function calls.
Each XML element of the content is represented as a list composed of:
1. The name of the element, a string,
2. The attributes of the element, a dictionary (optional), and
3+. The content of the element, if any. Strings are simple text nodes and
lists are child elements.
Example 1:
<test/>
becomes
['test']
Example 2:
<myelement a='value1' b='value2'>
<childtype>This is</childtype>
<childtype>it!</childtype>
</myelement>
becomes
['myelement', {'a':'value1', 'b':'value2'},
['childtype', 'This is'],
['childtype', 'it!'],
]
Args:
content: The structured content to be converted.
encoding: The encoding to report on the first XML line.
pretty: True if we want pretty printing with indents and new lines.
Returns:
The XML content as a string.
"""
# We create a huge list of all the elements of the file.
xml_parts = ['<?xml version="1.0" encoding="%s"?>' % encoding]
if pretty:
xml_parts.append('\n')
_ConstructContentList(xml_parts, content, pretty)
# Convert it to a string
return ''.join(xml_parts)
def _ConstructContentList(xml_parts, specification, pretty, level=0):
""" Appends the XML parts corresponding to the specification.
Args:
xml_parts: A list of XML parts to be appended to.
specification: The specification of the element. See EasyXml docs.
pretty: True if we want pretty printing with indents and new lines.
level: Indentation level.
"""
# The first item in a specification is the name of the element.
if pretty:
indentation = ' ' * level
new_line = '\n'
else:
indentation = ''
new_line = ''
name = specification[0]
if not isinstance(name, str):
raise Exception('The first item of an EasyXml specification should be '
'a string. Specification was ' + str(specification))
xml_parts.append(indentation + '<' + name)
# Optionally in second position is a dictionary of the attributes.
rest = specification[1:]
if rest and isinstance(rest[0], dict):
for at, val in sorted(rest[0].iteritems()):
xml_parts.append(' %s="%s"' % (at, _XmlEscape(val, attr=True)))
rest = rest[1:]
if rest:
xml_parts.append('>')
all_strings = reduce(lambda x, y: x and isinstance(y, str), rest, True)
multi_line = not all_strings
if multi_line and new_line:
xml_parts.append(new_line)
for child_spec in rest:
# If it's a string, append a text node.
# Otherwise recurse over that child definition
if isinstance(child_spec, str):
xml_parts.append(_XmlEscape(child_spec))
else:
_ConstructContentList(xml_parts, child_spec, pretty, level + 1)
if multi_line and indentation:
xml_parts.append(indentation)
xml_parts.append('</%s>%s' % (name, new_line))
else:
xml_parts.append('/>%s' % new_line)
def WriteXmlIfChanged(content, path, encoding='utf-8', pretty=False,
win32=False):
""" Writes the XML content to disk, touching the file only if it has changed.
Args:
content: The structured content to be written.
path: Location of the file.
encoding: The encoding to report on the first line of the XML file.
pretty: True if we want pretty printing with indents and new lines.
"""
xml_string = XmlToString(content, encoding, pretty)
if win32 and os.linesep != '\r\n':
xml_string = xml_string.replace('\n', '\r\n')
# Get the old content
try:
f = open(path, 'r')
existing = f.read()
f.close()
except:
existing = None
# It has changed, write it
if existing != xml_string:
f = open(path, 'w')
f.write(xml_string)
f.close()
_xml_escape_map = {
    '"': '&quot;',
    "'": '&apos;',
    '<': '&lt;',
    '>': '&gt;',
    '&': '&amp;',
    '\n': '&#xA;',
    '\r': '&#xD;',
}
_xml_escape_re = re.compile(
"(%s)" % "|".join(map(re.escape, _xml_escape_map.keys())))
def _XmlEscape(value, attr=False):
""" Escape a string for inclusion in XML."""
def replace(match):
m = match.string[match.start() : match.end()]
# don't replace single quotes in attrs
if attr and m == "'":
return m
return _xml_escape_map[m]
return _xml_escape_re.sub(replace, value)
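# editorial usage sketch (not part of the original module): mirrors the
# docstring example of XmlToString() above.
if __name__ == '__main__':
  # expected output (pretty-printed):
  # <?xml version="1.0" encoding="utf-8"?>
  # <myelement a="value1" b="value2">
  #   <childtype>This is</childtype>
  #   <childtype>it!</childtype>
  # </myelement>
  spec = ['myelement', {'a': 'value1', 'b': 'value2'},
          ['childtype', 'This is'],
          ['childtype', 'it!'],
         ]
  print XmlToString(spec, pretty=True)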
| bsd-3-clause | -3,230,600,642,567,276,000 | 4,989,912,391,750,513,000 | 29.592357 | 80 | 0.61875 | false |
stanley89/helios-server | helios/security.py | 3 | 5315 | """
Helios Security -- mostly access control
Ben Adida (ben@adida.net)
"""
# nicely update the wrapper function
from functools import update_wrapper
from django.core.urlresolvers import reverse
from django.core.exceptions import *
from django.http import *
from django.conf import settings
from models import *
from helios_auth.security import get_user
from django.http import HttpResponseRedirect
import urllib
import helios
# current voter
def get_voter(request, user, election):
"""
return the current voter
"""
voter = None
if request.session.has_key('CURRENT_VOTER_ID'):
voter = Voter.objects.get(id=request.session['CURRENT_VOTER_ID'])
if voter.election != election:
voter = None
if not voter:
if user:
voter = Voter.get_by_election_and_user(election, user)
return voter
# a function to check if the current user is a trustee
HELIOS_TRUSTEE_UUID = 'helios_trustee_uuid'
def get_logged_in_trustee(request):
if request.session.has_key(HELIOS_TRUSTEE_UUID):
return Trustee.get_by_uuid(request.session[HELIOS_TRUSTEE_UUID])
else:
return None
def set_logged_in_trustee(request, trustee):
request.session[HELIOS_TRUSTEE_UUID] = trustee.uuid
#
# some common election checks
#
def do_election_checks(election, props):
# frozen
if props.has_key('frozen'):
frozen = props['frozen']
else:
frozen = None
# newvoters (open for registration)
if props.has_key('newvoters'):
newvoters = props['newvoters']
else:
newvoters = None
# frozen check
if frozen != None:
if frozen and not election.frozen_at:
raise PermissionDenied()
if not frozen and election.frozen_at:
raise PermissionDenied()
# open for new voters check
if newvoters != None:
if election.can_add_voters() != newvoters:
raise PermissionDenied()
def get_election_by_uuid(uuid):
if not uuid:
raise Exception("no election ID")
return Election.get_by_uuid(uuid)
# decorator for views that pertain to an election
# takes parameters:
# frozen - is the election frozen
# newvoters - does the election accept new voters
def election_view(**checks):
def election_view_decorator(func):
def election_view_wrapper(request, election_uuid=None, *args, **kw):
election = get_election_by_uuid(election_uuid)
if not election:
raise Http404
# do checks
do_election_checks(election, checks)
# if private election, only logged in voters
if election.private_p and not checks.get('allow_logins',False):
from views import password_voter_login
if not user_can_see_election(request, election):
return_url = request.get_full_path()
return HttpResponseRedirect("%s?%s" % (reverse(password_voter_login, args=[election.uuid]), urllib.urlencode({
'return_url' : return_url
})))
return func(request, election, *args, **kw)
return update_wrapper(election_view_wrapper, func)
return election_view_decorator
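# editorial usage sketch (hypothetical view, not from the original source):
# a view restricted to frozen elections would be declared as
#
#   @election_view(frozen=True)
#   def election_results(request, election):
#       ...
#
# the decorator resolves election_uuid to an Election object, applies the
# checks, and passes the election as the second positional argument.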
def user_can_admin_election(user, election):
if not user:
return False
# election or site administrator
return election.admin == user or user.admin_p
def user_can_see_election(request, election):
user = get_user(request)
if not election.private_p:
return True
# election is private
# but maybe this user is the administrator?
if user_can_admin_election(user, election):
return True
# or maybe this is a trustee of the election?
trustee = get_logged_in_trustee(request)
if trustee and trustee.election.uuid == election.uuid:
return True
# then this user has to be a voter
return (get_voter(request, user, election) != None)
def api_client_can_admin_election(api_client, election):
return election.api_client == api_client and api_client != None
# decorator for checking election admin access, and some properties of the election
# frozen - is the election frozen
# newvoters - does the election accept new voters
def election_admin(**checks):
def election_admin_decorator(func):
def election_admin_wrapper(request, election_uuid=None, *args, **kw):
election = get_election_by_uuid(election_uuid)
user = get_user(request)
if not user_can_admin_election(user, election):
raise PermissionDenied()
# do checks
do_election_checks(election, checks)
return func(request, election, *args, **kw)
return update_wrapper(election_admin_wrapper, func)
return election_admin_decorator
def trustee_check(func):
def trustee_check_wrapper(request, election_uuid, trustee_uuid, *args, **kwargs):
election = get_election_by_uuid(election_uuid)
trustee = Trustee.get_by_election_and_uuid(election, trustee_uuid)
if trustee == get_logged_in_trustee(request):
return func(request, election, trustee, *args, **kwargs)
else:
raise PermissionDenied()
return update_wrapper(trustee_check_wrapper, func)
def can_create_election(request):
user = get_user(request)
if not user:
return False
if helios.ADMIN_ONLY:
return user.admin_p
else:
return user.can_create_election()
def user_can_feature_election(user, election):
if not user:
return False
return user.admin_p
| apache-2.0 | -5,613,128,028,017,407,000 | -3,874,972,755,542,386,000 | 25.979695 | 120 | 0.690875 | false |
soxfmr/engineshooter | engineshooter/spiders/engine/baidu.py | 1 | 6685 | # -*- coding: utf-8 -*-
import re
import os
import scrapy
import random
import string
import tempfile
import webbrowser
from os.path import join
from urllib import urlencode
from StringIO import StringIO
from engineshooter.items import SearchResultItem
class BaiduEngine:
name = 'baidu'
BASE_URL = 'https://www.baidu.com/s?rn=50&wd={}'
CAPTCHA_URL = 'https://ipv4.google.com/sorry/index'
NOTHING_MATCHES_TAG = ('<div class="content_none"><div class="nors">', )
def __init__(self, spider):
self.spider = spider
self.keyword = ''
self.callback = None
self.maxpage = 0
self.result = []
self.intercept_status = False
self.url_next_page = None
self.request = None
self.payload = None
self.current_page = 0
def search(self, keyword, callback, maxpage=0):
self.maxpage = maxpage
self.keyword = keyword
self.callback = callback
return scrapy.Request(url=BaiduEngine.BASE_URL.format(self.keyword), callback=self.callback)
def parse(self, response):
# reset
self.request = None
self.result = []
self.url_next_page = None
self.intercept_status = False
# Nothing found
empty = True
for tag in BaiduEngine.NOTHING_MATCHES_TAG:
if tag not in response.body:
empty = False
break
if empty:
self.spider.logger.warning('Empty search result')
return False
"""
# Determine whether the captcha present
if response.status in [301, 302]:
if GoogleEngine.CAPTCHA_URL in response.headers['Location']:
self.spider.logger.info(response.headers['Location'])
self.spider.logger.warning('Captcha redirect detect, grabing the captcha...')
self.request = scrapy.Request(url = response.headers['Location'], callback = self.callback,
dont_filter = True, meta = {'route' : self.grab_captcha})
else:
if 'route' not in response.meta:
# Validate success
self.spider.logger.info('Validate success, continue for next request')
self.url_next_page = response.headers['Location']
return False
if response.status in [503] or 'route' in response.meta:
'''
self.spider.logger.warning('Validation code incorrectly, please retry')
self.request = scrapy.Request(url = response.url, callback = self.callback,
dont_filter = True, meta = {'engine' : self, 'route' : self.grab_captcha})
'''
response.meta['route'](response)
return False
"""
# Extact all of result
for item in response.css('div.result > h3.t'):
try:
result = SearchResultItem()
result['url'] = re.search('(http|https)://.+', item.css('a::attr(href)').extract_first()).group()
# Right to Left
title = u''.join([plain.extract() for plain in item.css('a::text')])
result['title'] = title.encode('utf-8')
self.result.append(result)
except Exception as e:
                self.spider.logger.error('An error occurred when extracting the item: ' + str(e))
# Current page
current_page = response.css('strong > span.pc::text').extract_first()
if current_page:
self.current_page = int(current_page)
self.spider.logger.info('Current search index %d', self.current_page)
# Parse next page information
next_page = response.css('a.n::attr(href)').extract()
next_text = response.css('a.n::text').extract()
if next_page:
length = len(next_page)
            # Stop sending requests if no next-page button is present
if length > 1 or '>' in next_text[0]:
if length == 2:
_, next_page = next_page
else:
next_page = next_page[0]
next_page = re.sub(r'pn=(\d+)', 'pn=%d&rn=50' % (self.current_page * 50), next_page)
self.url_next_page = response.urljoin(next_page)
        self.spider.logger.info('Extracted %d urls from the current page', len(self.result))
self.spider.logger.info('Response parsing completed')
return True
def next(self):
if self.request:
            self.spider.logger.info('Urgent request provided, sending request directly.')
return self.request
if self.maxpage > 0 and self.current_page >= self.maxpage:
self.spider.logger.info('Crawled %d pages as desire', self.maxpage)
return
if self.url_next_page == None:
self.spider.logger.info('Reached the end of page')
return
self.spider.logger.info('Sending request for next page')
return scrapy.Request(url = self.url_next_page, callback = self.callback, dont_filter = True)
def grab_captcha(self, response):
self.payload = {'q' : response.css('input[name=q]::attr(value)').extract_first().encode('utf-8'),
'continue' : response.css('input[name=continue]::attr(value)').extract_first().encode('utf-8') }
imgurl = response.urljoin(response.css('img::attr(src)').extract_first())
self.request = scrapy.Request(url=imgurl, callback=self.callback, meta = {
'route' : self.require_captcha, 'url' : response.url})
# Notify user for captcha
self.intercept_status = True
def require_captcha(self, response):
tmpdir = tempfile.gettempdir()
path = join(tmpdir, ''.join(random.choice(string.letters + string.digits) for _ in range(6)) + '.jpg')
with open(path, 'wb+') as handle:
handle.write(response.body)
webbrowser.open(path)
while True:
captcha = raw_input('Please enter the captcha: ')
if not captcha:
continue
self.payload['captcha'] = captcha
url = '{}?{}'.format(BaiduEngine.CAPTCHA_URL, urlencode(self.payload))
self.spider.logger.info(url)
self.request = scrapy.Request(url=url, dont_filter = True, meta = {'route' : self.grab_captcha})
self.spider.logger.info(self.payload)
break
os.remove(path)
def get_result_url(self):
return map(lambda item: item['url'], self.result)
def get_result(self):
return self.result
def intercept(self):
return self.intercept_status
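# editorial sketch (hypothetical driver code, not from the original source):
# a spider would typically drive this engine roughly as follows --
#
#   engine = BaiduEngine(spider)
#   yield engine.search('keyword', callback=spider.parse_result, maxpage=5)
#
#   # inside spider.parse_result(response):
#   if engine.parse(response):
#       for item in engine.get_result():
#           yield item
#   next_request = engine.next()
#   if next_request is not None:
#       yield next_request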
| mit | 3,157,964,551,465,821,000 | -7,009,614,043,648,316,000 | 36.768362 | 113 | 0.581301 | false |
vkscool/nupic | tests/unit/nupic/algorithms/nab_detector_test.py | 9 | 10846 | #! /usr/bin/env python
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2015, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""
Test the NuPIC imports run as expected in
nab/detectors/numenta/numenta_detector.py. They are
nupic/algorithms/anomaly_likelihood and
nupic/frameworks/opf/modelfactory.ModelFactory. The intent here is not to test
functionality but rather that the functions are able to run in NAB.
NAB repo: https://github.com/numenta/NAB
"""
import copy
import csv
import datetime
import os
import unittest
from nupic.algorithms import anomaly_likelihood as an
from nupic.frameworks.opf.modelfactory import ModelFactory
from nupic.frameworks.opf.clamodel import CLAModel
from nupic.support.unittesthelpers.testcasebase import TestCaseBase
def _getDateList(numSamples, startDatetime):
"""
Generate a sequence of sample dates starting at startDatetime and incrementing
every 5 minutes.
@param numSamples (int) number of datetimes to generate
@param startDatetime (datetime) the start (first) datetime
@return dateList (list) generated sequence of datetimes
"""
dateList = []
td = datetime.timedelta(minutes=5)
currentDate = startDatetime + td
for _ in xrange(numSamples):
dateList.append(currentDate)
currentDate = currentDate + td
return dateList
def _addSampleData(numSamples=20, spikeValue=1.0, spikePeriod=10):
"""
Add sample anomaly data to the existing/new data list. Data is constant 0.0,
where anomalies are spikes to 1.0 at an interval set by spikePeriod. The test
data is trivial, as explicit testing of functions is done in other unit tests.
@param numSamples (int) number of data entries to produce
@param spikeValue (float) value of the anomaly spikes
@param spikePeriod (int) periodicity of anomaly spikes, where one will
occur for every spikePeriod data entries
@return data (list) list of generated data entries
"""
# Generate datetimes
lastDate = datetime.datetime(2015, 4, 1)
dateList = _getDateList(numSamples, lastDate)
# Generate data with anomaly spikes
data = []
for idx, date in enumerate(dateList):
if (spikePeriod > 0) and ( (idx + 1) % spikePeriod == 0):
data.append([date, idx, spikeValue])
else:
data.append([date, idx, 0.0])
return data
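# editorial example (illustrative only): with the defaults above the generated
# rows look like
#   [datetime(2015, 4, 1, 0, 5),  0, 0.0]
#   ...
#   [datetime(2015, 4, 1, 0, 50), 9, 1.0]   # spike on every 10th entry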
def _writeToCSV(data, headers, fileName):
"""
Write list of data to CSV.
@param data (list) list of data entries, where each row is a list
@param headers (list) column headers, where each entry in list is
a string
"""
with open(fileName, "wb") as f:
writer = csv.writer(f, delimiter=",", lineterminator="\n")
writer.writerow(headers)
writer.writerows(data)
class NABTest(TestCaseBase):
def setUp(self):
# Generate sample data, save to CSV (not used now, but put in place
# for future NAB tests)
self.data = _addSampleData()
self.dataFileName = "temp_data.csv"
_writeToCSV(self.data, ["datetime", "index", "value"], self.dataFileName)
def tearDown(self):
os.remove(self.dataFileName)
def testModelCreator(self):
"""
Tests the ModelFactory.create() method in
"nupic/frameworks/opf/modelfactory.py" by creating a new model object, as
in "NAB/detectors/numenta/numenta_detector.py".
    Model parameters are the same as in NAB v0.8.
"""
# Create model as in NAB/.../numenta_detector.py
modelParams = {
"aggregationInfo": {
"days": 0,
"fields": [],
"hours": 0,
"microseconds": 0,
"milliseconds": 0,
"minutes": 0,
"months": 0,
"seconds": 0,
"weeks": 0,
"years": 0
},
"model": "CLA",
"modelParams": {
"anomalyParams": {
"anomalyCacheRecords": None,
"autoDetectThreshold": None,
"autoDetectWaitRecords": 5030
},
"clEnable": False,
"clParams": {
"alpha": 0.035828933612158,
"clVerbosity": 0,
"regionName": "CLAClassifierRegion",
"steps": "1"
},
"inferenceType": "TemporalAnomaly",
"sensorParams": {
"encoders": {
"timestamp_timeOfDay": {
"fieldname": "timestamp",
"name": "timestamp_timeOfDay",
"timeOfDay": [
21,
9.49122334747737
],
"type": "DateEncoder"
},
"timestamp_dayOfWeek": None,
"timestamp_weekend": None,
"value": {
"name": "value",
"fieldname": "value",
"numBuckets": 94.0,
"seed": 42,
"type": "RandomDistributedScalarEncoder"
}
},
"sensorAutoReset": None,
"verbosity": 0
},
"spEnable": True,
"spParams": {
"potentialPct": 0.8,
"columnCount": 2048,
"globalInhibition": 1,
"inputWidth": 0,
"maxBoost": 1.0,
"numActiveColumnsPerInhArea": 40,
"seed": 1956,
"spVerbosity": 0,
"spatialImp": "cpp",
"synPermActiveInc": 0.0015,
"synPermConnected": 0.1,
"synPermInactiveDec": 0.0005
},
"tpEnable": True,
"tpParams": {
"activationThreshold": 13,
"cellsPerColumn": 32,
"columnCount": 2048,
"globalDecay": 0.0,
"initialPerm": 0.21,
"inputWidth": 2048,
"maxAge": 0,
"maxSegmentsPerCell": 128,
"maxSynapsesPerSegment": 32,
"minThreshold": 10,
"newSynapseCount": 20,
"outputType": "normal",
"pamLength": 3,
"permanenceDec": 0.1,
"permanenceInc": 0.1,
"seed": 1960,
"temporalImp": "cpp",
"verbosity": 0
},
"trainSPNetOnlyIfRequested": False
},
"predictAheadTime": None,
"version": 1
}
sensorParams = (modelParams["modelParams"]["sensorParams"]
["encoders"]["value"])
sensorParams["resolution"] = max(0.001,
(1.2 - 0.2) / sensorParams.pop("numBuckets"))
model = ModelFactory.create(modelParams)
self.assertIs(type(model), CLAModel, msg="The created model is not a"
"CLAModel, but rather is of type %s" % type(model))
def testNABAnomalyLikelihood(self):
"""
    Tests the specific calls to nupic/algorithms/anomaly_likelihood as they're
made in "NAB/detectors/numenta/numenta_detector.py".
Note "NAB/.../numenta_detector.py" has its own class AnomalyLikelihood,
different from nupic/algorithms/anomaly_likelihood.AnomalyLikelihood, but
which calls the functions estimateAnomalyLikelihoods() and
updateAnomalyLikelihoods() from "nupic/algorithms/anomaly_likelihood.py".
"""
# AnomalyLikelihood object initial values
iteration = 0
probationaryPeriod = 4
historicalScores = []
likelihoodList = []
for dataPoint in self.data:
# Ignore the first probationaryPeriod data points
if len(historicalScores) < probationaryPeriod:
likelihood = 0.5
else:
if iteration % 4 == 0:
_, _, distribution = an.estimateAnomalyLikelihoods(
historicalScores,
skipRecords = probationaryPeriod)
likelihoods, _, distribution = an.updateAnomalyLikelihoods(
[dataPoint], distribution)
likelihood = 1.0 - likelihoods[0]
historicalScores.append(dataPoint)
iteration += 1
likelihoodList.append(likelihood)
truthLikelihoodList = [0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5,
0.044565462999999972, 0.044565462999999972,
0.044565462999999972, 0.044565462999999972,
0.90319951499999995, 0.90319951499999995,
0.90319951499999995, 0.90319951499999995,
0.78814460099999994, 0.78814460099999994,
0.78814460099999994, 0.78814460099999994]
for i in xrange(len(likelihoodList)):
self.assertAlmostEqual(likelihoodList[i], truthLikelihoodList[i],
msg="unequal values are at index %i" % i)
if __name__ == "__main__":
unittest.main()
| gpl-3.0 | -905,637,572,050,624,800 | 5,399,354,196,981,031,000 | 38.155235 | 80 | 0.518071 | false |
ruziniu/v2ex | main.py | 15 | 57245 | #!/usr/bin/env python
# coding=utf-8
import base64
import os
import re
import time
import datetime
import hashlib
import urllib
import string
import random
import pickle
import math
from google.appengine.ext import webapp
from google.appengine.api import memcache
from google.appengine.api import urlfetch
from google.appengine.api.labs import taskqueue
from google.appengine.api import mail
from google.appengine.ext import db
from google.appengine.ext.webapp import util
from google.appengine.ext.webapp import template
from v2ex.babel import Member
from v2ex.babel import Counter
from v2ex.babel import Section
from v2ex.babel import Node
from v2ex.babel import Topic
from v2ex.babel import Reply
from v2ex.babel import PasswordResetToken
from v2ex.babel import SYSTEM_VERSION
from v2ex.babel.security import *
from v2ex.babel.ua import *
from v2ex.babel.da import *
from v2ex.babel.l10n import *
from v2ex.babel.ext.cookies import Cookies
from v2ex.babel.ext.sessions import Session
from v2ex.babel.handlers import BaseHandler
from django.utils import simplejson as json
from v2ex.babel.ext import captcha
template.register_template_library('v2ex.templatetags.filters')
import config
class HomeHandler(webapp.RequestHandler):
def head(self):
pass
def get(self):
host = self.request.headers['Host']
if host == 'beta.v2ex.com':
return self.redirect('http://www.v2ex.com/')
site = GetSite()
browser = detect(self.request)
template_values = {}
template_values['site'] = GetSite()
template_values['canonical'] = 'http://' + site.domain + '/'
template_values['rnd'] = random.randrange(1, 100)
template_values['page_title'] = site.title
template_values['system_version'] = SYSTEM_VERSION
member = CheckAuth(self)
if member:
if member.my_home != None and len(member.my_home) > 0:
return self.redirect(member.my_home)
l10n = GetMessages(self, member, site)
template_values['l10n'] = l10n
if member:
self.response.headers['Set-Cookie'] = 'auth=' + member.auth + '; expires=' + (datetime.datetime.now() + datetime.timedelta(days=365)).strftime("%a, %d-%b-%Y %H:%M:%S GMT") + '; path=/'
template_values['member'] = member
try:
blocked = pickle.loads(member.blocked.encode('utf-8'))
except:
blocked = []
if (len(blocked) > 0):
template_values['blocked'] = ','.join(map(str, blocked))
if member:
recent_nodes = memcache.get('member::' + str(member.num) + '::recent_nodes')
if recent_nodes:
template_values['recent_nodes'] = recent_nodes
nodes_new = []
nodes_new = memcache.get('home_nodes_new')
if nodes_new is None:
nodes_new = []
qnew = db.GqlQuery("SELECT * FROM Node ORDER BY created DESC LIMIT 10")
if (qnew.count() > 0):
i = 0
for node in qnew:
nodes_new.append(node)
i = i + 1
memcache.set('home_nodes_new', nodes_new, 86400)
template_values['nodes_new'] = nodes_new
ignored = ['newbie', 'in', 'flamewar', 'pointless', 'tuan', '528491', 'chamber', 'autistic', 'blog', 'love', 'flood', 'beforesunrise', 'diary', 'fanfou', 'closed']
if browser['ios']:
home_rendered = memcache.get('home_rendered_mobile')
if home_rendered is None:
latest = memcache.get('q_latest_16')
if (latest):
template_values['latest'] = latest
else:
q2 = db.GqlQuery("SELECT * FROM Topic ORDER BY last_touched DESC LIMIT 16")
topics = []
for topic in q2:
if topic.node_name not in ignored:
topics.append(topic)
memcache.set('q_latest_16', topics, 600)
latest = topics
template_values['latest'] = latest
path = os.path.join(os.path.dirname(__file__), 'tpl', 'portion', 'home_mobile.html')
home_rendered = template.render(path, template_values)
memcache.set('home_rendered_mobile', home_rendered, 600)
template_values['home'] = home_rendered
else:
home_rendered = memcache.get('home_rendered')
if home_rendered is None:
latest = memcache.get('q_latest_16')
if (latest):
template_values['latest'] = latest
else:
q2 = db.GqlQuery("SELECT * FROM Topic ORDER BY last_touched DESC LIMIT 16")
topics = []
for topic in q2:
if topic.node_name not in ignored:
topics.append(topic)
memcache.set('q_latest_16', topics, 600)
latest = topics
template_values['latest'] = latest
path = os.path.join(os.path.dirname(__file__), 'tpl', 'portion', 'home.html')
home_rendered = template.render(path, template_values)
memcache.set('home_rendered', home_rendered, 600)
template_values['home'] = home_rendered
member_total = memcache.get('member_total')
if member_total is None:
q3 = db.GqlQuery("SELECT * FROM Counter WHERE name = 'member.total'")
if (q3.count() > 0):
member_total = q3[0].value
else:
member_total = 0
memcache.set('member_total', member_total, 3600)
template_values['member_total'] = member_total
topic_total = memcache.get('topic_total')
if topic_total is None:
q4 = db.GqlQuery("SELECT * FROM Counter WHERE name = 'topic.total'")
if (q4.count() > 0):
topic_total = q4[0].value
else:
topic_total = 0
memcache.set('topic_total', topic_total, 3600)
template_values['topic_total'] = topic_total
reply_total = memcache.get('reply_total')
if reply_total is None:
q5 = db.GqlQuery("SELECT * FROM Counter WHERE name = 'reply.total'")
if (q5.count() > 0):
reply_total = q5[0].value
else:
reply_total = 0
memcache.set('reply_total', reply_total, 3600)
template_values['reply_total'] = reply_total
hottest = memcache.get('index_hottest_sidebar')
if hottest is None:
qhot = db.GqlQuery("SELECT * FROM Node ORDER BY topics DESC LIMIT 25")
hottest = u''
for node in qhot:
hottest = hottest + '<a href="/go/' + node.name + '" class="item_node">' + node.title + '</a>'
memcache.set('index_hottest_sidebar', hottest, 86400)
template_values['index_hottest_sidebar'] = hottest
c = memcache.get('index_categories')
if c is None:
c = ''
i = 0
if site.home_categories is not None:
categories = site.home_categories.split("\n")
else:
categories = []
for category in categories:
category = category.strip()
i = i + 1
c = c + '<div class="cell"><table cellpadding="0" cellspacing="0" border="0"><tr><td align="right" width="60"><span class="fade">' + category + '</span></td><td style="line-height: 200%; padding-left: 10px;">'
qx = db.GqlQuery("SELECT * FROM Node WHERE category = :1 ORDER BY topics DESC", category)
for node in qx:
c = c + '<a href="/go/' + node.name + '" style="font-size: 14px;">' + node.title + '</a> '
c = c + '</td></tr></table></div>'
memcache.set('index_categories', c, 86400)
template_values['c'] = c
if (browser['ios']):
path = os.path.join(os.path.dirname(__file__), 'tpl', 'mobile', 'index.html')
else:
path = os.path.join(os.path.dirname(__file__), 'tpl', 'desktop', 'index.html')
output = template.render(path, template_values)
self.response.out.write(output)
class PlanesHandler(BaseHandler):
def get(self):
c = 0
c = memcache.get('planes_c')
s = ''
s = memcache.get('planes')
if (s == None):
c = 0
s = ''
q = db.GqlQuery("SELECT * FROM Section ORDER BY nodes DESC")
if (q.count() > 0):
for section in q:
q2 = db.GqlQuery("SELECT * FROM Node WHERE section_num = :1 ORDER BY topics DESC", section.num)
n = ''
if (q2.count() > 0):
nodes = []
i = 0
for node in q2:
nodes.append(node)
i = i + 1
random.shuffle(nodes)
for node in nodes:
fs = random.randrange(12, 16)
n = n + '<a href="/go/' + node.name + '" class="item_node">' + node.title + '</a>'
c = c + 1
s = s + '<div class="sep20"></div><div class="box"><div class="cell"><div class="fr"><strong class="snow">' + section.title_alternative + u'</strong><small class="snow"> • ' + str(section.nodes) + ' nodes</small></div>' + section.title + '</div><div class="inner" align="center">' + n + '</div></div>'
memcache.set('planes', s, 86400)
memcache.set('planes_c', c, 86400)
self.values['c'] = c
self.values['s'] = s
self.values['page_title'] = self.site.title.decode('utf-8') + u' › ' + self.l10n.planes.decode('utf-8')
self.finalize(template_name='planes')
class RecentHandler(webapp.RequestHandler):
def get(self):
site = GetSite()
browser = detect(self.request)
template_values = {}
template_values['site'] = site
template_values['rnd'] = random.randrange(1, 100)
template_values['system_version'] = SYSTEM_VERSION
template_values['page_title'] = site.title + u' › 最近的 50 个主题'
member = CheckAuth(self)
l10n = GetMessages(self, member, site)
template_values['l10n'] = l10n
if member:
template_values['member'] = member
try:
blocked = pickle.loads(member.blocked.encode('utf-8'))
except:
blocked = []
if (len(blocked) > 0):
template_values['blocked'] = ','.join(map(str, blocked))
latest = memcache.get('q_recent_50')
if (latest):
template_values['latest'] = latest
else:
q2 = db.GqlQuery("SELECT * FROM Topic ORDER BY last_touched DESC LIMIT 16,50")
topics = []
IGNORED_RECENT = ['flamewar', 'pointless', 'in', 'autistic', 'chamber', 'flood', 'diary', 'fanfou']
for topic in q2:
if topic.node_name not in IGNORED_RECENT:
topics.append(topic)
memcache.set('q_recent_50', topics, 80)
template_values['latest'] = topics
template_values['latest_total'] = len(topics)
if browser['ios']:
path = os.path.join(os.path.dirname(__file__), 'tpl', 'mobile', 'recent.html')
else:
path = os.path.join(os.path.dirname(__file__), 'tpl', 'desktop', 'recent.html')
output = template.render(path, template_values)
expires_date = datetime.datetime.utcnow() + datetime.timedelta(minutes=2)
expires_str = expires_date.strftime("%d %b %Y %H:%M:%S GMT")
self.response.headers.add_header("Expires", expires_str)
self.response.headers['Cache-Control'] = 'max-age=120, must-revalidate'
self.response.out.write(output)
class UAHandler(webapp.RequestHandler):
def get(self):
site = GetSite()
browser = detect(self.request)
template_values = {}
template_values['site'] = site
template_values['system_version'] = SYSTEM_VERSION
member = CheckAuth(self)
template_values['member'] = member
l10n = GetMessages(self, member, site)
template_values['l10n'] = l10n
template_values['ua'] = os.environ['HTTP_USER_AGENT']
template_values['page_title'] = site.title + u' › 用户代理字符串'
path = os.path.join(os.path.dirname(__file__), 'tpl', 'mobile', 'ua.html')
output = template.render(path, template_values)
self.response.out.write(output)
class SigninHandler(webapp.RequestHandler):
def get(self):
site = GetSite()
member = False
browser = detect(self.request)
template_values = {}
template_values['site'] = site
template_values['page_title'] = site.title + u' › 登入'
template_values['system_version'] = SYSTEM_VERSION
l10n = GetMessages(self, member, site)
template_values['l10n'] = l10n
errors = 0
template_values['errors'] = errors
template_values['next'] = self.request.referer
if browser['ios']:
path = os.path.join(os.path.dirname(__file__), 'tpl', 'mobile', 'signin.html')
else:
path = os.path.join(os.path.dirname(__file__), 'tpl', 'desktop', 'signin.html')
output = template.render(path, template_values)
self.response.out.write(output)
def post(self):
site = GetSite()
member = False
browser = detect(self.request)
template_values = {}
template_values['site'] = site
template_values['page_title'] = site.title + u' › 登入'
template_values['system_version'] = SYSTEM_VERSION
u = self.request.get('u').strip()
p = self.request.get('p').strip()
l10n = GetMessages(self, member, site)
template_values['l10n'] = l10n
errors = 0
error_messages = ['', '请输入用户名和密码', '你输入的用户名或密码不正确']
if (len(u) > 0 and len(p) > 0):
p_sha1 = hashlib.sha1(p).hexdigest()
if '@' in u:
q = db.GqlQuery("SELECT * FROM Member WHERE email = :1 AND password = :2", u.lower(), p_sha1)
else:
q = db.GqlQuery("SELECT * FROM Member WHERE username_lower = :1 AND password = :2", u.lower(), p_sha1)
if (q.count() == 1):
member = q[0]
self.response.headers['Set-Cookie'] = 'auth=' + member.auth + '; expires=' + (datetime.datetime.now() + datetime.timedelta(days=365)).strftime("%a, %d-%b-%Y %H:%M:%S GMT") + '; path=/'
next = self.request.get('next').strip()
host = self.request.host + '/'
if next.rfind(host)>0 and not next.rfind('/sign'):
self.redirect(next)
else:
self.redirect('/')
else:
errors = 2
else:
errors = 1
template_values['u'] = u
template_values['p'] = p
template_values['errors'] = errors
template_values['error_message'] = error_messages[errors]
if browser['ios']:
path = os.path.join(os.path.dirname(__file__), 'tpl', 'mobile', 'signin.html')
else:
path = os.path.join(os.path.dirname(__file__), 'tpl', 'desktop', 'signin.html')
output = template.render(path, template_values)
self.response.out.write(output)
class SignupHandler(webapp.RequestHandler):
def get(self):
site = GetSite()
member = False
chtml = captcha.displayhtml(
public_key = config.recaptcha_public_key,
use_ssl = False,
error = None)
browser = detect(self.request)
template_values = {}
template_values['site'] = site
template_values['page_title'] = site.title + u' › 注册'
template_values['system_version'] = SYSTEM_VERSION
template_values['errors'] = 0
template_values['captchahtml'] = chtml
l10n = GetMessages(self, member, site)
template_values['l10n'] = l10n
if browser['ios']:
path = os.path.join(os.path.dirname(__file__), 'tpl', 'mobile', 'signup.html')
else:
path = os.path.join(os.path.dirname(__file__), 'tpl', 'desktop', 'signup.html')
output = template.render(path, template_values)
self.response.out.write(output)
def post(self):
site = GetSite()
member = False
browser = detect(self.request)
template_values = {}
template_values['site'] = site
template_values['page_title'] = site.title + u' › 注册'
template_values['system_version'] = SYSTEM_VERSION
l10n = GetMessages(self, member, site)
template_values['l10n'] = l10n
errors = 0
# Verification: username
member_username_error = 0
member_username_error_messages = ['',
l10n.username_empty,
l10n.username_too_long,
l10n.username_too_short,
l10n.username_invalid,
l10n.username_taken]
member_username = self.request.get('username').strip()
# Special cases
if 'vpn' in member_username:
return self.redirect('http://www.v2ex.com/')
if (len(member_username) == 0):
errors = errors + 1
member_username_error = 1
else:
if (len(member_username) > 16):
errors = errors + 1
member_username_error = 2
else:
if (len(member_username) < 3):
errors = errors + 1
member_username_error = 3
else:
if (re.search('^[a-zA-Z0-9\_]+$', member_username)):
q = db.GqlQuery('SELECT __key__ FROM Member WHERE username_lower = :1', member_username.lower())
if (q.count() > 0):
errors = errors + 1
member_username_error = 5
else:
errors = errors + 1
member_username_error = 4
template_values['member_username'] = member_username
template_values['member_username_error'] = member_username_error
template_values['member_username_error_message'] = member_username_error_messages[member_username_error]
# Verification: password
member_password_error = 0
member_password_error_messages = ['',
u'请输入你的密码',
u'密码长度不能超过 32 个字符'
]
member_password = self.request.get('password').strip()
if (len(member_password) == 0):
errors = errors + 1
member_password_error = 1
else:
if (len(member_password) > 32):
errors = errors + 1
member_password_error = 2
template_values['member_password'] = member_password
template_values['member_password_error'] = member_password_error
template_values['member_password_error_message'] = member_password_error_messages[member_password_error]
# Verification: email
member_email_error = 0
member_email_error_messages = ['',
u'请输入你的电子邮件地址',
u'电子邮件地址长度不能超过 32 个字符',
u'你输入的电子邮件地址不符合规则',
u'抱歉这个电子邮件地址已经有人注册过了']
member_email = self.request.get('email').strip()
if (len(member_email) == 0):
errors = errors + 1
member_email_error = 1
else:
if (len(member_email) > 32):
errors = errors + 1
member_email_error = 2
else:
p = re.compile(r"(?:^|\s)[-a-z0-9_.+]+@(?:[-a-z0-9]+\.)+[a-z]{2,6}(?:\s|$)", re.IGNORECASE)
if (p.search(member_email)):
q = db.GqlQuery('SELECT __key__ FROM Member WHERE email = :1', member_email.lower())
if (q.count() > 0):
errors = errors + 1
member_email_error = 4
else:
errors = errors + 1
member_email_error = 3
template_values['member_email'] = member_email
template_values['member_email_error'] = member_email_error
template_values['member_email_error_message'] = member_email_error_messages[member_email_error]
# Verification: reCAPTCHA
challenge = self.request.get('recaptcha_challenge_field')
response = self.request.get('recaptcha_response_field')
remoteip = os.environ['REMOTE_ADDR']
cResponse = captcha.submit(
challenge,
response,
config.recaptcha_private_key,
remoteip)
if cResponse.is_valid:
logging.info('reCAPTCHA verification passed')
template_values['recaptcha_error'] = 0
else:
errors = errors + 1
error = cResponse.error_code
chtml = captcha.displayhtml(
public_key = config.recaptcha_public_key,
use_ssl = False,
error = cResponse.error_code)
template_values['captchahtml'] = chtml
template_values['recaptcha_error'] = 1
template_values['recaptcha_error_message'] = '请重新输入 reCAPTCHA 验证码'
template_values['errors'] = errors
if (errors == 0):
member = Member()
q = db.GqlQuery('SELECT * FROM Counter WHERE name = :1', 'member.max')
if (q.count() == 1):
counter = q[0]
counter.value = counter.value + 1
else:
counter = Counter()
counter.name = 'member.max'
counter.value = 1
q2 = db.GqlQuery('SELECT * FROM Counter WHERE name = :1', 'member.total')
if (q2.count() == 1):
counter2 = q2[0]
counter2.value = counter2.value + 1
else:
counter2 = Counter()
counter2.name = 'member.total'
counter2.value = 1
member.num = counter.value
member.username = member_username
member.username_lower = member_username.lower()
member.password = hashlib.sha1(member_password).hexdigest()
member.email = member_email.lower()
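# The persistent auth token is sha1("<member num>:<sha1(password)>"); changing
# the password therefore invalidates previously issued auth cookies. The cookie
# set below carries this token for one year.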
member.auth = hashlib.sha1(str(member.num) + ':' + member.password).hexdigest()
member.l10n = site.l10n
member.newbie = 1
member.noob = 0
if member.num == 1:
member.level = 0
else:
member.level = 1000
member.put()
counter.put()
counter2.put()
self.response.headers['Set-Cookie'] = 'auth=' + member.auth + '; expires=' + (datetime.datetime.now() + datetime.timedelta(days=365)).strftime("%a, %d-%b-%Y %H:%M:%S GMT") + '; path=/'
memcache.delete('member_total')
self.redirect('/')
else:
if browser['ios']:
path = os.path.join(os.path.dirname(__file__), 'tpl', 'mobile', 'signup.html')
else:
path = os.path.join(os.path.dirname(__file__), 'tpl', 'desktop', 'signup.html')
output = template.render(path, template_values)
self.response.out.write(output)
class SignoutHandler(webapp.RequestHandler):
def get(self):
site = GetSite()
browser = detect(self.request)
member = False
template_values = {}
template_values['site'] = site
template_values['page_title'] = site.title + u' › 登出'
template_values['system_version'] = SYSTEM_VERSION
l10n = GetMessages(self, member, site)
template_values['l10n'] = l10n
cookies = Cookies(self, max_age = 86400, path = '/')
del cookies['auth']
if browser['ios']:
path = os.path.join(os.path.dirname(__file__), 'tpl', 'mobile', 'signout.html')
else:
path = os.path.join(os.path.dirname(__file__), 'tpl', 'desktop', 'signout.html')
output = template.render(path, template_values)
self.response.out.write(output)
class ForgotHandler(webapp.RequestHandler):
def get(self):
site = GetSite()
browser = detect(self.request)
template_values = {}
template_values['rnd'] = random.randrange(1, 100)
template_values['site'] = site
member = CheckAuth(self)
l10n = GetMessages(self, member, site)
template_values['l10n'] = l10n
if member:
template_values['member'] = member
template_values['page_title'] = site.title + u' › 重新设置密码'
path = os.path.join(os.path.dirname(__file__), 'tpl', 'desktop', 'forgot.html')
output = template.render(path, template_values)
self.response.out.write(output)
def post(self):
site = GetSite()
browser = detect(self.request)
template_values = {}
template_values['rnd'] = random.randrange(1, 100)
template_values['site'] = site
member = CheckAuth(self)
l10n = GetMessages(self, member, site)
template_values['l10n'] = l10n
if member:
template_values['member'] = member
template_values['page_title'] = site.title + u' › 重新设置密码'
# Verification: username & email
username = self.request.get('username').strip().lower()
email = self.request.get('email').strip().lower()
q = db.GqlQuery("SELECT * FROM Member WHERE username_lower = :1 AND email = :2", username, email)
if q.count() == 1:
one = q[0]
q2 = db.GqlQuery("SELECT * FROM PasswordResetToken WHERE timestamp > :1 AND email = :2", (int(time.time()) - 86400), email)
if q2.count() > 2:
error_message = '你不能在 24 小时内进行超过 2 次的密码重设操作。'
template_values['errors'] = 1
template_values['error_message'] = error_message
path = os.path.join(os.path.dirname(__file__), 'tpl', 'desktop', 'forgot.html')
output = template.render(path, template_values)
self.response.out.write(output)
else:
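# Issue a one-time reset token: 32 random decimal digits stored with the member
# and a timestamp, so the 24-hour rate limit above can be enforced and the
# token can be invalidated once it has been used.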
token = ''.join([str(random.randint(0, 9)) for i in range(32)])
prt = PasswordResetToken()
prt.token = token
prt.member = one
prt.email = one.email
prt.timestamp = int(time.time())
prt.put()
mail_template_values = {}
mail_template_values['site'] = site
mail_template_values['one'] = one
mail_template_values['host'] = self.request.headers['Host']
mail_template_values['token'] = token
mail_template_values['ua'] = self.request.headers['User-Agent']
mail_template_values['ip'] = self.request.remote_addr
path = os.path.join(os.path.dirname(__file__), 'tpl', 'mail', 'reset_password.txt')
output = template.render(path, mail_template_values)
result = mail.send_mail(sender="v2ex.livid@me.com",
to=one.email,
subject="=?UTF-8?B?" + base64.b64encode((u"[" + site.title + u"] 重新设置密码").encode('utf-8')) + "?=",
body=output)
path = os.path.join(os.path.dirname(__file__), 'tpl', 'desktop', 'forgot_sent.html')
output = template.render(path, template_values)
self.response.out.write(output)
else:
error_message = '无法找到匹配的用户名和邮箱记录'
template_values['errors'] = 1
template_values['error_message'] = error_message
path = os.path.join(os.path.dirname(__file__), 'tpl', 'desktop', 'forgot.html')
output = template.render(path, template_values)
self.response.out.write(output)
class PasswordResetHandler(BaseHandler):
def get(self, token):
site = GetSite()
template_values = {}
template_values['site'] = site
member = False
l10n = GetMessages(self, member, site)
template_values['l10n'] = l10n
token = str(token.strip().lower())
q = db.GqlQuery("SELECT * FROM PasswordResetToken WHERE token = :1 AND valid = 1", token)
if q.count() == 1:
prt = q[0]
template_values['page_title'] = site.title + u' › 重新设置密码'
template_values['token'] = prt
path = os.path.join(os.path.dirname(__file__), 'tpl', 'desktop', 'reset_password.html')
output = template.render(path, template_values)
self.response.out.write(output)
else:
path = os.path.join(os.path.dirname(__file__), 'tpl', 'desktop', 'token_not_found.html')
output = template.render(path, template_values)
self.response.out.write(output)
def post(self, token):
site = GetSite()
template_values = {}
template_values['site'] = site
member = False
l10n = GetMessages(self, member, site)
template_values['l10n'] = l10n
token = str(token.strip().lower())
q = db.GqlQuery("SELECT * FROM PasswordResetToken WHERE token = :1 AND valid = 1", token)
if q.count() == 1:
prt = q[0]
template_values['page_title'] = site.title + u' › 重新设置密码'
template_values['token'] = prt
# Verification
errors = 0
new_password = str(self.request.get('new_password').strip())
new_password_again = str(self.request.get('new_password_again').strip())
if new_password == '' or new_password_again == '':
errors = errors + 1
error_message = '请输入两次新密码'
if errors == 0:
if new_password != new_password_again:
errors = errors + 1
error_message = '两次输入的新密码不一致'
if errors == 0:
if len(new_password) > 32:
errors = errors + 1
error_message = '新密码长度不能超过 32 个字符'
if errors == 0:
q2 = db.GqlQuery("SELECT * FROM Member WHERE num = :1", prt.member.num)
one = q2[0]
one.password = hashlib.sha1(new_password).hexdigest()
one.auth = hashlib.sha1(str(one.num) + ':' + one.password).hexdigest()
one.put()
prt.valid = 0
prt.put()
path = os.path.join(os.path.dirname(__file__), 'tpl', 'desktop', 'reset_password_ok.html')
output = template.render(path, template_values)
self.response.out.write(output)
else:
template_values['errors'] = errors
template_values['error_message'] = error_message
path = os.path.join(os.path.dirname(__file__), 'tpl', 'desktop', 'reset_password.html')
output = template.render(path, template_values)
self.response.out.write(output)
else:
path = os.path.join(os.path.dirname(__file__), 'tpl', 'desktop', 'token_not_found.html')
output = template.render(path, template_values)
self.response.out.write(output)
class NodeGraphHandler(BaseHandler):
def get(self, node_name):
site = GetSite()
browser = detect(self.request)
self.session = Session()
template_values = {}
template_values['site'] = site
template_values['rnd'] = random.randrange(1, 100)
template_values['system_version'] = SYSTEM_VERSION
member = CheckAuth(self)
if member:
template_values['member'] = member
can_create = False
can_manage = False
if site.topic_create_level > 999:
if member:
can_create = True
else:
if member:
if member.level <= site.topic_create_level:
can_create = True
if member:
if member.level == 0:
can_manage = True
template_values['can_create'] = can_create
template_values['can_manage'] = can_manage
l10n = GetMessages(self, member, site)
template_values['l10n'] = l10n
node = GetKindByName('Node', node_name)
template_values['node'] = node
if node:
template_values['feed_link'] = '/feed/' + node.name + '.xml'
template_values['feed_title'] = site.title + u' › ' + node.title
template_values['canonical'] = 'http://' + site.domain + '/go/' + node.name
if node.parent_node_name is None:
siblings = []
else:
siblings = db.GqlQuery("SELECT * FROM Node WHERE parent_node_name = :1 AND name != :2", node.parent_node_name, node.name)
template_values['siblings'] = siblings
if member:
favorited = member.hasFavorited(node)
template_values['favorited'] = favorited
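# Maintain a per-member "recently visited nodes" list in memcache (2-hour TTL);
# the parallel *_ids list keeps the membership check cheap, and both entries
# are always updated together so they stay in sync.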
recent_nodes = memcache.get('member::' + str(member.num) + '::recent_nodes')
recent_nodes_ids = memcache.get('member::' + str(member.num) + '::recent_nodes_ids')
if recent_nodes and recent_nodes_ids:
if node.num not in recent_nodes_ids:
recent_nodes.insert(0, node)
recent_nodes_ids.insert(0, node.num)
memcache.set('member::' + str(member.num) + '::recent_nodes', recent_nodes, 7200)
memcache.set('member::' + str(member.num) + '::recent_nodes_ids', recent_nodes_ids, 7200)
else:
recent_nodes = []
recent_nodes.append(node)
recent_nodes_ids = []
recent_nodes_ids.append(node.num)
memcache.set('member::' + str(member.num) + '::recent_nodes', recent_nodes, 7200)
memcache.set('member::' + str(member.num) + '::recent_nodes_ids', recent_nodes_ids, 7200)
template_values['recent_nodes'] = recent_nodes
template_values['page_title'] = site.title + u' › ' + node.title
else:
template_values['page_title'] = site.title + u' › 节点未找到'
section = False
if node:
section = GetKindByNum('Section', node.section_num)
template_values['section'] = section
if browser['ios']:
if (node):
path = os.path.join(os.path.dirname(__file__), 'tpl', 'mobile', 'node_graph.html')
else:
path = os.path.join(os.path.dirname(__file__), 'tpl', 'mobile', 'node_not_found.html')
else:
if (node):
path = os.path.join(os.path.dirname(__file__), 'tpl', 'desktop', 'node_graph.html')
else:
path = os.path.join(os.path.dirname(__file__), 'tpl', 'desktop', 'node_not_found.html')
output = template.render(path, template_values)
self.response.out.write(output)
class NodeHandler(webapp.RequestHandler):
def get(self, node_name):
site = GetSite()
browser = detect(self.request)
self.session = Session()
template_values = {}
template_values['site'] = site
template_values['rnd'] = random.randrange(1, 100)
template_values['system_version'] = SYSTEM_VERSION
member = CheckAuth(self)
if member:
template_values['member'] = member
can_create = False
can_manage = False
if site.topic_create_level > 999:
if member:
can_create = True
else:
if member:
if member.level <= site.topic_create_level:
can_create = True
if member:
if member.level == 0:
can_manage = True
template_values['can_create'] = can_create
template_values['can_manage'] = can_manage
l10n = GetMessages(self, member, site)
template_values['l10n'] = l10n
node = GetKindByName('Node', node_name)
template_values['node'] = node
pagination = False
pages = 1
page = 1
page_size = 15
start = 0
has_more = False
more = 1
has_previous = False
previous = 1
if node:
template_values['feed_link'] = '/feed/' + node.name + '.xml'
template_values['feed_title'] = site.title + u' › ' + node.title
template_values['canonical'] = 'http://' + site.domain + '/go/' + node.name
if member:
favorited = member.hasFavorited(node)
template_values['favorited'] = favorited
recent_nodes = memcache.get('member::' + str(member.num) + '::recent_nodes')
recent_nodes_ids = memcache.get('member::' + str(member.num) + '::recent_nodes_ids')
if recent_nodes and recent_nodes_ids:
if node.num not in recent_nodes_ids:
recent_nodes.insert(0, node)
recent_nodes_ids.insert(0, node.num)
memcache.set('member::' + str(member.num) + '::recent_nodes', recent_nodes, 7200)
memcache.set('member::' + str(member.num) + '::recent_nodes_ids', recent_nodes_ids, 7200)
else:
recent_nodes = []
recent_nodes.append(node)
recent_nodes_ids = []
recent_nodes_ids.append(node.num)
memcache.set('member::' + str(member.num) + '::recent_nodes', recent_nodes, 7200)
memcache.set('member::' + str(member.num) + '::recent_nodes_ids', recent_nodes_ids, 7200)
template_values['recent_nodes'] = recent_nodes
template_values['page_title'] = site.title + u' › ' + node.title
# Pagination
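# e.g. a node with 46 topics and page_size 15 gives pages = 46 / 15 + 1 = 4;
# the requested ?p= value is clamped into [1, pages] before the fetch offset
# "start" is computed below.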
if node.topics > page_size:
pagination = True
else:
pagination = False
if pagination:
if node.topics % page_size == 0:
pages = int(node.topics / page_size)
else:
pages = int(node.topics / page_size) + 1
page = self.request.get('p')
if (page == '') or (page is None):
page = 1
else:
page = int(page)
if page > pages:
page = pages
else:
if page < 1:
page = 1
if page < pages:
has_more = True
more = page + 1
if page > 1:
has_previous = True
previous = page - 1
start = (page - 1) * page_size
template_values['canonical'] = 'http://' + site.domain + '/go/' + node.name + '?p=' + str(page)
else:
template_values['page_title'] = site.title + u' › 节点未找到'
template_values['pagination'] = pagination
template_values['pages'] = pages
template_values['page'] = page
template_values['page_size'] = page_size
template_values['has_more'] = has_more
template_values['more'] = more
template_values['has_previous'] = has_previous
template_values['previous'] = previous
section = False
if node:
section = GetKindByNum('Section', node.section_num)
template_values['section'] = section
topics = False
if node:
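# GQL accepts the "LIMIT offset, count" form, so this fetches exactly one page
# of topics ordered by most recent activity, using the start offset computed above.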
q3 = db.GqlQuery("SELECT * FROM Topic WHERE node_num = :1 ORDER BY last_touched DESC LIMIT " + str(start) + ", " + str(page_size), node.num)
topics = q3
template_values['latest'] = topics
if browser['ios']:
if (node):
path = os.path.join(os.path.dirname(__file__), 'tpl', 'mobile', 'node.html')
else:
path = os.path.join(os.path.dirname(__file__), 'tpl', 'mobile', 'node_not_found.html')
else:
if (node):
path = os.path.join(os.path.dirname(__file__), 'tpl', 'desktop', 'node.html')
else:
path = os.path.join(os.path.dirname(__file__), 'tpl', 'desktop', 'node_not_found.html')
output = template.render(path, template_values)
self.response.out.write(output)
class NodeApiHandler(webapp.RequestHandler):
def get(self, node_name):
site = GetSite()
node = GetKindByName('Node', node_name)
if node:
template_values = {}
template_values['site'] = site
template_values['node'] = node
path = os.path.join(os.path.dirname(__file__), 'tpl', 'api', 'node.json')
self.response.headers['Content-type'] = 'application/json;charset=UTF-8'
output = template.render(path, template_values)
self.response.out.write(output)
else:
self.error(404)
class SearchHandler(webapp.RequestHandler):
def get(self, q):
site = GetSite()
q = urllib.unquote(q)
template_values = {}
template_values['site'] = site
member = CheckAuth(self)
l10n = GetMessages(self, member, site)
template_values['l10n'] = l10n
if member:
template_values['member'] = member
template_values['page_title'] = site.title + u' › 搜索 ' + q.decode('utf-8')
template_values['q'] = q
if config.fts_enabled is not True:
path = os.path.join(os.path.dirname(__file__), 'tpl', 'desktop', 'search_unavailable.html')
output = template.render(path, template_values)
self.response.out.write(output)
else:
if re.findall(r'^([a-zA-Z0-9_]+)$', q):
node = GetKindByName('Node', q.lower())
if node is not None:
template_values['node'] = node
# Fetch result
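# Cache-aside lookup: results are keyed by the MD5 of the lowercased query and
# kept for a day; on a miss the external full-text search service is queried
# over HTTP with basic auth and the resulting topic list is memcached.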
q_lowered = q.lower()
q_md5 = hashlib.md5(q_lowered).hexdigest()
topics = memcache.get('q::' + q_md5)
if topics is None:
try:
if os.environ['SERVER_SOFTWARE'] == 'Development/1.0':
fts = u'http://127.0.0.1:20000/search?q=' + str(urllib.quote(q_lowered))
else:
fts = u'http://' + config.fts_server + '/search?q=' + str(urllib.quote(q_lowered))
response = urlfetch.fetch(fts, headers = {"Authorization" : "Basic %s" % base64.b64encode(config.fts_username + ':' + config.fts_password)})
if response.status_code == 200:
results = json.loads(response.content)
topics = []
for num in results:
topics.append(GetKindByNum('Topic', num))
template_values['topics'] = topics
memcache.set('q::' + q_md5, topics, 86400)
except:
template_values['topics'] = []
else:
template_values['topics'] = topics
path = os.path.join(os.path.dirname(__file__), 'tpl', 'desktop', 'search.html')
output = template.render(path, template_values)
self.response.out.write(output)
class DispatcherHandler(webapp.RequestHandler):
def post(self):
referer = self.request.headers['Referer']
q = self.request.get('q').strip()
if len(q) > 0:
self.redirect('/q/' + q)
else:
self.redirect(referer)
class RouterHandler(webapp.RequestHandler):
def get(self, path):
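# Catch-all router for minisite pages: "name/page.html" serves that page,
# "name/" falls back to the minisite's index.html, and a bare "name" serves
# the cached site index. Resolved Page entities are memcached under the raw
# path for ten days; dynamic (mode 1) pages are rendered inside the site
# layout, while static pages are emitted verbatim with long-lived cache headers.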
if path.find('/') != -1:
# Page
parts = path.split('/')
if len(parts) == 2:
minisite_name = parts[0]
if parts[1] == '':
page_name = 'index.html'
else:
page_name = parts[1]
minisite = GetKindByName('Minisite', minisite_name)
if minisite is not False:
page = memcache.get(path)
if page is None:
q = db.GqlQuery("SELECT * FROM Page WHERE name = :1 AND minisite = :2", page_name, minisite)
if q.count() == 1:
page = q[0]
memcache.set(path, page, 864000)
if page.mode == 1:
# Dynamic embedded page
template_values = {}
site = GetSite()
template_values['site'] = site
member = CheckAuth(self)
if member:
template_values['member'] = member
l10n = GetMessages(self, member, site)
template_values['l10n'] = l10n
template_values['rnd'] = random.randrange(1, 100)
template_values['page'] = page
template_values['minisite'] = page.minisite
template_values['page_title'] = site.title + u' › ' + page.minisite.title.decode('utf-8') + u' › ' + page.title.decode('utf-8')
taskqueue.add(url='/hit/page/' + str(page.key()))
path = os.path.join(os.path.dirname(__file__), 'tpl', 'desktop', 'page.html')
output = template.render(path, template_values)
self.response.out.write(output)
else:
# Static standalone page
taskqueue.add(url='/hit/page/' + str(page.key()))
expires_date = datetime.datetime.utcnow() + datetime.timedelta(days=10)
expires_str = expires_date.strftime("%d %b %Y %H:%M:%S GMT")
self.response.headers.add_header("Expires", expires_str)
self.response.headers['Cache-Control'] = 'max-age=864000, must-revalidate'
self.response.headers['Content-Type'] = page.content_type
self.response.out.write(page.content)
else:
minisite_name = parts[0]
page_name = 'index.html'
minisite = GetKindByName('Minisite', minisite_name)
if minisite is not False:
page = memcache.get(path)
if page is None:
q = db.GqlQuery("SELECT * FROM Page WHERE name = :1 AND minisite = :2", page_name, minisite)
if q.count() == 1:
page = q[0]
memcache.set(path, page, 864000)
if page.mode == 1:
# Dynamic embedded page
template_values = {}
site = GetSite()
template_values['site'] = site
member = CheckAuth(self)
if member:
template_values['member'] = member
l10n = GetMessages(self, member, site)
template_values['l10n'] = l10n
template_values['rnd'] = random.randrange(1, 100)
template_values['page'] = page
template_values['minisite'] = page.minisite
template_values['page_title'] = site.title + u' › ' + page.minisite.title.decode('utf-8') + u' › ' + page.title.decode('utf-8')
taskqueue.add(url='/hit/page/' + str(page.key()))
path = os.path.join(os.path.dirname(__file__), 'tpl', 'desktop', 'page.html')
output = template.render(path, template_values)
self.response.out.write(output)
else:
# Static standalone page
taskqueue.add(url='/hit/page/' + str(page.key()))
expires_date = datetime.datetime.utcnow() + datetime.timedelta(days=10)
expires_str = expires_date.strftime("%d %b %Y %H:%M:%S GMT")
self.response.headers.add_header("Expires", expires_str)
self.response.headers['Cache-Control'] = 'max-age=864000, must-revalidate'
self.response.headers['Content-Type'] = page.content_type
self.response.out.write(page.content)
else:
# Site
page = memcache.get(path + '/index.html')
if page:
taskqueue.add(url='/hit/page/' + str(page.key()))
if page.mode == 1:
# Dynamic embedded page
template_values = {}
site = GetSite()
template_values['site'] = site
member = CheckAuth(self)
if member:
template_values['member'] = member
l10n = GetMessages(self, member, site)
template_values['l10n'] = l10n
template_values['rnd'] = random.randrange(1, 100)
template_values['page'] = page
template_values['minisite'] = page.minisite
template_values['page_title'] = site.title + u' › ' + page.minisite.title.decode('utf-8') + u' › ' + page.title.decode('utf-8')
taskqueue.add(url='/hit/page/' + str(page.key()))
path = os.path.join(os.path.dirname(__file__), 'tpl', 'desktop', 'page.html')
output = template.render(path, template_values)
self.response.out.write(output)
else:
expires_date = datetime.datetime.utcnow() + datetime.timedelta(days=10)
expires_str = expires_date.strftime("%d %b %Y %H:%M:%S GMT")
self.response.headers.add_header("Expires", expires_str)
self.response.headers['Cache-Control'] = 'max-age=864000, must-revalidate'
self.response.headers['Content-Type'] = page.content_type
self.response.out.write(page.content)
else:
minisite_name = path
minisite = GetKindByName('Minisite', minisite_name)
q = db.GqlQuery("SELECT * FROM Page WHERE name = :1 AND minisite = :2", 'index.html', minisite)
if q.count() == 1:
page = q[0]
memcache.set(path + '/index.html', page, 864000)
if page.mode == 1:
# Dynamic embedded page
template_values = {}
site = GetSite()
template_values['site'] = site
member = CheckAuth(self)
if member:
template_values['member'] = member
l10n = GetMessages(self, member, site)
template_values['l10n'] = l10n
template_values['rnd'] = random.randrange(1, 100)
template_values['page'] = page
template_values['minisite'] = page.minisite
template_values['page_title'] = site.title + u' › ' + page.minisite.title.decode('utf-8') + u' › ' + page.title.decode('utf-8')
taskqueue.add(url='/hit/page/' + str(page.key()))
path = os.path.join(os.path.dirname(__file__), 'tpl', 'desktop', 'page.html')
output = template.render(path, template_values)
self.response.out.write(output)
else:
# Static standalone page
taskqueue.add(url='/hit/page/' + str(page.key()))
expires_date = datetime.datetime.utcnow() + datetime.timedelta(days=10)
expires_str = expires_date.strftime("%d %b %Y %H:%M:%S GMT")
self.response.headers.add_header("Expires", expires_str)
self.response.headers['Cache-Control'] = 'max-age=864000, must-revalidate'
self.response.headers['Content-Type'] = page.content_type
self.response.out.write(page.content)
class ChangesHandler(webapp.RequestHandler):
def get(self):
site = GetSite()
browser = detect(self.request)
template_values = {}
template_values['site'] = site
template_values['rnd'] = random.randrange(1, 100)
template_values['system_version'] = SYSTEM_VERSION
template_values['page_title'] = site.title + u' › 全站最新更改记录'
member = CheckAuth(self)
template_values['member'] = member
l10n = GetMessages(self, member, site)
template_values['l10n'] = l10n
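# topic.total is read through memcache with a 10-minute TTL so this page does
# not hit the Counter entity on every request.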
topic_total = memcache.get('topic_total')
if topic_total is None:
q2 = db.GqlQuery("SELECT * FROM Counter WHERE name = 'topic.total'")
if (q2.count() > 0):
topic_total = q2[0].value
else:
topic_total = 0
memcache.set('topic_total', topic_total, 600)
template_values['topic_total'] = topic_total
page_size = 60
pages = 1
if topic_total > page_size:
if (topic_total % page_size) > 0:
pages = int(math.floor(topic_total / page_size)) + 1
else:
pages = int(math.floor(topic_total / page_size))
try:
page_current = int(self.request.get('p'))
if page_current < 1:
page_current = 1
if page_current > pages:
page_current = pages
except (ValueError, TypeError):
page_current = 1
page_start = (page_current - 1) * page_size
template_values['pages'] = pages
template_values['page_current'] = page_current
i = 1
ps = []
while i <= pages:
ps.append(i)
i = i + 1
template_values['ps'] = ps
latest = memcache.get('q_changes_' + str(page_current))
if (latest):
template_values['latest'] = latest
else:
q1 = db.GqlQuery("SELECT * FROM Topic ORDER BY last_touched DESC LIMIT " + str(page_start) + "," + str(page_size))
topics = []
for topic in q1:
topics.append(topic)
memcache.set('q_changes_' + str(page_current), topics, 120)
template_values['latest'] = topics
template_values['latest_total'] = len(topics)
if browser['ios']:
path = os.path.join(os.path.dirname(__file__), 'tpl', 'mobile', 'changes.html')
else:
path = os.path.join(os.path.dirname(__file__), 'tpl', 'desktop', 'changes.html')
output = template.render(path, template_values)
self.response.out.write(output)
def main():
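# URL routing table: patterns are matched top to bottom, so the '/(.*)'
# catch-all feeding RouterHandler must stay last or it would shadow every
# more specific handler above it.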
application = webapp.WSGIApplication([
('/', HomeHandler),
('/planes/?', PlanesHandler),
('/recent', RecentHandler),
('/ua', UAHandler),
('/signin', SigninHandler),
('/signup', SignupHandler),
('/signout', SignoutHandler),
('/forgot', ForgotHandler),
('/reset/([0-9]+)', PasswordResetHandler),
('/go/([a-zA-Z0-9]+)/graph', NodeGraphHandler),
('/go/([a-zA-Z0-9]+)', NodeHandler),
('/n/([a-zA-Z0-9]+).json', NodeApiHandler),
('/q/(.*)', SearchHandler),
('/_dispatcher', DispatcherHandler),
('/changes', ChangesHandler),
('/(.*)', RouterHandler)
],
debug=True)
util.run_wsgi_app(application)
if __name__ == '__main__':
main()
| bsd-3-clause | -357,091,797,173,069,500 | -455,163,873,875,950,400 | 45.281633 | 321 | 0.517188 | false |
wileeam/airflow | airflow/providers/microsoft/azure/operators/adls_list.py | 5 | 2549 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from typing import Iterable
from airflow.models import BaseOperator
from airflow.providers.microsoft.azure.hooks.azure_data_lake import AzureDataLakeHook
from airflow.utils.decorators import apply_defaults
class AzureDataLakeStorageListOperator(BaseOperator):
"""
List all files from the specified path
This operator returns a python list with the names of files which can be used by
`xcom` in the downstream tasks.
:param path: The Azure Data Lake path to find the objects. Supports glob
strings (templated)
:type path: str
:param azure_data_lake_conn_id: The connection ID to use when
connecting to Azure Data Lake Storage.
:type azure_data_lake_conn_id: str
**Example**:
The following Operator would list all the Parquet files from ``folder/output/``
folder in the specified ADLS account ::
adls_files = AzureDataLakeStorageListOperator(
task_id='adls_files',
path='folder/output/*.parquet',
azure_data_lake_conn_id='azure_data_lake_default'
)
"""
template_fields = ('path',) # type: Iterable[str]
ui_color = '#901dd2'
@apply_defaults
def __init__(self,
path,
azure_data_lake_conn_id='azure_data_lake_default',
*args,
**kwargs):
super().__init__(*args, **kwargs)
self.path = path
self.azure_data_lake_conn_id = azure_data_lake_conn_id
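# execute() returns the listing directly; the return value is pushed to XCom
# by default, which is how downstream tasks (as in the docstring example)
# consume the file names.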
def execute(self, context):
hook = AzureDataLakeHook(
azure_data_lake_conn_id=self.azure_data_lake_conn_id
)
self.log.info('Getting list of ADLS files in path: %s', self.path)
return hook.list(path=self.path)
| apache-2.0 | 3,425,513,188,258,966,000 | 5,746,598,203,656,810,000 | 34.901408 | 87 | 0.674382 | false |
nhuthep91/aBasic | plugins/ti.alloy/plugin.py | 1729 | 5251 | import os, sys, subprocess, hashlib
import subprocess
def check_output(*popenargs, **kwargs):
r"""Run command with arguments and return its output as a byte string.
Backported from Python 2.7 as it's implemented as pure python on stdlib.
>>> check_output(['/usr/bin/python', '--version'])
Python 2.6.2
"""
process = subprocess.Popen(stdout=subprocess.PIPE, *popenargs, **kwargs)
output, unused_err = process.communicate()
retcode = process.poll()
if retcode:
cmd = kwargs.get("args")
if cmd is None:
cmd = popenargs[0]
error = subprocess.CalledProcessError(retcode, cmd)
error.output = output
raise error
return output
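# The backport is used exactly like the 2.7 stdlib helper, e.g.
#   node_version = check_output(['node', '--version']).strip()
# and raises CalledProcessError (with the captured output attached as .output)
# on a non-zero exit status.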
def compile(config):
paths = {}
binaries = ["alloy","node"]
dotAlloy = os.path.abspath(os.path.join(config['project_dir'], 'build', '.alloynewcli'))
if os.path.exists(dotAlloy):
print "[DEBUG] build/.alloynewcli file found, skipping plugin..."
os.remove(dotAlloy)
else:
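# Resolve the alloy and node executables: an ALLOY_PATH / ALLOY_NODE_PATH
# environment variable wins, then a `which` lookup on PATH, then a short list
# of common install locations; on Windows only "alloy.cmd" from PATH is used.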
for binary in binaries:
try:
# see if the environment variable is defined
paths[binary] = os.environ["ALLOY_" + ("NODE_" if binary == "node" else "") + "PATH"]
except KeyError as ex:
# next try PATH, and then our guess paths
if sys.platform == "darwin" or sys.platform.startswith('linux'):
userPath = os.environ["HOME"]
guessPaths = [
"/usr/local/bin/"+binary,
"/opt/local/bin/"+binary,
userPath+"/local/bin/"+binary,
"/opt/bin/"+binary,
"/usr/bin/"+binary,
"/usr/local/share/npm/bin/"+binary
]
try:
binaryPath = check_output(["which",binary], stderr=subprocess.STDOUT).strip()
print "[DEBUG] %s installed at '%s'" % (binary,binaryPath)
except:
print "[WARN] Couldn't find %s on your PATH:" % binary
print "[WARN] %s" % os.environ["PATH"]
print "[WARN]"
print "[WARN] Checking for %s in a few default locations:" % binary
for p in guessPaths:
sys.stdout.write("[WARN] %s -> " % p)
if os.path.exists(p):
binaryPath = p
print "FOUND"
break
else:
print "not found"
binaryPath = None
if binaryPath is None:
print "[ERROR] Couldn't find %s" % binary
sys.exit(1)
else:
paths[binary] = binaryPath
# no guesses on windows, just use the PATH
elif sys.platform == "win32":
paths["alloy"] = "alloy.cmd"
f = os.path.abspath(os.path.join(config['project_dir'], 'app'))
if os.path.exists(f):
print "[INFO] alloy app found at %s" % f
rd = os.path.abspath(os.path.join(config['project_dir'], 'Resources'))
devicefamily = 'none'
simtype = 'none'
version = '0'
deploytype = 'development'
if config['platform']==u'ios':
version = config['iphone_version']
devicefamily = config['devicefamily']
deploytype = config['deploytype']
if config['platform']==u'android':
builder = config['android_builder']
version = builder.tool_api_level
deploytype = config['deploy_type']
if config['platform']==u'mobileweb':
builder = config['mobileweb_builder']
deploytype = config['deploytype']
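# Assemble the --config string handed to the Alloy CLI; it ends up looking like
# "platform=ios,version=7.0,simtype=none,devicefamily=iphone,deploytype=development,"
# (values illustrative).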
cfg = "platform=%s,version=%s,simtype=%s,devicefamily=%s,deploytype=%s," % (config['platform'],version,simtype,devicefamily,deploytype)
if sys.platform == "win32":
cmd = [paths["alloy"], "compile", f, "--no-colors", "--config", cfg]
else:
cmd = [paths["node"], paths["alloy"], "compile", f, "--no-colors", "--config", cfg]
print "[INFO] Executing Alloy compile:"
print "[INFO] %s" % " ".join(cmd)
try:
print check_output(cmd, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as ex:
if hasattr(ex, 'output'):
print ex.output
print "[ERROR] Alloy compile failed"
retcode = 1
if hasattr(ex, 'returncode'):
retcode = ex.returncode
sys.exit(retcode)
except EnvironmentError as ex:
print "[ERROR] Unexpected error with Alloy compiler plugin: %s" % ex.strerror
sys.exit(2)
| apache-2.0 | -1,944,921,810,938,486,300 | -5,135,760,589,879,898,000 | 41.691057 | 147 | 0.476862 | false |
mclaughlin6464/pdnn | models/dnn_reg.py | 1 | 11677 | '''
@Author Sean McLaughiln
This is my copy of the dnn module. I'm adding some features so I can use DNN for regression rather than just classification.
'''
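# Minimal usage sketch (illustrative; assumes the usual pdnn NetworkConfig-style
# `cfg` carrying n_ins, n_outs, hidden_layers_sizes, activation, etc.):
#   numpy_rng = numpy.random.RandomState(123)
#   dnn = DNN_REG(numpy_rng=numpy_rng, cfg=cfg)
#   train_fn, valid_fn = dnn.build_finetune_functions(train_xy, valid_xy, batch_size=256)
#   err = train_fn(0, 0.001, 0.5)   # minibatch index, learning rate, momentum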
import cPickle
import gzip
import os
import sys
import time
import collections
import numpy
import theano
import theano.tensor as T
from theano.tensor.shared_randomstreams import RandomStreams
from layers.regressions_sgd import Regression
from layers.mlp import HiddenLayer, DropoutHiddenLayer, _dropout_from_layer
from models.dnn import DNN
from io_func import smart_open
from io_func.model_io import _nnet2file, _file2nnet
class DNN_REG(object):
def __init__(self, numpy_rng, theano_rng=None,
cfg = None, # the network configuration
dnn_shared = None, shared_layers=[], input = None):
self.layers = []
self.params = []
self.delta_params = []
self.cfg = cfg
self.n_ins = cfg.n_ins; self.n_outs = cfg.n_outs
self.hidden_layers_sizes = cfg.hidden_layers_sizes
self.hidden_layers_number = len(self.hidden_layers_sizes)
self.activation = cfg.activation
self.do_maxout = cfg.do_maxout; self.pool_size = cfg.pool_size
self.max_col_norm = cfg.max_col_norm
self.l1_reg = cfg.l1_reg
self.l2_reg = cfg.l2_reg
self.non_updated_layers = cfg.non_updated_layers
if not theano_rng:
theano_rng = RandomStreams(numpy_rng.randint(2 ** 30))
# allocate symbolic variables for the data
if input is None:
#TODO CHANGE BACK
self.x = T.matrix('x')
else:
self.x = input
self.y = T.matrix('y')
for i in xrange(self.hidden_layers_number):
# construct the hidden layer
if i == 0:
input_size = self.n_ins
layer_input = self.x
else:
input_size = self.hidden_layers_sizes[i - 1]
layer_input = self.layers[-1].output
W = None; b = None
if (i in shared_layers) :
W = dnn_shared.layers[i].W; b = dnn_shared.layers[i].b
if self.do_maxout == True:
hidden_layer = HiddenLayer(rng=numpy_rng,
input=layer_input,
n_in=input_size,
n_out=self.hidden_layers_sizes[i] * self.pool_size,
W = W, b = b,
activation = (lambda x: 1.0*x),
do_maxout = True, pool_size = self.pool_size)
else:
hidden_layer = HiddenLayer(rng=numpy_rng,
input=layer_input,
n_in=input_size,
n_out=self.hidden_layers_sizes[i],
W = W, b = b,
activation=self.activation)
# add the layer to our list of layers
self.layers.append(hidden_layer)
# if the layer index is included in self.non_updated_layers, parameters of this layer will not be updated
if (i not in self.non_updated_layers):
self.params.extend(hidden_layer.params)
self.delta_params.extend(hidden_layer.delta_params)
# We now need to add a regression layer on top of the MLP
self.regLayer = Regression(
input= self.layers[-1].output if self.hidden_layers_number>0 else self.x,
n_in=self.hidden_layers_sizes[-1] if self.hidden_layers_number>0 else self.n_ins, n_out=self.n_outs)
#print self.hidden_layers_sizes[-1]
#print self.n_outs
if self.n_outs > 0:
self.layers.append(self.regLayer)
self.params.extend(self.regLayer.params)
self.delta_params.extend(self.regLayer.delta_params)
# compute the cost for second phase of training,
# defined as the negative log likelihood
self.finetune_cost = self.regLayer.negative_log_likelihood(self.y)
self.errors = self.finetune_cost
if self.l1_reg is not None:
for i in xrange(self.hidden_layers_number):
W = self.layers[i].W
self.finetune_cost += self.l1_reg * (abs(W).sum())
if self.l2_reg is not None:
for i in xrange(self.hidden_layers_number):
W = self.layers[i].W
self.finetune_cost += self.l2_reg * T.sqr(W).sum()
def build_finetune_functions(self, train_shared_xy, valid_shared_xy, batch_size):
#print len(self.layers)
#print [T.shape(l.W)[0] for l in self.layers]
(train_set_x, train_set_y) = train_shared_xy
(valid_set_x, valid_set_y) = valid_shared_xy
#print T.shape(train_set_x), T.shape(train_set_y)
index = T.lscalar('index') # index to a [mini]batch
learning_rate = T.fscalar('learning_rate')
momentum = T.fscalar('momentum')
#theano.printing.pydotprint(self.finetune_cost, outfile="finetune_cost.png", var_with_name_simple=True)
# compute the gradients with respect to the model parameters
gparams = T.grad(self.finetune_cost, self.params)
#theano.printing.pydotprint(gparams, outfile="gparams.png", var_with_name_simple=True)
# compute list of fine-tuning updates
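# Momentum SGD: delta <- momentum * delta - learning_rate * grad,
# followed by param <- param + delta.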
#updates = collections.OrderedDict()
updates = theano.compat.python2x.OrderedDict()
for dparam, gparam in zip(self.delta_params, gparams):
updates[dparam] = momentum * dparam - gparam*learning_rate
for dparam, param in zip(self.delta_params, self.params):
updates[param] = param + updates[dparam]
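# Optional max-norm constraint: after the update, any hidden-layer weight
# column whose L2 norm exceeds max_col_norm is rescaled back to that norm
# (commonly paired with dropout training).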
if self.max_col_norm is not None:
for i in xrange(self.hidden_layers_number):
W = self.layers[i].W
if W in updates:
updated_W = updates[W]
col_norms = T.sqrt(T.sum(T.sqr(updated_W), axis=0))
desired_norms = T.clip(col_norms, 0, self.max_col_norm)
updates[W] = updated_W * (desired_norms / (1e-7 + col_norms))
#theano.printing.pydotprint(self.errors, outfile="errors.png", var_with_name_simple=True)
train_fn = theano.function(inputs=[index, theano.Param(learning_rate, default = 0.0001),
theano.Param(momentum, default = 0.5)],
outputs=self.errors,
updates=updates,
givens={
self.x: train_set_x[index * batch_size:
(index + 1) * batch_size],
self.y: train_set_y[index * batch_size:
(index + 1) * batch_size]})
#theano.printing.pydotprint(train_fn , outfile="train_fn.png", var_with_name_simple=True)
valid_fn = theano.function(inputs=[index],
outputs=self.errors,
givens={
self.x: valid_set_x[index * batch_size:
(index + 1) * batch_size],
self.y: valid_set_y[index * batch_size:
(index + 1) * batch_size]})
return train_fn, valid_fn
def build_extract_feat_function(self, output_layer):
feat = T.matrix('feat')
out_da = theano.function([feat], self.layers[output_layer].output, updates = None, givens={self.x:feat}, on_unused_input='warn')
return out_da
def build_finetune_functions_kaldi(self, train_shared_xy, valid_shared_xy):
(train_set_x, train_set_y) = train_shared_xy
(valid_set_x, valid_set_y) = valid_shared_xy
index = T.lscalar('index') # index to a [mini]batch
learning_rate = T.fscalar('learning_rate')
momentum = T.fscalar('momentum')
# compute the gradients with respect to the model parameters
gparams = T.grad(self.finetune_cost, self.params)
# compute list of fine-tuning updates
updates = collections.OrderedDict()
for dparam, gparam in zip(self.delta_params, gparams):
updates[dparam] = momentum * dparam - gparam*learning_rate
for dparam, param in zip(self.delta_params, self.params):
updates[param] = param + updates[dparam]
if self.max_col_norm is not None:
for i in xrange(self.hidden_layers_number):
W = self.layers[i].W
if W in updates:
updated_W = updates[W]
col_norms = T.sqrt(T.sum(T.sqr(updated_W), axis=0))
desired_norms = T.clip(col_norms, 0, self.max_col_norm)
updates[W] = updated_W * (desired_norms / (1e-7 + col_norms))
train_fn = theano.function(inputs=[theano.Param(learning_rate, default = 0.0001),
theano.Param(momentum, default = 0.5)],
outputs=self.errors,
updates=updates,
givens={self.x: train_set_x, self.y: train_set_y})
valid_fn = theano.function(inputs=[],
outputs=self.errors,
givens={self.x: valid_set_x, self.y: valid_set_y})
return train_fn, valid_fn
def write_model_to_raw(self, file_path):
# output the model to tmp_path; this format is readable by PDNN
_nnet2file(self.layers, filename=file_path)
def write_model_to_kaldi(self, file_path, with_softmax = True):
# determine whether it's BNF based on layer sizes
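# A bottleneck (BNF) network is detected as a hidden layer strictly smaller
# than both neighbours; only layers up to that bottleneck are exported,
# otherwise the whole stack (optionally ending in a softmax) is written.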
output_layer_number = -1;
for layer_index in range(1, self.hidden_layers_number - 1):
cur_layer_size = self.hidden_layers_sizes[layer_index]
prev_layer_size = self.hidden_layers_sizes[layer_index-1]
next_layer_size = self.hidden_layers_sizes[layer_index+1]
if cur_layer_size < prev_layer_size and cur_layer_size < next_layer_size:
output_layer_number = layer_index+1; break
layer_number = len(self.layers)
if output_layer_number == -1:
output_layer_number = layer_number
fout = smart_open(file_path, 'wb')
for i in xrange(output_layer_number):
activation_text = '<' + self.cfg.activation_text + '>'
if i == (layer_number-1) and with_softmax: # we assume that the last layer is a softmax layer
activation_text = '<softmax>'
W_mat = self.layers[i].W.get_value()
b_vec = self.layers[i].b.get_value()
input_size, output_size = W_mat.shape
W_layer = []; b_layer = ''
for rowX in xrange(output_size):
W_layer.append('')
for x in xrange(input_size):
for t in xrange(output_size):
W_layer[t] = W_layer[t] + str(W_mat[x][t]) + ' '
for x in xrange(output_size):
b_layer = b_layer + str(b_vec[x]) + ' '
fout.write('<affinetransform> ' + str(output_size) + ' ' + str(input_size) + '\n')
fout.write('[' + '\n')
for x in xrange(output_size):
fout.write(W_layer[x].strip() + '\n')
fout.write(']' + '\n')
fout.write('[ ' + b_layer.strip() + ' ]' + '\n')
if activation_text == '<maxout>':
fout.write(activation_text + ' ' + str(output_size/self.pool_size) + ' ' + str(output_size) + '\n')
else:
fout.write(activation_text + ' ' + str(output_size) + ' ' + str(output_size) + '\n')
fout.close()
| apache-2.0 | 8,339,362,177,847,277,000 | -8,183,565,554,343,723,000 | 40.703571 | 136 | 0.553139 | false |