repo_name
stringlengths 5
100
| ref
stringlengths 12
67
| path
stringlengths 4
244
| copies
stringlengths 1
8
| content
stringlengths 0
1.05M
⌀ |
---|---|---|---|---|
flp9001/eveggie | refs/heads/master | eveggie/orders/apps.py | 2 | from django.apps import AppConfig
class OrdersConfig(AppConfig):
    """Django app configuration for the ``orders`` app."""
    # Dotted app label Django uses to locate the app package.
    name = 'orders'
    # Human-readable name shown in the Django admin.
    verbose_name = "Orders"
|
robhudson/kuma | refs/heads/master | vendor/packages/git/utils.py | 32 | # utils.py
# Copyright (C) 2008-2010 Michael Trier (mtrier@gmail.com) and contributors
#
# This module is part of GitPython and is released under
# the BSD License: http://www.opensource.org/licenses/bsd-license.php
import os
def dashify(string):
    """Return *string* with every underscore replaced by a dash."""
    return '-'.join(string.split('_'))
def touch(filename):
    """Ensure *filename* exists, creating an empty file if necessary.

    Opening in append mode creates the file without truncating existing
    content. The ``with`` statement guarantees the handle is closed even
    if ``open`` succeeds but a later error occurs (the original
    ``open``/``close`` pair leaked the handle on exception).
    """
    with open(filename, 'a'):
        pass
def is_git_dir(d):
    """This is taken from the git setup.c:is_git_directory
    function.

    A directory qualifies when it contains ``objects`` and ``refs``
    subdirectories and a ``HEAD`` that is either a regular file or a
    symlink pointing into ``refs``.
    """
    if not os.path.isdir(d):
        return False
    if not os.path.isdir(os.path.join(d, 'objects')):
        return False
    if not os.path.isdir(os.path.join(d, 'refs')):
        return False
    headref = os.path.join(d, 'HEAD')
    if os.path.isfile(headref):
        return True
    return os.path.islink(headref) and \
        os.readlink(headref).startswith('refs')
|
neversun/sailfish-hackernews | refs/heads/master | pyPackages/requests-noarch/requests/packages/chardet/compat.py | 2942 | ######################## BEGIN LICENSE BLOCK ########################
# Contributor(s):
# Ian Cordasco - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
import sys
# Tuple of the "string-like" types for the running interpreter, used by
# wrap_ord() below to decide whether ord() must be applied.
if sys.version_info < (3, 0):
    # Python 2: both byte strings and unicode strings.
    base_str = (str, unicode)
else:
    # Python 3: bytes and str.
    base_str = (bytes, str)
def wrap_ord(a):
    """Return the integer ordinal of *a* on Python 2; on Python 3 (or for
    non-string input) return *a* unchanged, since iterating bytes already
    yields ints there."""
    needs_ord = sys.version_info < (3, 0) and isinstance(a, base_str)
    return ord(a) if needs_ord else a
|
rubencabrera/odoo | refs/heads/8.0 | addons/web_tests/tests/test_ui.py | 175 | # -*- coding: utf-8 -*-
import os
import openerp.tests
class TestUi(openerp.tests.HttpCase):
    """Browser smoke tests executed through PhantomJS via HttpCase.phantom_js."""

    def test_03_js_public(self):
        # Load the public front page anonymously; the injected script logs
        # 'ok' immediately, and 'console' is the readiness expression.
        self.phantom_js('/',"console.log('ok')","console")

    def test_04_js_admin(self):
        # Load the backend logged in as admin and wait for the web client's
        # action manager widget to exist before logging 'ok'.
        self.phantom_js('/web',"console.log('ok')","openerp.client.action_manager.inner_widget", login='admin')
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
rafaelhenrique/flask-tornado-dispatcher | refs/heads/master | project_flask/__init__.py | 1 | from flask import Flask
from project_flask import config
from project_flask.first_app import first_app
from os import environ
def create_app(config=config.dev_config):
    """Application factory: build, configure and return the Flask app.

    NOTE(review): the ``config`` parameter shadows the imported ``config``
    module and is never used -- the configuration object is re-resolved
    from the MODE environment variable below. Confirm whether any caller
    relies on passing a config before cleaning this up.
    """
    # MODE selects the config class, e.g. 'dev' -> project_flask.config.dev_config.
    mode = environ.get('MODE', 'dev')
    app = Flask("project_flask")
    app.config.from_object(
        'project_flask.config.{0}_config'.format(mode.lower()))
    # Wire up the application's blueprints, error handlers and extensions.
    register_blueprints(app)
    register_errorhandlers(app)
    register_jinja_env(app)
    register_extensions(app)
    return app
def register_blueprints(app):
    """Mount the ``first_app`` blueprint under the /firstflask URL prefix."""
    app.register_blueprint(first_app, url_prefix='/firstflask')
def register_errorhandlers(app):
    """Register one plain-text handler for the common HTTP error codes.

    The same callback serves 400, 404 and 500; it maps the error code to
    a short message and echoes the code back as the HTTP status.
    """
    messages = {
        400: 'Bad request.',
        404: 'Not found.',
        500: 'Internal server error',
    }

    def render_error(e):
        if e.code in messages:
            return messages[e.code], e.code

    for code in (400, 404, 500):
        app.errorhandler(code)(render_error)
def register_jinja_env(app):
    """Placeholder hook: no custom Jinja globals or filters are registered yet."""
    pass
def register_extensions(app):
    """Placeholder hook: no Flask extensions are initialized yet."""
    pass
|
cyberplant/scrapy | refs/heads/master | scrapy/contrib/debug.py | 144 | import warnings
from scrapy.exceptions import ScrapyDeprecationWarning
warnings.warn("Module `scrapy.contrib.debug` is deprecated, "
"use `scrapy.extensions.debug` instead",
ScrapyDeprecationWarning, stacklevel=2)
from scrapy.extensions.debug import *
|
kvar/ansible | refs/heads/seas_master_2.9.5 | lib/ansible/modules/cloud/azure/azure_rm_acs.py | 27 | #!/usr/bin/python
# -*- coding: utf-8 -*
# Copyright: (c) 2017, Julien Stroheker <juliens@microsoft.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: azure_rm_acs
version_added: "2.4"
short_description: Manage an Azure Container Service(ACS) instance
description:
- Create, update and delete an Azure Container Service(ACS) instance.
options:
resource_group:
description:
- Name of a resource group where the Container Services exists or will be created.
required: true
name:
description:
- Name of the Azure Container Services(ACS) instance.
required: true
state:
description:
- Assert the state of the ACS. Use C(present) to create or update an ACS and C(absent) to delete it.
default: present
choices:
- absent
- present
location:
description:
- Valid azure location. Defaults to location of the resource group.
orchestration_platform:
description:
- Specifies the Container Orchestration Platform to use. Currently can be either C(DCOS), C(Kubernetes) or C(Swarm).
- The I(service_principal) must be defined if set to C(Kubernetes).
choices:
- 'DCOS'
- 'Kubernetes'
- 'Swarm'
required: true
master_profile:
description:
- Master profile suboptions.
required: true
suboptions:
count:
description:
- Number of masters (VMs) in the container service cluster. Allowed values are C(1), C(3), and C(5).
required: true
choices:
- 1
- 3
- 5
vm_size:
description:
- The VM Size of each of the Agent Pool VM's (e.g. C(Standard_F1) / C(Standard_D2v2)).
required: true
version_added: 2.5
dns_prefix:
description:
- The DNS Prefix to use for the Container Service master nodes.
required: true
linux_profile:
description:
- The Linux profile suboptions.
required: true
suboptions:
admin_username:
description:
- The Admin Username for the Cluster.
required: true
ssh_key:
description:
- The Public SSH Key used to access the cluster.
required: true
agent_pool_profiles:
description:
- The agent pool profile suboptions.
required: true
suboptions:
name:
description:
- Unique name of the agent pool profile in the context of the subscription and resource group.
required: true
count:
description:
- Number of agents (VMs) to host docker containers. Allowed values must be in the range of 1 to 100 (inclusive).
required: true
dns_prefix:
description:
- The DNS Prefix given to Agents in this Agent Pool.
required: true
vm_size:
description:
- The VM Size of each of the Agent Pool VM's (e.g. C(Standard_F1) / C(Standard_D2v2)).
required: true
service_principal:
description:
- The service principal suboptions.
- Required when I(orchestration_platform=Kubernetes).
suboptions:
client_id:
description:
- The ID for the Service Principal.
client_secret:
description:
- The secret password associated with the service principal.
diagnostics_profile:
description:
- Should VM Diagnostics be enabled for the Container Service VM's.
required: true
type: bool
extends_documentation_fragment:
- azure
- azure_tags
author:
- Julien Stroheker (@julienstroheker)
'''
EXAMPLES = '''
- name: Create an azure container services instance running Kubernetes
azure_rm_acs:
name: acctestcontservice1
location: eastus
resource_group: myResourceGroup
orchestration_platform: Kubernetes
master_profile:
- count: 3
dns_prefix: acsk8smasterdns
vm_size: Standard_D2_v2
linux_profile:
- admin_username: azureuser
ssh_key: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAA...
service_principal:
- client_id: "cf72ca99-f6b9-4004-b0e0-bee10c521948"
client_secret: "mySPNp@ssw0rd!"
agent_pool_profiles:
- name: default
count: 5
dns_prefix: acsk8sagent
vm_size: Standard_D2_v2
diagnostics_profile: false
tags:
Environment: Production
- name: Create an azure container services instance running DCOS
azure_rm_acs:
name: acctestcontservice2
location: eastus
resource_group: myResourceGroup
orchestration_platform: DCOS
master_profile:
- count: 3
dns_prefix: acsdcosmasterdns
vm_size: Standard_D2_v2
linux_profile:
- admin_username: azureuser
ssh_key: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAA...
agent_pool_profiles:
- name: default
count: 5
dns_prefix: acscdcosagent
vm_size: Standard_D2_v2
diagnostics_profile: false
tags:
Environment: Production
- name: Create an azure container services instance running Swarm
azure_rm_acs:
name: acctestcontservice3
location: eastus
resource_group: myResourceGroup
orchestration_platform: Swarm
master_profile:
- count: 3
dns_prefix: acsswarmmasterdns
vm_size: Standard_D2_v2
linux_profile:
- admin_username: azureuser
ssh_key: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAA...
agent_pool_profiles:
- name: default
count: 5
dns_prefix: acsswarmagent
vm_size: Standard_D2_v2
diagnostics_profile: false
tags:
Environment: Production
# Deletes the specified container service in the specified subscription and resource group.
# The operation does not delete other resources created as part of creating a container service,
# including storage accounts, VMs, and availability sets. All the other resources created with the container
# service are part of the same resource group and can be deleted individually.
- name: Remove an azure container services instance
azure_rm_acs:
name: acctestcontservice3
location: eastus
resource_group: myResourceGroup
state: absent
orchestration_platform: Swarm
master_profile:
- count: 1
vm_size: Standard_A0
dns_prefix: acstestingmasterdns5
linux_profile:
- admin_username: azureuser
ssh_key: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAA...
agent_pool_profiles:
- name: default
count: 4
dns_prefix: acctestagent15
vm_size: Standard_A0
diagnostics_profile: false
tags:
Ansible: azure_rm_acs
'''
RETURN = '''
state:
description: Current state of the Azure Container Service(ACS).
returned: always
type: dict
'''
from ansible.module_utils.azure_rm_common import AzureRMModuleBase
try:
from msrestazure.azure_exceptions import CloudError
from azure.mgmt.containerservice.models import (
ContainerService, ContainerServiceOrchestratorProfile, ContainerServiceCustomProfile,
ContainerServiceServicePrincipalProfile, ContainerServiceMasterProfile,
ContainerServiceAgentPoolProfile, ContainerServiceWindowsProfile,
ContainerServiceLinuxProfile, ContainerServiceSshConfiguration,
ContainerServiceDiagnosticsProfile, ContainerServiceSshPublicKey,
ContainerServiceVMDiagnostics
)
except ImportError:
# This is handled in azure_rm_common
pass
def create_agent_pool_profile_instance(agentpoolprofile):
    '''
    Helper method to serialize a dict to a ContainerServiceAgentPoolProfile
    :param: agentpoolprofile: dict with the parameters to setup the ContainerServiceAgentPoolProfile
    :return: ContainerServiceAgentPoolProfile
    '''
    # Pull exactly the fields the SDK model expects; a missing key raises
    # KeyError just like the original explicit indexing did.
    fields = ('name', 'count', 'dns_prefix', 'vm_size')
    kwargs = {field: agentpoolprofile[field] for field in fields}
    return ContainerServiceAgentPoolProfile(**kwargs)
def create_orch_platform_instance(orchestrator):
    '''
    Helper method to build a ContainerServiceOrchestratorProfile
    :param: orchestrator: orchestrator type name ('DCOS', 'Kubernetes' or 'Swarm')
    :return: ContainerServiceOrchestratorProfile
    '''
    return ContainerServiceOrchestratorProfile(
        orchestrator_type=orchestrator,
    )
def create_service_principal_profile_instance(spnprofile):
    '''
    Helper method to serialize a dict to a ContainerServiceServicePrincipalProfile
    :param: spnprofile: one-element list with a dict carrying 'client_id' and 'client_secret'
    :return: ContainerServiceServicePrincipalProfile
    '''
    # Only the first entry is used; the module accepts a single service principal.
    return ContainerServiceServicePrincipalProfile(
        client_id=spnprofile[0]['client_id'],
        secret=spnprofile[0]['client_secret']
    )
def create_linux_profile_instance(linuxprofile):
    '''
    Helper method to serialize a dict to a ContainerServiceLinuxProfile
    :param: linuxprofile: one-element list with a dict carrying 'admin_username' and 'ssh_key'
    :return: ContainerServiceLinuxProfile
    '''
    # Only the first entry is used; the module accepts a single Linux profile.
    return ContainerServiceLinuxProfile(
        admin_username=linuxprofile[0]['admin_username'],
        ssh=create_ssh_configuration_instance(linuxprofile[0]['ssh_key'])
    )
def create_ssh_configuration_instance(sshconf):
    '''
    Helper method to serialize a dict to a ContainerServiceSshConfiguration
    :param: sshconf: public SSH key material used to build the configuration
    :return: ContainerServiceSshConfiguration
    '''
    public_key = ContainerServiceSshPublicKey(key_data=str(sshconf))
    return ContainerServiceSshConfiguration(public_keys=[public_key])
def create_master_profile_instance(masterprofile):
    '''
    Helper method to serialize a dict to a ContainerServiceMasterProfile
    Note: first_consecutive_static_ip is specifically set to None, for Azure server doesn't accept
    request body with this property. This should be an inconsistency bug before Azure client SDK
    and Azure server.
    :param: masterprofile: one-element list with a dict carrying 'count', 'dns_prefix' and 'vm_size'
    :return: ContainerServiceMasterProfile
    '''
    return ContainerServiceMasterProfile(
        count=masterprofile[0]['count'],
        dns_prefix=masterprofile[0]['dns_prefix'],
        vm_size=masterprofile[0]['vm_size'],
        # Explicitly None -- see the note above about the Azure API rejecting this field.
        first_consecutive_static_ip=None
    )
def create_diagnostics_profile_instance(diagprofile):
    '''
    Helper method to serialize a dict to a ContainerServiceDiagnosticsProfile
    :param: diagprofile: boolean flag enabling/disabling VM diagnostics
    :return: ContainerServiceDiagnosticsProfile
    '''
    return ContainerServiceDiagnosticsProfile(
        vm_diagnostics=create_vm_diagnostics_instance(diagprofile)
    )
def create_vm_diagnostics_instance(vmdiag):
    '''
    Helper method to serialize a dict to a ContainerServiceVMDiagnostics
    :param: vmdiag: boolean flag stored as the 'enabled' attribute
    :return: ContainerServiceVMDiagnostics
    '''
    return ContainerServiceVMDiagnostics(
        enabled=vmdiag
    )
def create_acs_dict(acs):
    '''
    Helper method to deserialize a ContainerService to a dict
    :param: acs: ContainerService or AzureOperationPoller with the Azure callback object
    :return: dict with the state on Azure
    '''
    service_principal_profile_dict = None
    # Only Kubernetes clusters carry a service principal profile; other
    # orchestrators leave it as None in the result.
    if acs.orchestrator_profile.orchestrator_type == 'Kubernetes':
        service_principal_profile_dict = create_service_principal_profile_dict(acs.service_principal_profile)

    return dict(
        id=acs.id,
        name=acs.name,
        location=acs.location,
        tags=acs.tags,
        orchestrator_profile=create_orchestrator_profile_dict(acs.orchestrator_profile),
        master_profile=create_master_profile_dict(acs.master_profile),
        linux_profile=create_linux_profile_dict(acs.linux_profile),
        service_principal_profile=service_principal_profile_dict,
        diagnostics_profile=create_diagnotstics_profile_dict(acs.diagnostics_profile),
        provisioning_state=acs.provisioning_state,
        agent_pool_profiles=create_agent_pool_profiles_dict(acs.agent_pool_profiles),
        type=acs.type
    )
def create_linux_profile_dict(linuxprofile):
    '''
    Helper method to deserialize a ContainerServiceLinuxProfile to a dict
    :param: linuxprofile: ContainerServiceLinuxProfile with the Azure callback object
    :return: dict with the state on Azure
    '''
    # Only the first public key is reported, matching what the module pushes.
    first_key = linuxprofile.ssh.public_keys[0]
    return {
        'ssh_key': first_key.key_data,
        'admin_username': linuxprofile.admin_username,
    }
def create_master_profile_dict(masterprofile):
    '''
    Helper method to deserialize a ContainerServiceMasterProfile to a dict
    :param: masterprofile: ContainerServiceMasterProfile with the Azure callback object
    :return: dict with the state on Azure
    '''
    return {
        'count': masterprofile.count,
        'fqdn': masterprofile.fqdn,
        'vm_size': masterprofile.vm_size,
        'dns_prefix': masterprofile.dns_prefix,
    }
def create_service_principal_profile_dict(serviceprincipalprofile):
    '''
    Helper method to deserialize a ContainerServiceServicePrincipalProfile to a dict
    Note: For security reason, the service principal secret is skipped on purpose.
    :param: serviceprincipalprofile: ContainerServiceServicePrincipalProfile with the Azure callback object
    :return: dict with the state on Azure
    '''
    return dict(
        client_id=serviceprincipalprofile.client_id
    )
def create_diagnotstics_profile_dict(diagnosticsprofile):
    '''
    Helper method to deserialize a ContainerServiceVMDiagnostics to a dict
    NOTE(review): the function name misspells "diagnostics"; it is kept as-is
    because create_acs_dict() calls it by this exact name.
    :param: diagnosticsprofile: ContainerServiceVMDiagnostics with the Azure callback object
    :return: dict with the state on Azure
    '''
    return dict(
        vm_diagnostics=diagnosticsprofile.vm_diagnostics.enabled
    )
def create_orchestrator_profile_dict(orchestratorprofile):
    '''
    Helper method to deserialize a ContainerServiceOrchestratorProfile to a dict
    :param: orchestratorprofile: ContainerServiceOrchestratorProfile with the Azure callback object
    :return: dict with the state on Azure
    '''
    # str() normalizes the SDK's enum-like value to a plain string.
    return dict(
        orchestrator_type=str(orchestratorprofile.orchestrator_type)
    )
def create_agent_pool_profiles_dict(agentpoolprofiles):
    '''
    Helper method to deserialize a list of ContainerServiceAgentPoolProfile to dicts
    :param: agentpoolprofiles: iterable of ContainerServiceAgentPoolProfile objects
    :return: list of dicts with the state on Azure
    '''
    return [
        {
            'count': pool.count,
            'vm_size': pool.vm_size,
            'name': pool.name,
            'dns_prefix': pool.dns_prefix,
            'fqdn': pool.fqdn,
        }
        for pool in agentpoolprofiles
    ]
class AzureRMContainerService(AzureRMModuleBase):
    """Configuration class for an Azure RM container service resource.

    Implements the create/update/delete lifecycle for an ACS cluster:
    exec_module() validates input and decides whether a change is needed,
    then delegates to create_update_acs() / delete_acs().
    """

    def __init__(self):
        # Argument schema; mirrors the DOCUMENTATION block above.
        self.module_arg_spec = dict(
            resource_group=dict(
                type='str',
                required=True
            ),
            name=dict(
                type='str',
                required=True
            ),
            state=dict(
                type='str',
                default='present',
                choices=['present', 'absent']
            ),
            location=dict(
                type='str'
            ),
            orchestration_platform=dict(
                type='str',
                required=True,
                choices=['DCOS', 'Kubernetes', 'Swarm']
            ),
            master_profile=dict(
                type='list',
                required=True
            ),
            linux_profile=dict(
                type='list',
                required=True
            ),
            agent_pool_profiles=dict(
                type='list',
                required=True
            ),
            service_principal=dict(
                type='list'
            ),
            diagnostics_profile=dict(
                type='bool',
                required=True
            )
        )

        # Placeholders; filled from the validated kwargs in exec_module().
        self.resource_group = None
        self.name = None
        self.location = None
        self.tags = None
        self.state = None
        self.orchestration_platform = None
        self.master_profile = None
        self.linux_profile = None
        self.agent_pool_profiles = None
        self.service_principal = None
        self.diagnostics_profile = None

        self.results = dict(changed=False, state=dict())

        super(AzureRMContainerService, self).__init__(derived_arg_spec=self.module_arg_spec,
                                                      supports_check_mode=True,
                                                      supports_tags=True)

    def exec_module(self, **kwargs):
        """Main module execution method"""

        # Copy every declared argument (plus the common 'tags') onto self.
        for key in list(self.module_arg_spec.keys()) + ['tags']:
            setattr(self, key, kwargs[key])

        resource_group = None
        response = None
        results = dict()
        to_be_updated = False

        resource_group = self.get_resource_group(self.resource_group)

        # Default the location to the resource group's location.
        if not self.location:
            self.location = resource_group.location

        # Check if the ACS instance already present in the RG
        if self.state == 'present':

            # Kubernetes requires a service principal with both credentials.
            if self.orchestration_platform == 'Kubernetes':
                if not self.service_principal:
                    self.fail('service_principal should be specified when using Kubernetes')
                if not self.service_principal[0].get('client_id'):
                    self.fail('service_principal.client_id should be specified when using Kubernetes')
                if not self.service_principal[0].get('client_secret'):
                    self.fail('service_principal.client_secret should be specified when using Kubernetes')

            # ACS only supports 1, 3 or 5 masters.
            mastercount = self.master_profile[0].get('count')
            if mastercount != 1 and mastercount != 3 and mastercount != 5:
                self.fail('Master Count number wrong : {0} / should be 1 3 or 5'.format(mastercount))

            # For now Agent Pool cannot be more than 1, just remove this part in the future if it change
            # NOTE(review): this error message appears to be missing the word
            # "one" ("more than one agent_pool_profiles").
            agentpoolcount = len(self.agent_pool_profiles)
            if agentpoolcount > 1:
                self.fail('You cannot specify more than agent_pool_profiles')

            response = self.get_acs()
            self.results['state'] = response

            if not response:
                # No existing cluster: create it.
                to_be_updated = True

            else:
                self.log('Results : {0}'.format(response))
                update_tags, response['tags'] = self.update_tags(response['tags'])

                # Only attempt updates once the previous provisioning finished.
                if response['provisioning_state'] == "Succeeded":

                    if update_tags:
                        to_be_updated = True

                    def is_property_changed(profile, property, ignore_case=False):
                        # Compare the remote value with the first entry of the
                        # corresponding list argument on self.
                        # NOTE(review): the 'property' parameter shadows the builtin.
                        base = response[profile].get(property)
                        new = getattr(self, profile)[0].get(property)
                        if ignore_case:
                            return base.lower() != new.lower()
                        else:
                            return base != new

                    # Cannot Update the master count for now // Uncomment this block in the future to support it
                    if is_property_changed('master_profile', 'count'):
                        # self.log(("Master Profile Count Diff, Was {0} / Now {1}"
                        #           .format(response['master_profile'].count,
                        #                   self.master_profile[0].get('count'))))
                        # to_be_updated = True
                        self.module.warn("master_profile.count cannot be updated")

                    # Cannot Update the master vm_size for now. Could be a client SDK bug
                    # Uncomment this block in the future to support it
                    if is_property_changed('master_profile', 'vm_size', True):
                        # self.log(("Master Profile VM Size Diff, Was {0} / Now {1}"
                        #           .format(response['master_profile'].get('vm_size'),
                        #                   self.master_profile[0].get('vm_size'))))
                        # to_be_updated = True
                        self.module.warn("master_profile.vm_size cannot be updated")

                    # Cannot Update the SSH Key for now // Uncomment this block in the future to support it
                    if is_property_changed('linux_profile', 'ssh_key'):
                        # self.log(("Linux Profile Diff SSH, Was {0} / Now {1}"
                        #           .format(response['linux_profile'].ssh.public_keys[0].key_data,
                        #                   self.linux_profile[0].get('ssh_key'))))
                        # to_be_updated = True
                        self.module.warn("linux_profile.ssh_key cannot be updated")

                    # self.log("linux_profile response : {0}".format(response['linux_profile'].get('admin_username')))
                    # self.log("linux_profile self : {0}".format(self.linux_profile[0].get('admin_username')))
                    # Cannot Update the Username for now // Uncomment this block in the future to support it
                    if is_property_changed('linux_profile', 'admin_username'):
                        # self.log(("Linux Profile Diff User, Was {0} / Now {1}"
                        #           .format(response['linux_profile'].admin_username,
                        #                   self.linux_profile[0].get('admin_username'))))
                        # to_be_updated = True
                        self.module.warn("linux_profile.admin_username cannot be updated")

                    # Cannot have more that one agent pool profile for now // Uncomment this block in the future to support it
                    # if len(response['agent_pool_profiles']) != len(self.agent_pool_profiles):
                    #     self.log("Agent Pool count is diff, need to updated")
                    #     to_be_updated = True

                    # Agent pool count/vm_size changes ARE supported: flag an
                    # update when a matching pool differs, or when a requested
                    # pool does not exist remotely.
                    for profile_result in response['agent_pool_profiles']:
                        matched = False
                        for profile_self in self.agent_pool_profiles:
                            if profile_result['name'] == profile_self['name']:
                                matched = True
                                if profile_result['count'] != profile_self['count'] or profile_result['vm_size'] != \
                                        profile_self['vm_size']:
                                    self.log(("Agent Profile Diff - Count was {0} / Now {1} - Vm_size was {2} / Now {3}"
                                              .format(profile_result['count'], profile_self['count'],
                                                      profile_result['vm_size'], profile_self['vm_size'])))
                                    to_be_updated = True
                        if not matched:
                            self.log("Agent Pool not found")
                            to_be_updated = True

            if to_be_updated:
                self.log("Need to Create / Update the ACS instance")

                # Check mode: report the would-be change without touching Azure.
                if self.check_mode:
                    return self.results

                self.results['state'] = self.create_update_acs()
                self.results['changed'] = True

                self.log("Creation / Update done")
        elif self.state == 'absent':
            if self.check_mode:
                return self.results
            self.delete_acs()
            self.log("ACS instance deleted")

        return self.results

    def create_update_acs(self):
        '''
        Creates or updates a container service with the specified configuration of orchestrator, masters, and agents.

        :return: deserialized ACS instance state dictionary
        '''
        self.log("Creating / Updating the ACS instance {0}".format(self.name))

        service_principal_profile = None
        agentpools = []

        if self.agent_pool_profiles:
            for profile in self.agent_pool_profiles:
                self.log("Trying to push the following Profile {0}".format(profile))
                agentpools.append(create_agent_pool_profile_instance(profile))

        # Only Kubernetes needs a service principal profile.
        if self.orchestration_platform == 'Kubernetes':
            service_principal_profile = create_service_principal_profile_instance(self.service_principal)

        parameters = ContainerService(
            location=self.location,
            tags=self.tags,
            orchestrator_profile=create_orch_platform_instance(self.orchestration_platform),
            service_principal_profile=service_principal_profile,
            linux_profile=create_linux_profile_instance(self.linux_profile),
            master_profile=create_master_profile_instance(self.master_profile),
            agent_pool_profiles=agentpools,
            diagnostics_profile=create_diagnostics_profile_instance(self.diagnostics_profile)
        )

        # self.log("orchestrator_profile : {0}".format(parameters.orchestrator_profile))
        # self.log("service_principal_profile : {0}".format(parameters.service_principal_profile))
        # self.log("linux_profile : {0}".format(parameters.linux_profile))
        # self.log("ssh from yaml : {0}".format(results.get('linux_profile')[0]))
        # self.log("ssh : {0}".format(parameters.linux_profile.ssh))
        # self.log("master_profile : {0}".format(parameters.master_profile))
        # self.log("agent_pool_profiles : {0}".format(parameters.agent_pool_profiles))
        # self.log("vm_diagnostics : {0}".format(parameters.diagnostics_profile.vm_diagnostics))

        try:
            # Long-running operation: wait for the poller to finish.
            poller = self.containerservice_client.container_services.create_or_update(self.resource_group, self.name,
                                                                                      parameters)
            response = self.get_poller_result(poller)
        except CloudError as exc:
            self.log('Error attempting to create the ACS instance.')
            self.fail("Error creating the ACS instance: {0}".format(str(exc)))
        return create_acs_dict(response)

    def delete_acs(self):
        '''
        Deletes the specified container service in the specified subscription and resource group.
        The operation does not delete other resources created as part of creating a container service,
        including storage accounts, VMs, and availability sets.
        All the other resources created with the container service are part of the same resource group and can be deleted individually.

        :return: True
        '''
        self.log("Deleting the ACS instance {0}".format(self.name))
        try:
            poller = self.containerservice_client.container_services.delete(self.resource_group, self.name)
            self.get_poller_result(poller)
        except CloudError as e:
            self.log('Error attempting to delete the ACS instance.')
            self.fail("Error deleting the ACS instance: {0}".format(str(e)))
        return True

    def get_acs(self):
        '''
        Gets the properties of the specified container service.

        :return: deserialized ACS instance state dictionary, or False when not found
        '''
        self.log("Checking if the ACS instance {0} is present".format(self.name))
        found = False
        try:
            response = self.containerservice_client.container_services.get(self.resource_group, self.name)
            found = True
            self.log("Response : {0}".format(response))
            self.log("ACS instance : {0} found".format(response.name))
        except CloudError as e:
            # A missing cluster surfaces as CloudError; treat it as "not found".
            self.log('Did not find the ACS instance.')
        if found is True:
            return create_acs_dict(response)
        else:
            return False
def main():
    """Main execution"""
    # Instantiating the module class drives the whole run: the base class
    # parses arguments and calls exec_module().
    AzureRMContainerService()


if __name__ == '__main__':
    main()
|
spinellic/Mission-Planner | refs/heads/master | Lib/site-packages/scipy/misc/__init__.py | 55 | from info import __doc__
__all__ = ['who', 'source', 'info']
from common import *
from numpy import who, source, info as _info
import sys
def info(object=None,maxwidth=76,output=sys.stdout,toplevel='scipy'):
    # Thin wrapper around numpy's info() with 'scipy' as the default
    # top-level package; the real docstring is copied from _info below.
    return _info(object, maxwidth, output, toplevel)
info.__doc__ = _info.__doc__
del sys
try:
from pilutil import *
__all__ += pilutil.__all__
except ImportError:
pass
__all__ += common.__all__
from numpy.testing import Tester
test = Tester().test
|
MackZxh/OCA-Choice | refs/heads/8.0 | hr/hr_expense_sequence/__openerp__.py | 13 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Odoo Source Management Solution
# Copyright (c) 2014 Serv. Tecnol. Avanzados (http://www.serviciosbaeza.com)
# Pedro M. Baeza <pedro.baeza@serviciosbaeza.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# Odoo addon manifest: metadata, dependencies and data files for the module.
{
    'name': 'HR expense sequence',
    'version': '8.0.1.0.0',
    'category': 'HR',
    'author': "Serv. Tecnol. Avanzados - Pedro M. Baeza,"
              "Odoo Community Association (OCA)",
    'website': 'http://www.serviciosbaeza.com',
    'depends': [
        'hr_expense',
    ],
    'data': [
        'data/hr_expense_data.xml',
        'views/hr_expense_expense_view.xml',
    ],
    "installable": True,
    # Hook run after install to backfill sequence numbers on existing expenses.
    "post_init_hook": "assign_old_sequences",
}
|
jwren/intellij-community | refs/heads/master | python/testData/completion/heavyStarPropagation/lib/_pkg0/_pkg0_1/_pkg0_1_0/_pkg0_1_0_0/_pkg0_1_0_0_0/__init__.py | 30 | from ._mod0_1_0_0_0_0 import *
from ._mod0_1_0_0_0_1 import *
from ._mod0_1_0_0_0_2 import *
from ._mod0_1_0_0_0_3 import *
from ._mod0_1_0_0_0_4 import * |
aristanetworks/arista-ovs-nova | refs/heads/master | nova/scheduler/filters/all_hosts_filter.py | 11 | # Copyright (c) 2011-2012 OpenStack, LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.scheduler import filters
class AllHostsFilter(filters.BaseHostFilter):
    """NOOP host filter. Returns all hosts."""

    def host_passes(self, host_state, filter_properties):
        # Unconditionally accept every host; acts as a pass-through filter.
        return True
|
colin2k/VVS | refs/heads/master | lib/werkzeug/contrib/sessions.py | 315 | # -*- coding: utf-8 -*-
r"""
werkzeug.contrib.sessions
~~~~~~~~~~~~~~~~~~~~~~~~~
This module contains some helper classes that help one to add session
support to a python WSGI application. For full client-side session
storage see :mod:`~werkzeug.contrib.securecookie` which implements a
secure, client-side session storage.
Application Integration
=======================
::
from werkzeug.contrib.sessions import SessionMiddleware, \
FilesystemSessionStore
app = SessionMiddleware(app, FilesystemSessionStore())
The current session will then appear in the WSGI environment as
`werkzeug.session`. However it's recommended to not use the middleware
but the stores directly in the application. However for very simple
scripts a middleware for sessions could be sufficient.
This module does not implement methods or ways to check if a session is
expired. That should be done by a cronjob and storage specific. For
example to prune unused filesystem sessions one could check the modified
time of the files. It sessions are stored in the database the new()
method should add an expiration timestamp for the session.
For better flexibility it's recommended to not use the middleware but the
store and session object directly in the application dispatching::
session_store = FilesystemSessionStore()
def application(environ, start_response):
request = Request(environ)
sid = request.cookies.get('cookie_name')
if sid is None:
request.session = session_store.new()
else:
request.session = session_store.get(sid)
response = get_the_response_object(request)
if request.session.should_save:
session_store.save(request.session)
response.set_cookie('cookie_name', request.session.sid)
return response(environ, start_response)
:copyright: (c) 2013 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import re
import os
import sys
import tempfile
from os import path
from time import time
from random import random
from hashlib import sha1
from pickle import dump, load, HIGHEST_PROTOCOL
from werkzeug.datastructures import CallbackDict
from werkzeug.utils import dump_cookie, parse_cookie
from werkzeug.wsgi import ClosingIterator
from werkzeug.posixemulation import rename
from werkzeug._compat import PY2, text_type
_sha1_re = re.compile(r'^[a-f0-9]{40}$')
def _urandom():
if hasattr(os, 'urandom'):
return os.urandom(30)
return random()
def generate_key(salt=None):
    """Generate a new, hard-to-guess 40-character hex session key.

    :param salt: optional extra entropy mixed into the hash.  May be
        bytes, text, or any object.  Bug fix: previously only ``None``
        was converted to bytes, so passing a text salt raised a
        ``TypeError`` inside the ``b''.join`` below.
    """
    if salt is None:
        salt = repr(salt).encode('ascii')
    elif not isinstance(salt, bytes):
        if hasattr(salt, 'encode'):
            # Text salt: encode so it can be joined with the other bytes.
            salt = salt.encode('utf-8')
        else:
            # Arbitrary object: fall back to its repr.
            salt = repr(salt).encode('ascii')
    return sha1(b''.join([
        salt,
        str(time()).encode('ascii'),
        _urandom()
    ])).hexdigest()
class ModificationTrackingDict(CallbackDict):
    """A dict subclass that records whether it was directly modified.

    Any direct item assignment or deletion flips :attr:`modified` to
    ``True`` via the :class:`CallbackDict` ``on_update`` hook.  Changes
    *inside* mutable values are not detected.
    """
    __slots__ = ('modified',)

    def __init__(self, *args, **kwargs):
        def on_update(self):
            # Invoked by CallbackDict on every direct mutation.
            self.modified = True
        self.modified = False
        CallbackDict.__init__(self, on_update=on_update)
        # Fill via dict.update directly so that the initial population
        # does not count as a modification.
        dict.update(self, *args, **kwargs)

    def copy(self):
        """Create a flat copy of the dict."""
        # NOTE(review): only the slot attributes are copied here; the
        # mapping contents and the on_update hook are not — confirm
        # against upstream werkzeug before relying on copies.
        missing = object()
        result = object.__new__(self.__class__)
        for name in self.__slots__:
            val = getattr(self, name, missing)
            if val is not missing:
                setattr(result, name, val)
        return result

    def __copy__(self):
        return self.copy()
class Session(ModificationTrackingDict):
    """A modification-tracked dict bound to a session id.

    Only direct key changes are tracked; mutations inside stored values
    must be signalled by setting ``modified`` to ``True`` by hand.
    """
    __slots__ = ModificationTrackingDict.__slots__ + ('sid', 'new')

    def __init__(self, data, sid, new=False):
        ModificationTrackingDict.__init__(self, data)
        self.sid = sid
        self.new = new

    def __repr__(self):
        dirty_marker = '*' if self.should_save else ''
        return '<%s %s%s>' % (type(self).__name__,
                              dict.__repr__(self),
                              dirty_marker)

    @property
    def should_save(self):
        """True if the session should be saved.

        .. versionchanged:: 0.6
           By default the session is now only saved if the session is
           modified, not if it is new like it was before.
        """
        return self.modified
class SessionStore(object):
    """Baseclass for all session stores.

    The Werkzeug contrib module only ships a filesystem store;
    application developers are encouraged to write their own.

    :param session_class: the session class to use, :class:`Session`
                          by default.
    """

    def __init__(self, session_class=None):
        self.session_class = Session if session_class is None else session_class

    def is_valid_key(self, key):
        """Check if a key has the correct format."""
        return _sha1_re.match(key) is not None

    def generate_key(self, salt=None):
        """Return a freshly generated session key."""
        return generate_key(salt)

    def new(self):
        """Create and return a brand-new session object."""
        return self.session_class({}, self.generate_key(), True)

    def save(self, session):
        """Persist a session.  No-op in the base class."""

    def save_if_modified(self, session):
        """Persist the session only when it reports pending changes."""
        if session.should_save:
            self.save(session)

    def delete(self, session):
        """Remove a session.  No-op in the base class."""

    def get(self, sid):
        """Return a session for this sid or a new session object.

        Subclasses must validate the key and fall back to a new session
        when the sid is unknown or malformed.
        """
        return self.session_class({}, sid, True)
#: used for temporary files by the filesystem session store
_fs_transaction_suffix = '.__wz_sess'
class FilesystemSessionStore(SessionStore):
    """Simple example session store that saves sessions on the filesystem.

    This store works best on POSIX systems and Windows Vista / Windows
    Server 2008 and newer.

    .. versionchanged:: 0.6
       `renew_missing` was added.  Previously this was considered `True`,
       now the default changed to `False` and it can be explicitly
       deactivated.

    :param path: the path to the folder used for storing the sessions.
                 If not provided the default temporary directory is used.
    :param filename_template: a string template used to give the session
                              a filename.  ``%s`` is replaced with the
                              session id.
    :param session_class: The session class to use.  Defaults to
                          :class:`Session`.
    :param renew_missing: set to `True` if you want the store to
                          give the user a new sid if the session was
                          not yet saved.
    :param mode: permission bits applied to saved session files.
    """

    def __init__(self, path=None, filename_template='werkzeug_%s.sess',
                 session_class=None, renew_missing=False, mode=0o644):
        SessionStore.__init__(self, session_class)
        # Fall back to the system temporary directory when no path given.
        if path is None:
            path = tempfile.gettempdir()
        self.path = path
        # On Python 2 the template must be a bytestring in the filesystem
        # encoding so path joins do not mix text and bytes.
        if isinstance(filename_template, text_type) and PY2:
            filename_template = filename_template.encode(
                sys.getfilesystemencoding() or 'utf-8')
        # A template ending in the transaction suffix would make list()
        # mistake half-written temp files for real sessions.
        assert not filename_template.endswith(_fs_transaction_suffix), \
            'filename templates may not end with %s' % _fs_transaction_suffix
        self.filename_template = filename_template
        self.renew_missing = renew_missing
        self.mode = mode

    def get_session_filename(self, sid):
        """Return the absolute path of the file backing session *sid*."""
        # out of the box, this should be a strict ASCII subset but
        # you might reconfigure the session object to have a more
        # arbitrary string.
        if isinstance(sid, text_type) and PY2:
            sid = sid.encode(sys.getfilesystemencoding() or 'utf-8')
        return path.join(self.path, self.filename_template % sid)

    def save(self, session):
        """Atomically write the session: dump to a temp file, then rename."""
        fn = self.get_session_filename(session.sid)
        fd, tmp = tempfile.mkstemp(suffix=_fs_transaction_suffix,
                                   dir=self.path)
        f = os.fdopen(fd, 'wb')
        try:
            dump(dict(session), f, HIGHEST_PROTOCOL)
        finally:
            f.close()
        try:
            # rename() is the posixemulation helper, atomic where possible.
            rename(tmp, fn)
            os.chmod(fn, self.mode)
        except (IOError, OSError):
            # Best-effort: a failed rename/chmod silently loses the save.
            pass

    def delete(self, session):
        """Remove the session file; missing files are ignored."""
        fn = self.get_session_filename(session.sid)
        try:
            os.unlink(fn)
        except OSError:
            pass

    def get(self, sid):
        """Load the session *sid*, or return a fresh/empty one.

        Invalid keys always yield a new session.  A missing file yields a
        new session when ``renew_missing`` is set, otherwise an empty
        session that keeps the requested sid.  Unreadable pickles are
        treated as empty.
        """
        if not self.is_valid_key(sid):
            return self.new()
        try:
            f = open(self.get_session_filename(sid), 'rb')
        except IOError:
            if self.renew_missing:
                return self.new()
            data = {}
        else:
            try:
                try:
                    data = load(f)
                except Exception:
                    data = {}
            finally:
                f.close()
        return self.session_class(data, sid, False)

    def list(self):
        """Lists all sessions in the store.

        .. versionadded:: 0.6
        """
        # Derive a regex from the filename template to recover sids.
        before, after = self.filename_template.split('%s', 1)
        filename_re = re.compile(r'%s(.{5,})%s$' % (re.escape(before),
                                                    re.escape(after)))
        result = []
        for filename in os.listdir(self.path):
            #: this is a session that is still being saved.
            if filename.endswith(_fs_transaction_suffix):
                continue
            match = filename_re.match(filename)
            if match is not None:
                result.append(match.group(1))
        return result
class SessionMiddleware(object):
    """A simple middleware that puts the session object of a store provided
    into the WSGI environ.  It automatically sets cookies and restores
    sessions.

    However a middleware is not the preferred solution because it won't be as
    fast as sessions managed by the application itself and will put a key into
    the WSGI environment only relevant for the application which is against
    the concept of WSGI.

    The cookie parameters are the same as for the :func:`~dump_cookie`
    function just prefixed with ``cookie_``.  Additionally `max_age` is
    called `cookie_age` and not `cookie_max_age` because of backwards
    compatibility.
    """

    def __init__(self, app, store, cookie_name='session_id',
                 cookie_age=None, cookie_expires=None, cookie_path='/',
                 cookie_domain=None, cookie_secure=None,
                 cookie_httponly=False, environ_key='werkzeug.session'):
        self.app = app
        self.store = store
        self.cookie_name = cookie_name
        self.cookie_age = cookie_age
        self.cookie_expires = cookie_expires
        self.cookie_path = cookie_path
        self.cookie_domain = cookie_domain
        self.cookie_secure = cookie_secure
        self.cookie_httponly = cookie_httponly
        self.environ_key = environ_key

    def __call__(self, environ, start_response):
        # Restore the session from the request cookie, or start a new one.
        cookie = parse_cookie(environ.get('HTTP_COOKIE', ''))
        sid = cookie.get(self.cookie_name, None)
        if sid is None:
            session = self.store.new()
        else:
            session = self.store.get(sid)
        environ[self.environ_key] = session

        def injecting_start_response(status, headers, exc_info=None):
            # Persist the session and attach the Set-Cookie header before
            # the response headers are sent to the client.
            if session.should_save:
                self.store.save(session)
                headers.append(('Set-Cookie', dump_cookie(self.cookie_name,
                               session.sid, self.cookie_age,
                               self.cookie_expires, self.cookie_path,
                               self.cookie_domain, self.cookie_secure,
                               self.cookie_httponly)))
            return start_response(status, headers, exc_info)
        # The ClosingIterator callback saves once more when the response
        # body finishes iterating, catching modifications made after the
        # headers went out (no cookie can be set at that point, though).
        return ClosingIterator(self.app(environ, injecting_start_response),
                               lambda: self.store.save_if_modified(session))
|
portnov/sverchok | refs/heads/master | nodes/list_struct/index_to_mask.py | 3 | # ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
import bpy
import numpy as np
from bpy.props import IntProperty, BoolProperty
from sverchok.node_tree import SverchCustomTreeNode
from sverchok.data_structure import (updateNode, second_as_first_cycle as safc)
class SvIndexToMaskNode(bpy.types.Node, SverchCustomTreeNode):
    ''' Create mask list from index '''
    bl_idname = 'SvIndexToMaskNode'
    bl_label = 'Index To Mask'
    bl_icon = 'OUTLINER_OB_EMPTY'

    # Length of the generated mask when it is not derived from input data.
    ML = IntProperty(name='Mask Length', default=10, min=2, update=updateNode)

    def update_mode(self, context):
        # Only one of the two length sources is visible at a time.
        self.inputs['mask size'].hide_safe = self.data_to_mask
        self.inputs['data to mask'].hide_safe = not self.data_to_mask
        updateNode(self, context)

    data_to_mask = BoolProperty(name = "data masking",
                                description = "Use data to define mask length",
                                default = False,
                                update=update_mode)

    complex_data = BoolProperty(name = "topo mask",
                                description = "data consists of verts or polygons\edges. Otherwise the two vertices will be masked as [[[T, T, T], [F, F, F]]] instead of [[T, F]]",
                                default = False,
                                update=update_mode)

    def draw_buttons(self, context, layout):
        col = layout.column(align=True)
        col.prop(self, "data_to_mask", toggle=True)
        if self.data_to_mask:
            col.prop(self, "complex_data", toggle=True)

    def sv_init(self, context):
        self.inputs.new('StringsSocket', 'Index')
        self.inputs.new('StringsSocket', 'mask size').prop_name = "ML"
        self.inputs.new('StringsSocket', 'data to mask').hide_safe = True
        self.outputs.new('StringsSocket', 'mask')

    def process(self):
        """Build boolean masks: True at the given indices, False elsewhere."""
        Inds, MaSi, Dat = self.inputs
        OM = self.outputs[0]
        if OM.is_linked:
            out = []
            I = Inds.sv_get()
            if not self.data_to_mask:
                # Mask length comes from the 'mask size' socket.
                for Ind, Size in zip(I, safc(I, MaSi.sv_get()[0])):
                    # Bug fix: np.bool (deprecated alias) was removed in
                    # NumPy >= 1.24; the builtin bool is the equivalent dtype.
                    Ma = np.zeros(Size, dtype=bool)
                    Ma[Ind] = 1
                    out.append(Ma.tolist())
            else:
                # Mask shape comes from the 'data to mask' socket.
                Ma = np.zeros_like(Dat.sv_get(), dtype=bool)
                if not self.complex_data:
                    for m, i in zip(Ma, safc(Ma, I)):
                        m[i] = 1
                        out.append(m.tolist())
                else:
                    # Topological data: one flag per vert/edge/polygon, so
                    # collapse the trailing component axis.
                    for m, i in zip(Ma, safc(Ma, I)):
                        m[i] = 1
                        out.append(m[:, 0].tolist())
            OM.sv_set(out)
def register():
    # Register the node class with Blender so Sverchok can instantiate it.
    bpy.utils.register_class(SvIndexToMaskNode)
def unregister():
    # Remove the node class from Blender on add-on unload/reload.
    bpy.utils.unregister_class(SvIndexToMaskNode)
|
barbarubra/Don-t-know-What-i-m-doing. | refs/heads/master | python/src/Lib/encodings/euc_jisx0213.py | 816 | #
# euc_jisx0213.py: Python Unicode Codec for EUC_JISX0213
#
# Written by Hye-Shik Chang <perky@FreeBSD.org>
#
import _codecs_jp, codecs
import _multibytecodec as mbc
codec = _codecs_jp.getcodec('euc_jisx0213')
class Codec(codecs.Codec):
    # Stateless encode/decode delegated to the C-level euc_jisx0213 codec.
    encode = codec.encode
    decode = codec.decode
class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
                         codecs.IncrementalEncoder):
    # Chunk-by-chunk encoder; state handling comes from _multibytecodec.
    codec = codec
class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
                         codecs.IncrementalDecoder):
    # Chunk-by-chunk decoder; state handling comes from _multibytecodec.
    codec = codec
class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
    # File-like reading wrapper around the multibyte codec.
    codec = codec
class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
    # File-like writing wrapper around the multibyte codec.
    codec = codec
def getregentry():
    # Registry entry point used by encodings.search_function to expose
    # this codec under the name 'euc_jisx0213'.
    return codecs.CodecInfo(
        name='euc_jisx0213',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
|
tlk-emb/SWORDS | refs/heads/master | python/generatehlstcl.py | 1 | # -*- coding: utf-8 -*-
import argparse
import os
import sys

from jinja2 import Template, Environment, FileSystemLoader

from analyzer.jsonparam import TasksConfig
args = sys.argv
def main():
    """Parse the command line, read the JSON config, and emit the
    ``<project_name>_hls.tcl`` script.  Returns 1 when the configuration
    cannot be parsed.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("cfile_name")
    parser.add_argument("json_file_name")
    parser.add_argument("project_name")
    parser.add_argument("toolchain_path")
    args = parser.parse_args()

    cfile_name = args.cfile_name
    json_file_name = args.json_file_name
    project_name = args.project_name
    toolchain_path = args.toolchain_path

    # Load the HLS settings from the JSON configuration file.
    # (Translated from the original garbled/mojibake Japanese comment.)
    config = TasksConfig.parse_config(json_file_name)
    if config is None:
        return 1

    function_name = config.hw_funcname(config)
    vendor_name = config.vendorname(config)
    board_name = config.boardname(config)

    tclfile_name = project_name + "_hls.tcl"
    # Bug fix: the output file was opened but never closed; use a context
    # manager so the script is flushed to disk even on error.
    with open(tclfile_name, "w") as tclfile:
        tclfile.write(generatehlstcl(cfile_name, project_name, function_name,
                                     vendor_name, board_name, toolchain_path))
def generatehlstcl(cfile_name, project_name, function_name, vendor_name, board_name, toolchain_path):
    """Render the vendor-specific ``hls.tcl`` Jinja2 template.

    :param toolchain_path: root of the toolchain; templates live under
        ``<toolchain_path>/template/<vendor_name>/hls.tcl``.
    :return: the rendered Tcl script as a string.
    """
    # Portability fix: the template directory was built with hard-coded
    # Windows '\\' separators and required a trailing separator on
    # toolchain_path.  os.path.join handles both concerns on any OS.
    template_dir = os.path.join(toolchain_path, 'template', vendor_name)
    env = Environment(loader=FileSystemLoader(template_dir))
    template = env.get_template('hls.tcl')
    data = {'cfilename': cfile_name, 'projname': project_name,
            'funcname': function_name, 'boardname': board_name}
    return template.render(data)
if __name__ == "__main__":
sys.exit(main())
|
flakey-bit/plugin.audio.spotify | refs/heads/master | resources/libs/cherrypy/lib/static.py | 19 | import os
import re
import stat
import mimetypes
try:
from io import UnsupportedOperation
except ImportError:
UnsupportedOperation = object()
import cherrypy
from cherrypy._cpcompat import ntob, unquote
from cherrypy.lib import cptools, httputil, file_generator_limited
mimetypes.init()
mimetypes.types_map['.dwg'] = 'image/x-dwg'
mimetypes.types_map['.ico'] = 'image/x-icon'
mimetypes.types_map['.bz2'] = 'application/x-bzip2'
mimetypes.types_map['.gz'] = 'application/x-gzip'
def serve_file(path, content_type=None, disposition=None, name=None,
               debug=False):
    """Set status, headers, and body in order to serve the given path.

    The Content-Type header will be set to the content_type arg, if provided.
    If not provided, the Content-Type will be guessed by the file extension
    of the 'path' argument.

    If disposition is not None, the Content-Disposition header will be set
    to "<disposition>; filename=<name>".  If name is None, it will be set
    to the basename of path.  If disposition is None, no Content-Disposition
    header will be written.

    Raises ValueError for relative paths and cherrypy.NotFound for missing
    files and directories.
    """
    response = cherrypy.serving.response

    # If path is relative, users should fix it by making path absolute.
    # That is, CherryPy should not guess where the application root is.
    # It certainly should *not* use cwd (since CP may be invoked from a
    # variety of paths). If using tools.staticdir, you can make your relative
    # paths become absolute by supplying a value for "tools.staticdir.root".
    if not os.path.isabs(path):
        msg = "'%s' is not an absolute path." % path
        if debug:
            cherrypy.log(msg, 'TOOLS.STATICFILE')
        raise ValueError(msg)

    try:
        st = os.stat(path)
    except (OSError, TypeError, ValueError):
        # OSError when file fails to stat
        # TypeError on Python 2 when there's a null byte
        # ValueError on Python 3 when there's a null byte
        if debug:
            cherrypy.log('os.stat(%r) failed' % path, 'TOOLS.STATIC')
        raise cherrypy.NotFound()

    # Check if path is a directory.
    if stat.S_ISDIR(st.st_mode):
        # Let the caller deal with it as they like.
        if debug:
            cherrypy.log('%r is a directory' % path, 'TOOLS.STATIC')
        raise cherrypy.NotFound()

    # Set the Last-Modified response header, so that
    # modified-since validation code can work.
    response.headers['Last-Modified'] = httputil.HTTPDate(st.st_mtime)
    # May short-circuit the request with "304 Not Modified".
    cptools.validate_since()

    if content_type is None:
        # Set content-type based on filename extension
        ext = ""
        i = path.rfind('.')
        if i != -1:
            ext = path[i:].lower()
        content_type = mimetypes.types_map.get(ext, None)
    if content_type is not None:
        response.headers['Content-Type'] = content_type
    if debug:
        cherrypy.log('Content-Type: %r' % content_type, 'TOOLS.STATIC')

    cd = None
    if disposition is not None:
        if name is None:
            name = os.path.basename(path)
        cd = '%s; filename="%s"' % (disposition, name)
        response.headers["Content-Disposition"] = cd
    if debug:
        cherrypy.log('Content-Disposition: %r' % cd, 'TOOLS.STATIC')

    # Set Content-Length and use an iterable (file object)
    # this way CP won't load the whole file in memory
    content_length = st.st_size
    fileobj = open(path, 'rb')
    return _serve_fileobj(fileobj, content_type, content_length, debug=debug)
def serve_fileobj(fileobj, content_type=None, disposition=None, name=None,
                  debug=False):
    """Set status, headers, and body in order to serve the given file object.

    The Content-Type header will be set to the content_type arg, if provided.

    If disposition is not None, the Content-Disposition header will be set
    to "<disposition>; filename=<name>".  If name is None, 'filename' will
    not be set.  If disposition is None, no Content-Disposition header will
    be written.

    CAUTION: If the request contains a 'Range' header, one or more seek()s
    will be performed on the file object.  This may cause undesired behavior
    if the file object is not seekable.  It could also produce undesired
    results if the caller set the read position of the file object prior to
    calling serve_fileobj(), expecting that the data would be served starting
    from that position.
    """
    response = cherrypy.serving.response

    try:
        st = os.fstat(fileobj.fileno())
    except AttributeError:
        # Not a real file (no fileno()); length is unknown, so the body
        # will be streamed without Content-Length or range support.
        if debug:
            cherrypy.log('os has no fstat attribute', 'TOOLS.STATIC')
        content_length = None
    except UnsupportedOperation:
        # io objects that expose fileno() but don't support it (e.g. BytesIO).
        content_length = None
    else:
        # Set the Last-Modified response header, so that
        # modified-since validation code can work.
        response.headers['Last-Modified'] = httputil.HTTPDate(st.st_mtime)
        cptools.validate_since()
        content_length = st.st_size

    if content_type is not None:
        response.headers['Content-Type'] = content_type
    if debug:
        cherrypy.log('Content-Type: %r' % content_type, 'TOOLS.STATIC')

    cd = None
    if disposition is not None:
        if name is None:
            cd = disposition
        else:
            cd = '%s; filename="%s"' % (disposition, name)
        response.headers["Content-Disposition"] = cd
    if debug:
        cherrypy.log('Content-Disposition: %r' % cd, 'TOOLS.STATIC')

    return _serve_fileobj(fileobj, content_type, content_length, debug=debug)
def _serve_fileobj(fileobj, content_type, content_length, debug=False):
    """Internal. Set response.body to the given file object, perhaps ranged."""
    response = cherrypy.serving.response

    # HTTP/1.0 didn't have Range/Accept-Ranges headers, or the 206 code
    request = cherrypy.serving.request
    if request.protocol >= (1, 1):
        response.headers["Accept-Ranges"] = "bytes"
        # get_ranges returns None (no Range header), [] (unsatisfiable),
        # or a list of (start, stop) pairs.
        r = httputil.get_ranges(request.headers.get('Range'), content_length)
        if r == []:
            response.headers['Content-Range'] = "bytes */%s" % content_length
            message = ("Invalid Range (first-byte-pos greater than "
                       "Content-Length)")
            if debug:
                cherrypy.log(message, 'TOOLS.STATIC')
            raise cherrypy.HTTPError(416, message)

        if r:
            if len(r) == 1:
                # Return a single-part response.
                start, stop = r[0]
                if stop > content_length:
                    stop = content_length
                r_len = stop - start
                if debug:
                    cherrypy.log(
                        'Single part; start: %r, stop: %r' % (start, stop),
                        'TOOLS.STATIC')
                response.status = "206 Partial Content"
                response.headers['Content-Range'] = (
                    "bytes %s-%s/%s" % (start, stop - 1, content_length))
                response.headers['Content-Length'] = r_len
                fileobj.seek(start)
                response.body = file_generator_limited(fileobj, r_len)
            else:
                # Return a multipart/byteranges response.
                response.status = "206 Partial Content"
                try:
                    # Python 3
                    from email.generator import _make_boundary as make_boundary
                except ImportError:
                    # Python 2
                    from mimetools import choose_boundary as make_boundary
                boundary = make_boundary()
                ct = "multipart/byteranges; boundary=%s" % boundary
                response.headers['Content-Type'] = ct
                if "Content-Length" in response.headers:
                    # Delete Content-Length header so finalize() recalcs it.
                    del response.headers["Content-Length"]

                def file_ranges():
                    # Lazily emits one MIME part per requested byte range.
                    # Apache compatibility:
                    yield ntob("\r\n")

                    for start, stop in r:
                        if debug:
                            cherrypy.log(
                                'Multipart; start: %r, stop: %r' % (
                                    start, stop),
                                'TOOLS.STATIC')
                        yield ntob("--" + boundary, 'ascii')
                        yield ntob("\r\nContent-type: %s" % content_type,
                                   'ascii')
                        yield ntob(
                            "\r\nContent-range: bytes %s-%s/%s\r\n\r\n" % (
                                start, stop - 1, content_length),
                            'ascii')
                        fileobj.seek(start)
                        gen = file_generator_limited(fileobj, stop - start)
                        for chunk in gen:
                            yield chunk
                        yield ntob("\r\n")
                    # Final boundary
                    yield ntob("--" + boundary + "--", 'ascii')

                    # Apache compatibility:
                    yield ntob("\r\n")
                response.body = file_ranges()
            return response.body
        else:
            if debug:
                cherrypy.log('No byteranges requested', 'TOOLS.STATIC')

    # Set Content-Length and use an iterable (file object)
    # this way CP won't load the whole file in memory
    response.headers['Content-Length'] = content_length
    response.body = fileobj
    return response.body
def serve_download(path, name=None):
    """Serve 'path' as an application/x-download attachment."""
    # This is such a common idiom I felt it deserved its own wrapper.
    return serve_file(path, content_type="application/x-download",
                      disposition="attachment", name=name)
def _attempt(filename, content_types, debug=False):
    """Try to serve *filename*; return True on success, False when the
    file is not found (so a dynamic handler may take over)."""
    if debug:
        cherrypy.log('Attempting %r (content_types %r)'
                     % (filename, content_types), 'TOOLS.STATICDIR')
    # A per-directory {extension: content-type} mapping may override the
    # mimetype guessed from the filename.
    content_type = None
    if content_types:
        _, extension = os.path.splitext(filename)
        content_type = content_types.get(extension[1:], None)
    try:
        serve_file(filename, content_type=content_type, debug=debug)
    except cherrypy.NotFound:
        # If we didn't find the static file, continue handling the
        # request. We might find a dynamic handler instead.
        if debug:
            cherrypy.log('NotFound', 'TOOLS.STATICFILE')
        return False
    return True
def staticdir(section, dir, root="", match="", content_types=None, index="",
              debug=False):
    """Serve a static resource from the given (root +) dir.

    Returns True when a file was served, False when the request should
    fall through to a dynamic handler.

    match
        If given, request.path_info will be searched for the given
        regular expression before attempting to serve static content.

    content_types
        If given, it should be a Python dictionary of
        {file-extension: content-type} pairs, where 'file-extension' is
        a string (e.g. "gif") and 'content-type' is the value to write
        out in the Content-Type response header (e.g. "image/gif").

    index
        If provided, it should be the (relative) name of a file to
        serve for directory requests.  For example, if the dir argument is
        '/home/me', the Request-URI is 'myapp', and the index arg is
        'index.html', the file '/home/me/myapp/index.html' will be sought.
    """
    request = cherrypy.serving.request
    if request.method not in ('GET', 'HEAD'):
        if debug:
            cherrypy.log('request.method not GET or HEAD', 'TOOLS.STATICDIR')
        return False

    if match and not re.search(match, request.path_info):
        if debug:
            cherrypy.log('request.path_info %r does not match pattern %r' %
                         (request.path_info, match), 'TOOLS.STATICDIR')
        return False

    # Allow the use of '~' to refer to a user's home directory.
    dir = os.path.expanduser(dir)

    # If dir is relative, make absolute using "root".
    if not os.path.isabs(dir):
        if not root:
            msg = "Static dir requires an absolute dir (or root)."
            if debug:
                cherrypy.log(msg, 'TOOLS.STATICDIR')
            raise ValueError(msg)
        dir = os.path.join(root, dir)

    # Determine where we are in the object tree relative to 'section'
    # (where the static tool was defined).
    if section == 'global':
        section = "/"
    section = section.rstrip(r"\/")
    branch = request.path_info[len(section) + 1:]
    branch = unquote(branch.lstrip(r"\/"))

    # If branch is "", filename will end in a slash
    filename = os.path.join(dir, branch)
    if debug:
        cherrypy.log('Checking file %r to fulfill %r' %
                     (filename, request.path_info), 'TOOLS.STATICDIR')

    # There's a chance that the branch pulled from the URL might
    # have ".." or similar uplevel attacks in it. Check that the final
    # filename is a child of dir.
    if not os.path.normpath(filename).startswith(os.path.normpath(dir)):
        raise cherrypy.HTTPError(403)  # Forbidden

    handled = _attempt(filename, content_types)
    if not handled:
        # Check for an index file if a folder was requested.
        if index:
            handled = _attempt(os.path.join(filename, index), content_types)
            if handled:
                request.is_index = filename[-1] in (r"\/")
    return handled
def staticfile(filename, root=None, match="", content_types=None, debug=False):
    """Serve a static resource from the given (root +) filename.

    Returns True when the file was served, False when the request should
    fall through to a dynamic handler.

    match
        If given, request.path_info will be searched for the given
        regular expression before attempting to serve static content.

    content_types
        If given, it should be a Python dictionary of
        {file-extension: content-type} pairs, where 'file-extension' is
        a string (e.g. "gif") and 'content-type' is the value to write
        out in the Content-Type response header (e.g. "image/gif").
    """
    request = cherrypy.serving.request
    if request.method not in ('GET', 'HEAD'):
        if debug:
            cherrypy.log('request.method not GET or HEAD', 'TOOLS.STATICFILE')
        return False

    if match and not re.search(match, request.path_info):
        if debug:
            cherrypy.log('request.path_info %r does not match pattern %r' %
                         (request.path_info, match), 'TOOLS.STATICFILE')
        return False

    # If filename is relative, make absolute using "root".
    if not os.path.isabs(filename):
        if not root:
            msg = "Static tool requires an absolute filename (got '%s')." % (
                filename,)
            if debug:
                cherrypy.log(msg, 'TOOLS.STATICFILE')
            raise ValueError(msg)
        filename = os.path.join(root, filename)

    return _attempt(filename, content_types, debug=debug)
|
jmchilton/pulsar | refs/heads/master | pulsar/managers/base/directory.py | 1 | import logging
import os
import stat
from pulsar.managers.base import BaseManager
from pulsar.managers import PULSAR_UNKNOWN_RETURN_CODE
from ..util.job_script import job_script
from ..util.env import env_to_statement
log = logging.getLogger(__name__)
# TODO: Rename these to abstract out the fact they are files - pulsar
# should be able to replace metadata backing with non-file stuff now that
# the abstractions are fairly well utilized.
JOB_FILE_RETURN_CODE = "return_code"
JOB_FILE_STANDARD_OUTPUT = "stdout"
JOB_FILE_STANDARD_ERROR = "stderr"
JOB_FILE_TOOL_ID = "tool_id"
JOB_FILE_TOOL_VERSION = "tool_version"
JOB_FILE_CANCELLED = "cancelled"
class DirectoryBaseManager(BaseManager):
    """Base manager that persists per-job state (stdout/stderr/return code/
    metadata) as files inside each job's directory.
    """

    def _job_file(self, job_id, name):
        """Path of the file ``name`` inside the job's directory."""
        return self._job_directory(job_id)._job_file(name)

    def return_code(self, job_id):
        """Return the recorded exit code as an int, or
        ``PULSAR_UNKNOWN_RETURN_CODE`` when it has not been written yet."""
        return_code_str = self._read_job_file(job_id, JOB_FILE_RETURN_CODE, default=PULSAR_UNKNOWN_RETURN_CODE)
        return int(return_code_str) if return_code_str and return_code_str != PULSAR_UNKNOWN_RETURN_CODE else return_code_str

    def stdout_contents(self, job_id):
        """Captured standard output ("" when absent)."""
        return self._read_job_file(job_id, JOB_FILE_STANDARD_OUTPUT, default="")

    def stderr_contents(self, job_id):
        """Captured standard error ("" when absent)."""
        return self._read_job_file(job_id, JOB_FILE_STANDARD_ERROR, default="")

    def _stdout_path(self, job_id):
        return self._job_file(job_id, JOB_FILE_STANDARD_OUTPUT)

    def _stderr_path(self, job_id):
        return self._job_file(job_id, JOB_FILE_STANDARD_ERROR)

    def _return_code_path(self, job_id):
        return self._job_file(job_id, JOB_FILE_RETURN_CODE)

    def _setup_job_for_job_id(self, job_id, tool_id, tool_version):
        """Create the job directory, authorize the setup, and record the
        tool identity.  Returns the job id."""
        self._setup_job_directory(job_id)
        tool_id = str(tool_id) if tool_id else ""
        tool_version = str(tool_version) if tool_version else ""
        authorization = self._get_authorization(job_id, tool_id)
        authorization.authorize_setup()
        self._write_tool_info(job_id, tool_id, tool_version)
        return job_id

    def _read_job_file(self, job_id, name, **kwds):
        return self._job_directory(job_id).read_file(name, **kwds)

    def _write_job_file(self, job_id, name, contents):
        return self._job_directory(job_id).write_file(name, contents)

    def _write_return_code(self, job_id, return_code):
        self._write_job_file(job_id, JOB_FILE_RETURN_CODE, str(return_code))

    def _write_tool_info(self, job_id, tool_id, tool_version):
        job_directory = self._job_directory(job_id)
        job_directory.store_metadata(JOB_FILE_TOOL_ID, tool_id)
        job_directory.store_metadata(JOB_FILE_TOOL_VERSION, tool_version)

    def _record_cancel(self, job_id):
        """Best-effort persistence of the cancelled flag."""
        try:
            self._job_directory(job_id).store_metadata(JOB_FILE_CANCELLED, True)
        except Exception:
            # Bug fix: the previous message was garbled ("Failed to recod
            # job with id %s was cancelled."); also use lazy %-args.
            log.info("Failed to record that job with id %s was cancelled.", job_id)

    def _was_cancelled(self, job_id):
        """True when the cancelled flag was persisted; False on any error."""
        try:
            return self._job_directory(job_id).load_metadata(JOB_FILE_CANCELLED, None)
        except Exception:
            log.info("Failed to determine if job with id %s was cancelled, assuming no." % job_id)
            return False

    def _open_standard_output(self, job_id):
        return self._job_directory(job_id).open_file(JOB_FILE_STANDARD_OUTPUT, 'w')

    def _open_standard_error(self, job_id):
        return self._job_directory(job_id).open_file(JOB_FILE_STANDARD_ERROR, 'w')

    def _check_execution_with_tool_file(self, job_id, command_line):
        tool_id = self._tool_id(job_id)
        self._check_execution(job_id, tool_id, command_line)

    def _tool_id(self, job_id):
        """Tool id recorded at setup time, or None when absent."""
        tool_id = None
        job_directory = self._job_directory(job_id)
        if job_directory.has_metadata(JOB_FILE_TOOL_ID):
            tool_id = job_directory.load_metadata(JOB_FILE_TOOL_ID)
        return tool_id

    # Helpers methods related to setting up job script files.

    def _setup_job_file(self, job_id, command_line, dependencies_description=None, env=[]):
        """Expand the command line, render the job script, and write it."""
        command_line = self._expand_command_line(command_line, dependencies_description)
        script_env = self._job_template_env(job_id, command_line=command_line, env=env)
        script = job_script(**script_env)
        return self._write_job_script(job_id, script)

    def _job_template_env(self, job_id, command_line=None, env=[]):
        """Build the keyword arguments consumed by ``job_script``."""
        return_code_path = self._return_code_path(job_id)
        # TODO: Add option to ignore remote env.
        env = env + self.env_vars
        # Bug fix: on Python 3 map() returns a one-shot iterator; build a
        # list so the job script template can iterate it safely.
        env_setup_commands = [env_to_statement(e) for e in env]
        job_template_env = {
            'job_instrumenter': self.job_metrics.default_job_instrumenter,
            'galaxy_lib': self._galaxy_lib(),
            'env_setup_commands': env_setup_commands,
            'exit_code_path': return_code_path,
            'working_directory': self.job_directory(job_id).working_directory(),
            'job_id': job_id,
        }
        if command_line:
            job_template_env['command'] = command_line
        return job_template_env

    def _write_job_script(self, job_id, contents):
        """Write command.sh and mark it owner read/write/executable."""
        self._write_job_file(job_id, "command.sh", contents)
        script_path = self._job_file(job_id, "command.sh")
        os.chmod(script_path, stat.S_IEXEC | stat.S_IWRITE | stat.S_IREAD)
        return script_path
|
Jgarcia-IAS/ReporsitorioVacioOdoo | refs/heads/master | openerp/addons/website_event_sale/controllers/main.py | 233 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013-Today OpenERP SA (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import SUPERUSER_ID
from openerp.addons.web import http
from openerp.addons.web.http import request
from openerp.addons.website_event.controllers.main import website_event
from openerp.addons.website_sale.controllers.main import get_pricelist
from openerp.tools.translate import _
class website_event(website_event):
    """Extends the website_event controller with ticket-sale support."""

    @http.route(['/event/<model("event.event"):event>/register'], type='http', auth="public", website=True)
    def event_register(self, event, **post):
        """Render the event page with prices from the visitor's pricelist."""
        pricelist_id = int(get_pricelist())
        values = {
            'event': event.with_context(pricelist=pricelist_id),
            'main_object': event.with_context(pricelist=pricelist_id),
            'range': range,
        }
        return request.website.render("website_event.event_description_full", values)

    @http.route(['/event/cart/update'], type='http', auth="public", methods=['POST'], website=True)
    def cart_update(self, event_id, **post):
        """Add the posted ticket quantities to the cart, then redirect to
        checkout (or back to the event when nothing was selected)."""
        cr, uid, context = request.cr, request.uid, request.context
        ticket_obj = request.registry.get('event.event.ticket')
        sale = False
        for key, value in post.items():
            quantity = int(value or "0")
            if not quantity:
                continue
            sale = True
            # Form keys look like 'ticket-<id>'; anything else yields None.
            ticket_id = key.split("-")[0] == 'ticket' and int(key.split("-")[1]) or None
            ticket = ticket_obj.browse(cr, SUPERUSER_ID, ticket_id, context=context)
            order = request.website.sale_get_order(force_create=1)
            order.with_context(event_ticket_id=ticket.id)._cart_update(product_id=ticket.product_id.id, add_qty=quantity)
        if not sale:
            return request.redirect("/event/%s" % event_id)
        return request.redirect("/shop/checkout")

    def _add_event(self, event_name="New Event", context={}, **kwargs):
        """Create a new event, pre-seeding a default 'Subscription' ticket.

        The historical ``context={}`` signature is kept for compatibility,
        but the dict is copied before mutation.
        """
        # Bug fix: mutating the shared mutable default argument leaked the
        # ticket defaults into every later call made without a context.
        context = dict(context)
        try:
            dummy, res_id = request.registry.get('ir.model.data').get_object_reference(request.cr, request.uid, 'event_sale', 'product_product_event')
            context['default_event_ticket_ids'] = [[0, 0, {
                'name': _('Subscription'),
                'product_id': res_id,
                'deadline': False,
                'seats_max': 1000,
                'price': 0,
            }]]
        except ValueError:
            # event_sale's default product is not installed; create the
            # event without a pre-seeded ticket.
            pass
        return super(website_event, self)._add_event(event_name, context, **kwargs)
|
drewx2/android_kernel_htc_dlx | refs/heads/cm10 | tools/perf/scripts/python/sctop.py | 11180 | # system call top
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Periodically displays system-wide system call totals, broken down by
# syscall. If a [comm] arg is specified, only syscalls called by
# [comm] are displayed. If an [interval] arg is specified, the display
# will be refreshed every [interval] seconds. The default interval is
# 3 seconds.
import os, sys, thread, time
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
usage = "perf script -s sctop.py [comm] [interval]\n";

# Optional positional args: [comm] restricts counting to one command name,
# [interval] is the screen refresh period in seconds.
for_comm = None
default_interval = 3
interval = default_interval

if len(sys.argv) > 3:
	sys.exit(usage)

if len(sys.argv) > 2:
	for_comm = sys.argv[1]
	interval = int(sys.argv[2])
elif len(sys.argv) > 1:
	try:
		# A single numeric arg is the interval...
		interval = int(sys.argv[1])
	except ValueError:
		# ...otherwise it is the command name to filter on.
		for_comm = sys.argv[1]
		interval = default_interval
def trace_begin():
	# perf calls this once before event processing starts; launch the
	# background display refresher (Python 2 'thread' module).
	thread.start_new_thread(print_syscall_totals, (interval,))
	pass
def raw_syscalls__sys_enter(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	id, args):
	# perf callback, invoked once per raw_syscalls:sys_enter tracepoint hit.
	# Skip events from other commands when a [comm] filter was given.
	if for_comm is not None:
		if common_comm != for_comm:
			return
	try:
		syscalls[id] += 1
	except TypeError:
		# First hit for this syscall id: the autodict leaf is not an int yet.
		syscalls[id] = 1
def print_syscall_totals(interval):
	# Background thread body: every `interval` seconds, redraw the
	# system-wide syscall totals sorted by count, then reset the counters.
	# Python 2 syntax (print statement, lambda tuple unpacking) -- this
	# runs under perf's embedded python2 interpreter.
	while 1:
		clear_term()
		if for_comm is not None:
			print "\nsyscall events for %s:\n\n" % (for_comm),
		else:
			print "\nsyscall events:\n\n",
		print "%-40s %10s\n" % ("event", "count"),
		print "%-40s %10s\n" % ("----------------------------------------", \
                                         "----------"),
		for id, val in sorted(syscalls.iteritems(), key = lambda(k, v): (v, k), \
					      reverse = True):
			try:
				print "%-40s %10d\n" % (syscall_name(id), val),
			except TypeError:
				# Unknown syscall id on this architecture; skip it.
				pass
		syscalls.clear()
		time.sleep(interval)
|
Alberto-Beralix/Beralix | refs/heads/master | i386-squashfs-root/usr/lib/python2.7/dist-packages/launchpadlib/credentials.py | 2 | ../../../../share/pyshared/launchpadlib/credentials.py |
priyankadeswal/network-address-translator | refs/heads/master | waf-tools/cflags.py | 16 | from waflib import Logs, Options, Utils
class CompilerTraits(object):
    """Abstract interface mapping the numeric optimization / warnings /
    debug levels of a build profile onto concrete compiler flags."""

    def get_optimization_flags(self, level):
        """get_optimization_flags(level) -> list of cflags"""
        raise NotImplementedError

    def get_debug_flags(self, level):
        """get_debug_flags(level) -> (list of cflags, list of cppdefines)"""
        raise NotImplementedError

    def get_warnings_flags(self, level):
        """get_warnings_flags(level) -> list of cflags"""
        raise NotImplementedError
class GccTraits(CompilerTraits):
    """Flag traits for GCC and GCC-compatible compilers (g++, clang)."""

    def __init__(self):
        super(GccTraits, self).__init__()
        # cumulative list of warnings per level
        self.warnings_flags = [['-Wall'], ['-Werror'], ['-Wextra']]

    def get_warnings_flags(self, level):
        """Return the cumulative warning flags for levels 1..level."""
        warnings = []
        for l in range(level):
            if l < len(self.warnings_flags):
                warnings.extend(self.warnings_flags[l])
            else:
                break
        return warnings

    def get_optimization_flags(self, level):
        """Map an optimization level onto gcc -O switches.

        Levels above 3 are clamped to -O3; the original returned None for
        level > 3, which would break configure()'s append_value() calls.
        """
        if level == 0:
            return ['-O0']
        elif level == 1:
            return ['-O']
        elif level == 2:
            return ['-O2']
        elif level >= 3:
            return ['-O3']

    def get_debug_flags(self, level):
        """Return (cflags, cppdefines) for the given debug level."""
        if level == 0:
            return (['-g0'], ['NDEBUG'])
        elif level == 1:
            return (['-g'], [])
        elif level >= 2:
            return (['-ggdb', '-g3'], ['_DEBUG'])
class IccTraits(CompilerTraits):
    """Flag traits for the Intel compiler (icc/icpc)."""

    def __init__(self):
        super(IccTraits, self).__init__()
        # cumulative list of warnings per level
        # icc is _very_ verbose with -Wall, -Werror is barely achievable
        self.warnings_flags = [[], [], ['-Wall']]

    def get_warnings_flags(self, level):
        """Return the cumulative warning flags for levels 1..level."""
        enabled = []
        for flags in self.warnings_flags[:max(0, level)]:
            enabled.extend(flags)
        return enabled

    def get_optimization_flags(self, level):
        """Map an optimization level (0-3) onto icc -O switches."""
        return {0: ['-O0'], 1: ['-O'], 2: ['-O2'], 3: ['-O3']}.get(level)

    def get_debug_flags(self, level):
        """Return (cflags, cppdefines) for the given debug level."""
        if level == 0:
            return (['-g0'], ['NDEBUG'])
        if level == 1:
            return (['-g'], [])
        if level >= 2:
            return (['-ggdb', '-g3'], ['_DEBUG'])
class MsvcTraits(CompilerTraits):
    """Flag traits for Microsoft Visual C++ (cl.exe)."""

    def __init__(self):
        super(MsvcTraits, self).__init__()
        # cumulative list of warnings per level
        self.warnings_flags = [['/W2'], ['/WX'], ['/Wall']]

    def get_warnings_flags(self, level):
        """Return the cumulative warning flags for levels 1..level."""
        enabled = []
        for flags in self.warnings_flags[:max(0, level)]:
            enabled.extend(flags)
        return enabled

    def get_optimization_flags(self, level):
        """Map an optimization level (0-3) onto cl.exe /O switches."""
        return {0: ['/Od'], 1: [], 2: ['/O2'], 3: ['/Ox']}.get(level)

    def get_debug_flags(self, level):
        """Return (cflags, cppdefines) for the given debug level."""
        if level == 0:
            return ([], ['NDEBUG'])
        if level == 1:
            return (['/ZI', '/RTC1'], [])
        if level >= 2:
            return (['/ZI', '/RTC1'], ['_DEBUG'])
# Shared, stateless singleton traits objects used by the mapping below.
gcc = GccTraits()
icc = IccTraits()
msvc = MsvcTraits()

# how to map env['COMPILER_CC'] or env['COMPILER_CXX'] into a traits object
compiler_mapping = {
    'gcc': gcc,
    'g++': gcc,
    'msvc': msvc,
    'icc': icc,
    'icpc': icc,
    'clang': gcc,
    'clang++': gcc,
}

# Build profiles selectable with -d/--build-profile.
profiles = {
    # profile name: [optimization_level, warnings_level, debug_level]
    'default': [2, 1, 1],
    'debug': [0, 2, 3],
    'release': [3, 1, 0],
}

default_profile = 'default'
def options(opt):
    """Register the -d/--build-profile command line option.

    ``opt`` is the waf options context; the chosen profile name is stored
    in ``Options.options.build_profile`` for configure() to consume.
    """
    assert default_profile in profiles
    opt.add_option('-d', '--build-profile',
                   action='store',
                   default=default_profile,
                   # Fixed duplicated word ("not set set") in the help text.
                   help=("Specify the build profile. "
                         "Build profiles control the default compilation flags"
                         " used for C/C++ programs, if CCFLAGS/CXXFLAGS are not"
                         " set in the environment. [Allowed Values: %s]"
                         % ", ".join([repr(p) for p in list(profiles.keys())])),
                   choices=list(profiles.keys()),
                   dest='build_profile')
def configure(conf):
    """Apply profile-derived CC/CXX flags, unless the user already supplied
    CCFLAGS/CXXFLAGS through the environment."""
    cc = conf.env['COMPILER_CC'] or None
    cxx = conf.env['COMPILER_CXX'] or None
    if not (cc or cxx):
        # NOTE(review): modern waf exposes WafError via waflib.Errors rather
        # than Utils -- confirm against the waf version bundled here.
        raise Utils.WafError("neither COMPILER_CC nor COMPILER_CXX are defined; "
                             "maybe the compiler_cc or compiler_cxx tool has not been configured yet?")

    # Look up the traits object for whichever compiler was detected.
    compiler = compiler_mapping.get(cc)
    if compiler is None:
        compiler = compiler_mapping.get(cxx)
    if compiler is None:
        Logs.warn("No compiler flags support for compiler %r or %r"
                  % (cc, cxx))
        return

    opt_level, warn_level, dbg_level = profiles[Options.options.build_profile]

    optimizations = compiler.get_optimization_flags(opt_level)
    debug, debug_defs = compiler.get_debug_flags(dbg_level)
    warnings = compiler.get_warnings_flags(warn_level)

    if cc and not conf.env['CCFLAGS']:
        conf.env.append_value('CCFLAGS', optimizations)
        conf.env.append_value('CCFLAGS', debug)
        conf.env.append_value('CCFLAGS', warnings)
        conf.env.append_value('CCDEFINES', debug_defs)
    if cxx and not conf.env['CXXFLAGS']:
        conf.env.append_value('CXXFLAGS', optimizations)
        conf.env.append_value('CXXFLAGS', debug)
        conf.env.append_value('CXXFLAGS', warnings)
        conf.env.append_value('CXXDEFINES', debug_defs)
|
bsipocz/astropy | refs/heads/hacking | docs/conftest.py | 4 | # Licensed under a 3-clause BSD style license - see LICENSE.rst
# This file needs to be included here to make sure commands such
# as ``python setup.py test ... -t docs/...`` works, since this
# will ignore the conftest.py file at the root of the repository
# and the one in astropy/conftest.py
import os
import tempfile
# Make sure we use temporary directories for the config and cache
# so that the tests are insensitive to local configuration.
os.environ['XDG_CONFIG_HOME'] = tempfile.mkdtemp('astropy_config')
os.environ['XDG_CACHE_HOME'] = tempfile.mkdtemp('astropy_cache')

# Astropy expects an 'astropy' subdirectory inside each XDG directory.
os.mkdir(os.path.join(os.environ['XDG_CONFIG_HOME'], 'astropy'))
os.mkdir(os.path.join(os.environ['XDG_CACHE_HOME'], 'astropy'))

# Note that we don't need to change the environment variables back or remove
# them after testing, because they are only changed for the duration of the
# Python process, and this configuration only matters if running pytest
# directly, not from e.g. an IPython session.
|
mmardini/django | refs/heads/master | django/test/client.py | 7 | from __future__ import unicode_literals
import sys
import os
import re
import mimetypes
from copy import copy
from importlib import import_module
from io import BytesIO
from django.apps import apps
from django.conf import settings
from django.core import urlresolvers
from django.core.handlers.base import BaseHandler
from django.core.handlers.wsgi import WSGIRequest
from django.core.signals import (request_started, request_finished,
got_request_exception)
from django.db import close_old_connections
from django.http import SimpleCookie, HttpRequest, QueryDict
from django.template import TemplateDoesNotExist
from django.test import signals
from django.utils.functional import curry, SimpleLazyObject
from django.utils.encoding import force_bytes, force_str
from django.utils.http import urlencode
from django.utils.itercompat import is_iterable
from django.utils import six
from django.utils.six.moves.urllib.parse import unquote, urlparse, urlsplit
from django.test.utils import ContextList
__all__ = ('Client', 'RequestFactory', 'encode_file', 'encode_multipart')
BOUNDARY = 'BoUnDaRyStRiNg'
MULTIPART_CONTENT = 'multipart/form-data; boundary=%s' % BOUNDARY
CONTENT_TYPE_RE = re.compile('.*; charset=([\w\d-]+);?')
class FakePayload(object):
    """
    A wrapper around BytesIO that restricts what can be read since data from
    the network can't be seeked and cannot be read outside of its content
    length. This makes sure that views can't do anything under the test client
    that wouldn't work in Real Life.
    """
    def __init__(self, content=None):
        self.__content = BytesIO()
        self.__len = 0
        self.read_started = False
        if content is not None:
            self.write(content)

    def __len__(self):
        return self.__len

    def read(self, num_bytes=None):
        """Read at most ``num_bytes`` (default: everything still unread)."""
        # The first read rewinds to the start so previously written content
        # becomes readable; afterwards further writes are forbidden.
        if not self.read_started:
            self.__content.seek(0)
            self.read_started = True
        if num_bytes is None:
            num_bytes = self.__len or 0
        assert self.__len >= num_bytes, "Cannot read more than the available bytes from the HTTP incoming data."
        content = self.__content.read(num_bytes)
        self.__len -= num_bytes
        return content

    def write(self, content):
        """Append ``content`` (coerced to bytes) to the payload."""
        if self.read_started:
            # Fixed grammar in the error message ("after he's" -> "after it's").
            raise ValueError("Unable to write a payload after it's been read")
        content = force_bytes(content)
        self.__content.write(content)
        self.__len += len(content)
def closing_iterator_wrapper(iterable, close):
    # Wrap a streaming response's content iterator so that close() (which
    # fires request_finished) only runs once the content is fully consumed.
    # close_old_connections is temporarily disconnected so the test database
    # connection is not closed mid-test.
    try:
        for item in iterable:
            yield item
    finally:
        request_finished.disconnect(close_old_connections)
        close()  # will fire request_finished
        request_finished.connect(close_old_connections)
class ClientHandler(BaseHandler):
    """
    A HTTP Handler that can be used for testing purposes. Uses the WSGI
    interface to compose requests, but returns the raw HttpResponse object with
    the originating WSGIRequest attached to its ``wsgi_request`` attribute.
    """
    def __init__(self, enforce_csrf_checks=True, *args, **kwargs):
        # The test Client passes enforce_csrf_checks=False so that views can
        # be exercised without supplying CSRF tokens.
        self.enforce_csrf_checks = enforce_csrf_checks
        super(ClientHandler, self).__init__(*args, **kwargs)

    def __call__(self, environ):
        """Process one WSGI environ and return the annotated HttpResponse."""
        # Set up middleware if needed. We couldn't do this earlier, because
        # settings weren't available.
        if self._request_middleware is None:
            self.load_middleware()

        # close_old_connections is detached around the signal send so the
        # test database connection survives across simulated requests.
        request_started.disconnect(close_old_connections)
        request_started.send(sender=self.__class__)
        request_started.connect(close_old_connections)
        request = WSGIRequest(environ)
        # sneaky little hack so that we can easily get round
        # CsrfViewMiddleware. This makes life easier, and is probably
        # required for backwards compatibility with external tests against
        # admin views.
        request._dont_enforce_csrf_checks = not self.enforce_csrf_checks

        # Request goes through middleware.
        response = self.get_response(request)
        # Attach the originating request to the response so that it could be
        # later retrieved.
        response.wsgi_request = request

        # We're emulating a WSGI server; we must call the close method
        # on completion.
        if response.streaming:
            response.streaming_content = closing_iterator_wrapper(
                response.streaming_content, response.close)
        else:
            request_finished.disconnect(close_old_connections)
            response.close()  # will fire request_finished
            request_finished.connect(close_old_connections)

        return response
def store_rendered_templates(store, signal, sender, template, context, **kwargs):
    """
    Stores templates and contexts that are rendered.

    Connected (curried with ``store``) to the template_rendered signal by
    Client.request(); ``store`` is the per-request collection dict.

    The context is copied so that it is an accurate representation at the time
    of rendering.
    """
    store.setdefault('templates', []).append(template)
    store.setdefault('context', ContextList()).append(copy(context))
def encode_multipart(boundary, data):
    """
    Encodes multipart POST data from a dictionary of form values.

    The key will be used as the form data name; the value will be transmitted
    as content. If the value is a file, the contents of the file will be sent
    as an application/octet-stream; otherwise, str(value) will be sent.
    """
    def to_bytes(s):
        return force_bytes(s, settings.DEFAULT_CHARSET)

    def is_file(thing):
        return hasattr(thing, "read") and callable(thing.read)

    def field_lines(key, value):
        # One multipart section for a plain (non-file) form value.
        return [to_bytes(chunk) for chunk in [
            '--%s' % boundary,
            'Content-Disposition: form-data; name="%s"' % key,
            '',
            value
        ]]

    lines = []
    # Each bit of the multipart form data could be either a form value or a
    # file, or a *list* of form values and/or files -- HTTP field names can
    # be duplicated!
    for key, value in data.items():
        if is_file(value):
            lines.extend(encode_file(boundary, key, value))
        elif is_iterable(value) and not isinstance(value, six.string_types):
            for item in value:
                if is_file(item):
                    lines.extend(encode_file(boundary, key, item))
                else:
                    lines.extend(field_lines(key, item))
        else:
            lines.extend(field_lines(key, value))

    lines.append(to_bytes('--%s--' % boundary))
    lines.append(b'')
    return b'\r\n'.join(lines)
def encode_file(boundary, key, file):
    """Return the multipart section (list of bytes lines) for one file upload."""
    def to_bytes(s):
        return force_bytes(s, settings.DEFAULT_CHARSET)

    # Prefer an explicit content_type attribute; otherwise guess from the name.
    if hasattr(file, 'content_type'):
        content_type = file.content_type
    else:
        content_type = mimetypes.guess_type(file.name)[0]
    if content_type is None:
        content_type = 'application/octet-stream'

    disposition = 'Content-Disposition: form-data; name="%s"; filename="%s"' \
        % (key, os.path.basename(file.name))
    return [
        to_bytes('--%s' % boundary),
        to_bytes(disposition),
        to_bytes('Content-Type: %s' % content_type),
        b'',
        file.read()
    ]
class RequestFactory(object):
    """
    Class that lets you create mock Request objects for use in testing.

    Usage:

    rf = RequestFactory()
    get_request = rf.get('/hello/')
    post_request = rf.post('/submit/', {'foo': 'bar'})

    Once you have a request object you can pass it to any view function,
    just as if that view had been hooked up using a URLconf.
    """
    def __init__(self, **defaults):
        # ``defaults`` are extra WSGI environ entries applied to every request.
        self.defaults = defaults
        self.cookies = SimpleCookie()
        self.errors = BytesIO()

    def _base_environ(self, **request):
        """
        The base environment for a request.
        """
        # This is a minimal valid WSGI environ dictionary, plus:
        # - HTTP_COOKIE: for cookie support,
        # - REMOTE_ADDR: often useful, see #8551.
        # See http://www.python.org/dev/peps/pep-3333/#environ-variables
        environ = {
            'HTTP_COOKIE': self.cookies.output(header='', sep='; '),
            'PATH_INFO': str('/'),
            'REMOTE_ADDR': str('127.0.0.1'),
            'REQUEST_METHOD': str('GET'),
            'SCRIPT_NAME': str(''),
            'SERVER_NAME': str('testserver'),
            'SERVER_PORT': str('80'),
            'SERVER_PROTOCOL': str('HTTP/1.1'),
            'wsgi.version': (1, 0),
            'wsgi.url_scheme': str('http'),
            'wsgi.input': FakePayload(b''),
            'wsgi.errors': self.errors,
            'wsgi.multiprocess': True,
            'wsgi.multithread': False,
            'wsgi.run_once': False,
        }
        environ.update(self.defaults)
        environ.update(request)
        return environ

    def request(self, **request):
        "Construct a generic request object."
        return WSGIRequest(self._base_environ(**request))

    def _encode_data(self, data, content_type):
        # Multipart payloads use the fixed BOUNDARY; everything else is
        # byte-encoded with the charset from the content type, falling back
        # to the project default charset.
        if content_type is MULTIPART_CONTENT:
            return encode_multipart(BOUNDARY, data)
        else:
            # Encode the content so that the byte representation is correct.
            match = CONTENT_TYPE_RE.match(content_type)
            if match:
                charset = match.group(1)
            else:
                charset = settings.DEFAULT_CHARSET
            return force_bytes(data, encoding=charset)

    def _get_path(self, parsed):
        # ``parsed`` is a urlparse() 6-tuple; index 2 is the path, 3 params.
        path = force_str(parsed[2])
        # If there are parameters, add them
        if parsed[3]:
            path += str(";") + force_str(parsed[3])
        path = unquote(path)
        # WSGI requires latin-1 encoded strings. See get_path_info().
        if six.PY3:
            path = path.encode('utf-8').decode('iso-8859-1')
        return path

    def get(self, path, data=None, secure=False, **extra):
        "Construct a GET request."
        r = {
            'QUERY_STRING': urlencode(data or {}, doseq=True),
        }
        r.update(extra)
        return self.generic('GET', path, secure=secure, **r)

    def post(self, path, data=None, content_type=MULTIPART_CONTENT,
             secure=False, **extra):
        "Construct a POST request."
        post_data = self._encode_data(data or {}, content_type)
        return self.generic('POST', path, post_data, content_type,
                            secure=secure, **extra)

    def head(self, path, data=None, secure=False, **extra):
        "Construct a HEAD request."
        r = {
            'QUERY_STRING': urlencode(data or {}, doseq=True),
        }
        r.update(extra)
        return self.generic('HEAD', path, secure=secure, **r)

    def options(self, path, data='', content_type='application/octet-stream',
                secure=False, **extra):
        "Construct an OPTIONS request."
        return self.generic('OPTIONS', path, data, content_type,
                            secure=secure, **extra)

    def put(self, path, data='', content_type='application/octet-stream',
            secure=False, **extra):
        "Construct a PUT request."
        return self.generic('PUT', path, data, content_type,
                            secure=secure, **extra)

    def patch(self, path, data='', content_type='application/octet-stream',
              secure=False, **extra):
        "Construct a PATCH request."
        return self.generic('PATCH', path, data, content_type,
                            secure=secure, **extra)

    def delete(self, path, data='', content_type='application/octet-stream',
               secure=False, **extra):
        "Construct a DELETE request."
        return self.generic('DELETE', path, data, content_type,
                            secure=secure, **extra)

    def generic(self, method, path, data='',
                content_type='application/octet-stream', secure=False,
                **extra):
        """Constructs an arbitrary HTTP request."""
        parsed = urlparse(path)
        data = force_bytes(data, settings.DEFAULT_CHARSET)
        r = {
            'PATH_INFO': self._get_path(parsed),
            'REQUEST_METHOD': str(method),
            'SERVER_PORT': str('443') if secure else str('80'),
            'wsgi.url_scheme': str('https') if secure else str('http'),
        }
        if data:
            r.update({
                'CONTENT_LENGTH': len(data),
                'CONTENT_TYPE': str(content_type),
                'wsgi.input': FakePayload(data),
            })
        r.update(extra)
        # If QUERY_STRING is absent or empty, we want to extract it from the URL.
        if not r.get('QUERY_STRING'):
            query_string = force_bytes(parsed[4])
            # WSGI requires latin-1 encoded strings. See get_path_info().
            if six.PY3:
                query_string = query_string.decode('iso-8859-1')
            r['QUERY_STRING'] = query_string
        return self.request(**r)
class Client(RequestFactory):
    """
    A class that can act as a client for testing purposes.

    It allows the user to compose GET and POST requests, and
    obtain the response that the server gave to those requests.
    The server Response objects are annotated with the details
    of the contexts and templates that were rendered during the
    process of serving the request.

    Client objects are stateful - they will retain cookie (and
    thus session) details for the lifetime of the Client instance.

    This is not intended as a replacement for Twill/Selenium or
    the like - it is here to allow testing against the
    contexts and templates produced by a view, rather than the
    HTML rendered to the end-user.
    """
    def __init__(self, enforce_csrf_checks=False, **defaults):
        super(Client, self).__init__(**defaults)
        self.handler = ClientHandler(enforce_csrf_checks)
        # Most recent exception raised by a view, captured via the
        # got_request_exception signal (see store_exc_info / request).
        self.exc_info = None

    def store_exc_info(self, **kwargs):
        """
        Stores exceptions when they are generated by a view.
        """
        self.exc_info = sys.exc_info()

    def _session(self):
        """
        Obtains the current session variables.
        """
        if apps.is_installed('django.contrib.sessions'):
            engine = import_module(settings.SESSION_ENGINE)
            cookie = self.cookies.get(settings.SESSION_COOKIE_NAME, None)
            if cookie:
                return engine.SessionStore(cookie.value)
            else:
                # No session cookie yet: create and persist a fresh session.
                s = engine.SessionStore()
                s.save()
                self.cookies[settings.SESSION_COOKIE_NAME] = s.session_key
                return s
        return {}
    session = property(_session)

    def request(self, **request):
        """
        The master request method. Composes the environment dictionary
        and passes to the handler, returning the result of the handler.
        Assumes defaults for the query environment, which can be overridden
        using the arguments to the request.
        """
        environ = self._base_environ(**request)

        # Curry a data dictionary into an instance of the template renderer
        # callback function.
        data = {}
        on_template_render = curry(store_rendered_templates, data)
        # dispatch_uid is unique per request dict so nested/parallel requests
        # do not disconnect each other's receivers.
        signal_uid = "template-render-%s" % id(request)
        signals.template_rendered.connect(on_template_render, dispatch_uid=signal_uid)
        # Capture exceptions created by the handler.
        got_request_exception.connect(self.store_exc_info, dispatch_uid="request-exception")
        try:
            try:
                response = self.handler(environ)
            except TemplateDoesNotExist as e:
                # If the view raises an exception, Django will attempt to show
                # the 500.html template. If that template is not available,
                # we should ignore the error in favor of re-raising the
                # underlying exception that caused the 500 error. Any other
                # template found to be missing during view error handling
                # should be reported as-is.
                if e.args != ('500.html',):
                    raise

            # Look for a signalled exception, clear the current context
            # exception data, then re-raise the signalled exception.
            # Also make sure that the signalled exception is cleared from
            # the local cache!
            if self.exc_info:
                exc_info = self.exc_info
                self.exc_info = None
                six.reraise(*exc_info)

            # Save the client and request that stimulated the response.
            response.client = self
            response.request = request

            # Add any rendered template detail to the response.
            response.templates = data.get("templates", [])
            response.context = data.get("context")

            # Attach the ResolverMatch instance to the response
            response.resolver_match = SimpleLazyObject(
                lambda: urlresolvers.resolve(request['PATH_INFO']))

            # Flatten a single context. Not really necessary anymore thanks to
            # the __getattr__ flattening in ContextList, but has some edge-case
            # backwards-compatibility implications.
            if response.context and len(response.context) == 1:
                response.context = response.context[0]

            # Update persistent cookie data.
            if response.cookies:
                self.cookies.update(response.cookies)

            return response
        finally:
            signals.template_rendered.disconnect(dispatch_uid=signal_uid)
            got_request_exception.disconnect(dispatch_uid="request-exception")

    def get(self, path, data=None, follow=False, secure=False, **extra):
        """
        Requests a response from the server using GET.
        """
        response = super(Client, self).get(path, data=data, secure=secure,
                                           **extra)
        if follow:
            response = self._handle_redirects(response, **extra)
        return response

    def post(self, path, data=None, content_type=MULTIPART_CONTENT,
             follow=False, secure=False, **extra):
        """
        Requests a response from the server using POST.
        """
        response = super(Client, self).post(path, data=data,
                                            content_type=content_type,
                                            secure=secure, **extra)
        if follow:
            response = self._handle_redirects(response, **extra)
        return response

    def head(self, path, data=None, follow=False, secure=False, **extra):
        """
        Request a response from the server using HEAD.
        """
        response = super(Client, self).head(path, data=data, secure=secure,
                                            **extra)
        if follow:
            response = self._handle_redirects(response, **extra)
        return response

    def options(self, path, data='', content_type='application/octet-stream',
                follow=False, secure=False, **extra):
        """
        Request a response from the server using OPTIONS.
        """
        response = super(Client, self).options(path, data=data,
                                               content_type=content_type,
                                               secure=secure, **extra)
        if follow:
            response = self._handle_redirects(response, **extra)
        return response

    def put(self, path, data='', content_type='application/octet-stream',
            follow=False, secure=False, **extra):
        """
        Send a resource to the server using PUT.
        """
        response = super(Client, self).put(path, data=data,
                                           content_type=content_type,
                                           secure=secure, **extra)
        if follow:
            response = self._handle_redirects(response, **extra)
        return response

    def patch(self, path, data='', content_type='application/octet-stream',
              follow=False, secure=False, **extra):
        """
        Send a resource to the server using PATCH.
        """
        response = super(Client, self).patch(path, data=data,
                                             content_type=content_type,
                                             secure=secure, **extra)
        if follow:
            response = self._handle_redirects(response, **extra)
        return response

    def delete(self, path, data='', content_type='application/octet-stream',
               follow=False, secure=False, **extra):
        """
        Send a DELETE request to the server.
        """
        response = super(Client, self).delete(path, data=data,
                                              content_type=content_type,
                                              secure=secure, **extra)
        if follow:
            response = self._handle_redirects(response, **extra)
        return response

    def login(self, **credentials):
        """
        Sets the Factory to appear as if it has successfully logged into a site.

        Returns True if login is possible; False if the provided credentials
        are incorrect, or the user is inactive, or if the sessions framework is
        not available.
        """
        from django.contrib.auth import authenticate, login
        user = authenticate(**credentials)
        if (user and user.is_active and
                apps.is_installed('django.contrib.sessions')):
            engine = import_module(settings.SESSION_ENGINE)

            # Create a fake request to store login details.
            request = HttpRequest()
            if self.session:
                request.session = self.session
            else:
                request.session = engine.SessionStore()
            login(request, user)

            # Save the session values.
            request.session.save()

            # Set the cookie to represent the session.
            session_cookie = settings.SESSION_COOKIE_NAME
            self.cookies[session_cookie] = request.session.session_key
            cookie_data = {
                'max-age': None,
                'path': '/',
                'domain': settings.SESSION_COOKIE_DOMAIN,
                'secure': settings.SESSION_COOKIE_SECURE or None,
                'expires': None,
            }
            self.cookies[session_cookie].update(cookie_data)

            return True
        else:
            return False

    def logout(self):
        """
        Removes the authenticated user's cookies and session object.

        Causes the authenticated user to be logged out.
        """
        from django.contrib.auth import get_user, logout

        request = HttpRequest()
        engine = import_module(settings.SESSION_ENGINE)
        if self.session:
            request.session = self.session
            request.user = get_user(request)
        else:
            request.session = engine.SessionStore()
        logout(request)
        self.cookies = SimpleCookie()

    def _handle_redirects(self, response, **extra):
        "Follows any redirects by requesting responses from the server using GET."
        response.redirect_chain = []
        while response.status_code in (301, 302, 303, 307):
            url = response.url
            redirect_chain = response.redirect_chain
            redirect_chain.append((url, response.status_code))

            url = urlsplit(url)
            if url.scheme:
                extra['wsgi.url_scheme'] = url.scheme
            if url.hostname:
                extra['SERVER_NAME'] = url.hostname
            if url.port:
                extra['SERVER_PORT'] = str(url.port)

            response = self.get(url.path, QueryDict(url.query), follow=False, **extra)
            response.redirect_chain = redirect_chain

            # Prevent loops: stop as soon as a (url, status) pair repeats.
            if response.redirect_chain[-1] in response.redirect_chain[0:-1]:
                break
        return response
|
Carrefour/linux-replication | refs/heads/replication-v3.6 | scripts/tracing/draw_functrace.py | 14679 | #!/usr/bin/python
"""
Copyright 2008 (c) Frederic Weisbecker <fweisbec@gmail.com>
Licensed under the terms of the GNU GPL License version 2
This script parses a trace provided by the function tracer in
kernel/trace/trace_functions.c
The resulted trace is processed into a tree to produce a more human
view of the call stack by drawing textual but hierarchical tree of
calls. Only the functions's names and the the call time are provided.
Usage:
Be sure that you have CONFIG_FUNCTION_TRACER
# mount -t debugfs nodev /sys/kernel/debug
# echo function > /sys/kernel/debug/tracing/current_tracer
$ cat /sys/kernel/debug/tracing/trace_pipe > ~/raw_trace_func
Wait some times but not too much, the script is a bit slow.
Break the pipe (Ctrl + Z)
$ scripts/draw_functrace.py < raw_trace_func > draw_functrace
Then you have your drawn trace in draw_functrace
"""
import sys, re
class CallTree:
    """ Tree representation of the functions call stack.  A function with
    no traced parent in the kernel (interrupt, syscall, kernel thread...)
    is attached to a virtual parent called ROOT.
    """
    ROOT = None

    def __init__(self, func, time=None, parent=None):
        self._func = func
        self._time = time
        self._parent = parent if parent is not None else CallTree.ROOT
        self._children = []

    def calls(self, func, calltime):
        """ Record that this function called *func* at *calltime*.
        @return: A reference to the newly created child node.
        """
        node = CallTree(func, calltime, self)
        self._children.append(node)
        return node

    def getParent(self, func):
        """ Walk upwards to the last ancestor named *func*.  If no such
        ancestor exists, create (and return) it as a new child of ROOT.
        @return: A reference to the parent.
        """
        node = self
        while node != CallTree.ROOT and node._func != func:
            node = node._parent
        if node == CallTree.ROOT:
            return CallTree.ROOT.calls(func, None)
        return node

    def __repr__(self):
        return self._render("", True)

    def _render(self, branch, lastChild):
        # One line for this node, then recurse into children with an
        # extended branch prefix; the last child drops the vertical bar.
        if self._time is None:
            out = "%s----%s\n" % (branch, self._func)
        else:
            out = "%s----%s (%s)\n" % (branch, self._func, self._time)
        if lastChild:
            branch = branch[:-1] + " "
        last = len(self._children) - 1
        for i, child in enumerate(self._children):
            out += child._render(branch + " |", i == last)
        return out
class BrokenLineException(Exception):
    """Raised when a line is incomplete because of the pipe breakage;
    processing stops and the line is ignored.
    """
class CommentLineException(Exception):
    """Raised for comment lines (as at the beginning of the trace file);
    such lines are simply skipped.
    """
def parseLine(line):
    """Parse one ftrace line into (calltime, callee, caller).

    Raises CommentLineException for '#' comment lines and
    BrokenLineException for lines that do not match the trace format.
    """
    stripped = line.strip()
    if stripped.startswith("#"):
        raise CommentLineException
    match = re.match("[^]]+?\\] +([0-9.]+): (\\w+) <-(\\w+)", stripped)
    if match is None:
        raise BrokenLineException
    return match.groups()
def main():
	# Build the call tree from the raw trace on stdin, then print it.
	# (Python 2 print-statement syntax below.)
	CallTree.ROOT = CallTree("Root (Nowhere)", None, None)
	tree = CallTree.ROOT

	for line in sys.stdin:
		try:
			calltime, callee, caller = parseLine(line)
		except BrokenLineException:
			# Truncated final line (broken pipe): stop processing.
			break
		except CommentLineException:
			continue
		tree = tree.getParent(caller)
		tree = tree.calls(callee, calltime)

	print CallTree.ROOT

if __name__ == "__main__":
	main()
|
lanfker/tdma_imac | refs/heads/master | src/config-store/bindings/callbacks_list.py | 837 | callback_classes = [
]
|
trondhindenes/ansible | refs/heads/devel | lib/ansible/plugins/inventory/ini.py | 37 | # Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
inventory: ini
version_added: "2.4"
short_description: Uses an Ansible INI file as inventory source.
description:
- INI file based inventory, sections are groups or group related with special `:modifiers`.
- Entries in sections C([group_1]) are hosts, members of the group.
- Hosts can have variables defined inline as key/value pairs separated by C(=).
- The C(children) modifier indicates that the section contains groups.
- The C(vars) modifier indicates that the section contains variables assigned to members of the group.
- Anything found outside a section is considered an 'ungrouped' host.
- Values passed in using the C(key=value) syntax are interpreted as Python literal structure (strings, numbers, tuples, lists, dicts,
booleans, None), alternatively as string. For example C(var=FALSE) would create a string equal to 'FALSE'. Do not rely on types set
during definition, always make sure you specify type with a filter when needed when consuming the variable.
notes:
- It takes the place of the previously hardcoded INI inventory.
- To function it requires being whitelisted in configuration.
- Variable values are processed by Python's ast.literal_eval function (U(https://docs.python.org/2/library/ast.html#ast.literal_eval))
which could cause the value to change in some cases. See the Examples for proper quoting to prevent changes. Another option would be
to use the yaml format for inventory source which processes the values correctly.
'''
EXAMPLES = '''
example1: |
# example cfg file
[web]
host1
host2 ansible_port=222
[web:vars]
http_port=8080 # all members of 'web' will inherit these
myvar=23
[web:children] # child groups will automatically add their hosts to partent group
apache
nginx
[apache]
tomcat1
tomcat2 myvar=34 # host specific vars override group vars
tomcat3 mysecret="'03#pa33w0rd'" # proper quoting to prevent value changes
[nginx]
jenkins1
[nginx:vars]
has_java = True # vars in child groups override same in parent
[all:vars]
has_java = False # 'all' is 'top' parent
example2: |
# other example config
host1 # this is 'ungrouped'
# both hosts have same IP but diff ports, also 'ungrouped'
host2 ansible_host=127.0.0.1 ansible_port=44
host3 ansible_host=127.0.0.1 ansible_port=45
[g1]
host4
[g2]
host4 # same host as above, but member of 2 groups, will inherit vars from both
# inventory hostnames are unique
'''
import ast
import re
from ansible.plugins.inventory import BaseFileInventoryPlugin, detect_range, expand_hostname_range
from ansible.parsing.utils.addresses import parse_address
from ansible.errors import AnsibleError, AnsibleParserError
from ansible.module_utils._text import to_bytes, to_text
from ansible.utils.shlex import shlex_split
class InventoryModule(BaseFileInventoryPlugin):
    """
    Takes an INI-format inventory file and builds a list of groups and subgroups
    with their associated hosts and variable settings.
    """

    NAME = 'ini'

    # Characters that start a comment, in text and bytes form respectively.
    _COMMENT_MARKERS = frozenset((u';', u'#'))
    b_COMMENT_MARKERS = frozenset((b';', b'#'))

    def __init__(self):
        super(InventoryModule, self).__init__()
        # Compiled regexes, filled in lazily by _compile_patterns().
        self.patterns = {}
        # Path of the file being parsed; used for error messages.
        self._filename = None

    def parse(self, inventory, loader, path, cache=True):
        """Read, decode and parse the INI inventory file at ``path``."""
        super(InventoryModule, self).parse(inventory, loader, path)
        self._filename = path
        try:
            # Read in the hosts, groups, and variables defined in the inventory file.
            if self.loader:
                (b_data, private) = self.loader._get_file_contents(path)
            else:
                b_path = to_bytes(path, errors='surrogate_or_strict')
                with open(b_path, 'rb') as fh:
                    b_data = fh.read()
            try:
                # Faster to do to_text once on a long string than many
                # times on smaller strings
                data = to_text(b_data, errors='surrogate_or_strict').splitlines()
            except UnicodeError:
                # Handle non-utf8 in comment lines: https://github.com/ansible/ansible/issues/17593
                data = []
                for line in b_data.splitlines():
                    if line and line[0] in self.b_COMMENT_MARKERS:
                        # Replace is okay for comment lines
                        # data.append(to_text(line, errors='surrogate_then_replace'))
                        # Currently we only need these lines for accurate lineno in errors
                        data.append(u'')
                    else:
                        # Non-comment lines still have to be valid utf-8
                        data.append(to_text(line, errors='surrogate_or_strict'))
            self._parse(path, data)
        except Exception as e:
            raise AnsibleParserError(e)

    def _raise_error(self, message):
        """Raise an AnsibleError annotated with the current file and line number."""
        raise AnsibleError("%s:%d: " % (self._filename, self.lineno) + message)

    def _parse(self, path, lines):
        '''
        Populates self.groups from the given array of lines. Raises an error on
        any parse failure.
        '''

        self._compile_patterns()

        # We behave as though the first line of the inventory is '[ungrouped]',
        # and begin to look for host definitions. We make a single pass through
        # each line of the inventory, building up self.groups and adding hosts,
        # subgroups, and setting variables as we go.

        pending_declarations = {}
        groupname = 'ungrouped'
        state = 'hosts'
        self.lineno = 0
        for line in lines:
            self.lineno += 1

            line = line.strip()
            # Skip empty lines and comments
            if not line or line[0] in self._COMMENT_MARKERS:
                continue

            # Is this a [section] header? That tells us what group we're parsing
            # definitions for, and what kind of definitions to expect.

            m = self.patterns['section'].match(line)
            if m:
                (groupname, state) = m.groups()

                state = state or 'hosts'
                if state not in ['hosts', 'children', 'vars']:
                    title = ":".join(m.groups())
                    self._raise_error("Section [%s] has unknown type: %s" % (title, state))

                # If we haven't seen this group before, we add a new Group.
                if groupname not in self.inventory.groups:
                    # Either [groupname] or [groupname:children] is sufficient to declare a group,
                    # but [groupname:vars] is allowed only if the # group is declared elsewhere.
                    # We add the group anyway, but make a note in pending_declarations to check at the end.
                    #
                    # It's possible that a group is previously pending due to being defined as a child
                    # group, in that case we simply pass so that the logic below to process pending
                    # declarations will take the appropriate action for a pending child group instead of
                    # incorrectly handling it as a var state pending declaration
                    if state == 'vars' and groupname not in pending_declarations:
                        pending_declarations[groupname] = dict(line=self.lineno, state=state, name=groupname)

                    self.inventory.add_group(groupname)

                # When we see a declaration that we've been waiting for, we process and delete.
                if groupname in pending_declarations and state != 'vars':
                    if pending_declarations[groupname]['state'] == 'children':
                        self._add_pending_children(groupname, pending_declarations)
                    elif pending_declarations[groupname]['state'] == 'vars':
                        del pending_declarations[groupname]

                continue
            elif line.startswith('[') and line.endswith(']'):
                # BUGFIX: the two message halves used to concatenate to
                # "no spacesin the section entry"; a separating space was missing.
                self._raise_error("Invalid section entry: '%s'. Please make sure that there are no spaces" % line +
                                  " in the section entry, and that there are no other invalid characters")

            # It's not a section, so the current state tells us what kind of
            # definition it must be. The individual parsers will raise an
            # error if we feed them something they can't digest.

            # [groupname] contains host definitions that must be added to
            # the current group.
            if state == 'hosts':
                hosts, port, variables = self._parse_host_definition(line)
                self._populate_host_vars(hosts, variables, groupname, port)

            # [groupname:vars] contains variable definitions that must be
            # applied to the current group.
            elif state == 'vars':
                (k, v) = self._parse_variable_definition(line)
                self.inventory.set_variable(groupname, k, v)

            # [groupname:children] contains subgroup names that must be
            # added as children of the current group. The subgroup names
            # must themselves be declared as groups, but as before, they
            # may only be declared later.
            elif state == 'children':
                child = self._parse_group_name(line)
                if child not in self.inventory.groups:
                    if child not in pending_declarations:
                        pending_declarations[child] = dict(line=self.lineno, state=state, name=child, parents=[groupname])
                    else:
                        pending_declarations[child]['parents'].append(groupname)
                else:
                    self.inventory.add_child(groupname, child)
            else:
                # This can happen only if the state checker accepts a state that isn't handled above.
                self._raise_error("Entered unhandled state: %s" % (state))

        # Any entries in pending_declarations not removed by a group declaration above mean that there was an unresolved reference.
        # We report only the first such error here.
        for g in pending_declarations:
            decl = pending_declarations[g]
            if decl['state'] == 'vars':
                raise AnsibleError("%s:%d: Section [%s:vars] not valid for undefined group: %s" % (path, decl['line'], decl['name'], decl['name']))
            elif decl['state'] == 'children':
                raise AnsibleError("%s:%d: Section [%s:children] includes undefined group: %s" % (path, decl['line'], decl['parents'].pop(), decl['name']))

    def _add_pending_children(self, group, pending):
        """Attach ``group`` to all parents recorded for it and recurse upwards."""
        for parent in pending[group]['parents']:
            self.inventory.add_child(parent, group)
            if parent in pending and pending[parent]['state'] == 'children':
                self._add_pending_children(parent, pending)
        del pending[group]

    def _parse_group_name(self, line):
        '''
        Takes a single line and tries to parse it as a group name. Returns the
        group name if successful, or raises an error.
        '''

        m = self.patterns['groupname'].match(line)
        if m:
            return m.group(1)

        self._raise_error("Expected group name, got: %s" % (line))

    def _parse_variable_definition(self, line):
        '''
        Takes a string and tries to parse it as a variable definition. Returns
        the key and value if successful, or raises an error.
        '''

        # TODO: We parse variable assignments as a key (anything to the left of
        # an '='"), an '=', and a value (anything left) and leave the value to
        # _parse_value to sort out. We should be more systematic here about
        # defining what is acceptable, how quotes work, and so on.

        if '=' in line:
            (k, v) = [e.strip() for e in line.split("=", 1)]
            return (k, self._parse_value(v))

        self._raise_error("Expected key=value, got: %s" % (line))

    def _parse_host_definition(self, line):
        '''
        Takes a single line and tries to parse it as a host definition. Returns
        a list of Hosts if successful, or raises an error.
        '''

        # A host definition comprises (1) a non-whitespace hostname or range,
        # optionally followed by (2) a series of key="some value" assignments.
        # We ignore any trailing whitespace and/or comments. For example, here
        # are a series of host definitions in a group:
        #
        # [groupname]
        # alpha
        # beta:2345 user=admin      # we'll tell shlex
        # gamma sudo=True user=root # to ignore comments

        try:
            tokens = shlex_split(line, comments=True)
        except ValueError as e:
            self._raise_error("Error parsing host definition '%s': %s" % (line, e))

        (hostnames, port) = self._expand_hostpattern(tokens[0])

        # Try to process anything remaining as a series of key=value pairs.
        variables = {}
        for t in tokens[1:]:
            if '=' not in t:
                self._raise_error("Expected key=value host variable assignment, got: %s" % (t))
            (k, v) = t.split('=', 1)
            variables[k] = self._parse_value(v)

        return hostnames, port, variables

    def _expand_hostpattern(self, hostpattern):
        '''
        Takes a single host pattern and returns a list of hostnames and an
        optional port number that applies to all of them.
        '''

        # Can the given hostpattern be parsed as a host with an optional port
        # specification?
        try:
            (pattern, port) = parse_address(hostpattern, allow_ranges=True)
        except Exception:
            # not a recognizable host pattern
            pattern = hostpattern
            port = None

        # Once we have separated the pattern, we expand it into list of one or
        # more hostnames, depending on whether it contains any [x:y] ranges.
        if detect_range(pattern):
            hostnames = expand_hostname_range(pattern)
        else:
            hostnames = [pattern]

        return (hostnames, port)

    @staticmethod
    def _parse_value(v):
        '''
        Attempt to transform the string value from an ini file into a basic python object
        (int, dict, list, unicode string, etc).
        '''
        try:
            v = ast.literal_eval(v)
        # Using explicit exceptions.
        # Likely a string that literal_eval does not like. We will then just set it.
        except ValueError:
            # For some reason this was thought to be malformed.
            pass
        except SyntaxError:
            # Is this a hash with an equals at the end?
            pass
        return to_text(v, nonstring='passthru', errors='surrogate_or_strict')

    def _compile_patterns(self):
        '''
        Compiles the regular expressions required to parse the inventory and
        stores them in self.patterns.
        '''

        # Section names are square-bracketed expressions at the beginning of a
        # line, comprising (1) a group name optionally followed by (2) a tag
        # that specifies the contents of the section. We ignore any trailing
        # whitespace and/or comments. For example:
        #
        # [groupname]
        # [somegroup:vars]
        # [naughty:children] # only get coal in their stockings

        self.patterns['section'] = re.compile(
            to_text(r'''^\[
                    ([^:\]\s]+)             # group name (see groupname below)
                    (?::(\w+))?             # optional : and tag name
                    \]
                    \s*                     # ignore trailing whitespace
                    (?:\#.*)?               # and/or a comment till the
                    $                       # end of the line
                    ''', errors='surrogate_or_strict'), re.X
        )

        # FIXME: What are the real restrictions on group names, or rather, what
        # should they be? At the moment, they must be non-empty sequences of non
        # whitespace characters excluding ':' and ']', but we should define more
        # precise rules in order to support better diagnostics.

        self.patterns['groupname'] = re.compile(
            to_text(r'''^
                    ([^:\]\s]+)
                    \s*                     # ignore trailing whitespace
                    (?:\#.*)?               # and/or a comment till the
                    $                       # end of the line
                    ''', errors='surrogate_or_strict'), re.X
        )
|
UOSHUB/BackEnd | refs/heads/master | API/views/courses.py | 1 | from django.http.response import HttpResponse
from rest_framework.response import Response
from rest_framework.views import APIView
from Requests import blackboard, myudc
from .common import login_required
from .api_root import APIRoot
from threading import Thread
from zipfile import ZipFile
# Student's courses requests handler
class Courses(APIView):
    """
    This returns course's data, which is a list
    of documents and deadline in the course
    """

    @staticmethod
    @login_required("blackboard")
    def get(request):
        """Return the student's courses list, scraped from Blackboard Mobile."""
        # Fetch the raw courses listing with the stored Blackboard cookies.
        raw_list = blackboard.get.courses_list(request.session["blackboard"])

        # The scraper needs a builder for absolute per-course API URLs.
        def build_url(path):
            return request.build_absolute_uri("/api/courses/" + path + "/")

        return Response(blackboard.scrape.courses_list(raw_list, build_url))
# Course's Blackboard content handler
class Content(APIView):
"""
This returns course's Blackboard content,
which includes its documents and deadlines
"""
# Returns course's documents and deadlines
@staticmethod
@login_required("blackboard")
def get(request, course_key, course_id):
# Return requested course's data
return Response(
# Get & scrape course's data from Blackboard Mobile
blackboard.scrape.course_data(
blackboard.get.course_data(
# Send Blackboard cookies & course's id
request.session["blackboard"], course_id
), course_key, course_id
)
)
# Course's MyUDC details handler
class Details(APIView):
"""
Returns course's MyUDC details,
which includes its location, time, doctor, etc...
"""
# Returns a single course's details
@staticmethod
@login_required("myudc")
def get(request, course_key, crn, term_code):
# If crn or term aren't sent
if not (crn and term_code):
# Return to API root with an error message
return APIRoot.get(request, request.path)
# Otherwise, return requested course's details
return Response(
# Get & scrape course's details from MyUDC
myudc.scrape.course(
myudc.get.course(
# Send MyUDC session
request.session["myudc"],
# Send course's key, crn and term code
crn, course_key, term_code
)
)
)
# Course's Blackboard document download handler
class Documents(APIView):
# Returns a course's document file
@staticmethod
@login_required("blackboard")
def get(request, document_id):
# Get course document data and name from Blackboard
file_data, file_name = blackboard.get.course_document(
# Send Blackboard cookies, and document content id and xid
request.session["blackboard"], *document_id.split("_")
)
# Construct a response with document content and type
response = HttpResponse(**file_data)
# Specify document file name in response and return it
response["Content-Disposition"] = f'attachment; filename="{file_name}"'
return response
# Course's Blackboard documents download as a zip handler
class Zip(APIView):
# Returns a course's documents in a zip file
@staticmethod
@login_required("blackboard")
def get(request, documents_ids, zip_name):
# Create an HTTP response with content type zip
response = HttpResponse(content_type="application/x-zip-compressed")
# Specify zip file name in response
response["Content-Disposition"] = f'attachment; filename="{zip_name or "documents"}.zip"'
# Open a zip file that'll contain downloaded documents
zip_file = ZipFile(response, mode="w")
# Downloads and adds document to zip file
def download_document(document_id):
# Get course document data and name from Blackboard
file_data, file_name = blackboard.get.course_document(
# Send Blackboard cookies, and document content id and xid
request.session["blackboard"], *document_id.split("_")
)
# Write downloaded document to zip file
zip_file.writestr(file_name, file_data["content"])
# Create a threads queue
threads = []
# Loop through requested documents ids
for document_id in documents_ids.split(","):
# Append a new thread to queue that downloads and zips document
threads.append(Thread(target=download_document, args=(document_id,)))
# Start created thread
threads[-1].start()
# Join all started threads to main one
[thread.join() for thread in threads]
# Once done, close zip file and return it
zip_file.close()
return response
|
pballand/congress | refs/heads/master | thirdparty/antlr3-antlr-3.5/runtime/Python3/tests/t024finally.py | 5 | import antlr3
import testbase
import unittest
class t024finally(testbase.ANTLRTest):
    # Compile the grammar under test once per test case (helper from ANTLRTest).
    def setUp(self):
        self.compileGrammar()
    def testValid1(self):
        # Lex and parse the input, then check that both the catch and the
        # finally actions of the grammar fired, in that order.
        cStream = antlr3.StringStream('foobar')
        lexer = self.getLexer(cStream)
        tStream = antlr3.CommonTokenStream(lexer)
        parser = self.getParser(tStream)
        events = parser.prog()
        self.assertEqual(events, ['catch', 'finally'])
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
|
locked/4stability | refs/heads/master | adxl345.py | 1 | # license: BSD, see LICENSE included in this package
#
# based on awesome lib from Jonathan Williamson (https://github.com/pimoroni/adxl345-python/)
#
import smbus
import time
import sys
class ADXL345:
    """Minimal smbus driver for the ADXL345 3-axis accelerometer.

    Axis values are returned as raw signed device counts; no g-scale
    conversion is applied (the former SCALE_MULTIPLIER code was already
    disabled in the original driver).
    """

    # Register addresses.
    DATA_FORMAT = 0x31
    BW_RATE = 0x2C
    POWER_CTL = 0x2D

    # Output data rate flags for the BW_RATE register.
    BW_RATE_1600HZ = 0x0F
    BW_RATE_800HZ = 0x0E
    BW_RATE_400HZ = 0x0D
    BW_RATE_200HZ = 0x0C
    BW_RATE_100HZ = 0x0B
    BW_RATE_50HZ = 0x0A
    BW_RATE_25HZ = 0x09
    BW_RATE_3HZ = 0x06
    BW_RATE_1HZ = 0x05
    BW_RATE_01HZ = 0x00

    # Measurement range flags for the DATA_FORMAT register.
    RANGE_2G = 0x00
    RANGE_4G = 0x01
    RANGE_8G = 0x02
    RANGE_16G = 0x03

    # POWER_CTL measure bit and the first axis-data register.
    MEASURE = 0x08
    AXES_DATA = 0x32

    address = None
    bus = None

    def __init__(self, address=0x53, bwrate=None, range=None):
        """Open I2C bus 1 and start measuring.

        :param address: I2C address of the device (0x53 by default).
        :param bwrate: one of the BW_RATE_* flags; defaults to 100 Hz.
        :param range: one of the RANGE_* flags; defaults to +/-2g.
            (Parameter name kept for backward compatibility even though
            it shadows the builtin.)
        """
        self.address = address
        self.bus = smbus.SMBus(1)
        if bwrate is None:
            bwrate = self.BW_RATE_100HZ
        self.setBandwidthRate(bwrate)
        if range is None:
            range = self.RANGE_2G
        self.setRange(range)
        self.enableMeasurement()

    def enableMeasurement(self):
        """Set the MEASURE bit so the device starts sampling."""
        self.bus.write_byte_data(self.address, self.POWER_CTL, self.MEASURE)

    def setBandwidthRate(self, rate_flag):
        """Program the output data rate (one of the BW_RATE_* flags)."""
        self.bus.write_byte_data(self.address, self.BW_RATE, rate_flag)

    def setRange(self, range_flag):
        """Program the measurement range, preserving the other DATA_FORMAT bits."""
        value = self.bus.read_byte_data(self.address, self.DATA_FORMAT)
        value &= ~0x0F
        value |= range_flag
        # Keep bit 3 set, as the original driver did (full-resolution mode
        # on the ADXL345 — confirm against the datasheet).
        value |= 0x08
        self.bus.write_byte_data(self.address, self.DATA_FORMAT, value)

    @staticmethod
    def _to_signed_16(value):
        """Sign-extend a raw 16-bit two's-complement reading to a Python int."""
        if value & 0x8000:
            value -= 0x10000
        return value

    def getAxes(self):
        """Read one sample and return it as ``{"x": ..., "y": ..., "z": ...}``.

        Each axis is a signed integer in raw device counts, assembled from
        the little-endian 16-bit register pairs starting at AXES_DATA.
        """
        raw = self.bus.read_i2c_block_data(self.address, self.AXES_DATA, 6)
        x = self._to_signed_16(raw[0] | (raw[1] << 8))
        y = self._to_signed_16(raw[2] | (raw[3] << 8))
        z = self._to_signed_16(raw[4] | (raw[5] << 8))
        return {"x": x, "y": y, "z": z}
|
15Dkatz/pants | refs/heads/master | src/python/pants/engine/parser.py | 3 | # coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from abc import abstractmethod
from pants.engine.addressable import Exactly
from pants.util.meta import AbstractClass
class ParseError(Exception):
  """Raised when BUILD configuration cannot be parsed."""
class SymbolTable(AbstractClass):
  """A one-classmethod interface exposing a symbol table dict."""
  @abstractmethod
  def table(self):
    """Returns a dict of name to implementation class."""
  def constraint(self):
    """Returns the typeconstraint for the symbol table"""
    # NB Sort types so that multiple calls get the same tuples.
    # NOTE(review): sorting a set of classes relies on Python 2's
    # arbitrary-type ordering; confirm before porting to Python 3.
    symbol_table_types = sorted(set(self.table().values()))
    return Exactly(*symbol_table_types, description='symbol table types')
class EmptyTable(SymbolTable):
  """A SymbolTable that exposes no symbols at all."""
  def table(self):
    return {}
class Parser(AbstractClass):
  """Parses BUILD file content into addressable, Serializable objects."""
  @abstractmethod
  def parse(self, filepath, filecontent):
    """
    :param string filepath: The name of the file being parsed. The parser should not assume
                            that the path is accessible, and should consume the filecontent.
    :param bytes filecontent: The raw byte content to parse.
    :returns: A list of decoded addressable, Serializable objects. The callable will
              raise :class:`ParseError` if there were any problems encountered parsing the filecontent.
    :rtype: :class:`collections.Callable`
    """
|
jswope00/griffinx | refs/heads/master | common/test/acceptance/pages/common/logout.py | 162 | """
Logout Page.
"""
from bok_choy.page_object import PageObject
from . import BASE_URL
class LogoutPage(PageObject):
    """
    Logout page to logout current logged in user.
    """
    # Hitting this URL logs the current user out.
    url = BASE_URL + "/logout"
    def is_browser_on_page(self):
        # The login call-to-action is only rendered for logged-out visitors,
        # so its presence confirms the logout completed.
        return self.q(css='.cta-login').present
|
ishirav/draw-and-learn | refs/heads/master | lessons/1/part-3.rectangles.py | 1 | from draw import *
# Open a drawing window titled after this script's filename.
w = Window(title=__file__)
# A plain rectangle with default color and line thickness.
w.rect(50, 50, 100, 200)
# Outline-only rectangle with a custom color and a thicker border.
w.rect(150, 150, 300, 250, color='orange', thickness=5)
# Filled rectangle: 'color' is the outline, 'fill' the interior.
w.rect(300, 300, 400, 550, color='blue', fill='cyan', thickness=3)
# Keep the window open until the user closes it.
w.wait()
|
greut/invenio-kwalitee | refs/heads/master | tests/utils.py | 3 | # -*- coding: utf-8 -*-
#
# This file is part of kwalitee
# Copyright (C) 2014 CERN.
#
# kwalitee is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# kwalitee is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with kwalitee; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
#
# In applying this licence, CERN does not waive the privileges and immunities
# granted to it by virtue of its status as an Intergovernmental Organization
# or submit itself to any jurisdiction.
"""Mocks and mixins for the tests."""
from __future__ import unicode_literals
GPL = """
{1} This file is part of kwalitee
{1} Copyright (C) {0} CERN.
{1}
{1} kwalitee is free software; you can redistribute it and/or
{1} modify it under the terms of the GNU General Public License as
{1} published by the Free Software Foundation; either version 2 of the
{1} License, or (at your option) any later version.
{1}
{1} kwalitee is distributed in the hope that it will be useful, but
{1} WITHOUT ANY WARRANTY; without even the implied warranty of
{1} MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
{1} General Public License for more details.
{1}
{1} You should have received a copy of the GNU General Public License
{1} along with kwalitee; if not, write to the Free Software Foundation,
{1} Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
{1}
{1} In applying this licence, CERN does not waive the privileges and immunities
{1} granted to it by virtue of its status as an Intergovernmental Organization
{1} or submit itself to any jurisdiction.
"""
class MyQueue(object):
    """Queue mock to use in place of the RQ queue.

    .. seealso:: `RQ <http://python-rq.org/docs/>`_
    """

    def __init__(self):
        """Start with nothing queued."""
        self.queue = []

    def __len__(self):
        """Number of jobs currently queued."""
        return len(self.queue)

    def enqueue(self, *args, **kwargs):
        """Record a job.

        :param args: tuple is appended to list
        :param kwargs: are ignored.
        """
        self.queue.insert(0, args)

    def dequeue(self):
        """Pop and return the oldest enqueued args tuple."""
        return self.queue.pop()
|
t0in4/django | refs/heads/master | django/conf/locale/uk/formats.py | 565 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
# 'р.' is the Ukrainian abbreviation for "рік" (year).
DATE_FORMAT = 'j E Y р.'
TIME_FORMAT = 'H:i'
DATETIME_FORMAT = 'j E Y р. H:i'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j F'
SHORT_DATE_FORMAT = 'j M Y'
# SHORT_DATETIME_FORMAT =
FIRST_DAY_OF_WEEK = 1 # Monday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
# DATE_INPUT_FORMATS =
# TIME_INPUT_FORMATS =
# DATETIME_INPUT_FORMATS =
DECIMAL_SEPARATOR = ','
# NOTE(review): Django locales usually use a non-breaking space as the
# thousand separator; confirm this plain space is intended.
THOUSAND_SEPARATOR = ' '
# NUMBER_GROUPING =
|
buchbend/astrolyze | refs/heads/master | astrolyze/sed/plotSEDs.py | 2 | import matplotlib.pyplot as plt
import astrolyze.functions.astro_functions as astFunc
import astrolyze.functions.constants as const
import numpy as np
from copy import deepcopy as copy
def plot_sed(p2, nu_or_lambda='nu', color='black', kappa='easy',
             linewidth=0.5, xRange='normal'):
    '''
    Plot a multi component greybody model.

    :param p2: formatted: p2 = [[t1,t2,t3,...], [N1,N2,N3,...], [beta]]
    :param nu_or_lambda: plot against frequency ``'nu'`` or wavelenght
        ``'lambda'``
    :param kappa: The kappa to use. ``'easy'`` or ``'Kruegel'``. Please refer
        to :py:func:`functions.astroFunctions.greyBody` for more information.
    :param xRange: ``'LTIR'`` (the 3-4000 micron window), ``'normal'``
        (arbitrary wide range), or an explicit ``[xmin, xmax(, step)]``
        list in GHz.
    :param linewidth: The linewidth of the plotted lines. Default to 0.5.
    :type linewidth: float
    :param color: the color of the plotted lines. Default to ``'black'``
    :type color: matplotlib conform color.
    '''
    if xRange == 'LTIR':
        # Plot the SED over the wavelength window used for the L_TIR
        # determination. (NOTE(review): comment used to say 3-1100 micron
        # but the upper bound below is 4000 micron — confirm.)
        lambda_min = 3e-6  # metre (3 micron)
        lambda_max = 4000e-6  # metre (4000 micron)
        # Convert the wavelength limits to frequency limits in GHz.
        # BUGFIX: the original code overwrote xmin before computing xmax
        # from it, which made xmax < xmin and produced an empty axis.
        xmin = const.c / lambda_max / 1e9
        xmax = const.c / lambda_min / 1e9
        step = 0.1
    if xRange == 'normal':
        # arbitrary range definition
        xmin = 1e-2
        xmax = 3e5
        step = 0.5
    if type(xRange) == list:
        xmin = xRange[0]
        xmax = xRange[1]
        if len(xRange) < 3:
            step = 0.1
        else:
            step = xRange[2]
    x = np.arange(xmin, xmax, step)
    # multi_component_grey_body gives the summed 'model' and the individual
    # components in the list 'grey'.
    if nu_or_lambda == 'nu':
        model, grey = astFunc.multi_component_grey_body(p2, x, 'nu', kappa)
    if nu_or_lambda == 'lambda':
        model, grey = astFunc.multi_component_grey_body(p2, x, 'nu', kappa)
        # Convert the frequency axis (GHz) to wavelength (micron) and
        # reverse all arrays so the plot runs with increasing wavelength.
        y = copy(x)
        modelLambda = copy(model)
        greyLambda = []
        for i in range(len(grey)):
            greyLambda += [copy(grey[i])]
        for i in range(len(x)):
            y[i] = (const.c / (x[len(x) - i - 1] * 1e9)) / 1e-6
        for i in range(len(model)):
            modelLambda[i] = model[len(model) - i - 1]
            for j in range(len(greyLambda)):
                greyLambda[j][i] = grey[j][len(grey[j]) - i - 1]
        x = y
        model = modelLambda
        grey = greyLambda
    # BUGFIX: the linewidth parameter was accepted but ignored (lw was
    # hard-coded to 0.5, which remains the default).
    plt.loglog(x, model, ls='-', color=color, label='_nolegend_',
               lw=linewidth, marker='')
    # Cycle through line styles for the individual greybody components.
    linestyles = [':', '-.', '-']
    j = 0
    for i in grey:
        plt.loglog(x, i, color=color, ls=linestyles[j], lw=linewidth, marker='')
        j += 1
def create_figure(pList, data, parted=[1], plotLegend='no', label=['Fluxes'],
                  color=['black'], marker=['x'], markerSize=6, titleString='',
                  xLabel='',yLabel='', textString1=None, nu_or_lambda='nu',
                  fontdict=None, printFitResult=True, fitResultLoc=[50,10],
                  lineWidth=0.5, kappa='easy', chisq='', xRange='normal',
                  plotXlabel=None, plotYlabel=None, noXtics=False,
                  noYtics=False, lineColor='black', ylim=[1e-3,3e1],
                  xlim=[500,20000]):
    '''
    Plots the total SED of M33. Resamples the older gildas output.
    input:
    pList: Multi-Component GreyBody parameters. pList = [[t1,t2,t3,...],
    [N1,N2,N3,...],[beta]]
    '''
    # NOTE(review): several defaults are mutable lists (parted, label, color,
    # marker, fitResultLoc, ylim, xlim) and data[0] is mutated in the
    # 'lambda' branch below — confirm callers do not reuse those objects.
    # NOTE(review): markerSize is accepted but a local markersize=6 is used
    # instead; plotLegend defaults to 'no' but is compared with == True,
    # so callers must pass True explicitly to get a legend.
    # ensures the current figure is an empty page
    # generates the Text that shows the fit results for this Plot
    # if pList corresponds to a 2 component grey Body it prints the N1/N2 value
    textString = ''
    for i in range(len(pList[0])):
        textString += (r'T'+str(i+1)+'='+str('%1.1f'%pList[0][i])+' K\nM'+
                       str(i+1)+'='+str("%1.2e"%pList[1][i])+' M$_{\odot}$\n')
    if len(pList[0])==2:
        textString += r'N1/N2 = '+str('%i'%(pList[1][0]/pList[1][1]))+'\n'
    textString += (r'beta = '+str("%1.2f"%pList[2][0])+'\n$\\chi^2$ ='+
                   str("%1.2f"%chisq)+'\n')
    print textString
    # sets the limits of the plot Page
    plotSize = 0.9 # percentace of plotpage larger than the plotted values?
    if nu_or_lambda=='nu':
        xLimNu=[min(data[0])-min(data[0])*plotSize,max(data[0])+max(data[0])]
    if nu_or_lambda == 'lambda':
        # Convert the data frequency axis (GHz) to wavelength in micron.
        newData = []
        for i in data[0]:
            #print i
            newData+=[const.c / (i * 1e9) / 1e-6]
        data[0]=newData
        xLimNu=[min(data[0]) - min(data[0]) * plotSize*2,
                max(data[0]) + max(data[0]) * plotSize]
    # Steering the xtick-Labels if more than one plot is to be connected.
    axNow= plt.gca()
    plt.setp( axNow.get_xticklabels(), visible=True)
    if noXtics == True:
        plt.setp( axNow.get_xticklabels(), visible=False)
    if noYtics == True:
        plt.setp( axNow.get_yticklabels(), visible=False)
    plt.xlim(xlim[0],xlim[1])
    plt.ylim(ylim[0],ylim[1])
    # reads the Data for Plotting
    # PLots the model given in pList
    plot_sed(pList,nu_or_lambda,kappa=kappa,xRange=xRange,color=lineColor)
    markersize = 6
    #Plotting the data points
    if len(parted)==1:
        # Single data set: one errorbar series with fixed styling.
        plt.errorbar(data[0], data[1], yerr=data[2], fmt='o', marker='p',
                     mfc='None', mew=0.5, mec='#00ffff', ms=markersize,
                     color='black',lw=lineWidth)
    else:
        # parted holds cumulative slice boundaries splitting the data into
        # separately-styled sub-sets.
        for i in range(len(parted)):
            if i == 0:
                plt.errorbar(data[0][0:parted[i]], data[1][0:parted[i]],
                             yerr=data[2][0:parted[i]], fmt=marker[i],
                             marker=marker[i], mfc='None', label=label[i],
                             mew=0.5, mec=color[i], ms=markersize,
                             color=color[i], lw=lineWidth)
            else:
                plt.errorbar(data[0][parted[i-1]:parted[i]],
                             data[1][parted[i-1]:parted[i]],
                             yerr=data[2][parted[i-1]:parted[i]], fmt=marker[i],
                             marker=marker[i], mfc='None', label=label[i],
                             mew=0.5, mec=color[i], ms=markersize,
                             color=color[i], lw=lineWidth)
    # setting up legend,title, xlabel.
    if plotLegend == True:
        fontdict={'size':'13'}
        plt.legend(loc='upper right', numpoints=1, fancybox=False,
                   prop=fontdict, markerscale=1)
        fontdict={'size':'22'}
    if printFitResult==True:
        fontdict={'size':'12'}
        plt.text(fitResultLoc[0], fitResultLoc[1], s=textString,
                 fontdict=fontdict, alpha=0.4)
        fontdict={'size':'22'}
    fontdict={'size':'10'}
    if textString1 != None:
        plt.text(5,10,s=textString1, fontdict=fontdict)
    plt.title(titleString)
    if plotXlabel==True:
        plt.xlabel(xLabel)
    if plotYlabel==True:
        plt.ylabel(yLabel)
    plt.axis([xlim[0],xlim[1],ylim[0],ylim[1]])
    axNow= plt.gca()
    plt.setp( axNow.get_xticklabels(), visible=True)
|
wkentaro/chainer | refs/heads/master | chainer/__init__.py | 3 | from __future__ import absolute_import
import collections
import os
import threading
import warnings as builtin_warnings
import numpy
from chainer import _version
from chainer import backends # NOQA
from chainer import dataset # NOQA
from chainer import datasets # NOQA
from chainer import distributions # NOQA
from chainer import function_hooks # NOQA
from chainer import functions # NOQA
from chainer import graph_optimizations # NOQA
from chainer import initializers # NOQA
from chainer import iterators # NOQA
from chainer import links # NOQA
from chainer import optimizers # NOQA
from chainer import serializers # NOQA
from chainer import training # NOQA
from chainer import variable # NOQA
from chainer import warnings # NOQA
# import class and function
# These functions from backends.cuda are kept for backward compatibility
from chainer._backprop import backward # NOQA
from chainer._runtime_info import print_runtime_info # NOQA
from chainer.backend import get_device # NOQA
from chainer.backend import using_device # NOQA
from chainer.backends.cuda import should_use_cudnn # NOQA
from chainer.backends.cuda import should_use_cudnn_tensor_core # NOQA
from chainer.configuration import config # NOQA
from chainer.configuration import global_config # NOQA
from chainer.configuration import using_config # NOQA
from chainer.device_resident import DeviceResident # NOQA
from chainer.distribution import cross_entropy # NOQA
from chainer.distribution import Distribution # NOQA
from chainer.distribution import kl_divergence # NOQA
from chainer.distribution import register_kl # NOQA
from chainer.function import force_backprop_mode # NOQA
from chainer.function import Function # NOQA
from chainer.function import FunctionAdapter # NOQA
from chainer.function import no_backprop_mode # NOQA
from chainer.function_hook import FunctionHook # NOQA
from chainer.function_node import FunctionNode # NOQA
from chainer.function_node import grad # NOQA
from chainer.functions import array # NOQA
from chainer.functions.math import basic_math # NOQA
from chainer.graph_optimizations.static_graph import static_graph # NOQA
from chainer.graph_optimizations.static_graph_utilities import static_code # NOQA
from chainer.initializer import Initializer # NOQA
from chainer.link import Chain # NOQA
from chainer.link import ChainList # NOQA
from chainer.link import Link # NOQA
from chainer.link_hook import LinkHook # NOQA
from chainer.optimizer import GradientMethod # NOQA
from chainer.optimizer import Optimizer # NOQA
from chainer.optimizer import UpdateRule # NOQA
from chainer.reporter import DictSummary # NOQA
from chainer.reporter import get_current_reporter # NOQA
from chainer.reporter import report # NOQA
from chainer.reporter import report_scope # NOQA
from chainer.reporter import Reporter # NOQA
from chainer.reporter import Summary # NOQA
from chainer.sequential import Sequential # NOQA
from chainer.serializer import AbstractSerializer # NOQA
from chainer.serializer import Deserializer # NOQA
from chainer.serializer import Serializer # NOQA
from chainer.variable import as_array # NOQA
from chainer.variable import as_variable # NOQA
from chainer.variable import Parameter # NOQA
from chainer.variable import Variable # NOQA
# Alias for backward compatibility
from chainer import cuda # NOQA
from chainer import _environment_check
import chainerx
# Introduce an alias that cannot be declared at the original place due to
# circular imports.
import chainer.utils.walker_alias
# Expose WalkerAlias at its documented location; the definition lives in a
# submodule because defining it here would create a circular import.
chainer.utils.WalkerAlias = chainer.utils.walker_alias.WalkerAlias
# Drop the self-import that was needed only for the alias assignment above.
del chainer
# Check environment conditions
_environment_check.check()
# Re-export the package version string.
__version__ = _version.__version__
# Thread-local storage holding the per-thread function/link hook registries
# (see get_function_hooks() and _get_link_hooks() below).
_thread_local = threading.local()
# Lazily-initialized tuples of supported array types; populated on first use
# by _load_array_types().
_array_types = None
_cpu_array_types = None
# Used in chainer.FunctionNode.forward_chainerx().
# This value is returned to indicate that the function does not support forward
# computation in ChainerX implementation with given input arrays and other
# arguments.
class _FallbackType(object):
def __repr__(self):
return 'Fallback'
Fallback = _FallbackType()
def get_function_hooks():
    """Return the thread-local ordered registry of function hooks.

    The registry is created lazily on first access in each thread.
    """
    hooks = getattr(_thread_local, 'function_hooks', None)
    if hooks is None:
        hooks = collections.OrderedDict()
        _thread_local.function_hooks = hooks
    return hooks
def _get_link_hooks():
    """Return the thread-local ordered registry of link hooks.

    The registry is created lazily on first access in each thread.
    """
    hooks = getattr(_thread_local, 'link_hooks', None)
    if hooks is None:
        hooks = collections.OrderedDict()
        _thread_local.link_hooks = hooks
    return hooks
def _load_array_types():
    """Populate the module-level tuples of supported array types (once)."""
    # Note: this function may not be protected by GIL because of external
    # calls.
    global _array_types
    global _cpu_array_types

    if _array_types is not None:
        # Already initialized by an earlier call.
        return

    all_types = [numpy.ndarray]
    cpu_types = [numpy.ndarray]

    if backends.cuda.available:
        all_types.append(backends.cuda.ndarray)

    if backends.intel64.is_ideep_available():
        all_types.append(backends.intel64.mdarray)
        cpu_types.append(backends.intel64.mdarray)

    if chainerx.is_available():
        all_types.append(chainerx.ndarray)
        cpu_types.append(chainerx.ndarray)

    _array_types = tuple(all_types)
    _cpu_array_types = tuple(cpu_types)
def get_array_types():
    """Return the tuple of all supported array types (lazily computed)."""
    _load_array_types()
    return _array_types
def get_cpu_array_types():
    """Return the tuple of supported CPU array types (lazily computed)."""
    _load_array_types()
    return _cpu_array_types
# TODO(hvy): Move this function to backend?
def is_arrays_compatible(arrays):
    """Return True if the given arrays can be mixed in one computation.

    Do not use this function to check if a single object is an array or
    not. Use ``isinstance(obj, chainer.get_array_types())`` instead.
    """
    present = [arr for arr in arrays if arr is not None]
    if not present:
        return True

    # If there's at least one chainerx.ndarray, all other arrays
    # must be chainerx as well.
    if chainerx.is_available():
        chainerx_flags = [
            isinstance(arr, chainerx.ndarray) for arr in present]
        if any(chainerx_flags):
            return all(chainerx_flags)

    if isinstance(present[0], backends.cuda.ndarray):
        allowed = backends.cuda.ndarray
    else:
        allowed = get_cpu_array_types()
    return all(isinstance(arr, allowed) for arr in present)
class _Mixed16(object):
    """Dtype-like singleton representing mixed 16/32-bit precision."""

    # Storage dtype used when mixed16 mode is selected.
    dtype = numpy.dtype(numpy.float16)

    def __repr__(self):
        return "dtype('mixed16')"


mixed16 = _Mixed16()
"""Dtype-like object that represents 16/32 bits mixed precision float."""
# Default values of the global configuration. Several entries can be
# overridden at process start through the corresponding CHAINER_* environment
# variables; the rest are plain defaults.
global_config.debug = bool(int(os.environ.get('CHAINER_DEBUG', '0')))
global_config.cudnn_deterministic = False
global_config.warn_nondeterministic = False
global_config.enable_backprop = True
global_config.keep_graph_on_report = bool(int(
    os.environ.get('CHAINER_KEEP_GRAPH_ON_REPORT', '0')))
global_config.train = True
global_config.type_check = bool(int(os.environ.get('CHAINER_TYPE_CHECK', '1')))
global_config.use_cudnn = os.environ.get('CHAINER_USE_CUDNN', 'auto')
global_config.use_cudnn_tensor_core = 'auto'
global_config.autotune = False
global_config.schedule_func = None
global_config.use_static_graph = True
global_config.use_ideep = os.environ.get('CHAINER_USE_IDEEP', 'never')
global_config.lazy_grad_sum = bool(int(
    os.environ.get('CHAINER_LAZY_GRAD_SUM', '0')))
global_config.cudnn_fast_batch_normalization = bool(int(
    os.environ.get('CHAINER_CUDNN_FAST_BATCH_NORMALIZATION', '0')))
# Default dtype: one of float16/float32/float64, or the special mixed16
# sentinel defined above. Any other CHAINER_DTYPE value is rejected here.
_chainer_dtype = os.environ.get('CHAINER_DTYPE', 'float32')
if _chainer_dtype in ('float16', 'float32', 'float64'):
    global_config.dtype = numpy.dtype(_chainer_dtype)
elif _chainer_dtype == 'mixed16':
    global_config.dtype = mixed16
else:
    raise TypeError('incorrect dtype name in CHAINER_DTYPE: "{}". '
                    'Only float16/32/64 are allowed.'.format(_chainer_dtype))
global_config.in_recomputing = False
global_config._will_recompute = False
def is_debug():
    """Returns if the debug mode is enabled or not in the current thread.

    Returns:
        bool: ``True`` if the debug mode is enabled.
    """
    # ``__getattr__`` is invoked explicitly, mirroring the original code, so
    # the config object's fallback lookup is exercised directly.
    debug_value = config.__getattr__('debug')
    return bool(debug_value)
def set_debug(debug):
    """Enables or disables the debug mode in the current thread.

    .. note::

        ``chainer.set_debug(value)`` is equivalent to
        ``chainer.config.debug = value``.

    Args:
        debug (bool): New debug mode.
    """
    setattr(config, 'debug', debug)
class DebugMode(object):
    """Debug mode context.

    A context manager that switches the debug mode to ``debug`` on entry and
    restores the previous value on exit.

    .. deprecated:: v2.0.0
        Use :func:`chainer.using_config` instead. See :ref:`debug` for
        details.

    Args:
        debug (bool): Debug mode used in the context.
    """

    def __init__(self, debug):
        builtin_warnings.warn(
            'chainer.DebugMode is deprecated. '
            'Use chainer.using_config("debug", ...) instead.',
            DeprecationWarning)
        self._context = using_config('debug', debug)

    def __enter__(self):
        self._context.__enter__()

    def __exit__(self, *args):
        self._context.__exit__(*args)
def get_dtype(dtype=None, map_mixed16=None):
    """Resolves Chainer's default dtype.

    Args:
        dtype: Dtype specifier. If this value is specified (not ``None``),
            this function returns the dtype object corresponding to it.
        map_mixed16: Dtype specifier. When the resolved dtype is mixed16,
            this option is used. If this value is ``None``, float16 is used.

    Returns:
        If ``dtype`` is not ``None``, it returns the dtype normalized by
        ``numpy.dtype()``. Otherwise, it returns ``chainer.config.dtype``
        (see :ref:`configuration`) normalized as well. When the resolved
        dtype is :data:`~chainer.mixed16` and ``map_mixed16`` is specified,
        it returns the normalized version of ``map_mixed16``.

    """
    resolved = config.dtype if dtype is None else dtype
    # Note: the mixed16 substitution applies to an explicitly passed
    # ``dtype`` as well, not only to the configured default.
    if resolved is mixed16 and map_mixed16 is not None:
        resolved = map_mixed16
    return numpy.dtype(resolved)
# Install arithmetic operator overloads on Variable at import time.
basic_math.install_variable_arithmetics()
# Install indexing (``__getitem__``) support on Variable at import time.
array.get_item.install_variable_get_item()
# Module-level flag; presumably consumed elsewhere to suppress
# experimental-feature warnings -- confirm at the reading site.
disable_experimental_feature_warning = False
|
maxkoryukov/headphones | refs/heads/master | lib/html5lib/filters/optionaltags.py | 1727 | from __future__ import absolute_import, division, unicode_literals
from . import _base
class Filter(_base.Filter):
    """Token-stream filter that removes omissible (optional) tags.

    HTML allows certain start and end tags (e.g. ``</li>``, ``<body>``) to
    be omitted when surrounding tokens make them unambiguous.  This filter
    drops such tags from the token stream to produce smaller output.
    """

    def slider(self):
        """Yield ``(previous, current, next)`` triples over the source.

        ``previous`` is ``None`` for the first token and ``next`` is
        ``None`` for the last one.
        """
        previous1 = previous2 = None
        for token in self.source:
            if previous1 is not None:
                yield previous2, previous1, token
            previous2 = previous1
            previous1 = token
        # Fix: guard against an empty source; without this check an empty
        # token stream yielded (None, None, None) and __iter__ crashed on
        # token["type"].
        if previous1 is not None:
            yield previous2, previous1, None

    def __iter__(self):
        for previous, token, next in self.slider():
            type = token["type"]
            if type == "StartTag":
                # Start tags with attributes can never be omitted.
                if (token["data"] or
                        not self.is_optional_start(token["name"],
                                                   previous, next)):
                    yield token
            elif type == "EndTag":
                if not self.is_optional_end(token["name"], next):
                    yield token
            else:
                yield token

    def is_optional_start(self, tagname, previous, next):
        """Return True if the start tag *tagname* may be omitted here."""
        type = next and next["type"] or None
        # Fix: was ``tagname in 'html'`` -- a substring test that also
        # matched names like 'h' or 'ml'; an equality test is intended.
        if tagname == 'html':
            # An html element's start tag may be omitted if the first thing
            # inside the html element is not a space character or a comment.
            return type not in ("Comment", "SpaceCharacters")
        elif tagname == 'head':
            # A head element's start tag may be omitted if the first thing
            # inside the head element is an element.
            # XXX: we also omit the start tag if the head element is empty
            if type in ("StartTag", "EmptyTag"):
                return True
            elif type == "EndTag":
                return next["name"] == "head"
        elif tagname == 'body':
            # A body element's start tag may be omitted if the first thing
            # inside the body element is not a space character or a comment,
            # except if the first thing inside the body element is a script
            # or style element and the node immediately preceding the body
            # element is a head element whose end tag has been omitted.
            if type in ("Comment", "SpaceCharacters"):
                return False
            elif type == "StartTag":
                # XXX: we do not look at the preceding event, so we never omit
                # the body element's start tag if it's followed by a script or
                # a style element.
                return next["name"] not in ('script', 'style')
            else:
                return True
        elif tagname == 'colgroup':
            # A colgroup element's start tag may be omitted if the first thing
            # inside the colgroup element is a col element, and if the element
            # is not immediately preceeded by another colgroup element whose
            # end tag has been omitted.
            if type in ("StartTag", "EmptyTag"):
                # XXX: we do not look at the preceding event, so instead we never
                # omit the colgroup element's end tag when it is immediately
                # followed by another colgroup element. See is_optional_end.
                return next["name"] == "col"
            else:
                return False
        elif tagname == 'tbody':
            # A tbody element's start tag may be omitted if the first thing
            # inside the tbody element is a tr element, and if the element is
            # not immediately preceeded by a tbody, thead, or tfoot element
            # whose end tag has been omitted.
            if type == "StartTag":
                # omit the thead and tfoot elements' end tag when they are
                # immediately followed by a tbody element. See is_optional_end.
                if previous and previous['type'] == 'EndTag' and \
                        previous['name'] in ('tbody', 'thead', 'tfoot'):
                    return False
                return next["name"] == 'tr'
            else:
                return False
        return False

    def is_optional_end(self, tagname, next):
        """Return True if the end tag *tagname* may be omitted here."""
        type = next and next["type"] or None
        if tagname in ('html', 'head', 'body'):
            # An html element's end tag may be omitted if the html element
            # is not immediately followed by a space character or a comment.
            return type not in ("Comment", "SpaceCharacters")
        elif tagname in ('li', 'optgroup', 'tr'):
            # A li element's end tag may be omitted if the li element is
            # immediately followed by another li element or if there is
            # no more content in the parent element.
            # An optgroup element's end tag may be omitted if the optgroup
            # element is immediately followed by another optgroup element,
            # or if there is no more content in the parent element.
            # A tr element's end tag may be omitted if the tr element is
            # immediately followed by another tr element, or if there is
            # no more content in the parent element.
            if type == "StartTag":
                return next["name"] == tagname
            else:
                return type == "EndTag" or type is None
        elif tagname in ('dt', 'dd'):
            # A dt element's end tag may be omitted if the dt element is
            # immediately followed by another dt element or a dd element.
            # A dd element's end tag may be omitted if the dd element is
            # immediately followed by another dd element or a dt element,
            # or if there is no more content in the parent element.
            if type == "StartTag":
                return next["name"] in ('dt', 'dd')
            elif tagname == 'dd':
                return type == "EndTag" or type is None
            else:
                return False
        elif tagname == 'p':
            # A p element's end tag may be omitted if the p element is
            # immediately followed by an address, article, aside,
            # blockquote, datagrid, dialog, dir, div, dl, fieldset,
            # footer, form, h1, h2, h3, h4, h5, h6, header, hr, menu,
            # nav, ol, p, pre, section, table, or ul, element, or if
            # there is no more content in the parent element.
            if type in ("StartTag", "EmptyTag"):
                return next["name"] in ('address', 'article', 'aside',
                                        'blockquote', 'datagrid', 'dialog',
                                        'dir', 'div', 'dl', 'fieldset', 'footer',
                                        'form', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6',
                                        'header', 'hr', 'menu', 'nav', 'ol',
                                        'p', 'pre', 'section', 'table', 'ul')
            else:
                return type == "EndTag" or type is None
        elif tagname == 'option':
            # An option element's end tag may be omitted if the option
            # element is immediately followed by another option element,
            # or if it is immediately followed by an <code>optgroup</code>
            # element, or if there is no more content in the parent
            # element.
            if type == "StartTag":
                return next["name"] in ('option', 'optgroup')
            else:
                return type == "EndTag" or type is None
        elif tagname in ('rt', 'rp'):
            # An rt element's end tag may be omitted if the rt element is
            # immediately followed by an rt or rp element, or if there is
            # no more content in the parent element.
            # An rp element's end tag may be omitted if the rp element is
            # immediately followed by an rt or rp element, or if there is
            # no more content in the parent element.
            if type == "StartTag":
                return next["name"] in ('rt', 'rp')
            else:
                return type == "EndTag" or type is None
        elif tagname == 'colgroup':
            # A colgroup element's end tag may be omitted if the colgroup
            # element is not immediately followed by a space character or
            # a comment.
            if type in ("Comment", "SpaceCharacters"):
                return False
            elif type == "StartTag":
                # XXX: we also look for an immediately following colgroup
                # element. See is_optional_start.
                return next["name"] != 'colgroup'
            else:
                return True
        elif tagname in ('thead', 'tbody'):
            # A thead element's end tag may be omitted if the thead element
            # is immediately followed by a tbody or tfoot element.
            # A tbody element's end tag may be omitted if the tbody element
            # is immediately followed by a tbody or tfoot element, or if
            # there is no more content in the parent element.
            # A tfoot element's end tag may be omitted if the tfoot element
            # is immediately followed by a tbody element, or if there is no
            # more content in the parent element.
            # XXX: we never omit the end tag when the following element is
            # a tbody. See is_optional_start.
            if type == "StartTag":
                return next["name"] in ['tbody', 'tfoot']
            elif tagname == 'tbody':
                return type == "EndTag" or type is None
            else:
                return False
        elif tagname == 'tfoot':
            # A tfoot element's end tag may be omitted if the tfoot element
            # is immediately followed by a tbody element, or if there is no
            # more content in the parent element.
            # XXX: we never omit the end tag when the following element is
            # a tbody. See is_optional_start.
            if type == "StartTag":
                return next["name"] == 'tbody'
            else:
                return type == "EndTag" or type is None
        elif tagname in ('td', 'th'):
            # A td element's end tag may be omitted if the td element is
            # immediately followed by a td or th element, or if there is
            # no more content in the parent element.
            # A th element's end tag may be omitted if the th element is
            # immediately followed by a td or th element, or if there is
            # no more content in the parent element.
            if type == "StartTag":
                return next["name"] in ('td', 'th')
            else:
                return type == "EndTag" or type is None
        return False
|
h2educ/scikit-learn | refs/heads/master | sklearn/externals/joblib/memory.py | 194 | """
A context object for caching a function's return value each time it
is called with the same input arguments.
"""
# Author: Gael Varoquaux <gael dot varoquaux at normalesup dot org>
# Copyright (c) 2009 Gael Varoquaux
# License: BSD Style, 3 clauses.
from __future__ import with_statement
import os
import shutil
import time
import pydoc
import re
import sys
try:
import cPickle as pickle
except ImportError:
import pickle
import functools
import traceback
import warnings
import inspect
import json
import weakref
import io
# Local imports
from . import hashing
from .func_inspect import get_func_code, get_func_name, filter_args
from .func_inspect import format_signature, format_call
from ._memory_helpers import open_py_source
from .logger import Logger, format_time, pformat
from . import numpy_pickle
from .disk import mkdirp, rm_subdirs
from ._compat import _basestring
FIRST_LINE_TEXT = "# first line:"

# TODO: The following object should have a data store object as a sub
# object, and the interface to persist and query should be separated in
# the data store.
#
# This would enable creating 'Memory' objects with a different logic for
# pickling that would simply span a MemorizedFunc with the same
# store (or do we want to copy it to avoid cross-talks?), for instance to
# implement HDF5 pickling.

# TODO: Same remark for the logger, and probably use the Python logging
# mechanism.


def extract_first_line(func_code):
    """Split the "# first line: N" marker off a stored function source.

    Returns the source without the marker line together with the recorded
    first-line number, or ``(func_code, -1)`` when no marker is present.
    """
    if not func_code.startswith(FIRST_LINE_TEXT):
        return func_code, -1
    marker_line, _, remainder = func_code.partition('\n')
    first_line = int(marker_line[len(FIRST_LINE_TEXT):])
    return remainder, first_line
class JobLibCollisionWarning(UserWarning):
    """Warn that there might be a collision between names of functions."""
def _get_func_fullname(func):
    """Compute the cache-path component associated with a function.

    See the code of _cache_key_to_dir() for details.
    """
    path_parts, funcname = get_func_name(func)
    path_parts.append(funcname)
    return os.path.join(*path_parts)
def _cache_key_to_dir(cachedir, func, argument_hash):
    """Compute directory associated with a given cache key.

    ``func`` can be a function or a string as returned by
    _get_func_fullname().
    """
    if isinstance(func, _basestring):
        func_part = func
    else:
        func_part = _get_func_fullname(func)
    parts = [cachedir, func_part]
    if argument_hash is not None:
        parts.append(argument_hash)
    return os.path.join(*parts)
def _load_output(output_dir, func_name, timestamp=None, metadata=None,
                 mmap_mode=None, verbose=0):
    """Load the persisted output of a computation from ``output_dir``.

    Raises ``KeyError`` when the cached value is missing (e.g. cleared).
    """
    if verbose > 1:
        # Build a human-readable call signature for the log message.
        call_repr = ""
        try:
            if metadata is not None:
                arg_text = ", ".join(['%s=%s' % (name, value)
                                      for name, value
                                      in metadata['input_args'].items()])
                call_repr = "%s(%s)" % (os.path.basename(func_name),
                                        arg_text)
            else:
                call_repr = os.path.basename(func_name)
        except KeyError:
            pass

        if timestamp is not None:
            elapsed = "% 16s" % format_time(time.time() - timestamp)
        else:
            elapsed = ""

        if verbose < 10:
            print('[Memory]%s: Loading %s...' % (elapsed, str(call_repr)))
        else:
            print('[Memory]%s: Loading %s from %s' % (
                elapsed, str(call_repr), output_dir))

    filename = os.path.join(output_dir, 'output.pkl')
    if not os.path.isfile(filename):
        raise KeyError(
            "Non-existing cache value (may have been cleared).\n"
            "File %s does not exist" % filename)
    return numpy_pickle.load(filename, mmap_mode=mmap_mode)
# An in-memory store to avoid looking at the disk-based function
# source code to check if a function definition has changed
# Maps function objects to the (id, hash, code-hash) triples produced by
# MemorizedFunc._hash_func; weak keys let functions be garbage-collected.
_FUNCTION_HASHES = weakref.WeakKeyDictionary()
###############################################################################
# class `MemorizedResult`
###############################################################################
class MemorizedResult(Logger):
    """Object representing a cached value.

    Attributes
    ----------
    cachedir: string
        path to root of joblib cache

    func: function or string
        function whose output is cached. The string case is intended only for
        instanciation based on the output of repr() on another instance.
        (namely eval(repr(memorized_instance)) works).

    argument_hash: string
        hash of the function arguments

    mmap_mode: {None, 'r+', 'r', 'w+', 'c'}
        The memmapping mode used when loading from cache numpy arrays. See
        numpy.load for the meaning of the different values.

    verbose: int
        verbosity level (0 means no message)

    timestamp, metadata: string
        for internal use only
    """
    def __init__(self, cachedir, func, argument_hash,
                 mmap_mode=None, verbose=0, timestamp=None, metadata=None):
        Logger.__init__(self)
        # Normalize ``func`` to the string form used for cache directories.
        if isinstance(func, _basestring):
            self.func = func
        else:
            self.func = _get_func_fullname(func)
        self.argument_hash = argument_hash
        self.cachedir = cachedir
        self.mmap_mode = mmap_mode

        self._output_dir = _cache_key_to_dir(cachedir, self.func,
                                             argument_hash)

        if metadata is not None:
            self.metadata = metadata
        else:
            self.metadata = {}
            # No error is relevant here: metadata is informational only, so
            # a missing or corrupt metadata.json is silently ignored.
            # Narrowed from a bare ``except:`` so that KeyboardInterrupt
            # and SystemExit are not swallowed.
            try:
                with open(os.path.join(self._output_dir, 'metadata.json'),
                          'rb') as f:
                    self.metadata = json.load(f)
            except Exception:
                pass

        self.duration = self.metadata.get('duration', None)
        self.verbose = verbose
        self.timestamp = timestamp

    def get(self):
        """Read value from cache and return it."""
        return _load_output(self._output_dir, _get_func_fullname(self.func),
                            timestamp=self.timestamp,
                            metadata=self.metadata, mmap_mode=self.mmap_mode,
                            verbose=self.verbose)

    def clear(self):
        """Clear value from cache"""
        shutil.rmtree(self._output_dir, ignore_errors=True)

    def __repr__(self):
        return ('{class_name}(cachedir="{cachedir}", func="{func}", '
                'argument_hash="{argument_hash}")'.format(
                    class_name=self.__class__.__name__,
                    cachedir=self.cachedir,
                    func=self.func,
                    argument_hash=self.argument_hash
                    ))

    def __reduce__(self):
        # The third element is applied to the instance __dict__ on
        # unpickling, restoring mmap_mode; the timestamp is not preserved.
        return (self.__class__, (self.cachedir, self.func, self.argument_hash),
                {'mmap_mode': self.mmap_mode})
class NotMemorizedResult(object):
    """Class representing an arbitrary value.

    This class is a replacement for MemorizedResult when there is no cache.
    """
    __slots__ = ('value', 'valid')

    def __init__(self, value):
        self.value = value
        self.valid = True

    def get(self):
        if not self.valid:
            raise KeyError("No value stored.")
        return self.value

    def clear(self):
        self.valid = False
        self.value = None

    def __repr__(self):
        if not self.valid:
            return self.__class__.__name__ + ' with no value'
        return '{class_name}({value})'.format(
            class_name=self.__class__.__name__,
            value=pformat(self.value))

    # __getstate__ and __setstate__ are required because of __slots__
    def __getstate__(self):
        return {"valid": self.valid, "value": self.value}

    def __setstate__(self, state):
        self.valid = state["valid"]
        self.value = state["value"]
###############################################################################
# class `NotMemorizedFunc`
###############################################################################
class NotMemorizedFunc(object):
    """No-op object decorating a function.

    This class replaces MemorizedFunc when there is no cache. It provides an
    identical API but does not write anything on disk.

    Attributes
    ----------
    func: callable
        Original undecorated function.
    """
    # Should be a light as possible (for speed)
    def __init__(self, func):
        self.func = func

    def __call__(self, *args, **kwargs):
        return self.func(*args, **kwargs)

    def call_and_shelve(self, *args, **kwargs):
        result = self.func(*args, **kwargs)
        return NotMemorizedResult(result)

    def __reduce__(self):
        return (self.__class__, (self.func,))

    def __repr__(self):
        return '%s(func=%s)' % (self.__class__.__name__, self.func)

    def clear(self, warn=True):
        # Argument "warn" is for compatibility with MemorizedFunc.clear
        pass
###############################################################################
# class `MemorizedFunc`
###############################################################################
class MemorizedFunc(Logger):
""" Callable object decorating a function for caching its return value
each time it is called.
All values are cached on the filesystem, in a deep directory
structure. Methods are provided to inspect the cache or clean it.
Attributes
----------
func: callable
The original, undecorated, function.
cachedir: string
Path to the base cache directory of the memory context.
ignore: list or None
List of variable names to ignore when choosing whether to
recompute.
mmap_mode: {None, 'r+', 'r', 'w+', 'c'}
The memmapping mode used when loading from cache
numpy arrays. See numpy.load for the meaning of the different
values.
compress: boolean, or integer
Whether to zip the stored data on disk. If an integer is
given, it should be between 1 and 9, and sets the amount
of compression. Note that compressed arrays cannot be
read by memmapping.
verbose: int, optional
The verbosity flag, controls messages that are issued as
the function is evaluated.
"""
#-------------------------------------------------------------------------
# Public interface
#-------------------------------------------------------------------------
def __init__(self, func, cachedir, ignore=None, mmap_mode=None,
compress=False, verbose=1, timestamp=None):
"""
Parameters
----------
func: callable
The function to decorate
cachedir: string
The path of the base directory to use as a data store
ignore: list or None
List of variable names to ignore.
mmap_mode: {None, 'r+', 'r', 'w+', 'c'}, optional
The memmapping mode used when loading from cache
numpy arrays. See numpy.load for the meaning of the
arguments.
compress : boolean, or integer
Whether to zip the stored data on disk. If an integer is
given, it should be between 1 and 9, and sets the amount
of compression. Note that compressed arrays cannot be
read by memmapping.
verbose: int, optional
Verbosity flag, controls the debug messages that are issued
as functions are evaluated. The higher, the more verbose
timestamp: float, optional
The reference time from which times in tracing messages
are reported.
"""
Logger.__init__(self)
self.mmap_mode = mmap_mode
self.func = func
if ignore is None:
ignore = []
self.ignore = ignore
self._verbose = verbose
self.cachedir = cachedir
self.compress = compress
if compress and self.mmap_mode is not None:
warnings.warn('Compressed results cannot be memmapped',
stacklevel=2)
if timestamp is None:
timestamp = time.time()
self.timestamp = timestamp
mkdirp(self.cachedir)
try:
functools.update_wrapper(self, func)
except:
" Objects like ufunc don't like that "
if inspect.isfunction(func):
doc = pydoc.TextDoc().document(func)
# Remove blank line
doc = doc.replace('\n', '\n\n', 1)
# Strip backspace-overprints for compatibility with autodoc
doc = re.sub('\x08.', '', doc)
else:
# Pydoc does a poor job on other objects
doc = func.__doc__
self.__doc__ = 'Memoized version of %s' % doc
    def _cached_call(self, args, kwargs):
        """Call wrapped function and cache result, or read cache if available.

        This function returns the wrapped function output and some metadata.

        Returns
        -------
        output: value or tuple
            what is returned by wrapped function

        argument_hash: string
            hash of function arguments

        metadata: dict
            some metadata about wrapped function call (see _persist_input())
        """
        # Compare the function code with the previous to see if the
        # function code has changed
        output_dir, argument_hash = self._get_output_dir(*args, **kwargs)
        metadata = None
        # FIXME: The statements below should be try/excepted
        # Cache miss path: either the function source changed (which also
        # invalidates the cache) or no result directory exists yet.
        if not (self._check_previous_func_code(stacklevel=4) and
                os.path.exists(output_dir)):
            if self._verbose > 10:
                _, name = get_func_name(self.func)
                self.warn('Computing func %s, argument hash %s in '
                          'directory %s'
                        % (name, argument_hash, output_dir))
            out, metadata = self.call(*args, **kwargs)
            if self.mmap_mode is not None:
                # Memmap the output at the first call to be consistent with
                # later calls
                out = _load_output(output_dir, _get_func_fullname(self.func),
                                   timestamp=self.timestamp,
                                   mmap_mode=self.mmap_mode,
                                   verbose=self._verbose)
        else:
            # Cache hit path: load the persisted output from disk.
            try:
                t0 = time.time()
                out = _load_output(output_dir, _get_func_fullname(self.func),
                                   timestamp=self.timestamp,
                                   metadata=metadata, mmap_mode=self.mmap_mode,
                                   verbose=self._verbose)
                if self._verbose > 4:
                    t = time.time() - t0
                    _, name = get_func_name(self.func)
                    msg = '%s cache loaded - %s' % (name, format_time(t))
                    print(max(0, (80 - len(msg))) * '_' + msg)
            except Exception:
                # XXX: Should use an exception logger
                # Loading failed (e.g. corrupted cache entry): drop the bad
                # entry and recompute; argument_hash is cleared because the
                # recomputed value was not re-read from the cache location.
                self.warn('Exception while loading results for '
                          '(args=%s, kwargs=%s)\n %s' %
                          (args, kwargs, traceback.format_exc()))

                shutil.rmtree(output_dir, ignore_errors=True)
                out, metadata = self.call(*args, **kwargs)
                argument_hash = None
        return (out, argument_hash, metadata)
def call_and_shelve(self, *args, **kwargs):
"""Call wrapped function, cache result and return a reference.
This method returns a reference to the cached result instead of the
result itself. The reference object is small and pickeable, allowing
to send or store it easily. Call .get() on reference object to get
result.
Returns
-------
cached_result: MemorizedResult or NotMemorizedResult
reference to the value returned by the wrapped function. The
class "NotMemorizedResult" is used when there is no cache
activated (e.g. cachedir=None in Memory).
"""
_, argument_hash, metadata = self._cached_call(args, kwargs)
return MemorizedResult(self.cachedir, self.func, argument_hash,
metadata=metadata, verbose=self._verbose - 1,
timestamp=self.timestamp)
def __call__(self, *args, **kwargs):
return self._cached_call(args, kwargs)[0]
def __reduce__(self):
""" We don't store the timestamp when pickling, to avoid the hash
depending from it.
In addition, when unpickling, we run the __init__
"""
return (self.__class__, (self.func, self.cachedir, self.ignore,
self.mmap_mode, self.compress, self._verbose))
def format_signature(self, *args, **kwargs):
warnings.warn("MemorizedFunc.format_signature will be removed in a "
"future version of joblib.", DeprecationWarning)
return format_signature(self.func, *args, **kwargs)
def format_call(self, *args, **kwargs):
warnings.warn("MemorizedFunc.format_call will be removed in a "
"future version of joblib.", DeprecationWarning)
return format_call(self.func, args, kwargs)
#-------------------------------------------------------------------------
# Private interface
#-------------------------------------------------------------------------
def _get_argument_hash(self, *args, **kwargs):
return hashing.hash(filter_args(self.func, self.ignore,
args, kwargs),
coerce_mmap=(self.mmap_mode is not None))
def _get_output_dir(self, *args, **kwargs):
""" Return the directory in which are persisted the result
of the function called with the given arguments.
"""
argument_hash = self._get_argument_hash(*args, **kwargs)
output_dir = os.path.join(self._get_func_dir(self.func),
argument_hash)
return output_dir, argument_hash
get_output_dir = _get_output_dir # backward compatibility
    def _get_func_dir(self, mkdir=True):
        """ Get the directory corresponding to the cache for the
            function.
        """
        # NOTE(review): some callers pass a function object as ``mkdir``
        # (see _get_output_dir); any truthy value triggers creation, so the
        # behavior is correct but the parameter name is misleading.
        func_dir = _cache_key_to_dir(self.cachedir, self.func, None)
        if mkdir:
            mkdirp(func_dir)
        return func_dir
def _hash_func(self):
"""Hash a function to key the online cache"""
func_code_h = hash(getattr(self.func, '__code__', None))
return id(self.func), hash(self.func), func_code_h
    def _write_func_code(self, filename, func_code, first_line):
        """ Write the function code and the filename to a file.

            Also registers the function in the in-memory hash store so later
            calls can skip the on-disk comparison.
        """
        # We store the first line because the filename and the function
        # name is not always enough to identify a function: people
        # sometimes have several functions named the same way in a
        # file. This is bad practice, but joblib should be robust to bad
        # practice.
        func_code = u'%s %i\n%s' % (FIRST_LINE_TEXT, first_line, func_code)
        with io.open(filename, 'w', encoding="UTF-8") as out:
            out.write(func_code)
        # Also store in the in-memory store of function hashes
        # (py2 exposes the name as func_name, py3 as __name__).
        is_named_callable = False
        if sys.version_info[0] > 2:
            is_named_callable = (hasattr(self.func, '__name__')
                                 and self.func.__name__ != '<lambda>')
        else:
            is_named_callable = (hasattr(self.func, 'func_name')
                                 and self.func.func_name != '<lambda>')
        if is_named_callable:
            # Don't do this for lambda functions or strange callable
            # objects, as it ends up being too fragile
            func_hash = self._hash_func()
            try:
                _FUNCTION_HASHES[self.func] = func_hash
            except TypeError:
                # Some callable are not hashable
                pass
    def _check_previous_func_code(self, stacklevel=2):
        """Return True when the cached code for self.func is still current.

        Compares the function's current source against the copy stored in
        the cache directory; on mismatch, warns about possible name
        collisions and wipes the stale cache (returning False).

        stacklevel is the depth at which this function is called, to
        issue useful warnings to the user.
        """
        # First check if our function is in the in-memory store.
        # Using the in-memory store not only makes things faster, but it
        # also renders us robust to variations of the files when the
        # in-memory version of the code does not vary
        try:
            if self.func in _FUNCTION_HASHES:
                # We use as an identifier the id of the function and its
                # hash. This is more likely to falsely change than have hash
                # collisions, thus we are on the safe side.
                func_hash = self._hash_func()
                if func_hash == _FUNCTION_HASHES[self.func]:
                    return True
        except TypeError:
            # Some callables are not hashable
            pass
        # Here, we go through some effort to be robust to dynamically
        # changing code and collision. We cannot inspect.getsource
        # because it is not reliable when using IPython's magic "%run".
        func_code, source_file, first_line = get_func_code(self.func)
        func_dir = self._get_func_dir()
        func_code_file = os.path.join(func_dir, 'func_code.py')
        try:
            with io.open(func_code_file, encoding="UTF-8") as infile:
                old_func_code, old_first_line = \
                    extract_first_line(infile.read())
        except IOError:
            # No stored copy yet: persist the current code and report stale.
            self._write_func_code(func_code_file, func_code, first_line)
            return False
        if old_func_code == func_code:
            return True
        # We have differing code, is this because we are referring to
        # different functions, or because the function we are referring to has
        # changed?
        _, func_name = get_func_name(self.func, resolv_alias=False,
                                     win_characters=False)
        if old_first_line == first_line == -1 or func_name == '<lambda>':
            if not first_line == -1:
                func_description = '%s (%s:%i)' % (func_name,
                                                   source_file, first_line)
            else:
                func_description = func_name
            warnings.warn(JobLibCollisionWarning(
                "Cannot detect name collisions for function '%s'"
                % func_description), stacklevel=stacklevel)
        # Fetch the code at the old location and compare it. If it is the
        # same than the code store, we have a collision: the code in the
        # file has not changed, but the name we have is pointing to a new
        # code block.
        if not old_first_line == first_line and source_file is not None:
            possible_collision = False
            if os.path.exists(source_file):
                _, func_name = get_func_name(self.func, resolv_alias=False)
                num_lines = len(func_code.split('\n'))
                with open_py_source(source_file) as f:
                    on_disk_func_code = f.readlines()[
                        old_first_line - 1:old_first_line - 1 + num_lines - 1]
                on_disk_func_code = ''.join(on_disk_func_code)
                possible_collision = (on_disk_func_code.rstrip()
                                      == old_func_code.rstrip())
            else:
                possible_collision = source_file.startswith('<doctest ')
            if possible_collision:
                warnings.warn(JobLibCollisionWarning(
                    'Possible name collisions between functions '
                    "'%s' (%s:%i) and '%s' (%s:%i)" %
                    (func_name, source_file, old_first_line,
                     func_name, source_file, first_line)),
                    stacklevel=stacklevel)
        # The function has changed, wipe the cache directory.
        # XXX: Should be using warnings, and giving stacklevel
        if self._verbose > 10:
            _, func_name = get_func_name(self.func, resolv_alias=False)
            self.warn("Function %s (stored in %s) has changed." %
                      (func_name, func_dir))
        self.clear(warn=True)
        return False
    def clear(self, warn=True):
        """ Empty the function's cache.

            The cache directory is removed and re-created, and the current
            function source is stored back so that the next call starts
            from a clean, consistent state.
        """
        func_dir = self._get_func_dir(mkdir=False)
        if self._verbose > 0 and warn:
            self.warn("Clearing cache %s" % func_dir)
        if os.path.exists(func_dir):
            shutil.rmtree(func_dir, ignore_errors=True)
        mkdirp(func_dir)
        func_code, _, first_line = get_func_code(self.func)
        func_code_file = os.path.join(func_dir, 'func_code.py')
        self._write_func_code(func_code_file, func_code, first_line)
    def call(self, *args, **kwargs):
        """ Force the execution of the function with the given arguments and
            persist the output values.

            Returns (output, metadata) where metadata is the summary dict
            produced by _persist_input.
        """
        start_time = time.time()
        output_dir, _ = self._get_output_dir(*args, **kwargs)
        if self._verbose > 0:
            print(format_call(self.func, args, kwargs))
        output = self.func(*args, **kwargs)
        self._persist_output(output, output_dir)
        duration = time.time() - start_time
        metadata = self._persist_input(output_dir, duration, args, kwargs)
        if self._verbose > 0:
            _, name = get_func_name(self.func)
            msg = '%s - %s' % (name, format_time(duration))
            # Right-align the message to an 80-column ruler of underscores.
            print(max(0, (80 - len(msg))) * '_' + msg)
        return output, metadata
# Make public
def _persist_output(self, output, dir):
""" Persist the given output tuple in the directory.
"""
try:
mkdirp(dir)
filename = os.path.join(dir, 'output.pkl')
numpy_pickle.dump(output, filename, compress=self.compress)
if self._verbose > 10:
print('Persisting in %s' % dir)
except OSError:
" Race condition in the creation of the directory "
def _persist_input(self, output_dir, duration, args, kwargs,
this_duration_limit=0.5):
""" Save a small summary of the call using json format in the
output directory.
output_dir: string
directory where to write metadata.
duration: float
time taken by hashing input arguments, calling the wrapped
function and persisting its output.
args, kwargs: list and dict
input arguments for wrapped function
this_duration_limit: float
Max execution time for this function before issuing a warning.
"""
start_time = time.time()
argument_dict = filter_args(self.func, self.ignore,
args, kwargs)
input_repr = dict((k, repr(v)) for k, v in argument_dict.items())
# This can fail due to race-conditions with multiple
# concurrent joblibs removing the file or the directory
metadata = {"duration": duration, "input_args": input_repr}
try:
mkdirp(output_dir)
with open(os.path.join(output_dir, 'metadata.json'), 'w') as f:
json.dump(metadata, f)
except:
pass
this_duration = time.time() - start_time
if this_duration > this_duration_limit:
# This persistence should be fast. It will not be if repr() takes
# time and its output is large, because json.dump will have to
# write a large file. This should not be an issue with numpy arrays
# for which repr() always output a short representation, but can
# be with complex dictionaries. Fixing the problem should be a
# matter of replacing repr() above by something smarter.
warnings.warn("Persisting input arguments took %.2fs to run.\n"
"If this happens often in your code, it can cause "
"performance problems \n"
"(results will be correct in all cases). \n"
"The reason for this is probably some large input "
"arguments for a wrapped\n"
" function (e.g. large strings).\n"
"THIS IS A JOBLIB ISSUE. If you can, kindly provide "
"the joblib's team with an\n"
" example so that they can fix the problem."
% this_duration, stacklevel=5)
return metadata
    def load_output(self, output_dir):
        """ Read the results of a previous calculation from the directory
            it was cached in.

            Deprecated: MemorizedResult provides equivalent functionality.
        """
        warnings.warn("MemorizedFunc.load_output is deprecated and will be "
                      "removed in a future version\n"
                      "of joblib. A MemorizedResult provides similar features",
                      DeprecationWarning)
        # No metadata available here.
        return _load_output(output_dir, _get_func_fullname(self.func),
                            timestamp=self.timestamp,
                            mmap_mode=self.mmap_mode, verbose=self._verbose)
# XXX: Need a method to check if results are available.
#-------------------------------------------------------------------------
# Private `object` interface
#-------------------------------------------------------------------------
    def __repr__(self):
        # Debug-friendly representation showing the wrapped function and
        # the cache location.
        return '%s(func=%s, cachedir=%s)' % (
            self.__class__.__name__,
            self.func,
            repr(self.cachedir),
            )
###############################################################################
# class `Memory`
###############################################################################
class Memory(Logger):
    """ A context object for caching a function's return value each time it
        is called with the same input arguments.

        All values are cached on the filesystem, in a deep directory
        structure.

        see :ref:`memory_reference`
    """
    #-------------------------------------------------------------------------
    # Public interface
    #-------------------------------------------------------------------------
    def __init__(self, cachedir, mmap_mode=None, compress=False, verbose=1):
        """
            Parameters
            ----------
            cachedir: string or None
                The path of the base directory to use as a data store
                or None. If None is given, no caching is done and
                the Memory object is completely transparent.
            mmap_mode: {None, 'r+', 'r', 'w+', 'c'}, optional
                The memmapping mode used when loading from cache
                numpy arrays. See numpy.load for the meaning of the
                arguments.
            compress: boolean, or integer
                Whether to zip the stored data on disk. If an integer is
                given, it should be between 1 and 9, and sets the amount
                of compression. Note that compressed arrays cannot be
                read by memmapping.
            verbose: int, optional
                Verbosity flag, controls the debug messages that are issued
                as functions are evaluated.
        """
        # XXX: Bad explanation of the None value of cachedir
        Logger.__init__(self)
        self._verbose = verbose
        self.mmap_mode = mmap_mode
        self.timestamp = time.time()
        self.compress = compress
        if compress and mmap_mode is not None:
            # Compression and memmapping are mutually exclusive; warn but
            # do not fail.
            warnings.warn('Compressed results cannot be memmapped',
                          stacklevel=2)
        if cachedir is None:
            self.cachedir = None
        else:
            # All data is stored under a 'joblib' subdirectory of cachedir;
            # __reduce__ relies on this fixed suffix.
            self.cachedir = os.path.join(cachedir, 'joblib')
            mkdirp(self.cachedir)
    def cache(self, func=None, ignore=None, verbose=None,
                    mmap_mode=False):
        """ Decorates the given function func to only compute its return
            value for input arguments not cached on disk.

            Parameters
            ----------
            func: callable, optional
                The function to be decorated
            ignore: list of strings
                A list of arguments name to ignore in the hashing
            verbose: integer, optional
                The verbosity mode of the function. By default that
                of the memory object is used.
            mmap_mode: {None, 'r+', 'r', 'w+', 'c'}, optional
                The memmapping mode used when loading from cache
                numpy arrays. See numpy.load for the meaning of the
                arguments. By default that of the memory object is used.

            Returns
            -------
            decorated_func: MemorizedFunc object
                The returned object is a MemorizedFunc object, that is
                callable (behaves like a function), but offers extra
                methods for cache lookup and management. See the
                documentation for :class:`joblib.memory.MemorizedFunc`.
        """
        if func is None:
            # Partial application, to be able to specify extra keyword
            # arguments in decorators
            return functools.partial(self.cache, ignore=ignore,
                                     verbose=verbose, mmap_mode=mmap_mode)
        if self.cachedir is None:
            # No cache configured: return a transparent wrapper.
            return NotMemorizedFunc(func)
        if verbose is None:
            verbose = self._verbose
        # False (not None) is the sentinel meaning "inherit from Memory",
        # since None is a meaningful mmap_mode value.
        if mmap_mode is False:
            mmap_mode = self.mmap_mode
        if isinstance(func, MemorizedFunc):
            # Avoid double wrapping: unwrap before re-wrapping.
            func = func.func
        return MemorizedFunc(func, cachedir=self.cachedir,
                             mmap_mode=mmap_mode,
                             ignore=ignore,
                             compress=self.compress,
                             verbose=verbose,
                             timestamp=self.timestamp)
    def clear(self, warn=True):
        """ Erase the complete cache directory.
        """
        if warn:
            self.warn('Flushing completely the cache')
        rm_subdirs(self.cachedir)
    def eval(self, func, *args, **kwargs):
        """ Eval function func with arguments `*args` and `**kwargs`,
            in the context of the memory.

            This method works similarly to the builtin `apply`, except
            that the function is called only if the cache is not
            up to date.
        """
        if self.cachedir is None:
            return func(*args, **kwargs)
        return self.cache(func)(*args, **kwargs)
    #-------------------------------------------------------------------------
    # Private `object` interface
    #-------------------------------------------------------------------------
    def __repr__(self):
        return '%s(cachedir=%s)' % (
            self.__class__.__name__,
            repr(self.cachedir),
            )
    def __reduce__(self):
        """ We don't store the timestamp when pickling, to avoid the hash
            depending from it.
            In addition, when unpickling, we run the __init__
        """
        # We need to remove 'joblib' from the end of cachedir
        # (it is re-appended by __init__ on unpickling).
        cachedir = self.cachedir[:-7] if self.cachedir is not None else None
        return (self.__class__, (cachedir,
                self.mmap_mode, self.compress, self._verbose))
|
Alwnikrotikz/micolog | refs/heads/master | plugins/sys_plugin/sys_plugin.py | 10 | # -*- coding: utf-8 -*-
from micolog_plugin import *
import logging,re
from google.appengine.api import mail
from model import *
from google.appengine.api import users
from base import BaseRequestHandler,urldecode
from google.appengine.ext.webapp import template
# Default e-mail body template sent to the blog owner when a new comment is
# posted (used by sys_plugin.notify; keys are %-substituted).
SBODY='''New comment on your post "%(title)s"
Author : %(author)s
E-mail : %(email)s
URL : %(weburl)s
Comment:
%(content)s
You can see all comments on this post here:
%(commenturl)s
'''
# Default e-mail body template sent to a commenter when someone replies to
# their comment (used by sys_plugin.notify; keys are %-substituted).
BBODY='''Hi~ New reference on your comment for post "%(title)s"
Author : %(author)s
URL : %(weburl)s
Comment:
%(content)s
You can see all comments on this post here:
%(commenturl)s
'''
class NotifyHandler(BaseRequestHandler):
    """Admin page for editing the comment-notification e-mail templates."""
    def __init__(self):
        BaseRequestHandler.__init__(self)
        self.current="config"
        # Load stored templates, falling back to the built-in defaults.
        self.sbody=OptionSet.getValue('sys_plugin_sbody',SBODY)
        self.bbody=OptionSet.getValue('sys_plugin_bbody',BBODY)
    def get(self):
        # Render the setup form inside the admin layout.
        self.template_vals.update({'self':self})
        content=template.render('plugins/sys_plugin/setup.html',self.template_vals)
        self.render2('views/admin/setup_base.html',{'m_id':'sysplugin_notify','content':content})
        #Also you can use:
        #self.render2('plugins/sys_plugin/setup2.html',{'m_id':'sysplugin_notify','self':self})
    def post(self):
        # Persist the submitted templates and the notify flag, then re-render.
        self.bbody=self.param('bbody')
        self.sbody=self.param('sbody')
        self.blog.comment_notify_mail=self.parambool('comment_notify_mail')
        self.blog.put()
        OptionSet.setValue('sys_plugin_sbody',self.sbody)
        OptionSet.setValue('sys_plugin_bbody',self.bbody)
        self.get()
class sys_plugin(Plugin):
    """Built-in micolog plugin: adds generator meta/footer tags, a comment
    blocklist, and e-mail notification for new comments and replies.

    Note: this codebase uses ``my`` instead of ``self`` by convention.
    """
    def __init__(my):
        Plugin.__init__(my,__file__)
        my.author="xuming"
        my.authoruri="http://xuming.net"
        my.uri="http://xuming.net"
        my.description="System plugin for micolog"
        my.name="Sys Plugin"
        my.version="0.2"
        my.blocklist=OptionSet.getValue("sys_plugin_blocklist",default="")
        # Hook into page rendering and the comment lifecycle.
        my.register_filter('head',my.head)
        my.register_filter('footer',my.footer)
        my.register_urlmap('sys_plugin/setup',my.setup)
        my.register_urlhandler('/admin/sys_plugin/notify',NotifyHandler)
        my.register_setupmenu('sysplugin_notify',_('Notify'),'/admin/sys_plugin/notify')
        my.register_action('pre_comment',my.pre_comment)
        my.register_action('save_comment',my.save_comment)
        my.sbody=OptionSet.getValue('sys_plugin_sbody',SBODY)
        my.bbody=OptionSet.getValue('sys_plugin_bbody',BBODY)
    def head(my,content,blog=None,*arg1,**arg2):
        # Append a generator meta tag to the page head.
        # NOTE(review): blog defaults to None but blog.version is always
        # dereferenced — presumably the filter always supplies blog; confirm.
        content=content+'<meta name="generator" content="Micolog %s" />'%blog.version
        return content
    def footer(my,content,blog=None,*arg1,**arg2):
        # Append a generator comment to the page footer.
        return content+'<!--Powered by micolog %s-->'%blog.version
    def setup(my,page=None,*arg1,**arg2):
        """Admin page for editing the comment blocklist (one entry per line)."""
        if not page.is_login:
            page.redirect(users.create_login_url(page.request.uri))
        tempstr='''
		<p>blocklist:</p>
		<form action="" method="post">
		<p>
		<textarea name="ta_list" style="width:400px;height:300px">%s</textarea>
		</p>
		<input type="submit" value="submit">
		</form>'''
        if page.request.method=='GET':
            page.render2('views/admin/base.html',{'m_id':'sysplugin_block','content':tempstr%my.blocklist})
        else:
            # POST: store the submitted blocklist, then re-render the form.
            my.blocklist=page.param("ta_list")
            OptionSet.setValue("sys_plugin_blocklist",my.blocklist)
            page.render2('views/admin/base.html',{'m_id':'sysplugin_block','content':tempstr%my.blocklist})
    def get(my,page):
        # Plugin description shown in the plugin manager.
        return '''<h3>Sys Plugin</h3>
	   <p>This is a system plugin for micolog. <br>Also a demo for how to write plugin for micolog.</p>
	   <h4>feature</h4>
	   <p><ol>
	   <li>Add Meta <meta name="generator" content="Micolog x.x" /></li>
	   <li>Add footer "<!--Powered by micolog x.x-->"</li>
	   <li>Comments Filter with blocklist <a href="/e/sys_plugin/setup">Setup</a></li>
	   <li>Comment Notify <a href="/admin/sys_plugin/notify">Setup</a></li>
	   </ol></p>
	   '''
    def pre_comment(my,comment,*arg1,**arg2):
        # Reject any comment containing a blocklisted substring.
        # NOTE(review): raises a bare Exception by design — callers appear to
        # treat any exception as "comment rejected".
        for s in my.blocklist.splitlines():
            if comment.content.find(s)>-1:
                raise Exception
    def save_comment(my,comment,*arg1,**arg2):
        # After a comment is saved, send notification mail if enabled.
        if my.blog.comment_notify_mail:
            my.notify(comment)
    def notify(my,comment):
        """Send notification e-mails: one to the blog owner, and one to each
        commenter whose comment is referenced (#comment-<id>) in the new one."""
        # py2 compatibility: templates may be str or unicode.
        try:
            sbody=my.sbody.decode('utf-8')
        except:
            sbody=my.sbody
        try:
            bbody=my.bbody.decode('utf-8')
        except:
            bbody=my.bbody
        # Don't notify the owner about their own (admin) comments.
        if my.blog.comment_notify_mail and my.blog.owner and not users.is_current_user_admin() :
            sbody=sbody%{'title':comment.entry.title,
                'author':comment.author,
                'weburl':comment.weburl,
                'email':comment.email,
                'content':comment.content,
                'commenturl':comment.entry.fullurl+"#comment-"+str(comment.key().id())
                }
            mail.send_mail_to_admins(my.blog.owner.email(),'Comments:'+comment.entry.title, sbody,reply_to=comment.email)
        #reply comment mail notify
        refers = re.findall(r'#comment-(\d+)', comment.content)
        if len(refers)!=0:
            replyIDs=[int(a) for a in refers]
            commentlist=comment.entry.comments()
            # Only notify commenters who opted in, de-duplicating addresses.
            emaillist=[c.email for c in commentlist if c.reply_notify_mail and c.key().id() in replyIDs]
            emaillist = {}.fromkeys(emaillist).keys()
            for refer in emaillist:
                if my.blog.owner and mail.is_email_valid(refer):
                    emailbody = bbody%{'title':comment.entry.title,
                        'author':comment.author,
                        'weburl':comment.weburl,
                        'email':comment.email,
                        'content':comment.content,
                        'commenturl':comment.entry.fullurl+"#comment-"+str(comment.key().id())
                        }
                    message = mail.EmailMessage(sender = my.blog.owner.email(),subject = 'Comments:'+comment.entry.title)
                    message.to = refer
                    message.body = emailbody
                    message.send()
|
xin3liang/platform_external_chromium_org_third_party_libyuv | refs/heads/master | tools/valgrind-libyuv/libyuv_tests.py | 21 | #!/usr/bin/env python
# Copyright (c) 2012 The LibYuv Project Authors. All rights reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
"""Runs various libyuv tests through valgrind_test.py.
This script inherits the chrome_tests.py in Chrome, but allows running any test
instead of only the hard-coded ones. It uses the -t cmdline flag to do this, and
only supports specifying a single test for each run.
Suppression files:
The Chrome valgrind directory we use as a DEPS dependency contains the following
suppression files:
valgrind/memcheck/suppressions.txt
valgrind/memcheck/suppressions_mac.txt
valgrind/tsan/suppressions.txt
valgrind/tsan/suppressions_mac.txt
valgrind/tsan/suppressions_win32.txt
Since they're referenced from the chrome_tests.py script, we have similar files
below the directory of this script. When executing, this script will setup both
Chrome's suppression files and our own, so we can easily maintain libyuv
specific suppressions in our own files.
"""
import logging
import optparse
import os
import sys
import logging_utils
import path_utils
import chrome_tests
class LibyuvTest(chrome_tests.ChromeTests):
  """Class that handles setup of suppressions for libyuv.

  Everything else is inherited from chrome_tests.ChromeTests.
  """
  def _DefaultCommand(self, tool, exe=None, valgrind_test_args=None):
    """Override command-building method so we can add more suppressions."""
    cmd = chrome_tests.ChromeTests._DefaultCommand(self, tool, exe,
                                                   valgrind_test_args)
    # When ChromeTests._DefaultCommand has executed, it has setup suppression
    # files based on what's found in the memcheck/ or tsan/ subdirectories of
    # this script's location. If Mac or Windows is executing, additional
    # platform specific files have also been added.
    # Since only the ones located below this directory is added, we must also
    # add the ones maintained by Chrome, located in ../valgrind.
    # The idea is to look for --suppression arguments in the cmd list and add a
    # modified copy of each suppression file, for the corresponding file in
    # ../valgrind. If we would simply replace 'valgrind-libyuv' with 'valgrind'
    # we may produce invalid paths if other parts of the path contain that
    # string. That's why the code below only replaces the end of the path.
    script_dir = path_utils.ScriptDir()
    old_base, _ = os.path.split(script_dir)
    new_dir = os.path.join(old_base, 'valgrind')
    add_suppressions = []
    for token in cmd:
      if '--suppressions' in token:
        add_suppressions.append(token.replace(script_dir, new_dir))
    # Prepend so Chrome's suppressions are seen in addition to ours.
    return add_suppressions + cmd
def main(_):
  """Parse command-line options and run the requested test under valgrind.

  Returns the exit code from the test run (argv is ignored; optparse reads
  sys.argv itself).
  """
  parser = optparse.OptionParser('usage: %prog -b <dir> -t <test> <test args>')
  parser.disable_interspersed_args()
  parser.add_option('-b', '--build-dir',
                    help=('Location of the compiler output. Can only be used '
                          'when the test argument does not contain this path.'))
  parser.add_option("--target", help="Debug or Release")
  parser.add_option('-t', '--test', help='Test to run.')
  parser.add_option('', '--baseline', action='store_true', default=False,
                    help='Generate baseline data instead of validating')
  parser.add_option('', '--gtest_filter',
                    help='Additional arguments to --gtest_filter')
  parser.add_option('', '--gtest_repeat',
                    help='Argument for --gtest_repeat')
  parser.add_option("--gtest_shuffle", action="store_true", default=False,
                    help="Randomize tests' orders on every iteration.")
  parser.add_option('-v', '--verbose', action='store_true', default=False,
                    help='Verbose output - enable debug log messages')
  parser.add_option('', '--tool', dest='valgrind_tool', default='memcheck',
                    help='Specify a valgrind tool to run the tests under')
  parser.add_option('', '--tool_flags', dest='valgrind_tool_flags', default='',
                    help='Specify custom flags for the selected valgrind tool')
  parser.add_option('', '--keep_logs', action='store_true', default=False,
                    help=('Store memory tool logs in the <tool>.logs directory '
                          'instead of /tmp.\nThis can be useful for tool '
                          'developers/maintainers.\nPlease note that the <tool>'
                          '.logs directory will be clobbered on tool startup.'))
  parser.add_option("--brave-new-test-launcher", action="store_true",
                    help="run the tests with --brave-new-test-launcher")
  parser.add_option("--test-launcher-bot-mode", action="store_true",
                    help="run the tests with --test-launcher-bot-mode")
  options, args = parser.parse_args()
  if options.verbose:
    logging_utils.config_root(logging.DEBUG)
  else:
    logging_utils.config_root()
  if not options.test:
    parser.error('--test not specified')
  # Support build dir both with and without the target.
  if (options.target and options.build_dir and
      not options.build_dir.endswith(options.target)):
    options.build_dir = os.path.join(options.build_dir, options.target)
  # If --build_dir is provided, prepend it to the test executable if needed.
  test_executable = options.test
  if options.build_dir and not test_executable.startswith(options.build_dir):
    test_executable = os.path.join(options.build_dir, test_executable)
  args = [test_executable] + args
  test = LibyuvTest(options, args, 'cmdline')
  return test.Run()
if __name__ == '__main__':
  sys.exit(main(sys.argv))
|
alanthai/django-guardian | refs/heads/master | guardian/exceptions.py | 87 | """
Exceptions used by django-guardian. All internal and guardian-specific errors
should extend GuardianError class.
"""
from __future__ import unicode_literals
class GuardianError(Exception):
    """Root of the django-guardian exception hierarchy.

    Catching this type catches every guardian-specific error.
    """
class NotUserNorGroup(GuardianError):
    # Presumably raised when an identity argument is neither a User nor a
    # Group instance (inferred from the name — confirm against callers).
    pass
class ObjectNotPersisted(GuardianError):
    # Presumably raised when an operation requires a model instance that has
    # been saved to the database (inferred from the name — confirm).
    pass
class WrongAppError(GuardianError):
    # Presumably raised when a referenced app label does not match the
    # expected application (inferred from the name — confirm).
    pass
class MixedContentTypeError(GuardianError):
    # Presumably raised when objects of differing content types are mixed in
    # a single operation (inferred from the name — confirm).
    pass
|
mwx1993/TACTIC | refs/heads/master | src/tactic/ui/cgapp/app_init_wdg.py | 6 | ###########################################################
#
# Copyright (c) 2005, Southpaw Technology
# All Rights Reserved
#
# PROPRIETARY INFORMATION. This software is proprietary to
# Southpaw Technology, and is not to be reproduced, transmitted,
# or disclosed in any way without written permission.
#
#
__all__ = [ 'PyMayaInit', 'PyFlashInit', 'PyRepoActionInit', 'PyHoudiniInit', 'PyXSIInit']
from pyasm.biz import PrefSetting, Project
from pyasm.web import Html, WebContainer, Widget, DivWdg
from pyasm.widget import HiddenWdg
class PyMayaInit(Widget):
    """Widget that injects the PyMaya JavaScript bridge and the hidden form
    fields (user/handoff/project info) needed for Maya integration.

    Note: this codebase uses ``my`` instead of ``self`` by convention.
    """
    def get_display(my):
        div = DivWdg()
        # this is to prevent this function from being run in other tabs
        web = WebContainer.get_web()
        user = WebContainer.get_user_name()
        local_dir = web.get_local_dir()
        context_url = web.get_site_context_url().to_string()
        http_server = web.get_base_url().to_string()
        upload_url = web.get_upload_url()
        project_code = Project.get_project_code()
        # Load the PyMaya JS on page load and configure the client-side app
        # object with server/user context (values %-substituted below).
        div.add_behavior( {
        'type': 'load',
        'cbjs_action': '''
        var js_files = [
             "/context/javascript/PyMaya.js",
        ];
        var supp_js_files = [
            "/context/spt_js/fx_anim.js",
            "/context/javascript/PyHoudini.js",
            "/context/javascript/PyXSI.js"
        ];
        var set_up = function() {
            try {
            app = new PyMaya(); }
        catch(e) {
            app = null;
        }
        if (app) {
            app.user = '%(user)s';
            app.local_dir = '%(local_dir)s';
            app.context_url = '%(context_url)s';
            app.base_url = '%(server)s';
            app.upload_url = '%(upload_url)s';
            app.project_code = '%(project_code)s';
        }
        }
        spt.dom.load_js(js_files, function() {PyMaya(); set_up();});
        spt.dom.load_js(supp_js_files, function() {});
        '''%{
        'user': user,
        'local_dir': local_dir,
        'context_url' : context_url,
        'server': http_server,
        'upload_url': upload_url,
        'project_code': project_code }
        } )
        #pref = PrefSetting.get_value_by_key("use_java_maya")
        #if pref == "true":
        #    html.writeln("<script>app.use_java = true</script>")
        handoff_dir = web.get_client_handoff_dir(no_exception=True)
        if not handoff_dir:
            print "WARNING: handoff_dir is empty in the TACTIC config file"
        server = web.get_http_host()
        application = "maya"
        # Hidden fields consumed by the client-side scripts.
        div.add( HiddenWdg("user", user) )
        div.add( HiddenWdg("handoff_dir", handoff_dir) )
        div.add( HiddenWdg("project_code", project_code) )
        div.add( HiddenWdg("local_dir", local_dir) )
        div.add( HiddenWdg("server_name", server) )
        div.add( HiddenWdg("application", application) )
        #div.add( HiddenWdg("base_url", server) )
        #div.add( HiddenWdg("upload_url", upload_url) )
        return div
class PyFlashInit(Widget):
    """Widget that emits the inline script creating and configuring the
    client-side PyFlash object (user, paths and server URLs)."""
    def get_display(my):
        web = WebContainer.get_web()
        html = Html()
        html.writeln("<script>var pyflash=new PyFlash()</script>")
        # add in parameters for pyflash
        user = WebContainer.get_user_name()
        html.writeln("<script>pyflash.user = '%s'</script>" % user)
        local_dir = web.get_local_dir()
        html.writeln("<script>pyflash.local_dir = '%s'</script>" % local_dir)
        server = web.get_base_url().to_string()
        html.writeln("<script>pyflash.server_url = '%s'</script>" % server)
        context_url = web.get_site_context_url().to_string()
        html.writeln("<script>pyflash.context_url = '%s%s'</script>" % (server, context_url))
        upload_url = web.get_upload_url()
        html.writeln("<script>pyflash.upload_url = '%s'</script>" % upload_url)
        return html
class PyHoudiniInit(Widget):
    """Widget that emits the inline script creating and configuring the
    client-side PyHoudini app object (plus Houdini's RunHCommand helper)."""
    def get_display(my):
        web = WebContainer.get_web()
        user = WebContainer.get_user_name()
        local_dir = web.get_local_dir()
        context_url = web.get_site_context_url().to_string()
        server = web.get_base_url().to_string()
        upload_url = web.get_upload_url()
        html = Html()
        # Houdini-embedded browser resource providing the hscript bridge.
        html.writeln('<script language="JavaScript" src="resource:///res/RunHCommand.js"></script>')
        html.writeln('''\n<script>try{ app = new PyHoudini(); }
        catch(e){
            app = null;}
        if (app) {
            app.user = '%(user)s';
            app.local_dir = '%(local_dir)s';
            app.context_url = '%(context_url)s';
            app.base_url = '%(server)s';
            app.upload_url = '%(upload_url)s';
            app.project_code = '%(project_code)s';} </script>'''%{'user': user,
            'local_dir': local_dir,
            'context_url' : context_url,
            'server': server,
            'upload_url': upload_url,
            'project_code': Project.get_project_code()})
        return html
class PyXSIInit(Widget):
    """Widget that emits the inline script creating and configuring the
    client-side PyXSI app object (Softimage XSI integration)."""
    def get_display(my):
        web = WebContainer.get_web()
        user = WebContainer.get_user_name()
        local_dir = web.get_local_dir()
        context_url = web.get_site_context_url().to_string()
        server = web.get_base_url().to_string()
        upload_url = web.get_upload_url()
        html = Html()
        html.writeln('''\n<script>try{ app = new PyXSI(); }
        catch(e){
            app = null;}
        if (app) {
            app.user = '%(user)s';
            app.local_dir = '%(local_dir)s';
            app.context_url = '%(context_url)s';
            app.base_url = '%(server)s';
            app.upload_url = '%(upload_url)s';
            app.project_code = '%(project_code)s';} </script>'''%{'user': user,
            'local_dir': local_dir,
            'context_url' : context_url,
            'server': server,
            'upload_url': upload_url,
            'project_code': Project.get_project_code()})
        return html
class PyRepoActionInit(Widget):
    """Widget that emits the inline scripts creating the client-side
    Perforce (PyPerforce) and TACTIC repository helper objects."""
    def get_display(my):
        html = Html()
        html.writeln("<script>var pyp4=new PyPerforce()</script>")
        upload_url = WebContainer.get_web().get_upload_url()
        html.writeln("<script>var tactic_repo=new TacticRepo()</script>")
        html.writeln("<script>tactic_repo.upload_url='%s'</script>" %upload_url)
        return html
|
miteshvp/fabric8-analytics-worker | refs/heads/master | f8a_worker/storages/postgres_base.py | 2 | #!/usr/bin/env python3
"""Base class for PostgreSQL related adapters."""
import os
from selinon import DataStorage
from sqlalchemy import create_engine
from sqlalchemy.exc import IntegrityError, SQLAlchemyError
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm.exc import NoResultFound, MultipleResultsFound
from f8a_worker.errors import TaskAlreadyExistsError
from f8a_worker.models import Ecosystem
Base = declarative_base()
class PostgresBase(DataStorage):
    """Base class for PostgreSQL related adapters."""
    # Make these class variables and let derived classes share session so we
    # have only one postgres connection
    session = None            # shared SQLAlchemy Session (or None if not connected)
    connection_string = None  # connection string the shared session was built from
    encoding = None           # client encoding used by the shared engine
    echo = None               # SQLAlchemy echo flag used by the shared engine
    # Which table should be used for querying in derived classes
    query_table = None
    _CONF_ERROR_MESSAGE = "PostgreSQL configuration mismatch, cannot use same database adapter " \
                          "base for connecting to different PostgreSQL instances"
    def __init__(self, connection_string, encoding='utf-8', echo=False):
        """Configure database connector.

        Raises ValueError when a second instance is configured with
        different connection parameters than the first (all subclasses
        share one class-level connection).
        """
        super().__init__()
        # Allow environment-variable placeholders in the connection string.
        connection_string = connection_string.format(**os.environ)
        if PostgresBase.connection_string is None:
            PostgresBase.connection_string = connection_string
        elif PostgresBase.connection_string != connection_string:
            raise ValueError("%s: %s != %s" % (self._CONF_ERROR_MESSAGE,
                                               PostgresBase.connection_string, connection_string))
        if PostgresBase.encoding is None:
            PostgresBase.encoding = encoding
        elif PostgresBase.encoding != encoding:
            raise ValueError(self._CONF_ERROR_MESSAGE)
        if PostgresBase.echo is None:
            PostgresBase.echo = echo
        elif PostgresBase.echo != echo:
            raise ValueError(self._CONF_ERROR_MESSAGE)
        # Assign what S3 storage should be used in derived classes
        self._s3 = None
    def is_connected(self):
        """Check if the (class-wide, shared) database session exists."""
        return PostgresBase.session is not None
    def connect(self):
        """Establish connection to the database and create missing tables."""
        # Keep one connection alive and keep overflow unlimited so we can add
        # more connections in our jobs service
        engine = create_engine(
            self.connection_string,
            encoding=self.encoding,
            echo=self.echo,
            isolation_level="AUTOCOMMIT",
            pool_size=1,
            max_overflow=-1
        )
        PostgresBase.session = sessionmaker(bind=engine)()
        # Create any tables declared on Base that do not exist yet.
        Base.metadata.create_all(engine)
def disconnect(self):
"""Close connection to the database."""
if self.is_connected():
PostgresBase.session.close()
PostgresBase.session = None
def retrieve(self, flow_name, task_name, task_id):
"""Retrieve the record identified by task_id from the database."""
if not self.is_connected():
self.connect()
try:
record = PostgresBase.session.query(self.query_table). \
filter_by(worker_id=task_id). \
one()
except (NoResultFound, MultipleResultsFound):
raise
except SQLAlchemyError:
PostgresBase.session.rollback()
raise
assert record.worker == task_name
task_result = record.task_result
if not self.is_real_task_result(task_result):
# we synced results to S3, retrieve them from there
# We do not care about some specific version, so no time-based collisions possible
return self.s3.retrieve_task_result(
record.ecosystem.name,
record.package.name,
record.version.identifier,
task_name
)
return task_result
def _create_result_entry(self, node_args, flow_name, task_name, task_id, result, error=False):
raise NotImplementedError()
def store(self, node_args, flow_name, task_name, task_id, result):
"""Store the record identified by task_id into the database."""
# Sanity checks
if not self.is_connected():
self.connect()
res = self._create_result_entry(node_args, flow_name, task_name, task_id, result)
try:
PostgresBase.session.add(res)
PostgresBase.session.commit()
except SQLAlchemyError:
PostgresBase.session.rollback()
raise
def store_error(self, node_args, flow_name, task_name, task_id, exc_info, result=None):
"""Store error info to the Postgres database.
Note: We do not store errors in init tasks.
The reasoning is that init
tasks are responsible for creating database entries. We cannot rely
that all database entries are successfully created. By doing this we
remove weird-looking errors like (un-committed changes due to errors
in init task):
DETAIL: Key (package_analysis_id)=(1113452) is not present in table "package_analyses".
"""
if task_name in ('InitPackageFlow', 'InitAnalysisFlow')\
or issubclass(exc_info[0], TaskAlreadyExistsError):
return
# Sanity checks
if not self.is_connected():
self.connect()
res = self._create_result_entry(node_args, flow_name, task_name, task_id, result=result,
error=True)
try:
PostgresBase.session.add(res)
PostgresBase.session.commit()
except IntegrityError:
# the result has been already stored before the error occurred
# hence there is no reason to re-raise
PostgresBase.session.rollback()
except SQLAlchemyError:
PostgresBase.session.rollback()
raise
def get_ecosystem(self, name):
"""Get ecosystem by name."""
if not self.is_connected():
self.connect()
return Ecosystem.by_name(PostgresBase.session, name)
@staticmethod
def is_real_task_result(task_result):
"""Check that the task result is not just S3 object version reference."""
return task_result and (len(task_result.keys()) != 1 or
'version_id' not in task_result.keys())
|
kingvuplus/italysat-enigma5 | refs/heads/master | lib/python/Plugins/Extensions/GraphMultiEPG/GraphMultiEpg.py | 28 | from skin import parseColor, parseFont, parseSize
from Components.config import config, ConfigClock, ConfigInteger, ConfigSubsection, ConfigYesNo, ConfigSelection, ConfigSelectionNumber
from Components.Pixmap import Pixmap
from Components.Button import Button
from Components.ActionMap import HelpableActionMap
from Components.HTMLComponent import HTMLComponent
from Components.GUIComponent import GUIComponent
from Components.EpgList import Rect
from Components.Sources.Event import Event
from Components.MultiContent import MultiContentEntryText, MultiContentEntryPixmapAlphaTest
from Components.TimerList import TimerList
from Components.Renderer.Picon import getPiconName
from Components.Sources.ServiceEvent import ServiceEvent
from Screens.Screen import Screen
from Screens.HelpMenu import HelpableScreen
from Screens.EventView import EventViewEPGSelect
from Screens.TimeDateInput import TimeDateInput
from Screens.TimerEntry import TimerEntry
from Screens.EpgSelection import EPGSelection
from Screens.TimerEdit import TimerSanityConflict
from Screens.MessageBox import MessageBox
from Screens.ChoiceBox import ChoiceBox
from Tools.Directories import resolveFilename, SCOPE_CURRENT_SKIN
from RecordTimer import RecordTimerEntry, parseEvent, AFTEREVENT
from ServiceReference import ServiceReference, isPlayableForCur
from Tools.LoadPixmap import LoadPixmap
from Tools.Alternatives import CompareWithAlternatives
from Tools import Notifications
from enigma import eEPGCache, eListbox, gFont, eListboxPythonMultiContent, RT_HALIGN_LEFT, RT_HALIGN_RIGHT, RT_HALIGN_CENTER,\
RT_VALIGN_CENTER, RT_WRAP, BT_SCALE, BT_KEEP_ASPECT_RATIO, eSize, eRect, eTimer, getBestPlayableServiceReference, loadPNG
from GraphMultiEpgSetup import GraphMultiEpgSetup
from time import localtime, time, strftime
# Maximum number of vertical time-marker lines the screen can display.
MAX_TIMELINES = 6

# Persistent user settings for the graphical multi-EPG screen.
config.misc.graph_mepg = ConfigSubsection()
# Last "go to" time and the width of the visible time window (minutes).
config.misc.graph_mepg.prev_time = ConfigClock(default = time())
config.misc.graph_mepg.prev_time_period = ConfigInteger(default = 120, limits = (60, 300))
# Relative adjustment of the event font size, in points.
config.misc.graph_mepg.ev_fontsize = ConfigSelectionNumber(default = 0, stepwidth = 1, min = -8, max = 8, wraparound = True)
# Services per page in normal mode vs. list mode.
config.misc.graph_mepg.items_per_page = ConfigSelectionNumber(min = 3, max = 40, stepwidth = 1, default = 6, wraparound = True)
config.misc.graph_mepg.items_per_page_listscreen = ConfigSelectionNumber(min = 3, max = 60, stepwidth = 1, default = 12, wraparound = True)
# Whether list mode is the default presentation.
config.misc.graph_mepg.default_mode = ConfigYesNo(default = False)
# Skip services that have no EPG events when navigating.
config.misc.graph_mepg.overjump = ConfigYesNo(default = True)
# Center the time labels between the marker lines instead of left-aligning.
config.misc.graph_mepg.center_timeline = ConfigYesNo(default = False)
# How the service column is rendered: name, picon, or both.
config.misc.graph_mepg.servicetitle_mode = ConfigSelection(default = "picon+servicename", choices = [
	("servicename", _("Service name")),
	("picon", _("Picon")),
	("picon+servicename", _("Picon and service name")) ])
# Granularity (seconds) the "go to" time is rounded down to.
config.misc.graph_mepg.roundTo = ConfigSelection(default = "900", choices = [("900", _("%d minutes") % 15), ("1800", _("%d minutes") % 30), ("3600", _("%d minutes") % 60)])
config.misc.graph_mepg.OKButton = ConfigSelection(default = "info", choices = [("info", _("Show detailed event info")), ("zap", _("Zap to selected channel"))])
# Text alignment flag combinations offered for event and service labels.
possibleAlignmentChoices = [
	( str(RT_HALIGN_LEFT   | RT_VALIGN_CENTER          ) , _("left")),
	( str(RT_HALIGN_CENTER | RT_VALIGN_CENTER          ) , _("centered")),
	( str(RT_HALIGN_RIGHT  | RT_VALIGN_CENTER          ) , _("right")),
	( str(RT_HALIGN_LEFT   | RT_VALIGN_CENTER | RT_WRAP) , _("left, wrapped")),
	( str(RT_HALIGN_CENTER | RT_VALIGN_CENTER | RT_WRAP) , _("centered, wrapped")),
	( str(RT_HALIGN_RIGHT  | RT_VALIGN_CENTER | RT_WRAP) , _("right, wrapped"))]
config.misc.graph_mepg.event_alignment = ConfigSelection(default = possibleAlignmentChoices[0][0], choices = possibleAlignmentChoices)
config.misc.graph_mepg.servicename_alignment = ConfigSelection(default = possibleAlignmentChoices[0][0], choices = possibleAlignmentChoices)
# Offer GraphMultiEPG in the extensions (blue button) menu.
config.misc.graph_mepg.extension_menu = ConfigYesNo(default = True)

# Module-level flag tracking the currently active presentation mode.
listscreen = config.misc.graph_mepg.default_mode.value
class EPGList(HTMLComponent, GUIComponent):
	"""Multi-service EPG grid list.

	Each list row is a tuple (service, service_name, events, picon) where
	``events`` is a list of (event_id, event_title, begin_time, duration)
	tuples covering the currently visible time window.

	Fixes over the previous revision:
	  * connectSelectionChanged()/disconnectSelectionChanged() were missing
	    the ``self`` parameter and raised TypeError when called;
	  * applySkin() re-tested "ServiceNameBackgroundColor" in the recording
	    branch (unreachable) instead of "ServiceNameBackgroundColorRecording";
	  * selEntry() passed the bound method ``event_rect.height`` instead of
	    the ``h`` attribute when building the selection Rect.
	"""

	def __init__(self, selChangedCB = None, timer = None, time_epoch = 120, overjump_empty = True):
		GUIComponent.__init__(self)
		self.cur_event = None
		self.cur_service = None
		self.offs = 0					# page offset in units of time_epoch
		self.timer = timer				# RecordTimer used for clock icons
		self.last_time = time()
		self.onSelChanged = [ ]
		if selChangedCB is not None:
			self.onSelChanged.append(selChangedCB)
		self.l = eListboxPythonMultiContent()
		self.l.setBuildFunc(self.buildEntry)
		self.setOverjump_Empty(overjump_empty)
		self.epgcache = eEPGCache.getInstance()
		# Clock icons, indexed by the timer state codes returned by isInTimer().
		self.clocks = [ LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_SKIN, 'skin_default/icons/epgclock_add.png')),
			LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_SKIN, 'skin_default/icons/epgclock_pre.png')),
			LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_SKIN, 'skin_default/icons/epgclock.png')),
			LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_SKIN, 'skin_default/icons/epgclock_prepost.png')),
			LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_SKIN, 'skin_default/icons/epgclock_post.png')),
			LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_SKIN, 'skin_default/icons/zapclock_add.png')),
			LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_SKIN, 'skin_default/icons/zapclock_pre.png')),
			LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_SKIN, 'skin_default/icons/zapclock.png')),
			LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_SKIN, 'skin_default/icons/zapclock_prepost.png')),
			LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_SKIN, 'skin_default/icons/zapclock_post.png')),
			LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_SKIN, 'skin_default/icons/zaprecclock_add.png')),
			LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_SKIN, 'skin_default/icons/zaprecclock_pre.png')),
			LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_SKIN, 'skin_default/icons/zaprecclock.png')),
			LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_SKIN, 'skin_default/icons/zaprecclock_prepost.png')),
			LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_SKIN, 'skin_default/icons/zaprecclock_post.png')) ]

		self.time_base = None
		self.time_epoch = time_epoch
		self.list = None
		self.select_rect = None
		self.event_rect = None
		self.service_rect = None
		self.picon_size = None
		self.currentlyPlaying = None
		self.showPicon = False
		self.showServiceTitle = True

		self.nowEvPix = None
		self.othEvPix = None
		self.selEvPix = None
		self.recEvPix = None
		self.curSerPix = None

		# Defaults; overridden from the skin in applySkin().
		self.foreColor = 0xffffff
		self.foreColorSelected = 0xffc000
		self.borderColor = 0x464445
		self.backColor = 0x595959
		self.backColorSelected = 0x808080
		self.foreColorService = 0xffffff
		self.foreColorServiceSelected = 0xffffff
		self.backColorService = 0x000000
		self.backColorServiceSelected = 0x508050
		self.borderColorService = 0x000000
		self.foreColorNow = 0xffffff
		self.backColorNow = 0x505080
		self.foreColorRec = 0xffffff
		self.backColorRec = 0x805050
		self.serviceFont = gFont("Regular", 20)
		self.entryFontName = "Regular"
		self.entryFontSize = 18

		self.listHeight = None
		self.listWidth = None
		self.serviceBorderWidth = 1
		self.serviceNamePadding = 0
		self.eventBorderWidth = 1
		self.eventNamePadding = 0

	def applySkin(self, desktop, screen):
		"""Consume our custom skin attributes, pass the rest to GUIComponent."""
		if self.skinAttributes is not None:
			attribs = [ ]
			for (attrib, value) in self.skinAttributes:
				if attrib == "EntryForegroundColor":
					self.foreColor = parseColor(value).argb()
				elif attrib == "EntryForegroundColorSelected":
					self.foreColorSelected = parseColor(value).argb()
				elif attrib == "EntryBackgroundColor":
					self.backColor = parseColor(value).argb()
				elif attrib == "EntryBackgroundColorSelected":
					self.backColorSelected = parseColor(value).argb()
				elif attrib == "EntryBorderColor":
					self.borderColor = parseColor(value).argb()
				elif attrib == "EntryFont":
					font = parseFont(value, ((1,1),(1,1)) )
					self.entryFontName = font.family
					self.entryFontSize = font.pointSize
				elif attrib == "ServiceForegroundColor" or attrib == "ServiceNameForegroundColor":
					self.foreColorService = parseColor(value).argb()
				elif attrib == "ServiceForegroundColorSelected":
					self.foreColorServiceSelected = parseColor(value).argb()
				elif attrib == "ServiceBackgroundColor" or attrib == "ServiceNameBackgroundColor":
					self.backColorService = parseColor(value).argb()
				elif attrib == "ServiceBackgroundColorSelected":
					self.backColorServiceSelected = parseColor(value).argb()
				# was "...or ServiceNameBackgroundColor" which is already consumed
				# above and therefore unreachable here; the recording variant is meant
				elif attrib == "ServiceBackgroundColorRecording" or attrib == "ServiceNameBackgroundColorRecording":
					self.backColorRec = parseColor(value).argb()
				elif attrib == "ServiceForegroundColorRecording":
					self.foreColorRec = parseColor(value).argb()
				elif attrib == "ServiceBorderColor":
					self.borderColorService = parseColor(value).argb()
				elif attrib == "ServiceFont":
					self.serviceFont = parseFont(value, ((1,1),(1,1)) )
				elif attrib == "EntryBackgroundColorNow":
					self.backColorNow = parseColor(value).argb()
				elif attrib == "EntryForegroundColorNow":
					self.foreColorNow = parseColor(value).argb()
				elif attrib == "ServiceBorderWidth":
					self.serviceBorderWidth = int(value)
				elif attrib == "ServiceNamePadding":
					self.serviceNamePadding = int(value)
				elif attrib == "EventBorderWidth":
					self.eventBorderWidth = int(value)
				elif attrib == "EventNamePadding":
					self.eventNamePadding = int(value)
				else:
					attribs.append((attrib,value))
			self.skinAttributes = attribs
		self.l.setFont(0, self.serviceFont)
		self.setEventFontsize()
		rc = GUIComponent.applySkin(self, desktop, screen)
		# now we know our size and can safely set items per page
		self.listHeight = self.instance.size().height()
		self.listWidth = self.instance.size().width()
		self.setItemsPerPage()
		return rc

	def isSelectable(self, service, service_name, events, picon):
		# A row is selectable only when it carries at least one event.
		return (events and len(events) and True) or False

	def setShowServiceMode(self, value):
		"""Apply the servicetitle_mode config value ("picon"/"servicename"/both)."""
		self.showServiceTitle = "servicename" in value
		self.showPicon = "picon" in value
		self.recalcEntrySize()
		self.selEntry(0) #Select entry again so that the clipping region gets updated if needed

	def setOverjump_Empty(self, overjump_empty):
		# Install/remove the selectability filter that skips event-less rows.
		if overjump_empty:
			self.l.setSelectableFunc(self.isSelectable)
		else:
			self.l.setSelectableFunc(None)

	def setEpoch(self, epoch):
		"""Change the visible time window (minutes) and refill the list."""
		self.offs = 0
		self.time_epoch = epoch
		self.fillMultiEPG(None) # refill

	def setCurrentlyPlaying(self, serviceref):
		self.currentlyPlaying = serviceref

	def getEventFromId(self, service, eventid):
		"""Resolve an event id to a full eServiceEvent via the EPG cache."""
		event = None
		if self.epgcache is not None and eventid is not None:
			event = self.epgcache.lookupEventId(service.ref, eventid)
		return event

	def getIndexFromService(self, serviceref):
		"""Return the row index of serviceref (alternatives-aware) or None."""
		if serviceref is not None:
			for x in range(len(self.list)):
				if CompareWithAlternatives(self.list[x][0], serviceref.toString()):
					return x
		return None

	def moveToService(self, serviceref):
		newIdx = self.getIndexFromService(serviceref)
		if newIdx is None:
			newIdx = 0
		self.setCurrentIndex(newIdx)

	def setCurrentIndex(self, index):
		if self.instance is not None:
			self.instance.moveSelectionTo(index)

	def moveTo(self, dir):
		if self.instance is not None:
			self.instance.moveSelection(dir)

	def getCurrent(self):
		"""Return (event, service) for the current selection.

		event is None when the selected service has no events; service is
		None only when there is no selection at all.
		"""
		if self.cur_service is None:
			return (None, None)
		old_service = self.cur_service  #(service, service_name, events, picon)
		events = self.cur_service[2]
		refstr = self.cur_service[0]
		if self.cur_event is None or not events or not len(events):
			return (None, ServiceReference(refstr))
		event = events[self.cur_event] #(event_id, event_title, begin_time, duration)
		eventid = event[0]
		service = ServiceReference(refstr)
		event = self.getEventFromId(service, eventid) # get full event info
		return (event, service)

	def connectSelectionChanged(self, func):
		# BUGFIX: 'self' was missing from the signature, so calling this
		# method always raised TypeError.
		if not self.onSelChanged.count(func):
			self.onSelChanged.append(func)

	def disconnectSelectionChanged(self, func):
		# BUGFIX: 'self' was missing here as well.
		self.onSelChanged.remove(func)

	def serviceChanged(self):
		cur_sel = self.l.getCurrentSelection()
		if cur_sel:
			self.findBestEvent()

	def findBestEvent(self):
		"""Pick the event to select on the newly focused service row.

		Prefers the event covering the begin time of the previously selected
		event (or "now" if that is in the past), so vertical navigation stays
		at roughly the same time slot.
		"""
		old_service = self.cur_service  #(service, service_name, events, picon)
		cur_service = self.cur_service = self.l.getCurrentSelection()
		time_base = self.getTimeBase()
		now = time()
		if old_service and self.cur_event is not None:
			events = old_service[2]
			cur_event = events[self.cur_event] #(event_id, event_title, begin_time, duration)
			if self.last_time < cur_event[2] or cur_event[2]+cur_event[3] < self.last_time:
				self.last_time = cur_event[2]
		if now > self.last_time:
			self.last_time = now
		if cur_service:
			self.cur_event = None
			events = cur_service[2]
			if events and len(events):
				self.cur_event = idx = 0
				for event in events: #iterate all events
					if event[2] <= self.last_time and event[2]+event[3] > self.last_time:
						self.cur_event = idx
						break
					idx += 1
		self.selEntry(0)

	def selectionChanged(self):
		for x in self.onSelChanged:
			if x is not None:
				x()

	GUI_WIDGET = eListbox

	def setItemsPerPage(self):
		"""Resize the widget so a whole number of rows fits the configured page size."""
		global listscreen
		if self.listHeight > 0:
			if listscreen:
				itemHeight = self.listHeight / config.misc.graph_mepg.items_per_page_listscreen.value
			else:
				itemHeight = self.listHeight / config.misc.graph_mepg.items_per_page.value
		else:
			itemHeight = 54 # some default (270/5)
		if listscreen:
			self.instance.resize(eSize(self.listWidth, itemHeight * config.misc.graph_mepg.items_per_page_listscreen.value))
		else:
			self.instance.resize(eSize(self.listWidth, itemHeight * config.misc.graph_mepg.items_per_page.value))
		self.l.setItemHeight(itemHeight)
		# Event/service background pixmaps from the skin (may be missing -> None).
		self.nowEvPix = loadPNG(resolveFilename(SCOPE_CURRENT_SKIN, 'epg/CurrentEvent.png'))
		self.othEvPix = loadPNG(resolveFilename(SCOPE_CURRENT_SKIN, 'epg/OtherEvent.png'))
		self.selEvPix = loadPNG(resolveFilename(SCOPE_CURRENT_SKIN, 'epg/SelectedEvent.png'))
		self.recEvPix = loadPNG(resolveFilename(SCOPE_CURRENT_SKIN, 'epg/RecordingEvent.png'))
		self.curSerPix = loadPNG(resolveFilename(SCOPE_CURRENT_SKIN, 'epg/CurrentService.png'))

	def setEventFontsize(self):
		self.l.setFont(1, gFont(self.entryFontName, self.entryFontSize + config.misc.graph_mepg.ev_fontsize.value))

	def postWidgetCreate(self, instance):
		instance.setWrapAround(True)
		instance.selectionChanged.get().append(self.serviceChanged)
		instance.setContent(self.l)
		self.l.setSelectionClip(eRect(0, 0, 0, 0), False)

	def preWidgetRemove(self, instance):
		instance.selectionChanged.get().remove(self.serviceChanged)
		instance.setContent(None)

	def recalcEntrySize(self):
		"""Recompute the service column / event area rectangles and picon size."""
		esize = self.l.getItemSize()
		width = esize.width()
		height = esize.height()
		if self.showServiceTitle:
			w = width / 10 * 2;
		else:     # if self.showPicon:    # this must be set if showServiceTitle is None
			w = 2 * height - 2 * self.serviceBorderWidth  # FIXME: could do better...
		self.service_rect = Rect(0, 0, w, height)
		self.event_rect = Rect(w, 0, width - w, height)
		piconHeight = height - 2 * self.serviceBorderWidth
		piconWidth = 2 * piconHeight  # FIXME: could do better...
		if piconWidth > w - 2 * self.serviceBorderWidth:
			piconWidth = w - 2 * self.serviceBorderWidth
		self.picon_size = eSize(piconWidth, piconHeight)

	def calcEntryPosAndWidthHelper(self, stime, duration, start, end, width):
		# Map an event [stime, stime+duration) onto pixel coordinates inside
		# the [start, end) window, clipping at both edges.
		xpos = (stime - start) * width / (end - start)
		ewidth = (stime + duration - start) * width / (end - start)
		ewidth -= xpos;
		if xpos < 0:
			ewidth += xpos;
			xpos = 0;
		if (xpos + ewidth) > width:
			ewidth = width - xpos
		return xpos, ewidth

	def calcEntryPosAndWidth(self, event_rect, time_base, time_epoch, ev_start, ev_duration):
		xpos, width = self.calcEntryPosAndWidthHelper(ev_start, ev_duration, time_base, time_base + time_epoch * 60, event_rect.width())
		return xpos + event_rect.left(), width

	def buildEntry(self, service, service_name, events, picon):
		"""MultiContent build function: render one service row."""
		r1 = self.service_rect
		r2 = self.event_rect
		selected = self.cur_service[0] == service

		# Picon and Service name
		if CompareWithAlternatives(service, self.currentlyPlaying and self.currentlyPlaying.toString()):
			serviceForeColor = self.foreColorServiceSelected
			serviceBackColor = self.backColorServiceSelected
			bgpng = self.curSerPix or self.nowEvPix
			currentservice = True
		else:
			serviceForeColor = self.foreColorService
			serviceBackColor = self.backColorService
			bgpng = self.othEvPix
			currentservice = False

		res = [ None ]
		if bgpng is not None:    # background for service rect
			res.append(MultiContentEntryPixmapAlphaTest(
					pos = (r1.x + self.serviceBorderWidth, r1.y + self.serviceBorderWidth),
					size = (r1.w - 2 * self.serviceBorderWidth, r1.h - 2 * self.serviceBorderWidth),
					png = bgpng,
					flags = BT_SCALE))
		else:
			res.append(MultiContentEntryText(
					pos  = (r1.x, r1.y),
					size = (r1.w, r1.h),
					font = 0, flags = RT_HALIGN_LEFT | RT_VALIGN_CENTER,
					text = "",
					color = serviceForeColor, color_sel = serviceForeColor,
					backcolor = serviceBackColor, backcolor_sel = serviceBackColor))

		displayPicon = None
		if self.showPicon:
			if picon is None: # go find picon and cache its location
				picon = getPiconName(service)
				curIdx = self.l.getCurrentSelectionIndex()
				self.list[curIdx] = (service, service_name, events, picon)
			piconWidth = self.picon_size.width()
			piconHeight = self.picon_size.height()
			if picon != "":
				displayPicon = loadPNG(picon)
			if displayPicon is not None:
				res.append(MultiContentEntryPixmapAlphaTest(
					pos = (r1.x + self.serviceBorderWidth, r1.y + self.serviceBorderWidth),
					size = (piconWidth, piconHeight),
					png = displayPicon,
					backcolor = None, backcolor_sel = None, flags = BT_SCALE | BT_KEEP_ASPECT_RATIO))
			elif not self.showServiceTitle:
				# no picon so show servicename anyway in picon space
				namefont = 1
				namefontflag = int(config.misc.graph_mepg.servicename_alignment.value)
				namewidth = piconWidth
				piconWidth = 0
		else:
			piconWidth = 0

		if self.showServiceTitle: # we have more space so reset parms
			namefont = 0
			namefontflag = int(config.misc.graph_mepg.servicename_alignment.value)
			namewidth = r1.w - piconWidth

		if self.showServiceTitle or displayPicon is None:
			res.append(MultiContentEntryText(
				pos = (r1.x + piconWidth + self.serviceBorderWidth + self.serviceNamePadding,
					r1.y + self.serviceBorderWidth),
				size = (namewidth - 2 * (self.serviceBorderWidth + self.serviceNamePadding),
					r1.h - 2 * self.serviceBorderWidth),
				font = namefont, flags = namefontflag,
				text = service_name,
				color = serviceForeColor, color_sel = serviceForeColor,
				backcolor = None, backcolor_sel = None))

		# Events for service
		backColorSel = self.backColorSelected
		if events:
			start = self.time_base + self.offs * self.time_epoch * 60
			end = start + self.time_epoch * 60
			left = r2.x
			top = r2.y
			width = r2.w
			height = r2.h
			now = time()
			for ev in events:  #(event_id, event_title, begin_time, duration)
				stime = ev[2]
				duration = ev[3]
				xpos, ewidth = self.calcEntryPosAndWidthHelper(stime, duration, start, end, width)
				rec = self.timer.isInTimer(ev[0], stime, duration, service)

				# event box background
				foreColorSelected = foreColor = self.foreColor
				if stime <= now and now < stime + duration:
					backColor = self.backColorNow
					if isPlayableForCur(ServiceReference(service).ref):
						foreColor = self.foreColorNow
						foreColorSelected = self.foreColorSelected
				else:
					backColor = self.backColor

				if selected and self.select_rect.x == xpos + left and self.selEvPix:
					bgpng = self.selEvPix
					backColorSel = None
				elif rec is not None and rec[1][-1] in (2, 12):
					bgpng = self.recEvPix
					foreColor = self.foreColorRec
					backColor = self.backColorRec
				elif stime <= now and now < stime + duration:
					bgpng = self.nowEvPix
				elif currentservice:
					bgpng = self.curSerPix or self.othEvPix
					backColor = self.backColorServiceSelected
				else:
					bgpng = self.othEvPix

				if bgpng is not None:
					res.append(MultiContentEntryPixmapAlphaTest(
						pos = (left + xpos + self.eventBorderWidth, top + self.eventBorderWidth),
						size = (ewidth - 2 * self.eventBorderWidth, height - 2 * self.eventBorderWidth),
						png = bgpng,
						flags = BT_SCALE))
				else:
					res.append(MultiContentEntryText(
						pos = (left + xpos, top), size = (ewidth, height),
						font = 1, flags = int(config.misc.graph_mepg.event_alignment.value),
						text = "", color = None, color_sel = None,
						backcolor = backColor, backcolor_sel = backColorSel))

				# event text
				evX = left + xpos + self.eventBorderWidth + self.eventNamePadding
				evY = top + self.eventBorderWidth
				evW = ewidth - 2 * (self.eventBorderWidth + self.eventNamePadding)
				evH = height - 2 * self.eventBorderWidth
				if evW > 0:
					res.append(MultiContentEntryText(
						pos = (evX, evY),
						size = (evW, evH),
						font = 1,
						flags = int(config.misc.graph_mepg.event_alignment.value),
						text = ev[1],
						color = foreColor,
						color_sel = foreColorSelected))
				# recording icons
				if rec is not None:
					for i in range(len(rec[1])):
						if ewidth < (i + 1) * 22:
							break
						res.append(MultiContentEntryPixmapAlphaTest(
							pos = (left + xpos + ewidth - (i + 1) * 22, top + height - 22), size = (21, 21),
							png = self.clocks[rec[1][len(rec[1]) - 1 - i]]))
		else:
			if selected and self.selEvPix:
				res.append(MultiContentEntryPixmapAlphaTest(
					pos = (r2.x + self.eventBorderWidth, r2.y + self.eventBorderWidth),
					size = (r2.w - 2 * self.eventBorderWidth, r2.h - 2 * self.eventBorderWidth),
					png = self.selEvPix,
					flags = BT_SCALE))
		return res

	def selEntry(self, dir, visible = True):
		"""Move the event selection; dir: 0 stay, +-1 event, +-2 page, +-3 day.

		Returns True when the list had to be refilled (time window moved).
		"""
		cur_service = self.cur_service  #(service, service_name, events, picon)
		self.recalcEntrySize()
		valid_event = self.cur_event is not None
		if cur_service:
			update = True
			entries = cur_service[2]
			if dir == 0: #current
				update = False
			elif dir == +1: #next
				if valid_event and self.cur_event + 1 < len(entries):
					self.cur_event += 1
				else:
					self.offs += 1
					self.fillMultiEPG(None) # refill
					return True
			elif dir == -1: #prev
				if valid_event and self.cur_event - 1 >= 0:
					self.cur_event -= 1
				elif self.offs > 0:
					self.offs -= 1
					self.fillMultiEPG(None) # refill
					return True
			elif dir == +2: #next page
				self.offs += 1
				self.fillMultiEPG(None) # refill
				return True
			elif dir == -2: #prev
				if self.offs > 0:
					self.offs -= 1
					self.fillMultiEPG(None) # refill
					return True
			elif dir == +3: #next day
				self.offs += 60 * 24 / self.time_epoch
				self.fillMultiEPG(None) # refill
				return True
			elif dir == -3: #prev day
				self.offs -= 60 * 24 / self.time_epoch
				if self.offs < 0:
					self.offs = 0;
				self.fillMultiEPG(None) # refill
				return True
		if cur_service and valid_event:
			entry = entries[self.cur_event] #(event_id, event_title, begin_time, duration)
			time_base = self.time_base + self.offs*self.time_epoch * 60
			xpos, width = self.calcEntryPosAndWidth(self.event_rect, time_base, self.time_epoch, entry[2], entry[3])
			# BUGFIX: was self.event_rect.height (the bound method), not the value
			self.select_rect = Rect(xpos, 0, width, self.event_rect.h)
			self.l.setSelectionClip(eRect(xpos, 0, width, self.event_rect.h), visible and update)
		else:
			self.select_rect = self.event_rect
			self.l.setSelectionClip(eRect(self.event_rect.x, self.event_rect.y, self.event_rect.w, self.event_rect.h), False)
		self.selectionChanged()
		return False

	def fillMultiEPG(self, services, stime = None):
		"""(Re)query the EPG cache and rebuild self.list.

		services is None to refill for the current service set (e.g. after
		paging), or a list of ServiceReferences for a fresh bouquet.
		"""
		if stime is not None:
			self.time_base = int(stime)
		if services is None:
			time_base = self.time_base + self.offs * self.time_epoch * 60
			test = [ (service[0], 0, time_base, self.time_epoch) for service in self.list ]
			serviceList = self.list
			piconIdx = 3
		else:
			self.cur_event = None
			self.cur_service = None
			test = [ (service.ref.toString(), 0, self.time_base, self.time_epoch) for service in services ]
			serviceList = services
			piconIdx = 0

		test.insert(0, 'XRnITBD') #return record, service ref, service name, event id, event title, begin time, duration
		epg_data = [] if self.epgcache is None else self.epgcache.lookupEvent(test)
		self.list = [ ]
		tmp_list = None
		service = ""
		sname = ""

		serviceIdx = 0
		for x in epg_data:
			if service != x[0]:
				if tmp_list is not None:
					picon = None if piconIdx == 0 else serviceList[serviceIdx][piconIdx]
					self.list.append((service, sname, tmp_list[0][0] is not None and tmp_list or None, picon))
					serviceIdx += 1
				service = x[0]
				sname = x[1]
				tmp_list = [ ]
			tmp_list.append((x[2], x[3], x[4], x[5])) #(event_id, event_title, begin_time, duration)
		if tmp_list and len(tmp_list):
			picon = None if piconIdx == 0 else serviceList[serviceIdx][piconIdx]
			self.list.append((service, sname, tmp_list[0][0] is not None and tmp_list or None, picon))
			serviceIdx += 1

		self.l.setList(self.list)
		self.findBestEvent()

	def getEventRect(self):
		# Translate to absolute screen coordinates of the widget.
		rc = self.event_rect
		return Rect( rc.left() + (self.instance and self.instance.position().x() or 0), rc.top(), rc.width(), rc.height() )

	def getServiceRect(self):
		rc = self.service_rect
		return Rect( rc.left() + (self.instance and self.instance.position().x() or 0), rc.top(), rc.width(), rc.height() )

	def getTimeEpoch(self):
		return self.time_epoch

	def getTimeBase(self):
		return self.time_base + (self.offs * self.time_epoch * 60)

	def resetOffset(self):
		self.offs = 0
class TimelineText(HTMLComponent, GUIComponent):
	"""Header row of the EPG grid: date label plus time-of-day labels.

	Also repositions the vertical time-marker pixmaps and the "now" marker.
	Fix: ``self.datefmt`` used to be assigned only by setDateFormat(), so a
	setEntries() call before setDateFormat() raised AttributeError; it now
	has a sensible default from __init__().
	"""

	def __init__(self):
		GUIComponent.__init__(self)
		self.l = eListboxPythonMultiContent()
		self.l.setSelectionClip(eRect(0, 0, 0, 0))
		self.l.setItemHeight(25);
		self.foreColor = 0xffc000
		self.backColor = 0x000000
		self.time_base = 0
		self.time_epoch = 0
		self.font = gFont("Regular", 20)
		# Default date format; setDateFormat() may override it depending on
		# the configured service-title mode.
		self.datefmt = _("%A %d %B")

	GUI_WIDGET = eListbox

	def applySkin(self, desktop, screen):
		"""Consume our custom skin attributes, pass the rest to GUIComponent."""
		if self.skinAttributes is not None:
			attribs = [ ]
			for (attrib, value) in self.skinAttributes:
				if attrib == "foregroundColor":
					self.foreColor = parseColor(value).argb()
				elif attrib == "backgroundColor":
					self.backColor = parseColor(value).argb()
				elif attrib == "font":
					self.font = parseFont(value,  ((1, 1), (1, 1)) )
				else:
					attribs.append((attrib,value))
			self.skinAttributes = attribs
		self.l.setFont(0, self.font)
		return GUIComponent.applySkin(self, desktop, screen)

	def postWidgetCreate(self, instance):
		instance.setContent(self.l)

	def setDateFormat(self, value):
		# Wide service column (service name shown) leaves room for the long
		# date form; picon-only mode gets the compact day-month form.
		if "servicename" in value:
			self.datefmt = _("%A %d %B")
		elif "picon" in value:
			self.datefmt = _("%d-%m")

	def setEntries(self, l, timeline_now, time_lines, force):
		"""Rebuild the timeline labels and reposition the marker pixmaps.

		:param l: the EPGList providing geometry and the time window
		:param timeline_now: Pixmap marking the current time
		:param time_lines: list of MAX_TIMELINES vertical marker Pixmaps
		:param force: rebuild even when the time window did not change
		"""
		event_rect = l.getEventRect()
		time_epoch = l.getTimeEpoch()
		time_base = l.getTimeBase()

		if event_rect is None or time_epoch is None or time_base is None:
			return

		eventLeft = event_rect.left()
		res = [ None ]

		# Note: event_rect and service_rect are relative to the timeline_text position
		#       while the time lines are relative to the GraphEPG screen position!
		if self.time_base != time_base or self.time_epoch != time_epoch or force:
			service_rect = l.getServiceRect()
			itemHeight = self.l.getItemSize().height()
			time_steps = 60 if time_epoch > 180 else 30
			num_lines = time_epoch / time_steps
			timeStepsCalc = time_steps * 60
			incWidth = event_rect.width() / num_lines
			if int(config.misc.graph_mepg.center_timeline.value):
				tlMove = incWidth / 2
				tlFlags = RT_HALIGN_CENTER | RT_VALIGN_CENTER
			else:
				tlMove = 0
				tlFlags = RT_HALIGN_LEFT | RT_VALIGN_CENTER

			res.append( MultiContentEntryText(
				pos = (0, 0),
				size = (service_rect.width(), itemHeight),
				font = 0, flags = RT_HALIGN_LEFT | RT_VALIGN_CENTER,
				text = strftime(self.datefmt, localtime(time_base)),
				color = self.foreColor, color_sel = self.foreColor,
				backcolor = self.backColor, backcolor_sel = self.backColor) )

			xpos = 0 # eventLeft
			for x in range(0, num_lines):
				res.append( MultiContentEntryText(
					pos = (service_rect.width() + xpos-tlMove, 0),
					size = (incWidth, itemHeight),
					font = 0, flags = tlFlags,
					text = strftime("%H:%M", localtime( time_base + x*timeStepsCalc )),
					color = self.foreColor, color_sel = self.foreColor,
					backcolor = self.backColor, backcolor_sel = self.backColor) )
				line = time_lines[x]
				old_pos = line.position
				line.setPosition(xpos + eventLeft, old_pos[1])
				line.visible = True
				xpos += incWidth
			for x in range(num_lines, MAX_TIMELINES):
				time_lines[x].visible = False
			self.l.setList([res])
			self.time_base = time_base
			self.time_epoch = time_epoch

		now = time()
		if now >= time_base and now < (time_base + time_epoch * 60):
			xpos = int((((now - time_base) * event_rect.width()) / (time_epoch * 60)) - (timeline_now.instance.size().width() / 2))
			old_pos = timeline_now.position
			new_pos = (xpos + eventLeft, old_pos[1])
			if old_pos != new_pos:
				timeline_now.setPosition(new_pos[0], new_pos[1])
			timeline_now.visible = True
		else:
			timeline_now.visible = False
class GraphMultiEPG(Screen, HelpableScreen):
EMPTY = 0
ADD_TIMER = 1
REMOVE_TIMER = 2
ZAP = 1
def __init__(self, session, services, zapFunc=None, bouquetChangeCB=None, bouquetname=""):
Screen.__init__(self, session)
self.bouquetChangeCB = bouquetChangeCB
now = time() - config.epg.histminutes.value * 60
self.ask_time = now - now % int(config.misc.graph_mepg.roundTo.value)
self["key_red"] = Button("")
self["key_green"] = Button("")
global listscreen
if listscreen:
self["key_yellow"] = Button(_("Normal mode"))
self.skinName="GraphMultiEPGList"
else:
self["key_yellow"] = Button(_("List mode"))
self["key_blue"] = Button(_("Goto"))
self.key_green_choice = self.EMPTY
self.key_red_choice = self.EMPTY
self["timeline_text"] = TimelineText()
self["Service"] = ServiceEvent()
self["Event"] = Event()
self.time_lines = [ ]
for x in range(0, MAX_TIMELINES):
pm = Pixmap()
self.time_lines.append(pm)
self["timeline%d"%(x)] = pm
self["timeline_now"] = Pixmap()
self.services = services
self.zapFunc = zapFunc
if bouquetname != "":
Screen.setTitle(self, bouquetname)
self["list"] = EPGList( selChangedCB = self.onSelectionChanged,
timer = self.session.nav.RecordTimer,
time_epoch = config.misc.graph_mepg.prev_time_period.value,
overjump_empty = config.misc.graph_mepg.overjump.value)
HelpableScreen.__init__(self)
self["okactions"] = HelpableActionMap(self, "OkCancelActions",
{
"cancel": (self.closeScreen, _("Exit EPG")),
"ok": (self.eventSelected, _("Zap to selected channel, or show detailed event info (depends on configuration)"))
}, -1)
self["okactions"].csel = self
self["epgactions"] = HelpableActionMap(self, "EPGSelectActions",
{
"timerAdd": (self.timerAdd, _("Add/remove change timer for current event")),
"info": (self.infoKeyPressed, _("Show detailed event info")),
"red": (self.zapTo, _("Zap to selected channel")),
"yellow": (self.swapMode, _("Switch between normal mode and list mode")),
"blue": (self.enterDateTime, _("Goto specific data/time")),
"menu": (self.showSetup, _("Setup menu")),
"nextBouquet": (self.nextBouquet, _("Show bouquet selection menu")),
"prevBouquet": (self.prevBouquet, _("Show bouquet selection menu")),
"nextService": (self.nextPressed, _("Goto next page of events")),
"prevService": (self.prevPressed, _("Goto previous page of events")),
"preview": (self.preview, _("Preview selected channel")),
"nextDay": (self.nextDay, _("Goto next day of events")),
"prevDay": (self.prevDay, _("Goto previous day of events"))
}, -1)
self["epgactions"].csel = self
self["inputactions"] = HelpableActionMap(self, "InputActions",
{
"left": (self.leftPressed, _("Go to previous event")),
"right": (self.rightPressed, _("Go to next event")),
"1": (self.key1, _("Set time window to 1 hour")),
"2": (self.key2, _("Set time window to 2 hours")),
"3": (self.key3, _("Set time window to 3 hours")),
"4": (self.key4, _("Set time window to 4 hours")),
"5": (self.key5, _("Set time window to 5 hours")),
"6": (self.key6, _("Set time window to 6 hours")),
"7": (self.prevPage, _("Go to previous page of service")),
"9": (self.nextPage, _("Go to next page of service")),
"8": (self.toTop, _("Go to first service")),
"0": (self.toEnd, _("Go to last service"))
}, -1)
self["inputactions"].csel = self
self.updateTimelineTimer = eTimer()
self.updateTimelineTimer.callback.append(self.moveTimeLines)
self.updateTimelineTimer.start(60 * 1000)
self.onLayoutFinish.append(self.onCreate)
self.previousref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
    def prevPage(self):
        """Key 7: scroll the service list up by one page."""
        self["list"].moveTo(eListbox.pageUp)
    def nextPage(self):
        """Key 9: scroll the service list down by one page."""
        self["list"].moveTo(eListbox.pageDown)
    def toTop(self):
        """Key 8: jump to the first service in the list."""
        self["list"].moveTo(eListbox.moveTop)
    def toEnd(self):
        """Key 0: jump to the last service in the list."""
        self["list"].moveTo(eListbox.moveEnd)
    def prevPressed(self):
        """Go to the previous page of events (dir=-2 selects page step)."""
        self.updEvent(-2)
    def nextPressed(self):
        """Go to the next page of events (dir=+2 selects page step)."""
        self.updEvent(+2)
    def leftPressed(self):
        """Go to the previous event (dir=-1 selects single-event step)."""
        self.updEvent(-1)
    def rightPressed(self):
        """Go to the next event (dir=+1 selects single-event step)."""
        self.updEvent(+1)
    def prevDay(self):
        """Go to the previous day of events (dir=-3 selects day step)."""
        self.updEvent(-3)
    def nextDay(self):
        """Go to the next day of events (dir=+3 selects day step)."""
        self.updEvent(+3)
def updEvent(self, dir, visible = True):
ret = self["list"].selEntry(dir, visible)
if ret:
self.moveTimeLines(True)
    def updEpoch(self, mins):
        """Set the visible time window to *mins* minutes and persist it."""
        self["list"].setEpoch(mins)
        # Remember the chosen span so the next invocation restores it.
        config.misc.graph_mepg.prev_time_period.setValue(mins)
        self.moveTimeLines()
    def key1(self):
        """Numeric key 1: show a 1-hour time window."""
        self.updEpoch(60)
    def key2(self):
        """Numeric key 2: show a 2-hour time window."""
        self.updEpoch(120)
    def key3(self):
        """Numeric key 3: show a 3-hour time window."""
        self.updEpoch(180)
    def key4(self):
        """Numeric key 4: show a 4-hour time window."""
        self.updEpoch(240)
    def key5(self):
        """Numeric key 5: show a 5-hour time window."""
        self.updEpoch(300)
    def key6(self):
        """Numeric key 6: show a 6-hour time window."""
        self.updEpoch(360)
    def nextBouquet(self):
        """Switch to the next bouquet via the callback supplied by the opener."""
        if self.bouquetChangeCB:
            self.bouquetChangeCB(1, self)
    def prevBouquet(self):
        """Switch to the previous bouquet via the callback supplied by the opener."""
        if self.bouquetChangeCB:
            self.bouquetChangeCB(-1, self)
    def enterDateTime(self):
        """Open the date/time input dialog, pre-filled with the current time."""
        t = localtime(time())
        config.misc.graph_mepg.prev_time.setValue([t.tm_hour, t.tm_min])
        self.session.openWithCallback(self.onDateTimeInputClosed, TimeDateInput, config.misc.graph_mepg.prev_time)
    def onDateTimeInputClosed(self, ret):
        """Jump the EPG view to the time chosen in the TimeDateInput dialog.

        ret -- tuple from the dialog; ret[0] is truthy when confirmed,
        ret[1] is the chosen epoch time.
        """
        if len(ret) > 1:
            if ret[0]:
                # Do not allow jumping before the configured EPG history window.
                now = time() - config.epg.histminutes.value * 60
                self.ask_time = ret[1] if ret[1] >= now else now
                # Snap to the configured rounding interval (e.g. full half hours).
                self.ask_time = self.ask_time - self.ask_time % int(config.misc.graph_mepg.roundTo.value)
                l = self["list"]
                l.resetOffset()
                l.fillMultiEPG(None, self.ask_time)
                self.moveTimeLines(True)
    def showSetup(self):
        """Open the GraphMultiEpg setup screen (Menu key)."""
        self.session.openWithCallback(self.onSetupClose, GraphMultiEpgSetup)
    def onSetupClose(self, ignore = -1):
        """Re-apply all user settings and redraw the EPG after setup closes."""
        l = self["list"]
        l.setItemsPerPage()
        l.setEventFontsize()
        l.setEpoch(config.misc.graph_mepg.prev_time_period.value)
        l.setOverjump_Empty(config.misc.graph_mepg.overjump.value)
        l.setShowServiceMode(config.misc.graph_mepg.servicetitle_mode.value)
        # Reset the view start to "now", clamped/rounded like elsewhere.
        now = time() - config.epg.histminutes.value * 60
        self.ask_time = now - now % int(config.misc.graph_mepg.roundTo.value)
        self["timeline_text"].setDateFormat(config.misc.graph_mepg.servicetitle_mode.value)
        l.fillMultiEPG(None, self.ask_time)
        self.moveTimeLines(True)
    def closeScreen(self):
        """Exit the EPG, zapping back to the originally playing service."""
        self.zapFunc(None, zapback = True)
        config.misc.graph_mepg.save()
        self.close(False)
    def infoKeyPressed(self):
        """Show the detailed event viewer for the currently selected event."""
        cur = self["list"].getCurrent()
        event = cur[0]
        service = cur[1]
        if event is not None:
            self.session.open(EventViewEPGSelect, event, service, self.eventViewCallback, self.openSingleServiceEPG, self.openMultiServiceEPG, self.openSimilarList)
    def openSimilarList(self, eventid, refstr):
        """Open an EPG list of events similar to *eventid* on service *refstr*."""
        self.session.open(EPGSelection, refstr, None, eventid)
    def openSingleServiceEPG(self):
        """Open the classic single-service EPG for the selected service."""
        ref = self["list"].getCurrent()[1].ref.toString()
        if ref:
            self.session.open(EPGSelection, ref)
    def openMultiServiceEPG(self):
        """Open the multi-service EPG over the same service set."""
        if self.services:
            self.session.openWithCallback(self.doRefresh, EPGSelection, self.services, self.zapFunc, None, self.bouquetChangeCB)
    def setServices(self, services):
        """Replace the displayed service set (e.g. after a bouquet change) and rebuild."""
        self.services = services
        self.onCreate()
    def doRefresh(self, answer):
        """Callback after a sub-screen closes: re-sync the selection with playback."""
        serviceref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
        l = self["list"]
        l.moveToService(serviceref)
        l.setCurrentlyPlaying(serviceref)
        self.moveTimeLines()
    def onCreate(self):
        """Initial fill of the EPG grid, run once layout has finished."""
        serviceref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
        l = self["list"]
        l.setShowServiceMode(config.misc.graph_mepg.servicetitle_mode.value)
        self["timeline_text"].setDateFormat(config.misc.graph_mepg.servicetitle_mode.value)
        l.fillMultiEPG(self.services, self.ask_time)
        # Highlight and select whatever is currently playing.
        l.moveToService(serviceref)
        l.setCurrentlyPlaying(serviceref)
        self.moveTimeLines()
    def eventViewCallback(self, setEvent, setService, val):
        """Step the event viewer to the prev/next event (*val* = -1/+1).

        Recurses to skip over empty cells as long as the service actually
        changed, so the viewer never lands on a gap with no event data.
        """
        l = self["list"]
        old = l.getCurrent()
        self.updEvent(val, False)
        cur = l.getCurrent()
        if cur[0] is None and cur[1].ref != old[1].ref:
            self.eventViewCallback(setEvent, setService, val)
        else:
            setService(cur[1])
            setEvent(cur[0])
    def preview(self):
        """Tune to the selected channel without leaving the EPG screen."""
        ref = self["list"].getCurrent()[1]
        if ref:
            self.zapFunc(ref.ref, preview = True)
            self["list"].setCurrentlyPlaying(ref.ref)
            # Force a repaint so the "playing" marker moves.
            self["list"].l.invalidate()
    def zapTo(self):
        """Zap to the selected channel; a second zap to the same channel closes the EPG."""
        if self.zapFunc and self.key_red_choice == self.ZAP:
            ref = self["list"].getCurrent()[1]
            if ref:
                self.zapFunc(ref.ref)
                # Pressing zap twice on the same service exits the screen.
                if self.previousref and self.previousref == ref.ref:
                    config.misc.graph_mepg.save()
                    self.close(True)
                self.previousref = ref.ref
                self["list"].setCurrentlyPlaying(ref.ref)
                self["list"].l.invalidate()
    def swapMode(self):
        """Toggle between normal and list layout; the screen is reopened by the caller."""
        global listscreen
        listscreen = not listscreen
        self.close(None)
def eventSelected(self):
if config.misc.graph_mepg.OKButton.value == "info":
self.infoKeyPressed()
else:
self.zapTo()
    def removeTimer(self, timer):
        """Remove *timer* without running its after-event action, update green key."""
        timer.afterEvent = AFTEREVENT.NONE
        self.session.nav.RecordTimer.removeEntry(timer)
        self["key_green"].setText(_("Add timer"))
        self.key_green_choice = self.ADD_TIMER
    def timerAdd(self):
        """Green key: add a timer for the selected event, or edit/delete an existing one."""
        cur = self["list"].getCurrent()
        event = cur[0]
        if event is None:
            return
        eventid = event.getEventId()
        serviceref = cur[1]
        # Compare only the first 11 reference fields so the service name suffix is ignored.
        refstr = ':'.join(serviceref.ref.toString().split(':')[:11])
        for timer in self.session.nav.RecordTimer.timer_list:
            if timer.eit == eventid and ':'.join(timer.service_ref.ref.toString().split(':')[:11]) == refstr:
                # A timer for this exact event already exists: offer delete/edit.
                menu = [(_("Delete timer"), "delete"),(_("Edit timer"), "edit")]
                buttons = ["red", "green"]
                def timerAction(choice):
                    if choice is not None:
                        if choice[1] == "delete":
                            self.removeTimer(timer)
                        elif choice[1] == "edit":
                            self.session.open(TimerEntry, timer)
                self.session.openWithCallback(timerAction, ChoiceBox, title=_("Select action for timer %s:") % event.getEventName(), list=menu, keys=buttons)
                break
        else:
            # for/else: no matching timer found, create a new one from the event data.
            newEntry = RecordTimerEntry(serviceref, checkOldTimers = True, *parseEvent(event))
            self.session.openWithCallback(self.finishedTimerAdd, TimerEntry, newEntry)
    def finishedTimerAdd(self, answer):
        """Callback from the TimerEntry dialog.

        answer -- (confirmed, entry); on confirmation the timer is registered,
        auto-resolving simple back-to-back conflicts by trimming 30 seconds,
        and falling back to the TimerSanityConflict dialog otherwise.
        """
        print "finished add"
        if answer[0]:
            entry = answer[1]
            simulTimerList = self.session.nav.RecordTimer.record(entry)
            if simulTimerList is not None:
                # Try to resolve conflicts by extending auto-increase timers first.
                for x in simulTimerList:
                    if x.setAutoincreaseEnd(entry):
                        self.session.nav.RecordTimer.timeChanged(x)
                simulTimerList = self.session.nav.RecordTimer.record(entry)
                if simulTimerList is not None:
                    # With no recording margins configured, adjacent timers can be
                    # de-conflicted by shaving 30s off the touching edge.
                    if not entry.repeated and not config.recording.margin_before.value and not config.recording.margin_after.value and len(simulTimerList) > 1:
                        change_time = False
                        conflict_begin = simulTimerList[1].begin
                        conflict_end = simulTimerList[1].end
                        if conflict_begin == entry.end:
                            entry.end -= 30
                            change_time = True
                        elif entry.begin == conflict_end:
                            entry.begin += 30
                            change_time = True
                        if change_time:
                            simulTimerList = self.session.nav.RecordTimer.record(entry)
                    if simulTimerList is not None:
                        # Still conflicting: let the user decide.
                        self.session.openWithCallback(self.finishSanityCorrection, TimerSanityConflict, simulTimerList)
            self["key_green"].setText(_("Change timer"))
            self.key_green_choice = self.REMOVE_TIMER
        else:
            self["key_green"].setText(_("Add timer"))
            self.key_green_choice = self.ADD_TIMER
            print "Timeredit aborted"
    def finishSanityCorrection(self, answer):
        """Callback of TimerSanityConflict; re-runs the normal add-timer handling."""
        self.finishedTimerAdd(answer)
    def onSelectionChanged(self):
        """Update the info panels and the red/green key labels for the new selection."""
        cur = self["list"].getCurrent()
        event = cur[0]
        self["Event"].newEvent(event)
        if cur[1] is None or cur[1].getServiceName() == "":
            # No (valid) service under the cursor: blank both colour keys.
            if self.key_green_choice != self.EMPTY:
                self["key_green"].setText("")
                self.key_green_choice = self.EMPTY
            if self.key_red_choice != self.EMPTY:
                self["key_red"].setText("")
                self.key_red_choice = self.EMPTY
            return
        servicerefref = cur[1].ref
        self["Service"].newService(servicerefref)
        if self.key_red_choice != self.ZAP:
            self["key_red"].setText(_("Zap"))
            self.key_red_choice = self.ZAP
        if not event:
            if self.key_green_choice != self.EMPTY:
                self["key_green"].setText("")
                self.key_green_choice = self.EMPTY
            return
        eventid = event.getEventId()
        # Compare only the first 11 reference fields (ignore the name suffix).
        refstr = ':'.join(servicerefref.toString().split(':')[:11])
        isRecordEvent = False
        for timer in self.session.nav.RecordTimer.timer_list:
            if timer.eit == eventid and ':'.join(timer.service_ref.ref.toString().split(':')[:11]) == refstr:
                isRecordEvent = True
                break
        if isRecordEvent and self.key_green_choice != self.REMOVE_TIMER:
            self["key_green"].setText(_("Change timer"))
            self.key_green_choice = self.REMOVE_TIMER
        elif not isRecordEvent and self.key_green_choice != self.ADD_TIMER:
            self["key_green"].setText(_("Add timer"))
            self.key_green_choice = self.ADD_TIMER
    def moveTimeLines(self, force=False):
        """Redraw the time scale and the "now" marker; re-arms the minute timer."""
        self.updateTimelineTimer.start((60 - (int(time()) % 60)) * 1000) #keep syncronised
        self["timeline_text"].setEntries(self["list"], self["timeline_now"], self.time_lines, force)
        self["list"].l.invalidate() # not needed when the zPosition in the skin is correct! ?????
|
youdonghai/intellij-community | refs/heads/master | python/testData/inspections/PyArgumentListInspection/CsvRegisterDialect/csv.py | 42 | from _csv import register_dialect
# Public API: re-export register_dialect from the C accelerator module.
__all__ = ["register_dialect"]
Buckmarble/Lunar_kernel_sense_m7 | refs/heads/master | scripts/tracing/draw_functrace.py | 14679 | #!/usr/bin/python
"""
Copyright 2008 (c) Frederic Weisbecker <fweisbec@gmail.com>
Licensed under the terms of the GNU GPL License version 2
This script parses a trace provided by the function tracer in
kernel/trace/trace_functions.c
The resulted trace is processed into a tree to produce a more human
view of the call stack by drawing textual but hierarchical tree of
calls. Only the functions's names and the the call time are provided.
Usage:
Be sure that you have CONFIG_FUNCTION_TRACER
# mount -t debugfs nodev /sys/kernel/debug
# echo function > /sys/kernel/debug/tracing/current_tracer
$ cat /sys/kernel/debug/tracing/trace_pipe > ~/raw_trace_func
Wait some times but not too much, the script is a bit slow.
Break the pipe (Ctrl + Z)
$ scripts/draw_functrace.py < raw_trace_func > draw_functrace
Then you have your drawn trace in draw_functrace
"""
import sys, re
class CallTree:
	""" This class provides a tree representation of the functions
	call stack. If a function has no parent in the kernel (interrupt,
	syscall, kernel thread...) then it is attached to a virtual parent
	called ROOT.
	"""
	# Class-level virtual root, set up by main() before any parsing.
	ROOT = None

	def __init__(self, func, time = None, parent = None):
		# func: function name; time: timestamp string (None for synthetic nodes).
		self._func = func
		self._time = time
		if parent is None:
			self._parent = CallTree.ROOT
		else:
			self._parent = parent
		self._children = []

	def calls(self, func, calltime):
		""" If a function calls another one, call this method to insert it
		into the tree at the appropriate place.
		@return: A reference to the newly created child node.
		"""
		child = CallTree(func, calltime, self)
		self._children.append(child)
		return child

	def getParent(self, func):
		""" Retrieve the last parent of the current node that
		has the name given by func. If this function is not
		on a parent, then create it as new child of root
		@return: A reference to the parent.
		"""
		tree = self
		# Walk up the ancestors looking for a frame with this name.
		while tree != CallTree.ROOT and tree._func != func:
			tree = tree._parent
		if tree == CallTree.ROOT:
			# Caller not on the stack: start a new top-level branch.
			child = CallTree.ROOT.calls(func, None)
			return child
		return tree

	def __repr__(self):
		return self.__toString("", True)

	def __toString(self, branch, lastChild):
		# Render this node and, recursively, its children as an ASCII tree.
		if self._time is not None:
			s = "%s----%s (%s)\n" % (branch, self._func, self._time)
		else:
			s = "%s----%s\n" % (branch, self._func)
		i = 0
		if lastChild:
			# Last sibling: drop the vertical bar from the inherited prefix.
			branch = branch[:-1] + " "
		while i < len(self._children):
			if i != len(self._children) - 1:
				s += "%s" % self._children[i].__toString(branch +\
						" |", False)
			else:
				s += "%s" % self._children[i].__toString(branch +\
						" |", True)
			i += 1
		return s
class BrokenLineException(Exception):
	"""If the last line is not complete because of the pipe breakage,
	we want to stop the processing and ignore this line.
	"""
	pass
class CommentLineException(Exception):
	""" If the line is a comment (as in the beginning of the trace file),
	just ignore it.
	"""
	pass
def parseLine(line):
	"""Parse one ftrace line into a (timestamp, callee, caller) tuple.

	Raises CommentLineException for '#' header lines and
	BrokenLineException when the line does not match the trace format.
	"""
	stripped = line.strip()
	if stripped.startswith("#"):
		raise CommentLineException
	match = re.match("[^]]+?\\] +([0-9.]+): (\\w+) <-(\\w+)", stripped)
	if match is None:
		raise BrokenLineException
	return match.groups()
def main():
	"""Read an ftrace function-trace from stdin and print it as a call tree."""
	CallTree.ROOT = CallTree("Root (Nowhere)", None, None)
	tree = CallTree.ROOT
	for line in sys.stdin:
		try:
			calltime, callee, caller = parseLine(line)
		except BrokenLineException:
			# Truncated line (broken pipe): stop processing entirely.
			break
		except CommentLineException:
			# Header/comment line: skip it.
			continue
		tree = tree.getParent(caller)
		tree = tree.calls(callee, calltime)
	print CallTree.ROOT
# Script entry point.
if __name__ == "__main__":
	main()
|
reneenoble/datacats | refs/heads/master | datacats/tests/test_validate.py | 10 | # Copyright 2014-2015 Boxkite Inc.
# This file is part of the DataCats package and is released under
# the terms of the GNU Affero General Public License version 3.0.
# See LICENSE.txt or http://www.fsf.org/licensing/licenses/agpl-3.0.html
from unittest import TestCase
from datacats.validate import valid_name, valid_deploy_name
class TestValidate(TestCase):
    """Unit tests for the datacats environment-name validators."""

    def test_good_name(self):
        # Plain lowercase letters are accepted.
        self.assertTrue(valid_name('copper'))

    def test_name_with_numbers(self):
        # Digits are allowed after a leading letter.
        self.assertTrue(valid_name('seven42'))

    def test_name_with_leading_numbers(self):
        # Names must start with a letter.
        self.assertFalse(valid_name('42seven'))

    def test_name_too_short(self):
        # NOTE: deliberately uses valid_deploy_name — deploy names carry a
        # minimum-length requirement that plain names do not.
        self.assertFalse(valid_deploy_name('foo'))
|
Endika/event | refs/heads/8.0 | event_registration_mass_mailing/tests/test_event_registration_mail_list_wizard.py | 3 | # -*- coding: utf-8 -*-
# License AGPL-3: Antiun Ingenieria S.L. - Javier Iniesta
# See README.rst file on addon root folder for more details
from openerp.tests.common import TransactionCase
class TestEventRegistrationMailListWizard(TransactionCase):
    """Tests for the wizard that adds event registrations to a mass-mailing list."""

    def setUp(self):
        super(TestEventRegistrationMailListWizard, self).setUp()
        self.mail_list = self.env['mail.mass_mailing.list'].create({
            'name': 'Test 01'})
        # Pre-existing contact shares its email with registration 01 below.
        self.contact = self.env['mail.mass_mailing.contact'].create({
            'name': 'Test Contact 01', 'email': 'email01@test.com',
            'list_id': self.mail_list.id})
        self.event = self.env.ref('event.event_0')
        self.registration_01 = self.env['event.registration'].create({
            'name': 'Test Registration 01', 'email': 'email01@test.com',
            'event_id': self.event.id})
        self.registration_02 = self.env['event.registration'].create({
            'name': 'Test Registration 02', 'email': 'email02@test.com',
            'event_id': self.event.id})

    def test_add_to_mail_list(self):
        # Running the wizard over both registrations must leave exactly two
        # contacts on the list (the duplicate email is not added twice).
        wizard = self.env['event.registration.mail.list.wizard'].create({
            'mail_list': self.mail_list.id})
        wizard.with_context(
            {'active_ids': [self.registration_01.id,
                            self.registration_02.id]}).add_to_mail_list()
        self.assertEqual(self.mail_list.contact_nbr, 2)
|
alvin319/CarnotKE | refs/heads/master | jyhton/lib-python/2.7/plat-sunos5/STROPTS.py | 66 | # Generated by h2py from /usr/include/sys/stropts.h
# Included from sys/feature_tests.h
# Included from sys/isa_defs.h
_CHAR_ALIGNMENT = 1
_SHORT_ALIGNMENT = 2
_INT_ALIGNMENT = 4
_LONG_ALIGNMENT = 8
_LONG_LONG_ALIGNMENT = 8
_DOUBLE_ALIGNMENT = 8
_LONG_DOUBLE_ALIGNMENT = 16
_POINTER_ALIGNMENT = 8
_MAX_ALIGNMENT = 16
_ALIGNMENT_REQUIRED = 1
_CHAR_ALIGNMENT = 1
_SHORT_ALIGNMENT = 2
_INT_ALIGNMENT = 4
_LONG_ALIGNMENT = 4
_LONG_LONG_ALIGNMENT = 4
_DOUBLE_ALIGNMENT = 4
_LONG_DOUBLE_ALIGNMENT = 4
_POINTER_ALIGNMENT = 4
_MAX_ALIGNMENT = 4
_ALIGNMENT_REQUIRED = 0
_CHAR_ALIGNMENT = 1
_SHORT_ALIGNMENT = 2
_INT_ALIGNMENT = 4
_LONG_LONG_ALIGNMENT = 8
_DOUBLE_ALIGNMENT = 8
_ALIGNMENT_REQUIRED = 1
_LONG_ALIGNMENT = 4
_LONG_DOUBLE_ALIGNMENT = 8
_POINTER_ALIGNMENT = 4
_MAX_ALIGNMENT = 8
_LONG_ALIGNMENT = 8
_LONG_DOUBLE_ALIGNMENT = 16
_POINTER_ALIGNMENT = 8
_MAX_ALIGNMENT = 16
_POSIX_C_SOURCE = 1
_LARGEFILE64_SOURCE = 1
_LARGEFILE_SOURCE = 1
_FILE_OFFSET_BITS = 64
_FILE_OFFSET_BITS = 32
_POSIX_C_SOURCE = 199506L
_POSIX_PTHREAD_SEMANTICS = 1
_XOPEN_VERSION = 500
_XOPEN_VERSION = 4
_XOPEN_VERSION = 3
from TYPES import *
# Included from sys/conf.h
# Included from sys/t_lock.h
# Included from sys/machlock.h
from TYPES import *
LOCK_HELD_VALUE = 0xff
def SPIN_LOCK(pl): return ((pl) > ipltospl(LOCK_LEVEL))
def LOCK_SAMPLE_INTERVAL(i): return (((i) & 0xff) == 0)
CLOCK_LEVEL = 10
LOCK_LEVEL = 10
DISP_LEVEL = (LOCK_LEVEL + 1)
PTR24_LSB = 5
PTR24_MSB = (PTR24_LSB + 24)
PTR24_ALIGN = 32
PTR24_BASE = 0xe0000000
# Included from sys/param.h
from TYPES import *
_POSIX_VDISABLE = 0
MAX_INPUT = 512
MAX_CANON = 256
UID_NOBODY = 60001
GID_NOBODY = UID_NOBODY
UID_NOACCESS = 60002
MAX_TASKID = 999999
MAX_MAXPID = 999999
DEFAULT_MAXPID = 999999
DEFAULT_JUMPPID = 100000
DEFAULT_MAXPID = 30000
DEFAULT_JUMPPID = 0
MAXUID = 2147483647
MAXPROJID = MAXUID
MAXLINK = 32767
NMOUNT = 40
CANBSIZ = 256
NOFILE = 20
NGROUPS_UMIN = 0
NGROUPS_UMAX = 32
NGROUPS_MAX_DEFAULT = 16
NZERO = 20
NULL = 0L
NULL = 0
CMASK = 022
CDLIMIT = (1L<<11)
NBPS = 0x20000
NBPSCTR = 512
UBSIZE = 512
SCTRSHFT = 9
SYSNAME = 9
PREMOTE = 39
MAXPATHLEN = 1024
MAXSYMLINKS = 20
MAXNAMELEN = 256
NADDR = 13
PIPE_BUF = 5120
PIPE_MAX = 5120
NBBY = 8
MAXBSIZE = 8192
DEV_BSIZE = 512
DEV_BSHIFT = 9
MAXFRAG = 8
MAXOFF32_T = 0x7fffffff
MAXOFF_T = 0x7fffffffffffffffl
MAXOFFSET_T = 0x7fffffffffffffffl
MAXOFF_T = 0x7fffffffl
MAXOFFSET_T = 0x7fffffff
def btodb(bytes): return \
def dbtob(db): return \
def lbtodb(bytes): return \
def ldbtob(db): return \
NCARGS32 = 0x100000
NCARGS64 = 0x200000
NCARGS = NCARGS64
NCARGS = NCARGS32
FSHIFT = 8
FSCALE = (1<<FSHIFT)
def DELAY(n): return drv_usecwait(n)
def mmu_ptob(x): return ((x) << MMU_PAGESHIFT)
def mmu_btop(x): return (((x)) >> MMU_PAGESHIFT)
def mmu_btopr(x): return ((((x) + MMU_PAGEOFFSET) >> MMU_PAGESHIFT))
def mmu_ptod(x): return ((x) << (MMU_PAGESHIFT - DEV_BSHIFT))
def ptod(x): return ((x) << (PAGESHIFT - DEV_BSHIFT))
def ptob(x): return ((x) << PAGESHIFT)
def btop(x): return (((x) >> PAGESHIFT))
def btopr(x): return ((((x) + PAGEOFFSET) >> PAGESHIFT))
def dtop(DD): return (((DD) + NDPP - 1) >> (PAGESHIFT - DEV_BSHIFT))
def dtopt(DD): return ((DD) >> (PAGESHIFT - DEV_BSHIFT))
_AIO_LISTIO_MAX = (4096)
_AIO_MAX = (-1)
_MQ_OPEN_MAX = (32)
_MQ_PRIO_MAX = (32)
_SEM_NSEMS_MAX = INT_MAX
_SEM_VALUE_MAX = INT_MAX
# Included from sys/unistd.h
_CS_PATH = 65
_CS_LFS_CFLAGS = 68
_CS_LFS_LDFLAGS = 69
_CS_LFS_LIBS = 70
_CS_LFS_LINTFLAGS = 71
_CS_LFS64_CFLAGS = 72
_CS_LFS64_LDFLAGS = 73
_CS_LFS64_LIBS = 74
_CS_LFS64_LINTFLAGS = 75
_CS_XBS5_ILP32_OFF32_CFLAGS = 700
_CS_XBS5_ILP32_OFF32_LDFLAGS = 701
_CS_XBS5_ILP32_OFF32_LIBS = 702
_CS_XBS5_ILP32_OFF32_LINTFLAGS = 703
_CS_XBS5_ILP32_OFFBIG_CFLAGS = 705
_CS_XBS5_ILP32_OFFBIG_LDFLAGS = 706
_CS_XBS5_ILP32_OFFBIG_LIBS = 707
_CS_XBS5_ILP32_OFFBIG_LINTFLAGS = 708
_CS_XBS5_LP64_OFF64_CFLAGS = 709
_CS_XBS5_LP64_OFF64_LDFLAGS = 710
_CS_XBS5_LP64_OFF64_LIBS = 711
_CS_XBS5_LP64_OFF64_LINTFLAGS = 712
_CS_XBS5_LPBIG_OFFBIG_CFLAGS = 713
_CS_XBS5_LPBIG_OFFBIG_LDFLAGS = 714
_CS_XBS5_LPBIG_OFFBIG_LIBS = 715
_CS_XBS5_LPBIG_OFFBIG_LINTFLAGS = 716
_SC_ARG_MAX = 1
_SC_CHILD_MAX = 2
_SC_CLK_TCK = 3
_SC_NGROUPS_MAX = 4
_SC_OPEN_MAX = 5
_SC_JOB_CONTROL = 6
_SC_SAVED_IDS = 7
_SC_VERSION = 8
_SC_PASS_MAX = 9
_SC_LOGNAME_MAX = 10
_SC_PAGESIZE = 11
_SC_XOPEN_VERSION = 12
_SC_NPROCESSORS_CONF = 14
_SC_NPROCESSORS_ONLN = 15
_SC_STREAM_MAX = 16
_SC_TZNAME_MAX = 17
_SC_AIO_LISTIO_MAX = 18
_SC_AIO_MAX = 19
_SC_AIO_PRIO_DELTA_MAX = 20
_SC_ASYNCHRONOUS_IO = 21
_SC_DELAYTIMER_MAX = 22
_SC_FSYNC = 23
_SC_MAPPED_FILES = 24
_SC_MEMLOCK = 25
_SC_MEMLOCK_RANGE = 26
_SC_MEMORY_PROTECTION = 27
_SC_MESSAGE_PASSING = 28
_SC_MQ_OPEN_MAX = 29
_SC_MQ_PRIO_MAX = 30
_SC_PRIORITIZED_IO = 31
_SC_PRIORITY_SCHEDULING = 32
_SC_REALTIME_SIGNALS = 33
_SC_RTSIG_MAX = 34
_SC_SEMAPHORES = 35
_SC_SEM_NSEMS_MAX = 36
_SC_SEM_VALUE_MAX = 37
_SC_SHARED_MEMORY_OBJECTS = 38
_SC_SIGQUEUE_MAX = 39
_SC_SIGRT_MIN = 40
_SC_SIGRT_MAX = 41
_SC_SYNCHRONIZED_IO = 42
_SC_TIMERS = 43
_SC_TIMER_MAX = 44
_SC_2_C_BIND = 45
_SC_2_C_DEV = 46
_SC_2_C_VERSION = 47
_SC_2_FORT_DEV = 48
_SC_2_FORT_RUN = 49
_SC_2_LOCALEDEF = 50
_SC_2_SW_DEV = 51
_SC_2_UPE = 52
_SC_2_VERSION = 53
_SC_BC_BASE_MAX = 54
_SC_BC_DIM_MAX = 55
_SC_BC_SCALE_MAX = 56
_SC_BC_STRING_MAX = 57
_SC_COLL_WEIGHTS_MAX = 58
_SC_EXPR_NEST_MAX = 59
_SC_LINE_MAX = 60
_SC_RE_DUP_MAX = 61
_SC_XOPEN_CRYPT = 62
_SC_XOPEN_ENH_I18N = 63
_SC_XOPEN_SHM = 64
_SC_2_CHAR_TERM = 66
_SC_XOPEN_XCU_VERSION = 67
_SC_ATEXIT_MAX = 76
_SC_IOV_MAX = 77
_SC_XOPEN_UNIX = 78
_SC_PAGE_SIZE = _SC_PAGESIZE
_SC_T_IOV_MAX = 79
_SC_PHYS_PAGES = 500
_SC_AVPHYS_PAGES = 501
_SC_COHER_BLKSZ = 503
_SC_SPLIT_CACHE = 504
_SC_ICACHE_SZ = 505
_SC_DCACHE_SZ = 506
_SC_ICACHE_LINESZ = 507
_SC_DCACHE_LINESZ = 508
_SC_ICACHE_BLKSZ = 509
_SC_DCACHE_BLKSZ = 510
_SC_DCACHE_TBLKSZ = 511
_SC_ICACHE_ASSOC = 512
_SC_DCACHE_ASSOC = 513
_SC_MAXPID = 514
_SC_STACK_PROT = 515
_SC_THREAD_DESTRUCTOR_ITERATIONS = 568
_SC_GETGR_R_SIZE_MAX = 569
_SC_GETPW_R_SIZE_MAX = 570
_SC_LOGIN_NAME_MAX = 571
_SC_THREAD_KEYS_MAX = 572
_SC_THREAD_STACK_MIN = 573
_SC_THREAD_THREADS_MAX = 574
_SC_TTY_NAME_MAX = 575
_SC_THREADS = 576
_SC_THREAD_ATTR_STACKADDR = 577
_SC_THREAD_ATTR_STACKSIZE = 578
_SC_THREAD_PRIORITY_SCHEDULING = 579
_SC_THREAD_PRIO_INHERIT = 580
_SC_THREAD_PRIO_PROTECT = 581
_SC_THREAD_PROCESS_SHARED = 582
_SC_THREAD_SAFE_FUNCTIONS = 583
_SC_XOPEN_LEGACY = 717
_SC_XOPEN_REALTIME = 718
_SC_XOPEN_REALTIME_THREADS = 719
_SC_XBS5_ILP32_OFF32 = 720
_SC_XBS5_ILP32_OFFBIG = 721
_SC_XBS5_LP64_OFF64 = 722
_SC_XBS5_LPBIG_OFFBIG = 723
_PC_LINK_MAX = 1
_PC_MAX_CANON = 2
_PC_MAX_INPUT = 3
_PC_NAME_MAX = 4
_PC_PATH_MAX = 5
_PC_PIPE_BUF = 6
_PC_NO_TRUNC = 7
_PC_VDISABLE = 8
_PC_CHOWN_RESTRICTED = 9
_PC_ASYNC_IO = 10
_PC_PRIO_IO = 11
_PC_SYNC_IO = 12
_PC_FILESIZEBITS = 67
_PC_LAST = 67
_POSIX_VERSION = 199506L
_POSIX2_VERSION = 199209L
_POSIX2_C_VERSION = 199209L
_XOPEN_XCU_VERSION = 4
_XOPEN_REALTIME = 1
_XOPEN_ENH_I18N = 1
_XOPEN_SHM = 1
_POSIX2_C_BIND = 1
_POSIX2_CHAR_TERM = 1
_POSIX2_LOCALEDEF = 1
_POSIX2_C_DEV = 1
_POSIX2_SW_DEV = 1
_POSIX2_UPE = 1
# Included from sys/mutex.h
from TYPES import *
def MUTEX_HELD(x): return (mutex_owned(x))
# Included from sys/rwlock.h
from TYPES import *
def RW_READ_HELD(x): return (rw_read_held((x)))
def RW_WRITE_HELD(x): return (rw_write_held((x)))
def RW_LOCK_HELD(x): return (rw_lock_held((x)))
def RW_ISWRITER(x): return (rw_iswriter(x))
# Included from sys/semaphore.h
# Included from sys/thread.h
from TYPES import *
# Included from sys/klwp.h
from TYPES import *
# Included from sys/condvar.h
from TYPES import *
# Included from sys/time.h
# Included from sys/types32.h
# Included from sys/int_types.h
TIME32_MAX = INT32_MAX
TIME32_MIN = INT32_MIN
def TIMEVAL_OVERFLOW(tv): return \
from TYPES import *
DST_NONE = 0
DST_USA = 1
DST_AUST = 2
DST_WET = 3
DST_MET = 4
DST_EET = 5
DST_CAN = 6
DST_GB = 7
DST_RUM = 8
DST_TUR = 9
DST_AUSTALT = 10
ITIMER_REAL = 0
ITIMER_VIRTUAL = 1
ITIMER_PROF = 2
ITIMER_REALPROF = 3
def ITIMERVAL_OVERFLOW(itv): return \
SEC = 1
MILLISEC = 1000
MICROSEC = 1000000
NANOSEC = 1000000000
# Included from sys/time_impl.h
def TIMESPEC_OVERFLOW(ts): return \
def ITIMERSPEC_OVERFLOW(it): return \
__CLOCK_REALTIME0 = 0
CLOCK_VIRTUAL = 1
CLOCK_PROF = 2
__CLOCK_REALTIME3 = 3
CLOCK_HIGHRES = 4
CLOCK_MAX = 5
CLOCK_REALTIME = __CLOCK_REALTIME3
CLOCK_REALTIME = __CLOCK_REALTIME0
TIMER_RELTIME = 0x0
TIMER_ABSTIME = 0x1
def TICK_TO_SEC(tick): return ((tick) / hz)
def SEC_TO_TICK(sec): return ((sec) * hz)
def TICK_TO_MSEC(tick): return \
def MSEC_TO_TICK(msec): return \
def MSEC_TO_TICK_ROUNDUP(msec): return \
def TICK_TO_USEC(tick): return ((tick) * usec_per_tick)
def USEC_TO_TICK(usec): return ((usec) / usec_per_tick)
def USEC_TO_TICK_ROUNDUP(usec): return \
def TICK_TO_NSEC(tick): return ((tick) * nsec_per_tick)
def NSEC_TO_TICK(nsec): return ((nsec) / nsec_per_tick)
def NSEC_TO_TICK_ROUNDUP(nsec): return \
def TIMEVAL_TO_TICK(tvp): return \
def TIMESTRUC_TO_TICK(tsp): return \
# Included from time.h
from TYPES import *
# Included from iso/time_iso.h
NULL = 0L
NULL = 0
CLOCKS_PER_SEC = 1000000
# Included from sys/select.h
FD_SETSIZE = 65536
FD_SETSIZE = 1024
_NBBY = 8
NBBY = _NBBY
def FD_ZERO(p): return bzero((p), sizeof (*(p)))
# Included from sys/signal.h
# Included from sys/iso/signal_iso.h
SIGHUP = 1
SIGINT = 2
SIGQUIT = 3
SIGILL = 4
SIGTRAP = 5
SIGIOT = 6
SIGABRT = 6
SIGEMT = 7
SIGFPE = 8
SIGKILL = 9
SIGBUS = 10
SIGSEGV = 11
SIGSYS = 12
SIGPIPE = 13
SIGALRM = 14
SIGTERM = 15
SIGUSR1 = 16
SIGUSR2 = 17
SIGCLD = 18
SIGCHLD = 18
SIGPWR = 19
SIGWINCH = 20
SIGURG = 21
SIGPOLL = 22
SIGIO = SIGPOLL
SIGSTOP = 23
SIGTSTP = 24
SIGCONT = 25
SIGTTIN = 26
SIGTTOU = 27
SIGVTALRM = 28
SIGPROF = 29
SIGXCPU = 30
SIGXFSZ = 31
SIGWAITING = 32
SIGLWP = 33
SIGFREEZE = 34
SIGTHAW = 35
SIGCANCEL = 36
SIGLOST = 37
_SIGRTMIN = 38
_SIGRTMAX = 45
SIG_BLOCK = 1
SIG_UNBLOCK = 2
SIG_SETMASK = 3
SIGNO_MASK = 0xFF
SIGDEFER = 0x100
SIGHOLD = 0x200
SIGRELSE = 0x400
SIGIGNORE = 0x800
SIGPAUSE = 0x1000
# Included from sys/siginfo.h
from TYPES import *
SIGEV_NONE = 1
SIGEV_SIGNAL = 2
SIGEV_THREAD = 3
SI_NOINFO = 32767
SI_USER = 0
SI_LWP = (-1)
SI_QUEUE = (-2)
SI_TIMER = (-3)
SI_ASYNCIO = (-4)
SI_MESGQ = (-5)
# Included from sys/machsig.h
ILL_ILLOPC = 1
ILL_ILLOPN = 2
ILL_ILLADR = 3
ILL_ILLTRP = 4
ILL_PRVOPC = 5
ILL_PRVREG = 6
ILL_COPROC = 7
ILL_BADSTK = 8
NSIGILL = 8
EMT_TAGOVF = 1
EMT_CPCOVF = 2
NSIGEMT = 2
FPE_INTDIV = 1
FPE_INTOVF = 2
FPE_FLTDIV = 3
FPE_FLTOVF = 4
FPE_FLTUND = 5
FPE_FLTRES = 6
FPE_FLTINV = 7
FPE_FLTSUB = 8
NSIGFPE = 8
SEGV_MAPERR = 1
SEGV_ACCERR = 2
NSIGSEGV = 2
BUS_ADRALN = 1
BUS_ADRERR = 2
BUS_OBJERR = 3
NSIGBUS = 3
TRAP_BRKPT = 1
TRAP_TRACE = 2
TRAP_RWATCH = 3
TRAP_WWATCH = 4
TRAP_XWATCH = 5
NSIGTRAP = 5
CLD_EXITED = 1
CLD_KILLED = 2
CLD_DUMPED = 3
CLD_TRAPPED = 4
CLD_STOPPED = 5
CLD_CONTINUED = 6
NSIGCLD = 6
POLL_IN = 1
POLL_OUT = 2
POLL_MSG = 3
POLL_ERR = 4
POLL_PRI = 5
POLL_HUP = 6
NSIGPOLL = 6
PROF_SIG = 1
NSIGPROF = 1
SI_MAXSZ = 256
SI_MAXSZ = 128
# Included from sys/time_std_impl.h
from TYPES import *
SI32_MAXSZ = 128
def SI_CANQUEUE(c): return ((c) <= SI_QUEUE)
SA_NOCLDSTOP = 0x00020000
SA_ONSTACK = 0x00000001
SA_RESETHAND = 0x00000002
SA_RESTART = 0x00000004
SA_SIGINFO = 0x00000008
SA_NODEFER = 0x00000010
SA_NOCLDWAIT = 0x00010000
SA_WAITSIG = 0x00010000
NSIG = 46
MAXSIG = 45
S_SIGNAL = 1
S_SIGSET = 2
S_SIGACTION = 3
S_NONE = 4
MINSIGSTKSZ = 2048
SIGSTKSZ = 8192
SS_ONSTACK = 0x00000001
SS_DISABLE = 0x00000002
SN_PROC = 1
SN_CANCEL = 2
SN_SEND = 3
# Included from sys/ucontext.h
from TYPES import *
# Included from sys/regset.h
REG_CCR = (0)
REG_PSR = (0)
REG_PSR = (0)
REG_PC = (1)
REG_nPC = (2)
REG_Y = (3)
REG_G1 = (4)
REG_G2 = (5)
REG_G3 = (6)
REG_G4 = (7)
REG_G5 = (8)
REG_G6 = (9)
REG_G7 = (10)
REG_O0 = (11)
REG_O1 = (12)
REG_O2 = (13)
REG_O3 = (14)
REG_O4 = (15)
REG_O5 = (16)
REG_O6 = (17)
REG_O7 = (18)
REG_ASI = (19)
REG_FPRS = (20)
REG_PS = REG_PSR
REG_SP = REG_O6
REG_R0 = REG_O0
REG_R1 = REG_O1
_NGREG = 21
_NGREG = 19
NGREG = _NGREG
_NGREG32 = 19
_NGREG64 = 21
SPARC_MAXREGWINDOW = 31
MAXFPQ = 16
XRS_ID = 0x78727300
# Included from v7/sys/privregs.h
# Included from v7/sys/psr.h
PSR_CWP = 0x0000001F
PSR_ET = 0x00000020
PSR_PS = 0x00000040
PSR_S = 0x00000080
PSR_PIL = 0x00000F00
PSR_EF = 0x00001000
PSR_EC = 0x00002000
PSR_RSV = 0x000FC000
PSR_ICC = 0x00F00000
PSR_C = 0x00100000
PSR_V = 0x00200000
PSR_Z = 0x00400000
PSR_N = 0x00800000
PSR_VER = 0x0F000000
PSR_IMPL = 0xF0000000
PSL_ALLCC = PSR_ICC
PSL_USER = (PSR_S)
PSL_USERMASK = (PSR_ICC)
PSL_UBITS = (PSR_ICC|PSR_EF)
def USERMODE(ps): return (((ps) & PSR_PS) == 0)
# Included from sys/fsr.h
FSR_CEXC = 0x0000001f
FSR_AEXC = 0x000003e0
FSR_FCC = 0x00000c00
FSR_PR = 0x00001000
FSR_QNE = 0x00002000
FSR_FTT = 0x0001c000
FSR_VER = 0x000e0000
FSR_TEM = 0x0f800000
FSR_RP = 0x30000000
FSR_RD = 0xc0000000
FSR_VER_SHIFT = 17
FSR_FCC1 = 0x00000003
FSR_FCC2 = 0x0000000C
FSR_FCC3 = 0x00000030
FSR_CEXC_NX = 0x00000001
FSR_CEXC_DZ = 0x00000002
FSR_CEXC_UF = 0x00000004
FSR_CEXC_OF = 0x00000008
FSR_CEXC_NV = 0x00000010
FSR_AEXC_NX = (0x1 << 5)
FSR_AEXC_DZ = (0x2 << 5)
FSR_AEXC_UF = (0x4 << 5)
FSR_AEXC_OF = (0x8 << 5)
FSR_AEXC_NV = (0x10 << 5)
FTT_NONE = 0
FTT_IEEE = 1
FTT_UNFIN = 2
FTT_UNIMP = 3
FTT_SEQ = 4
FTT_ALIGN = 5
FTT_DFAULT = 6
FSR_FTT_SHIFT = 14
FSR_FTT_IEEE = (FTT_IEEE << FSR_FTT_SHIFT)
FSR_FTT_UNFIN = (FTT_UNFIN << FSR_FTT_SHIFT)
FSR_FTT_UNIMP = (FTT_UNIMP << FSR_FTT_SHIFT)
FSR_FTT_SEQ = (FTT_SEQ << FSR_FTT_SHIFT)
FSR_FTT_ALIGN = (FTT_ALIGN << FSR_FTT_SHIFT)
FSR_FTT_DFAULT = (FTT_DFAULT << FSR_FTT_SHIFT)
FSR_TEM_NX = (0x1 << 23)
FSR_TEM_DZ = (0x2 << 23)
FSR_TEM_UF = (0x4 << 23)
FSR_TEM_OF = (0x8 << 23)
FSR_TEM_NV = (0x10 << 23)
RP_DBLEXT = 0
RP_SINGLE = 1
RP_DOUBLE = 2
RP_RESERVED = 3
RD_NEAR = 0
RD_ZER0 = 1
RD_POSINF = 2
RD_NEGINF = 3
FPRS_DL = 0x1
FPRS_DU = 0x2
FPRS_FEF = 0x4
PIL_MAX = 0xf
def SAVE_GLOBALS(RP): return \
def RESTORE_GLOBALS(RP): return \
def SAVE_OUTS(RP): return \
def RESTORE_OUTS(RP): return \
def SAVE_WINDOW(SBP): return \
def RESTORE_WINDOW(SBP): return \
def STORE_FPREGS(FP): return \
def LOAD_FPREGS(FP): return \
_SPARC_MAXREGWINDOW = 31
_XRS_ID = 0x78727300
GETCONTEXT = 0
SETCONTEXT = 1
UC_SIGMASK = 001
UC_STACK = 002
UC_CPU = 004
UC_MAU = 010
UC_FPU = UC_MAU
UC_INTR = 020
UC_ASR = 040
UC_MCONTEXT = (UC_CPU|UC_FPU|UC_ASR)
UC_ALL = (UC_SIGMASK|UC_STACK|UC_MCONTEXT)
_SIGQUEUE_MAX = 32
_SIGNOTIFY_MAX = 32
# Included from sys/pcb.h
INSTR_VALID = 0x02
NORMAL_STEP = 0x04
WATCH_STEP = 0x08
CPC_OVERFLOW = 0x10
ASYNC_HWERR = 0x20
STEP_NONE = 0
STEP_REQUESTED = 1
STEP_ACTIVE = 2
STEP_WASACTIVE = 3
# Included from sys/msacct.h
LMS_USER = 0
LMS_SYSTEM = 1
LMS_TRAP = 2
LMS_TFAULT = 3
LMS_DFAULT = 4
LMS_KFAULT = 5
LMS_USER_LOCK = 6
LMS_SLEEP = 7
LMS_WAIT_CPU = 8
LMS_STOPPED = 9
NMSTATES = 10
# Included from sys/lwp.h
# Included from sys/synch.h
from TYPES import *
USYNC_THREAD = 0x00
USYNC_PROCESS = 0x01
LOCK_NORMAL = 0x00
LOCK_ERRORCHECK = 0x02
LOCK_RECURSIVE = 0x04
USYNC_PROCESS_ROBUST = 0x08
LOCK_PRIO_NONE = 0x00
LOCK_PRIO_INHERIT = 0x10
LOCK_PRIO_PROTECT = 0x20
LOCK_STALL_NP = 0x00
LOCK_ROBUST_NP = 0x40
LOCK_OWNERDEAD = 0x1
LOCK_NOTRECOVERABLE = 0x2
LOCK_INITED = 0x4
LOCK_UNMAPPED = 0x8
LWP_DETACHED = 0x00000040
LWP_SUSPENDED = 0x00000080
__LWP_ASLWP = 0x00000100
MAXSYSARGS = 8
NORMALRETURN = 0
JUSTRETURN = 1
LWP_USER = 0x01
LWP_SYS = 0x02
TS_FREE = 0x00
TS_SLEEP = 0x01
TS_RUN = 0x02
TS_ONPROC = 0x04
TS_ZOMB = 0x08
TS_STOPPED = 0x10
T_INTR_THREAD = 0x0001
T_WAKEABLE = 0x0002
T_TOMASK = 0x0004
T_TALLOCSTK = 0x0008
T_WOULDBLOCK = 0x0020
T_DONTBLOCK = 0x0040
T_DONTPEND = 0x0080
T_SYS_PROF = 0x0100
T_WAITCVSEM = 0x0200
T_WATCHPT = 0x0400
T_PANIC = 0x0800
TP_HOLDLWP = 0x0002
TP_TWAIT = 0x0004
TP_LWPEXIT = 0x0008
TP_PRSTOP = 0x0010
TP_CHKPT = 0x0020
TP_EXITLWP = 0x0040
TP_PRVSTOP = 0x0080
TP_MSACCT = 0x0100
TP_STOPPING = 0x0200
TP_WATCHPT = 0x0400
TP_PAUSE = 0x0800
TP_CHANGEBIND = 0x1000
TS_LOAD = 0x0001
TS_DONT_SWAP = 0x0002
TS_SWAPENQ = 0x0004
TS_ON_SWAPQ = 0x0008
TS_CSTART = 0x0100
TS_UNPAUSE = 0x0200
TS_XSTART = 0x0400
TS_PSTART = 0x0800
TS_RESUME = 0x1000
TS_CREATE = 0x2000
TS_ALLSTART = \
(TS_CSTART|TS_UNPAUSE|TS_XSTART|TS_PSTART|TS_RESUME|TS_CREATE)
def CPR_VSTOPPED(t): return \
def THREAD_TRANSITION(tp): return thread_transition(tp);
def THREAD_STOP(tp): return \
def THREAD_ZOMB(tp): return THREAD_SET_STATE(tp, TS_ZOMB, NULL)
def SEMA_HELD(x): return (sema_held((x)))
NO_LOCKS_HELD = 1
NO_COMPETING_THREADS = 1
FMNAMESZ = 8
# Included from sys/systm.h
from TYPES import *
# Included from sys/proc.h
# Included from sys/cred.h
# Included from sys/user.h
from TYPES import *
# Included from sys/resource.h
from TYPES import *
PRIO_PROCESS = 0
PRIO_PGRP = 1
PRIO_USER = 2
RLIMIT_CPU = 0
RLIMIT_FSIZE = 1
RLIMIT_DATA = 2
RLIMIT_STACK = 3
RLIMIT_CORE = 4
RLIMIT_NOFILE = 5
RLIMIT_VMEM = 6
RLIMIT_AS = RLIMIT_VMEM
RLIM_NLIMITS = 7
RLIM_INFINITY = (-3l)
RLIM_SAVED_MAX = (-2l)
RLIM_SAVED_CUR = (-1l)
RLIM_INFINITY = 0x7fffffff
RLIM_SAVED_MAX = 0x7ffffffe
RLIM_SAVED_CUR = 0x7ffffffd
RLIM32_INFINITY = 0x7fffffff
RLIM32_SAVED_MAX = 0x7ffffffe
RLIM32_SAVED_CUR = 0x7ffffffd
# Included from sys/model.h
# Included from sys/debug.h
def ASSERT64(x): return ASSERT(x)
def ASSERT32(x): return ASSERT(x)
DATAMODEL_MASK = 0x0FF00000
DATAMODEL_ILP32 = 0x00100000
DATAMODEL_LP64 = 0x00200000
DATAMODEL_NONE = 0
DATAMODEL_NATIVE = DATAMODEL_LP64
DATAMODEL_NATIVE = DATAMODEL_ILP32
def STRUCT_SIZE(handle): return \
def STRUCT_BUF(handle): return ((handle).ptr.m64)
def SIZEOF_PTR(umodel): return \
def STRUCT_SIZE(handle): return (sizeof (*(handle).ptr))
def STRUCT_BUF(handle): return ((handle).ptr)
def SIZEOF_PTR(umodel): return sizeof (caddr_t)
def lwp_getdatamodel(t): return DATAMODEL_ILP32
RUSAGE_SELF = 0
RUSAGE_CHILDREN = -1
# Included from sys/auxv.h
AT_NULL = 0
AT_IGNORE = 1
AT_EXECFD = 2
AT_PHDR = 3
AT_PHENT = 4
AT_PHNUM = 5
AT_PAGESZ = 6
AT_BASE = 7
AT_FLAGS = 8
AT_ENTRY = 9
AT_DCACHEBSIZE = 10
AT_ICACHEBSIZE = 11
AT_UCACHEBSIZE = 12
AT_SUN_UID = 2000
AT_SUN_RUID = 2001
AT_SUN_GID = 2002
AT_SUN_RGID = 2003
AT_SUN_LDELF = 2004
AT_SUN_LDSHDR = 2005
AT_SUN_LDNAME = 2006
AT_SUN_LPAGESZ = 2007
AT_SUN_PLATFORM = 2008
AT_SUN_HWCAP = 2009
AT_SUN_IFLUSH = 2010
AT_SUN_CPU = 2011
AT_SUN_EMUL_ENTRY = 2012
AT_SUN_EMUL_EXECFD = 2013
AT_SUN_EXECNAME = 2014
AT_SUN_MMU = 2015
# Included from sys/errno.h
EPERM = 1
ENOENT = 2
ESRCH = 3
EINTR = 4
EIO = 5
ENXIO = 6
E2BIG = 7
ENOEXEC = 8
EBADF = 9
ECHILD = 10
EAGAIN = 11
ENOMEM = 12
EACCES = 13
EFAULT = 14
ENOTBLK = 15
EBUSY = 16
EEXIST = 17
EXDEV = 18
ENODEV = 19
ENOTDIR = 20
EISDIR = 21
EINVAL = 22
ENFILE = 23
EMFILE = 24
ENOTTY = 25
ETXTBSY = 26
EFBIG = 27
ENOSPC = 28
ESPIPE = 29
EROFS = 30
EMLINK = 31
EPIPE = 32
EDOM = 33
ERANGE = 34
ENOMSG = 35
EIDRM = 36
ECHRNG = 37
EL2NSYNC = 38
EL3HLT = 39
EL3RST = 40
ELNRNG = 41
EUNATCH = 42
ENOCSI = 43
EL2HLT = 44
EDEADLK = 45
ENOLCK = 46
ECANCELED = 47
ENOTSUP = 48
EDQUOT = 49
EBADE = 50
EBADR = 51
EXFULL = 52
ENOANO = 53
EBADRQC = 54
EBADSLT = 55
EDEADLOCK = 56
EBFONT = 57
EOWNERDEAD = 58
ENOTRECOVERABLE = 59
ENOSTR = 60
ENODATA = 61
ETIME = 62
ENOSR = 63
ENONET = 64
ENOPKG = 65
EREMOTE = 66
ENOLINK = 67
EADV = 68
ESRMNT = 69
ECOMM = 70
EPROTO = 71
ELOCKUNMAPPED = 72
ENOTACTIVE = 73
EMULTIHOP = 74
EBADMSG = 77
ENAMETOOLONG = 78
EOVERFLOW = 79
ENOTUNIQ = 80
EBADFD = 81
EREMCHG = 82
ELIBACC = 83
ELIBBAD = 84
ELIBSCN = 85
ELIBMAX = 86
ELIBEXEC = 87
EILSEQ = 88
ENOSYS = 89
ELOOP = 90
ERESTART = 91
ESTRPIPE = 92
ENOTEMPTY = 93
EUSERS = 94
ENOTSOCK = 95
EDESTADDRREQ = 96
EMSGSIZE = 97
EPROTOTYPE = 98
ENOPROTOOPT = 99
EPROTONOSUPPORT = 120
ESOCKTNOSUPPORT = 121
EOPNOTSUPP = 122
EPFNOSUPPORT = 123
EAFNOSUPPORT = 124
EADDRINUSE = 125
EADDRNOTAVAIL = 126
ENETDOWN = 127
ENETUNREACH = 128
ENETRESET = 129
ECONNABORTED = 130
ECONNRESET = 131
ENOBUFS = 132
EISCONN = 133
ENOTCONN = 134
ESHUTDOWN = 143
ETOOMANYREFS = 144
ETIMEDOUT = 145
ECONNREFUSED = 146
EHOSTDOWN = 147
EHOSTUNREACH = 148
EWOULDBLOCK = EAGAIN
EALREADY = 149
EINPROGRESS = 150
ESTALE = 151
PSARGSZ = 80
PSCOMSIZ = 14
MAXCOMLEN = 16
__KERN_NAUXV_IMPL = 19
__KERN_NAUXV_IMPL = 21
__KERN_NAUXV_IMPL = 21
PSARGSZ = 80
# Included from sys/watchpoint.h
from TYPES import *
# Included from vm/seg_enum.h
# Included from sys/copyops.h
from TYPES import *
# Included from sys/buf.h
# Included from sys/kstat.h
from TYPES import *
KSTAT_STRLEN = 31
def KSTAT_ENTER(k): return \
def KSTAT_EXIT(k): return \
KSTAT_TYPE_RAW = 0
KSTAT_TYPE_NAMED = 1
KSTAT_TYPE_INTR = 2
KSTAT_TYPE_IO = 3
KSTAT_TYPE_TIMER = 4
KSTAT_NUM_TYPES = 5
KSTAT_FLAG_VIRTUAL = 0x01
KSTAT_FLAG_VAR_SIZE = 0x02
KSTAT_FLAG_WRITABLE = 0x04
KSTAT_FLAG_PERSISTENT = 0x08
KSTAT_FLAG_DORMANT = 0x10
KSTAT_FLAG_INVALID = 0x20
KSTAT_READ = 0
KSTAT_WRITE = 1
KSTAT_DATA_CHAR = 0
KSTAT_DATA_INT32 = 1
KSTAT_DATA_UINT32 = 2
KSTAT_DATA_INT64 = 3
KSTAT_DATA_UINT64 = 4
KSTAT_DATA_LONG = KSTAT_DATA_INT32
KSTAT_DATA_ULONG = KSTAT_DATA_UINT32
KSTAT_DATA_LONG = KSTAT_DATA_INT64
KSTAT_DATA_ULONG = KSTAT_DATA_UINT64
KSTAT_DATA_LONG = 7
KSTAT_DATA_ULONG = 8
KSTAT_DATA_LONGLONG = KSTAT_DATA_INT64
KSTAT_DATA_ULONGLONG = KSTAT_DATA_UINT64
KSTAT_DATA_FLOAT = 5
KSTAT_DATA_DOUBLE = 6
KSTAT_INTR_HARD = 0
KSTAT_INTR_SOFT = 1
KSTAT_INTR_WATCHDOG = 2
KSTAT_INTR_SPURIOUS = 3
KSTAT_INTR_MULTSVC = 4
KSTAT_NUM_INTRS = 5
B_BUSY = 0x0001
B_DONE = 0x0002
B_ERROR = 0x0004
B_PAGEIO = 0x0010
B_PHYS = 0x0020
B_READ = 0x0040
B_WRITE = 0x0100
B_KERNBUF = 0x0008
B_WANTED = 0x0080
B_AGE = 0x000200
B_ASYNC = 0x000400
B_DELWRI = 0x000800
B_STALE = 0x001000
B_DONTNEED = 0x002000
B_REMAPPED = 0x004000
B_FREE = 0x008000
B_INVAL = 0x010000
B_FORCE = 0x020000
B_HEAD = 0x040000
B_NOCACHE = 0x080000
B_TRUNC = 0x100000
B_SHADOW = 0x200000
B_RETRYWRI = 0x400000
def notavail(bp): return \
def BWRITE(bp): return \
def BWRITE2(bp): return \
# Included from sys/aio_req.h
# Included from sys/uio.h
from TYPES import *
WP_NOWATCH = 0x01
WP_SETPROT = 0x02
# Included from sys/timer.h
from TYPES import *
_TIMER_MAX = 32
ITLK_LOCKED = 0x01
ITLK_WANTED = 0x02
ITLK_REMOVE = 0x04
IT_PERLWP = 0x01
IT_SIGNAL = 0x02
# Included from sys/utrap.h
UT_INSTRUCTION_DISABLED = 1
UT_INSTRUCTION_ERROR = 2
UT_INSTRUCTION_PROTECTION = 3
UT_ILLTRAP_INSTRUCTION = 4
UT_ILLEGAL_INSTRUCTION = 5
UT_PRIVILEGED_OPCODE = 6
UT_FP_DISABLED = 7
UT_FP_EXCEPTION_IEEE_754 = 8
UT_FP_EXCEPTION_OTHER = 9
UT_TAG_OVERFLOW = 10
UT_DIVISION_BY_ZERO = 11
UT_DATA_EXCEPTION = 12
UT_DATA_ERROR = 13
UT_DATA_PROTECTION = 14
UT_MEM_ADDRESS_NOT_ALIGNED = 15
UT_PRIVILEGED_ACTION = 16
UT_ASYNC_DATA_ERROR = 17
UT_TRAP_INSTRUCTION_16 = 18
UT_TRAP_INSTRUCTION_17 = 19
UT_TRAP_INSTRUCTION_18 = 20
UT_TRAP_INSTRUCTION_19 = 21
UT_TRAP_INSTRUCTION_20 = 22
UT_TRAP_INSTRUCTION_21 = 23
UT_TRAP_INSTRUCTION_22 = 24
UT_TRAP_INSTRUCTION_23 = 25
UT_TRAP_INSTRUCTION_24 = 26
UT_TRAP_INSTRUCTION_25 = 27
UT_TRAP_INSTRUCTION_26 = 28
UT_TRAP_INSTRUCTION_27 = 29
UT_TRAP_INSTRUCTION_28 = 30
UT_TRAP_INSTRUCTION_29 = 31
UT_TRAP_INSTRUCTION_30 = 32
UT_TRAP_INSTRUCTION_31 = 33
UTRAP_V8P_FP_DISABLED = UT_FP_DISABLED
UTRAP_V8P_MEM_ADDRESS_NOT_ALIGNED = UT_MEM_ADDRESS_NOT_ALIGNED
UT_PRECISE_MAXTRAPS = 33
# Included from sys/refstr.h
# Included from sys/task.h
from TYPES import *
TASK_NORMAL = 0x0
TASK_FINAL = 0x1
TASK_FINALITY = 0x1
# Included from sys/id_space.h
from TYPES import *
# Included from sys/vmem.h
from TYPES import *
VM_SLEEP = 0x00000000
VM_NOSLEEP = 0x00000001
VM_PANIC = 0x00000002
VM_KMFLAGS = 0x000000ff
VM_BESTFIT = 0x00000100
VMEM_ALLOC = 0x01
VMEM_FREE = 0x02
VMEM_SPAN = 0x10
ISP_NORMAL = 0x0
ISP_RESERVE = 0x1
# Included from sys/exacct_impl.h
from TYPES import *
# Included from sys/kmem.h
from TYPES import *
KM_SLEEP = 0x0000
KM_NOSLEEP = 0x0001
KM_PANIC = 0x0002
KM_VMFLAGS = 0x00ff
KM_FLAGS = 0xffff
KMC_NOTOUCH = 0x00010000
KMC_NODEBUG = 0x00020000
KMC_NOMAGAZINE = 0x00040000
KMC_NOHASH = 0x00080000
KMC_QCACHE = 0x00100000
_ISA_IA32 = 0
_ISA_IA64 = 1
SSLEEP = 1
SRUN = 2
SZOMB = 3
SSTOP = 4
SIDL = 5
SONPROC = 6
CLDPEND = 0x0001
CLDCONT = 0x0002
SSYS = 0x00000001
STRC = 0x00000002
SLOAD = 0x00000008
SLOCK = 0x00000010
SPREXEC = 0x00000020
SPROCTR = 0x00000040
SPRFORK = 0x00000080
SKILLED = 0x00000100
SULOAD = 0x00000200
SRUNLCL = 0x00000400
SBPTADJ = 0x00000800
SKILLCL = 0x00001000
SOWEUPC = 0x00002000
SEXECED = 0x00004000
SPASYNC = 0x00008000
SJCTL = 0x00010000
SNOWAIT = 0x00020000
SVFORK = 0x00040000
SVFWAIT = 0x00080000
EXITLWPS = 0x00100000
HOLDFORK = 0x00200000
SWAITSIG = 0x00400000
HOLDFORK1 = 0x00800000
COREDUMP = 0x01000000
SMSACCT = 0x02000000
ASLWP = 0x04000000
SPRLOCK = 0x08000000
NOCD = 0x10000000
HOLDWATCH = 0x20000000
SMSFORK = 0x40000000
SDOCORE = 0x80000000
FORREAL = 0
JUSTLOOKING = 1
SUSPEND_NORMAL = 0
SUSPEND_PAUSE = 1
NOCLASS = (-1)
# Included from sys/dditypes.h
DDI_DEVICE_ATTR_V0 = 0x0001
DDI_NEVERSWAP_ACC = 0x00
DDI_STRUCTURE_LE_ACC = 0x01
DDI_STRUCTURE_BE_ACC = 0x02
DDI_STRICTORDER_ACC = 0x00
DDI_UNORDERED_OK_ACC = 0x01
DDI_MERGING_OK_ACC = 0x02
DDI_LOADCACHING_OK_ACC = 0x03
DDI_STORECACHING_OK_ACC = 0x04
DDI_DATA_SZ01_ACC = 1
DDI_DATA_SZ02_ACC = 2
DDI_DATA_SZ04_ACC = 4
DDI_DATA_SZ08_ACC = 8
VERS_ACCHDL = 0x0001
DEVID_NONE = 0
DEVID_SCSI3_WWN = 1
DEVID_SCSI_SERIAL = 2
DEVID_FAB = 3
DEVID_ENCAP = 4
DEVID_MAXTYPE = 4
# Included from sys/varargs.h
# Included from sys/va_list.h
VA_ALIGN = 8
def _ARGSIZEOF(t): return ((sizeof (t) + VA_ALIGN - 1) & ~(VA_ALIGN - 1))
VA_ALIGN = 8
def _ARGSIZEOF(t): return ((sizeof (t) + VA_ALIGN - 1) & ~(VA_ALIGN - 1))
NSYSCALL = 256
SE_32RVAL1 = 0x0
SE_32RVAL2 = 0x1
SE_64RVAL = 0x2
SE_RVAL_MASK = 0x3
SE_LOADABLE = 0x08
SE_LOADED = 0x10
SE_NOUNLOAD = 0x20
SE_ARGC = 0x40
# Included from sys/devops.h
from TYPES import *
# Included from sys/poll.h
POLLIN = 0x0001
POLLPRI = 0x0002
POLLOUT = 0x0004
POLLRDNORM = 0x0040
POLLWRNORM = POLLOUT
POLLRDBAND = 0x0080
POLLWRBAND = 0x0100
POLLNORM = POLLRDNORM
POLLERR = 0x0008
POLLHUP = 0x0010
POLLNVAL = 0x0020
POLLREMOVE = 0x0800
POLLRDDATA = 0x0200
POLLNOERR = 0x0400
POLLCLOSED = 0x8000
# Included from vm/as.h
# Included from vm/seg.h
# Included from sys/vnode.h
from TYPES import *
VROOT = 0x01
VNOCACHE = 0x02
VNOMAP = 0x04
VDUP = 0x08
VNOSWAP = 0x10
VNOMOUNT = 0x20
VISSWAP = 0x40
VSWAPLIKE = 0x80
VVFSLOCK = 0x100
VVFSWAIT = 0x200
VVMLOCK = 0x400
VDIROPEN = 0x800
VVMEXEC = 0x1000
VPXFS = 0x2000
AT_TYPE = 0x0001
AT_MODE = 0x0002
AT_UID = 0x0004
AT_GID = 0x0008
AT_FSID = 0x0010
AT_NODEID = 0x0020
AT_NLINK = 0x0040
AT_SIZE = 0x0080
AT_ATIME = 0x0100
AT_MTIME = 0x0200
AT_CTIME = 0x0400
AT_RDEV = 0x0800
AT_BLKSIZE = 0x1000
AT_NBLOCKS = 0x2000
AT_VCODE = 0x4000
AT_ALL = (AT_TYPE|AT_MODE|AT_UID|AT_GID|AT_FSID|AT_NODEID|\
AT_NLINK|AT_SIZE|AT_ATIME|AT_MTIME|AT_CTIME|\
AT_RDEV|AT_BLKSIZE|AT_NBLOCKS|AT_VCODE)
AT_STAT = (AT_MODE|AT_UID|AT_GID|AT_FSID|AT_NODEID|AT_NLINK|\
AT_SIZE|AT_ATIME|AT_MTIME|AT_CTIME|AT_RDEV)
AT_TIMES = (AT_ATIME|AT_MTIME|AT_CTIME)
AT_NOSET = (AT_NLINK|AT_RDEV|AT_FSID|AT_NODEID|AT_TYPE|\
AT_BLKSIZE|AT_NBLOCKS|AT_VCODE)
VSUID = 04000
VSGID = 02000
VSVTX = 01000
VREAD = 00400
VWRITE = 00200
VEXEC = 00100
MODEMASK = 07777
PERMMASK = 00777
def MANDMODE(mode): return (((mode) & (VSGID|(VEXEC>>3))) == VSGID)
VSA_ACL = 0x0001
VSA_ACLCNT = 0x0002
VSA_DFACL = 0x0004
VSA_DFACLCNT = 0x0008
LOOKUP_DIR = 0x01
DUMP_ALLOC = 0
DUMP_FREE = 1
DUMP_SCAN = 2
ATTR_UTIME = 0x01
ATTR_EXEC = 0x02
ATTR_COMM = 0x04
ATTR_HINT = 0x08
ATTR_REAL = 0x10
# Included from vm/faultcode.h
FC_HWERR = 0x1
FC_ALIGN = 0x2
FC_OBJERR = 0x3
FC_PROT = 0x4
FC_NOMAP = 0x5
FC_NOSUPPORT = 0x6
def FC_MAKE_ERR(e): return (((e) << 8) | FC_OBJERR)
def FC_CODE(fc): return ((fc) & 0xff)
def FC_ERRNO(fc): return ((unsigned)(fc) >> 8)
# Included from vm/hat.h
from TYPES import *
# Included from vm/page.h
PAGE_HASHAVELEN = 4
PAGE_HASHVPSHIFT = 6
PG_EXCL = 0x0001
PG_WAIT = 0x0002
PG_PHYSCONTIG = 0x0004
PG_MATCH_COLOR = 0x0008
PG_NORELOC = 0x0010
PG_FREE_LIST = 1
PG_CACHE_LIST = 2
PG_LIST_TAIL = 0
PG_LIST_HEAD = 1
def page_next_raw(PP): return page_nextn_raw((PP), 1)
PAGE_IO_INUSE = 0x1
PAGE_IO_WANTED = 0x2
PGREL_NOTREL = 0x1
PGREL_CLEAN = 0x2
PGREL_MOD = 0x3
P_FREE = 0x80
P_NORELOC = 0x40
def PP_SETAGED(pp): return ASSERT(PP_ISAGED(pp))
HAT_FLAGS_RESV = 0xFF000000
HAT_LOAD = 0x00
HAT_LOAD_LOCK = 0x01
HAT_LOAD_ADV = 0x04
HAT_LOAD_CONTIG = 0x10
HAT_LOAD_NOCONSIST = 0x20
HAT_LOAD_SHARE = 0x40
HAT_LOAD_REMAP = 0x80
HAT_RELOAD_SHARE = 0x100
HAT_PLAT_ATTR_MASK = 0xF00000
HAT_PROT_MASK = 0x0F
HAT_NOFAULT = 0x10
HAT_NOSYNC = 0x20
HAT_STRICTORDER = 0x0000
HAT_UNORDERED_OK = 0x0100
HAT_MERGING_OK = 0x0200
HAT_LOADCACHING_OK = 0x0300
HAT_STORECACHING_OK = 0x0400
HAT_ORDER_MASK = 0x0700
HAT_NEVERSWAP = 0x0000
HAT_STRUCTURE_BE = 0x1000
HAT_STRUCTURE_LE = 0x2000
HAT_ENDIAN_MASK = 0x3000
HAT_COW = 0x0001
HAT_UNLOAD = 0x00
HAT_UNLOAD_NOSYNC = 0x02
HAT_UNLOAD_UNLOCK = 0x04
HAT_UNLOAD_OTHER = 0x08
HAT_UNLOAD_UNMAP = 0x10
HAT_SYNC_DONTZERO = 0x00
HAT_SYNC_ZERORM = 0x01
HAT_SYNC_STOPON_REF = 0x02
HAT_SYNC_STOPON_MOD = 0x04
HAT_SYNC_STOPON_RM = (HAT_SYNC_STOPON_REF | HAT_SYNC_STOPON_MOD)
HAT_DUP_ALL = 1
HAT_DUP_COW = 2
HAT_MAP = 0x00
HAT_ADV_PGUNLOAD = 0x00
HAT_FORCE_PGUNLOAD = 0x01
P_MOD = 0x1
P_REF = 0x2
P_RO = 0x4
def hat_ismod(pp): return (hat_page_getattr(pp, P_MOD))
def hat_isref(pp): return (hat_page_getattr(pp, P_REF))
def hat_isro(pp): return (hat_page_getattr(pp, P_RO))
def hat_setmod(pp): return (hat_page_setattr(pp, P_MOD))
def hat_setref(pp): return (hat_page_setattr(pp, P_REF))
def hat_setrefmod(pp): return (hat_page_setattr(pp, P_REF|P_MOD))
def hat_clrmod(pp): return (hat_page_clrattr(pp, P_MOD))
def hat_clrref(pp): return (hat_page_clrattr(pp, P_REF))
def hat_clrrefmod(pp): return (hat_page_clrattr(pp, P_REF|P_MOD))
def hat_page_is_mapped(pp): return (hat_page_getshare(pp))
HAT_DONTALLOC = 0
HAT_ALLOC = 1
HRM_SHIFT = 4
HRM_BYTES = (1 << HRM_SHIFT)
HRM_PAGES = ((HRM_BYTES * NBBY) / 2)
HRM_PGPERBYTE = (NBBY/2)
HRM_PGBYTEMASK = (HRM_PGPERBYTE-1)
HRM_HASHSIZE = 0x200
HRM_HASHMASK = (HRM_HASHSIZE - 1)
HRM_BLIST_INCR = 0x200
HRM_SWSMONID = 1
SSL_NLEVELS = 4
SSL_BFACTOR = 4
SSL_LOG2BF = 2
SEGP_ASYNC_FLUSH = 0x1
SEGP_FORCE_WIRED = 0x2
SEGP_SUCCESS = 0
SEGP_FAIL = 1
def seg_pages(seg): return \
IE_NOMEM = -1
AS_PAGLCK = 0x80
AS_CLAIMGAP = 0x40
AS_UNMAPWAIT = 0x20
def AS_TYPE_64BIT(as_): return \
AS_LREP_LINKEDLIST = 0
AS_LREP_SKIPLIST = 1
AS_MUTATION_THRESH = 225
AH_DIR = 0x1
AH_LO = 0x0
AH_HI = 0x1
AH_CONTAIN = 0x2
# Included from sys/ddidmareq.h
DMA_UNIT_8 = 1
DMA_UNIT_16 = 2
DMA_UNIT_32 = 4
DMALIM_VER0 = ((0x86000000) + 0)
DDI_DMA_FORCE_PHYSICAL = 0x0100
DMA_ATTR_V0 = 0
DMA_ATTR_VERSION = DMA_ATTR_V0
DDI_DMA_CALLBACK_RUNOUT = 0
DDI_DMA_CALLBACK_DONE = 1
DDI_DMA_WRITE = 0x0001
DDI_DMA_READ = 0x0002
DDI_DMA_RDWR = (DDI_DMA_READ | DDI_DMA_WRITE)
DDI_DMA_REDZONE = 0x0004
DDI_DMA_PARTIAL = 0x0008
DDI_DMA_CONSISTENT = 0x0010
DDI_DMA_EXCLUSIVE = 0x0020
DDI_DMA_STREAMING = 0x0040
DDI_DMA_SBUS_64BIT = 0x2000
DDI_DMA_MAPPED = 0
DDI_DMA_MAPOK = 0
DDI_DMA_PARTIAL_MAP = 1
DDI_DMA_DONE = 2
DDI_DMA_NORESOURCES = -1
DDI_DMA_NOMAPPING = -2
DDI_DMA_TOOBIG = -3
DDI_DMA_TOOSMALL = -4
DDI_DMA_LOCKED = -5
DDI_DMA_BADLIMITS = -6
DDI_DMA_STALE = -7
DDI_DMA_BADATTR = -8
DDI_DMA_INUSE = -9
DDI_DMA_SYNC_FORDEV = 0x0
DDI_DMA_SYNC_FORCPU = 0x1
DDI_DMA_SYNC_FORKERNEL = 0x2
# Included from sys/ddimapreq.h
# Included from sys/mman.h
PROT_READ = 0x1
PROT_WRITE = 0x2
PROT_EXEC = 0x4
PROT_USER = 0x8
PROT_ZFOD = (PROT_READ | PROT_WRITE | PROT_EXEC | PROT_USER)
PROT_ALL = (PROT_READ | PROT_WRITE | PROT_EXEC | PROT_USER)
PROT_NONE = 0x0
MAP_SHARED = 1
MAP_PRIVATE = 2
MAP_TYPE = 0xf
MAP_FIXED = 0x10
MAP_NORESERVE = 0x40
MAP_ANON = 0x100
MAP_ANONYMOUS = MAP_ANON
MAP_RENAME = 0x20
PROC_TEXT = (PROT_EXEC | PROT_READ)
PROC_DATA = (PROT_READ | PROT_WRITE | PROT_EXEC)
SHARED = 0x10
PRIVATE = 0x20
VALID_ATTR = (PROT_READ|PROT_WRITE|PROT_EXEC|SHARED|PRIVATE)
PROT_EXCL = 0x20
_MAP_LOW32 = 0x80
_MAP_NEW = 0x80000000
from TYPES import *
MADV_NORMAL = 0
MADV_RANDOM = 1
MADV_SEQUENTIAL = 2
MADV_WILLNEED = 3
MADV_DONTNEED = 4
MADV_FREE = 5
MS_OLDSYNC = 0x0
MS_SYNC = 0x4
MS_ASYNC = 0x1
MS_INVALIDATE = 0x2
MC_SYNC = 1
MC_LOCK = 2
MC_UNLOCK = 3
MC_ADVISE = 4
MC_LOCKAS = 5
MC_UNLOCKAS = 6
MCL_CURRENT = 0x1
MCL_FUTURE = 0x2
DDI_MAP_VERSION = 0x0001
DDI_MF_USER_MAPPING = 0x1
DDI_MF_KERNEL_MAPPING = 0x2
DDI_MF_DEVICE_MAPPING = 0x4
DDI_ME_GENERIC = (-1)
DDI_ME_UNIMPLEMENTED = (-2)
DDI_ME_NORESOURCES = (-3)
DDI_ME_UNSUPPORTED = (-4)
DDI_ME_REGSPEC_RANGE = (-5)
DDI_ME_RNUMBER_RANGE = (-6)
DDI_ME_INVAL = (-7)
# Included from sys/ddipropdefs.h
def CELLS_1275_TO_BYTES(n): return ((n) * PROP_1275_CELL_SIZE)
def BYTES_TO_1275_CELLS(n): return ((n) / PROP_1275_CELL_SIZE)
PH_FROM_PROM = 0x01
DDI_PROP_SUCCESS = 0
DDI_PROP_NOT_FOUND = 1
DDI_PROP_UNDEFINED = 2
DDI_PROP_NO_MEMORY = 3
DDI_PROP_INVAL_ARG = 4
DDI_PROP_BUF_TOO_SMALL = 5
DDI_PROP_CANNOT_DECODE = 6
DDI_PROP_CANNOT_ENCODE = 7
DDI_PROP_END_OF_DATA = 8
DDI_PROP_FOUND_1275 = 255
PROP_1275_INT_SIZE = 4
DDI_PROP_DONTPASS = 0x0001
DDI_PROP_CANSLEEP = 0x0002
DDI_PROP_SYSTEM_DEF = 0x0004
DDI_PROP_NOTPROM = 0x0008
DDI_PROP_DONTSLEEP = 0x0010
DDI_PROP_STACK_CREATE = 0x0020
DDI_PROP_UNDEF_IT = 0x0040
DDI_PROP_HW_DEF = 0x0080
DDI_PROP_TYPE_INT = 0x0100
DDI_PROP_TYPE_STRING = 0x0200
DDI_PROP_TYPE_BYTE = 0x0400
DDI_PROP_TYPE_COMPOSITE = 0x0800
DDI_PROP_TYPE_ANY = (DDI_PROP_TYPE_INT | \
DDI_PROP_TYPE_STRING | \
DDI_PROP_TYPE_BYTE | \
DDI_PROP_TYPE_COMPOSITE)
DDI_PROP_TYPE_MASK = (DDI_PROP_TYPE_INT | \
DDI_PROP_TYPE_STRING | \
DDI_PROP_TYPE_BYTE | \
DDI_PROP_TYPE_COMPOSITE)
DDI_RELATIVE_ADDRESSING = "relative-addressing"
DDI_GENERIC_ADDRESSING = "generic-addressing"
# Included from sys/ddidevmap.h
KMEM_PAGEABLE = 0x100
KMEM_NON_PAGEABLE = 0x200
UMEM_LOCKED = 0x400
UMEM_TRASH = 0x800
DEVMAP_OPS_REV = 1
DEVMAP_DEFAULTS = 0x00
DEVMAP_MAPPING_INVALID = 0x01
DEVMAP_ALLOW_REMAP = 0x02
DEVMAP_USE_PAGESIZE = 0x04
DEVMAP_SETUP_FLAGS = \
(DEVMAP_MAPPING_INVALID | DEVMAP_ALLOW_REMAP | DEVMAP_USE_PAGESIZE)
DEVMAP_SETUP_DONE = 0x100
DEVMAP_LOCK_INITED = 0x200
DEVMAP_FAULTING = 0x400
DEVMAP_LOCKED = 0x800
DEVMAP_FLAG_LARGE = 0x1000
DDI_UMEM_SLEEP = 0x0
DDI_UMEM_NOSLEEP = 0x01
DDI_UMEM_PAGEABLE = 0x02
DDI_UMEM_TRASH = 0x04
DDI_UMEMLOCK_READ = 0x01
DDI_UMEMLOCK_WRITE = 0x02
# Included from sys/nexusdefs.h
# Included from sys/nexusintr.h
BUSO_REV = 4
BUSO_REV_3 = 3
BUSO_REV_4 = 4
DEVO_REV = 3
CB_REV = 1
DDI_IDENTIFIED = (0)
DDI_NOT_IDENTIFIED = (-1)
DDI_PROBE_FAILURE = ENXIO
DDI_PROBE_DONTCARE = 0
DDI_PROBE_PARTIAL = 1
DDI_PROBE_SUCCESS = 2
MAPDEV_REV = 1
from TYPES import *
D_NEW = 0x00
_D_OLD = 0x01
D_TAPE = 0x08
D_MTSAFE = 0x0020
_D_QNEXTLESS = 0x0040
_D_MTOCSHARED = 0x0080
D_MTOCEXCL = 0x0800
D_MTPUTSHARED = 0x1000
D_MTPERQ = 0x2000
D_MTQPAIR = 0x4000
D_MTPERMOD = 0x6000
D_MTOUTPERIM = 0x8000
_D_MTCBSHARED = 0x10000
D_MTINNER_MOD = (D_MTPUTSHARED|_D_MTOCSHARED|_D_MTCBSHARED)
D_MTOUTER_MOD = (D_MTOCEXCL)
D_MP = D_MTSAFE
D_64BIT = 0x200
D_SYNCSTR = 0x400
D_DEVMAP = 0x100
D_HOTPLUG = 0x4
SNDZERO = 0x001
SNDPIPE = 0x002
RNORM = 0x000
RMSGD = 0x001
RMSGN = 0x002
RMODEMASK = 0x003
RPROTDAT = 0x004
RPROTDIS = 0x008
RPROTNORM = 0x010
RPROTMASK = 0x01c
RFLUSHMASK = 0x020
RFLUSHPCPROT = 0x020
RERRNORM = 0x001
RERRNONPERSIST = 0x002
RERRMASK = (RERRNORM|RERRNONPERSIST)
WERRNORM = 0x004
WERRNONPERSIST = 0x008
WERRMASK = (WERRNORM|WERRNONPERSIST)
FLUSHR = 0x01
FLUSHW = 0x02
FLUSHRW = 0x03
FLUSHBAND = 0x04
MAPINOK = 0x01
NOMAPIN = 0x02
REMAPOK = 0x04
NOREMAP = 0x08
S_INPUT = 0x0001
S_HIPRI = 0x0002
S_OUTPUT = 0x0004
S_MSG = 0x0008
S_ERROR = 0x0010
S_HANGUP = 0x0020
S_RDNORM = 0x0040
S_WRNORM = S_OUTPUT
S_RDBAND = 0x0080
S_WRBAND = 0x0100
S_BANDURG = 0x0200
RS_HIPRI = 0x01
STRUIO_POSTPONE = 0x08
STRUIO_MAPIN = 0x10
MSG_HIPRI = 0x01
MSG_ANY = 0x02
MSG_BAND = 0x04
MSG_XPG4 = 0x08
MSG_IPEEK = 0x10
MSG_DISCARDTAIL = 0x20
MSG_HOLDSIG = 0x40
MSG_IGNERROR = 0x80
MSG_DELAYERROR = 0x100
MSG_IGNFLOW = 0x200
MSG_NOMARK = 0x400
MORECTL = 1
MOREDATA = 2
MUXID_ALL = (-1)
ANYMARK = 0x01
LASTMARK = 0x02
_INFTIM = -1
INFTIM = _INFTIM
|
miguelpalacio/python-for-android | refs/heads/master | python-modules/twisted/twisted/runner/topfiles/setup.py | 54 | # Copyright (c) 2008 Twisted Matrix Laboratories.
# See LICENSE for details.
# Guard the import: this setup script can only run when Twisted core
# (which provides twisted.python.dist) is already installed.
try:
    from twisted.python.dist import setup, ConditionalExtension as Extension
except ImportError:
    raise SystemExit("twisted.python.dist module not found.  Make sure you "
                     "have installed the Twisted core package before "
                     "attempting to install any other Twisted projects.")
# C extension wrapping the portmapper RPC interface; it is only built
# when the rpc/rpc.h header is present on the build machine.
extensions = [
    Extension("twisted.runner.portmap",
              ["twisted/runner/portmap.c"],
              condition=lambda builder: builder._check_header("rpc/rpc.h")),
]
# Delegate to Twisted's own setup() helper, which understands the
# twisted_subproject / conditionalExtensions keywords used below.
if __name__ == '__main__':
    setup(
        twisted_subproject="runner",
        # metadata
        name="Twisted Runner",
        description="Twisted Runner is a process management library and inetd "
                    "replacement.",
        author="Twisted Matrix Laboratories",
        author_email="twisted-python@twistedmatrix.com",
        maintainer="Andrew Bennetts",
        url="http://twistedmatrix.com/trac/wiki/TwistedRunner",
        license="MIT",
        long_description="""\
Twisted Runner contains code useful for persistent process management
with Python and Twisted, and has an almost full replacement for inetd.
""",
        # build stuff
        conditionalExtensions=extensions,
    )
|
priyatransbit/linux | refs/heads/master | scripts/tracing/draw_functrace.py | 14679 | #!/usr/bin/python
"""
Copyright 2008 (c) Frederic Weisbecker <fweisbec@gmail.com>
Licensed under the terms of the GNU GPL License version 2
This script parses a trace provided by the function tracer in
kernel/trace/trace_functions.c
The resulted trace is processed into a tree to produce a more human
view of the call stack by drawing textual but hierarchical tree of
calls. Only the functions' names and the call times are provided.
Usage:
Be sure that you have CONFIG_FUNCTION_TRACER
# mount -t debugfs nodev /sys/kernel/debug
# echo function > /sys/kernel/debug/tracing/current_tracer
$ cat /sys/kernel/debug/tracing/trace_pipe > ~/raw_trace_func
Wait some times but not too much, the script is a bit slow.
Break the pipe (Ctrl + Z)
$ scripts/draw_functrace.py < raw_trace_func > draw_functrace
Then you have your drawn trace in draw_functrace
"""
import sys, re
class CallTree:
    """Tree representation of a kernel function-call stack.

    A function whose caller never appears in the trace (interrupt,
    syscall, kernel thread...) hangs off a virtual parent, ROOT.
    """

    # Virtual parent for call chains with no traced caller; installed
    # by main() before any parsing happens.
    ROOT = None

    def __init__(self, func, time = None, parent = None):
        self._func = func
        self._time = time
        self._parent = CallTree.ROOT if parent is None else parent
        self._children = []

    def calls(self, func, calltime):
        """Record that this function called *func* at *calltime*.

        @return: A reference to the newly created child node.
        """
        node = CallTree(func, calltime, self)
        self._children.append(node)
        return node

    def getParent(self, func):
        """Walk up to the nearest ancestor named *func*.

        If no such ancestor exists, a fresh child of ROOT is created
        and returned instead.

        @return: A reference to the parent.
        """
        node = self
        while node != CallTree.ROOT and node._func != func:
            node = node._parent
        if node == CallTree.ROOT:
            return CallTree.ROOT.calls(func, None)
        return node

    def __repr__(self):
        return self._render("", True)

    def _render(self, branch, last_child):
        # One line for this node, then recurse into the children with a
        # "|" rail appended to the branch prefix.
        if self._time is not None:
            text = "%s----%s (%s)\n" % (branch, self._func, self._time)
        else:
            text = "%s----%s\n" % (branch, self._func)
        if last_child:
            # Drop the parent's rail: nothing follows below this node.
            branch = branch[:-1] + " "
        child_count = len(self._children)
        for index, child in enumerate(self._children):
            text += "%s" % child._render(branch + " |",
                                         index == child_count - 1)
        return text
class BrokenLineException(Exception):
    """Signals a trace line truncated by the pipe breakage.

    The incomplete line is ignored and processing stops.
    """
class CommentLineException(Exception):
    """Signals a comment line (as found at the top of the trace file).

    Such lines are simply skipped.
    """
def parseLine(line):
    """Split one ftrace line into a (calltime, callee, caller) tuple.

    Raises CommentLineException for comment lines and
    BrokenLineException for lines that do not match the trace format.
    """
    stripped = line.strip()
    if stripped.startswith("#"):
        raise CommentLineException
    match = re.match(r"[^]]+?\] +([0-9.]+): (\w+) <-(\w+)", stripped)
    if match is None:
        raise BrokenLineException
    return match.groups()
def main():
    # Read a raw function trace from stdin and print it as a call tree.
    # Virtual root that adopts every function whose caller never appears
    # in the trace (interrupts, syscalls, kernel threads...).
    CallTree.ROOT = CallTree("Root (Nowhere)", None, None)
    tree = CallTree.ROOT
    for line in sys.stdin:
        try:
            calltime, callee, caller = parseLine(line)
        except BrokenLineException:
            # Last line was cut by the pipe breakage: stop processing.
            break
        except CommentLineException:
            # Header/comment lines carry no call data; skip them.
            continue
        # Re-anchor on the caller's node, then attach the callee below it.
        tree = tree.getParent(caller)
        tree = tree.calls(callee, calltime)
    # Python 2 print statement: this script predates Python 3.
    print CallTree.ROOT
if __name__ == "__main__":
    main()
|
sakisbl/OpenSesameOnline | refs/heads/master | webapp/os_online/translator/os_modules/multiline.py | 1 | # -*- coding: ascii -*-
"""
#==============================================================================
#title :multiline.py
#description :This class represents an OpenSesame Variable statement
#author :OpenSesame group of GipHouse 2014, Radboud University
#programmers :Ben Bruecker, Laurens van Bercken
#date :20140419
#version :0.1
#usage :python ../translator.py
#notes :
#python_version :2.7
#==============================================================================
"""
import re
from os_online.translator.os_modules.statement import Statement
class Multiline(Statement):
    """Models an OpenSesame multi-line variable.

    Such a variable opens with ``__variable-name__`` and is closed by a
    line containing only ``__end__``.  Subclasses Statement.
    """

    is_multiline = True

    @staticmethod
    def _get_pattern():
        """Return the compiled pattern matching a multi-line opener."""
        return re.compile(r"^__(.+)__$")

    def addline(self, line):
        """Append one line of content to this statement.

        Seeing ``__end__`` marks the statement as done; any further
        content lines are ignored.

        line -- the line to be added
        """
        if line == "__end__":
            self.done = True
            return
        if self.done:
            return
        if self.parameters['lines']:
            self.parameters['lines'] += "\n"
        self.parameters['lines'] += line

    def is_done(self):
        """Return whether the closing ``__end__`` has been seen."""
        return self.done

    def __init__(self, line):
        """Initialize from a line matching the multi-line opener pattern.

        Raises ValueError when *line* is not a valid opener.
        """
        self.done = False
        match = self._get_pattern().match(line)
        if not match:
            raise ValueError("The provided line was not a valid Multiline")
        # The captured group is the variable name between the dunders.
        self.os_type = "multi-line"
        self.parameters = {'name': match.group(1), 'lines': ""}
        Statement.__init__(self, self.os_type, self.parameters)
|
bgris/ODL_bgris | refs/heads/master | lib/python3.5/site-packages/astroid/builder.py | 8 | # copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of astroid.
#
# astroid is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 2.1 of the License, or (at your
# option) any later version.
#
# astroid is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
# for more details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with astroid. If not, see <http://www.gnu.org/licenses/>.
"""The AstroidBuilder makes astroid from living object and / or from _ast
The builder is not thread safe and can't be used to parse different sources
at the same time.
"""
from __future__ import with_statement
import _ast
import os
import sys
import textwrap
from astroid import bases
from astroid import exceptions
from astroid import manager
from astroid import modutils
from astroid import raw_building
from astroid import rebuilder
from astroid import util
def _parse(string):
return compile(string, "<string>", 'exec', _ast.PyCF_ONLY_AST)
# Version-dependent source readers: Python 3 can detect the declared
# encoding with tokenize; Python 2 falls back to a hand-rolled sniffer.
if sys.version_info >= (3, 0):
    # pylint: disable=no-name-in-module; We don't understand flows yet.
    from tokenize import detect_encoding
    def open_source_file(filename):
        # Return (open text stream, encoding, file contents).  The file is
        # first opened in binary mode so tokenize can sniff the PEP 263
        # coding declaration / BOM from the raw bytes.
        with open(filename, 'rb') as byte_stream:
            encoding = detect_encoding(byte_stream.readline)[0]
        stream = open(filename, 'r', newline=None, encoding=encoding)
        try:
            data = stream.read()
        except UnicodeError: # wrong encoding
            # detect_encoding returns utf-8 if no encoding specified
            msg = 'Wrong (%s) or no encoding specified' % encoding
            raise exceptions.AstroidBuildingException(msg)
        return stream, encoding, data
else:
    import re
    # PEP 263 coding declaration, e.g. "# -*- coding: utf-8 -*-".
    _ENCODING_RGX = re.compile(r"\s*#+.*coding[:=]\s*([-\w.]+)")
    def _guess_encoding(string):
        """get encoding from a python file as string or return None if not found"""
        # check for UTF-8 byte-order mark
        if string.startswith('\xef\xbb\xbf'):
            return 'UTF-8'
        # Only the first two lines may carry a coding declaration (PEP 263).
        for line in string.split('\n', 2)[:2]:
            # check for encoding declaration
            match = _ENCODING_RGX.match(line)
            if match is not None:
                return match.group(1)
    def open_source_file(filename):
        """get data for parsing a file"""
        # 'U' mode: universal newlines (Python 2 spelling).
        stream = open(filename, 'U')
        data = stream.read()
        encoding = _guess_encoding(data)
        return stream, encoding, data
# Default manager shared by module-level helpers below.
MANAGER = manager.AstroidManager()
class AstroidBuilder(raw_building.InspectBuilder):
"""Class for building an astroid tree from source code or from a live module.
The param *manager* specifies the manager class which should be used.
If no manager is given, then the default one will be used. The
param *apply_transforms* determines if the transforms should be
applied after the tree was built from source or from a live object,
by default being True.
"""
def __init__(self, manager=None, apply_transforms=True):
super(AstroidBuilder, self).__init__()
self._manager = manager or MANAGER
self._apply_transforms = apply_transforms
def module_build(self, module, modname=None):
"""Build an astroid from a living module instance."""
node = None
path = getattr(module, '__file__', None)
if path is not None:
path_, ext = os.path.splitext(modutils._path_from_filename(path))
if ext in ('.py', '.pyc', '.pyo') and os.path.exists(path_ + '.py'):
node = self.file_build(path_ + '.py', modname)
if node is None:
# this is a built-in module
# get a partial representation by introspection
node = self.inspect_build(module, modname=modname, path=path)
if self._apply_transforms:
# We have to handle transformation by ourselves since the
# rebuilder isn't called for builtin nodes
node = self._manager.visit_transforms(node)
return node
def file_build(self, path, modname=None):
"""Build astroid from a source code file (i.e. from an ast)
*path* is expected to be a python source file
"""
try:
stream, encoding, data = open_source_file(path)
except IOError as exc:
msg = 'Unable to load file %r (%s)' % (path, exc)
raise exceptions.AstroidBuildingException(msg)
except SyntaxError as exc: # py3k encoding specification error
raise exceptions.AstroidBuildingException(exc)
except LookupError as exc: # unknown encoding
raise exceptions.AstroidBuildingException(exc)
with stream:
# get module name if necessary
if modname is None:
try:
modname = '.'.join(modutils.modpath_from_file(path))
except ImportError:
modname = os.path.splitext(os.path.basename(path))[0]
# build astroid representation
module = self._data_build(data, modname, path)
return self._post_build(module, encoding)
def string_build(self, data, modname='', path=None):
"""Build astroid from source code string."""
module = self._data_build(data, modname, path)
module.source_code = data.encode('utf-8')
return self._post_build(module, 'utf-8')
def _post_build(self, module, encoding):
"""Handles encoding and delayed nodes after a module has been built"""
module.file_encoding = encoding
self._manager.cache_module(module)
# post tree building steps after we stored the module in the cache:
for from_node in module._import_from_nodes:
if from_node.modname == '__future__':
for symbol, _ in from_node.names:
module._future_imports.add(symbol)
self.add_from_names_to_locals(from_node)
# handle delayed assattr nodes
for delayed in module._delayed_assattr:
self.delayed_assattr(delayed)
# Visit the transforms
if self._apply_transforms:
module = self._manager.visit_transforms(module)
return module
def _data_build(self, data, modname, path):
"""Build tree node from data and add some informations"""
try:
node = _parse(data + '\n')
except (TypeError, ValueError, SyntaxError) as exc:
raise exceptions.AstroidBuildingException(exc)
if path is not None:
node_file = os.path.abspath(path)
else:
node_file = '<?>'
if modname.endswith('.__init__'):
modname = modname[:-9]
package = True
else:
package = path and path.find('__init__.py') > -1 or False
builder = rebuilder.TreeRebuilder(self._manager)
module = builder.visit_module(node, modname, node_file, package)
module._import_from_nodes = builder._import_from_nodes
module._delayed_assattr = builder._delayed_assattr
return module
def add_from_names_to_locals(self, node):
"""Store imported names to the locals
Resort the locals if coming from a delayed node
"""
_key_func = lambda node: node.fromlineno
def sort_locals(my_list):
my_list.sort(key=_key_func)
for (name, asname) in node.names:
if name == '*':
try:
imported = node.do_import_module()
except exceptions.InferenceError:
continue
for name in imported._public_names():
node.parent.set_local(name, node)
sort_locals(node.parent.scope()._locals[name])
else:
node.parent.set_local(asname or name, node)
sort_locals(node.parent.scope()._locals[asname or name])
    def delayed_assattr(self, node):
        """Visit an AssAttr node whose handling was deferred.

        Infers the possible values of ``node.expr`` and records the
        attribute assignment (``node.attrname``) on each inferred target:
        into its instance attributes for instances and functions, or into
        its locals otherwise. An assignment made in ``__init__`` is kept
        first in the value list.
        """
        try:
            frame = node.frame()
            for inferred in node.expr.infer():
                if inferred is util.YES:
                    # Inference produced no usable value for this candidate.
                    continue
                try:
                    if inferred.__class__ is bases.Instance:
                        # Exact Instance: record on the proxied class.
                        inferred = inferred._proxied
                        iattrs = inferred._instance_attrs
                    elif isinstance(inferred, bases.Instance):
                        # Const, Tuple, ... we may be wrong, may be not, but
                        # anyway we don't want to pollute builtin's namespace
                        continue
                    elif inferred.is_function:
                        iattrs = inferred._instance_attrs
                    else:
                        iattrs = inferred._locals
                    # NOTE(review): relies on the private attribute layout of
                    # the inferred objects; AttributeError below is the
                    # escape hatch for objects without it.
                except AttributeError:
                    # XXX log error
                    continue
                values = iattrs.setdefault(node.attrname, [])
                if node in values:
                    continue
                # get assign in __init__ first XXX useful ?
                if (frame.name == '__init__' and values and
                    not values[0].frame().name == '__init__'):
                    values.insert(0, node)
                else:
                    values.append(node)
        except exceptions.InferenceError:
            # The expression itself could not be inferred at all; give up.
            pass
def parse(code, module_name='', path=None, apply_transforms=True):
    """Parse a source string and return the corresponding astroid AST.

    :param str code: the source code of the module
    :param str module_name: optional name for the module
    :param str path: optional path for the module
    :param bool apply_transforms: whether the default transforms should be
        applied to the resulting tree; pass False to skip them
    """
    builder = AstroidBuilder(manager=MANAGER,
                             apply_transforms=apply_transforms)
    # Dedent so indented triple-quoted snippets parse as top-level code.
    return builder.string_build(textwrap.dedent(code),
                                modname=module_name, path=path)
|
derekchiang/keystone | refs/heads/master | keystone/openstack/common/context.py | 24 | # Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Simple class that stores security context information in the web request.
Projects should subclass this class if they wish to enhance the request
context or provide additional information in their specific WSGI pipeline.
"""
import itertools
import uuid
def generate_request_id():
    """Return a new request identifier of the form ``req-<uuid4>``."""
    return 'req-{0}'.format(uuid.uuid4())
class RequestContext(object):
    """Security context of a request.

    Carries information about the user on whose behalf the request is
    made (user/tenant/domain identifiers, admin flag, auth token) plus
    bookkeeping data such as the request id. Projects may subclass this
    to enrich the context for their own WSGI pipeline.
    """

    # Template used by to_dict() to build the 'user_identity' field;
    # missing components are rendered as '-'.
    user_idt_format = '{user} {tenant} {domain} {user_domain} {p_domain}'

    def __init__(self, auth_token=None, user=None, tenant=None, domain=None,
                 user_domain=None, project_domain=None, is_admin=False,
                 read_only=False, show_deleted=False, request_id=None,
                 instance_uuid=None):
        self.auth_token = auth_token
        self.user = user
        self.tenant = tenant
        self.domain = domain
        self.user_domain = user_domain
        self.project_domain = project_domain
        self.is_admin = is_admin
        self.read_only = read_only
        self.show_deleted = show_deleted
        self.instance_uuid = instance_uuid
        # Generate an identifier lazily when the caller did not supply one.
        self.request_id = request_id or generate_request_id()

    def to_dict(self):
        """Return the context as a plain serializable dict."""
        identity = self.user_idt_format.format(
            user=self.user or '-',
            tenant=self.tenant or '-',
            domain=self.domain or '-',
            user_domain=self.user_domain or '-',
            p_domain=self.project_domain or '-')
        return {
            'user': self.user,
            'tenant': self.tenant,
            'domain': self.domain,
            'user_domain': self.user_domain,
            'project_domain': self.project_domain,
            'is_admin': self.is_admin,
            'read_only': self.read_only,
            'show_deleted': self.show_deleted,
            'auth_token': self.auth_token,
            'request_id': self.request_id,
            'instance_uuid': self.instance_uuid,
            'user_identity': identity,
        }
def get_admin_context(show_deleted=False):
    """Create an administrative context with no associated user or tenant.

    :param show_deleted: whether soft-deleted records should be visible
    """
    return RequestContext(None,
                          tenant=None,
                          is_admin=True,
                          show_deleted=show_deleted)
def get_context_from_function_and_args(function, args, kwargs):
    """Return the first RequestContext found among kwargs values and args.

    Useful in decorators that know little about the function they wrap.
    The *function* argument is accepted for interface compatibility but is
    not inspected. Returns None when no context is present.
    """
    candidates = itertools.chain(kwargs.values(), args)
    return next((arg for arg in candidates
                 if isinstance(arg, RequestContext)), None)
|
jlyoung/stackoverflow_answers | refs/heads/master | transposesparsematrix/transposesparsematrix.py | 1 | data='''10 1 x_time
10 2 x_time
9 3 x_time
2 15 x_time
7 16 x_time
10 18 x_time
3 25 x_time
5 31 x_time
2 35 x_time
4 1 t_msg
3 5 t_msg
5 9 t_msg
8 10 t_msg
4 90 t_msg
8 4 g_up
3 5 g_up
3 56 g_up'''
matrix = {}
for line in data.splitlines():
rank, day, parameter = line.split()
if parameter not in matrix.keys():
matrix[parameter] = {}
daydict = {day: rank}
matrix[parameter].update(daydict)
print '\t{}'.format('\t'.join(['day'+str(i) for i in range(1,91)]))
for parameter in matrix:
colvals = [matrix[parameter].get(str(i),'0') for i in range(1, 91)]
print '{}\t{}'.format(parameter, '\t'.join(colvals)) |
ApuliaSoftware/odoo | refs/heads/8.0 | addons/auth_ldap/__init__.py | 442 | ##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import users_ldap
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
ityaptin/ceilometer | refs/heads/master | ceilometer/tests/unit/dispatcher/test_file.py | 1 | #
# Copyright 2013 IBM Corp
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging.handlers
import os
import tempfile
from oslo_config import fixture as fixture_config
from oslotest import base
from ceilometer.dispatcher import file
from ceilometer.publisher import utils
class TestDispatcherFile(base.BaseTestCase):
    """Tests for the file dispatcher's logging-handler setup."""

    def setUp(self):
        super(TestDispatcherFile, self).setUp()
        self.CONF = self.useFixture(fixture_config.Config()).conf

    @staticmethod
    def _temp_file_name():
        # Create and immediately close a named temporary file so that only
        # its (now unused) path remains for the dispatcher to write to.
        with tempfile.NamedTemporaryFile('r') as tmp:
            return tmp.name

    def _signed_sample(self):
        # A minimal metering message carrying a valid signature.
        msg = {'counter_name': 'test',
               'resource_id': self.id(),
               'counter_volume': 1,
               }
        msg['message_signature'] = utils.compute_signature(
            msg, self.CONF.publisher.telemetry_secret,
        )
        return msg

    def test_file_dispatcher_with_all_config(self):
        self.CONF.dispatcher_file.file_path = self._temp_file_name()
        self.CONF.dispatcher_file.max_bytes = 50
        self.CONF.dispatcher_file.backup_count = 5
        dispatcher = file.FileDispatcher(self.CONF)

        # Exactly one handler must be installed, and with rotation options
        # set it must be a RotatingFileHandler.
        self.assertEqual(1, len(dispatcher.log.handlers))
        handler = dispatcher.log.handlers[0]
        self.assertIsInstance(handler,
                              logging.handlers.RotatingFileHandler)

        # Recording a sample must not raise and must create the log file.
        dispatcher.record_metering_data(self._signed_sample())
        self.assertTrue(os.path.exists(handler.baseFilename))

    def test_file_dispatcher_with_path_only(self):
        self.CONF.dispatcher_file.file_path = self._temp_file_name()
        self.CONF.dispatcher_file.max_bytes = 0
        self.CONF.dispatcher_file.backup_count = 0
        dispatcher = file.FileDispatcher(self.CONF)

        # With rotation disabled a plain FileHandler is expected.
        self.assertEqual(1, len(dispatcher.log.handlers))
        handler = dispatcher.log.handlers[0]
        self.assertIsInstance(handler,
                              logging.FileHandler)

        dispatcher.record_metering_data(self._signed_sample())
        self.assertTrue(os.path.exists(handler.baseFilename))

    def test_file_dispatcher_with_no_path(self):
        # Without a file path no logger should be configured at all.
        self.CONF.dispatcher_file.file_path = None
        dispatcher = file.FileDispatcher(self.CONF)
        self.assertIsNone(dispatcher.log)
|
jkoelker/ryu | refs/heads/master | ryu/lib/ovs/db_client.py | 31 | # Copyright (C) 2012 Nippon Telegraph and Telephone Corporation.
# Copyright (C) 2012 Isaku Yamahata <yamahata at private email ne jp>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
from ovs import (jsonrpc,
stream)
from ovs import util as ovs_util
from ovs.db import schema
LOG = logging.getLogger(__name__)
class DBClient(object):
    """Minimal OVSDB client speaking JSON-RPC over an OVS stream.

    Supports a small set of read-only commands (list-dbs, get-schema,
    get-schema-version, list-tables, list-columns); transact, monitor and
    dump are declared but not implemented.
    """

    def __init__(self, remote):
        super(DBClient, self).__init__()
        # Remote OVSDB endpoint, e.g. "unix:/var/run/openvswitch/db.sock".
        self.remote = remote

    def run_command(self, args):
        """Connect to the remote, run ``args[0]`` with the remaining items
        as its arguments, log the result and close the connection.

        :param args: [command, arg1, arg2, ...]
        :raises RuntimeError: if the socket cannot be opened
        :raises KeyError: if the command is unknown
        """
        _COMMANDS = {
            'list-dbs': self._list_dbs,
            'get-schema': self._get_schema,
            'get-schema-version': self._get_schema_version,
            'list-tables': self._list_tables,
            'list-columns': self._list_columns,
            'transact': self._transact,
            'monitor': self._monitor,
            'dump': self._dump,
        }
        command = args[0]
        args = args[1:]

        error, stream_ = stream.Stream.open_block(
            stream.Stream.open(self.remote))
        if error:
            # Bug fix: the original constructed the RuntimeError without
            # raising it, then executed a bare `raise` with no active
            # exception (itself a runtime error).
            raise RuntimeError('can not open socket to %s: %s' %
                               (self.remote, os.strerror(error)))
        rpc = jsonrpc.Connection(stream_)
        # Robustness: close the connection even if the command fails.
        try:
            ret = _COMMANDS[command](rpc, *args)
            LOG.info('ret %s', ret)
        finally:
            rpc.close()

    def _check_txn(self, error, reply):
        # Abort on transport or application-level errors; ovs_fatal logs
        # the message and exits the process.
        if error:
            ovs_util.ovs_fatal(error, os.strerror(error))
        elif reply.error:
            ovs_util.ovs_fatal(reply.error, 'error %s' % reply.error)

    def _fetch_dbs(self, rpc):
        """Return the set of database names available on the remote."""
        request = jsonrpc.Message.create_request('list_dbs', [])
        error, reply = rpc.transact_block(request)
        self._check_txn(error, reply)
        return set(reply.result)

    def _fetch_schema_json(self, rpc, database):
        """Return the raw (JSON) schema of *database*."""
        request = jsonrpc.Message.create_request('get_schema', [database])
        error, reply = rpc.transact_block(request)
        self._check_txn(error, reply)
        return reply.result

    def _fetch_schema(self, rpc, database):
        """Return the parsed DbSchema of *database*."""
        return schema.DbSchema.from_json(self._fetch_schema_json(rpc,
                                                                 database))

    # commands
    def _list_dbs(self, rpc, *_args):
        return self._fetch_dbs(rpc)

    def _get_schema(self, rpc, *args):
        return self._fetch_schema(rpc, args[0]).to_json()

    def _get_schema_version(self, rpc, *_args):
        return self._fetch_schema(rpc, _args[0]).version

    def _list_tables(self, rpc, *args):
        schema_ = self._fetch_schema(rpc, args[0])
        return [table.to_json() for table in schema_.tables.values()]

    def _list_columns(self, rpc, *args):
        """List the columns of one table, or of every table when no table
        name is given."""
        database = args[0]
        table_name = args[1] if len(args) > 1 else None
        schema_ = self._fetch_schema(rpc, database)
        if table_name is None:
            tables = list(schema_.tables.values())
        else:
            tables = [table for table in schema_.tables.values()
                      if table.name == table_name]
        columns = []
        for table in tables:
            columns.extend(table.columns.values())
        return [column.to_json() for column in columns]

    def _transact(self, rpc, *args):
        raise NotImplementedError()

    def _monitor(self, rpc, *args):
        raise NotImplementedError()

    def _dump(self, rpc, *args):
        raise NotImplementedError()
|
claudep/pootle | refs/heads/master | pootle/apps/pootle_statistics/migrations/0001_initial.py | 7 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
    """Initial schema for pootle_statistics.

    Creates the Submission model (mapped onto the legacy
    ``pootle_app_submission`` table) and the ScoreLog model with a
    uniqueness constraint on (submission, action_code).
    """

    dependencies = [
        ('pootle_translationproject', '0001_initial'),
        ('pootle_store', '0001_initial'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        # One row per change made to a translation unit (edit, suggestion
        # review, quality-check toggle, ...).
        migrations.CreateModel(
            name='Submission',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('creation_time', models.DateTimeField(db_index=True)),
                ('field', models.IntegerField(db_index=True, null=True, blank=True)),
                ('type', models.IntegerField(db_index=True, null=True, blank=True)),
                ('old_value', models.TextField(default='', blank=True)),
                ('new_value', models.TextField(default='', blank=True)),
                ('similarity', models.FloatField(null=True, blank=True)),
                ('mt_similarity', models.FloatField(null=True, blank=True)),
                ('quality_check', models.ForeignKey(blank=True, to='pootle_store.QualityCheck', null=True, on_delete=models.CASCADE)),
                ('store', models.ForeignKey(blank=True, to='pootle_store.Store', null=True, on_delete=models.CASCADE)),
                ('submitter', models.ForeignKey(to=settings.AUTH_USER_MODEL, null=True, on_delete=models.CASCADE)),
                ('suggestion', models.ForeignKey(blank=True, to='pootle_store.Suggestion', null=True, on_delete=models.CASCADE)),
                ('translation_project', models.ForeignKey(to='pootle_translationproject.TranslationProject', on_delete=models.CASCADE)),
                ('unit', models.ForeignKey(blank=True, to='pootle_store.Unit', null=True, on_delete=models.CASCADE)),
            ],
            options={
                'ordering': ['creation_time'],
                'db_table': 'pootle_app_submission',
                'get_latest_by': 'creation_time',
            },
            bases=(models.Model,),
        ),
        # Per-user scoring record derived from a submission.
        migrations.CreateModel(
            name='ScoreLog',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('creation_time', models.DateTimeField(db_index=True)),
                ('rate', models.FloatField(default=0)),
                ('review_rate', models.FloatField(default=0)),
                ('wordcount', models.PositiveIntegerField()),
                ('similarity', models.FloatField()),
                ('score_delta', models.FloatField()),
                ('action_code', models.IntegerField()),
                ('submission', models.ForeignKey(to='pootle_statistics.Submission', on_delete=models.CASCADE)),
                ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        # A submission may contribute at most one score entry per action.
        migrations.AlterUniqueTogether(
            name='scorelog',
            unique_together=set([('submission', 'action_code')]),
        ),
    ]
|
oryxr/dxf2gcode | refs/heads/master | Gui/TreeHandling.py | 2 | # -*- coding: utf-8 -*-
############################################################################
#
# Copyright (C) 2012-2014
# Xavier Izard
# Jean-Paul Schouwstra
#
# This file is part of DXF2GCODE.
#
# DXF2GCODE is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# DXF2GCODE is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with DXF2GCODE. If not, see <http://www.gnu.org/licenses/>.
#
############################################################################
"""
This class is intended to deal with the drawing (.dxf) structure.
It has the following functions:
- populate the entities treeView and the layers treeView
- allow selection of shapes from any treeView and show the
selection on the graphic view
- allow to enable/disable shapes from any treeView
- reflects into the treeView the changes that occurs on the graphic view
- set export order using drag & drop
@purpose: display tree structure of the .dxf file, select,
enable and set export order of the shapes
@author: Xavier Izard
@since: 2012.10.01
@license: GPL
"""
from PyQt4 import QtCore, QtGui
from Gui.myTreeView import MyStandardItemModel
from math import degrees
import Core.Globals as g
from Core.CustomGCode import CustomGCodeClass
import logging
logger = logging.getLogger("Gui.TreeHandling")

# Arbitrary Qt item-data roles used to tag the objects stored in the
# treeViews. They let us recognize which kind of data (entity, layer,
# shape, custom gcode) a tree element received from a click() event holds.
ENTITY_OBJECT = QtCore.Qt.UserRole + 1 #For storing refs to the entities elements (entities_list)
LAYER_OBJECT = QtCore.Qt.UserRole + 2 #For storing refs to the layers elements (layers_list)
SHAPE_OBJECT = QtCore.Qt.UserRole + 3 #For storing refs to the shape elements (entities_list & layers_list)
CUSTOM_GCODE_OBJECT = QtCore.Qt.UserRole + 4 #For storing refs to the custom gcode elements (layers_list)

PATH_OPTIMISATION_COL = 3 #Column that holds the TSP (path optimisation) enable checkbox
class TreeHandler(QtGui.QWidget):
"""
Class to handle both QTreeView : entitiesTreeView (for blocks, and the tree of blocks) and layersShapesTreeView (for layers and shapes)
"""
    def __init__(self, ui):
        """
        Standard initialization: wires the GUI widgets (tree views, tool
        parameter fields, buttons and the contextual menu) to this handler.
        @param ui: the QT4 GUI
        """
        QtGui.QWidget.__init__(self)
        self.ui = ui

        #Used to store previous values in order to enable/disable text
        self.palette = self.ui.zRetractionArealLineEdit.palette()
        self.clearToolsParameters()

        #Layers & Shapes TreeView: selection/keypress callbacks and
        #multi-row selection behavior.
        self.layer_item_model = None
        self.layers_list = None
        self.auto_update_export_order = False
        self.ui.layersShapesTreeView.setSelectionCallback(self.actionOnSelectionChange) #pass the callback function to the QTreeView
        self.ui.layersShapesTreeView.setKeyPressEventCallback(self.actionOnKeyPress)
        self.ui.layersShapesTreeView.setSelectionMode(QtGui.QAbstractItemView.ExtendedSelection)
        self.ui.layersShapesTreeView.setSelectionBehavior(QtGui.QAbstractItemView.SelectRows)

        #Buttons that move the current item in the export order.
        QtCore.QObject.connect(self.ui.layersGoUpPushButton,
                               QtCore.SIGNAL("clicked()"),
                               self.ui.layersShapesTreeView.moveUpCurrentItem)
        QtCore.QObject.connect(self.ui.layersGoDownPushButton,
                               QtCore.SIGNAL("clicked()"),
                               self.ui.layersShapesTreeView.moveDownCurrentItem)

        #Load the tools from the config file to the tool selection combobox
        for tool in g.config.vars.Tool_Parameters:
            self.ui.toolDiameterComboBox.addItem(tool)

        #Select the first tool in the list and update the tools diameter, ... accordingly
        self.ui.toolDiameterComboBox.setCurrentIndex(0)
        self.toolUpdate(self.ui.toolDiameterComboBox.currentText())

        #Propagate edits of the tool parameter fields to the selection.
        QtCore.QObject.connect(self.ui.toolDiameterComboBox,
                               QtCore.SIGNAL("activated(const QString &)"),
                               self.toolUpdate)
        QtCore.QObject.connect(self.ui.zRetractionArealLineEdit,
                               QtCore.SIGNAL("textEdited(const QString &)"),
                               self.toolParameterzRetractionArealUpdate)
        QtCore.QObject.connect(self.ui.zSafetyMarginLineEdit,
                               QtCore.SIGNAL("textEdited(const QString &)"),
                               self.toolParameterzSafetyMarginUpdate)
        QtCore.QObject.connect(self.ui.zInfeedDepthLineEdit,
                               QtCore.SIGNAL("textEdited(const QString &)"),
                               self.toolParameterzInfeedDepthUpdate)
        QtCore.QObject.connect(self.ui.zInitialMillDepthLineEdit,
                               QtCore.SIGNAL("textEdited(const QString &)"),
                               self.toolParameterzInitialMillDepthUpdate)
        QtCore.QObject.connect(self.ui.zFinalMillDepthLineEdit,
                               QtCore.SIGNAL("textEdited(const QString &)"),
                               self.toolParameterzFinalMillDepthUpdate)
        QtCore.QObject.connect(self.ui.g1FeedXYLineEdit,
                               QtCore.SIGNAL("textEdited(const QString &)"),
                               self.toolParameterg1FeedXYUpdate)
        QtCore.QObject.connect(self.ui.g1FeedZLineEdit,
                               QtCore.SIGNAL("textEdited(const QString &)"),
                               self.toolParameterg1FeedZUpdate)

        #Entities TreeView: same callbacks and selection behavior as above.
        self.entity_item_model = None
        self.entities_list = None
        self.ui.entitiesTreeView.setSelectionCallback(self.actionOnSelectionChange) #pass the callback function to the QTreeView
        self.ui.entitiesTreeView.setKeyPressEventCallback(self.actionOnKeyPress)
        self.ui.entitiesTreeView.setSelectionMode(QtGui.QAbstractItemView.ExtendedSelection)
        self.ui.entitiesTreeView.setSelectionBehavior(QtGui.QAbstractItemView.SelectRows)

        QtCore.QObject.connect(self.ui.blocksCollapsePushButton,
                               QtCore.SIGNAL("clicked()"),
                               self.expandToDepth0)
        QtCore.QObject.connect(self.ui.blocksExpandPushButton,
                               QtCore.SIGNAL("clicked()"),
                               self.ui.entitiesTreeView.expandAll)

        #Build the contextual menu (mouse right click)
        self.context_menu = QtGui.QMenu(self)

        menu_action = self.context_menu.addAction("Unselect all")
        menu_action.triggered.connect(self.ui.layersShapesTreeView.clearSelection)

        menu_action = self.context_menu.addAction("Select all")
        menu_action.triggered.connect(self.ui.layersShapesTreeView.selectAll)

        self.context_menu.addSeparator()

        menu_action = self.context_menu.addAction("Disable selection")
        menu_action.triggered.connect(self.disableSelectedItems)

        menu_action = self.context_menu.addAction("Enable selection")
        menu_action.triggered.connect(self.enableSelectedItems)

        self.context_menu.addSeparator()

        menu_action = self.context_menu.addAction("Don't opti. route for selection")
        menu_action.triggered.connect(self.doNotOptimizeRouteForSelectedItems)

        menu_action = self.context_menu.addAction("Optimize route for selection")
        menu_action.triggered.connect(self.optimizeRouteForSelectedItems)

        self.context_menu.addSeparator()

        menu_action = self.context_menu.addAction("Remove custom GCode")
        menu_action.triggered.connect(self.removeCustomGCode)

        #One sub-menu entry per custom action defined in the config file.
        sub_menu = QtGui.QMenu("Add custom GCode ...", self)
        for custom_action in g.config.vars.Custom_Actions:
            menu_action = sub_menu.addAction(QtCore.QString(custom_action).replace('_', ' '))
            menu_action.setData(custom_action) #save the exact name of the action, as it is defined in the config file. We will use it later to identify the action

        self.context_menu.addMenu(sub_menu)

        #Right click menu
        self.ui.layersShapesTreeView.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
        QtCore.QObject.connect(self.ui.layersShapesTreeView,
                               QtCore.SIGNAL("customContextMenuRequested(const QPoint &)"),
                               self.displayContextMenu)

        #Not used for now, so hide them
        self.ui.startAtXLabel.hide()
        self.ui.startAtYLabel.hide()
        self.ui.unitLabel_1.hide()
        self.ui.unitLabel_2.hide()
        self.ui.startAtXLineEdit.hide()
        self.ui.startAtYLineEdit.hide()
def displayContextMenu(self, position):
"""
Function used to display a right click context menu
@param position: position of the cursor within the treeView widget
"""
selected_action = self.context_menu.exec_(self.ui.layersShapesTreeView.mapToGlobal(position))
if selected_action and selected_action.data().isValid():
#contextual menu selection concerns a custom gcode
custom_gcode_name = selected_action.data().toString()
self.addCustomGCodeAfter(custom_gcode_name)
    def expandToDepth0(self):
        """
        Slot used to collapse the entities treeView down to depth 0
        (only the top-level blocks stay expanded).
        """
        self.ui.entitiesTreeView.expandToDepth(0)
    def buildLayerTree(self, layers_list):
        """
        Populate the Layers QTreeView with all the elements contained in
        layers_list. Must be called each time a new .dxf file is loaded.
        @param layers_list: list of the layers and shapes (created in the main)
        """
        self.layers_list = layers_list
        if self.layer_item_model:
            self.layer_item_model.clear() #Remove any existing item_model
        self.layer_item_model = MyStandardItemModel() #This is the model view (QStandardItemModel). it's the container for the data
        self.layer_item_model.setSupportedDragActions(QtCore.Qt.MoveAction)
        self.layer_item_model.setHorizontalHeaderItem(0, QtGui.QStandardItem("[en]"))
        self.layer_item_model.setHorizontalHeaderItem(1, QtGui.QStandardItem("Name"))
        self.layer_item_model.setHorizontalHeaderItem(2, QtGui.QStandardItem("Nr"))
        self.layer_item_model.setHorizontalHeaderItem(3, QtGui.QStandardItem("Optimal path"))
        modele_root_element = self.layer_item_model.invisibleRootItem() #Root element of our tree
        #One top-level row per layer: enable checkbox + icon, name, and two
        #columns only used by the child shape rows.
        for layer in layers_list:
            icon = QtGui.QIcon()
            icon.addPixmap(QtGui.QPixmap(":/images/layer.png"))
            checkbox_element = QtGui.QStandardItem(icon, "")
            checkbox_element.setFlags(QtCore.Qt.ItemIsDragEnabled | QtCore.Qt.ItemIsDropEnabled | QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsUserCheckable | QtCore.Qt.ItemIsSelectable)
            checkbox_element.setData(QtCore.QVariant(layer), LAYER_OBJECT) #store a ref to the layer in our treeView element - this is a method to map tree elements with real data
            if layer.should_ignore():
                checkbox_element.setCheckState(QtCore.Qt.Unchecked)
            else:
                checkbox_element.setCheckState(QtCore.Qt.Checked)
            modele_element = QtGui.QStandardItem(layer.LayerName)
            modele_element.setFlags(QtCore.Qt.ItemIsDragEnabled | QtCore.Qt.ItemIsDropEnabled | QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable)
            nbr_element = QtGui.QStandardItem()
            nbr_element.setFlags(QtCore.Qt.ItemIsEnabled)
            optimise_element = QtGui.QStandardItem()
            optimise_element.setFlags(QtCore.Qt.ItemIsEnabled)
            modele_root_element.appendRow([checkbox_element, modele_element, nbr_element, optimise_element])
            #One child row per shape of the layer.
            for shape in layer.shapes:
                icon = QtGui.QIcon()
                icon.addPixmap(QtGui.QPixmap(":/images/shape.png"))
                item_col_0 = QtGui.QStandardItem(icon, "") #will only display a checkbox + an icon that will never be disabled
                item_col_0.setData(QtCore.QVariant(shape), SHAPE_OBJECT) #store a ref to the shape in our treeView element
                item_col_0.setFlags(QtCore.Qt.ItemIsDragEnabled | QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsUserCheckable | QtCore.Qt.ItemIsSelectable)
                item_col_1 = QtGui.QStandardItem(shape.type)
                item_col_1.setFlags(QtCore.Qt.ItemIsDragEnabled | QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable)
                item_col_2 = QtGui.QStandardItem(str(shape.nr))
                item_col_2.setFlags(QtCore.Qt.ItemIsDragEnabled | QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable)
                item_col_3 = QtGui.QStandardItem()
                item_col_3.setFlags(QtCore.Qt.ItemIsDragEnabled | QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsUserCheckable | QtCore.Qt.ItemIsSelectable)
                parent_item = modele_root_element.child(modele_root_element.rowCount() - 1, 0)
                parent_item.appendRow([item_col_0, item_col_1, item_col_2, item_col_3])
                #Deal with the checkboxes (shape enabled or disabled / send shape to TSP optimizer)
                item_col_0.setCheckState(QtCore.Qt.Unchecked if shape.isDisabled() else QtCore.Qt.Checked)
                item_col_3.setCheckState(QtCore.Qt.Checked if shape.isToolPathOptimized() else QtCore.Qt.Unchecked)
        #Signal to get events when a checkbox state changes (enable or disable shapes)
        QtCore.QObject.connect(self.layer_item_model, QtCore.SIGNAL("itemChanged(QStandardItem*)"), self.on_itemChanged)
        self.ui.layersShapesTreeView.setModel(self.layer_item_model) #Affect our model to the GUI TreeView, in order to display it
        self.ui.layersShapesTreeView.expandAll()
        #Enable drag & drop so the export order can be rearranged.
        self.ui.layersShapesTreeView.setDragDropMode(QtGui.QTreeView.InternalMove)
        #self.ui.layersShapesTreeView.setDefaultDropAction(QtCore.Qt.MoveAction)
        #self.ui.layersShapesTreeView.setDragDropOverwriteMode(True)
        self.ui.layersShapesTreeView.setDropIndicatorShown(True)
        self.ui.layersShapesTreeView.setAcceptDrops(True)
        self.ui.layersShapesTreeView.setDragEnabled(True)
        self.ui.layersShapesTreeView.resizeColumnToContents(3)
        self.ui.layersShapesTreeView.resizeColumnToContents(2)
        self.ui.layersShapesTreeView.resizeColumnToContents(1)
        self.ui.layersShapesTreeView.resizeColumnToContents(0)
def buildEntitiesTree(self, entities_list):
"""
This method populates the Entities (blocks) QTreeView with
all the elements contained in the entities_list
Method must be called each time a new .dxf file is loaded.
options
@param entities_list: list of the layers and shapes (created in the main)
"""
self.entities_list = entities_list
if self.entity_item_model:
self.entity_item_model.clear() #Remove any existing item_model
self.entity_item_model = QtGui.QStandardItemModel()
self.entity_item_model.setHorizontalHeaderItem(0, QtGui.QStandardItem("[en]"))
self.entity_item_model.setHorizontalHeaderItem(1, QtGui.QStandardItem("Name"))
self.entity_item_model.setHorizontalHeaderItem(2, QtGui.QStandardItem("Nr"))
self.entity_item_model.setHorizontalHeaderItem(3, QtGui.QStandardItem("Type"))
self.entity_item_model.setHorizontalHeaderItem(4, QtGui.QStandardItem("Base point"))
self.entity_item_model.setHorizontalHeaderItem(5, QtGui.QStandardItem("Scale"))
self.entity_item_model.setHorizontalHeaderItem(6, QtGui.QStandardItem("Rotation"))
modele_root_element = self.entity_item_model.invisibleRootItem()
self.buildEntitiesSubTree(modele_root_element, entities_list)
#Signal to get events when a checkbox state changes (enable or disable shapes)
QtCore.QObject.connect(self.entity_item_model,
QtCore.SIGNAL("itemChanged(QStandardItem*)"),
self.on_itemChanged)
self.ui.entitiesTreeView.setModel(self.entity_item_model)
self.ui.entitiesTreeView.expandToDepth(0)
i = 0
while(i < 6):
self.ui.entitiesTreeView.resizeColumnToContents(i)
i += 1
def buildEntitiesSubTree(self, elements_model, elements_list):
"""
This method is called (possibly recursively) to populate the
Entities treeView. It is not intended to be called directly,
use buildEntitiesTree() function instead.
options
@param elements_model: the treeView model (used to store the data, see QT docs)
@param elements_list: either a list of entities, or a shape
@return (containsChecked, containsUnchecked) indicating whether the subtree contains checked and/or unchecked elements
"""
containsChecked = False
containsUnchecked = False
if isinstance(elements_list, list):
#We got a list
for element in elements_list:
(checked, unchecked) = self.addEntitySubTree(elements_model, element)
containsChecked = containsChecked or checked
containsUnchecked = containsUnchecked or unchecked
else:
#Unique element (shape)
element = elements_list
(containsChecked, containsUnchecked) = self.addEntitySubTree(elements_model, element)
return (containsChecked, containsUnchecked)
def addEntitySubTree(self, elements_model, element):
    """
    This method populates a row of the Entities treeView. It is
    not intended to be called directly, use buildEntitiesTree()
    function instead.
    options
    @param elements_model: the treeView model (used to store the data, see QT docs)
    @param element: the Entity or Shape element
    @return (containsChecked, containsUnchecked) indicating whether the subtree contains checked and/or unchecked elements
    """
    containsChecked = False
    containsUnchecked = False
    item_col_0 = None
    if element.type == "Entitie":
        icon = QtGui.QIcon()
        icon.addPixmap(QtGui.QPixmap(":/images/blocks.png"))
        item_col_0 = QtGui.QStandardItem(icon, "") #will only display a checkbox + an icon that will never be disabled
        item_col_0.setData(QtCore.QVariant(element), ENTITY_OBJECT) #store a ref to the entity in our treeView element
        item_col_1 = QtGui.QStandardItem(element.Name)
        item_col_2 = QtGui.QStandardItem(str(element.Nr))
        item_col_3 = QtGui.QStandardItem(element.type)
        item_col_4 = QtGui.QStandardItem(str(element.p0))
        item_col_5 = QtGui.QStandardItem(str(element.sca))
        item_col_6 = QtGui.QStandardItem(str(round(degrees(element.rot), 3))) #convert the angle into degrees with 3 digit after the decimal point
        elements_model.appendRow([item_col_0, item_col_1, item_col_2, item_col_3, item_col_4, item_col_5, item_col_6])
        #Recurse into the children; their checkbox states decide this row's tristate
        for sub_element in element.children:
            (checked, unchecked) = self.buildEntitiesSubTree(item_col_0, sub_element)
            containsChecked = containsChecked or checked
            containsUnchecked = containsUnchecked or unchecked
    elif element.type == "Shape":
        icon = QtGui.QIcon()
        icon.addPixmap(QtGui.QPixmap(":/images/shape.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        item_col_0 = QtGui.QStandardItem(icon, "") #will only display a checkbox + an icon that will never be disabled
        item_col_0.setData(QtCore.QVariant(element), SHAPE_OBJECT) #store a ref to the entity in our treeView element
        #A shape leaf contributes its own enabled/disabled state
        if element.isDisabled():
            containsUnchecked = True
        else:
            containsChecked = True
        item_col_1 = QtGui.QStandardItem(element.type)
        item_col_2 = QtGui.QStandardItem(str(element.nr))
        item_col_3 = QtGui.QStandardItem(element.type)
        item_col_4 = QtGui.QStandardItem()
        item_col_5 = QtGui.QStandardItem()
        item_col_6 = QtGui.QStandardItem()
        elements_model.appendRow([item_col_0, item_col_1, item_col_2, item_col_3, item_col_4, item_col_5, item_col_6])
    #Column 0 carries the (possibly tristate) checkbox; all columns are read-only except the checkbox
    item_col_0.setCheckState(self.getCheckState(containsChecked, containsUnchecked))
    item_col_0.setFlags(QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsUserCheckable | QtCore.Qt.ItemIsSelectable)
    item_col_1.setFlags(QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable)
    item_col_2.setFlags(QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable)
    item_col_3.setFlags(QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable)
    item_col_4.setFlags(QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable)
    item_col_5.setFlags(QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable)
    item_col_6.setFlags(QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable)
    return (containsChecked, containsUnchecked)
def getCheckState(self, containsChecked, containsUnchecked):
    """
    Map the (checked, unchecked) presence flags of a subtree to the
    Qt check state of its parent checkbox.
    @param containsChecked: subtree holds at least one checked element
    @param containsUnchecked: subtree holds at least one unchecked element
    @return: QtCore.Qt.Checked / PartiallyChecked / Unchecked
    """
    if not containsChecked:
        return QtCore.Qt.Unchecked
    #Mixed children => tristate "partially checked"
    return QtCore.Qt.PartiallyChecked if containsUnchecked else QtCore.Qt.Checked
def updateExportOrder(self):
    """
    Update the layers_list order to reflect the TreeView order.
    This function must be called before generating the GCode
    (export function). Export will be performed in the order of the
    structure self.LayerContents of the main. Each layer contains
    some shapes, and the export order of the shapes is set by
    populating the exp_order[] list with the shapes reference number
    for each layer (eg exp_order = [5, 3, 2, 4, 0, 1] for layer 0,
    exp_order = [5, 3, 7] for layer 1, ...)
    options
    """
    #Walk the layer rows bottom-up: inserting each layer at position 0
    #of layers_list reproduces the treeView's top-down order.
    i = self.layer_item_model.rowCount(QtCore.QModelIndex())
    while i > 0:
        i -= 1
        layer_item_index = self.layer_item_model.index(i, 0)
        if layer_item_index.data(LAYER_OBJECT).isValid():
            real_layer = layer_item_index.data(LAYER_OBJECT).toPyObject()
            self.layers_list.remove(real_layer) #Remove the layer from its original position
            self.layers_list.insert(0, real_layer) #and insert it at the beginning of the layer's list
            real_layer.exp_order = [] #Clear the current export order
            real_layer.exp_order_complete = [] #Clear the current export order
            #Assign the export order for the shapes of the layer "real_layer"
            j = 0
            while j < self.layer_item_model.rowCount(layer_item_index):
                shape_item_index = self.layer_item_model.index(j, 0, layer_item_index)
                real_shape = None
                if shape_item_index.data(SHAPE_OBJECT).isValid():
                    real_shape = shape_item_index.data(SHAPE_OBJECT).toPyObject()
                    if real_shape.isDisabled() is False:
                        real_layer.exp_order.append(real_shape.nr) #Create the export order list with the real and unique shapes numbers (eg [25, 22, 30, 4, 1, 5])
                #Custom GCode rows carry their payload under a different role
                if shape_item_index.data(CUSTOM_GCODE_OBJECT).isValid():
                    real_shape = shape_item_index.data(CUSTOM_GCODE_OBJECT).toPyObject()
                #exp_order_complete includes shapes AND custom GCode, indexed
                #by their position inside real_layer.shapes
                if real_shape and real_shape.isDisabled() is False:
                    real_layer.exp_order_complete.append(real_layer.shapes.index(real_shape)) #Create the export order list with the shapes & custom gcode numbers (eg [5, 3, 2, 4, 0, 1])
                j += 1
def updateTreeViewOrder(self):
    """
    Update the Layer TreeView order according to the exp_order list
    of each layer. This function should be called after running the
    TSP path otimizer
    """
    i = self.layer_item_model.rowCount(QtCore.QModelIndex())
    while i > 0:
        i -= 1
        layer_item_index = self.layer_item_model.index(i, 0)
        layer_item = self.layer_item_model.itemFromIndex(layer_item_index)
        if layer_item_index.data(LAYER_OBJECT).isValid():
            real_layer = layer_item_index.data(LAYER_OBJECT).toPyObject()
            #for shape_nr in real_layer.exp_order[::-1]: #reverse order and prepend if we want to insert optimized shape before fixed shapes
            for shape_nr in real_layer.exp_order:
                #Find the row that holds shape "shape_nr" and move it to the
                #end of the layer, so rows end up in exp_order sequence
                j = 0
                while j < self.layer_item_model.rowCount(layer_item_index):
                    shape_item_index = self.layer_item_model.index(j, 0, layer_item_index)
                    real_shape = None
                    if shape_item_index.data(SHAPE_OBJECT).isValid():
                        real_shape = shape_item_index.data(SHAPE_OBJECT).toPyObject()
                    if real_shape and real_shape.nr == shape_nr and (real_shape.send_to_TSP or g.config.vars.Route_Optimisation['TSP_shape_order'] == 'CONSTRAIN_ORDER_ONLY'):
                        #Shape number "shape_nr" found in the treeView and Shape is movable => moving it to its new position
                        item_to_be_moved = layer_item.takeRow(j)
                        layer_item.appendRow(item_to_be_moved)
                        break
                    j += 1
def columnsSelectDeselect(self, selection_model, item_index, select):
    """
    Select or deselect the whole row that contains item_index.
    @param selection_model: the QItemSelectionModel of the treeView
    @param item_index: QModelIndex of the row to change
    @param select: True to select the row, False to deselect it
    """
    if select:
        mode = QtGui.QItemSelectionModel.Select
    else:
        mode = QtGui.QItemSelectionModel.Deselect
    selection_model.select(item_index, mode | QtGui.QItemSelectionModel.Rows)
def updateShapeSelection(self, shape, select):
    """
    This method is a "slot" (callback) called from the main when the
    selection changes on the graphic view. It aims to update the
    treeView selection according to the graphic view.
    Note: in order to avoid signal loops, all selection signals are
    blocked when updating the selections in the treeViews
    options
    @param shape: the Shape whose selection has changed
    @param select: whether the Shape has been selected (True) or unselected (False)
    """
    #Layer treeView
    item_index = self.findLayerItemIndexFromShape(shape)
    selection_model = self.ui.layersShapesTreeView.selectionModel() #Get the selection model of the QTreeView
    if item_index:
        #we found the matching index for the shape in our layers treeView model
        self.ui.layersShapesTreeView.blockSignals(True) #Avoid signal loops (we dont want the treeView to re-emit selectionChanged signal)
        self.columnsSelectDeselect(selection_model, item_index, select)
        self.ui.layersShapesTreeView.blockSignals(False)
    #Entities treeView
    item_index = self.findEntityItemIndexFromShape(shape)
    selection_model = self.ui.entitiesTreeView.selectionModel() #Get the selection model of the QTreeView
    if item_index:
        #we found the matching index for the shape in our entities treeView model
        self.ui.entitiesTreeView.blockSignals(True) #Avoid signal loops (we dont want the treeView to re-emit selectionChanged signal)
        self.columnsSelectDeselect(selection_model, item_index, select)
        self.ui.entitiesTreeView.blockSignals(False)
    #Update the tool parameters fields
    self.clearToolsParameters()
    self.displayToolParametersForItem(shape.LayerContent, shape)
def updateShapeEnabling(self, shape, enable):
    """
    This method is a "slot" (callback) called from the main when the
    shapes are enabled or disabled on the graphic view.
    It aims to update the treeView checkboxes according to the
    graphic view.
    Note: in order to avoid signal loops, all selection signals are
    blocked when updating the checkboxes in the treeViews
    options
    @param shape: the Shape whose enabling has changed
    @param enable: whether the Shape has been enabled (True) or disabled (False)
    """
    #Layer treeView
    item_index = self.findLayerItemIndexFromShape(shape)
    if item_index:
        #we found the matching index for the shape in our treeView model
        item = item_index.model().itemFromIndex(item_index)
        self.layer_item_model.blockSignals(True) #Avoid signal loops (we dont want the treeView to emit itemChanged signal)
        if enable:
            #Select the matching shape in the list
            self.updateCheckboxOfItem(item, QtCore.Qt.Checked)
        else:
            #Unselect the matching shape in the list
            self.updateCheckboxOfItem(item, QtCore.Qt.Unchecked)
        self.layer_item_model.blockSignals(False)
        self.ui.layersShapesTreeView.update(item_index) #update the treeList drawing
        self.traverseParentsAndUpdateEnableDisable(self.layer_item_model, item_index) #update the parents checkboxes
        if self.auto_update_export_order:
            #update export order and thus export drawing
            self.prepareExportOrderUpdate()
    #Entities treeView
    item_index = self.findEntityItemIndexFromShape(shape)
    if item_index:
        #we found the matching index for the shape in our treeView model
        item = item_index.model().itemFromIndex(item_index)
        self.entity_item_model.blockSignals(True) #Avoid signal loops (we dont want the treeView to emit itemChanged signal)
        if enable:
            #Select the matching shape in the list
            self.updateCheckboxOfItem(item, QtCore.Qt.Checked)
        else:
            #Unselect the matching shape in the list
            self.updateCheckboxOfItem(item, QtCore.Qt.Unchecked)
        self.entity_item_model.blockSignals(False)
        self.ui.entitiesTreeView.update(item_index) #update the treeList drawing
        self.traverseParentsAndUpdateEnableDisable(self.entity_item_model, item_index) #update the parents checkboxes
def findLayerItemIndexFromShape(self, shape):
    """
    Locate the layers-treeView model index that wraps the given
    "real" shape (ie a ShapeClass instance).
    @param shape: the real shape (ShapeClass instance)
    @return: the matching QModelIndex, or None when not found
    """
    tree_root = QtCore.QModelIndex()
    return self.traverseChildrenAndFindShape(self.layer_item_model, tree_root, shape)
def findEntityItemIndexFromShape(self, shape):
    """
    Locate the entities-treeView model index that wraps the given
    "real" shape (ie a ShapeClass instance).
    @param shape: the real shape (ShapeClass instance)
    @return: the matching QModelIndex, or None when not found
    """
    tree_root = QtCore.QModelIndex()
    return self.traverseChildrenAndFindShape(self.entity_item_model, tree_root, shape)
def traverseChildrenAndFindShape(self, item_model, item_index, shape):
    """
    Depth-first search used by findLayerItemIndexFromShape() and
    findEntityItemIndexFromShape(): scan every child of item_index
    and return the model index whose stored SHAPE_OBJECT equals the
    given shape.
    @param item_model: the treeView model (used to store the data, see QT docs)
    @param item_index: subtree root (QModelIndex); all of its children are scanned
    @param shape: the real shape (ShapeClass instance)
    @return: the matching QModelIndex, or None when not found
    """
    for row in range(item_model.rowCount(item_index)):
        child_index = item_model.index(row, 0, item_index)
        if child_index.data(SHAPE_OBJECT).isValid():
            if shape == child_index.data(SHAPE_OBJECT).toPyObject():
                return child_index
        #Recurse into nested entities / layers
        if item_model.hasChildren(child_index):
            match = self.traverseChildrenAndFindShape(item_model, child_index, shape)
            if match:
                return match
    return None
def traverseChildrenAndSelect(self, item_model, item_index, itemSelection):
    """
    Recursively add to itemSelection every Shape / Custom GCode row
    below item_index (eg to select all the shapes of a layer when the
    user clicks that layer).
    @param item_model: the treeView model (used to store the data, see QT docs)
    @param item_index: subtree root (QModelIndex); all of its children are scanned
    @param itemSelection: the QItemSelection being accumulated
    """
    for row in range(item_model.rowCount(item_index)):
        child_index = item_model.index(row, 0, item_index)
        #Depth-first: handle grandchildren before this child
        if item_model.hasChildren(child_index):
            self.traverseChildrenAndSelect(item_model, child_index, itemSelection)
        child_item = child_index.model().itemFromIndex(child_index)
        if child_item:
            #Only Shape and Custom GCode rows take part in the selection
            if child_item.data(SHAPE_OBJECT).isValid() or child_item.data(CUSTOM_GCODE_OBJECT).isValid():
                itemSelection.select(child_index, child_index)
def traverseChildrenAndEnableDisable(self, item_model, item_index, checked_state):
    """
    Recursively apply checked_state to every checkbox below
    item_index (eg to enable all shapes of a layer when the user
    enables that layer).
    @param item_model: the treeView model (used to store the data, see QT docs)
    @param item_index: subtree root (QModelIndex); all of its children are scanned
    @param checked_state: the Qt check state to apply
    """
    for row in range(item_model.rowCount(item_index)):
        child_index = item_model.index(row, 0, item_index)
        #Depth-first: update grandchildren before this child
        if item_model.hasChildren(child_index):
            self.traverseChildrenAndEnableDisable(item_model, child_index, checked_state)
        child_item = item_model.itemFromIndex(child_index)
        if child_item:
            self.updateCheckboxOfItem(child_item, checked_state)
def traverseParentsAndUpdateEnableDisable(self, item_model, item_index):
    """
    This code updates the parents checkboxes for a given entity.
    Parents checkboxes are tristate, eg if some of the shapes that
    belong to a layer are checked and others not, then the checkbox
    of this layer will be "half" checked
    options
    @param item_model: the treeView model (used to store the data, see QT docs)
    @param item_index: the initial model index (QModelIndex) in the tree (all children of this index are scanned)
    """
    has_unchecked = False
    has_partially_checked = False
    has_checked = False
    item = None
    parent_item_index = None
    #Scan all the siblings of item_index (ie all children of its parent)
    i = 0
    while i < item_model.rowCount(item_index.parent()):
        parent_item_index = item_model.index(i, 0, item_index.parent())
        item = item_model.itemFromIndex(parent_item_index)
        if item:
            if item.checkState() == QtCore.Qt.Checked:
                has_checked = True
            elif item.checkState() == QtCore.Qt.PartiallyChecked:
                has_partially_checked = True
            else:
                has_unchecked = True
        i += 1
    #Update the parent item according to its children
    if item and item.parent():
        parent_state = item.parent().checkState()
        #Mixed children, or any tristate child => parent is tristate
        if has_checked and has_unchecked or has_partially_checked:
            parent_state = QtCore.Qt.PartiallyChecked
        elif has_checked and not has_unchecked:
            parent_state = QtCore.Qt.Checked
        elif not has_checked and has_unchecked:
            parent_state = QtCore.Qt.Unchecked
        self.updateCheckboxOfItem(item.parent(), parent_state)
    #Handle the parent of the parent (recursive call)
    if parent_item_index and parent_item_index.parent().isValid():
        self.traverseParentsAndUpdateEnableDisable(item_model, parent_item_index.parent())
def toolUpdate(self, text):
    """
    Slot that updates the tool's diameter, speed and start_radius
    when a new tool is selected
    @param text: the name of the newly selected tool (QString)
    """
    if not text.isEmpty():
        #Look up the selected tool's parameters in the global config
        new_diameter = g.config.vars.Tool_Parameters[str(text)]['diameter']
        new_speed = g.config.vars.Tool_Parameters[str(text)]['speed']
        new_start_radius = g.config.vars.Tool_Parameters[str(text)]['start_radius']
        self.ui.toolDiameterComboBox.setPalette(self.palette)
        self.ui.toolDiameterLabel.setText(str(round(new_diameter, 3)))
        self.ui.toolDiameterLabel.setPalette(self.palette) #Restore color
        self.ui.toolSpeedLabel.setText(str(round(new_speed, 1)))
        self.ui.toolSpeedLabel.setPalette(self.palette) #Restore color
        self.ui.startRadiusLabel.setText(str(round(new_start_radius, 3)))
        self.ui.startRadiusLabel.setPalette(self.palette) #Restore color
        #Get the new value and convert it to int
        val = text.toInt()
        if val[1]:
            #Apply the tool to every selected layer (or parent layer of a selected shape)
            selected_indexes_list = self.ui.layersShapesTreeView.selectedIndexes()
            for model_index in selected_indexes_list:
                if model_index.isValid():
                    model_index = model_index.sibling(model_index.row(), 0) #get the first column of the selected row, since it's the only one that contains data
                    element = model_index.model().itemFromIndex(model_index)
                    real_item = None
                    if element.data(SHAPE_OBJECT).isValid():
                        real_item = element.data(SHAPE_OBJECT).toPyObject().LayerContent #Shape has no such property => update the parent layer
                    elif element and element.data(LAYER_OBJECT).isValid():
                        real_item = element.data(LAYER_OBJECT).toPyObject()
                    if not real_item is None:
                        real_item.tool_nr = val[0]
                        real_item.tool_diameter = new_diameter
                        real_item.speed = new_speed
                        real_item.start_radius = new_start_radius
                        #Remember the values for the "mixed selection" greying logic
                        self.tool_nr = real_item.tool_nr
                        self.tool_diameter = new_diameter
                        self.speed = new_speed
                        self.start_radius = new_start_radius
def toolParameterzRetractionArealUpdate(self, text):
    """
    Slot called when the Z retraction areal LineEdit changes; writes
    the new value into every selected layer (or the layer that owns a
    selected shape).
    @param text: the content of the LineEdit (QString)
    """
    self.ui.zRetractionArealLineEdit.setPalette(self.palette) #Restore default color
    value, ok = text.toFloat()
    if not ok:
        return #not a valid float => keep the previous value
    for index in self.ui.layersShapesTreeView.selectedIndexes():
        if not index.isValid():
            continue
        #Only column 0 of a row carries the data
        index = index.sibling(index.row(), 0)
        element = index.model().itemFromIndex(index)
        target = None
        if element.data(SHAPE_OBJECT).isValid():
            #Shapes have no such property => update the parent layer
            target = element.data(SHAPE_OBJECT).toPyObject().LayerContent
        elif element and element.data(LAYER_OBJECT).isValid():
            target = element.data(LAYER_OBJECT).toPyObject()
        if target is not None:
            target.axis3_retract = value
            self.axis3_retract = target.axis3_retract
def toolParameterzSafetyMarginUpdate(self, text):
    """
    Slot called when the Z safety margin LineEdit changes; writes the
    new value into every selected layer (or the layer that owns a
    selected shape).
    @param text: the content of the LineEdit (QString)
    """
    self.ui.zSafetyMarginLineEdit.setPalette(self.palette) #Restore default color
    value, ok = text.toFloat()
    if not ok:
        return #not a valid float => keep the previous value
    for index in self.ui.layersShapesTreeView.selectedIndexes():
        if not index.isValid():
            continue
        #Only column 0 of a row carries the data
        index = index.sibling(index.row(), 0)
        element = index.model().itemFromIndex(index)
        target = None
        if element.data(SHAPE_OBJECT).isValid():
            #Shapes have no such property => update the parent layer
            target = element.data(SHAPE_OBJECT).toPyObject().LayerContent
        elif element and element.data(LAYER_OBJECT).isValid():
            target = element.data(LAYER_OBJECT).toPyObject()
        if target is not None:
            target.axis3_safe_margin = value
            self.axis3_safe_margin = target.axis3_safe_margin
def toolParameterzInfeedDepthUpdate(self, text):
    """
    Slot called when the Z infeed depth LineEdit changes; writes the
    new value into every selected shape or layer.
    @param text: the content of the LineEdit (QString)
    """
    self.ui.zInfeedDepthLineEdit.setPalette(self.palette) #Restore default color
    value, ok = text.toFloat()
    if not ok:
        return #not a valid float => keep the previous value
    for index in self.ui.layersShapesTreeView.selectedIndexes():
        if not index.isValid():
            continue
        #Only column 0 of a row carries the data
        index = index.sibling(index.row(), 0)
        element = index.model().itemFromIndex(index)
        target = None
        if element.data(SHAPE_OBJECT).isValid():
            target = element.data(SHAPE_OBJECT).toPyObject()
        elif element and element.data(LAYER_OBJECT).isValid():
            target = element.data(LAYER_OBJECT).toPyObject()
        if target is not None:
            target.axis3_slice_depth = value
            self.axis3_slice_depth = target.axis3_slice_depth
def toolParameterg1FeedXYUpdate(self, text):
    """
    Slot called when the G1 XY feed rate LineEdit changes; writes the
    new value into every selected shape or layer.
    @param text: the content of the LineEdit (QString)
    """
    self.ui.g1FeedXYLineEdit.setPalette(self.palette) #Restore default color
    value, ok = text.toFloat()
    if not ok:
        return #not a valid float => keep the previous value
    for index in self.ui.layersShapesTreeView.selectedIndexes():
        if not index.isValid():
            continue
        #Only column 0 of a row carries the data
        index = index.sibling(index.row(), 0)
        element = index.model().itemFromIndex(index)
        target = None
        if element.data(SHAPE_OBJECT).isValid():
            target = element.data(SHAPE_OBJECT).toPyObject()
        elif element and element.data(LAYER_OBJECT).isValid():
            target = element.data(LAYER_OBJECT).toPyObject()
        if target is not None:
            target.f_g1_plane = value
            self.f_g1_plane = target.f_g1_plane
def toolParameterg1FeedZUpdate(self, text):
    """
    Slot called when the G1 Z feed rate LineEdit changes; writes the
    new value into every selected shape or layer.
    @param text: the content of the LineEdit (QString)
    """
    self.ui.g1FeedZLineEdit.setPalette(self.palette) #Restore default color
    value, ok = text.toFloat()
    if not ok:
        return #not a valid float => keep the previous value
    for index in self.ui.layersShapesTreeView.selectedIndexes():
        if not index.isValid():
            continue
        #Only column 0 of a row carries the data
        index = index.sibling(index.row(), 0)
        element = index.model().itemFromIndex(index)
        target = None
        if element.data(SHAPE_OBJECT).isValid():
            target = element.data(SHAPE_OBJECT).toPyObject()
        elif element and element.data(LAYER_OBJECT).isValid():
            target = element.data(LAYER_OBJECT).toPyObject()
        if target is not None:
            target.f_g1_depth = value
            self.f_g1_depth = target.f_g1_depth
def toolParameterzInitialMillDepthUpdate(self, text):
    """
    Slot called when the initial mill depth LineEdit changes; writes
    the new value into every selected shape or layer.
    @param text: the content of the LineEdit (QString)
    """
    self.ui.zInitialMillDepthLineEdit.setPalette(self.palette) #Restore default color
    value, ok = text.toFloat()
    if not ok:
        return #not a valid float => keep the previous value
    for index in self.ui.layersShapesTreeView.selectedIndexes():
        if not index.isValid():
            continue
        #Only column 0 of a row carries the data
        index = index.sibling(index.row(), 0)
        element = index.model().itemFromIndex(index)
        target = None
        if element.data(SHAPE_OBJECT).isValid():
            target = element.data(SHAPE_OBJECT).toPyObject()
        elif element and element.data(LAYER_OBJECT).isValid():
            target = element.data(LAYER_OBJECT).toPyObject()
        if target is not None:
            target.axis3_start_mill_depth = value
            self.axis3_start_mill_depth = target.axis3_start_mill_depth
def toolParameterzFinalMillDepthUpdate(self, text):
    """
    Slot called when the final mill depth LineEdit changes; writes
    the new value into every selected shape or layer.
    @param text: the content of the LineEdit (QString)
    """
    self.ui.zFinalMillDepthLineEdit.setPalette(self.palette) #Restore default color
    value, ok = text.toFloat()
    if not ok:
        return #not a valid float => keep the previous value
    for index in self.ui.layersShapesTreeView.selectedIndexes():
        if not index.isValid():
            continue
        #Only column 0 of a row carries the data
        index = index.sibling(index.row(), 0)
        element = index.model().itemFromIndex(index)
        target = None
        if element.data(SHAPE_OBJECT).isValid():
            target = element.data(SHAPE_OBJECT).toPyObject()
        elif element and element.data(LAYER_OBJECT).isValid():
            target = element.data(LAYER_OBJECT).toPyObject()
        if target is not None:
            target.axis3_mill_depth = value
            self.axis3_mill_depth = target.axis3_mill_depth
def actionOnSelectionChange(self, parent, selected, deselected):
    """
    This function is a callback called from QTreeView class when
    something changed in the selection. It aims to update the
    graphic view according to the tree selection. It also deals
    with children selection when a parent is selected
    Note that there is no predefined signal for
    selectionChange event, that's why we use a callback function
    options
    @param parent: QT parent item (unused)
    @param selected: list of selected items in the treeView
    @param deselected: list of deselected items in the treeView
    """
    self.clearToolsParameters() #disable tools parameters widgets, ...
    #Deselects all the shapes that are selected
    itemLayerSelection = QtGui.QItemSelection()
    itemEntitySelection = QtGui.QItemSelection()
    for selection in deselected:
        for model_index in selection.indexes():
            if model_index.isValid():
                element = model_index.model().itemFromIndex(model_index)
                if element:
                    if element.data(SHAPE_OBJECT).isValid():
                        real_item = element.data(SHAPE_OBJECT).toPyObject()
                        real_item.setSelected(False, True) #Deselect the shape on the canvas
                        #Update the other TreeViews
                        item_index = self.findEntityItemIndexFromShape(real_item)
                        if model_index.model() == self.layer_item_model and item_index:
                            itemEntitySelection.select(item_index, item_index)
                        item_index = self.findLayerItemIndexFromShape(real_item)
                        if model_index.model() == self.entity_item_model and item_index:
                            itemLayerSelection.select(item_index, item_index)
                    elif element.data(LAYER_OBJECT).isValid():
                        itemLayerSelection.select(model_index, model_index)
                    elif element.data(ENTITY_OBJECT).isValid():
                        itemEntitySelection.select(model_index, model_index)
    #Apply the accumulated deselections to both treeViews at once
    selectionLayer = self.ui.layersShapesTreeView.selectionModel()
    selectionLayer.select(itemLayerSelection, QtGui.QItemSelectionModel.Deselect | QtGui.QItemSelectionModel.Rows)
    selectionEntity = self.ui.entitiesTreeView.selectionModel()
    selectionEntity.select(itemEntitySelection, QtGui.QItemSelectionModel.Deselect | QtGui.QItemSelectionModel.Rows)
    #Selects all the shapes that are selected
    itemLayerSelection = QtGui.QItemSelection()
    itemEntitySelection = QtGui.QItemSelection()
    for selection in selected:
        for model_index in selection.indexes():
            if model_index.isValid():
                model_index = model_index.sibling(model_index.row(), 0) #get the first column of the selected row, since it's the only one that contains data
                element = model_index.model().itemFromIndex(model_index)
                if element:
                    if element.data(SHAPE_OBJECT).isValid():
                        real_item = element.data(SHAPE_OBJECT).toPyObject()
                        #update the tools parameters according to the selection
                        self.displayToolParametersForItem(real_item.LayerContent, real_item)
                        real_item.setSelected(True, True) #Select the shape on the canvas
                        #Update the other TreeViews
                        item_index = self.findEntityItemIndexFromShape(real_item)
                        if model_index.model() == self.layer_item_model and item_index:
                            itemEntitySelection.select(item_index, item_index)
                        item_index = self.findLayerItemIndexFromShape(real_item)
                        if model_index.model() == self.entity_item_model and item_index:
                            itemLayerSelection.select(item_index, item_index)
                    #select all the children of a given layer when clicked
                    elif element.data(LAYER_OBJECT).isValid():
                        selection_model = self.ui.layersShapesTreeView.selectionModel() #Get the selection model of the QTreeView
                        #Deselect the Layer in the list.
                        self.columnsSelectDeselect(selection_model, model_index, False)
                        self.traverseChildrenAndSelect(self.layer_item_model, model_index, itemLayerSelection)
                        real_item = element.data(LAYER_OBJECT).toPyObject()
                        #update the tools parameters according to the selection
                        self.displayToolParametersForItem(real_item)
                    #select all the children of a given entity when clicked
                    elif element.data(ENTITY_OBJECT).isValid():
                        selection_model = self.ui.entitiesTreeView.selectionModel() #Get the selection model of the QTreeView
                        #Deselect the Entities in the list.
                        self.columnsSelectDeselect(selection_model, model_index, False)
                        self.traverseChildrenAndSelect(self.entity_item_model, model_index, itemEntitySelection)
    #Apply the accumulated selections to both treeViews at once
    selectionLayer = self.ui.layersShapesTreeView.selectionModel()
    selectionLayer.select(itemLayerSelection, QtGui.QItemSelectionModel.Select | QtGui.QItemSelectionModel.Rows)
    selectionEntity = self.ui.entitiesTreeView.selectionModel()
    selectionEntity.select(itemEntitySelection, QtGui.QItemSelectionModel.Select | QtGui.QItemSelectionModel.Rows)
def clearToolsParameters(self):
    """
    Restore defaults for the tools parameters widgets (default
    palette, cleared cached values) and enable/disable the settings
    frame depending on whether anything is selected.
    """
    selected_indexes = self.ui.layersShapesTreeView.selectedIndexes()
    if len(selected_indexes) <= 2: #2 selections = 1 row of 2 columns
        #0 or 1 row selected => forget the cached per-selection values
        self.tool_nr = None
        self.tool_diameter = None
        self.speed = None
        self.start_radius = None
        self.axis3_retract = None
        self.axis3_safe_margin = None
        self.axis3_slice_depth = None
        self.axis3_start_mill_depth = None
        self.axis3_mill_depth = None
        self.f_g1_plane = None
        self.f_g1_depth = None
        #Restore the default palette on every tool-parameter widget
        for widget in (self.ui.toolDiameterComboBox,
                       self.ui.toolDiameterLabel,
                       self.ui.toolSpeedLabel,
                       self.ui.startRadiusLabel,
                       self.ui.zRetractionArealLineEdit,
                       self.ui.zSafetyMarginLineEdit,
                       self.ui.zInfeedDepthLineEdit,
                       self.ui.g1FeedXYLineEdit,
                       self.ui.g1FeedZLineEdit,
                       self.ui.zInitialMillDepthLineEdit,
                       self.ui.zFinalMillDepthLineEdit):
            widget.setPalette(self.palette)
    #The settings frame is only usable when something is selected
    self.ui.millSettingsFrame.setEnabled(len(selected_indexes) != 0)
def displayToolParametersForItem(self, layer_item, shape_item = None):
    """
    Fill the tool-parameter widgets (combobox, labels, line edits)
    with the settings of the given layer, optionally overridden by
    the given shape where the shape defines its own value.
    @param layer_item: layer instance as defined in LayerContent.py
    @param shape_item: shape instance as defined in Shape.py (optional)
    """
    #Select the layer's tool in the combobox
    combo = self.ui.toolDiameterComboBox
    combo.setCurrentIndex(combo.findText(str(layer_item.tool_nr)))
    if self.tool_nr is not None and layer_item.tool_nr != self.tool_nr:
        #Several different tools are currently selected => grey background for the combobox
        grey_palette = QtGui.QPalette()
        grey_palette.setColor(QtGui.QPalette.Button, QtCore.Qt.gray)
        combo.setPalette(grey_palette)
    self.tool_nr = layer_item.tool_nr

    def shape_or_layer(attribute):
        #A shape-level value (when defined, ie not None) overrides the layer's
        if shape_item is not None and getattr(shape_item, attribute) is not None:
            return getattr(shape_item, attribute)
        return getattr(layer_item, attribute)

    #These parameters only exist at layer level
    self.tool_diameter = self.updateAndColorizeWidget(self.ui.toolDiameterLabel,
                                                      self.tool_diameter,
                                                      layer_item.tool_diameter)
    self.speed = self.updateAndColorizeWidget(self.ui.toolSpeedLabel,
                                              self.speed,
                                              layer_item.speed)
    self.start_radius = self.updateAndColorizeWidget(self.ui.startRadiusLabel,
                                                     self.start_radius,
                                                     layer_item.start_radius)
    self.axis3_retract = self.updateAndColorizeWidget(self.ui.zRetractionArealLineEdit,
                                                      self.axis3_retract,
                                                      layer_item.axis3_retract)
    self.axis3_safe_margin = self.updateAndColorizeWidget(self.ui.zSafetyMarginLineEdit,
                                                          self.axis3_safe_margin,
                                                          layer_item.axis3_safe_margin)
    #These parameters may be overridden per shape
    self.axis3_slice_depth = self.updateAndColorizeWidget(self.ui.zInfeedDepthLineEdit,
                                                          self.axis3_slice_depth,
                                                          shape_or_layer('axis3_slice_depth'))
    self.axis3_start_mill_depth = self.updateAndColorizeWidget(self.ui.zInitialMillDepthLineEdit,
                                                               self.axis3_start_mill_depth,
                                                               shape_or_layer('axis3_start_mill_depth'))
    self.axis3_mill_depth = self.updateAndColorizeWidget(self.ui.zFinalMillDepthLineEdit,
                                                         self.axis3_mill_depth,
                                                         shape_or_layer('axis3_mill_depth'))
    self.f_g1_plane = self.updateAndColorizeWidget(self.ui.g1FeedXYLineEdit,
                                                   self.f_g1_plane,
                                                   shape_or_layer('f_g1_plane'))
    self.f_g1_depth = self.updateAndColorizeWidget(self.ui.g1FeedZLineEdit,
                                                   self.f_g1_depth,
                                                   shape_or_layer('f_g1_depth'))
def updateAndColorizeWidget(self, widget, previous_value, value):
"""
This function colours the text in grey when two values are
different. It is used to show differences in tools settings
when several layers / shapes are selected.
@param widget: QT widget to update (can be a QLabel or a QLineEdit)
@param previous_value: the value of the previously selected item
@param value: the value (parameter) of the selected item
"""
widget.setText(str(round(value, 4))) #Round the value with at most 4 digits
if previous_value != None and value != previous_value:
#Several different tools parameter are currently selected (eg: mill deph = -3 for the first selected item and -2 for the second) => grey color for the text
palette = QtGui.QPalette()
palette.setColor(QtGui.QPalette.Text, QtCore.Qt.gray)
widget.setPalette(palette)
return value
def disableSelectedItems(self):
selected_indexes_list = self.ui.layersShapesTreeView.selectedIndexes()
for model_index in selected_indexes_list:
if model_index.isValid():
model_index = model_index.sibling(model_index.row(), 0) #get the first column of the selected row, since it's the only one that contains data
element = model_index.model().itemFromIndex(model_index)
element.setCheckState(QtCore.Qt.Unchecked)
def enableSelectedItems(self):
selected_indexes_list = self.ui.layersShapesTreeView.selectedIndexes()
for model_index in selected_indexes_list:
if model_index.isValid():
model_index = model_index.sibling(model_index.row(), 0) #get the first column of the selected row, since it's the only one that contains data
element = model_index.model().itemFromIndex(model_index)
element.setCheckState(QtCore.Qt.Checked)
    def doNotOptimizeRouteForSelectedItems(self):
        """
        Uncheck the "Optimize Path" (TSP) checkbox of every currently
        selected row of the layers/shapes treeView.
        """
        selected_indexes_list = self.ui.layersShapesTreeView.selectedIndexes()
        for model_index in selected_indexes_list:
            if model_index.isValid():
                model_index = model_index.sibling(model_index.row(), PATH_OPTIMISATION_COL) #Get the column of the row that contains the "Optimize Path" checkbox
                element = model_index.model().itemFromIndex(model_index)
                if element.isCheckable():
                    element.setCheckState(QtCore.Qt.Unchecked)
def optimizeRouteForSelectedItems(self):
selected_indexes_list = self.ui.layersShapesTreeView.selectedIndexes()
for model_index in selected_indexes_list:
if model_index.isValid():
model_index = model_index.sibling(model_index.row(), PATH_OPTIMISATION_COL) #Get the column of the row that contains the "Optimize Path" checkbox
element = model_index.model().itemFromIndex(model_index)
if element.isCheckable():
element.setCheckState(QtCore.Qt.Checked)
    def actionOnKeyPress(self, key_code, item_index):
        """
        This function is a callback called from QTreeView class when a
        key is pressed on the treeView. Spacebar toggles the
        enable/disable checkbox of the selected shapes; the O key
        toggles their "Optimize Path" checkbox.
        @param key_code: the key code as defined by QT
        @param item_index: the item on which the keyPress event occurred
        """
        #Enable/disable checkbox
        if key_code == QtCore.Qt.Key_Space and item_index and item_index.isValid():
            selection_model = self.ui.layersShapesTreeView.selectionModel() #Use selection models such that we can still reverse the tree with the keyboard
            for layer in self.layers_list:
                for shape in layer.shapes:
                    sub_item_index = self.findLayerItemIndexFromShape(shape)
                    if selection_model.isSelected(sub_item_index):
                        #sub_item_index = sub_item_index.sibling(sub_item_index.row(), 0) #Get the first column of the row (ie the one that contains the enable/disable checkbox)
                        sub_item = sub_item_index.model().itemFromIndex(sub_item_index)
                        sub_item.setCheckState(QtCore.Qt.Unchecked if sub_item.checkState() == QtCore.Qt.Checked else QtCore.Qt.Checked) #Toggle enable/disable checkbox
        #Optimize path checkbox
        #NOTE(review): this branch tests shape.isSelected() while the spacebar
        #branch queries the selection model — presumably equivalent; confirm.
        if key_code == QtCore.Qt.Key_O and item_index and item_index.isValid():
            for layer in self.layers_list:
                for shape in layer.shapes:
                    if shape.isSelected():
                        sub_item_index = self.findLayerItemIndexFromShape(shape)
                        sub_item_index = sub_item_index.sibling(sub_item_index.row(), PATH_OPTIMISATION_COL) #Get the column of the row that contains the "Optimize Path" checkbox
                        sub_item = sub_item_index.model().itemFromIndex(sub_item_index)
                        if sub_item.isCheckable():
                            sub_item.setCheckState(QtCore.Qt.Unchecked if sub_item.checkState() == QtCore.Qt.Checked else QtCore.Qt.Checked) #Toggle checkbox
    def on_itemChanged(self, item):
        """
        This slot is called when some data changes in one of the
        TreeView. For us, since rows are read only, it is only
        triggered when a checkbox is checked/unchecked.
        @param item: item is the modified element. It can be a Shape, a Layer or an Entity
        """
        if item.column() == PATH_OPTIMISATION_COL:
            #User has clicked on the Path Optimisation (TSP) checkbox => update the corresponding data into the shape
            item_model_index = item.index().sibling(item.row(), 0) #get the first column of the selected row, since it's the only one that contains data
            first_col_item = item_model_index.model().itemFromIndex(item_model_index)
            if first_col_item and first_col_item.data(SHAPE_OBJECT).isValid():
                #Set tool path optimisation for the matching shape
                first_col_item.data(SHAPE_OBJECT).toPyObject().setToolPathOptimized(False if item.checkState() == QtCore.Qt.Unchecked else True)
        elif item.data(SHAPE_OBJECT).isValid() or item.data(CUSTOM_GCODE_OBJECT).isValid():
            #Enable/disable checkbox of a Shape or Custom GCode row
            self.updateCheckboxOfItem(item, item.checkState())
            if self.auto_update_export_order:
                #update export order and thus export drawing
                self.prepareExportOrderUpdate()
        elif item.data(LAYER_OBJECT).isValid():
            #Checkbox concerns a Layer object => check/uncheck each sub-items (shapes)
            self.traverseChildrenAndEnableDisable(self.layer_item_model, item.index(), item.checkState())
            if self.auto_update_export_order:
                self.prepareExportOrderUpdate()
        elif item.data(ENTITY_OBJECT).isValid():
            #Checkbox concerns an Entity object => check/uncheck each sub-items (shapes and/or other entities)
            self.traverseChildrenAndEnableDisable(self.entity_item_model, item.index(), item.checkState())
            if self.auto_update_export_order:
                self.prepareExportOrderUpdate()
    def updateCheckboxOfItem(self, item, check):
        """
        This function is used to effectively update the state of a
        checkbox and enable / disable texts when item is a shape.
        It also mirrors the new state into the other treeView (layers
        tree <-> entities tree) and refreshes the parents' checkboxes.
        @param item: item is the modified element. It can be a Shape, a Layer or an Entity
        @param check: the check state (QtCore.Qt.Checked / QtCore.Qt.Unchecked)
        """
        item.model().blockSignals(True) #Avoid unnecessary signal loops (we don't want the treeView to emit itemChanged signal)
        item.setCheckState(check)
        item.model().blockSignals(False)
        if item.data(SHAPE_OBJECT).isValid():
            #Checkbox concerns a shape object
            real_item = item.data(SHAPE_OBJECT).toPyObject()
            real_item.setDisable(False if check == QtCore.Qt.Checked else True, True)
            #Update the other TreeViews
            item_index = self.findEntityItemIndexFromShape(real_item)
            if item_index:
                if item.model() == self.layer_item_model:
                    #Change came from the layers tree => mirror it into the entities tree
                    self.entity_item_model.blockSignals(True) #Avoid unnecessary signal loops (we don't want the treeView to emit itemChanged signal)
                    item_other_tree = self.entity_item_model.itemFromIndex(item_index)
                    item_other_tree.setCheckState(check)
                    self.enableDisableTreeRow(item_other_tree, check)
                    self.entity_item_model.blockSignals(False)
                self.traverseParentsAndUpdateEnableDisable(self.entity_item_model, item_index) #Update parents checkboxes
            item_index = self.findLayerItemIndexFromShape(real_item)
            if item_index:
                if item.model() == self.entity_item_model:
                    #Change came from the entities tree => mirror it into the layers tree
                    self.layer_item_model.blockSignals(True) #Avoid unnecessary signal loops (we don't want the treeView to emit itemChanged signal)
                    item_other_tree = self.layer_item_model.itemFromIndex(item_index)
                    item_other_tree.setCheckState(check)
                    self.enableDisableTreeRow(item_other_tree, check)
                    self.layer_item_model.blockSignals(False)
                self.traverseParentsAndUpdateEnableDisable(self.layer_item_model, item_index) #Update parents checkboxes
        if item.data(CUSTOM_GCODE_OBJECT).isValid():
            #Checkbox concerns a custom gcode object (only lives in the layers tree)
            real_item = item.data(CUSTOM_GCODE_OBJECT).toPyObject()
            real_item.setDisable(False if check == QtCore.Qt.Checked else True)
            self.traverseParentsAndUpdateEnableDisable(self.layer_item_model, item.index()) #Update parents checkboxes
        self.enableDisableTreeRow(item, check)
def enableDisableTreeRow(self, item, check):
"""
Enable / disable all the columns from a row, except the first
one (because the first column contains the checkbox that must
stay enabled in order to be clickable)
@param item: item is the modified element.
It can be a Shape, a Layer or an Entity
"""
current_tree_view = None
if item.model() == self.layer_item_model:
current_tree_view = self.ui.layersShapesTreeView
else:
current_tree_view = self.ui.entitiesTreeView
item.model().blockSignals(True)
i = 0
row = 0
if not item.parent():
row_item = item.model().invisibleRootItem() #parent is 0, so we need to get the root item of the tree as parent
i = item.columnCount()
else:
row_item = item.parent() #we are on one of the column of the row => take the parent, so that we get the complete row
i = row_item.columnCount()
row = item.row()
while i > 1:
i -= 1
column_item = row_item.child(row, i)
if column_item:
column_item.setEnabled(False if check == QtCore.Qt.Unchecked else True)
current_tree_view.update(column_item.index())
item.model().blockSignals(False)
#Update the display (refresh the treeView for the given item)
current_tree_view.update(item.index())
def removeCustomGCode(self):
"""
Remove a custom GCode object from the treeView, just after the
current item. Custom GCode are defined into the config file
"""
logger.debug(_('Removing custom GCode...'))
current_item_index = self.ui.layersShapesTreeView.currentIndex()
if current_item_index and current_item_index.isValid():
remove_row = current_item_index.row()
#get the first column of the selected row, since it's the only one that contains data
item_model_index = current_item_index.sibling(remove_row, 0)
first_col_item = item_model_index.model().itemFromIndex(item_model_index)
if first_col_item and first_col_item.data(CUSTOM_GCODE_OBJECT).isValid():
#Item is a Custom GCode, so we can remove it
real_item = first_col_item.data(CUSTOM_GCODE_OBJECT).toPyObject()
real_item.LayerContent.shapes.remove(real_item)
first_col_item.parent().removeRow(remove_row)
else:
logger.warning(_('Only Custom GCode items are removable!'))
    def addCustomGCodeAfter(self, action_name):
        """
        Add a custom GCode object into the treeView, just after the
        current item. Custom GCode are defined into the config file.
        @param action_name: the name of the custom GCode to be inserted.
                            This name must match one of the subsection names
                            of [Custom_Actions] from the config file.
        """
        logger.debug(_('Adding custom GCode "%s"') % (action_name))
        g_code = "(No custom GCode defined)"
        if action_name and len(action_name) > 0:
            g_code = g.config.vars.Custom_Actions[str(action_name)].gcode
        current_item_index = self.ui.layersShapesTreeView.currentIndex()
        if current_item_index and current_item_index.isValid():
            push_row = current_item_index.row() + 1 #insert after the current row
            current_item = current_item_index.model().itemFromIndex(current_item_index)
            current_item_parent = current_item.parent()
            if not current_item_parent:
                #parent is 0, so we are probably on a layer
                #get the first column of the selected row, since it's the only one that contains data
                current_item_parent_index = current_item_index.sibling(current_item_index.row(), 0)
                current_item_parent = current_item_parent_index.model().itemFromIndex(current_item_parent_index)
                push_row = 0 #insert before any shape
            if current_item_parent.data(LAYER_OBJECT).isValid():
                real_item_parent = current_item_parent.data(LAYER_OBJECT).toPyObject()
                #creates a new CustomGCode instance
                custom_gcode = CustomGCodeClass(action_name, len(real_item_parent.shapes), g_code, real_item_parent)
                #insert this new item at the end of the physical list
                real_item_parent.shapes.append(custom_gcode)
                #Build the 4 columns of the new tree row
                icon = QtGui.QIcon()
                icon.addPixmap(QtGui.QPixmap(":/images/pause.png"))
                item_col_0 = QtGui.QStandardItem(icon, "") #will only display a checkbox + an icon that will never be disabled
                item_col_0.setData(QtCore.QVariant(custom_gcode), CUSTOM_GCODE_OBJECT) #store a ref to the custom gcode in our treeView element
                item_col_0.setFlags(QtCore.Qt.ItemIsDragEnabled | QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsUserCheckable | QtCore.Qt.ItemIsSelectable)
                item_col_1 = QtGui.QStandardItem(custom_gcode.name)
                item_col_1.setFlags(QtCore.Qt.ItemIsDragEnabled | QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable)
                item_col_2 = QtGui.QStandardItem(str(custom_gcode.nr))
                item_col_2.setFlags(QtCore.Qt.ItemIsDragEnabled | QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable)
                item_col_3 = QtGui.QStandardItem()
                item_col_3.setFlags(QtCore.Qt.ItemIsDragEnabled | QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable)
                #Deal with the checkboxes (shape enabled or disabled / send shape to TSP optimizer)
                item_col_0.setCheckState(QtCore.Qt.Checked)
                current_item_parent.insertRow(push_row, [item_col_0, item_col_1, item_col_2, item_col_3])
                self.ui.layersShapesTreeView.setCurrentIndex(current_item.index())
def prepareExportOrderUpdate(self):
"""
If the live update of export route is enabled, this function is
called each time the shape order changes. It aims to update the drawing.
"""
if self.auto_update_export_order:
#Update the exported shapes
self.updateExportOrder()
#Emit the signal "exportOrderUpdated", so that the main can update tool path if he wants
QtCore.QObject.emit(self, QtCore.SIGNAL("exportOrderUpdated"), self) #We only pass python objects as parameters => definition without parentheses (PyQt_PyObject)
def setUpdateExportRoute(self, live_update):
"""
Set or unset the live update of export route.
"""
self.auto_update_export_order = live_update
if live_update:
#Live update the export route drawing
QtCore.QObject.connect(self.ui.layersShapesTreeView, QtCore.SIGNAL("itemMoved"), self.prepareExportOrderUpdate)
self.prepareExportOrderUpdate()
else:
#Don't automatically update the export route drawing
QtCore.QObject.disconnect(self.ui.layersShapesTreeView, QtCore.SIGNAL("itemMoved"), self.prepareExportOrderUpdate)
|
neraliu/tainted-phantomjs | refs/heads/master | src/breakpad/src/tools/gyp/test/home_dot_gyp/gyptest-home-includes.py | 151 | #!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies inclusion of $HOME/.gyp/includes.gypi works.
"""
import os

import TestGyp

test = TestGyp.TestGyp()

# Point $HOME at the local 'home' directory so gyp picks up
# home/.gyp/includes.gypi during generation.
os.environ['HOME'] = os.path.abspath('home')

test.run_gyp('all.gyp', chdir='src')

# After relocating, we should still be able to build (build file shouldn't
# contain relative reference to ~/.gyp/includes.gypi)
test.relocate('src', 'relocate/src')

test.build('all.gyp', test.ALL, chdir='relocate/src')

# The executable prints FOO, which includes.gypi defines as 'fromhome'.
# (Dropped the stray trailing semicolon — non-idiomatic Python.)
test.run_built_executable('printfoo',
                          chdir='relocate/src',
                          stdout="FOO is fromhome\n")

test.pass_test()
|
kalahbrown/HueBigSQL | refs/heads/master | desktop/core/ext-py/python-dateutil-2.4.2/dateutil/test/test.py | 43 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import calendar
import base64
import sys
from six import StringIO, BytesIO, PY3
try:
# python2.6 unittest has no skipUnless. So we use unittest2.
# if you have python >= 2.7, you don't need unittest2, but it won't harm
import unittest2 as unittest
except ImportError:
import unittest
MISSING_TARBALL = ("This test fails if you don't have the dateutil "
"timezone file installed. Please read the README")
from datetime import *
from dateutil.relativedelta import *
from dateutil.parser import *
from dateutil.easter import *
from dateutil.rrule import *
from dateutil.tz import *
from dateutil import zoneinfo
try:
from dateutil import tzwin
except ImportError:
pass
class RelativeDeltaTest(unittest.TestCase):
    """Unit tests for dateutil.relativedelta arithmetic and replacement logic."""
    # Fixed reference instants shared by all tests.
    now = datetime(2003, 9, 17, 20, 54, 47, 282310)
    today = date(2003, 9, 17)

    def testNextMonth(self):
        self.assertEqual(self.now+relativedelta(months=+1),
                         datetime(2003, 10, 17, 20, 54, 47, 282310))

    def testNextMonthPlusOneWeek(self):
        self.assertEqual(self.now+relativedelta(months=+1, weeks=+1),
                         datetime(2003, 10, 24, 20, 54, 47, 282310))

    def testNextMonthPlusOneWeek10am(self):
        self.assertEqual(self.today +
                         relativedelta(months=+1, weeks=+1, hour=10),
                         datetime(2003, 10, 24, 10, 0))

    def testNextMonthPlusOneWeek10amDiff(self):
        self.assertEqual(relativedelta(datetime(2003, 10, 24, 10, 0),
                                       self.today),
                         relativedelta(months=+1, days=+7, hours=+10))

    def testOneMonthBeforeOneYear(self):
        self.assertEqual(self.now+relativedelta(years=+1, months=-1),
                         datetime(2004, 8, 17, 20, 54, 47, 282310))

    def testMonthsOfDiffNumOfDays(self):
        self.assertEqual(date(2003, 1, 27)+relativedelta(months=+1),
                         date(2003, 2, 27))
        self.assertEqual(date(2003, 1, 31)+relativedelta(months=+1),
                         date(2003, 2, 28))
        self.assertEqual(date(2003, 1, 31)+relativedelta(months=+2),
                         date(2003, 3, 31))

    def testMonthsOfDiffNumOfDaysWithYears(self):
        self.assertEqual(date(2000, 2, 28)+relativedelta(years=+1),
                         date(2001, 2, 28))
        self.assertEqual(date(2000, 2, 29)+relativedelta(years=+1),
                         date(2001, 2, 28))
        self.assertEqual(date(1999, 2, 28)+relativedelta(years=+1),
                         date(2000, 2, 28))
        self.assertEqual(date(1999, 3, 1)+relativedelta(years=+1),
                         date(2000, 3, 1))
        self.assertEqual(date(1999, 3, 1)+relativedelta(years=+1),
                         date(2000, 3, 1))
        self.assertEqual(date(2001, 2, 28)+relativedelta(years=-1),
                         date(2000, 2, 28))
        self.assertEqual(date(2001, 3, 1)+relativedelta(years=-1),
                         date(2000, 3, 1))

    def testNextFriday(self):
        self.assertEqual(self.today+relativedelta(weekday=FR),
                         date(2003, 9, 19))

    def testNextFridayInt(self):
        self.assertEqual(self.today+relativedelta(weekday=calendar.FRIDAY),
                         date(2003, 9, 19))

    def testLastFridayInThisMonth(self):
        self.assertEqual(self.today+relativedelta(day=31, weekday=FR(-1)),
                         date(2003, 9, 26))

    def testNextWednesdayIsToday(self):
        self.assertEqual(self.today+relativedelta(weekday=WE),
                         date(2003, 9, 17))

    def testNextWenesdayNotToday(self):
        self.assertEqual(self.today+relativedelta(days=+1, weekday=WE),
                         date(2003, 9, 24))

    def test15thISOYearWeek(self):
        self.assertEqual(date(2003, 1, 1) +
                         relativedelta(day=4, weeks=+14, weekday=MO(-1)),
                         date(2003, 4, 7))

    def testMillenniumAge(self):
        self.assertEqual(relativedelta(self.now, date(2001, 1, 1)),
                         relativedelta(years=+2, months=+8, days=+16,
                                       hours=+20, minutes=+54, seconds=+47,
                                       microseconds=+282310))

    def testJohnAge(self):
        self.assertEqual(relativedelta(self.now,
                                       datetime(1978, 4, 5, 12, 0)),
                         relativedelta(years=+25, months=+5, days=+12,
                                       hours=+8, minutes=+54, seconds=+47,
                                       microseconds=+282310))

    def testJohnAgeWithDate(self):
        self.assertEqual(relativedelta(self.today,
                                       datetime(1978, 4, 5, 12, 0)),
                         relativedelta(years=+25, months=+5, days=+11,
                                       hours=+12))

    def testYearDay(self):
        self.assertEqual(date(2003, 1, 1)+relativedelta(yearday=260),
                         date(2003, 9, 17))
        self.assertEqual(date(2002, 1, 1)+relativedelta(yearday=260),
                         date(2002, 9, 17))
        self.assertEqual(date(2000, 1, 1)+relativedelta(yearday=260),
                         date(2000, 9, 16))
        self.assertEqual(self.today+relativedelta(yearday=261),
                         date(2003, 9, 18))

    def testYearDayBug(self):
        # Tests a problem reported by Adam Ryan.
        self.assertEqual(date(2010, 1, 1)+relativedelta(yearday=15),
                         date(2010, 1, 15))

    def testNonLeapYearDay(self):
        self.assertEqual(date(2003, 1, 1)+relativedelta(nlyearday=260),
                         date(2003, 9, 17))
        self.assertEqual(date(2002, 1, 1)+relativedelta(nlyearday=260),
                         date(2002, 9, 17))
        self.assertEqual(date(2000, 1, 1)+relativedelta(nlyearday=260),
                         date(2000, 9, 17))
        self.assertEqual(self.today+relativedelta(yearday=261),
                         date(2003, 9, 18))

    def testAddition(self):
        self.assertEqual(relativedelta(days=10) +
                         relativedelta(years=1, months=2, days=3, hours=4,
                                       minutes=5, microseconds=6),
                         relativedelta(years=1, months=2, days=13, hours=4,
                                       minutes=5, microseconds=6))

    def testAdditionToDatetime(self):
        self.assertEqual(datetime(2000, 1, 1) + relativedelta(days=1),
                         datetime(2000, 1, 2))

    def testRightAdditionToDatetime(self):
        self.assertEqual(relativedelta(days=1) + datetime(2000, 1, 1),
                         datetime(2000, 1, 2))

    def testSubtraction(self):
        self.assertEqual(relativedelta(days=10) -
                         relativedelta(years=1, months=2, days=3, hours=4,
                                       minutes=5, microseconds=6),
                         relativedelta(years=-1, months=-2, days=7, hours=-4,
                                       minutes=-5, microseconds=-6))

    def testRightSubtractionFromDatetime(self):
        self.assertEqual(datetime(2000, 1, 2) - relativedelta(days=1),
                         datetime(2000, 1, 1))

    def testSubractionWithDatetime(self):
        # BUGFIX: the operands must be passed unpacked, not as one tuple.
        # assertRaises(exc, callable, *args) forwards *args to the callable;
        # the previous version handed the two-parameter lambda a single
        # tuple argument, so the TypeError came from the arity mismatch and
        # the "relativedelta - datetime" subtraction was never exercised.
        self.assertRaises(TypeError, lambda x, y: x - y,
                          relativedelta(days=1), datetime(2000, 1, 1))

    def testMultiplication(self):
        self.assertEqual(datetime(2000, 1, 1) + relativedelta(days=1) * 28,
                         datetime(2000, 1, 29))
        self.assertEqual(datetime(2000, 1, 1) + 28 * relativedelta(days=1),
                         datetime(2000, 1, 29))

    def testDivision(self):
        self.assertEqual(datetime(2000, 1, 1) + relativedelta(days=28) / 28,
                         datetime(2000, 1, 2))

    def testBoolean(self):
        self.assertFalse(relativedelta(days=0))
        self.assertTrue(relativedelta(days=1))
class RRuleTest(unittest.TestCase):
def testYearly(self):
self.assertEqual(list(rrule(YEARLY,
count=3,
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 9, 2, 9, 0),
datetime(1998, 9, 2, 9, 0),
datetime(1999, 9, 2, 9, 0)])
def testYearlyInterval(self):
self.assertEqual(list(rrule(YEARLY,
count=3,
interval=2,
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 9, 2, 9, 0),
datetime(1999, 9, 2, 9, 0),
datetime(2001, 9, 2, 9, 0)])
def testYearlyIntervalLarge(self):
self.assertEqual(list(rrule(YEARLY,
count=3,
interval=100,
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 9, 2, 9, 0),
datetime(2097, 9, 2, 9, 0),
datetime(2197, 9, 2, 9, 0)])
def testYearlyByMonth(self):
self.assertEqual(list(rrule(YEARLY,
count=3,
bymonth=(1, 3),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1998, 1, 2, 9, 0),
datetime(1998, 3, 2, 9, 0),
datetime(1999, 1, 2, 9, 0)])
def testYearlyByMonthDay(self):
self.assertEqual(list(rrule(YEARLY,
count=3,
bymonthday=(1, 3),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 9, 3, 9, 0),
datetime(1997, 10, 1, 9, 0),
datetime(1997, 10, 3, 9, 0)])
def testYearlyByMonthAndMonthDay(self):
self.assertEqual(list(rrule(YEARLY,
count=3,
bymonth=(1, 3),
bymonthday=(5, 7),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1998, 1, 5, 9, 0),
datetime(1998, 1, 7, 9, 0),
datetime(1998, 3, 5, 9, 0)])
def testYearlyByWeekDay(self):
self.assertEqual(list(rrule(YEARLY,
count=3,
byweekday=(TU, TH),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 9, 2, 9, 0),
datetime(1997, 9, 4, 9, 0),
datetime(1997, 9, 9, 9, 0)])
def testYearlyByNWeekDay(self):
self.assertEqual(list(rrule(YEARLY,
count=3,
byweekday=(TU(1), TH(-1)),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 12, 25, 9, 0),
datetime(1998, 1, 6, 9, 0),
datetime(1998, 12, 31, 9, 0)])
def testYearlyByNWeekDayLarge(self):
self.assertEqual(list(rrule(YEARLY,
count=3,
byweekday=(TU(3), TH(-3)),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 12, 11, 9, 0),
datetime(1998, 1, 20, 9, 0),
datetime(1998, 12, 17, 9, 0)])
def testYearlyByMonthAndWeekDay(self):
self.assertEqual(list(rrule(YEARLY,
count=3,
bymonth=(1, 3),
byweekday=(TU, TH),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1998, 1, 1, 9, 0),
datetime(1998, 1, 6, 9, 0),
datetime(1998, 1, 8, 9, 0)])
def testYearlyByMonthAndNWeekDay(self):
self.assertEqual(list(rrule(YEARLY,
count=3,
bymonth=(1, 3),
byweekday=(TU(1), TH(-1)),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1998, 1, 6, 9, 0),
datetime(1998, 1, 29, 9, 0),
datetime(1998, 3, 3, 9, 0)])
def testYearlyByMonthAndNWeekDayLarge(self):
# This is interesting because the TH(-3) ends up before
# the TU(3).
self.assertEqual(list(rrule(YEARLY,
count=3,
bymonth=(1, 3),
byweekday=(TU(3), TH(-3)),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1998, 1, 15, 9, 0),
datetime(1998, 1, 20, 9, 0),
datetime(1998, 3, 12, 9, 0)])
def testYearlyByMonthDayAndWeekDay(self):
self.assertEqual(list(rrule(YEARLY,
count=3,
bymonthday=(1, 3),
byweekday=(TU, TH),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1998, 1, 1, 9, 0),
datetime(1998, 2, 3, 9, 0),
datetime(1998, 3, 3, 9, 0)])
def testYearlyByMonthAndMonthDayAndWeekDay(self):
self.assertEqual(list(rrule(YEARLY,
count=3,
bymonth=(1, 3),
bymonthday=(1, 3),
byweekday=(TU, TH),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1998, 1, 1, 9, 0),
datetime(1998, 3, 3, 9, 0),
datetime(2001, 3, 1, 9, 0)])
def testYearlyByYearDay(self):
self.assertEqual(list(rrule(YEARLY,
count=4,
byyearday=(1, 100, 200, 365),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 12, 31, 9, 0),
datetime(1998, 1, 1, 9, 0),
datetime(1998, 4, 10, 9, 0),
datetime(1998, 7, 19, 9, 0)])
def testYearlyByYearDayNeg(self):
self.assertEqual(list(rrule(YEARLY,
count=4,
byyearday=(-365, -266, -166, -1),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 12, 31, 9, 0),
datetime(1998, 1, 1, 9, 0),
datetime(1998, 4, 10, 9, 0),
datetime(1998, 7, 19, 9, 0)])
def testYearlyByMonthAndYearDay(self):
self.assertEqual(list(rrule(YEARLY,
count=4,
bymonth=(4, 7),
byyearday=(1, 100, 200, 365),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1998, 4, 10, 9, 0),
datetime(1998, 7, 19, 9, 0),
datetime(1999, 4, 10, 9, 0),
datetime(1999, 7, 19, 9, 0)])
def testYearlyByMonthAndYearDayNeg(self):
self.assertEqual(list(rrule(YEARLY,
count=4,
bymonth=(4, 7),
byyearday=(-365, -266, -166, -1),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1998, 4, 10, 9, 0),
datetime(1998, 7, 19, 9, 0),
datetime(1999, 4, 10, 9, 0),
datetime(1999, 7, 19, 9, 0)])
def testYearlyByWeekNo(self):
self.assertEqual(list(rrule(YEARLY,
count=3,
byweekno=20,
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1998, 5, 11, 9, 0),
datetime(1998, 5, 12, 9, 0),
datetime(1998, 5, 13, 9, 0)])
def testYearlyByWeekNoAndWeekDay(self):
# That's a nice one. The first days of week number one
# may be in the last year.
self.assertEqual(list(rrule(YEARLY,
count=3,
byweekno=1,
byweekday=MO,
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 12, 29, 9, 0),
datetime(1999, 1, 4, 9, 0),
datetime(2000, 1, 3, 9, 0)])
def testYearlyByWeekNoAndWeekDayLarge(self):
# Another nice test. The last days of week number 52/53
# may be in the next year.
self.assertEqual(list(rrule(YEARLY,
count=3,
byweekno=52,
byweekday=SU,
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 12, 28, 9, 0),
datetime(1998, 12, 27, 9, 0),
datetime(2000, 1, 2, 9, 0)])
def testYearlyByWeekNoAndWeekDayLast(self):
self.assertEqual(list(rrule(YEARLY,
count=3,
byweekno=-1,
byweekday=SU,
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 12, 28, 9, 0),
datetime(1999, 1, 3, 9, 0),
datetime(2000, 1, 2, 9, 0)])
def testYearlyByEaster(self):
self.assertEqual(list(rrule(YEARLY,
count=3,
byeaster=0,
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1998, 4, 12, 9, 0),
datetime(1999, 4, 4, 9, 0),
datetime(2000, 4, 23, 9, 0)])
def testYearlyByEasterPos(self):
self.assertEqual(list(rrule(YEARLY,
count=3,
byeaster=1,
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1998, 4, 13, 9, 0),
datetime(1999, 4, 5, 9, 0),
datetime(2000, 4, 24, 9, 0)])
def testYearlyByEasterNeg(self):
self.assertEqual(list(rrule(YEARLY,
count=3,
byeaster=-1,
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1998, 4, 11, 9, 0),
datetime(1999, 4, 3, 9, 0),
datetime(2000, 4, 22, 9, 0)])
def testYearlyByWeekNoAndWeekDay53(self):
self.assertEqual(list(rrule(YEARLY,
count=3,
byweekno=53,
byweekday=MO,
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1998, 12, 28, 9, 0),
datetime(2004, 12, 27, 9, 0),
datetime(2009, 12, 28, 9, 0)])
def testYearlyByHour(self):
self.assertEqual(list(rrule(YEARLY,
count=3,
byhour=(6, 18),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 9, 2, 18, 0),
datetime(1998, 9, 2, 6, 0),
datetime(1998, 9, 2, 18, 0)])
def testYearlyByMinute(self):
self.assertEqual(list(rrule(YEARLY,
count=3,
byminute=(6, 18),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 9, 2, 9, 6),
datetime(1997, 9, 2, 9, 18),
datetime(1998, 9, 2, 9, 6)])
def testYearlyBySecond(self):
self.assertEqual(list(rrule(YEARLY,
count=3,
bysecond=(6, 18),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 9, 2, 9, 0, 6),
datetime(1997, 9, 2, 9, 0, 18),
datetime(1998, 9, 2, 9, 0, 6)])
def testYearlyByHourAndMinute(self):
self.assertEqual(list(rrule(YEARLY,
count=3,
byhour=(6, 18),
byminute=(6, 18),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 9, 2, 18, 6),
datetime(1997, 9, 2, 18, 18),
datetime(1998, 9, 2, 6, 6)])
def testYearlyByHourAndSecond(self):
self.assertEqual(list(rrule(YEARLY,
count=3,
byhour=(6, 18),
bysecond=(6, 18),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 9, 2, 18, 0, 6),
datetime(1997, 9, 2, 18, 0, 18),
datetime(1998, 9, 2, 6, 0, 6)])
def testYearlyByMinuteAndSecond(self):
self.assertEqual(list(rrule(YEARLY,
count=3,
byminute=(6, 18),
bysecond=(6, 18),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 9, 2, 9, 6, 6),
datetime(1997, 9, 2, 9, 6, 18),
datetime(1997, 9, 2, 9, 18, 6)])
def testYearlyByHourAndMinuteAndSecond(self):
self.assertEqual(list(rrule(YEARLY,
count=3,
byhour=(6, 18),
byminute=(6, 18),
bysecond=(6, 18),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 9, 2, 18, 6, 6),
datetime(1997, 9, 2, 18, 6, 18),
datetime(1997, 9, 2, 18, 18, 6)])
def testYearlyBySetPos(self):
self.assertEqual(list(rrule(YEARLY,
count=3,
bymonthday=15,
byhour=(6, 18),
bysetpos=(3, -3),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 11, 15, 18, 0),
datetime(1998, 2, 15, 6, 0),
datetime(1998, 11, 15, 18, 0)])
    # MONTHLY frequency tests: interval, BYxxx filters (month, monthday,
    # weekday, nth-weekday, yearday, weekno, easter, hour/minute/second) and
    # BYSETPOS. Expected lists are hand-computed regression oracles.
    def testMonthly(self):
        self.assertEqual(list(rrule(MONTHLY,
                              count=3,
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 9, 0),
                          datetime(1997, 10, 2, 9, 0),
                          datetime(1997, 11, 2, 9, 0)])
    def testMonthlyInterval(self):
        self.assertEqual(list(rrule(MONTHLY,
                              count=3,
                              interval=2,
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 9, 0),
                          datetime(1997, 11, 2, 9, 0),
                          datetime(1998, 1, 2, 9, 0)])
    def testMonthlyIntervalLarge(self):
        # interval=18 months crosses year boundaries twice.
        self.assertEqual(list(rrule(MONTHLY,
                              count=3,
                              interval=18,
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 9, 0),
                          datetime(1999, 3, 2, 9, 0),
                          datetime(2000, 9, 2, 9, 0)])
    def testMonthlyByMonth(self):
        self.assertEqual(list(rrule(MONTHLY,
                              count=3,
                              bymonth=(1, 3),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 1, 2, 9, 0),
                          datetime(1998, 3, 2, 9, 0),
                          datetime(1999, 1, 2, 9, 0)])
    def testMonthlyByMonthDay(self):
        self.assertEqual(list(rrule(MONTHLY,
                              count=3,
                              bymonthday=(1, 3),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 3, 9, 0),
                          datetime(1997, 10, 1, 9, 0),
                          datetime(1997, 10, 3, 9, 0)])
    def testMonthlyByMonthAndMonthDay(self):
        self.assertEqual(list(rrule(MONTHLY,
                              count=3,
                              bymonth=(1, 3),
                              bymonthday=(5, 7),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 1, 5, 9, 0),
                          datetime(1998, 1, 7, 9, 0),
                          datetime(1998, 3, 5, 9, 0)])
    def testMonthlyByWeekDay(self):
        self.assertEqual(list(rrule(MONTHLY,
                              count=3,
                              byweekday=(TU, TH),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 9, 0),
                          datetime(1997, 9, 4, 9, 0),
                          datetime(1997, 9, 9, 9, 0)])
        # Third Monday of the month
        self.assertEqual(rrule(MONTHLY,
                         byweekday=(MO(+3)),
                         dtstart=datetime(1997, 9, 1)).between(datetime(1997, 9, 1),
                                                               datetime(1997, 12, 1)),
                         [datetime(1997, 9, 15, 0, 0),
                          datetime(1997, 10, 20, 0, 0),
                          datetime(1997, 11, 17, 0, 0)])
    def testMonthlyByNWeekDay(self):
        """First Tuesday and last Thursday of each month (nth-weekday syntax)."""
        self.assertEqual(list(rrule(MONTHLY,
                              count=3,
                              byweekday=(TU(1), TH(-1)),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 9, 0),
                          datetime(1997, 9, 25, 9, 0),
                          datetime(1997, 10, 7, 9, 0)])
    def testMonthlyByNWeekDayLarge(self):
        self.assertEqual(list(rrule(MONTHLY,
                              count=3,
                              byweekday=(TU(3), TH(-3)),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 11, 9, 0),
                          datetime(1997, 9, 16, 9, 0),
                          datetime(1997, 10, 16, 9, 0)])
    def testMonthlyByMonthAndWeekDay(self):
        self.assertEqual(list(rrule(MONTHLY,
                              count=3,
                              bymonth=(1, 3),
                              byweekday=(TU, TH),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 1, 1, 9, 0),
                          datetime(1998, 1, 6, 9, 0),
                          datetime(1998, 1, 8, 9, 0)])
    def testMonthlyByMonthAndNWeekDay(self):
        self.assertEqual(list(rrule(MONTHLY,
                              count=3,
                              bymonth=(1, 3),
                              byweekday=(TU(1), TH(-1)),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 1, 6, 9, 0),
                          datetime(1998, 1, 29, 9, 0),
                          datetime(1998, 3, 3, 9, 0)])
    def testMonthlyByMonthAndNWeekDayLarge(self):
        self.assertEqual(list(rrule(MONTHLY,
                              count=3,
                              bymonth=(1, 3),
                              byweekday=(TU(3), TH(-3)),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 1, 15, 9, 0),
                          datetime(1998, 1, 20, 9, 0),
                          datetime(1998, 3, 12, 9, 0)])
    def testMonthlyByMonthDayAndWeekDay(self):
        self.assertEqual(list(rrule(MONTHLY,
                              count=3,
                              bymonthday=(1, 3),
                              byweekday=(TU, TH),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 1, 1, 9, 0),
                          datetime(1998, 2, 3, 9, 0),
                          datetime(1998, 3, 3, 9, 0)])
    def testMonthlyByMonthAndMonthDayAndWeekDay(self):
        self.assertEqual(list(rrule(MONTHLY,
                              count=3,
                              bymonth=(1, 3),
                              bymonthday=(1, 3),
                              byweekday=(TU, TH),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 1, 1, 9, 0),
                          datetime(1998, 3, 3, 9, 0),
                          datetime(2001, 3, 1, 9, 0)])
    def testMonthlyByYearDay(self):
        self.assertEqual(list(rrule(MONTHLY,
                              count=4,
                              byyearday=(1, 100, 200, 365),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 12, 31, 9, 0),
                          datetime(1998, 1, 1, 9, 0),
                          datetime(1998, 4, 10, 9, 0),
                          datetime(1998, 7, 19, 9, 0)])
    def testMonthlyByYearDayNeg(self):
        # Negative year days count backwards from Dec 31; same dates as above.
        self.assertEqual(list(rrule(MONTHLY,
                              count=4,
                              byyearday=(-365, -266, -166, -1),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 12, 31, 9, 0),
                          datetime(1998, 1, 1, 9, 0),
                          datetime(1998, 4, 10, 9, 0),
                          datetime(1998, 7, 19, 9, 0)])
    def testMonthlyByMonthAndYearDay(self):
        self.assertEqual(list(rrule(MONTHLY,
                              count=4,
                              bymonth=(4, 7),
                              byyearday=(1, 100, 200, 365),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 4, 10, 9, 0),
                          datetime(1998, 7, 19, 9, 0),
                          datetime(1999, 4, 10, 9, 0),
                          datetime(1999, 7, 19, 9, 0)])
    def testMonthlyByMonthAndYearDayNeg(self):
        self.assertEqual(list(rrule(MONTHLY,
                              count=4,
                              bymonth=(4, 7),
                              byyearday=(-365, -266, -166, -1),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 4, 10, 9, 0),
                          datetime(1998, 7, 19, 9, 0),
                          datetime(1999, 4, 10, 9, 0),
                          datetime(1999, 7, 19, 9, 0)])
    def testMonthlyByWeekNo(self):
        self.assertEqual(list(rrule(MONTHLY,
                              count=3,
                              byweekno=20,
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 5, 11, 9, 0),
                          datetime(1998, 5, 12, 9, 0),
                          datetime(1998, 5, 13, 9, 0)])
    def testMonthlyByWeekNoAndWeekDay(self):
        # That's a nice one. The first days of week number one
        # may be in the last year.
        self.assertEqual(list(rrule(MONTHLY,
                              count=3,
                              byweekno=1,
                              byweekday=MO,
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 12, 29, 9, 0),
                          datetime(1999, 1, 4, 9, 0),
                          datetime(2000, 1, 3, 9, 0)])
    def testMonthlyByWeekNoAndWeekDayLarge(self):
        # Another nice test. The last days of week number 52/53
        # may be in the next year.
        self.assertEqual(list(rrule(MONTHLY,
                              count=3,
                              byweekno=52,
                              byweekday=SU,
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 12, 28, 9, 0),
                          datetime(1998, 12, 27, 9, 0),
                          datetime(2000, 1, 2, 9, 0)])
    def testMonthlyByWeekNoAndWeekDayLast(self):
        """byweekno=-1 means the last ISO week of the year."""
        self.assertEqual(list(rrule(MONTHLY,
                              count=3,
                              byweekno=-1,
                              byweekday=SU,
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 12, 28, 9, 0),
                          datetime(1999, 1, 3, 9, 0),
                          datetime(2000, 1, 2, 9, 0)])
    def testMonthlyByWeekNoAndWeekDay53(self):
        """Week 53 exists only in long ISO years (1998, 2004, 2009, ...)."""
        self.assertEqual(list(rrule(MONTHLY,
                              count=3,
                              byweekno=53,
                              byweekday=MO,
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 12, 28, 9, 0),
                          datetime(2004, 12, 27, 9, 0),
                          datetime(2009, 12, 28, 9, 0)])
    def testMonthlyByEaster(self):
        # byeaster=0: Easter Sunday itself.
        self.assertEqual(list(rrule(MONTHLY,
                              count=3,
                              byeaster=0,
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 4, 12, 9, 0),
                          datetime(1999, 4, 4, 9, 0),
                          datetime(2000, 4, 23, 9, 0)])
    def testMonthlyByEasterPos(self):
        # byeaster=1: the day after Easter Sunday.
        self.assertEqual(list(rrule(MONTHLY,
                              count=3,
                              byeaster=1,
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 4, 13, 9, 0),
                          datetime(1999, 4, 5, 9, 0),
                          datetime(2000, 4, 24, 9, 0)])
    def testMonthlyByEasterNeg(self):
        # byeaster=-1: the day before Easter Sunday.
        self.assertEqual(list(rrule(MONTHLY,
                              count=3,
                              byeaster=-1,
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 4, 11, 9, 0),
                          datetime(1999, 4, 3, 9, 0),
                          datetime(2000, 4, 22, 9, 0)])
    def testMonthlyByHour(self):
        self.assertEqual(list(rrule(MONTHLY,
                              count=3,
                              byhour=(6, 18),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 18, 0),
                          datetime(1997, 10, 2, 6, 0),
                          datetime(1997, 10, 2, 18, 0)])
    def testMonthlyByMinute(self):
        self.assertEqual(list(rrule(MONTHLY,
                              count=3,
                              byminute=(6, 18),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 9, 6),
                          datetime(1997, 9, 2, 9, 18),
                          datetime(1997, 10, 2, 9, 6)])
    def testMonthlyBySecond(self):
        self.assertEqual(list(rrule(MONTHLY,
                              count=3,
                              bysecond=(6, 18),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 9, 0, 6),
                          datetime(1997, 9, 2, 9, 0, 18),
                          datetime(1997, 10, 2, 9, 0, 6)])
    def testMonthlyByHourAndMinute(self):
        self.assertEqual(list(rrule(MONTHLY,
                              count=3,
                              byhour=(6, 18),
                              byminute=(6, 18),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 18, 6),
                          datetime(1997, 9, 2, 18, 18),
                          datetime(1997, 10, 2, 6, 6)])
    def testMonthlyByHourAndSecond(self):
        self.assertEqual(list(rrule(MONTHLY,
                              count=3,
                              byhour=(6, 18),
                              bysecond=(6, 18),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 18, 0, 6),
                          datetime(1997, 9, 2, 18, 0, 18),
                          datetime(1997, 10, 2, 6, 0, 6)])
    def testMonthlyByMinuteAndSecond(self):
        self.assertEqual(list(rrule(MONTHLY,
                              count=3,
                              byminute=(6, 18),
                              bysecond=(6, 18),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 9, 6, 6),
                          datetime(1997, 9, 2, 9, 6, 18),
                          datetime(1997, 9, 2, 9, 18, 6)])
    def testMonthlyByHourAndMinuteAndSecond(self):
        self.assertEqual(list(rrule(MONTHLY,
                              count=3,
                              byhour=(6, 18),
                              byminute=(6, 18),
                              bysecond=(6, 18),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 18, 6, 6),
                          datetime(1997, 9, 2, 18, 6, 18),
                          datetime(1997, 9, 2, 18, 18, 6)])
    def testMonthlyBySetPos(self):
        """BYSETPOS picks the 3rd and 3rd-from-last slot of each month's 4-slot set."""
        self.assertEqual(list(rrule(MONTHLY,
                              count=3,
                              bymonthday=(13, 17),
                              byhour=(6, 18),
                              bysetpos=(3, -3),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 13, 18, 0),
                          datetime(1997, 9, 17, 6, 0),
                          datetime(1997, 10, 13, 18, 0)])
    # WEEKLY frequency tests: same BYxxx filter matrix as the MONTHLY group,
    # evaluated with a one-week base period. Expected lists are regression oracles.
    def testWeekly(self):
        self.assertEqual(list(rrule(WEEKLY,
                              count=3,
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 9, 0),
                          datetime(1997, 9, 9, 9, 0),
                          datetime(1997, 9, 16, 9, 0)])
    def testWeeklyInterval(self):
        self.assertEqual(list(rrule(WEEKLY,
                              count=3,
                              interval=2,
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 9, 0),
                          datetime(1997, 9, 16, 9, 0),
                          datetime(1997, 9, 30, 9, 0)])
    def testWeeklyIntervalLarge(self):
        self.assertEqual(list(rrule(WEEKLY,
                              count=3,
                              interval=20,
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 9, 0),
                          datetime(1998, 1, 20, 9, 0),
                          datetime(1998, 6, 9, 9, 0)])
    def testWeeklyByMonth(self):
        self.assertEqual(list(rrule(WEEKLY,
                              count=3,
                              bymonth=(1, 3),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 1, 6, 9, 0),
                          datetime(1998, 1, 13, 9, 0),
                          datetime(1998, 1, 20, 9, 0)])
    def testWeeklyByMonthDay(self):
        self.assertEqual(list(rrule(WEEKLY,
                              count=3,
                              bymonthday=(1, 3),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 3, 9, 0),
                          datetime(1997, 10, 1, 9, 0),
                          datetime(1997, 10, 3, 9, 0)])
    def testWeeklyByMonthAndMonthDay(self):
        self.assertEqual(list(rrule(WEEKLY,
                              count=3,
                              bymonth=(1, 3),
                              bymonthday=(5, 7),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 1, 5, 9, 0),
                          datetime(1998, 1, 7, 9, 0),
                          datetime(1998, 3, 5, 9, 0)])
    def testWeeklyByWeekDay(self):
        self.assertEqual(list(rrule(WEEKLY,
                              count=3,
                              byweekday=(TU, TH),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 9, 0),
                          datetime(1997, 9, 4, 9, 0),
                          datetime(1997, 9, 9, 9, 0)])
    def testWeeklyByNWeekDay(self):
        # Nth-weekday ordinals are meaningless at WEEKLY frequency, so the
        # result matches plain byweekday=(TU, TH).
        self.assertEqual(list(rrule(WEEKLY,
                              count=3,
                              byweekday=(TU(1), TH(-1)),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 9, 0),
                          datetime(1997, 9, 4, 9, 0),
                          datetime(1997, 9, 9, 9, 0)])
    def testWeeklyByMonthAndWeekDay(self):
        # This test is interesting, because it crosses the year
        # boundary in a weekly period to find day '1' as a
        # valid recurrence.
        self.assertEqual(list(rrule(WEEKLY,
                              count=3,
                              bymonth=(1, 3),
                              byweekday=(TU, TH),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 1, 1, 9, 0),
                          datetime(1998, 1, 6, 9, 0),
                          datetime(1998, 1, 8, 9, 0)])
    def testWeeklyByMonthAndNWeekDay(self):
        self.assertEqual(list(rrule(WEEKLY,
                              count=3,
                              bymonth=(1, 3),
                              byweekday=(TU(1), TH(-1)),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 1, 1, 9, 0),
                          datetime(1998, 1, 6, 9, 0),
                          datetime(1998, 1, 8, 9, 0)])
    def testWeeklyByMonthDayAndWeekDay(self):
        self.assertEqual(list(rrule(WEEKLY,
                              count=3,
                              bymonthday=(1, 3),
                              byweekday=(TU, TH),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 1, 1, 9, 0),
                          datetime(1998, 2, 3, 9, 0),
                          datetime(1998, 3, 3, 9, 0)])
    def testWeeklyByMonthAndMonthDayAndWeekDay(self):
        self.assertEqual(list(rrule(WEEKLY,
                              count=3,
                              bymonth=(1, 3),
                              bymonthday=(1, 3),
                              byweekday=(TU, TH),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 1, 1, 9, 0),
                          datetime(1998, 3, 3, 9, 0),
                          datetime(2001, 3, 1, 9, 0)])
    def testWeeklyByYearDay(self):
        self.assertEqual(list(rrule(WEEKLY,
                              count=4,
                              byyearday=(1, 100, 200, 365),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 12, 31, 9, 0),
                          datetime(1998, 1, 1, 9, 0),
                          datetime(1998, 4, 10, 9, 0),
                          datetime(1998, 7, 19, 9, 0)])
    def testWeeklyByYearDayNeg(self):
        # Negative year days count backwards from Dec 31; same dates as above.
        self.assertEqual(list(rrule(WEEKLY,
                              count=4,
                              byyearday=(-365, -266, -166, -1),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 12, 31, 9, 0),
                          datetime(1998, 1, 1, 9, 0),
                          datetime(1998, 4, 10, 9, 0),
                          datetime(1998, 7, 19, 9, 0)])
    def testWeeklyByMonthAndYearDay(self):
        self.assertEqual(list(rrule(WEEKLY,
                              count=4,
                              bymonth=(1, 7),
                              byyearday=(1, 100, 200, 365),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 1, 1, 9, 0),
                          datetime(1998, 7, 19, 9, 0),
                          datetime(1999, 1, 1, 9, 0),
                          datetime(1999, 7, 19, 9, 0)])
    def testWeeklyByMonthAndYearDayNeg(self):
        self.assertEqual(list(rrule(WEEKLY,
                              count=4,
                              bymonth=(1, 7),
                              byyearday=(-365, -266, -166, -1),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 1, 1, 9, 0),
                          datetime(1998, 7, 19, 9, 0),
                          datetime(1999, 1, 1, 9, 0),
                          datetime(1999, 7, 19, 9, 0)])
    def testWeeklyByWeekNo(self):
        self.assertEqual(list(rrule(WEEKLY,
                              count=3,
                              byweekno=20,
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 5, 11, 9, 0),
                          datetime(1998, 5, 12, 9, 0),
                          datetime(1998, 5, 13, 9, 0)])
    def testWeeklyByWeekNoAndWeekDay(self):
        # That's a nice one. The first days of week number one
        # may be in the last year.
        self.assertEqual(list(rrule(WEEKLY,
                              count=3,
                              byweekno=1,
                              byweekday=MO,
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 12, 29, 9, 0),
                          datetime(1999, 1, 4, 9, 0),
                          datetime(2000, 1, 3, 9, 0)])
    def testWeeklyByWeekNoAndWeekDayLarge(self):
        # Another nice test. The last days of week number 52/53
        # may be in the next year.
        self.assertEqual(list(rrule(WEEKLY,
                              count=3,
                              byweekno=52,
                              byweekday=SU,
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 12, 28, 9, 0),
                          datetime(1998, 12, 27, 9, 0),
                          datetime(2000, 1, 2, 9, 0)])
    def testWeeklyByWeekNoAndWeekDayLast(self):
        """byweekno=-1 means the last ISO week of the year."""
        self.assertEqual(list(rrule(WEEKLY,
                              count=3,
                              byweekno=-1,
                              byweekday=SU,
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 12, 28, 9, 0),
                          datetime(1999, 1, 3, 9, 0),
                          datetime(2000, 1, 2, 9, 0)])
    def testWeeklyByWeekNoAndWeekDay53(self):
        """Week 53 exists only in long ISO years (1998, 2004, 2009, ...)."""
        self.assertEqual(list(rrule(WEEKLY,
                              count=3,
                              byweekno=53,
                              byweekday=MO,
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 12, 28, 9, 0),
                          datetime(2004, 12, 27, 9, 0),
                          datetime(2009, 12, 28, 9, 0)])
    def testWeeklyByEaster(self):
        # byeaster=0: Easter Sunday itself.
        self.assertEqual(list(rrule(WEEKLY,
                              count=3,
                              byeaster=0,
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 4, 12, 9, 0),
                          datetime(1999, 4, 4, 9, 0),
                          datetime(2000, 4, 23, 9, 0)])
    def testWeeklyByEasterPos(self):
        self.assertEqual(list(rrule(WEEKLY,
                              count=3,
                              byeaster=1,
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 4, 13, 9, 0),
                          datetime(1999, 4, 5, 9, 0),
                          datetime(2000, 4, 24, 9, 0)])
    def testWeeklyByEasterNeg(self):
        self.assertEqual(list(rrule(WEEKLY,
                              count=3,
                              byeaster=-1,
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 4, 11, 9, 0),
                          datetime(1999, 4, 3, 9, 0),
                          datetime(2000, 4, 22, 9, 0)])
    def testWeeklyByHour(self):
        self.assertEqual(list(rrule(WEEKLY,
                              count=3,
                              byhour=(6, 18),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 18, 0),
                          datetime(1997, 9, 9, 6, 0),
                          datetime(1997, 9, 9, 18, 0)])
    def testWeeklyByMinute(self):
        self.assertEqual(list(rrule(WEEKLY,
                              count=3,
                              byminute=(6, 18),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 9, 6),
                          datetime(1997, 9, 2, 9, 18),
                          datetime(1997, 9, 9, 9, 6)])
    def testWeeklyBySecond(self):
        self.assertEqual(list(rrule(WEEKLY,
                              count=3,
                              bysecond=(6, 18),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 9, 0, 6),
                          datetime(1997, 9, 2, 9, 0, 18),
                          datetime(1997, 9, 9, 9, 0, 6)])
    def testWeeklyByHourAndMinute(self):
        self.assertEqual(list(rrule(WEEKLY,
                              count=3,
                              byhour=(6, 18),
                              byminute=(6, 18),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 18, 6),
                          datetime(1997, 9, 2, 18, 18),
                          datetime(1997, 9, 9, 6, 6)])
    def testWeeklyByHourAndSecond(self):
        self.assertEqual(list(rrule(WEEKLY,
                              count=3,
                              byhour=(6, 18),
                              bysecond=(6, 18),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 18, 0, 6),
                          datetime(1997, 9, 2, 18, 0, 18),
                          datetime(1997, 9, 9, 6, 0, 6)])
    def testWeeklyByMinuteAndSecond(self):
        self.assertEqual(list(rrule(WEEKLY,
                              count=3,
                              byminute=(6, 18),
                              bysecond=(6, 18),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 9, 6, 6),
                          datetime(1997, 9, 2, 9, 6, 18),
                          datetime(1997, 9, 2, 9, 18, 6)])
    def testWeeklyByHourAndMinuteAndSecond(self):
        self.assertEqual(list(rrule(WEEKLY,
                              count=3,
                              byhour=(6, 18),
                              byminute=(6, 18),
                              bysecond=(6, 18),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 18, 6, 6),
                          datetime(1997, 9, 2, 18, 6, 18),
                          datetime(1997, 9, 2, 18, 18, 6)])
    def testWeeklyBySetPos(self):
        """BYSETPOS picks the 3rd and 3rd-from-last slot of each week's 4-slot set."""
        self.assertEqual(list(rrule(WEEKLY,
                              count=3,
                              byweekday=(TU, TH),
                              byhour=(6, 18),
                              bysetpos=(3, -3),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 18, 0),
                          datetime(1997, 9, 4, 6, 0),
                          datetime(1997, 9, 9, 18, 0)])
    # DAILY frequency tests: same BYxxx filter matrix, one-day base period.
    # Expected lists are hand-computed regression oracles.
    def testDaily(self):
        self.assertEqual(list(rrule(DAILY,
                              count=3,
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 9, 0),
                          datetime(1997, 9, 3, 9, 0),
                          datetime(1997, 9, 4, 9, 0)])
    def testDailyInterval(self):
        self.assertEqual(list(rrule(DAILY,
                              count=3,
                              interval=2,
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 9, 0),
                          datetime(1997, 9, 4, 9, 0),
                          datetime(1997, 9, 6, 9, 0)])
    def testDailyIntervalLarge(self):
        self.assertEqual(list(rrule(DAILY,
                              count=3,
                              interval=92,
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 9, 0),
                          datetime(1997, 12, 3, 9, 0),
                          datetime(1998, 3, 5, 9, 0)])
    def testDailyByMonth(self):
        self.assertEqual(list(rrule(DAILY,
                              count=3,
                              bymonth=(1, 3),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 1, 1, 9, 0),
                          datetime(1998, 1, 2, 9, 0),
                          datetime(1998, 1, 3, 9, 0)])
    def testDailyByMonthDay(self):
        self.assertEqual(list(rrule(DAILY,
                              count=3,
                              bymonthday=(1, 3),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 3, 9, 0),
                          datetime(1997, 10, 1, 9, 0),
                          datetime(1997, 10, 3, 9, 0)])
    def testDailyByMonthAndMonthDay(self):
        self.assertEqual(list(rrule(DAILY,
                              count=3,
                              bymonth=(1, 3),
                              bymonthday=(5, 7),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 1, 5, 9, 0),
                          datetime(1998, 1, 7, 9, 0),
                          datetime(1998, 3, 5, 9, 0)])
    def testDailyByWeekDay(self):
        self.assertEqual(list(rrule(DAILY,
                              count=3,
                              byweekday=(TU, TH),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 9, 0),
                          datetime(1997, 9, 4, 9, 0),
                          datetime(1997, 9, 9, 9, 0)])
    def testDailyByNWeekDay(self):
        # Nth-weekday ordinals are ignored at DAILY frequency; result matches
        # plain byweekday=(TU, TH).
        self.assertEqual(list(rrule(DAILY,
                              count=3,
                              byweekday=(TU(1), TH(-1)),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 9, 0),
                          datetime(1997, 9, 4, 9, 0),
                          datetime(1997, 9, 9, 9, 0)])
    def testDailyByMonthAndWeekDay(self):
        self.assertEqual(list(rrule(DAILY,
                              count=3,
                              bymonth=(1, 3),
                              byweekday=(TU, TH),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 1, 1, 9, 0),
                          datetime(1998, 1, 6, 9, 0),
                          datetime(1998, 1, 8, 9, 0)])
    def testDailyByMonthAndNWeekDay(self):
        self.assertEqual(list(rrule(DAILY,
                              count=3,
                              bymonth=(1, 3),
                              byweekday=(TU(1), TH(-1)),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 1, 1, 9, 0),
                          datetime(1998, 1, 6, 9, 0),
                          datetime(1998, 1, 8, 9, 0)])
    def testDailyByMonthDayAndWeekDay(self):
        self.assertEqual(list(rrule(DAILY,
                              count=3,
                              bymonthday=(1, 3),
                              byweekday=(TU, TH),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 1, 1, 9, 0),
                          datetime(1998, 2, 3, 9, 0),
                          datetime(1998, 3, 3, 9, 0)])
    def testDailyByMonthAndMonthDayAndWeekDay(self):
        self.assertEqual(list(rrule(DAILY,
                              count=3,
                              bymonth=(1, 3),
                              bymonthday=(1, 3),
                              byweekday=(TU, TH),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 1, 1, 9, 0),
                          datetime(1998, 3, 3, 9, 0),
                          datetime(2001, 3, 1, 9, 0)])
    def testDailyByYearDay(self):
        self.assertEqual(list(rrule(DAILY,
                              count=4,
                              byyearday=(1, 100, 200, 365),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 12, 31, 9, 0),
                          datetime(1998, 1, 1, 9, 0),
                          datetime(1998, 4, 10, 9, 0),
                          datetime(1998, 7, 19, 9, 0)])
    def testDailyByYearDayNeg(self):
        # Negative year days count backwards from Dec 31; same dates as above.
        self.assertEqual(list(rrule(DAILY,
                              count=4,
                              byyearday=(-365, -266, -166, -1),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 12, 31, 9, 0),
                          datetime(1998, 1, 1, 9, 0),
                          datetime(1998, 4, 10, 9, 0),
                          datetime(1998, 7, 19, 9, 0)])
    def testDailyByMonthAndYearDay(self):
        self.assertEqual(list(rrule(DAILY,
                              count=4,
                              bymonth=(1, 7),
                              byyearday=(1, 100, 200, 365),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 1, 1, 9, 0),
                          datetime(1998, 7, 19, 9, 0),
                          datetime(1999, 1, 1, 9, 0),
                          datetime(1999, 7, 19, 9, 0)])
    def testDailyByMonthAndYearDayNeg(self):
        self.assertEqual(list(rrule(DAILY,
                              count=4,
                              bymonth=(1, 7),
                              byyearday=(-365, -266, -166, -1),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 1, 1, 9, 0),
                          datetime(1998, 7, 19, 9, 0),
                          datetime(1999, 1, 1, 9, 0),
                          datetime(1999, 7, 19, 9, 0)])
    def testDailyByWeekNo(self):
        self.assertEqual(list(rrule(DAILY,
                              count=3,
                              byweekno=20,
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 5, 11, 9, 0),
                          datetime(1998, 5, 12, 9, 0),
                          datetime(1998, 5, 13, 9, 0)])
    def testDailyByWeekNoAndWeekDay(self):
        # That's a nice one. The first days of week number one
        # may be in the last year.
        self.assertEqual(list(rrule(DAILY,
                              count=3,
                              byweekno=1,
                              byweekday=MO,
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 12, 29, 9, 0),
                          datetime(1999, 1, 4, 9, 0),
                          datetime(2000, 1, 3, 9, 0)])
    def testDailyByWeekNoAndWeekDayLarge(self):
        # Another nice test. The last days of week number 52/53
        # may be in the next year.
        self.assertEqual(list(rrule(DAILY,
                              count=3,
                              byweekno=52,
                              byweekday=SU,
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 12, 28, 9, 0),
                          datetime(1998, 12, 27, 9, 0),
                          datetime(2000, 1, 2, 9, 0)])
    def testDailyByWeekNoAndWeekDayLast(self):
        """byweekno=-1 means the last ISO week of the year."""
        self.assertEqual(list(rrule(DAILY,
                              count=3,
                              byweekno=-1,
                              byweekday=SU,
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 12, 28, 9, 0),
                          datetime(1999, 1, 3, 9, 0),
                          datetime(2000, 1, 2, 9, 0)])
    def testDailyByWeekNoAndWeekDay53(self):
        """Week 53 exists only in long ISO years (1998, 2004, 2009, ...)."""
        self.assertEqual(list(rrule(DAILY,
                              count=3,
                              byweekno=53,
                              byweekday=MO,
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 12, 28, 9, 0),
                          datetime(2004, 12, 27, 9, 0),
                          datetime(2009, 12, 28, 9, 0)])
    def testDailyByEaster(self):
        # byeaster=0: Easter Sunday itself.
        self.assertEqual(list(rrule(DAILY,
                              count=3,
                              byeaster=0,
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 4, 12, 9, 0),
                          datetime(1999, 4, 4, 9, 0),
                          datetime(2000, 4, 23, 9, 0)])
    def testDailyByEasterPos(self):
        self.assertEqual(list(rrule(DAILY,
                              count=3,
                              byeaster=1,
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 4, 13, 9, 0),
                          datetime(1999, 4, 5, 9, 0),
                          datetime(2000, 4, 24, 9, 0)])
    def testDailyByEasterNeg(self):
        self.assertEqual(list(rrule(DAILY,
                              count=3,
                              byeaster=-1,
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 4, 11, 9, 0),
                          datetime(1999, 4, 3, 9, 0),
                          datetime(2000, 4, 22, 9, 0)])
    def testDailyByHour(self):
        self.assertEqual(list(rrule(DAILY,
                              count=3,
                              byhour=(6, 18),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 18, 0),
                          datetime(1997, 9, 3, 6, 0),
                          datetime(1997, 9, 3, 18, 0)])
    def testDailyByMinute(self):
        self.assertEqual(list(rrule(DAILY,
                              count=3,
                              byminute=(6, 18),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 9, 6),
                          datetime(1997, 9, 2, 9, 18),
                          datetime(1997, 9, 3, 9, 6)])
    def testDailyBySecond(self):
        self.assertEqual(list(rrule(DAILY,
                              count=3,
                              bysecond=(6, 18),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 9, 0, 6),
                          datetime(1997, 9, 2, 9, 0, 18),
                          datetime(1997, 9, 3, 9, 0, 6)])
    def testDailyByHourAndMinute(self):
        self.assertEqual(list(rrule(DAILY,
                              count=3,
                              byhour=(6, 18),
                              byminute=(6, 18),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 18, 6),
                          datetime(1997, 9, 2, 18, 18),
                          datetime(1997, 9, 3, 6, 6)])
    def testDailyByHourAndSecond(self):
        self.assertEqual(list(rrule(DAILY,
                              count=3,
                              byhour=(6, 18),
                              bysecond=(6, 18),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 18, 0, 6),
                          datetime(1997, 9, 2, 18, 0, 18),
                          datetime(1997, 9, 3, 6, 0, 6)])
    def testDailyByMinuteAndSecond(self):
        self.assertEqual(list(rrule(DAILY,
                              count=3,
                              byminute=(6, 18),
                              bysecond=(6, 18),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 9, 6, 6),
                          datetime(1997, 9, 2, 9, 6, 18),
                          datetime(1997, 9, 2, 9, 18, 6)])
    def testDailyByHourAndMinuteAndSecond(self):
        self.assertEqual(list(rrule(DAILY,
                              count=3,
                              byhour=(6, 18),
                              byminute=(6, 18),
                              bysecond=(6, 18),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 18, 6, 6),
                          datetime(1997, 9, 2, 18, 6, 18),
                          datetime(1997, 9, 2, 18, 18, 6)])
    def testDailyBySetPos(self):
        """BYSETPOS picks the 3rd and 3rd-from-last slot of each day's 4-slot set."""
        self.assertEqual(list(rrule(DAILY,
                              count=3,
                              byhour=(6, 18),
                              byminute=(15, 45),
                              bysetpos=(3, -3),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 18, 15),
                          datetime(1997, 9, 3, 6, 45),
                          datetime(1997, 9, 3, 18, 15)])
    # HOURLY frequency tests: same BYxxx filter matrix, one-hour base period.
    # Date-level filters (bymonth, byweekno, ...) emit consecutive hours of the
    # first matching day. Expected lists are regression oracles.
    def testHourly(self):
        self.assertEqual(list(rrule(HOURLY,
                              count=3,
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 9, 0),
                          datetime(1997, 9, 2, 10, 0),
                          datetime(1997, 9, 2, 11, 0)])
    def testHourlyInterval(self):
        self.assertEqual(list(rrule(HOURLY,
                              count=3,
                              interval=2,
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 9, 0),
                          datetime(1997, 9, 2, 11, 0),
                          datetime(1997, 9, 2, 13, 0)])
    def testHourlyIntervalLarge(self):
        # interval=769 hours is a little over 32 days.
        self.assertEqual(list(rrule(HOURLY,
                              count=3,
                              interval=769,
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 9, 0),
                          datetime(1997, 10, 4, 10, 0),
                          datetime(1997, 11, 5, 11, 0)])
    def testHourlyByMonth(self):
        self.assertEqual(list(rrule(HOURLY,
                              count=3,
                              bymonth=(1, 3),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 1, 1, 0, 0),
                          datetime(1998, 1, 1, 1, 0),
                          datetime(1998, 1, 1, 2, 0)])
    def testHourlyByMonthDay(self):
        self.assertEqual(list(rrule(HOURLY,
                              count=3,
                              bymonthday=(1, 3),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 3, 0, 0),
                          datetime(1997, 9, 3, 1, 0),
                          datetime(1997, 9, 3, 2, 0)])
    def testHourlyByMonthAndMonthDay(self):
        self.assertEqual(list(rrule(HOURLY,
                              count=3,
                              bymonth=(1, 3),
                              bymonthday=(5, 7),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 1, 5, 0, 0),
                          datetime(1998, 1, 5, 1, 0),
                          datetime(1998, 1, 5, 2, 0)])
    def testHourlyByWeekDay(self):
        self.assertEqual(list(rrule(HOURLY,
                              count=3,
                              byweekday=(TU, TH),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 9, 0),
                          datetime(1997, 9, 2, 10, 0),
                          datetime(1997, 9, 2, 11, 0)])
    def testHourlyByNWeekDay(self):
        # Nth-weekday ordinals are ignored at HOURLY frequency.
        self.assertEqual(list(rrule(HOURLY,
                              count=3,
                              byweekday=(TU(1), TH(-1)),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 9, 0),
                          datetime(1997, 9, 2, 10, 0),
                          datetime(1997, 9, 2, 11, 0)])
    def testHourlyByMonthAndWeekDay(self):
        self.assertEqual(list(rrule(HOURLY,
                              count=3,
                              bymonth=(1, 3),
                              byweekday=(TU, TH),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 1, 1, 0, 0),
                          datetime(1998, 1, 1, 1, 0),
                          datetime(1998, 1, 1, 2, 0)])
    def testHourlyByMonthAndNWeekDay(self):
        self.assertEqual(list(rrule(HOURLY,
                              count=3,
                              bymonth=(1, 3),
                              byweekday=(TU(1), TH(-1)),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 1, 1, 0, 0),
                          datetime(1998, 1, 1, 1, 0),
                          datetime(1998, 1, 1, 2, 0)])
    def testHourlyByMonthDayAndWeekDay(self):
        self.assertEqual(list(rrule(HOURLY,
                              count=3,
                              bymonthday=(1, 3),
                              byweekday=(TU, TH),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 1, 1, 0, 0),
                          datetime(1998, 1, 1, 1, 0),
                          datetime(1998, 1, 1, 2, 0)])
    def testHourlyByMonthAndMonthDayAndWeekDay(self):
        self.assertEqual(list(rrule(HOURLY,
                              count=3,
                              bymonth=(1, 3),
                              bymonthday=(1, 3),
                              byweekday=(TU, TH),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 1, 1, 0, 0),
                          datetime(1998, 1, 1, 1, 0),
                          datetime(1998, 1, 1, 2, 0)])
    def testHourlyByYearDay(self):
        self.assertEqual(list(rrule(HOURLY,
                              count=4,
                              byyearday=(1, 100, 200, 365),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 12, 31, 0, 0),
                          datetime(1997, 12, 31, 1, 0),
                          datetime(1997, 12, 31, 2, 0),
                          datetime(1997, 12, 31, 3, 0)])
    def testHourlyByYearDayNeg(self):
        # Negative year days count backwards from Dec 31; same dates as above.
        self.assertEqual(list(rrule(HOURLY,
                              count=4,
                              byyearday=(-365, -266, -166, -1),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 12, 31, 0, 0),
                          datetime(1997, 12, 31, 1, 0),
                          datetime(1997, 12, 31, 2, 0),
                          datetime(1997, 12, 31, 3, 0)])
    def testHourlyByMonthAndYearDay(self):
        self.assertEqual(list(rrule(HOURLY,
                              count=4,
                              bymonth=(4, 7),
                              byyearday=(1, 100, 200, 365),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 4, 10, 0, 0),
                          datetime(1998, 4, 10, 1, 0),
                          datetime(1998, 4, 10, 2, 0),
                          datetime(1998, 4, 10, 3, 0)])
    def testHourlyByMonthAndYearDayNeg(self):
        self.assertEqual(list(rrule(HOURLY,
                              count=4,
                              bymonth=(4, 7),
                              byyearday=(-365, -266, -166, -1),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 4, 10, 0, 0),
                          datetime(1998, 4, 10, 1, 0),
                          datetime(1998, 4, 10, 2, 0),
                          datetime(1998, 4, 10, 3, 0)])
    def testHourlyByWeekNo(self):
        self.assertEqual(list(rrule(HOURLY,
                              count=3,
                              byweekno=20,
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 5, 11, 0, 0),
                          datetime(1998, 5, 11, 1, 0),
                          datetime(1998, 5, 11, 2, 0)])
    def testHourlyByWeekNoAndWeekDay(self):
        self.assertEqual(list(rrule(HOURLY,
                              count=3,
                              byweekno=1,
                              byweekday=MO,
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 12, 29, 0, 0),
                          datetime(1997, 12, 29, 1, 0),
                          datetime(1997, 12, 29, 2, 0)])
    def testHourlyByWeekNoAndWeekDayLarge(self):
        self.assertEqual(list(rrule(HOURLY,
                              count=3,
                              byweekno=52,
                              byweekday=SU,
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 12, 28, 0, 0),
                          datetime(1997, 12, 28, 1, 0),
                          datetime(1997, 12, 28, 2, 0)])
    def testHourlyByWeekNoAndWeekDayLast(self):
        """byweekno=-1 means the last ISO week of the year."""
        self.assertEqual(list(rrule(HOURLY,
                              count=3,
                              byweekno=-1,
                              byweekday=SU,
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 12, 28, 0, 0),
                          datetime(1997, 12, 28, 1, 0),
                          datetime(1997, 12, 28, 2, 0)])
    def testHourlyByWeekNoAndWeekDay53(self):
        """Week 53 exists only in long ISO years (1998 is the first after dtstart)."""
        self.assertEqual(list(rrule(HOURLY,
                              count=3,
                              byweekno=53,
                              byweekday=MO,
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 12, 28, 0, 0),
                          datetime(1998, 12, 28, 1, 0),
                          datetime(1998, 12, 28, 2, 0)])
    def testHourlyByEaster(self):
        # byeaster=0: Easter Sunday itself.
        self.assertEqual(list(rrule(HOURLY,
                              count=3,
                              byeaster=0,
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 4, 12, 0, 0),
                          datetime(1998, 4, 12, 1, 0),
                          datetime(1998, 4, 12, 2, 0)])
    def testHourlyByEasterPos(self):
        self.assertEqual(list(rrule(HOURLY,
                              count=3,
                              byeaster=1,
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 4, 13, 0, 0),
                          datetime(1998, 4, 13, 1, 0),
                          datetime(1998, 4, 13, 2, 0)])
    def testHourlyByEasterNeg(self):
        self.assertEqual(list(rrule(HOURLY,
                              count=3,
                              byeaster=-1,
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 4, 11, 0, 0),
                          datetime(1998, 4, 11, 1, 0),
                          datetime(1998, 4, 11, 2, 0)])
    def testHourlyByHour(self):
        self.assertEqual(list(rrule(HOURLY,
                              count=3,
                              byhour=(6, 18),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 18, 0),
                          datetime(1997, 9, 3, 6, 0),
                          datetime(1997, 9, 3, 18, 0)])
    def testHourlyByMinute(self):
        self.assertEqual(list(rrule(HOURLY,
                              count=3,
                              byminute=(6, 18),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 9, 6),
                          datetime(1997, 9, 2, 9, 18),
                          datetime(1997, 9, 2, 10, 6)])
    def testHourlyBySecond(self):
        self.assertEqual(list(rrule(HOURLY,
                              count=3,
                              bysecond=(6, 18),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 9, 0, 6),
                          datetime(1997, 9, 2, 9, 0, 18),
                          datetime(1997, 9, 2, 10, 0, 6)])
    def testHourlyByHourAndMinute(self):
        self.assertEqual(list(rrule(HOURLY,
                              count=3,
                              byhour=(6, 18),
                              byminute=(6, 18),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 18, 6),
                          datetime(1997, 9, 2, 18, 18),
                          datetime(1997, 9, 3, 6, 6)])
    def testHourlyByHourAndSecond(self):
        self.assertEqual(list(rrule(HOURLY,
                              count=3,
                              byhour=(6, 18),
                              bysecond=(6, 18),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 18, 0, 6),
                          datetime(1997, 9, 2, 18, 0, 18),
                          datetime(1997, 9, 3, 6, 0, 6)])
    def testHourlyByMinuteAndSecond(self):
        self.assertEqual(list(rrule(HOURLY,
                              count=3,
                              byminute=(6, 18),
                              bysecond=(6, 18),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 9, 6, 6),
                          datetime(1997, 9, 2, 9, 6, 18),
                          datetime(1997, 9, 2, 9, 18, 6)])
    def testHourlyByHourAndMinuteAndSecond(self):
        self.assertEqual(list(rrule(HOURLY,
                              count=3,
                              byhour=(6, 18),
                              byminute=(6, 18),
                              bysecond=(6, 18),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 18, 6, 6),
                          datetime(1997, 9, 2, 18, 6, 18),
                          datetime(1997, 9, 2, 18, 18, 6)])
    def testHourlyBySetPos(self):
        """BYSETPOS picks the 3rd and 3rd-from-last slot of each hour's 4-slot set."""
        self.assertEqual(list(rrule(HOURLY,
                              count=3,
                              byminute=(15, 45),
                              bysecond=(15, 45),
                              bysetpos=(3, -3),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 9, 15, 45),
                          datetime(1997, 9, 2, 9, 45, 15),
                          datetime(1997, 9, 2, 10, 15, 45)])
    # MINUTELY frequency tests: same BYxxx filter matrix, one-minute base
    # period. Expected lists are hand-computed regression oracles.
    def testMinutely(self):
        self.assertEqual(list(rrule(MINUTELY,
                              count=3,
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 9, 0),
                          datetime(1997, 9, 2, 9, 1),
                          datetime(1997, 9, 2, 9, 2)])
    def testMinutelyInterval(self):
        self.assertEqual(list(rrule(MINUTELY,
                              count=3,
                              interval=2,
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 9, 0),
                          datetime(1997, 9, 2, 9, 2),
                          datetime(1997, 9, 2, 9, 4)])
    def testMinutelyIntervalLarge(self):
        # interval=1501 minutes is one day, one hour and one minute.
        self.assertEqual(list(rrule(MINUTELY,
                              count=3,
                              interval=1501,
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 9, 0),
                          datetime(1997, 9, 3, 10, 1),
                          datetime(1997, 9, 4, 11, 2)])
    def testMinutelyByMonth(self):
        self.assertEqual(list(rrule(MINUTELY,
                              count=3,
                              bymonth=(1, 3),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 1, 1, 0, 0),
                          datetime(1998, 1, 1, 0, 1),
                          datetime(1998, 1, 1, 0, 2)])
    def testMinutelyByMonthDay(self):
        self.assertEqual(list(rrule(MINUTELY,
                              count=3,
                              bymonthday=(1, 3),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 3, 0, 0),
                          datetime(1997, 9, 3, 0, 1),
                          datetime(1997, 9, 3, 0, 2)])
    def testMinutelyByMonthAndMonthDay(self):
        self.assertEqual(list(rrule(MINUTELY,
                              count=3,
                              bymonth=(1, 3),
                              bymonthday=(5, 7),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 1, 5, 0, 0),
                          datetime(1998, 1, 5, 0, 1),
                          datetime(1998, 1, 5, 0, 2)])
    def testMinutelyByWeekDay(self):
        self.assertEqual(list(rrule(MINUTELY,
                              count=3,
                              byweekday=(TU, TH),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 9, 0),
                          datetime(1997, 9, 2, 9, 1),
                          datetime(1997, 9, 2, 9, 2)])
    def testMinutelyByNWeekDay(self):
        # Nth-weekday ordinals are ignored at MINUTELY frequency.
        self.assertEqual(list(rrule(MINUTELY,
                              count=3,
                              byweekday=(TU(1), TH(-1)),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 9, 2, 9, 0),
                          datetime(1997, 9, 2, 9, 1),
                          datetime(1997, 9, 2, 9, 2)])
    def testMinutelyByMonthAndWeekDay(self):
        self.assertEqual(list(rrule(MINUTELY,
                              count=3,
                              bymonth=(1, 3),
                              byweekday=(TU, TH),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 1, 1, 0, 0),
                          datetime(1998, 1, 1, 0, 1),
                          datetime(1998, 1, 1, 0, 2)])
    def testMinutelyByMonthAndNWeekDay(self):
        self.assertEqual(list(rrule(MINUTELY,
                              count=3,
                              bymonth=(1, 3),
                              byweekday=(TU(1), TH(-1)),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 1, 1, 0, 0),
                          datetime(1998, 1, 1, 0, 1),
                          datetime(1998, 1, 1, 0, 2)])
    def testMinutelyByMonthDayAndWeekDay(self):
        self.assertEqual(list(rrule(MINUTELY,
                              count=3,
                              bymonthday=(1, 3),
                              byweekday=(TU, TH),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 1, 1, 0, 0),
                          datetime(1998, 1, 1, 0, 1),
                          datetime(1998, 1, 1, 0, 2)])
    def testMinutelyByMonthAndMonthDayAndWeekDay(self):
        self.assertEqual(list(rrule(MINUTELY,
                              count=3,
                              bymonth=(1, 3),
                              bymonthday=(1, 3),
                              byweekday=(TU, TH),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1998, 1, 1, 0, 0),
                          datetime(1998, 1, 1, 0, 1),
                          datetime(1998, 1, 1, 0, 2)])
    def testMinutelyByYearDay(self):
        self.assertEqual(list(rrule(MINUTELY,
                              count=4,
                              byyearday=(1, 100, 200, 365),
                              dtstart=datetime(1997, 9, 2, 9, 0))),
                         [datetime(1997, 12, 31, 0, 0),
                          datetime(1997, 12, 31, 0, 1),
                          datetime(1997, 12, 31, 0, 2),
                          datetime(1997, 12, 31, 0, 3)])
def testMinutelyByYearDayNeg(self):
self.assertEqual(list(rrule(MINUTELY,
count=4,
byyearday=(-365, -266, -166, -1),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 12, 31, 0, 0),
datetime(1997, 12, 31, 0, 1),
datetime(1997, 12, 31, 0, 2),
datetime(1997, 12, 31, 0, 3)])
def testMinutelyByMonthAndYearDay(self):
self.assertEqual(list(rrule(MINUTELY,
count=4,
bymonth=(4, 7),
byyearday=(1, 100, 200, 365),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1998, 4, 10, 0, 0),
datetime(1998, 4, 10, 0, 1),
datetime(1998, 4, 10, 0, 2),
datetime(1998, 4, 10, 0, 3)])
def testMinutelyByMonthAndYearDayNeg(self):
self.assertEqual(list(rrule(MINUTELY,
count=4,
bymonth=(4, 7),
byyearday=(-365, -266, -166, -1),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1998, 4, 10, 0, 0),
datetime(1998, 4, 10, 0, 1),
datetime(1998, 4, 10, 0, 2),
datetime(1998, 4, 10, 0, 3)])
def testMinutelyByWeekNo(self):
self.assertEqual(list(rrule(MINUTELY,
count=3,
byweekno=20,
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1998, 5, 11, 0, 0),
datetime(1998, 5, 11, 0, 1),
datetime(1998, 5, 11, 0, 2)])
def testMinutelyByWeekNoAndWeekDay(self):
self.assertEqual(list(rrule(MINUTELY,
count=3,
byweekno=1,
byweekday=MO,
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 12, 29, 0, 0),
datetime(1997, 12, 29, 0, 1),
datetime(1997, 12, 29, 0, 2)])
def testMinutelyByWeekNoAndWeekDayLarge(self):
self.assertEqual(list(rrule(MINUTELY,
count=3,
byweekno=52,
byweekday=SU,
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 12, 28, 0, 0),
datetime(1997, 12, 28, 0, 1),
datetime(1997, 12, 28, 0, 2)])
def testMinutelyByWeekNoAndWeekDayLast(self):
self.assertEqual(list(rrule(MINUTELY,
count=3,
byweekno=-1,
byweekday=SU,
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 12, 28, 0, 0),
datetime(1997, 12, 28, 0, 1),
datetime(1997, 12, 28, 0, 2)])
def testMinutelyByWeekNoAndWeekDay53(self):
self.assertEqual(list(rrule(MINUTELY,
count=3,
byweekno=53,
byweekday=MO,
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1998, 12, 28, 0, 0),
datetime(1998, 12, 28, 0, 1),
datetime(1998, 12, 28, 0, 2)])
def testMinutelyByEaster(self):
self.assertEqual(list(rrule(MINUTELY,
count=3,
byeaster=0,
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1998, 4, 12, 0, 0),
datetime(1998, 4, 12, 0, 1),
datetime(1998, 4, 12, 0, 2)])
def testMinutelyByEasterPos(self):
self.assertEqual(list(rrule(MINUTELY,
count=3,
byeaster=1,
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1998, 4, 13, 0, 0),
datetime(1998, 4, 13, 0, 1),
datetime(1998, 4, 13, 0, 2)])
def testMinutelyByEasterNeg(self):
self.assertEqual(list(rrule(MINUTELY,
count=3,
byeaster=-1,
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1998, 4, 11, 0, 0),
datetime(1998, 4, 11, 0, 1),
datetime(1998, 4, 11, 0, 2)])
def testMinutelyByHour(self):
self.assertEqual(list(rrule(MINUTELY,
count=3,
byhour=(6, 18),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 9, 2, 18, 0),
datetime(1997, 9, 2, 18, 1),
datetime(1997, 9, 2, 18, 2)])
def testMinutelyByMinute(self):
self.assertEqual(list(rrule(MINUTELY,
count=3,
byminute=(6, 18),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 9, 2, 9, 6),
datetime(1997, 9, 2, 9, 18),
datetime(1997, 9, 2, 10, 6)])
def testMinutelyBySecond(self):
self.assertEqual(list(rrule(MINUTELY,
count=3,
bysecond=(6, 18),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 9, 2, 9, 0, 6),
datetime(1997, 9, 2, 9, 0, 18),
datetime(1997, 9, 2, 9, 1, 6)])
def testMinutelyByHourAndMinute(self):
self.assertEqual(list(rrule(MINUTELY,
count=3,
byhour=(6, 18),
byminute=(6, 18),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 9, 2, 18, 6),
datetime(1997, 9, 2, 18, 18),
datetime(1997, 9, 3, 6, 6)])
def testMinutelyByHourAndSecond(self):
self.assertEqual(list(rrule(MINUTELY,
count=3,
byhour=(6, 18),
bysecond=(6, 18),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 9, 2, 18, 0, 6),
datetime(1997, 9, 2, 18, 0, 18),
datetime(1997, 9, 2, 18, 1, 6)])
def testMinutelyByMinuteAndSecond(self):
self.assertEqual(list(rrule(MINUTELY,
count=3,
byminute=(6, 18),
bysecond=(6, 18),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 9, 2, 9, 6, 6),
datetime(1997, 9, 2, 9, 6, 18),
datetime(1997, 9, 2, 9, 18, 6)])
def testMinutelyByHourAndMinuteAndSecond(self):
self.assertEqual(list(rrule(MINUTELY,
count=3,
byhour=(6, 18),
byminute=(6, 18),
bysecond=(6, 18),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 9, 2, 18, 6, 6),
datetime(1997, 9, 2, 18, 6, 18),
datetime(1997, 9, 2, 18, 18, 6)])
def testMinutelyBySetPos(self):
self.assertEqual(list(rrule(MINUTELY,
count=3,
bysecond=(15, 30, 45),
bysetpos=(3, -3),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 9, 2, 9, 0, 15),
datetime(1997, 9, 2, 9, 0, 45),
datetime(1997, 9, 2, 9, 1, 15)])
def testSecondly(self):
self.assertEqual(list(rrule(SECONDLY,
count=3,
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 9, 2, 9, 0, 0),
datetime(1997, 9, 2, 9, 0, 1),
datetime(1997, 9, 2, 9, 0, 2)])
def testSecondlyInterval(self):
self.assertEqual(list(rrule(SECONDLY,
count=3,
interval=2,
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 9, 2, 9, 0, 0),
datetime(1997, 9, 2, 9, 0, 2),
datetime(1997, 9, 2, 9, 0, 4)])
def testSecondlyIntervalLarge(self):
self.assertEqual(list(rrule(SECONDLY,
count=3,
interval=90061,
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 9, 2, 9, 0, 0),
datetime(1997, 9, 3, 10, 1, 1),
datetime(1997, 9, 4, 11, 2, 2)])
def testSecondlyByMonth(self):
self.assertEqual(list(rrule(SECONDLY,
count=3,
bymonth=(1, 3),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1998, 1, 1, 0, 0, 0),
datetime(1998, 1, 1, 0, 0, 1),
datetime(1998, 1, 1, 0, 0, 2)])
def testSecondlyByMonthDay(self):
self.assertEqual(list(rrule(SECONDLY,
count=3,
bymonthday=(1, 3),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 9, 3, 0, 0, 0),
datetime(1997, 9, 3, 0, 0, 1),
datetime(1997, 9, 3, 0, 0, 2)])
def testSecondlyByMonthAndMonthDay(self):
self.assertEqual(list(rrule(SECONDLY,
count=3,
bymonth=(1, 3),
bymonthday=(5, 7),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1998, 1, 5, 0, 0, 0),
datetime(1998, 1, 5, 0, 0, 1),
datetime(1998, 1, 5, 0, 0, 2)])
def testSecondlyByWeekDay(self):
self.assertEqual(list(rrule(SECONDLY,
count=3,
byweekday=(TU, TH),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 9, 2, 9, 0, 0),
datetime(1997, 9, 2, 9, 0, 1),
datetime(1997, 9, 2, 9, 0, 2)])
def testSecondlyByNWeekDay(self):
self.assertEqual(list(rrule(SECONDLY,
count=3,
byweekday=(TU(1), TH(-1)),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 9, 2, 9, 0, 0),
datetime(1997, 9, 2, 9, 0, 1),
datetime(1997, 9, 2, 9, 0, 2)])
def testSecondlyByMonthAndWeekDay(self):
self.assertEqual(list(rrule(SECONDLY,
count=3,
bymonth=(1, 3),
byweekday=(TU, TH),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1998, 1, 1, 0, 0, 0),
datetime(1998, 1, 1, 0, 0, 1),
datetime(1998, 1, 1, 0, 0, 2)])
def testSecondlyByMonthAndNWeekDay(self):
self.assertEqual(list(rrule(SECONDLY,
count=3,
bymonth=(1, 3),
byweekday=(TU(1), TH(-1)),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1998, 1, 1, 0, 0, 0),
datetime(1998, 1, 1, 0, 0, 1),
datetime(1998, 1, 1, 0, 0, 2)])
def testSecondlyByMonthDayAndWeekDay(self):
self.assertEqual(list(rrule(SECONDLY,
count=3,
bymonthday=(1, 3),
byweekday=(TU, TH),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1998, 1, 1, 0, 0, 0),
datetime(1998, 1, 1, 0, 0, 1),
datetime(1998, 1, 1, 0, 0, 2)])
def testSecondlyByMonthAndMonthDayAndWeekDay(self):
self.assertEqual(list(rrule(SECONDLY,
count=3,
bymonth=(1, 3),
bymonthday=(1, 3),
byweekday=(TU, TH),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1998, 1, 1, 0, 0, 0),
datetime(1998, 1, 1, 0, 0, 1),
datetime(1998, 1, 1, 0, 0, 2)])
def testSecondlyByYearDay(self):
self.assertEqual(list(rrule(SECONDLY,
count=4,
byyearday=(1, 100, 200, 365),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 12, 31, 0, 0, 0),
datetime(1997, 12, 31, 0, 0, 1),
datetime(1997, 12, 31, 0, 0, 2),
datetime(1997, 12, 31, 0, 0, 3)])
def testSecondlyByYearDayNeg(self):
self.assertEqual(list(rrule(SECONDLY,
count=4,
byyearday=(-365, -266, -166, -1),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 12, 31, 0, 0, 0),
datetime(1997, 12, 31, 0, 0, 1),
datetime(1997, 12, 31, 0, 0, 2),
datetime(1997, 12, 31, 0, 0, 3)])
def testSecondlyByMonthAndYearDay(self):
self.assertEqual(list(rrule(SECONDLY,
count=4,
bymonth=(4, 7),
byyearday=(1, 100, 200, 365),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1998, 4, 10, 0, 0, 0),
datetime(1998, 4, 10, 0, 0, 1),
datetime(1998, 4, 10, 0, 0, 2),
datetime(1998, 4, 10, 0, 0, 3)])
def testSecondlyByMonthAndYearDayNeg(self):
self.assertEqual(list(rrule(SECONDLY,
count=4,
bymonth=(4, 7),
byyearday=(-365, -266, -166, -1),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1998, 4, 10, 0, 0, 0),
datetime(1998, 4, 10, 0, 0, 1),
datetime(1998, 4, 10, 0, 0, 2),
datetime(1998, 4, 10, 0, 0, 3)])
def testSecondlyByWeekNo(self):
self.assertEqual(list(rrule(SECONDLY,
count=3,
byweekno=20,
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1998, 5, 11, 0, 0, 0),
datetime(1998, 5, 11, 0, 0, 1),
datetime(1998, 5, 11, 0, 0, 2)])
def testSecondlyByWeekNoAndWeekDay(self):
self.assertEqual(list(rrule(SECONDLY,
count=3,
byweekno=1,
byweekday=MO,
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 12, 29, 0, 0, 0),
datetime(1997, 12, 29, 0, 0, 1),
datetime(1997, 12, 29, 0, 0, 2)])
def testSecondlyByWeekNoAndWeekDayLarge(self):
self.assertEqual(list(rrule(SECONDLY,
count=3,
byweekno=52,
byweekday=SU,
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 12, 28, 0, 0, 0),
datetime(1997, 12, 28, 0, 0, 1),
datetime(1997, 12, 28, 0, 0, 2)])
def testSecondlyByWeekNoAndWeekDayLast(self):
self.assertEqual(list(rrule(SECONDLY,
count=3,
byweekno=-1,
byweekday=SU,
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 12, 28, 0, 0, 0),
datetime(1997, 12, 28, 0, 0, 1),
datetime(1997, 12, 28, 0, 0, 2)])
def testSecondlyByWeekNoAndWeekDay53(self):
self.assertEqual(list(rrule(SECONDLY,
count=3,
byweekno=53,
byweekday=MO,
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1998, 12, 28, 0, 0, 0),
datetime(1998, 12, 28, 0, 0, 1),
datetime(1998, 12, 28, 0, 0, 2)])
def testSecondlyByEaster(self):
self.assertEqual(list(rrule(SECONDLY,
count=3,
byeaster=0,
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1998, 4, 12, 0, 0, 0),
datetime(1998, 4, 12, 0, 0, 1),
datetime(1998, 4, 12, 0, 0, 2)])
def testSecondlyByEasterPos(self):
self.assertEqual(list(rrule(SECONDLY,
count=3,
byeaster=1,
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1998, 4, 13, 0, 0, 0),
datetime(1998, 4, 13, 0, 0, 1),
datetime(1998, 4, 13, 0, 0, 2)])
def testSecondlyByEasterNeg(self):
self.assertEqual(list(rrule(SECONDLY,
count=3,
byeaster=-1,
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1998, 4, 11, 0, 0, 0),
datetime(1998, 4, 11, 0, 0, 1),
datetime(1998, 4, 11, 0, 0, 2)])
def testSecondlyByHour(self):
self.assertEqual(list(rrule(SECONDLY,
count=3,
byhour=(6, 18),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 9, 2, 18, 0, 0),
datetime(1997, 9, 2, 18, 0, 1),
datetime(1997, 9, 2, 18, 0, 2)])
def testSecondlyByMinute(self):
self.assertEqual(list(rrule(SECONDLY,
count=3,
byminute=(6, 18),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 9, 2, 9, 6, 0),
datetime(1997, 9, 2, 9, 6, 1),
datetime(1997, 9, 2, 9, 6, 2)])
def testSecondlyBySecond(self):
self.assertEqual(list(rrule(SECONDLY,
count=3,
bysecond=(6, 18),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 9, 2, 9, 0, 6),
datetime(1997, 9, 2, 9, 0, 18),
datetime(1997, 9, 2, 9, 1, 6)])
def testSecondlyByHourAndMinute(self):
self.assertEqual(list(rrule(SECONDLY,
count=3,
byhour=(6, 18),
byminute=(6, 18),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 9, 2, 18, 6, 0),
datetime(1997, 9, 2, 18, 6, 1),
datetime(1997, 9, 2, 18, 6, 2)])
def testSecondlyByHourAndSecond(self):
self.assertEqual(list(rrule(SECONDLY,
count=3,
byhour=(6, 18),
bysecond=(6, 18),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 9, 2, 18, 0, 6),
datetime(1997, 9, 2, 18, 0, 18),
datetime(1997, 9, 2, 18, 1, 6)])
def testSecondlyByMinuteAndSecond(self):
self.assertEqual(list(rrule(SECONDLY,
count=3,
byminute=(6, 18),
bysecond=(6, 18),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 9, 2, 9, 6, 6),
datetime(1997, 9, 2, 9, 6, 18),
datetime(1997, 9, 2, 9, 18, 6)])
def testSecondlyByHourAndMinuteAndSecond(self):
self.assertEqual(list(rrule(SECONDLY,
count=3,
byhour=(6, 18),
byminute=(6, 18),
bysecond=(6, 18),
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 9, 2, 18, 6, 6),
datetime(1997, 9, 2, 18, 6, 18),
datetime(1997, 9, 2, 18, 18, 6)])
    def testSecondlyByHourAndMinuteAndSecondBug(self):
        # This explores a bug found by Mathieu Bridon.
        # With bysecond=(0,) and byminute=(1,) a SECONDLY rule collapses to
        # a single occurrence per hour (at second 0 of minute 1); the buggy
        # code skipped valid occurrences instead of emitting them hourly.
        self.assertEqual(list(rrule(SECONDLY,
                                    count=3,
                                    bysecond=(0,),
                                    byminute=(1,),
                                    dtstart=datetime(2010, 3, 22, 12, 1))),
                         [datetime(2010, 3, 22, 12, 1),
                          datetime(2010, 3, 22, 13, 1),
                          datetime(2010, 3, 22, 14, 1)])
    def testLongIntegers(self):
        """rrule accepts PY2 ``long`` values anywhere an int is expected."""
        if not PY3:  # Python 3 has no separate ``long`` type
            self.assertEqual(list(rrule(MINUTELY,
                                        count=long(2),
                                        interval=long(2),
                                        bymonth=long(2),
                                        byweekday=long(3),
                                        byhour=long(6),
                                        byminute=long(6),
                                        bysecond=long(6),
                                        dtstart=datetime(1997, 9, 2, 9, 0))),
                             [datetime(1998, 2, 5, 6, 6, 6),
                              datetime(1998, 2, 12, 6, 6, 6)])
            self.assertEqual(list(rrule(YEARLY,
                                        count=long(2),
                                        bymonthday=long(5),
                                        byweekno=long(2),
                                        dtstart=datetime(1997, 9, 2, 9, 0))),
                             [datetime(1998, 1, 5, 9, 0),
                              datetime(2004, 1, 5, 9, 0)])
def testHourlyBadRRule(self):
"""
When `byhour` is specified with `freq=HOURLY`, there are certain
combinations of `dtstart` and `byhour` which result in an rrule with no
valid values.
See https://github.com/dateutil/dateutil/issues/4
"""
self.assertRaises(ValueError, rrule, HOURLY,
**dict(interval=4, byhour=(7, 11, 15, 19),
dtstart=datetime(1997, 9, 2, 9, 0)))
def testMinutelyBadRRule(self):
"""
See :func:`testHourlyBadRRule` for details.
"""
self.assertRaises(ValueError, rrule, MINUTELY,
**dict(interval=12, byminute=(10, 11, 25, 39, 50),
dtstart=datetime(1997, 9, 2, 9, 0)))
def testSecondlyBadRRule(self):
"""
See :func:`testHourlyBadRRule` for details.
"""
self.assertRaises(ValueError, rrule, SECONDLY,
**dict(interval=10, bysecond=(2, 15, 37, 42, 59),
dtstart=datetime(1997, 9, 2, 9, 0)))
def testMinutelyBadComboRRule(self):
"""
Certain values of :param:`interval` in :class:`rrule`, when combined
with certain values of :param:`byhour` create rules which apply to no
valid dates. The library should detect this case in the iterator and
raise a :exception:`ValueError`.
"""
# In Python 2.7 you can use a context manager for this.
def make_bad_rrule():
list(rrule(MINUTELY, interval=120, byhour=(10, 12, 14, 16),
count=2, dtstart=datetime(1997, 9, 2, 9, 0)))
self.assertRaises(ValueError, make_bad_rrule)
def testSecondlyBadComboRRule(self):
"""
See :func:`testMinutelyBadComboRRule' for details.
"""
# In Python 2.7 you can use a context manager for this.
def make_bad_minute_rrule():
list(rrule(SECONDLY, interval=360, byminute=(10, 28, 49),
count=4, dtstart=datetime(1997, 9, 2, 9, 0)))
def make_bad_hour_rrule():
list(rrule(SECONDLY, interval=43200, byhour=(2, 10, 18, 23),
count=4, dtstart=datetime(1997, 9, 2, 9, 0)))
self.assertRaises(ValueError, make_bad_minute_rrule)
self.assertRaises(ValueError, make_bad_hour_rrule)
def testUntilNotMatching(self):
self.assertEqual(list(rrule(DAILY,
count=3,
dtstart=datetime(1997, 9, 2, 9, 0),
until=datetime(1997, 9, 5, 8, 0))),
[datetime(1997, 9, 2, 9, 0),
datetime(1997, 9, 3, 9, 0),
datetime(1997, 9, 4, 9, 0)])
def testUntilMatching(self):
self.assertEqual(list(rrule(DAILY,
count=3,
dtstart=datetime(1997, 9, 2, 9, 0),
until=datetime(1997, 9, 4, 9, 0))),
[datetime(1997, 9, 2, 9, 0),
datetime(1997, 9, 3, 9, 0),
datetime(1997, 9, 4, 9, 0)])
def testUntilSingle(self):
self.assertEqual(list(rrule(DAILY,
count=3,
dtstart=datetime(1997, 9, 2, 9, 0),
until=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 9, 2, 9, 0)])
def testUntilEmpty(self):
self.assertEqual(list(rrule(DAILY,
count=3,
dtstart=datetime(1997, 9, 2, 9, 0),
until=datetime(1997, 9, 1, 9, 0))),
[])
    def testUntilWithDate(self):
        # ``until`` may be a plain ``date``; it is treated as that day's
        # midnight, so the 09:00 occurrence on Sep 5 falls after it and is
        # excluded from the results.
        self.assertEqual(list(rrule(DAILY,
                                    count=3,
                                    dtstart=datetime(1997, 9, 2, 9, 0),
                                    until=date(1997, 9, 5))),
                         [datetime(1997, 9, 2, 9, 0),
                          datetime(1997, 9, 3, 9, 0),
                          datetime(1997, 9, 4, 9, 0)])
def testWkStIntervalMO(self):
self.assertEqual(list(rrule(WEEKLY,
count=3,
interval=2,
byweekday=(TU, SU),
wkst=MO,
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 9, 2, 9, 0),
datetime(1997, 9, 7, 9, 0),
datetime(1997, 9, 16, 9, 0)])
def testWkStIntervalSU(self):
self.assertEqual(list(rrule(WEEKLY,
count=3,
interval=2,
byweekday=(TU, SU),
wkst=SU,
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 9, 2, 9, 0),
datetime(1997, 9, 14, 9, 0),
datetime(1997, 9, 16, 9, 0)])
def testDTStartIsDate(self):
self.assertEqual(list(rrule(DAILY,
count=3,
dtstart=date(1997, 9, 2))),
[datetime(1997, 9, 2, 0, 0),
datetime(1997, 9, 3, 0, 0),
datetime(1997, 9, 4, 0, 0)])
def testDTStartWithMicroseconds(self):
self.assertEqual(list(rrule(DAILY,
count=3,
dtstart=datetime(1997, 9, 2, 9, 0, 0, 500000))),
[datetime(1997, 9, 2, 9, 0),
datetime(1997, 9, 3, 9, 0),
datetime(1997, 9, 4, 9, 0)])
    def testMaxYear(self):
        # February 31st never exists, so no occurrence can match;
        # presumably the iterator gives up when it reaches the calendar's
        # maximum year and yields an empty result -- TODO confirm mechanism.
        self.assertEqual(list(rrule(YEARLY,
                                    count=3,
                                    bymonth=2,
                                    bymonthday=31,
                                    dtstart=datetime(9997, 9, 2, 9, 0, 0))),
                         [])
def testGetItem(self):
self.assertEqual(rrule(DAILY,
count=3,
dtstart=datetime(1997, 9, 2, 9, 0))[0],
datetime(1997, 9, 2, 9, 0))
def testGetItemNeg(self):
self.assertEqual(rrule(DAILY,
count=3,
dtstart=datetime(1997, 9, 2, 9, 0))[-1],
datetime(1997, 9, 4, 9, 0))
def testGetItemSlice(self):
self.assertEqual(rrule(DAILY,
# count=3,
dtstart=datetime(1997, 9, 2, 9, 0))[1:2],
[datetime(1997, 9, 3, 9, 0)])
    def testGetItemSliceEmpty(self):
        # NOTE(review): despite the name, this exercises the *full* slice
        # ``[:]`` of a finite (count=3) rule, not an empty slice.
        self.assertEqual(rrule(DAILY,
                               count=3,
                               dtstart=datetime(1997, 9, 2, 9, 0))[:],
                         [datetime(1997, 9, 2, 9, 0),
                          datetime(1997, 9, 3, 9, 0),
                          datetime(1997, 9, 4, 9, 0)])
def testGetItemSliceStep(self):
self.assertEqual(rrule(DAILY,
count=3,
dtstart=datetime(1997, 9, 2, 9, 0))[::-2],
[datetime(1997, 9, 4, 9, 0),
datetime(1997, 9, 2, 9, 0)])
def testCount(self):
self.assertEqual(rrule(DAILY,
count=3,
dtstart=datetime(1997, 9, 2, 9, 0)).count(),
3)
def testContains(self):
rr = rrule(DAILY, count=3, dtstart=datetime(1997, 9, 2, 9, 0))
self.assertEqual(datetime(1997, 9, 3, 9, 0) in rr, True)
def testContainsNot(self):
rr = rrule(DAILY, count=3, dtstart=datetime(1997, 9, 2, 9, 0))
self.assertEqual(datetime(1997, 9, 3, 9, 0) not in rr, False)
def testBefore(self):
self.assertEqual(rrule(DAILY, # count=5
dtstart=datetime(1997, 9, 2, 9, 0)).before(datetime(1997, 9, 5, 9, 0)),
datetime(1997, 9, 4, 9, 0))
def testBeforeInc(self):
self.assertEqual(rrule(DAILY,
#count=5,
dtstart=datetime(1997, 9, 2, 9, 0))
.before(datetime(1997, 9, 5, 9, 0), inc=True),
datetime(1997, 9, 5, 9, 0))
def testAfter(self):
self.assertEqual(rrule(DAILY,
#count=5,
dtstart=datetime(1997, 9, 2, 9, 0))
.after(datetime(1997, 9, 4, 9, 0)),
datetime(1997, 9, 5, 9, 0))
def testAfterInc(self):
self.assertEqual(rrule(DAILY,
#count=5,
dtstart=datetime(1997, 9, 2, 9, 0))
.after(datetime(1997, 9, 4, 9, 0), inc=True),
datetime(1997, 9, 4, 9, 0))
def testBetween(self):
self.assertEqual(rrule(DAILY,
#count=5,
dtstart=datetime(1997, 9, 2, 9, 0))
.between(datetime(1997, 9, 2, 9, 0),
datetime(1997, 9, 6, 9, 0)),
[datetime(1997, 9, 3, 9, 0),
datetime(1997, 9, 4, 9, 0),
datetime(1997, 9, 5, 9, 0)])
def testBetweenInc(self):
self.assertEqual(rrule(DAILY,
#count=5,
dtstart=datetime(1997, 9, 2, 9, 0))
.between(datetime(1997, 9, 2, 9, 0),
datetime(1997, 9, 6, 9, 0), inc=True),
[datetime(1997, 9, 2, 9, 0),
datetime(1997, 9, 3, 9, 0),
datetime(1997, 9, 4, 9, 0),
datetime(1997, 9, 5, 9, 0),
datetime(1997, 9, 6, 9, 0)])
def testCachePre(self):
rr = rrule(DAILY, count=15, cache=True,
dtstart=datetime(1997, 9, 2, 9, 0))
self.assertEqual(list(rr),
[datetime(1997, 9, 2, 9, 0),
datetime(1997, 9, 3, 9, 0),
datetime(1997, 9, 4, 9, 0),
datetime(1997, 9, 5, 9, 0),
datetime(1997, 9, 6, 9, 0),
datetime(1997, 9, 7, 9, 0),
datetime(1997, 9, 8, 9, 0),
datetime(1997, 9, 9, 9, 0),
datetime(1997, 9, 10, 9, 0),
datetime(1997, 9, 11, 9, 0),
datetime(1997, 9, 12, 9, 0),
datetime(1997, 9, 13, 9, 0),
datetime(1997, 9, 14, 9, 0),
datetime(1997, 9, 15, 9, 0),
datetime(1997, 9, 16, 9, 0)])
def testCachePost(self):
rr = rrule(DAILY, count=15, cache=True,
dtstart=datetime(1997, 9, 2, 9, 0))
for x in rr: pass
self.assertEqual(list(rr),
[datetime(1997, 9, 2, 9, 0),
datetime(1997, 9, 3, 9, 0),
datetime(1997, 9, 4, 9, 0),
datetime(1997, 9, 5, 9, 0),
datetime(1997, 9, 6, 9, 0),
datetime(1997, 9, 7, 9, 0),
datetime(1997, 9, 8, 9, 0),
datetime(1997, 9, 9, 9, 0),
datetime(1997, 9, 10, 9, 0),
datetime(1997, 9, 11, 9, 0),
datetime(1997, 9, 12, 9, 0),
datetime(1997, 9, 13, 9, 0),
datetime(1997, 9, 14, 9, 0),
datetime(1997, 9, 15, 9, 0),
datetime(1997, 9, 16, 9, 0)])
def testCachePostInternal(self):
rr = rrule(DAILY, count=15, cache=True,
dtstart=datetime(1997, 9, 2, 9, 0))
for x in rr: pass
self.assertEqual(rr._cache,
[datetime(1997, 9, 2, 9, 0),
datetime(1997, 9, 3, 9, 0),
datetime(1997, 9, 4, 9, 0),
datetime(1997, 9, 5, 9, 0),
datetime(1997, 9, 6, 9, 0),
datetime(1997, 9, 7, 9, 0),
datetime(1997, 9, 8, 9, 0),
datetime(1997, 9, 9, 9, 0),
datetime(1997, 9, 10, 9, 0),
datetime(1997, 9, 11, 9, 0),
datetime(1997, 9, 12, 9, 0),
datetime(1997, 9, 13, 9, 0),
datetime(1997, 9, 14, 9, 0),
datetime(1997, 9, 15, 9, 0),
datetime(1997, 9, 16, 9, 0)])
def testCachePreContains(self):
rr = rrule(DAILY, count=3, cache=True,
dtstart=datetime(1997, 9, 2, 9, 0))
self.assertEqual(datetime(1997, 9, 3, 9, 0) in rr, True)
def testCachePostContains(self):
rr = rrule(DAILY, count=3, cache=True,
dtstart=datetime(1997, 9, 2, 9, 0))
for x in rr: pass
self.assertEqual(datetime(1997, 9, 3, 9, 0) in rr, True)
def testSet(self):
set = rruleset()
set.rrule(rrule(YEARLY, count=2, byweekday=TU,
dtstart=datetime(1997, 9, 2, 9, 0)))
set.rrule(rrule(YEARLY, count=1, byweekday=TH,
dtstart=datetime(1997, 9, 2, 9, 0)))
self.assertEqual(list(set),
[datetime(1997, 9, 2, 9, 0),
datetime(1997, 9, 4, 9, 0),
datetime(1997, 9, 9, 9, 0)])
def testSetDate(self):
set = rruleset()
set.rrule(rrule(YEARLY, count=1, byweekday=TU,
dtstart=datetime(1997, 9, 2, 9, 0)))
set.rdate(datetime(1997, 9, 4, 9))
set.rdate(datetime(1997, 9, 9, 9))
self.assertEqual(list(set),
[datetime(1997, 9, 2, 9, 0),
datetime(1997, 9, 4, 9, 0),
datetime(1997, 9, 9, 9, 0)])
def testSetExRule(self):
set = rruleset()
set.rrule(rrule(YEARLY, count=6, byweekday=(TU, TH),
dtstart=datetime(1997, 9, 2, 9, 0)))
set.exrule(rrule(YEARLY, count=3, byweekday=TH,
dtstart=datetime(1997, 9, 2, 9, 0)))
self.assertEqual(list(set),
[datetime(1997, 9, 2, 9, 0),
datetime(1997, 9, 9, 9, 0),
datetime(1997, 9, 16, 9, 0)])
def testSetExDate(self):
set = rruleset()
set.rrule(rrule(YEARLY, count=6, byweekday=(TU, TH),
dtstart=datetime(1997, 9, 2, 9, 0)))
set.exdate(datetime(1997, 9, 4, 9))
set.exdate(datetime(1997, 9, 11, 9))
set.exdate(datetime(1997, 9, 18, 9))
self.assertEqual(list(set),
[datetime(1997, 9, 2, 9, 0),
datetime(1997, 9, 9, 9, 0),
datetime(1997, 9, 16, 9, 0)])
def testSetExDateRevOrder(self):
set = rruleset()
set.rrule(rrule(MONTHLY, count=5, bymonthday=10,
dtstart=datetime(2004, 1, 1, 9, 0)))
set.exdate(datetime(2004, 4, 10, 9, 0))
set.exdate(datetime(2004, 2, 10, 9, 0))
self.assertEqual(list(set),
[datetime(2004, 1, 10, 9, 0),
datetime(2004, 3, 10, 9, 0),
datetime(2004, 5, 10, 9, 0)])
def testSetDateAndExDate(self):
set = rruleset()
set.rdate(datetime(1997, 9, 2, 9))
set.rdate(datetime(1997, 9, 4, 9))
set.rdate(datetime(1997, 9, 9, 9))
set.rdate(datetime(1997, 9, 11, 9))
set.rdate(datetime(1997, 9, 16, 9))
set.rdate(datetime(1997, 9, 18, 9))
set.exdate(datetime(1997, 9, 4, 9))
set.exdate(datetime(1997, 9, 11, 9))
set.exdate(datetime(1997, 9, 18, 9))
self.assertEqual(list(set),
[datetime(1997, 9, 2, 9, 0),
datetime(1997, 9, 9, 9, 0),
datetime(1997, 9, 16, 9, 0)])
def testSetDateAndExRule(self):
set = rruleset()
set.rdate(datetime(1997, 9, 2, 9))
set.rdate(datetime(1997, 9, 4, 9))
set.rdate(datetime(1997, 9, 9, 9))
set.rdate(datetime(1997, 9, 11, 9))
set.rdate(datetime(1997, 9, 16, 9))
set.rdate(datetime(1997, 9, 18, 9))
set.exrule(rrule(YEARLY, count=3, byweekday=TH,
dtstart=datetime(1997, 9, 2, 9, 0)))
self.assertEqual(list(set),
[datetime(1997, 9, 2, 9, 0),
datetime(1997, 9, 9, 9, 0),
datetime(1997, 9, 16, 9, 0)])
def testSetCount(self):
set = rruleset()
set.rrule(rrule(YEARLY, count=6, byweekday=(TU, TH),
dtstart=datetime(1997, 9, 2, 9, 0)))
set.exrule(rrule(YEARLY, count=3, byweekday=TH,
dtstart=datetime(1997, 9, 2, 9, 0)))
self.assertEqual(set.count(), 3)
def testSetCachePre(self):
set = rruleset()
set.rrule(rrule(YEARLY, count=2, byweekday=TU,
dtstart=datetime(1997, 9, 2, 9, 0)))
set.rrule(rrule(YEARLY, count=1, byweekday=TH,
dtstart=datetime(1997, 9, 2, 9, 0)))
self.assertEqual(list(set),
[datetime(1997, 9, 2, 9, 0),
datetime(1997, 9, 4, 9, 0),
datetime(1997, 9, 9, 9, 0)])
def testSetCachePost(self):
set = rruleset(cache=True)
set.rrule(rrule(YEARLY, count=2, byweekday=TU,
dtstart=datetime(1997, 9, 2, 9, 0)))
set.rrule(rrule(YEARLY, count=1, byweekday=TH,
dtstart=datetime(1997, 9, 2, 9, 0)))
for x in set: pass
self.assertEqual(list(set),
[datetime(1997, 9, 2, 9, 0),
datetime(1997, 9, 4, 9, 0),
datetime(1997, 9, 9, 9, 0)])
def testSetCachePostInternal(self):
set = rruleset(cache=True)
set.rrule(rrule(YEARLY, count=2, byweekday=TU,
dtstart=datetime(1997, 9, 2, 9, 0)))
set.rrule(rrule(YEARLY, count=1, byweekday=TH,
dtstart=datetime(1997, 9, 2, 9, 0)))
for x in set: pass
self.assertEqual(list(set._cache),
[datetime(1997, 9, 2, 9, 0),
datetime(1997, 9, 4, 9, 0),
datetime(1997, 9, 9, 9, 0)])
def testStr(self):
self.assertEqual(list(rrulestr(
"DTSTART:19970902T090000\n"
"RRULE:FREQ=YEARLY;COUNT=3\n"
)),
[datetime(1997, 9, 2, 9, 0),
datetime(1998, 9, 2, 9, 0),
datetime(1999, 9, 2, 9, 0)])
def testStrType(self):
self.assertEqual(isinstance(rrulestr(
"DTSTART:19970902T090000\n"
"RRULE:FREQ=YEARLY;COUNT=3\n"
), rrule), True)
def testStrForceSetType(self):
self.assertEqual(isinstance(rrulestr(
"DTSTART:19970902T090000\n"
"RRULE:FREQ=YEARLY;COUNT=3\n"
, forceset=True), rruleset), True)
def testStrSetType(self):
self.assertEqual(isinstance(rrulestr(
"DTSTART:19970902T090000\n"
"RRULE:FREQ=YEARLY;COUNT=2;BYDAY=TU\n"
"RRULE:FREQ=YEARLY;COUNT=1;BYDAY=TH\n"
), rruleset), True)
def testStrCase(self):
self.assertEqual(list(rrulestr(
"dtstart:19970902T090000\n"
"rrule:freq=yearly;count=3\n"
)),
[datetime(1997, 9, 2, 9, 0),
datetime(1998, 9, 2, 9, 0),
datetime(1999, 9, 2, 9, 0)])
def testStrSpaces(self):
self.assertEqual(list(rrulestr(
" DTSTART:19970902T090000 "
" RRULE:FREQ=YEARLY;COUNT=3 "
)),
[datetime(1997, 9, 2, 9, 0),
datetime(1998, 9, 2, 9, 0),
datetime(1999, 9, 2, 9, 0)])
def testStrSpacesAndLines(self):
self.assertEqual(list(rrulestr(
" DTSTART:19970902T090000 \n"
" \n"
" RRULE:FREQ=YEARLY;COUNT=3 \n"
)),
[datetime(1997, 9, 2, 9, 0),
datetime(1998, 9, 2, 9, 0),
datetime(1999, 9, 2, 9, 0)])
def testStrNoDTStart(self):
self.assertEqual(list(rrulestr(
"RRULE:FREQ=YEARLY;COUNT=3\n"
, dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 9, 2, 9, 0),
datetime(1998, 9, 2, 9, 0),
datetime(1999, 9, 2, 9, 0)])
def testStrValueOnly(self):
self.assertEqual(list(rrulestr(
"FREQ=YEARLY;COUNT=3\n"
, dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 9, 2, 9, 0),
datetime(1998, 9, 2, 9, 0),
datetime(1999, 9, 2, 9, 0)])
def testStrUnfold(self):
self.assertEqual(list(rrulestr(
"FREQ=YEA\n RLY;COUNT=3\n", unfold=True,
dtstart=datetime(1997, 9, 2, 9, 0))),
[datetime(1997, 9, 2, 9, 0),
datetime(1998, 9, 2, 9, 0),
datetime(1999, 9, 2, 9, 0)])
def testStrSet(self):
self.assertEqual(list(rrulestr(
"DTSTART:19970902T090000\n"
"RRULE:FREQ=YEARLY;COUNT=2;BYDAY=TU\n"
"RRULE:FREQ=YEARLY;COUNT=1;BYDAY=TH\n"
)),
[datetime(1997, 9, 2, 9, 0),
datetime(1997, 9, 4, 9, 0),
datetime(1997, 9, 9, 9, 0)])
def testStrSetDate(self):
self.assertEqual(list(rrulestr(
"DTSTART:19970902T090000\n"
"RRULE:FREQ=YEARLY;COUNT=1;BYDAY=TU\n"
"RDATE:19970904T090000\n"
"RDATE:19970909T090000\n"
)),
[datetime(1997, 9, 2, 9, 0),
datetime(1997, 9, 4, 9, 0),
datetime(1997, 9, 9, 9, 0)])
def testStrSetExRule(self):
self.assertEqual(list(rrulestr(
"DTSTART:19970902T090000\n"
"RRULE:FREQ=YEARLY;COUNT=6;BYDAY=TU,TH\n"
"EXRULE:FREQ=YEARLY;COUNT=3;BYDAY=TH\n"
)),
[datetime(1997, 9, 2, 9, 0),
datetime(1997, 9, 9, 9, 0),
datetime(1997, 9, 16, 9, 0)])
def testStrSetExDate(self):
self.assertEqual(list(rrulestr(
"DTSTART:19970902T090000\n"
"RRULE:FREQ=YEARLY;COUNT=6;BYDAY=TU,TH\n"
"EXDATE:19970904T090000\n"
"EXDATE:19970911T090000\n"
"EXDATE:19970918T090000\n"
)),
[datetime(1997, 9, 2, 9, 0),
datetime(1997, 9, 9, 9, 0),
datetime(1997, 9, 16, 9, 0)])
def testStrSetDateAndExDate(self):
self.assertEqual(list(rrulestr(
"DTSTART:19970902T090000\n"
"RDATE:19970902T090000\n"
"RDATE:19970904T090000\n"
"RDATE:19970909T090000\n"
"RDATE:19970911T090000\n"
"RDATE:19970916T090000\n"
"RDATE:19970918T090000\n"
"EXDATE:19970904T090000\n"
"EXDATE:19970911T090000\n"
"EXDATE:19970918T090000\n"
)),
[datetime(1997, 9, 2, 9, 0),
datetime(1997, 9, 9, 9, 0),
datetime(1997, 9, 16, 9, 0)])
def testStrSetDateAndExRule(self):
self.assertEqual(list(rrulestr(
"DTSTART:19970902T090000\n"
"RDATE:19970902T090000\n"
"RDATE:19970904T090000\n"
"RDATE:19970909T090000\n"
"RDATE:19970911T090000\n"
"RDATE:19970916T090000\n"
"RDATE:19970918T090000\n"
"EXRULE:FREQ=YEARLY;COUNT=3;BYDAY=TH\n"
)),
[datetime(1997, 9, 2, 9, 0),
datetime(1997, 9, 9, 9, 0),
datetime(1997, 9, 16, 9, 0)])
def testStrKeywords(self):
self.assertEqual(list(rrulestr(
"DTSTART:19970902T090000\n"
"RRULE:FREQ=YEARLY;COUNT=3;INTERVAL=3;"
"BYMONTH=3;BYWEEKDAY=TH;BYMONTHDAY=3;"
"BYHOUR=3;BYMINUTE=3;BYSECOND=3\n"
)),
[datetime(2033, 3, 3, 3, 3, 3),
datetime(2039, 3, 3, 3, 3, 3),
datetime(2072, 3, 3, 3, 3, 3)])
def testStrNWeekDay(self):
self.assertEqual(list(rrulestr(
"DTSTART:19970902T090000\n"
"RRULE:FREQ=YEARLY;COUNT=3;BYDAY=1TU,-1TH\n"
)),
[datetime(1997, 12, 25, 9, 0),
datetime(1998, 1, 6, 9, 0),
datetime(1998, 12, 31, 9, 0)])
    def testBadBySetPos(self):
        # bysetpos=0 is invalid (RFC 2445 allows only non-zero positions)
        # and must raise ValueError at construction time.
        self.assertRaises(ValueError,
                          rrule, MONTHLY,
                          count=1,
                          bysetpos=0,
                          dtstart=datetime(1997, 9, 2, 9, 0))
    def testBadBySetPosMany(self):
        # A zero hidden inside a bysetpos sequence must be rejected too.
        self.assertRaises(ValueError,
                          rrule, MONTHLY,
                          count=1,
                          bysetpos=(-1, 0, 1),
                          dtstart=datetime(1997, 9, 2, 9, 0))
class ParserTest(unittest.TestCase):
    """Tests for dateutil.parser.parse() across the supported formats.
    setUp() provides a BRST (-03:00) tzinfo mapping plus a default date of
    2003-09-25 that fills in any fields missing from partial inputs.
    """
    def setUp(self):
        self.tzinfos = {"BRST": -10800}
        self.brsttz = tzoffset("BRST", -10800)
        self.default = datetime(2003, 9, 25)
    def testDateCommandFormat(self):
        self.assertEqual(parse("Thu Sep 25 10:36:28 BRST 2003",
                               tzinfos=self.tzinfos),
                         datetime(2003, 9, 25, 10, 36, 28,
                                  tzinfo=self.brsttz))
    def testDateCommandFormatUnicode(self):
        # NOTE(review): identical to testDateCommandFormat — the literal
        # presumably lost a u'' prefix during a Python 3 conversion; confirm.
        self.assertEqual(parse("Thu Sep 25 10:36:28 BRST 2003",
                               tzinfos=self.tzinfos),
                         datetime(2003, 9, 25, 10, 36, 28,
                                  tzinfo=self.brsttz))
    def testDateCommandFormatReversed(self):
        self.assertEqual(parse("2003 10:36:28 BRST 25 Sep Thu",
                               tzinfos=self.tzinfos),
                         datetime(2003, 9, 25, 10, 36, 28,
                                  tzinfo=self.brsttz))
    def testDateCommandFormatWithLong(self):
        # Python 2 only: tzinfos offsets may be given as long integers.
        if not PY3:
            self.assertEqual(parse("Thu Sep 25 10:36:28 BRST 2003",
                                   tzinfos={"BRST": long(-10800)}),
                             datetime(2003, 9, 25, 10, 36, 28,
                                      tzinfo=self.brsttz))
    def testDateCommandFormatIgnoreTz(self):
        self.assertEqual(parse("Thu Sep 25 10:36:28 BRST 2003",
                               ignoretz=True),
                         datetime(2003, 9, 25, 10, 36, 28))
    # The Strip* variants drop one more component each time; missing fields
    # come from the default (2003-09-25).
    def testDateCommandFormatStrip1(self):
        self.assertEqual(parse("Thu Sep 25 10:36:28 2003"),
                         datetime(2003, 9, 25, 10, 36, 28))
    def testDateCommandFormatStrip2(self):
        self.assertEqual(parse("Thu Sep 25 10:36:28", default=self.default),
                         datetime(2003, 9, 25, 10, 36, 28))
    def testDateCommandFormatStrip3(self):
        self.assertEqual(parse("Thu Sep 10:36:28", default=self.default),
                         datetime(2003, 9, 25, 10, 36, 28))
    def testDateCommandFormatStrip4(self):
        self.assertEqual(parse("Thu 10:36:28", default=self.default),
                         datetime(2003, 9, 25, 10, 36, 28))
    def testDateCommandFormatStrip5(self):
        self.assertEqual(parse("Sep 10:36:28", default=self.default),
                         datetime(2003, 9, 25, 10, 36, 28))
    def testDateCommandFormatStrip6(self):
        self.assertEqual(parse("10:36:28", default=self.default),
                         datetime(2003, 9, 25, 10, 36, 28))
    def testDateCommandFormatStrip7(self):
        self.assertEqual(parse("10:36", default=self.default),
                         datetime(2003, 9, 25, 10, 36))
    def testDateCommandFormatStrip8(self):
        self.assertEqual(parse("Thu Sep 25 2003"),
                         datetime(2003, 9, 25))
    def testDateCommandFormatStrip9(self):
        self.assertEqual(parse("Sep 25 2003"),
                         datetime(2003, 9, 25))
    def testDateCommandFormatStrip10(self):
        self.assertEqual(parse("Sep 2003", default=self.default),
                         datetime(2003, 9, 25))
    def testDateCommandFormatStrip11(self):
        self.assertEqual(parse("Sep", default=self.default),
                         datetime(2003, 9, 25))
    def testDateCommandFormatStrip12(self):
        self.assertEqual(parse("2003", default=self.default),
                         datetime(2003, 9, 25))
    def testDateRCommandFormat(self):
        self.assertEqual(parse("Thu, 25 Sep 2003 10:49:41 -0300"),
                         datetime(2003, 9, 25, 10, 49, 41,
                                  tzinfo=self.brsttz))
    def testISOFormat(self):
        self.assertEqual(parse("2003-09-25T10:49:41.5-03:00"),
                         datetime(2003, 9, 25, 10, 49, 41, 500000,
                                  tzinfo=self.brsttz))
    def testISOFormatStrip1(self):
        self.assertEqual(parse("2003-09-25T10:49:41-03:00"),
                         datetime(2003, 9, 25, 10, 49, 41,
                                  tzinfo=self.brsttz))
    def testISOFormatStrip2(self):
        self.assertEqual(parse("2003-09-25T10:49:41"),
                         datetime(2003, 9, 25, 10, 49, 41))
    def testISOFormatStrip3(self):
        self.assertEqual(parse("2003-09-25T10:49"),
                         datetime(2003, 9, 25, 10, 49))
    def testISOFormatStrip4(self):
        self.assertEqual(parse("2003-09-25T10"),
                         datetime(2003, 9, 25, 10))
    def testISOFormatStrip5(self):
        self.assertEqual(parse("2003-09-25"),
                         datetime(2003, 9, 25))
    # "Stripped" ISO: the same timestamps without separators (basic format).
    def testISOStrippedFormat(self):
        self.assertEqual(parse("20030925T104941.5-0300"),
                         datetime(2003, 9, 25, 10, 49, 41, 500000,
                                  tzinfo=self.brsttz))
    def testISOStrippedFormatStrip1(self):
        self.assertEqual(parse("20030925T104941-0300"),
                         datetime(2003, 9, 25, 10, 49, 41,
                                  tzinfo=self.brsttz))
    def testISOStrippedFormatStrip2(self):
        self.assertEqual(parse("20030925T104941"),
                         datetime(2003, 9, 25, 10, 49, 41))
    def testISOStrippedFormatStrip3(self):
        self.assertEqual(parse("20030925T1049"),
                         datetime(2003, 9, 25, 10, 49, 0))
    def testISOStrippedFormatStrip4(self):
        self.assertEqual(parse("20030925T10"),
                         datetime(2003, 9, 25, 10))
    def testISOStrippedFormatStrip5(self):
        self.assertEqual(parse("20030925"),
                         datetime(2003, 9, 25))
    def testNoSeparator1(self):
        self.assertEqual(parse("199709020908"),
                         datetime(1997, 9, 2, 9, 8))
    def testNoSeparator2(self):
        self.assertEqual(parse("19970902090807"),
                         datetime(1997, 9, 2, 9, 8, 7))
    # Dash-separated dates; variants 8-11 exercise dayfirst/yearfirst
    # disambiguation of all-numeric inputs.
    def testDateWithDash1(self):
        self.assertEqual(parse("2003-09-25"),
                         datetime(2003, 9, 25))
    def testDateWithDash2(self):
        self.assertEqual(parse("2003-Sep-25"),
                         datetime(2003, 9, 25))
    def testDateWithDash3(self):
        self.assertEqual(parse("25-Sep-2003"),
                         datetime(2003, 9, 25))
    def testDateWithDash4(self):
        self.assertEqual(parse("25-Sep-2003"),
                         datetime(2003, 9, 25))
    def testDateWithDash5(self):
        self.assertEqual(parse("Sep-25-2003"),
                         datetime(2003, 9, 25))
    def testDateWithDash6(self):
        self.assertEqual(parse("09-25-2003"),
                         datetime(2003, 9, 25))
    def testDateWithDash7(self):
        self.assertEqual(parse("25-09-2003"),
                         datetime(2003, 9, 25))
    def testDateWithDash8(self):
        self.assertEqual(parse("10-09-2003", dayfirst=True),
                         datetime(2003, 9, 10))
    def testDateWithDash9(self):
        self.assertEqual(parse("10-09-2003"),
                         datetime(2003, 10, 9))
    def testDateWithDash10(self):
        self.assertEqual(parse("10-09-03"),
                         datetime(2003, 10, 9))
    def testDateWithDash11(self):
        self.assertEqual(parse("10-09-03", yearfirst=True),
                         datetime(2010, 9, 3))
    # Same matrix with dots as separators.
    def testDateWithDot1(self):
        self.assertEqual(parse("2003.09.25"),
                         datetime(2003, 9, 25))
    def testDateWithDot2(self):
        self.assertEqual(parse("2003.Sep.25"),
                         datetime(2003, 9, 25))
    def testDateWithDot3(self):
        self.assertEqual(parse("25.Sep.2003"),
                         datetime(2003, 9, 25))
    def testDateWithDot4(self):
        self.assertEqual(parse("25.Sep.2003"),
                         datetime(2003, 9, 25))
    def testDateWithDot5(self):
        self.assertEqual(parse("Sep.25.2003"),
                         datetime(2003, 9, 25))
    def testDateWithDot6(self):
        self.assertEqual(parse("09.25.2003"),
                         datetime(2003, 9, 25))
    def testDateWithDot7(self):
        self.assertEqual(parse("25.09.2003"),
                         datetime(2003, 9, 25))
    def testDateWithDot8(self):
        self.assertEqual(parse("10.09.2003", dayfirst=True),
                         datetime(2003, 9, 10))
    def testDateWithDot9(self):
        self.assertEqual(parse("10.09.2003"),
                         datetime(2003, 10, 9))
    def testDateWithDot10(self):
        self.assertEqual(parse("10.09.03"),
                         datetime(2003, 10, 9))
    def testDateWithDot11(self):
        self.assertEqual(parse("10.09.03", yearfirst=True),
                         datetime(2010, 9, 3))
    # Same matrix with slashes as separators.
    def testDateWithSlash1(self):
        self.assertEqual(parse("2003/09/25"),
                         datetime(2003, 9, 25))
    def testDateWithSlash2(self):
        self.assertEqual(parse("2003/Sep/25"),
                         datetime(2003, 9, 25))
    def testDateWithSlash3(self):
        self.assertEqual(parse("25/Sep/2003"),
                         datetime(2003, 9, 25))
    def testDateWithSlash4(self):
        self.assertEqual(parse("25/Sep/2003"),
                         datetime(2003, 9, 25))
    def testDateWithSlash5(self):
        self.assertEqual(parse("Sep/25/2003"),
                         datetime(2003, 9, 25))
    def testDateWithSlash6(self):
        self.assertEqual(parse("09/25/2003"),
                         datetime(2003, 9, 25))
    def testDateWithSlash7(self):
        self.assertEqual(parse("25/09/2003"),
                         datetime(2003, 9, 25))
    def testDateWithSlash8(self):
        self.assertEqual(parse("10/09/2003", dayfirst=True),
                         datetime(2003, 9, 10))
    def testDateWithSlash9(self):
        self.assertEqual(parse("10/09/2003"),
                         datetime(2003, 10, 9))
    def testDateWithSlash10(self):
        self.assertEqual(parse("10/09/03"),
                         datetime(2003, 10, 9))
    def testDateWithSlash11(self):
        self.assertEqual(parse("10/09/03", yearfirst=True),
                         datetime(2010, 9, 3))
    # Same matrix with spaces as separators.
    def testDateWithSpace1(self):
        self.assertEqual(parse("2003 09 25"),
                         datetime(2003, 9, 25))
    def testDateWithSpace2(self):
        self.assertEqual(parse("2003 Sep 25"),
                         datetime(2003, 9, 25))
    def testDateWithSpace3(self):
        self.assertEqual(parse("25 Sep 2003"),
                         datetime(2003, 9, 25))
    def testDateWithSpace4(self):
        self.assertEqual(parse("25 Sep 2003"),
                         datetime(2003, 9, 25))
    def testDateWithSpace5(self):
        self.assertEqual(parse("Sep 25 2003"),
                         datetime(2003, 9, 25))
    def testDateWithSpace6(self):
        self.assertEqual(parse("09 25 2003"),
                         datetime(2003, 9, 25))
    def testDateWithSpace7(self):
        self.assertEqual(parse("25 09 2003"),
                         datetime(2003, 9, 25))
    def testDateWithSpace8(self):
        self.assertEqual(parse("10 09 2003", dayfirst=True),
                         datetime(2003, 9, 10))
    def testDateWithSpace9(self):
        self.assertEqual(parse("10 09 2003"),
                         datetime(2003, 10, 9))
    def testDateWithSpace10(self):
        self.assertEqual(parse("10 09 03"),
                         datetime(2003, 10, 9))
    def testDateWithSpace11(self):
        self.assertEqual(parse("10 09 03", yearfirst=True),
                         datetime(2010, 9, 3))
    def testDateWithSpace12(self):
        self.assertEqual(parse("25 09 03"),
                         datetime(2003, 9, 25))
    def testStrangelyOrderedDate1(self):
        self.assertEqual(parse("03 25 Sep"),
                         datetime(2003, 9, 25))
    def testStrangelyOrderedDate2(self):
        self.assertEqual(parse("2003 25 Sep"),
                         datetime(2003, 9, 25))
    def testStrangelyOrderedDate3(self):
        self.assertEqual(parse("25 03 Sep"),
                         datetime(2025, 9, 3))
    # h/m/s unit-letter time formats.
    def testHourWithLetters(self):
        self.assertEqual(parse("10h36m28.5s", default=self.default),
                         datetime(2003, 9, 25, 10, 36, 28, 500000))
    def testHourWithLettersStrip1(self):
        self.assertEqual(parse("10h36m28s", default=self.default),
                         datetime(2003, 9, 25, 10, 36, 28))
    def testHourWithLettersStrip2(self):
        self.assertEqual(parse("10h36m", default=self.default),
                         datetime(2003, 9, 25, 10, 36))
    def testHourWithLettersStrip3(self):
        self.assertEqual(parse("10h", default=self.default),
                         datetime(2003, 9, 25, 10))
    def testHourWithLettersStrip4(self):
        self.assertEqual(parse("10 h 36", default=self.default),
                         datetime(2003, 9, 25, 10, 36))
    # AM/PM markers in their many spellings.
    def testHourAmPm1(self):
        self.assertEqual(parse("10h am", default=self.default),
                         datetime(2003, 9, 25, 10))
    def testHourAmPm2(self):
        self.assertEqual(parse("10h pm", default=self.default),
                         datetime(2003, 9, 25, 22))
    def testHourAmPm3(self):
        self.assertEqual(parse("10am", default=self.default),
                         datetime(2003, 9, 25, 10))
    def testHourAmPm4(self):
        self.assertEqual(parse("10pm", default=self.default),
                         datetime(2003, 9, 25, 22))
    def testHourAmPm5(self):
        self.assertEqual(parse("10:00 am", default=self.default),
                         datetime(2003, 9, 25, 10))
    def testHourAmPm6(self):
        self.assertEqual(parse("10:00 pm", default=self.default),
                         datetime(2003, 9, 25, 22))
    def testHourAmPm7(self):
        self.assertEqual(parse("10:00am", default=self.default),
                         datetime(2003, 9, 25, 10))
    def testHourAmPm8(self):
        self.assertEqual(parse("10:00pm", default=self.default),
                         datetime(2003, 9, 25, 22))
    def testHourAmPm9(self):
        self.assertEqual(parse("10:00a.m", default=self.default),
                         datetime(2003, 9, 25, 10))
    def testHourAmPm10(self):
        self.assertEqual(parse("10:00p.m", default=self.default),
                         datetime(2003, 9, 25, 22))
    def testHourAmPm11(self):
        self.assertEqual(parse("10:00a.m.", default=self.default),
                         datetime(2003, 9, 25, 10))
    def testHourAmPm12(self):
        self.assertEqual(parse("10:00p.m.", default=self.default),
                         datetime(2003, 9, 25, 22))
    def testPertain(self):
        # "Sep of 03" treats 03 as the year; plain "Sep 03" as the day.
        self.assertEqual(parse("Sep 03", default=self.default),
                         datetime(2003, 9, 3))
        self.assertEqual(parse("Sep of 03", default=self.default),
                         datetime(2003, 9, 25))
    def testWeekdayAlone(self):
        # A bare weekday resolves to the next matching day on/after default.
        self.assertEqual(parse("Wed", default=self.default),
                         datetime(2003, 10, 1))
    def testLongWeekday(self):
        self.assertEqual(parse("Wednesday", default=self.default),
                         datetime(2003, 10, 1))
    def testLongMonth(self):
        self.assertEqual(parse("October", default=self.default),
                         datetime(2003, 10, 25))
    def testZeroYear(self):
        self.assertEqual(parse("31-Dec-00", default=self.default),
                         datetime(2000, 12, 31))
    def testFuzzy(self):
        s = "Today is 25 of September of 2003, exactly " \
            "at 10:49:41 with timezone -03:00."
        self.assertEqual(parse(s, fuzzy=True),
                         datetime(2003, 9, 25, 10, 49, 41,
                                  tzinfo=self.brsttz))
    def testFuzzyWithTokens(self):
        # fuzzy_with_tokens also returns the skipped text fragments.
        s = "Today is 25 of September of 2003, exactly " \
            "at 10:49:41 with timezone -03:00."
        self.assertEqual(parse(s, fuzzy_with_tokens=True),
                         (datetime(2003, 9, 25, 10, 49, 41,
                                   tzinfo=self.brsttz),
                         ('Today is ', 'of ', ', exactly at ',
                          ' with timezone ', '.')))
    def testFuzzyAMPMProblem(self):
        # Sometimes fuzzy parsing results in AM/PM flag being set without
        # hours - if it's fuzzy it should ignore that.
        s1 = "I have a meeting on March 1, 1974."
        s2 = "On June 8th, 2020, I am going to be the first man on Mars"
        # Also don't want any erroneous AM or PMs changing the parsed time
        s3 = "Meet me at the AM/PM on Sunset at 3:00 AM on December 3rd, 2003"
        s4 = "Meet me at 3:00AM on December 3rd, 2003 at the AM/PM on Sunset"
        self.assertEqual(parse(s1, fuzzy=True), datetime(1974, 3, 1))
        self.assertEqual(parse(s2, fuzzy=True), datetime(2020, 6, 8))
        self.assertEqual(parse(s3, fuzzy=True), datetime(2003, 12, 3, 3))
        self.assertEqual(parse(s4, fuzzy=True), datetime(2003, 12, 3, 3))
    def testExtraSpace(self):
        self.assertEqual(parse("  July   4 ,  1976   12:01:02   am  "),
                         datetime(1976, 7, 4, 0, 1, 2))
    # Grab-bag of formats seen in the wild.
    def testRandomFormat1(self):
        self.assertEqual(parse("Wed, July 10, '96"),
                         datetime(1996, 7, 10, 0, 0))
    def testRandomFormat2(self):
        self.assertEqual(parse("1996.07.10 AD at 15:08:56 PDT",
                               ignoretz=True),
                         datetime(1996, 7, 10, 15, 8, 56))
    def testRandomFormat3(self):
        self.assertEqual(parse("1996.July.10 AD 12:08 PM"),
                         datetime(1996, 7, 10, 12, 8))
    def testRandomFormat4(self):
        self.assertEqual(parse("Tuesday, April 12, 1952 AD 3:30:42pm PST",
                               ignoretz=True),
                         datetime(1952, 4, 12, 15, 30, 42))
    def testRandomFormat5(self):
        self.assertEqual(parse("November 5, 1994, 8:15:30 am EST",
                               ignoretz=True),
                         datetime(1994, 11, 5, 8, 15, 30))
    def testRandomFormat6(self):
        self.assertEqual(parse("1994-11-05T08:15:30-05:00",
                               ignoretz=True),
                         datetime(1994, 11, 5, 8, 15, 30))
    def testRandomFormat7(self):
        self.assertEqual(parse("1994-11-05T08:15:30Z",
                               ignoretz=True),
                         datetime(1994, 11, 5, 8, 15, 30))
    def testRandomFormat8(self):
        self.assertEqual(parse("July 4, 1976"), datetime(1976, 7, 4))
    def testRandomFormat9(self):
        self.assertEqual(parse("7 4 1976"), datetime(1976, 7, 4))
    def testRandomFormat10(self):
        self.assertEqual(parse("4 jul 1976"), datetime(1976, 7, 4))
    def testRandomFormat11(self):
        self.assertEqual(parse("7-4-76"), datetime(1976, 7, 4))
    def testRandomFormat12(self):
        self.assertEqual(parse("19760704"), datetime(1976, 7, 4))
    def testRandomFormat13(self):
        self.assertEqual(parse("0:01:02", default=self.default),
                         datetime(2003, 9, 25, 0, 1, 2))
    def testRandomFormat14(self):
        self.assertEqual(parse("12h 01m02s am", default=self.default),
                         datetime(2003, 9, 25, 0, 1, 2))
    def testRandomFormat15(self):
        self.assertEqual(parse("0:01:02 on July 4, 1976"),
                         datetime(1976, 7, 4, 0, 1, 2))
    def testRandomFormat16(self):
        self.assertEqual(parse("0:01:02 on July 4, 1976"),
                         datetime(1976, 7, 4, 0, 1, 2))
    def testRandomFormat17(self):
        self.assertEqual(parse("1976-07-04T00:01:02Z", ignoretz=True),
                         datetime(1976, 7, 4, 0, 1, 2))
    def testRandomFormat18(self):
        self.assertEqual(parse("July 4, 1976 12:01:02 am"),
                         datetime(1976, 7, 4, 0, 1, 2))
    def testRandomFormat19(self):
        self.assertEqual(parse("Mon Jan  2 04:24:27 1995"),
                         datetime(1995, 1, 2, 4, 24, 27))
    def testRandomFormat20(self):
        self.assertEqual(parse("Tue Apr 4 00:22:12 PDT 1995", ignoretz=True),
                         datetime(1995, 4, 4, 0, 22, 12))
    def testRandomFormat21(self):
        self.assertEqual(parse("04.04.95 00:22"),
                         datetime(1995, 4, 4, 0, 22))
    def testRandomFormat22(self):
        self.assertEqual(parse("Jan 1 1999 11:23:34.578"),
                         datetime(1999, 1, 1, 11, 23, 34, 578000))
    def testRandomFormat23(self):
        self.assertEqual(parse("950404 122212"),
                         datetime(1995, 4, 4, 12, 22, 12))
    def testRandomFormat24(self):
        self.assertEqual(parse("0:00 PM, PST", default=self.default,
                               ignoretz=True),
                         datetime(2003, 9, 25, 12, 0))
    def testRandomFormat25(self):
        self.assertEqual(parse("12:08 PM", default=self.default),
                         datetime(2003, 9, 25, 12, 8))
    def testRandomFormat26(self):
        self.assertEqual(parse("5:50 A.M. on June 13, 1990"),
                         datetime(1990, 6, 13, 5, 50))
    def testRandomFormat27(self):
        self.assertEqual(parse("3rd of May 2001"), datetime(2001, 5, 3))
    def testRandomFormat28(self):
        self.assertEqual(parse("5th of March 2001"), datetime(2001, 3, 5))
    def testRandomFormat29(self):
        self.assertEqual(parse("1st of May 2003"), datetime(2003, 5, 1))
    def testRandomFormat30(self):
        self.assertEqual(parse("01h02m03", default=self.default),
                         datetime(2003, 9, 25, 1, 2, 3))
    def testRandomFormat31(self):
        self.assertEqual(parse("01h02", default=self.default),
                         datetime(2003, 9, 25, 1, 2))
    def testRandomFormat32(self):
        self.assertEqual(parse("01h02s", default=self.default),
                         datetime(2003, 9, 25, 1, 0, 2))
    def testRandomFormat33(self):
        self.assertEqual(parse("01m02", default=self.default),
                         datetime(2003, 9, 25, 0, 1, 2))
    def testRandomFormat34(self):
        self.assertEqual(parse("01m02h", default=self.default),
                         datetime(2003, 9, 25, 2, 1))
    def testRandomFormat35(self):
        self.assertEqual(parse("2004 10 Apr 11h30m", default=self.default),
                         datetime(2004, 4, 10, 11, 30))
    def testErrorType01(self):
        self.assertRaises(ValueError,
                          parse,'shouldfail')
    def testIncreasingCTime(self):
        # This test will check 200 different years, every month, every day,
        # every hour, every minute, every second, and every weekday, using
        # a delta of more or less 1 year, 1 month, 1 day, 1 minute and
        # 1 second.
        delta = timedelta(days=365+31+1, seconds=1+60+60*60)
        dt = datetime(1900, 1, 1, 0, 0, 0, 0)
        for i in range(200):
            self.assertEqual(parse(dt.ctime()), dt)
            dt += delta
    def testIncreasingISOFormat(self):
        # Round-trip isoformat() -> parse() over the same 200 samples.
        delta = timedelta(days=365+31+1, seconds=1+60+60*60)
        dt = datetime(1900, 1, 1, 0, 0, 0, 0)
        for i in range(200):
            self.assertEqual(parse(dt.isoformat()), dt)
            dt += delta
    def testMicrosecondsPrecisionError(self):
        # Skip found out that sad precision problem. :-(
        dt1 = parse("00:11:25.01")
        dt2 = parse("00:12:10.01")
        self.assertEqual(dt1.microsecond, 10000)
        self.assertEqual(dt2.microsecond, 10000)
    def testMicrosecondPrecisionErrorReturns(self):
        # One more precision issue, discovered by Eric Brown. This should
        # be the last one, as we're no longer using floating points.
        for ms in [100001, 100000, 99999, 99998,
                    10001,  10000,  9999,  9998,
                     1001,   1000,   999,   998,
                      101,    100,    99,    98]:
            dt = datetime(2008, 2, 27, 21, 26, 1, ms)
            self.assertEqual(parse(dt.isoformat()), dt)
    def testHighPrecisionSeconds(self):
        # Sub-microsecond digits are truncated, not rounded.
        self.assertEqual(parse("20080227T21:26:01.123456789"),
                         datetime(2008, 2, 27, 21, 26, 1, 123456))
    def testCustomParserInfo(self):
        # Custom parser info wasn't working, as Michael Elsdörfer discovered.
        from dateutil.parser import parserinfo, parser
        class myparserinfo(parserinfo):
            MONTHS = parserinfo.MONTHS[:]
            MONTHS[0] = ("Foo", "Foo")
        myparser = parser(myparserinfo())
        dt = myparser.parse("01/Foo/2007")
        self.assertEqual(dt, datetime(2007, 1, 1))
class EasterTest(unittest.TestCase):
    """Checks easter() against a table of known Easter dates (1990-2050).
    Each tuple holds (western_easter, orthodox_easter) for one year; the
    dates themselves carry the year used for the computation.
    """
    easterlist = [
                 # WESTERN            ORTHODOX
                  (date(1990, 4, 15), date(1990, 4, 15)),
                  (date(1991, 3, 31), date(1991, 4,  7)),
                  (date(1992, 4, 19), date(1992, 4, 26)),
                  (date(1993, 4, 11), date(1993, 4, 18)),
                  (date(1994, 4,  3), date(1994, 5,  1)),
                  (date(1995, 4, 16), date(1995, 4, 23)),
                  (date(1996, 4,  7), date(1996, 4, 14)),
                  (date(1997, 3, 30), date(1997, 4, 27)),
                  (date(1998, 4, 12), date(1998, 4, 19)),
                  (date(1999, 4,  4), date(1999, 4, 11)),
                  (date(2000, 4, 23), date(2000, 4, 30)),
                  (date(2001, 4, 15), date(2001, 4, 15)),
                  (date(2002, 3, 31), date(2002, 5,  5)),
                  (date(2003, 4, 20), date(2003, 4, 27)),
                  (date(2004, 4, 11), date(2004, 4, 11)),
                  (date(2005, 3, 27), date(2005, 5,  1)),
                  (date(2006, 4, 16), date(2006, 4, 23)),
                  (date(2007, 4,  8), date(2007, 4,  8)),
                  (date(2008, 3, 23), date(2008, 4, 27)),
                  (date(2009, 4, 12), date(2009, 4, 19)),
                  (date(2010, 4,  4), date(2010, 4,  4)),
                  (date(2011, 4, 24), date(2011, 4, 24)),
                  (date(2012, 4,  8), date(2012, 4, 15)),
                  (date(2013, 3, 31), date(2013, 5,  5)),
                  (date(2014, 4, 20), date(2014, 4, 20)),
                  (date(2015, 4,  5), date(2015, 4, 12)),
                  (date(2016, 3, 27), date(2016, 5,  1)),
                  (date(2017, 4, 16), date(2017, 4, 16)),
                  (date(2018, 4,  1), date(2018, 4,  8)),
                  (date(2019, 4, 21), date(2019, 4, 28)),
                  (date(2020, 4, 12), date(2020, 4, 19)),
                  (date(2021, 4,  4), date(2021, 5,  2)),
                  (date(2022, 4, 17), date(2022, 4, 24)),
                  (date(2023, 4,  9), date(2023, 4, 16)),
                  (date(2024, 3, 31), date(2024, 5,  5)),
                  (date(2025, 4, 20), date(2025, 4, 20)),
                  (date(2026, 4,  5), date(2026, 4, 12)),
                  (date(2027, 3, 28), date(2027, 5,  2)),
                  (date(2028, 4, 16), date(2028, 4, 16)),
                  (date(2029, 4,  1), date(2029, 4,  8)),
                  (date(2030, 4, 21), date(2030, 4, 28)),
                  (date(2031, 4, 13), date(2031, 4, 13)),
                  (date(2032, 3, 28), date(2032, 5,  2)),
                  (date(2033, 4, 17), date(2033, 4, 24)),
                  (date(2034, 4,  9), date(2034, 4,  9)),
                  (date(2035, 3, 25), date(2035, 4, 29)),
                  (date(2036, 4, 13), date(2036, 4, 20)),
                  (date(2037, 4,  5), date(2037, 4,  5)),
                  (date(2038, 4, 25), date(2038, 4, 25)),
                  (date(2039, 4, 10), date(2039, 4, 17)),
                  (date(2040, 4,  1), date(2040, 5,  6)),
                  (date(2041, 4, 21), date(2041, 4, 21)),
                  (date(2042, 4,  6), date(2042, 4, 13)),
                  (date(2043, 3, 29), date(2043, 5,  3)),
                  (date(2044, 4, 17), date(2044, 4, 24)),
                  (date(2045, 4,  9), date(2045, 4,  9)),
                  (date(2046, 3, 25), date(2046, 4, 29)),
                  (date(2047, 4, 14), date(2047, 4, 21)),
                  (date(2048, 4,  5), date(2048, 4,  5)),
                  (date(2049, 4, 18), date(2049, 4, 25)),
                  (date(2050, 4, 10), date(2050, 4, 17)),
                ]
    def testEaster(self):
        # Each calendar method must reproduce its column of the table.
        for western, orthodox in self.easterlist:
            self.assertEqual(western,  easter(western.year,  EASTER_WESTERN))
            self.assertEqual(orthodox, easter(orthodox.year, EASTER_ORTHODOX))
class TZTest(unittest.TestCase):
TZFILE_EST5EDT = b"""
VFppZgAAAAAAAAAAAAAAAAAAAAAAAAAEAAAABAAAAAAAAADrAAAABAAAABCeph5wn7rrYKCGAHCh
ms1gomXicKOD6eCkaq5wpTWnYKZTyvCnFYlgqDOs8Kj+peCqE47wqt6H4KvzcPCsvmngrdNS8K6e
S+CvszTwsH4t4LGcUXCyZ0pgs3wzcLRHLGC1XBVwticOYLc793C4BvBguRvZcLnm0mC7BPXwu8a0
YLzk1/C9r9DgvsS58L+PsuDApJvwwW+U4MKEffDDT3bgxGRf8MUvWODGTXxwxw864MgtXnDI+Fdg
yg1AcMrYOWDLiPBw0iP0cNJg++DTdeTw1EDd4NVVxvDWIL/g1zWo8NgAoeDZFYrw2eCD4Nr+p3Db
wGXg3N6JcN2pgmDevmtw34lkYOCeTXDhaUZg4n4vcONJKGDkXhFw5Vcu4OZHLfDnNxDg6CcP8OkW
8uDqBvHw6vbU4Ovm0/Ds1rbg7ca18O6/02Dvr9Jw8J+1YPGPtHDyf5dg82+WcPRfeWD1T3hw9j9b
YPcvWnD4KHfg+Q88cPoIWeD6+Fjw++g74PzYOvD9yB3g/rgc8P+n/+AAl/7wAYfh4AJ34PADcP5g
BGD9cAVQ4GAGQN9wBzDCYAeNGXAJEKRgCa2U8ArwhmAL4IVwDNmi4A3AZ3AOuYTgD6mD8BCZZuAR
iWXwEnlI4BNpR/AUWSrgFUkp8BY5DOAXKQvwGCIpYBkI7fAaAgtgGvIKcBvh7WAc0exwHcHPYB6x
znAfobFgIHYA8CGBk2AiVeLwI2qv4CQ1xPAlSpHgJhWm8Ccqc+An/sNwKQpV4CnepXAq6jfgK76H
cCzTVGAtnmlwLrM2YC9+S3AwkxhgMWdn8DJy+mAzR0nwNFLcYDUnK/A2Mr5gNwcN8Dgb2uA45u/w
Ofu84DrG0fA7257gPK/ucD27gOA+j9BwP5ti4EBvsnBBhH9gQk+UcENkYWBEL3ZwRURDYEYPWHBH
JCVgR/h08EkEB2BJ2FbwSuPpYEu4OPBMzQXgTZga8E6s5+BPd/zwUIzJ4FFhGXBSbKvgU0D7cFRM
jeBVIN1wVixv4FcAv3BYFYxgWOChcFn1bmBawINwW9VQYFypn/BdtTJgXomB8F+VFGBgaWPwYX4w
4GJJRfBjXhLgZCkn8GU99OBmEkRwZx3W4GfyJnBo/bjgadIIcGrdmuBrsepwbMa3YG2RzHBupplg
b3GucHCGe2BxWsrwcmZdYHM6rPB0Rj9gdRqO8HYvW+B2+nDweA894HjaUvB57x/gero08HvPAeB8
o1Fwfa7j4H6DM3B/jsXgAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQAB
AAEAAQABAgMBAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQAB
AAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEA
AQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQAB
AAEAAQABAAEAAQABAAEAAQABAAEAAf//x8ABAP//ubAABP//x8ABCP//x8ABDEVEVABFU1QARVdU
AEVQVAAAAAABAAAAAQ==
"""
EUROPE_HELSINKI = b"""
VFppZgAAAAAAAAAAAAAAAAAAAAAAAAAFAAAABQAAAAAAAAB1AAAABQAAAA2kc28Yy85RYMy/hdAV
I+uQFhPckBcDzZAX876QGOOvkBnToJAaw5GQG7y9EBysrhAdnJ8QHoyQEB98gRAgbHIQIVxjECJM
VBAjPEUQJCw2ECUcJxAmDBgQJwVDkCf1NJAo5SWQKdUWkCrFB5ArtPiQLKTpkC2U2pAuhMuQL3S8
kDBkrZAxXdkQMnK0EDM9uxA0UpYQNR2dEDYyeBA2/X8QOBuUkDjdYRA5+3aQOr1DEDvbWJA8pl+Q
Pbs6kD6GQZA/mxyQQGYjkEGEORBCRgWQQ2QbEEQl55BFQ/0QRgXJkEcj3xBH7uYQSQPBEEnOyBBK
46MQS66qEEzMv5BNjowQTqyhkE9ubhBQjIOQUVeKkFJsZZBTN2yQVExHkFUXTpBWLCmQVvcwkFgV
RhBY1xKQWfUoEFq29JBb1QoQXKAREF207BBef/MQX5TOEGBf1RBhfeqQYj+3EGNdzJBkH5kQZT2u
kGYItZBnHZCQZ+iXkGj9cpBpyHmQat1UkGuoW5BsxnEQbYg9kG6mUxBvaB+QcIY1EHFRPBByZhcQ
czEeEHRF+RB1EQAQdi8VkHbw4hB4DveQeNDEEHnu2ZB6sKYQe867kHyZwpB9rp2QfnmkkH+Of5AC
AQIDBAMEAwQDBAMEAwQDBAMEAwQDBAMEAwQDBAMEAwQDBAMEAwQDBAMEAwQDBAMEAwQDBAMEAwQD
BAMEAwQDBAMEAwQDBAMEAwQDBAMEAwQDBAMEAwQDBAMEAwQDBAMEAwQDBAMEAwQDBAMEAwQDBAME
AwQAABdoAAAAACowAQQAABwgAAkAACowAQQAABwgAAlITVQARUVTVABFRVQAAAAAAQEAAAABAQ==
"""
NEW_YORK = b"""
VFppZgAAAAAAAAAAAAAAAAAAAAAAAAAEAAAABAAAABcAAADrAAAABAAAABCeph5wn7rrYKCGAHCh
ms1gomXicKOD6eCkaq5wpTWnYKZTyvCnFYlgqDOs8Kj+peCqE47wqt6H4KvzcPCsvmngrdNS8K6e
S+CvszTwsH4t4LGcUXCyZ0pgs3wzcLRHLGC1XBVwticOYLc793C4BvBguRvZcLnm0mC7BPXwu8a0
YLzk1/C9r9DgvsS58L+PsuDApJvwwW+U4MKEffDDT3bgxGRf8MUvWODGTXxwxw864MgtXnDI+Fdg
yg1AcMrYOWDLiPBw0iP0cNJg++DTdeTw1EDd4NVVxvDWIL/g1zWo8NgAoeDZFYrw2eCD4Nr+p3Db
wGXg3N6JcN2pgmDevmtw34lkYOCeTXDhaUZg4n4vcONJKGDkXhFw5Vcu4OZHLfDnNxDg6CcP8OkW
8uDqBvHw6vbU4Ovm0/Ds1rbg7ca18O6/02Dvr9Jw8J+1YPGPtHDyf5dg82+WcPRfeWD1T3hw9j9b
YPcvWnD4KHfg+Q88cPoIWeD6+Fjw++g74PzYOvD9yB3g/rgc8P+n/+AAl/7wAYfh4AJ34PADcP5g
BGD9cAVQ4GEGQN9yBzDCYgeNGXMJEKRjCa2U9ArwhmQL4IV1DNmi5Q3AZ3YOuYTmD6mD9xCZZucR
iWX4EnlI6BNpR/kUWSrpFUkp+RY5DOoXKQv6GCIpaxkI7fsaAgtsGvIKfBvh7Wwc0ex8HcHPbR6x
zn0fobFtIHYA/SGBk20iVeL+I2qv7iQ1xP4lSpHuJhWm/ycqc+8n/sOAKQpV8CnepYAq6jfxK76H
gSzTVHItnmmCLrM2cy9+S4MwkxhzMWdoBDJy+nQzR0oENFLcdTUnLAU2Mr51NwcOBjgb2vY45vAG
Ofu89jrG0gY72572PK/uhj27gPY+j9CGP5ti9kBvsoZBhH92Qk+UhkNkYXZEL3aHRURDd0XzqQdH
LV/3R9OLB0kNQfdJs20HSu0j90uciYdM1kB3TXxrh062IndPXE2HUJYEd1E8L4dSdeZ3UxwRh1RV
yHdU+/OHVjWqd1blEAdYHsb3WMTyB1n+qPdapNQHW96K91yEtgddvmz3XmSYB1+eTvdgTbSHYYdr
d2ItlodjZ013ZA14h2VHL3dl7VqHZycRd2fNPIdpBvN3aa0eh2rm1XdrljsHbM/x9212HQdur9P3
b1X/B3CPtfdxNeEHcm+X93MVwwd0T3n3dP7fh3Y4lnd23sGHeBh4d3i+o4d5+Fp3ep6Fh3vYPHd8
fmeHfbged35eSYd/mAB3AAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQAB
AAEAAQABAgMBAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQAB
AAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEA
AQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQABAAEAAQAB
AAEAAQABAAEAAQABAAEAAQABAAEAAf//x8ABAP//ubAABP//x8ABCP//x8ABDEVEVABFU1QARVdU
AEVQVAAEslgAAAAAAQWk7AEAAAACB4YfggAAAAMJZ1MDAAAABAtIhoQAAAAFDSsLhQAAAAYPDD8G
AAAABxDtcocAAAAIEs6mCAAAAAkVn8qJAAAACheA/goAAAALGWIxiwAAAAwdJeoMAAAADSHa5Q0A
AAAOJZ6djgAAAA8nf9EPAAAAECpQ9ZAAAAARLDIpEQAAABIuE1ySAAAAEzDnJBMAAAAUM7hIlAAA
ABU2jBAVAAAAFkO3G5YAAAAXAAAAAQAAAAE=
"""
TZICAL_EST5EDT = """
BEGIN:VTIMEZONE
TZID:US-Eastern
LAST-MODIFIED:19870101T000000Z
TZURL:http://zones.stds_r_us.net/tz/US-Eastern
BEGIN:STANDARD
DTSTART:19671029T020000
RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10
TZOFFSETFROM:-0400
TZOFFSETTO:-0500
TZNAME:EST
END:STANDARD
BEGIN:DAYLIGHT
DTSTART:19870405T020000
RRULE:FREQ=YEARLY;BYDAY=1SU;BYMONTH=4
TZOFFSETFROM:-0500
TZOFFSETTO:-0400
TZNAME:EDT
END:DAYLIGHT
END:VTIMEZONE
"""
def testStrStart1(self):
self.assertEqual(datetime(2003, 4, 6, 1, 59,
tzinfo=tzstr("EST5EDT")).tzname(), "EST")
self.assertEqual(datetime(2003, 4, 6, 2, 00,
tzinfo=tzstr("EST5EDT")).tzname(), "EDT")
def testStrEnd1(self):
self.assertEqual(datetime(2003, 10, 26, 0, 59,
tzinfo=tzstr("EST5EDT")).tzname(), "EDT")
self.assertEqual(datetime(2003, 10, 26, 1, 00,
tzinfo=tzstr("EST5EDT")).tzname(), "EST")
def testStrStart2(self):
s = "EST5EDT,4,0,6,7200,10,0,26,7200,3600"
self.assertEqual(datetime(2003, 4, 6, 1, 59,
tzinfo=tzstr(s)).tzname(), "EST")
self.assertEqual(datetime(2003, 4, 6, 2, 00,
tzinfo=tzstr(s)).tzname(), "EDT")
def testStrEnd2(self):
s = "EST5EDT,4,0,6,7200,10,0,26,7200,3600"
self.assertEqual(datetime(2003, 10, 26, 0, 59,
tzinfo=tzstr(s)).tzname(), "EDT")
self.assertEqual(datetime(2003, 10, 26, 1, 00,
tzinfo=tzstr(s)).tzname(), "EST")
def testStrStart3(self):
s = "EST5EDT,4,1,0,7200,10,-1,0,7200,3600"
self.assertEqual(datetime(2003, 4, 6, 1, 59,
tzinfo=tzstr(s)).tzname(), "EST")
self.assertEqual(datetime(2003, 4, 6, 2, 00,
tzinfo=tzstr(s)).tzname(), "EDT")
def testStrEnd3(self):
s = "EST5EDT,4,1,0,7200,10,-1,0,7200,3600"
self.assertEqual(datetime(2003, 10, 26, 0, 59,
tzinfo=tzstr(s)).tzname(), "EDT")
self.assertEqual(datetime(2003, 10, 26, 1, 00,
tzinfo=tzstr(s)).tzname(), "EST")
def testStrStart4(self):
s = "EST5EDT4,M4.1.0/02:00:00,M10-5-0/02:00"
self.assertEqual(datetime(2003, 4, 6, 1, 59,
tzinfo=tzstr(s)).tzname(), "EST")
self.assertEqual(datetime(2003, 4, 6, 2, 00,
tzinfo=tzstr(s)).tzname(), "EDT")
def testStrEnd4(self):
s = "EST5EDT4,M4.1.0/02:00:00,M10-5-0/02:00"
self.assertEqual(datetime(2003, 10, 26, 0, 59,
tzinfo=tzstr(s)).tzname(), "EDT")
self.assertEqual(datetime(2003, 10, 26, 1, 00,
tzinfo=tzstr(s)).tzname(), "EST")
def testStrStart5(self):
s = "EST5EDT4,95/02:00:00,298/02:00"
self.assertEqual(datetime(2003, 4, 6, 1, 59,
tzinfo=tzstr(s)).tzname(), "EST")
self.assertEqual(datetime(2003, 4, 6, 2, 00,
tzinfo=tzstr(s)).tzname(), "EDT")
def testStrEnd5(self):
s = "EST5EDT4,95/02:00:00,298/02"
self.assertEqual(datetime(2003, 10, 26, 0, 59,
tzinfo=tzstr(s)).tzname(), "EDT")
self.assertEqual(datetime(2003, 10, 26, 1, 00,
tzinfo=tzstr(s)).tzname(), "EST")
def testStrStart6(self):
s = "EST5EDT4,J96/02:00:00,J299/02:00"
self.assertEqual(datetime(2003, 4, 6, 1, 59,
tzinfo=tzstr(s)).tzname(), "EST")
self.assertEqual(datetime(2003, 4, 6, 2, 00,
tzinfo=tzstr(s)).tzname(), "EDT")
def testStrEnd6(self):
s = "EST5EDT4,J96/02:00:00,J299/02"
self.assertEqual(datetime(2003, 10, 26, 0, 59,
tzinfo=tzstr(s)).tzname(), "EDT")
self.assertEqual(datetime(2003, 10, 26, 1, 00,
tzinfo=tzstr(s)).tzname(), "EST")
def testStrStr(self):
# Test that tzstr() won't throw an error if given a str instead
# of a unicode literal.
self.assertEqual(datetime(2003, 4, 6, 1, 59,
tzinfo=tzstr(str("EST5EDT"))).tzname(), "EST")
self.assertEqual(datetime(2003, 4, 6, 2, 00,
tzinfo=tzstr(str("EST5EDT"))).tzname(), "EDT")
def testStrCmp1(self):
self.assertEqual(tzstr("EST5EDT"),
tzstr("EST5EDT4,M4.1.0/02:00:00,M10-5-0/02:00"))
def testStrCmp2(self):
self.assertEqual(tzstr("EST5EDT"),
tzstr("EST5EDT,4,1,0,7200,10,-1,0,7200,3600"))
def testRangeCmp1(self):
self.assertEqual(tzstr("EST5EDT"),
tzrange("EST", -18000, "EDT", -14400,
relativedelta(hours=+2,
month=4, day=1,
weekday=SU(+1)),
relativedelta(hours=+1,
month=10, day=31,
weekday=SU(-1))))
def testRangeCmp2(self):
self.assertEqual(tzstr("EST5EDT"),
tzrange("EST", -18000, "EDT"))
def testFileStart1(self):
tz = tzfile(BytesIO(base64.decodestring(self.TZFILE_EST5EDT)))
self.assertEqual(datetime(2003, 4, 6, 1, 59, tzinfo=tz).tzname(), "EST")
self.assertEqual(datetime(2003, 4, 6, 2, 00, tzinfo=tz).tzname(), "EDT")
def testFileEnd1(self):
tz = tzfile(BytesIO(base64.decodestring(self.TZFILE_EST5EDT)))
self.assertEqual(datetime(2003, 10, 26, 0, 59, tzinfo=tz).tzname(),
"EDT")
self.assertEqual(datetime(2003, 10, 26, 1, 00, tzinfo=tz).tzname(),
"EST")
def testZoneInfoFileStart1(self):
tz = zoneinfo.gettz("EST5EDT")
self.assertEqual(datetime(2003, 4, 6, 1, 59, tzinfo=tz).tzname(), "EST",
MISSING_TARBALL)
self.assertEqual(datetime(2003, 4, 6, 2, 00, tzinfo=tz).tzname(), "EDT")
def testZoneInfoFileEnd1(self):
tz = zoneinfo.gettz("EST5EDT")
self.assertEqual(datetime(2003, 10, 26, 0, 59, tzinfo=tz).tzname(),
"EDT", MISSING_TARBALL)
self.assertEqual(datetime(2003, 10, 26, 1, 00, tzinfo=tz).tzname(),
"EST")
def testZoneInfoOffsetSignal(self):
utc = zoneinfo.gettz("UTC")
nyc = zoneinfo.gettz("America/New_York")
self.assertNotEqual(utc, None, MISSING_TARBALL)
self.assertNotEqual(nyc, None)
t0 = datetime(2007, 11, 4, 0, 30, tzinfo=nyc)
t1 = t0.astimezone(utc)
t2 = t1.astimezone(nyc)
self.assertEqual(t0, t2)
self.assertEqual(nyc.dst(t0), timedelta(hours=1))
def testICalStart1(self):
tz = tzical(StringIO(self.TZICAL_EST5EDT)).get()
self.assertEqual(datetime(2003, 4, 6, 1, 59, tzinfo=tz).tzname(), "EST")
self.assertEqual(datetime(2003, 4, 6, 2, 00, tzinfo=tz).tzname(), "EDT")
def testICalEnd1(self):
tz = tzical(StringIO(self.TZICAL_EST5EDT)).get()
self.assertEqual(datetime(2003, 10, 26, 0, 59, tzinfo=tz).tzname(), "EDT")
self.assertEqual(datetime(2003, 10, 26, 1, 00, tzinfo=tz).tzname(), "EST")
def testRoundNonFullMinutes(self):
# This timezone has an offset of 5992 seconds in 1900-01-01.
tz = tzfile(BytesIO(base64.decodestring(self.EUROPE_HELSINKI)))
self.assertEqual(str(datetime(1900, 1, 1, 0, 0, tzinfo=tz)),
"1900-01-01 00:00:00+01:40")
def testLeapCountDecodesProperly(self):
# This timezone has leapcnt, and failed to decode until
# Eugene Oden notified about the issue.
tz = tzfile(BytesIO(base64.decodestring(self.NEW_YORK)))
self.assertEqual(datetime(2007, 3, 31, 20, 12).tzname(), None)
    def testGettz(self):
        # Regression for bug 892569: str() of gettz('UTC') must not raise.
        str(gettz('UTC'))
def testBrokenIsDstHandling(self):
# tzrange._isdst() was using a date() rather than a datetime().
# Issue reported by Lennart Regebro.
dt = datetime(2007, 8, 6, 4, 10, tzinfo=tzutc())
self.assertEqual(dt.astimezone(tz=gettz("GMT+2")),
datetime(2007, 8, 6, 6, 10, tzinfo=tzstr("GMT+2")))
def testGMTHasNoDaylight(self):
# tzstr("GMT+2") improperly considered daylight saving time.
# Issue reported by Lennart Regebro.
dt = datetime(2007, 8, 6, 4, 10)
self.assertEqual(gettz("GMT+2").dst(dt), timedelta(0))
def testGMTOffset(self):
# GMT and UTC offsets have inverted signal when compared to the
# usual TZ variable handling.
dt = datetime(2007, 8, 6, 4, 10, tzinfo=tzutc())
self.assertEqual(dt.astimezone(tz=tzstr("GMT+2")),
datetime(2007, 8, 6, 6, 10, tzinfo=tzstr("GMT+2")))
self.assertEqual(dt.astimezone(tz=gettz("UTC-2")),
datetime(2007, 8, 6, 2, 10, tzinfo=tzstr("UTC-2")))
@unittest.skipUnless(sys.platform.startswith("win"), "requires Windows")
def testIsdstZoneWithNoDaylightSaving(self):
tz = tzwin.tzwin("UTC")
dt = parse("2013-03-06 19:08:15")
self.assertFalse(tz._isdst(dt))
# vim:ts=4:sw=4
|
trankmichael/scipy | refs/heads/master | scipy/io/harwell_boeing/setup.py | 128 | #!/usr/bin/env python
from __future__ import division, print_function, absolute_import
def configuration(parent_package='',top_path=None):
    """Build the numpy.distutils configuration for scipy.io.harwell_boeing."""
    from numpy.distutils.misc_util import Configuration
    cfg = Configuration('harwell_boeing', parent_package, top_path)
    cfg.add_data_dir('tests')
    return cfg
if __name__ == '__main__':
    # Allow building this subpackage standalone: ``python setup.py build``.
    from numpy.distutils.core import setup
    setup(**configuration(top_path='').todict())
|
H0w13/WebGrabber | refs/heads/master | crawler/crawler/workers/eastmoney/fund_fetcher.py | 1 | import logging
import random
import time
from ...framework.core.response import Response
from ...framework.fetcher.sock5fetcher import Sock5Fetcher
class FundFetcher(Sock5Fetcher):
    """Worker that downloads Eastmoney fund pages through a SOCKS5 proxy."""

    def __init__(self):
        Sock5Fetcher.__init__(self)

    def doWork(self, request):
        """Fetch ``request.url`` and hand the page to the parser stage.

        Sleeps a random interval first (basic rate limiting).  Returns a
        single-element list containing the Response on success, or an
        empty list if anything goes wrong during download or wrapping.
        """
        time.sleep(random.randint(0, self.sleep_time))
        try:
            page = self.url_fetch(request.url, False)
            logging.warning("%s downloaded content for %s",
                            self.__class__.__name__, request.identifier)
            logging.warning("page url is %s", request.url)
            reply = Response(request.identifier, "Parser")
            reply.addTags(request.tags)
            reply.build(page)
            return [reply]
        except Exception as excep:
            logging.error("FundFetcher.doWork() error: %s", excep)
            return []
|
Maccimo/intellij-community | refs/heads/master | python/testData/resolve/multiFile/pkgResourcesNamespace/root1/pkg/a.py | 64 | import pkg.second
# <ref>
|
roopali8/tempest | refs/heads/master | tempest/api/compute/admin/test_hosts_negative.py | 9 | # Copyright 2013 Huawei Technologies Co.,LTD.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest_lib.common.utils import data_utils
from tempest_lib import exceptions as lib_exc
from tempest.api.compute import base
from tempest import test
class HostsAdminNegativeTestJSON(base.BaseV2ComputeAdminTest):
    """Negative tests of the hosts API, exercised with admin privileges."""

    @classmethod
    def setup_clients(cls):
        super(HostsAdminNegativeTestJSON, cls).setup_clients()
        cls.client = cls.os_adm.hosts_client
        cls.non_admin_client = cls.os.hosts_client

    def _get_host_name(self):
        """Return the name of the first host known to the admin client."""
        hosts = self.client.list_hosts()
        self.assertTrue(len(hosts) >= 1)
        return hosts[0]['host_name']

    def _missing_host_name(self):
        """Return a random host name that does not exist in the deployment."""
        return data_utils.rand_name('rand_hostname')

    @test.attr(type=['negative'])
    @test.idempotent_id('dd032027-0210-4d9c-860e-69b1b8deed5f')
    def test_list_hosts_with_non_admin_user(self):
        # Listing hosts is admin-only.
        self.assertRaises(lib_exc.Forbidden,
                          self.non_admin_client.list_hosts)

    @test.attr(type=['negative'])
    @test.idempotent_id('e75b0a1a-041f-47a1-8b4a-b72a6ff36d3f')
    def test_show_host_detail_with_nonexistent_hostname(self):
        self.assertRaises(lib_exc.NotFound,
                          self.client.show_host, self._missing_host_name())

    @test.attr(type=['negative'])
    @test.idempotent_id('19ebe09c-bfd4-4b7c-81a2-e2e0710f59cc')
    def test_show_host_detail_with_non_admin_user(self):
        self.assertRaises(lib_exc.Forbidden,
                          self.non_admin_client.show_host,
                          self._get_host_name())

    @test.attr(type=['negative'])
    @test.idempotent_id('e40c72b1-0239-4ed6-ba21-81a184df1f7c')
    def test_update_host_with_non_admin_user(self):
        self.assertRaises(lib_exc.Forbidden,
                          self.non_admin_client.update_host,
                          self._get_host_name(),
                          status='enable',
                          maintenance_mode='enable')

    @test.attr(type=['negative'])
    @test.idempotent_id('76e396fe-5418-4dd3-a186-5b301edc0721')
    def test_update_host_with_extra_param(self):
        # Only 'status' and 'maintenance_mode' are valid update parameters.
        self.assertRaises(lib_exc.BadRequest,
                          self.client.update_host,
                          self._get_host_name(),
                          status='enable',
                          maintenance_mode='enable',
                          param='XXX')

    @test.attr(type=['negative'])
    @test.idempotent_id('fbe2bf3e-3246-4a95-a59f-94e4e298ec77')
    def test_update_host_with_invalid_status(self):
        # 'status' accepts only 'enable' or 'disable'.
        self.assertRaises(lib_exc.BadRequest,
                          self.client.update_host,
                          self._get_host_name(),
                          status='invalid',
                          maintenance_mode='enable')

    @test.attr(type=['negative'])
    @test.idempotent_id('ab1e230e-5e22-41a9-8699-82b9947915d4')
    def test_update_host_with_invalid_maintenance_mode(self):
        # 'maintenance_mode' accepts only 'enable' or 'disable'.
        self.assertRaises(lib_exc.BadRequest,
                          self.client.update_host,
                          self._get_host_name(),
                          status='enable',
                          maintenance_mode='invalid')

    @test.attr(type=['negative'])
    @test.idempotent_id('0cd85f75-6992-4a4a-b1bd-d11e37fd0eee')
    def test_update_host_without_param(self):
        # At least one of 'status' / 'maintenance_mode' is required.
        self.assertRaises(lib_exc.BadRequest,
                          self.client.update_host,
                          self._get_host_name())

    @test.attr(type=['negative'])
    @test.idempotent_id('23c92146-2100-4d68-b2d6-c7ade970c9c1')
    def test_update_nonexistent_host(self):
        self.assertRaises(lib_exc.NotFound,
                          self.client.update_host,
                          self._missing_host_name(),
                          status='enable',
                          maintenance_mode='enable')

    @test.attr(type=['negative'])
    @test.idempotent_id('0d981ac3-4320-4898-b674-82b61fbb60e4')
    def test_startup_nonexistent_host(self):
        self.assertRaises(lib_exc.NotFound,
                          self.client.startup_host,
                          self._missing_host_name())

    @test.attr(type=['negative'])
    @test.idempotent_id('9f4ebb7e-b2ae-4e5b-a38f-0fd1bb0ddfca')
    def test_startup_host_with_non_admin_user(self):
        self.assertRaises(lib_exc.Forbidden,
                          self.non_admin_client.startup_host,
                          self._get_host_name())

    @test.attr(type=['negative'])
    @test.idempotent_id('9e637444-29cf-4244-88c8-831ae82c31b6')
    def test_shutdown_nonexistent_host(self):
        self.assertRaises(lib_exc.NotFound,
                          self.client.shutdown_host,
                          self._missing_host_name())

    @test.attr(type=['negative'])
    @test.idempotent_id('a803529c-7e3f-4d3c-a7d6-8e1c203d27f6')
    def test_shutdown_host_with_non_admin_user(self):
        self.assertRaises(lib_exc.Forbidden,
                          self.non_admin_client.shutdown_host,
                          self._get_host_name())

    @test.attr(type=['negative'])
    @test.idempotent_id('f86bfd7b-0b13-4849-ae29-0322e83ee58b')
    def test_reboot_nonexistent_host(self):
        self.assertRaises(lib_exc.NotFound,
                          self.client.reboot_host,
                          self._missing_host_name())

    @test.attr(type=['negative'])
    @test.idempotent_id('02d79bb9-eb57-4612-abf6-2cb38897d2f8')
    def test_reboot_host_with_non_admin_user(self):
        self.assertRaises(lib_exc.Forbidden,
                          self.non_admin_client.reboot_host,
                          self._get_host_name())
|
FRidh/scipy | refs/heads/master | scipy/weave/tests/test_numpy_scalar_spec.py | 91 | from __future__ import absolute_import, print_function
import os
import sys
import tempfile
import numpy
from numpy.testing import TestCase, assert_, run_module_suite
from scipy.weave import inline_tools, ext_tools
from scipy.weave.build_tools import msvc_exists, gcc_exists
from scipy.weave.catalog import unique_file
from scipy.weave.numpy_scalar_spec import numpy_complex_scalar_converter
from weave_test_utils import dec
def unique_mod(d,file_name):
    """Return a module name (basename, no extension) for a unique file in *d*."""
    base = os.path.basename(unique_file(d, file_name))
    return os.path.splitext(base)[0]
#----------------------------------------------------------------------------
# Scalar conversion test classes
# int, float, complex
#----------------------------------------------------------------------------
class NumpyComplexScalarConverter(TestCase):
    # Tests for numpy_complex_scalar_converter.  ``compiler`` is overridden
    # by the per-compiler subclasses generated by the module-level exec loop
    # below (Msvc/Unix/Gcc variants).
    compiler = ''
    def setUp(self):
        self.converter = numpy_complex_scalar_converter()
    @dec.slow
    def test_type_match_string(self):
        # A plain Python string is not a numpy complex scalar.
        assert_(not self.converter.type_match('string'))
    @dec.slow
    def test_type_match_int(self):
        # A plain Python int is not a numpy complex scalar.
        assert_(not self.converter.type_match(5))
    @dec.slow
    def test_type_match_float(self):
        # A plain Python float is not a numpy complex scalar.
        assert_(not self.converter.type_match(5.))
    @dec.slow
    def test_type_match_complex128(self):
        # numpy.complex128 is the type this converter is for.
        assert_(self.converter.type_match(numpy.complex128(5.+1j)))
    @dec.slow
    def test_complex_var_in(self):
        # Compile a tiny extension that takes a complex argument, then check
        # that numpy.complex128 is accepted while float/str raise TypeError.
        mod_name = sys._getframe().f_code.co_name + self.compiler
        mod_name = unique_mod(test_dir,mod_name)
        mod = ext_tools.ext_module(mod_name)
        a = numpy.complex(1.+1j)
        code = "a=std::complex<double>(2.,2.);"
        test = ext_tools.ext_function('test',code,['a'])
        mod.add_function(test)
        mod.compile(location=test_dir, compiler=self.compiler)
        # Py2-style exec of an import statement: binds the compiled module's
        # ``test`` function into the local scope.
        exec('from ' + mod_name + ' import test')
        b = numpy.complex128(1.+1j)
        test(b)
        # Wrong argument types are expected to raise TypeError; the raise is
        # swallowed, so success without an exception is tolerated too.
        try:
            b = 1.
            test(b)
        except TypeError:
            pass
        try:
            b = 'abc'
            test(b)
        except TypeError:
            pass
    @dec.slow
    def test_complex_return(self):
        # Round-trip: a complex value goes in, is modified in C++, and comes
        # back as a Python complex built via PyComplex_FromDoubles.
        mod_name = sys._getframe().f_code.co_name + self.compiler
        mod_name = unique_mod(test_dir,mod_name)
        mod = ext_tools.ext_module(mod_name)
        a = 1.+1j
        code = """
               a= a + std::complex<double>(2.,2.);
               return_val = PyComplex_FromDoubles(a.real(),a.imag());
               """
        test = ext_tools.ext_function('test',code,['a'])
        mod.add_function(test)
        mod.compile(location=test_dir, compiler=self.compiler)
        exec('from ' + mod_name + ' import test')
        b = 1.+1j
        c = test(b)
        assert_(c == 3.+3j)
    @dec.slow
    def test_inline(self):
        # The inline() path must also understand numpy complex scalars.
        a = numpy.complex128(1+1j)
        result = inline_tools.inline("return_val=1.0/a;",['a'])
        assert_(result == .5-.5j)
# Generate per-compiler TestCase subclasses for every *Converter class
# defined above: an Msvc variant when MSVC is available, otherwise a
# default (Unix) variant, plus a Gcc variant whenever gcc is present.
for _n in dir():
    if _n[-9:] == 'Converter':
        if msvc_exists():
            exec("class Test%sMsvc(%s):\n compiler = 'msvc'" % (_n,_n))
        else:
            exec("class Test%sUnix(%s):\n compiler = ''" % (_n,_n))
        if gcc_exists():
            exec("class Test%sGcc(%s):\n compiler = 'gcc'" % (_n,_n))
def setup_test_location():
    """Create a scratch directory for compiled modules and prepend it to sys.path."""
    scratch = tempfile.mkdtemp()
    sys.path.insert(0, scratch)
    return scratch
# Module-level scratch directory shared by all tests in this file.
test_dir = setup_test_location()
def teardown_test_location():
    """Remove the ``test_files`` scratch dir from sys.path and return its path.

    Note: this computes a fixed ``<tmp>/test_files`` path rather than the
    directory actually created by setup_test_location(); behavior kept as-is.
    """
    # Fix: the original re-imported tempfile locally, shadowing the
    # module-level import for no reason.
    test_dir = os.path.join(tempfile.gettempdir(), 'test_files')
    if sys.path[0] == test_dir:
        sys.path = sys.path[1:]
    return test_dir
# Prune the generated test classes that cannot run in this environment:
# without MSVC drop the Msvc variants, otherwise drop the Unix ones; drop
# the Gcc variants unless gcc AND msvc coexist on Windows.
if not msvc_exists():
    for _n in dir():
        if _n[:8] == 'TestMsvc':
            exec('del '+_n)
else:
    for _n in dir():
        if _n[:8] == 'TestUnix':
            exec('del '+_n)
if not (gcc_exists() and msvc_exists() and sys.platform == 'win32'):
    for _n in dir():
        if _n[:7] == 'TestGcc':
            exec('del '+_n)
if __name__ == "__main__":
    # Run this module's tests through numpy's test runner.
    run_module_suite()
|
Pluto-tv/blink-crosswalk | refs/heads/master | Tools/Scripts/webkitpy/thirdparty/coverage/data.py | 64 | """Coverage data for Coverage."""
import os
from coverage.backward import pickle, sorted # pylint: disable=W0622
from coverage.files import PathAliases
class CoverageData(object):
    """Manages collected coverage data, including file storage.
    The data file format is a pickled dict, with these keys:
    * collector: a string identifying the collecting software
    * lines: a dict mapping filenames to sorted lists of line numbers
      executed:
        { 'file1': [17,23,45], 'file2': [1,2,3], ... }
    * arcs: a dict mapping filenames to sorted lists of line number pairs:
        { 'file1': [(17,23), (17,25), (25,26)], ... }
    """
    def __init__(self, basename=None, collector=None):
        """Create a CoverageData.
        `basename` is the name of the file to use for storing data.
        `collector` is a string describing the coverage measurement software.
        """
        self.collector = collector or 'unknown'
        # Whether data is persisted to self.filename at all; see usefile().
        self.use_file = True
        # Construct the filename that will be used for data file storage, if we
        # ever do any file storage.
        self.filename = basename or ".coverage"
        self.filename = os.path.abspath(self.filename)
        # A map from canonical Python source file name to a dictionary in
        # which there's an entry for each line number that has been
        # executed:
        #
        # {
        # 'filename1.py': { 12: None, 47: None, ... },
        # ...
        # }
        #
        self.lines = {}
        # A map from canonical Python source file name to a dictionary with an
        # entry for each pair of line numbers forming an arc:
        #
        # {
        # 'filename1.py': { (12,14): None, (47,48): None, ... },
        # ...
        # }
        #
        self.arcs = {}
        # Bind stdlib helpers onto the instance -- presumably so methods
        # remain usable if module globals are cleared during interpreter
        # shutdown; TODO confirm.
        self.os = os
        self.sorted = sorted
        self.pickle = pickle
    def usefile(self, use_file=True):
        """Set whether or not to use a disk file for data."""
        self.use_file = use_file
    def read(self):
        """Read coverage data from the coverage data file (if it exists)."""
        if self.use_file:
            self.lines, self.arcs = self._read_file(self.filename)
        else:
            self.lines, self.arcs = {}, {}
    def write(self, suffix=None):
        """Write the collected coverage data to a file.
        `suffix` is a suffix to append to the base file name. This can be used
        for multiple or parallel execution, so that many coverage data files
        can exist simultaneously. A dot will be used to join the base name and
        the suffix.
        """
        if self.use_file:
            filename = self.filename
            if suffix:
                filename += "." + suffix
            self.write_file(filename)
    def erase(self):
        """Erase the data, both in this object, and from its file storage."""
        if self.use_file:
            if self.filename and os.path.exists(self.filename):
                os.remove(self.filename)
        self.lines = {}
        self.arcs = {}
    def line_data(self):
        """Return the map from filenames to lists of line numbers executed."""
        return dict(
            [(f, self.sorted(lmap.keys())) for f, lmap in self.lines.items()]
            )
    def arc_data(self):
        """Return the map from filenames to lists of line number pairs."""
        return dict(
            [(f, self.sorted(amap.keys())) for f, amap in self.arcs.items()]
            )
    def write_file(self, filename):
        """Write the coverage data to `filename`."""
        # Create the file data.
        data = {}
        data['lines'] = self.line_data()
        arcs = self.arc_data()
        if arcs:
            data['arcs'] = arcs
        if self.collector:
            data['collector'] = self.collector
        # Write the pickle to the file.
        fdata = open(filename, 'wb')
        try:
            # Pickle protocol 2: binary, readable by older Pythons.
            self.pickle.dump(data, fdata, 2)
        finally:
            fdata.close()
    def read_file(self, filename):
        """Read the coverage data from `filename`."""
        self.lines, self.arcs = self._read_file(filename)
    def raw_data(self, filename):
        """Return the raw pickled data from `filename`."""
        fdata = open(filename, 'rb')
        try:
            data = pickle.load(fdata)
        finally:
            fdata.close()
        return data
    def _read_file(self, filename):
        """Return the stored coverage data from the given file.
        Returns two values, suitable for assigning to `self.lines` and
        `self.arcs`.
        """
        lines = {}
        arcs = {}
        try:
            data = self.raw_data(filename)
            if isinstance(data, dict):
                # Unpack the 'lines' item.
                lines = dict([
                    (f, dict.fromkeys(linenos, None))
                    for f, linenos in data.get('lines', {}).items()
                    ])
                # Unpack the 'arcs' item.
                arcs = dict([
                    (f, dict.fromkeys(arcpairs, None))
                    for f, arcpairs in data.get('arcs', {}).items()
                    ])
        except Exception:
            # NOTE(review): any missing/unreadable/corrupt data file is
            # silently treated as empty coverage data.
            pass
        return lines, arcs
    def combine_parallel_data(self, aliases=None):
        """Combine a number of data files together.
        Treat `self.filename` as a file prefix, and combine the data from all
        of the data files starting with that prefix plus a dot.
        If `aliases` is provided, it's a `PathAliases` object that is used to
        re-map paths to match the local machine's.
        """
        aliases = aliases or PathAliases()
        data_dir, local = os.path.split(self.filename)
        localdot = local + '.'
        for f in os.listdir(data_dir or '.'):
            if f.startswith(localdot):
                full_path = os.path.join(data_dir, f)
                new_lines, new_arcs = self._read_file(full_path)
                for filename, file_data in new_lines.items():
                    filename = aliases.map(filename)
                    self.lines.setdefault(filename, {}).update(file_data)
                for filename, file_data in new_arcs.items():
                    filename = aliases.map(filename)
                    self.arcs.setdefault(filename, {}).update(file_data)
                if f != local:
                    # Remove each per-process data file once it is merged in
                    # (never the base file itself).
                    os.remove(full_path)
    def add_line_data(self, line_data):
        """Add executed line data.
        `line_data` is { filename: { lineno: None, ... }, ...}
        """
        for filename, linenos in line_data.items():
            self.lines.setdefault(filename, {}).update(linenos)
    def add_arc_data(self, arc_data):
        """Add measured arc data.
        `arc_data` is { filename: { (l1,l2): None, ... }, ...}
        """
        for filename, arcs in arc_data.items():
            self.arcs.setdefault(filename, {}).update(arcs)
    def touch_file(self, filename):
        """Ensure that `filename` appears in the data, empty if needed."""
        self.lines.setdefault(filename, {})
    def measured_files(self):
        """A list of all files that had been measured."""
        return list(self.lines.keys())
    def executed_lines(self, filename):
        """A map containing all the line numbers executed in `filename`.
        If `filename` hasn't been collected at all (because it wasn't executed)
        then return an empty map.
        """
        return self.lines.get(filename) or {}
    def executed_arcs(self, filename):
        """A map containing all the arcs executed in `filename`."""
        return self.arcs.get(filename) or {}
    def add_to_hash(self, filename, hasher):
        """Contribute `filename`'s data to the Md5Hash `hasher`."""
        hasher.update(self.executed_lines(filename))
        hasher.update(self.executed_arcs(filename))
    def summary(self, fullpath=False):
        """Return a dict summarizing the coverage data.
        Keys are based on the filenames, and values are the number of executed
        lines. If `fullpath` is true, then the keys are the full pathnames of
        the files, otherwise they are the basenames of the files.
        """
        summ = {}
        if fullpath:
            filename_fn = lambda f: f
        else:
            filename_fn = self.os.path.basename
        for filename, lines in self.lines.items():
            summ[filename_fn(filename)] = len(lines)
        return summ
    def has_arcs(self):
        """Does this data have arcs?"""
        return bool(self.arcs)
if __name__ == '__main__':
    # Ad-hoc debugging aid: pretty-print the raw pickled contents of a data
    # file (path from argv[1], or the default .coverage file).
    import pprint, sys
    covdata = CoverageData()
    if sys.argv[1:]:
        fname = sys.argv[1]
    else:
        fname = covdata.filename
    pprint.pprint(covdata.raw_data(fname))
|
jirikuncar/invenio | refs/heads/master | invenio/modules/jsonalchemy/testsuite/functions/sync_meeting_names.py | 33 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2014 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
from invenio.modules.jsonalchemy.jsonext.functions.util_merge_fields_info_list \
import util_merge_fields_info_list
def sync_meeting_names(self, field_name, connected_field, action):  # pylint: disable=W0613
    """
    Sync corporate names content only when `__setitem__` or similar is used
    """
    if action != 'set':
        return
    if field_name == 'corporate_names' and self.get('corporate_names'):
        names = self['corporate_names']
        self.__setitem__('_first_corporate_name', names[0],
                         exclude=['connect'])
        if names[1:]:
            self.__setitem__('_additional_corporate_names', names[1:],
                             exclude=['connect'])
    elif field_name in ('_first_author', '_additional_authors'):
        merged = util_merge_fields_info_list(
            self, ['_first_corporate_name', '_additional_corporate_names'])
        self.__setitem__('corporate_names', merged, exclude=['connect'])
|
bitchute-kodi/bitchute-kodi | refs/heads/master | chardet/euctwprober.py | 289 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import EUCTWDistributionAnalysis
from .mbcssm import EUCTW_SM_MODEL
class EUCTWProber(MultiByteCharSetProber):
    """Multi-byte prober for the EUC-TW (Taiwan) encoding."""

    def __init__(self):
        super(EUCTWProber, self).__init__()
        self.distribution_analyzer = EUCTWDistributionAnalysis()
        self.coding_sm = CodingStateMachine(EUCTW_SM_MODEL)
        self.reset()

    @property
    def charset_name(self):
        return "EUC-TW"

    @property
    def language(self):
        return "Taiwan"
|
ghadagesandip/zend-doctrine | refs/heads/master | vendor/doctrine/orm/docs/en/_exts/configurationblock.py | 2577 | #Copyright (c) 2010 Fabien Potencier
#
#Permission is hereby granted, free of charge, to any person obtaining a copy
#of this software and associated documentation files (the "Software"), to deal
#in the Software without restriction, including without limitation the rights
#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the Software is furnished
#to do so, subject to the following conditions:
#
#The above copyright notice and this permission notice shall be included in all
#copies or substantial portions of the Software.
#
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
#THE SOFTWARE.
from docutils.parsers.rst import Directive, directives
from docutils import nodes
from string import upper
class configurationblock(nodes.General, nodes.Element):
    # Custom docutils node emitted by the ConfigurationBlock directive;
    # rendered by the registered visit/depart functions.
    pass
class ConfigurationBlock(Directive):
    """reST directive grouping code blocks for several configuration formats.

    Each literal block nested inside the directive is emitted as a
    bullet-list entry titled with the human-readable name of its language
    (per the ``formats`` mapping), wrapped in a ``configurationblock`` node.
    """

    has_content = True
    required_arguments = 0
    optional_arguments = 0
    final_argument_whitespace = True
    option_spec = {}

    # Maps a Pygments language id to the label shown above its block.
    formats = {
        'html': 'HTML',
        'xml': 'XML',
        'php': 'PHP',
        'yaml': 'YAML',
        'jinja': 'Twig',
        'html+jinja': 'Twig',
        'jinja+html': 'Twig',
        'php+html': 'PHP',
        'html+php': 'PHP',
        'ini': 'INI',
        'php-annotations': 'Annotations',
    }

    def run(self):
        # Parse the directive body into a scratch node.
        # (Fix: dropped the unused `env` local, the unused enumerate index,
        # and the dead commented-out target-node code.)
        node = nodes.Element()
        node.document = self.state.document
        self.state.nested_parse(self.content, self.content_offset, node)

        entries = []
        for child in node:
            if not isinstance(child, nodes.literal_block):
                continue
            # Add a title (the language name) before each block.
            title = self.formats[child['language']]
            para = nodes.paragraph()
            para += [nodes.emphasis(title, title), child]
            entry = nodes.list_item('')
            entry.append(para)
            entries.append(entry)

        resultnode = configurationblock()
        resultnode.append(nodes.bullet_list('', *entries))
        return [resultnode]
def visit_configurationblock_html(self, node):
    # HTML writer: open a wrapper <div> the theme's JS/CSS can target.
    self.body.append(self.starttag(node, 'div', CLASS='configuration-block'))
def depart_configurationblock_html(self, node):
    # HTML writer: close the wrapper opened in the visit function.
    self.body.append('</div>\n')
def visit_configurationblock_latex(self, node):
    # LaTeX writer: no extra wrapper markup; children render normally.
    pass
def depart_configurationblock_latex(self, node):
    # LaTeX writer: nothing to close.
    pass
def setup(app):
    # Sphinx extension entry point: register the custom node, its per-writer
    # renderers, and the ``configuration-block`` directive.
    app.add_node(configurationblock,
                 html=(visit_configurationblock_html, depart_configurationblock_html),
                 latex=(visit_configurationblock_latex, depart_configurationblock_latex))
    app.add_directive('configuration-block', ConfigurationBlock)
|
BancDelTempsDAW/symfony | refs/heads/master | vendor/doctrine/orm/docs/en/_exts/configurationblock.py | 2577 | #Copyright (c) 2010 Fabien Potencier
#
#Permission is hereby granted, free of charge, to any person obtaining a copy
#of this software and associated documentation files (the "Software"), to deal
#in the Software without restriction, including without limitation the rights
#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the Software is furnished
#to do so, subject to the following conditions:
#
#The above copyright notice and this permission notice shall be included in all
#copies or substantial portions of the Software.
#
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
#THE SOFTWARE.
from docutils.parsers.rst import Directive, directives
from docutils import nodes
from string import upper
class configurationblock(nodes.General, nodes.Element):
    # Custom docutils node emitted by the ConfigurationBlock directive;
    # rendered by the registered visit/depart functions.
    pass
class ConfigurationBlock(Directive):
    """reST directive grouping code blocks for several configuration formats.

    Each literal block nested inside the directive is emitted as a
    bullet-list entry titled with the human-readable name of its language
    (per the ``formats`` mapping), wrapped in a ``configurationblock`` node.
    """

    has_content = True
    required_arguments = 0
    optional_arguments = 0
    final_argument_whitespace = True
    option_spec = {}

    # Maps a Pygments language id to the label shown above its block.
    formats = {
        'html': 'HTML',
        'xml': 'XML',
        'php': 'PHP',
        'yaml': 'YAML',
        'jinja': 'Twig',
        'html+jinja': 'Twig',
        'jinja+html': 'Twig',
        'php+html': 'PHP',
        'html+php': 'PHP',
        'ini': 'INI',
        'php-annotations': 'Annotations',
    }

    def run(self):
        # Parse the directive body into a scratch node.
        # (Fix: dropped the unused `env` local, the unused enumerate index,
        # and the dead commented-out target-node code.)
        node = nodes.Element()
        node.document = self.state.document
        self.state.nested_parse(self.content, self.content_offset, node)

        entries = []
        for child in node:
            if not isinstance(child, nodes.literal_block):
                continue
            # Add a title (the language name) before each block.
            title = self.formats[child['language']]
            para = nodes.paragraph()
            para += [nodes.emphasis(title, title), child]
            entry = nodes.list_item('')
            entry.append(para)
            entries.append(entry)

        resultnode = configurationblock()
        resultnode.append(nodes.bullet_list('', *entries))
        return [resultnode]
def visit_configurationblock_html(self, node):
    # HTML writer: open a wrapper <div> the theme's JS/CSS can target.
    self.body.append(self.starttag(node, 'div', CLASS='configuration-block'))
def depart_configurationblock_html(self, node):
    # HTML writer: close the wrapper opened in the visit function.
    self.body.append('</div>\n')
def visit_configurationblock_latex(self, node):
    # LaTeX writer: no extra wrapper markup; children render normally.
    pass
def depart_configurationblock_latex(self, node):
    # LaTeX writer: nothing to close.
    pass
def setup(app):
    # Sphinx extension entry point: register the custom node, its per-writer
    # renderers, and the ``configuration-block`` directive.
    app.add_node(configurationblock,
                 html=(visit_configurationblock_html, depart_configurationblock_html),
                 latex=(visit_configurationblock_latex, depart_configurationblock_latex))
    app.add_directive('configuration-block', ConfigurationBlock)
|
WladimirSidorenko/DiscourseSenser | refs/heads/master | tests/xgboost/test_xgb_explicit.py | 1 | #!/usr/bin/env python
# -*- coding: utf-8; mode: python; -*-
##################################################################
# Imports
from __future__ import absolute_import
import dsenser
from dsenser.xgboost.explicit import XGBoostExplicitSenser
from mock import patch
from unittest import TestCase
##################################################################
# Constants
##################################################################
# Test Classes
class TestXGBoostExplict(TestCase):
    # NOTE(review): class name is missing an 'i' ("Explict"); renaming would
    # change the public test identifier, so it is only flagged here.
    def test_train(self):
        # Patch the base trainer so no real model fitting happens; this only
        # checks that XGBoostExplicitSenser.train() accepts empty data.
        with patch("dsenser.wang.wangbase.WangBaseSenser.train",
                   autospec=True):
            xgb = XGBoostExplicitSenser()
            xgb.train([], {})
|
Perkville/django-autocomplete-light | refs/heads/v2 | test_project/linked_data/__init__.py | 5 | default_app_config = 'linked_data.apps.TestApp'
|
jeezybrick/django | refs/heads/master | tests/test_client/auth_backends.py | 315 | from django.contrib.auth.backends import ModelBackend
class TestClientBackend(ModelBackend):
    # Minimal authentication backend used by the test-client tests; inherits
    # all behavior from ModelBackend unchanged.
    pass
|
iulian787/spack | refs/heads/develop | var/spack/repos/builtin/packages/perl-file-which/package.py | 5 | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PerlFileWhich(PerlPackage):
    """Perl implementation of the which utility as an API"""
    homepage = "http://cpansearch.perl.org/src/PLICEASE/File-Which-1.22/lib/File/Which.pm"
    url = "http://search.cpan.org/CPAN/authors/id/P/PL/PLICEASE/File-Which-1.22.tar.gz"
    # Only one upstream release is currently packaged.
    version('1.22', sha256='e8a8ffcf96868c6879e82645db4ff9ef00c2d8a286fed21971e7280f52cf0dd4')
|
ramusus/django-odnoklassniki-discussions | refs/heads/master | odnoklassniki_discussions/models.py | 1 | # -*- coding: utf-8 -*-
import logging
import re
from django.contrib.contenttypes import generic
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ObjectDoesNotExist
from django.db import models
from django.utils.translation import ugettext as _
from m2m_history.fields import ManyToManyHistoryField
from odnoklassniki_api.decorators import atomic, fetch_all
from odnoklassniki_api.fields import JSONField
from odnoklassniki_api.models import (OdnoklassnikiModel, OdnoklassnikiPKModel,
OdnoklassnikiTimelineManager)
from odnoklassniki_users.models import User
log = logging.getLogger('odnoklassniki_discussions')
# Discussion object types accepted by the Odnoklassniki discussions API.
DISCUSSION_TYPES = [
    'GROUP_TOPIC',
    'GROUP_PHOTO',
    'USER_STATUS',
    'USER_PHOTO',
    'USER_FORUM',
    'USER_ALBUM',
    'USER_2LVL_FORUM',
    'MOVIE',
    'SCHOOL_FORUM',
    'HAPPENING_TOPIC',
    'GROUP_MOVIE',
    'CITY_NEWS',
    'CHAT',
]
# Comment object types accepted by the API.
COMMENT_TYPES = ['ACTIVE_MESSAGE']
# Django choices tuples derived from the type lists above.
DISCUSSION_TYPE_CHOICES = [(type, type) for type in DISCUSSION_TYPES]
COMMENT_TYPE_CHOICES = [(type, type) for type in COMMENT_TYPES]
DISCUSSION_TYPE_DEFAULT = 'GROUP_TOPIC'
class DiscussionRemoteManager(OdnoklassnikiTimelineManager):
    # Remote manager that fetches discussions from the Odnoklassniki API and
    # mirrors them into local Django models.
    @atomic
    def fetch_one(self, id, type, **kwargs):
        # Fetch a single discussion of the given type and store it locally.
        if type not in DISCUSSION_TYPES:
            raise ValueError("Wrong value of type argument %s" % type)
        kwargs['discussionId'] = id
        kwargs['discussionType'] = type
        if 'fields' not in kwargs:
            kwargs['fields'] = self.get_request_fields('discussion', 'media_topic', 'group', 'user', 'theme', 'poll',
                                                       'group_photo', prefix=True)
        # NOTE(review): super(OdnoklassnikiTimelineManager, ...) deliberately
        # skips both this class's and the timeline manager's get() overrides
        # -- presumably to avoid the @fetch_all pagination wrapper; confirm.
        result = super(OdnoklassnikiTimelineManager, self).get(method='get_one', **kwargs)
        return self.get_or_create_from_instance(result)
    @fetch_all
    def get(self, **kwargs):
        # @fetch_all expects (instances, response) so it can paginate.
        return super(DiscussionRemoteManager, self).get(**kwargs), self.response
    def parse_response(self, response, extra_fields=None):
        # Normalize the three response shapes this manager sees before
        # delegating to the base parser.
        if 'media_topics' in response:
            response = response['media_topics']
        elif 'owner_id' in extra_fields:
            # in case of fetch_group
            # TODO: change condition based on response
            # has_more not in dict and we need to handle pagination manualy
            if 'feeds' not in response:
                response.pop('anchor', None)
                return self.model.objects.none()
            else:
                response = [feed for feed in response['feeds'] if feed['pattern'] == 'POST']
        else:
            # in case of fetch_one
            pass
        return super(DiscussionRemoteManager, self).parse_response(response, extra_fields)
#     def update_discussions_count(self, instances, group, *args, **kwargs):
#         group.discussions_count = len(instances)
#         group.save()
#         return instances
    @atomic
    @fetch_all(has_more=None)
    def fetch_group(self, group, count=100, **kwargs):
        # Fetch a group's POST feed entries as discussions.  Import is local
        # to avoid a circular dependency with odnoklassniki_groups.
        from odnoklassniki_groups.models import Group
        kwargs['gid'] = group.pk
        kwargs['count'] = int(count)
        kwargs['patterns'] = 'POST'
        kwargs['fields'] = self.get_request_fields('feed', 'media_topic', prefix=True)
        # Attach owner info so parse_response can detect the fetch_group case.
        kwargs['extra_fields'] = {
            'owner_id': group.pk, 'owner_content_type_id': ContentType.objects.get_for_model(Group).pk}
        discussions = super(DiscussionRemoteManager, self).fetch(method='stream', **kwargs)
        return discussions, self.response
    @atomic
    def fetch_mediatopics(self, ids, **kwargs):
        # Bulk-fetch media topics by id via the mget endpoint.
        kwargs['topic_ids'] = ','.join(map(str, ids))
        kwargs['media_limit'] = 3
        if 'fields' not in kwargs:
            kwargs['fields'] = self.get_request_fields('media_topic', prefix=True)
        return super(DiscussionRemoteManager, self).fetch(method='mget', **kwargs)
class CommentRemoteManager(OdnoklassnikiTimelineManager):
    """Remote manager that fetches discussion comments from the API."""

    def parse_response(self, response, extra_fields=None):
        # The API wraps the list of comments in a 'comments' key.
        return super(CommentRemoteManager, self).parse_response(response.get('comments', []), extra_fields)

    @fetch_all(has_more='has_more')
    def get(self, discussion, count=100, **kwargs):
        """Get one page of comments belonging to ``discussion``."""
        kwargs['discussionId'] = discussion.id
        kwargs['discussionType'] = discussion.object_type
        kwargs['count'] = int(count)
        # Pre-populate the discussion FK on every created comment.
        kwargs['extra_fields'] = {'discussion_id': discussion.id}
        comments = super(CommentRemoteManager, self).get(**kwargs)
        return comments, self.response

    @atomic
    def fetch(self, discussion, **kwargs):
        '''
        Get all comments, reverse order and save them, because we need to store reply_to_comment relation
        '''
        comments = super(CommentRemoteManager, self).fetch(discussion=discussion, **kwargs)
        # Denormalization: keep comments_count on the discussion in sync.
        discussion.comments_count = comments.count()
        discussion.save()
        return comments
class Discussion(OdnoklassnikiPKModel):
    """Odnoklassniki discussion (group topic, photo, movie, poll, ...).

    ``owner`` is the user or group the discussion belongs to; ``author`` is
    the user or group that created it.  Both are generic relations resolved
    in :meth:`save` from the raw ``entities``/``ref_objects`` API payloads.
    """
    methods_namespace = ''
    remote_pk_field = 'object_id'
    owner_content_type = models.ForeignKey(ContentType, related_name='odnoklassniki_discussions_owners')
    owner_id = models.BigIntegerField(db_index=True)
    owner = generic.GenericForeignKey('owner_content_type', 'owner_id')
    author_content_type = models.ForeignKey(ContentType, related_name='odnoklassniki_discussions_authors')
    author_id = models.BigIntegerField(db_index=True)
    author = generic.GenericForeignKey('author_content_type', 'author_id')
    object_type = models.CharField(max_length=20, choices=DISCUSSION_TYPE_CHOICES, default=DISCUSSION_TYPE_DEFAULT)
    title = models.TextField()
    message = models.TextField()
    date = models.DateTimeField(db_index=True)
    last_activity_date = models.DateTimeField(null=True)
    last_user_access_date = models.DateTimeField(null=True)
    new_comments_count = models.PositiveIntegerField(default=0)
    comments_count = models.PositiveIntegerField(default=0)
    likes_count = models.PositiveIntegerField(default=0)
    reshares_count = models.PositiveIntegerField(default=0)
    # vote / poll fields
    last_vote_date = models.DateTimeField(null=True)
    votes_count = models.PositiveIntegerField(default=0)
    question = models.TextField()
    liked_it = models.BooleanField(default=False)
    # Raw API payloads kept verbatim for later resolution.
    entities = JSONField(null=True)
    ref_objects = JSONField(null=True)
    attrs = JSONField(null=True)
    like_users = ManyToManyHistoryField(User, related_name='like_discussions')
    remote = DiscussionRemoteManager(methods={
        'get': 'discussions.getList',
        'get_one': 'discussions.get',
        'get_likes': 'discussions.getDiscussionLikes',
        'stream': 'stream.get',
        'mget': 'mediatopic.getByIds',
    })

    class Meta:
        verbose_name = _('Odnoklassniki discussion')
        verbose_name_plural = _('Odnoklassniki discussions')

    def _substitute(self, old_instance):
        """Keep the previously-known theme image when a refetch lost it."""
        super(Discussion, self)._substitute(old_instance)
        try:
            if self.entities['themes'][0]['images'][0] is None:
                self.entities['themes'][0]['images'][0] = old_instance.entities['themes'][0]['images'][0]
        except (KeyError, TypeError):
            pass

    def save(self, *args, **kwargs):
        """Resolve ``owner``/``author`` from ``entities``/``ref_objects``
        and persist the discussion."""
        # make 2 dicts {id: instance} for groups and users from entities
        if self.entities:
            entities = {
                'users': [],
                'groups': [],
            }
            for resource in self.entities.get('users', []):
                entities['users'] += [User.remote.get_or_create_from_resource(resource)]
            for resource in self.entities.get('groups', []):
                from odnoklassniki_groups.models import Group
                entities['groups'] += [Group.remote.get_or_create_from_resource(resource)]
            for field in ['users', 'groups']:
                entities[field] = dict([(instance.id, instance) for instance in entities[field]])
        # NOTE(review): the two blocks below assume ``entities`` was built
        # above, i.e. that ``self.entities`` is always set whenever
        # ``ref_objects`` or an unresolved ``author_id`` is present --
        # confirm against the fetch paths; otherwise this raises NameError.
        # set owner
        if self.ref_objects:
            for resource in self.ref_objects:
                id = int(resource['id'])
                if resource['type'] == 'GROUP':
                    self.owner = entities['groups'][id]
                elif resource['type'] == 'USER':
                    # bug fix: the lookup dict is keyed 'users', not 'user'
                    self.owner = entities['users'][id]
                else:
                    log.warning("Strange type of object in ref_objects %s for discussion ID=%s" % (resource, self.id))
        # set author
        if self.author_id:
            if self.author_id in entities['groups']:
                self.author = entities['groups'][self.author_id]
            elif self.author_id in entities['users']:
                self.author = entities['users'][self.author_id]
            else:
                log.warning("Impossible to find author with ID=%s in entities of discussion ID=%s" %
                            (self.author_id, self.id))
                self.author_id = None
        if self.owner and not self.author_id:
            # if no author_id (owner_uid), it's equal to owner from ref_objects
            self.author = self.owner
        # Fall back to bare get_or_create when only the ids are known.
        if self.author_id and not self.author:
            self.author = self.author_content_type.model_class().objects.get_or_create(pk=self.author_id)[0]
        if self.owner_id and not self.owner:
            self.owner = self.owner_content_type.model_class().objects.get_or_create(pk=self.owner_id)[0]
        return super(Discussion, self).save(*args, **kwargs)

    @property
    def refresh_kwargs(self):
        # Arguments needed by DiscussionRemoteManager.fetch_one to refetch self.
        return {'id': self.id, 'type': self.object_type or DISCUSSION_TYPE_DEFAULT}

    @property
    def slug(self):
        return '%s/topic/%s' % (self.owner.slug, self.id)

    def parse(self, response):
        """Normalize the many response shapes (discussions.get, stream.get,
        mediatopic.getByIds) into this model's field names."""
        from odnoklassniki_groups.models import Group
        if 'discussion' in response:
            response.update(response.pop('discussion'))
        # Discussion.remote.fetch_one
        if 'entities' in response and 'media_topics' in response['entities'] \
                and len(response['entities']['media_topics']) == 1:
            response.update(response['entities'].pop('media_topics')[0])
            if 'polls' in response['entities']:
                response.update(response['entities'].pop('polls')[0])
                if 'vote_summary' in response:
                    response['last_vote_date'] = response['vote_summary']['last_vote_date_ms'] / 1000
                    response['votes_count'] = response['vote_summary']['count']
        # media_topics
        if 'like_summary' in response:
            response['likes_count'] = response['like_summary']['count']
            response.pop('like_summary')
        if 'reshare_summary' in response:
            response['reshares_count'] = response['reshare_summary']['count']
            response.pop('reshare_summary')
        if 'discussion_summary' in response:
            response['comments_count'] = response['discussion_summary']['comments_count']
            response.pop('discussion_summary')
        # refs look like 'group:12345' / 'user:12345'
        if 'author_ref' in response:
            i = response.pop('author_ref').split(':')
            response['author_id'] = i[1]
            self.author_content_type = ContentType.objects.get(app_label='odnoklassniki_%ss' % i[0], model=i[0])
        if 'owner_ref' in response:
            i = response.pop('owner_ref').split(':')
            response['owner_id'] = i[1]
            self.owner_content_type = ContentType.objects.get(app_label='odnoklassniki_%ss' % i[0], model=i[0])
        if 'created_ms' in response:
            response['date'] = response.pop('created_ms') / 1000
        if 'media' in response:
            response['title'] = response['media'][0]['text']
        # in API owner is author
        if 'owner_uid' in response:
            response['author_id'] = response.pop('owner_uid')
        # some name cleaning
        if 'like_count' in response:
            response['likes_count'] = response.pop('like_count')
        if 'total_comments_count' in response:
            response['comments_count'] = response.pop('total_comments_count')
        if 'creation_date' in response:
            response['date'] = response.pop('creation_date')
        # response of stream.get has another format: the id is embedded in
        # the message as a '{media_topic:NNN}' placeholder
        if 'message' in response and '{media_topic' in response['message']:
            regexp = r'{media_topic:?(\d+)?}'
            m = re.findall(regexp, response['message'])
            if len(m):
                response['id'] = m[0]
            response['message'] = re.sub(regexp, '', response['message'])
        return super(Discussion, self).parse(response)

    def fetch_comments(self, **kwargs):
        """Fetch and store all comments of this discussion."""
        return Comment.remote.fetch(discussion=self, **kwargs)

    def update_likes_count(self, instances, *args, **kwargs):
        """Persist the fetched liker ids into like_users/likes_count."""
        users = User.objects.filter(pk__in=instances)
        self.like_users = users
        self.likes_count = len(instances)
        self.save()
        return users

    @atomic
    @fetch_all(return_all=update_likes_count, has_more=None)
    def fetch_likes(self, count=100, **kwargs):
        """Fetch ids of users who liked this discussion (paginated)."""
        kwargs['discussionId'] = self.id
        kwargs['discussionType'] = self.object_type
        kwargs['count'] = int(count)
#        kwargs['fields'] = Discussion.remote.get_request_fields('user')
        response = Discussion.remote.api_call(method='get_likes', **kwargs)
        # has_more not in dict and we need to handle pagination manually
        if 'users' not in response:
            response.pop('anchor', None)
            users_ids = []
        else:
            users_ids = list(User.remote.get_or_create_from_resources_list(
                response['users']).values_list('pk', flat=True))
        return users_ids, response
class Comment(OdnoklassnikiModel):
    """A comment inside an Odnoklassniki discussion."""
    methods_namespace = 'discussions'
    # temporary variable for distance from parse() to save()
    author_type = None
    id = models.CharField(max_length=68, primary_key=True)
    discussion = models.ForeignKey(Discussion, related_name='comments')
    # denormalization for query optimization: owner copied from the discussion
    owner_content_type = models.ForeignKey(ContentType, related_name='odnoklassniki_comments_owners')
    owner_id = models.BigIntegerField(db_index=True)
    owner = generic.GenericForeignKey('owner_content_type', 'owner_id')
    author_content_type = models.ForeignKey(ContentType, related_name='odnoklassniki_comments_authors')
    author_id = models.BigIntegerField(db_index=True)
    author = generic.GenericForeignKey('author_content_type', 'author_id')
    reply_to_comment = models.ForeignKey('self', null=True, verbose_name=u'Это ответ на комментарий')
    reply_to_author_content_type = models.ForeignKey(
        ContentType, null=True, related_name='odnoklassniki_comments_reply_to_authors')
    reply_to_author_id = models.BigIntegerField(db_index=True, null=True)
    reply_to_author = generic.GenericForeignKey('reply_to_author_content_type', 'reply_to_author_id')
    object_type = models.CharField(max_length=20, choices=COMMENT_TYPE_CHOICES)
    text = models.TextField()
    date = models.DateTimeField()
    likes_count = models.PositiveIntegerField(default=0)
    liked_it = models.BooleanField(default=False)
    attrs = JSONField(null=True)
    like_users = ManyToManyHistoryField(User, related_name='like_comments')
    remote = CommentRemoteManager(methods={
        'get': 'getComments',
        'get_one': 'getComment',
        'get_likes': 'getCommentLikes',
    })

    class Meta:
        verbose_name = _('Odnoklassniki comment')
        verbose_name_plural = _('Odnoklassniki comments')

    @property
    def slug(self):
        return self.discussion.slug

    def save(self, *args, **kwargs):
        """Resolve author/reply-to relations before saving.

        The owner is always copied from the parent discussion; the author is
        fetched remotely when it is not already present locally.
        """
        self.owner = self.discussion.owner
        if self.author_id and not self.author:
            # ``author_type`` is set transiently by parse() when the author
            # is a group rather than a user.
            if self.author_type == 'GROUP':
                if self.author_id == self.owner_id:
                    self.author = self.owner
                else:
                    from odnoklassniki_groups.models import Group
                    try:
                        self.author = Group.remote.fetch(ids=[self.author_id])[0]
                    except IndexError:
                        raise Exception("Can't fetch Odnoklassniki comment's group-author with ID %s" % self.author_id)
            else:
                try:
                    self.author = User.objects.get(pk=self.author_id)
                except User.DoesNotExist:
                    try:
                        self.author = User.remote.fetch(ids=[self.author_id])[0]
                    except IndexError:
                        raise Exception("Can't fetch Odnoklassniki comment's user-author with ID %s" % self.author_id)
        # it's hard to get proper reply_to_author_content_type in case we fetch comments from last
        if self.reply_to_author_id and not self.reply_to_author_content_type:
            # assumes replies always target users, not groups -- TODO confirm
            self.reply_to_author_content_type = ContentType.objects.get_for_model(User)
#        if self.reply_to_comment_id and self.reply_to_author_id and not self.reply_to_author_content_type:
#            try:
#                self.reply_to_author = User.objects.get(pk=self.reply_to_author_id)
#            except User.DoesNotExist:
#                self.reply_to_author = self.reply_to_comment.author
        # check for existing comment from self.reply_to_comment to prevent ItegrityError
        if self.reply_to_comment_id:
            try:
                self.reply_to_comment = Comment.objects.get(pk=self.reply_to_comment_id)
            except Comment.DoesNotExist:
                log.error("Try to save comment ID=%s with reply_to_comment_id=%s that doesn't exist in DB" %
                          (self.id, self.reply_to_comment_id))
                self.reply_to_comment = None
        return super(Comment, self).save(*args, **kwargs)

    def parse(self, response):
        """Normalize API field names into this model's field names."""
        # rename becouse discussion has object_type
        if 'type' in response:
            response['object_type'] = response.pop('type')
        if 'like_count' in response:
            response['likes_count'] = response.pop('like_count')
        if 'reply_to_id' in response:
            response['reply_to_author_id'] = response.pop('reply_to_id')
        if 'reply_to_comment_id' in response:
            response['reply_to_comment'] = response.pop('reply_to_comment_id')
        # if author is a group
        if 'author_type' in response:
            response.pop('author_name')
            # stash for save(); not a persisted model field
            self.author_type = response.pop('author_type')
        return super(Comment, self).parse(response)

    def update_likes_count(self, instances, *args, **kwargs):
        """Persist the fetched liker ids into like_users/likes_count."""
        users = User.objects.filter(pk__in=instances)
        self.like_users = users
        self.likes_count = len(instances)
        self.save()
        return users

    @atomic
    @fetch_all(return_all=update_likes_count, has_more=None)
    def fetch_likes(self, count=100, **kwargs):
        """Fetch ids of users who liked this comment (paginated)."""
        kwargs['comment_id'] = self.id
        kwargs['discussionId'] = self.discussion.id
        kwargs['discussionType'] = self.discussion.object_type
        kwargs['count'] = int(count)
#        kwargs['fields'] = Comment.remote.get_request_fields('user')
        response = Comment.remote.api_call(method='get_likes', **kwargs)
        # has_more not in dict and we need to handle pagination manualy
        if 'users' not in response:
            response.pop('anchor', None)
            users_ids = []
        else:
            users_ids = list(User.remote.get_or_create_from_resources_list(
                response['users']).values_list('pk', flat=True))
        return users_ids, response
|
nfcpy/ndeftool | refs/heads/master | src/ndeftool/commands/TeXT.py | 1 | # -*- coding: utf-8 -*-
import click
import ndef
from ndeftool.cli import command_processor, dmsg
@click.command(short_help="Create an NFC Forum Text Record.")
@click.argument('text')
@click.option('-l', '--language', default='en',
              help="Set the IANA language code.")
@click.option('--encoding', default='UTF-8',
              type=click.Choice(['UTF-8', 'UTF-16']),
              help="Set the encoding (default UTF-8).")
@command_processor
def cmd(message, **kwargs):
    """The *text* command creates an NFC Forum Text Record with the given
    input text. The text language defaults to 'en' and can be set
    with --language followed by the IANA language code.
    \b
    Examples:
      ndeftool text '' | xxd -g 1
      ndeftool text 'Created with the nfcpy ndeftool.' print
      ndeftool text 'first record' text 'second record' print
      ndeftool text -l en 'English' text -l de 'Deutsch' print
    """
    dmsg(__name__ + ' ' + str(kwargs))
    # Start a fresh message when this is the first command in the pipeline,
    # otherwise extend the message built by the preceding commands.
    records = message if message is not None else []
    records.append(ndef.TextRecord(
        kwargs['text'], kwargs['language'], kwargs['encoding']))
    return records
|
tung7970/mbed-os-1 | refs/heads/master | tools/host_tests/host_tests_plugins/module_copy_firefox.py | 128 | """
mbed SDK
Copyright (c) 2011-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from os.path import join, basename
from host_test_plugins import HostTestPluginBase
class HostTestPluginCopyMethod_Firefox(HostTestPluginBase):
def file_store_firefox(self, file_path, dest_disk):
try:
from selenium import webdriver
profile = webdriver.FirefoxProfile()
profile.set_preference('browser.download.folderList', 2) # custom location
profile.set_preference('browser.download.manager.showWhenStarting', False)
profile.set_preference('browser.download.dir', dest_disk)
profile.set_preference('browser.helperApps.neverAsk.saveToDisk', 'application/octet-stream')
# Launch browser with profile and get file
browser = webdriver.Firefox(profile)
browser.get(file_path)
browser.close()
except:
return False
return True
# Plugin interface
name = 'HostTestPluginCopyMethod_Firefox'
type = 'CopyMethod'
capabilities = ['firefox']
required_parameters = ['image_path', 'destination_disk']
def setup(self, *args, **kwargs):
""" Configure plugin, this function should be called before plugin execute() method is used.
"""
try:
from selenium import webdriver
except ImportError, e:
self.print_plugin_error("Error: firefox copy method requires selenium library. %s"% e)
return False
return True
def execute(self, capabilitity, *args, **kwargs):
""" Executes capability by name.
Each capability may directly just call some command line
program or execute building pythonic function
"""
result = False
if self.check_parameters(capabilitity, *args, **kwargs) is True:
image_path = kwargs['image_path']
destination_disk = kwargs['destination_disk']
# Prepare correct command line parameter values
image_base_name = basename(image_path)
destination_path = join(destination_disk, image_base_name)
if capabilitity == 'firefox':
self.file_store_firefox(image_path, destination_path)
return result
def load_plugin():
    """ Returns plugin available in this module
        (entry point used by the host test plugin registry).
    """
    return HostTestPluginCopyMethod_Firefox()
|
rdo-management/tuskar | refs/heads/mgt-master | tuskar/manager/role.py | 1 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from os import path as os_path
from tuskar.manager import models
from tuskar.storage.stores import TemplateExtraStore
from tuskar.storage.stores import TemplateStore
from tuskar.templates import parser
class RoleManager(object):
    """Manages Tuskar roles (templates) and their extra-data files."""

    def __init__(self):
        super(RoleManager, self).__init__()
        self.template_store = TemplateStore()
        self.template_extra_store = TemplateExtraStore()

    def list_roles(self, only_latest=False):
        """Returns a list of all roles known to Tuskar.

        :param only_latest: if true, only the highest version of each role
               will be returned
        :type  only_latest: bool

        :return: list of tuskar model instances for each role
        :rtype:  [tuskar.manager.models.Role]
        """
        db_roles = self.template_store.list(only_latest=only_latest)
        roles = [self._role_to_tuskar_object(r) for r in db_roles]
        return roles

    def retrieve_role_by_uuid(self, role_uuid):
        """Returns the role with the given UUID.

        :type role_uuid: str
        :rtype: tuskar.manager.models.Role
        :raises tuskar.storage.exceptions.UnknownUUID: if there is no role with
                the given ID
        """
        db_role = self.template_store.retrieve(role_uuid)
        role = self._role_to_tuskar_object(db_role)
        return role

    def retrieve_db_role_by_uuid(self, role_uuid):
        """Returns the raw stored role (template) for the given UUID."""
        return self.template_store.retrieve(role_uuid)

    def retrieve_db_role_extra(self):
        """Returns all stored role-extra records (all versions)."""
        return self.template_extra_store.list(only_latest=False)

    def template_extra_data_for_output(self, template_extra_paths, prefix=''):
        """Compile and return role-extra data for output as a string

        :param template_extra_paths: a list of {k,v} (name=>path)
        :type template_extra_paths: list of dict

        :param prefix: a prefix path
        :type prefix: string

        :return: a dict of path=>contents
        :rtype: dict

        The keys in template_extra_paths correspond to the names of stored
        role-extra data and the values are the paths at which the
        corresponding files ares expected to be. This list is returned by
        common.utils.resolve_template_extra_data for example:

            [{'extra_common_yaml': 'hieradata/common.yaml'},
             {'extra_object_yaml': 'hieradata/object.yaml'}]

        Using this create a new dict that maps the path (values above) as
        key to the contents of the corresponding stored role-extra object
        (using the name above to retrieve it). For the example input
        above, the output would be like:

            {
                "hieradata/common.yaml": "CONTENTS",
                "hieradata/object.yaml": "CONTENTS"
            }

        In those cases that the template_extra_paths were generated for a
        non Role template (i.e. those templates read from the resource
        registry), include their path prefix - so that the extra data files
        are created relative to the template.
        """
        res = {}
        for entry in template_extra_paths:
            # Each entry is a single-item dict of {role_extra_name: path}.
            # Bug fix: dict.keys() is a non-indexable view on Python 3, so
            # unpack the single item instead of indexing keys()[0].
            role_extra_name, role_extra_path = list(entry.items())[0]
            db_role_extra = self.template_extra_store.retrieve_by_name(
                role_extra_name)
            role_extra_path = os_path.join(prefix, role_extra_path)
            res[role_extra_path] = db_role_extra.contents
        return res

    @staticmethod
    def _role_to_tuskar_object(db_role):
        # Parse the stored template so the Role model exposes its description.
        parsed = parser.parse_template(db_role.contents)
        role = models.Role(db_role.uuid, db_role.name, db_role.version,
                           parsed.description, parsed)
        return role
|
andela-ifageyinbo/django | refs/heads/master | tests/utils_tests/test_datastructures.py | 262 | """
Tests for stuff in django.utils.datastructures.
"""
import copy
from django.test import SimpleTestCase
from django.utils import six
from django.utils.datastructures import (
DictWrapper, ImmutableList, MultiValueDict, MultiValueDictKeyError,
OrderedSet,
)
class OrderedSetTests(SimpleTestCase):
    def test_bool(self):
        # An empty OrderedSet must be falsy and become truthy once it has
        # at least one member. Refs #23664.
        ordered = OrderedSet()
        self.assertFalse(ordered)
        ordered.add(1)
        self.assertTrue(ordered)

    def test_len(self):
        ordered = OrderedSet()
        self.assertEqual(len(ordered), 0)
        ordered.add(1)
        ordered.add(2)
        ordered.add(2)  # a duplicate add must not grow the set
        self.assertEqual(len(ordered), 2)
class MultiValueDictTests(SimpleTestCase):

    def test_multivaluedict(self):
        # __getitem__/get return the *last* value; getlist returns them all.
        d = MultiValueDict({'name': ['Adrian', 'Simon'],
                            'position': ['Developer']})
        self.assertEqual(d['name'], 'Simon')
        self.assertEqual(d.get('name'), 'Simon')
        self.assertEqual(d.getlist('name'), ['Adrian', 'Simon'])
        self.assertEqual(
            sorted(six.iteritems(d)),
            [('name', 'Simon'), ('position', 'Developer')]
        )
        self.assertEqual(
            sorted(six.iterlists(d)),
            [('name', ['Adrian', 'Simon']), ('position', ['Developer'])]
        )
        # Missing keys raise MultiValueDictKeyError from [], but get()
        # falls back to the default and getlist() to an empty list.
        six.assertRaisesRegex(self, MultiValueDictKeyError, 'lastname',
                              d.__getitem__, 'lastname')
        self.assertEqual(d.get('lastname'), None)
        self.assertEqual(d.get('lastname', 'nonexistent'), 'nonexistent')
        self.assertEqual(d.getlist('lastname'), [])
        self.assertEqual(d.getlist('doesnotexist', ['Adrian', 'Simon']),
                         ['Adrian', 'Simon'])
        d.setlist('lastname', ['Holovaty', 'Willison'])
        self.assertEqual(d.getlist('lastname'), ['Holovaty', 'Willison'])
        self.assertEqual(sorted(six.itervalues(d)),
                         ['Developer', 'Simon', 'Willison'])

    def test_appendlist(self):
        d = MultiValueDict()
        d.appendlist('name', 'Adrian')
        d.appendlist('name', 'Simon')
        self.assertEqual(d.getlist('name'), ['Adrian', 'Simon'])

    def test_copy(self):
        # copy.copy and MultiValueDict.copy are both shallow: updating a
        # copy must not leak into the original, but contained mutable
        # values remain shared.
        for copy_func in [copy.copy, lambda d: d.copy()]:
            d1 = MultiValueDict({
                "developers": ["Carl", "Fred"]
            })
            self.assertEqual(d1["developers"], "Fred")
            d2 = copy_func(d1)
            d2.update({"developers": "Groucho"})
            self.assertEqual(d2["developers"], "Groucho")
            self.assertEqual(d1["developers"], "Fred")
            d1 = MultiValueDict({
                "key": [[]]
            })
            self.assertEqual(d1["key"], [])
            d2 = copy_func(d1)
            d2["key"].append("Penguin")
            self.assertEqual(d1["key"], ["Penguin"])
            self.assertEqual(d2["key"], ["Penguin"])

    def test_dict_translation(self):
        # dict() flattens the MultiValueDict to {key: last value}.
        mvd = MultiValueDict({
            'devs': ['Bob', 'Joe'],
            'pm': ['Rory'],
        })
        d = mvd.dict()
        self.assertEqual(sorted(six.iterkeys(d)), sorted(six.iterkeys(mvd)))
        for key in six.iterkeys(mvd):
            self.assertEqual(d[key], mvd[key])
        self.assertEqual({}, MultiValueDict().dict())
class ImmutableListTests(SimpleTestCase):

    def test_sort(self):
        # Mutating methods must raise with the default warning message,
        # while the contents stay readable.
        d = ImmutableList(range(10))
        # AttributeError: ImmutableList object is immutable.
        self.assertRaisesMessage(AttributeError,
                                 'ImmutableList object is immutable.', d.sort)
        self.assertEqual(repr(d), '(0, 1, 2, 3, 4, 5, 6, 7, 8, 9)')

    def test_custom_warning(self):
        # A caller-supplied warning string is used for mutation attempts.
        d = ImmutableList(range(10), warning="Object is immutable!")
        self.assertEqual(d[1], 1)
        # AttributeError: Object is immutable!
        self.assertRaisesMessage(AttributeError,
                                 'Object is immutable!', d.__setitem__, 1, 'test')
class DictWrapperTests(SimpleTestCase):

    def test_dictwrapper(self):
        # Values fetched through the 'xx_' prefix are passed through f();
        # unprefixed lookups return the raw value.
        # Idiom fix: use a def instead of assigning a lambda (PEP 8, E731).
        def f(x):
            return "*%s" % x
        d = DictWrapper({'a': 'a'}, f, 'xx_')
        self.assertEqual(
            "Normal: %(a)s. Modified: %(xx_a)s" % d,
            'Normal: a. Modified: *a'
        )
|
sgammon/libcloud | refs/heads/trunk | libcloud/loadbalancer/base.py | 6 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from libcloud.common.base import ConnectionKey, BaseDriver
from libcloud.common.types import LibcloudError
# Public API of this module.
__all__ = [
    'Member',
    'LoadBalancer',
    'Algorithm',
    'Driver',
    'DEFAULT_ALGORITHM'
]
class Member(object):
    """
    Represents a single backend member of a load balancer.
    """

    def __init__(self, id, ip, port, balancer=None, extra=None):
        """
        :param id: Member ID.
        :type id: ``str``

        :param ip: IP address of this member.
        :type ip: ``str``

        :param port: Port of this member.
        :type port: ``str``

        :param balancer: Balancer this member is attached to. (optional)
        :type balancer: :class:`.LoadBalancer`

        :param extra: Provider specific attributes.
        :type extra: ``dict``
        """
        # Normalize the identifier to a string; falsy ids collapse to None.
        if id:
            self.id = str(id)
        else:
            self.id = None
        self.ip = ip
        self.port = port
        self.balancer = balancer
        self.extra = extra if extra else {}

    def __repr__(self):
        return '<Member: id={0}, address={1}:{2}>'.format(self.id, self.ip,
                                                          self.port)
class LoadBalancer(object):
    """
    Provide a common interface for handling Load Balancers.
    """

    def __init__(self, id, name, state, ip, port, driver, extra=None):
        """
        :param id: Load balancer ID.
        :type id: ``str``

        :param name: Load balancer name.
        :type name: ``str``

        :param state: State this loadbalancer is in.
        :type state: :class:`libcloud.loadbalancer.types.State`

        :param ip: IP address of this loadbalancer.
        :type ip: ``str``

        :param port: Port of this loadbalancer.
        :type port: ``int``

        :param driver: Driver this loadbalancer belongs to.
        :type driver: :class:`.Driver`

        :param extra: Provider specific attributes. (optional)
        :type extra: ``dict``
        """
        # Normalize the identifier to a string; falsy ids collapse to None.
        if id:
            self.id = str(id)
        else:
            self.id = None
        self.name = name
        self.state = state
        self.ip = ip
        self.port = port
        self.driver = driver
        self.extra = extra if extra else {}

    def attach_compute_node(self, node):
        """Attach *node* as a member of this balancer via the driver."""
        return self.driver.balancer_attach_compute_node(node=node,
                                                        balancer=self)

    def attach_member(self, member):
        """Attach *member* to this balancer via the driver."""
        return self.driver.balancer_attach_member(member=member,
                                                  balancer=self)

    def detach_member(self, member):
        """Detach *member* from this balancer via the driver."""
        return self.driver.balancer_detach_member(member=member,
                                                  balancer=self)

    def list_members(self):
        """Return members attached to this balancer via the driver."""
        return self.driver.balancer_list_members(balancer=self)

    def destroy(self):
        """Destroy this balancer via the driver."""
        return self.driver.destroy_balancer(balancer=self)

    def __repr__(self):
        return '<LoadBalancer: id={0}, name={1}, state={2}>'.format(
            self.id, self.name, self.state)
class Algorithm(object):
    """
    Represents a load balancing algorithm.
    """
    # Abstract algorithm identifiers; drivers map these integers to
    # provider-specific values via _ALGORITHM_TO_VALUE_MAP.
    RANDOM = 0
    ROUND_ROBIN = 1
    LEAST_CONNECTIONS = 2
    WEIGHTED_ROUND_ROBIN = 3
    WEIGHTED_LEAST_CONNECTIONS = 4
# Algorithm used when the caller does not specify one.
DEFAULT_ALGORITHM = Algorithm.ROUND_ROBIN
class Driver(BaseDriver):
"""
A base Driver class to derive from
This class is always subclassed by a specific driver.
"""
name = None
website = None
connectionCls = ConnectionKey
_ALGORITHM_TO_VALUE_MAP = {}
_VALUE_TO_ALGORITHM_MAP = {}
def __init__(self, key, secret=None, secure=True, host=None,
port=None, **kwargs):
super(Driver, self).__init__(key=key, secret=secret, secure=secure,
host=host, port=port, **kwargs)
def list_protocols(self):
"""
Return a list of supported protocols.
:rtype: ``list`` of ``str``
"""
raise NotImplementedError(
'list_protocols not implemented for this driver')
def list_balancers(self):
"""
List all loadbalancers
:rtype: ``list`` of :class:`LoadBalancer`
"""
raise NotImplementedError(
'list_balancers not implemented for this driver')
def create_balancer(self, name, port, protocol, algorithm, members):
"""
Create a new load balancer instance
:param name: Name of the new load balancer (required)
:type name: ``str``
:param port: Port the load balancer should listen on, defaults to 80
:type port: ``str``
:param protocol: Loadbalancer protocol, defaults to http.
:type protocol: ``str``
:param members: list of Members to attach to balancer
:type members: ``list`` of :class:`Member`
:param algorithm: Load balancing algorithm, defaults to ROUND_ROBIN.
:type algorithm: :class:`Algorithm`
:rtype: :class:`LoadBalancer`
"""
raise NotImplementedError(
'create_balancer not implemented for this driver')
def destroy_balancer(self, balancer):
"""
Destroy a load balancer
:param balancer: LoadBalancer which should be used
:type balancer: :class:`LoadBalancer`
:return: ``True`` if the destroy was successful, otherwise ``False``.
:rtype: ``bool``
"""
raise NotImplementedError(
'destroy_balancer not implemented for this driver')
def get_balancer(self, balancer_id):
"""
Return a :class:`LoadBalancer` object.
:param balancer_id: id of a load balancer you want to fetch
:type balancer_id: ``str``
:rtype: :class:`LoadBalancer`
"""
raise NotImplementedError(
'get_balancer not implemented for this driver')
def update_balancer(self, balancer, **kwargs):
"""
Sets the name, algorithm, protocol, or port on a load balancer.
:param balancer: LoadBalancer which should be used
:type balancer: :class:`LoadBalancer`
:param name: New load balancer name
:type name: ``str``
:param algorithm: New load balancer algorithm
:type algorithm: :class:`Algorithm`
:param protocol: New load balancer protocol
:type protocol: ``str``
:param port: New load balancer port
:type port: ``int``
:rtype: :class:`LoadBalancer`
"""
raise NotImplementedError(
'update_balancer not implemented for this driver')
def balancer_attach_compute_node(self, balancer, node):
"""
Attach a compute node as a member to the load balancer.
:param balancer: LoadBalancer which should be used
:type balancer: :class:`LoadBalancer`
:param node: Node to join to the balancer
:type node: :class:`Node`
:return: Member after joining the balancer.
:rtype: :class:`Member`
"""
member = Member(id=None, ip=node.public_ips[0], port=balancer.port)
return self.balancer_attach_member(balancer, member)
    def balancer_attach_member(self, balancer, member):
        """
        Attach a member to balancer

        Drivers must override this method.

        :param balancer: LoadBalancer which should be used
        :type balancer: :class:`LoadBalancer`

        :param member: Member to join to the balancer
        :type member: :class:`Member`

        :return: Member after joining the balancer.
        :rtype: :class:`Member`
        """
        raise NotImplementedError(
            'balancer_attach_member not implemented for this driver')
    def balancer_detach_member(self, balancer, member):
        """
        Detach member from balancer

        Drivers must override this method.

        :param balancer: LoadBalancer which should be used
        :type balancer: :class:`LoadBalancer`

        :param member: Member which should be used
        :type member: :class:`Member`

        :return: ``True`` if member detach was successful, otherwise ``False``.
        :rtype: ``bool``
        """
        raise NotImplementedError(
            'balancer_detach_member not implemented for this driver')
    def balancer_list_members(self, balancer):
        """
        Return list of members attached to balancer

        Drivers must override this method.

        :param balancer: LoadBalancer which should be used
        :type balancer: :class:`LoadBalancer`

        :rtype: ``list`` of :class:`Member`
        """
        raise NotImplementedError(
            'balancer_list_members not implemented for this driver')
def list_supported_algorithms(self):
"""
Return algorithms supported by this driver.
:rtype: ``list`` of ``str``
"""
return list(self._ALGORITHM_TO_VALUE_MAP.keys())
def _value_to_algorithm(self, value):
"""
Return :class`Algorithm` based on the value.
:param value: Algorithm name (e.g. http, tcp, ...).
:type value: ``str``
@rype :class:`Algorithm`
"""
try:
return self._VALUE_TO_ALGORITHM_MAP[value]
except KeyError:
raise LibcloudError(value='Invalid value: %s' % (value),
driver=self)
def _algorithm_to_value(self, algorithm):
"""
Return string value for the provided algorithm.
:param value: Algorithm enum.
:type value: :class:`Algorithm`
@rype ``str``
"""
try:
return self._ALGORITHM_TO_VALUE_MAP[algorithm]
except KeyError:
raise LibcloudError(value='Invalid algorithm: %s' % (algorithm),
driver=self)
|
Nivl/www.melvin.la | refs/heads/master | nivls_website/commons/happyforms/__init__.py | 3 | from django import forms
from django.forms.fields import FileField
from django.core.exceptions import ValidationError
class BaseForm:
    """
    We do some automatic stripping of globally bad input. For example, we
    strip leading/trailing spaces from all data. Since Django-core hasn't done
    this already: http://code.djangoproject.com/ticket/6362
    """
    def _clean_fields(self):
        # Re-implementation of django.forms.BaseForm._clean_fields with one
        # addition: string values are stripped of leading/trailing
        # whitespace before being handed to field.clean().
        for name, field in self.fields.items():
            # value_from_datadict() gets the data from the data dictionaries.
            # Each widget type knows how to retrieve its own data, because some
            # widgets split data over several HTML fields.
            value = field.widget.value_from_datadict(self.data, self.files,
                                                     self.add_prefix(name))
            try:
                if isinstance(field, FileField):
                    # FileFields need the initial value as well, so they can
                    # tell "no file uploaded" apart from "clear the file".
                    initial = self.initial.get(name, field.initial)
                    value = field.clean(value, initial)
                else:
                    if isinstance(value, basestring):
                        # The happyforms addition: strip whitespace first.
                        value = field.clean(value.strip())
                    else:
                        value = field.clean(value)
                self.cleaned_data[name] = value
                if hasattr(self, 'clean_%s' % name):
                    # Run the form's per-field clean_<name>() hook, if any.
                    value = getattr(self, 'clean_%s' % name)()
                    self.cleaned_data[name] = value
            except ValidationError, e:
                # Record the error and drop any partially-cleaned value.
                self._errors[name] = self.error_class(e.messages)
                if name in self.cleaned_data:
                    del self.cleaned_data[name]
class Form(BaseForm, forms.Form):
    """``django.forms.Form`` variant that strips whitespace from input."""
    pass
class ModelForm(BaseForm, forms.ModelForm):
    """``django.forms.ModelForm`` variant that strips whitespace from input."""
    pass
|
oracleyue/rubber-for-latex | refs/heads/master | src/converters/compressor.py | 1 | # This file is part of Rubber and thus covered by the GPL
# (c) Nicolas Boulenguez 2015
"""
Compressing the output of Rubber.
"""
from rubber import _, msg
import rubber.depend
class Node (rubber.depend.Node):
    """
    Dependency node that produces a compressed copy of a source file.

    The target is <source><extension>; 'constructor' is a file-object
    factory taking (path, mode), e.g. gzip.GzipFile or bz2.BZ2File.
    """
    def __init__ (self, node_dictionary, constructor, extension, source):
        """
        node_dictionary -- shared dependency-node registry
        constructor     -- callable (path, mode) returning a writable file
        extension       -- suffix appended to the source name (e.g. '.gz')
        source          -- path of the file to compress
        """
        super (Node, self).__init__ (node_dictionary)
        self.constructor = constructor
        self.target = source + extension
        self.source = source
        self.add_product (self.target)
        self.add_source (source)
    def run (self):
        """
        Compress the source file into the target.

        Returns True on success, False if the compression failed.
        """
        msg.progress (_ ("compressing %s into %s") % (self.source, self.target))
        try:
            with open (self.source, 'rb') as f_in:
                with self.constructor (self.target, 'wb') as f_out:
                    f_out.writelines (f_in)
        # Catch only I/O failures: the original bare 'except' would also
        # swallow KeyboardInterrupt and hide programming errors.
        except (IOError, OSError):
            msg.error (_ ("compression failed"))
            return False
        return True
|
apark263/tensorflow | refs/heads/master | tensorflow/python/util/lock_util_test.py | 54 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for lock_util."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import random
import time
from absl.testing import parameterized
from tensorflow.python.platform import test
from tensorflow.python.util import lock_util
class GroupLockTest(test.TestCase, parameterized.TestCase):
  """Tests for lock_util.GroupLock."""

  @parameterized.parameters(1, 2, 3, 5, 10)
  def testGroups(self, num_groups):
    # Run 10 threads spread across the groups and check mutual exclusion
    # between groups while allowing concurrency within a group.
    lock = lock_util.GroupLock(num_groups)
    num_threads = 10
    finished = set()
    def thread_fn(thread_id):
      # Random sleeps stagger the threads so acquisitions interleave.
      time.sleep(random.random() * 0.1)
      group_id = thread_id % num_groups
      with lock.group(group_id):
        time.sleep(random.random() * 0.1)
        # While we hold the lock, our group must have at least one member
        # and every other group must have none (groups are exclusive).
        self.assertGreater(lock._group_member_counts[group_id], 0)
        for g, c in enumerate(lock._group_member_counts):
          if g != group_id:
            self.assertEqual(0, c)
      finished.add(thread_id)
    threads = [
        self.checkedThread(target=thread_fn, args=(i,))
        for i in range(num_threads)
    ]
    for i in range(num_threads):
      threads[i].start()
    for i in range(num_threads):
      threads[i].join()
    # Every thread must have completed its critical section exactly once.
    self.assertEqual(set(range(num_threads)), finished)
if __name__ == "__main__":
test.main()
|
mercycorps/TolaActivity | refs/heads/master | scripts/workflow_migration.py | 1 | from django.apps import apps
from django import db
from django.db import connection
app_models = apps.get_app_config('workflow').get_models()
#rename the app tables from the old activitydb to workflow
def run():
print "Migration"
for app in app_models:
name = str(app._meta.db_table)
new_appname = "tola_activity." + name
temp = name.split("_")
old_appname = "tola_activity.activitydb_" + temp[1]
sql_query = "RENAME TABLE %s TO %s" % (old_appname,new_appname)
print sql_query
#catch any existing tables
try:
# Renaming model from 'Foo' to 'Bar'
with connection.cursor() as cursor:
cursor.execute(sql_query)
except:
"Table Already Exists"
name_list = [
'program_country',
'program_fund_code',
'program_sector',
'program_user_access',
'projectagreement_evaluate',
'projectagreement_capacity',
'projectagreement_stakeholder',
'projectagreement_site',
'projectcomplete_site',
'projectcomplete_stakeholder',
'quantitativeoutputs',
'stakeholder_contact',
'tolauser_countries'
]
for name in name_list:
old_appname = "tola_activity.activitydb_" + name
new_appname = "tola_activity.workflow_" + name
sql_query = "RENAME TABLE %s TO %s" % (old_appname, new_appname)
try:
# Renaming model from 'Foo' to 'Bar'
with connection.cursor() as cursor:
cursor.execute(sql_query)
except:
"Table Already Exists"
# rename formlibrary tables
try:
# Renaming model from 'Foo' to 'Bar'
with connection.cursor() as cursor:
cursor.execute("RENAME TABLE activitydb_beneficiary TO formlibrary_beneficiary")
cursor.execute("RENAME TABLE activitydb_beneficiary_distribution TO formlibrary_beneficiary_distribution")
cursor.execute("RENAME TABLE activitydb_beneficiary_program TO formlibrary_beneficiary_program")
cursor.execute("RENAME TABLE activitydb_beneficiary_training TO formlibrary_beneficiary_training")
cursor.execute("RENAME TABLE activitydb_trainingattendance TO formlibrary_trainingattendance")
cursor.execute("RENAME TABLE activitydb_distribution TO formlibrary_distribution")
except:
"Table Already Exists"
|
kmill/textadv | refs/heads/master | textadv/gameworld/basickinds.py | 1 | ### Not to be imported
## Should be execfile'd
# basickinds.py
# These are definitions of the core kinds in the world, where a kind
# is something like a class of objects. The definitions of KindOf and
# IsA are in basicrelations.py
# Register the kind hierarchy as (kind, parent) pairs, in order.
_KIND_HIERARCHY = [
    ("room", "kind"),
    ("thing", "kind"),
    ("door", "thing"),
    ("container", "thing"),
    ("supporter", "thing"),
    ("person", "thing"),
    ("backdrop", "thing"),
    ("region", "kind"),
]
for _kind, _parent in _KIND_HIERARCHY:
    world.add_relation(KindOf(_kind, _parent))
# The choice of these kinds was greatly influenced by Inform 7. The
# kind "kind" is the root of this structure just so that there is a
# root. The following are basic properties of the other kinds.
#
# A room represents a place. These are not contained in anything, and
# can be a part of an Exit relation.
#
# A thing represents some object that can be interacted with.
#
# A door is a thing which can be in two rooms and which also can be
# part of the Exit relation.
#
# Containers and supporters are things which can contain and support
# things, respectively. These are distinct because it simplifies the
# core library (as enterables, these two end up being mutually
# exclusive).
#
# Persons represent objects with which one can communicate. This also
# encompasses the player character.
#
# Backdrops are things which can be present in multiple rooms (that
# is, there is a rule which moves backdrop to an appropriate room),
# effectively breaking the rule that things can't be in more than one
# room.
#
# Regions are kinds which can contain rooms, which breaks the rule
# that rooms are not contained in anything. These are used to group
# together rooms.
|
courtarro/gnuradio-wg-grc | refs/heads/master | gr-qtgui/apps/plot_time_raster_base.py | 47 | #!/usr/bin/env python
#
# Copyright 2013 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr, blocks
from gnuradio.eng_option import eng_option
from optparse import OptionParser
import os, sys
try:
from gnuradio import qtgui
from PyQt4 import QtGui, QtCore
import sip
except ImportError:
print "Error: Program requires PyQt4 and gr-qtgui."
sys.exit(1)
try:
import scipy
except ImportError:
print "Error: Scipy required (www.scipy.org)."
sys.exit(1)
try:
from gnuradio.qtgui.plot_form import *
from gnuradio.qtgui.plot_base import *
except ImportError:
from plot_form import *
from plot_base import *
class plot_base(gr.top_block):
    """
    Flowgraph that reads samples from one or more files and feeds them to
    a QT GUI sink.  Subclasses are expected to provide:

      * self.dsize       -- item size used by the skiphead block
      * self.src_type    -- vector-source constructor
      * self.gui_snk     -- the QT sink instance
      * self.read_samples(filename, start, nsamps) -> (data, min, max)
    """
    def __init__(self, filelist, samp_rate, start,
                 nsamples, max_nsamples,
                 auto_scale):
        gr.top_block.__init__(self)
        self._filelist = filelist
        self._samp_rate = samp_rate
        self._center_freq = 0
        self._start = start
        self._max_nsamps = max_nsamples
        self._nsigs = len(self._filelist)
        self._auto_scale = auto_scale
        self._nsamps = nsamples
        self._is_setup = False
        # Default intensity/axis bounds; refined once the data is read.
        self._y_min = -20
        self._y_max = 20
        self._y_range = 2
        self._y_value = 1
        # Optional callback used to push new y-axis limits to the GUI.
        self.gui_y_axis = None
        self.qapp = QtGui.QApplication(sys.argv)
    def setup(self):
        """Read the files, wire up the flowgraph and build the QT widget."""
        self.skip = blocks.skiphead(self.dsize, self._start)
        n = 0
        self.srcs = list()
        # Track the global min/max across all files for auto-scaling.
        self._data_min = sys.maxint
        self._data_max = -sys.maxint - 1
        for f in self._filelist:
            data,_min,_max = self.read_samples(f, self._start, self._nsamps)
            if(_min < self._data_min):
                self._data_min = _min
            if(_max > self._data_max):
                self._data_max = _max
            self.srcs.append(self.src_type(data))
            # Set default labels based on file names
            fname = f.split("/")[-1]
            #self.gui_snk.set_line_label(n, "{0}".format(fname))
            n += 1
        # NOTE(review): only the first source passes through skiphead; the
        # remaining sources connect directly to the sink -- confirm that
        # read_samples() already applies the start offset for them.
        self.connect(self.srcs[0], self.skip)
        self.connect(self.skip, (self.gui_snk, 0))
        for i,s in enumerate(self.srcs[1:]):
            self.connect(s, (self.gui_snk, i+1))
        self.gui_snk.set_update_time(0)
        self.gui_snk.enable_menu(False)
        self.auto_scale(self._auto_scale)
        # Get Python Qt references
        pyQt = self.gui_snk.pyqwidget()
        self.pyWin = sip.wrapinstance(pyQt, QtGui.QWidget)
        self._is_setup = True
    def is_setup(self):
        # True once setup() has completed successfully.
        return self._is_setup
    def set_y_axis(self, y_min, y_max):
        """Set the intensity range manually; a no-op while auto-scaling."""
        if(not self._auto_scale):
            self.gui_snk.set_intensity_range(y_min, y_max)
            return y_min, y_max
        else:
            return None
    def get_gui(self):
        """Return the QT widget, or None if setup() has not been run."""
        if(self.is_setup()):
            return self.pyWin
        else:
            return None
    def reset(self, newstart, newnsamps):
        """Re-read every file from a new offset/length and restart."""
        self.stop()
        self.wait()
        self._start = newstart
        self._data_min = sys.maxint
        self._data_max = -sys.maxint - 1
        for s,f in zip(self.srcs, self._filelist):
            data,_min,_max = self.read_samples(f, self._start, newnsamps)
            if(_min < self._data_min):
                self._data_min = _min
            if(_max > self._data_max):
                self._data_max = _max
            s.set_data(data)
            # Clamp the requested length to what was actually read.
            if(len(data) < newnsamps):
                newnsamps = len(data)
        self.auto_scale(self._auto_scale)
        self._nsamps = newnsamps
        self.start()
    def auto_scale(self, state):
        """Enable (state truthy) or disable automatic intensity scaling."""
        if(state > 0):
            self.gui_snk.set_intensity_range(self._data_min, self._data_max)
            self._auto_scale = True
            self._y_value = self._data_max
            self._y_range = self._data_max - self._data_min
            # Widen the stored manual-adjustment bounds around the data.
            self._y_min = 10*self._data_min
            self._y_max = 10*self._data_max
            # Push the new limits to the GUI if a callback is registered.
            if(self.gui_y_axis):
                self.gui_y_axis(self._data_min, self._data_max)
        else:
            self._auto_scale = False
self._auto_scale = False
def setup_options(desc):
    """
    Build the option parser shared by the time-raster plotting apps.

    Returns the parsed (options, args); prints usage and exits if no
    input file argument was supplied.
    """
    parser = OptionParser(option_class=eng_option, description=desc,
                          conflict_handler="resolve")
    parser.add_option("-N", "--nsamples", type="int", default=1000000,
                      help="Set the number of samples to display [default=%default]")
    parser.add_option("-S", "--start", type="int", default=0,
                      help="Starting sample number [default=%default]")
    parser.add_option("-C", "--ncols", type="int", default=100,
                      help="Number of columns [default=%default]")
    parser.add_option("-R", "--nrows", type="int", default=100,
                      help="Number of rows [default=%default]")
    parser.add_option("-r", "--sample-rate", type="eng_float", default=1.0,
                      help="Set the sample rate of the signal [default=%default]")
    parser.add_option("", "--no-auto-scale", action="store_true", default=False,
                      help="Do not auto-scale the plot [default=%default]")
    (options,args) = parser.parse_args()
    # At least one data file must be given on the command line.
    if(len(args) < 1):
        parser.print_help()
        sys.exit(0)
    return (options,args)
|
varshadyavaiah/RED-with-In-and-Out-RIO-queue-discipline | refs/heads/master | examples/energy/examples-to-run.py | 196 | #! /usr/bin/env python
## -*- Mode: python; py-indent-offset: 4; indent-tabs-mode: nil; coding: utf-8; -*-
# A list of C++ examples to run in order to ensure that they remain
# buildable and runnable over time. Each tuple in the list contains
#
# (example_name, do_run, do_valgrind_run).
#
# See test.py for more information.
cpp_examples = [
("energy-model-example", "True", "True"),
]
# A list of Python examples to run in order to ensure that they remain
# runnable over time. Each tuple in the list contains
#
# (example_name, do_run).
#
# See test.py for more information.
python_examples = []
|
PokemonGoF/PokemonGo-Bot-Desktop | refs/heads/development | build/pywin/Lib/test/crashers/buffer_mutate.py | 66 | #
# The various methods of bufferobject.c (here buffer_subscript()) call
# get_buf() before calling potentially more Python code (here via
# PySlice_GetIndicesEx()). But get_buf() already returned a void*
# pointer. This void* pointer can become invalid if the object
# underlying the buffer is mutated (here a bytearray object).
#
# As usual, please keep in mind that the three "here" in the sentence
# above are only examples. Each can be changed easily and lead to
# another crasher.
#
# This crashes for me on Linux 32-bits with CPython 2.6 and 2.7
# with a segmentation fault.
#
class PseudoIndex(object):
    # Slice index whose __index__ mutates the global bytearray 'a' while
    # buffer_subscript() is still holding a raw pointer into it: appending
    # forces a reallocation, which invalidates that pointer.
    def __index__(self):
        for c in "foobar"*n:
            a.append(c)
        return n * 4
# Slicing the buffer calls buffer_subscript(), which caches a void* from
# get_buf() before evaluating the slice indices; PseudoIndex.__index__
# then grows 'a', invalidating that pointer and (on CPython 2.x) causing
# a segmentation fault.  Growing n increases the realloc pressure.
for n in range(1, 100000, 100):
    a = bytearray("test"*n)
    buf = buffer(a)
    s = buf[:PseudoIndex():1]
    #print repr(s)
    #assert s == "test"*n
|
haobtc/bitcoin | refs/heads/master | qa/rpc-tests/keypool.py | 86 | #!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# Exercise the wallet keypool, and interaction with wallet encryption/locking
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
class KeyPoolTest(BitcoinTestFramework):
    """Exercise the wallet keypool and its interaction with encryption."""
    def run_test(self):
        nodes = self.nodes
        addr_before_encrypting = nodes[0].getnewaddress()
        addr_before_encrypting_data = nodes[0].validateaddress(addr_before_encrypting)
        wallet_info_old = nodes[0].getwalletinfo()
        # Pre-encryption addresses derive from the original HD master key.
        assert(addr_before_encrypting_data['hdmasterkeyid'] == wallet_info_old['hdmasterkeyid'])
        # Encrypt wallet and wait to terminate
        nodes[0].encryptwallet('test')
        bitcoind_processes[0].wait()
        # Restart node 0
        nodes[0] = start_node(0, self.options.tmpdir)
        # Keep creating keys
        addr = nodes[0].getnewaddress()
        addr_data = nodes[0].validateaddress(addr)
        wallet_info = nodes[0].getwalletinfo()
        # Encryption rotates the HD master key; new addresses use the new one.
        assert(addr_before_encrypting_data['hdmasterkeyid'] != wallet_info['hdmasterkeyid'])
        assert(addr_data['hdmasterkeyid'] == wallet_info['hdmasterkeyid'])
        # With the wallet locked the keypool cannot be refilled, so the
        # next request must fail with RPC_WALLET_KEYPOOL_RAN_OUT (-12).
        try:
            addr = nodes[0].getnewaddress()
            raise AssertionError('Keypool should be exhausted after one address')
        except JSONRPCException as e:
            assert(e.error['code']==-12)
        # put three new keys in the keypool
        nodes[0].walletpassphrase('test', 12000)
        nodes[0].keypoolrefill(3)
        nodes[0].walletlock()
        # drain the keys
        addr = set()
        addr.add(nodes[0].getrawchangeaddress())
        addr.add(nodes[0].getrawchangeaddress())
        addr.add(nodes[0].getrawchangeaddress())
        addr.add(nodes[0].getrawchangeaddress())
        # assert that four unique addresses were returned
        assert(len(addr) == 4)
        # the next one should fail
        try:
            addr = nodes[0].getrawchangeaddress()
            raise AssertionError('Keypool should be exhausted after three addresses')
        except JSONRPCException as e:
            assert(e.error['code']==-12)
        # refill keypool with three new addresses
        nodes[0].walletpassphrase('test', 1)
        nodes[0].keypoolrefill(3)
        # test walletpassphrase timeout
        time.sleep(1.1)
        assert_equal(nodes[0].getwalletinfo()["unlocked_until"], 0)
        # drain them by mining
        nodes[0].generate(1)
        nodes[0].generate(1)
        nodes[0].generate(1)
        nodes[0].generate(1)
        # Keypool exhausted again: mining needs a key for the coinbase.
        try:
            nodes[0].generate(1)
            raise AssertionError('Keypool should be exhausted after three addesses')
        except JSONRPCException as e:
            assert(e.error['code']==-12)
    def __init__(self):
        super().__init__()
        # Reuse the cached chain; a single node is sufficient.
        self.setup_clean_chain = False
        self.num_nodes = 1
    def setup_network(self):
        self.nodes = self.setup_nodes()
if __name__ == '__main__':
KeyPoolTest().main()
|
timothydmorton/bokeh | refs/heads/master | examples/plotting/file/grid.py | 48 | import numpy as np
from bokeh.plotting import figure, gridplot, output_file, show
N = 50
x = np.linspace(0, 4*np.pi, N)
y = np.sin(x)
output_file("grid.html", title="grid.py example")
TOOLS = "pan,wheel_zoom,box_zoom,reset,save,crosshair"
l = figure(title="line", tools=TOOLS, plot_width=300, plot_height=300)
l.line(x,y, line_width=3, color="gold")
aw = figure(title="annular wedge", tools=TOOLS, plot_width=300, plot_height=300)
aw.annular_wedge(x, y, 10, 20, 0.6, 4.1, color="navy", alpha=0.5,
inner_radius_units="screen", outer_radius_units="screen")
bez = figure(title="bezier", tools=TOOLS, plot_width=300, plot_height=300)
bez.bezier(x, y, x+0.4, y, x+0.1, y+0.2, x-0.1, y-0.2,
line_width=2, color="olive")
q = figure(title="quad", tools=TOOLS, plot_width=300, plot_height=300)
q.quad(x, x-0.2, y, y-0.2, color="tomato", alpha=0.4)
# specify "empty" grid cells with None
p = gridplot([[l, None, aw], [bez, q, None]])
show(p)
|
alfasin/st2 | refs/heads/master | st2common/tests/unit/test_isotime_utils.py | 11 | # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
import unittest
from st2common.util import isotime
from st2common.util import date
class IsoTimeUtilsTestCase(unittest.TestCase):
    """Tests for st2common.util.isotime validate/parse/format helpers."""
    def test_validate(self):
        # Accepted: space or 'T' separator, optional microseconds, and any
        # supported UTC offset spelling (Z, +00, +0000, +00:00).
        self.assertTrue(isotime.validate('2000-01-01 12:00:00Z'))
        self.assertTrue(isotime.validate('2000-01-01 12:00:00+00'))
        self.assertTrue(isotime.validate('2000-01-01 12:00:00+0000'))
        self.assertTrue(isotime.validate('2000-01-01 12:00:00+00:00'))
        self.assertTrue(isotime.validate('2000-01-01 12:00:00.000000Z'))
        self.assertTrue(isotime.validate('2000-01-01 12:00:00.000000+00'))
        self.assertTrue(isotime.validate('2000-01-01 12:00:00.000000+0000'))
        self.assertTrue(isotime.validate('2000-01-01 12:00:00.000000+00:00'))
        self.assertTrue(isotime.validate('2000-01-01T12:00:00Z'))
        self.assertTrue(isotime.validate('2000-01-01T12:00:00.000000Z'))
        self.assertTrue(isotime.validate('2000-01-01T12:00:00+00:00'))
        self.assertTrue(isotime.validate('2000-01-01T12:00:00.000000+00:00'))
        self.assertTrue(isotime.validate('2015-02-10T21:21:53.399Z'))
        # Rejected: missing time part, missing offset, doubled offset and
        # garbage / non-string input.
        self.assertFalse(isotime.validate('2000-01-01', raise_exception=False))
        self.assertFalse(isotime.validate('2000-01-01T12:00:00', raise_exception=False))
        self.assertFalse(isotime.validate('2000-01-01T12:00:00+00:00Z', raise_exception=False))
        self.assertFalse(isotime.validate('2000-01-01T12:00:00.000000', raise_exception=False))
        self.assertFalse(isotime.validate('Epic!', raise_exception=False))
        self.assertFalse(isotime.validate(object(), raise_exception=False))
        # With raise_exception=True a failed validation raises ValueError.
        self.assertRaises(ValueError, isotime.validate, 'Epic!', True)
    def test_parse(self):
        # Every accepted spelling parses to the same tz-aware datetime.
        dt = date.add_utc_tz(datetime.datetime(2000, 1, 1, 12))
        self.assertEqual(isotime.parse('2000-01-01 12:00:00Z'), dt)
        self.assertEqual(isotime.parse('2000-01-01 12:00:00+00'), dt)
        self.assertEqual(isotime.parse('2000-01-01 12:00:00+0000'), dt)
        self.assertEqual(isotime.parse('2000-01-01 12:00:00+00:00'), dt)
        self.assertEqual(isotime.parse('2000-01-01 12:00:00.000000Z'), dt)
        self.assertEqual(isotime.parse('2000-01-01 12:00:00.000000+00'), dt)
        self.assertEqual(isotime.parse('2000-01-01 12:00:00.000000+0000'), dt)
        self.assertEqual(isotime.parse('2000-01-01 12:00:00.000000+00:00'), dt)
        self.assertEqual(isotime.parse('2000-01-01T12:00:00Z'), dt)
        self.assertEqual(isotime.parse('2000-01-01T12:00:00+00:00'), dt)
        self.assertEqual(isotime.parse('2000-01-01T12:00:00.000000Z'), dt)
        self.assertEqual(isotime.parse('2000-01-01T12:00:00.000000+00:00'), dt)
        self.assertEqual(isotime.parse('2000-01-01T12:00:00.000Z'), dt)
    def test_format(self):
        # The usec/offset flags control microseconds and the +00:00 vs Z
        # spelling; strings are passed through unchanged.
        dt = date.add_utc_tz(datetime.datetime(2000, 1, 1, 12))
        dt_str_usec_offset = '2000-01-01T12:00:00.000000+00:00'
        dt_str_usec = '2000-01-01T12:00:00.000000Z'
        dt_str_offset = '2000-01-01T12:00:00+00:00'
        dt_str = '2000-01-01T12:00:00Z'
        dt_unicode = u'2000-01-01T12:00:00Z'
        self.assertEqual(isotime.format(dt, usec=True, offset=True), dt_str_usec_offset)
        self.assertEqual(isotime.format(dt, usec=True, offset=False), dt_str_usec)
        self.assertEqual(isotime.format(dt, usec=False, offset=True), dt_str_offset)
        self.assertEqual(isotime.format(dt, usec=False, offset=False), dt_str)
        self.assertEqual(isotime.format(dt_str, usec=False, offset=False), dt_str)
        self.assertEqual(isotime.format(dt_unicode, usec=False, offset=False), dt_unicode)
    def test_format_tz_naive(self):
        # A naive datetime is treated as UTC on the round trip.
        dt1 = datetime.datetime.utcnow()
        dt2 = isotime.parse(isotime.format(dt1, usec=True))
        self.assertEqual(dt2, date.add_utc_tz(dt1))
    def test_format_tz_aware(self):
        # A tz-aware datetime round-trips unchanged.
        dt1 = date.add_utc_tz(datetime.datetime.utcnow())
        dt2 = isotime.parse(isotime.format(dt1, usec=True))
        self.assertEqual(dt2, dt1)
    def test_format_sec_truncated(self):
        # With usec=False the microseconds are truncated, not rounded.
        dt1 = date.add_utc_tz(datetime.datetime.utcnow())
        dt2 = isotime.parse(isotime.format(dt1, usec=False))
        dt3 = datetime.datetime(dt1.year, dt1.month, dt1.day, dt1.hour, dt1.minute, dt1.second)
        self.assertLess(dt2, dt1)
        self.assertEqual(dt2, date.add_utc_tz(dt3))
|
fimbox/CasualRelighting | refs/heads/master | relighting_projects/fruit_table_projected/solve_shaded_input.py | 1 | import os
ROOT_FOLDER = '../relighting_projects/fruit_table_projected'
INPUT_FOLDER = ROOT_FOLDER
OUTPUT_FOLDER = ROOT_FOLDER + '/py_results'
SCREENSHOT_VIEW = "-0,260829061 0,2491733 0,976253152"
cam = Item("Camera")
cam.Fov = 40
def screenshotMesh(mesh_name, path):
    # Isolate the named mesh, frame it from the canonical viewpoint and
    # save a 1920x1280 screenshot to 'path'.
    RaytracerTool.actionSelectMesh(mesh_name, "hideothers")
    RaytracerTool.actionFocusCameraOnMesh(SCREENSHOT_VIEW)
    RaytracerTool.actionDeselectMesh()
    RaytracerTool.actionSaveScreenshot(path, 1920,1280)
def solveMesh( mesh_path ):
    # Load the mesh, run the specular solving script, then export the HDR
    # environment maps and screenshots of each intermediate result into
    # OUTPUT_FOLDER/<mesh name>/.
    print("solving " + mesh_path)
    RaytracerTool.actionClearAllMeshes();
    RaytracerTool.actionCreateTextureProjector()
    mtp = Item("MultiTextureProjectorHandle")
    mtp.onLoadAverageMesh(mesh_path)
    BaseTool.actionExecuteScript(ROOT_FOLDER + "/specular_solving_average_py.tscript")
    # for convenient viewing:
    RaytracerTool.EnableRealtimeVertexSH = True
    # Output folder is named after the mesh file (without its extension).
    prefix_output = os.path.basename(mesh_path).replace(".bin_mesh","")
    output_folder = OUTPUT_FOLDER + "/" + prefix_output
    if not os.path.exists(output_folder+"/"):
        os.makedirs(output_folder+"/")
    RaytracerTool.actionSaveEnvironmentToCrossFile(output_folder + "/hdr_cross.png" )
    RaytracerTool.actionSaveEnvironmentToSphereFile(output_folder + "/hdr_sphere.png" )
    # RaytracerTool.actionClearHDR("white")
    # One screenshot per solver output layer.
    screenshotMesh("Mesh Base Mesh", output_folder+"/input.png")
    screenshotMesh("Mesh Plain Lit Mesh", output_folder+"/shade.png")
    screenshotMesh("Mesh Albedo Fin Mesh", output_folder+"/albedo.png")
    screenshotMesh("Mesh Relighten Mesh", output_folder+"/relit.png")
    screenshotMesh("Mesh Segment Mesh", output_folder+"/segments.png")
# solveMesh( INPUT_FOLDER + "/raw_bunny_white_hdr.bin_mesh" )
|
katzoo/amu | refs/heads/master | isi/cluster/Task703.py | 1 | # -*- coding: utf-8 -*-
""" Task 703 """
def average_vectors(first_idx, second_idx, clusters):
    """
    Return the element-wise average of two clusters' vectors.

    :param first_idx: index of the first cluster in ``clusters``
    :param second_idx: index of the second cluster in ``clusters``
    :param clusters: sequence of objects exposing a ``vec`` sequence
    :return: list of averaged components, truncated to the shorter vector
    """
    first = clusters[first_idx].vec
    second = clusters[second_idx].vec
    return [(a + b) / 2 for a, b in zip(first, second)]
jasonkying/pip | refs/heads/develop | tests/unit/test_req_uninstall.py | 28 | import os
import pytest
from mock import Mock
import pip.req.req_uninstall
from pip.req.req_uninstall import UninstallPathSet
# Pretend all files are local, so UninstallPathSet accepts files in the tmpdir,
# outside the virtualenv
def mock_is_local(path):
    """Stand-in for pip.req.req_uninstall.is_local that accepts any path."""
    # Every path is treated as local so UninstallPathSet accepts files in
    # the tmpdir, outside the virtualenv.
    return True
class TestUninstallPathSet(object):
    """Tests for pip.req.req_uninstall.UninstallPathSet.add()."""
    def test_add(self, tmpdir, monkeypatch):
        # Only existing files should be recorded for removal.
        monkeypatch.setattr(pip.req.req_uninstall, 'is_local', mock_is_local)
        file_extant = os.path.join(tmpdir, 'foo')
        file_nonexistant = os.path.join(tmpdir, 'nonexistant')
        with open(file_extant, 'w'):
            pass
        ups = UninstallPathSet(dist=Mock())
        assert ups.paths == set()
        ups.add(file_extant)
        assert ups.paths == set([file_extant])
        # Adding a missing file is a silent no-op.
        ups.add(file_nonexistant)
        assert ups.paths == set([file_extant])
    @pytest.mark.skipif("sys.platform == 'win32'")
    def test_add_symlink(self, tmpdir, monkeypatch):
        # The symlink itself (not its target) must be stored.
        monkeypatch.setattr(pip.req.req_uninstall, 'is_local', mock_is_local)
        f = os.path.join(tmpdir, 'foo')
        with open(f, 'w'):
            pass
        l = os.path.join(tmpdir, 'foo_link')
        os.symlink(f, l)
        ups = UninstallPathSet(dist=Mock())
        ups.add(l)
        assert ups.paths == set([l])
|
mikebrevard/UnixAdministration | refs/heads/master | vagrant/etc/data/genData/venv/lib/python3.4/site-packages/pip/_vendor/requests/packages/chardet/compat.py | 2942 | ######################## BEGIN LICENSE BLOCK ########################
# Contributor(s):
# Ian Cordasco - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
import sys
if sys.version_info < (3, 0):
    # Python 2: text data may arrive as str or unicode.
    base_str = (str, unicode)
else:
    # Python 3: raw input may be bytes or str.
    base_str = (bytes, str)
def wrap_ord(a):
    """Return ord(a) for Python 2 string types; pass a through otherwise."""
    # On Python 3 the version check short-circuits, so 'a' (already an
    # int when iterating bytes) is returned unchanged.
    needs_ord = sys.version_info < (3, 0) and isinstance(a, base_str)
    return ord(a) if needs_ord else a
|
gfacciol/pvflip | refs/heads/master | glfw/glfw.py | 1 | """
Python bindings for GLFW.
"""
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
__author__ = 'Florian Rhiem (florian.rhiem@gmail.com)'
__copyright__ = 'Copyright (c) 2013-2016 Florian Rhiem'
__license__ = 'MIT'
__version__ = '1.3.1'
# By default (ERROR_REPORTING = True), GLFW errors will be reported as Python
# exceptions. Set ERROR_REPORTING to False or set a curstom error callback to
# disable this behavior.
ERROR_REPORTING = True
import ctypes
import os
import platform
import functools
import glob
import sys
import subprocess
import textwrap
# Python 2/3 compatibility shims:
try:
    # Python 2: os.getcwdu returns unicode.
    _getcwd = os.getcwdu
except AttributeError:
    # Python 3: os.getcwd already returns str.
    _getcwd = os.getcwd
if sys.version_info.major > 2:
    # char* parameters require UTF-8 encoded bytes on Python 3.
    _to_char_p = lambda s: s.encode('utf-8')
    def _reraise(exception, traceback):
        raise exception.with_traceback(traceback)
else:
    _to_char_p = lambda s: s
    def _reraise(exception, traceback):
        # NOTE(review): on Python 2, raising a parenthesized tuple re-raises
        # its first element and drops the traceback argument -- confirm this
        # is intended rather than 'raise exception, None, traceback'.
        raise (exception, None, traceback)
class GLFWError(Exception):
    """Raised to report GLFW errors when error reporting is enabled."""
    def __init__(self, message):
        # Exception.__init__ stores the message in args and str(self).
        super(GLFWError, self).__init__(message)
def _find_library_candidates(library_names,
                             library_file_extensions,
                             library_search_paths):
    """
    Finds and returns filenames which might be the library you are looking for.
    """
    candidates = set()
    for library_name in library_names:
        for search_path in library_search_paths:
            glob_query = os.path.join(search_path, '*'+library_name+'*')
            for filename in glob.iglob(glob_query):
                # Resolve symlinks so the same file is only checked once.
                filename = os.path.realpath(filename)
                if filename in candidates:
                    continue
                basename = os.path.basename(filename)
                # Accept both 'lib<name>...' and '<name>...' basenames.
                if basename.startswith('lib'+library_name):
                    basename_end = basename[len('lib'+library_name):]
                elif basename.startswith(library_name):
                    basename_end = basename[len(library_name):]
                else:
                    continue
                for file_extension in library_file_extensions:
                    # Case 1: '<ext>' directly after the name, optionally
                    # followed by a dotted version (e.g. libglfw.so.3.1).
                    if basename_end.startswith(file_extension):
                        if basename_end[len(file_extension):][:1] in ('', '.'):
                            candidates.add(filename)
                    # Case 2: version between name and extension
                    # (e.g. libglfw.3.1.dylib) -- middle must be digits/dots.
                    if basename_end.endswith(file_extension):
                        basename_middle = basename_end[:-len(file_extension)]
                        if all(c in '0123456789.' for c in basename_middle):
                            candidates.add(filename)
    return candidates
def _load_library(library_names, library_file_extensions,
                  library_search_paths, version_check_callback):
    """
    Finds, loads and returns the most recent version of the library.

    Returns None when no candidate reports a version >= 3.0.0.
    """
    candidates = _find_library_candidates(library_names,
                                          library_file_extensions,
                                          library_search_paths)
    library_versions = []
    for filename in candidates:
        # The callback probes the file in a subprocess; None means the
        # file could not be loaded or is not a GLFW library.
        version = version_check_callback(filename)
        if version is not None and version >= (3, 0, 0):
            library_versions.append((version, filename))
    if not library_versions:
        return None
    # Sort ascending by version tuple and load the newest one.
    library_versions.sort()
    return ctypes.CDLL(library_versions[-1][1])
def _glfw_get_version(filename):
"""
Queries and returns the library version tuple or None by using a
subprocess.
"""
version_checker_source = '''
import sys
import ctypes
def get_version(library_handle):
"""
Queries and returns the library version tuple or None.
"""
major_value = ctypes.c_int(0)
major = ctypes.pointer(major_value)
minor_value = ctypes.c_int(0)
minor = ctypes.pointer(minor_value)
rev_value = ctypes.c_int(0)
rev = ctypes.pointer(rev_value)
if hasattr(library_handle, 'glfwGetVersion'):
library_handle.glfwGetVersion(major, minor, rev)
version = (major_value.value,
minor_value.value,
rev_value.value)
return version
else:
return None
try:
input_func = raw_input
except NameError:
input_func = input
filename = input_func().strip()
try:
library_handle = ctypes.CDLL(filename)
except OSError:
pass
else:
version = get_version(library_handle)
print(version)
'''
args = [sys.executable, '-c', textwrap.dedent(version_checker_source)]
process = subprocess.Popen(args, universal_newlines=True,
stdin=subprocess.PIPE, stdout=subprocess.PIPE)
out = process.communicate(filename)[0]
out = out.strip()
if out:
return eval(out)
else:
return None
# Locate and load the GLFW shared library at import time. Strategy:
# 1) bundled precompiled binaries next to this file,
# 2) a system-wide installation,
# 3) (non-Windows only) build from the bundled sources as a last resort.
here = os.path.dirname(__file__)
if sys.platform.startswith('win'):
    # try the procompiled winXX libraries
    try:
        if platform.architecture()[0] == '64bit':
            _glfw = ctypes.CDLL(os.path.join(here,'glfw-3.2.1.bin.WIN64/lib-mingw-w64/glfw3.dll'))
        else:
            _glfw = ctypes.CDLL(os.path.join(here,'glfw-3.2.1.bin.WIN32/lib-mingw/glfw3.dll'))
    except OSError:
        # try glfw3.dll on windows system
        try:
            _glfw = ctypes.CDLL('glfw3.dll')
        except OSError:
            _glfw = None
else: # not Windows -> Linux or Mac
    # try to access the compiled version first
    # NOTE(review): glfwlib is only assigned on darwin/linux; on any other
    # POSIX platform the CDLL(glfwlib) line below would raise NameError,
    # which the except OSError does not catch — confirm intended platforms.
    if sys.platform.startswith('darwin'):
        glfwlib = os.path.join(here,'libglfw.dylib')
        if not os.path.exists(glfwlib):
            glfwlib = os.path.join(here,'glfw-3.3.bin.MAC64/libglfw.3.3.dylib')
    elif sys.platform.startswith('linux'):
        glfwlib = os.path.join(here,'libglfw.so')
    try:
        _glfw = ctypes.CDLL(glfwlib)
    except OSError:
        _glfw = None
    # if failed search it on the system (slower)
    # NOTE(review): `== None` should idiomatically be `is None` throughout
    # this section (behavior is the same for CDLL handles).
    if _glfw == None:
        _glfw = _load_library(['glfw', 'glfw3'], ['.so', '.dylib'],
                              ['',
                              here,
                              os.path.join(here,'glfw-3.3.bin.MAC64/'),
                              '/usr/lib64', '/usr/local/lib64',
                              '/usr/lib', '/usr/local/lib',
                              '/usr/lib/x86_64-linux-gnu/'], _glfw_get_version)
    # otherwise try to build
    # Last resort: build GLFW from the bundled sources via cmake, falling
    # back to a plain make in glfw_src if the cmake pipeline fails.
    if _glfw == None:
        print('BUILDING GLFW...')
        r = os.system('mkdir -p %s/build; cd %s/build; cmake -DCMAKE_BUILD_TYPE=Release -DBUILD_SHARED_LIBS=ON -DGLFW_BUILD_EXAMPLES=OFF -DGLFW_BUILD_TESTS=OFF ../glfw_src; make; cp src/libglfw.so src/libglfw.dylib %s '%(here,here, here))
        if (r):
            os.system("make -j -C %s/glfw_src" % here)
            os.system("cp %s/glfw_src/libglfw.so %s" % (here,here))
        try:
            _glfw = _load_library(['glfw', 'glfw3'], ['.so', '.dylib'],
                                  ['', here,], _glfw_get_version)
            if _glfw == None:
                raise OSError('Library file not found')
            print('CLEANING BUILD...')
            os.system('rm -fr %s/build '%(here))
        except OSError:
            print("SORRY: THE GLFW BUILD FAILED")
            _glfw = None
# Importing this module without a usable GLFW library is a hard error.
if _glfw is None:
    raise ImportError("Failed to load GLFW3 shared library.")
# Registry of per-window callback dictionaries; terminate() and
# destroy_window() use it to release retained Python callbacks.
_callback_repositories = []
class _GLFWwindow(ctypes.Structure):
    """
    Wrapper for:
        typedef struct GLFWwindow GLFWwindow;
    """
    # Opaque handle; ctypes requires at least one field, so a dummy is used.
    _fields_ = [("dummy", ctypes.c_int)]
class _GLFWmonitor(ctypes.Structure):
    """
    Wrapper for:
        typedef struct GLFWmonitor GLFWmonitor;
    """
    # Opaque handle; ctypes requires at least one field, so a dummy is used.
    _fields_ = [("dummy", ctypes.c_int)]
class _GLFWvidmode(ctypes.Structure):
"""
Wrapper for:
typedef struct GLFWvidmode GLFWvidmode;
"""
_fields_ = [("width", ctypes.c_int),
("height", ctypes.c_int),
("red_bits", ctypes.c_int),
("green_bits", ctypes.c_int),
("blue_bits", ctypes.c_int),
("refresh_rate", ctypes.c_uint)]
def __init__(self):
ctypes.Structure.__init__(self)
self.width = 0
self.height = 0
self.red_bits = 0
self.green_bits = 0
self.blue_bits = 0
self.refresh_rate = 0
def wrap(self, video_mode):
"""
Wraps a nested python sequence.
"""
size, bits, self.refresh_rate = video_mode
self.width, self.height = size
self.red_bits, self.green_bits, self.blue_bits = bits
def unwrap(self):
"""
Returns a nested python sequence.
"""
size = self.width, self.height
bits = self.red_bits, self.green_bits, self.blue_bits
return size, bits, self.refresh_rate
class _GLFWgammaramp(ctypes.Structure):
"""
Wrapper for:
typedef struct GLFWgammaramp GLFWgammaramp;
"""
_fields_ = [("red", ctypes.POINTER(ctypes.c_ushort)),
("green", ctypes.POINTER(ctypes.c_ushort)),
("blue", ctypes.POINTER(ctypes.c_ushort)),
("size", ctypes.c_uint)]
def __init__(self):
ctypes.Structure.__init__(self)
self.red = None
self.red_array = None
self.green = None
self.green_array = None
self.blue = None
self.blue_array = None
self.size = 0
def wrap(self, gammaramp):
"""
Wraps a nested python sequence.
"""
red, green, blue = gammaramp
size = min(len(red), len(green), len(blue))
array_type = ctypes.c_ushort*size
self.size = ctypes.c_uint(size)
self.red_array = array_type()
self.green_array = array_type()
self.blue_array = array_type()
for i in range(self.size):
self.red_array[i] = int(red[i]*65535)
self.green_array[i] = int(green[i]*65535)
self.blue_array[i] = int(blue[i]*65535)
pointer_type = ctypes.POINTER(ctypes.c_ushort)
self.red = ctypes.cast(self.red_array, pointer_type)
self.green = ctypes.cast(self.green_array, pointer_type)
self.blue = ctypes.cast(self.blue_array, pointer_type)
def unwrap(self):
"""
Returns a nested python sequence.
"""
red = [self.red[i]/65535.0 for i in range(self.size)]
green = [self.green[i]/65535.0 for i in range(self.size)]
blue = [self.blue[i]/65535.0 for i in range(self.size)]
return red, green, blue
class _GLFWcursor(ctypes.Structure):
    """
    Wrapper for:
        typedef struct GLFWcursor GLFWcursor;
    """
    # Opaque handle; ctypes requires at least one field, so a dummy is used.
    _fields_ = [("dummy", ctypes.c_int)]
class _GLFWimage(ctypes.Structure):
"""
Wrapper for:
typedef struct GLFWimage GLFWimage;
"""
_fields_ = [("width", ctypes.c_int),
("height", ctypes.c_int),
("pixels", ctypes.POINTER(ctypes.c_ubyte))]
def __init__(self):
ctypes.Structure.__init__(self)
self.width = 0
self.height = 0
self.pixels = None
self.pixels_array = None
def wrap(self, image):
"""
Wraps a nested python sequence.
"""
self.width, self.height, pixels = image
array_type = ctypes.c_ubyte * 4 * self.width * self.height
self.pixels_array = array_type()
for i in range(self.height):
for j in range(self.width):
for k in range(4):
self.pixels_array[i][j][k] = pixels[i][j][k]
pointer_type = ctypes.POINTER(ctypes.c_ubyte)
self.pixels = ctypes.cast(self.pixels_array, pointer_type)
def unwrap(self):
"""
Returns a nested python sequence.
"""
pixels = [[[int(c) for c in p] for p in l] for l in self.pixels_array]
return self.width, self.height, pixels
# --- Library version these bindings target --------------------------------
VERSION_MAJOR = 3
VERSION_MINOR = 2
VERSION_REVISION = 1
# --- Key/button action values ---------------------------------------------
RELEASE = 0
PRESS = 1
REPEAT = 2
# --- Keyboard key codes ---------------------------------------------------
KEY_UNKNOWN = -1
KEY_SPACE = 32
KEY_APOSTROPHE = 39
KEY_COMMA = 44
KEY_MINUS = 45
KEY_PERIOD = 46
KEY_SLASH = 47
KEY_0 = 48
KEY_1 = 49
KEY_2 = 50
KEY_3 = 51
KEY_4 = 52
KEY_5 = 53
KEY_6 = 54
KEY_7 = 55
KEY_8 = 56
KEY_9 = 57
KEY_SEMICOLON = 59
KEY_EQUAL = 61
KEY_A = 65
KEY_B = 66
KEY_C = 67
KEY_D = 68
KEY_E = 69
KEY_F = 70
KEY_G = 71
KEY_H = 72
KEY_I = 73
KEY_J = 74
KEY_K = 75
KEY_L = 76
KEY_M = 77
KEY_N = 78
KEY_O = 79
KEY_P = 80
KEY_Q = 81
KEY_R = 82
KEY_S = 83
KEY_T = 84
KEY_U = 85
KEY_V = 86
KEY_W = 87
KEY_X = 88
KEY_Y = 89
KEY_Z = 90
KEY_LEFT_BRACKET = 91
KEY_BACKSLASH = 92
KEY_RIGHT_BRACKET = 93
KEY_GRAVE_ACCENT = 96
KEY_WORLD_1 = 161
KEY_WORLD_2 = 162
# Function/navigation keys.
KEY_ESCAPE = 256
KEY_ENTER = 257
KEY_TAB = 258
KEY_BACKSPACE = 259
KEY_INSERT = 260
KEY_DELETE = 261
KEY_RIGHT = 262
KEY_LEFT = 263
KEY_DOWN = 264
KEY_UP = 265
KEY_PAGE_UP = 266
KEY_PAGE_DOWN = 267
KEY_HOME = 268
KEY_END = 269
KEY_CAPS_LOCK = 280
KEY_SCROLL_LOCK = 281
KEY_NUM_LOCK = 282
KEY_PRINT_SCREEN = 283
KEY_PAUSE = 284
KEY_F1 = 290
KEY_F2 = 291
KEY_F3 = 292
KEY_F4 = 293
KEY_F5 = 294
KEY_F6 = 295
KEY_F7 = 296
KEY_F8 = 297
KEY_F9 = 298
KEY_F10 = 299
KEY_F11 = 300
KEY_F12 = 301
KEY_F13 = 302
KEY_F14 = 303
KEY_F15 = 304
KEY_F16 = 305
KEY_F17 = 306
KEY_F18 = 307
KEY_F19 = 308
KEY_F20 = 309
KEY_F21 = 310
KEY_F22 = 311
KEY_F23 = 312
KEY_F24 = 313
KEY_F25 = 314
# Numeric keypad keys.
KEY_KP_0 = 320
KEY_KP_1 = 321
KEY_KP_2 = 322
KEY_KP_3 = 323
KEY_KP_4 = 324
KEY_KP_5 = 325
KEY_KP_6 = 326
KEY_KP_7 = 327
KEY_KP_8 = 328
KEY_KP_9 = 329
KEY_KP_DECIMAL = 330
KEY_KP_DIVIDE = 331
KEY_KP_MULTIPLY = 332
KEY_KP_SUBTRACT = 333
KEY_KP_ADD = 334
KEY_KP_ENTER = 335
KEY_KP_EQUAL = 336
# Modifier keys.
KEY_LEFT_SHIFT = 340
KEY_LEFT_CONTROL = 341
KEY_LEFT_ALT = 342
KEY_LEFT_SUPER = 343
KEY_RIGHT_SHIFT = 344
KEY_RIGHT_CONTROL = 345
KEY_RIGHT_ALT = 346
KEY_RIGHT_SUPER = 347
KEY_MENU = 348
KEY_LAST = KEY_MENU
# --- Modifier bit flags (combined in the 'mods' callback argument) --------
MOD_SHIFT = 0x0001
MOD_CONTROL = 0x0002
MOD_ALT = 0x0004
MOD_SUPER = 0x0008
# --- Mouse buttons --------------------------------------------------------
MOUSE_BUTTON_1 = 0
MOUSE_BUTTON_2 = 1
MOUSE_BUTTON_3 = 2
MOUSE_BUTTON_4 = 3
MOUSE_BUTTON_5 = 4
MOUSE_BUTTON_6 = 5
MOUSE_BUTTON_7 = 6
MOUSE_BUTTON_8 = 7
MOUSE_BUTTON_LAST = MOUSE_BUTTON_8
MOUSE_BUTTON_LEFT = MOUSE_BUTTON_1
MOUSE_BUTTON_RIGHT = MOUSE_BUTTON_2
MOUSE_BUTTON_MIDDLE = MOUSE_BUTTON_3
# --- Joystick ids ---------------------------------------------------------
JOYSTICK_1 = 0
JOYSTICK_2 = 1
JOYSTICK_3 = 2
JOYSTICK_4 = 3
JOYSTICK_5 = 4
JOYSTICK_6 = 5
JOYSTICK_7 = 6
JOYSTICK_8 = 7
JOYSTICK_9 = 8
JOYSTICK_10 = 9
JOYSTICK_11 = 10
JOYSTICK_12 = 11
JOYSTICK_13 = 12
JOYSTICK_14 = 13
JOYSTICK_15 = 14
JOYSTICK_16 = 15
JOYSTICK_LAST = JOYSTICK_16
# --- Error codes (passed to the error callback) ---------------------------
NOT_INITIALIZED = 0x00010001
NO_CURRENT_CONTEXT = 0x00010002
INVALID_ENUM = 0x00010003
INVALID_VALUE = 0x00010004
OUT_OF_MEMORY = 0x00010005
API_UNAVAILABLE = 0x00010006
VERSION_UNAVAILABLE = 0x00010007
PLATFORM_ERROR = 0x00010008
FORMAT_UNAVAILABLE = 0x00010009
NO_WINDOW_CONTEXT = 0x0001000A
# --- Window attributes / window-related hints -----------------------------
FOCUSED = 0x00020001
ICONIFIED = 0x00020002
RESIZABLE = 0x00020003
VISIBLE = 0x00020004
DECORATED = 0x00020005
AUTO_ICONIFY = 0x00020006
FLOATING = 0x00020007
MAXIMIZED = 0x00020008
# --- Framebuffer hints ----------------------------------------------------
RED_BITS = 0x00021001
GREEN_BITS = 0x00021002
BLUE_BITS = 0x00021003
ALPHA_BITS = 0x00021004
DEPTH_BITS = 0x00021005
STENCIL_BITS = 0x00021006
ACCUM_RED_BITS = 0x00021007
ACCUM_GREEN_BITS = 0x00021008
ACCUM_BLUE_BITS = 0x00021009
ACCUM_ALPHA_BITS = 0x0002100A
AUX_BUFFERS = 0x0002100B
STEREO = 0x0002100C
SAMPLES = 0x0002100D
SRGB_CAPABLE = 0x0002100E
REFRESH_RATE = 0x0002100F
DOUBLEBUFFER = 0x00021010
# --- Context hints --------------------------------------------------------
CLIENT_API = 0x00022001
CONTEXT_VERSION_MAJOR = 0x00022002
CONTEXT_VERSION_MINOR = 0x00022003
CONTEXT_REVISION = 0x00022004
CONTEXT_ROBUSTNESS = 0x00022005
OPENGL_FORWARD_COMPAT = 0x00022006
OPENGL_DEBUG_CONTEXT = 0x00022007
OPENGL_PROFILE = 0x00022008
CONTEXT_RELEASE_BEHAVIOR = 0x00022009
CONTEXT_NO_ERROR = 0x0002200A
CONTEXT_CREATION_API = 0x0002200B
# --- Values for the CLIENT_API hint ---------------------------------------
NO_API = 0
OPENGL_API = 0x00030001
OPENGL_ES_API = 0x00030002
# --- Values for the CONTEXT_ROBUSTNESS hint -------------------------------
NO_ROBUSTNESS = 0
NO_RESET_NOTIFICATION = 0x00031001
LOSE_CONTEXT_ON_RESET = 0x00031002
# --- Values for the OPENGL_PROFILE hint -----------------------------------
OPENGL_ANY_PROFILE = 0
OPENGL_CORE_PROFILE = 0x00032001
OPENGL_COMPAT_PROFILE = 0x00032002
# --- Input modes ----------------------------------------------------------
CURSOR = 0x00033001
STICKY_KEYS = 0x00033002
STICKY_MOUSE_BUTTONS = 0x00033003
# --- Values for the CURSOR input mode -------------------------------------
CURSOR_NORMAL = 0x00034001
CURSOR_HIDDEN = 0x00034002
CURSOR_DISABLED = 0x00034003
# --- Values for the CONTEXT_RELEASE_BEHAVIOR hint -------------------------
ANY_RELEASE_BEHAVIOR = 0
RELEASE_BEHAVIOR_FLUSH = 0x00035001
RELEASE_BEHAVIOR_NONE = 0x00035002
# --- Values for the CONTEXT_CREATION_API hint -----------------------------
NATIVE_CONTEXT_API = 0x00036001
EGL_CONTEXT_API = 0x00036002
# --- Standard cursor shapes (note: values overlap the context-creation
# API constants above; the two groups are used in different calls) ---------
ARROW_CURSOR = 0x00036001
IBEAM_CURSOR = 0x00036002
CROSSHAIR_CURSOR = 0x00036003
HAND_CURSOR = 0x00036004
HRESIZE_CURSOR = 0x00036005
VRESIZE_CURSOR = 0x00036006
# --- Device connection events ---------------------------------------------
CONNECTED = 0x00040001
DISCONNECTED = 0x00040002
# Value meaning "no preference" in hints and video modes.
DONT_CARE = -1
# Holds sys.exc_info() captured inside a C callback until control returns
# to Python, where the errcheck hook (see _prepare_errcheck) re-raises it.
_exc_info_from_callback = None
def _callback_exception_decorator(func):
    """
    Wrap a Python callback so that exceptions raised inside it are captured
    instead of propagating through the C library, which cannot unwind
    Python exceptions. The stashed exception is re-raised later by the
    errcheck hook installed in _prepare_errcheck.
    """
    @functools.wraps(func)
    def callback_wrapper(*args, **kwargs):
        global _exc_info_from_callback
        if _exc_info_from_callback is not None:
            # We are on the way back to Python after an exception was raised.
            # Do not call further callbacks and wait for the errcheck function
            # to handle the exception first.
            return
        try:
            return func(*args, **kwargs)
        except (KeyboardInterrupt, SystemExit):
            # Never swallow interpreter-exit requests.
            raise
        except:
            # Intentionally bare: stash any other exception (with traceback)
            # so the errcheck hook can re-raise it on the Python side.
            _exc_info_from_callback = sys.exc_info()
    return callback_wrapper
def _prepare_errcheck():
    """
    This function sets the errcheck attribute of all ctypes wrapped functions
    to evaluate the _exc_info_from_callback global variable and re-raise any
    exceptions that might have been raised in callbacks.
    It also modifies all callback types to automatically wrap the function
    using the _callback_exception_decorator.
    """
    def errcheck(result, *args):
        # Runs after every glfw* call: if a Python callback raised during
        # that call, re-raise the stashed exception now, on the Python side.
        global _exc_info_from_callback
        if _exc_info_from_callback is not None:
            exc = _exc_info_from_callback
            _exc_info_from_callback = None
            _reraise(exc[1], exc[2])
        return result
    for symbol in dir(_glfw):
        if symbol.startswith('glfw'):
            getattr(_glfw, symbol).errcheck = errcheck
    _globals = globals()
    for symbol in _globals:
        if symbol.startswith('_GLFW') and symbol.endswith('fun'):
            # Replace each _GLFW*fun CFUNCTYPE class with a factory that
            # decorates the Python callback before wrapping it. The default
            # argument deliberately binds the current class (late-binding
            # closure pitfall). Re-assigning an existing key while iterating
            # the dict is safe because the dict's size does not change.
            def wrapper_cfunctype(func, cfunctype=_globals[symbol]):
                return cfunctype(_callback_exception_decorator(func))
            _globals[symbol] = wrapper_cfunctype
# ctypes callback (function pointer) types mirroring the GLFW*fun typedefs.
# _prepare_errcheck() later replaces these classes with factories that add
# exception capturing around the Python callback.
_GLFWerrorfun = ctypes.CFUNCTYPE(None,
                                 ctypes.c_int,
                                 ctypes.c_char_p)
_GLFWwindowposfun = ctypes.CFUNCTYPE(None,
                                     ctypes.POINTER(_GLFWwindow),
                                     ctypes.c_int,
                                     ctypes.c_int)
_GLFWwindowsizefun = ctypes.CFUNCTYPE(None,
                                      ctypes.POINTER(_GLFWwindow),
                                      ctypes.c_int,
                                      ctypes.c_int)
_GLFWwindowclosefun = ctypes.CFUNCTYPE(None,
                                       ctypes.POINTER(_GLFWwindow))
_GLFWwindowrefreshfun = ctypes.CFUNCTYPE(None,
                                         ctypes.POINTER(_GLFWwindow))
_GLFWwindowfocusfun = ctypes.CFUNCTYPE(None,
                                       ctypes.POINTER(_GLFWwindow),
                                       ctypes.c_int)
_GLFWwindowiconifyfun = ctypes.CFUNCTYPE(None,
                                         ctypes.POINTER(_GLFWwindow),
                                         ctypes.c_int)
_GLFWframebuffersizefun = ctypes.CFUNCTYPE(None,
                                           ctypes.POINTER(_GLFWwindow),
                                           ctypes.c_int,
                                           ctypes.c_int)
_GLFWmousebuttonfun = ctypes.CFUNCTYPE(None,
                                       ctypes.POINTER(_GLFWwindow),
                                       ctypes.c_int,
                                       ctypes.c_int,
                                       ctypes.c_int)
_GLFWcursorposfun = ctypes.CFUNCTYPE(None,
                                     ctypes.POINTER(_GLFWwindow),
                                     ctypes.c_double,
                                     ctypes.c_double)
_GLFWcursorenterfun = ctypes.CFUNCTYPE(None,
                                       ctypes.POINTER(_GLFWwindow),
                                       ctypes.c_int)
_GLFWscrollfun = ctypes.CFUNCTYPE(None,
                                  ctypes.POINTER(_GLFWwindow),
                                  ctypes.c_double,
                                  ctypes.c_double)
_GLFWkeyfun = ctypes.CFUNCTYPE(None,
                               ctypes.POINTER(_GLFWwindow),
                               ctypes.c_int,
                               ctypes.c_int,
                               ctypes.c_int,
                               ctypes.c_int)
_GLFWcharfun = ctypes.CFUNCTYPE(None,
                                ctypes.POINTER(_GLFWwindow),
                                ctypes.c_int)
_GLFWmonitorfun = ctypes.CFUNCTYPE(None,
                                   ctypes.POINTER(_GLFWmonitor),
                                   ctypes.c_int)
_GLFWdropfun = ctypes.CFUNCTYPE(None,
                                ctypes.POINTER(_GLFWwindow),
                                ctypes.c_int,
                                ctypes.POINTER(ctypes.c_char_p))
_GLFWcharmodsfun = ctypes.CFUNCTYPE(None,
                                    ctypes.POINTER(_GLFWwindow),
                                    ctypes.c_uint,
                                    ctypes.c_int)
_GLFWjoystickfun = ctypes.CFUNCTYPE(None,
                                    ctypes.c_int,
                                    ctypes.c_int)
_glfw.glfwInit.restype = ctypes.c_int
_glfw.glfwInit.argtypes = []
def init():
    """
    Initializes the GLFW library.

    The current working directory is saved and restored around the call,
    since initialization may change it.

    Wrapper for:
        int glfwInit(void);
    """
    previous_cwd = _getcwd()
    result = _glfw.glfwInit()
    os.chdir(previous_cwd)
    return result
_glfw.glfwTerminate.restype = None
_glfw.glfwTerminate.argtypes = []
def terminate():
    """
    Terminates the GLFW library.

    All retained Python callback and user-data references are dropped
    first so they can be garbage collected.

    Wrapper for:
        void glfwTerminate(void);
    """
    for callback_repository in _callback_repositories:
        callback_repository.clear()
    _window_user_data_repository.clear()
    _glfw.glfwTerminate()
_glfw.glfwGetVersion.restype = None
_glfw.glfwGetVersion.argtypes = [ctypes.POINTER(ctypes.c_int),
                                 ctypes.POINTER(ctypes.c_int),
                                 ctypes.POINTER(ctypes.c_int)]
def get_version():
    """
    Retrieves the version of the GLFW library.

    Returns a (major, minor, rev) tuple of ints.

    Wrapper for:
        void glfwGetVersion(int* major, int* minor, int* rev);
    """
    major = ctypes.c_int(0)
    minor = ctypes.c_int(0)
    rev = ctypes.c_int(0)
    _glfw.glfwGetVersion(ctypes.pointer(major),
                         ctypes.pointer(minor),
                         ctypes.pointer(rev))
    return major.value, minor.value, rev.value
_glfw.glfwGetVersionString.restype = ctypes.c_char_p
_glfw.glfwGetVersionString.argtypes = []
def get_version_string():
    """
    Returns a string describing the compile-time configuration.

    Wrapper for:
        const char* glfwGetVersionString(void);
    """
    return _glfw.glfwGetVersionString()
@_callback_exception_decorator
def _raise_glfw_errors_as_exceptions(error_code, description):
    """
    Default error callback that raises GLFWError exceptions for glfw errors.
    Set an alternative error callback or set glfw.ERROR_REPORTING to False to
    disable this behavior.
    """
    global ERROR_REPORTING
    if ERROR_REPORTING:
        message = "(%d) %s" % (error_code, description)
        raise GLFWError(message)
# Keep module-level references to both the Python function and its ctypes
# trampoline so neither is garbage collected while registered with GLFW.
_default_error_callback = _GLFWerrorfun(_raise_glfw_errors_as_exceptions)
_error_callback = (_raise_glfw_errors_as_exceptions, _default_error_callback)
_glfw.glfwSetErrorCallback.restype = _GLFWerrorfun
_glfw.glfwSetErrorCallback.argtypes = [_GLFWerrorfun]
# Install the exception-raising callback as the default at import time.
_glfw.glfwSetErrorCallback(_default_error_callback)
def set_error_callback(cbfun):
    """
    Sets the error callback.

    Passing None restores the default callback that raises GLFWError
    exceptions. Returns the previously set Python callback, if any.

    Wrapper for:
        GLFWerrorfun glfwSetErrorCallback(GLFWerrorfun cbfun);
    """
    global _error_callback
    previous_callback = _error_callback
    if cbfun is None:
        # Restore the default exception-raising callback.
        cbfun = _raise_glfw_errors_as_exceptions
        c_cbfun = _default_error_callback
    else:
        c_cbfun = _GLFWerrorfun(cbfun)
    # Retain both references so the ctypes trampoline stays alive.
    _error_callback = (cbfun, c_cbfun)
    _glfw.glfwSetErrorCallback(c_cbfun)
    if previous_callback is not None and previous_callback[0] != _raise_glfw_errors_as_exceptions:
        return previous_callback[0]
_glfw.glfwGetMonitors.restype = ctypes.POINTER(ctypes.POINTER(_GLFWmonitor))
_glfw.glfwGetMonitors.argtypes = [ctypes.POINTER(ctypes.c_int)]
def get_monitors():
    """
    Returns the currently connected monitors as a list of monitor handles.

    Wrapper for:
        GLFWmonitor** glfwGetMonitors(int* count);
    """
    count = ctypes.c_int(0)
    result = _glfw.glfwGetMonitors(ctypes.pointer(count))
    return [result[i] for i in range(count.value)]
_glfw.glfwGetPrimaryMonitor.restype = ctypes.POINTER(_GLFWmonitor)
_glfw.glfwGetPrimaryMonitor.argtypes = []
def get_primary_monitor():
    """
    Returns the primary monitor.

    Wrapper for:
        GLFWmonitor* glfwGetPrimaryMonitor(void);
    """
    return _glfw.glfwGetPrimaryMonitor()
_glfw.glfwGetMonitorPos.restype = None
_glfw.glfwGetMonitorPos.argtypes = [ctypes.POINTER(_GLFWmonitor),
                                    ctypes.POINTER(ctypes.c_int),
                                    ctypes.POINTER(ctypes.c_int)]
def get_monitor_pos(monitor):
    """
    Returns the position of the monitor's viewport on the virtual screen
    as an (xpos, ypos) tuple of ints.

    Wrapper for:
        void glfwGetMonitorPos(GLFWmonitor* monitor, int* xpos, int* ypos);
    """
    xpos = ctypes.c_int(0)
    ypos = ctypes.c_int(0)
    _glfw.glfwGetMonitorPos(monitor, ctypes.pointer(xpos), ctypes.pointer(ypos))
    return xpos.value, ypos.value
_glfw.glfwGetMonitorPhysicalSize.restype = None
_glfw.glfwGetMonitorPhysicalSize.argtypes = [ctypes.POINTER(_GLFWmonitor),
                                             ctypes.POINTER(ctypes.c_int),
                                             ctypes.POINTER(ctypes.c_int)]
def get_monitor_physical_size(monitor):
    """
    Returns the physical size of the monitor as a (width, height) tuple.

    Wrapper for:
        void glfwGetMonitorPhysicalSize(GLFWmonitor* monitor, int* width, int* height);
    """
    width = ctypes.c_int(0)
    height = ctypes.c_int(0)
    _glfw.glfwGetMonitorPhysicalSize(monitor,
                                     ctypes.pointer(width),
                                     ctypes.pointer(height))
    return width.value, height.value
_glfw.glfwGetMonitorName.restype = ctypes.c_char_p
_glfw.glfwGetMonitorName.argtypes = [ctypes.POINTER(_GLFWmonitor)]
def get_monitor_name(monitor):
    """
    Returns the name of the specified monitor.

    Wrapper for:
        const char* glfwGetMonitorName(GLFWmonitor* monitor);
    """
    return _glfw.glfwGetMonitorName(monitor)
# Retains the current Python monitor callback and its ctypes trampoline.
_monitor_callback = None
_glfw.glfwSetMonitorCallback.restype = _GLFWmonitorfun
_glfw.glfwSetMonitorCallback.argtypes = [_GLFWmonitorfun]
def set_monitor_callback(cbfun):
    """
    Sets the monitor configuration callback.

    Returns the previously set Python callback, if any.

    Wrapper for:
        GLFWmonitorfun glfwSetMonitorCallback(GLFWmonitorfun cbfun);
    """
    global _monitor_callback
    previous_callback = _monitor_callback
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWmonitorfun(cbfun)
    # Retain both references so the ctypes trampoline stays alive.
    _monitor_callback = (cbfun, c_cbfun)
    _glfw.glfwSetMonitorCallback(c_cbfun)
    if previous_callback is not None and previous_callback[0] != 0:
        return previous_callback[0]
_glfw.glfwGetVideoModes.restype = ctypes.POINTER(_GLFWvidmode)
_glfw.glfwGetVideoModes.argtypes = [ctypes.POINTER(_GLFWmonitor),
                                    ctypes.POINTER(ctypes.c_int)]
def get_video_modes(monitor):
    """
    Returns the available video modes for the specified monitor as a list
    of unwrapped ((width, height), (r, g, b), refresh_rate) tuples.

    Wrapper for:
        const GLFWvidmode* glfwGetVideoModes(GLFWmonitor* monitor, int* count);
    """
    count = ctypes.c_int(0)
    result = _glfw.glfwGetVideoModes(monitor, ctypes.pointer(count))
    return [result[i].unwrap() for i in range(count.value)]
_glfw.glfwGetVideoMode.restype = ctypes.POINTER(_GLFWvidmode)
_glfw.glfwGetVideoMode.argtypes = [ctypes.POINTER(_GLFWmonitor)]
def get_video_mode(monitor):
    """
    Returns the current mode of the specified monitor as an unwrapped
    ((width, height), (r, g, b), refresh_rate) tuple.

    Wrapper for:
        const GLFWvidmode* glfwGetVideoMode(GLFWmonitor* monitor);
    """
    return _glfw.glfwGetVideoMode(monitor).contents.unwrap()
_glfw.glfwSetGamma.restype = None
_glfw.glfwSetGamma.argtypes = [ctypes.POINTER(_GLFWmonitor),
                               ctypes.c_float]
def set_gamma(monitor, gamma):
    """
    Generates a gamma ramp and sets it for the specified monitor.

    Wrapper for:
        void glfwSetGamma(GLFWmonitor* monitor, float gamma);
    """
    _glfw.glfwSetGamma(monitor, gamma)
_glfw.glfwGetGammaRamp.restype = ctypes.POINTER(_GLFWgammaramp)
_glfw.glfwGetGammaRamp.argtypes = [ctypes.POINTER(_GLFWmonitor)]
def get_gamma_ramp(monitor):
    """
    Retrieves the current gamma ramp for the specified monitor as a
    (red, green, blue) triple of float lists.

    Wrapper for:
        const GLFWgammaramp* glfwGetGammaRamp(GLFWmonitor* monitor);
    """
    return _glfw.glfwGetGammaRamp(monitor).contents.unwrap()
_glfw.glfwSetGammaRamp.restype = None
_glfw.glfwSetGammaRamp.argtypes = [ctypes.POINTER(_GLFWmonitor),
                                   ctypes.POINTER(_GLFWgammaramp)]
def set_gamma_ramp(monitor, ramp):
    """
    Sets the current gamma ramp for the specified monitor from a
    (red, green, blue) triple of float sequences.

    Wrapper for:
        void glfwSetGammaRamp(GLFWmonitor* monitor, const GLFWgammaramp* ramp);
    """
    c_ramp = _GLFWgammaramp()
    c_ramp.wrap(ramp)
    _glfw.glfwSetGammaRamp(monitor, ctypes.pointer(c_ramp))
_glfw.glfwDefaultWindowHints.restype = None
_glfw.glfwDefaultWindowHints.argtypes = []
def default_window_hints():
    """
    Resets all window hints to their default values.

    Wrapper for:
        void glfwDefaultWindowHints(void);
    """
    _glfw.glfwDefaultWindowHints()
_glfw.glfwWindowHint.restype = None
_glfw.glfwWindowHint.argtypes = [ctypes.c_int,
                                 ctypes.c_int]
def window_hint(target, hint):
    """
    Sets the specified window hint to the desired value.

    Wrapper for:
        void glfwWindowHint(int target, int hint);
    """
    _glfw.glfwWindowHint(target, hint)
_glfw.glfwCreateWindow.restype = ctypes.POINTER(_GLFWwindow)
_glfw.glfwCreateWindow.argtypes = [ctypes.c_int,
                                   ctypes.c_int,
                                   ctypes.c_char_p,
                                   ctypes.POINTER(_GLFWmonitor),
                                   ctypes.POINTER(_GLFWwindow)]
def create_window(width, height, title, monitor, share):
    """
    Creates a window and its associated context.

    The title is encoded to UTF-8 on Python 3 via _to_char_p.

    Wrapper for:
        GLFWwindow* glfwCreateWindow(int width, int height, const char* title, GLFWmonitor* monitor, GLFWwindow* share);
    """
    return _glfw.glfwCreateWindow(width, height, _to_char_p(title),
                                  monitor, share)
_glfw.glfwDestroyWindow.restype = None
_glfw.glfwDestroyWindow.argtypes = [ctypes.POINTER(_GLFWwindow)]
def destroy_window(window):
    """
    Destroys the specified window and its context.

    Any Python callbacks and user data retained for the window are also
    released so they can be garbage collected.

    Wrapper for:
        void glfwDestroyWindow(GLFWwindow* window);
    """
    _glfw.glfwDestroyWindow(window)
    # Compute the repository key with c_long, exactly as the callback
    # setters and set_window_user_pointer do. The previous c_ulong cast
    # produced a different key for addresses above the signed range, so
    # the stored references were never removed (a leak).
    window_addr = ctypes.cast(ctypes.pointer(window),
                              ctypes.POINTER(ctypes.c_long)).contents.value
    for callback_repository in _callback_repositories:
        if window_addr in callback_repository:
            del callback_repository[window_addr]
    if window_addr in _window_user_data_repository:
        del _window_user_data_repository[window_addr]
_glfw.glfwWindowShouldClose.restype = ctypes.c_int
_glfw.glfwWindowShouldClose.argtypes = [ctypes.POINTER(_GLFWwindow)]
def window_should_close(window):
    """
    Checks the close flag of the specified window.

    Wrapper for:
        int glfwWindowShouldClose(GLFWwindow* window);
    """
    return _glfw.glfwWindowShouldClose(window)
_glfw.glfwSetWindowShouldClose.restype = None
_glfw.glfwSetWindowShouldClose.argtypes = [ctypes.POINTER(_GLFWwindow),
                                           ctypes.c_int]
def set_window_should_close(window, value):
    """
    Sets the close flag of the specified window.

    Wrapper for:
        void glfwSetWindowShouldClose(GLFWwindow* window, int value);
    """
    _glfw.glfwSetWindowShouldClose(window, value)
_glfw.glfwSetWindowTitle.restype = None
_glfw.glfwSetWindowTitle.argtypes = [ctypes.POINTER(_GLFWwindow),
                                     ctypes.c_char_p]
def set_window_title(window, title):
    """
    Sets the title of the specified window.

    The title is encoded to UTF-8 on Python 3 via _to_char_p.

    Wrapper for:
        void glfwSetWindowTitle(GLFWwindow* window, const char* title);
    """
    _glfw.glfwSetWindowTitle(window, _to_char_p(title))
_glfw.glfwGetWindowPos.restype = None
_glfw.glfwGetWindowPos.argtypes = [ctypes.POINTER(_GLFWwindow),
                                   ctypes.POINTER(ctypes.c_int),
                                   ctypes.POINTER(ctypes.c_int)]
def get_window_pos(window):
    """
    Retrieves the position of the client area of the specified window as
    an (xpos, ypos) tuple of ints.

    Wrapper for:
        void glfwGetWindowPos(GLFWwindow* window, int* xpos, int* ypos);
    """
    xpos = ctypes.c_int(0)
    ypos = ctypes.c_int(0)
    _glfw.glfwGetWindowPos(window, ctypes.pointer(xpos), ctypes.pointer(ypos))
    return xpos.value, ypos.value
_glfw.glfwSetWindowPos.restype = None
_glfw.glfwSetWindowPos.argtypes = [ctypes.POINTER(_GLFWwindow),
                                   ctypes.c_int,
                                   ctypes.c_int]
def set_window_pos(window, xpos, ypos):
    """
    Sets the position of the client area of the specified window.

    Wrapper for:
        void glfwSetWindowPos(GLFWwindow* window, int xpos, int ypos);
    """
    _glfw.glfwSetWindowPos(window, xpos, ypos)
_glfw.glfwGetWindowSize.restype = None
_glfw.glfwGetWindowSize.argtypes = [ctypes.POINTER(_GLFWwindow),
                                    ctypes.POINTER(ctypes.c_int),
                                    ctypes.POINTER(ctypes.c_int)]
def get_window_size(window):
    """
    Retrieves the size of the client area of the specified window as a
    (width, height) tuple of ints.

    Wrapper for:
        void glfwGetWindowSize(GLFWwindow* window, int* width, int* height);
    """
    width = ctypes.c_int(0)
    height = ctypes.c_int(0)
    _glfw.glfwGetWindowSize(window, ctypes.pointer(width), ctypes.pointer(height))
    return width.value, height.value
_glfw.glfwSetWindowSize.restype = None
_glfw.glfwSetWindowSize.argtypes = [ctypes.POINTER(_GLFWwindow),
                                    ctypes.c_int,
                                    ctypes.c_int]
def set_window_size(window, width, height):
    """
    Sets the size of the client area of the specified window.

    Wrapper for:
        void glfwSetWindowSize(GLFWwindow* window, int width, int height);
    """
    _glfw.glfwSetWindowSize(window, width, height)
_glfw.glfwGetFramebufferSize.restype = None
_glfw.glfwGetFramebufferSize.argtypes = [ctypes.POINTER(_GLFWwindow),
                                         ctypes.POINTER(ctypes.c_int),
                                         ctypes.POINTER(ctypes.c_int)]
def get_framebuffer_size(window):
    """
    Retrieves the size of the framebuffer of the specified window as a
    (width, height) tuple of ints.

    Wrapper for:
        void glfwGetFramebufferSize(GLFWwindow* window, int* width, int* height);
    """
    width = ctypes.c_int(0)
    height = ctypes.c_int(0)
    _glfw.glfwGetFramebufferSize(window,
                                 ctypes.pointer(width),
                                 ctypes.pointer(height))
    return width.value, height.value
_glfw.glfwIconifyWindow.restype = None
_glfw.glfwIconifyWindow.argtypes = [ctypes.POINTER(_GLFWwindow)]
def iconify_window(window):
    """
    Iconifies the specified window.

    Wrapper for:
        void glfwIconifyWindow(GLFWwindow* window);
    """
    _glfw.glfwIconifyWindow(window)
_glfw.glfwRestoreWindow.restype = None
_glfw.glfwRestoreWindow.argtypes = [ctypes.POINTER(_GLFWwindow)]
def restore_window(window):
    """
    Restores the specified window.

    Wrapper for:
        void glfwRestoreWindow(GLFWwindow* window);
    """
    _glfw.glfwRestoreWindow(window)
_glfw.glfwShowWindow.restype = None
_glfw.glfwShowWindow.argtypes = [ctypes.POINTER(_GLFWwindow)]
def show_window(window):
    """
    Makes the specified window visible.

    Wrapper for:
        void glfwShowWindow(GLFWwindow* window);
    """
    _glfw.glfwShowWindow(window)
_glfw.glfwHideWindow.restype = None
_glfw.glfwHideWindow.argtypes = [ctypes.POINTER(_GLFWwindow)]
def hide_window(window):
    """
    Hides the specified window.

    Wrapper for:
        void glfwHideWindow(GLFWwindow* window);
    """
    _glfw.glfwHideWindow(window)
_glfw.glfwGetWindowMonitor.restype = ctypes.POINTER(_GLFWmonitor)
_glfw.glfwGetWindowMonitor.argtypes = [ctypes.POINTER(_GLFWwindow)]
def get_window_monitor(window):
    """
    Returns the monitor that the window uses for full screen mode.

    Wrapper for:
        GLFWmonitor* glfwGetWindowMonitor(GLFWwindow* window);
    """
    return _glfw.glfwGetWindowMonitor(window)
_glfw.glfwGetWindowAttrib.restype = ctypes.c_int
_glfw.glfwGetWindowAttrib.argtypes = [ctypes.POINTER(_GLFWwindow),
                                      ctypes.c_int]
def get_window_attrib(window, attrib):
    """
    Returns an attribute of the specified window.

    Wrapper for:
        int glfwGetWindowAttrib(GLFWwindow* window, int attrib);
    """
    return _glfw.glfwGetWindowAttrib(window, attrib)
# Maps window address -> (is_wrapped_py_object, value); keeps Python user
# data alive and retrievable without dereferencing the raw C pointer.
_window_user_data_repository = {}
_glfw.glfwSetWindowUserPointer.restype = None
_glfw.glfwSetWindowUserPointer.argtypes = [ctypes.POINTER(_GLFWwindow),
                                           ctypes.c_void_p]
def set_window_user_pointer(window, pointer):
    """
    Sets the user pointer of the specified window. You may pass a normal python object into this function and it will
    be wrapped automatically. The object will be kept in existence until the pointer is set to something else or
    until the window is destroyed.

    Wrapper for:
        void glfwSetWindowUserPointer(GLFWwindow* window, void* pointer);
    """
    data = (False, pointer)
    if not isinstance(pointer, ctypes.c_void_p):
        data = (True, pointer)
        # Create a void pointer for the python object
        # NOTE(review): the temporary py_object cell is not retained here,
        # so the raw pointer handed to GLFW may dangle; this appears safe
        # only because get_window_user_pointer returns the wrapped object
        # from the repository instead of dereferencing it — confirm no
        # caller dereferences the C-side pointer.
        pointer = ctypes.cast(ctypes.pointer(ctypes.py_object(pointer)), ctypes.c_void_p)
    window_addr = ctypes.cast(ctypes.pointer(window),
                              ctypes.POINTER(ctypes.c_long)).contents.value
    _window_user_data_repository[window_addr] = data
    _glfw.glfwSetWindowUserPointer(window, pointer)
_glfw.glfwGetWindowUserPointer.restype = ctypes.c_void_p
_glfw.glfwGetWindowUserPointer.argtypes = [ctypes.POINTER(_GLFWwindow)]
def get_window_user_pointer(window):
    """
    Returns the user pointer of the specified window.

    If a plain Python object was stored via set_window_user_pointer, it is
    returned directly from the Python-side repository; otherwise the raw
    pointer reported by GLFW is returned.

    Wrapper for:
        void* glfwGetWindowUserPointer(GLFWwindow* window);
    """
    window_addr = ctypes.cast(ctypes.pointer(window),
                              ctypes.POINTER(ctypes.c_long)).contents.value
    data = _window_user_data_repository.get(window_addr)
    if data is not None and data[0]:
        # data[0] marks a wrapped Python object; return it directly.
        return data[1]
    return _glfw.glfwGetWindowUserPointer(window)
# Retains (python_callback, ctypes_trampoline) per window address.
_window_pos_callback_repository = {}
_callback_repositories.append(_window_pos_callback_repository)
_glfw.glfwSetWindowPosCallback.restype = _GLFWwindowposfun
_glfw.glfwSetWindowPosCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                           _GLFWwindowposfun]
def set_window_pos_callback(window, cbfun):
    """
    Sets the position callback for the specified window.

    Returns the previously set Python callback for this window, if any.

    Wrapper for:
        GLFWwindowposfun glfwSetWindowPosCallback(GLFWwindow* window, GLFWwindowposfun cbfun);
    """
    window_addr = ctypes.cast(ctypes.pointer(window),
                              ctypes.POINTER(ctypes.c_long)).contents.value
    previous_callback = _window_pos_callback_repository.get(window_addr)
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWwindowposfun(cbfun)
    # Retain both references so the ctypes trampoline stays alive.
    _window_pos_callback_repository[window_addr] = (cbfun, c_cbfun)
    _glfw.glfwSetWindowPosCallback(window, c_cbfun)
    if previous_callback is not None and previous_callback[0] != 0:
        return previous_callback[0]
# Retains (python_callback, ctypes_trampoline) per window address.
_window_size_callback_repository = {}
_callback_repositories.append(_window_size_callback_repository)
_glfw.glfwSetWindowSizeCallback.restype = _GLFWwindowsizefun
_glfw.glfwSetWindowSizeCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                            _GLFWwindowsizefun]
def set_window_size_callback(window, cbfun):
    """
    Sets the size callback for the specified window.

    Returns the previously set Python callback for this window, if any.

    Wrapper for:
        GLFWwindowsizefun glfwSetWindowSizeCallback(GLFWwindow* window, GLFWwindowsizefun cbfun);
    """
    window_addr = ctypes.cast(ctypes.pointer(window),
                              ctypes.POINTER(ctypes.c_long)).contents.value
    previous_callback = _window_size_callback_repository.get(window_addr)
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWwindowsizefun(cbfun)
    # Retain both references so the ctypes trampoline stays alive.
    _window_size_callback_repository[window_addr] = (cbfun, c_cbfun)
    _glfw.glfwSetWindowSizeCallback(window, c_cbfun)
    if previous_callback is not None and previous_callback[0] != 0:
        return previous_callback[0]
# Retains (python_callback, ctypes_trampoline) per window address.
_window_close_callback_repository = {}
_callback_repositories.append(_window_close_callback_repository)
_glfw.glfwSetWindowCloseCallback.restype = _GLFWwindowclosefun
_glfw.glfwSetWindowCloseCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                             _GLFWwindowclosefun]
def set_window_close_callback(window, cbfun):
    """
    Sets the close callback for the specified window.

    Returns the previously set Python callback for this window, if any.

    Wrapper for:
        GLFWwindowclosefun glfwSetWindowCloseCallback(GLFWwindow* window, GLFWwindowclosefun cbfun);
    """
    window_addr = ctypes.cast(ctypes.pointer(window),
                              ctypes.POINTER(ctypes.c_long)).contents.value
    previous_callback = _window_close_callback_repository.get(window_addr)
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWwindowclosefun(cbfun)
    # Retain both references so the ctypes trampoline stays alive.
    _window_close_callback_repository[window_addr] = (cbfun, c_cbfun)
    _glfw.glfwSetWindowCloseCallback(window, c_cbfun)
    if previous_callback is not None and previous_callback[0] != 0:
        return previous_callback[0]
_window_refresh_callback_repository = {}
_callback_repositories.append(_window_refresh_callback_repository)
_glfw.glfwSetWindowRefreshCallback.restype = _GLFWwindowrefreshfun
_glfw.glfwSetWindowRefreshCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                               _GLFWwindowrefreshfun]
def set_window_refresh_callback(window, cbfun):
    """
    Sets the refresh callback for the specified window.

    Returns the previously installed Python callback, if any.

    Wrapper for:
        GLFWwindowrefreshfun glfwSetWindowRefreshCallback(GLFWwindow* window, GLFWwindowrefreshfun cbfun);
    """
    # c_void_p preserves the full pointer width; c_long is only 32 bits on
    # 64-bit Windows and would truncate the window address.
    window_addr = ctypes.cast(ctypes.pointer(window),
                              ctypes.POINTER(ctypes.c_void_p)).contents.value
    previous_callback = _window_refresh_callback_repository.get(window_addr)
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWwindowrefreshfun(cbfun)
    # Retain both the Python callable and its ctypes wrapper so the wrapper
    # is not garbage-collected while GLFW still references it.
    _window_refresh_callback_repository[window_addr] = (cbfun, c_cbfun)
    _glfw.glfwSetWindowRefreshCallback(window, c_cbfun)
    if previous_callback is not None and previous_callback[0] != 0:
        return previous_callback[0]
_window_focus_callback_repository = {}
_callback_repositories.append(_window_focus_callback_repository)
_glfw.glfwSetWindowFocusCallback.restype = _GLFWwindowfocusfun
_glfw.glfwSetWindowFocusCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                             _GLFWwindowfocusfun]
def set_window_focus_callback(window, cbfun):
    """
    Sets the focus callback for the specified window.

    Returns the previously installed Python callback, if any.

    Wrapper for:
        GLFWwindowfocusfun glfwSetWindowFocusCallback(GLFWwindow* window, GLFWwindowfocusfun cbfun);
    """
    # c_void_p preserves the full pointer width; c_long is only 32 bits on
    # 64-bit Windows and would truncate the window address.
    window_addr = ctypes.cast(ctypes.pointer(window),
                              ctypes.POINTER(ctypes.c_void_p)).contents.value
    previous_callback = _window_focus_callback_repository.get(window_addr)
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWwindowfocusfun(cbfun)
    # Retain both the Python callable and its ctypes wrapper so the wrapper
    # is not garbage-collected while GLFW still references it.
    _window_focus_callback_repository[window_addr] = (cbfun, c_cbfun)
    _glfw.glfwSetWindowFocusCallback(window, c_cbfun)
    if previous_callback is not None and previous_callback[0] != 0:
        return previous_callback[0]
_window_iconify_callback_repository = {}
_callback_repositories.append(_window_iconify_callback_repository)
_glfw.glfwSetWindowIconifyCallback.restype = _GLFWwindowiconifyfun
_glfw.glfwSetWindowIconifyCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                               _GLFWwindowiconifyfun]
def set_window_iconify_callback(window, cbfun):
    """
    Sets the iconify callback for the specified window.

    Returns the previously installed Python callback, if any.

    Wrapper for:
        GLFWwindowiconifyfun glfwSetWindowIconifyCallback(GLFWwindow* window, GLFWwindowiconifyfun cbfun);
    """
    # c_void_p preserves the full pointer width; c_long is only 32 bits on
    # 64-bit Windows and would truncate the window address.
    window_addr = ctypes.cast(ctypes.pointer(window),
                              ctypes.POINTER(ctypes.c_void_p)).contents.value
    previous_callback = _window_iconify_callback_repository.get(window_addr)
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWwindowiconifyfun(cbfun)
    # Retain both the Python callable and its ctypes wrapper so the wrapper
    # is not garbage-collected while GLFW still references it.
    _window_iconify_callback_repository[window_addr] = (cbfun, c_cbfun)
    _glfw.glfwSetWindowIconifyCallback(window, c_cbfun)
    if previous_callback is not None and previous_callback[0] != 0:
        return previous_callback[0]
_framebuffer_size_callback_repository = {}
_callback_repositories.append(_framebuffer_size_callback_repository)
_glfw.glfwSetFramebufferSizeCallback.restype = _GLFWframebuffersizefun
_glfw.glfwSetFramebufferSizeCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                                 _GLFWframebuffersizefun]
def set_framebuffer_size_callback(window, cbfun):
    """
    Sets the framebuffer resize callback for the specified window.

    Returns the previously installed Python callback, if any.

    Wrapper for:
        GLFWframebuffersizefun glfwSetFramebufferSizeCallback(GLFWwindow* window, GLFWframebuffersizefun cbfun);
    """
    # c_void_p preserves the full pointer width; c_long is only 32 bits on
    # 64-bit Windows and would truncate the window address.
    window_addr = ctypes.cast(ctypes.pointer(window),
                              ctypes.POINTER(ctypes.c_void_p)).contents.value
    previous_callback = _framebuffer_size_callback_repository.get(window_addr)
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWframebuffersizefun(cbfun)
    # Retain both the Python callable and its ctypes wrapper so the wrapper
    # is not garbage-collected while GLFW still references it.
    _framebuffer_size_callback_repository[window_addr] = (cbfun, c_cbfun)
    _glfw.glfwSetFramebufferSizeCallback(window, c_cbfun)
    if previous_callback is not None and previous_callback[0] != 0:
        return previous_callback[0]
_glfw.glfwPollEvents.restype = None
_glfw.glfwPollEvents.argtypes = []
def poll_events():
    """
    Processes all pending events.

    Returns immediately once the queued events have been processed.

    Wrapper for:
        void glfwPollEvents(void);
    """
    _glfw.glfwPollEvents()
_glfw.glfwWaitEvents.restype = None
_glfw.glfwWaitEvents.argtypes = []
def wait_events():
    """
    Waits until events are pending and processes them.

    Blocks the calling thread until at least one event arrives.

    Wrapper for:
        void glfwWaitEvents(void);
    """
    _glfw.glfwWaitEvents()
_glfw.glfwGetInputMode.restype = ctypes.c_int
_glfw.glfwGetInputMode.argtypes = [ctypes.POINTER(_GLFWwindow),
                                   ctypes.c_int]
def get_input_mode(window, mode):
    """
    Returns the value of an input option for the specified window.

    :param window: the window to query
    :param mode: input mode constant (e.g. GLFW_CURSOR, GLFW_STICKY_KEYS)
    :return: the mode's current value as an int

    Wrapper for:
        int glfwGetInputMode(GLFWwindow* window, int mode);
    """
    return _glfw.glfwGetInputMode(window, mode)
_glfw.glfwSetInputMode.restype = None
_glfw.glfwSetInputMode.argtypes = [ctypes.POINTER(_GLFWwindow),
                                   ctypes.c_int,
                                   ctypes.c_int]
def set_input_mode(window, mode, value):
    """
    Sets an input option for the specified window.

    @param[in] window The window whose input mode to set.
    @param[in] mode One of `GLFW_CURSOR`, `GLFW_STICKY_KEYS` or
        `GLFW_STICKY_MOUSE_BUTTONS`.
    @param[in] value The new value of the specified input mode.

    Wrapper for:
        void glfwSetInputMode(GLFWwindow* window, int mode, int value);
    """
    _glfw.glfwSetInputMode(window, mode, value)
_glfw.glfwGetKey.restype = ctypes.c_int
_glfw.glfwGetKey.argtypes = [ctypes.POINTER(_GLFWwindow),
                             ctypes.c_int]
def get_key(window, key):
    """
    Returns the last reported state of a keyboard key for the specified
    window.

    :param key: a GLFW key token (int)
    :return: the key state as an int

    Wrapper for:
        int glfwGetKey(GLFWwindow* window, int key);
    """
    return _glfw.glfwGetKey(window, key)
_glfw.glfwGetMouseButton.restype = ctypes.c_int
_glfw.glfwGetMouseButton.argtypes = [ctypes.POINTER(_GLFWwindow),
                                     ctypes.c_int]
def get_mouse_button(window, button):
    """
    Returns the last reported state of a mouse button for the specified
    window.

    :param button: a GLFW mouse button token (int)
    :return: the button state as an int

    Wrapper for:
        int glfwGetMouseButton(GLFWwindow* window, int button);
    """
    return _glfw.glfwGetMouseButton(window, button)
_glfw.glfwGetCursorPos.restype = None
_glfw.glfwGetCursorPos.argtypes = [ctypes.POINTER(_GLFWwindow),
                                   ctypes.POINTER(ctypes.c_double),
                                   ctypes.POINTER(ctypes.c_double)]
def get_cursor_pos(window):
    """
    Retrieves the last reported cursor position, relative to the client
    area of the window, as an (x, y) pair of floats.
    Wrapper for:
        void glfwGetCursorPos(GLFWwindow* window, double* xpos, double* ypos);
    """
    # Output parameters for the C call.
    x = ctypes.c_double(0.0)
    y = ctypes.c_double(0.0)
    _glfw.glfwGetCursorPos(window, ctypes.byref(x), ctypes.byref(y))
    return x.value, y.value
_glfw.glfwSetCursorPos.restype = None
_glfw.glfwSetCursorPos.argtypes = [ctypes.POINTER(_GLFWwindow),
                                   ctypes.c_double,
                                   ctypes.c_double]
def set_cursor_pos(window, xpos, ypos):
    """
    Sets the position of the cursor, relative to the client area of the window.

    :param xpos: new cursor x coordinate (float)
    :param ypos: new cursor y coordinate (float)

    Wrapper for:
        void glfwSetCursorPos(GLFWwindow* window, double xpos, double ypos);
    """
    _glfw.glfwSetCursorPos(window, xpos, ypos)
_key_callback_repository = {}
_callback_repositories.append(_key_callback_repository)
_glfw.glfwSetKeyCallback.restype = _GLFWkeyfun
_glfw.glfwSetKeyCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                     _GLFWkeyfun]
def set_key_callback(window, cbfun):
    """
    Sets the key callback.

    Returns the previously installed Python callback, if any.

    Wrapper for:
        GLFWkeyfun glfwSetKeyCallback(GLFWwindow* window, GLFWkeyfun cbfun);
    """
    # c_void_p preserves the full pointer width; c_long is only 32 bits on
    # 64-bit Windows and would truncate the window address.
    window_addr = ctypes.cast(ctypes.pointer(window),
                              ctypes.POINTER(ctypes.c_void_p)).contents.value
    previous_callback = _key_callback_repository.get(window_addr)
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWkeyfun(cbfun)
    # Retain both the Python callable and its ctypes wrapper so the wrapper
    # is not garbage-collected while GLFW still references it.
    _key_callback_repository[window_addr] = (cbfun, c_cbfun)
    _glfw.glfwSetKeyCallback(window, c_cbfun)
    if previous_callback is not None and previous_callback[0] != 0:
        return previous_callback[0]
_char_callback_repository = {}
_callback_repositories.append(_char_callback_repository)
_glfw.glfwSetCharCallback.restype = _GLFWcharfun
_glfw.glfwSetCharCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                      _GLFWcharfun]
def set_char_callback(window, cbfun):
    """
    Sets the Unicode character callback.

    Returns the previously installed Python callback, if any.

    Wrapper for:
        GLFWcharfun glfwSetCharCallback(GLFWwindow* window, GLFWcharfun cbfun);
    """
    # c_void_p preserves the full pointer width; c_long is only 32 bits on
    # 64-bit Windows and would truncate the window address.
    window_addr = ctypes.cast(ctypes.pointer(window),
                              ctypes.POINTER(ctypes.c_void_p)).contents.value
    previous_callback = _char_callback_repository.get(window_addr)
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWcharfun(cbfun)
    # Retain both the Python callable and its ctypes wrapper so the wrapper
    # is not garbage-collected while GLFW still references it.
    _char_callback_repository[window_addr] = (cbfun, c_cbfun)
    _glfw.glfwSetCharCallback(window, c_cbfun)
    if previous_callback is not None and previous_callback[0] != 0:
        return previous_callback[0]
_mouse_button_callback_repository = {}
_callback_repositories.append(_mouse_button_callback_repository)
_glfw.glfwSetMouseButtonCallback.restype = _GLFWmousebuttonfun
_glfw.glfwSetMouseButtonCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                             _GLFWmousebuttonfun]
def set_mouse_button_callback(window, cbfun):
    """
    Sets the mouse button callback.

    Returns the previously installed Python callback, if any.

    Wrapper for:
        GLFWmousebuttonfun glfwSetMouseButtonCallback(GLFWwindow* window, GLFWmousebuttonfun cbfun);
    """
    # c_void_p preserves the full pointer width; c_long is only 32 bits on
    # 64-bit Windows and would truncate the window address.
    window_addr = ctypes.cast(ctypes.pointer(window),
                              ctypes.POINTER(ctypes.c_void_p)).contents.value
    previous_callback = _mouse_button_callback_repository.get(window_addr)
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWmousebuttonfun(cbfun)
    # Retain both the Python callable and its ctypes wrapper so the wrapper
    # is not garbage-collected while GLFW still references it.
    _mouse_button_callback_repository[window_addr] = (cbfun, c_cbfun)
    _glfw.glfwSetMouseButtonCallback(window, c_cbfun)
    if previous_callback is not None and previous_callback[0] != 0:
        return previous_callback[0]
_cursor_pos_callback_repository = {}
_callback_repositories.append(_cursor_pos_callback_repository)
_glfw.glfwSetCursorPosCallback.restype = _GLFWcursorposfun
_glfw.glfwSetCursorPosCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                           _GLFWcursorposfun]
def set_cursor_pos_callback(window, cbfun):
    """
    Sets the cursor position callback.

    Returns the previously installed Python callback, if any.

    Wrapper for:
        GLFWcursorposfun glfwSetCursorPosCallback(GLFWwindow* window, GLFWcursorposfun cbfun);
    """
    # c_void_p preserves the full pointer width; c_long is only 32 bits on
    # 64-bit Windows and would truncate the window address.
    window_addr = ctypes.cast(ctypes.pointer(window),
                              ctypes.POINTER(ctypes.c_void_p)).contents.value
    previous_callback = _cursor_pos_callback_repository.get(window_addr)
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWcursorposfun(cbfun)
    # Retain both the Python callable and its ctypes wrapper so the wrapper
    # is not garbage-collected while GLFW still references it.
    _cursor_pos_callback_repository[window_addr] = (cbfun, c_cbfun)
    _glfw.glfwSetCursorPosCallback(window, c_cbfun)
    if previous_callback is not None and previous_callback[0] != 0:
        return previous_callback[0]
_cursor_enter_callback_repository = {}
_callback_repositories.append(_cursor_enter_callback_repository)
_glfw.glfwSetCursorEnterCallback.restype = _GLFWcursorenterfun
_glfw.glfwSetCursorEnterCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                             _GLFWcursorenterfun]
def set_cursor_enter_callback(window, cbfun):
    """
    Sets the cursor enter/exit callback.

    Returns the previously installed Python callback, if any.

    Wrapper for:
        GLFWcursorenterfun glfwSetCursorEnterCallback(GLFWwindow* window, GLFWcursorenterfun cbfun);
    """
    # c_void_p preserves the full pointer width; c_long is only 32 bits on
    # 64-bit Windows and would truncate the window address.
    window_addr = ctypes.cast(ctypes.pointer(window),
                              ctypes.POINTER(ctypes.c_void_p)).contents.value
    previous_callback = _cursor_enter_callback_repository.get(window_addr)
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWcursorenterfun(cbfun)
    # Retain both the Python callable and its ctypes wrapper so the wrapper
    # is not garbage-collected while GLFW still references it.
    _cursor_enter_callback_repository[window_addr] = (cbfun, c_cbfun)
    _glfw.glfwSetCursorEnterCallback(window, c_cbfun)
    if previous_callback is not None and previous_callback[0] != 0:
        return previous_callback[0]
_scroll_callback_repository = {}
_callback_repositories.append(_scroll_callback_repository)
_glfw.glfwSetScrollCallback.restype = _GLFWscrollfun
_glfw.glfwSetScrollCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                        _GLFWscrollfun]
def set_scroll_callback(window, cbfun):
    """
    Sets the scroll callback.

    Returns the previously installed Python callback, if any.

    Wrapper for:
        GLFWscrollfun glfwSetScrollCallback(GLFWwindow* window, GLFWscrollfun cbfun);
    """
    # c_void_p preserves the full pointer width; c_long is only 32 bits on
    # 64-bit Windows and would truncate the window address.
    window_addr = ctypes.cast(ctypes.pointer(window),
                              ctypes.POINTER(ctypes.c_void_p)).contents.value
    previous_callback = _scroll_callback_repository.get(window_addr)
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWscrollfun(cbfun)
    # Retain both the Python callable and its ctypes wrapper so the wrapper
    # is not garbage-collected while GLFW still references it.
    _scroll_callback_repository[window_addr] = (cbfun, c_cbfun)
    _glfw.glfwSetScrollCallback(window, c_cbfun)
    if previous_callback is not None and previous_callback[0] != 0:
        return previous_callback[0]
_glfw.glfwJoystickPresent.restype = ctypes.c_int
_glfw.glfwJoystickPresent.argtypes = [ctypes.c_int]
def joystick_present(joy):
    """
    Returns whether the specified joystick is present.

    :param joy: joystick index (int)
    :return: non-zero int if present, 0 otherwise

    Wrapper for:
        int glfwJoystickPresent(int joy);
    """
    return _glfw.glfwJoystickPresent(joy)
_glfw.glfwGetJoystickAxes.restype = ctypes.POINTER(ctypes.c_float)
_glfw.glfwGetJoystickAxes.argtypes = [ctypes.c_int,
                                      ctypes.POINTER(ctypes.c_int)]
def get_joystick_axes(joy):
    """
    Returns the values of all axes of the specified joystick as a
    (float pointer, count) pair.
    Wrapper for:
        const float* glfwGetJoystickAxes(int joy, int* count);
    """
    n = ctypes.c_int(0)
    axes = _glfw.glfwGetJoystickAxes(joy, ctypes.byref(n))
    return axes, n.value
_glfw.glfwGetJoystickButtons.restype = ctypes.POINTER(ctypes.c_ubyte)
_glfw.glfwGetJoystickButtons.argtypes = [ctypes.c_int,
                                         ctypes.POINTER(ctypes.c_int)]
def get_joystick_buttons(joy):
    """
    Returns the state of all buttons of the specified joystick as a
    (byte pointer, count) pair.
    Wrapper for:
        const unsigned char* glfwGetJoystickButtons(int joy, int* count);
    """
    n = ctypes.c_int(0)
    buttons = _glfw.glfwGetJoystickButtons(joy, ctypes.byref(n))
    return buttons, n.value
_glfw.glfwGetJoystickName.restype = ctypes.c_char_p
_glfw.glfwGetJoystickName.argtypes = [ctypes.c_int]
def get_joystick_name(joy):
    """
    Returns the name of the specified joystick.

    NOTE: with c_char_p as restype this returns bytes on Python 3;
    callers may need to decode.

    Wrapper for:
        const char* glfwGetJoystickName(int joy);
    """
    return _glfw.glfwGetJoystickName(joy)
_glfw.glfwSetClipboardString.restype = None
_glfw.glfwSetClipboardString.argtypes = [ctypes.POINTER(_GLFWwindow),
                                         ctypes.c_char_p]
def set_clipboard_string(window, string):
    """
    Sets the clipboard to the specified string.

    The string is converted via _to_char_p before being handed to C.

    Wrapper for:
        void glfwSetClipboardString(GLFWwindow* window, const char* string);
    """
    _glfw.glfwSetClipboardString(window, _to_char_p(string))
_glfw.glfwGetClipboardString.restype = ctypes.c_char_p
_glfw.glfwGetClipboardString.argtypes = [ctypes.POINTER(_GLFWwindow)]
def get_clipboard_string(window):
    """
    Retrieves the contents of the clipboard as a string.

    NOTE: with c_char_p as restype this returns bytes on Python 3;
    callers may need to decode.

    Wrapper for:
        const char* glfwGetClipboardString(GLFWwindow* window);
    """
    return _glfw.glfwGetClipboardString(window)
_glfw.glfwGetTime.restype = ctypes.c_double
_glfw.glfwGetTime.argtypes = []
def get_time():
    """
    Returns the value of the GLFW timer.

    :return: elapsed time as a float (seconds)

    Wrapper for:
        double glfwGetTime(void);
    """
    return _glfw.glfwGetTime()
_glfw.glfwSetTime.restype = None
_glfw.glfwSetTime.argtypes = [ctypes.c_double]
def set_time(time):
    """
    Sets the GLFW timer.

    :param time: new timer value as a float (seconds)

    Wrapper for:
        void glfwSetTime(double time);
    """
    _glfw.glfwSetTime(time)
_glfw.glfwMakeContextCurrent.restype = None
_glfw.glfwMakeContextCurrent.argtypes = [ctypes.POINTER(_GLFWwindow)]
def make_context_current(window):
    """
    Makes the context of the specified window current for the calling
    thread.

    Wrapper for:
        void glfwMakeContextCurrent(GLFWwindow* window);
    """
    _glfw.glfwMakeContextCurrent(window)
_glfw.glfwGetCurrentContext.restype = ctypes.POINTER(_GLFWwindow)
_glfw.glfwGetCurrentContext.argtypes = []
def get_current_context():
    """
    Returns the window whose context is current on the calling thread.

    :return: POINTER(_GLFWwindow) handle

    Wrapper for:
        GLFWwindow* glfwGetCurrentContext(void);
    """
    return _glfw.glfwGetCurrentContext()
_glfw.glfwSwapBuffers.restype = None
_glfw.glfwSwapBuffers.argtypes = [ctypes.POINTER(_GLFWwindow)]
def swap_buffers(window):
    """
    Swaps the front and back buffers of the specified window.

    Wrapper for:
        void glfwSwapBuffers(GLFWwindow* window);
    """
    _glfw.glfwSwapBuffers(window)
_glfw.glfwSwapInterval.restype = None
_glfw.glfwSwapInterval.argtypes = [ctypes.c_int]
def swap_interval(interval):
    """
    Sets the swap interval for the current context.

    :param interval: number of screen updates to wait between swaps (int)

    Wrapper for:
        void glfwSwapInterval(int interval);
    """
    _glfw.glfwSwapInterval(interval)
_glfw.glfwExtensionSupported.restype = ctypes.c_int
_glfw.glfwExtensionSupported.argtypes = [ctypes.c_char_p]
def extension_supported(extension):
    """
    Returns whether the specified extension is available.

    The extension name is converted via _to_char_p before the C call.

    Wrapper for:
        int glfwExtensionSupported(const char* extension);
    """
    return _glfw.glfwExtensionSupported(_to_char_p(extension))
_glfw.glfwGetProcAddress.restype = ctypes.c_void_p
_glfw.glfwGetProcAddress.argtypes = [ctypes.c_char_p]
def get_proc_address(procname):
    """
    Returns the address of the specified function for the current
    context.

    :return: function address as an int, or None if not found

    Wrapper for:
        GLFWglproc glfwGetProcAddress(const char* procname);
    """
    return _glfw.glfwGetProcAddress(_to_char_p(procname))
if hasattr(_glfw, 'glfwSetDropCallback'):
    _window_drop_callback_repository = {}
    _callback_repositories.append(_window_drop_callback_repository)
    _glfw.glfwSetDropCallback.restype = _GLFWdropfun
    _glfw.glfwSetDropCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                          _GLFWdropfun]
    def set_drop_callback(window, cbfun):
        """
        Sets the file drop callback.

        The Python callback receives (window, paths) where paths is a list
        of str decoded from UTF-8.

        Returns the previously installed Python callback, if any.

        Wrapper for:
            GLFWdropfun glfwSetDropCallback(GLFWwindow* window, GLFWdropfun cbfun);
        """
        # c_void_p preserves the full pointer width; c_long is only 32 bits
        # on 64-bit Windows and would truncate the window address.
        window_addr = ctypes.cast(ctypes.pointer(window),
                                  ctypes.POINTER(ctypes.c_void_p)).contents.value
        previous_callback = _window_drop_callback_repository.get(window_addr)
        if cbfun is None:
            cbfun = 0
        else:
            def cb_wrapper(window, count, c_paths, cbfun=cbfun):
                # Convert the C array of UTF-8 strings into a Python list.
                paths = [c_paths[i].decode('utf-8') for i in range(count)]
                cbfun(window, paths)
            cbfun = cb_wrapper
        c_cbfun = _GLFWdropfun(cbfun)
        # Retain both the Python callable and its ctypes wrapper so the
        # wrapper is not garbage-collected while GLFW still references it.
        _window_drop_callback_repository[window_addr] = (cbfun, c_cbfun)
        _glfw.glfwSetDropCallback(window, c_cbfun)
        if previous_callback is not None and previous_callback[0] != 0:
            return previous_callback[0]
if hasattr(_glfw, 'glfwSetCharModsCallback'):
    _window_char_mods_callback_repository = {}
    _callback_repositories.append(_window_char_mods_callback_repository)
    _glfw.glfwSetCharModsCallback.restype = _GLFWcharmodsfun
    _glfw.glfwSetCharModsCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                              _GLFWcharmodsfun]
    def set_char_mods_callback(window, cbfun):
        """
        Sets the Unicode character with modifiers callback.

        Returns the previously installed Python callback, if any.

        Wrapper for:
            GLFWcharmodsfun glfwSetCharModsCallback(GLFWwindow* window, GLFWcharmodsfun cbfun);
        """
        # c_void_p preserves the full pointer width; c_long is only 32 bits
        # on 64-bit Windows and would truncate the window address.
        window_addr = ctypes.cast(ctypes.pointer(window),
                                  ctypes.POINTER(ctypes.c_void_p)).contents.value
        previous_callback = _window_char_mods_callback_repository.get(window_addr)
        if cbfun is None:
            cbfun = 0
        c_cbfun = _GLFWcharmodsfun(cbfun)
        # Retain both the Python callable and its ctypes wrapper so the
        # wrapper is not garbage-collected while GLFW still references it.
        _window_char_mods_callback_repository[window_addr] = (cbfun, c_cbfun)
        _glfw.glfwSetCharModsCallback(window, c_cbfun)
        if previous_callback is not None and previous_callback[0] != 0:
            return previous_callback[0]
if hasattr(_glfw, 'glfwVulkanSupported'):
    _glfw.glfwVulkanSupported.restype = ctypes.c_int
    _glfw.glfwVulkanSupported.argtypes = []
    def vulkan_supported():
        """
        Returns whether the Vulkan loader has been found.

        :return: bool

        Wrapper for:
            int glfwVulkanSupported(void);
        """
        return _glfw.glfwVulkanSupported() != 0
if hasattr(_glfw, 'glfwGetRequiredInstanceExtensions'):
    _glfw.glfwGetRequiredInstanceExtensions.restype = ctypes.POINTER(ctypes.c_char_p)
    _glfw.glfwGetRequiredInstanceExtensions.argtypes = [ctypes.POINTER(ctypes.c_uint32)]
    def get_required_instance_extensions():
        """
        Returns the Vulkan instance extensions required by GLFW as a list
        of str.
        Wrapper for:
            const char** glfwGetRequiredInstanceExtensions(uint32_t* count);
        """
        n = ctypes.c_uint32(0)
        c_extensions = _glfw.glfwGetRequiredInstanceExtensions(ctypes.byref(n))
        return [c_extensions[i].decode('utf-8') for i in range(n.value)]
if hasattr(_glfw, 'glfwGetTimerValue'):
    _glfw.glfwGetTimerValue.restype = ctypes.c_uint64
    _glfw.glfwGetTimerValue.argtypes = []
    def get_timer_value():
        """
        Returns the current value of the raw timer.

        :return: int

        Wrapper for:
            uint64_t glfwGetTimerValue(void);
        """
        return int(_glfw.glfwGetTimerValue())
if hasattr(_glfw, 'glfwGetTimerFrequency'):
    _glfw.glfwGetTimerFrequency.restype = ctypes.c_uint64
    _glfw.glfwGetTimerFrequency.argtypes = []
    def get_timer_frequency():
        """
        Returns the frequency, in Hz, of the raw timer.

        :return: int

        Wrapper for:
            uint64_t glfwGetTimerFrequency(void);
        """
        return int(_glfw.glfwGetTimerFrequency())
if hasattr(_glfw, 'glfwSetJoystickCallback'):
    _joystick_callback = None
    _glfw.glfwSetJoystickCallback.restype = _GLFWjoystickfun
    _glfw.glfwSetJoystickCallback.argtypes = [_GLFWjoystickfun]
    def set_joystick_callback(cbfun):
        """
        Sets the joystick configuration callback.

        Returns the previously installed Python callback, if any.

        Wrapper for:
            GLFWjoystickfun glfwSetJoystickCallback(GLFWjoystickfun cbfun);
        """
        global _joystick_callback
        # Bug fix: this previously read _error_callback, so the function
        # returned the wrong "previous" callback.
        previous_callback = _joystick_callback
        if cbfun is None:
            cbfun = 0
        c_cbfun = _GLFWjoystickfun(cbfun)
        # Retain both the Python callable and its ctypes wrapper so the
        # wrapper is not garbage-collected while GLFW still references it.
        _joystick_callback = (cbfun, c_cbfun)
        _glfw.glfwSetJoystickCallback(c_cbfun)
        if previous_callback is not None and previous_callback[0] != 0:
            return previous_callback[0]
if hasattr(_glfw, 'glfwGetKeyName'):
    _glfw.glfwGetKeyName.restype = ctypes.c_char_p
    _glfw.glfwGetKeyName.argtypes = [ctypes.c_int, ctypes.c_int]
    def get_key_name(key, scancode):
        """
        Returns the localized name of the specified printable key.

        :return: str, or None if the key has no name

        Wrapper for:
            const char* glfwGetKeyName(int key, int scancode);
        """
        key_name = _glfw.glfwGetKeyName(key, scancode)
        if key_name:
            return key_name.decode('utf-8')
        return None
if hasattr(_glfw, 'glfwCreateCursor'):
    _glfw.glfwCreateCursor.restype = ctypes.POINTER(_GLFWcursor)
    _glfw.glfwCreateCursor.argtypes = [ctypes.POINTER(_GLFWimage),
                                       ctypes.c_int,
                                       ctypes.c_int]
    def create_cursor(image, xhot, yhot):
        """
        Creates a custom cursor.

        :param image: cursor image; converted to a _GLFWimage via its
            wrap() method
        :param xhot: hotspot x coordinate (int)
        :param yhot: hotspot y coordinate (int)
        :return: POINTER(_GLFWcursor) handle

        Wrapper for:
            GLFWcursor* glfwCreateCursor(const GLFWimage* image, int xhot, int yhot);
        """
        c_image = _GLFWimage()
        c_image.wrap(image)
        return _glfw.glfwCreateCursor(ctypes.pointer(c_image), xhot, yhot)
if hasattr(_glfw, 'glfwCreateStandardCursor'):
    _glfw.glfwCreateStandardCursor.restype = ctypes.POINTER(_GLFWcursor)
    _glfw.glfwCreateStandardCursor.argtypes = [ctypes.c_int]
    def create_standard_cursor(shape):
        """
        Creates a cursor with a standard shape.

        :param shape: standard cursor shape token (int)
        :return: POINTER(_GLFWcursor) handle

        Wrapper for:
            GLFWcursor* glfwCreateStandardCursor(int shape);
        """
        return _glfw.glfwCreateStandardCursor(shape)
if hasattr(_glfw, 'glfwDestroyCursor'):
    _glfw.glfwDestroyCursor.restype = None
    _glfw.glfwDestroyCursor.argtypes = [ctypes.POINTER(_GLFWcursor)]
    def destroy_cursor(cursor):
        """
        Destroys a cursor.

        :param cursor: POINTER(_GLFWcursor) handle to destroy

        Wrapper for:
            void glfwDestroyCursor(GLFWcursor* cursor);
        """
        _glfw.glfwDestroyCursor(cursor)
if hasattr(_glfw, 'glfwSetCursor'):
    _glfw.glfwSetCursor.restype = None
    _glfw.glfwSetCursor.argtypes = [ctypes.POINTER(_GLFWwindow),
                                    ctypes.POINTER(_GLFWcursor)]
    def set_cursor(window, cursor):
        """
        Sets the cursor for the window.

        :param cursor: POINTER(_GLFWcursor) handle to use

        Wrapper for:
            void glfwSetCursor(GLFWwindow* window, GLFWcursor* cursor);
        """
        _glfw.glfwSetCursor(window, cursor)
if hasattr(_glfw, 'glfwCreateWindowSurface'):
    _glfw.glfwCreateWindowSurface.restype = ctypes.c_int
    _glfw.glfwCreateWindowSurface.argtypes = [ctypes.c_void_p,
                                              ctypes.POINTER(_GLFWwindow),
                                              ctypes.c_void_p,
                                              ctypes.c_void_p]
    def create_window_surface(instance, window, allocator, surface):
        """
        Creates a Vulkan surface for the specified window.

        :return: VkResult code as an int

        Wrapper for:
            VkResult glfwCreateWindowSurface(VkInstance instance, GLFWwindow* window, const VkAllocationCallbacks* allocator, VkSurfaceKHR* surface);
        """
        return _glfw.glfwCreateWindowSurface(instance, window, allocator, surface)
if hasattr(_glfw, 'glfwGetPhysicalDevicePresentationSupport'):
    _glfw.glfwGetPhysicalDevicePresentationSupport.restype = ctypes.c_int
    _glfw.glfwGetPhysicalDevicePresentationSupport.argtypes = [ctypes.c_void_p,
                                                               ctypes.c_void_p,
                                                               ctypes.c_uint32]
    def get_physical_device_presentation_support(instance, device, queuefamily):
        """
        Returns whether the specified queue family of the given physical
        device supports presentation.

        (The previous docstring, "Creates a Vulkan surface", was a
        copy-paste error from create_window_surface.)

        Wrapper for:
            int glfwGetPhysicalDevicePresentationSupport(VkInstance instance, VkPhysicalDevice device, uint32_t queuefamily);
        """
        return _glfw.glfwGetPhysicalDevicePresentationSupport(instance, device, queuefamily)
if hasattr(_glfw, 'glfwGetInstanceProcAddress'):
    _glfw.glfwGetInstanceProcAddress.restype = ctypes.c_void_p
    _glfw.glfwGetInstanceProcAddress.argtypes = [ctypes.c_void_p,
                                                 ctypes.c_char_p]
    def get_instance_proc_address(instance, procname):
        """
        Returns the address of the specified Vulkan instance function.

        :return: function address as an int, or None if not found

        Wrapper for:
            GLFWvkproc glfwGetInstanceProcAddress(VkInstance instance, const char* procname);
        """
        return _glfw.glfwGetInstanceProcAddress(instance, procname)
if hasattr(_glfw, 'glfwSetWindowIcon'):
    _glfw.glfwSetWindowIcon.restype = None
    _glfw.glfwSetWindowIcon.argtypes = [ctypes.POINTER(_GLFWwindow),
                                        ctypes.c_int,
                                        ctypes.POINTER(_GLFWimage)]
    def set_window_icon(window, count, image):
        """
        Sets the icon for the specified window.

        NOTE(review): only a single image is wrapped and passed, so count
        values other than 1 would read past the one-element array — confirm
        callers always pass count == 1.

        Wrapper for:
            void glfwSetWindowIcon(GLFWwindow* window, int count, const GLFWimage* images);
        """
        _image = _GLFWimage()
        _image.wrap(image)
        _glfw.glfwSetWindowIcon(window, count, ctypes.pointer(_image))
if hasattr(_glfw, 'glfwSetWindowSizeLimits'):
    _glfw.glfwSetWindowSizeLimits.restype = None
    _glfw.glfwSetWindowSizeLimits.argtypes = [ctypes.POINTER(_GLFWwindow),
                                              ctypes.c_int, ctypes.c_int,
                                              ctypes.c_int, ctypes.c_int]
    def set_window_size_limits(window, minwidth, minheight, maxwidth, maxheight):
        """
        Sets the size limits of the specified window.
        Wrapper for:
            void glfwSetWindowSizeLimits(GLFWwindow* window, int minwidth, int minheight, int maxwidth, int maxheight);
        """
        limits = (minwidth, minheight, maxwidth, maxheight)
        _glfw.glfwSetWindowSizeLimits(window, *limits)
if hasattr(_glfw, 'glfwSetWindowAspectRatio'):
    _glfw.glfwSetWindowAspectRatio.restype = None
    _glfw.glfwSetWindowAspectRatio.argtypes = [ctypes.POINTER(_GLFWwindow),
                                               ctypes.c_int, ctypes.c_int]
    def set_window_aspect_ratio(window, numer, denom):
        """
        Sets the aspect ratio of the specified window.

        :param numer: ratio numerator (int)
        :param denom: ratio denominator (int)

        Wrapper for:
            void glfwSetWindowAspectRatio(GLFWwindow* window, int numer, int denom);
        """
        _glfw.glfwSetWindowAspectRatio(window, numer, denom)
if hasattr(_glfw, 'glfwGetWindowFrameSize'):
    _glfw.glfwGetWindowFrameSize.restype = None
    _glfw.glfwGetWindowFrameSize.argtypes = [ctypes.POINTER(_GLFWwindow),
                                             ctypes.POINTER(ctypes.c_int),
                                             ctypes.POINTER(ctypes.c_int),
                                             ctypes.POINTER(ctypes.c_int),
                                             ctypes.POINTER(ctypes.c_int)]
    def set_get_window_frame_size(window):
        """
        Retrieves the size of the frame of the window as a
        (left, top, right, bottom) tuple of ints.
        NOTE(review): the "set_" prefix is a misnomer (this is a getter);
        the name is kept for backward compatibility with existing callers.
        Wrapper for:
            void glfwGetWindowFrameSize(GLFWwindow* window, int* left, int* top, int* right, int* bottom);
        """
        left = ctypes.c_int(0)
        top = ctypes.c_int(0)
        right = ctypes.c_int(0)
        bottom = ctypes.c_int(0)
        _glfw.glfwGetWindowFrameSize(window, ctypes.byref(left),
                                     ctypes.byref(top), ctypes.byref(right),
                                     ctypes.byref(bottom))
        return left.value, top.value, right.value, bottom.value
if hasattr(_glfw, 'glfwMaximizeWindow'):
    _glfw.glfwMaximizeWindow.restype = None
    _glfw.glfwMaximizeWindow.argtypes = [ctypes.POINTER(_GLFWwindow)]
    def maximize_window(window):
        """
        Maximizes the specified window.

        Wrapper for:
            void glfwMaximizeWindow(GLFWwindow* window);
        """
        _glfw.glfwMaximizeWindow(window)
if hasattr(_glfw, 'glfwFocusWindow'):
    _glfw.glfwFocusWindow.restype = None
    _glfw.glfwFocusWindow.argtypes = [ctypes.POINTER(_GLFWwindow)]
    def focus_window(window):
        """
        Brings the specified window to front and sets input focus.

        Wrapper for:
            void glfwFocusWindow(GLFWwindow* window);
        """
        _glfw.glfwFocusWindow(window)
if hasattr(_glfw, 'glfwSetWindowMonitor'):
    _glfw.glfwSetWindowMonitor.restype = None
    _glfw.glfwSetWindowMonitor.argtypes = [ctypes.POINTER(_GLFWwindow),
                                           ctypes.POINTER(_GLFWmonitor),
                                           ctypes.c_int, ctypes.c_int,
                                           ctypes.c_int, ctypes.c_int,
                                           ctypes.c_int]
    def set_window_monitor(window, monitor, xpos, ypos, width, height,
                           refresh_rate):
        """
        Sets the mode, monitor, video mode and placement of a window.
        Wrapper for:
            void glfwSetWindowMonitor(GLFWwindow* window, GLFWmonitor* monitor, int xpos, int ypos, int width, int height, int refreshRate);
        """
        call_args = (window, monitor, xpos, ypos, width, height, refresh_rate)
        _glfw.glfwSetWindowMonitor(*call_args)
if hasattr(_glfw, 'glfwWaitEventsTimeout'):
    _glfw.glfwWaitEventsTimeout.restype = None
    _glfw.glfwWaitEventsTimeout.argtypes = [ctypes.c_double]
    def wait_events_timeout(timeout):
        """
        Waits with timeout until events are queued and processes them.

        :param timeout: maximum time to wait, as a float (seconds)

        Wrapper for:
            void glfwWaitEventsTimeout(double timeout);
        """
        _glfw.glfwWaitEventsTimeout(timeout)
if hasattr(_glfw, 'glfwPostEmptyEvent'):
    _glfw.glfwPostEmptyEvent.restype = None
    _glfw.glfwPostEmptyEvent.argtypes = []
    def post_empty_event():
        """
        Posts an empty event to the event queue.

        Useful to wake up a thread blocked in wait_events().

        Wrapper for:
            void glfwPostEmptyEvent();
        """
        _glfw.glfwPostEmptyEvent()
# Final module-level setup step; presumably installs errcheck hooks on the
# wrapped GLFW functions (defined earlier in this file) — see its definition.
_prepare_errcheck()
|
tedder/ansible | refs/heads/devel | lib/ansible/modules/cloud/azure/azure_rm_cdnprofile_facts.py | 9 | #!/usr/bin/python
#
# Copyright (c) 2018 Hai Cao, <t-haicao@microsoft.com>, Yunge Zhu <yungez@microsoft.com>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
# Standard Ansible module metadata: preview status, community supported.
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}
DOCUMENTATION = '''
---
module: azure_rm_cdnprofile_facts
version_added: "2.8"
short_description: Get Azure CDN profile facts
description:
- Get facts for a specific Azure CDN profile or all CDN profiles.
options:
name:
description:
- Limit results to a specific CDN profile.
resource_group:
description:
- The resource group to search for the desired CDN profile
tags:
description:
- Limit results by providing a list of tags. Format tags as 'key' or 'key:value'.
extends_documentation_fragment:
- azure
author:
- "Hai Cao (@caohai) <t-haicao@microsoft.com>"
- "Yunge Zhu (@yungezz) <yungez@microsoft.com>"
'''
EXAMPLES = '''
- name: Get facts for one CDN profile
azure_rm_cdnprofile_facts:
name: Testing
resource_group: TestRG
- name: Get facts for all CDN profiles
azure_rm_cdnprofile_facts:
- name: Get facts by tags
azure_rm_cdnprofile_facts:
tags:
- Environment:Test
'''
RETURN = '''
cdnprofiles:
description: List of CDN profiles.
returned: always
type: complex
contains:
resource_group:
description:
- Name of a resource group where the CDN profile exists.
returned: always
type: str
sample: testGroup
name:
description:
- Name of the CDN profile.
returned: always
type: str
sample: Testing
location:
description:
- Location of the CDN profile.
type: str
sample: WestUS
id:
description:
- ID of the CDN profile.
type: str
sample: /subscriptions/XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX/resourcegroups/cdntest/providers/Microsoft.Cdn/profiles/cdntest
provisioning_state:
description:
- Provisioning status of the profile.
type: str
sample: Succeeded
resource_state:
description:
- Resource status of the profile.
type: str
sample: Active
sku:
description:
- The pricing tier, defines a CDN provider, feature list and rate of the CDN profile.
type: str
sample: standard_verizon
type:
description:
- The type of the CDN profile.
type: str
sample: Microsoft.Cdn/profiles
tags:
description:
- The tags of the CDN profile.
type: list
sample: [
{"foo": "bar"}
]
'''
from ansible.module_utils.azure_rm_common import AzureRMModuleBase
# The Azure SDK may be missing on the control node; import errors are
# deliberately swallowed because AzureRMModuleBase reports missing
# dependencies with a proper module failure message.
try:
    from azure.mgmt.cdn.models import ErrorResponseException
    from azure.common import AzureHttpError
    from azure.mgmt.cdn import CdnManagementClient
except Exception:
    # handled in azure_rm_common
    pass
import re
# Object-class label passed as the second argument to the base class's
# serialize_obj() when flattening SDK profile objects.
AZURE_OBJECT_CLASS = 'profiles'
class AzureRMCdnprofileFacts(AzureRMModuleBase):
    """Facts module that collects Azure CDN profiles.

    Depending on the supplied parameters it returns a single profile,
    every profile in a resource group, or every profile in the
    subscription, optionally filtered by tags.
    """

    def __init__(self):
        # Argument spec consumed by AzureRMModuleBase; exec_module() copies
        # each of these keys onto the instance.
        self.module_args = {
            'name': {'type': 'str'},
            'resource_group': {'type': 'str'},
            'tags': {'type': 'list'},
        }
        # A facts module never changes state, so 'changed' stays False.
        self.results = {
            'changed': False,
            'cdnprofiles': [],
        }
        self.name = None
        self.resource_group = None
        self.tags = None
        self.cdn_client = None
        super(AzureRMCdnprofileFacts, self).__init__(
            derived_arg_spec=self.module_args,
            supports_tags=False,
            facts_module=True
        )

    def exec_module(self, **kwargs):
        """Entry point invoked by the base class after argument parsing."""
        for key in self.module_args:
            setattr(self, key, kwargs[key])
        self.cdn_client = self.get_cdn_client()
        # A bare name is ambiguous without knowing which group to look in.
        if self.name and not self.resource_group:
            self.fail("Parameter error: resource group required when filtering by name.")
        if self.name:
            profiles = self.get_item()
        elif self.resource_group:
            profiles = self.list_resource_group()
        else:
            profiles = self.list_all()
        self.results['cdnprofiles'] = profiles
        return self.results

    def get_item(self):
        """Return a one-element list with the named profile, or [] if absent."""
        self.log('Get properties for {0}'.format(self.name))
        profile = None
        try:
            profile = self.cdn_client.profiles.get(
                self.resource_group, self.name)
        except ErrorResponseException:
            # A missing profile is not an error for a facts module.
            pass
        if profile and self.has_tags(profile.tags, self.tags):
            return [self.serialize_cdnprofile(profile)]
        return []

    def list_resource_group(self):
        """Return every matching profile within self.resource_group."""
        self.log('List all Azure CDNs within a resource group')
        try:
            response = self.cdn_client.profiles.list_by_resource_group(
                self.resource_group)
        except AzureHttpError as exc:
            self.fail('Failed to list all items - {0}'.format(str(exc)))
        return [self.serialize_cdnprofile(profile)
                for profile in response
                if self.has_tags(profile.tags, self.tags)]

    def list_all(self):
        """Return every matching profile within the subscription."""
        self.log('List all CDN profiles within a subscription')
        try:
            response = self.cdn_client.profiles.list()
        except Exception as exc:
            self.fail("Error listing all items - {0}".format(str(exc)))
        return [self.serialize_cdnprofile(profile)
                for profile in response
                if self.has_tags(profile.tags, self.tags)]

    def serialize_cdnprofile(self, cdnprofile):
        """Flatten a CDN profile SDK object into the documented fact dict.

        :param cdnprofile: CDN profile object from the Azure SDK
        :return: dict with the fields described in RETURN
        """
        serialized = self.serialize_obj(cdnprofile, AZURE_OBJECT_CLASS)
        # The resource group is not a first-class attribute on the SDK object,
        # so carve it out of the middle of the resource id.
        resource_group = re.sub(r'\/.*', '', re.sub(r'.*resourcegroups\/', '', serialized['id']))
        return {
            'id': cdnprofile.id,
            'resource_group': resource_group,
            'name': cdnprofile.name,
            'type': cdnprofile.type,
            'location': cdnprofile.location,
            'resource_state': cdnprofile.resource_state,
            'sku': cdnprofile.sku.name,
            'provisioning_state': cdnprofile.provisioning_state,
            'tags': cdnprofile.tags,
        }

    def get_cdn_client(self):
        """Lazily create and cache the CDN management client."""
        if not self.cdn_client:
            self.cdn_client = self.get_mgmt_svc_client(CdnManagementClient,
                                                       base_url=self._cloud_environment.endpoints.resource_manager,
                                                       api_version='2017-04-02')
        return self.cdn_client
def main():
    """Main module execution code path"""
    # Instantiating the facts class runs the module: the AzureRMModuleBase
    # constructor parses arguments and dispatches to exec_module().
    AzureRMCdnprofileFacts()
if __name__ == '__main__':
    main()
#!/usr/bin/env python
# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
# Searches for libraries and/or object files on the specified path and
# merges them into a single library.
import subprocess
import sys
if __name__ == '__main__':
    # Merge every static library / object file found under <search_path>
    # into the single archive <output_lib>, per-platform.
    if len(sys.argv) != 3:
        sys.stderr.write('Usage: ' + sys.argv[0] + ' <search_path> <output_lib>\n')
        sys.exit(2)
    search_path = sys.argv[1]
    output_lib = sys.argv[2]

    # 'import subprocess' at the top of the file already provides these names;
    # the former mid-script 'from subprocess import call, PIPE' was redundant,
    # so bind local aliases instead of re-importing.
    call = subprocess.call
    PIPE = subprocess.PIPE

    # NOTE(review): every command below runs through the shell with paths taken
    # straight from argv. Acceptable for a trusted build script; do not reuse
    # this pattern with untrusted input.
    if sys.platform.startswith('linux'):
        call(["rm -f " + output_lib], shell=True)
        # Drop intermediates that must not end up in the merged archive.
        call(["rm -rf " + search_path + "/obj.target/*do_not_use*"], shell=True)
        call(["ar crs " + output_lib + " $(find " + search_path +
              "/obj.target -name *\.o)"], shell=True)
        call(["ar crs " + output_lib + " $(find " + search_path +
              "/obj/gen -name *\.o)"], shell=True)
    elif sys.platform == 'darwin':
        call(["rm -f " + output_lib], shell=True)
        call(["rm -f " + search_path + "/*do_not_use*"], shell=True)
        call(["libtool -static -v -o " + output_lib + " " + search_path + "/*.a"],
             shell=True)
    elif sys.platform == 'win32':
        # We need to execute a batch file to set some environment variables for
        # the lib command. VS 8 uses vsvars.bat and VS 9 uses vsvars32.bat. It's
        # required that at least one of them is in the system PATH. We try both
        # and suppress stderr and stdout to fail silently.
        call(["vsvars.bat"], stderr=PIPE, stdout=PIPE, shell=True)
        call(["vsvars32.bat"], stderr=PIPE, stdout=PIPE, shell=True)
        call(["del " + output_lib], shell=True)
        call(["del /F /S /Q " + search_path + "/lib/*do_not_use*"],
             shell=True)
        call(["lib /OUT:" + output_lib + " " + search_path + "/lib/*.lib"],
             shell=True)
    else:
        sys.stderr.write('Platform not supported: %r\n\n' % sys.platform)
        sys.exit(1)
    sys.exit(0)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.