repo_name (stringlengths 5–100) | ref (stringlengths 12–67) | path (stringlengths 4–244) | copies (stringlengths 1–8) | content (stringlengths 0–1.05M, ⌀)
---|---|---|---|---
rwl/openpowersystem | refs/heads/master | cpsm/load_model/non_conform_load_schedule.py | 1 | #------------------------------------------------------------------------------
# Copyright (C) 2009 Richard Lincoln
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation; version 2 dated June, 1991.
#
# This software is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#------------------------------------------------------------------------------
""" An active power (Y1-axis) and reactive power (Y2-axis) schedule (curves) versus time (X-axis) for non-conforming loads, e.g., large industrial load or power station service (where modeled)
"""
# <<< imports
# @generated
from cpsm.load_model.season_day_type_schedule import SeasonDayTypeSchedule
from cpsm.load_model.non_conform_load_group import NonConformLoadGroup
from google.appengine.ext import db
# >>> imports
class NonConformLoadSchedule(SeasonDayTypeSchedule):
""" An active power (Y1-axis) and reactive power (Y2-axis) schedule (curves) versus time (X-axis) for non-conforming loads, e.g., large industrial load or power station service (where modeled)
"""
# <<< non_conform_load_schedule.attributes
# @generated
# >>> non_conform_load_schedule.attributes
# <<< non_conform_load_schedule.references
# @generated
# The NonConformLoadGroup where the NonConformLoadSchedule belongs.
non_conform_load_group = db.ReferenceProperty(NonConformLoadGroup,
collection_name="non_conform_load_schedules")
# >>> non_conform_load_schedule.references
# <<< non_conform_load_schedule.operations
# @generated
# >>> non_conform_load_schedule.operations
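# A minimal usage sketch (illustrative only; `my_group` is an assumed,
# pre-existing NonConformLoadGroup entity, not part of this module):
#
#   schedule = NonConformLoadSchedule(non_conform_load_group=my_group)
#   schedule.put()
#   my_group.non_conform_load_schedules.fetch(10)  # back-reference query via collection_name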
# EOF -------------------------------------------------------------------------
|
degs098/python-social-auth | refs/heads/master | examples/django_me_example/manage.py | 126 | #!/usr/bin/env python
import os
import sys
if __name__ == '__main__':
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'example.settings')
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
roopali8/tempest | refs/heads/master | tempest/api/messaging/test_claims.py | 11 | # Copyright (c) 2014 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from six.moves.urllib import parse as urlparse
from tempest_lib.common.utils import data_utils
from tempest_lib import decorators
from tempest.api.messaging import base
from tempest import config
from tempest import test
LOG = logging.getLogger(__name__)
CONF = config.CONF
class TestClaims(base.BaseMessagingTest):
@classmethod
def resource_setup(cls):
super(TestClaims, cls).resource_setup()
cls.queue_name = data_utils.rand_name('Queues-Test')
# Create Queue
cls.create_queue(cls.queue_name)
def _post_and_claim_messages(self, queue_name, repeat=1):
# Post Messages
message_body = self.generate_message_body(repeat=repeat)
        self.client.post_messages(queue_name=queue_name,
                                  rbody=message_body)
# Post Claim
claim_ttl = data_utils.rand_int_id(start=60,
end=CONF.messaging.max_claim_ttl)
        claim_grace = data_utils.rand_int_id(start=60,
                                             end=CONF.messaging.max_claim_grace)
claim_body = {"ttl": claim_ttl, "grace": claim_grace}
        resp, body = self.client.post_claims(queue_name=queue_name,
                                             rbody=claim_body)
return resp, body
@test.attr(type='smoke')
@test.idempotent_id('936cb1ca-b7af-44dd-a752-805e8c98156f')
def test_post_claim(self):
_, body = self._post_and_claim_messages(queue_name=self.queue_name)
claimed_message_uri = body[0]['href']
# Skipping this step till bug-1331517 is fixed
# Get posted claim
# self.client.query_claim(claimed_message_uri)
# Delete Claimed message
self.client.delete_messages(claimed_message_uri)
@decorators.skip_because(bug="1331517")
@test.attr(type='smoke')
@test.idempotent_id('84e491f4-68c6-451f-9846-b8f868eb27c5')
def test_query_claim(self):
# Post a Claim
resp, body = self._post_and_claim_messages(queue_name=self.queue_name)
# Query Claim
claim_uri = resp['location']
self.client.query_claim(claim_uri)
# Delete Claimed message
claimed_message_uri = body[0]['href']
        self.client.delete_messages(claimed_message_uri)
@decorators.skip_because(bug="1328111")
@test.attr(type='smoke')
@test.idempotent_id('420ef0c5-9bd6-4b82-b06d-d9da330fefd3')
def test_update_claim(self):
# Post a Claim
resp, body = self._post_and_claim_messages(queue_name=self.queue_name)
claim_uri = resp['location']
claimed_message_uri = body[0]['href']
# Update Claim
claim_ttl = data_utils.rand_int_id(start=60,
end=CONF.messaging.max_claim_ttl)
update_rbody = {"ttl": claim_ttl}
self.client.update_claim(claim_uri, rbody=update_rbody)
        # Verify updated claim ttl >= requested ttl value
_, body = self.client.query_claim(claim_uri)
updated_claim_ttl = body["ttl"]
        self.assertGreaterEqual(updated_claim_ttl, claim_ttl)
# Delete Claimed message
self.client.delete_messages(claimed_message_uri)
@test.attr(type='smoke')
@test.idempotent_id('fd4c7921-cb3f-4ed8-9ac8-e8f1e74c44aa')
def test_release_claim(self):
# Post a Claim
resp, body = self._post_and_claim_messages(queue_name=self.queue_name)
claim_uri = resp['location']
# Release Claim
self.client.release_claim(claim_uri)
# Delete Claimed message
# This will implicitly verify that the claim is deleted.
message_uri = urlparse.urlparse(claim_uri).path
self.client.delete_messages(message_uri)
@classmethod
def resource_cleanup(cls):
cls.delete_queue(cls.queue_name)
super(TestClaims, cls).resource_cleanup()
|
amith01994/intellij-community | refs/heads/master | python/lib/Lib/site-packages/django/contrib/gis/tests/utils.py | 397 | from django.conf import settings
from django.db import DEFAULT_DB_ALIAS
# function that will pass a test.
def pass_test(*args): return
def no_backend(test_func, backend):
"Use this decorator to disable test on specified backend."
if settings.DATABASES[DEFAULT_DB_ALIAS]['ENGINE'].rsplit('.')[-1] == backend:
return pass_test
else:
return test_func
# Decorators to disable entire test functions for specific
# spatial backends.
def no_oracle(func): return no_backend(func, 'oracle')
def no_postgis(func): return no_backend(func, 'postgis')
def no_mysql(func): return no_backend(func, 'mysql')
def no_spatialite(func): return no_backend(func, 'spatialite')
# Shortcut booleans to omit only portions of tests.
_default_db = settings.DATABASES[DEFAULT_DB_ALIAS]['ENGINE'].rsplit('.')[-1]
oracle = _default_db == 'oracle'
postgis = _default_db == 'postgis'
mysql = _default_db == 'mysql'
spatialite = _default_db == 'spatialite'
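# Minimal usage sketch (hypothetical test method name, for illustration only):
#
#   @no_oracle
#   def test_geometry_union(self):
#       ...  # swapped for pass_test when the default backend is Oracle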
|
MartinEnder/erpnext-de | refs/heads/develop | erpnext/controllers/item_variant.py | 10 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.utils import cstr, flt
import json
class ItemVariantExistsError(frappe.ValidationError): pass
class InvalidItemAttributeValueError(frappe.ValidationError): pass
class ItemTemplateCannotHaveStock(frappe.ValidationError): pass
@frappe.whitelist()
def get_variant(item, args):
"""Validates Attributes and their Values, then looks for an exactly matching Item Variant
:param item: Template Item
:param args: A dictionary with "Attribute" as key and "Attribute Value" as value
"""
if isinstance(args, basestring):
args = json.loads(args)
if not args:
frappe.throw(_("Please specify at least one attribute in the Attributes table"))
validate_item_variant_attributes(item, args)
return find_variant(item, args)
def validate_item_variant_attributes(item, args):
attribute_values = {}
for t in frappe.get_all("Item Attribute Value", fields=["parent", "attribute_value"],
filters={"parent": ["in", args.keys()]}):
(attribute_values.setdefault(t.parent, [])).append(t.attribute_value)
numeric_attributes = frappe._dict((t.attribute, t) for t in \
frappe.db.sql("""select attribute, from_range, to_range, increment from `tabItem Variant Attribute`
where parent = %s and numeric_values=1""", (item), as_dict=1))
for attribute, value in args.items():
if attribute in numeric_attributes:
numeric_attribute = numeric_attributes[attribute]
from_range = numeric_attribute.from_range
to_range = numeric_attribute.to_range
increment = numeric_attribute.increment
if increment == 0:
# defensive validation to prevent ZeroDivisionError
frappe.throw(_("Increment for Attribute {0} cannot be 0").format(attribute))
is_in_range = from_range <= flt(value) <= to_range
precision = max(len(cstr(v).split(".")[-1].rstrip("0")) for v in (value, increment))
#avoid precision error by rounding the remainder
remainder = flt((flt(value) - from_range) % increment, precision)
            is_incremental = remainder == 0 or remainder == increment
if not (is_in_range and is_incremental):
frappe.throw(_("Value for Attribute {0} must be within the range of {1} to {2} in the increments of {3}")\
.format(attribute, from_range, to_range, increment), InvalidItemAttributeValueError)
elif value not in attribute_values.get(attribute, []):
frappe.throw(_("Value {0} for Attribute {1} does not exist in the list of valid Item Attribute Values").format(
value, attribute))
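# Why the remainder above is rounded before comparison (illustrative, plain
# floats): 0.3 % 0.1 evaluates to 0.09999999999999998 under IEEE-754, not 0.0;
# rounding it to the combined precision of value and increment (1 digit here)
# gives 0.1 == increment, so the value is accepted as a valid step.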
def find_variant(item, args):
conditions = ["""(iv_attribute.attribute="{0}" and iv_attribute.attribute_value="{1}")"""\
.format(frappe.db.escape(key), frappe.db.escape(cstr(value))) for key, value in args.items()]
conditions = " or ".join(conditions)
# use approximate match and shortlist possible variant matches
# it is approximate because we are matching using OR condition
# and it need not be exact match at this stage
# this uses a simpler query instead of using multiple exists conditions
possible_variants = frappe.db.sql_list("""select name from `tabItem` item
where variant_of=%s and exists (
select name from `tabItem Variant Attribute` iv_attribute
where iv_attribute.parent=item.name
and ({conditions})
)""".format(conditions=conditions), item)
for variant in possible_variants:
variant = frappe.get_doc("Item", variant)
if len(args.keys()) == len(variant.get("attributes")):
# has the same number of attributes and values
# assuming no duplication as per the validation in Item
match_count = 0
for attribute, value in args.items():
for row in variant.attributes:
if row.attribute==attribute and row.attribute_value== cstr(value):
# this row matches
match_count += 1
break
if match_count == len(args.keys()):
return variant.name
@frappe.whitelist()
def create_variant(item, args):
if isinstance(args, basestring):
args = json.loads(args)
template = frappe.get_doc("Item", item)
variant = frappe.new_doc("Item")
variant_attributes = []
for d in template.attributes:
variant_attributes.append({
"attribute": d.attribute,
"attribute_value": args.get(d.attribute)
})
variant.set("attributes", variant_attributes)
copy_attributes_to_variant(template, variant)
make_variant_item_code(template, variant)
return variant
def copy_attributes_to_variant(item, variant):
from frappe.model import no_value_fields
for field in item.meta.fields:
if field.fieldtype not in no_value_fields and (not field.no_copy)\
and field.fieldname not in ("item_code", "item_name", "show_in_website"):
if variant.get(field.fieldname) != item.get(field.fieldname):
variant.set(field.fieldname, item.get(field.fieldname))
variant.variant_of = item.name
variant.has_variants = 0
if variant.attributes:
variant.description += "\n"
for d in variant.attributes:
variant.description += "<p>" + d.attribute + ": " + cstr(d.attribute_value) + "</p>"
def make_variant_item_code(template, variant):
"""Uses template's item code and abbreviations to make variant's item code"""
if variant.item_code:
return
abbreviations = []
for attr in variant.attributes:
item_attribute = frappe.db.sql("""select i.numeric_values, v.abbr
from `tabItem Attribute` i left join `tabItem Attribute Value` v
on (i.name=v.parent)
where i.name=%(attribute)s and v.attribute_value=%(attribute_value)s""", {
"attribute": attr.attribute,
"attribute_value": attr.attribute_value
}, as_dict=True)
if not item_attribute:
# somehow an invalid item attribute got used
return
if item_attribute[0].numeric_values:
# don't generate item code if one of the attributes is numeric
return
abbreviations.append(item_attribute[0].abbr)
if abbreviations:
variant.item_code = "{0}-{1}".format(template.item_code, "-".join(abbreviations))
if variant.item_code:
variant.item_name = variant.item_code
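# Illustrative outcome (assumed data, not from this codebase): a template with
# item_code "TSHIRT" and attribute abbreviations "RED" and "L" yields the
# variant item_code "TSHIRT-RED-L", which is then copied to item_name.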
|
AndyKrivovjas/notes | refs/heads/master | app/category/serializers.py | 1 | from datetime import datetime
from app.users.models import User
from .models import Category
from rest_framework import serializers
class CategorySerializer(serializers.ModelSerializer):
def __init__(self, *args, **kwargs):
super(serializers.ModelSerializer, self).__init__(*args, **kwargs)
@staticmethod
def add(owner, validated_data):
category = Category(name=validated_data['name'], owner=owner, date_added=datetime.now(), date_modified=datetime.now())
category.parent_id = validated_data.get('parent_id') or 0
category.save()
return category
@staticmethod
def update(category, validated_data):
if validated_data.get('name'):
category.name = validated_data.get('name')
if validated_data.get('parent_id'):
category.parent_id = validated_data.get('parent_id')
category.date_modified = datetime.now()
category.save()
return category
class Meta:
model = Category
fields = ('id', 'name', 'parent_id', 'date_added', 'date_modified', )
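# Minimal usage sketch (assumes an existing `owner` User instance):
#
#   category = CategorySerializer.add(owner, {'name': 'Work'})  # parent_id defaults to 0
#   category = CategorySerializer.update(category, {'name': 'Jobs'})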
|
SnappleCap/oh-mainline | refs/heads/master | vendor/packages/south/south/tests/otherfakeapp/migrations/0002_second.py | 176 | from south.db import db
from django.db import models
class Migration:
def forwards(self):
pass
def backwards(self):
pass
|
Jgarcia-IAS/localizacion | refs/heads/master | openerp/addons-extra/odoo-pruebas/odoo-server/addons/hr/res_users.py | 303 | from openerp import api
from openerp.osv import fields, osv
class res_users(osv.Model):
""" Update of res.users class
- if adding groups to an user, check if base.group_user is in it
(member of 'Employee'), create an employee form linked to it.
"""
_name = 'res.users'
_inherit = ['res.users']
_columns = {
'display_employees_suggestions': fields.boolean("Display Employees Suggestions"),
}
_defaults = {
'display_employees_suggestions': True,
}
def __init__(self, pool, cr):
""" Override of __init__ to add access rights on
display_employees_suggestions fields. Access rights are disabled by
default, but allowed on some specific fields defined in
self.SELF_{READ/WRITE}ABLE_FIELDS.
"""
init_res = super(res_users, self).__init__(pool, cr)
# duplicate list to avoid modifying the original reference
self.SELF_WRITEABLE_FIELDS = list(self.SELF_WRITEABLE_FIELDS)
self.SELF_WRITEABLE_FIELDS.append('display_employees_suggestions')
# duplicate list to avoid modifying the original reference
self.SELF_READABLE_FIELDS = list(self.SELF_READABLE_FIELDS)
self.SELF_READABLE_FIELDS.append('display_employees_suggestions')
return init_res
def stop_showing_employees_suggestions(self, cr, uid, user_id, context=None):
"""Update display_employees_suggestions value to False"""
if context is None:
context = {}
self.write(cr, uid, user_id, {"display_employees_suggestions": False}, context)
def _create_welcome_message(self, cr, uid, user, context=None):
"""Do not welcome new users anymore, welcome new employees instead"""
return True
def _message_post_get_eid(self, cr, uid, thread_id, context=None):
assert thread_id, "res.users does not support posting global messages"
if context and 'thread_model' in context:
context = dict(context or {})
context['thread_model'] = 'hr.employee'
if isinstance(thread_id, (list, tuple)):
thread_id = thread_id[0]
return self.pool.get('hr.employee').search(cr, uid, [('user_id', '=', thread_id)], context=context)
@api.cr_uid_ids_context
def message_post(self, cr, uid, thread_id, context=None, **kwargs):
""" Redirect the posting of message on res.users to the related employee.
This is done because when giving the context of Chatter on the
various mailboxes, we do not have access to the current partner_id. """
if kwargs.get('type') == 'email':
return super(res_users, self).message_post(cr, uid, thread_id, context=context, **kwargs)
res = None
employee_ids = self._message_post_get_eid(cr, uid, thread_id, context=context)
if not employee_ids: # no employee: fall back on previous behavior
return super(res_users, self).message_post(cr, uid, thread_id, context=context, **kwargs)
for employee_id in employee_ids:
res = self.pool.get('hr.employee').message_post(cr, uid, employee_id, context=context, **kwargs)
return res
|
mlsecproject/gglsbl-rest | refs/heads/master | config.py | 1 | from os import environ
import logging.config
from apscheduler.schedulers.background import BackgroundScheduler
from multiprocessing import cpu_count
from subprocess import Popen
logging.config.fileConfig('logging.conf')
bind = "0.0.0.0:5000"
workers = int(environ.get('WORKERS', cpu_count() * 8 + 1))
timeout = int(environ.get('TIMEOUT', 120))
access_log_format = '%(h)s "%(r)s" %(s)s %(b)s "%(f)s" "%(a)s" "%({X-Forwarded-For}i)s" "%({X-Forwarded-Port}i)s" "%({X-Forwarded-Proto}i)s" "%({X-Amzn-Trace-Id}i)s"'
max_requests = int(environ.get('MAX_REQUESTS', 16384))
limit_request_line = int(environ.get('LIMIT_REQUEST_LINE', 8190))
keepalive = int(environ.get('KEEPALIVE', 60))
log = logging.getLogger(__name__)
def update():
log.info("Starting update process...")
po = Popen("python3 update.py", shell=True)
log.info("Update started as PID %d", po.pid)
rc = po.wait()
log.info("Update process finished with status code %d", rc)
sched = None
def on_starting(server):
log.info("Initial database load...")
po = Popen("python3 update.py", shell=True)
log.info("Update started as PID %d", po.pid)
rc = po.wait()
log.info("Update process finished with status code %d", rc)
log.info("Starting scheduler...")
global sched
sched = BackgroundScheduler(timezone="UTC")
sched.start()
sched.add_job(update, id="update", coalesce=True, max_instances=1, trigger='interval', minutes=30)
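# Note on the job options above: coalesce=True collapses runs that were missed
# (e.g. while a worker was blocked) into a single run, and max_instances=1
# prevents overlapping updates if one run outlasts the 30-minute interval.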
|
ms-iot/python | refs/heads/develop | cpython/Doc/includes/sqlite3/executescript.py | 140 | import sqlite3
con = sqlite3.connect(":memory:")
cur = con.cursor()
cur.executescript("""
create table person(
firstname,
lastname,
age
);
create table book(
title,
author,
published
);
insert into book(title, author, published)
values (
'Dirk Gently''s Holistic Detective Agency',
'Douglas Adams',
1987
);
""")
|
Farthen/OTFBot | refs/heads/v1.0 | otfbot/plugins/ircClient/url.py | 1 | # This file is part of OtfBot.
# -*- coding: utf-8 -*-
#
# OtfBot is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# OtfBot is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with OtfBot; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
# (c) 2008 by Alexander Schier
# (c) 2008 by Robert Weidlich
#
import urllib2, re, string
from HTMLParser import HTMLParser, HTMLParseError
from otfbot.lib import chatMod, urlutils
class Plugin(chatMod.chatMod):
def __init__(self, bot):
self.bot = bot
self.parser = titleExtractor()
self.autoTiny=self.bot.config.get("autotiny", False, "url", self.bot.network)
self.autoTinyLength=int(self.bot.config.get("autoLength", "50", "url", self.bot.network))
self.autoPreview=self.bot.config.get("autopreview", False, "url", self.bot.network)
self.autoServerinfo=self.bot.config.get("autoserverinfo", False, "url", self.bot.network)
self.lasturl=""
def command(self, user, channel, command, options):
response = ""
self.parser= titleExtractor()
headers=None
if "preview" in command:
d=urlutils.download(options, headers={'Accept':'text/html'})
d.addCallback(self.processPreview, channel)
d.addErrback(self.error, channel)
if "tinyurl" in command:
if options!="":
d=urlutils.download("http://tinyurl.com/api-create.php?url="+options)
else:
d=urlutils.download("http://tinyurl.com/api-create.php?url="+self.lasturl)
d.addCallback(self.processTiny, channel)
d.addErrback(self.error, channel)
def error(self, failure, channel):
self.bot.sendmsg(channel, "Error while retrieving informations: "+failure.getErrorMessage())
def processTiny(self, data, channel):
self.bot.sendmsg(channel, "[Link Info] "+data )
def processPreview(self, data, channel):
try:
self.parser.feed(data)
if self.parser.get_result() != '':
self.bot.sendmsg(channel, "[Link Info] " + self.parser.get_result())
except HTMLParseError, e:
self.logger.debug(e)
del self.parser
self.parser=titleExtractor()
self.parser.reset()
def msg(self, user, channel, msg):
mask=0
# http://www.truerwords.net/2539
regex=re.match(".*((ftp|http|https):(([A-Za-z0-9$_.+!*(),;/?:@&~=-])|%[A-Fa-f0-9]{2}){2,}(#([a-zA-Z0-9][a-zA-Z0-9$_.+!*(),;/?:@&~=%-]*))?([A-Za-z0-9$_+!*();/?:~-])).*", msg)
if regex:
url=regex.group(1)
if string.lower(user.split("!")[0]) != string.lower(self.bot.nickname):
cmd=""
if not "tinyurl.com" in url:
if len(url) > self.autoTinyLength and self.autoTiny:
cmd+="+tinyurl"
else:
self.lasturl=url
if self.autoPreview:
cmd+="+preview"
if self.autoServerinfo:
cmd+="+serverinfo"
self.command(user, channel, cmd, url)
class titleExtractor(HTMLParser):
intitle=False
title=""
def handle_starttag(self, tag, attrs):
if tag == "title":
self.intitle=True
else:
self.intitle=False
def handle_endtag(self, tag):
if tag == "title":
self.intitle=False
def handle_data(self, data):
if self.intitle:
self.title = data
def get_result(self):
return self.title
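# Minimal usage sketch (standalone, illustrative input):
#
#   p = titleExtractor()
#   p.feed("<html><head><title>OtfBot</title></head></html>")
#   p.get_result()  # -> "OtfBot"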
|
IsaacHaze/tweepy | refs/heads/master | tests/test_streaming.py | 37 | from __future__ import absolute_import, print_function
from .config import tape
import six
if six.PY3:
import unittest
from unittest.case import skip
else:
import unittest2 as unittest
from unittest2.case import skip
from tweepy.api import API
from tweepy.auth import OAuthHandler
from tweepy.models import Status
from tweepy.streaming import Stream, StreamListener, ReadBuffer
from .config import create_auth
from .test_utils import mock_tweet
from mock import MagicMock, patch
if six.PY3:
getresponse_location = 'http.client.HTTPConnection.getresponse'
else:
getresponse_location = 'httplib.HTTPConnection.getresponse'
class MockStreamListener(StreamListener):
def __init__(self, test_case):
super(MockStreamListener, self).__init__()
self.test_case = test_case
self.status_count = 0
self.status_stop_count = 0
self.connect_cb = None
def on_connect(self):
if self.connect_cb:
self.connect_cb()
def on_timeout(self):
self.test_case.fail('timeout')
return False
def on_error(self, code):
print("response: %s" % code)
return True
def on_status(self, status):
self.status_count += 1
self.test_case.assertIsInstance(status, Status)
if self.status_stop_count == self.status_count:
return False
class TweepyStreamTests(unittest.TestCase):
def setUp(self):
self.auth = create_auth()
self.listener = MockStreamListener(self)
self.stream = Stream(self.auth, self.listener, timeout=3.0)
def tearDown(self):
self.stream.disconnect()
def on_connect(self):
API(self.auth).update_status(mock_tweet())
def test_userstream(self):
# Generate random tweet which should show up in the stream.
self.listener.connect_cb = self.on_connect
self.listener.status_stop_count = 1
self.stream.userstream()
self.assertEqual(self.listener.status_count, 1)
@skip("Sitestream only available to whitelisted accounts.")
def test_sitestream(self):
self.listener.connect_cb = self.on_connect
self.listener.status_stop_count = 1
self.stream.sitestream(follow=[self.auth.get_username()])
self.assertEqual(self.listener.status_count, 1)
def test_userstream_with_params(self):
# Generate random tweet which should show up in the stream.
def on_connect():
API(self.auth).update_status(mock_tweet())
self.listener.connect_cb = on_connect
self.listener.status_stop_count = 1
self.stream.userstream(_with='user', replies='all', stall_warnings=True)
self.assertEqual(self.listener.status_count, 1)
def test_sample(self):
self.listener.status_stop_count = 10
self.stream.sample()
self.assertEquals(self.listener.status_count,
self.listener.status_stop_count)
def test_filter_track(self):
self.listener.status_stop_count = 5
phrases = ['twitter']
self.stream.filter(track=phrases)
self.assertEquals(self.listener.status_count,
self.listener.status_stop_count)
def test_track_encoding(self):
s = Stream(None, None)
        s._start = lambda *args, **kwargs: None
s.filter(track=[u'Caf\xe9'])
# Should be UTF-8 encoded
self.assertEqual(u'Caf\xe9'.encode('utf8'), s.session.params['track'])
def test_follow_encoding(self):
s = Stream(None, None)
        s._start = lambda *args, **kwargs: None
s.filter(follow=[u'Caf\xe9'])
# Should be UTF-8 encoded
self.assertEqual(u'Caf\xe9'.encode('utf8'), s.session.params['follow'])
class TweepyStreamReadBuffer(unittest.TestCase):
stream = """11\n{id:12345}\n\n24\n{id:23456, test:"blah"}\n"""
def test_read_tweet(self):
for length in [1, 2, 5, 10, 20, 50]:
buf = ReadBuffer(six.StringIO(self.stream), length)
self.assertEqual('11\n', buf.read_line())
self.assertEqual('{id:12345}\n', buf.read_len(11))
self.assertEqual('\n', buf.read_line())
self.assertEqual('24\n', buf.read_line())
self.assertEqual('{id:23456, test:"blah"}\n', buf.read_len(24))
def test_read_empty_buffer(self):
"""
Requests can be closed by twitter.
The ReadBuffer should not loop infinitely when this happens.
Instead it should return and let the outer _read_loop handle it.
"""
# If the test fails, we are in danger of an infinite loop
# so we need to do some work to block that from happening
class InfiniteLoopException(Exception):
pass
self.called_count = 0
call_limit = 5
def on_read(chunk_size):
self.called_count += 1
if self.called_count > call_limit:
# we have failed
raise InfiniteLoopException("Oops, read() was called a bunch of times")
return ""
# Create a fake stream
stream = six.StringIO('')
# Mock it's read function so it can't be called too many times
mock_read = MagicMock(side_effect=on_read)
try:
with patch.multiple(stream, create=True, read=mock_read, closed=True):
# Now the stream can't call 'read' more than call_limit times
# and it looks like a requests stream that is closed
buf = ReadBuffer(stream, 50)
buf.read_line("\n")
except InfiniteLoopException:
self.fail("ReadBuffer.read_line tried to loop infinitely.")
        # The mocked function should not have been called at all since the stream looks closed
self.assertEqual(mock_read.call_count, 0)
def test_read_unicode_tweet(self):
stream = '11\n{id:12345}\n\n23\n{id:23456, test:"\xe3\x81\x93"}\n\n'
for length in [1, 2, 5, 10, 20, 50]:
buf = ReadBuffer(six.StringIO(stream), length)
self.assertEqual('11\n', buf.read_line())
self.assertEqual('{id:12345}\n', buf.read_len(11))
self.assertEqual('\n', buf.read_line())
self.assertEqual('23\n', buf.read_line())
self.assertEqual('{id:23456, test:"\xe3\x81\x93"}\n', buf.read_len(23))
class TweepyStreamBackoffTests(unittest.TestCase):
def setUp(self):
#bad auth causes twitter to return 401 errors
self.auth = OAuthHandler("bad-key", "bad-secret")
self.auth.set_access_token("bad-token", "bad-token-secret")
self.listener = MockStreamListener(self)
self.stream = Stream(self.auth, self.listener)
def tearDown(self):
self.stream.disconnect()
def test_exp_backoff(self):
self.stream = Stream(self.auth, self.listener, timeout=3.0,
retry_count=1, retry_time=1.0, retry_time_cap=100.0)
self.stream.sample()
# 1 retry, should be 4x the retry_time
self.assertEqual(self.stream.retry_time, 4.0)
def test_exp_backoff_cap(self):
self.stream = Stream(self.auth, self.listener, timeout=3.0,
retry_count=1, retry_time=1.0, retry_time_cap=3.0)
self.stream.sample()
# 1 retry, but 4x the retry_time exceeds the cap, so should be capped
self.assertEqual(self.stream.retry_time, 3.0)
mock_resp = MagicMock()
mock_resp.return_value.status = 420
@patch(getresponse_location, mock_resp)
def test_420(self):
self.stream = Stream(self.auth, self.listener, timeout=3.0, retry_count=0,
retry_time=1.0, retry_420=1.5, retry_time_cap=20.0)
self.stream.sample()
# no retries, but error 420, should be double the retry_420, not double the retry_time
self.assertEqual(self.stream.retry_time, 3.0)
|
randynobx/ansible | refs/heads/devel | lib/ansible/module_utils/facts/hardware/netbsd.py | 223 | # This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import re
from ansible.module_utils.six.moves import reduce
from ansible.module_utils.facts.hardware.base import Hardware, HardwareCollector
from ansible.module_utils.facts.timeout import TimeoutError, timeout
from ansible.module_utils.facts.utils import get_file_content, get_file_lines, get_mount_size
from ansible.module_utils.facts.sysctl import get_sysctl
class NetBSDHardware(Hardware):
"""
NetBSD-specific subclass of Hardware. Defines memory and CPU facts:
- memfree_mb
- memtotal_mb
- swapfree_mb
- swaptotal_mb
- processor (a list)
- processor_cores
- processor_count
- devices
"""
platform = 'NetBSD'
MEMORY_FACTS = ['MemTotal', 'SwapTotal', 'MemFree', 'SwapFree']
def populate(self, collected_facts=None):
hardware_facts = {}
self.sysctl = get_sysctl(self.module, ['machdep'])
cpu_facts = self.get_cpu_facts()
memory_facts = self.get_memory_facts()
mount_facts = {}
try:
mount_facts = self.get_mount_facts()
except TimeoutError:
pass
dmi_facts = self.get_dmi_facts()
hardware_facts.update(cpu_facts)
hardware_facts.update(memory_facts)
hardware_facts.update(mount_facts)
hardware_facts.update(dmi_facts)
return hardware_facts
def get_cpu_facts(self):
cpu_facts = {}
i = 0
physid = 0
sockets = {}
if not os.access("/proc/cpuinfo", os.R_OK):
return cpu_facts
cpu_facts['processor'] = []
for line in get_file_lines("/proc/cpuinfo"):
data = line.split(":", 1)
key = data[0].strip()
# model name is for Intel arch, Processor (mind the uppercase P)
# works for some ARM devices, like the Sheevaplug.
if key == 'model name' or key == 'Processor':
if 'processor' not in cpu_facts:
cpu_facts['processor'] = []
cpu_facts['processor'].append(data[1].strip())
i += 1
elif key == 'physical id':
physid = data[1].strip()
if physid not in sockets:
sockets[physid] = 1
elif key == 'cpu cores':
sockets[physid] = int(data[1].strip())
if len(sockets) > 0:
cpu_facts['processor_count'] = len(sockets)
cpu_facts['processor_cores'] = reduce(lambda x, y: x + y, sockets.values())
else:
cpu_facts['processor_count'] = i
cpu_facts['processor_cores'] = 'NA'
return cpu_facts
def get_memory_facts(self):
memory_facts = {}
if not os.access("/proc/meminfo", os.R_OK):
return memory_facts
for line in get_file_lines("/proc/meminfo"):
data = line.split(":", 1)
key = data[0]
if key in NetBSDHardware.MEMORY_FACTS:
val = data[1].strip().split(' ')[0]
memory_facts["%s_mb" % key.lower()] = int(val) // 1024
return memory_facts
@timeout()
def get_mount_facts(self):
mount_facts = {}
mount_facts['mounts'] = []
fstab = get_file_content('/etc/fstab')
if not fstab:
return mount_facts
for line in fstab.splitlines():
if line.startswith('#') or line.strip() == '':
continue
fields = re.sub(r'\s+', ' ', line).split()
mount_statvfs_info = get_mount_size(fields[1])
mount_info = {'mount': fields[1],
'device': fields[0],
'fstype': fields[2],
'options': fields[3]}
mount_info.update(mount_statvfs_info)
mount_facts['mounts'].append(mount_info)
return mount_facts
def get_dmi_facts(self):
dmi_facts = {}
# We don't use dmidecode(8) here because:
# - it would add dependency on an external package
# - dmidecode(8) can only be ran as root
# So instead we rely on sysctl(8) to provide us the information on a
# best-effort basis. As a bonus we also get facts on non-amd64/i386
# platforms this way.
sysctl_to_dmi = {
'machdep.dmi.system-product': 'product_name',
'machdep.dmi.system-version': 'product_version',
'machdep.dmi.system-uuid': 'product_uuid',
'machdep.dmi.system-serial': 'product_serial',
'machdep.dmi.system-vendor': 'system_vendor',
}
for mib in sysctl_to_dmi:
if mib in self.sysctl:
dmi_facts[sysctl_to_dmi[mib]] = self.sysctl[mib]
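        # Illustrative result (assumed sysctl output): if sysctl reports
        # machdep.dmi.system-vendor=QEMU, the loop above yields
        # {'system_vendor': 'QEMU'}, plus whichever other MIBs are present.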
return dmi_facts
class NetBSDHardwareCollector(HardwareCollector):
_fact_class = NetBSDHardware
_platform = 'NetBSD'
|
coxm/djangocms-cascade | refs/heads/master | tests/test_base.py | 5 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
from django.contrib.auth.models import User
from cms.api import create_page
from cms.admin.pageadmin import PageAdmin
from cms.models.placeholdermodel import Placeholder
from cms.test_utils.testcases import CMSTestCase
class CascadeTestCase(CMSTestCase):
admin_password = 'secret'
def setUp(self):
self._createAdminUser()
page = create_page('HOME', 'testing.html', 'en', published=True, in_navigation=True,
created_by=self.user)
self.placeholder = Placeholder.objects.create(slot='Main Content')
self.placeholder.page_set.add(page)
self.placeholder.save()
self.request = self.get_request(language='en', page=page)
self.admin_site = admin.sites.AdminSite()
self.page_admin = PageAdmin(page, self.admin_site)
def _createAdminUser(self):
self.user = User.objects.create_user('admin', 'admin@example.com', self.admin_password)
self.user.is_staff = True
self.user.is_superuser = True
self.user.save()
logged_in = self.client.login(username=self.user.username, password=self.admin_password)
self.assertTrue(logged_in, 'User is not logged in')
|
andrewnc/scikit-learn | refs/heads/master | benchmarks/bench_tree.py | 297 | """
To run this, you'll need scikit-learn installed.
This script runs two benchmarks.
First, we fix a training set, increase the number of
samples to classify and plot number of classified samples as a
function of time.
In the second benchmark, we increase the number of dimensions of the
training set, classify a sample and plot the time taken as a function
of the number of dimensions.
"""
import numpy as np
import pylab as pl
import gc
from datetime import datetime
# to store the results
scikit_classifier_results = []
scikit_regressor_results = []
mu_second = 0.0 + 10 ** 6 # number of microseconds in a second
def bench_scikit_tree_classifier(X, Y):
"""Benchmark with scikit-learn decision tree classifier"""
from sklearn.tree import DecisionTreeClassifier
gc.collect()
# start time
tstart = datetime.now()
clf = DecisionTreeClassifier()
clf.fit(X, Y).predict(X)
delta = (datetime.now() - tstart)
# stop time
scikit_classifier_results.append(
delta.seconds + delta.microseconds / mu_second)
def bench_scikit_tree_regressor(X, Y):
"""Benchmark with scikit-learn decision tree regressor"""
from sklearn.tree import DecisionTreeRegressor
gc.collect()
# start time
tstart = datetime.now()
clf = DecisionTreeRegressor()
clf.fit(X, Y).predict(X)
delta = (datetime.now() - tstart)
# stop time
scikit_regressor_results.append(
delta.seconds + delta.microseconds / mu_second)
if __name__ == '__main__':
print('============================================')
print('Warning: this is going to take a looong time')
print('============================================')
n = 10
step = 10000
n_samples = 10000
dim = 10
n_classes = 10
for i in range(n):
print('============================================')
print('Entering iteration %s of %s' % (i, n))
print('============================================')
n_samples += step
X = np.random.randn(n_samples, dim)
Y = np.random.randint(0, n_classes, (n_samples,))
bench_scikit_tree_classifier(X, Y)
Y = np.random.randn(n_samples)
bench_scikit_tree_regressor(X, Y)
xx = range(0, n * step, step)
pl.figure('scikit-learn tree benchmark results')
pl.subplot(211)
pl.title('Learning with varying number of samples')
pl.plot(xx, scikit_classifier_results, 'g-', label='classification')
pl.plot(xx, scikit_regressor_results, 'r-', label='regression')
pl.legend(loc='upper left')
pl.xlabel('number of samples')
pl.ylabel('Time (s)')
scikit_classifier_results = []
scikit_regressor_results = []
n = 10
step = 500
start_dim = 500
n_classes = 10
dim = start_dim
for i in range(0, n):
print('============================================')
print('Entering iteration %s of %s' % (i, n))
print('============================================')
dim += step
X = np.random.randn(100, dim)
Y = np.random.randint(0, n_classes, (100,))
bench_scikit_tree_classifier(X, Y)
Y = np.random.randn(100)
bench_scikit_tree_regressor(X, Y)
xx = np.arange(start_dim, start_dim + n * step, step)
pl.subplot(212)
pl.title('Learning in high dimensional spaces')
pl.plot(xx, scikit_classifier_results, 'g-', label='classification')
pl.plot(xx, scikit_regressor_results, 'r-', label='regression')
pl.legend(loc='upper left')
pl.xlabel('number of dimensions')
pl.ylabel('Time (s)')
pl.axis('tight')
pl.show()
|
makerbot/s3g | refs/heads/master | technician_tests/techtest_BotFactory.py | 1 | from __future__ import (absolute_import, print_function, unicode_literals)
import os
import sys
import uuid
lib_path = os.path.abspath('../')
sys.path.append(lib_path)
try:
import unittest2 as unittest
except ImportError:
import unittest
import mock
import makerbot_driver
class TestLiveBotConnected(unittest.TestCase):
def setUp(self):
pass
def test_leaves_bot_open(self):
        ignore = raw_input("Please verify a valid MakerBot is connected (Y/n) > ")
        self.assertEqual(ignore.lower(), 'y')
md = makerbot_driver.MachineDetector()
md.scan()
availMachine = md.get_first_machine()
        self.assertIsNotNone(availMachine)
print(availMachine)
bFact = makerbot_driver.BotFactory()
s3gObj, profile = bFact.build_from_port(availMachine, False)
# re-opening s3g here fails
self.assertFalse(s3gObj.is_open())
if __name__ == '__main__':
unittest.main()
|
ubiar/odoo | refs/heads/8.0 | openerp/addons/test_workflow/models.py | 337 | # -*- coding: utf-8 -*-
import openerp.osv.orm
class m(openerp.osv.orm.Model):
""" A model for which we will define a workflow (see data.xml). """
_name = 'test.workflow.model'
def print_(self, cr, uid, ids, s, context=None):
print ' Running activity `%s` for record %s' % (s, ids)
return True
def print_a(self, cr, uid, ids, context=None):
return self.print_(cr, uid, ids, 'a', context)
def print_b(self, cr, uid, ids, context=None):
return self.print_(cr, uid, ids, 'b', context)
def print_c(self, cr, uid, ids, context=None):
return self.print_(cr, uid, ids, 'c', context)
def condition(self, cr, uid, ids, context=None):
m = self.pool['test.workflow.trigger']
for r in m.browse(cr, uid, [1], context=context):
if not r.value:
return False
return True
def trigger(self, cr, uid, context=None):
return openerp.workflow.trg_trigger(uid, 'test.workflow.trigger', 1, cr)
class n(openerp.osv.orm.Model):
""" A model used for the trigger feature. """
_name = 'test.workflow.trigger'
_columns = { 'value': openerp.osv.fields.boolean('Value') }
_defaults = { 'value': False }
class a(openerp.osv.orm.Model):
_name = 'test.workflow.model.a'
_columns = { 'value': openerp.osv.fields.integer('Value') }
_defaults = { 'value': 0 }
class b(openerp.osv.orm.Model):
_name = 'test.workflow.model.b'
_inherit = 'test.workflow.model.a'
class c(openerp.osv.orm.Model):
_name = 'test.workflow.model.c'
_inherit = 'test.workflow.model.a'
class d(openerp.osv.orm.Model):
_name = 'test.workflow.model.d'
_inherit = 'test.workflow.model.a'
class e(openerp.osv.orm.Model):
_name = 'test.workflow.model.e'
_inherit = 'test.workflow.model.a'
for name in 'bcdefghijkl':
#
# Do not use type() to create the class here, but use the class construct.
# This is because the __module__ of the new class would be the one of the
# metaclass that provides method __new__!
#
class NewModel(openerp.osv.orm.Model):
_name = 'test.workflow.model.%s' % name
_inherit = 'test.workflow.model.a'
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
denverdino/compose | refs/heads/master | tests/unit/cli/formatter_test.py | 16 | from __future__ import absolute_import
from __future__ import unicode_literals
import logging
from compose.cli import colors
from compose.cli.formatter import ConsoleWarningFormatter
from tests import unittest
MESSAGE = 'this is the message'
def makeLogRecord(level):
return logging.LogRecord('name', level, 'pathame', 0, MESSAGE, (), None)
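# Minimal wiring sketch (assumes the root logger; not exercised by the tests
# below):
#
#   handler = logging.StreamHandler()
#   handler.setFormatter(ConsoleWarningFormatter())
#   logging.getLogger().addHandler(handler)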
class ConsoleWarningFormatterTestCase(unittest.TestCase):
def setUp(self):
self.formatter = ConsoleWarningFormatter()
def test_format_warn(self):
output = self.formatter.format(makeLogRecord(logging.WARN))
expected = colors.yellow('WARNING') + ': '
assert output == expected + MESSAGE
def test_format_error(self):
output = self.formatter.format(makeLogRecord(logging.ERROR))
expected = colors.red('ERROR') + ': '
assert output == expected + MESSAGE
def test_format_info(self):
output = self.formatter.format(makeLogRecord(logging.INFO))
assert output == MESSAGE
|
adityaduggal/erpnext | refs/heads/develop | erpnext/agriculture/doctype/detected_disease/detected_disease.py | 23 | # -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
class DetectedDisease(Document):
pass
|
chris48s/UK-Polling-Stations | refs/heads/master | polling_stations/apps/data_collection/management/commands/import_gateshead.py | 1 | from data_collection.management.commands import BaseXpressDCCsvInconsistentPostcodesImporter
class Command(BaseXpressDCCsvInconsistentPostcodesImporter):
council_id = 'E08000037'
addresses_name = 'parl.2017-06-08/Version 1/Democracy_Club__08June2017 (6).tsv'
stations_name = 'parl.2017-06-08/Version 1/Democracy_Club__08June2017 (6).tsv'
elections = ['parl.2017-06-08']
csv_delimiter = '\t'
|
d120/pyofahrt | refs/heads/master | faq/migrations/0001_initial.py | 1 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-04-19 22:40
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = []
operations = [
migrations.CreateModel(
name='FaqCategory',
fields=[
('id', models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name='ID')),
('name', models.CharField(max_length=255,
verbose_name='Name')),
],
),
migrations.CreateModel(
name='Question',
fields=[
('id', models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name='ID')),
('text', models.CharField(
max_length=255, verbose_name='Frage')),
('answer', models.TextField(verbose_name='Antwort')),
('category', models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to='faq.FaqCategory')),
],
),
]
|
VaibhavAgarwalVA/sympy | refs/heads/master | sympy/physics/quantum/tests/test_shor.py | 99 | from sympy.utilities.pytest import XFAIL
from sympy.physics.quantum.qapply import qapply
from sympy.physics.quantum.qubit import Qubit
from sympy.physics.quantum.shor import CMod, getr
@XFAIL
def test_CMod():
assert qapply(CMod(4, 2, 2)*Qubit(0, 0, 1, 0, 0, 0, 0, 0)) == \
Qubit(0, 0, 1, 0, 0, 0, 0, 0)
assert qapply(CMod(5, 5, 7)*Qubit(0, 0, 1, 0, 0, 0, 0, 0, 0, 0)) == \
Qubit(0, 0, 1, 0, 0, 0, 0, 0, 1, 0)
assert qapply(CMod(3, 2, 3)*Qubit(0, 1, 0, 0, 0, 0)) == \
Qubit(0, 1, 0, 0, 0, 1)
def test_continued_frac():
assert getr(513, 1024, 10) == 2
assert getr(169, 1024, 11) == 6
assert getr(314, 4096, 16) == 13
|
lipari/flux-core | refs/heads/master | src/bindings/python/flux/kz.py | 1 | import errno
import os
import sys
from flux._kz import ffi, lib
from flux.wrapper import Wrapper, WrapperPimpl
class KZWrapper(Wrapper):
# This empty class accepts new methods, preventing accidental overloading
# across wrappers
pass
RAW = KZWrapper(ffi, lib, prefixes=['kz_', ])
# override error check behavior for kz_get, necessary due to errno EAGAIN
RAW.kz_get.set_error_check(lambda x: False)
def generic_write(stream, string):
if not isinstance(stream, int):
stream.write(string)
else:
os.write(stream, string)
@ffi.callback('kz_ready_f')
def kz_stream_handler(kz_handle, arg):
del kz_handle # unused
(stream, prefix, handle) = ffi.from_handle(arg)
buf = ffi.new('char *[1]')
while True:
try:
count = RAW.get(handle, buf)
if count == 0:
break
if prefix is None:
generic_write(stream, ffi.string(buf[0]))
else:
for _ in ffi.string(buf[0]).splitlines(True):
generic_write(stream, prefix)
generic_write(stream, ffi.string(buf[0]))
except EnvironmentError as err:
if err.errno == errno.EAGAIN:
pass
else:
raise err
return None
KZWATCHES = {}
def attach(flux_handle,
key,
stream,
prefix=None,
flags=(RAW.KZ_FLAGS_READ
| RAW.KZ_FLAGS_NONBLOCK
| RAW.KZ_FLAGS_NOEXIST)):
handle = RAW.kz_open(flux_handle, key, flags)
warg = (stream, prefix, handle)
KZWATCHES[key] = warg
return RAW.set_ready_cb(handle, kz_stream_handler, ffi.new_handle(warg))
def detach(flux_handle, key):
del flux_handle # unused
(_, _, handle) = KZWATCHES.pop(key, None)
return RAW.close(handle)
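# Minimal usage sketch (assumes a connected Flux handle `f` and an existing
# KZ key carrying a job's stdout; the key name is illustrative):
#
#   attach(f, 'lwj.0.stdout', sys.stdout, prefix='job0: ')
#   ...  # run the reactor, then detach(f, 'lwj.0.stdout') when finished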
class KZStream(WrapperPimpl):
class InnerWrapper(Wrapper):
def __init__(self,
flux_handle,
name,
flags=(RAW.KZ_FLAGS_READ | RAW.KZ_FLAGS_NONBLOCK |
RAW.KZ_FLAGS_NOEXIST),
handle=None,
prefix=False):
self.destroyer = RAW.kz_close
self.handle = None
self.prefix = prefix
if flux_handle is None and handle is None: # pragma: no cover
raise ValueError(
"flux_handle must be a valid Flux object or handle must "
"be a valid kvsdir cdata pointer")
if handle is None:
handle = RAW.kz_open(flux_handle, name, flags)
super(self.__class__, self).__init__(ffi, lib,
handle=handle,
match=ffi.typeof('kz_t *'),
prefixes=[
'kz_',
], )
def __del__(self):
if self.handle is not None:
self.destroyer(self.handle)
self.handle = None
def attach(self, stream=sys.stdout):
""" Redirect all output from this KZ stream to the specified stream"""
arg = (stream, self.prefix, self.handle)
self.set_ready_cb(kz_stream_handler, ffi.new_handle(arg))
def __init__(self,
flux_handle,
name,
flags=(RAW.KZ_FLAGS_READ | RAW.KZ_FLAGS_NONBLOCK |
RAW.KZ_FLAGS_NOEXIST),
handle=None,
prefix=False):
super(KZStream, self).__init__()
self.flux_handle = flux_handle
self.prefix = prefix
self.name = name
if flux_handle is None and handle is None:
raise ValueError(
"flux_handle must be a valid Flux object or handle must be a "
"valid kvsdir cdata pointer")
self.pimpl = self.InnerWrapper(
flux_handle, name, flags, handle, prefix)
def __enter__(self):
"""Allow this to be used as a context manager"""
return self
def __exit__(self, type_arg, value, tb):
"""
When used as a context manager, the KVSDir commits itself on exit
"""
self.pimpl.__del__()
return False
|
F5Networks/f5-ansible-modules | refs/heads/doc-update | ansible_collections/f5networks/f5_modules/plugins/modules/bigip_message_routing_router.py | 1 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2019, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = r'''
---
module: bigip_message_routing_router
short_description: Manages router profiles for message-routing protocols
description:
- Manages router profiles for message-routing protocols.
version_added: "1.0.0"
options:
name:
description:
- Specifies the name of the router profile.
required: True
type: str
description:
description:
- The user-defined description of the router profile.
type: str
type:
description:
- Parameter used to specify the type of the router profile to manage.
- Default setting is C(generic) with more options coming.
type: str
choices:
- generic
default: generic
parent:
description:
- The parent template of this router profile. Once this value has been set, it cannot be changed.
- The default values are set by the system if not specified and they correspond to the router type created,
for example, C(/Common/messagerouter) for C(generic) C(type) and so on.
type: str
ignore_client_port:
description:
- When C(yes), the remote port on clientside connections (connections where the peer connected to the BIG-IP)
is ignored when searching for an existing connection.
type: bool
inherited_traffic_group:
description:
- When set to C(yes), the C(traffic_group) will be inherited from the containing folder. When not specified the
system sets this to C(no) when creating new router profile.
type: bool
traffic_group:
description:
- Specifies the traffic-group of the router profile.
- Setting the C(traffic_group) to an empty string value C("") will cause the device to inherit from containing
folder, which means the value of C(inherited_traffic_group) on device will be C(yes).
type: str
use_local_connection:
description:
- If C(yes), the router will route a message to an existing connection on the same TMM as the message was
received.
type: bool
max_pending_bytes:
description:
- The maximum number of bytes worth of pending messages that will be held while waiting for a connection to a
peer to be created. Once reached, any additional messages to the peer will be flagged as undeliverable
and returned to the originator.
- The accepted range is between 0 and 4294967295 inclusive.
type: int
max_pending_messages:
description:
- The maximum number of pending messages that will be held while waiting for a connection to a peer to be created.
Once reached, any additional messages to the peer will be flagged as undeliverable and returned
to the originator.
- The accepted range is between 0 and 65535 inclusive.
type: int
max_retries:
description:
      - Sets the maximum number of times a message may be resubmitted for rerouting by the C(MR::retry) iRule command.
- The accepted range is between 0 and 4294967295 inclusive.
type: int
mirror:
description:
- Enables or disables state mirroring. State mirroring can be used to maintain the same state information in the
standby unit that is in the active unit.
type: bool
mirrored_msg_sweeper_interval:
description:
- Specifies the maximum time in milliseconds that a message will be held on the standby device as it waits for
the active device to route the message.
      - Messages on the standby device held for longer than the configurable sweeper interval will be dropped.
- The acceptable range is between 0 and 4294967295 inclusive.
type: int
routes:
description:
- Specifies a list of static routes for the router instance to use.
- The route must be on the same partition as router profile.
type: list
elements: str
partition:
description:
- Device partition to create router profile on.
type: str
default: Common
state:
description:
- When C(present), ensures the router profile exists.
- When C(absent), ensures the router profile is removed.
type: str
choices:
- present
- absent
default: present
notes:
- Requires BIG-IP >= 14.0.0
extends_documentation_fragment: f5networks.f5_modules.f5
author:
- Wojciech Wypior (@wojtek0806)
'''
EXAMPLES = r'''
- name: Create a generic router profile
bigip_message_routing_router:
name: foo
max_retries: 10
ignore_client_port: yes
routes:
- /Common/route1
- /Common/route2
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
- name: Modify a generic router profile
bigip_message_routing_router:
name: foo
ignore_client_port: no
mirror: yes
mirrored_msg_sweeper_interval: 4000
traffic_group: /Common/traffic-group-2
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
- name: Remove a generic router profile
bigip_message_routing_router:
name: foo
state: absent
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
'''
RETURN = r'''
description:
description: The user-defined description of the router profile.
returned: changed
type: str
sample: My description
parent:
description: The parent template of this router profile.
returned: changed
type: str
sample: /Common/messagerouter
ignore_client_port:
description: Enables ignoring of the remote port on clientside connections when searching for an existing connection.
returned: changed
type: bool
sample: no
inherited_traffic_group:
description: Specifies if a traffic-group should be inherited from containing folder.
returned: changed
type: bool
sample: yes
traffic_group:
description: The traffic-group of the router profile.
returned: changed
type: str
sample: /Common/traffic-group-1
use_local_connection:
description: Enables routing of messages to an existing connection on the same TMM as the message was received.
returned: changed
type: bool
sample: yes
max_pending_bytes:
description: The maximum number of bytes worth of pending messages that will be held.
returned: changed
type: int
sample: 10000
max_pending_messages:
description: The maximum number of pending messages that will be held.
returned: changed
type: int
sample: 64
max_retries:
  description: The maximum number of times a message may be resubmitted for rerouting.
returned: changed
type: int
sample: 10
mirror:
description: Enables or disables state mirroring.
returned: changed
type: bool
sample: yes
mirrored_msg_sweeper_interval:
description: The maximum time in milliseconds that a message will be held on the standby device.
returned: changed
type: int
sample: 2000
routes:
description: The list of static routes for the router instance to use.
returned: changed
type: list
sample: ['/Common/route1', '/Common/route2']
'''
from datetime import datetime
from distutils.version import LooseVersion
from ansible.module_utils.basic import (
AnsibleModule, env_fallback
)
from ..module_utils.bigip import F5RestClient
from ..module_utils.common import (
F5ModuleError, AnsibleF5Parameters, transform_name, f5_argument_spec, flatten_boolean, fq_name
)
from ..module_utils.compare import (
cmp_str_with_none, cmp_simple_list
)
from ..module_utils.icontrol import tmos_version
from ..module_utils.teem import send_teem
class Parameters(AnsibleF5Parameters):
api_map = {
'defaultsFrom': 'parent',
'useLocalConnection': 'use_local_connection',
'ignoreClientPort': 'ignore_client_port',
'inheritedTrafficGroup': 'inherited_traffic_group',
'maxPendingBytes': 'max_pending_bytes',
'maxPendingMessages': 'max_pending_messages',
'maxRetries': 'max_retries',
'mirroredMessageSweeperInterval': 'mirrored_msg_sweeper_interval',
'trafficGroup': 'traffic_group',
}
api_attributes = [
'description',
'useLocalConnection',
'ignoreClientPort',
'inheritedTrafficGroup',
'maxPendingBytes',
'maxPendingMessages',
'maxRetries',
'mirror',
'mirroredMessageSweeperInterval',
'trafficGroup',
'routes',
'defaultsFrom',
]
returnables = [
'parent',
'description',
'use_local_connection',
'ignore_client_port',
'inherited_traffic_group',
'max_pending_bytes',
'max_pending_messages',
'max_retries',
'mirrored_msg_sweeper_interval',
'traffic_group',
'mirror',
'routes',
]
updatables = [
'description',
'use_local_connection',
'ignore_client_port',
'inherited_traffic_group',
'max_pending_bytes',
'max_pending_messages',
'max_retries',
'mirrored_msg_sweeper_interval',
'traffic_group',
'mirror',
'routes',
'parent',
]
@property
def ignore_client_port(self):
return flatten_boolean(self._values['ignore_client_port'])
@property
def use_local_connection(self):
return flatten_boolean(self._values['use_local_connection'])
class ApiParameters(Parameters):
pass
class ModuleParameters(Parameters):
@property
def parent(self):
if self._values['parent'] is None:
return None
result = fq_name(self.partition, self._values['parent'])
return result
@property
def inherited_traffic_group(self):
result = flatten_boolean(self._values['inherited_traffic_group'])
if result is None:
return None
if result == 'yes':
return 'true'
return 'false'
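    # Note: the REST API expects the literal strings 'true'/'false' for
    # inheritedTrafficGroup rather than booleans, hence the mapping above.
    # Illustrative check (hypothetical values, not part of the module):
    #
    #   p = ModuleParameters(params=dict(inherited_traffic_group='yes'))
    #   assert p.inherited_traffic_group == 'true'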
@property
def mirror(self):
result = flatten_boolean(self._values['mirror'])
if result is None:
return None
if result == 'yes':
return 'enabled'
return 'disabled'
@property
def max_pending_bytes(self):
if self._values['max_pending_bytes'] is None:
return None
if 0 <= self._values['max_pending_bytes'] <= 4294967295:
return self._values['max_pending_bytes']
raise F5ModuleError(
"Valid 'max_pending_bytes' must be in range 0 - 4294967295 bytes."
)
@property
def max_retries(self):
if self._values['max_retries'] is None:
return None
if 0 <= self._values['max_retries'] <= 4294967295:
return self._values['max_retries']
raise F5ModuleError(
"Valid 'max_retries' must be in range 0 - 4294967295."
)
@property
def max_pending_messages(self):
if self._values['max_pending_messages'] is None:
return None
if 0 <= self._values['max_pending_messages'] <= 65535:
return self._values['max_pending_messages']
raise F5ModuleError(
"Valid 'max_pending_messages' must be in range 0 - 65535 messages."
)
@property
def mirrored_msg_sweeper_interval(self):
if self._values['mirrored_msg_sweeper_interval'] is None:
return None
if 0 <= self._values['mirrored_msg_sweeper_interval'] <= 4294967295:
return self._values['mirrored_msg_sweeper_interval']
raise F5ModuleError(
"Valid 'mirrored_msg_sweeper_interval' must be in range 0 - 4294967295 milliseconds."
)
@property
def routes(self):
if self._values['routes'] is None:
return None
if len(self._values['routes']) == 1 and self._values['routes'][0] == "":
return ""
result = [fq_name(self.partition, peer) for peer in self._values['routes']]
return result
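    # An explicit empty element (routes: [""]) collapses to "" above; that
    # empty string appears to be the sentinel sent to the device to clear all
    # static routes, mirroring the empty-string handling in traffic_group
    # below (a sketch of intent, not additional validation).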
@property
def traffic_group(self):
if self._values['traffic_group'] is None:
return None
if self._values['traffic_group'] == "":
return ""
result = fq_name('Common', self._values['traffic_group'])
return result
class Changes(Parameters):
    def to_return(self):
        result = {}
        for returnable in self.returnables:
            result[returnable] = getattr(self, returnable)
        result = self._filter_params(result)
        return result
class UsableChanges(Changes):
pass
class ReportableChanges(Changes):
@property
def mirror(self):
result = flatten_boolean(self._values['mirror'])
return result
@property
def inherited_traffic_group(self):
result = self._values['inherited_traffic_group']
if result == 'true':
return 'yes'
if result == 'false':
return 'no'
return None
class Difference(object):
def __init__(self, want, have=None):
self.want = want
self.have = have
def compare(self, param):
try:
result = getattr(self, param)
return result
except AttributeError:
return self.__default(param)
def __default(self, param):
attr1 = getattr(self.want, param)
try:
attr2 = getattr(self.have, param)
if attr1 != attr2:
return attr1
except AttributeError:
return attr1
@property
def description(self):
return cmp_str_with_none(self.want.description, self.have.description)
@property
def parent(self):
if self.want.parent is None:
return None
if self.want.parent != self.have.parent:
raise F5ModuleError(
"The parent router profile cannot be changed."
)
@property
def routes(self):
result = cmp_simple_list(self.want.routes, self.have.routes)
return result
class BaseManager(object):
def __init__(self, *args, **kwargs):
self.module = kwargs.get('module', None)
self.client = F5RestClient(**self.module.params)
self.want = ModuleParameters(params=self.module.params)
self.have = ApiParameters()
self.changes = UsableChanges()
def _set_changed_options(self):
changed = {}
for key in Parameters.returnables:
if getattr(self.want, key) is not None:
changed[key] = getattr(self.want, key)
if changed:
self.changes = UsableChanges(params=changed)
def _update_changed_options(self):
diff = Difference(self.want, self.have)
updatables = Parameters.updatables
changed = dict()
for k in updatables:
change = diff.compare(k)
if change is None:
continue
else:
if isinstance(change, dict):
changed.update(change)
else:
changed[k] = change
if changed:
self.changes = UsableChanges(params=changed)
return True
return False
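    # Update flow in brief: update() loads device state into self.have, then
    # this method walks Parameters.updatables, letting Difference.compare()
    # dispatch to a per-property comparator (or __default) and collecting only
    # the values that actually differ into self.changes, which
    # update_on_device() later PATCHes to the REST endpoint.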
def _announce_deprecations(self, result):
warnings = result.pop('__warnings', [])
for warning in warnings:
self.client.module.deprecate(
msg=warning['msg'],
version=warning['version']
)
def exec_module(self):
start = datetime.now().isoformat()
version = tmos_version(self.client)
changed = False
result = dict()
state = self.want.state
if state == "present":
changed = self.present()
elif state == "absent":
changed = self.absent()
reportable = ReportableChanges(params=self.changes.to_return())
changes = reportable.to_return()
result.update(**changes)
result.update(dict(changed=changed))
self._announce_deprecations(result)
send_teem(start, self.module, version)
return result
def present(self):
if self.exists():
return self.update()
else:
return self.create()
def absent(self):
if self.exists():
return self.remove()
return False
def should_update(self):
result = self._update_changed_options()
if result:
return True
return False
def update(self):
self.have = self.read_current_from_device()
if not self.should_update():
return False
if self.module.check_mode:
return True
self.update_on_device()
return True
def remove(self):
if self.module.check_mode:
return True
self.remove_from_device()
if self.exists():
raise F5ModuleError("Failed to delete the resource.")
return True
def create(self):
self._set_changed_options()
if self.module.check_mode:
return True
self.create_on_device()
return True
class GenericModuleManager(BaseManager):
def exists(self):
uri = "https://{0}:{1}/mgmt/tm/ltm/message-routing/generic/router/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if resp.status == 404 or 'code' in response and response['code'] == 404:
return False
if resp.status in [200, 201] or 'code' in response and response['code'] in [200, 201]:
return True
errors = [401, 403, 409, 500, 501, 502, 503, 504]
if resp.status in errors or 'code' in response and response['code'] in errors:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
def create_on_device(self):
params = self.changes.api_params()
params['name'] = self.want.name
params['partition'] = self.want.partition
uri = "https://{0}:{1}/mgmt/tm/ltm/message-routing/generic/router/".format(
self.client.provider['server'],
self.client.provider['server_port'],
)
resp = self.client.api.post(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if resp.status in [200, 201] or 'code' in response and response['code'] in [200, 201]:
return True
raise F5ModuleError(resp.content)
def update_on_device(self):
params = self.changes.api_params()
uri = "https://{0}:{1}/mgmt/tm/ltm/message-routing/generic/router/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.patch(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if resp.status in [200, 201] or 'code' in response and response['code'] in [200, 201]:
return True
raise F5ModuleError(resp.content)
def remove_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/ltm/message-routing/generic/router/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
response = self.client.api.delete(uri)
if response.status == 200:
return True
raise F5ModuleError(response.content)
def read_current_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/ltm/message-routing/generic/router/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if resp.status in [200, 201] or 'code' in response and response['code'] in [200, 201]:
return ApiParameters(params=response)
raise F5ModuleError(resp.content)
class ModuleManager(object):
def __init__(self, *args, **kwargs):
self.module = kwargs.get('module', None)
self.client = F5RestClient(**self.module.params)
self.kwargs = kwargs
def version_less_than_14(self):
version = tmos_version(self.client)
if LooseVersion(version) < LooseVersion('14.0.0'):
return True
return False
def exec_module(self):
if self.version_less_than_14():
            raise F5ModuleError('Message routing is not supported on TMOS versions below 14.x')
if self.module.params['type'] == 'generic':
manager = self.get_manager('generic')
else:
raise F5ModuleError(
"Unknown type specified."
)
return manager.exec_module()
    def get_manager(self, kind):
        if kind == 'generic':
            return GenericModuleManager(**self.kwargs)
class ArgumentSpec(object):
def __init__(self):
self.supports_check_mode = True
argument_spec = dict(
name=dict(required=True),
description=dict(),
parent=dict(),
ignore_client_port=dict(type='bool'),
inherited_traffic_group=dict(type='bool'),
use_local_connection=dict(type='bool'),
max_pending_bytes=dict(type='int'),
max_pending_messages=dict(type='int'),
max_retries=dict(type='int'),
mirror=dict(type='bool'),
mirrored_msg_sweeper_interval=dict(type='int'),
routes=dict(
type='list',
elements='str',
),
traffic_group=dict(),
type=dict(
choices=['generic'],
default='generic'
),
partition=dict(
default='Common',
fallback=(env_fallback, ['F5_PARTITION'])
),
state=dict(
default='present',
choices=['present', 'absent']
)
)
self.argument_spec = {}
self.argument_spec.update(f5_argument_spec)
self.argument_spec.update(argument_spec)
def main():
spec = ArgumentSpec()
module = AnsibleModule(
argument_spec=spec.argument_spec,
supports_check_mode=spec.supports_check_mode,
)
try:
mm = ModuleManager(module=module)
results = mm.exec_module()
module.exit_json(**results)
except F5ModuleError as ex:
module.fail_json(msg=str(ex))
if __name__ == '__main__':
main()
|
RangerWolf/wxFan | refs/heads/master | client/bot.py | 1 | # -*- coding:utf-8 -*-
import sys, os, json
import platform
import schedule
import thread
from datetime import datetime
import itchat, time
from itchat.content import *
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
reload(sys)
sys.setdefaultencoding('utf-8')
@itchat.msg_register([TEXT, MAP, CARD, NOTE, SHARING])
def text_reply(msg):
itchat.send('%s: %s' % (msg['Type'], msg['Text']), msg['FromUserName'])
@itchat.msg_register([PICTURE, RECORDING, ATTACHMENT, VIDEO])
def download_files(msg):
msg['Text'](msg['FileName'])
return '@%s@%s' % ({'Picture': 'img', 'Video': 'vid'}.get(msg['Type'], 'fil'), msg['FileName'])
@itchat.msg_register(FRIENDS)
def add_friend(msg):
    itchat.add_friend(**msg['Text'])  # this automatically registers the new friend's messages; no need to reload the contact list
itchat.send_msg('Nice to meet you!', msg['RecommendInfo']['UserName'])
@itchat.msg_register(TEXT, isGroupChat=True)
def group_text_reply(msg):
print json.dumps(msg)
if msg['isAt']:
import requests
        try:
            ret_msg = requests.post("http://localhost:5678/smart_reply", data=json.dumps(msg)).text
            print "ret_msg:", ret_msg
            itchat.send("@" + msg['ActualNickName'] + " :" + ret_msg, msg['FromUserName'])
        except Exception, ex:
            print ex
            # fallback reply: "the server hiccuped ~"
            itchat.send("@" + msg['ActualNickName'] + " :" + u"服务器开小差啦~", msg['FromUserName'])
    else:
        if msg["Content"] in ["fan", "饭", "+1"]:
            # reply: "Do you want to order food? To order, just @ me ^_^"
            itchat.send("@" + msg['ActualNickName'] + " :" + u'您是要订饭吗? 订饭请直接@我 ^_^', msg['FromUserName'])
def auto_notify_fan(thread_name):
    """
    Automatically remind everyone to order food once a day (scheduled below at 15:15).
    :return:
    """
    def daily_notify():
        print "now:", datetime.now()
        # message: "Test: classmates who haven't ordered food today, remember to order food"
        print "automatically message :", u"测试: 今天没订饭的同学们记得订饭哦"
        # the test nickname below is immediately overridden by the dev group name
        target_chatroom_nickname = "wxbot_dev"
        target_chatroom_nickname = u"CDC自动订饭群-Dev阶段"  # "CDC auto food-ordering group - Dev stage"
chatrooms = itchat.get_chatrooms()
        for room in chatrooms:
            if room['NickName'] == target_chatroom_nickname:
                room_id = room['UserName']
                print "send message to id:", room_id
                # message: "Daily automatic reminder: classmates who haven't ordered food today, remember to order food"
                itchat.send(u"每天自动提醒: 今天没订饭的同学们记得订饭哦", room_id)
schedule.every().day.at("15:15").do(daily_notify)
while True:
schedule.run_pending()
time.sleep(10)
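# schedule only records jobs; run_pending() has to be polled in a loop (as
# above) for them to fire, which is why main() starts auto_notify_fan on its
# own thread below.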
if __name__ == '__main__':
    enableCmdQR = True
    is_windows = any(platform.win32_ver())
    if is_windows:
        enableCmdQR = False
itchat.auto_login(True, enableCmdQR=enableCmdQR)
thread.start_new_thread(auto_notify_fan, ("test thread",))
print json.dumps(itchat.get_chatrooms())
itchat.run()
|
cloudbase/neutron-virtualbox | refs/heads/virtualbox_agent | neutron/db/migration/alembic_migrations/versions/27cc183af192_ml2_vnic_type.py | 17 | # Copyright 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""ml2_vnic_type
Revision ID: 27cc183af192
Revises: 4ca36cfc898c
Create Date: 2014-02-09 12:19:21.362967
"""
# revision identifiers, used by Alembic.
revision = '27cc183af192'
down_revision = '4ca36cfc898c'
from alembic import op
import sqlalchemy as sa
from neutron.db import migration
def upgrade():
if migration.schema_has_table('ml2_port_bindings'):
op.add_column('ml2_port_bindings',
sa.Column('vnic_type', sa.String(length=64),
nullable=False,
server_default='normal'))
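# For reference, on MySQL the add_column above emits DDL roughly equivalent to
# the following (a sketch; the exact statement depends on the backend):
#
#   ALTER TABLE ml2_port_bindings
#       ADD COLUMN vnic_type VARCHAR(64) NOT NULL DEFAULT 'normal';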
def downgrade():
pass
|
muraliselva10/designate | refs/heads/master | designate/tests/test_schema/test_format.py | 5 | # Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Author: Kiall Mac Innes <kiall@hp.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
from designate.tests import TestCase
from designate.schema import format
LOG = logging.getLogger(__name__)
class SchemaFormatTest(TestCase):
def test_is_ipv4(self):
valid_ipaddresses = [
'0.0.0.1',
'127.0.0.1',
'10.0.0.1',
'192.0.2.2',
]
invalid_ipaddresses = [
'0.0.0.0',
'0.0.0.256',
'0.0.256.0',
'0.256.0.0',
'256.0.0.0',
'127.0.0',
'127.0.0.999',
'127.0.0.256',
'127.0..1',
'-1.0.0.1',
'1.0.-0.1',
'1.0.0.-1',
'ABCDEF',
'ABC/DEF',
'ABC\\DEF',
# Trailing newline - Bug 1471158
"127.0.0.1\n",
]
for ipaddress in valid_ipaddresses:
self.assertTrue(format.is_ipv4(ipaddress))
for ipaddress in invalid_ipaddresses:
self.assertFalse(format.is_ipv4(ipaddress))
def test_is_ipv6(self):
valid_ipaddresses = [
'2001:db8::0',
'2001:0db8:85a3:0000:0000:8a2e:0370:7334',
'2001:db8:85a3:0000:0000:8a2e:0370:7334',
'2001:db8:85a3::8a2e:0370:7334',
]
invalid_ipaddresses = [
# Invalid characters
            'hhhh:hhhh:hhhh:hhhh:hhhh:hhhh:hhhh:hhhh',
# Trailing newline - Bug 1471158
"2001:db8::0\n",
]
for ipaddress in valid_ipaddresses:
self.assertTrue(format.is_ipv6(ipaddress),
'Expected Valid: %s' % ipaddress)
for ipaddress in invalid_ipaddresses:
self.assertFalse(format.is_ipv6(ipaddress),
'Expected Invalid: %s' % ipaddress)
def test_is_hostname(self):
valid_hostnames = [
'example.com.',
'www.example.com.',
'*.example.com.',
'12345.example.com.',
'192-0-2-1.example.com.',
'ip192-0-2-1.example.com.',
'www.ip192-0-2-1.example.com.',
'ip192-0-2-1.www.example.com.',
'abc-123.example.com.',
'_tcp.example.com.',
'_service._tcp.example.com.',
('1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.8.b.d.0.1.0.0.2'
'.ip6.arpa.'),
'1.1.1.1.in-addr.arpa.',
'abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijk.',
('abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijk.'
'abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijk.'
'abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijk.'
'abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghi.'),
]
invalid_hostnames = [
'**.example.com.',
'*.*.example.org.',
'a.*.example.org.',
            # Exceeds single label length limit
('abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijkL'
'.'),
('abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijk.'
'abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijk.'
'abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijk.'
'abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijkL'
'.'),
# Exceeds total length limit
('abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijk.'
'abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijk.'
'abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijk.'
'abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijk.'
'abcdefghijklmnopqrstuvwxyzabcdefghijklmnopq.'),
# Empty label part
'abc..def.',
'..',
# Invalid character
'abc$.def.',
'abc.def$.',
# Labels must not start with a -
'-abc.',
'abc.-def.',
'abc.-def.ghi.',
# Labels must not end with a -
'abc-.',
'abc.def-.',
'abc.def-.ghi.',
# Labels must not start or end with a -
'-abc-.',
'abc.-def-.',
'abc.-def-.ghi.',
# Trailing newline - Bug 1471158
"www.example.com.\n",
]
for hostname in valid_hostnames:
self.assertTrue(format.is_hostname(hostname))
for hostname in invalid_hostnames:
self.assertFalse(format.is_hostname(hostname))
def test_is_domainname(self):
valid_domainnames = [
'example.com.',
'www.example.com.',
'12345.example.com.',
'192-0-2-1.example.com.',
'ip192-0-2-1.example.com.',
'www.ip192-0-2-1.example.com.',
'ip192-0-2-1.www.example.com.',
'abc-123.example.com.',
'_tcp.example.com.',
'_service._tcp.example.com.',
('1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.8.b.d.0.1.0.0.2'
'.ip6.arpa.'),
'1.1.1.1.in-addr.arpa.',
'abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijk.',
('abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijk.'
'abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijk.'
'abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijk.'
'abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghi.'),
]
invalid_domainnames = [
'*.example.com.',
'**.example.com.',
'*.*.example.org.',
'a.*.example.org.',
            # Exceeds single label length limit
('abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijkL'
'.'),
('abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijk.'
'abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijkL'
'.'),
('abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijk.'
'abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijk.'
'abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijk.'
'abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijkL'
'.'),
# Exceeds total length limit
('abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijk.'
'abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijk.'
'abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijk.'
'abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijk.'
'abcdefghijklmnopqrstuvwxyzabcdefghijklmnopq.'),
# Empty label part
'abc..def.',
'..',
# Invalid character
'abc$.def.',
'abc.def$.',
# Labels must not start with a -
'-abc.',
'abc.-def.',
'abc.-def.ghi.',
# Labels must not end with a -
'abc-.',
'abc.def-.',
'abc.def-.ghi.',
# Labels must not start or end with a -
'-abc-.',
'abc.-def-.',
'abc.-def-.ghi.',
# Trailing newline - Bug 1471158
"example.com.\n",
]
for domainname in valid_domainnames:
self.assertTrue(format.is_domainname(domainname), domainname)
for domainname in invalid_domainnames:
self.assertFalse(format.is_domainname(domainname), domainname)
def test_is_srv_hostname(self):
valid_hostnames = [
'_sip._tcp.example.com.',
'_sip._udp.example.com.',
]
invalid_hostnames = [
# Invalid Formats
'_tcp.example.com.',
'sip._udp.example.com.',
'_sip.udp.example.com.',
'sip.udp.example.com.',
# Trailing newline - Bug 1471158
"_sip._tcp.example.com.\n",
]
for hostname in valid_hostnames:
self.assertTrue(format.is_srv_hostname(hostname),
'Expected Valid: %s' % hostname)
for hostname in invalid_hostnames:
self.assertFalse(format.is_srv_hostname(hostname),
'Expected Invalid: %s' % hostname)
def test_is_tldname(self):
valid_tldnames = [
'com',
'net',
'org',
'co.uk',
]
invalid_tldnames = [
# Invalid Formats
'com.',
'.com',
# Trailing newline - Bug 1471158
"com\n",
]
for tldname in valid_tldnames:
self.assertTrue(format.is_tldname(tldname),
'Expected Valid: %s' % tldname)
for tldname in invalid_tldnames:
self.assertFalse(format.is_tldname(tldname),
'Expected Invalid: %s' % tldname)
def test_is_email(self):
valid_emails = [
'user@example.com',
'user@emea.example.com',
'user@example.com',
'first.last@example.com',
]
invalid_emails = [
            # We use the email addr for the SOA RNAME field; this means the
            # entire address, excluding the @, must be characters valid
            # in a DNS name. i.e. + and % addressing is invalid.
'user+plus@example.com',
'user%example.org@example.com',
'example.org',
'@example.org',
'user@*.example.org',
'user',
'user@',
'user+plus',
'user+plus@',
'user%example.org',
'user%example.org@',
'user@example.org.',
# Exceeds total length limit
('user@fghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijk.'
'abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijk.'
'abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijk.'
'abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijk.'
'abcdefghijklmnopqrstuvwxyzabcdefghijklmnopq.'),
            # Exceeds single label length limit
('user@abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefg'
'hijkL.'),
('user@abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefg'
'hijk.abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefg'
'hijkL.'),
            # Exceeds single label length limit in username part
('abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijkL'
'@example.com.'),
]
for email in valid_emails:
LOG.debug('Expecting success for: %s' % email)
self.assertTrue(format.is_email(email))
for email in invalid_emails:
LOG.debug('Expecting failure for: %s' % email)
self.assertFalse(format.is_email(email))
def test_is_sshfp(self):
valid_sshfps = [
'72d30d211ce8c464de2811e534de23b9be9b4dc4',
]
invalid_sshfps = [
# Invalid Formats
'P2d30d211ce8c464de2811e534de23b9be9b4dc4', # "P" !IN [A-F]
'72d30d211',
# Trailing newline - Bug 1471158
"72d30d211ce8c464de2811e534de23b9be9b4dc4\n",
]
for sshfp in valid_sshfps:
self.assertTrue(format.is_sshfp(sshfp),
'Expected Valid: %s' % sshfp)
for sshfp in invalid_sshfps:
self.assertFalse(format.is_sshfp(sshfp),
'Expected Invalid: %s' % sshfp)
def test_is_uuid(self):
valid_uuids = [
'd3693ef8-2188-11e5-bf77-676ff9eb39dd',
]
invalid_uuids = [
# Invalid Formats
'p3693ef8-2188-11e5-bf77-676ff9eb39dd', # "p" !IN [A-F]
'd3693ef8218811e5bf77676ff9eb39dd',
# Trailing newline - Bug 1471158
"d3693ef8-2188-11e5-bf77-676ff9eb39dd\n",
]
for uuid in valid_uuids:
self.assertTrue(format.is_uuid(uuid),
'Expected Valid: %s' % uuid)
for uuid in invalid_uuids:
self.assertFalse(format.is_uuid(uuid),
'Expected Invalid: %s' % uuid)
def test_is_fip_id(self):
valid_fip_ids = [
'region-a:d3693ef8-2188-11e5-bf77-676ff9eb39dd',
]
invalid_fip_ids = [
# Invalid Formats
'region-a:p3693ef8-2188-11e5-bf77-676ff9eb39dd', # "p" !IN [A-F]
# Trailing newline - Bug 1471158
"region-a:d3693ef8-2188-11e5-bf77-676ff9eb39dd\n",
]
for fip_id in valid_fip_ids:
self.assertTrue(format.is_floating_ip_id(fip_id),
'Expected Valid: %s' % fip_id)
for fip_id in invalid_fip_ids:
self.assertFalse(format.is_floating_ip_id(fip_id),
'Expected Invalid: %s' % fip_id)
def test_is_ip_and_port(self):
valid_ip_and_ports = [
'192.0.2.1:80',
'192.0.2.1:1',
'192.0.2.1:65535',
]
invalid_ip_and_ports = [
'192.0.2.1:65536',
# Trailing newline - Bug 1471158
"192.0.2.1:80\n",
]
for ip_and_port in valid_ip_and_ports:
self.assertTrue(format.is_ip_and_port(ip_and_port),
'Expected Valid: %s' % ip_and_port)
for ip_and_port in invalid_ip_and_ports:
self.assertFalse(format.is_ip_and_port(ip_and_port),
'Expected Invalid: %s' % ip_and_port)
|
harihpr/tweetclickers | refs/heads/master | pybossa/auth/category.py | 5 | # -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2013 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
from flask.ext.login import current_user
def create(category=None):
    if current_user.is_authenticated():
        return current_user.admin is True
    return False
def read(category=None):
return True
def update(category):
return create(category)
def delete(category):
return create(category)
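# Illustrative usage (hypothetical caller, not part of this module): a view
# would consult these predicates before acting, e.g.
#
#   from pybossa.auth import category as category_auth
#   if not category_auth.create():
#       abort(403)  # only admins may create categories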
|
MTG/sms-tools | refs/heads/master | software/transformations_interface/hpsMorph_GUI_frame.py | 2 | # GUI frame for the hpsMorph_function.py
try:
# for Python2
from Tkinter import * ## notice capitalized T in Tkinter
import tkFileDialog, tkMessageBox
except ImportError:
# for Python3
from tkinter import * ## notice lowercase 't' in tkinter here
from tkinter import filedialog as tkFileDialog
from tkinter import messagebox as tkMessageBox
import sys, os
from scipy.io.wavfile import read
import numpy as np
import hpsMorph_function as hM
sys.path.append(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../models/'))
import utilFunctions as UF
class HpsMorph_frame:
def __init__(self, parent):
self.parent = parent
self.initUI()
def initUI(self):
## INPUT FILE 1
choose1_label = "inputFile1:"
Label(self.parent, text=choose1_label).grid(row=0, column=0, sticky=W, padx=5, pady=(10,2))
#TEXTBOX TO PRINT PATH OF THE SOUND FILE
self.filelocation1 = Entry(self.parent)
self.filelocation1.focus_set()
self.filelocation1["width"] = 30
self.filelocation1.grid(row=0,column=0, sticky=W, padx=(75, 5), pady=(10,2))
self.filelocation1.delete(0, END)
self.filelocation1.insert(0, '../../sounds/violin-B3.wav')
#BUTTON TO BROWSE SOUND FILE 1
open_file1 = Button(self.parent, text="...", command=self.browse_file1) #see: def browse_file(self)
open_file1.grid(row=0, column=0, sticky=W, padx=(330, 6), pady=(10,2)) #put it beside the filelocation textbox
#BUTTON TO PREVIEW SOUND FILE 1
preview1 = Button(self.parent, text=">", command=lambda:UF.wavplay(self.filelocation1.get()), bg="gray30", fg="white")
preview1.grid(row=0, column=0, sticky=W, padx=(375,6), pady=(10,2))
#ANALYSIS WINDOW TYPE SOUND 1
wtype1_label = "window1:"
Label(self.parent, text=wtype1_label).grid(row=1, column=0, sticky=W, padx=5, pady=(4,2))
self.w1_type = StringVar()
self.w1_type.set("blackman") # initial value
window1_option = OptionMenu(self.parent, self.w1_type, "rectangular", "hanning", "hamming", "blackman", "blackmanharris")
window1_option.grid(row=1, column=0, sticky=W, padx=(68,5), pady=(4,2))
#WINDOW SIZE SOUND 1
M1_label = "M1:"
Label(self.parent, text=M1_label).grid(row=1, column=0, sticky=W, padx=(180, 5), pady=(4,2))
self.M1 = Entry(self.parent, justify=CENTER)
self.M1["width"] = 5
self.M1.grid(row=1,column=0, sticky=W, padx=(208,5), pady=(4,2))
self.M1.delete(0, END)
self.M1.insert(0, "1001")
#FFT SIZE SOUND 1
N1_label = "N1:"
Label(self.parent, text=N1_label).grid(row=1, column=0, sticky=W, padx=(265, 5), pady=(4,2))
self.N1 = Entry(self.parent, justify=CENTER)
self.N1["width"] = 5
self.N1.grid(row=1,column=0, sticky=W, padx=(290,5), pady=(4,2))
self.N1.delete(0, END)
self.N1.insert(0, "1024")
#THRESHOLD MAGNITUDE SOUND 1
t1_label = "t1:"
Label(self.parent, text=t1_label).grid(row=1, column=0, sticky=W, padx=(343,5), pady=(4,2))
self.t1 = Entry(self.parent, justify=CENTER)
self.t1["width"] = 5
self.t1.grid(row=1, column=0, sticky=W, padx=(370,5), pady=(4,2))
self.t1.delete(0, END)
self.t1.insert(0, "-100")
#MIN DURATION SINUSOIDAL TRACKS SOUND 1
minSineDur1_label = "minSineDur1:"
Label(self.parent, text=minSineDur1_label).grid(row=2, column=0, sticky=W, padx=(5, 5), pady=(4,2))
self.minSineDur1 = Entry(self.parent, justify=CENTER)
self.minSineDur1["width"] = 5
self.minSineDur1.grid(row=2, column=0, sticky=W, padx=(92,5), pady=(4,2))
self.minSineDur1.delete(0, END)
self.minSineDur1.insert(0, "0.05")
#MIN FUNDAMENTAL FREQUENCY SOUND 1
minf01_label = "minf01:"
Label(self.parent, text=minf01_label).grid(row=2, column=0, sticky=W, padx=(157,5), pady=(4,2))
self.minf01 = Entry(self.parent, justify=CENTER)
self.minf01["width"] = 5
self.minf01.grid(row=2, column=0, sticky=W, padx=(208,5), pady=(4,2))
self.minf01.delete(0, END)
self.minf01.insert(0, "200")
#MAX FUNDAMENTAL FREQUENCY SOUND 1
maxf01_label = "maxf01:"
Label(self.parent, text=maxf01_label).grid(row=2, column=0, sticky=W, padx=(270,5), pady=(4,2))
self.maxf01 = Entry(self.parent, justify=CENTER)
self.maxf01["width"] = 5
self.maxf01.grid(row=2, column=0, sticky=W, padx=(325,5), pady=(4,2))
self.maxf01.delete(0, END)
self.maxf01.insert(0, "300")
#MAX ERROR ACCEPTED SOUND 1
f0et1_label = "f0et1:"
Label(self.parent, text=f0et1_label).grid(row=3, column=0, sticky=W, padx=5, pady=(4,2))
self.f0et1 = Entry(self.parent, justify=CENTER)
self.f0et1["width"] = 3
self.f0et1.grid(row=3, column=0, sticky=W, padx=(45,5), pady=(4,2))
self.f0et1.delete(0, END)
self.f0et1.insert(0, "10")
#ALLOWED DEVIATION OF HARMONIC TRACKS SOUND 1
harmDevSlope1_label = "harmDevSlope1:"
Label(self.parent, text=harmDevSlope1_label).grid(row=3, column=0, sticky=W, padx=(108,5), pady=(4,2))
self.harmDevSlope1 = Entry(self.parent, justify=CENTER)
self.harmDevSlope1["width"] = 5
self.harmDevSlope1.grid(row=3, column=0, sticky=W, padx=(215,5), pady=(4,2))
self.harmDevSlope1.delete(0, END)
self.harmDevSlope1.insert(0, "0.01")
###
#SEPARATION LINE
Frame(self.parent,height=1,width=50,bg="black").grid(row=4, pady=5, sticky=W+E)
###
## INPUT FILE 2
choose2_label = "inputFile2:"
Label(self.parent, text=choose2_label).grid(row=5, column=0, sticky=W, padx=5, pady=(2,2))
#TEXTBOX TO PRINT PATH OF THE SOUND FILE
self.filelocation2 = Entry(self.parent)
self.filelocation2.focus_set()
self.filelocation2["width"] = 30
self.filelocation2.grid(row=5,column=0, sticky=W, padx=(75, 5), pady=(2,2))
self.filelocation2.delete(0, END)
self.filelocation2.insert(0, '../../sounds/soprano-E4.wav')
#BUTTON TO BROWSE SOUND FILE 2
open_file2 = Button(self.parent, text="...", command=self.browse_file2) #see: def browse_file(self)
open_file2.grid(row=5, column=0, sticky=W, padx=(330, 6), pady=(2,2)) #put it beside the filelocation textbox
#BUTTON TO PREVIEW SOUND FILE 2
preview2 = Button(self.parent, text=">", command=lambda:UF.wavplay(self.filelocation2.get()), bg="gray30", fg="white")
preview2.grid(row=5, column=0, sticky=W, padx=(375,6), pady=(2,2))
#ANALYSIS WINDOW TYPE SOUND 2
wtype2_label = "window2:"
Label(self.parent, text=wtype2_label).grid(row=6, column=0, sticky=W, padx=5, pady=(4,2))
self.w2_type = StringVar()
self.w2_type.set("hamming") # initial value
window2_option = OptionMenu(self.parent, self.w2_type, "rectangular", "hanning", "hamming", "blackman", "blackmanharris")
window2_option.grid(row=6, column=0, sticky=W, padx=(68,5), pady=(4,2))
#WINDOW SIZE SOUND 2
M2_label = "M2:"
Label(self.parent, text=M2_label).grid(row=6, column=0, sticky=W, padx=(180, 5), pady=(4,2))
self.M2 = Entry(self.parent, justify=CENTER)
self.M2["width"] = 5
self.M2.grid(row=6,column=0, sticky=W, padx=(208,5), pady=(4,2))
self.M2.delete(0, END)
self.M2.insert(0, "901")
#FFT SIZE SOUND 2
N2_label = "N2:"
Label(self.parent, text=N2_label).grid(row=6, column=0, sticky=W, padx=(265, 5), pady=(4,2))
self.N2 = Entry(self.parent, justify=CENTER)
self.N2["width"] = 5
self.N2.grid(row=6,column=0, sticky=W, padx=(290,5), pady=(4,2))
self.N2.delete(0, END)
self.N2.insert(0, "1024")
#THRESHOLD MAGNITUDE SOUND 2
t2_label = "t2:"
Label(self.parent, text=t2_label).grid(row=6, column=0, sticky=W, padx=(343,5), pady=(4,2))
self.t2 = Entry(self.parent, justify=CENTER)
self.t2["width"] = 5
self.t2.grid(row=6, column=0, sticky=W, padx=(370,5), pady=(4,2))
self.t2.delete(0, END)
self.t2.insert(0, "-100")
#MIN DURATION SINUSOIDAL TRACKS SOUND 2
minSineDur2_label = "minSineDur2:"
Label(self.parent, text=minSineDur2_label).grid(row=7, column=0, sticky=W, padx=(5, 5), pady=(4,2))
self.minSineDur2 = Entry(self.parent, justify=CENTER)
self.minSineDur2["width"] = 5
self.minSineDur2.grid(row=7, column=0, sticky=W, padx=(92,5), pady=(4,2))
self.minSineDur2.delete(0, END)
self.minSineDur2.insert(0, "0.05")
#MIN FUNDAMENTAL FREQUENCY SOUND 2
minf02_label = "minf02:"
Label(self.parent, text=minf02_label).grid(row=7, column=0, sticky=W, padx=(157,5), pady=(4,2))
self.minf02 = Entry(self.parent, justify=CENTER)
self.minf02["width"] = 5
self.minf02.grid(row=7, column=0, sticky=W, padx=(208,5), pady=(4,2))
self.minf02.delete(0, END)
self.minf02.insert(0, "250")
#MAX FUNDAMENTAL FREQUENCY SOUND 2
maxf02_label = "maxf02:"
Label(self.parent, text=maxf02_label).grid(row=7, column=0, sticky=W, padx=(270,5), pady=(4,2))
self.maxf02 = Entry(self.parent, justify=CENTER)
self.maxf02["width"] = 5
self.maxf02.grid(row=7, column=0, sticky=W, padx=(325,5), pady=(4,2))
self.maxf02.delete(0, END)
self.maxf02.insert(0, "500")
#MAX ERROR ACCEPTED SOUND 2
f0et2_label = "f0et2:"
Label(self.parent, text=f0et2_label).grid(row=8, column=0, sticky=W, padx=5, pady=(4,2))
self.f0et2 = Entry(self.parent, justify=CENTER)
self.f0et2["width"] = 3
self.f0et2.grid(row=8, column=0, sticky=W, padx=(45,5), pady=(4,2))
self.f0et2.delete(0, END)
self.f0et2.insert(0, "10")
#ALLOWED DEVIATION OF HARMONIC TRACKS SOUND 2
harmDevSlope2_label = "harmDevSlope2:"
Label(self.parent, text=harmDevSlope2_label).grid(row=8, column=0, sticky=W, padx=(108,5), pady=(4,2))
self.harmDevSlope2 = Entry(self.parent, justify=CENTER)
self.harmDevSlope2["width"] = 5
self.harmDevSlope2.grid(row=8, column=0, sticky=W, padx=(215,5), pady=(4,2))
self.harmDevSlope2.delete(0, END)
self.harmDevSlope2.insert(0, "0.01")
###
#SEPARATION LINE
Frame(self.parent,height=1,width=50,bg="black").grid(row=9, pady=5, sticky=W+E)
###
#MAX NUMBER OF HARMONICS SOUND 1
nH_label = "nH:"
Label(self.parent, text=nH_label).grid(row=10, column=0, sticky=W, padx=(5,5), pady=(2,2))
self.nH = Entry(self.parent, justify=CENTER)
self.nH["width"] = 5
self.nH.grid(row=10, column=0, sticky=W, padx=(35,5), pady=(2,2))
self.nH.delete(0, END)
self.nH.insert(0, "60")
#DECIMATION FACTOR SOUND 1
stocf_label = "stocf:"
Label(self.parent, text=stocf_label).grid(row=10, column=0, sticky=W, padx=(98,5), pady=(2,2))
self.stocf = Entry(self.parent, justify=CENTER)
self.stocf["width"] = 5
self.stocf.grid(row=10, column=0, sticky=W, padx=(138,5), pady=(2,2))
self.stocf.delete(0, END)
self.stocf.insert(0, "0.1")
#BUTTON TO DO THE ANALYSIS OF THE SOUND
self.compute = Button(self.parent, text="Analysis", command=self.analysis, bg="dark red", fg="white")
self.compute.grid(row=10, column=0, padx=(210, 5), pady=(2,2), sticky=W)
###
#SEPARATION LINE
Frame(self.parent,height=1,width=50,bg="black").grid(row=11, pady=5, sticky=W+E)
###
#
hfreqIntp_label = "harmonic frequencies interpolation factors, 0 to 1 (time,value pairs)"
Label(self.parent, text=hfreqIntp_label).grid(row=12, column=0, sticky=W, padx=5, pady=(2,2))
self.hfreqIntp = Entry(self.parent, justify=CENTER)
self.hfreqIntp["width"] = 35
self.hfreqIntp.grid(row=13, column=0, sticky=W+E, padx=5, pady=(0,2))
self.hfreqIntp.delete(0, END)
self.hfreqIntp.insert(0, "[0, 0, .1, 0, .9, 1, 1, 1]")
#
hmagIntp_label = "harmonic magnitudes interpolation factors, 0 to 1 (time,value pairs)"
Label(self.parent, text=hmagIntp_label).grid(row=14, column=0, sticky=W, padx=5, pady=(5,2))
self.hmagIntp = Entry(self.parent, justify=CENTER)
self.hmagIntp["width"] = 35
self.hmagIntp.grid(row=15, column=0, sticky=W+E, padx=5, pady=(0,2))
self.hmagIntp.delete(0, END)
self.hmagIntp.insert(0, "[0, 0, .1, 0, .9, 1, 1, 1]")
#
stocIntp_label = "stochastic component interpolation factors, 0 to 1 (time,value pairs)"
Label(self.parent, text=stocIntp_label).grid(row=16, column=0, sticky=W, padx=5, pady=(5,2))
self.stocIntp = Entry(self.parent, justify=CENTER)
self.stocIntp["width"] = 35
self.stocIntp.grid(row=17, column=0, sticky=W+E, padx=5, pady=(0,2))
self.stocIntp.delete(0, END)
self.stocIntp.insert(0, "[0, 0, .1, 0, .9, 1, 1, 1]")
#BUTTON TO DO THE SYNTHESIS
self.compute = Button(self.parent, text="Apply Transformation", command=self.transformation_synthesis, bg="dark green", fg="white")
self.compute.grid(row=18, column=0, padx=5, pady=(10,15), sticky=W)
#BUTTON TO PLAY TRANSFORMATION SYNTHESIS OUTPUT
self.transf_output = Button(self.parent, text=">", command=lambda:UF.wavplay('output_sounds/' + os.path.basename(self.filelocation1.get())[:-4] + '_hpsMorph.wav'), bg="gray30", fg="white")
self.transf_output.grid(row=18, column=0, padx=(165,5), pady=(10,15), sticky=W)
# define options for opening file
self.file_opt = options = {}
options['defaultextension'] = '.wav'
options['filetypes'] = [('All files', '.*'), ('Wav files', '.wav')]
options['initialdir'] = '../../sounds/'
options['title'] = 'Open a mono audio file .wav with sample frequency 44100 Hz'
def browse_file1(self):
self.filename1 = tkFileDialog.askopenfilename(**self.file_opt)
#set the text of the self.filelocation
self.filelocation1.delete(0, END)
self.filelocation1.insert(0,self.filename1)
def browse_file2(self):
self.filename2 = tkFileDialog.askopenfilename(**self.file_opt)
#set the text of the self.filelocation
self.filelocation2.delete(0, END)
self.filelocation2.insert(0,self.filename2)
def analysis(self):
try:
inputFile1 = self.filelocation1.get()
window1 = self.w1_type.get()
M1 = int(self.M1.get())
N1 = int(self.N1.get())
t1 = int(self.t1.get())
minSineDur1 = float(self.minSineDur1.get())
minf01 = int(self.minf01.get())
maxf01 = int(self.maxf01.get())
f0et1 = int(self.f0et1.get())
harmDevSlope1 = float(self.harmDevSlope1.get())
nH = int(self.nH.get())
stocf = float(self.stocf.get())
inputFile2 = self.filelocation2.get()
window2 = self.w2_type.get()
M2 = int(self.M2.get())
N2 = int(self.N2.get())
t2 = int(self.t2.get())
minSineDur2 = float(self.minSineDur2.get())
minf02 = int(self.minf02.get())
maxf02 = int(self.maxf02.get())
f0et2 = int(self.f0et2.get())
harmDevSlope2 = float(self.harmDevSlope2.get())
self.inputFile1, self.fs1, self.hfreq1, self.hmag1, self.stocEnv1, \
self.inputFile2, self.hfreq2, self.hmag2, self.stocEnv2 = hM.analysis(inputFile1, window1, M1, N1, t1, \
minSineDur1, nH, minf01, maxf01, f0et1, harmDevSlope1, stocf, inputFile2, window2, M2, N2, t2, minSineDur2, minf02, maxf02, f0et2, harmDevSlope2)
except ValueError as errorMessage:
tkMessageBox.showerror("Input values error", errorMessage)
def transformation_synthesis(self):
try:
inputFile1 = self.inputFile1
fs = self.fs1
hfreq1 = self.hfreq1
hmag1 = self.hmag1
stocEnv1 = self.stocEnv1
inputFile2 = self.inputFile2
hfreq2 = self.hfreq2
hmag2 = self.hmag2
stocEnv2 = self.stocEnv2
hfreqIntp = np.array(eval(self.hfreqIntp.get()))
hmagIntp = np.array(eval(self.hmagIntp.get()))
stocIntp = np.array(eval(self.stocIntp.get()))
hM.transformation_synthesis(inputFile1, fs, hfreq1, hmag1, stocEnv1, inputFile2, hfreq2, hmag2, stocEnv2, hfreqIntp, hmagIntp, stocIntp)
except ValueError as errorMessage:
tkMessageBox.showerror("Input values error", errorMessage)
except AttributeError:
tkMessageBox.showerror("Analysis not computed", "First you must analyse the sound!")
|
goddardl/cortex | refs/heads/master | test/IECoreRI/MultipleContextsTest.py | 7 | ##########################################################################
#
# Copyright (c) 2008-2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Image Engine Design nor the names of any
# other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import unittest
import IECore
import IECoreRI
import os.path
import os
class MultipleContextsTest( IECoreRI.TestCase ) :
def test( self ) :
r1 = IECoreRI.Renderer( "test/IECoreRI/output/contextOne.rib" )
r2 = IECoreRI.Renderer( "test/IECoreRI/output/contextTwo.rib" )
self.assertEqual( r1.getAttribute( "doubleSided" ), IECore.BoolData( True ) )
self.assertEqual( r2.getAttribute( "doubleSided" ), IECore.BoolData( True ) )
r1.setAttribute( "doubleSided", IECore.BoolData( False ) )
self.assertEqual( r1.getAttribute( "doubleSided" ), IECore.BoolData( False ) )
self.assertEqual( r2.getAttribute( "doubleSided" ), IECore.BoolData( True ) )
r1.setAttribute( "doubleSided", IECore.BoolData( True ) )
self.assertEqual( r1.getAttribute( "doubleSided" ), IECore.BoolData( True ) )
self.assertEqual( r2.getAttribute( "doubleSided" ), IECore.BoolData( True ) )
r2.setAttribute( "doubleSided", IECore.BoolData( False ) )
self.assertEqual( r1.getAttribute( "doubleSided" ), IECore.BoolData( True ) )
self.assertEqual( r2.getAttribute( "doubleSided" ), IECore.BoolData( False ) )
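# Note: the assertions above depend on each IECoreRI.Renderer owning an
# independent RI context, so attribute state set through one renderer never
# leaks into the other.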
if __name__ == "__main__":
unittest.main()
|
crypto101/merlyn | refs/heads/master | merlyn/test/test_auth.py | 1 | from axiom.store import Store
from merlyn import auth
from OpenSSL.crypto import FILETYPE_PEM, load_certificate, load_privatekey
from twisted.python.log import ILogObserver, addObserver, removeObserver
from twisted.test.proto_helpers import StringTransport
from twisted.trial.unittest import SynchronousTestCase
from zope.interface import implementer
from zope.interface.verify import verifyObject
class UserTests(SynchronousTestCase):
def test_emailIndexed(self):
"""The email attribute of the User item is indexed.
"""
self.assertTrue(auth.User.email.indexed)
@implementer(ILogObserver)
class FakeLogObserver(object):
def __init__(self):
self.events = []
def __call__(self, eventDict):
self.events.append(eventDict)
class FakeLogObserverTests(SynchronousTestCase):
def test_interface(self):
"""The fake log observer implements ILogObserver.
"""
self.assertTrue(verifyObject(ILogObserver, FakeLogObserver()))
class TOFUContextFactoryTests(SynchronousTestCase):
"""Tests for TOFU/POP (Trust On First Use/Persistence of Pseudonym)
behavior for the context factory.
"""
def setUp(self):
self.store = Store()
self.user = auth.User(store=self.store, email="user@example.com")
self.ctxFactory = auth._TOFUContextFactory(self.store)
self.observer = FakeLogObserver()
addObserver(self.observer)
self.addCleanup(removeObserver, self.observer)
def _getLogMessage(self):
for e in self.observer.events:
if not e.get("message"):
continue
return e["message"][0]
def test_firstConnection(self):
"""First connections store the digest. Connection succeeds.
"""
verifyResult = self.ctxFactory._verify(None, realUserCert, 0, 0, 0)
self.assertTrue(verifyResult)
self.assertEqual(self.user.digest, realUserCert.digest("sha512"))
message = self._getLogMessage()
self.assertIn("First connection", message)
self.assertIn(self.user.email, message)
self.assertIn(self.user.digest, message)
def test_correctDigest(self):
"""Connection attempts with the correct digest succeed.
"""
self.user.digest = realUserCert.digest("sha512")
verifyResult = self.ctxFactory._verify(None, realUserCert, 0, 0, 0)
self.assertTrue(verifyResult)
message = self._getLogMessage()
self.assertIn("Successful connection", message)
self.assertIn(self.user.email, message)
def test_noSuchEmail(self):
"""Connection attempts for unknown e-mail addresses fail.
"""
verifyResult = self.ctxFactory._verify(None, bogusCert, 0, 0, 0)
self.assertFalse(verifyResult)
message = self._getLogMessage()
self.assertIn("Connection attempt", message)
self.assertIn("by {!r}".format(auth.emailForCert(bogusCert)), message)
self.assertIn("digest was " + bogusCert.digest("sha512"), message)
def test_badDigest(self):
"""Connection attempts with a bad digest fail.
"""
self.user.digest = realUserCert.digest("sha512")
verifyResult = self.ctxFactory._verify(None, impostorCert, 0, 0, 0)
self.assertFalse(verifyResult)
message = self._getLogMessage()
self.assertIn("Failed connection", message)
self.assertIn("digest was " + impostorCert.digest("sha512"), message)
self.assertIn("expecting " + self.user.digest, message)
class UserMixinTests(SynchronousTestCase):
def setUp(self):
self.userMixin = auth.UserMixin()
self.store = self.userMixin.store = Store()
def test_getUser(self):
"""The user mixin gets the user using the peer certificate.
"""
user = auth.User(store=self.store,
email="user@example.com",
digest=realUserCert.digest("sha512"))
self.userMixin.transport = transport = StringTransport()
transport.getPeerCertificate = lambda: realUserCert
self.assertEqual(self.userMixin.user, user)
def test_cache(self):
"""If the ``_user`` cache is primed, it is used.
"""
sentinel = object()
self.userMixin._user = sentinel
self.assertEqual(self.userMixin.user, sentinel)
realUserKey = load_privatekey(FILETYPE_PEM, """
-----BEGIN RSA PRIVATE KEY-----
MIIJJwIBAAKCAgEApnviSoR0JPFjSaYs3pB4ycA2+CNcvnPpFFMZscATw5J+H5Sd
+P2xYo5XP7N8Kjs6RxFwu50fePqO5BXpMlum0KGP3hT7gQ9uk2WkaXFF5FEHwBkN
Sa8JTHXoHp5n2QWkh/h5G5lSkjfk5IzdzJYsI7LVCFnS8FEL4r5EOTm32EDNIQgv
1FhmT3rAw7swAUc984oZrGbaGDAJpt8WfCFZG0mUU1ha6ASb5dtQZ2pxvJ5ZJRco
V7vd2nTeSMhUKCDPrQqdnwH657s6TzXWE8VkI0rN7LYFtaCRbI9VoRWZwosrRJgL
DvRMg3I3baX/lRckYwDmsNr0200TfSAT8kqEKhdOH0zk3OpA7KuAjCdWQZMY1C8V
2jPYwuePIfRHYOUIxWTBaka6KNNWa9r2mSLA0IcZ6ddfeNf5j2rTrA9h+dvmFEtK
UOkpxmKUWeNLJBcUz+TBiOfzMgMRUHM6C0SQAVqPVVZZp5dWt8GX6V2wyQrh584T
bYHE3kCKmpZhY+TaeoQV7pi3oQ2KmX0Ao94ecMqFuqL4WFABb0d1vx8kxfPyJ0Fg
U9hSMrwRE+ExGrZ69VF0RNknxBZZDREzD9GJVlTZXLOx37i+7LbtKmZXeZXwuLKJ
vrktXDDaQPUV66DWamqnjUQ6NlYrdFY4omRNISOcT8ytjRpyocxpt8YtlfECAwEA
AQKCAgEAiofJK6J9loP5zz3kVio3KAG2e9HJCX0ftFbVqY+fonwSUKr0rExFPzIc
LZhnOCjifGJpwOOkXaF4JxiIW+vhqfbV5MDm6mRx6VqJbWfg9XPrlBAEe4yXmzT9
OgUrem10k+PQuoNhLuQtpXQF14gaIHZdR76ehHOcBUe3Mzrw3JRHXDYYvoP0VixZ
nET1VAr45N7EMC3BSqEmVuGJLy78m3UlZBjARBIZuzE7/WGYVJAas39KhX6Aw5e9
oyh2xpFO3blYoQgfxJWJloHAqeD1S1yib1ai95gtifzXDtwPfs8Y6NHvWbk0tafj
sWyQeHmyQGNukjkPyC+hiNuZXWJeB+RKVm7lBZ8zG5sR50UGAeT3qptsUm8eVODo
iCeoJut8DHmT0DfA/RG6TKaekuDXGWhMwh9aTnltHt9a9fpC41KqXNNjudwBl+Sb
3QKTEf06iL+MssUrGEYjdRoftmk8W2BNzWb0zWl+D75ejzal1zuVRyJ9qf7VVypb
cL0znKPypSEsG1vX18H6dAKw8xCsjzm9MMPB4iJ+mpbLLJN2GTeYZ2HGg7/NMRWB
G70V88ZRjWJIh9tSYsDQloccQm0SlK/TDaGgYu1iRna+lxE0pvV2iTfsCJM1200i
Q0KMJsFmOkiSymp/R7UAnyCdjlhAMUnOm9x7cVR9fx8Ix3Zb1EUCggEBANeRedOz
CfTO9cf40G9g18vFztPY3o5eUaL+pK9kCVwWWZxbRz6J/ys7BKKtTBXCeNqIu3WA
rsSpQ6DNhSv9fXz7g9trorNPZQuXqw+d2Rw89VwYJiWydl8+cM/r8qDYKfTOoGP0
J/TvkwznqCsE+ZKUAGhfUoek5oMyXyE8q6GrLTkhjOagEFN5j0VZknrkBllv/Xnl
pbSmK89mA7d2e76yoXDvzUqDor500oFzCCt64VRrXKBhXDr2mrnBCazMahGNTIaJ
U6491UxqOQN/TCZ+IN3EuW0CS8f9XZxaS26JJrIO/TtA34QeoKHj/j94UnxlQjPo
vTaUxkg7Ur2RPYsCggEBAMW1nsJjPVjXUUnCBHVwCAz0KvLi+R+ZgpH99ANgTeYn
jqP5RkjIPSKVFJWqmEpt52MBSBad79ypzYkcTtT3nXkeAgTwJuQEnveNCaSMpmlQ
bMOgQO+tMydZH4CoEkdijPIfwEooTPKP9crn22+z7XhK4v/s0iaBE4IqBSPrUAjd
ZfVDB3lgxF7tqukwxSIqXbfvhPbGLewjmM6E+RwncJ1HJrbQMybSQLe5TtKS4nKQ
e+xeu/kW7uP+FCK7oTeIyuvbDEWsKCLCYcjkax4hCd/rJs+pMdKkYke0H+ySZxwk
8OramVCF2K9pyiemcjJBN6ElSoGYhW/pM3RCHkPL4fMCgf8GvIUSGIY3IECN/ziE
QoJ727Ka7CwIRupGLa73zCh+uDQUrsWLLsTKlQ2QB9pY07rzGVLCWUMc4i062TFQ
Lpu9TB7SvIpZECIYOqUd19DxEPaZ6idHBkysrUbZOIZcgGTPQaXBed/Fx7bQsGyQ
65bg/b8Fg/UQSBbsAqb2Yu76Hl9LacD9dAMOmL3hbOsm6/lG0jkZlhOXkZnM4WM8
WHeFfg+Nd/DyYyqyyPPLF80pjq179d7vJBu9u/cZ1u52d+zYn5HEooX66/O+b5NY
iKHYkhh01bD1txynI0PJnwi8a4zKA63mLCDQACUE6hsH4LqzKHbpKFzBV+TaXQA4
7FECggEAZwEYlW3eqEqFr0fFyulzSExtk91srWns/OKyHpAuBZrWVdepJoIsV7gT
4WXfsedQheRFCoN+VBijXKvC5nGbOV7I7omvuVwu9gok2/XrPTMJd2ImcrhpzjZA
k2b9HvPZOswQApK8hCM8i1oAmVHEhsd9PJjFZAobf9UkmHIgYH34gK9LVZF0vYBV
auhdzE8GRK4lN+xIQJ7LHc1pe6GQqmBHazdNbwxba1zAFDUyhT2BUsSIal3oWCAn
nXDjrWs3TWnyGtp2jqV3DJL0u926p058CfS8YGIEUhcmCrq7vY4BdlotRiZ1ne4f
xEiTdltEAFDNYHd2DbgRdqB75BZ0wQKCAQEA0G7GH4w89CQDQWqe540MWaaodFZD
9SQNHEHx0sQmmumc+sd5OWOt6HNZXZxzIplU22c0WIPg52t4oAG4ALE87dkTqtiI
c8hibKRlDZdEOkvPRnoh1re43PvZQ4lGfDE55hAGSe+H0UfYyRDp/ptVJwiLgF6Q
DejgTHgS30qIdFrsWdoiepl/suH27bfxViA3Datu8aqAh0i9IMnlYIl/5JUX7CtT
9jnj3zOmjt4UqmEikqzA/d/h4QBAY2wEOzO3LHMsQmXkd1QFDgH5dpzaDdgpKfjE
p5G2VV8lmOBt+Vx5PqBiPxfsTbsEFi35C3bc2F6ZBBGYqtWbclYrCvjbMg==
-----END RSA PRIVATE KEY-----
""")
realUserCert = load_certificate(FILETYPE_PEM, """
-----BEGIN CERTIFICATE-----
MIIE8TCCAtkCADANBgkqhkiG9w0BAQ0FADA9MRowGAYDVQQDExFDcnlwdG8gMTAx
IENsaWVudDEfMB0GCSqGSIb3DQEJARYQdXNlckBleGFtcGxlLmNvbTAiGA8yMDEz
MTIxODAwMDAwMFoYDzIwMTgxMjE4MDAwMDAwWjA9MRowGAYDVQQDExFDcnlwdG8g
MTAxIENsaWVudDEfMB0GCSqGSIb3DQEJARYQdXNlckBleGFtcGxlLmNvbTCCAiIw
DQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAKZ74kqEdCTxY0mmLN6QeMnANvgj
XL5z6RRTGbHAE8OSfh+Unfj9sWKOVz+zfCo7OkcRcLudH3j6juQV6TJbptChj94U
+4EPbpNlpGlxReRRB8AZDUmvCUx16B6eZ9kFpIf4eRuZUpI35OSM3cyWLCOy1QhZ
0vBRC+K+RDk5t9hAzSEIL9RYZk96wMO7MAFHPfOKGaxm2hgwCabfFnwhWRtJlFNY
WugEm+XbUGdqcbyeWSUXKFe73dp03kjIVCggz60KnZ8B+ue7Ok811hPFZCNKzey2
BbWgkWyPVaEVmcKLK0SYCw70TINyN22l/5UXJGMA5rDa9NtNE30gE/JKhCoXTh9M
5NzqQOyrgIwnVkGTGNQvFdoz2MLnjyH0R2DlCMVkwWpGuijTVmva9pkiwNCHGenX
X3jX+Y9q06wPYfnb5hRLSlDpKcZilFnjSyQXFM/kwYjn8zIDEVBzOgtEkAFaj1VW
WaeXVrfBl+ldsMkK4efOE22BxN5AipqWYWPk2nqEFe6Yt6ENipl9AKPeHnDKhbqi
+FhQAW9Hdb8fJMXz8idBYFPYUjK8ERPhMRq2evVRdETZJ8QWWQ0RMw/RiVZU2Vyz
sd+4vuy27SpmV3mV8Liyib65LVww2kD1Feug1mpqp41EOjZWK3RWOKJkTSEjnE/M
rY0acqHMabfGLZXxAgMBAAEwDQYJKoZIhvcNAQENBQADggIBABnlQWPzqLEqLsFb
5ykb3S3H7x8NJO8ln9xoejkKQj8YxoJbIaAThjCv3gzQbobVkDMTbpStn3AlC8fG
gQHSTfaOl+A41LFo9Y7spKjGRdFGYz7uQY6d5xgHFB+aQ0am5vuAThEp/FxTuCTA
X8JpuTPB8yLJrT7vh3446zx5fPEXhfeRw7h8QdeczgCj2CRzblqcPSplK5FbgOjE
GuefAEmeb2GU60SeLtmtXDcR28ujJrQlQgHk3xSP9Mg/YAVZ+4YnpfuiQmOWXsSA
gRTPiOR+5l47QzDnpJLPlTa+oow/mXPT58Zkimgh60hqfpTShLM0oylubedkKGKn
UvZ5Zv/CACI2epLxDlgZXZcySp+bJradxVdUZPPW/Tmnc2NIZD/gCXLH7YNEDUyv
ZnOh50N7xUg7qrhnr+IloXog+Y5wRQkj76ejuMlPPEOMz2xlnzMIpLEl6b+HkFwT
BWeWlfyzpTWteKMlq/Rw3ghQ2kFhayrckqnaWKNmErK55vZJok4UP+U/ilC4L/ZM
XLZgb39Awni8L9p59ZrK3mn1VbD/l6axff3fj2Db7lb7pcT1ssT2gazgQvPxHEYt
U2fKTgUyO2tWNHed2PCKSJ6F6rpG4RRcN53BTnOo27b38NrZnx06bh9OUW3Ws5Qf
43YN/h7UXI5gAnnHR4fgkR0H8prK
-----END CERTIFICATE-----
""")
impostorKey = load_privatekey(FILETYPE_PEM, """
-----BEGIN RSA PRIVATE KEY-----
MIIJKgIBAAKCAgEAvmcckrGyAMGlAwpp6uPQz2TyUMlYBiNZNyTldiWw3aC81c4r
Z+X+JfsAp1Iwb2odlizEUBqRnN/ydqqTKFcJmF0JDMtMoX56+PzS/yYwHsTWUyIY
TxTgPqr/cYSRtKzVP+EhbOFwqeg5ncdpmfh1+bixbNZ19wrKi85r0+laGvUmhVkb
c453OgwYt/JOdH+lfkCelyYQq6xbj/HMhhzxKxZP3CqFBnLAS3r2WUZUHK/vxvbX
2GdlvBukBnhICp+BlzIkBlNyWlO5qaK/RIK8/NvCcQUmEJUUJnJfPoR9k2LtujkO
488aZLfQ6vgEXb8wPnCv6UxUM/UixeeuakJrlxYVEhQ9om/Tk75oi+4yyKl/B3vm
KqZQuW0HNF4UhJX86heW36QzWLsuLmg3gkLTxJmkPWgGMbSZaj3DVHF78LQpMDeg
AbCrT+UB6yqtodhn2NPrKUTU8j8YEScW7RFiMDMnbQcI557h5GlJC938Ytrqpjcr
VdPphhb0rCmdb3nf9b8UfJVuLS7cc2tt3OOt8IU42cbK7pPAt7+uHTG0RcJrjMkS
wteQD2a+VPOUDZXogYoo+oNiJZpVUprBb/6zwqStBxOAqqz8vROq9SFeSnSZJTQY
7X6BqgeGzT27Is1U4UOFTpUp30HiJ9KXVX6fp8SNj82qBLt8qbtsEUUVRLECAwEA
AQKCAgAS0UP8p30tH/Y797KCGWPQq2xbWZrOeH3fulDHPXBeZv1isA6QJSXaARWO
c8v/puAnsGLye726YFOpMLB8gyWank8/qXP4XfSvWOVNfCuzTsbTzoHShwCmkOXQ
BUcVMSOePZS9Gwa0dBQFqOih4/Fc7cjzNbrQ4IsmCA+WEPDryyC0exsAb6sO3JUw
0My6LMdhU+eYjpWFMfKWplINSxz2oizgWH9vJLYmf4+LQS0c7LJo2op4g7eFQMIU
NZ0BF8SJ+dWfnm2lybKGtmPq1HTzFJEB9H1PlDw6lIEfP57diyBtkCgNkbFNFPGb
10kvLq8I7MAl8Xo87FQ0dPJC5C+Xwf/wwUlll74T9V4hW2dAzuT3jupDYX0HJPnC
aP0f+qtliQgx4nYYb9Eu2c7auq7dPn5qfy7rVlEq66pFe7N2JBkXEqJm+q7UgPfI
S4fHMjPcLUoytO9SeO8lxyGh205p5EQcn798gB6wPvDOf1UT1NmxdC1UOy2Rabtc
LicK0V2v5V79fgsAzbc0drilIuxYTsV7jWhwecPp0/y+ugfdq3x0CfRsOum4pcnB
H1mQNmR85gEZilQx9CjoKuifwEaK0oSDh9eVGZyplSFOMukYaPiywufzH6t84nxc
/CnBpJgTASgaLansTLijmq7hDAqVUq5c/72t/avTw7qzpl3JsQKCAQEA+2H+/ORX
GyMcenS1OlyXQvtNQ2R5XxO7GenFAX+VtnIBrHsY4U/bMFv0VUL7gFA5EDA+IcLz
Ie/1HeO7DjpcmqTF8XNEcH3+vi/GZ3QViXFlRQBAijlkRUKVF0bWSRqj1p608M18
vYoN6uhiWrJwK75zEQdTQGKk8VdbNeYOLfs98wW0OR9AN10WrqAcmZAaV7Dlb6ec
QcYwg7hqrcByiOWLtSONK5WxtjcGeCH5KRMBBdhie8WhH4pEux8pgyHrYgGuNL0q
qvEm6oAwbrAUHoNrunU47rCTV7FX9vBU5GuoyCjErk3NRt+XPhHgYuFRxiFFMPA5
91+0p7gB8BJjzQKCAQEAweZjFGsBiHq5c4lUw7OPqRUo2rjbYbQYXwQBah4Vk2dT
6HOGJwFBoGqldl7xz3RUvepfkmjuIZoc1Vy6UAypV3uD77dJrYJdxJdcPhp+HrN7
YNE35CWO1deXPltBUCdoNZATMkAmjtkbovmk4gu64OnJYvo3cKJ71XfFfUrOuTzY
4HT1dOmXSfH548VCTXUEu6tbB38aG7xVMz3hXF1yQdu2SAyHjaAHyGKrwX7S71Ds
6bwUMtyTU6th1LGfz90hkGaSmfJ1F2/4lb7GRTnCr13Jxl4uO68710T6QW1WLSQ0
/p43EVgts4M+W0VR5SzAvS42Dix2kKjRNM5yfwxIdQKCAQEAgYCQffOcNCy4ZRVu
r2w3uJgBy7AdHq/peYYGqajylZTR6+tWe+xJvPYCP1JMgmPRoddYisgFvPwDSKyj
FsdWIYy1NJfvMAyYiZ3PFkilN7MlOpDQruS2FUAh0mX5yptgwBXunQcfNf3DAbtJ
v/Og+cgZOzKM3uRymKoqIPAtad6+oU3U9IB28o6QOtHdKfckuvw0lnrActoI8DK3
Ml+sIX4vpNd1yHhLntVmDclitJhHtJ0uzxiW0srGcaeyGQ4GVu0Ks7yoGHw3UiNL
0BoBo16MxvfQppZssYZ5DIvvD+Wug78M48bM87AIGD/ZWtc861cEcBuxoRC63pRa
2zR+GQKCAQEAnLN4NzQTVRz5ayn9WvtuipMTJVBn25oUaBVwnzYY8bt70EwsirE1
PFNzzSoF+kZlheY3vrcWXAmUa8o4uCDDanPjuINEA/lrlklMvtPiQSWD/EaZCMRh
nuhQzpApRIHUchUxrlax0pgbAacHXbdlHAdUPa1ByMFHmsjkzdD7KDDIhP2AsS9m
mNf5v93XK4n6fUCKnJBXpTqbEIJd8quCfz71HV0i344JPCSh8gpwpf+ct3jMSh6A
4gmLUr0KDo8DZRPAPrH3dy2ClGJNEf0QHXGKc8oBSzLfBaY1KVMXZfvw6CUtE9NT
e9QBPPnUqYV1bm4+OU4ts9L639ZIKezfUQKCAQEA0461Xiiv3b/3enTNinMjy6GK
CgRA9hpDeAS4PlaxPRoEorNPKTbZW9vJAEDZh8qc2GmucKhozzb6MGm4D39YefFe
sQaVcXDa21ukQWrWFFIU/iQDb9uwKQWs36EVqd7tWvd5OBDjQasnpWuVuMVJ7Vjv
gUiereTvONQfIAmpyxI529V6lVTGZnyNDRA21OW8JpZvF7BcNjrQH9bnDJFfA66H
mIc9IjX30bN2RKJKyN0IPbzC5lkb08Pk6Kb78tqI7ljyfA4baTWdR0cZEzYAspSS
oAkA6Sc7vb+mOXF4XGuoFI9k3/U7AI2+ZcwQB7muVez8nFE93n6xXksGp7vASg==
-----END RSA PRIVATE KEY-----
""")
impostorCert = load_certificate(FILETYPE_PEM, """
-----BEGIN CERTIFICATE-----
MIIE8TCCAtkCADANBgkqhkiG9w0BAQ0FADA9MRowGAYDVQQDExFDcnlwdG8gMTAx
IENsaWVudDEfMB0GCSqGSIb3DQEJARYQdXNlckBleGFtcGxlLmNvbTAiGA8yMDEz
MTIxODAwMDAwMFoYDzIwMTgxMjE4MDAwMDAwWjA9MRowGAYDVQQDExFDcnlwdG8g
MTAxIENsaWVudDEfMB0GCSqGSIb3DQEJARYQdXNlckBleGFtcGxlLmNvbTCCAiIw
DQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAL5nHJKxsgDBpQMKaerj0M9k8lDJ
WAYjWTck5XYlsN2gvNXOK2fl/iX7AKdSMG9qHZYsxFAakZzf8naqkyhXCZhdCQzL
TKF+evj80v8mMB7E1lMiGE8U4D6q/3GEkbSs1T/hIWzhcKnoOZ3HaZn4dfm4sWzW
dfcKyovOa9PpWhr1JoVZG3OOdzoMGLfyTnR/pX5AnpcmEKusW4/xzIYc8SsWT9wq
hQZywEt69llGVByv78b219hnZbwbpAZ4SAqfgZcyJAZTclpTuamiv0SCvPzbwnEF
JhCVFCZyXz6EfZNi7bo5DuPPGmS30Or4BF2/MD5wr+lMVDP1IsXnrmpCa5cWFRIU
PaJv05O+aIvuMsipfwd75iqmULltBzReFISV/OoXlt+kM1i7Li5oN4JC08SZpD1o
BjG0mWo9w1Rxe/C0KTA3oAGwq0/lAesqraHYZ9jT6ylE1PI/GBEnFu0RYjAzJ20H
COee4eRpSQvd/GLa6qY3K1XT6YYW9KwpnW953/W/FHyVbi0u3HNrbdzjrfCFONnG
yu6TwLe/rh0xtEXCa4zJEsLXkA9mvlTzlA2V6IGKKPqDYiWaVVKawW/+s8KkrQcT
gKqs/L0TqvUhXkp0mSU0GO1+gaoHhs09uyLNVOFDhU6VKd9B4ifSl1V+n6fEjY/N
qgS7fKm7bBFFFUSxAgMBAAEwDQYJKoZIhvcNAQENBQADggIBALU0ItdvHxNBJ/0f
dFVcrBxPzXrZMmXzLf8KqLVn46iDefb+NzW1yZd2ZaaPuLOSySXLXdokY0cmeUYv
04Ainl0EG4EVfV930vcg2Q0He1EJyiDTqEEozdP9e+vkjuLbrnrjCMn69FVmELhu
W1jQRaR5amcpOWXs4qhehthZWkDEBUIs5cwDNZXRFWzJq2IsT5bjy/XJYa4wiXD1
z/BWzRovOsdhZgX+YY3AhNGzyXxoKWjYh8+38Rt9bQJ9SH1ypbzx2BgYTT9hd0e1
uTi3Ss6ewQCuZqkoxcrkV0478Dxj7zUphHUl7AcbFz6vj2n1s9G0HjQDHRzYDMCj
KZ/SAbvT4G4S3pu9LPOtzmMFsTcPcZ8+njD0PrwvEXduMMSeOxpmO2a+/ARhqld1
6dS+R9YMtAvj3nInShEf8LtWTNMdzzQZrr4VVqtid2zxUeiY83L/xJCtXvbaxz5u
RpJXTDYxDZWSXNdppOydRonIAPqDOCMBrVUPPU3jNs0HtPROej1Xjh5EPI5affSc
pOUOQ1i/Og7gQtcyNtvwmgBn8yhTVZnwgS0GGTITIjJYMCnco8GgXGjhnBNp0zWv
y+UVyEjsKa5MbEyDxvIN36xACb3qG6za2S87L8DE0fwGvExD9FM7P6l5ZBAV+xd9
UvElfcF0Vk5PLLFNUTBMpoDv5GSZ
-----END CERTIFICATE-----
""")
bogusCert = load_certificate(FILETYPE_PEM, """
-----BEGIN CERTIFICATE-----
MIIE8zCCAtsCADANBgkqhkiG9w0BAQ0FADA+MRowGAYDVQQDExFDcnlwdG8gMTAx
IENsaWVudDEgMB4GCSqGSIb3DQEJARYRQk9HVVNAZXhhbXBsZS5jb20wIhgPMjAx
MzEyMTgwMDAwMDBaGA8yMDE4MTIxODAwMDAwMFowPjEaMBgGA1UEAxMRQ3J5cHRv
IDEwMSBDbGllbnQxIDAeBgkqhkiG9w0BCQEWEUJPR1VTQGV4YW1wbGUuY29tMIIC
IjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAvmcckrGyAMGlAwpp6uPQz2Ty
UMlYBiNZNyTldiWw3aC81c4rZ+X+JfsAp1Iwb2odlizEUBqRnN/ydqqTKFcJmF0J
DMtMoX56+PzS/yYwHsTWUyIYTxTgPqr/cYSRtKzVP+EhbOFwqeg5ncdpmfh1+bix
bNZ19wrKi85r0+laGvUmhVkbc453OgwYt/JOdH+lfkCelyYQq6xbj/HMhhzxKxZP
3CqFBnLAS3r2WUZUHK/vxvbX2GdlvBukBnhICp+BlzIkBlNyWlO5qaK/RIK8/NvC
cQUmEJUUJnJfPoR9k2LtujkO488aZLfQ6vgEXb8wPnCv6UxUM/UixeeuakJrlxYV
EhQ9om/Tk75oi+4yyKl/B3vmKqZQuW0HNF4UhJX86heW36QzWLsuLmg3gkLTxJmk
PWgGMbSZaj3DVHF78LQpMDegAbCrT+UB6yqtodhn2NPrKUTU8j8YEScW7RFiMDMn
bQcI557h5GlJC938YtrqpjcrVdPphhb0rCmdb3nf9b8UfJVuLS7cc2tt3OOt8IU4
2cbK7pPAt7+uHTG0RcJrjMkSwteQD2a+VPOUDZXogYoo+oNiJZpVUprBb/6zwqSt
BxOAqqz8vROq9SFeSnSZJTQY7X6BqgeGzT27Is1U4UOFTpUp30HiJ9KXVX6fp8SN
j82qBLt8qbtsEUUVRLECAwEAATANBgkqhkiG9w0BAQ0FAAOCAgEAm/qYWE6sc5Ms
ZfZVXLAO/y5n7M5Fn30krZ6QEPZGrTjmPTgokyPvl+w1syQKjlSl/4ezfO8nocZK
RmgTIXv740FxtzCuXNjYvdREUH9Sf3UiDjazRoeXdUAacaKGxglfnlw2F4XlVq3G
JCUpLafPrJJWBAt47RvaK2sT0VmsgrKWCnTrAvkx9lD3sr7lazo1y6VCoYu7JQUI
g5sO+db0B7CkG4+uRgEmRSsSX9VQhRSQgXY6gE+ac1mKtjIaygyM4ndEAVoaHtI0
3+ANFh7atilQNAuJvkQS1ZypgY6SQ2Ap10zZFO4M5EUq3iSpX/8IT1D7HsbLskm1
XySFXlQ3EUiVRbgZ6Q07FUNI0+BRrk6lH3r771Xwb1dqW0k1VyI2KM95Hd7Z38Bz
v8S8XtBKMzvTNqAP6qFpUXuxjIVUPu3AxEChnOtpJ1ney7QJCpyWzuQMvgC3/Hvw
W3x1/bG+IJRg7tlBBsTYG8fefENzBpJVslTgLVHaHgnO3XrGI0EJR3B4hZ5HDzyH
XG82KXZ7uSM3RKDKsKN+UQdtUhBVrKskA3M/25ZIN8Ah+A5BO7jdh3hIA8fMPBaX
xMSAjNLyo3RjjpJMgeEs2+zqBqW4NKRB2ojeWZUA0dXgCO1nFlorAVSXNAHICKrk
zSrTx+wpRsqC46MW1cq5bvEJ7yqas/Q=
-----END CERTIFICATE-----
""")
|
blacklin/kbengine | refs/heads/master | kbe/src/lib/python/Lib/test/test_contains.py | 173 | from collections import deque
from test.support import run_unittest
import unittest
class base_set:
def __init__(self, el):
self.el = el
class myset(base_set):
def __contains__(self, el):
return self.el == el
class seq(base_set):
def __getitem__(self, n):
return [self.el][n]
class TestContains(unittest.TestCase):
def test_common_tests(self):
a = base_set(1)
b = myset(1)
c = seq(1)
self.assertIn(1, b)
self.assertNotIn(0, b)
self.assertIn(1, c)
self.assertNotIn(0, c)
self.assertRaises(TypeError, lambda: 1 in a)
self.assertRaises(TypeError, lambda: 1 not in a)
# test char in string
self.assertIn('c', 'abc')
self.assertNotIn('d', 'abc')
self.assertIn('', '')
self.assertIn('', 'abc')
self.assertRaises(TypeError, lambda: None in 'abc')
def test_builtin_sequence_types(self):
# a collection of tests on builtin sequence types
a = range(10)
for i in a:
self.assertIn(i, a)
self.assertNotIn(16, a)
self.assertNotIn(a, a)
a = tuple(a)
for i in a:
self.assertIn(i, a)
self.assertNotIn(16, a)
self.assertNotIn(a, a)
class Deviant1:
"""Behaves strangely when compared
This class is designed to make sure that the contains code
works when the list is modified during the check.
"""
aList = list(range(15))
def __eq__(self, other):
if other == 12:
self.aList.remove(12)
self.aList.remove(13)
self.aList.remove(14)
return 0
self.assertNotIn(Deviant1(), Deviant1.aList)
def test_nonreflexive(self):
# containment and equality tests involving elements that are
# not necessarily equal to themselves
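        # (CPython containment checks short-circuit on identity via
        # PyObject_RichCompareBool, so e.g. a NaN is still found in a list
        # holding that same NaN object even though nan == nan is False.)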
class MyNonReflexive(object):
def __eq__(self, other):
return False
def __hash__(self):
return 28
values = float('nan'), 1, None, 'abc', MyNonReflexive()
constructors = list, tuple, dict.fromkeys, set, frozenset, deque
for constructor in constructors:
container = constructor(values)
for elem in container:
self.assertIn(elem, container)
self.assertTrue(container == constructor(values))
self.assertTrue(container == container)
def test_main():
run_unittest(TestContains)
if __name__ == '__main__':
test_main()
|
pschmitt/home-assistant | refs/heads/dev | tests/components/google/conftest.py | 13 | """Test configuration and mocks for the google integration."""
import pytest
from tests.async_mock import patch
TEST_CALENDAR = {
"id": "qwertyuiopasdfghjklzxcvbnm@import.calendar.google.com",
"etag": '"3584134138943410"',
"timeZone": "UTC",
"accessRole": "reader",
"foregroundColor": "#000000",
"selected": True,
"kind": "calendar#calendarListEntry",
"backgroundColor": "#16a765",
"description": "Test Calendar",
"summary": "We are, we are, a... Test Calendar",
"colorId": "8",
"defaultReminders": [],
"track": True,
}
@pytest.fixture
def test_calendar():
"""Return a test calendar."""
return TEST_CALENDAR
@pytest.fixture
def mock_next_event():
"""Mock the google calendar data."""
patch_google_cal = patch(
"homeassistant.components.google.calendar.GoogleCalendarData"
)
with patch_google_cal as google_cal_data:
yield google_cal_data
|
Spleen64/Sick-Beard | refs/heads/master | lib/imdb/_compat.py | 128 | """
_compat module (imdb package).
This module provides compatibility functions used by the imdb package
to deal with unusual environments.
Copyright 2008-2010 Davide Alberani <da@erlug.linux.it>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""
# TODO: now we're heavily using the 'logging' module, which was not
# present in Python 2.2. To work in a Symbian environment, we
# need to create a fake 'logging' module (its functions may call
# the 'warnings' module, or do nothing at all).
import os
# If true, we're working on a Symbian device.
if os.name == 'e32':
# Replace os.path.expandvars and os.path.expanduser, if needed.
def _noact(x):
"""Ad-hoc replacement for IMDbPY."""
return x
try:
os.path.expandvars
except AttributeError:
os.path.expandvars = _noact
try:
os.path.expanduser
except AttributeError:
os.path.expanduser = _noact
# time.strptime is missing, on Symbian devices.
import time
try:
time.strptime
except AttributeError:
import re
_re_web_time = re.compile(r'Episode dated (\d+) (\w+) (\d+)')
_re_ptdf_time = re.compile(r'\((\d+)-(\d+)-(\d+)\)')
_month2digit = {'January': '1', 'February': '2', 'March': '3',
'April': '4', 'May': '5', 'June': '6', 'July': '7',
'August': '8', 'September': '9', 'October': '10',
'November': '11', 'December': '12'}
def strptime(s, format):
"""Ad-hoc strptime replacement for IMDbPY."""
try:
if format.startswith('Episode'):
res = _re_web_time.findall(s)[0]
return (int(res[2]), int(_month2digit[res[1]]), int(res[0]),
0, 0, 0, 0, 1, 0)
else:
res = _re_ptdf_time.findall(s)[0]
return (int(res[0]), int(res[1]), int(res[2]),
0, 0, 0, 0, 1, 0)
except:
raise ValueError('error in IMDbPY\'s ad-hoc strptime!')
time.strptime = strptime
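        # Illustrative behaviour of this replacement: when `format` starts
        # with 'Episode', 'Episode dated 12 May 2005' parses to
        # (2005, 5, 12, 0, 0, 0, 0, 1, 0); otherwise the '(YYYY-M-D)' form
        # is expected, e.g. '(2005-5-12)' yields the same tuple.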
|
royosherove/bitcoinxt | refs/heads/master | contrib/seeds/generate-seeds.py | 115 | #!/usr/bin/python
# Copyright (c) 2014 Wladmir J. van der Laan
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Script to generate list of seed nodes for chainparams.cpp.
This script expects two text files in the directory that is passed as an
argument:
nodes_main.txt
nodes_test.txt
These files must consist of lines in the format
<ip>
<ip>:<port>
[<ipv6>]
[<ipv6>]:<port>
<onion>.onion
0xDDBBCCAA (IPv4 little-endian old pnSeeds format)
The output will be two data structures with the peers in binary format:
static SeedSpec6 pnSeed6_main[]={
...
}
static SeedSpec6 pnSeed6_test[]={
...
}
These should be pasted into `src/chainparamsseeds.h`.
'''
from __future__ import print_function, division
from base64 import b32decode
from binascii import a2b_hex
import sys, os
import re
# ipv4 in ipv6 prefix
pchIPv4 = bytearray([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xff, 0xff])
# tor-specific ipv6 prefix
pchOnionCat = bytearray([0xFD,0x87,0xD8,0x7E,0xEB,0x43])
def name_to_ipv6(addr):
if len(addr)>6 and addr.endswith('.onion'):
vchAddr = b32decode(addr[0:-6], True)
if len(vchAddr) != 16-len(pchOnionCat):
            raise ValueError('Invalid onion %s' % addr)
return pchOnionCat + vchAddr
elif '.' in addr: # IPv4
return pchIPv4 + bytearray((int(x) for x in addr.split('.')))
elif ':' in addr: # IPv6
sub = [[], []] # prefix, suffix
x = 0
addr = addr.split(':')
for i,comp in enumerate(addr):
if comp == '':
if i == 0 or i == (len(addr)-1): # skip empty component at beginning or end
continue
x += 1 # :: skips to suffix
assert(x < 2)
else: # two bytes per component
val = int(comp, 16)
sub[x].append(val >> 8)
sub[x].append(val & 0xff)
nullbytes = 16 - len(sub[0]) - len(sub[1])
assert((x == 0 and nullbytes == 0) or (x == 1 and nullbytes > 0))
return bytearray(sub[0] + ([0] * nullbytes) + sub[1])
elif addr.startswith('0x'): # IPv4-in-little-endian
return pchIPv4 + bytearray(reversed(a2b_hex(addr[2:])))
else:
raise ValueError('Could not parse address %s' % addr)
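# Illustrative mappings (not exhaustive):
#   name_to_ipv6('1.2.3.4') -> pchIPv4 + bytearray([1, 2, 3, 4])
#   name_to_ipv6('::1')     -> fifteen zero bytes followed by 0x01
#   '<base32>.onion' names are prefixed with the 6-byte pchOnionCat marker.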
def parse_spec(s, defaultport):
    match = re.match(r'\[([0-9a-fA-F:]+)\](?::([0-9]+))?$', s)
if match: # ipv6
host = match.group(1)
port = match.group(2)
elif s.count(':') > 1: # ipv6, no port
host = s
port = ''
else:
(host,_,port) = s.partition(':')
if not port:
port = defaultport
else:
port = int(port)
host = name_to_ipv6(host)
return (host,port)
def process_nodes(g, f, structname, defaultport):
g.write('static SeedSpec6 %s[] = {\n' % structname)
first = True
for line in f:
comment = line.find('#')
if comment != -1:
line = line[0:comment]
line = line.strip()
if not line:
continue
if not first:
g.write(',\n')
first = False
(host,port) = parse_spec(line, defaultport)
hoststr = ','.join(('0x%02x' % b) for b in host)
g.write(' {{%s}, %i}' % (hoststr, port))
g.write('\n};\n')
def main():
if len(sys.argv)<2:
print(('Usage: %s <path_to_nodes_txt>' % sys.argv[0]), file=sys.stderr)
exit(1)
g = sys.stdout
indir = sys.argv[1]
g.write('#ifndef BITCOIN_CHAINPARAMSSEEDS_H\n')
g.write('#define BITCOIN_CHAINPARAMSSEEDS_H\n')
g.write('/**\n')
g.write(' * List of fixed seed nodes for the bitcoin network\n')
g.write(' * AUTOGENERATED by contrib/seeds/generate-seeds.py\n')
g.write(' *\n')
g.write(' * Each line contains a 16-byte IPv6 address and a port.\n')
g.write(' * IPv4 as well as onion addresses are wrapped inside a IPv6 address accordingly.\n')
g.write(' */\n')
with open(os.path.join(indir,'nodes_main.txt'),'r') as f:
process_nodes(g, f, 'pnSeed6_main', 8333)
g.write('\n')
with open(os.path.join(indir,'nodes_test.txt'),'r') as f:
process_nodes(g, f, 'pnSeed6_test', 18333)
g.write('#endif // BITCOIN_CHAINPARAMSSEEDS_H\n')
if __name__ == '__main__':
main()
|
yakovenkodenis/rethinkdb | refs/heads/next | test/rql_test/connections/http_support/httpbin/helpers.py | 49 | # -*- coding: utf-8 -*-
"""
httpbin.helpers
~~~~~~~~~~~~~~~
This module provides helper functions for httpbin.
"""
import json
import base64
from hashlib import md5
from werkzeug.http import parse_authorization_header
from flask import request, make_response
try:
from urlparse import urlparse, urlunparse
except ImportError:
from urllib.parse import urlparse, urlunparse
from .structures import CaseInsensitiveDict
ASCII_ART = """
-=[ teapot ]=-
_...._
.' _ _ `.
| ."` ^ `". _,
\_;`"---"`|//
| ;/
\_ _/
`\"\"\"`
"""
REDIRECT_LOCATION = '/redirect/1'
ENV_HEADERS = (
'X-Varnish',
'X-Request-Start',
'X-Heroku-Queue-Depth',
'X-Real-Ip',
'X-Forwarded-Proto',
'X-Heroku-Queue-Wait-Time',
'X-Forwarded-For',
'X-Heroku-Dynos-In-Use',
'X-Forwarded-For',
'X-Forwarded-Protocol',
'X-Forwarded-Port'
)
ROBOT_TXT = """User-agent: *
Disallow: /deny
"""
ANGRY_ASCII = """
.-''''''-.
.' _ _ '.
/ O O \\
: :
| |
: __ :
\ .-"` `"-. /
'. .'
'-......-'
YOU SHOULDN'T BE HERE
"""
def json_safe(string, content_type='application/octet-stream'):
"""Returns JSON-safe version of `string`.
    If `string` is a Unicode string or valid UTF-8 bytes, it is returned
    unmodified, as it can safely be encoded to a JSON string.
If `string` contains raw/binary data, it is Base64-encoded, formatted and
returned according to "data" URL scheme (RFC2397). Since JSON is not
suitable for binary data, some additional encoding was necessary; "data"
URL scheme was chosen for its simplicity.
"""
try:
string = string.decode('utf-8')
_encoded = json.dumps(string)
return string
except (ValueError, TypeError):
return b''.join([
b'data:',
content_type.encode('utf-8'),
b';base64,',
base64.b64encode(string)
]).decode('utf-8')
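# Illustrative: json_safe(b'abc') returns u'abc' unchanged, while bytes that
# are not valid UTF-8, e.g. json_safe(b'\xff\x00'), come back wrapped per
# RFC 2397 as 'data:application/octet-stream;base64,/wA='.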
def get_files():
"""Returns files dict from request context."""
files = dict()
for k, v in request.files.items():
content_type = request.files[k].content_type or 'application/octet-stream'
val = json_safe(v.read(), content_type)
if files.get(k):
if not isinstance(files[k], list):
files[k] = [files[k]]
files[k].append(val)
else:
files[k] = val
return files
def get_headers(hide_env=True):
"""Returns headers dict from request context."""
headers = dict(request.headers.items())
if hide_env and ('show_env' not in request.args):
for key in ENV_HEADERS:
try:
del headers[key]
except KeyError:
pass
return CaseInsensitiveDict(headers.items())
def semiflatten(multi):
    """Convert a MultiDict into a regular dict. If there is more than one value
for a key, the result will have a list of values for the key. Otherwise it
will have the plain value."""
if multi:
result = multi.to_dict(flat=False)
for k, v in result.items():
if len(v) == 1:
result[k] = v[0]
return result
else:
return multi
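# Illustrative behaviour with a werkzeug MultiDict:
#   semiflatten(MultiDict([('a', '1'), ('a', '2'), ('b', '3')]))
#   -> {'a': ['1', '2'], 'b': '3'}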
def get_url(request):
"""
Since we might be hosted behind a proxy, we need to check the
X-Forwarded-Proto header to find out what protocol was used to access us.
"""
if 'X-Forwarded-Proto' not in request.headers:
return request.url
url = list(urlparse(request.url))
url[0] = request.headers.get('X-Forwarded-Proto')
return urlunparse(url)
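# E.g. a request received internally as http://host/page with the header
# 'X-Forwarded-Proto: https' is reported as https://host/page.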
def get_dict(*keys, **extras):
"""Returns request dict of given keys."""
_keys = ('url', 'args', 'form', 'data', 'origin', 'headers', 'files', 'json')
assert all(map(_keys.__contains__, keys))
data = request.data
form = request.form
form = semiflatten(request.form)
try:
_json = json.loads(data.decode('utf-8'))
except (ValueError, TypeError):
_json = None
d = dict(
url=get_url(request),
args=semiflatten(request.args),
form=form,
data=json_safe(data),
origin=request.headers.get('X-Forwarded-For', request.remote_addr),
headers=get_headers(),
files=get_files(),
json=_json
)
out_d = dict()
for key in keys:
out_d[key] = d.get(key)
out_d.update(extras)
return out_d
def status_code(code):
"""Returns response object of given status code."""
redirect = dict(headers=dict(location=REDIRECT_LOCATION))
code_map = {
301: redirect,
302: redirect,
303: redirect,
304: dict(data=''),
305: redirect,
307: redirect,
401: dict(headers={'WWW-Authenticate': 'Basic realm="Fake Realm"'}),
402: dict(
data='Fuck you, pay me!',
headers={
'x-more-info': 'http://vimeo.com/22053820'
}
),
407: dict(headers={'Proxy-Authenticate': 'Basic realm="Fake Realm"'}),
418: dict( # I'm a teapot!
data=ASCII_ART,
headers={
'x-more-info': 'http://tools.ietf.org/html/rfc2324'
}
),
}
r = make_response()
r.status_code = code
if code in code_map:
m = code_map[code]
if 'data' in m:
r.data = m['data']
if 'headers' in m:
r.headers = m['headers']
return r
def check_basic_auth(user, passwd):
"""Checks user authentication using HTTP Basic Auth."""
auth = request.authorization
return auth and auth.username == user and auth.password == passwd
# Digest auth helpers
# "qop" is the quality-of-protection directive defined in RFC 2617
def H(data):
return md5(data).hexdigest()
def HA1(realm, username, password):
"""Create HA1 hash by realm, username, password
HA1 = md5(A1) = MD5(username:realm:password)
"""
if not realm:
realm = u''
return H(b":".join([username.encode('utf-8'),
realm.encode('utf-8'),
password.encode('utf-8')]))
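# Worked example (values from RFC 2617, section 3.5):
#   HA1('testrealm@host.com', 'Mufasa', 'Circle Of Life')
# hashes b'Mufasa:testrealm@host.com:Circle Of Life' and should evaluate to
# '939e7578ed9e3c518a452acee763bce9'.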
def HA2(credentails, request):
"""Create HA2 md5 hash
If the qop directive's value is "auth" or is unspecified, then HA2:
HA2 = md5(A2) = MD5(method:digestURI)
    If the qop directive's value is "auth-int", then HA2 is
HA2 = md5(A2) = MD5(method:digestURI:MD5(entityBody))
"""
if credentails.get("qop") == "auth" or credentails.get('qop') is None:
return H(b":".join([request['method'].encode('utf-8'), request['uri'].encode('utf-8')]))
elif credentails.get("qop") == "auth-int":
for k in 'method', 'uri', 'body':
if k not in request:
raise ValueError("%s required" % k)
        return H(b":".join([request['method'].encode('utf-8'),
                            request['uri'].encode('utf-8'),
                            H(request['body']).encode('utf-8')]))
raise ValueError
def response(credentails, user, password, request):
"""Compile digest auth response
    If the qop directive's value is "auth" or "auth-int", then compute the response as follows:
    RESPONSE = MD5(HA1:nonce:nonceCount:clientNonce:qop:HA2)
Else if the qop directive is unspecified, then compute the response as follows:
RESPONSE = MD5(HA1:nonce:HA2)
Arguments:
- `credentails`: credentails dict
- `user`: request user name
- `password`: request user password
- `request`: request dict
"""
for key in 'nonce', 'realm':
if key not in credentails:
raise ValueError("%s required for response" % key)
response = None
HA1_value = HA1(
credentails.get('realm'),
user,
password
)
HA2_value = HA2(credentails, request)
if credentails.get('qop') is None:
response = H(b":".join([
HA1_value.encode('utf-8'),
credentails.get('nonce').encode('utf-8'),
HA2_value.encode('utf-8')
]))
elif credentails.get('qop') == 'auth' or credentails.get('qop') == 'auth-int':
for k in 'nonce', 'nc', 'cnonce', 'qop':
if k not in credentails:
raise ValueError("%s required for response H" % k)
response = H(b":".join([HA1_value.encode('utf-8'),
credentails.get('nonce').encode('utf-8'),
credentails.get('nc').encode('utf-8'),
credentails.get('cnonce').encode('utf-8'),
credentails.get('qop').encode('utf-8'),
HA2_value.encode('utf-8')]))
else:
        raise ValueError("unsupported qop value")
return response
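# Worked example (RFC 2617, section 3.5): for user 'Mufasa', password
# 'Circle Of Life', realm 'testrealm@host.com', qop 'auth', nc '00000001',
# cnonce '0a4f113b', nonce 'dcd98b7102dd2f0e8b11d0f600bfb0c093' and a GET of
# uri '/dir/index.html', the expected response digest is
# '6629fae49393a05397450978507c4ef1'.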
def check_digest_auth(user, passwd):
"""Check user authentication using HTTP Digest auth"""
if request.headers.get('Authorization'):
credentails = parse_authorization_header(request.headers.get('Authorization'))
if not credentails:
return False
response_hash = response(credentails, user, passwd, dict(uri=request.path,
body=request.data,
method=request.method))
if credentails.get('response') == response_hash:
return True
return False
def secure_cookie():
"""Return true if cookie should have secure attribute"""
return request.environ['wsgi.url_scheme'] == 'https'
|
frankosan/pypers | refs/heads/master | pypers/steps/gatk/__init__.py | 18 | from os.path import realpath, dirname
from pypers import import_all
# Import all Steps in this directory.
import_all(namespace=globals(), dir=dirname(realpath(__file__)))
|
agry/NGECore2 | refs/heads/master | scripts/mobiles/generic/faction/imperial/imp_stormrifle_79.py | 2 | import sys
from services.spawn import MobileTemplate
from services.spawn import WeaponTemplate
from resources.datatables import WeaponType
from resources.datatables import Difficulty
from resources.datatables import Options
from resources.datatables import FactionStatus
from java.util import Vector
def addTemplate(core):
mobileTemplate = MobileTemplate()
mobileTemplate.setCreatureName('crackdown_stormtrooper_rifleman')
mobileTemplate.setLevel(79)
mobileTemplate.setDifficulty(Difficulty.ELITE)
mobileTemplate.setMinSpawnDistance(4)
mobileTemplate.setMaxSpawnDistance(8)
mobileTemplate.setDeathblow(False)
mobileTemplate.setScale(1)
mobileTemplate.setSocialGroup("imperial")
mobileTemplate.setAssistRange(6)
mobileTemplate.setStalker(False)
mobileTemplate.setFaction("imperial")
mobileTemplate.setFactionStatus(FactionStatus.Combatant)
templates = Vector()
templates.add('object/mobile/shared_dressed_stormtrooper_rifleman_m.iff')
mobileTemplate.setTemplates(templates)
weaponTemplates = Vector()
weapontemplate = WeaponTemplate('object/weapon/ranged/carbine/shared_carbine_e11.iff', WeaponType.CARBINE, 1.0, 15, 'energy')
weaponTemplates.add(weapontemplate)
mobileTemplate.setWeaponTemplateVector(weaponTemplates)
attacks = Vector()
mobileTemplate.setDefaultAttack('rangedShot')
mobileTemplate.setAttacks(attacks)
core.spawnService.addMobileTemplate('imp_stormrifle_79', mobileTemplate)
return |
kod3r/splash | refs/heads/master | splash/tests/test_har.py | 4 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
import unittest
import warnings
from splash.har import schema
from splash.har.utils import entries2pages
from splash.tests import test_redirects
from splash.tests.utils import NON_EXISTING_RESOLVABLE
from .test_render import BaseRenderTest
class BaseHarRenderTest(BaseRenderTest):
endpoint = 'render.har'
try:
schema.get_validator()
VALIDATION_SUPPORTED = True
except Exception as e:
warnings.warn("jsonschema validation is not supported and will be skipped. "
"Please install jsonschema >= 2.0 or jsonschema >= 1.0 + isodate. "
"Exception: %r" % e)
VALIDATION_SUPPORTED = False
def assertValidHarData(self, data, url):
if self.VALIDATION_SUPPORTED:
schema.validate(data)
first_url = data["log"]["entries"][0]["request"]["url"]
self.assertEqual(first_url, url)
def assertValidHar(self, url, **params):
query = {"url": url}
query.update(params)
resp = self.request(query)
self.assertStatusCode(resp, 200)
data = resp.json()
# from pprint import pprint
# pprint(data)
self.assertValidHarData(data, url)
self.assertValidTimings(data)
return data
def assertRequestedUrls(self, data, correct_urls):
requested_urls = {e["request"]["url"] for e in data["log"]["entries"]}
self.assertEqual(requested_urls, set(correct_urls))
def assertRequestedUrlsStatuses(self, data, correct_urls_statuses):
urls_statuses = {
(e["request"]["url"], e["response"]["status"])
for e in data["log"]["entries"]
}
self.assertEqual(urls_statuses, set(correct_urls_statuses))
def assertValidTimings(self, data):
page0 = data['log']['pages'][0]
self.assertIn("_onStarted", page0["pageTimings"])
class HarRenderTest(BaseHarRenderTest):
""" Tests for HAR data in render.har endpoint """
def test_jsrender(self):
url = self.mockurl("jsrender")
data = self.assertValidHar(url)
self.assertRequestedUrlsStatuses(data, [(url, 200)])
def test_jsalert(self):
self.assertValidHar(self.mockurl("jsalert"), timeout=3)
def test_jsconfirm(self):
self.assertValidHar(self.mockurl("jsconfirm"), timeout=3)
def test_iframes(self):
data = self.assertValidHar(self.mockurl("iframes"), timeout=3)
self.assertRequestedUrls(data, [
self.mockurl("iframes"),
self.mockurl('iframes/1.html'),
self.mockurl('iframes/2.html'),
self.mockurl('iframes/3.html'),
# self.mockurl('iframes/4.html'), # wait is zero, delayed iframe
self.mockurl('iframes/5.html'),
self.mockurl('iframes/6.html'),
self.mockurl('iframes/script.js'),
self.mockurl('iframes/script2.js', host="0.0.0.0"),
self.mockurl('iframes/nested.html'),
])
def test_iframes_wait(self):
data = self.assertValidHar(self.mockurl("iframes"), timeout=3, wait=0.5)
self.assertRequestedUrls(data, [
self.mockurl("iframes"),
self.mockurl('iframes/1.html'),
self.mockurl('iframes/2.html'),
self.mockurl('iframes/3.html'),
self.mockurl('iframes/4.html'), # wait is not zero, delayed iframe
self.mockurl('iframes/5.html'),
self.mockurl('iframes/6.html'),
self.mockurl('iframes/script.js'),
self.mockurl('iframes/script2.js', host="0.0.0.0"),
self.mockurl('iframes/nested.html'),
])
def test_timeout(self):
r = self.request({"url": self.mockurl("delay?n=10"), "timeout": 0.5})
self.assertStatusCode(r, 504)
def test_wait(self):
self.assertValidHar(self.mockurl("jsinterval"))
self.assertValidHar(self.mockurl("jsinterval"), wait=0.2)
def test_meta_redirect_nowait(self):
data = self.assertValidHar(self.mockurl('meta-redirect0'))
self.assertRequestedUrlsStatuses(data, [
(self.mockurl('meta-redirect0'), 200),
])
def test_meta_redirect_wait(self):
data = self.assertValidHar(self.mockurl('meta-redirect0'), wait=0.1)
self.assertRequestedUrlsStatuses(data, [
(self.mockurl('meta-redirect0'), 200),
(self.mockurl('meta-redirect-target/'), 200),
])
def test_meta_redirect_delay_wait(self):
data = self.assertValidHar(self.mockurl('meta-redirect1'), wait=0.1)
self.assertRequestedUrlsStatuses(data, [
(self.mockurl('meta-redirect1'), 200),
])
def test_meta_redirect_delay_wait_enough(self):
data = self.assertValidHar(self.mockurl('meta-redirect1'), wait=0.3)
self.assertRequestedUrlsStatuses(data, [
(self.mockurl('meta-redirect1'), 200),
(self.mockurl('meta-redirect-target/'), 200),
])
def test_meta_redirect_slowload2_wait_more(self):
data = self.assertValidHar(self.mockurl('meta-redirect-slowload2'), wait=0.3)
self.assertRequestedUrlsStatuses(data, [
(self.mockurl('meta-redirect-slowload2'), 200),
(self.mockurl('slow.gif?n=2'), 200),
(self.mockurl('meta-redirect-target/'), 200),
])
def test_redirect_nowait(self):
data = self.assertValidHar(self.mockurl('jsredirect'))
self.assertRequestedUrlsStatuses(data, [
(self.mockurl('jsredirect'), 200),
])
def test_redirect_wait(self):
data = self.assertValidHar(self.mockurl('jsredirect'), wait=0.1)
self.assertRequestedUrlsStatuses(data, [
(self.mockurl('jsredirect'), 200),
(self.mockurl('jsredirect-target'), 200),
])
def test_redirect_onload_nowait(self):
data = self.assertValidHar(self.mockurl('jsredirect-onload'))
self.assertRequestedUrlsStatuses(data, [
(self.mockurl('jsredirect-onload'), 200) # not redirected
])
def test_redirect_onload_wait(self):
data = self.assertValidHar(self.mockurl('jsredirect-onload'), wait=0.1)
self.assertRequestedUrlsStatuses(data, [
(self.mockurl('jsredirect-onload'), 200),
(self.mockurl('jsredirect-target'), 200),
])
def test_redirect_chain_nowait(self):
data = self.assertValidHar(self.mockurl('jsredirect-chain'))
# not redirected
self.assertRequestedUrlsStatuses(data, [
(self.mockurl('jsredirect-chain'), 200),
])
def test_redirect_chain_wait(self):
data = self.assertValidHar(self.mockurl('jsredirect-chain'), wait=0.2)
# redirected
self.assertRequestedUrlsStatuses(data, [
(self.mockurl('jsredirect-chain'), 200),
(self.mockurl('jsredirect'), 200),
(self.mockurl('jsredirect-target'), 200),
])
def test_redirect_slowimage_nowait(self):
data = self.assertValidHar(self.mockurl('jsredirect-slowimage'))
self.assertRequestedUrlsStatuses(data, [
(self.mockurl('jsredirect-slowimage'), 200),
(self.mockurl('jsredirect-target'), 200),
(self.mockurl('slow.gif?n=2'), 0),
])
pages = entries2pages(data["log"]["entries"])
self.assertEqual(len(pages), 2)
self.assertEqual(len(pages[0]), 2) # jsredirect-slowimage and slow.gif?n=2
self.assertEqual(len(pages[1]), 1) # jsredirect-target
self.assertEqual(pages[0][1]["response"]["statusText"], "cancelled")
def test_redirect_slowimage_wait(self):
data = self.assertValidHar(self.mockurl('jsredirect-slowimage'), wait=0.1)
self.assertRequestedUrlsStatuses(data, [
(self.mockurl('jsredirect-slowimage'), 200),
(self.mockurl('jsredirect-target'), 200),
(self.mockurl('slow.gif?n=2'), 0),
])
@unittest.skipIf(NON_EXISTING_RESOLVABLE, "non existing hosts are resolvable")
def test_bad_related(self):
data = self.assertValidHar(self.mockurl("bad-related"))
self.assertRequestedUrlsStatuses(data, [
(self.mockurl('bad-related'), 200),
('http://non-existing/', 0),
])
pages = entries2pages(data["log"]["entries"])
self.assertEqual(len(pages), 1)
self.assertEqual(len(pages[0]), 2)
self.assertEqual(pages[0][1]["response"]["statusText"], "invalid_hostname")
class HarHttpRedirectTest(test_redirects.HttpRedirectTest, BaseHarRenderTest):
def assertHarRedirectedResponse(self, resp, code, url):
self.assertStatusCode(resp, 200)
data = resp.json()
self.assertValidHarData(data, url)
self.assertRequestedUrlsStatuses(data, [
(url, code),
(self.mockurl('getrequest?http_code=%s' % code), 200)
])
redir_url = data["log"]["entries"][0]["response"]["redirectURL"]
self.assertEqual(redir_url, "/getrequest?http_code=%s" % code)
def assertBaseurlHttpRedirectWorks(self, code):
url = self.mockurl("http-redirect?code=%s" % code)
resp = self.request({"url": url, "baseurl": url})
self.assertHarRedirectedResponse(resp, code, url)
def assertHttpRedirectWorks(self, code):
url = self.mockurl("http-redirect?code=%s" % code)
resp = self.request({"url": url})
self.assertHarRedirectedResponse(resp, code, url)
class RenderJsonHarTest(HarRenderTest):
""" Tests for HAR data in render.json endpoint """
endpoint = 'render.json'
def assertValidHar(self, url, **params):
query = {"url": url, "har": 1}
query.update(params)
resp = self.request(query)
self.assertStatusCode(resp, 200)
data = resp.json()["har"]
# from pprint import pprint
# pprint(data)
self.assertValidHarData(data, url)
return data
|
avanov/django | refs/heads/master | tests/migrations/test_optimizer.py | 11 | # -*- coding: utf-8 -*-
from django.db import migrations, models
from django.db.migrations.optimizer import MigrationOptimizer
from django.test import SimpleTestCase
from .models import CustomModelBase, EmptyManager
class OptimizerTests(SimpleTestCase):
"""
    Tests the migration optimizer.
"""
def optimize(self, operations):
"""
Handy shortcut for getting results + number of loops
"""
optimizer = MigrationOptimizer()
return optimizer.optimize(operations), optimizer._iterations
def assertOptimizesTo(self, operations, expected, exact=None, less_than=None):
result, iterations = self.optimize(operations)
result = [repr(f.deconstruct()) for f in result]
expected = [repr(f.deconstruct()) for f in expected]
self.assertEqual(expected, result)
if exact is not None and iterations != exact:
raise self.failureException("Optimization did not take exactly %s iterations (it took %s)" % (exact, iterations))
if less_than is not None and iterations >= less_than:
raise self.failureException("Optimization did not take less than %s iterations (it took %s)" % (less_than, iterations))
def test_single(self):
"""
Tests that the optimizer does nothing on a single operation,
and that it does it in just one pass.
"""
self.assertOptimizesTo(
[migrations.DeleteModel("Foo")],
[migrations.DeleteModel("Foo")],
exact=1,
)
def test_create_delete_model(self):
"""
CreateModel and DeleteModel should collapse into nothing.
"""
self.assertOptimizesTo(
[
migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]),
migrations.DeleteModel("Foo"),
],
[],
)
def test_create_rename_model(self):
"""
CreateModel should absorb RenameModels.
"""
managers = [('objects', EmptyManager())]
self.assertOptimizesTo(
[
migrations.CreateModel(
name="Foo",
fields=[("name", models.CharField(max_length=255))],
options={'verbose_name': 'Foo'},
                    bases=(CustomModelBase,),
managers=managers,
),
migrations.RenameModel("Foo", "Bar"),
],
[
migrations.CreateModel(
"Bar",
[("name", models.CharField(max_length=255))],
options={'verbose_name': 'Foo'},
                    bases=(CustomModelBase,),
managers=managers,
)
],
)
def test_rename_model_self(self):
"""
RenameModels should absorb themselves.
"""
self.assertOptimizesTo(
[
migrations.RenameModel("Foo", "Baa"),
migrations.RenameModel("Baa", "Bar"),
],
[
migrations.RenameModel("Foo", "Bar"),
],
)
def test_create_alter_delete_model(self):
"""
CreateModel, AlterModelTable, AlterUniqueTogether, and DeleteModel should collapse into nothing.
"""
self.assertOptimizesTo(
[
migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]),
migrations.AlterModelTable("Foo", "woohoo"),
migrations.AlterUniqueTogether("Foo", [["a", "b"]]),
migrations.DeleteModel("Foo"),
],
[],
)
def test_optimize_through_create(self):
"""
We should be able to optimize away create/delete through a create or delete
of a different model, but only if the create operation does not mention the model
at all.
"""
# These should work
self.assertOptimizesTo(
[
migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]),
migrations.CreateModel("Bar", [("size", models.IntegerField())]),
migrations.DeleteModel("Foo"),
],
[
migrations.CreateModel("Bar", [("size", models.IntegerField())]),
],
)
self.assertOptimizesTo(
[
migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]),
migrations.CreateModel("Bar", [("size", models.IntegerField())]),
migrations.DeleteModel("Bar"),
migrations.DeleteModel("Foo"),
],
[],
)
self.assertOptimizesTo(
[
migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]),
migrations.CreateModel("Bar", [("size", models.IntegerField())]),
migrations.DeleteModel("Foo"),
migrations.DeleteModel("Bar"),
],
[],
)
# This should not work - FK should block it
self.assertOptimizesTo(
[
migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]),
migrations.CreateModel("Bar", [("other", models.ForeignKey("testapp.Foo"))]),
migrations.DeleteModel("Foo"),
],
[
migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]),
migrations.CreateModel("Bar", [("other", models.ForeignKey("testapp.Foo"))]),
migrations.DeleteModel("Foo"),
],
)
# This should not work - bases should block it
self.assertOptimizesTo(
[
migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]),
migrations.CreateModel("Bar", [("size", models.IntegerField())], bases=("testapp.Foo", )),
migrations.DeleteModel("Foo"),
],
[
migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]),
migrations.CreateModel("Bar", [("size", models.IntegerField())], bases=("testapp.Foo", )),
migrations.DeleteModel("Foo"),
],
)
def test_create_model_add_field(self):
"""
AddField should optimize into CreateModel.
"""
managers = [('objects', EmptyManager())]
self.assertOptimizesTo(
[
migrations.CreateModel(
name="Foo",
fields=[("name", models.CharField(max_length=255))],
options={'verbose_name': 'Foo'},
                    bases=(CustomModelBase,),
managers=managers,
),
migrations.AddField("Foo", "age", models.IntegerField()),
],
[
migrations.CreateModel(
name="Foo",
fields=[
("name", models.CharField(max_length=255)),
("age", models.IntegerField()),
],
options={'verbose_name': 'Foo'},
                    bases=(CustomModelBase,),
managers=managers,
),
],
)
def test_create_model_add_field_not_through_fk(self):
"""
AddField should NOT optimize into CreateModel if it's an FK to a model
that's between them.
"""
self.assertOptimizesTo(
[
migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]),
migrations.CreateModel("Link", [("url", models.TextField())]),
migrations.AddField("Foo", "link", models.ForeignKey("migrations.Link")),
],
[
migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]),
migrations.CreateModel("Link", [("url", models.TextField())]),
migrations.AddField("Foo", "link", models.ForeignKey("migrations.Link")),
],
)
def test_create_model_add_field_not_through_m2m_through(self):
"""
AddField should NOT optimize into CreateModel if it's an M2M using a
through that's created between them.
"""
# Note: The middle model is not actually a valid through model,
# but that doesn't matter, as we never render it.
self.assertOptimizesTo(
[
migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]),
migrations.CreateModel("LinkThrough", []),
migrations.AddField("Foo", "link", models.ManyToManyField("migrations.Link", through="migrations.LinkThrough")),
],
[
migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]),
migrations.CreateModel("LinkThrough", []),
migrations.AddField("Foo", "link", models.ManyToManyField("migrations.Link", through="migrations.LinkThrough")),
],
)
def test_create_model_alter_field(self):
"""
AlterField should optimize into CreateModel.
"""
managers = [('objects', EmptyManager())]
self.assertOptimizesTo(
[
migrations.CreateModel(
name="Foo",
fields=[("name", models.CharField(max_length=255))],
options={'verbose_name': 'Foo'},
                    bases=(CustomModelBase,),
managers=managers,
),
migrations.AlterField("Foo", "name", models.IntegerField()),
],
[
migrations.CreateModel(
name="Foo",
fields=[
("name", models.IntegerField()),
],
options={'verbose_name': 'Foo'},
                    bases=(CustomModelBase,),
managers=managers,
),
],
)
def test_create_model_rename_field(self):
"""
RenameField should optimize into CreateModel.
"""
managers = [('objects', EmptyManager())]
self.assertOptimizesTo(
[
migrations.CreateModel(
name="Foo",
fields=[("name", models.CharField(max_length=255))],
options={'verbose_name': 'Foo'},
                    bases=(CustomModelBase,),
managers=managers,
),
migrations.RenameField("Foo", "name", "title"),
],
[
migrations.CreateModel(
name="Foo",
fields=[
("title", models.CharField(max_length=255)),
],
options={'verbose_name': 'Foo'},
                    bases=(CustomModelBase,),
managers=managers,
),
],
)
def test_add_field_rename_field(self):
"""
RenameField should optimize into AddField
"""
self.assertOptimizesTo(
[
migrations.AddField("Foo", "name", models.CharField(max_length=255)),
migrations.RenameField("Foo", "name", "title"),
],
[
migrations.AddField("Foo", "title", models.CharField(max_length=255)),
],
)
def test_alter_field_rename_field(self):
"""
RenameField should optimize to the other side of AlterField,
and into itself.
"""
self.assertOptimizesTo(
[
migrations.AlterField("Foo", "name", models.CharField(max_length=255)),
migrations.RenameField("Foo", "name", "title"),
migrations.RenameField("Foo", "title", "nom"),
],
[
migrations.RenameField("Foo", "name", "nom"),
migrations.AlterField("Foo", "nom", models.CharField(max_length=255)),
],
)
def test_create_model_remove_field(self):
"""
RemoveField should optimize into CreateModel.
"""
managers = [('objects', EmptyManager())]
self.assertOptimizesTo(
[
migrations.CreateModel(
name="Foo",
fields=[
("name", models.CharField(max_length=255)),
("age", models.IntegerField()),
],
options={'verbose_name': 'Foo'},
                    bases=(CustomModelBase,),
managers=managers,
),
migrations.RemoveField("Foo", "age"),
],
[
migrations.CreateModel(
name="Foo",
fields=[
("name", models.CharField(max_length=255)),
],
options={'verbose_name': 'Foo'},
                    bases=(CustomModelBase,),
managers=managers,
),
],
)
def test_add_field_alter_field(self):
"""
AlterField should optimize into AddField.
"""
self.assertOptimizesTo(
[
migrations.AddField("Foo", "age", models.IntegerField()),
migrations.AlterField("Foo", "age", models.FloatField(default=2.4)),
],
[
migrations.AddField("Foo", name="age", field=models.FloatField(default=2.4)),
],
)
def test_add_field_delete_field(self):
"""
RemoveField should cancel AddField
"""
self.assertOptimizesTo(
[
migrations.AddField("Foo", "age", models.IntegerField()),
migrations.RemoveField("Foo", "age"),
],
[],
)
def test_alter_field_delete_field(self):
"""
RemoveField should absorb AlterField
"""
self.assertOptimizesTo(
[
migrations.AlterField("Foo", "age", models.IntegerField()),
migrations.RemoveField("Foo", "age"),
],
[
migrations.RemoveField("Foo", "age"),
],
)
def test_optimize_through_fields(self):
"""
Checks that field-level through checking is working.
This should manage to collapse model Foo to nonexistence,
and model Bar to a single IntegerField called "width".
"""
self.assertOptimizesTo(
[
migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]),
migrations.CreateModel("Bar", [("size", models.IntegerField())]),
migrations.AddField("Foo", "age", models.IntegerField()),
migrations.AddField("Bar", "width", models.IntegerField()),
migrations.AlterField("Foo", "age", models.IntegerField()),
migrations.RenameField("Bar", "size", "dimensions"),
migrations.RemoveField("Foo", "age"),
migrations.RenameModel("Foo", "Phou"),
migrations.RemoveField("Bar", "dimensions"),
migrations.RenameModel("Phou", "Fou"),
migrations.DeleteModel("Fou"),
],
[
migrations.CreateModel("Bar", [("width", models.IntegerField())]),
],
)
|
qiankunshe/sky_engine | refs/heads/master | build/android/pylib/utils/proguard.py | 51 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import re
import tempfile
from pylib import constants
from pylib import cmd_helper
_PROGUARD_CLASS_RE = re.compile(r'\s*?- Program class:\s*([\S]+)$')
_PROGUARD_SUPERCLASS_RE = re.compile(r'\s*? Superclass:\s*([\S]+)$')
_PROGUARD_SECTION_RE = re.compile(
r'^(?:Interfaces|Constant Pool|Fields|Methods|Class file attributes) '
r'\(count = \d+\):$')
_PROGUARD_METHOD_RE = re.compile(r'\s*?- Method:\s*(\S*)[(].*$')
_PROGUARD_ANNOTATION_RE = re.compile(r'\s*?- Annotation \[L(\S*);\]:$')
_PROGUARD_ANNOTATION_CONST_RE = (
re.compile(r'\s*?- Constant element value.*$'))
_PROGUARD_ANNOTATION_VALUE_RE = re.compile(r'\s*?- \S+? \[(.*)\]$')
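# Illustrative `proguard -dump` lines these patterns are meant to match:
#   '- Program class: com/example/Foo'  -> class name 'com.example.Foo'
#   '  - Method:      run()V'           -> method name 'run'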
_PROGUARD_PATH_SDK = os.path.join(
constants.ANDROID_SDK_ROOT, 'tools', 'proguard', 'lib', 'proguard.jar')
_PROGUARD_PATH_BUILT = (
os.path.join(os.environ['ANDROID_BUILD_TOP'], 'external', 'proguard',
'lib', 'proguard.jar')
if 'ANDROID_BUILD_TOP' in os.environ else None)
_PROGUARD_PATH = (
_PROGUARD_PATH_SDK if os.path.exists(_PROGUARD_PATH_SDK)
else _PROGUARD_PATH_BUILT)
def Dump(jar_path):
"""Dumps class and method information from a JAR into a dict via proguard.
Args:
jar_path: An absolute path to the JAR file to dump.
Returns:
A dict in the following format:
{
'classes': [
{
'class': '',
'superclass': '',
'annotations': {},
'methods': [
{
'method': '',
'annotations': {},
},
...
],
},
...
],
}
"""
with tempfile.NamedTemporaryFile() as proguard_output:
cmd_helper.RunCmd(['java', '-jar',
_PROGUARD_PATH,
'-injars', jar_path,
'-dontshrink',
'-dontoptimize',
'-dontobfuscate',
'-dontpreverify',
'-dump', proguard_output.name])
results = {
'classes': [],
}
annotation = None
annotation_has_value = False
class_result = None
method_result = None
for line in proguard_output:
line = line.strip('\r\n')
m = _PROGUARD_CLASS_RE.match(line)
if m:
class_result = {
'class': m.group(1).replace('/', '.'),
'superclass': '',
'annotations': {},
'methods': [],
}
results['classes'].append(class_result)
annotation = None
annotation_has_value = False
method_result = None
continue
if not class_result:
continue
m = _PROGUARD_SUPERCLASS_RE.match(line)
if m:
class_result['superclass'] = m.group(1).replace('/', '.')
continue
m = _PROGUARD_SECTION_RE.match(line)
if m:
annotation = None
annotation_has_value = False
method_result = None
continue
m = _PROGUARD_METHOD_RE.match(line)
if m:
method_result = {
'method': m.group(1),
'annotations': {},
}
class_result['methods'].append(method_result)
annotation = None
annotation_has_value = False
continue
m = _PROGUARD_ANNOTATION_RE.match(line)
if m:
# Ignore the annotation package.
annotation = m.group(1).split('/')[-1]
if method_result:
method_result['annotations'][annotation] = None
else:
class_result['annotations'][annotation] = None
continue
if annotation:
if not annotation_has_value:
m = _PROGUARD_ANNOTATION_CONST_RE.match(line)
annotation_has_value = bool(m)
else:
m = _PROGUARD_ANNOTATION_VALUE_RE.match(line)
if m:
if method_result:
method_result['annotations'][annotation] = m.group(1)
else:
class_result['annotations'][annotation] = m.group(1)
          annotation_has_value = False
return results
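# Illustrative use (the jar path is hypothetical):
#   info = Dump('/tmp/Example.jar')
#   names = [c['class'] for c in info['classes']]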
|
spnow/grr | refs/heads/master | executables/python_hacks/shutdown_host.py | 16 | #!/usr/bin/env python
"""Shut down windows hosts."""
import platform
tested_versions = ['xp', 'vista', '2008', '2003']
cmd = 'cmd'
args = ['/c', '%SystemRoot%\\System32\\shutdown.exe', '/s', '/f']
os_version = platform.platform().lower()
# pylint: disable=undefined-variable
if 'time_in_seconds' in py_args:
args.extend(['/t', py_args['time_in_seconds']])
else:
args.extend(['/t', '20'])
if 'reason' in py_args:
args.extend(['/c', py_args['reason']])
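# At this point the full invocation resembles (reason value hypothetical):
#   cmd /c %SystemRoot%\System32\shutdown.exe /s /f /t 20 /c "maintenance"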
for version in tested_versions:
if os_version.find(version) != -1:
stdout, stderr, exit_status, time_taken = client_utils_common.Execute(
cmd, args, time_limit=-1, bypass_whitelist=True)
magic_return_str = '%s, %s, %s, %s' % (stdout.encode('base64'),
stderr.encode('base64'),
exit_status,
time_taken)
break
|
shaufi10/odoo | refs/heads/8.0 | addons/l10n_be_hr_payroll/__init__.py | 438 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2011 OpenERP SA (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
import l10n_be_hr_payroll
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
benoitsteiner/tensorflow-opencl | refs/heads/master | tensorflow/contrib/ndlstm/python/lstm1d_test.py | 94 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for 1D LSTM."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.ndlstm.python import lstm1d as lstm1d_lib
from tensorflow.python.framework import constant_op
from tensorflow.python.ops import gradient_checker
from tensorflow.python.ops import gradients_impl
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
lstm1d = lstm1d_lib
def _rand(*size):
return np.random.uniform(size=size).astype("f")
class Lstm1DTest(test.TestCase):
def testSequenceToSequenceDims(self):
with self.test_session():
inputs = constant_op.constant(_rand(17, 1, 5))
outputs = lstm1d.ndlstm_base(inputs, 8)
variables.global_variables_initializer().run()
names = [v.name for v in variables.trainable_variables()]
self.assertEqual(len(names), 2)
result = outputs.eval()
self.assertEqual(tuple(result.shape), (17, 1, 8))
def testSequenceToSequenceGradient(self):
with self.test_session():
size = (17, 1, 15)
output_size = (17, 1, 8)
inputs = constant_op.constant(_rand(*size))
outputs = lstm1d.ndlstm_base(inputs, 8, dynamic=False)
variables.global_variables_initializer().run()
gradients = gradients_impl.gradients(outputs, inputs)
if 1: # pylint: disable=using-constant-test
gradients = gradients_impl.gradients(outputs, inputs)[0].eval()
self.assertEqual(gradients.shape, size)
else:
# TODO(tmb) tf.test.compute_gradient error is currently broken
# with dynamic_rnn. Enable this test case eventually.
err = gradient_checker.compute_gradient_error(
inputs, size, outputs, output_size, delta=1e-4)
self.assert_(not np.isnan(err))
self.assert_(err < 0.1)
def testSequenceToSequenceGradientReverse(self):
with self.test_session():
size = (17, 1, 15)
output_size = (17, 1, 8)
inputs = constant_op.constant(_rand(*size))
outputs = lstm1d.ndlstm_base(inputs, 8, reverse=1, dynamic=False)
variables.global_variables_initializer().run()
if 1: # pylint: disable=using-constant-test
gradients = gradients_impl.gradients(outputs, inputs)[0].eval()
self.assertEqual(gradients.shape, size)
else:
# TODO(tmb) tf.test.compute_gradient error is currently broken
# with dynamic_rnn. Enable this test case eventually.
err = gradient_checker.compute_gradient_error(
inputs, size, outputs, output_size, delta=1e-4)
self.assert_(not np.isnan(err))
self.assert_(err < 0.1)
def testSequenceToFinalDims(self):
with self.test_session():
inputs = constant_op.constant(_rand(17, 6, 5))
outputs = lstm1d.sequence_to_final(inputs, 8)
variables.global_variables_initializer().run()
names = [v.name for v in variables.trainable_variables()]
self.assertEqual(len(names), 2)
result = outputs.eval()
self.assertEqual(tuple(result.shape), (6, 8))
def testSequenceSoftmaxDims(self):
with self.test_session():
inputs = constant_op.constant(_rand(17, 1, 5))
outputs = lstm1d.sequence_softmax(inputs, 8)
variables.global_variables_initializer().run()
result = outputs.eval()
self.assertEqual(tuple(result.shape), (17, 1, 8))
if __name__ == "__main__":
test.main()
|
hlieberman/ansible-modules-core | refs/heads/devel | network/eos/eos_template.py | 28 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
DOCUMENTATION = """
---
module: eos_template
version_added: "2.1"
author: "Peter Sprygada (@privateip)"
short_description: Manage Arista EOS device configurations
description:
- Manages network device configurations over SSH or eAPI. This module
allows implementors to work with the device running-config. It
provides a way to push a set of commands onto a network device
    by evaluating the current running-config and only pushing configuration
commands that are not already configured. The config source can
be a set of commands or a template.
extends_documentation_fragment: eos
options:
src:
description:
- The path to the config source. The source can be either a
file with config or a template that will be merged during
runtime. By default the task will search for the source
file in role or playbook root folder in templates directory.
required: true
force:
description:
- The force argument instructs the module to not consider the
current devices running-config. When set to true, this will
cause the module to push the contents of I(src) into the device
without first checking if already configured.
required: false
default: false
choices: ['yes', 'no']
include_defaults:
description:
- By default when the M(eos_template) connects to the remote
device to retrieve the configuration it will issue the `show
running-config` command. If this option is set to True then
the issued command will be `show running-config all`
required: false
default: false
choices: ['yes', 'no']
backup:
description:
- When this argument is configured true, the module will backup
the running-config from the node prior to making any changes.
The backup file will be written to backup_{{ hostname }} in
the root of the playbook directory.
required: false
default: false
choices: ['yes', 'no']
replace:
description:
- This argument will cause the provided configuration to be replaced
on the destination node. The use of the replace argument will
always cause the task to set changed to true and will implies
I(force) is true. This argument is only valid with I(transport)
is eapi.
required: false
default: false
choices: ['yes', 'no']
config:
description:
- The module, by default, will connect to the remote device and
retrieve the current running-config to use as a base for comparing
against the contents of source. There are times when it is not
desirable to have the task get the current running-config for
every task in a playbook. The I(config) argument allows the
        implementer to pass in the configuration to use as the base
        config for comparison.
required: false
default: null
"""
EXAMPLES = """
- name: push a configuration onto the device
eos_template:
src: config.j2
- name: forcibly push a configuration onto the device
eos_template:
src: config.j2
force: yes
- name: provide the base configuration for comparison
eos_template:
src: candidate_config.txt
config: current_config.txt
"""
RETURN = """
updates:
description: The set of commands that will be pushed to the remote device
returned: always
type: list
sample: ['...', '...']
responses:
description: The set of responses from issuing the commands on the device
  returned: when not check_mode
type: list
sample: ['...', '...']
"""
import re
def get_config(module):
config = module.params.get('config')
if not config and not module.params['force']:
config = module.config
return config
def filter_exit(commands):
# Filter out configuration mode commands followed immediately by an
# exit command indented by one level only, e.g.
# - route-map map01 permit 10
# - exit
#
# Build a temporary list as we filter, then copy the temp list
# back onto the commands list.
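    #
    # Note that indentation is approximated by each line's total space count,
    # so (illustrative): ['snmp-server', ' exit'] collapses to [], while
    # ['vlan 10', ' exit'] is left unchanged because 'vlan 10' itself
    # already contains a space.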
temp = []
ind_prev = 999
count = 0
for c in commands:
ind_this = c.count(' ')
if re.search(r"^\s*exit$", c) and ind_this == ind_prev + 1:
temp.pop()
count -= 1
if count != 0:
ind_prev = temp[-1].count(' ')
continue
temp.append(c)
ind_prev = ind_this
count += 1
return temp
def main():
""" main entry point for module execution
"""
argument_spec = dict(
src=dict(required=True),
force=dict(default=False, type='bool'),
include_defaults=dict(default=False, type='bool'),
backup=dict(default=False, type='bool'),
replace=dict(default=False, type='bool'),
config=dict()
)
mutually_exclusive = [('config', 'backup'), ('config', 'force')]
module = get_module(argument_spec=argument_spec,
mutually_exclusive=mutually_exclusive,
supports_check_mode=True)
replace = module.params['replace']
commands = list()
running = None
result = dict(changed=False)
candidate = NetworkConfig(contents=module.params['src'], indent=3)
if replace:
if module.params['transport'] == 'cli':
module.fail_json(msg='config replace is only supported over eapi')
commands = str(candidate).split('\n')
else:
contents = get_config(module)
if contents:
running = NetworkConfig(contents=contents, indent=3)
result['_backup'] = contents
if not module.params['force']:
commands = candidate.difference((running or list()))
else:
commands = str(candidate).split('\n')
if commands:
commands = filter_exit(commands)
if not module.check_mode:
commands = [str(c).strip() for c in commands]
response = module.configure(commands, replace=replace)
result['responses'] = response
result['changed'] = True
result['updates'] = commands
module.exit_json(**result)
from ansible.module_utils.basic import *
from ansible.module_utils.urls import *
from ansible.module_utils.shell import *
from ansible.module_utils.netcfg import *
from ansible.module_utils.eos import *
if __name__ == '__main__':
main()
|
getopenmono/mbed | refs/heads/master | workspace_tools/host_tests/udp_link_layer_auto.py | 124 | """
mbed SDK
Copyright (c) 2011-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
"""
How to use:
make.py -m LPC1768 -t ARM -d E:\ -n NET_14
udp_link_layer_auto.py -p COM20 -d E:\ -t 10
"""
import re
import uuid
import socket
import thread
from sys import stdout
from time import time, sleep
from host_test import DefaultTest
from SocketServer import BaseRequestHandler, UDPServer
# Received datagrams (with time)
dict_udp_recv_datagrams = dict()
# Sent datagrams (with time)
dict_udp_sent_datagrams = dict()
class UDPEchoClient_Handler(BaseRequestHandler):
    def handle(self):
        """ One handler invocation per received datagram
"""
_data, _socket = self.request
# Process received datagram
data_str = repr(_data)[1:-1]
dict_udp_recv_datagrams[data_str] = time()
def udp_packet_recv(threadName, server_ip, server_port):
""" This function will receive packet stream from mbed device
"""
server = UDPServer((server_ip, server_port), UDPEchoClient_Handler)
print "[UDP_COUNTER] Listening for connections... %s:%d"% (server_ip, server_port)
server.serve_forever()
class UDPEchoServerTest(DefaultTest):
ECHO_SERVER_ADDRESS = "" # UDP IP of datagram bursts
ECHO_PORT = 0 # UDP port for datagram bursts
CONTROL_PORT = 23 # TCP port used to get stats from mbed device, e.g. counters
s = None # Socket
    TEST_PACKET_COUNT = 1000 # how many packets should be sent
    TEST_STRESS_FACTOR = 0.001 # delay between datagrams: 1 ms
    PACKET_SATURATION_RATIO = 29.9 # minimum acceptable percentage of echoed datagrams
PATTERN_SERVER_IP = "Server IP Address is (\d+).(\d+).(\d+).(\d+):(\d+)"
re_detect_server_ip = re.compile(PATTERN_SERVER_IP)
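    # Illustrative sketch: the pattern above matches a boot banner such as
    #   "Server IP Address is 192.168.0.10:7"
    # yielding groups ('192', '168', '0', '10', '7').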
def get_control_data(self, command="stat\n"):
BUFFER_SIZE = 256
        try:
            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            s.connect((self.ECHO_SERVER_ADDRESS, self.CONTROL_PORT))
        except Exception, e:
            # Without a connected control socket there is nothing to read;
            # returning early avoids a send() on an unconnected socket.
            return None
        s.send(command)
        data = s.recv(BUFFER_SIZE)
        s.close()
        return data
def test(self):
serial_ip_msg = self.mbed.serial_readline()
if serial_ip_msg is None:
return self.RESULT_IO_SERIAL
stdout.write(serial_ip_msg)
stdout.flush()
# Searching for IP address and port prompted by server
m = self.re_detect_server_ip.search(serial_ip_msg)
if m and len(m.groups()):
self.ECHO_SERVER_ADDRESS = ".".join(m.groups()[:4])
self.ECHO_PORT = int(m.groups()[4]) # must be integer for socket.connect method
self.notify("HOST: UDP Server found at: " + self.ECHO_SERVER_ADDRESS + ":" + str(self.ECHO_PORT))
# Open client socket to burst datagrams to UDP server in mbed
try:
self.s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
except Exception, e:
self.s = None
self.notify("HOST: Error: %s"% e)
return self.RESULT_ERROR
        # The UDP reply receiver runs in the background to collect echoed datagrams
SERVER_IP = str(socket.gethostbyname(socket.getfqdn()))
SERVER_PORT = self.ECHO_PORT + 1
thread.start_new_thread(udp_packet_recv, ("Thread-udp-recv", SERVER_IP, SERVER_PORT))
sleep(0.5)
# Burst part
for no in range(self.TEST_PACKET_COUNT):
TEST_STRING = str(uuid.uuid4())
payload = str(no) + "__" + TEST_STRING
self.s.sendto(payload, (self.ECHO_SERVER_ADDRESS, self.ECHO_PORT))
dict_udp_sent_datagrams[payload] = time()
sleep(self.TEST_STRESS_FACTOR)
if self.s is not None:
self.s.close()
# Wait 5 seconds for packets to come
result = True
self.notify("HOST: Test Summary:")
for d in range(5):
sleep(1.0)
summary_datagram_success = (float(len(dict_udp_recv_datagrams)) / float(self.TEST_PACKET_COUNT)) * 100.0
self.notify("HOST: Datagrams received after +%d sec: %.3f%% (%d / %d), stress=%.3f ms"% (d,
summary_datagram_success,
len(dict_udp_recv_datagrams),
self.TEST_PACKET_COUNT,
self.TEST_STRESS_FACTOR))
result = result and (summary_datagram_success >= self.PACKET_SATURATION_RATIO)
stdout.flush()
# Getting control data from test
self.notify("...")
self.notify("HOST: Mbed Summary:")
mbed_stats = self.get_control_data()
self.notify(mbed_stats)
return self.RESULT_SUCCESS if result else self.RESULT_FAILURE
if __name__ == '__main__':
UDPEchoServerTest().run()
|
Hydrosys4/Master | refs/heads/master | photomod.py | 1 | from __future__ import print_function
import time
from time import sleep
import datetime
import os
import sys
import subprocess
from PIL import Image # to make thumbnail
from subprocess import call
from shutil import copyfile
import logging
logger = logging.getLogger(__name__)  # logger is referenced by the v4l2 helpers below but was never defined
def videodevlist():
folderpath="/dev"
videolist=[]
filelist=os.listdir(folderpath)
for filename in filelist:
if "video" in filename:
            # the following code became necessary with Raspbian Buster: by default video10, video11 and video12 devices are also created
#get the video number
videonumberstr=get_digits(filename)
try:
videonumber=int(videonumberstr)
except:
videonumber=-1
print("not able to convert the video number")
if videonumber>-1:
if (videonumber<10):
print("check video " , filename)
                # also needed since Raspbian Buster: the Linux kernel v4l2 layer was updated, so a single webcam can expose more than one videoXX device
if checkvideoformatexist(videonumberstr):
print("OK video " , filename)
videolist.append(filename)
return videolist # item1 (path) item2 (name) item3 (datetime)
def get_digits(x):
return ''.join(ele for ele in x if ele.isdigit())
def checkvideoformatexist(videonumberstr):
# v4l2-ctl -d /dev/video0 -D
DeviceType="Video Capture"
formats=['YU12','YUYV','RGB3','JPEG','H264','MJPG','YVYU','VYUY','UYVY','NV12','BGR3','YV12','NV21','BGR4']
# v4l2-ctl --list-formats -d 1
cmd = ['v4l2-ctl', '--list-formats' , '-d', videonumberstr]
try:
scanoutput = subprocess.check_output(cmd).decode('utf-8')
except:
print("error to execute the command" , cmd)
logger.error("error to execute the command %s",cmd)
return False
if not DeviceType in scanoutput:
print("not a video capture device =" , videonumberstr)
return False
# check if one of the format is inside the output string
for formatitem in formats:
if formatitem in scanoutput:
print("At least a format = ", formatitem ," for video capture device =" , videonumberstr)
return True
return False
def checkPIcam(device):
cmd = ['v4l2-ctl', '-d', '/dev/'+device, '-D']
wordtofind="bm2835"
isfound=executeandsearch(cmd,wordtofind)
return isfound
def findPIcam():
devicelist=videodevlist()
# v4l2-ctl -d /dev/video0 -D
for device in devicelist:
cmd = ['v4l2-ctl', '-d', '/dev/'+device, '-D']
wordtofind="bm2835"
isfound=executeandsearch(cmd,wordtofind)
if isfound:
return device
return ""
def executeandsearch(cmd,wordtofind):
try:
scanoutput = subprocess.check_output(cmd).decode('utf-8')
except:
print("error to execute the command" , cmd)
logger.error("error to execute the command %s",cmd)
return False
for line in scanoutput.split('\n'):
#print " line ",line
strstart=line.find(wordtofind)
if strstart>-1:
#found
return True
return False
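# Illustrative sketch: executeandsearch() wraps "run a command, grep its
# output"; checkPIcam() above uses it like this:
#
#   executeandsearch(['v4l2-ctl', '-d', '/dev/video0', '-D'], 'bm2835')
#   # -> True when video0 reports the Broadcom (bm2835) PI camera driver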
def saveshot(filepath, video, realshot, resolution, positionvalue, vdirection):
shottaken=False
print("take photo")
if vdirection=="neg":
rotdeg="180"
else:
rotdeg="0"
currentdate=datetime.datetime.now().strftime("%y-%m-%d,%H:%M")
print("Current date and time: " , currentdate)
if realshot:
filenamenopath=currentdate+"@"+video+"@"+positionvalue+".jpg"
filenamenopath2=currentdate+"@"+video+"@"+positionvalue+"F.jpg"
filenamenopath3=currentdate+"@"+video+"@"+positionvalue+"R.jpg"
else:
filenamenopath="testimage.jpg"
filenamenopath2=filenamenopath
filenamenopath3=filenamenopath
filename=os.path.join(filepath, filenamenopath)
print("Start Photo procedure: ", video , " ************************************************")
print("FILE : ", filename)
cam_list = "/dev/" + video
if not (video==""):
filexist=os.path.isfile(filename)
print("file already exist = ", filexist)
if (filexist)and(not realshot):
os.rename(filename, filename + ".bak")
shottaken=False
w=resolution.split("x")[0]
h=resolution.split("x")[1]
filenamebase=filenamenopath.split(".")[0]
extension=filename.split(".")[1]
# capture image using V4l2
# http://www.geeetech.com/wiki/index.php/Raspberry_Pi_Camera_Module
# v4l2-ctl --set-fmt-video=width=2592,height=1944,pixelformat=3
# v4l2-ctl --stream-mmap=3 --stream-count=1 --stream-to=somefile.jpg
# https://www.raspberrypi.org/forums/viewtopic.php?f=43&t=62364&start=450
# check the v4l2 setting: v4l2-ctl -d /dev/video0 --list-ctrls
# setting
# v4l2-ctl --set-ctrl=gain=00
# sudo v4l2-ctl -d /dev/video0 --set-ctrl=auto_exposure=1
# (auto exposure=0 ->auto; exposure=1 ->manual, each camera has its own name of the parameter, auto_exposure, exposure_auto)
# v4l2-ctl --set-ctrl=exposure_absolute=10
# raspistill provides way better photo than the fswebcam when using the raspbery camera
# raspberry camera is on video0 only
# there is no reliable way to detect the raspicam, then just try to get a picture first with raspistill
if checkPIcam(video):
print("The video device should be PI camera")
shottaken=takeshotandsave_raspistill(filepath,filenamenopath3, video, resolution,rotdeg)
if not shottaken: # gives it a second chance :)
shottaken=takeshotandsave_fswebcam(filepath,filenamenopath2, video, resolution,rotdeg)
else:
print(" The video device should be USB camera")
shottaken=takeshotandsave_fswebcam(filepath,filenamenopath2, video, resolution,rotdeg)
#shottaken=takeshotandsave_mjpg_streamer(filepath,filenamenopath, video, resolution)
if (not shottaken)and(not realshot):
if filexist:
os.rename(filename + ".bak", filename)
print("Picture acknowledge return = " ,shottaken)
else:
print("camera not connected")
return shottaken
def takeshotandsave_raspistill(filepath,filenamenopath, video, resolution, rotdeg):
shottaken=False
if rotdeg=="180":
vflip="-vf -hf"
else:
vflip=""
print("flip ", vflip)
cam_list = "/dev/" + video
i=0
while (not shottaken)and(i<3):
i=i+1
filename=os.path.join(filepath, filenamenopath)
print("FILE : ", filename)
shottaken=False
w=resolution.split("x")[0]
h=resolution.split("x")[1]
print("try raspistill")
filenamebase=filenamenopath.split(".")[0]
extension=filename.split(".")[1]
# create the picture files
isok=False
try:
myproc = subprocess.check_output("raspistill "+vflip+" -w "+w+" -h "+h+" -q 95 -a 12 -a \"%Y-%m-%d %X (UTC)\" -o " + filename, shell=True, stderr=subprocess.STDOUT)
isok=True
except:
print("problem to execute command")
newfilexist=os.path.isfile(filename)
print("file was created = ", newfilexist)
if (isok)and(newfilexist):
print("raspistill got picture")
shottaken=True
# make thumbnail
ExistandThumb(filepath,filenamenopath,shottaken)
else:
print("raspistill not able to get picture")
shottaken=False
print("RASPISTILL Picture take = " ,shottaken, " Attempt ", i)
return shottaken
def takeshotandsave_fswebcam(filepath,filenamenopath, video, resolution, rotdeg):
shottaken=False
if not (video==""):
cam_list = "/dev/" + video
i=0
while (not shottaken)and(i<3):
i=i+1
filename=os.path.join(filepath, filenamenopath)
print("FILE : ", filename)
shottaken=False
w=resolution.split("x")[0]
h=resolution.split("x")[1]
print("try fswebcam")
filenamebase=filenamenopath.split(".")[0]
extension=filename.split(".")[1]
#fswebcam option
if i==1:
S="15"
else:
S="5"
# create the picture files
#fswebcam option
            isok=False
            myproc=None  # keep defined so the print below cannot raise NameError if check_output fails
            try:
                myproc = subprocess.check_output("fswebcam -q -d "+ cam_list +" -r "+resolution+" -S "+S+" --rotate "+rotdeg+" -s brightness=50% -s Contrast=50% --jpeg 95 " + filename, shell=True, stderr=subprocess.STDOUT)
                isok=True
            except:
                print("problem to execute command")
# -R use read() method -- NOT WORKING ---
# -D delay before taking frames
# -S skip the first frames
# -q quiet output mode
# -d device
# -r resoltion
# -F takes frames
print("output from subprocess: " , myproc)
newfilexist=os.path.isfile(filename)
print("file was created = ", newfilexist)
if (isok)and(newfilexist):
print("fswebcam got picture")
shottaken=True
# make thumbnail
ExistandThumb(filepath,filenamenopath,shottaken)
else:
print("fswebcam not able to get picture")
shottaken=False
print("FSWEBCAM Picture take = " ,shottaken, " Attempt ", i)
else:
print("camera not connected")
return shottaken
def takeshotandsave_mjpg_streamer(filepath,filenamenopath, video, resolution , rotdeg):
shottaken=False
filename=os.path.join(filepath, filenamenopath)
print("FILE : ", filename)
if not (video==""):
cam_list = "/dev/" + video
shottaken=False
w=resolution.split("x")[0]
h=resolution.split("x")[1]
print("try mjpg_streamer")
filenamebase=filenamenopath.split(".")[0]
extension=filename.split(".")[1]
pathmjpg=os.path.join(filepath,"mjpg")
if not os.path.exists(pathmjpg):
            # if the folder does not exist, create it
os.makedirs(pathmjpg)
else:
#remove all files in folder
for the_file in os.listdir(pathmjpg):
file_path = os.path.join(pathmjpg, the_file)
try:
if os.path.isfile(file_path):
os.unlink(file_path)
except Exception as e:
print(e)
# create the picture files
fps="20"
if (video=="video0")and(int(w)>1024):
print("mjpg_streamer using the raspicam")
stream="mjpg_streamer -i '/usr/local/lib/mjpg-streamer/input_raspicam.so -d /dev/"+video+" -x "+w+" -y "+h+" -fps "+fps+" -rot "+rotdeg+"' -o '/usr/local/lib/mjpg-streamer/output_file.so -f "+pathmjpg+" -d 100' &"
else:
stream="mjpg_streamer -i '/usr/local/lib/mjpg-streamer/input_uvc.so -d /dev/"+video+" -r "+w+"x"+h+" -f "+fps+" -rot "+rotdeg+"' -o '/usr/local/lib/mjpg-streamer/output_file.so -f "+pathmjpg+" -d 100' &"
call ([stream], shell=True)
time.sleep(2)
call (["sudo pkill mjpg_streamer"], shell=True)
# take last saved file in the folder
folderpath=pathmjpg
filenamelist=[]
sortedlist=sorted([f for f in os.listdir(folderpath) if os.path.isfile(os.path.join(folderpath, f))])
sortedlist.reverse()
lastfile=""
for files in sortedlist:
if (files.endswith(".jpg") or files.endswith(".png")):
lastfile=files
break
#copy lastfile to filepath
if not (lastfile==""):
shottaken=True
#copy file to the right folder and right name
src=os.path.join(pathmjpg, lastfile)
dst=filename
copyfile(src, dst)
# make thumbnail
ExistandThumb(filepath,filenamenopath,shottaken)
else:
print("mjpg_streame not able to get picture")
shottaken=False
print("MJPG_STREAMER Picture take = " ,shottaken)
else:
print("camera not connected")
return shottaken
def ExistandThumb(filepath,filenamenopath,shottaken):
filename=os.path.join(filepath, filenamenopath)
newfilexist=os.path.isfile(filename)
# make thumbnail
if (shottaken and newfilexist):
paththumb=os.path.join(filepath,"thumb")
if not os.path.exists(paththumb):
os.makedirs(paththumb)
try:
image = Image.open(filename)
image.thumbnail((300, 300))
thumbname=os.path.join(paththumb,filenamenopath)
image.save(thumbname)
except:
print("not able to make thumbnail")
return newfilexist
def thumbconsistency(apprunningpath):
# check if there is a thumbnail without corresponding image
filepath=os.path.join(apprunningpath, "static")
filepath=os.path.join(filepath, "hydropicture")
    # check whether the hydropicture folder exists, otherwise create it
if not os.path.exists(filepath):
os.makedirs(filepath)
print("Hydropicture folder has been created")
paththumb=os.path.join(filepath,"thumb")
if not os.path.exists(paththumb):
os.makedirs(paththumb)
print("Hydropicture thumbnail folder has been created")
filenamelist=os.listdir(filepath)
thumbnamelist=os.listdir(paththumb)
for thumbnail in thumbnamelist:
if thumbnail not in filenamelist:
print("thumbnail has no corresponding image, delete")
os.remove(os.path.join(paththumb, thumbnail))
    # create a thumbnail in case a picture has no corresponding thumbnail
for fileimage in filenamelist:
if os.path.isfile(os.path.join(filepath, fileimage)):
if fileimage not in thumbnamelist:
print("image has no corresponding thumbnail, create")
#create thumbnail
try:
image = Image.open(os.path.join(filepath,fileimage))
image.thumbnail((300, 300))
thumbname=os.path.join(paththumb,os.path.basename(fileimage))
image.save(thumbname)
except:
not "able to make thumbnail"
return True
if __name__ == '__main__':
"""
prova funzioni di camera
"""
print("PI cam device :" , findPIcam())
#saveshot()
|
SteveS84/Galaxy-Player-4.0-Kernel | refs/heads/master | tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/Core.py | 802 | # Core.py - Python extension for perf trace, core functions
#
# Copyright (C) 2010 by Tom Zanussi <tzanussi@gmail.com>
#
# This software may be distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
from collections import defaultdict
def autodict():
return defaultdict(autodict)
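# Illustrative sketch: autodict() builds arbitrarily nested dicts on demand,
# which is how flag_fields/symbolic_fields below get populated:
#
#   d = autodict()
#   d['event']['field']['values'][1] = 'ONE'   # no KeyError at any level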
flag_fields = autodict()
symbolic_fields = autodict()
def define_flag_field(event_name, field_name, delim):
flag_fields[event_name][field_name]['delim'] = delim
def define_flag_value(event_name, field_name, value, field_str):
flag_fields[event_name][field_name]['values'][value] = field_str
def define_symbolic_field(event_name, field_name):
# nothing to do, really
pass
def define_symbolic_value(event_name, field_name, value, field_str):
symbolic_fields[event_name][field_name]['values'][value] = field_str
def flag_str(event_name, field_name, value):
string = ""
if flag_fields[event_name][field_name]:
print_delim = 0
keys = flag_fields[event_name][field_name]['values'].keys()
keys.sort()
for idx in keys:
if not value and not idx:
string += flag_fields[event_name][field_name]['values'][idx]
break
if idx and (value & idx) == idx:
if print_delim and flag_fields[event_name][field_name]['delim']:
string += " " + flag_fields[event_name][field_name]['delim'] + " "
string += flag_fields[event_name][field_name]['values'][idx]
print_delim = 1
value &= ~idx
return string
def symbol_str(event_name, field_name, value):
string = ""
if symbolic_fields[event_name][field_name]:
keys = symbolic_fields[event_name][field_name]['values'].keys()
keys.sort()
for idx in keys:
if not value and not idx:
string = symbolic_fields[event_name][field_name]['values'][idx]
break
if (value == idx):
string = symbolic_fields[event_name][field_name]['values'][idx]
break
return string
trace_flags = { 0x00: "NONE", \
0x01: "IRQS_OFF", \
0x02: "IRQS_NOSUPPORT", \
0x04: "NEED_RESCHED", \
0x08: "HARDIRQ", \
0x10: "SOFTIRQ" }
def trace_flag_str(value):
string = ""
print_delim = 0
keys = trace_flags.keys()
for idx in keys:
if not value and not idx:
string += "NONE"
break
if idx and (value & idx) == idx:
if print_delim:
string += " | ";
string += trace_flags[idx]
print_delim = 1
value &= ~idx
return string
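# Illustrative sketch (assuming the flag table above and CPython's usual
# ascending iteration order for these small-int dict keys):
#   trace_flag_str(0x0c)  # -> "NEED_RESCHED | HARDIRQ"
#   trace_flag_str(0x00)  # -> "NONE"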
|
xubenben/scikit-learn | refs/heads/master | examples/decomposition/plot_pca_vs_fa_model_selection.py | 142 | """
===============================================================
Model selection with Probabilistic PCA and Factor Analysis (FA)
===============================================================
Probabilistic PCA and Factor Analysis are probabilistic models.
The consequence is that the likelihood of new data can be used
for model selection and covariance estimation.
Here we compare PCA and FA with cross-validation on low rank data corrupted
with homoscedastic noise (noise variance
is the same for each feature) or heteroscedastic noise (noise variance
is different for each feature). In a second step we compare the model
likelihood to the likelihoods obtained from shrinkage covariance estimators.
One can observe that with homoscedastic noise both FA and PCA succeed
in recovering the size of the low rank subspace. The likelihood with PCA
is higher than FA in this case. However PCA fails and overestimates
the rank when heteroscedastic noise is present. Under appropriate
circumstances the low rank models are more likely than shrinkage models.
The automatic estimation from
Automatic Choice of Dimensionality for PCA. NIPS 2000: 598-604
by Thomas P. Minka is also compared.
"""
print(__doc__)
# Authors: Alexandre Gramfort
# Denis A. Engemann
# License: BSD 3 clause
import numpy as np
import matplotlib.pyplot as plt
from scipy import linalg
from sklearn.decomposition import PCA, FactorAnalysis
from sklearn.covariance import ShrunkCovariance, LedoitWolf
from sklearn.cross_validation import cross_val_score
from sklearn.grid_search import GridSearchCV
###############################################################################
# Create the data
n_samples, n_features, rank = 1000, 50, 10
sigma = 1.
rng = np.random.RandomState(42)
U, _, _ = linalg.svd(rng.randn(n_features, n_features))
X = np.dot(rng.randn(n_samples, rank), U[:, :rank].T)
# Adding homoscedastic noise
X_homo = X + sigma * rng.randn(n_samples, n_features)
# Adding heteroscedastic noise
sigmas = sigma * rng.rand(n_features) + sigma / 2.
X_hetero = X + rng.randn(n_samples, n_features) * sigmas
###############################################################################
# Fit the models
n_components = np.arange(0, n_features, 5) # options for n_components
def compute_scores(X):
pca = PCA()
fa = FactorAnalysis()
pca_scores, fa_scores = [], []
for n in n_components:
pca.n_components = n
fa.n_components = n
pca_scores.append(np.mean(cross_val_score(pca, X)))
fa_scores.append(np.mean(cross_val_score(fa, X)))
return pca_scores, fa_scores
def shrunk_cov_score(X):
shrinkages = np.logspace(-2, 0, 30)
cv = GridSearchCV(ShrunkCovariance(), {'shrinkage': shrinkages})
return np.mean(cross_val_score(cv.fit(X).best_estimator_, X))
def lw_score(X):
return np.mean(cross_val_score(LedoitWolf(), X))
for X, title in [(X_homo, 'Homoscedastic Noise'),
(X_hetero, 'Heteroscedastic Noise')]:
pca_scores, fa_scores = compute_scores(X)
n_components_pca = n_components[np.argmax(pca_scores)]
n_components_fa = n_components[np.argmax(fa_scores)]
pca = PCA(n_components='mle')
pca.fit(X)
n_components_pca_mle = pca.n_components_
print("best n_components by PCA CV = %d" % n_components_pca)
print("best n_components by FactorAnalysis CV = %d" % n_components_fa)
print("best n_components by PCA MLE = %d" % n_components_pca_mle)
plt.figure()
plt.plot(n_components, pca_scores, 'b', label='PCA scores')
plt.plot(n_components, fa_scores, 'r', label='FA scores')
plt.axvline(rank, color='g', label='TRUTH: %d' % rank, linestyle='-')
plt.axvline(n_components_pca, color='b',
label='PCA CV: %d' % n_components_pca, linestyle='--')
plt.axvline(n_components_fa, color='r',
label='FactorAnalysis CV: %d' % n_components_fa, linestyle='--')
plt.axvline(n_components_pca_mle, color='k',
label='PCA MLE: %d' % n_components_pca_mle, linestyle='--')
# compare with other covariance estimators
plt.axhline(shrunk_cov_score(X), color='violet',
label='Shrunk Covariance MLE', linestyle='-.')
    plt.axhline(lw_score(X), color='orange',
                label='LedoitWolf MLE', linestyle='-.')
plt.xlabel('nb of components')
plt.ylabel('CV scores')
plt.legend(loc='lower right')
plt.title(title)
plt.show()
|
keedio/hue | refs/heads/master | desktop/core/ext-py/tablib-0.10.0/tablib/packages/xlwt3/Row.py | 46 | # -*- coding: windows-1252 -*-
from . import BIFFRecords
from . import Style
from .Cell import StrCell, BlankCell, NumberCell, FormulaCell, MulBlankCell, BooleanCell, ErrorCell, \
_get_cells_biff_data_mul
from . import ExcelFormula
import datetime as dt
try:
from decimal import Decimal
except ImportError:
# Python 2.3: decimal not supported; create dummy Decimal class
class Decimal(object):
pass
class Row(object):
__slots__ = [# private variables
"__idx",
"__parent",
"__parent_wb",
"__cells",
"__min_col_idx",
"__max_col_idx",
"__xf_index",
"__has_default_xf_index",
"__height_in_pixels",
# public variables
"height",
"has_default_height",
"height_mismatch",
"level",
"collapse",
"hidden",
"space_above",
"space_below"]
def __init__(self, rowx, parent_sheet):
if not (isinstance(rowx, int) and 0 <= rowx <= 65535):
raise ValueError("row index (%r) not an int in range(65536)" % rowx)
self.__idx = rowx
self.__parent = parent_sheet
self.__parent_wb = parent_sheet.get_parent()
self.__cells = {}
self.__min_col_idx = 0
self.__max_col_idx = 0
self.__xf_index = 0x0F
self.__has_default_xf_index = 0
self.__height_in_pixels = 0x11
self.height = 0x00FF
self.has_default_height = 0x00
self.height_mismatch = 0
self.level = 0
self.collapse = 0
self.hidden = 0
self.space_above = 0
self.space_below = 0
def __adjust_height(self, style):
twips = style.font.height
points = float(twips)/20.0
        # Cell height in pixels can be calculated by the following
        # approximate formula (valid for a 96 dpi screen resolution):
        #   cell height in pixels = font height in points * 83/50 + 2/5
pix = int(round(points*83.0/50.0 + 2.0/5.0))
if pix > self.__height_in_pixels:
self.__height_in_pixels = pix
def __adjust_bound_col_idx(self, *args):
for arg in args:
iarg = int(arg)
if not ((0 <= iarg <= 255) and arg == iarg):
raise ValueError("column index (%r) not an int in range(256)" % arg)
sheet = self.__parent
if iarg < self.__min_col_idx:
self.__min_col_idx = iarg
if iarg > self.__max_col_idx:
self.__max_col_idx = iarg
if iarg < sheet.first_used_col:
sheet.first_used_col = iarg
if iarg > sheet.last_used_col:
sheet.last_used_col = iarg
def __excel_date_dt(self, date):
if isinstance(date, dt.date) and (not isinstance(date, dt.datetime)):
epoch = dt.date(1899, 12, 31)
elif isinstance(date, dt.time):
date = dt.datetime.combine(dt.datetime(1900, 1, 1), date)
epoch = dt.datetime(1900, 1, 1, 0, 0, 0)
else:
epoch = dt.datetime(1899, 12, 31, 0, 0, 0)
delta = date - epoch
xldate = delta.days + float(delta.seconds) / (24*60*60)
        # Add a day to compensate for Excel's phantom 1900-02-29: Excel wrongly
        # treats 1900 as a leap year, so serials after 1900-02-28 are offset by one
if xldate > 59:
xldate += 1
return xldate
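    # Illustrative sketch of the serial-date mapping implemented above
    # (1900 date system, including Excel's phantom 1900-02-29):
    #   __excel_date_dt(dt.date(1900, 1, 1))  -> 1.0
    #   __excel_date_dt(dt.date(1900, 3, 1))  -> 61.0 (60 real days + the phantom day)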
def get_height_in_pixels(self):
return self.__height_in_pixels
def set_style(self, style):
self.__adjust_height(style)
self.__xf_index = self.__parent_wb.add_style(style)
self.__has_default_xf_index = 1
def get_xf_index(self):
return self.__xf_index
def get_cells_count(self):
return len(self.__cells)
def get_min_col(self):
return self.__min_col_idx
def get_max_col(self):
return self.__max_col_idx
def get_row_biff_data(self):
height_options = (self.height & 0x07FFF)
height_options |= (self.has_default_height & 0x01) << 15
options = (self.level & 0x07) << 0
options |= (self.collapse & 0x01) << 4
options |= (self.hidden & 0x01) << 5
options |= (self.height_mismatch & 0x01) << 6
options |= (self.__has_default_xf_index & 0x01) << 7
options |= (0x01 & 0x01) << 8
options |= (self.__xf_index & 0x0FFF) << 16
options |= (self.space_above & 1) << 28
options |= (self.space_below & 1) << 29
return BIFFRecords.RowRecord(self.__idx, self.__min_col_idx,
self.__max_col_idx, height_options, options).get()
def insert_cell(self, col_index, cell_obj):
if col_index in self.__cells:
if not self.__parent._cell_overwrite_ok:
msg = "Attempt to overwrite cell: sheetname=%r rowx=%d colx=%d" \
% (self.__parent.name, self.__idx, col_index)
raise Exception(msg)
prev_cell_obj = self.__cells[col_index]
sst_idx = getattr(prev_cell_obj, 'sst_idx', None)
if sst_idx is not None:
self.__parent_wb.del_str(sst_idx)
self.__cells[col_index] = cell_obj
def insert_mulcells(self, colx1, colx2, cell_obj):
self.insert_cell(colx1, cell_obj)
for col_index in range(colx1+1, colx2+1):
self.insert_cell(col_index, None)
def get_cells_biff_data(self):
cell_items = [item for item in self.__cells.items() if item[1] is not None]
cell_items.sort() # in column order
return _get_cells_biff_data_mul(self.__idx, cell_items)
# previously:
# return ''.join([cell.get_biff_data() for colx, cell in cell_items])
def get_index(self):
return self.__idx
def set_cell_text(self, colx, value, style=Style.default_style):
self.__adjust_height(style)
self.__adjust_bound_col_idx(colx)
xf_index = self.__parent_wb.add_style(style)
self.insert_cell(colx, StrCell(self.__idx, colx, xf_index, self.__parent_wb.add_str(value)))
def set_cell_blank(self, colx, style=Style.default_style):
self.__adjust_height(style)
self.__adjust_bound_col_idx(colx)
xf_index = self.__parent_wb.add_style(style)
self.insert_cell(colx, BlankCell(self.__idx, colx, xf_index))
def set_cell_mulblanks(self, first_colx, last_colx, style=Style.default_style):
assert 0 <= first_colx <= last_colx <= 255
self.__adjust_height(style)
self.__adjust_bound_col_idx(first_colx, last_colx)
xf_index = self.__parent_wb.add_style(style)
# ncols = last_colx - first_colx + 1
self.insert_mulcells(first_colx, last_colx, MulBlankCell(self.__idx, first_colx, last_colx, xf_index))
def set_cell_number(self, colx, number, style=Style.default_style):
self.__adjust_height(style)
self.__adjust_bound_col_idx(colx)
xf_index = self.__parent_wb.add_style(style)
self.insert_cell(colx, NumberCell(self.__idx, colx, xf_index, number))
def set_cell_date(self, colx, datetime_obj, style=Style.default_style):
self.__adjust_height(style)
self.__adjust_bound_col_idx(colx)
xf_index = self.__parent_wb.add_style(style)
self.insert_cell(colx,
NumberCell(self.__idx, colx, xf_index, self.__excel_date_dt(datetime_obj)))
def set_cell_formula(self, colx, formula, style=Style.default_style, calc_flags=0):
self.__adjust_height(style)
self.__adjust_bound_col_idx(colx)
xf_index = self.__parent_wb.add_style(style)
self.__parent_wb.add_sheet_reference(formula)
        self.insert_cell(colx, FormulaCell(self.__idx, colx, xf_index, formula, calc_flags=calc_flags))  # pass the caller's calc_flags through instead of hard-coding 0
def set_cell_boolean(self, colx, value, style=Style.default_style):
self.__adjust_height(style)
self.__adjust_bound_col_idx(colx)
xf_index = self.__parent_wb.add_style(style)
self.insert_cell(colx, BooleanCell(self.__idx, colx, xf_index, bool(value)))
def set_cell_error(self, colx, error_string_or_code, style=Style.default_style):
self.__adjust_height(style)
self.__adjust_bound_col_idx(colx)
xf_index = self.__parent_wb.add_style(style)
self.insert_cell(colx, ErrorCell(self.__idx, colx, xf_index, error_string_or_code))
def write(self, col, label, style=Style.default_style):
self.__adjust_height(style)
self.__adjust_bound_col_idx(col)
style_index = self.__parent_wb.add_style(style)
if isinstance(label, str):
if len(label) > 0:
self.insert_cell(col,
StrCell(self.__idx, col, style_index, self.__parent_wb.add_str(label))
)
else:
self.insert_cell(col, BlankCell(self.__idx, col, style_index))
elif isinstance(label, bool): # bool is subclass of int; test bool first
self.insert_cell(col, BooleanCell(self.__idx, col, style_index, label))
elif isinstance(label, (float, int, Decimal)):
self.insert_cell(col, NumberCell(self.__idx, col, style_index, label))
elif isinstance(label, (dt.datetime, dt.date, dt.time)):
date_number = self.__excel_date_dt(label)
self.insert_cell(col, NumberCell(self.__idx, col, style_index, date_number))
elif label is None:
self.insert_cell(col, BlankCell(self.__idx, col, style_index))
elif isinstance(label, ExcelFormula.Formula):
self.__parent_wb.add_sheet_reference(label)
self.insert_cell(col, FormulaCell(self.__idx, col, style_index, label))
else:
raise Exception("Unexpected data type %r" % type(label))
write_blanks = set_cell_mulblanks
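# Illustrative usage sketch (assumptions: the xlwt3 package is importable as
# shown and Workbook/Worksheet dispatch into Row.write() above):
#
#   import xlwt3
#   wb = xlwt3.Workbook()
#   ws = wb.add_sheet('demo')
#   ws.write(0, 0, 'label')   # string -> StrCell
#   ws.write(0, 1, 3.14)      # number -> NumberCell
#   wb.save('demo.xls')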
|
AsgerPetersen/QGIS | refs/heads/master | python/ext-libs/pygments/lexers/_scilab_builtins.py | 364 | # -*- coding: utf-8 -*-
"""
pygments.lexers._scilab_builtins
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Builtin list for the ScilabLexer.
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
# These lists are generated automatically.
# Run the following in a Scilab script:
#
# varType=["functions", "commands", "macros", "variables" ];
# fd = mopen('list.txt','wt');
#
# for j=1:size(varType,"*")
# myStr="";
# a=completion("",varType(j));
# myStr=varType(j)+"_kw = [";
# for i=1:size(a,"*")
# myStr = myStr + """" + a(i) + """";
# if size(a,"*") <> i then
# myStr = myStr + ","; end
# end
# myStr = myStr + "]";
# mputl(myStr,fd);
# end
# mclose(fd);
#
# Then replace "$" by "\\$" manually.
functions_kw = ["%XMLAttr_6","%XMLAttr_e","%XMLAttr_i_XMLElem","%XMLAttr_length","%XMLAttr_p","%XMLAttr_size","%XMLDoc_6","%XMLDoc_e","%XMLDoc_i_XMLList","%XMLDoc_p","%XMLElem_6","%XMLElem_e","%XMLElem_i_XMLDoc","%XMLElem_i_XMLElem","%XMLElem_i_XMLList","%XMLElem_p","%XMLList_6","%XMLList_e","%XMLList_i_XMLElem","%XMLList_i_XMLList","%XMLList_length","%XMLList_p","%XMLList_size","%XMLNs_6","%XMLNs_e","%XMLNs_i_XMLElem","%XMLNs_p","%XMLSet_6","%XMLSet_e","%XMLSet_length","%XMLSet_p","%XMLSet_size","%XMLValid_p","%b_i_XMLList","%c_i_XMLAttr","%c_i_XMLDoc","%c_i_XMLElem","%c_i_XMLList","%ce_i_XMLList","%fptr_i_XMLList","%h_i_XMLList","%hm_i_XMLList","%i_abs","%i_cumprod","%i_cumsum","%i_diag","%i_i_XMLList","%i_matrix","%i_max","%i_maxi","%i_min","%i_mini","%i_mput","%i_p","%i_prod","%i_sum","%i_tril","%i_triu","%ip_i_XMLList","%l_i_XMLList","%lss_i_XMLList","%mc_i_XMLList","%msp_full","%msp_i_XMLList","%msp_spget","%p_i_XMLList","%ptr_i_XMLList","%r_i_XMLList","%s_i_XMLList","%sp_i_XMLList","%spb_i_XMLList","%st_i_XMLList","Calendar","ClipBoard","Matplot","Matplot1","PlaySound","TCL_DeleteInterp","TCL_DoOneEvent","TCL_EvalFile","TCL_EvalStr","TCL_ExistArray","TCL_ExistInterp","TCL_ExistVar","TCL_GetVar","TCL_GetVersion","TCL_SetVar","TCL_UnsetVar","TCL_UpVar","_","_code2str","_str2code","about","abs","acos","addcb","addf","addhistory","addinter","amell","and","argn","arl2_ius","ascii","asin","atan","backslash","balanc","banner","base2dec","basename","bdiag","beep","besselh","besseli","besselj","besselk","bessely","beta","bezout","bfinit","blkfc1i","blkslvi","bool2s","browsehistory","browsevar","bsplin3val","buildDocv2","buildouttb","bvode","c_link","calerf","call","callblk","captions","cd","cdfbet","cdfbin","cdfchi","cdfchn","cdff","cdffnc","cdfgam","cdfnbn","cdfnor","cdfpoi","cdft","ceil","champ","champ1","chdir","chol","clc","clean","clear","clear_pixmap","clearfun","clearglobal","closeEditor","closeXcos","code2str","coeff","comp","completion","conj","contour2di","contr","conv2","convstr","copy","copyfile","corr","cos","coserror","createdir","cshep2d","ctree2","ctree3","ctree4","cumprod","cumsum","curblock","curblockc","dasrt","dassl","data2sig","debug","dec2base","deff","definedfields","degree","delbpt","delete","deletefile","delip","delmenu","det","dgettext","dhinf","diag","diary","diffobjs","disp","dispbpt","displayhistory","disposefftwlibrary","dlgamma","dnaupd","dneupd","double","draw","drawaxis","drawlater","drawnow","dsaupd","dsearch","dseupd","duplicate","editor","editvar","emptystr","end_scicosim","ereduc","errcatch","errclear","error","eval_cshep2d","exec","execstr","exists","exit","exp","expm","exportUI","export_to_hdf5","eye","fadj2sp","fec","feval","fft","fftw","fftw_flags","fftw_forget_wisdom","fftwlibraryisloaded","file","filebrowser","fileext","fileinfo","fileparts","filesep","find","findBD","findfiles","floor","format","fort","fprintfMat","freq","frexp","fromc","fromjava","fscanfMat","fsolve","fstair","full","fullpath","funcprot","funptr","gamma","gammaln","geom3d","get","get_absolute_file_path","get_fftw_wisdom","getblocklabel","getcallbackobject","getdate","getdebuginfo","getdefaultlanguage","getdrives","getdynlibext","getenv","getfield","gethistory","gethistoryfile","getinstalledlookandfeels","getio","getlanguage","getlongpathname","getlookandfeel","getmd5","getmemory","getmodules","getos","getpid","getrelativefilename","getscicosvars","getscilabmode","getshortpathname","gettext","getvariablesonstack","getversion","glist","global","glue","grand","grayplot","grep","gsort"
,"gstacksize","havewindow","helpbrowser","hess","hinf","historymanager","historysize","host","iconvert","iconvert","ieee","ilib_verbose","imag","impl","import_from_hdf5","imult","inpnvi","int","int16","int2d","int32","int3d","int8","interp","interp2d","interp3d","intg","intppty","inttype","inv","is_handle_valid","isalphanum","isascii","isdef","isdigit","isdir","isequal","isequalbitwise","iserror","isfile","isglobal","isletter","isreal","iswaitingforinput","javaclasspath","javalibrarypath","kron","lasterror","ldiv","ldivf","legendre","length","lib","librarieslist","libraryinfo","linear_interpn","lines","link","linmeq","list","load","loadScicos","loadfftwlibrary","loadhistory","log","log1p","lsq","lsq_splin","lsqrsolve","lsslist","lstcat","lstsize","ltitr","lu","ludel","lufact","luget","lusolve","macr2lst","macr2tree","matfile_close","matfile_listvar","matfile_open","matfile_varreadnext","matfile_varwrite","matrix","max","maxfiles","mclearerr","mclose","meof","merror","messagebox","mfprintf","mfscanf","mget","mgeti","mgetl","mgetstr","min","mlist","mode","model2blk","mopen","move","movefile","mprintf","mput","mputl","mputstr","mscanf","mseek","msprintf","msscanf","mtell","mtlb_mode","mtlb_sparse","mucomp","mulf","nearfloat","newaxes","newest","newfun","nnz","notify","number_properties","ode","odedc","ones","opentk","optim","or","ordmmd","parallel_concurrency","parallel_run","param3d","param3d1","part","pathconvert","pathsep","phase_simulation","plot2d","plot2d1","plot2d2","plot2d3","plot2d4","plot3d","plot3d1","pointer_xproperty","poly","ppol","pppdiv","predef","print","printf","printfigure","printsetupbox","prod","progressionbar","prompt","pwd","qld","qp_solve","qr","raise_window","rand","rankqr","rat","rcond","rdivf","read","read4b","readb","readgateway","readmps","real","realtime","realtimeinit","regexp","relocate_handle","remez","removedir","removelinehistory","res_with_prec","resethistory","residu","resume","return","ricc","ricc_old","rlist","roots","rotate_axes","round","rpem","rtitr","rubberbox","save","saveafterncommands","saveconsecutivecommands","savehistory","schur","sci_haltscicos","sci_tree2","sci_tree3","sci_tree4","sciargs","scicos_debug","scicos_debug_count","scicos_time","scicosim","scinotes","sctree","semidef","set","set_blockerror","set_fftw_wisdom","set_xproperty","setbpt","setdefaultlanguage","setenv","setfield","sethistoryfile","setlanguage","setlookandfeel","setmenu","sfact","sfinit","show_pixmap","show_window","showalluimenushandles","sident","sig2data","sign","simp","simp_mode","sin","size","slash","sleep","sorder","sparse","spchol","spcompack","spec","spget","splin","splin2d","splin3d","spones","sprintf","sqrt","stacksize","str2code","strcat","strchr","strcmp","strcspn","strindex","string","stringbox","stripblanks","strncpy","strrchr","strrev","strsplit","strspn","strstr","strsubst","strtod","strtok","subf","sum","svd","swap_handles","symfcti","syredi","system_getproperty","system_setproperty","ta2lpd","tan","taucs_chdel","taucs_chfact","taucs_chget","taucs_chinfo","taucs_chsolve","tempname","testmatrix","timer","tlist","tohome","tokens","toolbar","toprint","tr_zer","tril","triu","type","typename","uiDisplayTree","uicontextmenu","uicontrol","uigetcolor","uigetdir","uigetfile","uigetfont","uimenu","uint16","uint32","uint8","uipopup","uiputfile","uiwait","ulink","umf_ludel","umf_lufact","umf_luget","umf_luinfo","umf_lusolve","umfpack","unglue","unix","unsetmenu","unzoom","updatebrowsevar","usecanvas","user","var2vec","varn","vec2var","waitbar","warnBlockByUID","warning
","what","where","whereis","who","winsid","with_embedded_jre","with_module","writb","write","write4b","x_choose","x_choose_modeless","x_dialog","x_mdialog","xarc","xarcs","xarrows","xchange","xchoicesi","xclick","xcos","xcosAddToolsMenu","xcosConfigureXmlFile","xcosDiagramToScilab","xcosPalCategoryAdd","xcosPalDelete","xcosPalDisable","xcosPalEnable","xcosPalGenerateIcon","xcosPalLoad","xcosPalMove","xcosUpdateBlock","xdel","xfarc","xfarcs","xfpoly","xfpolys","xfrect","xget","xgetech","xgetmouse","xgraduate","xgrid","xlfont","xls_open","xls_read","xmlAddNs","xmlAsNumber","xmlAsText","xmlDTD","xmlDelete","xmlDocument","xmlDump","xmlElement","xmlFormat","xmlGetNsByHref","xmlGetNsByPrefix","xmlGetOpenDocs","xmlIsValidObject","xmlNs","xmlRead","xmlReadStr","xmlRelaxNG","xmlRemove","xmlSchema","xmlSetAttributes","xmlValidate","xmlWrite","xmlXPath","xname","xpause","xpoly","xpolys","xrect","xrects","xs2bmp","xs2eps","xs2gif","xs2jpg","xs2pdf","xs2png","xs2ppm","xs2ps","xs2svg","xsegs","xset","xsetech","xstring","xstringb","xtitle","zeros","znaupd","zneupd","zoom_rect"]
commands_kw = ["abort","apropos","break","case","catch","clc","clear","continue","do","else","elseif","end","endfunction","exit","for","function","help","if","pause","pwd","quit","resume","return","select","then","try","what","while","who"]
macros_kw = ["%0_i_st","%3d_i_h","%Block_xcosUpdateBlock","%TNELDER_p","%TNELDER_string","%TNMPLOT_p","%TNMPLOT_string","%TOPTIM_p","%TOPTIM_string","%TSIMPLEX_p","%TSIMPLEX_string","%_gsort","%_strsplit","%ar_p","%asn","%b_a_b","%b_a_s","%b_c_s","%b_c_spb","%b_cumprod","%b_cumsum","%b_d_s","%b_diag","%b_e","%b_f_s","%b_f_spb","%b_g_s","%b_g_spb","%b_h_s","%b_h_spb","%b_i_b","%b_i_ce","%b_i_h","%b_i_hm","%b_i_s","%b_i_sp","%b_i_spb","%b_i_st","%b_iconvert","%b_l_b","%b_l_s","%b_m_b","%b_m_s","%b_matrix","%b_n_hm","%b_o_hm","%b_p_s","%b_prod","%b_r_b","%b_r_s","%b_s_b","%b_s_s","%b_string","%b_sum","%b_tril","%b_triu","%b_x_b","%b_x_s","%c_a_c","%c_b_c","%c_b_s","%c_diag","%c_e","%c_eye","%c_f_s","%c_i_c","%c_i_ce","%c_i_h","%c_i_hm","%c_i_lss","%c_i_r","%c_i_s","%c_i_st","%c_matrix","%c_n_l","%c_n_st","%c_o_l","%c_o_st","%c_ones","%c_rand","%c_tril","%c_triu","%cblock_c_cblock","%cblock_c_s","%cblock_e","%cblock_f_cblock","%cblock_p","%cblock_size","%ce_6","%ce_c_ce","%ce_e","%ce_f_ce","%ce_i_ce","%ce_i_s","%ce_i_st","%ce_matrix","%ce_p","%ce_size","%ce_string","%ce_t","%champdat_i_h","%choose","%diagram_xcos","%dir_p","%fptr_i_st","%grayplot_i_h","%h_i_st","%hm_1_hm","%hm_1_s","%hm_2_hm","%hm_2_s","%hm_3_hm","%hm_3_s","%hm_4_hm","%hm_4_s","%hm_5","%hm_a_hm","%hm_a_r","%hm_a_s","%hm_abs","%hm_and","%hm_bool2s","%hm_c_hm","%hm_ceil","%hm_conj","%hm_cos","%hm_cumprod","%hm_cumsum","%hm_d_hm","%hm_d_s","%hm_degree","%hm_e","%hm_exp","%hm_f_hm","%hm_fft","%hm_find","%hm_floor","%hm_g_hm","%hm_h_hm","%hm_i_b","%hm_i_ce","%hm_i_hm","%hm_i_i","%hm_i_p","%hm_i_r","%hm_i_s","%hm_i_st","%hm_iconvert","%hm_imag","%hm_int","%hm_isnan","%hm_isreal","%hm_j_hm","%hm_j_s","%hm_k_hm","%hm_k_s","%hm_log","%hm_m_p","%hm_m_r","%hm_m_s","%hm_matrix","%hm_maxi","%hm_mean","%hm_median","%hm_mini","%hm_n_b","%hm_n_c","%hm_n_hm","%hm_n_i","%hm_n_p","%hm_n_s","%hm_o_b","%hm_o_c","%hm_o_hm","%hm_o_i","%hm_o_p","%hm_o_s","%hm_ones","%hm_or","%hm_p","%hm_prod","%hm_q_hm","%hm_r_s","%hm_rand","%hm_real","%hm_round","%hm_s","%hm_s_hm","%hm_s_r","%hm_s_s","%hm_sign","%hm_sin","%hm_size","%hm_sqrt","%hm_st_deviation","%hm_string","%hm_sum","%hm_x_hm","%hm_x_p","%hm_x_s","%hm_zeros","%i_1_s","%i_2_s","%i_3_s","%i_4_s","%i_Matplot","%i_a_i","%i_a_s","%i_and","%i_ascii","%i_b_s","%i_bezout","%i_champ","%i_champ1","%i_contour","%i_contour2d","%i_d_i","%i_d_s","%i_e","%i_fft","%i_g_i","%i_gcd","%i_h_i","%i_i_ce","%i_i_h","%i_i_hm","%i_i_i","%i_i_s","%i_i_st","%i_j_i","%i_j_s","%i_l_s","%i_lcm","%i_length","%i_m_i","%i_m_s","%i_mfprintf","%i_mprintf","%i_msprintf","%i_n_s","%i_o_s","%i_or","%i_p_i","%i_p_s","%i_plot2d","%i_plot2d1","%i_plot2d2","%i_q_s","%i_r_i","%i_r_s","%i_round","%i_s_i","%i_s_s","%i_sign","%i_string","%i_x_i","%i_x_s","%ip_a_s","%ip_i_st","%ip_m_s","%ip_n_ip","%ip_o_ip","%ip_p","%ip_s_s","%ip_string","%k","%l_i_h","%l_i_s","%l_i_st","%l_isequal","%l_n_c","%l_n_l","%l_n_m","%l_n_p","%l_n_s","%l_n_st","%l_o_c","%l_o_l","%l_o_m","%l_o_p","%l_o_s","%l_o_st","%lss_a_lss","%lss_a_p","%lss_a_r","%lss_a_s","%lss_c_lss","%lss_c_p","%lss_c_r","%lss_c_s","%lss_e","%lss_eye","%lss_f_lss","%lss_f_p","%lss_f_r","%lss_f_s","%lss_i_ce","%lss_i_lss","%lss_i_p","%lss_i_r","%lss_i_s","%lss_i_st","%lss_inv","%lss_l_lss","%lss_l_p","%lss_l_r","%lss_l_s","%lss_m_lss","%lss_m_p","%lss_m_r","%lss_m_s","%lss_n_lss","%lss_n_p","%lss_n_r","%lss_n_s","%lss_norm","%lss_o_lss","%lss_o_p","%lss_o_r","%lss_o_s","%lss_ones","%lss_r_lss","%lss_r_p","%lss_r_r","%lss_r_s","%lss_rand","%lss_s","%lss_s_lss","%lss_s_p","%lss_s_r","%lss_s_s","%lss
_size","%lss_t","%lss_v_lss","%lss_v_p","%lss_v_r","%lss_v_s","%lt_i_s","%m_n_l","%m_o_l","%mc_i_h","%mc_i_s","%mc_i_st","%mc_n_st","%mc_o_st","%mc_string","%mps_p","%mps_string","%msp_a_s","%msp_abs","%msp_e","%msp_find","%msp_i_s","%msp_i_st","%msp_length","%msp_m_s","%msp_maxi","%msp_n_msp","%msp_nnz","%msp_o_msp","%msp_p","%msp_sparse","%msp_spones","%msp_t","%p_a_lss","%p_a_r","%p_c_lss","%p_c_r","%p_cumprod","%p_cumsum","%p_d_p","%p_d_r","%p_d_s","%p_det","%p_e","%p_f_lss","%p_f_r","%p_i_ce","%p_i_h","%p_i_hm","%p_i_lss","%p_i_p","%p_i_r","%p_i_s","%p_i_st","%p_inv","%p_j_s","%p_k_p","%p_k_r","%p_k_s","%p_l_lss","%p_l_p","%p_l_r","%p_l_s","%p_m_hm","%p_m_lss","%p_m_r","%p_matrix","%p_n_l","%p_n_lss","%p_n_r","%p_o_l","%p_o_lss","%p_o_r","%p_o_sp","%p_p_s","%p_prod","%p_q_p","%p_q_r","%p_q_s","%p_r_lss","%p_r_p","%p_r_r","%p_r_s","%p_s_lss","%p_s_r","%p_simp","%p_string","%p_sum","%p_v_lss","%p_v_p","%p_v_r","%p_v_s","%p_x_hm","%p_x_r","%p_y_p","%p_y_r","%p_y_s","%p_z_p","%p_z_r","%p_z_s","%r_a_hm","%r_a_lss","%r_a_p","%r_a_r","%r_a_s","%r_c_lss","%r_c_p","%r_c_r","%r_c_s","%r_clean","%r_cumprod","%r_d_p","%r_d_r","%r_d_s","%r_det","%r_diag","%r_e","%r_eye","%r_f_lss","%r_f_p","%r_f_r","%r_f_s","%r_i_ce","%r_i_hm","%r_i_lss","%r_i_p","%r_i_r","%r_i_s","%r_i_st","%r_inv","%r_j_s","%r_k_p","%r_k_r","%r_k_s","%r_l_lss","%r_l_p","%r_l_r","%r_l_s","%r_m_hm","%r_m_lss","%r_m_p","%r_m_r","%r_m_s","%r_matrix","%r_n_lss","%r_n_p","%r_n_r","%r_n_s","%r_norm","%r_o_lss","%r_o_p","%r_o_r","%r_o_s","%r_ones","%r_p","%r_p_s","%r_prod","%r_q_p","%r_q_r","%r_q_s","%r_r_lss","%r_r_p","%r_r_r","%r_r_s","%r_rand","%r_s","%r_s_hm","%r_s_lss","%r_s_p","%r_s_r","%r_s_s","%r_simp","%r_size","%r_string","%r_sum","%r_t","%r_tril","%r_triu","%r_v_lss","%r_v_p","%r_v_r","%r_v_s","%r_x_p","%r_x_r","%r_x_s","%r_y_p","%r_y_r","%r_y_s","%r_z_p","%r_z_r","%r_z_s","%s_1_hm","%s_1_i","%s_2_hm","%s_2_i","%s_3_hm","%s_3_i","%s_4_hm","%s_4_i","%s_5","%s_a_b","%s_a_hm","%s_a_i","%s_a_ip","%s_a_lss","%s_a_msp","%s_a_r","%s_a_sp","%s_and","%s_b_i","%s_b_s","%s_c_b","%s_c_cblock","%s_c_lss","%s_c_r","%s_c_sp","%s_d_b","%s_d_i","%s_d_p","%s_d_r","%s_d_sp","%s_e","%s_f_b","%s_f_cblock","%s_f_lss","%s_f_r","%s_f_sp","%s_g_b","%s_g_s","%s_h_b","%s_h_s","%s_i_b","%s_i_c","%s_i_ce","%s_i_h","%s_i_hm","%s_i_i","%s_i_lss","%s_i_p","%s_i_r","%s_i_s","%s_i_sp","%s_i_spb","%s_i_st","%s_j_i","%s_k_hm","%s_k_p","%s_k_r","%s_k_sp","%s_l_b","%s_l_hm","%s_l_i","%s_l_lss","%s_l_p","%s_l_r","%s_l_s","%s_l_sp","%s_m_b","%s_m_hm","%s_m_i","%s_m_ip","%s_m_lss","%s_m_msp","%s_m_r","%s_matrix","%s_n_hm","%s_n_i","%s_n_l","%s_n_lss","%s_n_r","%s_n_st","%s_o_hm","%s_o_i","%s_o_l","%s_o_lss","%s_o_r","%s_o_st","%s_or","%s_p_b","%s_p_i","%s_pow","%s_q_hm","%s_q_i","%s_q_p","%s_q_r","%s_q_sp","%s_r_b","%s_r_i","%s_r_lss","%s_r_p","%s_r_r","%s_r_s","%s_r_sp","%s_s_b","%s_s_hm","%s_s_i","%s_s_ip","%s_s_lss","%s_s_r","%s_s_sp","%s_simp","%s_v_lss","%s_v_p","%s_v_r","%s_v_s","%s_x_b","%s_x_hm","%s_x_i","%s_x_r","%s_y_p","%s_y_r","%s_y_sp","%s_z_p","%s_z_r","%s_z_sp","%sn","%sp_a_s","%sp_a_sp","%sp_and","%sp_c_s","%sp_ceil","%sp_cos","%sp_cumprod","%sp_cumsum","%sp_d_s","%sp_d_sp","%sp_diag","%sp_e","%sp_exp","%sp_f_s","%sp_floor","%sp_gsort","%sp_i_ce","%sp_i_h","%sp_i_s","%sp_i_sp","%sp_i_st","%sp_int","%sp_inv","%sp_k_s","%sp_k_sp","%sp_l_s","%sp_l_sp","%sp_length","%sp_norm","%sp_or","%sp_p_s","%sp_prod","%sp_q_s","%sp_q_sp","%sp_r_s","%sp_r_sp","%sp_round","%sp_s_s","%sp_s_sp","%sp_sin","%sp_sqrt","%sp_string","%sp_sum","%sp_tril","%sp_triu","%sp_y_s","
%sp_y_sp","%sp_z_s","%sp_z_sp","%spb_and","%spb_c_b","%spb_cumprod","%spb_cumsum","%spb_diag","%spb_e","%spb_f_b","%spb_g_b","%spb_g_spb","%spb_h_b","%spb_h_spb","%spb_i_b","%spb_i_ce","%spb_i_h","%spb_i_st","%spb_or","%spb_prod","%spb_sum","%spb_tril","%spb_triu","%st_6","%st_c_st","%st_e","%st_f_st","%st_i_b","%st_i_c","%st_i_fptr","%st_i_h","%st_i_i","%st_i_ip","%st_i_lss","%st_i_msp","%st_i_p","%st_i_r","%st_i_s","%st_i_sp","%st_i_spb","%st_i_st","%st_matrix","%st_n_c","%st_n_l","%st_n_mc","%st_n_p","%st_n_s","%st_o_c","%st_o_l","%st_o_mc","%st_o_p","%st_o_s","%st_o_tl","%st_p","%st_size","%st_string","%st_t","%ticks_i_h","%xls_e","%xls_p","%xlssheet_e","%xlssheet_p","%xlssheet_size","%xlssheet_string","DominationRank","G_make","IsAScalar","NDcost","OS_Version","PlotSparse","ReadHBSparse","ReadmiMatrix","TCL_CreateSlave","WritemiMatrix","abcd","abinv","accept_func_default","accept_func_vfsa","acf","acosd","acosh","acoshm","acosm","acot","acotd","acoth","acsc","acscd","acsch","add_demo","add_help_chapter","add_module_help_chapter","add_param","add_profiling","adj2sp","aff2ab","ana_style","analpf","analyze","aplat","apropos","arhnk","arl2","arma2p","armac","armax","armax1","arobasestring2strings","arsimul","ascii2string","asciimat","asec","asecd","asech","asind","asinh","asinhm","asinm","assert_checkalmostequal","assert_checkequal","assert_checkerror","assert_checkfalse","assert_checkfilesequal","assert_checktrue","assert_comparecomplex","assert_computedigits","assert_cond2reltol","assert_cond2reqdigits","assert_generror","atand","atanh","atanhm","atanm","atomsAutoload","atomsAutoloadAdd","atomsAutoloadDel","atomsAutoloadList","atomsCategoryList","atomsCheckModule","atomsDepTreeShow","atomsGetConfig","atomsGetInstalled","atomsGetLoaded","atomsGetLoadedPath","atomsInstall","atomsIsInstalled","atomsIsLoaded","atomsList","atomsLoad","atomsRemove","atomsRepositoryAdd","atomsRepositoryDel","atomsRepositoryList","atomsRestoreConfig","atomsSaveConfig","atomsSearch","atomsSetConfig","atomsShow","atomsSystemInit","atomsSystemUpdate","atomsTest","atomsUpdate","atomsVersion","augment","auread","auwrite","balreal","bench_run","bilin","bilt","bin2dec","binomial","bitand","bitcmp","bitget","bitor","bitset","bitxor","black","blanks","bloc2exp","bloc2ss","block_parameter_error","bode","bstap","buttmag","bvodeS","bytecode","bytecodewalk","cainv","calendar","calfrq","canon","casc","cat","cat_code","cb_m2sci_gui","ccontrg","cell","cell2mat","cellstr","center","cepstrum","cfspec","char","chart","cheb1mag","cheb2mag","check_gateways","check_help","check_modules_xml","check_versions","chepol","chfact","chsolve","classmarkov","clean_help","clock","cls2dls","cmb_lin","cmndred","cmoment","coding_ga_binary","coding_ga_identity","coff","coffg","colcomp","colcompr","colinout","colregul","companion","complex","compute_initial_temp","cond","cond2sp","condestsp","config","configure_msifort","configure_msvc","cont_frm","cont_mat","contrss","conv","convert_to_float","convertindex","convol","convol2d","copfac","correl","cosd","cosh","coshm","cosm","cotd","cotg","coth","cothm","covar","createfun","createstruct","crossover_ga_binary","crossover_ga_default","csc","cscd","csch","csgn","csim","cspect","ctr_gram","czt","dae","daeoptions","damp","datafit","date","datenum","datevec","dbphi","dcf","ddp","dec2bin","dec2hex","dec2oct","del_help_chapter","del_module_help_chapter","demo_begin","demo_choose","demo_compiler","demo_end","demo_file_choice","demo_folder_choice","demo_function_choice","demo_gui","demo_mdialog","demo_message
","demo_run","demo_viewCode","denom","derivat","derivative","des2ss","des2tf","detectmsifort64tools","detectmsvc64tools","determ","detr","detrend","devtools_run_builder","dft","dhnorm","diff","diophant","dir","dirname","dispfiles","dllinfo","dscr","dsimul","dt_ility","dtsi","edit","edit_error","eigenmarkov","ell1mag","enlarge_shape","entropy","eomday","epred","eqfir","eqiir","equil","equil1","erf","erfc","erfcx","erfinv","etime","eval","evans","evstr","expression2code","extract_help_examples","factor","factorial","factors","faurre","ffilt","fft2","fftshift","fieldnames","filt_sinc","filter","findABCD","findAC","findBDK","findR","find_freq","find_links","find_scicos_version","findm","findmsifortcompiler","findmsvccompiler","findx0BD","firstnonsingleton","fit_dat","fix","fixedpointgcd","flipdim","flts","fminsearch","format_txt","fourplan","fprintf","frep2tf","freson","frfit","frmag","fscanf","fseek_origin","fsfirlin","fspec","fspecg","fstabst","ftest","ftuneq","fullfile","fullrf","fullrfk","fun2string","g_margin","gainplot","gamitg","gcare","gcd","gencompilationflags_unix","generateBlockImage","generateBlockImages","generic_i_ce","generic_i_h","generic_i_hm","generic_i_s","generic_i_st","genlib","genlib_old","genmarkov","geomean","getDiagramVersion","getModelicaPath","get_file_path","get_function_path","get_param","get_profile","get_scicos_version","getd","getscilabkeywords","getshell","gettklib","gfare","gfrancis","givens","glever","gmres","group","gschur","gspec","gtild","h2norm","h_cl","h_inf","h_inf_st","h_norm","hallchart","halt","hank","hankelsv","harmean","haveacompiler","head_comments","help","help_from_sci","help_skeleton","hermit","hex2dec","hilb","hilbert","horner","householder","hrmt","htrianr","hypermat","ifft","iir","iirgroup","iirlp","iirmod","ilib_build","ilib_compile","ilib_for_link","ilib_gen_Make","ilib_gen_Make_unix","ilib_gen_cleaner","ilib_gen_gateway","ilib_gen_loader","ilib_include_flag","ilib_mex_build","im_inv","importScicosDiagram","importScicosPal","importXcosDiagram","imrep2ss","ind2sub","inistate","init_ga_default","init_param","initial_scicos_tables","input","instruction2code","intc","intdec","integrate","interp1","interpln","intersect","intl","intsplin","inttrap","inv_coeff","invr","invrs","invsyslin","iqr","isLeapYear","is_absolute_path","is_param","iscell","iscellstr","isempty","isfield","isinf","isnan","isnum","issparse","isstruct","isvector","jmat","justify","kalm","karmarkar","kernel","kpure","krac2","kroneck","lattn","launchtest","lcf","lcm","lcmdiag","leastsq","leqe","leqr","lev","levin","lex_sort","lft","lin","lin2mu","lincos","lindquist","linf","linfn","linsolve","linspace","list2vec","list_param","listfiles","listfunctions","listvarinfile","lmisolver","lmitool","loadXcosLibs","loadmatfile","loadwave","log10","log2","logm","logspace","lqe","lqg","lqg2stan","lqg_ltr","lqr","ls","lyap","m2sci_gui","m_circle","macglov","macrovar","mad","makecell","manedit","mapsound","markp2ss","matfile2sci","mdelete","mean","meanf","median","mese","meshgrid","mfft","mfile2sci","minreal","minss","mkdir","modulo","moment","mrfit","msd","mstr2sci","mtlb","mtlb_0","mtlb_a","mtlb_all","mtlb_any","mtlb_axes","mtlb_axis","mtlb_beta","mtlb_box","mtlb_choices","mtlb_close","mtlb_colordef","mtlb_cond","mtlb_conv","mtlb_cov","mtlb_cumprod","mtlb_cumsum","mtlb_dec2hex","mtlb_delete","mtlb_diag","mtlb_diff","mtlb_dir","mtlb_double","mtlb_e","mtlb_echo","mtlb_error","mtlb_eval","mtlb_exist","mtlb_eye","mtlb_false","mtlb_fft","mtlb_fftshift","mtlb_filter","mtlb_find","mtlb_findstr","mt
lb_fliplr","mtlb_fopen","mtlb_format","mtlb_fprintf","mtlb_fread","mtlb_fscanf","mtlb_full","mtlb_fwrite","mtlb_get","mtlb_grid","mtlb_hold","mtlb_i","mtlb_ifft","mtlb_image","mtlb_imp","mtlb_int16","mtlb_int32","mtlb_int8","mtlb_is","mtlb_isa","mtlb_isfield","mtlb_isletter","mtlb_isspace","mtlb_l","mtlb_legendre","mtlb_linspace","mtlb_logic","mtlb_logical","mtlb_loglog","mtlb_lower","mtlb_max","mtlb_mean","mtlb_median","mtlb_mesh","mtlb_meshdom","mtlb_min","mtlb_more","mtlb_num2str","mtlb_ones","mtlb_pcolor","mtlb_plot","mtlb_prod","mtlb_qr","mtlb_qz","mtlb_rand","mtlb_randn","mtlb_rcond","mtlb_realmax","mtlb_realmin","mtlb_repmat","mtlb_s","mtlb_semilogx","mtlb_semilogy","mtlb_setstr","mtlb_size","mtlb_sort","mtlb_sortrows","mtlb_sprintf","mtlb_sscanf","mtlb_std","mtlb_strcmp","mtlb_strcmpi","mtlb_strfind","mtlb_strrep","mtlb_subplot","mtlb_sum","mtlb_t","mtlb_toeplitz","mtlb_tril","mtlb_triu","mtlb_true","mtlb_type","mtlb_uint16","mtlb_uint32","mtlb_uint8","mtlb_upper","mtlb_var","mtlb_zeros","mu2lin","mutation_ga_binary","mutation_ga_default","mvcorrel","mvvacov","nancumsum","nand2mean","nanmax","nanmean","nanmeanf","nanmedian","nanmin","nanstdev","nansum","narsimul","ndgrid","ndims","nehari","neigh_func_csa","neigh_func_default","neigh_func_fsa","neigh_func_vfsa","neldermead_cget","neldermead_configure","neldermead_costf","neldermead_defaultoutput","neldermead_destroy","neldermead_display","neldermead_function","neldermead_get","neldermead_log","neldermead_new","neldermead_restart","neldermead_search","neldermead_updatesimp","nextpow2","nfreq","nicholschart","nlev","nmplot_cget","nmplot_configure","nmplot_contour","nmplot_destroy","nmplot_display","nmplot_function","nmplot_get","nmplot_historyplot","nmplot_log","nmplot_new","nmplot_outputcmd","nmplot_restart","nmplot_search","nmplot_simplexhistory","noisegen","nonreg_test_run","norm","now","null","num2cell","numdiff","numer","nyquist","nyquistfrequencybounds","obs_gram","obscont","observer","obsv_mat","obsvss","oct2dec","odeoptions","optim_ga","optim_moga","optim_nsga","optim_nsga2","optim_sa","optimbase_cget","optimbase_checkbounds","optimbase_checkcostfun","optimbase_checkx0","optimbase_configure","optimbase_destroy","optimbase_display","optimbase_function","optimbase_get","optimbase_hasbounds","optimbase_hasconstraints","optimbase_hasnlcons","optimbase_histget","optimbase_histset","optimbase_incriter","optimbase_isfeasible","optimbase_isinbounds","optimbase_isinnonlincons","optimbase_log","optimbase_logshutdown","optimbase_logstartup","optimbase_new","optimbase_outputcmd","optimbase_outstruct","optimbase_proj2bnds","optimbase_set","optimbase_stoplog","optimbase_terminate","optimget","optimplotfunccount","optimplotfval","optimplotx","optimset","optimsimplex_center","optimsimplex_check","optimsimplex_compsomefv","optimsimplex_computefv","optimsimplex_deltafv","optimsimplex_deltafvmax","optimsimplex_destroy","optimsimplex_dirmat","optimsimplex_fvmean","optimsimplex_fvstdev","optimsimplex_fvvariance","optimsimplex_getall","optimsimplex_getallfv","optimsimplex_getallx","optimsimplex_getfv","optimsimplex_getn","optimsimplex_getnbve","optimsimplex_getve","optimsimplex_getx","optimsimplex_gradientfv","optimsimplex_log","optimsimplex_new","optimsimplex_print","optimsimplex_reflect","optimsimplex_setall","optimsimplex_setallfv","optimsimplex_setallx","optimsimplex_setfv","optimsimplex_setn","optimsimplex_setnbve","optimsimplex_setve","optimsimplex_setx","optimsimplex_shrink","optimsimplex_size","optimsimplex_sort","optimsimplex_tostring","opt
imsimplex_xbar","orth","p_margin","pack","pareto_filter","parrot","pbig","pca","pcg","pdiv","pen2ea","pencan","pencost","penlaur","perctl","perl","perms","permute","pertrans","pfactors","pfss","phasemag","phaseplot","phc","pinv","playsnd","plotprofile","plzr","pmodulo","pol2des","pol2str","polar","polfact","prbs_a","prettyprint","primes","princomp","profile","proj","projsl","projspec","psmall","pspect","qmr","qpsolve","quart","quaskro","rafiter","randpencil","range","rank","read_csv","readxls","recompilefunction","recons","reglin","regress","remezb","remove_param","remove_profiling","repfreq","replace_Ix_by_Fx","repmat","reset_profiling","resize_matrix","returntoscilab","rhs2code","ric_desc","riccati","rmdir","routh_t","rowcomp","rowcompr","rowinout","rowregul","rowshuff","rref","sample","samplef","samwr","savematfile","savewave","scanf","sci2exp","sciGUI_init","sci_sparse","scicos_getvalue","scicos_simulate","scicos_workspace_init","scisptdemo","scitest","sdiff","sec","secd","sech","selection_ga_elitist","selection_ga_random","sensi","set_param","setdiff","sgrid","show_margins","show_pca","showprofile","signm","sinc","sincd","sind","sinh","sinhm","sinm","sm2des","sm2ss","smga","smooth","solve","sound","soundsec","sp2adj","spaninter","spanplus","spantwo","specfact","speye","sprand","spzeros","sqroot","sqrtm","squarewave","squeeze","srfaur","srkf","ss2des","ss2ss","ss2tf","sscanf","sskf","ssprint","ssrand","st_deviation","st_i_generic","st_ility","stabil","statgain","stdev","stdevf","steadycos","strange","strcmpi","struct","sub2ind","sva","svplot","sylm","sylv","sysconv","sysdiag","sysfact","syslin","syssize","system","systmat","tabul","tand","tanh","tanhm","tanm","tbx_build_blocks","tbx_build_cleaner","tbx_build_gateway","tbx_build_gateway_clean","tbx_build_gateway_loader","tbx_build_help","tbx_build_help_loader","tbx_build_loader","tbx_build_macros","tbx_build_src","tbx_builder","tbx_builder_gateway","tbx_builder_gateway_lang","tbx_builder_help","tbx_builder_help_lang","tbx_builder_macros","tbx_builder_src","tbx_builder_src_lang","temp_law_csa","temp_law_default","temp_law_fsa","temp_law_huang","temp_law_vfsa","test_clean","test_on_columns","test_run","test_run_level","testexamples","tf2des","tf2ss","thrownan","tic","time_id","toc","toeplitz","tokenpos","toolboxes","trace","trans","translatepaths","tree2code","trfmod","trianfml","trimmean","trisolve","trzeros","typeof","ui_observer","union","unique","unit_test_run","unix_g","unix_s","unix_w","unix_x","unobs","unpack","variance","variancef","vec2list","vectorfind","ver","warnobsolete","wavread","wavwrite","wcenter","weekday","wfir","wfir_gui","whereami","who_user","whos","wiener","wigner","winclose","window","winlist","with_javasci","with_macros_source","with_modelica_compiler","with_pvm","with_texmacs","with_tk","write_csv","xcosBlockEval","xcosBlockInterface","xcosCodeGeneration","xcosConfigureModelica","xcosPal","xcosPalAdd","xcosPalAddBlock","xcosPalExport","xcosShowBlockWarning","xcosValidateBlockSet","xcosValidateCompareBlock","xcos_compile","xcos_run","xcos_simulate","xcos_workspace_init","xmltochm","xmltoformat","xmltohtml","xmltojar","xmltopdf","xmltops","xmltoweb","yulewalk","zeropen","zgrid","zpbutt","zpch1","zpch2","zpell"]
builtin_consts = ["\\$","%F","%T","%e","%eps","%f","%fftw","%gui","%i","%inf","%io","%modalWarning","%nan","%pi","%s","%t","%tk","%toolboxes","%toolboxes_dir","%z","PWD","SCI","SCIHOME","TMPDIR","a","ans","assertlib","atomslib","cacsdlib","compatibility_functilib","corelib","data_structureslib","demo_toolslib","development_toolslib","differential_equationlib","dynamic_linklib","elementary_functionslib","fd","fileiolib","functionslib","genetic_algorithmslib","helptoolslib","home","i","integerlib","interpolationlib","iolib","j","linear_algebralib","m2scilib","matiolib","modules_managerlib","myStr","neldermeadlib","optimbaselib","optimizationlib","optimsimplexlib","output_streamlib","overloadinglib","parameterslib","polynomialslib","scicos_autolib","scicos_utilslib","scinoteslib","signal_processinglib","simulated_annealinglib","soundlib","sparselib","special_functionslib","spreadsheetlib","statisticslib","stringlib","tclscilib","timelib","umfpacklib","varType","xcoslib"]
|
webspinner/webspinner-gae-cms | refs/heads/master | appengine_utilities/cache.py | 26 | # -*- coding: utf-8 -*-
"""
Copyright (c) 2008, appengine-utilities project
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
- Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
- Neither the name of the appengine-utilities project nor the names of its
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
# main python imports
import datetime
import pickle
import random
import sys
# google appengine import
from google.appengine.ext import db
from google.appengine.api import memcache
# settings
try:
import settings_default
import settings
if settings.__name__.rsplit('.', 1)[0] != settings_default.__name__.rsplit('.', 1)[0]:
settings = settings_default
except:
settings = settings_default
class _AppEngineUtilities_Cache(db.Model):
cachekey = db.StringProperty()
createTime = db.DateTimeProperty(auto_now_add=True)
timeout = db.DateTimeProperty()
value = db.BlobProperty()
class Cache(object):
"""
Cache is used for storing pregenerated output and/or objects in the Big
Table datastore to minimize the amount of queries needed for page
displays. The idea is that complex queries that generate the same
results really should only be run once. Cache can be used to store
    pregenerated values made from queries (or other calls such as
urlFetch()), or the query objects themselves.
    Cache is a standard dictionary object and can be used as such. It attempts
    to store data in both memcache and the datastore. However, should a
datastore write fail, it will not try again. This is for performance
reasons.
"""
def __init__(self, clean_check_percent = settings.cache["CLEAN_CHECK_PERCENT"],
max_hits_to_clean = settings.cache["MAX_HITS_TO_CLEAN"],
default_timeout = settings.cache["DEFAULT_TIMEOUT"]):
"""
Initializer
Args:
clean_check_percent: how often cache initialization should
run the cache cleanup
max_hits_to_clean: maximum number of stale hits to clean
default_timeout: default length a cache item is good for
"""
self.clean_check_percent = clean_check_percent
self.max_hits_to_clean = max_hits_to_clean
self.default_timeout = default_timeout
if random.randint(1, 100) < self.clean_check_percent:
try:
self._clean_cache()
except:
pass
if 'AEU_Events' in sys.modules['__main__'].__dict__:
sys.modules['__main__'].AEU_Events.fire_event('cacheInitialized')
def _clean_cache(self):
"""
_clean_cache is a routine that is run to find and delete cache
        items that are old. This helps keep the overall size of your
        datastore down.
        It deletes at most max_hits_to_clean entries per attempt, in order
        to maximize performance. Default settings are 20 hits, 50%
        of requests. Generally, fewer hits cleaned on more requests will
        give you better performance.
Returns True on completion
"""
query = _AppEngineUtilities_Cache.all()
query.filter('timeout < ', datetime.datetime.now())
results = query.fetch(self.max_hits_to_clean)
db.delete(results)
return True
def _validate_key(self, key):
"""
        Internal method for key validation. This can be used by a subclass
        to introduce more checks on key names.
Args:
key: Key name to check
        Returns True if the key is valid, otherwise raises KeyError.
"""
        if key is None:
raise KeyError
return True
def _validate_value(self, value):
"""
        Internal method for value validation. This can be used by a subclass
        to introduce more checks on values.
Args:
value: value to check
        Returns True if the value is valid, otherwise raises ValueError.
"""
        if value is None:
raise ValueError
return True
def _validate_timeout(self, timeout):
"""
Internal method to validate timeouts. If no timeout
is passed, then the default_timeout is used.
Args:
timeout: datetime.datetime format
Returns the timeout
"""
        if timeout is None:
timeout = datetime.datetime.now() +\
datetime.timedelta(seconds=self.default_timeout)
if type(timeout) == type(1):
timeout = datetime.datetime.now() + \
datetime.timedelta(seconds = timeout)
if type(timeout) != datetime.datetime:
raise TypeError
if timeout < datetime.datetime.now():
raise ValueError
return timeout
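    # For reference: _validate_timeout(None) yields now + default_timeout,
    # and an integer such as 300 is treated as seconds from now.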
def add(self, key = None, value = None, timeout = None):
"""
        Adds an entry to the cache, if one does not already exist. If the key
        already exists, KeyError will be raised.
Args:
key: Key name of the cache object
value: Value of the cache object
timeout: timeout value for the cache object.
Returns the cache object.
"""
self._validate_key(key)
self._validate_value(value)
timeout = self._validate_timeout(timeout)
if key in self:
raise KeyError
cacheEntry = _AppEngineUtilities_Cache()
cacheEntry.cachekey = key
cacheEntry.value = pickle.dumps(value)
cacheEntry.timeout = timeout
        # try to put the entry; if it fails, silently pass.
        # Failures may happen due to timeouts, the datastore being read-only
        # for maintenance, or other causes. However, the cache
        # not being able to write to the datastore should not
        # break the application
try:
cacheEntry.put()
except:
pass
memcache_timeout = timeout - datetime.datetime.now()
memcache.set('cache-%s' % (key), value, int(memcache_timeout.seconds))
if 'AEU_Events' in sys.modules['__main__'].__dict__:
sys.modules['__main__'].AEU_Events.fire_event('cacheAdded')
return self.get(key)
def set(self, key = None, value = None, timeout = None):
"""
        Sets an entry in the cache, overwriting an existing value
if one already exists.
Args:
key: Key name of the cache object
value: Value of the cache object
timeout: timeout value for the cache object.
Returns the cache object.
"""
self._validate_key(key)
self._validate_value(value)
timeout = self._validate_timeout(timeout)
cacheEntry = self._read(key)
if not cacheEntry:
cacheEntry = _AppEngineUtilities_Cache()
cacheEntry.cachekey = key
cacheEntry.value = pickle.dumps(value)
cacheEntry.timeout = timeout
try:
cacheEntry.put()
except:
pass
memcache_timeout = timeout - datetime.datetime.now()
memcache.set('cache-%s' % (key), value, int(memcache_timeout.seconds))
if 'AEU_Events' in sys.modules['__main__'].__dict__:
sys.modules['__main__'].AEU_Events.fire_event('cacheSet')
return value
def _read(self, key = None):
"""
_read is an internal method that will get the cache entry directly
from the datastore, and return the entity. This is used for datastore
maintenance within the class.
Args:
key: The key to retrieve
Returns the cache entity
"""
query = _AppEngineUtilities_Cache.all()
query.filter('cachekey', key)
query.filter('timeout > ', datetime.datetime.now())
results = query.fetch(1)
        if len(results) == 0:
return None
if 'AEU_Events' in sys.modules['__main__'].__dict__:
sys.modules['__main__'].AEU_Events.fire_event('cacheReadFromDatastore')
if 'AEU_Events' in sys.modules['__main__'].__dict__:
sys.modules['__main__'].AEU_Events.fire_event('cacheRead')
return results[0]
def delete(self, key = None):
"""
Deletes a cache object.
Args:
key: The key of the cache object to delete.
Returns True.
"""
memcache.delete('cache-%s' % (key))
result = self._read(key)
if result:
if 'AEU_Events' in sys.modules['__main__'].__dict__:
sys.modules['__main__'].AEU_Events.fire_event('cacheDeleted')
result.delete()
return True
def get(self, key):
"""
Used to return the cache value associated with the key passed.
Args:
key: The key of the value to retrieve.
Returns the value of the cache item.
"""
mc = memcache.get('cache-%s' % (key))
if mc:
if 'AEU_Events' in sys.modules['__main__'].__dict__:
sys.modules['__main__'].AEU_Events.fire_event('cacheReadFromMemcache')
if 'AEU_Events' in sys.modules['__main__'].__dict__:
sys.modules['__main__'].AEU_Events.fire_event('cacheRead')
return mc
result = self._read(key)
if result:
timeout = result.timeout - datetime.datetime.now()
memcache.set('cache-%s' % (key), pickle.loads(result.value),
int(timeout.seconds))
if 'AEU_Events' in sys.modules['__main__'].__dict__:
sys.modules['__main__'].AEU_Events.fire_event('cacheRead')
return pickle.loads(result.value)
else:
raise KeyError
def get_many(self, keys):
"""
Returns a dict mapping each key in keys to its value. If the given
key is missing, it will be missing from the response dict.
Args:
keys: A list of keys to retrieve.
Returns a dictionary of key/value pairs.
"""
        results = {}
        for key in keys:
            try:
                results[key] = self.get(key)
            except KeyError:
                continue
        return results
def __getitem__(self, key):
"""
__getitem__ is necessary for this object to emulate a container.
"""
return self.get(key)
def __setitem__(self, key, value):
"""
__setitem__ is necessary for this object to emulate a container.
"""
return self.set(key, value)
def __delitem__(self, key):
"""
Implement the 'del' keyword
"""
return self.delete(key)
def __contains__(self, key):
"""
Implements "in" operator
"""
try:
self.__getitem__(key)
except KeyError:
return False
return True
def has_key(self, keyname):
"""
Equivalent to k in a, use that form in new code
"""
return self.__contains__(keyname)
|
sergecodd/FireFox-OS | refs/heads/master | B2G/gecko/config/JarMaker.py | 2 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
'''jarmaker.py provides a python class to package up chrome content by
processing jar.mn files.
See the documentation for jar.mn on MDC for further details on the format.
'''
import sys
import os
import os.path
import errno
import re
import logging
from time import localtime
from optparse import OptionParser
from MozZipFile import ZipFile
from cStringIO import StringIO
from datetime import datetime
from utils import pushback_iter, lockFile
from Preprocessor import Preprocessor
from buildlist import addEntriesToListFile
if sys.platform == "win32":
from ctypes import windll, WinError
CreateHardLink = windll.kernel32.CreateHardLinkA
__all__ = ['JarMaker']
class ZipEntry:
'''Helper class for jar output.
This class defines a simple file-like object for a zipfile.ZipEntry
so that we can consecutively write to it and then close it.
This methods hooks into ZipFile.writestr on close().
'''
def __init__(self, name, zipfile):
self._zipfile = zipfile
self._name = name
self._inner = StringIO()
def write(self, content):
'Append the given content to this zip entry'
self._inner.write(content)
return
def close(self):
'The close method writes the content back to the zip file.'
self._zipfile.writestr(self._name, self._inner.getvalue())
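  # Typical flow (sketch): entry = ZipEntry('chrome/foo.txt', zf);
  # entry.write(data); entry.close() then writes the buffered bytes
  # into the jar via zf.writestr().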
def getModTime(aPath):
if not os.path.isfile(aPath):
return 0
mtime = os.stat(aPath).st_mtime
return localtime(mtime)
class JarMaker(object):
  '''JarMaker reads jar.mn files and processes them into jar files or
flat directories, along with chrome.manifest files.
'''
ignore = re.compile('\s*(\#.*)?$')
jarline = re.compile('(?:(?P<jarfile>[\w\d.\-\_\\\/]+).jar\:)|(?:\s*(\#.*)?)\s*$')
regline = re.compile('\%\s+(.*)$')
entryre = '(?P<optPreprocess>\*)?(?P<optOverwrite>\+?)\s+'
entryline = re.compile(entryre + '(?P<output>[\w\d.\-\_\\\/\+\@]+)\s*(\((?P<locale>\%?)(?P<source>[\w\d.\-\_\\\/\@]+)\))?\s*$')
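  # Illustrative jar.mn section (hypothetical content) as matched by the
  # regexes above: a "name.jar:" header, '%' registration lines, and entry
  # lines where '*' requests preprocessing, '+' forces overwrite, and '%'
  # inside the parentheses marks a locale source:
  #   browser.jar:
  #   % content browser %content/browser/
  #   * content/browser/browser.xul (content/browser.xul)
  #   + content/browser/logo.png (%logo.png)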
def __init__(self, outputFormat = 'flat', useJarfileManifest = True,
useChromeManifest = False):
self.outputFormat = outputFormat
self.useJarfileManifest = useJarfileManifest
self.useChromeManifest = useChromeManifest
self.pp = Preprocessor()
def getCommandLineParser(self):
'''Get a optparse.OptionParser for jarmaker.
This OptionParser has the options for jarmaker as well as
the options for the inner PreProcessor.
'''
# HACK, we need to unescape the string variables we get,
# the perl versions didn't grok strings right
p = self.pp.getCommandLineParser(unescapeDefines = True)
p.add_option('-f', type="choice", default="jar",
choices=('jar', 'flat', 'symlink'),
help="fileformat used for output", metavar="[jar, flat, symlink]")
p.add_option('-v', action="store_true", dest="verbose",
help="verbose output")
p.add_option('-q', action="store_false", dest="verbose",
help="verbose output")
p.add_option('-e', action="store_true",
help="create chrome.manifest instead of jarfile.manifest")
p.add_option('--both-manifests', action="store_true",
dest="bothManifests",
help="create chrome.manifest and jarfile.manifest")
p.add_option('-s', type="string", action="append", default=[],
help="source directory")
p.add_option('-t', type="string",
help="top source directory")
p.add_option('-c', '--l10n-src', type="string", action="append",
help="localization directory")
p.add_option('--l10n-base', type="string", action="append", default=[],
help="base directory to be used for localization (multiple)")
p.add_option('-j', type="string",
help="jarfile directory")
# backwards compat, not needed
p.add_option('-a', action="store_false", default=True,
help="NOT SUPPORTED, turn auto-registration of chrome off (installed-chrome.txt)")
p.add_option('-d', type="string",
help="UNUSED, chrome directory")
p.add_option('-o', help="cross compile for auto-registration, ignored")
p.add_option('-l', action="store_true",
help="ignored (used to switch off locks)")
p.add_option('-x', action="store_true",
help="force Unix")
p.add_option('-z', help="backwards compat, ignored")
p.add_option('-p', help="backwards compat, ignored")
return p
def processIncludes(self, includes):
'''Process given includes with the inner PreProcessor.
Only use this for #defines, the includes shouldn't generate
content.
'''
self.pp.out = StringIO()
for inc in includes:
self.pp.do_include(inc)
includesvalue = self.pp.out.getvalue()
if includesvalue:
logging.info("WARNING: Includes produce non-empty output")
self.pp.out = None
def finalizeJar(self, jarPath, chromebasepath, register,
doZip=True):
'''Helper method to write out the chrome registration entries to
jarfile.manifest or chrome.manifest, or both.
The actual file processing is done in updateManifest.
'''
# rewrite the manifest, if entries given
if not register:
return
chromeManifest = os.path.join(os.path.dirname(jarPath),
'..', 'chrome.manifest')
if self.useJarfileManifest:
self.updateManifest(jarPath + '.manifest', chromebasepath % '',
register)
addEntriesToListFile(chromeManifest, ['manifest chrome/%s.manifest' % (os.path.basename(jarPath),)])
if self.useChromeManifest:
self.updateManifest(chromeManifest, chromebasepath % 'chrome/',
register)
def updateManifest(self, manifestPath, chromebasepath, register):
'''updateManifest replaces the % in the chrome registration entries
with the given chrome base path, and updates the given manifest file.
'''
lock = lockFile(manifestPath + '.lck')
try:
myregister = dict.fromkeys(map(lambda s: s.replace('%', chromebasepath),
register.iterkeys()))
manifestExists = os.path.isfile(manifestPath)
mode = (manifestExists and 'r+b') or 'wb'
mf = open(manifestPath, mode)
if manifestExists:
# import previous content into hash, ignoring empty ones and comments
imf = re.compile('(#.*)?$')
for l in re.split('[\r\n]+', mf.read()):
if imf.match(l):
continue
myregister[l] = None
mf.seek(0)
for k in myregister.iterkeys():
mf.write(k + os.linesep)
mf.close()
finally:
lock = None
def makeJar(self, infile=None,
jardir='',
sourcedirs=[], topsourcedir='', localedirs=None):
'''makeJar is the main entry point to JarMaker.
It takes the input file, the output directory, the source dirs and the
top source dir as argument, and optionally the l10n dirs.
'''
if isinstance(infile, basestring):
logging.info("processing " + infile)
pp = self.pp.clone()
pp.out = StringIO()
pp.do_include(infile)
lines = pushback_iter(pp.out.getvalue().splitlines())
try:
while True:
l = lines.next()
m = self.jarline.match(l)
if not m:
raise RuntimeError(l)
if m.group('jarfile') is None:
# comment
continue
self.processJarSection(m.group('jarfile'), lines,
jardir, sourcedirs, topsourcedir,
localedirs)
except StopIteration:
# we read the file
pass
return
def makeJars(self, infiles, l10nbases,
jardir='',
sourcedirs=[], topsourcedir='', localedirs=None):
'''makeJars is the second main entry point to JarMaker.
It takes an iterable sequence of input file names, the l10nbases,
the output directory, the source dirs and the
top source dir as argument, and optionally the l10n dirs.
It iterates over all inputs, guesses srcdir and l10ndir from the
path and topsourcedir and calls into makeJar.
The l10ndirs are created by guessing the relativesrcdir, and resolving
that against the l10nbases. l10nbases can either be path strings, or
callables. In the latter case, that will be called with the
relativesrcdir as argument, and is expected to return a path string.
This logic is disabled if the jar.mn path is not inside the topsrcdir.
'''
topsourcedir = os.path.normpath(os.path.abspath(topsourcedir))
def resolveL10nBase(relpath):
def _resolve(base):
if isinstance(base, basestring):
return os.path.join(base, relpath)
if callable(base):
return base(relpath)
return base
return _resolve
for infile in infiles:
srcdir = os.path.normpath(os.path.abspath(os.path.dirname(infile)))
l10ndir = srcdir
if os.path.basename(srcdir) == 'locales':
l10ndir = os.path.dirname(l10ndir)
l10ndirs = None
# srcdir may not be a child of topsourcedir, in which case
# we assume that the caller passed in suitable sourcedirs,
# and just skip passing in localedirs
if srcdir.startswith(topsourcedir):
rell10ndir = l10ndir[len(topsourcedir):].lstrip(os.sep)
l10ndirs = map(resolveL10nBase(rell10ndir), l10nbases)
if localedirs is not None:
l10ndirs += [os.path.normpath(os.path.abspath(s))
for s in localedirs]
srcdirs = [os.path.normpath(os.path.abspath(s))
for s in sourcedirs] + [srcdir]
self.makeJar(infile=infile,
sourcedirs=srcdirs, topsourcedir=topsourcedir,
localedirs=l10ndirs,
jardir=jardir)
def processJarSection(self, jarfile, lines,
jardir, sourcedirs, topsourcedir, localedirs):
'''Internal method called by makeJar to actually process a section
of a jar.mn file.
jarfile is the basename of the jarfile or the directory name for
flat output, lines is a pushback_iterator of the lines of jar.mn,
the remaining options are carried over from makeJar.
'''
# chromebasepath is used for chrome registration manifests
# %s is getting replaced with chrome/ for chrome.manifest, and with
# an empty string for jarfile.manifest
chromebasepath = '%s' + os.path.basename(jarfile)
if self.outputFormat == 'jar':
chromebasepath = 'jar:' + chromebasepath + '.jar!'
chromebasepath += '/'
jarfile = os.path.join(jardir, jarfile)
jf = None
if self.outputFormat == 'jar':
#jar
jarfilepath = jarfile + '.jar'
try:
os.makedirs(os.path.dirname(jarfilepath))
except OSError, error:
if error.errno != errno.EEXIST:
raise
jf = ZipFile(jarfilepath, 'a', lock = True)
outHelper = self.OutputHelper_jar(jf)
else:
outHelper = getattr(self, 'OutputHelper_' + self.outputFormat)(jarfile)
register = {}
    # This loop exits on either
    # - the end of the jar.mn file
    # - a line in the jar.mn file that's not part of a jar section
    # - an exception being raised; in that case the jf is closed in a finally
try:
while True:
try:
l = lines.next()
except StopIteration:
# we're done with this jar.mn, and this jar section
self.finalizeJar(jarfile, chromebasepath, register)
if jf is not None:
jf.close()
# reraise the StopIteration for makeJar
raise
if self.ignore.match(l):
continue
m = self.regline.match(l)
if m:
rline = m.group(1)
register[rline] = 1
continue
m = self.entryline.match(l)
if not m:
# neither an entry line nor chrome reg, this jar section is done
self.finalizeJar(jarfile, chromebasepath, register)
if jf is not None:
jf.close()
lines.pushback(l)
return
self._processEntryLine(m, sourcedirs, topsourcedir, localedirs,
outHelper, jf)
finally:
if jf is not None:
jf.close()
return
def _processEntryLine(self, m,
sourcedirs, topsourcedir, localedirs,
outHelper, jf):
out = m.group('output')
src = m.group('source') or os.path.basename(out)
# pick the right sourcedir -- l10n, topsrc or src
if m.group('locale'):
src_base = localedirs
elif src.startswith('/'):
# path/in/jar/file_name.xul (/path/in/sourcetree/file_name.xul)
# refers to a path relative to topsourcedir, use that as base
# and strip the leading '/'
src_base = [topsourcedir]
src = src[1:]
else:
# use srcdirs and the objdir (current working dir) for relative paths
src_base = sourcedirs + [os.getcwd()]
# check if the source file exists
realsrc = None
for _srcdir in src_base:
if os.path.isfile(os.path.join(_srcdir, src)):
realsrc = os.path.join(_srcdir, src)
break
if realsrc is None:
if jf is not None:
jf.close()
raise RuntimeError('File "%s" not found in %s' % (src, ', '.join(src_base)))
if m.group('optPreprocess'):
outf = outHelper.getOutput(out)
inf = open(realsrc)
pp = self.pp.clone()
if src[-4:] == '.css':
pp.setMarker('%')
pp.out = outf
pp.do_include(inf)
pp.warnUnused(realsrc)
outf.close()
inf.close()
return
# copy or symlink if newer or overwrite
if (m.group('optOverwrite')
or (getModTime(realsrc) >
outHelper.getDestModTime(m.group('output')))):
if self.outputFormat == 'symlink':
outHelper.symlink(realsrc, out)
return
outf = outHelper.getOutput(out)
# open in binary mode, this can be images etc
inf = open(realsrc, 'rb')
outf.write(inf.read())
outf.close()
inf.close()
class OutputHelper_jar(object):
'''Provide getDestModTime and getOutput for a given jarfile.
'''
def __init__(self, jarfile):
self.jarfile = jarfile
def getDestModTime(self, aPath):
    try:
info = self.jarfile.getinfo(aPath)
return info.date_time
except:
return 0
def getOutput(self, name):
return ZipEntry(name, self.jarfile)
class OutputHelper_flat(object):
'''Provide getDestModTime and getOutput for a given flat
output directory. The helper method ensureDirFor is used by
the symlink subclass.
'''
def __init__(self, basepath):
self.basepath = basepath
def getDestModTime(self, aPath):
return getModTime(os.path.join(self.basepath, aPath))
def getOutput(self, name):
out = self.ensureDirFor(name)
# remove previous link or file
try:
os.remove(out)
except OSError, e:
if e.errno != errno.ENOENT:
raise
return open(out, 'wb')
def ensureDirFor(self, name):
out = os.path.join(self.basepath, name)
outdir = os.path.dirname(out)
if not os.path.isdir(outdir):
try:
os.makedirs(outdir)
except OSError, error:
if error.errno != errno.EEXIST:
raise
return out
class OutputHelper_symlink(OutputHelper_flat):
'''Subclass of OutputHelper_flat that provides a helper for
creating a symlink including creating the parent directories.
'''
def symlink(self, src, dest):
out = self.ensureDirFor(dest)
# remove previous link or file
try:
os.remove(out)
except OSError, e:
if e.errno != errno.ENOENT:
raise
if sys.platform != "win32":
os.symlink(src, out)
else:
# On Win32, use ctypes to create a hardlink
rv = CreateHardLink(out, src, None)
if rv == 0:
raise WinError()
def main():
jm = JarMaker()
p = jm.getCommandLineParser()
(options, args) = p.parse_args()
jm.processIncludes(options.I)
jm.outputFormat = options.f
if options.e:
jm.useChromeManifest = True
jm.useJarfileManifest = False
if options.bothManifests:
jm.useChromeManifest = True
jm.useJarfileManifest = True
noise = logging.INFO
if options.verbose is not None:
noise = (options.verbose and logging.DEBUG) or logging.WARN
if sys.version_info[:2] > (2,3):
logging.basicConfig(format = "%(message)s")
else:
logging.basicConfig()
logging.getLogger().setLevel(noise)
topsrc = options.t
topsrc = os.path.normpath(os.path.abspath(topsrc))
if not args:
jm.makeJar(infile=sys.stdin,
sourcedirs=options.s, topsourcedir=topsrc,
localedirs=options.l10n_src,
jardir=options.j)
else:
jm.makeJars(args, options.l10n_base,
jardir=options.j,
sourcedirs=options.s, topsourcedir=topsrc,
localedirs=options.l10n_src)
if __name__ == "__main__":
main()
|
proxysh/Safejumper-for-Desktop | refs/heads/master | buildlinux/env32/local/lib/python2.7/encodings/mac_roman.py | 593 | """ Python Character Mapping Codec mac_roman generated from 'MAPPINGS/VENDORS/APPLE/ROMAN.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_table)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='mac-roman',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
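# Round-trip sketch using the tables below: u'caf\xe9'.encode('mac-roman')
# yields 'caf\x8e', since U+00E9 maps to 0x8E in this encoding, and
# 'caf\x8e'.decode('mac-roman') restores u'caf\xe9'.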
### Decoding Table
decoding_table = (
u'\x00' # 0x00 -> CONTROL CHARACTER
u'\x01' # 0x01 -> CONTROL CHARACTER
u'\x02' # 0x02 -> CONTROL CHARACTER
u'\x03' # 0x03 -> CONTROL CHARACTER
u'\x04' # 0x04 -> CONTROL CHARACTER
u'\x05' # 0x05 -> CONTROL CHARACTER
u'\x06' # 0x06 -> CONTROL CHARACTER
u'\x07' # 0x07 -> CONTROL CHARACTER
u'\x08' # 0x08 -> CONTROL CHARACTER
u'\t' # 0x09 -> CONTROL CHARACTER
u'\n' # 0x0A -> CONTROL CHARACTER
u'\x0b' # 0x0B -> CONTROL CHARACTER
u'\x0c' # 0x0C -> CONTROL CHARACTER
u'\r' # 0x0D -> CONTROL CHARACTER
u'\x0e' # 0x0E -> CONTROL CHARACTER
u'\x0f' # 0x0F -> CONTROL CHARACTER
u'\x10' # 0x10 -> CONTROL CHARACTER
u'\x11' # 0x11 -> CONTROL CHARACTER
u'\x12' # 0x12 -> CONTROL CHARACTER
u'\x13' # 0x13 -> CONTROL CHARACTER
u'\x14' # 0x14 -> CONTROL CHARACTER
u'\x15' # 0x15 -> CONTROL CHARACTER
u'\x16' # 0x16 -> CONTROL CHARACTER
u'\x17' # 0x17 -> CONTROL CHARACTER
u'\x18' # 0x18 -> CONTROL CHARACTER
u'\x19' # 0x19 -> CONTROL CHARACTER
u'\x1a' # 0x1A -> CONTROL CHARACTER
u'\x1b' # 0x1B -> CONTROL CHARACTER
u'\x1c' # 0x1C -> CONTROL CHARACTER
u'\x1d' # 0x1D -> CONTROL CHARACTER
u'\x1e' # 0x1E -> CONTROL CHARACTER
u'\x1f' # 0x1F -> CONTROL CHARACTER
u' ' # 0x20 -> SPACE
u'!' # 0x21 -> EXCLAMATION MARK
u'"' # 0x22 -> QUOTATION MARK
u'#' # 0x23 -> NUMBER SIGN
u'$' # 0x24 -> DOLLAR SIGN
u'%' # 0x25 -> PERCENT SIGN
u'&' # 0x26 -> AMPERSAND
u"'" # 0x27 -> APOSTROPHE
u'(' # 0x28 -> LEFT PARENTHESIS
u')' # 0x29 -> RIGHT PARENTHESIS
u'*' # 0x2A -> ASTERISK
u'+' # 0x2B -> PLUS SIGN
u',' # 0x2C -> COMMA
u'-' # 0x2D -> HYPHEN-MINUS
u'.' # 0x2E -> FULL STOP
u'/' # 0x2F -> SOLIDUS
u'0' # 0x30 -> DIGIT ZERO
u'1' # 0x31 -> DIGIT ONE
u'2' # 0x32 -> DIGIT TWO
u'3' # 0x33 -> DIGIT THREE
u'4' # 0x34 -> DIGIT FOUR
u'5' # 0x35 -> DIGIT FIVE
u'6' # 0x36 -> DIGIT SIX
u'7' # 0x37 -> DIGIT SEVEN
u'8' # 0x38 -> DIGIT EIGHT
u'9' # 0x39 -> DIGIT NINE
u':' # 0x3A -> COLON
u';' # 0x3B -> SEMICOLON
u'<' # 0x3C -> LESS-THAN SIGN
u'=' # 0x3D -> EQUALS SIGN
u'>' # 0x3E -> GREATER-THAN SIGN
u'?' # 0x3F -> QUESTION MARK
u'@' # 0x40 -> COMMERCIAL AT
u'A' # 0x41 -> LATIN CAPITAL LETTER A
u'B' # 0x42 -> LATIN CAPITAL LETTER B
u'C' # 0x43 -> LATIN CAPITAL LETTER C
u'D' # 0x44 -> LATIN CAPITAL LETTER D
u'E' # 0x45 -> LATIN CAPITAL LETTER E
u'F' # 0x46 -> LATIN CAPITAL LETTER F
u'G' # 0x47 -> LATIN CAPITAL LETTER G
u'H' # 0x48 -> LATIN CAPITAL LETTER H
u'I' # 0x49 -> LATIN CAPITAL LETTER I
u'J' # 0x4A -> LATIN CAPITAL LETTER J
u'K' # 0x4B -> LATIN CAPITAL LETTER K
u'L' # 0x4C -> LATIN CAPITAL LETTER L
u'M' # 0x4D -> LATIN CAPITAL LETTER M
u'N' # 0x4E -> LATIN CAPITAL LETTER N
u'O' # 0x4F -> LATIN CAPITAL LETTER O
u'P' # 0x50 -> LATIN CAPITAL LETTER P
u'Q' # 0x51 -> LATIN CAPITAL LETTER Q
u'R' # 0x52 -> LATIN CAPITAL LETTER R
u'S' # 0x53 -> LATIN CAPITAL LETTER S
u'T' # 0x54 -> LATIN CAPITAL LETTER T
u'U' # 0x55 -> LATIN CAPITAL LETTER U
u'V' # 0x56 -> LATIN CAPITAL LETTER V
u'W' # 0x57 -> LATIN CAPITAL LETTER W
u'X' # 0x58 -> LATIN CAPITAL LETTER X
u'Y' # 0x59 -> LATIN CAPITAL LETTER Y
u'Z' # 0x5A -> LATIN CAPITAL LETTER Z
u'[' # 0x5B -> LEFT SQUARE BRACKET
u'\\' # 0x5C -> REVERSE SOLIDUS
u']' # 0x5D -> RIGHT SQUARE BRACKET
u'^' # 0x5E -> CIRCUMFLEX ACCENT
u'_' # 0x5F -> LOW LINE
u'`' # 0x60 -> GRAVE ACCENT
u'a' # 0x61 -> LATIN SMALL LETTER A
u'b' # 0x62 -> LATIN SMALL LETTER B
u'c' # 0x63 -> LATIN SMALL LETTER C
u'd' # 0x64 -> LATIN SMALL LETTER D
u'e' # 0x65 -> LATIN SMALL LETTER E
u'f' # 0x66 -> LATIN SMALL LETTER F
u'g' # 0x67 -> LATIN SMALL LETTER G
u'h' # 0x68 -> LATIN SMALL LETTER H
u'i' # 0x69 -> LATIN SMALL LETTER I
u'j' # 0x6A -> LATIN SMALL LETTER J
u'k' # 0x6B -> LATIN SMALL LETTER K
u'l' # 0x6C -> LATIN SMALL LETTER L
u'm' # 0x6D -> LATIN SMALL LETTER M
u'n' # 0x6E -> LATIN SMALL LETTER N
u'o' # 0x6F -> LATIN SMALL LETTER O
u'p' # 0x70 -> LATIN SMALL LETTER P
u'q' # 0x71 -> LATIN SMALL LETTER Q
u'r' # 0x72 -> LATIN SMALL LETTER R
u's' # 0x73 -> LATIN SMALL LETTER S
u't' # 0x74 -> LATIN SMALL LETTER T
u'u' # 0x75 -> LATIN SMALL LETTER U
u'v' # 0x76 -> LATIN SMALL LETTER V
u'w' # 0x77 -> LATIN SMALL LETTER W
u'x' # 0x78 -> LATIN SMALL LETTER X
u'y' # 0x79 -> LATIN SMALL LETTER Y
u'z' # 0x7A -> LATIN SMALL LETTER Z
u'{' # 0x7B -> LEFT CURLY BRACKET
u'|' # 0x7C -> VERTICAL LINE
u'}' # 0x7D -> RIGHT CURLY BRACKET
u'~' # 0x7E -> TILDE
u'\x7f' # 0x7F -> CONTROL CHARACTER
u'\xc4' # 0x80 -> LATIN CAPITAL LETTER A WITH DIAERESIS
u'\xc5' # 0x81 -> LATIN CAPITAL LETTER A WITH RING ABOVE
u'\xc7' # 0x82 -> LATIN CAPITAL LETTER C WITH CEDILLA
u'\xc9' # 0x83 -> LATIN CAPITAL LETTER E WITH ACUTE
u'\xd1' # 0x84 -> LATIN CAPITAL LETTER N WITH TILDE
u'\xd6' # 0x85 -> LATIN CAPITAL LETTER O WITH DIAERESIS
u'\xdc' # 0x86 -> LATIN CAPITAL LETTER U WITH DIAERESIS
u'\xe1' # 0x87 -> LATIN SMALL LETTER A WITH ACUTE
u'\xe0' # 0x88 -> LATIN SMALL LETTER A WITH GRAVE
u'\xe2' # 0x89 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
u'\xe4' # 0x8A -> LATIN SMALL LETTER A WITH DIAERESIS
u'\xe3' # 0x8B -> LATIN SMALL LETTER A WITH TILDE
u'\xe5' # 0x8C -> LATIN SMALL LETTER A WITH RING ABOVE
u'\xe7' # 0x8D -> LATIN SMALL LETTER C WITH CEDILLA
u'\xe9' # 0x8E -> LATIN SMALL LETTER E WITH ACUTE
u'\xe8' # 0x8F -> LATIN SMALL LETTER E WITH GRAVE
u'\xea' # 0x90 -> LATIN SMALL LETTER E WITH CIRCUMFLEX
u'\xeb' # 0x91 -> LATIN SMALL LETTER E WITH DIAERESIS
u'\xed' # 0x92 -> LATIN SMALL LETTER I WITH ACUTE
u'\xec' # 0x93 -> LATIN SMALL LETTER I WITH GRAVE
u'\xee' # 0x94 -> LATIN SMALL LETTER I WITH CIRCUMFLEX
u'\xef' # 0x95 -> LATIN SMALL LETTER I WITH DIAERESIS
u'\xf1' # 0x96 -> LATIN SMALL LETTER N WITH TILDE
u'\xf3' # 0x97 -> LATIN SMALL LETTER O WITH ACUTE
u'\xf2' # 0x98 -> LATIN SMALL LETTER O WITH GRAVE
u'\xf4' # 0x99 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
u'\xf6' # 0x9A -> LATIN SMALL LETTER O WITH DIAERESIS
u'\xf5' # 0x9B -> LATIN SMALL LETTER O WITH TILDE
u'\xfa' # 0x9C -> LATIN SMALL LETTER U WITH ACUTE
u'\xf9' # 0x9D -> LATIN SMALL LETTER U WITH GRAVE
u'\xfb' # 0x9E -> LATIN SMALL LETTER U WITH CIRCUMFLEX
u'\xfc' # 0x9F -> LATIN SMALL LETTER U WITH DIAERESIS
u'\u2020' # 0xA0 -> DAGGER
u'\xb0' # 0xA1 -> DEGREE SIGN
u'\xa2' # 0xA2 -> CENT SIGN
u'\xa3' # 0xA3 -> POUND SIGN
u'\xa7' # 0xA4 -> SECTION SIGN
u'\u2022' # 0xA5 -> BULLET
u'\xb6' # 0xA6 -> PILCROW SIGN
u'\xdf' # 0xA7 -> LATIN SMALL LETTER SHARP S
u'\xae' # 0xA8 -> REGISTERED SIGN
u'\xa9' # 0xA9 -> COPYRIGHT SIGN
u'\u2122' # 0xAA -> TRADE MARK SIGN
u'\xb4' # 0xAB -> ACUTE ACCENT
u'\xa8' # 0xAC -> DIAERESIS
u'\u2260' # 0xAD -> NOT EQUAL TO
u'\xc6' # 0xAE -> LATIN CAPITAL LETTER AE
u'\xd8' # 0xAF -> LATIN CAPITAL LETTER O WITH STROKE
u'\u221e' # 0xB0 -> INFINITY
u'\xb1' # 0xB1 -> PLUS-MINUS SIGN
u'\u2264' # 0xB2 -> LESS-THAN OR EQUAL TO
u'\u2265' # 0xB3 -> GREATER-THAN OR EQUAL TO
u'\xa5' # 0xB4 -> YEN SIGN
u'\xb5' # 0xB5 -> MICRO SIGN
u'\u2202' # 0xB6 -> PARTIAL DIFFERENTIAL
u'\u2211' # 0xB7 -> N-ARY SUMMATION
u'\u220f' # 0xB8 -> N-ARY PRODUCT
u'\u03c0' # 0xB9 -> GREEK SMALL LETTER PI
u'\u222b' # 0xBA -> INTEGRAL
u'\xaa' # 0xBB -> FEMININE ORDINAL INDICATOR
u'\xba' # 0xBC -> MASCULINE ORDINAL INDICATOR
u'\u03a9' # 0xBD -> GREEK CAPITAL LETTER OMEGA
u'\xe6' # 0xBE -> LATIN SMALL LETTER AE
u'\xf8' # 0xBF -> LATIN SMALL LETTER O WITH STROKE
u'\xbf' # 0xC0 -> INVERTED QUESTION MARK
u'\xa1' # 0xC1 -> INVERTED EXCLAMATION MARK
u'\xac' # 0xC2 -> NOT SIGN
u'\u221a' # 0xC3 -> SQUARE ROOT
u'\u0192' # 0xC4 -> LATIN SMALL LETTER F WITH HOOK
u'\u2248' # 0xC5 -> ALMOST EQUAL TO
u'\u2206' # 0xC6 -> INCREMENT
u'\xab' # 0xC7 -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xbb' # 0xC8 -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\u2026' # 0xC9 -> HORIZONTAL ELLIPSIS
u'\xa0' # 0xCA -> NO-BREAK SPACE
u'\xc0' # 0xCB -> LATIN CAPITAL LETTER A WITH GRAVE
u'\xc3' # 0xCC -> LATIN CAPITAL LETTER A WITH TILDE
u'\xd5' # 0xCD -> LATIN CAPITAL LETTER O WITH TILDE
u'\u0152' # 0xCE -> LATIN CAPITAL LIGATURE OE
u'\u0153' # 0xCF -> LATIN SMALL LIGATURE OE
u'\u2013' # 0xD0 -> EN DASH
u'\u2014' # 0xD1 -> EM DASH
u'\u201c' # 0xD2 -> LEFT DOUBLE QUOTATION MARK
u'\u201d' # 0xD3 -> RIGHT DOUBLE QUOTATION MARK
u'\u2018' # 0xD4 -> LEFT SINGLE QUOTATION MARK
u'\u2019' # 0xD5 -> RIGHT SINGLE QUOTATION MARK
u'\xf7' # 0xD6 -> DIVISION SIGN
u'\u25ca' # 0xD7 -> LOZENGE
u'\xff' # 0xD8 -> LATIN SMALL LETTER Y WITH DIAERESIS
u'\u0178' # 0xD9 -> LATIN CAPITAL LETTER Y WITH DIAERESIS
u'\u2044' # 0xDA -> FRACTION SLASH
u'\u20ac' # 0xDB -> EURO SIGN
u'\u2039' # 0xDC -> SINGLE LEFT-POINTING ANGLE QUOTATION MARK
u'\u203a' # 0xDD -> SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
u'\ufb01' # 0xDE -> LATIN SMALL LIGATURE FI
u'\ufb02' # 0xDF -> LATIN SMALL LIGATURE FL
u'\u2021' # 0xE0 -> DOUBLE DAGGER
u'\xb7' # 0xE1 -> MIDDLE DOT
u'\u201a' # 0xE2 -> SINGLE LOW-9 QUOTATION MARK
u'\u201e' # 0xE3 -> DOUBLE LOW-9 QUOTATION MARK
u'\u2030' # 0xE4 -> PER MILLE SIGN
u'\xc2' # 0xE5 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
u'\xca' # 0xE6 -> LATIN CAPITAL LETTER E WITH CIRCUMFLEX
u'\xc1' # 0xE7 -> LATIN CAPITAL LETTER A WITH ACUTE
u'\xcb' # 0xE8 -> LATIN CAPITAL LETTER E WITH DIAERESIS
u'\xc8' # 0xE9 -> LATIN CAPITAL LETTER E WITH GRAVE
u'\xcd' # 0xEA -> LATIN CAPITAL LETTER I WITH ACUTE
u'\xce' # 0xEB -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
u'\xcf' # 0xEC -> LATIN CAPITAL LETTER I WITH DIAERESIS
u'\xcc' # 0xED -> LATIN CAPITAL LETTER I WITH GRAVE
u'\xd3' # 0xEE -> LATIN CAPITAL LETTER O WITH ACUTE
u'\xd4' # 0xEF -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
u'\uf8ff' # 0xF0 -> Apple logo
u'\xd2' # 0xF1 -> LATIN CAPITAL LETTER O WITH GRAVE
u'\xda' # 0xF2 -> LATIN CAPITAL LETTER U WITH ACUTE
u'\xdb' # 0xF3 -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX
u'\xd9' # 0xF4 -> LATIN CAPITAL LETTER U WITH GRAVE
u'\u0131' # 0xF5 -> LATIN SMALL LETTER DOTLESS I
u'\u02c6' # 0xF6 -> MODIFIER LETTER CIRCUMFLEX ACCENT
u'\u02dc' # 0xF7 -> SMALL TILDE
u'\xaf' # 0xF8 -> MACRON
u'\u02d8' # 0xF9 -> BREVE
u'\u02d9' # 0xFA -> DOT ABOVE
u'\u02da' # 0xFB -> RING ABOVE
u'\xb8' # 0xFC -> CEDILLA
u'\u02dd' # 0xFD -> DOUBLE ACUTE ACCENT
u'\u02db' # 0xFE -> OGONEK
u'\u02c7' # 0xFF -> CARON
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
|
jason-weirather/IDP | refs/heads/master | bin/novel_genephed.py | 2 | #!/usr/bin/python
import sys
import os
if len(sys.argv) >= 3:
ref_refFlat_filename = sys.argv[1]
tag_refFlat_filename = sys.argv[2]
output_filename = sys.argv[3]
else:
print ("usage: python novel_genephed.py ref_refFlat_filename tag_refFlat_filename output_filename")
print ("or ./novel_genephed.py ref_refFlat_filename tag_refFlat_filename output_filename")
sys.exit(1)
################################################################################
def GetPathAndName(pathfilename):
ls=pathfilename.split('/')
filename=ls[-1]
path='/'.join(ls[0:-1])+'/'
if len(ls)==1:
path="./"
return path, filename
def addrefFlat(ref_iso_dt,ref_dt, ref_refFlat_filename):
ref=open(ref_refFlat_filename,'r')
for refline in ref:
ls = refline.strip().split('\t')
gene = ls[0]
ID = ls[1]
chr_name = ls[2]
if ls[8]=="1":
continue
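        # refFlat column 9 holds exon starts and column 10 exon ends; dropping
        # the first start and the last end leaves the internal splice-junction
        # coordinates, so transcripts with identical intron chains share the
        # same "chr:starts-ends" locus key built below.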
jun_end_ls = ls[9].strip(',').split(',')[1:]
jun_start_ls = ls[10].strip(',').split(',')[:-1]
locus = chr_name+':'+'_'.join(jun_start_ls)+'-'+('_').join(jun_end_ls)
if not ref_iso_dt.has_key(locus):
ref_iso_dt[locus]=[[],[]]
ref_iso_dt[locus][0].append(gene)
ref_iso_dt[locus][1].append(ID)
if not ref_dt.has_key(locus):
ref_dt[locus]=[]
ref_dt[locus].append(refline)
ref.close()
################################################################################
ref_iso_dt = {}
ref_dt = {}
addrefFlat(ref_iso_dt,ref_dt, ref_refFlat_filename)
################################################################################
output = open(output_filename,'w')
tag = open(tag_refFlat_filename,'r')
known_locus_set = set()
for line in tag:
ls = line.strip().split('\t')
gene = ls[0]
ID = ls[1]
chr_name = ls[2]
if ls[8]=="1":
continue
jun_end_ls = ls[9].strip(',').split(',')[1:]
jun_start_ls = ls[10].strip(',').split(',')[:-1]
locus = chr_name+':'+'_'.join(jun_start_ls)+'-'+('_').join(jun_end_ls)
old_end = ls[10].strip(',').split(',')[-1]
old_start = ls[9].strip(',').split(',')[0]
if ref_dt.has_key(locus):
known_locus_set.add(locus)
if ref_iso_dt.has_key(locus):
locus_ls = locus.split(':')
output_ls = []
output_ls.append('|'.join(ref_iso_dt[locus][0]))
output_ls.append('|'.join(ref_iso_dt[locus][1]))
output_ls.append(locus_ls[0])
output_ls.append('?')
ls1ls =locus_ls[1].split("-")
exon_start_ls=ls1ls[1].split('_')
exon_end_ls=ls1ls[0].split('_')
oldexon_start_ls = exon_start_ls
oldexon_end_ls = exon_end_ls
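        # note: these are aliases, not copies, so the insert/append below also
        # extend exon_start_ls and exon_end_ls used later in this branch.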
oldexon_start_ls.insert(0,old_start)
oldexon_end_ls.append(old_end )
temp_output_ls = output_ls
N_exon = str( len(exon_end_ls) )
temp_output_ls.append( exon_start_ls[0] )
temp_output_ls.append( exon_end_ls[-1] )
temp_output_ls.append( exon_start_ls[0] )
temp_output_ls.append( exon_end_ls[-1] )
temp_output_ls.append( N_exon )
temp_output_ls.append( ','.join(oldexon_start_ls) + ',' )
temp_output_ls.append( ','.join(oldexon_end_ls) + ',' )
temp_output_ls[3] = "+"
        print '\t'.join(temp_output_ls) + '\t' + gene + '\t' + '\t'.join(ls[10:])
exon_start_ls.insert(0, str( int(exon_end_ls[0])-100 ) )
exon_end_ls.append( str( int(exon_start_ls[-1])+100 ) )
N_exon = str( len(exon_end_ls) )
output_ls.append( exon_start_ls[0] )
output_ls.append( exon_end_ls[-1] )
output_ls.append( exon_start_ls[0] )
output_ls.append( exon_end_ls[-1] )
output_ls.append( N_exon )
output_ls.append( ','.join(exon_start_ls) + ',')
output_ls.append( ','.join(exon_end_ls) + ',')
# print '\t'.join(temp_output_ls) + '\t' + gene + '\t' + ls[11] + '\t' + ls[12]
continue
output.write( line )
tag.close()
output.close()
ref_refFlat_path, ref_refFlat_file = GetPathAndName(ref_refFlat_filename)
tag_refFlat_path, tag_refFlat_file = GetPathAndName(tag_refFlat_filename)
known_output = open(tag_refFlat_path + "known_"+ tag_refFlat_file + '_' + ref_refFlat_file,'w')
for locus in known_locus_set:
for refline in ref_dt[locus]:
known_output.write(refline)
known_output.close()
|
stephaneAG/PengPod700 | refs/heads/master | QtEsrc/backup_qt/qt-everywhere-opensource-src-4.8.5/src/3rdparty/webkit/Source/ThirdParty/gyp/test/generator-output/gyptest-relocate.py | 151 | #!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that a project hierarchy created with the --generator-output=
option can be built even when it's relocated to a different path.
"""
import TestGyp
test = TestGyp.TestGyp()
test.writable(test.workpath('src'), False)
test.run_gyp('prog1.gyp',
'-Dset_symroot=1',
'--generator-output=' + test.workpath('gypfiles'),
chdir='src')
test.writable(test.workpath('src'), True)
test.relocate('src', 'relocate/src')
test.relocate('gypfiles', 'relocate/gypfiles')
test.writable(test.workpath('relocate/src'), False)
test.writable(test.workpath('relocate/src/build'), True)
test.writable(test.workpath('relocate/src/subdir2/build'), True)
test.writable(test.workpath('relocate/src/subdir3/build'), True)
test.build('prog1.gyp', test.ALL, chdir='relocate/gypfiles')
chdir = 'relocate/gypfiles'
expect = """\
Hello from %s
Hello from inc.h
Hello from inc1/include1.h
Hello from inc2/include2.h
Hello from inc3/include3.h
Hello from subdir2/deeper/deeper.h
"""
if test.format == 'xcode':
chdir = 'relocate/src'
test.run_built_executable('prog1', chdir=chdir, stdout=expect % 'prog1.c')
if test.format == 'xcode':
chdir = 'relocate/src/subdir2'
test.run_built_executable('prog2', chdir=chdir, stdout=expect % 'prog2.c')
if test.format == 'xcode':
chdir = 'relocate/src/subdir3'
test.run_built_executable('prog3', chdir=chdir, stdout=expect % 'prog3.c')
test.pass_test()
|
streamlio/heron | refs/heads/master | third_party/pex/pex/crawler.py | 13 | # Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
"""Support for webpage parsing and crawling."""
import os
import re
import threading
import traceback
from .compatibility import PY3
from .http import Context
from .link import Link
from .tracer import TRACER
from .util import Memoizer
if PY3:
from queue import Empty, Queue
from urllib.parse import urlparse
else:
from Queue import Empty, Queue
from urlparse import urlparse
def unescape(s):
"""Unescapes html. Taken from https://wiki.python.org/moin/EscapingHtml"""
s = s.replace("<", "<")
s = s.replace(">", ">")
# this has to be last:
s = s.replace("&", "&")
return s
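# For reference: unescape('x &gt; 1 &amp; y &lt; 2') returns 'x > 1 & y < 2';
# only these three entities are translated.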
class PageParser(object):
"""A helper class to extract and differentiate ordinary and download links from webpages."""
HREF_RE = re.compile(r"""href=(?:"([^"]*)"|\'([^\']*)\'|([^>\s\n]*))""", re.I | re.S)
REL_RE = re.compile(r"""<[^>]*\srel\s*=\s*['"]?([^'">]+)[^>]*>""", re.I)
REL_SKIP_EXTENSIONS = frozenset(['.zip', '.tar', '.tar.gz', '.tar.bz2', '.tgz', '.exe'])
REL_TYPES = frozenset(['homepage', 'download'])
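  # For reference (hypothetical markup): in '<a href="pkg.html" rel="download">',
  # REL_RE captures the rel value and HREF_RE then pulls out "pkg.html"; hrefs
  # ending in REL_SKIP_EXTENSIONS are treated as data files rather than pages.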
@classmethod
def href_match_to_url(cls, match):
def pick(group):
return '' if group is None else group
return unescape(pick(match.group(1)) or pick(match.group(2)) or pick(match.group(3)))
@classmethod
def rel_links(cls, page):
"""return rel= links that should be scraped, skipping obviously data links."""
for match in cls.REL_RE.finditer(page):
href, rel = match.group(0), match.group(1)
if rel not in cls.REL_TYPES:
continue
href_match = cls.HREF_RE.search(href)
if href_match:
href = cls.href_match_to_url(href_match)
parsed_href = urlparse(href)
if any(parsed_href.path.endswith(ext) for ext in cls.REL_SKIP_EXTENSIONS):
continue
yield href
@classmethod
def links(cls, page):
"""return all links on a page, including potentially rel= links."""
for match in cls.HREF_RE.finditer(page):
yield cls.href_match_to_url(match)
def partition(L, pred):
return filter(lambda v: not pred(v), L), filter(lambda v: pred(v), L)
class Crawler(object):
"""A multi-threaded crawler that supports local (disk) and remote (web) crawling."""
# Memoizer for calls to Crawler.crawl().
_CRAWL_CACHE = Memoizer()
@classmethod
def reset_cache(cls):
"""Reset the internal crawl cache. This is intended primarily for tests."""
cls._CRAWL_CACHE = Memoizer()
@classmethod
def crawl_local(cls, link):
try:
dirents = os.listdir(link.local_path)
except OSError as e:
TRACER.log('Failed to read %s: %s' % (link.local_path, e), V=1)
return set(), set()
files, dirs = partition([os.path.join(link.local_path, fn) for fn in dirents], os.path.isdir)
return set(map(Link.from_filename, files)), set(map(Link.from_filename, dirs))
@classmethod
def crawl_remote(cls, context, link):
try:
content = context.content(link)
except context.Error as e:
TRACER.log('Failed to read %s: %s' % (link.url, e), V=1)
return set(), set()
links = set(link.join(href) for href in PageParser.links(content))
rel_links = set(link.join(href) for href in PageParser.rel_links(content))
return links, rel_links
@classmethod
def crawl_link(cls, context, link):
if link.local:
return cls.crawl_local(link)
elif link.remote:
return cls.crawl_remote(context, link)
else:
TRACER.log('Failed to crawl %s: unknown scheme %s' % (link.url, link.scheme))
return set(), set()
def __init__(self, context=None, threads=1):
self._threads = threads
self.context = context or Context.get()
def _make_cache_key(self, links, follow_links):
return (follow_links,) + tuple(links)
def crawl(self, link_or_links, follow_links=False):
links = list(Link.wrap_iterable(link_or_links))
cache_key = self._make_cache_key(links, follow_links)
# Memoize crawling to a global Memoizer (Crawler._CRAWL_CACHE).
result = self._CRAWL_CACHE.get(cache_key)
if result is None:
result = self._crawl(links, follow_links)
self._CRAWL_CACHE.store(cache_key, result)
return result
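  # Illustrative use (sketch; the URL is hypothetical):
  #   crawler = Crawler(threads=4)
  #   links = crawler.crawl('https://example.com/simple/foo/', follow_links=True)
  # A repeated call with the same links and flag is answered from _CRAWL_CACHE.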
def _crawl(self, link_or_links, follow_links):
links, seen = set(), set()
queue = Queue()
converged = threading.Event()
def execute():
while not converged.is_set():
try:
link = queue.get(timeout=0.01)
except Empty:
continue
if link not in seen:
seen.add(link)
try:
roots, rels = self.crawl_link(self.context, link)
except Exception as e:
TRACER.log('Unknown exception encountered: %s' % e)
for line in traceback.format_exc().splitlines():
TRACER.log(line)
queue.task_done()
continue
links.update(roots)
if follow_links:
for rel in rels:
if rel not in seen:
queue.put(rel)
queue.task_done()
for i, link in enumerate(link_or_links):
TRACER.log('crawling link i=%s link=%s follow_links=%s' % (i, link, follow_links), V=3)
queue.put(link)
workers = []
for _ in range(self._threads):
worker = threading.Thread(target=execute)
workers.append(worker)
worker.daemon = True
worker.start()
queue.join()
converged.set()
# We deliberately do not join the worker threads, since they are no longer of any use to us.
return links
|
2014c2g4/2015cda_g7 | refs/heads/master | static/Brython3.1.1-20150328-091302/Lib/warnings.py | 752 | """Python part of the warnings subsystem."""
# Note: function level imports should *not* be used
# in this module as it may cause import lock deadlock.
# See bug 683658.
import linecache
import sys
__all__ = ["warn", "showwarning", "formatwarning", "filterwarnings",
"resetwarnings", "catch_warnings"]
def showwarning(message, category, filename, lineno, file=None, line=None):
"""Hook to write a warning to a file; replace if you like."""
if file is None:
file = sys.stderr
try:
file.write(formatwarning(message, category, filename, lineno, line))
except IOError:
pass # the file (probably stderr) is invalid - this warning gets lost.
def formatwarning(message, category, filename, lineno, line=None):
"""Function to format a warning the standard way."""
s = "%s:%s: %s: %s\n" % (filename, lineno, category.__name__, message)
line = linecache.getline(filename, lineno) if line is None else line
if line:
line = line.strip()
s += " %s\n" % line
return s
def filterwarnings(action, message="", category=Warning, module="", lineno=0,
append=False):
"""Insert an entry into the list of warnings filters (at the front).
'action' -- one of "error", "ignore", "always", "default", "module",
or "once"
'message' -- a regex that the warning message must match
'category' -- a class that the warning must be a subclass of
'module' -- a regex that the module name must match
'lineno' -- an integer line number, 0 matches all warnings
'append' -- if true, append to the list of filters
"""
import re
assert action in ("error", "ignore", "always", "default", "module",
"once"), "invalid action: %r" % (action,)
assert isinstance(message, str), "message must be a string"
assert isinstance(category, type), "category must be a class"
assert issubclass(category, Warning), "category must be a Warning subclass"
assert isinstance(module, str), "module must be a string"
assert isinstance(lineno, int) and lineno >= 0, \
"lineno must be an int >= 0"
item = (action, re.compile(message, re.I), category,
re.compile(module), lineno)
if append:
filters.append(item)
else:
filters.insert(0, item)
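# Example (illustrative; "mypkg" is a hypothetical module name):
#   filterwarnings("error", category=DeprecationWarning, module="mypkg")
# escalates DeprecationWarnings raised from module "mypkg" into exceptions.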
def simplefilter(action, category=Warning, lineno=0, append=False):
"""Insert a simple entry into the list of warnings filters (at the front).
A simple filter matches all modules and messages.
'action' -- one of "error", "ignore", "always", "default", "module",
or "once"
'category' -- a class that the warning must be a subclass of
'lineno' -- an integer line number, 0 matches all warnings
'append' -- if true, append to the list of filters
"""
assert action in ("error", "ignore", "always", "default", "module",
"once"), "invalid action: %r" % (action,)
assert isinstance(lineno, int) and lineno >= 0, \
"lineno must be an int >= 0"
item = (action, None, category, None, lineno)
if append:
filters.append(item)
else:
filters.insert(0, item)
def resetwarnings():
"""Clear the list of warning filters, so that no filters are active."""
filters[:] = []
class _OptionError(Exception):
"""Exception used by option processing helpers."""
pass
# Helper to process -W options passed via sys.warnoptions
def _processoptions(args):
for arg in args:
try:
_setoption(arg)
except _OptionError as msg:
print("Invalid -W option ignored:", msg, file=sys.stderr)
# Helper for _processoptions()
def _setoption(arg):
import re
parts = arg.split(':')
if len(parts) > 5:
raise _OptionError("too many fields (max 5): %r" % (arg,))
while len(parts) < 5:
parts.append('')
action, message, category, module, lineno = [s.strip()
for s in parts]
action = _getaction(action)
message = re.escape(message)
category = _getcategory(category)
module = re.escape(module)
if module:
module = module + '$'
if lineno:
try:
lineno = int(lineno)
if lineno < 0:
raise ValueError
except (ValueError, OverflowError):
raise _OptionError("invalid lineno %r" % (lineno,))
else:
lineno = 0
filterwarnings(action, message, category, module, lineno)
# Helper for _setoption()
def _getaction(action):
if not action:
return "default"
if action == "all": return "always" # Alias
for a in ('default', 'always', 'ignore', 'module', 'once', 'error'):
if a.startswith(action):
return a
raise _OptionError("invalid action: %r" % (action,))
# Helper for _setoption()
def _getcategory(category):
import re
if not category:
return Warning
if re.match("^[a-zA-Z0-9_]+$", category):
try:
cat = eval(category)
except NameError:
raise _OptionError("unknown warning category: %r" % (category,))
else:
i = category.rfind(".")
module = category[:i]
klass = category[i+1:]
try:
m = __import__(module, None, None, [klass])
except ImportError:
raise _OptionError("invalid module name: %r" % (module,))
try:
cat = getattr(m, klass)
except AttributeError:
raise _OptionError("unknown warning category: %r" % (category,))
if not issubclass(cat, Warning):
raise _OptionError("invalid warning category: %r" % (category,))
return cat
# Code typically replaced by _warnings
def warn(message, category=None, stacklevel=1):
"""Issue a warning, or maybe ignore it or raise an exception."""
# Check if message is already a Warning object
if isinstance(message, Warning):
category = message.__class__
# Check category argument
if category is None:
category = UserWarning
assert issubclass(category, Warning)
# Get context information
try:
caller = sys._getframe(stacklevel)
except ValueError:
globals = sys.__dict__
lineno = 1
else:
globals = caller.f_globals
lineno = caller.f_lineno
if '__name__' in globals:
module = globals['__name__']
else:
module = "<string>"
filename = globals.get('__file__')
if filename:
fnl = filename.lower()
if fnl.endswith((".pyc", ".pyo")):
filename = filename[:-1]
else:
if module == "__main__":
try:
filename = sys.argv[0]
except AttributeError:
# embedded interpreters don't have sys.argv, see bug #839151
filename = '__main__'
if not filename:
filename = module
registry = globals.setdefault("__warningregistry__", {})
warn_explicit(message, category, filename, lineno, module, registry,
globals)
def warn_explicit(message, category, filename, lineno,
module=None, registry=None, module_globals=None):
lineno = int(lineno)
if module is None:
module = filename or "<unknown>"
if module[-3:].lower() == ".py":
module = module[:-3] # XXX What about leading pathname?
if registry is None:
registry = {}
if isinstance(message, Warning):
text = str(message)
category = message.__class__
else:
text = message
message = category(message)
key = (text, category, lineno)
# Quick test for common case
if registry.get(key):
return
# Search the filters
for item in filters:
action, msg, cat, mod, ln = item
if ((msg is None or msg.match(text)) and
issubclass(category, cat) and
(mod is None or mod.match(module)) and
(ln == 0 or lineno == ln)):
break
else:
action = defaultaction
# Early exit actions
if action == "ignore":
registry[key] = 1
return
# Prime the linecache for formatting, in case the
# "file" is actually in a zipfile or something.
linecache.getlines(filename, module_globals)
if action == "error":
raise message
# Other actions
if action == "once":
registry[key] = 1
oncekey = (text, category)
if onceregistry.get(oncekey):
return
onceregistry[oncekey] = 1
elif action == "always":
pass
elif action == "module":
registry[key] = 1
altkey = (text, category, 0)
if registry.get(altkey):
return
registry[altkey] = 1
elif action == "default":
registry[key] = 1
else:
# Unrecognized actions are errors
raise RuntimeError(
"Unrecognized action (%r) in warnings.filters:\n %s" %
(action, item))
if not callable(showwarning):
raise TypeError("warnings.showwarning() must be set to a "
"function or method")
# Print message and context
showwarning(message, category, filename, lineno)
class WarningMessage(object):
"""Holds the result of a single showwarning() call."""
_WARNING_DETAILS = ("message", "category", "filename", "lineno", "file",
"line")
def __init__(self, message, category, filename, lineno, file=None,
line=None):
local_values = locals()
for attr in self._WARNING_DETAILS:
setattr(self, attr, local_values[attr])
self._category_name = category.__name__ if category else None
def __str__(self):
return ("{message : %r, category : %r, filename : %r, lineno : %s, "
"line : %r}" % (self.message, self._category_name,
self.filename, self.lineno, self.line))
class catch_warnings(object):
"""A context manager that copies and restores the warnings filter upon
exiting the context.
The 'record' argument specifies whether warnings should be captured by a
custom implementation of warnings.showwarning() and be appended to a list
returned by the context manager. Otherwise None is returned by the context
manager. The objects appended to the list are arguments whose attributes
mirror the arguments to showwarning().
The 'module' argument is to specify an alternative module to the module
named 'warnings' and imported under that name. This argument is only useful
when testing the warnings module itself.
"""
def __init__(self, *, record=False, module=None):
"""Specify whether to record warnings and if an alternative module
should be used other than sys.modules['warnings'].
For compatibility with Python 3.0, please consider all arguments to be
keyword-only.
"""
self._record = record
self._module = sys.modules['warnings'] if module is None else module
self._entered = False
def __repr__(self):
args = []
if self._record:
args.append("record=True")
if self._module is not sys.modules['warnings']:
args.append("module=%r" % self._module)
name = type(self).__name__
return "%s(%s)" % (name, ", ".join(args))
def __enter__(self):
if self._entered:
raise RuntimeError("Cannot enter %r twice" % self)
self._entered = True
self._filters = self._module.filters
self._module.filters = self._filters[:]
self._showwarning = self._module.showwarning
if self._record:
log = []
def showwarning(*args, **kwargs):
log.append(WarningMessage(*args, **kwargs))
self._module.showwarning = showwarning
return log
else:
return None
def __exit__(self, *exc_info):
if not self._entered:
raise RuntimeError("Cannot exit %r without entering first" % self)
self._module.filters = self._filters
self._module.showwarning = self._showwarning
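# Minimal usage sketch of catch_warnings (illustrative comment only, not part
# of the module's API):
#
#     with catch_warnings(record=True) as caught:
#         simplefilter("always")
#         warn("deprecated", DeprecationWarning)
#     assert issubclass(caught[0].category, DeprecationWarning)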
# filters contains a sequence of filter 5-tuples
# The components of the 5-tuple are:
# - an action: error, ignore, always, default, module, or once
# - a compiled regex that must match the warning message
# - a class representing the warning category
# - a compiled regex that must match the module that is being warned
# - a line number for the line being warned about, or 0 to mean any line
# If either of the compiled regexes is None, match anything.
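# For example, simplefilter("ignore", DeprecationWarning) prepends an entry
# conceptually equivalent to:
#     ("ignore", None, DeprecationWarning, None, 0)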
_warnings_defaults = False
try:
from _warnings import (filters, _defaultaction, _onceregistry,
warn, warn_explicit)
defaultaction = _defaultaction
onceregistry = _onceregistry
_warnings_defaults = True
except ImportError:
filters = []
defaultaction = "default"
onceregistry = {}
# Module initialization
_processoptions(sys.warnoptions)
if not _warnings_defaults:
silence = [ImportWarning, PendingDeprecationWarning]
silence.append(DeprecationWarning)
for cls in silence:
simplefilter("ignore", category=cls)
bytes_warning = sys.flags.bytes_warning
if bytes_warning > 1:
bytes_action = "error"
elif bytes_warning:
bytes_action = "default"
else:
bytes_action = "ignore"
simplefilter(bytes_action, category=BytesWarning, append=1)
# resource usage warnings are enabled by default in pydebug mode
if hasattr(sys, 'gettotalrefcount'):
resource_action = "always"
else:
resource_action = "ignore"
simplefilter(resource_action, category=ResourceWarning, append=1)
del _warnings_defaults
|
ocefpaf/cartopy | refs/heads/master | lib/cartopy/tests/crs/test_utm.py | 2 | # (C) British Crown Copyright 2018, Met Office
#
# This file is part of cartopy.
#
# cartopy is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# cartopy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with cartopy. If not, see <https://www.gnu.org/licenses/>.
"""
Tests for the UTM coordinate system.
"""
from __future__ import (absolute_import, division, print_function)
import numpy as np
from numpy.testing import assert_almost_equal
import pytest
import cartopy.crs as ccrs
from .helpers import check_proj_params
@pytest.mark.parametrize('south', [False, True])
def test_default(south):
zone = 1 # Limits are fixed, so don't bother checking other zones.
utm = ccrs.UTM(zone, southern_hemisphere=south)
other_args = {'ellps=WGS84', 'units=m', 'zone={}'.format(zone)}
if south:
other_args |= {'south'}
check_proj_params('utm', utm, other_args)
assert_almost_equal(np.array(utm.x_limits),
[-250000, 1250000])
assert_almost_equal(np.array(utm.y_limits),
[-10000000, 25000000])
def test_ellipsoid_transform():
# USGS Professional Paper 1395, pp 269 - 271
globe = ccrs.Globe(ellipse='clrk66')
utm = ccrs.UTM(zone=18, globe=globe)
geodetic = utm.as_geodetic()
other_args = {'ellps=clrk66', 'units=m', 'zone=18'}
check_proj_params('utm', utm, other_args)
assert_almost_equal(np.array(utm.x_limits),
[-250000, 1250000])
assert_almost_equal(np.array(utm.y_limits),
[-10000000, 25000000])
result = utm.transform_point(-73.5, 40.5, geodetic)
assert_almost_equal(result, np.array([127106.5 + 500000, 4484124.4]),
decimal=1)
inverse_result = geodetic.transform_point(result[0], result[1], utm)
assert_almost_equal(inverse_result, [-73.5, 40.5])
|
ewdurbin/sentry | refs/heads/master | tests/sentry/utils/http/tests.py | 12 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
import mock
from django.conf import settings
from exam import fixture
from sentry.models import Project
from sentry.testutils import TestCase
from sentry.utils.http import (
is_same_domain, is_valid_origin, get_origins, absolute_uri)
class AbsoluteUriTest(TestCase):
def test_without_path(self):
assert absolute_uri() == settings.SENTRY_URL_PREFIX
def test_with_path(self):
assert absolute_uri('/foo/bar') == '%s/foo/bar' % (settings.SENTRY_URL_PREFIX,)
class SameDomainTestCase(TestCase):
def test_is_same_domain(self):
url1 = 'http://example.com/foo/bar'
url2 = 'http://example.com/biz/baz'
self.assertTrue(is_same_domain(url1, url2))
def test_is_same_domain_diff_scheme(self):
url1 = 'https://example.com/foo/bar'
url2 = 'http://example.com/biz/baz'
self.assertTrue(is_same_domain(url1, url2))
def test_is_same_domain_diff_port(self):
url1 = 'http://example.com:80/foo/bar'
url2 = 'http://example.com:13/biz/baz'
self.assertFalse(is_same_domain(url1, url2))
class GetOriginsTestCase(TestCase):
def test_project_default(self):
project = Project.objects.get()
with self.settings(SENTRY_ALLOW_ORIGIN=None):
result = get_origins(project)
self.assertEquals(result, frozenset(['*']))
def test_project(self):
project = Project.objects.get()
project.update_option('sentry:origins', ['http://foo.example'])
with self.settings(SENTRY_ALLOW_ORIGIN=None):
result = get_origins(project)
self.assertEquals(result, frozenset(['http://foo.example']))
def test_project_and_setting(self):
project = Project.objects.get()
project.update_option('sentry:origins', ['http://foo.example'])
with self.settings(SENTRY_ALLOW_ORIGIN='http://example.com'):
result = get_origins(project)
self.assertEquals(result, frozenset(['http://foo.example', 'http://example.com']))
def test_setting_empty(self):
with self.settings(SENTRY_ALLOW_ORIGIN=None):
result = get_origins(None)
self.assertEquals(result, frozenset([]))
def test_setting_all(self):
with self.settings(SENTRY_ALLOW_ORIGIN='*'):
result = get_origins(None)
self.assertEquals(result, frozenset(['*']))
def test_setting_uri(self):
with self.settings(SENTRY_ALLOW_ORIGIN='http://example.com'):
result = get_origins(None)
self.assertEquals(result, frozenset(['http://example.com']))
class IsValidOriginTestCase(TestCase):
@fixture
def project(self):
return mock.Mock()
def isValidOrigin(self, origin, inputs):
with mock.patch('sentry.utils.http.get_origins') as get_origins:
get_origins.return_value = inputs
result = is_valid_origin(origin, self.project)
get_origins.assert_called_once_with(self.project)
return result
def test_global_wildcard_matches_domain(self):
result = self.isValidOrigin('http://example.com', ['*'])
self.assertEquals(result, True)
def test_domain_wildcard_matches_domain(self):
result = self.isValidOrigin('http://example.com', ['*.example.com'])
self.assertEquals(result, True)
def test_domain_wildcard_matches_domain_with_port(self):
result = self.isValidOrigin('http://example.com:80', ['*.example.com'])
self.assertEquals(result, True)
def test_domain_wildcard_matches_subdomain(self):
result = self.isValidOrigin('http://foo.example.com', ['*.example.com'])
self.assertEquals(result, True)
def test_domain_wildcard_matches_subdomain_with_port(self):
result = self.isValidOrigin('http://foo.example.com:80', ['*.example.com'])
self.assertEquals(result, True)
def test_domain_wildcard_does_not_match_others(self):
result = self.isValidOrigin('http://foo.com', ['*.example.com'])
self.assertEquals(result, False)
def test_domain_wildcard_matches_domain_with_path(self):
result = self.isValidOrigin('http://foo.example.com/foo/bar', ['*.example.com'])
self.assertEquals(result, True)
def test_base_domain_matches_domain(self):
result = self.isValidOrigin('http://example.com', ['example.com'])
self.assertEquals(result, True)
def test_base_domain_matches_domain_with_path(self):
result = self.isValidOrigin('http://example.com/foo/bar', ['example.com'])
self.assertEquals(result, True)
def test_base_domain_matches_domain_with_port(self):
result = self.isValidOrigin('http://example.com:80', ['example.com'])
self.assertEquals(result, True)
def test_base_domain_matches_domain_with_explicit_port(self):
result = self.isValidOrigin('http://example.com:80', ['example.com:80'])
assert result is True
def test_base_domain_does_not_match_domain_with_invalid_port(self):
result = self.isValidOrigin('http://example.com:80', ['example.com:443'])
assert result is False
def test_base_domain_does_not_match_subdomain(self):
result = self.isValidOrigin('http://example.com', ['foo.example.com'])
self.assertEquals(result, False)
def test_full_uri_match(self):
result = self.isValidOrigin('http://example.com', ['http://example.com'])
self.assertEquals(result, True)
def test_full_uri_match_requires_scheme(self):
result = self.isValidOrigin('https://example.com', ['http://example.com'])
self.assertEquals(result, False)
def test_full_uri_match_does_not_require_port(self):
result = self.isValidOrigin('http://example.com:80', ['http://example.com'])
self.assertEquals(result, True)
def test_partial_uri_match(self):
result = self.isValidOrigin('http://example.com/foo/bar', ['http://example.com'])
self.assertEquals(result, True)
def test_null_valid_with_global(self):
result = self.isValidOrigin('null', ['*'])
self.assertEquals(result, True)
def test_null_invalid_graceful_with_domains(self):
result = self.isValidOrigin('null', ['http://example.com'])
self.assertEquals(result, False)
def test_custom_protocol_with_location(self):
result = self.isValidOrigin('sp://custom-thing/foo/bar', ['sp://custom-thing'])
assert result is True
result = self.isValidOrigin('sp://custom-thing-two/foo/bar', ['sp://custom-thing'])
assert result is False
def test_custom_protocol_without_location(self):
result = self.isValidOrigin('sp://custom-thing/foo/bar', ['sp://*'])
assert result is True
result = self.isValidOrigin('dp://custom-thing/foo/bar', ['sp://'])
assert result is False
def test_custom_protocol_with_domainish_match(self):
result = self.isValidOrigin('sp://custom-thing.foobar/foo/bar', ['sp://*.foobar'])
assert result is True
result = self.isValidOrigin('sp://custom-thing.bizbaz/foo/bar', ['sp://*.foobar'])
assert result is False
|
dongguangming/python-phonenumbers | refs/heads/dev | python/phonenumbers/shortdata/region_FJ.py | 11 | """Auto-generated file, do not edit by hand. FJ metadata"""
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
PHONE_METADATA_FJ = PhoneMetadata(id='FJ', country_code=None, international_prefix=None,
general_desc=PhoneNumberDesc(national_number_pattern='[0-579]\\d{1,4}', possible_number_pattern='\\d{2,5}'),
toll_free=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
premium_rate=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
emergency=PhoneNumberDesc(national_number_pattern='91[17]', possible_number_pattern='\\d{3}', example_number='911'),
short_code=PhoneNumberDesc(national_number_pattern='0(?:1[34]|8[1-4])|1(?:0[1-3]|[25]9)|2[289]|30|4(?:0404|4)|54|75|91[137]', possible_number_pattern='\\d{2,5}', example_number='22'),
standard_rate=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
carrier_specific=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
short_data=True)
|
authmillenon/RIOT | refs/heads/master | dist/tools/stm32loader/stm32loader.py | 32 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# vim: sw=4:ts=4:si:et:enc=utf-8
# Author: Ivan A-R <ivan@tuxotronic.org>
# With hacky error recovery by Gordon Williams <gw@pur3.co.uk>
# Project page: http://tuxotronic.org/wiki/projects/stm32loader
#
# This file is part of stm32loader.
#
# stm32loader is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 3, or (at your option) any later
# version.
#
# stm32loader is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# for more details.
#
# You should have received a copy of the GNU General Public License
# along with stm32loader; see the file COPYING3. If not see
# <http://www.gnu.org/licenses/>.
from __future__ import print_function
import sys
import getopt
import serial
import time
import glob
import tempfile
import os
import subprocess
try:
    from progressbar import *
    usepbar = 1
except ImportError:
    usepbar = 0
# Verbose level
QUIET = 0
def mdebug(level, message):
if QUIET >= level:
print(message, file=sys.stderr)
# Takes chip IDs (obtained via Get ID command) to human-readable names
CHIP_ID_STRS = {0x410: 'STM32F1, performance, medium-density',
0x411: 'STM32F2',
0x412: 'STM32F1, performance, low-density',
0x413: 'STM32F4',
0x414: 'STM32F1, performance, high-density',
0x416: 'STM32L1, performance, medium-density',
0x418: 'STM32F1, connectivity',
0x420: 'STM32F1, value, medium-density',
0x428: 'STM32F1, value, high-density',
0x429: 'STM32L1',
0x430: 'STM32F1, performance, XL-density'}
class CmdException(Exception):
pass
class CommandInterface(object):
def open(self, aport='/dev/tty.usbserial-FTD3TMCH', abaudrate=115200) :
self.sp = serial.Serial(
port=aport,
baudrate=abaudrate, # baudrate
bytesize=8, # number of databits
parity=serial.PARITY_EVEN,
stopbits=1,
xonxoff=0, # enable software flow control
rtscts=0, # disable RTS/CTS flow control
timeout=0.5 # set a timeout value, None for waiting forever
)
def _wait_for_ack(self, info="", timeout=0):
stop = time.time() + timeout
got = None
while not got:
got = self.sp.read(1)
if time.time() > stop:
break
if not got:
raise CmdException("No response to %s" % info)
        # interpret the response byte (ACK/NACK)
ask = ord(got)
if ask == 0x79:
# ACK
return 1
elif ask == 0x1F:
# NACK
raise CmdException("Chip replied with a NACK during %s" % info)
# Unknown response
raise CmdException("Unrecognised response 0x%x to %s" % (ask, info))
def reset(self, swapRtsDtr=False):
if swapRtsDtr:
self.sp.setRTS(1)
time.sleep(0.1)
self.sp.setRTS(0)
time.sleep(0.5)
else:
self.sp.setDTR(1)
time.sleep(0.1)
self.sp.setDTR(0)
time.sleep(0.5)
def initChip(self, swapRtsDtr=False):
# Set boot
if swapRtsDtr:
self.sp.setDTR(0)
else:
self.sp.setRTS(0)
self.reset(swapRtsDtr)
# Be a bit more persistent when trying to initialise the chip
stop = time.time() + 5.0
while time.time() <= stop:
self.sp.write(bytes([0x7f]))
got = self.sp.read()
# The chip will ACK a sync the very first time and
# NACK it every time afterwards
if got and got in bytes([0x79,0x1f]):
# Synced up
return
raise CmdException('No response while trying to sync')
def releaseChip(self, swapRtsDtr=False):
if swapRtsDtr:
self.sp.setDTR(1)
else:
self.sp.setRTS(1)
        self.reset(swapRtsDtr)
def cmdGeneric(self, cmd):
cmdByte = bytes([cmd])
ctrlByte = bytes([cmd ^ 0xFF])
self.sp.write(cmdByte)
self.sp.write(ctrlByte) # Control byte
return self._wait_for_ack(hex(cmd))
    def cmdGet(self):
        if self.cmdGeneric(0x00):
            mdebug(10, "*** Get command")
            nbytes = ord(self.sp.read())
            version = ord(self.sp.read())
            mdebug(10, "    Bootloader version: "+hex(version))
            # In Python 3, iterating over bytes yields ints directly
            dat = [hex(c) for c in self.sp.read(nbytes)]
            mdebug(10, "    Available commands: "+str(dat))
            self._wait_for_ack("0x00 end")
            return version
        else:
            raise CmdException("Get (0x00) failed")
def cmdGetVersion(self):
if self.cmdGeneric(0x01):
mdebug(10, "*** GetVersion command")
version = ord(self.sp.read())
self.sp.read(2)
self._wait_for_ack("0x01 end")
mdebug(10, " Bootloader version: "+hex(version))
return version
else:
raise CmdException("GetVersion (0x01) failed")
    def cmdGetID(self):
        if self.cmdGeneric(0x02):
            mdebug(10, "*** GetID command")
            nbytes = ord(self.sp.read())
            chip_id = self.sp.read(nbytes + 1)
            self._wait_for_ack("0x02 end")
            return chip_id
        else:
            raise CmdException("GetID (0x02) failed")
def _encode_addr(self, addr):
byte3 = (addr >> 0) & 0xFF
byte2 = (addr >> 8) & 0xFF
byte1 = (addr >> 16) & 0xFF
byte0 = (addr >> 24) & 0xFF
crc = byte0 ^ byte1 ^ byte2 ^ byte3
return [byte0, byte1, byte2, byte3, crc]
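    # e.g. _encode_addr(0x08000000) -> [0x08, 0x00, 0x00, 0x00, 0x08];
    # the trailing byte is the XOR checksum the bootloader expects.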
def cmdReadMemory(self, addr, lng):
assert(lng <= 256)
if self.cmdGeneric(0x11):
mdebug(10, "*** ReadMemory command")
self.sp.write(self._encode_addr(addr))
self._wait_for_ack("0x11 address failed")
N = (lng - 1) & 0xFF
crc = N ^ 0xFF
self.sp.write(bytes([N, crc]))
self._wait_for_ack("0x11 length failed")
return self.sp.read(lng)
else:
raise CmdException("ReadMemory (0x11) failed")
def cmdGo(self, addr):
if self.cmdGeneric(0x21):
mdebug(10, "*** Go command")
self.sp.write(self._encode_addr(addr))
self._wait_for_ack("0x21 go failed")
else:
raise CmdException("Go (0x21) failed")
def cmdWriteMemory(self, addr, data):
assert(len(data) <= 256)
if self.cmdGeneric(0x31):
mdebug(10, "*** Write memory command")
self.sp.write(bytes(self._encode_addr(addr)))
self._wait_for_ack("0x31 address failed")
#map(lambda c: hex(ord(c)), data)
lng = (len(data)-1) & 0xFF
            mdebug(10, "    %s bytes to write" % (lng+1))
self.sp.write(bytes([lng])) # len really
crc = 0xFF
try:
dataBytes = []
for c in data:
crc = crc ^ c
dataBytes.append(c)
dataBytes.append(crc)
self.sp.write(bytes(dataBytes))
self._wait_for_ack("0x31 programming failed")
mdebug(10, " Write memory done")
except:
mdebug(5, " WRITE FAIL - try and recover")
for c in data:
self.sp.write(bytes([255]))
mdebug(5, " WRITE FAIL - wait")
stop = time.time() + 1
while time.time() < stop:
if self.sp.inWaiting()>0: self.sp.read(self.sp.inWaiting())
mdebug(5, " WRITE FAIL - retry")
self.cmdWriteMemory(addr, data)
else:
raise CmdException("Write memory (0x31) failed")
def cmdEraseMemory(self, sectors = None):
if self.cmdGeneric(0x43):
mdebug(10, "*** Erase memory command")
if sectors is None:
                # Global erase
                self.sp.write(bytes([0xFF]))
                self.sp.write(bytes([0x00]))
            else:
                # Sectors erase
                self.sp.write(bytes([(len(sectors)-1) & 0xFF]))
                crc = 0xFF
                for c in sectors:
                    crc = crc ^ c
                    self.sp.write(bytes([c]))
                self.sp.write(bytes([crc]))
self._wait_for_ack("0x43 erasing failed")
mdebug(10, " Erase memory done")
else:
raise CmdException("Erase memory (0x43) failed")
GLOBAL_ERASE_TIMEOUT_SECONDS = 20 # This takes a while
def cmdExtendedEraseMemory(self, useSectorErase = False, amountOfSectors = 0x1ff):
if self.cmdGeneric(0x44):
if not useSectorErase:
mdebug(10, "*** Extended erase memory command")
# Global mass erase
mdebug(5, "Global mass erase; this may take a while")
                self.sp.write(bytes([0xFF]))
                self.sp.write(bytes([0xFF]))
                # Checksum
                self.sp.write(bytes([0x00]))
self._wait_for_ack("0x44 extended erase failed",
timeout=self.GLOBAL_ERASE_TIMEOUT_SECONDS)
mdebug(10, " Extended erase memory done")
else:
mdebug(10, " Performing non global erase")
# Data to be sent
data = []
crc = 0
msb = (amountOfSectors >> 8) & 0xff
lsb = amountOfSectors & 0xff
crc ^= msb
crc ^= lsb
data.append(msb)
data.append(lsb)
for sector in range(0, amountOfSectors+1):
msb = sector >> 8 & 0xff
lsb = sector & 0xff
crc ^= msb
crc ^= lsb
data.append(msb)
data.append(lsb)
data.append(crc)
for b in data:
self.sp.write(bytes([b]))
self._wait_for_ack("0x44 erasing failed", timeout=self.GLOBAL_ERASE_TIMEOUT_SECONDS)
mdebug(10, " Erase memory done")
else:
raise CmdException("Extended erase memory (0x44) failed")
def cmdWriteProtect(self, sectors):
if self.cmdGeneric(0x63):
mdebug(10, "*** Write protect command")
            self.sp.write(bytes([(len(sectors)-1) & 0xFF]))
            crc = 0xFF
            for c in sectors:
                crc = crc ^ c
                self.sp.write(bytes([c]))
            self.sp.write(bytes([crc]))
self._wait_for_ack("0x63 write protect failed")
mdebug(10, " Write protect done")
else:
raise CmdException("Write Protect memory (0x63) failed")
def cmdWriteUnprotect(self):
if self.cmdGeneric(0x73):
mdebug(10, "*** Write Unprotect command")
self._wait_for_ack("0x73 write unprotect failed")
self._wait_for_ack("0x73 write unprotect 2 failed")
mdebug(10, " Write Unprotect done")
else:
raise CmdException("Write Unprotect (0x73) failed")
def cmdReadoutProtect(self):
if self.cmdGeneric(0x82):
mdebug(10, "*** Readout protect command")
self._wait_for_ack("0x82 readout protect failed")
self._wait_for_ack("0x82 readout protect 2 failed")
mdebug(10, " Read protect done")
else:
raise CmdException("Readout protect (0x82) failed")
def cmdReadoutUnprotect(self):
if self.cmdGeneric(0x92):
mdebug(10, "*** Readout Unprotect command")
self._wait_for_ack("0x92 readout unprotect failed")
self._wait_for_ack("0x92 readout unprotect 2 failed")
mdebug(10, " Read Unprotect done")
else:
raise CmdException("Readout unprotect (0x92) failed")
# Complex commands section
def readMemory(self, addr, lng):
data = bytes([])
if usepbar:
widgets = ['Reading: ', Percentage(),', ', ETA(), ' ', Bar()]
pbar = ProgressBar(widgets=widgets,maxval=lng, term_width=79).start()
while lng > 256:
if usepbar:
pbar.update(pbar.maxval-lng)
else:
mdebug(5, "Read %(len)d bytes at 0x%(addr)X" % {'addr': addr, 'len': 256})
data += self.cmdReadMemory(addr, 256)
addr = addr + 256
lng = lng - 256
if usepbar:
pbar.update(pbar.maxval-lng)
pbar.finish()
else:
mdebug(5, "Read %(len)d bytes at 0x%(addr)X" % {'addr': addr, 'len': 256})
data += self.cmdReadMemory(addr, lng)
return data
def writeMemory(self, addr, data):
lng = len(data)
mdebug(5, "Writing %(lng)d bytes to start address 0x%(addr)X" %
{ 'lng': lng, 'addr': addr})
if usepbar:
widgets = ['Writing: ', Percentage(),' ', ETA(), ' ', Bar()]
pbar = ProgressBar(widgets=widgets, maxval=lng, term_width=79).start()
offs = 0
while lng > 256:
if usepbar:
pbar.update(pbar.maxval-lng)
else:
mdebug(5, "Write %(len)d bytes at 0x%(addr)X" % {'addr': addr, 'len': 256})
self.cmdWriteMemory(addr, data[offs:offs+256])
offs = offs + 256
addr = addr + 256
lng = lng - 256
if usepbar:
pbar.update(pbar.maxval-lng)
pbar.finish()
else:
mdebug(5, "Write %(len)d bytes at 0x%(addr)X" % {'addr': addr, 'len': 256})
self.cmdWriteMemory(addr, data[offs:offs+lng] + bytes([0xFF] * (256-lng)) )
def PCLKHack(self):
RCC_CFGR = 0x40021004
mdebug(5, "Modifying PCLK speed at 0x%(addr)X" % {'addr': RCC_CFGR})
# reg = self.cmdReadMemory(RCC_CFGR, 4)
# reg[1] = (reg[1] & 0xF8) | 0x04
reg = [10, 60, 29, 0]
# self.cmdWriteMemory(RCC_CFGR, reg)
        if self.cmdGeneric(0x31):
            self.sp.write(bytes(self._encode_addr(RCC_CFGR)))
            self._wait_for_ack("0x31 address failed")
            crc = 3 ^ reg[0] ^ reg[1] ^ reg[2] ^ reg[3]
            # length byte (really len-1), four data bytes, then the XOR checksum
            self.sp.write(bytes([3] + reg + [crc]))
            self._wait_for_ack("0x31 programming failed")
            mdebug(10, "    PCLK write memory done")
def resetDevice(self):
AIRCR = 0xE000ED0C
mdebug(5, "Writing to Reset Register")
reg = [0x04,0x00,0xFA,0x05]
        if self.cmdGeneric(0x31):
            self.sp.write(bytes(self._encode_addr(AIRCR)))
            self._wait_for_ack("0x31 address failed")
            crc = 3 ^ reg[0] ^ reg[1] ^ reg[2] ^ reg[3]
            # length byte (really len-1), four data bytes, then the XOR checksum
            self.sp.write(bytes([3] + reg + [crc]))
# don't wait for ack - device will have rebooted
mdebug(10, " reset done")
def usage():
print("""Usage: %s [-hqVewvrXuS] [-l length] [-p port] [-b baud] [-a addr] [file.bin]
-h This help
-q Quiet
-V Verbose
-e Erase
-w Write
-v Verify
-X Reset after
-r Read
-u Use sector erase instead of global erase. You need to specify the amount of sectors with '-l'
-l length Length of read or to erase when using sector erase
-S Swap RTS and DTR: use RTS for reset and DTR for boot0
-p port Serial port (default: first USB-like port in /dev)
-b baud Baud speed (default: 115200)
-a addr Target address
-s n Skip writing N bytes from beginning of the binary (does not affect start address)
-k Change PCLK frequency to make USB stable on Espruino 1v43 bootloaders
Example: ./stm32loader.py -e -w -v example/main.bin
To use sector erase instead of global: ./stm32loader.py -e -u -w -v -l 0x1ff example/main.bin
""" % sys.argv[0])
def read(filename):
"""Read the file to be programmed and turn it into a binary"""
    with open(filename, 'rb') as f:
        data = f.read()
    if data.startswith(b'\x7FELF'):
# Actually an ELF file. Convert to binary
handle, path = tempfile.mkstemp(suffix='.bin', prefix='stm32loader')
try:
os.close(handle)
# Try a couple of options for objcopy
for name in ['arm-none-eabi-objcopy', 'arm-linux-gnueabi-objcopy']:
try:
code = subprocess.call([name, '-Obinary', filename, path])
if code == 0:
return read(path)
except OSError:
pass
else:
raise Exception('Error %d while converting to a binary file' % code)
finally:
# Remove the temporary file
os.unlink(path)
else:
        return data
if __name__ == "__main__":
had_error = False
conf = {
'port': 'auto',
'baud': 115200,
'address': 0x08000000,
'skip' : 0,
'erase': 0,
'useSectorErase': False,
'swapRtsDtr': False,
'write': 0,
'verify': 0,
'read': 0,
'reset': 0,
'len': 1000,
'fname':'',
'pclk_hack':0,
}
# http://www.python.org/doc/2.5.2/lib/module-getopt.html
try:
opts, args = getopt.getopt(sys.argv[1:], "hqVewvrXudSp:b:a:c:s:l:k")
except getopt.GetoptError as err:
# print help information and exit:
print(str(err)) # will print something like "option -a not recognized"
usage()
sys.exit(2)
for o, a in opts:
if o == '-V':
QUIET = 10
elif o == '-q':
QUIET = 0
elif o == '-h':
usage()
sys.exit(0)
elif o == '-e':
conf['erase'] = 1
elif o == '-w':
conf['write'] = 1
elif o == '-v':
conf['verify'] = 1
elif o == '-r':
conf['read'] = 1
elif o == '-X':
conf['reset'] = 1
elif o == '-p':
conf['port'] = a
elif o == '-u':
conf['useSectorErase'] = True
elif o == '-b':
            conf['baud'] = int(a, 0)  # base 0 accepts decimal or 0x-prefixed hex
        elif o == '-a':
            conf['address'] = int(a, 0)
        elif o == '-s':
            conf['skip'] = int(a, 0)
        elif o == '-l':
            conf['len'] = int(a, 0)
elif o == '-k':
conf['pclk_hack'] = 1
elif o == '-S':
conf['swapRtsDtr'] = True
else:
assert False, "unhandled option"
# Try and find the port automatically
if conf['port'] == 'auto':
ports = []
# Get a list of all USB-like names in /dev
for name in ['tty.usbserial', 'ttyUSB']:
ports.extend(glob.glob('/dev/%s*' % name))
ports = sorted(ports)
if ports:
# Found something - take it
conf['port'] = ports[0]
cmd = CommandInterface()
cmd.open(conf['port'], conf['baud'])
mdebug(10, "Open port %(port)s, baud %(baud)d" % {'port':conf['port'],
'baud':conf['baud']})
try:
if (conf['write'] or conf['verify']):
mdebug(5, "Reading data from %s" % args[0])
data = read(args[0])
if conf['skip']:
mdebug(5, "Skipping %d bytes" % conf['skip'])
data = data[conf['skip']:]
try:
cmd.initChip(conf['swapRtsDtr'])
except CmdException:
print("Can't init. Ensure BOOT0=1, BOOT1=0, and reset device")
bootversion = cmd.cmdGet()
mdebug(0, "Bootloader version 0x%X" % bootversion)
if bootversion < 20 or bootversion >= 100:
raise Exception('Unreasonable bootloader version %d' % bootversion)
chip_id = cmd.cmdGetID()
assert len(chip_id) == 2, "Unreasonable chip id: %s" % repr(chip_id)
chip_id_num = (chip_id[0] << 8) | chip_id[1]
chip_id_str = CHIP_ID_STRS.get(chip_id_num, None)
if chip_id_str is None:
mdebug(0, 'Warning: unrecognised chip ID 0x%x' % chip_id_num)
else:
mdebug(0, "Chip id 0x%x, %s" % (chip_id_num, chip_id_str))
if conf['pclk_hack']:
cmd.PCLKHack()
if conf['erase']:
# Pre-3.0 bootloaders use the erase memory
# command. Starting with 3.0, extended erase memory
# replaced this command.
if bootversion < 0x30:
cmd.cmdEraseMemory()
else:
cmd.cmdExtendedEraseMemory(conf['useSectorErase'], conf['len'])
if conf['write']:
print("Writing binary")
cmd.writeMemory(conf['address'], data)
if conf['verify']:
verify = cmd.readMemory(conf['address'], len(data))
if(data == verify):
print("Verification OK")
else:
print("Verification FAILED")
print(str(len(data)) + ' vs ' + str(len(verify)))
for i in range(0, len(data)):
if data[i] != verify[i]:
print(hex(i) + ': ' + hex(data[i]) + ' vs ' + hex(verify[i]))
had_error = True
if not conf['write'] and conf['read']:
rdata = cmd.readMemory(conf['address'], conf['len'])
            with open(args[0], 'wb') as f:
                f.write(rdata)
if conf['reset']:
cmd.resetDevice()
finally:
if not conf['reset']:
cmd.releaseChip(conf['swapRtsDtr'])
if had_error: exit(1)
|
hanicker/odoo | refs/heads/8.0 | openerp/cli/deploy.py | 369 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import os
import requests
import sys
import tempfile
import zipfile
from . import Command
class Deploy(Command):
"""Deploy a module on an Odoo instance"""
def __init__(self):
super(Deploy, self).__init__()
self.session = requests.session()
def deploy_module(self, module_path, url, login, password, db='', force=False):
url = url.rstrip('/')
self.authenticate(url, login, password, db)
module_file = self.zip_module(module_path)
try:
return self.upload_module(url, module_file, force=force)
finally:
os.remove(module_file)
def upload_module(self, server, module_file, force=False):
print("Uploading module file...")
url = server + '/base_import_module/upload'
files = dict(mod_file=open(module_file, 'rb'))
force = '1' if force else ''
res = self.session.post(url, files=files, data=dict(force=force))
if res.status_code != 200:
raise Exception("Could not authenticate on server '%s'" % server)
return res.text
def authenticate(self, server, login, password, db=''):
print("Authenticating on server '%s' ..." % server)
# Fixate session with a given db if any
self.session.get(server + '/web/login', params=dict(db=db))
args = dict(login=login, password=password, db=db)
res = self.session.post(server + '/base_import_module/login', args)
if res.status_code == 404:
raise Exception("The server '%s' does not have the 'base_import_module' installed." % server)
elif res.status_code != 200:
raise Exception(res.text)
def zip_module(self, path):
path = os.path.abspath(path)
if not os.path.isdir(path):
raise Exception("Could not find module directory '%s'" % path)
container, module_name = os.path.split(path)
temp = tempfile.mktemp(suffix='.zip')
try:
print("Zipping module directory...")
with zipfile.ZipFile(temp, 'w') as zfile:
for root, dirs, files in os.walk(path):
for file in files:
file_path = os.path.join(root, file)
zfile.write(file_path, file_path.split(container).pop())
return temp
except Exception:
os.remove(temp)
raise
def run(self, cmdargs):
parser = argparse.ArgumentParser(
prog="%s deploy" % sys.argv[0].split(os.path.sep)[-1],
description=self.__doc__
)
parser.add_argument('path', help="Path of the module to deploy")
parser.add_argument('url', nargs='?', help='Url of the server (default=http://localhost:8069)', default="http://localhost:8069")
parser.add_argument('--db', dest='db', help='Database to use if server does not use db-filter.')
parser.add_argument('--login', dest='login', default="admin", help='Login (default=admin)')
parser.add_argument('--password', dest='password', default="admin", help='Password (default=admin)')
parser.add_argument('--verify-ssl', action='store_true', help='Verify SSL certificate')
parser.add_argument('--force', action='store_true', help='Force init even if module is already installed. (will update `noupdate="1"` records)')
if not cmdargs:
sys.exit(parser.print_help())
args = parser.parse_args(args=cmdargs)
if not args.verify_ssl:
self.session.verify = False
try:
if not args.url.startswith(('http://', 'https://')):
args.url = 'https://%s' % args.url
result = self.deploy_module(args.path, args.url, args.login, args.password, args.db, force=args.force)
print(result)
except Exception, e:
sys.exit("ERROR: %s" % e)
|
lwthatcher/Compass | refs/heads/master | web/node_modules/npm/node_modules/node-gyp/gyp/pylib/gyp/common_test.py | 2542 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unit tests for the common.py file."""
import gyp.common
import unittest
import sys
class TestTopologicallySorted(unittest.TestCase):
def test_Valid(self):
"""Test that sorting works on a valid graph with one possible order."""
graph = {
'a': ['b', 'c'],
'b': [],
'c': ['d'],
'd': ['b'],
}
def GetEdge(node):
return tuple(graph[node])
self.assertEqual(
gyp.common.TopologicallySorted(graph.keys(), GetEdge),
['a', 'c', 'd', 'b'])
def test_Cycle(self):
"""Test that an exception is thrown on a cyclic graph."""
graph = {
'a': ['b'],
'b': ['c'],
'c': ['d'],
'd': ['a'],
}
def GetEdge(node):
return tuple(graph[node])
self.assertRaises(
gyp.common.CycleError, gyp.common.TopologicallySorted,
graph.keys(), GetEdge)
class TestGetFlavor(unittest.TestCase):
"""Test that gyp.common.GetFlavor works as intended"""
original_platform = ''
def setUp(self):
self.original_platform = sys.platform
def tearDown(self):
sys.platform = self.original_platform
def assertFlavor(self, expected, argument, param):
sys.platform = argument
self.assertEqual(expected, gyp.common.GetFlavor(param))
def test_platform_default(self):
self.assertFlavor('freebsd', 'freebsd9' , {})
self.assertFlavor('freebsd', 'freebsd10', {})
self.assertFlavor('openbsd', 'openbsd5' , {})
    self.assertFlavor('solaris', 'sunos5' , {})
    self.assertFlavor('solaris', 'sunos' , {})
    self.assertFlavor('linux' , 'linux2' , {})
    self.assertFlavor('linux' , 'linux3' , {})
def test_param(self):
self.assertFlavor('foobar', 'linux2' , {'flavor': 'foobar'})
if __name__ == '__main__':
unittest.main()
|
thijstriemstra/Pi-GPIO-Server | refs/heads/master | pi_gpio/events.py | 1 | from pi_gpio import socketio
from config.pins import PinManager
class PinEventManager(PinManager):
def __init__(self):
super(PinEventManager, self).__init__()
self.socketio = socketio
self.edge = {
'RISING': self.gpio.RISING,
'FALLING': self.gpio.FALLING,
'BOTH': self.gpio.BOTH
}
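    # Factory so each callback captures its own num/name/event values rather
    # than sharing late-bound loop variables from register_gpio_events below.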
def build_event_callback(self, num, name, event):
def event_callback(num):
data = {
'num': num,
'name': name,
'event': event
}
self.socketio.emit('pin:event', data)
print(data)
return event_callback
def register_gpio_events(self):
for num, config in self.pins.items():
event = config.get('event', None)
name = config.get('name', '')
if event:
edge = self.edge[event]
bounce = config['bounce']
cb = self.build_event_callback(num, name, event)
self.gpio.add_event_detect(num, edge, callback=cb, bouncetime=bounce)
|
albertz/music-player | refs/heads/master | src/TaskSystem.py | 1 |
"""
Here are all subprocess, threading etc related utilities,
most of them quite low level.
"""
from __future__ import print_function
from utils import *
from threading import Condition, Thread, RLock, Lock, currentThread
import Logging
import sys
import os
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
PY3 = sys.version_info[0] >= 3
def do_in_mainthread(f, wait=True):
# Better use daemonThreadCall() instead.
# Note: We don't need/want the NSThread.isMainThread() check and extra handling.
# The `performSelectorOnMainThread:withObject:waitUntilDone:` does the right thing
# in case we are the main thread: if wait is True, it is executed from here,
# otherwise it is queued and executed in the next frame.
global quit
if quit:
raise KeyboardInterrupt
global isFork
if isFork:
Logging.debugWarn("called do_in_mainthread in fork")
raise SystemError("called do_in_mainthread in fork")
import objc
try:
NSObject = objc.lookUpClass("NSObject")
class PyAsyncCallHelper(NSObject):
def initWithArgs_(self, f):
self.f = f
self.ret = None
self.exc = None
return self
def call_(self, o):
try:
self.ret = self.f()
except (KeyboardInterrupt,SystemExit) as exc:
self.exc = exc
except:
print("Exception in PyAsyncCallHelper call")
sys.excepthook(*sys.exc_info())
except Exception:
PyAsyncCallHelper = objc.lookUpClass("PyAsyncCallHelper") # already defined earlier
helper = PyAsyncCallHelper.alloc().initWithArgs_(f)
helper.performSelectorOnMainThread_withObject_waitUntilDone_(helper.call_, None, wait)
if wait and helper.exc:
raise helper.exc
return helper.ret
def DoInMainthreadDecorator(func):
def decoratedFunc(*args, **kwargs):
return do_in_mainthread(lambda: func(*args, **kwargs), wait=True)
return decoratedFunc
def WarnMustNotBeInForkDecorator(func):
class Ctx:
didWarn = False
def decoratedFunc(*args, **kwargs):
global isFork
if isFork:
if not Ctx.didWarn:
import Logging
Logging.debugWarn("Must not be in fork!")
Ctx.didWarn = True
return None
return func(*args, **kwargs)
return decoratedFunc
def execInMainProc(func):
global isMainProcess
if isMainProcess:
return func()
else:
assert _AsyncCallQueue.Self, "works only if called via asyncCall"
return _AsyncCallQueue.Self.asyncExecClient(func)
def ExecInMainProcDecorator(func):
def decoratedFunc(*args, **kwargs):
return execInMainProc(lambda: func(*args, **kwargs))
return decoratedFunc
def test_asyncCall():
mod = globals()
calledBackVarName = getTempNameInScope(mod)
mod[calledBackVarName] = False
def funcAsync():
assert not isMainProcess
assert not isFork
res = execInMainProc(funcMain)
assert res == "main"
return "async"
def funcMain():
mod[calledBackVarName] = True
return "main"
res = asyncCall(funcAsync, name="test", mustExec=True)
assert res == "async"
assert mod[calledBackVarName] is True
mod.pop(calledBackVarName)
class TestClassAsyncCallExecInMainProcDeco:
def __init__(self, name):
self.name = name
@ExecInMainProcDecorator
def testExecInMainProcDeco(self, *args):
return 42, self.name, args
@staticmethod
def getInstance(name):
return TestClassAsyncCallExecInMainProcDeco(name)
def __reduce__(self):
return (self.getInstance, (self.name,))
def test_asyncCall2():
test = TestClassAsyncCallExecInMainProcDeco("test42")
def funcAsync():
res = test.testExecInMainProcDeco(1, buffer("abc"))
assert res == (42, "test42", (1, buffer("abc")))
asyncCall(funcAsync, name="test", mustExec=True)
class AsyncInterrupt(BaseException): pass
# Note that there are places where an exception should never occur -
# eg inside an Lock.aquire(), Lock.__enter__(), Lock.__exit__().
# Otherwise we might end up with a non-unlocked mutex.
# We can never know if this is the case for the thread or not -
# so this is unsafe and should not be used!
# At least for now, I don't really see a way to overcome this.
def raiseExceptionInThread(threadId, exc=AsyncInterrupt):
import ctypes
ret = ctypes.pythonapi.PyThreadState_SetAsyncExc(
ctypes.c_long(threadId),
ctypes.py_object(exc))
# returns the count of threads where we set the exception
if ret > 1:
# strange - should not happen.
print("Error: PyThreadState_SetAsyncExc returned >1")
# try to reset - although this is similar unsafe...
ctypes.pythonapi.PyThreadState_SetAsyncExc(ctypes.c_long(threadId), None)
return ret > 0
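# Usage sketch: raiseExceptionInThread(someThread.ident) asks the interpreter
# to raise AsyncInterrupt in that thread at its next bytecode boundary
# (illustrative; mind the caveats above).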
class QueuedDaemonThread:
def __init__(self):
self.lock = RLock()
self.cond = Condition(self.lock)
self.queues = {}
self.thread = None
self.quit = False
def _getHandler(self, queueItem):
def handle():
try:
queueItem["func"]()
except (ForwardedKeyboardInterrupt, KeyboardInterrupt, SystemExit):
return # just ignore
except BaseException:
print("Exception in QueuedDaemonThread", queueItem["name"])
sys.excepthook(*sys.exc_info())
finally:
with self.lock:
queueItem["finished"] = True
self.cond.notifyAll()
return handle
def _threadMain(self):
setCurThreadName("Py QueuedDaemonThread")
while True:
with self.lock:
if self.quit:
self.thread = None
return
for queueId,queue in self.queues.items():
while queue:
queueItem = queue[0]
if queueItem.get("finished", False):
queue.pop(0)
continue
if not queueItem.get("started", False):
queueItem["started"] = True
handler = self._getHandler(queueItem)
daemonThreadCall(handler, name=queueItem["name"])
break
if not queue:
del self.queues[queueId]
self.cond.wait()
def _maybeStart(self):
if not self.thread:
self.thread = daemonThreadCall(self._threadMain, name="queued daemon thread")
def push(self, func, name=None, queue=None):
assert queue
with self.lock:
self.queues.setdefault(queue, []).append({"func":func, "name":name})
self.cond.notifyAll()
self._maybeStart()
    def setQuit(self):
        # Named setQuit: the boolean flag self.quit assigned in __init__ would
        # shadow an instance method called quit and make it uncallable.
        with self.lock:
            self.quit = True
            self.cond.notifyAll()
queuedDaemonThread = QueuedDaemonThread()
def daemonThreadCall(func, args=(), name=None, queue=None):
if queue:
queuedDaemonThread.push(func, name=name, queue=queue)
return
def doCall():
try:
setCurThreadName("Py daemon: %s" % name)
func(*args)
except (ForwardedKeyboardInterrupt, KeyboardInterrupt):
return # just ignore
except BaseException:
print("Exception in daemonThreadCall thread", name)
sys.excepthook(*sys.exc_info())
thread = Thread(target = doCall, name = name)
thread.daemon = True
thread.start()
return thread
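# Usage sketch (hypothetical task names, for illustration): calls that share a
# queue id run strictly one after another, while different queue ids run
# concurrently, e.g.
#     daemonThreadCall(loadCoverArt, name="load cover", queue="coverart")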
def test_AsyncTask():
AsyncTask.test()
class ForwardedKeyboardInterrupt(Exception):
pass
class _AsyncCallQueue:
Self = None
class Types:
result = 0
exception = 1
asyncExec = 2
def __init__(self, queue):
assert not self.Self
self.__class__.Self = self
self.mutex = Lock()
self.queue = queue
def put(self, type, value):
self.queue.put((type, value))
def asyncExecClient(self, func):
with self.mutex:
self.put(self.Types.asyncExec, func)
t, value = self.queue.get()
if t == self.Types.result:
return value
elif t == self.Types.exception:
raise value
else:
assert False, "bad behavior of asyncCall in asyncExec (%r)" % t
@classmethod
def asyncExecHost(clazz, task, func):
q = task
name = "<unknown>"
try:
name = repr(func)
res = func()
except Exception as exc:
print("Exception in asyncExecHost", name, exc)
q.put((clazz.Types.exception, exc))
else:
try:
q.put((clazz.Types.result, res))
except IOError:
# broken pipe or so. parent quit. treat like a SIGINT
raise KeyboardInterrupt
def asyncCall(func, name=None, mustExec=False):
"""
This executes func() in another process and waits/blocks until
it is finished. The returned value is passed back to this process
and returned. Exceptions are passed back as well and will be
reraised here.
If `mustExec` is set, the other process must `exec()` after the `fork()`.
If it is not set, it might omit the `exec()`, depending on the platform.
"""
def doCall(queue):
q = _AsyncCallQueue(queue)
try:
try:
res = func()
except KeyboardInterrupt as exc:
print("Exception in asyncCall", name, ": KeyboardInterrupt")
q.put(q.Types.exception, ForwardedKeyboardInterrupt(exc))
except BaseException as exc:
print("Exception in asyncCall", name)
sys.excepthook(*sys.exc_info())
q.put(q.Types.exception, exc)
else:
q.put(q.Types.result, res)
except (KeyboardInterrupt, ForwardedKeyboardInterrupt):
print("asyncCall: SIGINT in put, probably the parent died")
# ignore
task = AsyncTask(func=doCall, name=name, mustExec=mustExec)
while True:
# If there is an unhandled exception in doCall or the process got killed/segfaulted or so,
# this will raise an EOFError here.
# However, normally, we should catch all exceptions and just reraise them here.
t,value = task.get()
if t == _AsyncCallQueue.Types.result:
return value
elif t == _AsyncCallQueue.Types.exception:
raise value
elif t == _AsyncCallQueue.Types.asyncExec:
_AsyncCallQueue.asyncExecHost(task, value)
else:
assert False, "unknown _AsyncCallQueue type %r" % t
# This is needed in some cases to avoid pickling problems with bounded funcs.
def funcCall(attrChainArgs, args=()):
f = attrChain(*attrChainArgs)
return f(*args)
import pickle, types, marshal
Unpickler = pickle.Unpickler
if PY3:
CellType = type((lambda x: lambda: x)(0).__closure__[0])
def makeCell(value): return (lambda: value).__closure__[0]
else:
CellType = type((lambda x: lambda: x)(0).func_closure[0])
def makeCell(value): return (lambda: value).func_closure[0]
def getModuleDict(modname): return __import__(modname).__dict__
DictType = dict if PY3 else types.DictionaryType
try:
_BasePickler = pickle._Pickler # use the pure Python implementation
except AttributeError:
_BasePickler = pickle.Pickler
class Pickler(_BasePickler):
def __init__(self, *args, **kwargs):
if "protocol" not in kwargs:
kwargs["protocol"] = pickle.HIGHEST_PROTOCOL
super(Pickler, self).__init__(*args, **kwargs)
dispatch = _BasePickler.dispatch.copy()
def save_func(self, obj):
try:
self.save_global(obj)
return
except pickle.PicklingError:
pass
assert type(obj) is types.FunctionType
self.save(types.FunctionType)
self.save((
obj.func_code,
obj.func_globals,
obj.func_name,
obj.func_defaults,
obj.func_closure,
))
self.write(pickle.REDUCE)
self.memoize(obj)
dispatch[types.FunctionType] = save_func
def save_code(self, obj):
assert type(obj) is types.CodeType
self.save(marshal.loads)
self.save((marshal.dumps(obj),))
self.write(pickle.REDUCE)
self.memoize(obj)
dispatch[types.CodeType] = save_code
def save_cell(self, obj):
assert type(obj) is CellType
self.save(makeCell)
self.save((obj.cell_contents,))
self.write(pickle.REDUCE)
self.memoize(obj)
dispatch[CellType] = save_cell
# We also search for module dicts and reference them.
def intellisave_dict(self, obj):
if len(obj) <= 5: # fastpath
self.save_dict(obj)
return
for modname, mod in sys.modules.items():
if not mod: continue
moddict = mod.__dict__
if obj is moddict:
self.save(getModuleDict)
self.save((modname,))
self.write(pickle.REDUCE)
self.memoize(obj)
return
self.save_dict(obj)
dispatch[DictType] = intellisave_dict
if not PY3:
def save_buffer(self, obj):
self.save(buffer)
self.save((str(obj),))
self.write(pickle.REDUCE)
dispatch[types.BufferType] = save_buffer
# Some types in the types modules are not correctly referenced,
# such as types.FunctionType. This is fixed here.
def fixedsave_type(self, obj):
try:
self.save_global(obj)
return
except pickle.PicklingError:
pass
for modname in ["types"]:
moddict = sys.modules[modname].__dict__
for modobjname,modobj in moddict.iteritems():
if modobj is obj:
self.write(pickle.GLOBAL + modname + '\n' + modobjname + '\n')
self.memoize(obj)
return
self.save_global(obj)
if not PY3:
dispatch[types.TypeType] = fixedsave_type
# avoid pickling instances of ourself. this mostly doesn't make sense and leads to trouble.
# however, also doesn't break. it mostly makes sense to just ignore.
def __getstate__(self): return None
def __setstate__(self, state): pass
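# Minimal round-trip sketch of what this Pickler adds over the stdlib one
# (illustrative, mirroring test_picklebuffer below): plain functions, even
# closures, survive pickling.
#
#     f = StringIO()
#     Pickler(f).dump(lambda x: x + 1)
#     f.seek(0)
#     assert Unpickler(f).load()(1) == 2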
class ExecingProcess:
def __init__(self, target, args, name):
self.target = target
self.args = args
self.name = name
self.daemon = True
self.pid = None
def start(self):
assert self.pid is None
def pipeOpen():
readend,writeend = os.pipe()
readend = os.fdopen(readend, "r")
writeend = os.fdopen(writeend, "w")
return readend,writeend
self.pipe_c2p = pipeOpen()
self.pipe_p2c = pipeOpen()
pid = os.fork()
if pid == 0: # child
self.pipe_c2p[0].close()
self.pipe_p2c[1].close()
# Copying all parameters is problematic (e.g. --pyshell).
# sys.argv[0] is never "python", so it might be problematic
# if it is not executable. However, it should be.
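            # The child therefore exec()s something like
            #   ['./main.py', '--forkExecProc', '7', '9']
            # (hypothetical fd numbers, for illustration).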
args = sys.argv[0:1] + [
"--forkExecProc",
str(self.pipe_c2p[1].fileno()),
str(self.pipe_p2c[0].fileno())]
os.execv(args[0], args)
else: # parent
self.pipe_c2p[1].close()
self.pipe_p2c[0].close()
self.pid = pid
self.pickler = Pickler(self.pipe_p2c[1])
self.pickler.dump(self.name)
self.pickler.dump(self.target)
self.pickler.dump(self.args)
self.pipe_p2c[1].flush()
Verbose = False
@staticmethod
def checkExec():
if "--forkExecProc" in sys.argv:
argidx = sys.argv.index("--forkExecProc")
writeFileNo = int(sys.argv[argidx + 1])
readFileNo = int(sys.argv[argidx + 2])
readend = os.fdopen(readFileNo, "r")
writeend = os.fdopen(writeFileNo, "w")
unpickler = Unpickler(readend)
name = unpickler.load()
if ExecingProcess.Verbose: print("ExecingProcess child %s (pid %i)" % (name, os.getpid()))
try:
target = unpickler.load()
args = unpickler.load()
except EOFError:
print("Error: unpickle incomplete")
raise SystemExit
ret = target(*args)
Pickler(writeend).dump(ret)
if ExecingProcess.Verbose: print("ExecingProcess child %s (pid %i) finished" % (name, os.getpid()))
raise SystemExit
class ExecingProcess_ConnectionWrapper(object):
def __init__(self, fd=None):
self.fd = fd
if self.fd:
from _multiprocessing import Connection
self.conn = Connection(fd)
def __getstate__(self): return self.fd
def __setstate__(self, state): self.__init__(state)
def __getattr__(self, attr): return getattr(self.conn, attr)
def _check_closed(self): assert not self.conn.closed
def _check_writable(self): assert self.conn.writable
def _check_readable(self): assert self.conn.readable
def send(self, value):
self._check_closed()
self._check_writable()
buf = StringIO()
Pickler(buf).dump(value)
self.conn.send_bytes(buf.getvalue())
def recv(self):
self._check_closed()
self._check_readable()
buf = self.conn.recv_bytes()
f = StringIO(buf)
return Unpickler(f).load()
def ExecingProcess_Pipe():
import socket
s1, s2 = socket.socketpair()
c1 = ExecingProcess_ConnectionWrapper(os.dup(s1.fileno()))
c2 = ExecingProcess_ConnectionWrapper(os.dup(s2.fileno()))
s1.close()
s2.close()
return c1, c2
isFork = False # fork() without exec()
isMainProcess = True
class AsyncTask:
def __init__(self, func, name=None, mustExec=False):
self.name = name or "unnamed"
self.func = func
self.mustExec = mustExec
self.parent_pid = os.getpid()
if mustExec and sys.platform != "win32":
self.Process = ExecingProcess
self.Pipe = ExecingProcess_Pipe
else:
from multiprocessing import Process, Pipe
self.Process = Process
self.Pipe = Pipe
self.parent_conn, self.child_conn = self.Pipe()
self.proc = self.Process(
target = funcCall,
args = ((AsyncTask, "_asyncCall"), (self,)),
name = self.name + " worker process")
self.proc.daemon = True
self.proc.start()
self.child_conn.close()
self.child_pid = self.proc.pid
assert self.child_pid
self.conn = self.parent_conn
@staticmethod
def _asyncCall(self):
assert self.isChild
self.parent_conn.close()
self.conn = self.child_conn # we are the child
if not self.mustExec and sys.platform != "win32":
global isFork
isFork = True
global isMainProcess
isMainProcess = False
try:
self.func(self)
except KeyboardInterrupt:
print("Exception in AsyncTask", self.name, ": KeyboardInterrupt")
except BaseException:
print("Exception in AsyncTask", self.name)
sys.excepthook(*sys.exc_info())
finally:
self.conn.close()
def put(self, value):
self.conn.send(value)
def get(self):
thread = currentThread()
try:
thread.waitQueue = self
res = self.conn.recv()
except EOFError: # this happens when the child died
raise ForwardedKeyboardInterrupt()
except Exception:
raise
finally:
thread.waitQueue = None
return res
@property
def isParent(self):
return self.parent_pid == os.getpid()
@property
def isChild(self):
if self.isParent: return False
assert self.parent_pid == os.getppid()
return True
# This might be called from the module code.
# See OnRequestQueue which implements the same interface.
def setCancel(self):
self.conn.close()
if self.isParent and self.child_pid:
import signal
os.kill(self.child_pid, signal.SIGINT)
self.child_pid = None
@classmethod
def test(cls):
pass
def test_picklebuffer():
origbuffer = buffer("123")
f = StringIO()
Pickler(f).dump(origbuffer)
f.seek(0)
b = Unpickler(f).load()
assert origbuffer == b
from contextlib import contextmanager
class ReadWriteLock(object):
"""Classic implementation of ReadWriteLock.
Note that this partly supports recursive lock usage:
- Inside a readlock, a writelock will always block!
- Inside a readlock, another readlock is fine.
- Inside a writelock, any other writelock or readlock is fine.
"""
def __init__(self):
import threading
self.lock = threading.RLock()
self.writeReadyCond = threading.Condition(self.lock)
self.readerCount = 0
@property
@contextmanager
def readlock(self):
with self.lock:
self.readerCount += 1
try: yield
finally:
with self.lock:
self.readerCount -= 1
if self.readerCount == 0:
self.writeReadyCond.notifyAll()
@property
@contextmanager
def writelock(self):
with self.lock:
while self.readerCount > 0:
self.writeReadyCond.wait()
yield
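# Minimal usage sketch (illustrative, not exercised elsewhere in this module):
#
#     rwlock = ReadWriteLock()
#     with rwlock.readlock:
#         pass  # many readers may hold this concurrently
#     with rwlock.writelock:
#         pass  # waits until all readers have left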
|
Edu-Glez/Bank_sentiment_analysis | refs/heads/master | env/lib/python3.6/site-packages/nbconvert/tests/files/override.py | 29 | c = get_config()
# Export notebook2.ipynb in the current directory to the 'python' format.
c.NbConvertApp.notebooks = ['notebook2.ipynb']
c.NbConvertApp.export_format = 'python'
|
Ca2Patton/PythonStuff | refs/heads/master | parseArgs.py | 1 | #!/Library/Frameworks/Python.framework/Versions/2.7/bin/python
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("square", help="Display a square of a given number", type=int)
parser.add_argument("-v", "--verbose", action="count", default=0, help="increase output verbosity")
# Parse once, after every argument has been registered; parsing earlier would
# reject -v/--verbose as an unrecognized option.
args = parser.parse_args()
answer = args.square**2
if args.verbose >= 2:
print "The square of {} equals {}".format(args.square, answer)
elif args.verbose >= 1:
print "{}^2 == {}".format(args.square, answer)
else:
print answer
|
quake0day/oj | refs/heads/master | aa.py | 1 | import heapq
class Solution:
def __init__(self):
self.heap_min = []
self.heap_max = []
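    # Invariant: heap_max holds the smaller half of the numbers, negated (so
    # the largest of them is -heap_max[0]); heap_min holds the larger half.
    # balance() keeps the two sizes within one of each other, so the median
    # is always at one of the heap tops.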
def insert(self, num):
if not self.heap_min or num>self.heap_min[0]:
heapq.heappush(self.heap_min, num)
else:
heapq.heappush(self.heap_max, -num)
self.balance()
def balance(self):
l1 = len(self.heap_min)
l2 = len(self.heap_max)
if l1-l2>1:
heapq.heappush(self.heap_max, -heapq.heappop(self.heap_min))
self.balance()
elif l2-l1>1:
heapq.heappush(self.heap_min, -heapq.heappop(self.heap_max))
self.balance()
return
def get_median(self):
l1 = len(self.heap_min)
l2 = len(self.heap_max)
m = (l1+l2-1)/2
if m==l2-1:
return -self.heap_max[0]
elif m==l2:
return self.heap_min[0]
raise Exception("not balanced")
def medianII(self, nums):
"""
:param nums: A list of integers.
:return: The median of numbers
"""
ret = []
for num in nums:
self.insert(num)
ret.append(self.get_median())
return ret
a = Solution()
print a.medianII([4, 5, 1, 3, 2, 6, 0])
|
ostcar/OpenSlides | refs/heads/master | server/openslides/core/migrations/0016_projector_reference_projector.py | 8 | # Generated by Django 2.1.5 on 2019-01-31 10:24
from django.db import migrations, models
import openslides.utils.models
class Migration(migrations.Migration):
dependencies = [("core", "0015_auto_20190122_1216")]
operations = [
migrations.AddField(
model_name="projector",
name="reference_projector",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=openslides.utils.models.SET_NULL_AND_AUTOUPDATE,
related_name="references",
to="core.Projector",
),
)
]
|
duramato/SickRage | refs/heads/master | lib/unidecode/x05b.py | 252 | data = (
'Gui ', # 0x00
'Deng ', # 0x01
'Zhi ', # 0x02
'Xu ', # 0x03
'Yi ', # 0x04
'Hua ', # 0x05
'Xi ', # 0x06
'Hui ', # 0x07
'Rao ', # 0x08
'Xi ', # 0x09
'Yan ', # 0x0a
'Chan ', # 0x0b
'Jiao ', # 0x0c
'Mei ', # 0x0d
'Fan ', # 0x0e
'Fan ', # 0x0f
'Xian ', # 0x10
'Yi ', # 0x11
'Wei ', # 0x12
'Jiao ', # 0x13
'Fu ', # 0x14
'Shi ', # 0x15
'Bi ', # 0x16
'Shan ', # 0x17
'Sui ', # 0x18
'Qiang ', # 0x19
'Lian ', # 0x1a
'Huan ', # 0x1b
'Xin ', # 0x1c
'Niao ', # 0x1d
'Dong ', # 0x1e
'Yi ', # 0x1f
'Can ', # 0x20
'Ai ', # 0x21
'Niang ', # 0x22
'Neng ', # 0x23
'Ma ', # 0x24
'Tiao ', # 0x25
'Chou ', # 0x26
'Jin ', # 0x27
'Ci ', # 0x28
'Yu ', # 0x29
'Pin ', # 0x2a
'Yong ', # 0x2b
'Xu ', # 0x2c
'Nai ', # 0x2d
'Yan ', # 0x2e
'Tai ', # 0x2f
'Ying ', # 0x30
'Can ', # 0x31
'Niao ', # 0x32
'Wo ', # 0x33
'Ying ', # 0x34
'Mian ', # 0x35
'Kaka ', # 0x36
'Ma ', # 0x37
'Shen ', # 0x38
'Xing ', # 0x39
'Ni ', # 0x3a
'Du ', # 0x3b
'Liu ', # 0x3c
'Yuan ', # 0x3d
'Lan ', # 0x3e
'Yan ', # 0x3f
'Shuang ', # 0x40
'Ling ', # 0x41
'Jiao ', # 0x42
'Niang ', # 0x43
'Lan ', # 0x44
'Xian ', # 0x45
'Ying ', # 0x46
'Shuang ', # 0x47
'Shuai ', # 0x48
'Quan ', # 0x49
'Mi ', # 0x4a
'Li ', # 0x4b
'Luan ', # 0x4c
'Yan ', # 0x4d
'Zhu ', # 0x4e
'Lan ', # 0x4f
'Zi ', # 0x50
'Jie ', # 0x51
'Jue ', # 0x52
'Jue ', # 0x53
'Kong ', # 0x54
'Yun ', # 0x55
'Zi ', # 0x56
'Zi ', # 0x57
'Cun ', # 0x58
'Sun ', # 0x59
'Fu ', # 0x5a
'Bei ', # 0x5b
'Zi ', # 0x5c
'Xiao ', # 0x5d
'Xin ', # 0x5e
'Meng ', # 0x5f
'Si ', # 0x60
'Tai ', # 0x61
'Bao ', # 0x62
'Ji ', # 0x63
'Gu ', # 0x64
'Nu ', # 0x65
'Xue ', # 0x66
'[?] ', # 0x67
'Zhuan ', # 0x68
'Hai ', # 0x69
'Luan ', # 0x6a
'Sun ', # 0x6b
'Huai ', # 0x6c
'Mie ', # 0x6d
'Cong ', # 0x6e
'Qian ', # 0x6f
'Shu ', # 0x70
'Chan ', # 0x71
'Ya ', # 0x72
'Zi ', # 0x73
'Ni ', # 0x74
'Fu ', # 0x75
'Zi ', # 0x76
'Li ', # 0x77
'Xue ', # 0x78
'Bo ', # 0x79
'Ru ', # 0x7a
'Lai ', # 0x7b
'Nie ', # 0x7c
'Nie ', # 0x7d
'Ying ', # 0x7e
'Luan ', # 0x7f
'Mian ', # 0x80
'Zhu ', # 0x81
'Rong ', # 0x82
'Ta ', # 0x83
'Gui ', # 0x84
'Zhai ', # 0x85
'Qiong ', # 0x86
'Yu ', # 0x87
'Shou ', # 0x88
'An ', # 0x89
'Tu ', # 0x8a
'Song ', # 0x8b
'Wan ', # 0x8c
'Rou ', # 0x8d
'Yao ', # 0x8e
'Hong ', # 0x8f
'Yi ', # 0x90
'Jing ', # 0x91
'Zhun ', # 0x92
'Mi ', # 0x93
'Zhu ', # 0x94
'Dang ', # 0x95
'Hong ', # 0x96
'Zong ', # 0x97
'Guan ', # 0x98
'Zhou ', # 0x99
'Ding ', # 0x9a
'Wan ', # 0x9b
'Yi ', # 0x9c
'Bao ', # 0x9d
'Shi ', # 0x9e
'Shi ', # 0x9f
'Chong ', # 0xa0
'Shen ', # 0xa1
'Ke ', # 0xa2
'Xuan ', # 0xa3
'Shi ', # 0xa4
'You ', # 0xa5
'Huan ', # 0xa6
'Yi ', # 0xa7
'Tiao ', # 0xa8
'Shi ', # 0xa9
'Xian ', # 0xaa
'Gong ', # 0xab
'Cheng ', # 0xac
'Qun ', # 0xad
'Gong ', # 0xae
'Xiao ', # 0xaf
'Zai ', # 0xb0
'Zha ', # 0xb1
'Bao ', # 0xb2
'Hai ', # 0xb3
'Yan ', # 0xb4
'Xiao ', # 0xb5
'Jia ', # 0xb6
'Shen ', # 0xb7
'Chen ', # 0xb8
'Rong ', # 0xb9
'Huang ', # 0xba
'Mi ', # 0xbb
'Kou ', # 0xbc
'Kuan ', # 0xbd
'Bin ', # 0xbe
'Su ', # 0xbf
'Cai ', # 0xc0
'Zan ', # 0xc1
'Ji ', # 0xc2
'Yuan ', # 0xc3
'Ji ', # 0xc4
'Yin ', # 0xc5
'Mi ', # 0xc6
'Kou ', # 0xc7
'Qing ', # 0xc8
'Que ', # 0xc9
'Zhen ', # 0xca
'Jian ', # 0xcb
'Fu ', # 0xcc
'Ning ', # 0xcd
'Bing ', # 0xce
'Huan ', # 0xcf
'Mei ', # 0xd0
'Qin ', # 0xd1
'Han ', # 0xd2
'Yu ', # 0xd3
'Shi ', # 0xd4
'Ning ', # 0xd5
'Qin ', # 0xd6
'Ning ', # 0xd7
'Zhi ', # 0xd8
'Yu ', # 0xd9
'Bao ', # 0xda
'Kuan ', # 0xdb
'Ning ', # 0xdc
'Qin ', # 0xdd
'Mo ', # 0xde
'Cha ', # 0xdf
'Ju ', # 0xe0
'Gua ', # 0xe1
'Qin ', # 0xe2
'Hu ', # 0xe3
'Wu ', # 0xe4
'Liao ', # 0xe5
'Shi ', # 0xe6
'Zhu ', # 0xe7
'Zhai ', # 0xe8
'Shen ', # 0xe9
'Wei ', # 0xea
'Xie ', # 0xeb
'Kuan ', # 0xec
'Hui ', # 0xed
'Liao ', # 0xee
'Jun ', # 0xef
'Huan ', # 0xf0
'Yi ', # 0xf1
'Yi ', # 0xf2
'Bao ', # 0xf3
'Qin ', # 0xf4
'Chong ', # 0xf5
'Bao ', # 0xf6
'Feng ', # 0xf7
'Cun ', # 0xf8
'Dui ', # 0xf9
'Si ', # 0xfa
'Xun ', # 0xfb
'Dao ', # 0xfc
'Lu ', # 0xfd
'Dui ', # 0xfe
'Shou ', # 0xff
)
|
zasdfgbnm/tensorflow | refs/heads/master | tensorflow/contrib/tpu/python/tpu/tpu_infeed_test.py | 85 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Tests for TPU InfeedQueue methods."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.tpu.python.tpu import tpu_feed
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.platform import test
class InfeedTest(test.TestCase):
def testConstructor(self):
"""Tests that the constructor can be called with different arguments."""
i = tpu_feed.InfeedQueue(number_of_tuple_elements=2)
self.assertEqual(i.number_of_tuple_elements, 2)
self.assertEqual(i.tuple_types, None)
self.assertEqual(i.tuple_shapes, None)
self.assertEqual(i.number_of_shards, None)
i = tpu_feed.InfeedQueue(
tuple_types=[dtypes.float32, dtypes.int32, dtypes.int32])
self.assertEqual(i.number_of_tuple_elements, 3)
self.assertEqual(i.tuple_types,
[dtypes.float32, dtypes.int32, dtypes.int32])
self.assertEqual(i.tuple_shapes, None)
self.assertEqual(i.number_of_shards, None)
i = tpu_feed.InfeedQueue(tuple_shapes=[[1], [2, 3]])
self.assertEqual(i.number_of_tuple_elements, 2)
self.assertEqual(i.tuple_types, None)
self.assertEqual(i.tuple_shapes, [[1], [2, 3]])
self.assertEqual(i.number_of_shards, None)
i = tpu_feed.InfeedQueue(shard_dimensions=[1, 0, 7])
self.assertEqual(i.number_of_tuple_elements, 3)
self.assertEqual(i.tuple_types, None)
self.assertEqual(i.tuple_shapes, None)
self.assertEqual([p.shard_dimension
for p in i.sharding_policies], [1, 0, 7])
with self.assertRaises(ValueError):
i = tpu_feed.InfeedQueue()
with self.assertRaises(ValueError):
i = tpu_feed.InfeedQueue(
number_of_tuple_elements=2, tuple_types=[dtypes.float32])
with self.assertRaises(ValueError):
i = tpu_feed.InfeedQueue(number_of_tuple_elements=2, tuple_shapes=[[1]])
with self.assertRaises(ValueError):
i = tpu_feed.InfeedQueue(number_of_tuple_elements=2, shard_dimensions=[1])
with self.assertRaises(ValueError):
i = tpu_feed.InfeedQueue(tuple_shapes=[[1], [2, 3]], shard_dimensions=[1])
def testModification(self):
"""Tests modification of the queue post-construction."""
i = tpu_feed.InfeedQueue(number_of_tuple_elements=2)
i.set_tuple_types([dtypes.float32, dtypes.int32])
self.assertEqual(i.tuple_types, [dtypes.float32, dtypes.int32])
i.set_tuple_types([dtypes.float32, dtypes.float32])
self.assertEqual(i.tuple_types, [dtypes.float32, dtypes.float32])
with self.assertRaises(ValueError):
i.set_tuple_types([dtypes.float32])
i.set_tuple_shapes([[1], [2, 3]])
self.assertEqual(i.tuple_shapes, [[1], [2, 3]])
i.set_tuple_shapes([[1, 2], [3, 4]])
self.assertEqual(i.tuple_shapes, [[1, 2], [3, 4]])
with self.assertRaises(ValueError):
i.set_tuple_shapes([[1, 2]])
i.set_number_of_shards(2)
self.assertEqual(i.number_of_shards, 2)
i.set_number_of_shards(3)
self.assertEqual(i.number_of_shards, 3)
t1 = constant_op.constant(1, dtypes.int32, shape=[6])
t2 = constant_op.constant(2.0, dtypes.float32, shape=[3, 18])
i.set_configuration_from_input_tensors([t1, t2])
self.assertEqual(i.tuple_shapes, [[6], [3, 18]])
self.assertEqual(i.tuple_types, [dtypes.int32, dtypes.float32])
i.set_configuration_from_sharded_input_tensors([[t2, t1], [t2, t1]])
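# With the default shard dimension 0, the two shards of shape [3, 18]
# and [6] concatenate into unsharded tuple shapes [6, 18] and [12],
# which is what the assertions below verify.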
self.assertEqual(i.number_of_shards, 2)
self.assertEqual(i.tuple_shapes, [[6, 18], [12]])
self.assertEqual(i.tuple_types, [dtypes.float32, dtypes.int32])
i.set_shard_dimensions([1, 0])
i.set_number_of_shards(3)
with self.assertRaises(ValueError):
i.set_number_of_shards(4)
def testFreezing(self):
"""Tests freezing the queue."""
i = tpu_feed.InfeedQueue(number_of_tuple_elements=2)
t1 = constant_op.constant(1, dtypes.int32, shape=[2])
t2 = constant_op.constant(2.0, dtypes.float32, shape=[2, 4])
i.set_configuration_from_sharded_input_tensors([[t2, t1], [t2, t1]])
self.assertEqual(i.number_of_shards, 2)
self.assertEqual(i.tuple_shapes, [[4, 4], [4]])
self.assertEqual(i.tuple_types, [dtypes.float32, dtypes.int32])
self.assertEqual(i.shard_dimensions, [0, 0])
i.freeze()
i.set_number_of_shards(2)
i.set_tuple_shapes([[4, 4], [4]])
i.set_tuple_types([dtypes.float32, dtypes.int32])
i.set_shard_dimensions([0, 0])
with self.assertRaises(ValueError):
i.set_number_of_shards(1)
with self.assertRaises(ValueError):
i.set_tuple_shapes([[8, 8], [8]])
with self.assertRaises(ValueError):
i.set_tuple_types([dtypes.int32, dtypes.float32])
with self.assertRaises(ValueError):
i.set_shard_dimensions([1, 0])
self.assertEqual(i.number_of_shards, 2)
self.assertEqual(i.tuple_shapes, [[4, 4], [4]])
self.assertEqual(i.tuple_types, [dtypes.float32, dtypes.int32])
self.assertEqual(i.shard_dimensions, [0, 0])
if __name__ == '__main__':
test.main()
|
mph-/lcapy | refs/heads/master | doc/examples/functions/tri.py | 1 | from matplotlib.pyplot import savefig, style
from lcapy import *
style.use('function.mplstyle')
tri(t).plot((-2, 2), title='tri(t)')
savefig(__file__.replace('.py', '.png'))
|
savoirfairelinux/OpenUpgrade | refs/heads/master | addons/web_tests/tests/__init__.py | 385 | # -*- coding: utf-8 -*-
import test_ui
|
Evegen55/sequences-and-series | refs/heads/master | quizzes/applyRatioTestWithFactorials/__init__.py | 3 | from questions import *
class Question(RandomizedQuestion):
module = __file__
video = 'ratio-test-statement'
forum = 10116
title = 'apply the ratio test on a series involving factorials'
textbook = 'example:ratio-test-factorials'
def good_enough(self):
return self.ell != 1
def perturb(self):
x = var('x')
self.x = var('x')
self.factorial_term = factorial(x) * (randint(1,4) * x + randint(1,4))
self.factorial_term_n = self.factorial_term(x=self.variable)
self.ratio = randint(2,6)
self.power_term = self.ratio**x
self.power_term_n = self.power_term(x=self.variable)
if randint(0,1) == 0:
self.term_x = (self.power_term / self.factorial_term)
else:
self.term_x = (self.factorial_term / self.power_term)
self.term = (self.term_x)(x = self.variable)
self.ell = limit( (self.term_x)(x = x+1) / (self.term_x), x=oo )
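# self.ell is the ratio-test limit L = lim a(x+1)/a(x) as x -> oo;
# good_enough() above rejects L == 1, where the test is inconclusive.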
self.initial = randint(0,9)
def __init__(self):
variables = ['n','n','n','n','n','n','n','m','m','m','k','i','j']
self.variable = var( variables[randint(0, len(variables)-1)] )
self.perturb()
super(Question, self).__init__()
def distractors(self,count):
if self.answer() == 'The series diverges.':
return [('The series converges.',r'Note that in this case \(L = ' + latex(self.ell) + '\).')]
else:
return [('The series diverges.',r'Note that in this case \(L = ' + latex(self.ell) + '\).')]
def verify(self):
try:
if sum(self.term, self.variable, self.initial, oo).is_infinity():
assert( self.answer() == 'The series diverges.' )
else:
assert( self.answer() == 'The series converges.' )
except ValueError:
assert( self.answer() == 'The series diverges.' )
def answer(self):
if self.ell < 1:
return 'The series converges.'
if self.ell > 1:
return 'The series diverges.'
if self.ell.is_infinity():
return 'The series diverges.'
return 'The ratio test is silent as to whether this series converges or diverges.'
|
virt-manager/virt-bootstrap | refs/heads/master | src/virtBootstrap/sources/docker_source.py | 1 | # -*- coding: utf-8 -*-
# Authors: Cedric Bosdonnat <cbosdonnat@suse.com>
#
# Copyright (C) 2017 SUSE, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
DockerSource aims to download a container image from a Docker registry and
extract the layers of its root file system into a destination directory or
qcow2 image with backing chains.
"""
import select
import shutil
import getpass
import os
import logging
import subprocess
from virtBootstrap import utils
# pylint: disable=invalid-name
# Create logger
logger = logging.getLogger(__name__)
class DockerSource(object):
"""
Extract files from Docker image
"""
# pylint: disable=too-many-instance-attributes
def __init__(self, **kwargs):
"""
Bootstrap root filesystem from Docker registry
@param uri: Address of source registry
@param username: Username to access source registry
@param password: Password to access source registry
@param uid_map: Mappings for UID of files in rootfs
@param gid_map: Mappings for GID of files in rootfs
@param root_password: Root password to set in rootfs
@param fmt: Format used to store image [dir, qcow2]
@param not_secure: Do not require HTTPS and certificate verification
@param no_cache: Whether to store downloaded images or not
@param progress: Instance of the progress module
Note: uid_map and gid_map have the format:
[[<start>, <target>, <count>], [<start>, <target>, <count>] ...]
"""
# Check if skopeo is installed
if not utils.is_installed('skopeo'):
raise RuntimeError('skopeo is not installed')
self.url = self.gen_valid_uri(kwargs['uri'])
self.username = kwargs.get('username', None)
self.password = kwargs.get('password', None)
self.uid_map = kwargs.get('uid_map', [])
self.gid_map = kwargs.get('gid_map', [])
self.root_password = kwargs.get('root_password', None)
self.output_format = kwargs.get('fmt', utils.DEFAULT_OUTPUT_FORMAT)
self.insecure = kwargs.get('not_secure', False)
self.no_cache = kwargs.get('no_cache', False)
self.progress = kwargs['progress'].update_progress
self.images_dir = utils.get_image_dir(self.no_cache)
self.image_details = None
self.layers = []
self.checksums = []
if self.username and not self.password:
self.password = getpass.getpass()
self.retrieve_layers_info()
def retrieve_layers_info(self):
"""
Retrieve the manifest from the registry and collect each layer's
digest, sum_type, size and file_path in a list.
"""
image_details = utils.get_image_details(self.url, raw=False,
insecure=self.insecure,
username=self.username,
password=self.password)
if 'Layers' not in image_details or not image_details['Layers']:
raise ValueError('No image layers.')
# Layers are in order:
# - root layer first, and then successive layered layers
# Ref: https://github.com/containers/image/blob/master/image/oci.go
for layer_digest in image_details['Layers']:
sum_type, layer_sum = layer_digest.split(':')
self.checksums.append([sum_type, layer_sum]) # Store checksums
# Layers are tar files with hashsum used as name
file_path = os.path.join(self.images_dir, layer_sum)
# Store 'file path' and set placeholder for 'size'
self.layers.append([file_path, None])
def gen_valid_uri(self, uri):
"""
Generate Docker URI in format accepted by skopeo.
"""
registry = uri.netloc
image = uri.path
# Convert "docker:///<image>" to "docker://<image>"
if not registry and image.startswith('/'):
image = image[1:]
# Convert "docker://<image>/" to "docker://<image>"
if image.endswith('/'):
image = image[:-1]
return "docker://" + registry + image
def download_image(self):
"""
Download image layers using "skopeo copy".
"""
if self.no_cache:
dest_dir = self.images_dir
else:
dest_dir = utils.get_image_dir(no_cache=True)
# Note: we don't want to expose --src-cert-dir to users as
# they should place the certificates in the system
# folders for broader enablement
skopeo_copy = ["skopeo", "copy", self.url, "dir:" + dest_dir]
if self.insecure:
skopeo_copy.append('--src-tls-verify=false')
if self.username:
skopeo_copy.append('--src-creds={}:{}'.format(self.username,
self.password))
self.progress("Downloading container image", value=0, logger=logger)
# Run "skopeo copy" command
self.read_skopeo_progress(skopeo_copy)
if not self.no_cache:
os.remove(os.path.join(dest_dir, "manifest.json"))
os.remove(os.path.join(dest_dir, "version"))
utils.copytree(dest_dir, self.images_dir)
shutil.rmtree(dest_dir)
# Old versions of skopeo add a '.tar' extension to blob file names.
# Make sure we use the correct file name.
for i in range(len(self.layers)):
path = self.layers[i][0]
if not os.path.exists(path):
if os.path.exists(path + '.tar'):
self.layers[i][0] += '.tar'
else:
raise ValueError('Blob %s does not exist.' % path)
def parse_output(self, proc):
"""
Read stdout from skopeo's process asynchronously.
"""
current_layer, total_layers_num = 0, len(self.layers)
# Process the output until the process terminates
while proc.poll() is None:
# Wait for data to become available
stdout = select.select([proc.stdout], [], [])[0]
# Split output into line
output = utils.read_async(stdout[0]).strip().split('\n')
for line in output:
line_split = line.split()
if len(line_split) > 2: # Avoid short lines
if utils.is_new_layer_message(line):
current_layer += 1
self.progress("Downloading layer (%s/%s)"
% (current_layer, total_layers_num))
# Use the single slash between layer's "downloaded" and
# "total size" in the output to recognise progress message
elif line_split[2] == '/':
self.update_progress_from_output(line_split,
current_layer,
total_layers_num)
# Stop parsing when manifest is copied.
elif utils.is_layer_config_message(line):
break
else:
continue # continue if the inner loop didn't break
break
if proc.poll() is None:
proc.wait() # Wait until the process is finished
return proc.returncode == 0
def update_progress_from_output(self, line_split, current_l, total_l):
"""
Parse a line from skopeo's output to extract the downloaded and
total size of image layer.
Calculate percentage and update the progress of virt-bootstrap.
@param current_l: Number of currently downloaded layer
@param total_l: Total number of layers
@param line_split: A list with format:
[<d_size>, <d_format>, '/', <t_size>, <t_format>, <progress>]
Example:
['5.92', 'MB', '/', '44.96', 'MB', '[===>-----------------]']
"""
if not (len(line_split) > 4 and isinstance(line_split, list)):
return
d_size, d_format = utils.str2float(line_split[0]), line_split[1]
t_size, t_format = utils.str2float(line_split[3]), line_split[4]
if d_size and t_size:
downloaded_size = utils.size_to_bytes(d_size, d_format)
total_size = utils.size_to_bytes(t_size, t_format)
if downloaded_size and total_size:
try:
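# Downloading maps onto the first half (0-50%) of the overall
# progress bar; unpack() reports the remaining 50-100%.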
frac = float(1) / total_l
downloaded = float(downloaded_size) / total_size
layer_frac = float(max(0, current_l - 1)) / total_l
progress = 50 * (layer_frac + (frac * downloaded))
self.progress(value=progress)
except Exception:
pass # Ignore failures
def read_skopeo_progress(self, cmd):
"""
Parse the output from skopeo copy to track download progress.
"""
proc = subprocess.Popen(
cmd,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
universal_newlines=True
)
# Without `make_async`, `fd.read` in `read_async` blocks.
utils.make_async(proc.stdout)
if not self.parse_output(proc):
raise subprocess.CalledProcessError(proc.returncode, ' '.join(cmd))
def validate_image_layers(self):
"""
Check if layers of container image exist in image_dir
and have valid hash sum.
"""
self.progress("Checking cached layers", value=0, logger=logger)
for index, checksum in enumerate(self.checksums):
path = self.layers[index][0]
sum_type, sum_expected = checksum
logger.debug("Checking layer: %s", path)
if (os.path.exists(path)
and utils.checksum(path, sum_type, sum_expected)):
continue
if (not path.endswith('.tar')
and os.path.exists(path + '.tar')
and utils.checksum(path + '.tar', sum_type, sum_expected)):
self.layers[index][0] += '.tar'
else:
return False
return True
def fetch_layers(self):
"""
Retrieve layers of container image.
"""
# Check if layers have been downloaded
if not self.validate_image_layers():
self.download_image()
def unpack(self, dest):
"""
Extract image files from Docker image
@param dest: Directory path where the files are to be extracted
"""
try:
# Layers are in order - root layer first
# Reference:
# https://github.com/containers/image/blob/master/image/oci.go#L100
self.fetch_layers()
# Unpack to destination directory
if self.output_format == 'dir':
self.progress("Extracting container layers", value=50,
logger=logger)
utils.untar_layers(self.layers, dest, self.progress)
elif self.output_format == 'qcow2':
self.progress("Extracting container layers into qcow2 images",
value=50, logger=logger)
img = utils.BuildImage(
layers=self.layers,
dest=dest,
progress=self.progress
)
img.create_base_layer()
img.create_backing_chains()
img.set_root_password(self.root_password)
if self.uid_map or self.gid_map:
logger.info("Mapping UID/GID")
utils.map_id_in_image(
len(self.layers), # Number of layers
dest, self.uid_map,
self.gid_map,
(self.root_password is None) # Create new disk?
)
else:
raise Exception("Unknown format:" + self.output_format)
except Exception:
raise
else:
self.progress("Download and extract completed!", value=100,
logger=logger)
logger.info("Files are stored in: %s", dest)
finally:
# Clean up
if self.no_cache and self.images_dir != utils.DEFAULT_IMG_DIR:
shutil.rmtree(self.images_dir)
|
nickjj/flask-webpack | refs/heads/master | flask_webpack/tests/test_app/app.py | 4 | from flask import Flask, render_template
from werkzeug.serving import run_simple
from flask_webpack import Webpack
webpack = Webpack()
def create_app(settings_override=None):
"""
Create a test application.
:param settings_override: Override settings
:type settings_override: dict
:return: Flask app
"""
app = Flask(__name__)
params = {
'DEBUG': True,
'WEBPACK_MANIFEST_PATH': './build/manifest.json'
}
app.config.update(params)
if settings_override:
app.config.update(settings_override)
webpack.init_app(app)
return app
app = create_app()
@app.route('/')
def index():
return render_template('index.jinja2')
if __name__ == '__main__':
run_simple('localhost', 5000, app, use_reloader=True, use_debugger=True)
|
epri-dev/PT2 | refs/heads/master | res/scripts/arp_poison_hosts.py | 1 | #!/usr/bin/env python
import os
import sys
import subprocess
import signal
interface = sys.argv[1]
host1 = sys.argv[2]
host2 = sys.argv[3]
if interface.strip() == "":
c1 = "arpspoof -t %s -r %s" % (host1, host2)
else:
c1 = "arpspoof -i %s -t %s -r %s" % (interface, host1, host2)
p1 = subprocess.Popen(c1.split(), shell=False, preexec_fn=os.setsid)
print c1
print "Arp poisoning running for hosts: %s, %s" % (host1, host2)
sys.stdout.flush()
def signal_handler(signal, frame):
print 'Killing pid %d' % p1.pid
#os.kill(p1.pid, 9)
p1.terminate()
print 'Exiting now!'
sys.stdout.flush()
sys.exit(0)
signal.signal(signal.SIGTERM, signal_handler)
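# Busy-wait until SIGTERM arrives; signal_handler then terminates
# arpspoof and exits. (signal.pause() would be gentler on the CPU.)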
while(1):
pass
|
eadgarchen/tensorflow | refs/heads/master | tensorflow/contrib/ffmpeg/encode_audio_op_test.py | 54 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Tests for third_party.tensorflow.contrib.ffmpeg.encode_audio_op."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os.path
import six
from tensorflow.contrib import ffmpeg
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import array_ops
from tensorflow.python.platform import resource_loader
from tensorflow.python.platform import test
class EncodeAudioOpTest(test.TestCase):
def setUp(self):
super(EncodeAudioOpTest, self).setUp()
path = os.path.join(resource_loader.get_data_files_path(),
'testdata/mono_10khz.wav')
with open(path, 'rb') as f:
self._contents = f.read()
def _compareWavFiles(self, original, encoded):
"""Compares the important bits of two WAV files.
Some encoders will create a slightly different header to the WAV file.
This compares only the important bits of the header as well as the contents.
Args:
original: Contents of the original .wav file.
encoded: Contents of the new, encoded .wav file.
"""
self.assertLess(44, len(original))
self.assertLess(44, len(encoded))
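# Canonical WAV layout: bytes 0-3 "RIFF", 4-7 file size, 8-15
# "WAVE"/"fmt " tags, 16-19 fmt chunk size, 20-35 format fields.
# Encoders may disagree on the size fields, hence the gaps below.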
self.assertEqual(original[:4], encoded[:4])
# Skip file size
self.assertEqual(original[8:16], encoded[8:16])
# Skip header size
self.assertEqual(original[20:36], encoded[20:36])
# Skip extra bits inserted by ffmpeg.
self.assertEqual(original[original.find(b'data'):],
encoded[encoded.find(b'data'):])
def testRoundTrip(self):
"""Reads a wav file, writes it, and compares them."""
with self.test_session():
audio_op = ffmpeg.decode_audio(
self._contents,
file_format='wav',
samples_per_second=10000,
channel_count=1)
encode_op = ffmpeg.encode_audio(
audio_op, file_format='wav', samples_per_second=10000)
encoded_contents = encode_op.eval()
self._compareWavFiles(self._contents, encoded_contents)
def testRoundTripWithPlaceholderSampleRate(self):
with self.test_session():
placeholder = array_ops.placeholder(dtypes.int32)
audio_op = ffmpeg.decode_audio(
self._contents,
file_format='wav',
samples_per_second=placeholder,
channel_count=1)
encode_op = ffmpeg.encode_audio(
audio_op, file_format='wav', samples_per_second=placeholder)
encoded_contents = encode_op.eval(feed_dict={placeholder: 10000})
self._compareWavFiles(self._contents, encoded_contents)
def testFloatingPointSampleRateInvalid(self):
with self.test_session():
with self.assertRaises(TypeError):
ffmpeg.encode_audio(
[[0.0], [1.0]],
file_format='wav',
samples_per_second=12345.678)
def testZeroSampleRateInvalid(self):
with self.test_session() as sess:
encode_op = ffmpeg.encode_audio(
[[0.0], [1.0]],
file_format='wav',
samples_per_second=0)
with six.assertRaisesRegex(self, Exception, 'must be positive'):
sess.run(encode_op)
def testNegativeSampleRateInvalid(self):
with self.test_session() as sess:
encode_op = ffmpeg.encode_audio(
[[0.0], [1.0]],
file_format='wav',
samples_per_second=-2)
with six.assertRaisesRegex(self, Exception, 'must be positive'):
sess.run(encode_op)
if __name__ == '__main__':
test.main()
|
gearslam/v20j-geeb | refs/heads/master | scripts/gcc-wrapper.py | 2 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2011-2012, Code Aurora Forum. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Code Aurora nor
# the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NON-INFRINGEMENT ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Invoke gcc, looking for warnings, and causing a failure if there are
# non-whitelisted warnings.
import errno
import re
import os
import sys
import subprocess
# Note that gcc uses unicode, which may depend on the locale. TODO:
# force LANG to be set to en_US.UTF-8 to get consistent warnings.
allowed_warnings = set([
"alignment.c:327",
"mmu.c:602",
"return_address.c:62",
"wlan_hdd_ftm.c:2291",
"csrUtil.c:2492",
"csrApiScan.c:3376",
"wlan_qct_tl.c:10619",
"csrApiRoam.c:13686",
"csrApiRoam.c:8304",
"wlan_qct_wda.c:1581",
])
# Capture the name of the object file, so it can be removed if a forbidden warning is found.
ofile = None
warning_re = re.compile(r'''(.*/|)([^/]+\.[a-z]+:\d+):(\d+:)? warning:''')
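# Group 2 of warning_re is the "file.ext:line" portion that is checked
# against the allowed_warnings whitelist above.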
def interpret_warning(line):
"""Decode the message from gcc. The messages we care about have a filename, and a warning"""
line = line.rstrip('\n')
m = warning_re.match(line)
if m and m.group(2) not in allowed_warnings:
print "error, forbidden warning:", m.group(2)
# If there is a warning, remove any object if it exists.
if ofile:
try:
os.remove(ofile)
except OSError:
pass
sys.exit(1)
def run_gcc():
args = sys.argv[1:]
# Look for -o
try:
i = args.index('-o')
global ofile
ofile = args[i+1]
except (ValueError, IndexError):
pass
compiler = sys.argv[0]
try:
proc = subprocess.Popen(args, stderr=subprocess.PIPE)
for line in proc.stderr:
print line,
interpret_warning(line)
result = proc.wait()
except OSError as e:
result = e.errno
if result == errno.ENOENT:
print args[0] + ':',e.strerror
print 'Is your PATH set correctly?'
else:
print ' '.join(args), str(e)
return result
if __name__ == '__main__':
status = run_gcc()
sys.exit(status)
|
testiddd/ShaniXBMCWork | refs/heads/master | legacy/temp/test.py | 12 | import urllib2,re
url="http://www.hdarabic.com/"
req = urllib2.Request(url)
req.add_header('User-Agent', 'Mozilla/5.0(iPad; U; CPU iPhone OS 3_2 like Mac OS X; en-us) AppleWebKit/531.21.10 (KHTML, like Gecko) Version/4.0.4 Mobile/7B314 Safari/531.21.10')
response = urllib2.urlopen(req)
link=response.read()
response.close()
match =re.findall('iptv.php.*nume\">(.*?)<.*src=\"\.\/images\/(.*?)\.',link, re.IGNORECASE)
total=0
totalfound=0
try:
if len(match)>0:
total=len(match)
totalfound=0
for name1,name2 in match:
trynum=1
found=False
while trynum<=5 and not found:
if trynum==1:
newurl=url+name2.strip()+'.php'
newurl=newurl.replace(' ','_').lower()
elif trynum==2:
newurl=url+name1.strip()+'.php'
newurl=newurl.replace(' ','_').lower()
elif trynum==5:
newurl=url+name2.strip()+'.php'
newurl=newurl.replace(' ','').lower()
elif trynum==4:
newurl=url+name2.strip()+'.php'
newurl=newurl.replace(' ','').lower()
elif trynum==3:
newurl=url+name1.strip().replace('Al ','')+'.php'
newurl=newurl.replace(' ','_').lower()
try:
req = urllib2.Request(newurl)
req.add_header('User-Agent', 'Mozilla/5.0(iPad; U; CPU iPhone OS 3_2 like Mac OS X; en-us) AppleWebKit/531.21.10 (KHTML, like Gecko) Version/4.0.4 Mobile/7B314 Safari/531.21.10')
response = urllib2.urlopen(req)
link=response.read()
found=True
print '(\''+name1 +'\',\''+newurl+'\',\''+name2+'\'),'
#print newurl
except KeyboardInterrupt: raise
except: pass
trynum=trynum+1
if not found:
print 'not found' + name1
else: totalfound+=1
except KeyboardInterrupt:
print 'Stopped!'
print 'Total tried %d, found %d'%(total,totalfound)
|
pinkavaj/rstt | refs/heads/master | rstt_cli/calibration.py | 1 | import struct
class CalibrationCollector(object):
"""Collect calibration data from fragments."""
def __init__(self):
self._missing = [True, ] * 32
self._fragments = [None, ] * 32
self._data = None
def addFragment(self, idx, data):
"""Process one subframe, with calibration data."""
self._fragments[idx] = data
self._missing[idx] = False
return self.completed()
def calibration(self):
"""Return processed calibration data."""
return Calibration(self.data())
def completed(self):
"""Return True if all fragments are collected."""
if [x for x in self._missing if x]:
return False
return True
def data(self):
return b''.join(self._fragments)
class Calibration(object):
"""Parse calibration data."""
def __init__(self, data):
self._d_0 = data[0:2] # TODO
self._d_freq = struct.unpack('<H', data[2:4])
self._d_count_1 = struct.unpack('<H', data[4:6])
self._d_6 = struct.unpack('<H', data[6:8]) # TODO
self._d_8 = struct.unpack('<h', data[8:10]) # TODO
self._d_10 = struct.unpack('<h', data[10:12]) # TODO
self._d_id = struct.unpack('10s', data[22:32])[0].decode('ascii')
self._d_block_32 = data[32:36] # TODO
self._d_36 = struct.unpack('<7h', data[0x24:0x32]) # TODO
self._d_50 = struct.unpack('<3h', data[0x32:0x38]) # TODO
self._d_56 = data[56:64]
self._d_f = {}
for idx in range(64, 511-4, 5):
ch, f = struct.unpack('<Bf', data[idx:idx+5])
if ch:
ch, k = ch // 10, ch % 10
v = self._d_f.get(ch, [None, ]*8)
v[k] = f
self._d_f[ch] = v
def __repr__(self):
s = 'calibration = {\n'
c = [' %s: %s,\n' % (x, self._d_f[x]) for x in self._d_f]
s += ' "calib": {\n%s },\n' % ''.join(c)
s += '}'
return s
def _poly(self, x, n):
"""Pass x trought calibration polynom with index n."""
p = [v or 0. for v in self._d_f[n]]
return p[0] + x*(p[1] + x*(p[2] + x*(p[3] + x*(p[4] + x*p[5]))))
return x
def evalMeas(self, measData):
meas = {}
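# Channel semantics below are inferred from the code itself (the
# original source documents none of it): ch7/ch8 average to a low
# reference, ch4/ch5 give high references, ch2/ch3 carry the humidity
# sensors, and U is the larger of the two calibrated readings.
# Note that r_hi1 is computed but not used in this version.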
r_lo = (measData.ch7+measData.ch8) / 2
r_hi1 = measData.ch4 - r_lo
r_hi2 = measData.ch5 - r_lo
u1 = self._poly(r_hi2 / (measData.ch2 - r_lo), 4)
u2 = self._poly(r_hi2 / (measData.ch3 - r_lo), 5)
meas['U'] = max(u1, u2)
meas['P'] = float('NAN')
meas['T'] = float('NAN')
return meas
if __name__ == '__main__':
import sys
if len(sys.argv) != 2:
print("%s <INPUT FILE>")
sys.exit(1)
data = open(sys.argv[1], 'rb').read()
c = Calibration(data)
print(c)
|
tsabi/Odoo-tsabi-fixes | refs/heads/master | addons/event/report/__init__.py | 435 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import report_event_registration
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
jgcaaprom/android_external_chromium_org | refs/heads/cm-12.1 | third_party/closure_compiler/processor.py | 32 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Process Chrome resources (HTML/CSS/JS) to handle <include> and <if> tags."""
from collections import defaultdict
import re
import os
class LineNumber(object):
"""A simple wrapper to hold line information (e.g. file.js:32).
Args:
source_file: A file path.
line_number: The line in |file|.
"""
def __init__(self, source_file, line_number):
self.file = source_file
self.line_number = int(line_number)
class FileCache(object):
"""An in-memory cache to speed up reading the same files over and over.
Usage:
FileCache.read(path_to_file)
"""
_cache = defaultdict(str)
@classmethod
def read(cls, source_file):
"""Read a file and return it as a string.
Args:
source_file: a file to read and return the contents of.
Returns:
|source_file| as a string.
"""
abs_file = os.path.abspath(source_file)
cls._cache[abs_file] = cls._cache[abs_file] or open(abs_file, "r").read()
return cls._cache[abs_file]
class Processor(object):
"""Processes resource files, inlining the contents of <include> tags, removing
<if> tags, and retaining original line info.
For example
1: /* blah.js */
2: <if expr="is_win">
3: <include src="win.js">
4: </if>
would be turned into:
1: /* blah.js */
2:
3: /* win.js */
4: alert('Ew; Windows.');
5:
Args:
source_file: A file to process.
Attributes:
contents: Expanded contents after inlining <include>s and stripping <if>s.
included_files: A list of files that were inlined via <include>.
"""
_IF_TAGS_REG = "</?if[^>]*?>"
_INCLUDE_REG = "<include[^>]+src=['\"]([^>]*)['\"]>"
def __init__(self, source_file):
self._included_files = set()
self._index = 0
self._lines = self._get_file(source_file)
while self._index < len(self._lines):
current_line = self._lines[self._index]
match = re.search(self._INCLUDE_REG, current_line[2])
if match:
file_dir = os.path.dirname(current_line[0])
self._include_file(os.path.join(file_dir, match.group(1)))
else:
self._index += 1
for i, line in enumerate(self._lines):
self._lines[i] = line[:2] + (re.sub(self._IF_TAGS_REG, "", line[2]),)
self.contents = "\n".join(l[2] for l in self._lines)
# Returns a list of tuples in the format: (file, line number, line contents).
def _get_file(self, source_file):
lines = FileCache.read(source_file).splitlines()
return [(source_file, lnum + 1, line) for lnum, line in enumerate(lines)]
def _include_file(self, source_file):
self._included_files.add(source_file)
f = self._get_file(source_file)
self._lines = self._lines[:self._index] + f + self._lines[self._index + 1:]
def get_file_from_line(self, line_number):
"""Get the original file and line number for an expanded file's line number.
Args:
line_number: A processed file's line number.
"""
line_number = int(line_number) - 1
return LineNumber(self._lines[line_number][0], self._lines[line_number][1])
@property
def included_files(self):
"""A list of files that were inlined via <include>."""
return self._included_files
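# Illustrative usage (a sketch, not part of the original module):
#
# processor = Processor('page.js')
# flattened = processor.contents # source with <include>s inlined
# origin = processor.get_file_from_line(3) # LineNumber for expanded line 3
# print '%s:%d' % (origin.file, origin.line_number)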
|
3nids/QGIS | refs/heads/master | tests/src/python/test_authmanager_proxy.py | 30 | # -*- coding: utf-8 -*-
"""
Tests for auth manager Basic configuration update proxy
From build dir, run from test directory:
LC_ALL=en_US.UTF-8 ctest -R PyQgsAuthManagerProxy -V
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
import os
import re
import string
import sys
from shutil import rmtree
import tempfile
import random
from qgis.core import QgsAuthManager, QgsAuthMethodConfig, QgsNetworkAccessManager, QgsSettings, QgsApplication
from qgis.testing import start_app, unittest
__author__ = 'Alessandro Pasotti'
__date__ = '27/09/2017'
__copyright__ = 'Copyright 2017, The QGIS Project'
QGIS_AUTH_DB_DIR_PATH = tempfile.mkdtemp()
os.environ['QGIS_AUTH_DB_DIR_PATH'] = QGIS_AUTH_DB_DIR_PATH
qgis_app = start_app()
class TestAuthManager(unittest.TestCase):
@classmethod
def setUpClass(cls):
"""Run before all tests:
Creates an auth configuration"""
# Enable auth
# os.environ['QGIS_AUTH_PASSWORD_FILE'] = QGIS_AUTH_PASSWORD_FILE
authm = QgsApplication.authManager()
assert (authm.setMasterPassword('masterpassword', True))
cls.auth_config = QgsAuthMethodConfig('Basic')
cls.auth_config.setName('test_auth_config')
cls.username = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(6))
cls.password = cls.username[::-1] # reversed
cls.auth_config.setConfig('username', cls.username)
cls.auth_config.setConfig('password', cls.password)
assert (authm.storeAuthenticationConfig(cls.auth_config)[0])
@classmethod
def tearDownClass(cls):
"""Run after all tests"""
rmtree(QGIS_AUTH_DB_DIR_PATH)
def setUp(self):
"""Run before each test."""
pass
def tearDown(self):
"""Run after each test."""
pass
def testProxyIsUpdated(self):
"""
Test that proxy is updated
"""
authm = QgsApplication.authManager()
nam = QgsNetworkAccessManager.instance()
proxy = nam.proxy()
self.assertEqual(proxy.password(), '')
self.assertEqual(proxy.user(), '')
self.assertTrue(authm.updateNetworkProxy(proxy, self.auth_config.id()))
self.assertEqual(proxy.user(), self.username)
self.assertEqual(proxy.password(), self.password)
def testProxyIsUpdatedByUserSettings(self):
"""
Test that proxy is updated
"""
nam = QgsNetworkAccessManager.instance()
nam.setupDefaultProxyAndCache()
proxy = nam.proxy()
self.assertEqual(proxy.password(), '')
self.assertEqual(proxy.user(), '')
settings = QgsSettings()
settings.setValue("proxy/authcfg", self.auth_config.id())
settings.setValue("proxy/proxyEnabled", True)
del (settings)
nam.setupDefaultProxyAndCache()
proxy = nam.fallbackProxy()
self.assertEqual(proxy.password(), self.password)
self.assertEqual(proxy.user(), self.username)
if __name__ == '__main__':
unittest.main()
|
fast90/youtube-dl | refs/heads/master | youtube_dl/extractor/photobucket.py | 90 | from __future__ import unicode_literals
import json
import re
from .common import InfoExtractor
from ..compat import compat_urllib_parse_unquote
class PhotobucketIE(InfoExtractor):
_VALID_URL = r'https?://(?:[a-z0-9]+\.)?photobucket\.com/.*(([\?\&]current=)|_)(?P<id>.*)\.(?P<ext>(flv)|(mp4))'
_TEST = {
'url': 'http://media.photobucket.com/user/rachaneronas/media/TiredofLinkBuildingTryBacklinkMyDomaincom_zpsc0c3b9fa.mp4.html?filters[term]=search&filters[primary]=videos&filters[secondary]=images&sort=1&o=0',
'md5': '7dabfb92b0a31f6c16cebc0f8e60ff99',
'info_dict': {
'id': 'zpsc0c3b9fa',
'ext': 'mp4',
'timestamp': 1367669341,
'upload_date': '20130504',
'uploader': 'rachaneronas',
'title': 'Tired of Link Building? Try BacklinkMyDomain.com!',
}
}
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group('id')
video_extension = mobj.group('ext')
webpage = self._download_webpage(url, video_id)
# Extract URL, uploader, and title from webpage
self.report_extraction(video_id)
info_json = self._search_regex(r'Pb\.Data\.Shared\.put\(Pb\.Data\.Shared\.MEDIA, (.*?)\);',
webpage, 'info json')
info = json.loads(info_json)
url = compat_urllib_parse_unquote(self._html_search_regex(r'file=(.+\.mp4)', info['linkcodes']['html'], 'url'))
return {
'id': video_id,
'url': url,
'uploader': info['username'],
'timestamp': info['creationDate'],
'title': info['title'],
'ext': video_extension,
'thumbnail': info['thumbUrl'],
}
|
Jai-Chaudhary/termite-data-server | refs/heads/master | web2py/gluon/contrib/gae_memcache.py | 9 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Developed by Robin Bhattacharyya (memecache for GAE)
Released under the web2py license (LGPL)
from gluon.contrib.gae_memcache import MemcacheClient
cache.ram=cache.disk=MemcacheClient(request)
"""
import time
from google.appengine.api.memcache import Client
class MemcacheClient(object):
client = Client()
def __init__(self, request, default_time_expire = 300):
self.request = request
self.default_time_expire = default_time_expire
def __call__(
self,
key,
f,
time_expire=None,
):
if time_expire is None:
time_expire = self.default_time_expire
key = '%s/%s' % (self.request.application, key)
value = None
obj = self.client.get(key)
if obj:
value = obj[1]
elif f is not None:
value = f()
self.client.set(key, (time.time(), value), time=time_expire)
return value
def increment(self, key, value=1):
key = '%s/%s' % (self.request.application, key)
obj = self.client.get(key)
if obj:
value = obj[1] + value
self.client.set(key, (time.time(), value))
return value
def incr(self, key, value=1):
return self.increment(key, value)
def clear(self, key=None):
if key:
key = '%s/%s' % (self.request.application, key)
self.client.delete(key)
else:
self.client.flush_all()
def delete(self, *a, **b):
return self.client.delete(*a, **b)
def get(self, *a, **b):
return self.client.get(*a, **b)
def set(self, *a, **b):
return self.client.set(*a, **b)
def flush_all(self, *a, **b):
return self.client.flush_all(*a, **b)
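# Illustrative usage inside a web2py model (a sketch; assumes the GAE
# runtime and a web2py request object; compute() is hypothetical):
#
# cache.ram = cache.disk = MemcacheClient(request)
# value = cache.ram('expensive', lambda: compute(), time_expire=60)
# hits = cache.ram.increment('hits')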
|
mkudlej/usmqe-tests | refs/heads/master | usmqe_tests/rpm/test_rpm.py | 2 | # -*- coding: utf8 -*-
import subprocess
import tempfile
import pytest
from packagelist import list_packages, tendrl_packages
LOGGER = pytest.get_logger(__name__, module=True)
def test_repoclosure(tendrl_repos, centos_repos):
cmd = ["repoclosure", "--newest"]
# configure standard CentOS repositories
for name, url in centos_repos.items():
cmd.append("--repofrompath")
cmd.append("{},{}".format(name, url))
cmd.append("--lookaside={}".format(name))
# configure tendrl repository (passed via tendrl_repos fixture)
for name, baseurl in tendrl_repos.items():
cmd.append("--repofrompath")
cmd.append("{},{}".format(name, baseurl))
# we expect that other repositories are for dependencies
if name != "tendrl-core":
cmd.append("--lookaside={}".format(name))
cmd.append("--repoid=tendrl-core")
# running repoclosure
LOGGER.info(" ".join(cmd))
with tempfile.TemporaryDirectory() as tmpdirname:
cp = subprocess.run(
cmd,
cwd=tmpdirname,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
LOGGER.debug("STDOUT: %s", cp.stdout)
LOGGER.debug("STDERR: %s", cp.stderr)
check_msg = "repoclosure return code should be 0 indicating no errors"
pytest.check(cp.returncode == 0, msg=check_msg)
# when the check fails, report the error in readable way
if cp.returncode != 0:
for line in cp.stdout.splitlines():
LOGGER.failed(line.decode())
for line in cp.stderr.splitlines():
LOGGER.failed(line.decode())
def test_repo_packagelist(tendrl_repos):
"""
Check that tendrl core repository contains all expected tendrl packages and
doesn't contain anything else.
"""
LOGGER.info(
"expected tendrl-core packages are: " + ",".join(tendrl_packages))
# get actual list of packages from tendrl-core repository (via repoquery)
packages = list_packages('tendrl-core')
for rpm_name in tendrl_packages:
msg = "package {} should be present in tendrl-core repo"
package_present = rpm_name in packages
pytest.check(package_present, msg.format(rpm_name))
if package_present:
packages.remove(rpm_name)
pytest.check(packages == [], msg="there should be no extra packages")
for rpm_name in packages:
LOGGER.failed("unexpected package in tendrl-core: {}".format(rpm_name))
def test_rpmlint(rpm_package):
rpm_name, rpm_path = rpm_package
cmd = ["rpmlint", rpm_path]
# running rpmlint
LOGGER.info(" ".join(cmd))
cp = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
LOGGER.debug("STDOUT: %s", cp.stdout)
LOGGER.debug("STDERR: %s", cp.stderr)
LOGGER.debug("RCODE: %s", cp.returncode)
# report every line on stdout as an error in readable way
# (except expected strings)
for line in cp.stdout.splitlines():
line_str = line.decode()
if "E: unknown-key" in line_str or line_str.startswith("1 packages"):
continue
LOGGER.failed(line_str)
@pytest.mark.parametrize("check_command", [
"check-sat",
"check-conflicts",
"check-upgrade",
])
def test_rpmdeplint(rpm_package, check_command, tendrl_repos, centos_repos):
rpm_name, rpm_path = rpm_package
cmd = ["rpmdeplint", check_command, "--arch", "x86_64"]
# configure systemd default repositories
for name, url in centos_repos.items():
cmd.append("--repo")
cmd.append("{},{}".format(name, url))
# configure tendrl repository (passed via tendrl_repos fixture)
for name, baseurl in tendrl_repos.items():
cmd.append("--repo")
cmd.append("{},{}".format(name, baseurl))
# and last but not least: specify the package
cmd.append(rpm_path)
# running rpmdeplint
LOGGER.info(" ".join(cmd))
cp = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
LOGGER.debug("STDOUT: %s", cp.stdout)
LOGGER.debug("STDERR: %s", cp.stderr)
LOGGER.debug("RCODE: %s", cp.returncode)
# when the check fails, report the error in readable way
if cp.returncode != 0:
for line in cp.stderr.splitlines():
line_str = line.decode()
if "Undeclared file conflicts:" == line_str:
LOGGER.debug(line_str)
continue
if "provides /etc/grafana/grafana.ini which is also provided by " \
"grafana-4.1.2-1486989747.x86_64" in line_str:
LOGGER.debug("IGNORING (old grafana packages): %s", line_str)
continue
LOGGER.failed(line_str)
|
MounirMesselmeni/django | refs/heads/master | tests/auth_tests/backend_alias.py | 512 | # For testing that auth backends can be referenced using a convenience import
from .test_auth_backends import ImportedModelBackend
__all__ = ['ImportedModelBackend']
|
cdegroc/scikit-learn | refs/heads/master | examples/semi_supervised/plot_label_propagation_structure.py | 1 | """
==============================================
Label Propagation learning a complex structure
==============================================
Example of LabelPropagation learning a complex internal structure
to demonstrate "manifold learning". The outer circle should be
labeled "red" and the inner circle "blue". Because both label groups
lie inside their own distinct shape, we can see that the labels
propagate correctly around the circle.
"""
print __doc__
# Authors: Clay Woolam <clay@woolam.org>
# Licence: BSD
import numpy as np
import pylab as pl
from sklearn.semi_supervised import label_propagation
# generate ring with inner box
n_samples_per_circle = 100
outer_circ_xs = np.cos(np.linspace(0, 2 * np.pi, n_samples_per_circle))
outer_circ_ys = np.sin(np.linspace(0, 2 * np.pi, n_samples_per_circle))
inner_circ_xs = np.cos(np.linspace(0, 2 * np.pi, n_samples_per_circle)) * 0.8
inner_circ_ys = np.sin(np.linspace(0, 2 * np.pi, n_samples_per_circle)) * 0.8
all_xs = np.append(outer_circ_xs, inner_circ_xs)
all_ys = np.append(outer_circ_ys, inner_circ_ys)
data = np.vstack((all_xs, all_ys)).T
outer, inner = 0, 1
labels = [outer] + \
[-1 for x in range(0, n_samples_per_circle - 1)] + \
[inner] + \
[-1 for x in range(0, n_samples_per_circle - 1)]
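# Only the first point of each circle carries a label (0 = outer,
# 1 = inner); every other sample is marked -1, i.e. unlabeled.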
###############################################################################
# Learn with LabelSpreading
label_spread = label_propagation.LabelSpreading(kernel='knn', alpha=1.0)
label_spread.fit(data, labels)
###############################################################################
# Plot output labels
output_labels = label_spread.transduction_
pl.figure(figsize=(8.5, 4))
pl.subplot(1, 2, 1)
plot_outer_labeled, = pl.plot(outer_circ_xs[0], outer_circ_ys[0], 'rs')
plot_unlabeled, = pl.plot(np.append(outer_circ_xs[1:], inner_circ_xs[1:]),
np.append(outer_circ_ys[1:], inner_circ_ys[1:]),
'g.')
plot_inner_labeled, = pl.plot(inner_circ_xs[0], inner_circ_ys[0], 'bs')
pl.legend((plot_outer_labeled, plot_inner_labeled, plot_unlabeled),
('Outer Labeled', 'Inner Labeled', 'Unlabeled'), 'upper left',
numpoints=1, shadow=False)
pl.title("Raw data (2 classes=red and blue)")
pl.subplot(1, 2, 2)
output_label_array = np.asarray(output_labels)
outer_numbers = np.where(output_label_array == outer)
inner_numbers = np.where(output_label_array == inner)
plot_outer, = pl.plot(all_xs[outer_numbers], all_ys[outer_numbers], 'rs')
plot_inner, = pl.plot(all_xs[inner_numbers], all_ys[inner_numbers], 'bs')
pl.legend((plot_outer, plot_inner), ('Outer Learned', 'Inner Learned'),
'upper left', numpoints=1, shadow=False)
pl.title("Labels learned with Label Spreading (KNN)")
pl.subplots_adjust(left=0.07, bottom=0.07, right=0.93, top=0.92)
pl.show()
|
vitan/hue | refs/heads/master | desktop/core/ext-py/Django-1.6.10/tests/csrf_tests/models.py | 754 | # models.py file for tests to run.
|
vqw/frappe | refs/heads/develop | frappe/tests/test_permissions.py | 8 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
"""Use blog post test to test user permissions logic"""
import frappe
import frappe.defaults
import unittest
import json
import frappe.model.meta
from frappe.core.page.user_permissions.user_permissions import add, remove, get_permissions
from frappe.permissions import clear_user_permissions_for_doctype, get_doc_permissions
test_records = frappe.get_test_records('Blog Post')
test_dependencies = ["User"]
class TestPermissions(unittest.TestCase):
def setUp(self):
frappe.clear_cache(doctype="Blog Post")
user = frappe.get_doc("User", "test1@example.com")
user.add_roles("Website Manager")
user = frappe.get_doc("User", "test2@example.com")
user.add_roles("Blogger")
frappe.db.sql("""update `tabDocPerm` set if_owner=0
where parent='Blog Post' and permlevel=0 and role='Blogger'""")
self.set_ignore_user_permissions_if_missing(0)
frappe.set_user("test1@example.com")
def tearDown(self):
frappe.set_user("Administrator")
frappe.db.set_value("Blogger", "_Test Blogger 1", "user", None)
clear_user_permissions_for_doctype("Blog Category")
clear_user_permissions_for_doctype("Blog Post")
clear_user_permissions_for_doctype("Blogger")
frappe.db.sql("""update `tabDocPerm` set user_permission_doctypes=null, apply_user_permissions=0
where parent='Blog Post' and permlevel=0 and apply_user_permissions=1
and `read`=1""")
frappe.db.sql("""update `tabDocPerm` set if_owner=0
where parent='Blog Post' and permlevel=0 and role='Blogger'""")
self.set_ignore_user_permissions_if_missing(0)
def set_ignore_user_permissions_if_missing(self, ignore):
ss = frappe.get_doc("System Settings")
ss.ignore_user_permissions_if_missing = ignore
ss.flags.ignore_mandatory = 1
ss.save()
def test_basic_permission(self):
post = frappe.get_doc("Blog Post", "-test-blog-post")
self.assertTrue(post.has_permission("read"))
def test_user_permissions_in_doc(self):
self.set_user_permission_doctypes(["Blog Category"])
frappe.permissions.add_user_permission("Blog Category", "_Test Blog Category 1",
"test2@example.com")
frappe.set_user("test2@example.com")
post = frappe.get_doc("Blog Post", "-test-blog-post")
self.assertFalse(post.has_permission("read"))
self.assertFalse(get_doc_permissions(post).get("read"))
post1 = frappe.get_doc("Blog Post", "-test-blog-post-1")
self.assertTrue(post1.has_permission("read"))
self.assertTrue(get_doc_permissions(post1).get("read"))
def test_user_permissions_in_report(self):
self.set_user_permission_doctypes(["Blog Category"])
frappe.permissions.add_user_permission("Blog Category", "_Test Blog Category 1", "test2@example.com")
frappe.set_user("test2@example.com")
names = [d.name for d in frappe.get_list("Blog Post", fields=["name", "blog_category"])]
self.assertTrue("-test-blog-post-1" in names)
self.assertFalse("-test-blog-post" in names)
def test_default_values(self):
frappe.permissions.add_user_permission("Blog Category", "_Test Blog Category 1", "test2@example.com")
frappe.set_user("test2@example.com")
doc = frappe.new_doc("Blog Post")
self.assertEquals(doc.get("blog_category"), "_Test Blog Category 1")
def test_user_link_match_doc(self):
self.set_user_permission_doctypes(["Blogger"])
blogger = frappe.get_doc("Blogger", "_Test Blogger 1")
blogger.user = "test2@example.com"
blogger.save()
frappe.set_user("test2@example.com")
post = frappe.get_doc("Blog Post", "-test-blog-post-2")
self.assertTrue(post.has_permission("read"))
post1 = frappe.get_doc("Blog Post", "-test-blog-post-1")
self.assertFalse(post1.has_permission("read"))
def test_user_link_match_report(self):
self.set_user_permission_doctypes(["Blogger"])
blogger = frappe.get_doc("Blogger", "_Test Blogger 1")
blogger.user = "test2@example.com"
blogger.save()
frappe.set_user("test2@example.com")
names = [d.name for d in frappe.get_list("Blog Post", fields=["name", "owner"])]
self.assertTrue("-test-blog-post-2" in names)
self.assertFalse("-test-blog-post-1" in names)
def test_set_user_permissions(self):
frappe.set_user("test1@example.com")
add("test2@example.com", "Blog Post", "-test-blog-post")
def test_not_allowed_to_set_user_permissions(self):
frappe.set_user("test2@example.com")
# this user can't add user permissions
self.assertRaises(frappe.PermissionError, add,
"test2@example.com", "Blog Post", "-test-blog-post")
def test_read_if_explicit_user_permissions_are_set(self):
self.set_user_permission_doctypes(["Blog Post"])
self.test_set_user_permissions()
frappe.set_user("test2@example.com")
# user can only access permitted blog post
doc = frappe.get_doc("Blog Post", "-test-blog-post")
self.assertTrue(doc.has_permission("read"))
# and not this one
doc = frappe.get_doc("Blog Post", "-test-blog-post-1")
self.assertFalse(doc.has_permission("read"))
def test_not_allowed_to_remove_user_permissions(self):
self.test_set_user_permissions()
defname = get_permissions("test2@example.com", "Blog Post", "-test-blog-post")[0].name
frappe.set_user("test2@example.com")
# user cannot remove their own user permissions
self.assertRaises(frappe.PermissionError, remove,
"test2@example.com", defname, "Blog Post", "-test-blog-post")
def test_user_permissions_based_on_blogger(self):
frappe.set_user("test2@example.com")
doc = frappe.get_doc("Blog Post", "-test-blog-post-1")
self.assertTrue(doc.has_permission("read"))
self.set_user_permission_doctypes(["Blog Post"])
frappe.set_user("test1@example.com")
add("test2@example.com", "Blog Post", "-test-blog-post")
frappe.set_user("test2@example.com")
doc = frappe.get_doc("Blog Post", "-test-blog-post-1")
self.assertFalse(doc.has_permission("read"))
doc = frappe.get_doc("Blog Post", "-test-blog-post")
self.assertTrue(doc.has_permission("read"))
def test_set_only_once(self):
blog_post = frappe.get_meta("Blog Post")
blog_post.get_field("title").set_only_once = 1
doc = frappe.get_doc("Blog Post", "-test-blog-post-1")
doc.title = "New"
self.assertRaises(frappe.CannotChangeConstantError, doc.save)
blog_post.get_field("title").set_only_once = 0
def test_user_permission_doctypes(self):
frappe.permissions.add_user_permission("Blog Category", "_Test Blog Category 1",
"test2@example.com")
frappe.permissions.add_user_permission("Blogger", "_Test Blogger 1",
"test2@example.com")
frappe.set_user("test2@example.com")
self.set_user_permission_doctypes(["Blogger"])
frappe.model.meta.clear_cache("Blog Post")
doc = frappe.get_doc("Blog Post", "-test-blog-post")
self.assertFalse(doc.has_permission("read"))
doc = frappe.get_doc("Blog Post", "-test-blog-post-2")
self.assertTrue(doc.has_permission("read"))
frappe.model.meta.clear_cache("Blog Post")
def if_owner_setup(self):
frappe.db.sql("""update `tabDocPerm` set if_owner=1
where parent='Blog Post' and permlevel=0 and role='Blogger'""")
frappe.permissions.add_user_permission("Blog Category", "_Test Blog Category 1",
"test2@example.com")
frappe.permissions.add_user_permission("Blogger", "_Test Blogger 1",
"test2@example.com")
frappe.db.sql("""update `tabDocPerm` set user_permission_doctypes=%s
where parent='Blog Post' and permlevel=0 and apply_user_permissions=1
and `read`=1""", json.dumps(["Blog Category"]))
frappe.model.meta.clear_cache("Blog Post")
def set_user_permission_doctypes(self, user_permission_doctypes):
set_user_permission_doctypes(doctype="Blog Post", role="Blogger",
apply_user_permissions=1, user_permission_doctypes=user_permission_doctypes)
def test_insert_if_owner_with_user_permissions(self):
"""If `If Owner` is checked for a Role, check if that document is allowed to be read, updated, submitted, etc. except be created, even if the document is restricted based on User Permissions."""
self.set_user_permission_doctypes(["Blog Category"])
self.if_owner_setup()
frappe.set_user("test2@example.com")
doc = frappe.get_doc({
"doctype": "Blog Post",
"blog_category": "_Test Blog Category",
"blogger": "_Test Blogger 1",
"title": "_Test Blog Post Title",
"content": "_Test Blog Post Content"
})
self.assertRaises(frappe.PermissionError, doc.insert)
frappe.set_user("Administrator")
frappe.permissions.add_user_permission("Blog Category", "_Test Blog Category",
"test2@example.com")
frappe.set_user("test2@example.com")
doc.insert()
frappe.set_user("Administrator")
frappe.permissions.remove_user_permission("Blog Category", "_Test Blog Category",
"test2@example.com")
frappe.set_user("test2@example.com")
doc = frappe.get_doc(doc.doctype, doc.name)
self.assertTrue(doc.has_permission("read"))
self.assertTrue(doc.has_permission("write"))
self.assertFalse(doc.has_permission("create"))
def test_ignore_user_permissions_if_missing(self):
"""If `Ignore User Permissions If Missing` is checked in System Settings, show records even if User Permissions are missing for a linked doctype"""
self.set_user_permission_doctypes(['Blog Category', 'Blog Post', 'Blogger'])
frappe.set_user("Administrator")
frappe.permissions.add_user_permission("Blog Category", "_Test Blog Category",
"test2@example.com")
frappe.set_user("test2@example.com")
doc = frappe.get_doc({
"doctype": "Blog Post",
"blog_category": "_Test Blog Category",
"blogger": "_Test Blogger 1",
"title": "_Test Blog Post Title",
"content": "_Test Blog Post Content"
})
self.assertFalse(doc.has_permission("write"))
frappe.set_user("Administrator")
self.set_ignore_user_permissions_if_missing(1)
frappe.set_user("test2@example.com")
self.assertTrue(doc.has_permission("write"))
def set_user_permission_doctypes(doctype, role, apply_user_permissions, user_permission_doctypes):
user_permission_doctypes = None if not user_permission_doctypes else json.dumps(user_permission_doctypes)
frappe.db.sql("""update `tabDocPerm` set apply_user_permissions=%(apply_user_permissions)s,
user_permission_doctypes=%(user_permission_doctypes)s
where parent=%(doctype)s and permlevel=0
and `read`=1 and role=%(role)s""", {
"apply_user_permissions": apply_user_permissions,
"user_permission_doctypes": user_permission_doctypes,
"doctype": doctype,
"role": role
})
frappe.clear_cache(doctype=doctype)
|
zding5/Microblog-Flask | refs/heads/master | flask/lib/python2.7/site-packages/werkzeug/contrib/lint.py | 295 | # -*- coding: utf-8 -*-
"""
werkzeug.contrib.lint
~~~~~~~~~~~~~~~~~~~~~
.. versionadded:: 0.5
This module provides a middleware that performs sanity checks of the WSGI
application. It checks that :pep:`333` is properly implemented and warns
on some common HTTP errors such as non-empty responses for 304 status
codes.
This module provides a middleware, the :class:`LintMiddleware`. Wrap your
application with it and it will warn about common problems with WSGI and
HTTP while your application is running.
It's strongly recommended to use it during development.
:copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from urlparse import urlparse
from warnings import warn
from werkzeug.datastructures import Headers
from werkzeug.http import is_entity_header
from werkzeug.wsgi import FileWrapper
from werkzeug._compat import string_types
class WSGIWarning(Warning):
"""Warning class for WSGI warnings."""
class HTTPWarning(Warning):
"""Warning class for HTTP warnings."""
def check_string(context, obj, stacklevel=3):
if type(obj) is not str:
warn(WSGIWarning('%s requires bytestrings, got %s' %
(context, obj.__class__.__name__)))
class InputStream(object):
def __init__(self, stream):
self._stream = stream
def read(self, *args):
if len(args) == 0:
warn(WSGIWarning('wsgi does not guarantee an EOF marker on the '
'input stream, thus making calls to '
'wsgi.input.read() unsafe. Conforming servers '
'may never return from this call.'),
stacklevel=2)
elif len(args) != 1:
warn(WSGIWarning('too many parameters passed to wsgi.input.read()'),
stacklevel=2)
return self._stream.read(*args)
def readline(self, *args):
if len(args) == 0:
warn(WSGIWarning('Calls to wsgi.input.readline() without arguments'
' are unsafe. Use wsgi.input.read() instead.'),
stacklevel=2)
elif len(args) == 1:
warn(WSGIWarning('wsgi.input.readline() was called with a size hint. '
'WSGI does not support this, although it\'s available '
'on all major servers.'),
stacklevel=2)
else:
raise TypeError('too many arguments passed to wsgi.input.readline()')
return self._stream.readline(*args)
def __iter__(self):
try:
return iter(self._stream)
except TypeError:
warn(WSGIWarning('wsgi.input is not iterable.'), stacklevel=2)
return iter(())
def close(self):
warn(WSGIWarning('application closed the input stream!'),
stacklevel=2)
self._stream.close()
class ErrorStream(object):
def __init__(self, stream):
self._stream = stream
def write(self, s):
check_string('wsgi.error.write()', s)
self._stream.write(s)
def flush(self):
self._stream.flush()
def writelines(self, seq):
for line in seq:
self.write(line)
def close(self):
warn(WSGIWarning('application closed the error stream!'),
stacklevel=2)
self._stream.close()
class GuardedWrite(object):
def __init__(self, write, chunks):
self._write = write
self._chunks = chunks
def __call__(self, s):
check_string('write()', s)
self._write(s)
self._chunks.append(len(s))
class GuardedIterator(object):
def __init__(self, iterator, headers_set, chunks):
self._iterator = iterator
self._next = iter(iterator).next
self.closed = False
self.headers_set = headers_set
self.chunks = chunks
def __iter__(self):
return self
def next(self):
if self.closed:
warn(WSGIWarning('iterated over closed app_iter'),
stacklevel=2)
rv = self._next()
if not self.headers_set:
warn(WSGIWarning('Application returned before it '
'started the response'), stacklevel=2)
check_string('application iterator items', rv)
self.chunks.append(len(rv))
return rv
def close(self):
self.closed = True
if hasattr(self._iterator, 'close'):
self._iterator.close()
if self.headers_set:
status_code, headers = self.headers_set
bytes_sent = sum(self.chunks)
content_length = headers.get('content-length', type=int)
if status_code == 304:
for key, value in headers:
key = key.lower()
if key not in ('expires', 'content-location') and \
is_entity_header(key):
warn(HTTPWarning('entity header %r found in 304 '
'response' % key))
if bytes_sent:
warn(HTTPWarning('304 responses must not have a body'))
elif 100 <= status_code < 200 or status_code == 204:
if content_length != 0:
warn(HTTPWarning('%r responses must have an empty '
'content length' % status_code))
if bytes_sent:
warn(HTTPWarning('%r responses must not have a body' %
status_code))
elif content_length is not None and content_length != bytes_sent:
warn(WSGIWarning('Content-Length and the number of bytes '
'sent to the client do not match.'))
def __del__(self):
if not self.closed:
try:
warn(WSGIWarning('Iterator was garbage collected before '
'it was closed.'))
except Exception:
pass
class LintMiddleware(object):
"""This middleware wraps an application and warns on common errors.
Among other thing it currently checks for the following problems:
- invalid status codes
- non-bytestrings sent to the WSGI server
- strings returned from the WSGI application
- non-empty conditional responses
- unquoted etags
- relative URLs in the Location header
- unsafe calls to wsgi.input
- unclosed iterators
Detected errors are emitted using the standard Python :mod:`warnings`
system and usually end up on :data:`stderr`.
::
from werkzeug.contrib.lint import LintMiddleware
app = LintMiddleware(app)
:param app: the application to wrap
"""
def __init__(self, app):
self.app = app
def check_environ(self, environ):
if type(environ) is not dict:
warn(WSGIWarning('WSGI environment is not a standard python dict.'),
stacklevel=4)
for key in ('REQUEST_METHOD', 'SERVER_NAME', 'SERVER_PORT',
'wsgi.version', 'wsgi.input', 'wsgi.errors',
'wsgi.multithread', 'wsgi.multiprocess',
'wsgi.run_once'):
if key not in environ:
warn(WSGIWarning('required environment key %r not found'
% key), stacklevel=3)
if environ['wsgi.version'] != (1, 0):
warn(WSGIWarning('environ is not a WSGI 1.0 environ'),
stacklevel=3)
script_name = environ.get('SCRIPT_NAME', '')
if script_name and script_name[:1] != '/':
warn(WSGIWarning('SCRIPT_NAME does not start with a slash: %r'
% script_name), stacklevel=3)
path_info = environ.get('PATH_INFO', '')
if path_info[:1] != '/':
warn(WSGIWarning('PATH_INFO does not start with a slash: %r'
% path_info), stacklevel=3)
def check_start_response(self, status, headers, exc_info):
check_string('status', status)
status_code = status.split(None, 1)[0]
if len(status_code) != 3 or not status_code.isdigit():
warn(WSGIWarning('Status code must be three digits'), stacklevel=3)
if len(status) < 4 or status[3] != ' ':
warn(WSGIWarning('Invalid value for status %r. Valid '
'status strings are three digits, a space '
'and a status explanation' % status), stacklevel=3)
status_code = int(status_code)
if status_code < 100:
warn(WSGIWarning('status code < 100 detected'), stacklevel=3)
if type(headers) is not list:
warn(WSGIWarning('header list is not a list'), stacklevel=3)
for item in headers:
if type(item) is not tuple or len(item) != 2:
warn(WSGIWarning('Headers must be a list of 2-item tuples'),
stacklevel=3)
name, value = item
if type(name) is not str or type(value) is not str:
warn(WSGIWarning('header items must be strings'),
stacklevel=3)
if name.lower() == 'status':
warn(WSGIWarning('The status header is not supported due to '
'conflicts with the CGI spec.'),
stacklevel=3)
if exc_info is not None and not isinstance(exc_info, tuple):
warn(WSGIWarning('invalid value for exc_info'), stacklevel=3)
headers = Headers(headers)
self.check_headers(headers)
return status_code, headers
def check_headers(self, headers):
etag = headers.get('etag')
if etag is not None:
if etag.startswith(('W/', 'w/')):
etag = etag[2:]
if not (etag[:1] == etag[-1:] == '"'):
warn(HTTPWarning('unquoted etag emitted.'), stacklevel=4)
location = headers.get('location')
if location is not None:
if not urlparse(location).netloc:
warn(HTTPWarning('absolute URLs required for location header'),
stacklevel=4)
def check_iterator(self, app_iter):
if isinstance(app_iter, string_types):
warn(WSGIWarning('application returned string. Response will '
'send character for character to the client '
'which will kill the performance. Return a '
'list or iterable instead.'), stacklevel=3)
def __call__(self, *args, **kwargs):
if len(args) != 2:
warn(WSGIWarning('Two arguments to WSGI app required'), stacklevel=2)
if kwargs:
warn(WSGIWarning('No keyword arguments to WSGI app allowed'),
stacklevel=2)
environ, start_response = args
self.check_environ(environ)
environ['wsgi.input'] = InputStream(environ['wsgi.input'])
environ['wsgi.errors'] = ErrorStream(environ['wsgi.errors'])
# hook our own file wrapper in so that applications will always
# iterate to the end and we can check the content length
environ['wsgi.file_wrapper'] = FileWrapper
headers_set = []
chunks = []
def checking_start_response(*args, **kwargs):
if len(args) not in (2, 3):
warn(WSGIWarning('Invalid number of arguments: %s, expected '
'2 or 3' % len(args)), stacklevel=2)
if kwargs:
warn(WSGIWarning('no keyword arguments allowed.'))
status, headers = args[:2]
if len(args) == 3:
exc_info = args[2]
else:
exc_info = None
headers_set[:] = self.check_start_response(status, headers,
exc_info)
return GuardedWrite(start_response(status, headers, exc_info),
chunks)
app_iter = self.app(environ, checking_start_response)
self.check_iterator(app_iter)
return GuardedIterator(app_iter, headers_set, chunks)
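# Minimal usage sketch (illustrative only, not part of werkzeug): wrap a
# trivial WSGI application so PEP 333 violations surface as warnings while
# it runs under any WSGI server. The app below is an assumption.
def _example_lint_usage():  # pragma: no cover - illustrative only
    def app(environ, start_response):
        start_response('200 OK', [('Content-Type', 'text/plain')])
        return ['Hello from a linted app!']
    # Requests served through the returned app emit WSGIWarning/HTTPWarning
    # diagnostics on the standard warnings stream.
    return LintMiddleware(app)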
|
DigitalCampus/django-oppia | refs/heads/master | tests/api/v2/test_activitylog.py | 1 | import json
from django.contrib.auth.models import User
from django.test import TestCase
from oppia.models import Tracker
from quiz.models import QuizAttemptResponse, QuizAttempt
from tastypie.test import ResourceTestCaseMixin
from tests.utils import get_api_key
class UploadAPIActivityLogTest(ResourceTestCaseMixin, TestCase):
fixtures = ['tests/test_user.json',
'tests/test_oppia.json',
'tests/test_malaria_quiz.json',
'tests/test_permissions.json',
'default_gamification_events.json']
url = '/api/activitylog/'
basic_activity_log = './oppia/fixtures/activity_logs/basic_activity.json'
activity_log_file_path = \
'./oppia/fixtures/activity_logs/activity_upload_test.json'
wrong_activity_file = './oppia/fixtures/activity_logs/wrong_format.json'
new_user_activity = './oppia/fixtures/activity_logs/new_user_activity.json'
quiz_attempt_log = './oppia/fixtures/activity_logs/quiz_attempts.json'
def setUp(self):
super(UploadAPIActivityLogTest, self).setUp()
self.username = 'demo'
user = User.objects.get(username=self.username)
api_key = get_api_key(user=user)
self.api_key = api_key.key
def get_credentials(self):
return self.create_apikey(username=self.username,
api_key=self.api_key)
def test_no_get(self):
response = self.api_client.get(self.url)
self.assertEqual(405, response.status_code)
def test_no_post(self):
response = self.api_client.post(self.url,
format='json',
data={})
self.assertEqual(405, response.status_code)
def test_no_data(self):
# no file
response = self.api_client.patch(self.url,
format='json',
data={},
authentication=self.get_credentials())
self.assertEqual(400, response.status_code)
def test_correct_basic_data(self):
tracker_count_start = Tracker.objects.all().count()
with open(self.basic_activity_log) as activity_log_file:
json_data = json.load(activity_log_file)
response = self.api_client.patch(self.url,
format='json',
data=json_data,
authentication=self.get_credentials())
self.assertEqual(200, response.status_code)
tracker_count_end = Tracker.objects.all().count()
self.assertEqual(tracker_count_start + 2, tracker_count_end)
def test_new_user_file(self):
tracker_count_start = Tracker.objects.all().count()
user_count_start = User.objects.all().count()
with open(self.new_user_activity) as activity_log_file:
json_data = json.load(activity_log_file)
response = self.api_client.patch(self.url,
format='json',
data=json_data,
authentication=self.get_credentials())
self.assertEqual(200, response.status_code)
tracker_count_end = Tracker.objects.all().count()
user_count_end = User.objects.all().count()
self.assertEqual(tracker_count_start + 2, tracker_count_end)
self.assertEqual(user_count_start + 1, user_count_end)
def test_wrong_format_file(self):
with open(self.wrong_activity_file) as activity_log_file:
json_data = json.load(activity_log_file)
response = self.api_client.patch(self.url,
format='json',
data=json_data,
authentication=self.get_credentials())
self.assertEqual(400, response.status_code)
def test_quizattempts(self):
tracker_count_start = Tracker.objects.all().count()
qa_count_start = QuizAttempt.objects.all().count()
qar_count_start = QuizAttemptResponse.objects.all().count()
with open(self.quiz_attempt_log) as activity_log_file:
json_data = json.load(activity_log_file)
response = self.api_client.patch(self.url,
format='json',
data=json_data,
authentication=self.get_credentials())
self.assertEqual(200, response.status_code)
tracker_count_end = Tracker.objects.all().count()
qa_count_end = QuizAttempt.objects.all().count()
qar_count_end = QuizAttemptResponse.objects.all().count()
self.assertEqual(tracker_count_start, tracker_count_end)
self.assertEqual(qa_count_start + 1, qa_count_end)
self.assertEqual(qar_count_start + 7, qar_count_end)
def test_trackers_not_duplicated(self):
tracker_count_start = Tracker.objects.all().count()
with open(self.basic_activity_log) as activity_log_file:
json_data = json.load(activity_log_file)
response = self.api_client.patch(self.url,
format='json',
data=json_data,
authentication=self.get_credentials())
self.assertEqual(200, response.status_code)
# Now upload the same file
with open(self.basic_activity_log) as activity_log_file:
json_data = json.load(activity_log_file)
response = self.api_client.patch(self.url,
format='json',
data=json_data,
authentication=self.get_credentials())
self.assertEqual(200, response.status_code)
tracker_count_end = Tracker.objects.all().count()
self.assertEqual(tracker_count_start + 2, tracker_count_end)
def test_quizattempts_not_duplicated(self):
tracker_count_start = Tracker.objects.all().count()
qa_count_start = QuizAttempt.objects.all().count()
qar_count_start = QuizAttemptResponse.objects.all().count()
with open(self.quiz_attempt_log) as activity_log_file:
json_data = json.load(activity_log_file)
response = self.api_client.patch(self.url,
format='json',
data=json_data,
authentication=self.get_credentials())
self.assertEqual(200, response.status_code)
# Now upload the same file
with open(self.quiz_attempt_log) as activity_log_file:
json_data = json.load(activity_log_file)
response = self.api_client.patch(self.url,
format='json',
data=json_data,
authentication=self.get_credentials())
self.assertEqual(200, response.status_code)
tracker_count_end = Tracker.objects.all().count()
qa_count_end = QuizAttempt.objects.all().count()
qar_count_end = QuizAttemptResponse.objects.all().count()
self.assertEqual(tracker_count_start, tracker_count_end)
self.assertEqual(qa_count_start + 1, qa_count_end)
self.assertEqual(qar_count_start + 7, qar_count_end)
|
ayoubg/gem5-graphics | refs/heads/master | gem5/src/arch/x86/isa/insts/simd128/floating_point/arithmetic/reciprocal_estimation.py | 62 | # Copyright (c) 2007 The Hewlett-Packard Development Company
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Gabe Black
microcode = '''
# RCPPS
# RCPSS
'''
|
truongdq/chainer | refs/heads/master | cupy/creation/basic.py | 19 | import cupy
def empty(shape, dtype=float):
"""Returns an array without initializing the elements.
This function currently does not support ``order`` option.
Args:
shape (tuple of ints): Dimensionalities of the array.
dtype: Data type specifier.
Returns:
cupy.ndarray: A new array with elements not initialized.
.. seealso:: :func:`numpy.empty`
"""
# TODO(beam2d): Support ordering option
return cupy.ndarray(shape, dtype=dtype)
def empty_like(a, dtype=None):
"""Returns a new array with same shape and dtype of a given array.
This function currently does not support ``order`` and ``subok`` options.
Args:
a (cupy.ndarray): Base array.
dtype: Data type specifier. The data type of ``a`` is used by default.
Returns:
cupy.ndarray: A new array with same shape and dtype of ``a`` with
elements not initialized.
.. seealso:: :func:`numpy.empty_like`
"""
# TODO(beam2d): Support ordering option
if dtype is None:
dtype = a.dtype
return empty(a.shape, dtype=dtype)
def eye(N, M=None, k=0, dtype=float):
"""Returns a 2-D array with ones on the diagonals and zeros elsewhere.
Args:
N (int): Number of rows.
M (int): Number of columns. M == N by default.
k (int): Index of the diagonal. Zero indicates the main diagonal,
a positive index an upper diagonal, and a negative index a lower
diagonal.
dtype: Data type specifier.
Returns:
cupy.ndarray: A 2-D array with given diagonals filled with ones and
zeros elsewhere.
.. seealso:: :func:`numpy.eye`
"""
if M is None:
M = N
ret = zeros((N, M), dtype)
ret.diagonal(k)[:] = 1
return ret
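# Hedged usage sketch (not part of the original module; assumes a CUDA
# device is available): k=1 selects the first upper diagonal.
def _example_eye():  # pragma: no cover - illustrative only
    a = eye(3, k=1, dtype=float)
    # a.get() ->
    # [[ 0.  1.  0.]
    #  [ 0.  0.  1.]
    #  [ 0.  0.  0.]]
    return a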
def identity(n, dtype=float):
"""Returns a 2-D identity array.
It is equivalent to ``eye(n, n, dtype)``.
Args:
n (int): Number of rows and columns.
dtype: Data type specifier.
Returns:
cupy.ndarray: A 2-D identity array.
.. seealso:: :func:`numpy.identity`
"""
return eye(n, dtype=dtype)
def ones(shape, dtype=float):
"""Returns a new array of given shape and dtype, filled with ones.
This function currently does not support ``order`` option.
Args:
shape (tuple of ints): Dimensionalities of the array.
dtype: Data type specifier.
Returns:
cupy.ndarray: An array filled with ones.
.. seealso:: :func:`numpy.ones`
"""
# TODO(beam2d): Support ordering option
return full(shape, 1, dtype)
def ones_like(a, dtype=None):
"""Returns an array of ones with same shape and dtype as a given array.
This function currently does not support ``order`` and ``subok`` options.
Args:
a (cupy.ndarray): Base array.
dtype: Data type specifier. The dtype of ``a`` is used by default.
Returns:
cupy.ndarray: An array filled with ones.
.. seealso:: :func:`numpy.ones_like`
"""
# TODO(beam2d): Support ordering option
if dtype is None:
dtype = a.dtype
return ones(a.shape, dtype)
def zeros(shape, dtype=float):
"""Returns a new array of given shape and dtype, filled with zeros.
This function currently does not support ``order`` option.
Args:
shape (tuple of ints): Dimensionalities of the array.
dtype: Data type specifier.
Returns:
cupy.ndarray: An array filled with zeros.
.. seealso:: :func:`numpy.zeros`
"""
# TODO(beam2d): Support ordering option
a = empty(shape, dtype)
a.data.memset(0, a.nbytes)
return a
def zeros_like(a, dtype=None):
"""Returns an array of zeros with same shape and dtype as a given array.
This function currently does not support ``order`` and ``subok`` options.
Args:
a (cupy.ndarray): Base array.
dtype: Data type specifier. The dtype of ``a`` is used by default.
Returns:
cupy.ndarray: An array filled with zeros.
.. seealso:: :func:`numpy.zeros_like`
"""
# TODO(beam2d): Support ordering option
if dtype is None:
dtype = a.dtype
return zeros(a.shape, dtype=dtype)
def full(shape, fill_value, dtype=None):
"""Returns a new array of given shape and dtype, filled with a given value.
This function currently does not support ``order`` option.
Args:
shape (tuple of ints): Dimensionalities of the array.
fill_value: A scalar value to fill a new array.
dtype: Data type specifier.
Returns:
cupy.ndarray: An array filled with ``fill_value``.
.. seealso:: :func:`numpy.full`
"""
# TODO(beam2d): Support ordering option
a = empty(shape, dtype)
a.fill(fill_value)
return a
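# Hedged usage sketch (not part of the original module): full() allocates
# with empty() and then fills every element on the device.
def _example_full():  # pragma: no cover - illustrative only
    a = full((2, 3), 7, dtype=float)
    # a.get() -> [[ 7.  7.  7.]
    #             [ 7.  7.  7.]]
    return a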
def full_like(a, fill_value, dtype=None):
"""Returns a full array with same shape and dtype as a given array.
This function currently does not support ``order`` and ``subok`` options.
Args:
a (cupy.ndarray): Base array.
fill_value: A scalar value to fill a new array.
dtype: Data type specifier. The dtype of ``a`` is used by default.
Returns:
cupy.ndarray: An array filled with ``fill_value``.
.. seealso:: :func:`numpy.full_like`
"""
# TODO(beam2d): Support ordering option
if dtype is None:
dtype = a.dtype
return full(a.shape, fill_value, dtype)
|
Akagi201/learning-python | refs/heads/master | pyglet/image_viewer.py | 1 | #!/usr/bin/env python
import pyglet
window = pyglet.window.Window()
image = pyglet.resource.image('kitten.jpg')
@window.event
def on_draw():
window.clear()
image.blit(0, 0)
pyglet.app.run()
|
kkragenbrink/node-gyp | refs/heads/master | gyp/pylib/gyp/generator/scons.py | 231 | # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import gyp
import gyp.common
import gyp.SCons as SCons
import os.path
import pprint
import re
import subprocess
# TODO: remove when we delete the last WriteList() call in this module
WriteList = SCons.WriteList
generator_default_variables = {
'EXECUTABLE_PREFIX': '',
'EXECUTABLE_SUFFIX': '',
'STATIC_LIB_PREFIX': '${LIBPREFIX}',
'SHARED_LIB_PREFIX': '${SHLIBPREFIX}',
'STATIC_LIB_SUFFIX': '${LIBSUFFIX}',
'SHARED_LIB_SUFFIX': '${SHLIBSUFFIX}',
'INTERMEDIATE_DIR': '${INTERMEDIATE_DIR}',
'SHARED_INTERMEDIATE_DIR': '${SHARED_INTERMEDIATE_DIR}',
'OS': 'linux',
'PRODUCT_DIR': '$TOP_BUILDDIR',
'SHARED_LIB_DIR': '$LIB_DIR',
'LIB_DIR': '$LIB_DIR',
'RULE_INPUT_ROOT': '${SOURCE.filebase}',
'RULE_INPUT_DIRNAME': '${SOURCE.dir}',
'RULE_INPUT_EXT': '${SOURCE.suffix}',
'RULE_INPUT_NAME': '${SOURCE.file}',
'RULE_INPUT_PATH': '${SOURCE.abspath}',
'CONFIGURATION_NAME': '${CONFIG_NAME}',
}
# Tell GYP how to process the input for us.
generator_handles_variants = True
generator_wants_absolute_build_file_paths = True
def FixPath(path, prefix):
if path and not os.path.isabs(path) and not path.startswith('$'):
path = prefix + path
return path
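# Illustrative sketch (values are assumptions): relative paths gain the
# prefix, while absolute and SCons-variable paths pass through untouched.
def _example_fix_path():  # pragma: no cover - illustrative only
    assert FixPath('foo.c', '$SRC_DIR/sub/') == '$SRC_DIR/sub/foo.c'
    assert FixPath('$OBJ_DIR/foo.c', '$SRC_DIR/sub/') == '$OBJ_DIR/foo.c'
    assert FixPath('/abs/foo.c', '$SRC_DIR/sub/') == '/abs/foo.c'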
header = """\
# This file is generated; do not edit.
"""
_alias_template = """
if GetOption('verbose'):
_action = Action([%(action)s])
else:
_action = Action([%(action)s], %(message)s)
_outputs = env.Alias(
['_%(target_name)s_action'],
%(inputs)s,
_action
)
env.AlwaysBuild(_outputs)
"""
_run_as_template = """
if GetOption('verbose'):
_action = Action([%(action)s])
else:
_action = Action([%(action)s], %(message)s)
"""
_run_as_template_suffix = """
_run_as_target = env.Alias('run_%(target_name)s', target_files, _action)
env.Requires(_run_as_target, [
Alias('%(target_name)s'),
])
env.AlwaysBuild(_run_as_target)
"""
_command_template = """
if GetOption('verbose'):
_action = Action([%(action)s])
else:
_action = Action([%(action)s], %(message)s)
_outputs = env.Command(
%(outputs)s,
%(inputs)s,
_action
)
"""
# This is copied from the default SCons action, updated to handle symlinks.
_copy_action_template = """
import shutil
import SCons.Action
def _copy_files_or_dirs_or_symlinks(dest, src):
SCons.Node.FS.invalidate_node_memos(dest)
if SCons.Util.is_List(src) and os.path.isdir(dest):
for file in src:
shutil.copy2(file, dest)
return 0
elif os.path.islink(src):
linkto = os.readlink(src)
os.symlink(linkto, dest)
return 0
elif os.path.isfile(src):
return shutil.copy2(src, dest)
else:
return shutil.copytree(src, dest, 1)
def _copy_files_or_dirs_or_symlinks_str(dest, src):
return 'Copying %s to %s ...' % (src, dest)
GYPCopy = SCons.Action.ActionFactory(_copy_files_or_dirs_or_symlinks,
_copy_files_or_dirs_or_symlinks_str,
convert=str)
"""
_rule_template = """
%(name)s_additional_inputs = %(inputs)s
%(name)s_outputs = %(outputs)s
def %(name)s_emitter(target, source, env):
return (%(name)s_outputs, source + %(name)s_additional_inputs)
if GetOption('verbose'):
%(name)s_action = Action([%(action)s])
else:
%(name)s_action = Action([%(action)s], %(message)s)
env['BUILDERS']['%(name)s'] = Builder(action=%(name)s_action,
emitter=%(name)s_emitter)
_outputs = []
_processed_input_files = []
for infile in input_files:
if (type(infile) == type('')
and not os.path.isabs(infile)
and not infile[0] == '$'):
infile = %(src_dir)r + infile
if str(infile).endswith('.%(extension)s'):
_generated = env.%(name)s(infile)
env.Precious(_generated)
_outputs.append(_generated)
%(process_outputs_as_sources_line)s
else:
_processed_input_files.append(infile)
prerequisites.extend(_outputs)
input_files = _processed_input_files
"""
_spawn_hack = """
import re
import SCons.Platform.posix
needs_shell = re.compile('["\\'><!^&]')
def gyp_spawn(sh, escape, cmd, args, env):
def strip_scons_quotes(arg):
if arg[0] == '"' and arg[-1] == '"':
return arg[1:-1]
return arg
stripped_args = [strip_scons_quotes(a) for a in args]
if needs_shell.search(' '.join(stripped_args)):
return SCons.Platform.posix.exec_spawnvpe([sh, '-c', ' '.join(args)], env)
else:
return SCons.Platform.posix.exec_spawnvpe(stripped_args, env)
"""
def EscapeShellArgument(s):
"""Quotes an argument so that it will be interpreted literally by a POSIX
shell. Taken from
http://stackoverflow.com/questions/35817/whats-the-best-way-to-escape-ossystem-calls-in-python
"""
return "'" + s.replace("'", "'\\''") + "'"
def InvertNaiveSConsQuoting(s):
"""SCons tries to "help" with quoting by naively putting double-quotes around
command-line arguments containing space or tab, which is broken for all
but trivial cases, so we undo it. (See quote_spaces() in Subst.py)"""
if ' ' in s or '\t' in s:
# Then SCons will put double-quotes around this, so add our own quotes
# to close its quotes at the beginning and end.
s = '"' + s + '"'
return s
def EscapeSConsVariableExpansion(s):
"""SCons has its own variable expansion syntax using $. We must escape it for
strings to be interpreted literally. For some reason this requires four
dollar signs, not two, even without the shell involved."""
return s.replace('$', '$$$$')
def EscapeCppDefine(s):
"""Escapes a CPP define so that it will reach the compiler unaltered."""
s = EscapeShellArgument(s)
s = InvertNaiveSConsQuoting(s)
s = EscapeSConsVariableExpansion(s)
return s
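# Illustrative sketch (the input value is an assumption): how a define
# passes through the three escaping layers above before reaching SCons.
def _example_escape_cpp_define():  # pragma: no cover - illustrative only
    raw = 'VERSION="1.0 $beta"'
    escaped = EscapeCppDefine(raw)
    # EscapeShellArgument single-quotes the value, InvertNaiveSConsQuoting
    # compensates for SCons's naive double-quoting of the embedded space,
    # and EscapeSConsVariableExpansion turns each '$' into '$$$$'.
    return escaped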
def GenerateConfig(fp, config, indent='', src_dir=''):
"""
Generates SCons dictionary items for a gyp configuration.
This provides the main translation between the (lower-case) gyp settings
keywords and the (upper-case) SCons construction variables.
"""
var_mapping = {
'ASFLAGS' : 'asflags',
'CCFLAGS' : 'cflags',
'CFLAGS' : 'cflags_c',
'CXXFLAGS' : 'cflags_cc',
'CPPDEFINES' : 'defines',
'CPPPATH' : 'include_dirs',
# Add the ldflags value to $LINKFLAGS, but not $SHLINKFLAGS.
# SCons defines $SHLINKFLAGS to incorporate $LINKFLAGS, so
# listing both here would cause 'ldflags' to get appended to
# both, and then have it show up twice on the command line.
'LINKFLAGS' : 'ldflags',
}
postamble='\n%s],\n' % indent
for scons_var in sorted(var_mapping.keys()):
gyp_var = var_mapping[scons_var]
value = config.get(gyp_var)
if value:
if gyp_var in ('defines',):
value = [EscapeCppDefine(v) for v in value]
if gyp_var in ('include_dirs',):
if src_dir and not src_dir.endswith('/'):
src_dir += '/'
result = []
for v in value:
v = FixPath(v, src_dir)
# Force SCons to evaluate the CPPPATH directories at
# SConscript-read time, so delayed evaluation of $SRC_DIR
# doesn't point it to the --generator-output= directory.
result.append('env.Dir(%r)' % v)
value = result
else:
value = map(repr, value)
WriteList(fp,
value,
prefix=indent,
preamble='%s%s = [\n ' % (indent, scons_var),
postamble=postamble)
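# Hedged usage sketch (config values are assumptions): writes the SCons
# Append() keyword lines for a minimal gyp configuration to fp.
def _example_generate_config(fp):  # pragma: no cover - illustrative only
    config = {'defines': ['DEBUG'], 'include_dirs': ['include']}
    # Emits CPPDEFINES and CPPPATH entries, the latter as env.Dir() nodes
    # rooted at the gyp file's source subdirectory.
    GenerateConfig(fp, config, indent=' ' * 12, src_dir='$SRC_DIR/sub')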
def GenerateSConscript(output_filename, spec, build_file, build_file_data):
"""
Generates a SConscript file for a specific target.
This generates a SConscript file suitable for building any or all of
the target's configurations.
A SConscript file may be called multiple times to generate targets for
multiple configurations. Consequently, it needs to be ready to build
the target for any requested configuration, and therefore contains
information about the settings for all configurations (generated into
the SConscript file at gyp configuration time) as well as logic for
selecting (at SCons build time) the specific configuration being built.
The general outline of a generated SConscript file is:
-- Header
-- Import 'env'. This contains a $CONFIG_NAME construction
variable that specifies what configuration to build
(e.g. Debug, Release).
-- Configurations. This is a dictionary with settings for
the different configurations (Debug, Release) under which this
target can be built. The values in the dictionary are themselves
dictionaries specifying what construction variables should be added
to the local copy of the imported construction environment
(Append), should be removed (FilterOut), and should outright
replace the imported values (Replace).
-- Clone the imported construction environment and update
with the proper configuration settings.
-- Initialize the lists of the targets' input files and prerequisites.
-- Target-specific actions and rules. These come after the
input file and prerequisite initializations because the
outputs of the actions and rules may affect the input file
list (process_outputs_as_sources) and get added to the list of
prerequisites (so that they're guaranteed to be executed before
building the target).
-- Call the Builder for the target itself.
-- Arrange for any copies to be made into installation directories.
-- Set up the {name} Alias (phony Node) for the target as the
primary handle for building all of the target's pieces.
-- Use env.Require() to make sure the prerequisites (explicitly
specified, but also including the actions and rules) are built
before the target itself.
-- Return the {name} Alias to the calling SConstruct file
so it can be added to the list of default targets.
"""
scons_target = SCons.Target(spec)
gyp_dir = os.path.dirname(output_filename)
if not gyp_dir:
gyp_dir = '.'
gyp_dir = os.path.abspath(gyp_dir)
output_dir = os.path.dirname(output_filename)
src_dir = build_file_data['_DEPTH']
src_dir_rel = gyp.common.RelativePath(src_dir, output_dir)
subdir = gyp.common.RelativePath(os.path.dirname(build_file), src_dir)
src_subdir = '$SRC_DIR/' + subdir
src_subdir_ = src_subdir + '/'
component_name = os.path.splitext(os.path.basename(build_file))[0]
target_name = spec['target_name']
if not os.path.exists(gyp_dir):
os.makedirs(gyp_dir)
fp = open(output_filename, 'w')
fp.write(header)
fp.write('\nimport os\n')
fp.write('\nImport("env")\n')
#
fp.write('\n')
fp.write('env = env.Clone(COMPONENT_NAME=%s,\n' % repr(component_name))
fp.write(' TARGET_NAME=%s)\n' % repr(target_name))
#
for config in spec['configurations'].itervalues():
if config.get('scons_line_length'):
fp.write(_spawn_hack)
break
#
indent = ' ' * 12
fp.write('\n')
fp.write('configurations = {\n')
for config_name, config in spec['configurations'].iteritems():
fp.write(' \'%s\' : {\n' % config_name)
fp.write(' \'Append\' : dict(\n')
GenerateConfig(fp, config, indent, src_subdir)
libraries = spec.get('libraries')
if libraries:
WriteList(fp,
map(repr, libraries),
prefix=indent,
preamble='%sLIBS = [\n ' % indent,
postamble='\n%s],\n' % indent)
fp.write(' ),\n')
fp.write(' \'FilterOut\' : dict(\n' )
for key, var in config.get('scons_remove', {}).iteritems():
fp.write(' %s = %s,\n' % (key, repr(var)))
fp.write(' ),\n')
fp.write(' \'Replace\' : dict(\n' )
scons_settings = config.get('scons_variable_settings', {})
for key in sorted(scons_settings.keys()):
val = pprint.pformat(scons_settings[key])
fp.write(' %s = %s,\n' % (key, val))
if 'c++' in spec.get('link_languages', []):
fp.write(' %s = %s,\n' % ('LINK', repr('$CXX')))
if config.get('scons_line_length'):
fp.write(' SPAWN = gyp_spawn,\n')
fp.write(' ),\n')
fp.write(' \'ImportExternal\' : [\n' )
for var in config.get('scons_import_variables', []):
fp.write(' %s,\n' % repr(var))
fp.write(' ],\n')
fp.write(' \'PropagateExternal\' : [\n' )
for var in config.get('scons_propagate_variables', []):
fp.write(' %s,\n' % repr(var))
fp.write(' ],\n')
fp.write(' },\n')
fp.write('}\n')
fp.write('\n'
'config = configurations[env[\'CONFIG_NAME\']]\n'
'env.Append(**config[\'Append\'])\n'
'env.FilterOut(**config[\'FilterOut\'])\n'
'env.Replace(**config[\'Replace\'])\n')
fp.write('\n'
'# Scons forces -fPIC for SHCCFLAGS on some platforms.\n'
'# Disable that so we can control it from cflags in gyp.\n'
'# Note that Scons itself is inconsistent with its -fPIC\n'
'# setting. SHCCFLAGS forces -fPIC, and SHCFLAGS does not.\n'
'# This will make SHCCFLAGS consistent with SHCFLAGS.\n'
'env[\'SHCCFLAGS\'] = [\'$CCFLAGS\']\n')
fp.write('\n'
'for _var in config[\'ImportExternal\']:\n'
' if _var in ARGUMENTS:\n'
' env[_var] = ARGUMENTS[_var]\n'
' elif _var in os.environ:\n'
' env[_var] = os.environ[_var]\n'
'for _var in config[\'PropagateExternal\']:\n'
' if _var in ARGUMENTS:\n'
' env[_var] = ARGUMENTS[_var]\n'
' elif _var in os.environ:\n'
' env[\'ENV\'][_var] = os.environ[_var]\n')
fp.write('\n'
"env['ENV']['LD_LIBRARY_PATH'] = env.subst('$LIB_DIR')\n")
#
#fp.write("\nif env.has_key('CPPPATH'):\n")
#fp.write(" env['CPPPATH'] = map(env.Dir, env['CPPPATH'])\n")
variants = spec.get('variants', {})
for setting in sorted(variants.keys()):
if_fmt = 'if ARGUMENTS.get(%s) not in (None, \'0\'):\n'
fp.write('\n')
fp.write(if_fmt % repr(setting.upper()))
fp.write(' env.AppendUnique(\n')
GenerateConfig(fp, variants[setting], indent, src_subdir)
fp.write(' )\n')
#
scons_target.write_input_files(fp)
fp.write('\n')
fp.write('target_files = []\n')
prerequisites = spec.get('scons_prerequisites', [])
fp.write('prerequisites = %s\n' % pprint.pformat(prerequisites))
actions = spec.get('actions', [])
for action in actions:
a = ['cd', src_subdir, '&&'] + action['action']
message = action.get('message')
if message:
message = repr(message)
inputs = [FixPath(f, src_subdir_) for f in action.get('inputs', [])]
outputs = [FixPath(f, src_subdir_) for f in action.get('outputs', [])]
if outputs:
template = _command_template
else:
template = _alias_template
fp.write(template % {
'inputs' : pprint.pformat(inputs),
'outputs' : pprint.pformat(outputs),
'action' : pprint.pformat(a),
'message' : message,
'target_name': target_name,
})
if int(action.get('process_outputs_as_sources', 0)):
fp.write('input_files.extend(_outputs)\n')
fp.write('prerequisites.extend(_outputs)\n')
fp.write('target_files.extend(_outputs)\n')
rules = spec.get('rules', [])
for rule in rules:
name = re.sub('[^a-zA-Z0-9_]', '_', rule['rule_name'])
message = rule.get('message')
if message:
message = repr(message)
if int(rule.get('process_outputs_as_sources', 0)):
poas_line = '_processed_input_files.extend(_generated)'
else:
poas_line = '_processed_input_files.append(infile)'
inputs = [FixPath(f, src_subdir_) for f in rule.get('inputs', [])]
outputs = [FixPath(f, src_subdir_) for f in rule.get('outputs', [])]
# Skip a rule with no action and no inputs.
if 'action' not in rule and not rule.get('rule_sources', []):
continue
a = ['cd', src_subdir, '&&'] + rule['action']
fp.write(_rule_template % {
'inputs' : pprint.pformat(inputs),
'outputs' : pprint.pformat(outputs),
'action' : pprint.pformat(a),
'extension' : rule['extension'],
'name' : name,
'message' : message,
'process_outputs_as_sources_line' : poas_line,
'src_dir' : src_subdir_,
})
scons_target.write_target(fp, src_subdir)
copies = spec.get('copies', [])
if copies:
fp.write(_copy_action_template)
for copy in copies:
destdir = None
files = None
try:
destdir = copy['destination']
except KeyError, e:
gyp.common.ExceptionAppend(
e,
"Required 'destination' key missing for 'copies' in %s." % build_file)
raise
try:
files = copy['files']
except KeyError, e:
gyp.common.ExceptionAppend(
e, "Required 'files' key missing for 'copies' in %s." % build_file)
raise
if not files:
# TODO: should probably add a (suppressible) warning;
# a null file list may be unintentional.
continue
if not destdir:
raise Exception(
"Required 'destination' key is empty for 'copies' in %s." % build_file)
fmt = ('\n'
'_outputs = env.Command(%s,\n'
' %s,\n'
' GYPCopy(\'$TARGET\', \'$SOURCE\'))\n')
for f in copy['files']:
# Remove trailing separators so basename() acts like Unix basename and
# always returns the last element, whether a file or dir. Without this,
# only the contents, not the directory itself, are copied (and nothing
# might be copied if dest already exists, since scons thinks nothing needs
# to be done).
dest = os.path.join(destdir, os.path.basename(f.rstrip(os.sep)))
f = FixPath(f, src_subdir_)
dest = FixPath(dest, src_subdir_)
fp.write(fmt % (repr(dest), repr(f)))
fp.write('target_files.extend(_outputs)\n')
run_as = spec.get('run_as')
if run_as:
action = run_as.get('action', [])
working_directory = run_as.get('working_directory')
if not working_directory:
working_directory = gyp_dir
else:
if not os.path.isabs(working_directory):
working_directory = os.path.normpath(os.path.join(gyp_dir,
working_directory))
if run_as.get('environment'):
for (key, val) in run_as.get('environment').iteritems():
action = ['%s="%s"' % (key, val)] + action
action = ['cd', '"%s"' % working_directory, '&&'] + action
fp.write(_run_as_template % {
'action' : pprint.pformat(action),
'message' : run_as.get('message', ''),
})
fmt = "\ngyp_target = env.Alias('%s', target_files)\n"
fp.write(fmt % target_name)
dependencies = spec.get('scons_dependencies', [])
if dependencies:
WriteList(fp, dependencies, preamble='dependencies = [\n ',
postamble='\n]\n')
fp.write('env.Requires(target_files, dependencies)\n')
fp.write('env.Requires(gyp_target, dependencies)\n')
fp.write('for prerequisite in prerequisites:\n')
fp.write(' env.Requires(prerequisite, dependencies)\n')
fp.write('env.Requires(gyp_target, prerequisites)\n')
if run_as:
fp.write(_run_as_template_suffix % {
'target_name': target_name,
})
fp.write('Return("gyp_target")\n')
fp.close()
#############################################################################
# TEMPLATE BEGIN
_wrapper_template = """\
__doc__ = '''
Wrapper configuration for building this entire "solution,"
including all the specific targets in various *.scons files.
'''
import os
import sys
import SCons.Environment
import SCons.Util
def GetProcessorCount():
'''
Detects the number of CPUs on the system. Adapted form:
http://codeliberates.blogspot.com/2008/05/detecting-cpuscores-in-python.html
'''
# Linux, Unix and Mac OS X:
if hasattr(os, 'sysconf'):
if os.sysconf_names.has_key('SC_NPROCESSORS_ONLN'):
# Linux and Unix or Mac OS X with python >= 2.5:
return os.sysconf('SC_NPROCESSORS_ONLN')
else: # Mac OS X with Python < 2.5:
return int(os.popen2("sysctl -n hw.ncpu")[1].read())
# Windows:
if os.environ.has_key('NUMBER_OF_PROCESSORS'):
return max(int(os.environ.get('NUMBER_OF_PROCESSORS', '1')), 1)
return 1 # Default
# Support PROGRESS= to show progress in different ways.
p = ARGUMENTS.get('PROGRESS')
if p == 'spinner':
Progress(['/\\r', '|\\r', '\\\\\\r', '-\\r'],
interval=5,
file=open('/dev/tty', 'w'))
elif p == 'name':
Progress('$TARGET\\r', overwrite=True, file=open('/dev/tty', 'w'))
# Set the default -j value based on the number of processors.
SetOption('num_jobs', GetProcessorCount() + 1)
# Have SCons use its cached dependency information.
SetOption('implicit_cache', 1)
# Only re-calculate MD5 checksums if a timestamp has changed.
Decider('MD5-timestamp')
# Since we set the -j value by default, suppress SCons warnings about being
# unable to support parallel build on versions of Python with no threading.
default_warnings = ['no-no-parallel-support']
SetOption('warn', default_warnings + GetOption('warn'))
AddOption('--mode', nargs=1, dest='conf_list', default=[],
action='append', help='Configuration to build.')
AddOption('--verbose', dest='verbose', default=False,
action='store_true', help='Verbose command-line output.')
#
sconscript_file_map = %(sconscript_files)s
class LoadTarget:
'''
Class for deciding if a given target sconscript is to be included
based on a list of included target names, optionally prefixed with '-'
to exclude a target name.
'''
def __init__(self, load):
'''
Initialize a class with a list of names for possible loading.
Arguments:
load: list of elements in the LOAD= specification
'''
self.included = set([c for c in load if not c.startswith('-')])
self.excluded = set([c[1:] for c in load if c.startswith('-')])
if not self.included:
self.included = set(['all'])
def __call__(self, target):
'''
Returns True if the specified target's sconscript file should be
loaded, based on the initialized included and excluded lists.
'''
return (target in self.included or
('all' in self.included and not target in self.excluded))
if 'LOAD' in ARGUMENTS:
load = ARGUMENTS['LOAD'].split(',')
else:
load = []
load_target = LoadTarget(load)
sconscript_files = []
for target, sconscript in sconscript_file_map.iteritems():
if load_target(target):
sconscript_files.append(sconscript)
target_alias_list= []
conf_list = GetOption('conf_list')
if conf_list:
# In case the same --mode= value was specified multiple times.
conf_list = list(set(conf_list))
else:
conf_list = [%(default_configuration)r]
sconsbuild_dir = Dir(%(sconsbuild_dir)s)
def FilterOut(self, **kw):
kw = SCons.Environment.copy_non_reserved_keywords(kw)
for key, val in kw.items():
envval = self.get(key, None)
if envval is None:
# No existing variable in the environment, so nothing to delete.
continue
for vremove in val:
# Use while not if, so we can handle duplicates.
while vremove in envval:
envval.remove(vremove)
self[key] = envval
# TODO(sgk): SCons.Environment.Append() has much more logic to deal
# with various types of values. We should handle all those cases in here
# too. (If variable is a dict, etc.)
non_compilable_suffixes = {
'LINUX' : set([
'.bdic',
'.css',
'.dat',
'.fragment',
'.gperf',
'.h',
'.hh',
'.hpp',
'.html',
'.hxx',
'.idl',
'.in',
'.in0',
'.in1',
'.js',
'.mk',
'.rc',
'.sigs',
'',
]),
'WINDOWS' : set([
'.h',
'.hh',
'.hpp',
'.dat',
'.idl',
'.in',
'.in0',
'.in1',
]),
}
def compilable(env, file):
base, ext = os.path.splitext(str(file))
if ext in non_compilable_suffixes[env['TARGET_PLATFORM']]:
return False
return True
def compilable_files(env, sources):
return [x for x in sources if compilable(env, x)]
def GypProgram(env, target, source, *args, **kw):
source = compilable_files(env, source)
result = env.Program(target, source, *args, **kw)
if env.get('INCREMENTAL'):
env.Precious(result)
return result
def GypTestProgram(env, target, source, *args, **kw):
source = compilable_files(env, source)
result = env.Program(target, source, *args, **kw)
if env.get('INCREMENTAL'):
env.Precious(*result)
return result
def GypLibrary(env, target, source, *args, **kw):
source = compilable_files(env, source)
result = env.Library(target, source, *args, **kw)
return result
def GypLoadableModule(env, target, source, *args, **kw):
source = compilable_files(env, source)
result = env.LoadableModule(target, source, *args, **kw)
return result
def GypStaticLibrary(env, target, source, *args, **kw):
source = compilable_files(env, source)
result = env.StaticLibrary(target, source, *args, **kw)
return result
def GypSharedLibrary(env, target, source, *args, **kw):
source = compilable_files(env, source)
result = env.SharedLibrary(target, source, *args, **kw)
if env.get('INCREMENTAL'):
env.Precious(result)
return result
def add_gyp_methods(env):
env.AddMethod(GypProgram)
env.AddMethod(GypTestProgram)
env.AddMethod(GypLibrary)
env.AddMethod(GypLoadableModule)
env.AddMethod(GypStaticLibrary)
env.AddMethod(GypSharedLibrary)
env.AddMethod(FilterOut)
env.AddMethod(compilable)
base_env = Environment(
tools = %(scons_tools)s,
INTERMEDIATE_DIR='$OBJ_DIR/${COMPONENT_NAME}/_${TARGET_NAME}_intermediate',
LIB_DIR='$TOP_BUILDDIR/lib',
OBJ_DIR='$TOP_BUILDDIR/obj',
SCONSBUILD_DIR=sconsbuild_dir.abspath,
SHARED_INTERMEDIATE_DIR='$OBJ_DIR/_global_intermediate',
SRC_DIR=Dir(%(src_dir)r),
TARGET_PLATFORM='LINUX',
TOP_BUILDDIR='$SCONSBUILD_DIR/$CONFIG_NAME',
LIBPATH=['$LIB_DIR'],
)
if not GetOption('verbose'):
base_env.SetDefault(
ARCOMSTR='Creating library $TARGET',
ASCOMSTR='Assembling $TARGET',
CCCOMSTR='Compiling $TARGET',
CONCATSOURCECOMSTR='ConcatSource $TARGET',
CXXCOMSTR='Compiling $TARGET',
LDMODULECOMSTR='Building loadable module $TARGET',
LINKCOMSTR='Linking $TARGET',
MANIFESTCOMSTR='Updating manifest for $TARGET',
MIDLCOMSTR='Compiling IDL $TARGET',
PCHCOMSTR='Precompiling $TARGET',
RANLIBCOMSTR='Indexing $TARGET',
RCCOMSTR='Compiling resource $TARGET',
SHCCCOMSTR='Compiling $TARGET',
SHCXXCOMSTR='Compiling $TARGET',
SHLINKCOMSTR='Linking $TARGET',
SHMANIFESTCOMSTR='Updating manifest for $TARGET',
)
add_gyp_methods(base_env)
for conf in conf_list:
env = base_env.Clone(CONFIG_NAME=conf)
SConsignFile(env.File('$TOP_BUILDDIR/.sconsign').abspath)
for sconscript in sconscript_files:
target_alias = env.SConscript(sconscript, exports=['env'])
if target_alias:
target_alias_list.extend(target_alias)
Default(Alias('all', target_alias_list))
help_fmt = '''
Usage: hammer [SCONS_OPTIONS] [VARIABLES] [TARGET] ...
Local command-line build options:
--mode=CONFIG Configuration to build:
--mode=Debug [default]
--mode=Release
--verbose Print actual executed command lines.
Supported command-line build variables:
LOAD=[module,...] Comma-separated list of components to load in the
dependency graph ('-' prefix excludes)
PROGRESS=type Display a progress indicator:
name: print each evaluated target name
spinner: print a spinner every 5 targets
The following TARGET names can also be used as LOAD= module names:
%%s
'''
if GetOption('help'):
def columnar_text(items, width=78, indent=2, sep=2):
result = []
colwidth = max(map(len, items)) + sep
cols = (width - indent) / colwidth
if cols < 1:
cols = 1
rows = (len(items) + cols - 1) / cols
indent = '%%*s' %% (indent, '')
sep = indent
for row in xrange(0, rows):
result.append(sep)
for i in xrange(row, len(items), rows):
result.append('%%-*s' %% (colwidth, items[i]))
sep = '\\n' + indent
result.append('\\n')
return ''.join(result)
load_list = set(sconscript_file_map.keys())
target_aliases = set(map(str, target_alias_list))
common = load_list & target_aliases
load_only = load_list - common
target_only = target_aliases - common
help_text = [help_fmt %% columnar_text(sorted(list(common)))]
if target_only:
fmt = "The following are additional TARGET names:\\n\\n%%s\\n"
help_text.append(fmt %% columnar_text(sorted(list(target_only))))
if load_only:
fmt = "The following are additional LOAD= module names:\\n\\n%%s\\n"
help_text.append(fmt %% columnar_text(sorted(list(load_only))))
Help(''.join(help_text))
"""
# TEMPLATE END
#############################################################################
def GenerateSConscriptWrapper(build_file, build_file_data, name,
output_filename, sconscript_files,
default_configuration):
"""
Generates the "wrapper" SConscript file (analogous to the Visual Studio
solution) that calls all the individual target SConscript files.
"""
output_dir = os.path.dirname(output_filename)
src_dir = build_file_data['_DEPTH']
src_dir_rel = gyp.common.RelativePath(src_dir, output_dir)
if not src_dir_rel:
src_dir_rel = '.'
scons_settings = build_file_data.get('scons_settings', {})
sconsbuild_dir = scons_settings.get('sconsbuild_dir', '#')
scons_tools = scons_settings.get('tools', ['default'])
sconscript_file_lines = ['dict(']
for target in sorted(sconscript_files.keys()):
sconscript = sconscript_files[target]
sconscript_file_lines.append(' %s = %r,' % (target, sconscript))
sconscript_file_lines.append(')')
fp = open(output_filename, 'w')
fp.write(header)
fp.write(_wrapper_template % {
'default_configuration' : default_configuration,
'name' : name,
'scons_tools' : repr(scons_tools),
'sconsbuild_dir' : repr(sconsbuild_dir),
'sconscript_files' : '\n'.join(sconscript_file_lines),
'src_dir' : src_dir_rel,
})
fp.close()
# Generate the SConstruct file that invokes the wrapper SConscript.
dir, fname = os.path.split(output_filename)
SConstruct = os.path.join(dir, 'SConstruct')
fp = open(SConstruct, 'w')
fp.write(header)
fp.write('SConscript(%s)\n' % repr(fname))
fp.close()
def TargetFilename(target, build_file=None, output_suffix=''):
"""Returns the .scons file name for the specified target.
"""
if build_file is None:
build_file, target = gyp.common.ParseQualifiedTarget(target)[:2]
output_file = os.path.join(os.path.dirname(build_file),
target + output_suffix + '.scons')
return output_file
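# Illustrative sketch (paths are assumptions): the .scons file lands next
# to the gyp file that defines the target.
def _example_target_filename():  # pragma: no cover - illustrative only
    out = TargetFilename('base', build_file='src/build/build.gyp')
    assert out == os.path.join('src/build', 'base.scons')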
def PerformBuild(data, configurations, params):
options = params['options']
# Due to the way we test gyp on the chromium trybots
# we need to look for 'scons.py' as well as the more common 'scons'
# TODO(sbc): update the trybots to have a more normal install
# of scons.
scons = 'scons'
paths = os.environ['PATH'].split(os.pathsep)
for scons_name in ['scons', 'scons.py']:
for path in paths:
test_scons = os.path.join(path, scons_name)
print 'looking for: %s' % test_scons
if os.path.exists(test_scons):
print "found scons: %s" % scons
scons = test_scons
break
for config in configurations:
arguments = [scons, '-C', options.toplevel_dir, '--mode=%s' % config]
print "Building [%s]: %s" % (config, arguments)
subprocess.check_call(arguments)
def GenerateOutput(target_list, target_dicts, data, params):
"""
Generates all the output files for the specified targets.
"""
options = params['options']
if options.generator_output:
def output_path(filename):
return filename.replace(params['cwd'], options.generator_output)
else:
def output_path(filename):
return filename
default_configuration = None
for qualified_target in target_list:
spec = target_dicts[qualified_target]
if spec['toolset'] != 'target':
raise Exception(
'Multiple toolsets not supported in scons build (target %s)' %
qualified_target)
scons_target = SCons.Target(spec)
if scons_target.is_ignored:
continue
# TODO: assumes the default_configuration of the first
# non-Default target is the correct default for all targets.
# Need a better model for handling variation between targets.
if (not default_configuration and
spec['default_configuration'] != 'Default'):
default_configuration = spec['default_configuration']
build_file, target = gyp.common.ParseQualifiedTarget(qualified_target)[:2]
output_file = TargetFilename(target, build_file, options.suffix)
if options.generator_output:
output_file = output_path(output_file)
if not spec.has_key('libraries'):
spec['libraries'] = []
# Add dependent static library targets to the 'libraries' value.
deps = spec.get('dependencies', [])
spec['scons_dependencies'] = []
for d in deps:
td = target_dicts[d]
target_name = td['target_name']
spec['scons_dependencies'].append("Alias('%s')" % target_name)
if td['type'] in ('static_library', 'shared_library'):
libname = td.get('product_name', target_name)
spec['libraries'].append('lib' + libname)
if td['type'] == 'loadable_module':
prereqs = spec.get('scons_prerequisites', [])
# TODO: parameterize with <(SHARED_LIBRARY_*) variables?
td_target = SCons.Target(td)
td_target.target_prefix = '${SHLIBPREFIX}'
td_target.target_suffix = '${SHLIBSUFFIX}'
GenerateSConscript(output_file, spec, build_file, data[build_file])
if not default_configuration:
default_configuration = 'Default'
for build_file in sorted(data.keys()):
path, ext = os.path.splitext(build_file)
if ext != '.gyp':
continue
output_dir, basename = os.path.split(path)
output_filename = path + '_main' + options.suffix + '.scons'
all_targets = gyp.common.AllTargets(target_list, target_dicts, build_file)
sconscript_files = {}
for t in all_targets:
scons_target = SCons.Target(target_dicts[t])
if scons_target.is_ignored:
continue
bf, target = gyp.common.ParseQualifiedTarget(t)[:2]
target_filename = TargetFilename(target, bf, options.suffix)
tpath = gyp.common.RelativePath(target_filename, output_dir)
sconscript_files[target] = tpath
output_filename = output_path(output_filename)
if sconscript_files:
GenerateSConscriptWrapper(build_file, data[build_file], basename,
output_filename, sconscript_files,
default_configuration)
|
snbway/flask-rest-framework | refs/heads/master | rest_framework_flask/api_setting.py | 1 | # encoding:utf-8
# Version
DEFAULT_VERSION = '1.0'
ALLOWED_VERSION = ['1.0', '1.1']
VERSION_PARAM = 'v'
PAGE_SIZE = 10
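# Hedged sketch (the helper below is an assumption, not part of the
# project): how a request handler might resolve a version from these
# settings, falling back to DEFAULT_VERSION when the 'v' param is absent.
def resolve_version(requested=None):
    if requested is None:
        return DEFAULT_VERSION
    if requested not in ALLOWED_VERSION:
        raise ValueError('Unsupported API version: %s' % requested)
    return requested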
|
Itxaka/st2 | refs/heads/master | st2common/st2common/util/loader.py | 5 | # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import importlib
import inspect
import json
import os
import sys
import yaml
from st2common.exceptions.plugins import IncompatiblePluginException
from st2common import log as logging
__all__ = [
'register_plugin',
'register_plugin_class'
]
LOG = logging.getLogger(__name__)
PYTHON_EXTENSIONS = ('.py',)
def _register_plugin_path(plugin_dir_abs_path):
if not os.path.isdir(plugin_dir_abs_path):
raise Exception('Directory containing plugins must be provided.')
for x in sys.path:
if plugin_dir_abs_path in (x, x + os.sep):
return
    sys.path.append(plugin_dir_abs_path)


def _get_plugin_module(plugin_file_path):
plugin_module = os.path.basename(plugin_file_path)
if plugin_module.endswith(PYTHON_EXTENSIONS):
plugin_module = plugin_module[:plugin_module.rfind('.py')]
else:
plugin_module = None
return plugin_module
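# For example, _get_plugin_module('/opt/plugins/my_plugin.py') returns
# 'my_plugin', while a non-Python path returns None (illustrative path only).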
def _get_classes_in_module(module):
    # Only return classes that are defined in the module itself, not ones
    # imported into it.
    return [kls for name, kls in inspect.getmembers(module,
            lambda member: inspect.isclass(member) and member.__module__ == module.__name__)]


def _get_plugin_classes(module):
    return _get_classes_in_module(module)


def _get_plugin_methods(plugin_klass):
"""
Return a list of names of all the methods in the provided class.
Note: Abstract methods which are not implemented are excluded from the
list.
:rtype: ``list`` of ``str``
"""
methods = inspect.getmembers(plugin_klass, inspect.ismethod)
# Exclude inherited abstract methods from the parent class
method_names = []
for name, method in methods:
method_properties = method.__dict__
is_abstract = method_properties.get('__isabstractmethod__', False)
if is_abstract:
continue
method_names.append(name)
    return method_names


def _validate_methods(plugin_base_class, plugin_klass):
    '''
    XXX: This is hacky, but we'd like to validate that plugin_klass
    implements at least all of the *abstract* methods declared on
    plugin_base_class.
    '''
expected_methods = plugin_base_class.__abstractmethods__
plugin_methods = _get_plugin_methods(plugin_klass)
for method in expected_methods:
if method not in plugin_methods:
message = 'Class "%s" doesn\'t implement required "%s" method from the base class'
            raise IncompatiblePluginException(message % (plugin_klass.__name__, method))


def _register_plugin(plugin_base_class, plugin_impl):
_validate_methods(plugin_base_class, plugin_impl)
    plugin_base_class.register(plugin_impl)


def register_plugin_class(base_class, file_path, class_name):
"""
Retrieve a register plugin class from the provided file.
This method also validate that the class implements all the abstract methods
from the base plugin class.
:param base_class: Base plugin class.
:param base_class: ``class``
:param file_path: File absolute path to the plugin module file.
:type file_path: ``str``
:param class_name: Class name of a plugin.
:type class_name: ``str``
"""
plugin_dir = os.path.dirname(os.path.realpath(file_path))
_register_plugin_path(plugin_dir)
module_name = _get_plugin_module(file_path)
if module_name is None:
return None
module = importlib.import_module(module_name)
klass = getattr(module, class_name, None)
if not klass:
raise Exception('Plugin file "%s" doesn\'t expose class named "%s"' %
(file_path, class_name))
_register_plugin(base_class, klass)
return klass
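
# A minimal usage sketch (the base class and path below are hypothetical, for
# illustration only):
#
#     from myplugins.base import BasePlugin  # hypothetical abstract base class
#     cls = register_plugin_class(BasePlugin, '/opt/plugins/my_plugin.py',
#                                 'MyPlugin')
#     plugin = cls()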
def register_plugin(plugin_base_class, plugin_abs_file_path):
registered_plugins = []
plugin_dir = os.path.dirname(os.path.realpath(plugin_abs_file_path))
_register_plugin_path(plugin_dir)
module_name = _get_plugin_module(plugin_abs_file_path)
if module_name is None:
return None
module = importlib.import_module(module_name)
klasses = _get_plugin_classes(module)
# Try registering classes in plugin file. Some may fail.
for klass in klasses:
try:
_register_plugin(plugin_base_class, klass)
registered_plugins.append(klass)
except Exception as e:
LOG.exception(e)
LOG.debug('Skipping class %s as it doesn\'t match specs.', klass)
continue
if len(registered_plugins) == 0:
raise Exception('Found no classes in plugin file "%s" matching requirements.' %
(plugin_abs_file_path))
return registered_plugins
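
# A minimal usage sketch (hypothetical path; assumes the file defines at least
# one class implementing the base class's abstract methods):
#
#     classes = register_plugin(BasePlugin, '/opt/plugins/my_plugin.py')
#     for cls in classes:
#         LOG.info('Registered plugin class: %s', cls.__name__)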
ALLOWED_EXTS = ['.json', '.yaml', '.yml']
PARSER_FUNCS = {'.json': json.load, '.yml': yaml.safe_load, '.yaml': yaml.safe_load}


def load_meta_file(file_path):
if not os.path.isfile(file_path):
raise Exception('File "%s" does not exist.' % file_path)
file_name, file_ext = os.path.splitext(file_path)
if file_ext not in ALLOWED_EXTS:
raise Exception('Unsupported meta type %s, file %s. Allowed: %s' %
(file_ext, file_path, ALLOWED_EXTS))
with open(file_path, 'r') as f:
return PARSER_FUNCS[file_ext](f)
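
# A minimal usage sketch (hypothetical metadata file path):
#
#     meta = load_meta_file('/opt/packs/my_pack/actions/my_action.yaml')
#     print(meta['name'])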
|