code
stringlengths 3
1.05M
| repo_name
stringlengths 5
104
| path
stringlengths 4
251
| language
stringclasses 1
value | license
stringclasses 15
values | size
int64 3
1.05M
|
---|---|---|---|---|---|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class Dependency(Model):
    """Deployment dependency information.

    :param depends_on: The list of dependencies.
    :type depends_on: list of :class:`BasicDependency
     <azure.mgmt.resource.resources.v2017_05_10.models.BasicDependency>`
    :param id: The ID of the dependency.
    :type id: str
    :param resource_type: The dependency resource type.
    :type resource_type: str
    :param resource_name: The dependency resource name.
    :type resource_name: str
    """

    # Maps Python attribute names to their wire (JSON) keys and msrest types.
    _attribute_map = {
        'depends_on': {'key': 'dependsOn', 'type': '[BasicDependency]'},
        'id': {'key': 'id', 'type': 'str'},
        'resource_type': {'key': 'resourceType', 'type': 'str'},
        'resource_name': {'key': 'resourceName', 'type': 'str'},
    }

    def __init__(self, depends_on=None, id=None, resource_type=None, resource_name=None):
        # NOTE(review): generated code does not call super().__init__();
        # confirm msrest.serialization.Model tolerates this before changing.
        self.depends_on = depends_on
        self.id = id
        self.resource_type = resource_type
        self.resource_name = resource_name
| SUSE/azure-sdk-for-python | azure-mgmt-resource/azure/mgmt/resource/resources/v2017_05_10/models/dependency.py | Python | mit | 1,508 |
from clarifai_basic import ClarifaiCustomModel
import os
import urllib2, socket
# instantiate clarifai client
clarifai = ClarifaiCustomModel()
p=os.getcwd()
p=p.replace('\\','/')
#XXXXXXXXXXXXXXXXXXX CAR XXXXXXXXXXXXXXXXXXXXXXXXXXXXX
POSITIVES = []
pos=p+"/images/cars.txt"
with open(pos) as f:
POSITIVES = [x.strip('\n') for x in f.readlines()]
NEGATIVES = []
neg=p+"/images/bikes.txt"
with open(neg) as f:
NEGATIVES.extend([x.strip('\n') for x in f.readlines()])
neg=p+"/images/buses.txt"
with open(neg) as f:
NEGATIVES.extend([x.strip('\n') for x in f.readlines()])
neg=p+"/images/motorbikes.txt"
with open(neg) as f:
NEGATIVES.extend([x.strip('\n') for x in f.readlines()])
for i in POSITIVES:
try:
urllib2.urlopen(i)
except urllib2.HTTPError, e:
POSITIVES.remove(i)
except urllib2.URLError, e:
POSITIVES.remove(i)
for i in NEGATIVES:
try:
urllib2.urlopen(i)
except urllib2.HTTPError, e:
NEGATIVES.remove(i)
except urllib2.URLError, e:
NEGATIVES.remove(i)
concept_name = 'car'
for positive_example in POSITIVES:
try:
clarifai.positive(positive_example, concept_name)
except socket.gaierror:
print 'ignoring failed address lookup for: ', positive_example
for negative_example in NEGATIVES:
try:
clarifai.negative(negative_example, concept_name)
except socket.gaierror:
print 'ignoring failed address lookup for: ', negative_example
clarifai.train(concept_name)
concept_name = 'automobile'
for positive_example in POSITIVES:
clarifai.positive(positive_example, concept_name)
clarifai.train(concept_name)
concept_name = 'auto'
for positive_example in POSITIVES:
clarifai.positive(positive_example, concept_name)
clarifai.train(concept_name)
concept_name = 'vehicle'
for positive_example in POSITIVES:
clarifai.positive(positive_example, concept_name)
clarifai.train(concept_name)
#XXXXXXXXXXXXXXXXXXX BIKE XXXXXXXXXXXXXXXXXXXXXXXXXXXXX
POSITIVES = []
pos=p+"/images/bikes.txt"
with open(pos) as f:
POSITIVES = [x.strip('\n') for x in f.readlines()]
NEGATIVES = []
neg=p+"/images/cars.txt"
with open(neg) as f:
NEGATIVES.extend([x.strip('\n') for x in f.readlines()])
neg=p+"/images/buses.txt"
with open(neg) as f:
NEGATIVES.extend([x.strip('\n') for x in f.readlines()])
neg=p+"/images/motorbikes.txt"
with open(neg) as f:
NEGATIVES.extend([x.strip('\n') for x in f.readlines()])
for i in POSITIVES:
try:
urllib2.urlopen(i)
except urllib2.HTTPError, e:
POSITIVES.remove(i)
except urllib2.URLError, e:
POSITIVES.remove(i)
for i in NEGATIVES:
try:
urllib2.urlopen(i)
except urllib2.HTTPError, e:
NEGATIVES.remove(i)
except urllib2.URLError, e:
NEGATIVES.remove(i)
concept_name = 'bike'
for positive_example in POSITIVES:
clarifai.positive(positive_example, concept_name)
for negative_example in NEGATIVES:
clarifai.negative(negative_example, concept_name)
clarifai.train(concept_name)
concept_name = 'bicycle'
for positive_example in POSITIVES:
clarifai.positive(positive_example, concept_name)
for negative_example in NEGATIVES:
clarifai.negative(negative_example, concept_name)
clarifai.train(concept_name)
concept_name = 'vehicle'
for positive_example in POSITIVES:
clarifai.positive(positive_example, concept_name)
clarifai.train(concept_name)
#XXXXXXXXXXXXXXXXXXX BUSES XXXXXXXXXXXXXXXXXXXXXXXXXXXXX
POSITIVES = []
pos=p+"/images/buses.txt"
with open(pos) as f:
POSITIVES = [x.strip('\n') for x in f.readlines()]
NEGATIVES = []
neg=p+"/images/cars.txt"
with open(neg) as f:
NEGATIVES.extend([x.strip('\n') for x in f.readlines()])
neg=p+"/images/bikes.txt"
with open(neg) as f:
NEGATIVES.extend([x.strip('\n') for x in f.readlines()])
neg=p+"/images/motorbikes.txt"
with open(neg) as f:
NEGATIVES.extend([x.strip('\n') for x in f.readlines()])
for i in POSITIVES:
try:
urllib2.urlopen(i)
except urllib2.HTTPError, e:
POSITIVES.remove(i)
except urllib2.URLError, e:
POSITIVES.remove(i)
for i in NEGATIVES:
try:
urllib2.urlopen(i)
except urllib2.HTTPError, e:
NEGATIVES.remove(i)
except urllib2.URLError, e:
NEGATIVES.remove(i)
concept_name = 'bus'
for positive_example in POSITIVES:
clarifai.positive(positive_example, concept_name)
for negative_example in NEGATIVES:
clarifai.negative(negative_example, concept_name)
clarifai.train(concept_name)
concept_name = 'vehicle'
for positive_example in POSITIVES:
clarifai.positive(positive_example, concept_name)
clarifai.train(concept_name)
#XXXXXXXXXXXXXXXXXXX MOTORBIKE XXXXXXXXXXXXXXXXXXXXXXXXXXXXX
POSITIVES = []
pos=p+"/images/motorbikes.txt"
with open(pos) as f:
POSITIVES = [x.strip('\n') for x in f.readlines()]
NEGATIVES = []
neg=p+"/images/bikes.txt"
with open(neg) as f:
NEGATIVES.extend([x.strip('\n') for x in f.readlines()])
neg=p+"/images/buses.txt"
with open(neg) as f:
NEGATIVES.extend([x.strip('\n') for x in f.readlines()])
neg=p+"/images/cars.txt"
with open(neg) as f:
NEGATIVES.extend([x.strip('\n') for x in f.readlines()])
for i in POSITIVES:
try:
urllib2.urlopen(i)
except urllib2.HTTPError, e:
POSITIVES.remove(i)
except urllib2.URLError, e:
POSITIVES.remove(i)
for i in NEGATIVES:
try:
urllib2.urlopen(i)
except urllib2.HTTPError, e:
NEGATIVES.remove(i)
except urllib2.URLError, e:
NEGATIVES.remove(i)
concept_name = 'motorbike'
for positive_example in POSITIVES:
clarifai.positive(positive_example, concept_name)
for negative_example in NEGATIVES:
clarifai.negative(negative_example, concept_name)
clarifai.train(concept_name)
concept_name = 'motorcycle'
for positive_example in POSITIVES:
clarifai.positive(positive_example, concept_name)
for negative_example in NEGATIVES:
clarifai.negative(negative_example, concept_name)
clarifai.train(concept_name)
concept_name = 'vehicle'
for positive_example in POSITIVES:
clarifai.positive(positive_example, concept_name)
clarifai.train(concept_name)
#XXXXXXXXXXXXXXXXXXX PEOPLE XXXXXXXXXXXXXXXXXXXXXXXXXXXXX
POSITIVES = []
pos=p+"/images/people.txt"
with open(pos) as f:
POSITIVES = [x.strip('\n') for x in f.readlines()]
NEGATIVES = []
neg=p+"/images/nobody.txt"
with open(neg) as f:
NEGATIVES.extend([x.strip('\n') for x in f.readlines()])
for i in POSITIVES:
try:
urllib2.urlopen(i)
except urllib2.HTTPError, e:
POSITIVES.remove(i)
except urllib2.URLError, e:
POSITIVES.remove(i)
for i in NEGATIVES:
try:
urllib2.urlopen(i)
except urllib2.HTTPError, e:
NEGATIVES.remove(i)
except urllib2.URLError, e:
NEGATIVES.remove(i)
concept_name = 'people'
for positive_example in POSITIVES:
clarifai.positive(positive_example, concept_name)
for negative_example in NEGATIVES:
clarifai.negative(negative_example, concept_name)
clarifai.train(concept_name)
#XXXXXXXXXXXXXXXXXXX USAGE XXXXXXXXXXXXXXXXXXXXXXXXXXXXX
EXAMPLES = [
'https://blog-blogmediainc.netdna-ssl.com/SportsBlogcom/filewarehouse/37676/4b8c8d0728f0fb6e78b1071445770fab.jpg',
'http://www.africacradle.com/wp-content/uploads/2015/08/article-2441512-02650200000005DC-411_634x380.jpg'
]
NOT = [
'https://clarifai-test.s3.amazonaws.com/2141620332_2b741028b3.jpg',
'https://clarifai-test.s3.amazonaws.com/grateful_dead230582_15-52.jpg'
]
for test in EXAMPLES + NOT:
result = clarifai.predict(test, 'car')
ans = 'ACCEPTED' if (result['urls'][0]['score']>0.7) else 'REJECTED'
print result['status']['message'], "%0.3f" % result['urls'][0]['score'], ans, result['urls'][0]['url']
| shivansh-pro/Sights | customTrainer.py | Python | mit | 7,757 |
#Copyright (c) 2016 Vladimir Vorobev.
#
#Permission is hereby granted, free of charge, to any person obtaining a copy
#of this software and associated documentation files (the "Software"), to deal
#in the Software without restriction, including without limitation the rights
#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the Software is
#furnished to do so, subject to the following conditions:
#
#The above copyright notice and this permission notice shall be included in
#all copies or substantial portions of the Software.
#
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
#THE SOFTWARE.
import json
import datetime
import random
import struct
from enum import Enum
from actorbot.utils import logger
def random_id(id):
    """Build a quasi-unique message id from the current timestamp
    followed by the last five characters of *id*."""
    stamp = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
    return stamp + ('%s' % id[-5:])
class Services(Enum):
    # API service endpoints that outgoing messages can be addressed to.
    KeyValue = 'keyvalue'
    Messaging = 'messaging'
    Bots = 'bots'
    WebHooks = 'webhooks'
    Users = 'users'
    Groups = 'groups'
    Stickers = 'stickers'
    Files = 'files'
class BaseMessage(object):
    """Wrapper around a JSON-like dict that exposes its keys as
    attributes; nested dicts are wrapped again on access so attribute
    lookups chain."""

    class SetNotExistAttr(Exception): pass

    def __init__(self, data):
        """Store *data* if it is a dict; anything else yields an empty dict."""
        if isinstance(data, dict):
            self._data = data
        else:
            self._data = {}

    def __getattr__(self, attr):
        """Look up *attr* in the wrapped dict; missing keys yield None.

        A nested dict value is returned wrapped in a BaseMessage.
        """
        attr = self._data.get(attr, None)  # note: rebinds the parameter
        if isinstance(attr, dict):
            return BaseMessage(attr)
        else:
            return attr

    def __setattr__(self, attr, value):
        """Allow assignment only to keys already present in the dict.

        '_data' itself bypasses the check; assigning to a key holding a
        nested dict is silently ignored; assigning to a missing key
        raises SetNotExistAttr.
        """
        if attr == '_data':
            self.__dict__[attr] = value
        else:
            item = self.__dict__.get('_data').get(attr, None)
            if item is not None:
                if not isinstance(item, dict):
                    self.__dict__['_data'][attr] = value
            else:
                raise self.SetNotExistAttr(attr)

    def to_str(self):
        """Serialize the wrapped dict to a JSON string.

        The trailing replace('\"', '"') is a no-op ('\"' == '"' in Python).
        """
        return json.dumps(self._data).replace('\"', '"')

    @property
    def data(self):
        """The wrapped dict."""
        return self._data
class MessageOut(BaseMessage):
    """Outgoing envelope: type, id, target service, and a JSON body."""

    def __init__(self, message_id, service, body, message_type='Request'):
        payload = json.loads(body.to_str())
        super().__init__({
            'type': message_type,
            'id': str(message_id),
            'service': service.value,
            'body': payload,
        })
class Body(BaseMessage):
    """Message body; keyword values with a to_str() method are embedded
    as parsed JSON, all other values are stored as-is."""

    def __init__(self, body_type, **kwargs):
        payload = {'type': body_type}
        for key, val in kwargs.items():
            payload[key] = json.loads(val.to_str()) if hasattr(val, 'to_str') else val
        super().__init__(payload)
class Peer(BaseMessage):
    """Addressing block identifying a conversation peer."""

    def __init__(self, peer_type, peer_id, accessHash):
        super().__init__({
            'type': peer_type,
            'id': str(peer_id),
            'accessHash': accessHash,
        })
class FileLocation(BaseMessage):
    """Reference to an uploaded file by id and access hash."""

    def __init__(self, fileId, accessHash):
        super().__init__({'fileId': str(fileId), 'accessHash': accessHash})
class FileBytes(BaseMessage):
    """Raw file content carried as a list of byte values."""

    def __init__(self, bytestr):
        super().__init__({'fileBytes': list(bytestr)})
class BotCommand(BaseMessage):
    """Description of a slash command exposed by the bot."""

    def __init__(self, slashCommand, description, locKey):
        super().__init__({
            'slashCommand': slashCommand,
            'description': description,
            'locKey': locKey,
        })
| unreg/actorbot | actorbot/api/basemessage.py | Python | mit | 4,135 |
import unittest
import logging
import pickle
import inv_erf
from elo import probability_points as prob
logging.basicConfig(level=logging.INFO)
class TestInvErf(unittest.TestCase):
    """Tests for the inv_erf module: sign, inverse erf, sigma and spreads."""

    def setUp(self):
        # Restore a pickled RNG state so the seed-dependent assertions
        # in test_spread are reproducible.
        with open('random_state.pickle', 'rb') as f:
            state = pickle.load(f)
        inv_erf.random.setstate(state)

    def test_sign(self):
        """sign() returns +/-1.0 and rejects non-numeric input."""
        self.assertEqual(inv_erf.sign(2.1), 1.0)
        self.assertEqual(inv_erf.sign(-4.5), -1.0)
        with self.assertRaises(TypeError):
            inv_erf.sign('test')

    def test_inv_erf(self):
        """inv_erf inverts math.erf to ~3 decimals on (-1, 1)."""
        from math import erf
        self.assertEqual(inv_erf.inv_erf(0.0), 0.0)
        with self.assertRaises(ValueError):
            inv_erf.inv_erf(1.5)
        with self.assertRaises(ValueError):
            inv_erf.inv_erf(-1.5)
        with self.assertRaises(TypeError):
            inv_erf.inv_erf('0.0')
        for x in [0.1, 0.25, 0.4, 0.6, 0.75, 0.9]:
            self.assertAlmostEqual(inv_erf.inv_erf(erf(x)), x, 3)

    def test_get_sigma(self):
        """get_sigma rejects probabilities outside [0, 1] and stays > 11.08."""
        with self.assertRaises(ValueError):
            inv_erf.get_sigma(13.0, 1.2)
        with self.assertRaises(ValueError):
            inv_erf.get_sigma(-3.0, -0.2)
        for pts in [3., 7., 13., 21., 45.]:
            self.assertGreater(prob(pts), 0.5)
            self.assertGreater(inv_erf.get_sigma(pts, prob(pts)), 11.08)
            self.assertLess(prob(-pts), 0.5)
            # Sigma is always positive
            self.assertGreater(inv_erf.get_sigma(-pts, prob(-pts)), 11.08)

    def test_spread(self):
        # Seed was fixed in `setUp`, exploit it:
        self.assertEqual(inv_erf.get_spread(10.0, prob(10.0)), 16)
        # Now try to aggregate using known random values
        N = 10000
        pt = 14.0
        random_data = [inv_erf.get_spread(pt, prob(pt), random_state=42) for _ in range(N)]
        self.assertEqual(sum(random_data), 140461)
        self.assertEqual(sum(x == 0 for x in random_data), 6)
        # Now try to aggregate using unknown random values
        inv_erf.random.seed()
        random_data = sum(inv_erf.get_spread(pt, prob(pt)) for _ in range(N))
        self.assertGreater(random_data, (pt - 0.5) * N)
        self.assertLess(random_data, (pt + 0.5) * N)
        # Test using known-value for `sigma`; sigma = 0.0 is non-random
        self.assertEqual(inv_erf.get_spread(8.0, prob(8.0), 0.0), 8)
        with self.assertRaises(TypeError):
            inv_erf.get_spread('test', 0.75)
        with self.assertRaises(TypeError):
            inv_erf.get_spread(3.4, 'test')
        with self.assertRaises(TypeError):
            inv_erf.get_spread(3.4, 0.6, 'test')
if __name__ == '__main__':
    # Run the test suite when this file is executed directly.
    unittest.main()
| lbianch/nfl_elo | tests/test_inv_erf.py | Python | mit | 2,693 |
from sympy import (Symbol, Rational, Order, C, exp, ln, log, O, var, nan, pi,
S, Integral, sin, conjugate, expand, transpose)
from sympy.utilities.pytest import XFAIL, raises
from sympy.abc import w, x, y, z
def test_caching_bug():
    #needs to be a first test, so that all caches are clean
    #cache it
    e = O(w)
    #and test that this won't raise an exception
    f = O(w**(-1/x/log(3)*log(5)), w)

def test_simple_1():
    """Basic construction: constants absorbed, nested Orders collapsed."""
    o = Rational(0)
    assert Order(2*x) == Order(x)
    assert Order(x)*3 == Order(x)
    assert -28*Order(x) == Order(x)
    assert Order(Order(x)) == Order(x)
    assert Order(Order(x), y) == Order(Order(x), x, y)
    assert Order(-23) == Order(1)
    assert Order(exp(x)) == Order(1,x)
    assert Order(exp(1/x)).expr == exp(1/x)
    assert Order(x*exp(1/x)).expr == x*exp(1/x)
    assert Order(x**(o/3)).expr == x**(o/3)
    assert Order(x**(5*o/3)).expr == x**(5*o/3)
    assert Order(x**2 + x + y, x) == O(1, x)
    assert Order(x**2 + x + y, y) == O(1, y)
    raises(NotImplementedError, lambda: Order(x, 2 - x))

def test_simple_2():
    """Multiplication and division of an Order by expressions."""
    assert Order(2*x)*x == Order(x**2)
    assert Order(2*x)/x == Order(1,x)
    assert Order(2*x)*x*exp(1/x) == Order(x**2*exp(1/x))
    assert (Order(2*x)*x*exp(1/x)/ln(x)**3).expr == x**2*exp(1/x)*ln(x)**-3

def test_simple_3():
    """Addition of plain expressions to an Order term."""
    assert Order(x)+x == Order(x)
    assert Order(x)+2 == 2+Order(x)
    assert Order(x)+x**2 == Order(x)
    assert Order(x)+1/x == 1/x+Order(x)
    assert Order(1/x)+1/x**2 == 1/x**2+Order(1/x)
    assert Order(x)+exp(1/x) == Order(x)+exp(1/x)

def test_simple_4():
    """Integer powers of Order terms."""
    assert Order(x)**2 == Order(x**2)
    assert Order(x**3)**-2 == Order(x**-6)

def test_simple_5():
    """Sum of two Orders keeps the dominant one (as x -> 0)."""
    assert Order(x)+Order(x**2) == Order(x)
    assert Order(x)+Order(x**-2) == Order(x**-2)
    assert Order(x)+Order(1/x) == Order(1/x)

def test_simple_6():
    """More absorption rules for sums and differences of Orders."""
    assert Order(x)-Order(x) == Order(x)
    assert Order(x)+Order(1) == Order(1)
    assert Order(x)+Order(x**2) == Order(x)
    assert Order(1/x)+Order(1) == Order(1/x)
    assert Order(x)+Order(exp(1/x)) == Order(exp(1/x))
    assert Order(x**3)+Order(exp(2/x)) == Order(exp(2/x))
    assert Order(x**-3)+Order(exp(2/x)) == Order(exp(2/x))

def test_simple_7():
    """Numbers are absorbed into O(1)."""
    assert 1+O(1) == O(1)
    assert 2+O(1) == O(1)
    assert x+O(1) == O(1)
    assert 1/x+O(1) == 1/x+O(1)
def test_as_expr_variables():
    """as_expr_variables merges the given symbols with the Order's own."""
    assert Order(x).as_expr_variables(None) == (x, (x,))
    assert Order(x).as_expr_variables((x,)) == (x, (x,))
    assert Order(y).as_expr_variables((x,)) == (y, (x, y))
    assert Order(y).as_expr_variables((x, y)) == (y, (x, y))

def test_contains_0():
    """O(1) containment is symmetric regardless of the listed variable."""
    assert Order(1,x).contains(Order(1,x))
    assert Order(1,x).contains(Order(1))
    assert Order(1).contains(Order(1,x))

def test_contains_1():
    """contains() compares growth rates of single-variable Orders."""
    assert Order(x).contains(Order(x))
    assert Order(x).contains(Order(x**2))
    assert not Order(x**2).contains(Order(x))
    assert not Order(x).contains(Order(1/x))
    assert not Order(1/x).contains(Order(exp(1/x)))
    assert not Order(x).contains(Order(exp(1/x)))
    assert Order(1/x).contains(Order(x))
    assert Order(exp(1/x)).contains(Order(x))
    assert Order(exp(1/x)).contains(Order(1/x))
    assert Order(exp(1/x)).contains(Order(exp(1/x)))
    assert Order(exp(2/x)).contains(Order(exp(1/x)))
    assert not Order(exp(1/x)).contains(Order(exp(2/x)))

def test_contains_2():
    # Containment of Orders in independent variables may be
    # undecidable, in which case contains() returns None.
    assert Order(x).contains(Order(y)) is None
    assert Order(x).contains(Order(y*x))
    assert Order(y*x).contains(Order(x))
    assert Order(y).contains(Order(x*y))
    assert Order(x).contains(Order(y**2*x))

def test_contains_3():
    """Mixed-degree bivariate Orders are incomparable -> None."""
    assert Order(x*y**2).contains(Order(x**2*y)) is None
    assert Order(x**2*y).contains(Order(x*y**2)) is None
def test_add_1():
    """Sums inside Order reduce to their dominant term."""
    assert Order(x+x) == Order(x)
    assert Order(3*x-2*x**2) == Order(x)
    assert Order(1+x) == Order(1,x)
    assert Order(1+1/x) == Order(1/x)
    assert Order(ln(x)+1/ln(x)) == Order(ln(x))
    assert Order(exp(1/x)+x) == Order(exp(1/x))
    assert Order(exp(1/x)+1/x**20) == Order(exp(1/x))

def test_ln_args():
    """Logarithms with scaled/powered arguments share the same Order."""
    assert O(log(x)) + O(log(2*x)) == O(log(x))
    assert O(log(x)) + O(log(x**3)) == O(log(x))
    assert O(log(x*y)) + O(log(x)+log(y)) == O(log(x*y))

def test_multivar_0():
    """Orders in several variables; restriction to one variable."""
    assert Order(x*y).expr == x*y
    assert Order(x*y**2).expr == x*y**2
    assert Order(x*y,x).expr == x
    assert Order(x*y**2,y).expr == y**2
    assert Order(x*y*z).expr == x*y*z
    assert Order(x/y).expr == x/y
    assert Order(x*exp(1/y)).expr == x*exp(1/y)
    assert Order(exp(x)*exp(1/y)).expr == exp(1/y)

def test_multivar_0a():
    """Product of exponentials is merged into a single exponent."""
    assert Order(exp(1/x)*exp(1/y)).expr == exp(1/x + 1/y)

def test_multivar_1():
    """Sums of variables inside a multivariate Order."""
    assert Order(x+y).expr == x+y
    assert Order(x+2*y).expr == x+y
    assert (Order(x+y)+x).expr == (x+y)
    assert (Order(x+y)+x**2) == Order(x+y)
    assert (Order(x+y)+1/x) == 1/x+Order(x+y)
    assert Order(x**2+y*x).expr == x**2+y*x

def test_multivar_2():
    assert Order(x**2*y+y**2*x,x,y).expr == x**2*y+y**2*x

def test_multivar_mul_1():
    assert Order(x+y)*x == Order(x**2+y*x,x,y)

def test_multivar_3():
    # args order is unspecified, so accept either ordering
    assert (Order(x)+Order(y)).args in [
        (Order(x), Order(y)),
        (Order(y), Order(x))]
    assert Order(x)+Order(y)+Order(x+y) == Order(x+y)
    assert (Order(x**2*y)+Order(y**2*x)).args in [
        (Order(x*y**2), Order(y*x**2)),
        (Order(y*x**2), Order(x*y**2))]
    assert (Order(x**2*y)+Order(y*x)) == Order(x*y)
def test_issue369():
    x = Symbol('x')
    y = Symbol('y', negative=True)
    z = Symbol('z', complex=True)
    # check that Order does not modify assumptions about symbols
    Order(x)
    Order(y)
    Order(z)
    assert x.is_positive == None
    assert y.is_positive == False
    assert z.is_positive == None
    assert x.is_infinitesimal == None
    assert y.is_infinitesimal == None
    assert z.is_infinitesimal == None

def test_leading_order():
    """extract_leading_order picks the dominant term of a sum."""
    assert (x+1+1/x**5).extract_leading_order(x) == ((1/x**5, O(1/x**5)),)
    assert (1+1/x).extract_leading_order(x) == ((1/x, O(1/x)),)
    assert (1+x).extract_leading_order(x) == ((1, O(1, x)),)
    assert (1+x**2).extract_leading_order(x) == ((1, O(1, x)),)
    assert (2+x**2).extract_leading_order(x) == ((2, O(1, x)),)
    assert (x+x**2).extract_leading_order(x) == ((x, O(x)),)

def test_leading_order2():
    """Several terms can share the leading order; compare as sets."""
    assert set((2+pi+x**2).extract_leading_order(x)) == set(((pi, O(1, x)),
        (S(2), O(1, x))))
    assert set((2*x+pi*x+x**2).extract_leading_order(x)) == set(((2*x, O(x)),
        (x*pi, O(x))))
def test_order_leadterm():
    assert O(x**2)._eval_as_leading_term(x) == O(x**2)

def test_order_symbols():
    """Order collects the symbols of a composite expression."""
    e = x*y*sin(x)*Integral(x, (x, 1, 2))
    assert O(e) == O(x**2*y, x, y)
    assert O(e, x) == O(x**2)

def test_nan():
    """nan passes through Order and is contained in nothing."""
    assert O(nan) == nan
    assert not O(x).contains(nan)

def test_O1():
    """Multiplying O(1) by x depends on the Order's variable."""
    assert O(1, x) * x == O(x)
    assert O(1, y) * x == O(1, y)

def test_getn():
    # other lines are tested incidentally by the suite
    assert O(x).getn() == 1
    assert O(x/log(x)).getn() == 1
    assert O(x**2/log(x)**2).getn() == 2
    assert O(x*log(x)).getn() == 1
    raises(NotImplementedError, lambda: (O(x) + O(y)).getn())

def test_diff():
    assert O(x**2).diff(x) == O(x)

def test_getO():
    """getO/removeO split an expression into its Order part and the rest."""
    assert (x).getO() is None
    assert (x).removeO() == x
    assert (O(x)).getO() == O(x)
    assert (O(x)).removeO() == 0
    assert (z + O(x) + O(y)).getO() == O(x) + O(y)
    assert (z + O(x) + O(y)).removeO() == z
    raises(NotImplementedError, lambda: (O(x)+O(y)).getn())

def test_leading_term():
    from sympy import digamma
    assert O(1/digamma(1/x)) == O(1/log(x))

def test_eval():
    """Substitution in Order expressions."""
    y = Symbol('y')
    from sympy import Basic
    assert Order(x).subs(Order(x), 1) == 1
    assert Order(x).subs(x, y) == Order(y)
    assert Order(x).subs(y, x) == Order(x)
    assert Order(x).subs(x, x + y) == Order(x + y)
    assert (O(1)**x).is_Pow

def test_oseries():
    assert Order(x).oseries(x) == Order(x)
@XFAIL
def test_issue_1180():
    # NOTE(review): `symbols` is not in this file's imports; the XFAIL
    # decorator masks the resulting NameError.
    a, b = symbols('a b')
    assert O(a+b,a,b)+O(1,a,b) == O(1, a, b)

@XFAIL
def test_issue_1756():
    # NOTE(review): `Function` is not imported either; see above.
    x = Symbol('x')
    f = Function('f')
    g = Function('g')
    assert 1/O(1) != O(1)
    assert 1/O(x) != O(1/x)
    assert 1/O(f(x)) != O(1/x)

def test_order_conjugate_transpose():
    """conjugate/transpose commute with Order."""
    x = Symbol('x', real=True)
    y = Symbol('y', imaginary=True)
    assert conjugate(Order(x)) == Order(conjugate(x))
    assert conjugate(Order(y)) == Order(conjugate(y))
    assert conjugate(Order(x**2)) == Order(conjugate(x)**2)
    assert conjugate(Order(y**2)) == Order(conjugate(y)**2)
    assert transpose(Order(x)) == Order(transpose(x))
    assert transpose(Order(y)) == Order(transpose(y))
    assert transpose(Order(x**2)) == Order(transpose(x)**2)
    assert transpose(Order(y**2)) == Order(transpose(y)**2)

def test_order_noncommutative():
    """Order interacts correctly with noncommutative symbols."""
    A = Symbol('A', commutative=False)
    x = Symbol('x')
    assert Order(A + A*x, x) == Order(1, x)
    assert (A + A*x)*Order(x) == Order(x)
    assert (A*x)*Order(x) == Order(x**2, x)
    assert expand((1 + Order(x))*A*A*x) == A*A*x + Order(x**2, x)
    assert expand((A*A + Order(x))*x) == A*A*x + Order(x**2, x)
    assert expand((A + Order(x))*A*x) == A*A*x + Order(x**2, x)
| flacjacket/sympy | sympy/series/tests/test_order.py | Python | bsd-3-clause | 9,210 |
"""
Given four lines in general position,
there are two lines which meet all four given lines.
With Pieri homotopies we can solve this Schubert problem.
For the verification of the intersection conditions, numpy is used.
The plots are made with matplotlib.
"""
from numpy import zeros, array, concatenate, matrix
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
def indices(name):
    """Split a variable name of the form ``xij`` into the
    integer pair (i, j)."""
    row, col = name[1], name[2]
    return (int(row), int(col))
def solution_plane(rows, cols, sol):
    """
    Returns a numpy array of shape (rows, cols) representing the
    solution plane encoded by the string representation of a solution
    in sol.  The top cols-by-cols part starts as the identity
    (localization pattern); the named coordinates xij of the solution
    fill in the remaining entries.
    """
    from phcpy.solutions import coordinates
    result = zeros((rows, cols), dtype=complex)
    for k in range(cols):
        result[k][k] = 1
    (vars, vals) = coordinates(sol)  # variable names xij and their values
    for (name, value) in zip(vars, vals):
        i, j = indices(name)
        result[i-1][j-1] = value  # names are 1-indexed
    return result
def verify_determinants(inps, sols, verbose=True):
    """
    Verifies the intersection conditions: for every pair of an input
    plane and a solution plane, the determinant of the square matrix
    formed by placing their columns side by side is computed.
    Both inps and sols are lists of numpy arrays.
    Returns the sum of the absolute values of all those determinants;
    a sum close to zero confirms the intersection conditions.
    If verbose, each solution and determinant is printed to screen.
    """
    from numpy.linalg import det
    total = 0
    for solplane in sols:
        if verbose:
            print('checking solution\n', solplane)
        for inplane in inps:
            square = matrix(concatenate([inplane, solplane], axis=-1))
            value = det(square)
            if verbose:
                print('the determinant :', value)
            total += abs(value)
    return total
def solve_general(mdim, pdim, qdeg):
    """
    Solves a general instance of Pieri problem, computing the
    p-plane producing curves of degree qdeg which meet a number
    of general m-planes at general interpolation points,
    where p = pdim and m = mdim on input.
    For the problem of computing the two lines which meet
    four general lines, mdim = 2, pdim = 2, and qdeg = 0.
    Returns a tuple with four lists.
    The first two lists contain matrices with the input planes
    and the solution planes respectively.
    The third list is the list of polynomials solved
    and the last list is the solution list.
    """
    from phcpy.schubert import random_complex_matrix
    from phcpy.schubert import run_pieri_homotopies
    # number of intersection conditions that determine the p-plane
    dim = mdim*pdim + qdeg*(mdim+pdim)
    ranplanes = [random_complex_matrix(mdim+pdim, mdim) for _ in range(0, dim)]
    (pols, sols) = run_pieri_homotopies(mdim, pdim, qdeg, ranplanes, \
        verbose=False)
    inplanes = [array(plane) for plane in ranplanes]
    outplanes = [solution_plane(mdim+pdim, pdim, sol) for sol in sols]
    return (inplanes, outplanes, pols, sols)
def solve_real(mdim, pdim, start, sols):
    """
    Solves a real instance of Pieri problem, for input planes
    of dimension mdim osculating a rational normal curve.
    The start system in start with its solutions in sols is used
    to track paths to the solutions of the real target system.
    On return are the real input planes, the solution planes of
    dimension pdim, the target system, and its solutions.
    """
    from phcpy.schubert import real_osculating_planes
    from phcpy.schubert import make_pieri_system
    from phcpy.trackers import track
    oscplanes = real_osculating_planes(mdim, pdim, 0)
    target = make_pieri_system(mdim, pdim, 0, oscplanes, False)
    rtsols = track(target, start, sols)
    inplanes = [array(plane) for plane in oscplanes]
    outplanes = [solution_plane(mdim+pdim, pdim, sol) for sol in rtsols]
    return (inplanes, outplanes, target, rtsols)
def input_generators(plane):
    """
    Given in plane is a numpy matrix whose columns hold the coordinates
    of two points spanning a line in 4-space; the first coordinate of
    each point must be nonzero.
    Returns the affine representation of the line: each generator is
    divided by its first coordinate, which is then dropped.
    """
    def affine(column):
        scaled = [x / column[0] for x in column]
        return scaled[1:]
    return (affine(list(plane[:, 0])), affine(list(plane[:, 1])))
def output_generators(plane):
    """
    Given in plane is a numpy matrix whose columns hold the coordinates
    of two points spanning a line in 4-space, following the localization
    pattern 1, *, *, 0 for the first point and 0, 1, *, * for the second
    (so the second point lies at infinity in projective coordinates).
    The first generator keeps the first point; the second generator is
    the sum of both points.  Imaginary parts are dropped, as is the
    leading coordinate of each generator.
    """
    pone = list(plane[:, 0])
    ptwo = list(plane[:, 1])
    first = [value.real for value in pone]
    second = [a.real + b.real for (a, b) in zip(pone, ptwo)]
    return (first[1:], second[1:])
def boxrange(inlines, outlines):
    """
    Returns a 3-tuple with the (min, max) plot ranges for the x, y,
    and z coordinates, computed over all generators in the lists
    inlines and outlines.
    The ranges are adjusted for the particular real case.
    """
    fst = inlines[0][0]
    # start the running min/max at the first generator
    result = {'xmin': fst[0], 'xmax': fst[0], \
              'ymin': fst[1], 'ymax': fst[1], \
              'zmin': fst[2], 'zmax': fst[2]}
    pts = [x for (x, y) in inlines] + [y for (x, y) in inlines] \
        + [x for (x, y) in outlines] + [y for (x, y) in outlines]
    print('the points :\n', pts)
    for point in pts:
        result['xmin'] = min(result['xmin'], point[0])
        result['ymin'] = min(result['ymin'], point[1])
        result['zmin'] = min(result['zmin'], point[2])
        result['xmax'] = max(result['xmax'], point[0])
        result['ymax'] = max(result['ymax'], point[1])
        result['zmax'] = max(result['zmax'], point[2])
    # NOTE(review): the offsets below are hand-tuned margins for the
    # seeded instance plotted by main(); confirm before reusing elsewhere.
    return ((result['xmin']+3, result['xmax']-3), \
            (result['ymin']+8, result['ymax']-11), \
            (result['zmin']+3, result['zmax']-5))
def inbox(point, lims):
    """
    Returns True when the point lies inside the box whose coordinate
    ranges are given in the 3-tuple lims, allowing a small tolerance
    to absorb roundoff errors.
    """
    tol = 1.0e-8  # this is essential for roundoff
    for coordinate, (low, high) in zip(point, lims):
        if coordinate < low - tol or coordinate > high + tol:
            return False
    return True
def equal(pt1, pt2):
    """
    Returns True if all three coordinates of pt1 and pt2 match
    within a tolerance of 1.0e-8.
    """
    tol = 1.0e-8
    return all(abs(a - b) <= tol for (a, b) in zip(pt1, pt2))
def isin(points, pnt):
    """
    Returns True if pnt occurs in the list points, using the
    coordinatewise tolerant comparison of equal().
    """
    return any(equal(point, pnt) for point in points)
def plot_line(axs, line, lims, color):
    """
    Plots the line defined as a tuple of two points, clipped to the
    bounding box, using the axis object in axs.
    The 3-tuple lims contains three lists with limits [min, max]
    for the x, y, and z coordinates.
    """
    (fst, snd) = line
    axs.set_xlabel('x')
    axs.set_ylabel('y')
    axs.set_zlabel('z')
    axs.set_xlim(lims[0])
    axs.set_ylim(lims[1])
    axs.set_zlim(lims[2])
    # direction vector of the line
    dir = (fst[0] - snd[0], fst[1] - snd[1], fst[2] - snd[2])
    result = []
    # intersect the line with the six bounding planes of the box and
    # collect the distinct intersection points that lie inside the box
    for k in range(3):
        fac = (lims[k][1]-fst[k])/dir[k]
        pnt = (fst[0] + fac*dir[0], fst[1] + fac*dir[1], fst[2] + fac*dir[2])
        if inbox(pnt, lims):
            if not isin(result, pnt): result.append(pnt)
    for k in range(3):
        fac = (lims[k][0]-fst[k])/dir[k]
        pnt = (fst[0] + fac*dir[0], fst[1] + fac*dir[1], fst[2] + fac*dir[2])
        if inbox(pnt, lims):
            if not isin(result, pnt): result.append(pnt)
    # the two clip points span the visible segment of the line
    (one, two) = (result[0], result[1])
    # axs.plot([fst[0], snd[0]], [fst[1], snd[1]], [fst[2], snd[2]], 'bo')
    # axs.plot([one[0], two[0]], [one[1], two[1]], [one[2], two[2]], 'ro')
    axs.plot([one[0], two[0]], [one[1], two[1]], [one[2], two[2]], color)
def plot_lines(inlines, outlines, points, lims):
    """
    Plots the input lines (blue), the output lines (red), and the
    intersection points in the list points, inside the bounding box
    whose limits are given in lims.
    """
    fig = plt.figure()
    axs = fig.add_subplot(111, projection='3d')
    for line in inlines:
        plot_line(axs, line, lims, 'b')
    for line in outlines:
        plot_line(axs, line, lims, 'r')
    for point in points:
        axs.plot([point[0]], [point[1]], [point[2]], 'ro')
    axs.view_init(azim=5, elev=20)
    plt.show()
def intersection_point(apl, bpl, check=True):
    """
    Given in apl the two points that define a line
    and in bpl the two points that define another line,
    returns the intersection point.
    If check, additional verification output is printed to screen.
    """
    from numpy.linalg import solve
    (apt, bpt) = apl
    (cpt, dpt) = bpl
    # coefficients of the affine combinations on both lines
    coeffmat = array([[apt[0], bpt[0], -cpt[0]],
                      [apt[1], bpt[1], -cpt[1]],
                      [apt[2], bpt[2], -cpt[2]]])
    righths = array([[dpt[0]], [dpt[1]], [dpt[2]]])
    coeffsol = solve(coeffmat, righths)
    cff = list(coeffsol[:, 0])
    scale = cff[0] + cff[1]
    result = ((cff[0]*apt[0] + cff[1]*bpt[0])/scale,
              (cff[0]*apt[1] + cff[1]*bpt[1])/scale,
              (cff[0]*apt[2] + cff[1]*bpt[2])/scale)
    if check:
        # recompute the point from the second line and show the residual
        scale = cff[2] + 1.0
        verify = ((cff[2]*cpt[0] + dpt[0])/scale,
                  (cff[2]*cpt[1] + dpt[1])/scale,
                  (cff[2]*cpt[2] + dpt[2])/scale)
        print('the solution :\n', result)
        print('the solution verified :\n', verify)
        residual = matrix(righths) - matrix(coeffmat)*matrix(coeffsol)
        print('the residual :\n', residual)
    return result
def intersection_points(ipl, opl):
    """
    Returns the list of intersection points between
    the input planes in ipl and the output planes in opl.
    """
    # one point for every (input plane, output plane) pair
    return [intersection_point(inplane, outplane) \
        for inplane in ipl for outplane in opl]
def show_planes(ipl, opl):
    """
    Shows the input and the output planes.
    """
    # generators of the lines spanned by the input and output planes
    inlines = [input_generators(plane) for plane in ipl]
    outlines = [output_generators(plane) for plane in opl]
    print('The generators of the input lines :')
    for line in inlines:
        print(line)
    print('The generators of the output lines :')
    for line in outlines:
        print(line)
    limits = boxrange(inlines, outlines)
    print('the range:', limits)
    crosspoints = intersection_points(inlines, outlines)
    print('the intersection points :')
    for point in crosspoints:
        print(point)
    plot_lines(inlines, outlines, crosspoints, limits)
def main():
    """
    We start with the formalism of the root count,
    solve a general configuration and then a special problem.

    Relies on solve_general, solve_real, and verify_determinants
    defined earlier in this module.
    """
    from phcpy.schubert import pieri_root_count
    # problem dimensions: 2-planes meeting 2-planes, degree zero maps
    (mdim, pdim, deg) = (2, 2, 0)
    pcnt = pieri_root_count(mdim, pdim, deg, False)
    print('The Pieri root count :', pcnt)
    print('Solving a general case ...')
    # general configuration: random complex input planes
    (inp, otp, pols, sols) = solve_general(mdim, pdim, deg)
    print('The input planes :')
    for plane in inp:
        print(plane)
    print('The solution planes :')
    for plane in otp:
        print(plane)
    # each determinant should vanish when a solution meets an input plane
    check = verify_determinants(inp, otp)
    print('Sum of absolute values of determinants :', check)
    input('Hit enter to continue.')
    # fix the seed so the real (osculating) configuration is reproducible
    from random import seed
    seed(400)
    # track the general solutions to a real osculating instance
    (oscp, otp2, pols2, sols2) = solve_real(mdim, pdim, pols, sols)
    print('The input planes :')
    for plane in oscp:
        print(plane)
    print('The solution planes :')
    for plane in otp2:
        print(plane)
    check = verify_determinants(oscp, otp2)
    print('Sum of absolute values of determinants :', check)
    show_planes(oscp, otp2)

if __name__ == "__main__":
    main()
| janverschelde/PHCpack | src/Python/PHCpy3/examples/fourlines.py | Python | gpl-3.0 | 12,371 |
"""Given user GO ids and parent terms, group user GO ids under one parent term.
Given a group of GO ids with one or more higher-level grouping terms, group
each user GO id under the most descriptive parent GO term.
Each GO id may have more than one parent. One of the parent(s) is chosen
to best represent the user GO id's function. The choice of parent is made by
regarding how close the parent GO id is to the bottom of its hierarchy.
How close a GO term is to "the bottom" of its GO hierarchy
is estimated using the total number of GO term descendants below
that term.
"""
__copyright__ = "Copyright (C) 2016-2018, DV Klopfenstein, H Tang, All rights reserved."
__author__ = "DV Klopfenstein"
import sys
from goatools.base import get_godag
from goatools.gosubdag.gosubdag import GoSubDag
class GrouperDflts(object):
    """Holds objects that we would like to load and initialize once.

    Files used for grouping GO IDs:
        http://geneontology.org/ontology/go-basic.obo
        http://geneontology.org/ontology/subsets/goslim_generic.obo
    """

    def __init__(self, gosubdag=None, goslim_filename="goslim_generic.obo", hdrgos=None):
        # GoSubDag with descendant counts, created here if not provided
        self.gosubdag = self.get_gosubdag(gosubdag)
        dagslim = get_godag(goslim_filename, prt=sys.stdout, loading_bar=False)
        self.ver_goslims = dagslim.version
        self.goslims = self._init_goslims(dagslim)
        # default header (grouping) GO IDs unless the caller supplied them
        self.hdrgos_dflt = hdrgos if hdrgos is not None else self._init_hdrgos()

    def _init_hdrgos(self):
        """Return GO IDs used as the default for the high grouping GO IDs."""
        # all GO terms at depth-00 or depth-01, plus the GO slim terms
        return self.get_gos_d0d1() | self.goslims

    def _init_goslims(self, dagslim):
        """Get GO IDs in GO slims."""
        main_gos = self.gosubdag.go2obj
        # keep only slim GO IDs that are present in the main GO DAG
        slim_gos = {goid for goid in dagslim if goid in main_gos}
        if self.gosubdag.relationships:
            return self._get_goslimids_norel(slim_gos)
        return set(dagslim.keys())

    def get_gos_d0d1(self):
        """Return GO IDs whose depth is 0 (BP, MF, CC) or depth is 1."""
        depth2goobjs = self.gosubdag.rcntobj.depth2goobjs
        return {goobj.id for depth in (0, 1) for goobj in depth2goobjs.get(depth)}

    def _get_goslimids_norel(self, dagslim):
        """Get all GO slim GO IDs that do not have a relationship."""
        go2obj = self.gosubdag.go2obj
        return {go2obj[goid].id for goid in dagslim if not go2obj[goid].relationship}

    @staticmethod
    def get_gosubdag(gosubdag=None):
        """Gets a GoSubDag initialized for use by a Grouper object."""
        if gosubdag is None:
            # build a full sub-DAG with descendant counts from scratch
            return GoSubDag(None, get_godag(), rcntobj=True)
        if gosubdag.rcntobj is None:
            gosubdag.init_auxobjs()
        return gosubdag
# Copyright (C) 2016-2018, DV Klopfenstein, H Tang, All rights reserved.
| tanghaibao/goatools | goatools/grouper/grprdflts.py | Python | bsd-2-clause | 3,204 |
from __future__ import absolute_import
import responses
from mock import patch
from sentry.constants import SentryAppInstallationStatus
from sentry.mediators.sentry_app_installations import Creator
from sentry.models import (
AuditLogEntry,
AuditLogEntryEvent,
ApiGrant,
ServiceHook,
ServiceHookProject,
)
from sentry.testutils import TestCase
class TestCreator(TestCase):
    """Tests for the sentry-app installation Creator mediator.

    Every test registers a mocked webhook endpoint with ``responses``
    so the installation flow can POST to it without network access.
    """

    def setUp(self):
        # fixtures: a user, an org with two projects, and a published
        # sentry app ("nulldb") owned by that org
        self.user = self.create_user()
        self.org = self.create_organization()
        self.project1 = self.create_project(organization=self.org)
        self.project2 = self.create_project(organization=self.org)
        responses.add(responses.POST, "https://example.com/webhook")
        self.sentry_app = self.create_sentry_app(
            name="nulldb",
            organization=self.org,
            scopes=("project:read",),
            events=("issue.created",),
        )
        # mediator under test: installs "nulldb" into the organization
        self.creator = Creator(organization=self.org, slug="nulldb", user=self.user)

    @responses.activate
    def test_creates_installation(self):
        # the mediator persists a SentryAppInstallation row
        responses.add(responses.POST, "https://example.com/webhook")
        install = self.creator.call()
        assert install.pk

    @responses.activate
    def test_creates_api_grant(self):
        # an ApiGrant is created alongside the installation
        responses.add(responses.POST, "https://example.com/webhook")
        install = self.creator.call()
        assert ApiGrant.objects.filter(id=install.api_grant_id).exists()

    @responses.activate
    def test_creates_service_hooks(self):
        # a ServiceHook is created mirroring the app's events/webhook,
        # without any per-project hook rows
        responses.add(responses.POST, "https://example.com/webhook")
        install = self.creator.call()
        hook = ServiceHook.objects.get(organization_id=self.org.id)
        assert hook.application_id == self.sentry_app.application.id
        assert hook.actor_id == install.id
        assert hook.organization_id == self.org.id
        assert hook.events == self.sentry_app.events
        assert hook.url == self.sentry_app.webhook_url
        assert not ServiceHookProject.objects.all()

    @responses.activate
    def test_creates_audit_log_entry(self):
        # installing writes a SENTRY_APP_INSTALL audit log entry
        responses.add(responses.POST, "https://example.com/webhook")
        request = self.make_request(user=self.user, method="GET")
        Creator.run(organization=self.org, slug="nulldb", user=self.user, request=request)
        assert AuditLogEntry.objects.filter(event=AuditLogEntryEvent.SENTRY_APP_INSTALL).exists()

    @responses.activate
    @patch("sentry.mediators.sentry_app_installations.InstallationNotifier.run")
    def test_notifies_service(self, run):
        # the InstallationNotifier is invoked once with action="created"
        with self.tasks():
            responses.add(responses.POST, "https://example.com/webhook")
            install = self.creator.call()
            run.assert_called_once_with(install=install, user=self.user, action="created")

    @responses.activate
    def test_associations(self):
        # the installation is linked to its api grant
        responses.add(responses.POST, "https://example.com/webhook")
        install = self.creator.call()
        assert install.api_grant is not None

    @responses.activate
    def test_pending_status(self):
        # external apps start in the PENDING state
        responses.add(responses.POST, "https://example.com/webhook")
        install = self.creator.call()
        assert install.status == SentryAppInstallationStatus.PENDING

    @responses.activate
    def test_installed_status(self):
        # internal integrations skip approval and go straight to INSTALLED
        responses.add(responses.POST, "https://example.com/webhook")
        internal_app = self.create_internal_integration(name="internal", organization=self.org)
        creator = Creator(organization=self.org, slug=internal_app.slug, user=self.user)
        install = creator.call()
        assert install.status == SentryAppInstallationStatus.INSTALLED

    @patch("sentry.analytics.record")
    def test_records_analytics(self, record):
        # a "sentry_app.installed" analytics event is recorded
        Creator.run(
            organization=self.org,
            slug="nulldb",
            user=self.user,
            request=self.make_request(user=self.user, method="GET"),
        )
        record.assert_called_with(
            "sentry_app.installed",
            user_id=self.user.id,
            organization_id=self.org.id,
            sentry_app="nulldb",
        )
| mvaled/sentry | tests/sentry/mediators/sentry_app_installations/test_creator.py | Python | bsd-3-clause | 4,113 |
<<<<<<< HEAD
<<<<<<< HEAD
""" Test Iterator Length Transparency
Some functions or methods which accept general iterable arguments have
optional, more efficient code paths if they know how many items to expect.
For instance, map(func, iterable), will pre-allocate the exact amount of
space required whenever the iterable can report its length.
The desired invariant is: len(it)==len(list(it)).
A complication is that an iterable and iterator can be the same object. To
maintain the invariant, an iterator needs to dynamically update its length.
For instance, an iterable such as range(10) always reports its length as ten,
but it=iter(range(10)) starts at ten, and then goes to nine after next(it).
Having this capability means that map() can ignore the distinction between
map(func, iterable) and map(func, iter(iterable)).
When the iterable is immutable, the implementation can straight-forwardly
report the original length minus the cumulative number of calls to next().
This is the case for tuples, range objects, and itertools.repeat().
Some containers become temporarily immutable during iteration. This includes
dicts, sets, and collections.deque. Their implementation is equally simple
though they need to permanently set their length to zero whenever there is
an attempt to iterate after a length mutation.
The situation slightly more involved whenever an object allows length mutation
during iteration. Lists and sequence iterators are dynamically updatable.
So, if a list is extended during iteration, the iterator will continue through
the new items. If it shrinks to a point before the most recent iteration,
then no further items are available and the length is reported at zero.
Reversed objects can also be wrapped around mutable objects; however, any
appends after the current position are ignored. Any other approach leads
to confusion and possibly returning the same item more than once.
The iterators not listed above, such as enumerate and the other itertools,
are not length transparent because they have no way to distinguish between
iterables that report static length and iterators whose length changes with
each call (i.e. the difference between enumerate('abc') and
enumerate(iter('abc')).
"""
import unittest
from test import support
from itertools import repeat
from collections import deque
from operator import length_hint
n = 10
class TestInvariantWithoutMutations:
def test_invariant(self):
it = self.it
for i in reversed(range(1, n+1)):
self.assertEqual(length_hint(it), i)
next(it)
self.assertEqual(length_hint(it), 0)
self.assertRaises(StopIteration, next, it)
self.assertEqual(length_hint(it), 0)
class TestTemporarilyImmutable(TestInvariantWithoutMutations):
def test_immutable_during_iteration(self):
# objects such as deques, sets, and dictionaries enforce
# length immutability during iteration
it = self.it
self.assertEqual(length_hint(it), n)
next(it)
self.assertEqual(length_hint(it), n-1)
self.mutate()
self.assertRaises(RuntimeError, next, it)
self.assertEqual(length_hint(it), 0)
## ------- Concrete Type Tests -------
class TestRepeat(TestInvariantWithoutMutations, unittest.TestCase):
def setUp(self):
self.it = repeat(None, n)
class TestXrange(TestInvariantWithoutMutations, unittest.TestCase):
def setUp(self):
self.it = iter(range(n))
class TestXrangeCustomReversed(TestInvariantWithoutMutations, unittest.TestCase):
def setUp(self):
self.it = reversed(range(n))
class TestTuple(TestInvariantWithoutMutations, unittest.TestCase):
def setUp(self):
self.it = iter(tuple(range(n)))
## ------- Types that should not be mutated during iteration -------
class TestDeque(TestTemporarilyImmutable, unittest.TestCase):
def setUp(self):
d = deque(range(n))
self.it = iter(d)
self.mutate = d.pop
class TestDequeReversed(TestTemporarilyImmutable, unittest.TestCase):
def setUp(self):
d = deque(range(n))
self.it = reversed(d)
self.mutate = d.pop
class TestDictKeys(TestTemporarilyImmutable, unittest.TestCase):
def setUp(self):
d = dict.fromkeys(range(n))
self.it = iter(d)
self.mutate = d.popitem
class TestDictItems(TestTemporarilyImmutable, unittest.TestCase):
def setUp(self):
d = dict.fromkeys(range(n))
self.it = iter(d.items())
self.mutate = d.popitem
class TestDictValues(TestTemporarilyImmutable, unittest.TestCase):
def setUp(self):
d = dict.fromkeys(range(n))
self.it = iter(d.values())
self.mutate = d.popitem
class TestSet(TestTemporarilyImmutable, unittest.TestCase):
def setUp(self):
d = set(range(n))
self.it = iter(d)
self.mutate = d.pop
## ------- Types that can mutate during iteration -------
class TestList(TestInvariantWithoutMutations, unittest.TestCase):
def setUp(self):
self.it = iter(range(n))
def test_mutation(self):
d = list(range(n))
it = iter(d)
next(it)
next(it)
self.assertEqual(length_hint(it), n - 2)
d.append(n)
self.assertEqual(length_hint(it), n - 1) # grow with append
d[1:] = []
self.assertEqual(length_hint(it), 0)
self.assertEqual(list(it), [])
d.extend(range(20))
self.assertEqual(length_hint(it), 0)
class TestListReversed(TestInvariantWithoutMutations, unittest.TestCase):
def setUp(self):
self.it = reversed(range(n))
def test_mutation(self):
d = list(range(n))
it = reversed(d)
next(it)
next(it)
self.assertEqual(length_hint(it), n - 2)
d.append(n)
self.assertEqual(length_hint(it), n - 2) # ignore append
d[1:] = []
self.assertEqual(length_hint(it), 0)
self.assertEqual(list(it), []) # confirm invariant
d.extend(range(20))
self.assertEqual(length_hint(it), 0)
## -- Check to make sure exceptions are not suppressed by __length_hint__()
class BadLen(object):
def __iter__(self):
return iter(range(10))
def __len__(self):
raise RuntimeError('hello')
class BadLengthHint(object):
def __iter__(self):
return iter(range(10))
def __length_hint__(self):
raise RuntimeError('hello')
class NoneLengthHint(object):
def __iter__(self):
return iter(range(10))
def __length_hint__(self):
return NotImplemented
class TestLengthHintExceptions(unittest.TestCase):
def test_issue1242657(self):
self.assertRaises(RuntimeError, list, BadLen())
self.assertRaises(RuntimeError, list, BadLengthHint())
self.assertRaises(RuntimeError, [].extend, BadLen())
self.assertRaises(RuntimeError, [].extend, BadLengthHint())
b = bytearray(range(10))
self.assertRaises(RuntimeError, b.extend, BadLen())
self.assertRaises(RuntimeError, b.extend, BadLengthHint())
def test_invalid_hint(self):
# Make sure an invalid result doesn't muck-up the works
self.assertEqual(list(NoneLengthHint()), list(range(10)))
if __name__ == "__main__":
unittest.main()
=======
""" Test Iterator Length Transparency
Some functions or methods which accept general iterable arguments have
optional, more efficient code paths if they know how many items to expect.
For instance, map(func, iterable), will pre-allocate the exact amount of
space required whenever the iterable can report its length.
The desired invariant is: len(it)==len(list(it)).
A complication is that an iterable and iterator can be the same object. To
maintain the invariant, an iterator needs to dynamically update its length.
For instance, an iterable such as range(10) always reports its length as ten,
but it=iter(range(10)) starts at ten, and then goes to nine after next(it).
Having this capability means that map() can ignore the distinction between
map(func, iterable) and map(func, iter(iterable)).
When the iterable is immutable, the implementation can straight-forwardly
report the original length minus the cumulative number of calls to next().
This is the case for tuples, range objects, and itertools.repeat().
Some containers become temporarily immutable during iteration. This includes
dicts, sets, and collections.deque. Their implementation is equally simple
though they need to permanently set their length to zero whenever there is
an attempt to iterate after a length mutation.
The situation slightly more involved whenever an object allows length mutation
during iteration. Lists and sequence iterators are dynamically updatable.
So, if a list is extended during iteration, the iterator will continue through
the new items. If it shrinks to a point before the most recent iteration,
then no further items are available and the length is reported at zero.
Reversed objects can also be wrapped around mutable objects; however, any
appends after the current position are ignored. Any other approach leads
to confusion and possibly returning the same item more than once.
The iterators not listed above, such as enumerate and the other itertools,
are not length transparent because they have no way to distinguish between
iterables that report static length and iterators whose length changes with
each call (i.e. the difference between enumerate('abc') and
enumerate(iter('abc')).
"""
import unittest
from test import support
from itertools import repeat
from collections import deque
from operator import length_hint
n = 10
class TestInvariantWithoutMutations:
def test_invariant(self):
it = self.it
for i in reversed(range(1, n+1)):
self.assertEqual(length_hint(it), i)
next(it)
self.assertEqual(length_hint(it), 0)
self.assertRaises(StopIteration, next, it)
self.assertEqual(length_hint(it), 0)
class TestTemporarilyImmutable(TestInvariantWithoutMutations):
def test_immutable_during_iteration(self):
# objects such as deques, sets, and dictionaries enforce
# length immutability during iteration
it = self.it
self.assertEqual(length_hint(it), n)
next(it)
self.assertEqual(length_hint(it), n-1)
self.mutate()
self.assertRaises(RuntimeError, next, it)
self.assertEqual(length_hint(it), 0)
## ------- Concrete Type Tests -------
class TestRepeat(TestInvariantWithoutMutations, unittest.TestCase):
def setUp(self):
self.it = repeat(None, n)
class TestXrange(TestInvariantWithoutMutations, unittest.TestCase):
def setUp(self):
self.it = iter(range(n))
class TestXrangeCustomReversed(TestInvariantWithoutMutations, unittest.TestCase):
def setUp(self):
self.it = reversed(range(n))
class TestTuple(TestInvariantWithoutMutations, unittest.TestCase):
def setUp(self):
self.it = iter(tuple(range(n)))
## ------- Types that should not be mutated during iteration -------
class TestDeque(TestTemporarilyImmutable, unittest.TestCase):
def setUp(self):
d = deque(range(n))
self.it = iter(d)
self.mutate = d.pop
class TestDequeReversed(TestTemporarilyImmutable, unittest.TestCase):
def setUp(self):
d = deque(range(n))
self.it = reversed(d)
self.mutate = d.pop
class TestDictKeys(TestTemporarilyImmutable, unittest.TestCase):
def setUp(self):
d = dict.fromkeys(range(n))
self.it = iter(d)
self.mutate = d.popitem
class TestDictItems(TestTemporarilyImmutable, unittest.TestCase):
def setUp(self):
d = dict.fromkeys(range(n))
self.it = iter(d.items())
self.mutate = d.popitem
class TestDictValues(TestTemporarilyImmutable, unittest.TestCase):
def setUp(self):
d = dict.fromkeys(range(n))
self.it = iter(d.values())
self.mutate = d.popitem
class TestSet(TestTemporarilyImmutable, unittest.TestCase):
def setUp(self):
d = set(range(n))
self.it = iter(d)
self.mutate = d.pop
## ------- Types that can mutate during iteration -------
class TestList(TestInvariantWithoutMutations, unittest.TestCase):
def setUp(self):
self.it = iter(range(n))
def test_mutation(self):
d = list(range(n))
it = iter(d)
next(it)
next(it)
self.assertEqual(length_hint(it), n - 2)
d.append(n)
self.assertEqual(length_hint(it), n - 1) # grow with append
d[1:] = []
self.assertEqual(length_hint(it), 0)
self.assertEqual(list(it), [])
d.extend(range(20))
self.assertEqual(length_hint(it), 0)
class TestListReversed(TestInvariantWithoutMutations, unittest.TestCase):
def setUp(self):
self.it = reversed(range(n))
def test_mutation(self):
d = list(range(n))
it = reversed(d)
next(it)
next(it)
self.assertEqual(length_hint(it), n - 2)
d.append(n)
self.assertEqual(length_hint(it), n - 2) # ignore append
d[1:] = []
self.assertEqual(length_hint(it), 0)
self.assertEqual(list(it), []) # confirm invariant
d.extend(range(20))
self.assertEqual(length_hint(it), 0)
## -- Check to make sure exceptions are not suppressed by __length_hint__()
class BadLen(object):
def __iter__(self):
return iter(range(10))
def __len__(self):
raise RuntimeError('hello')
class BadLengthHint(object):
def __iter__(self):
return iter(range(10))
def __length_hint__(self):
raise RuntimeError('hello')
class NoneLengthHint(object):
def __iter__(self):
return iter(range(10))
def __length_hint__(self):
return NotImplemented
class TestLengthHintExceptions(unittest.TestCase):
def test_issue1242657(self):
self.assertRaises(RuntimeError, list, BadLen())
self.assertRaises(RuntimeError, list, BadLengthHint())
self.assertRaises(RuntimeError, [].extend, BadLen())
self.assertRaises(RuntimeError, [].extend, BadLengthHint())
b = bytearray(range(10))
self.assertRaises(RuntimeError, b.extend, BadLen())
self.assertRaises(RuntimeError, b.extend, BadLengthHint())
def test_invalid_hint(self):
# Make sure an invalid result doesn't muck-up the works
self.assertEqual(list(NoneLengthHint()), list(range(10)))
if __name__ == "__main__":
unittest.main()
>>>>>>> b875702c9c06ab5012e52ff4337439b03918f453
=======
""" Test Iterator Length Transparency
Some functions or methods which accept general iterable arguments have
optional, more efficient code paths if they know how many items to expect.
For instance, map(func, iterable), will pre-allocate the exact amount of
space required whenever the iterable can report its length.
The desired invariant is: len(it)==len(list(it)).
A complication is that an iterable and iterator can be the same object. To
maintain the invariant, an iterator needs to dynamically update its length.
For instance, an iterable such as range(10) always reports its length as ten,
but it=iter(range(10)) starts at ten, and then goes to nine after next(it).
Having this capability means that map() can ignore the distinction between
map(func, iterable) and map(func, iter(iterable)).
When the iterable is immutable, the implementation can straight-forwardly
report the original length minus the cumulative number of calls to next().
This is the case for tuples, range objects, and itertools.repeat().
Some containers become temporarily immutable during iteration. This includes
dicts, sets, and collections.deque. Their implementation is equally simple
though they need to permanently set their length to zero whenever there is
an attempt to iterate after a length mutation.
The situation slightly more involved whenever an object allows length mutation
during iteration. Lists and sequence iterators are dynamically updatable.
So, if a list is extended during iteration, the iterator will continue through
the new items. If it shrinks to a point before the most recent iteration,
then no further items are available and the length is reported at zero.
Reversed objects can also be wrapped around mutable objects; however, any
appends after the current position are ignored. Any other approach leads
to confusion and possibly returning the same item more than once.
The iterators not listed above, such as enumerate and the other itertools,
are not length transparent because they have no way to distinguish between
iterables that report static length and iterators whose length changes with
each call (i.e. the difference between enumerate('abc') and
enumerate(iter('abc')).
"""
import unittest
from test import support
from itertools import repeat
from collections import deque
from operator import length_hint
n = 10
class TestInvariantWithoutMutations:
def test_invariant(self):
it = self.it
for i in reversed(range(1, n+1)):
self.assertEqual(length_hint(it), i)
next(it)
self.assertEqual(length_hint(it), 0)
self.assertRaises(StopIteration, next, it)
self.assertEqual(length_hint(it), 0)
class TestTemporarilyImmutable(TestInvariantWithoutMutations):
def test_immutable_during_iteration(self):
# objects such as deques, sets, and dictionaries enforce
# length immutability during iteration
it = self.it
self.assertEqual(length_hint(it), n)
next(it)
self.assertEqual(length_hint(it), n-1)
self.mutate()
self.assertRaises(RuntimeError, next, it)
self.assertEqual(length_hint(it), 0)
## ------- Concrete Type Tests -------
class TestRepeat(TestInvariantWithoutMutations, unittest.TestCase):
def setUp(self):
self.it = repeat(None, n)
class TestXrange(TestInvariantWithoutMutations, unittest.TestCase):
def setUp(self):
self.it = iter(range(n))
class TestXrangeCustomReversed(TestInvariantWithoutMutations, unittest.TestCase):
def setUp(self):
self.it = reversed(range(n))
class TestTuple(TestInvariantWithoutMutations, unittest.TestCase):
def setUp(self):
self.it = iter(tuple(range(n)))
## ------- Types that should not be mutated during iteration -------
class TestDeque(TestTemporarilyImmutable, unittest.TestCase):
def setUp(self):
d = deque(range(n))
self.it = iter(d)
self.mutate = d.pop
class TestDequeReversed(TestTemporarilyImmutable, unittest.TestCase):
def setUp(self):
d = deque(range(n))
self.it = reversed(d)
self.mutate = d.pop
class TestDictKeys(TestTemporarilyImmutable, unittest.TestCase):
def setUp(self):
d = dict.fromkeys(range(n))
self.it = iter(d)
self.mutate = d.popitem
class TestDictItems(TestTemporarilyImmutable, unittest.TestCase):
def setUp(self):
d = dict.fromkeys(range(n))
self.it = iter(d.items())
self.mutate = d.popitem
class TestDictValues(TestTemporarilyImmutable, unittest.TestCase):
def setUp(self):
d = dict.fromkeys(range(n))
self.it = iter(d.values())
self.mutate = d.popitem
class TestSet(TestTemporarilyImmutable, unittest.TestCase):
def setUp(self):
d = set(range(n))
self.it = iter(d)
self.mutate = d.pop
## ------- Types that can mutate during iteration -------
class TestList(TestInvariantWithoutMutations, unittest.TestCase):
def setUp(self):
self.it = iter(range(n))
def test_mutation(self):
d = list(range(n))
it = iter(d)
next(it)
next(it)
self.assertEqual(length_hint(it), n - 2)
d.append(n)
self.assertEqual(length_hint(it), n - 1) # grow with append
d[1:] = []
self.assertEqual(length_hint(it), 0)
self.assertEqual(list(it), [])
d.extend(range(20))
self.assertEqual(length_hint(it), 0)
class TestListReversed(TestInvariantWithoutMutations, unittest.TestCase):
def setUp(self):
self.it = reversed(range(n))
def test_mutation(self):
d = list(range(n))
it = reversed(d)
next(it)
next(it)
self.assertEqual(length_hint(it), n - 2)
d.append(n)
self.assertEqual(length_hint(it), n - 2) # ignore append
d[1:] = []
self.assertEqual(length_hint(it), 0)
self.assertEqual(list(it), []) # confirm invariant
d.extend(range(20))
self.assertEqual(length_hint(it), 0)
## -- Check to make sure exceptions are not suppressed by __length_hint__()
class BadLen(object):
def __iter__(self):
return iter(range(10))
def __len__(self):
raise RuntimeError('hello')
class BadLengthHint(object):
def __iter__(self):
return iter(range(10))
def __length_hint__(self):
raise RuntimeError('hello')
class NoneLengthHint(object):
def __iter__(self):
return iter(range(10))
def __length_hint__(self):
return NotImplemented
class TestLengthHintExceptions(unittest.TestCase):
def test_issue1242657(self):
self.assertRaises(RuntimeError, list, BadLen())
self.assertRaises(RuntimeError, list, BadLengthHint())
self.assertRaises(RuntimeError, [].extend, BadLen())
self.assertRaises(RuntimeError, [].extend, BadLengthHint())
b = bytearray(range(10))
self.assertRaises(RuntimeError, b.extend, BadLen())
self.assertRaises(RuntimeError, b.extend, BadLengthHint())
def test_invalid_hint(self):
# Make sure an invalid result doesn't muck-up the works
self.assertEqual(list(NoneLengthHint()), list(range(10)))
if __name__ == "__main__":
unittest.main()
>>>>>>> b875702c9c06ab5012e52ff4337439b03918f453
| ArcherSys/ArcherSys | Lib/test/test_iterlen.py | Python | mit | 22,013 |
import sys
import os
# enable local imports; redirect config calls to general config
def add_install_path():
    """Prepend the project's Install directories to ``sys.path``.

    Inserts the absolute paths of the Install root, Install/toolbox,
    Install/toolbox/lib, and this script's own directory so that
    sibling modules can be imported.  Paths already on ``sys.path``
    are skipped, making repeated calls (and repeated imports of this
    module) idempotent instead of growing ``sys.path`` every time.
    """
    local_path = os.path.dirname(__file__)
    add_paths = [
        os.path.join(local_path, '..', '..'),  # Install
        os.path.join(local_path, '..',),       # Install/toolbox
        os.path.join(local_path, '..', 'lib'), # Install/toolbox/lib
        local_path                             # Install/toolbox/scripts
    ]
    for path in add_paths:
        full_path = os.path.abspath(path)
        # skip duplicates so repeated calls don't bloat sys.path
        if full_path not in sys.path:
            sys.path.insert(0, full_path)

# pull config from parent project
add_install_path()
| genegis/genegis | Install/toolbox/scripts/add_install_path.py | Python | mpl-2.0 | 583 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = "Ponzoni, Nelson"
__copyright__ = "Copyright 2015"
__credits__ = ["Ponzoni Nelson"]
__maintainer__ = "Ponzoni Nelson"
__contact__ = "npcuadra@gmail.com"
__email__ = "npcuadra@gmail.com"
__license__ = "GPL"
__version__ = "1.0.0"
__status__ = "Production"
"""
GRID SEARCH
# anda hasta con 5 capas
optirun python3 cupydle/test/dbn_prueba_GS.py --directorio KML --dataset "all_videos_features_clases_shuffled_PCA85_minmax.npz" --capa1 85 60 6 --capa2 85 50 6
optirun python3 cupydle/test/dbn_prueba_GS.py --directorio MNIST --dataset "mnist_minmax.npz" --capa1 784 500 10 --capa2 784 100 10
"""
# dependecias internar
import os, argparse, shelve, sys, argparse, numpy as np
# dependecias propias
from cupydle.dnn.utils import temporizador
from cupydle.dnn.gridSearch import ParameterGrid
from cupydle.test.dbn_base import _guardar
from cupydle.test.dbn_prueba import test
#
#
if __name__ == '__main__':
    # Command-line interface: dataset location plus up to five candidate
    # layer configurations for the grid search over DBN hyper-parameters.
    parser = argparse.ArgumentParser(description='Prueba de una DBN sobre MNIST/RML GRID SEARCH')
    parser.add_argument('--directorio', type=str, dest="directorio", default='test_DBN', required=None, help="Carpeta donde se almacena la corrida actual")
    parser.add_argument('--dataset', type=str, dest="dataset", default=None, required=True, help="Archivo donde esta el dataset, [videos, clases].npz")
    parser.add_argument('--capa1', type=int, dest="capa1", default=None, required=True, nargs='+', help="Capas de unidades [visibles, ocultas1.. ocultasn]")
    parser.add_argument('--capa2', type=int, dest="capa2", default=None, required=False, nargs='+', help="Capas de unidades [visibles, ocultas1.. ocultasn]")
    parser.add_argument('--capa3', type=int, dest="capa3", default=None, required=False, nargs='+', help="Capas de unidades [visibles, ocultas1.. ocultasn]")
    parser.add_argument('--capa4', type=int, dest="capa4", default=None, required=False, nargs='+', help="Capas de unidades [visibles, ocultas1.. ocultasn]")
    parser.add_argument('--capa5', type=int, dest="capa5", default=None, required=False, nargs='+', help="Capas de unidades [visibles, ocultas1.. ocultasn]")
    parser.add_argument('--noEntrenar', action="store_true",dest="noEntrenar",default=False, required=False, help="Si esta presente, no ejecuta el entrenamiento de la DBN, solo ajusta los pesos")
    parser.add_argument('--cantidad', type=int, dest="cantidad", default=100, required=False, help="Porcentaje de la cantidad total de iteraciones del grid search")
    argumentos = parser.parse_args()

    directorio = argumentos.directorio
    dataset = argumentos.dataset
    noEntrenar = argumentos.noEntrenar
    cantidad = argumentos.cantidad

    # collect every layer configuration supplied on the command line
    capas = []
    if argumentos.capa1 is not None:
        capa1 = np.asarray(argumentos.capa1)
        capas.append(capa1)
    if argumentos.capa2 is not None:
        capa2 = np.asarray(argumentos.capa2)
        capas.append(capa2)
    if argumentos.capa3 is not None:
        capa3 = np.asarray(argumentos.capa3)
        capas.append(capa3)
    if argumentos.capa4 is not None:
        capa4 = np.asarray(argumentos.capa4)
        capas.append(capa4)
    if argumentos.capa5 is not None:
        capa5 = np.asarray(argumentos.capa5)
        capas.append(capa5)

    general = "kml"
    # mnist or kml?  decided from the dataset file name
    if dataset.find("mnist") != -1:
        general = "mnist"
    # sanity checks
    assert dataset.find('.npz') != -1, "El conjunto de datos debe ser del tipo '.npz'"

    # search space: each key maps to the list of candidate values;
    # the grid is the cartesian product over all keys
    parametros = {}
    parametros['general'] = [general]
    parametros['nombre'] = ['dbn']
    parametros['tipo'] = ['binaria']
    parametros['capas'] = capas
    parametros['epocasTRN'] = [[10]]
    parametros['epocasFIT'] = [10]
    parametros['tambatch'] = [10]
    parametros['tasaAprenTRN'] = [[0.01]]
    parametros['tasaAprenFIT'] = [0.1]
    parametros['regularizadorL1'] = [0.0]
    parametros['regularizadorL2'] = [0.0]
    parametros['momentoTRN'] = [[0.0]]
    parametros['momentoFIT'] = [0.0]
    parametros['pasosGibbs'] = [[1]]
    parametros['porcentaje'] = [0.8]
    parametros['toleranciaError'] = [0.1]
    parametros['pcd'] = [True]
    parametros['directorio'] = [directorio]
    parametros['dataset'] = [dataset]
    #parametros={'pasosGibbs': [[1]], 'pcd': [True], 'dataset': ['all_videos_features_clases_shuffled_PCA85_minmax.npz'], 'general': ['kml'], 'nombre': ['dbn'], 'regularizadorL1': [0.0], 'directorio': ['test_DBN_kml'], 'toleranciaError': [0.02], 'momentoFIT': [0.0], 'capas': [[85, 50, 6]], 'tipo': ['binaria'], 'epocasTRN': [[11]], 'tasaAprenFIT': [0.1], 'porcentaje': [0.8], 'momentoTRN': [[0.0]], 'epocasFIT': [4], 'tasaAprenTRN': [[0.1]], 'tambatch': [10], 'regularizadorL2': [0.0]}

    Grid = ParameterGrid(parametros)
    cantidad_combinaciones = len(Grid)
    # run only the requested percentage of all grid combinations
    cantidad_a_ejecutar = cantidad_combinaciones
    cantidad_a_ejecutar = int(cantidad_combinaciones * cantidad / 100.0)

    nombreArchivo = 'resultadosGeneralesGS'
    print("GUARDANDO LOS RESULTADOS EN EL ARCHIVO {} QUE CONTIENE {} ITERACIONES\n\n".format(nombreArchivo, cantidad_a_ejecutar))

    T = temporizador()
    inicio = T.tic()
    for x in range(cantidad_a_ejecutar):
        print("Iteracion {} de {}".format(x, cantidad_a_ejecutar))
        # pick a parameter combination at random from the full grid
        indice = np.random.randint(cantidad_combinaciones)
        # ParameterGrid has no __setitem__, so fetch the dict and modify it
        params = Grid[indice]
        params['directorio'] = directorio + '/' + 'dbn_grid_' + str(x)
        for k in sorted(params.keys()):
            #print(str("{: >25} : {: <50}").format(k, str(params[k])))
            params[k] = [params[k]] # wrap every value in a one-element list, as test() expects
        # per-iteration results file plus an aggregate file for the whole run
        archivoResultados1 = 'cupydle/test/' + params['general'][0] + '/' + params['directorio'][0] + '/' + nombreArchivo
        archivoResultados2 = 'cupydle/test/' + params['general'][0] + '/' + directorio + '/' + nombreArchivo
        costoTRN, costoVAL, costoTST, costoTST_final= test(archivoResultados=archivoResultados1, noEntrenar=noEntrenar, **params)
        # persist this iteration's parameters and costs under its index
        _guardar(nombreArchivo=archivoResultados2, valor={str(x): {'parametros':params, 'costoTRN':costoTRN, 'costoVAL':costoVAL, 'costoTST':costoTST, 'costoTST_final':costoTST_final }})
        print("*+*+*+*+*+*+*+*+*+*+*+*+*+*+*+*+*+*+*+*+*+*+*+*+*+*+*+*+")
        print("\n")
    final = T.toc()
    print("\n\nGRID SEARCH FINALIZADO\n\n")
print("Tiempo total requerido: {}".format(T.transcurrido(inicio, final)))
| lerker/cupydle | cupydle/test/dbn_prueba_GS.py | Python | apache-2.0 | 6,800 |
########################################################################
# Rancho - Open Source Group/Project Management Tool
# Copyright (C) 2008 The Rancho Team
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
########################################################################
from django.conf.urls.defaults import *
urlpatterns = patterns('rancho.todo.views',
    # To-do list overview and per-list detail.
    (r'^$', 'list'),
    (r'^(?P<todo_list_id>\d+)/$', 'view_todo_list'),
    # List lifecycle: create, edit, delete.
    (r'^create/$', 'create'),
    (r'^(?P<todo_list_id>\d+)/edit$', 'edit_todo_list'),
    (r'^delete_list/$', 'delete_todo_list'),
    # Individual to-do items within a list.
    (r'^(?P<todo_list>\d+)/add/$', 'add_todo'),
    (r'^(?P<todo_id>\d+)/edit_todo/$', 'edit_todo'),
    (r'^save_todo_changes/$', 'save_changes'),
    (r'^delete_todo/$', 'delete_todo'),
    (r'^switch_todo_status/$', 'switch_todo_status'),
)
| joaquimrocha/Rancho | rancho/todo/urls.py | Python | agpl-3.0 | 1,432 |
# -*- coding: utf-8 -*-
"""
A sample of kay settings.
:Copyright: (c) 2009 Accense Technology, Inc.
Takashi Matsuo <tmatsuo@candit.jp>,
All rights reserved.
:license: BSD, see LICENSE for more details.
"""
# --- Localization / debugging ---
DEFAULT_TIMEZONE = 'Asia/Tokyo'
DEBUG = True
PROFILE = False
# --- Session / cookie handling ---
SECRET_KEY = 'ReplaceItWithSecretString'
SESSION_PREFIX = 'gaesess:'
COOKIE_AGE = 1209600 # 2 weeks
COOKIE_NAME = 'KAY_SESSION'
# presumably prefixes datastore kind names with the app name -- confirm
ADD_APP_PREFIX_TO_KIND = True
ADMINS = (
)
TEMPLATE_DIRS = (
)
# --- Internationalization ---
USE_I18N = False
DEFAULT_LANG = 'en'
# --- Installed applications and their URL mount points ---
INSTALLED_APPS = (
  'kay.auth',
  'kay.tests.auth_testapp',
)
APP_MOUNT_POINTS = {
  'kay.tests.auth_testapp': '/',
}
# You can remove following settings if unnecessary.
CONTEXT_PROCESSORS = (
  'kay.context_processors.request',
  'kay.context_processors.url_functions',
  'kay.context_processors.media_url',
)
MIDDLEWARE_CLASSES = (
  'kay.sessions.middleware.SessionMiddleware',
  'kay.auth.middleware.AuthenticationMiddleware',
)
# --- Datastore-backed authentication backend and user model ---
AUTH_USER_BACKEND = "kay.auth.backends.datastore.DatastoreBackend"
AUTH_USER_MODEL = "kay.auth.models.DatastoreUser"
| calvinchengx/O-Kay-Blog-wih-Kay-0.10.0 | kay/tests/datastore_settings.py | Python | bsd-3-clause | 1,101 |
# Copyright (c) 2016-2017 Adobe Inc. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import six
import user_sync.connector.helper
import user_sync.error
import user_sync.identity_type
from user_sync.connector.directory import DirectoryConnector
from user_sync.config.common import DictConfig, OptionsBuilder
from user_sync.helper import CSVAdapter
from user_sync.config import user_sync as config
from user_sync.config import common as config_common
class CSVDirectoryConnector(DirectoryConnector):
    """Directory connector that loads users (and group memberships)
    from a delimited text file instead of a live directory service.
    """
    name = 'csv'
    def __init__(self, caller_options, *args, **kwargs):
        """
        :type caller_options: dict  # raw connector options from the config file
        """
        super(CSVDirectoryConnector, self).__init__(*args, **kwargs)
        caller_config = DictConfig('%s configuration' % self.name, caller_options)
        builder = OptionsBuilder(caller_config)
        # Column-name options default to conventional CSV headers.
        builder.set_string_value('delimiter', None)
        builder.set_string_value('string_encoding', 'utf8')
        builder.set_string_value('first_name_column_name', 'firstname')
        builder.set_string_value('last_name_column_name', 'lastname')
        builder.set_string_value('email_column_name', 'email')
        builder.set_string_value('country_column_name', 'country')
        builder.set_string_value('groups_column_name', 'groups')
        builder.set_string_value('username_column_name', 'username')
        builder.set_string_value('domain_column_name', 'domain')
        builder.set_string_value('identity_type_column_name', 'type')
        builder.set_string_value('user_identity_type', None)
        builder.set_string_value('logger_name', self.name)
        builder.require_string_value('file_path')
        options = builder.get_options()
        self.options = options
        self.logger = logger = user_sync.connector.helper.create_logger(options)
        logger.debug('%s initialized with options: %s', self.name, options)
        # Warn about any config keys the builder did not consume.
        caller_config.report_unused_values(logger)
        # encoding of column values
        self.encoding = options['string_encoding']
        # identity type for new users if not specified in column
        self.user_identity_type = user_sync.identity_type.parse_identity_type(options['user_identity_type'])
    def load_users_and_groups(self, groups, extended_attributes, all_users):
        """
        Load every user from the configured CSV file.
        NOTE(review): *groups* and *all_users* are accepted for interface
        compatibility but are not used here -- confirm all callers expect
        the full file to be loaded regardless.
        :type groups: list(str)
        :type extended_attributes: list
        :rtype (bool, iterable(dict))
        """
        options = self.options
        file_path = options['file_path']
        self.logger.debug('Reading from: %s', file_path)
        self.users = users = self.read_users(file_path, extended_attributes)
        self.logger.debug('Number of users loaded: %d', len(users))
        return six.itervalues(users)
    def read_users(self, file_path, extended_attributes):
        """
        Parse the CSV at *file_path* into a dict of user dicts keyed by email.
        Rows with a missing/invalid email are skipped; rows sharing an email
        merge into (overwrite fields of) the same user record.
        :type file_path
        :type extended_attributes: list
        :rtype dict
        """
        users = {}
        options = self.options
        logger = self.logger
        recognized_column_names = []
        def get_column_name(key):
            # Resolve an option key to its column name and record it as recognized.
            column_name = options[key]
            recognized_column_names.append(column_name)
            return column_name
        email_column_name = get_column_name('email_column_name')
        first_name_column_name = get_column_name('first_name_column_name')
        last_name_column_name = get_column_name('last_name_column_name')
        country_column_name = get_column_name('country_column_name')
        groups_column_name = get_column_name('groups_column_name')
        identity_type_column_name = get_column_name('identity_type_column_name')
        username_column_name = get_column_name('username_column_name')
        domain_column_name = get_column_name('domain_column_name')
        # extended attributes appear after the standard ones (if no header row)
        recognized_column_names += extended_attributes
        line_read = 0
        rows = CSVAdapter.read_csv_rows(file_path,
                                        recognized_column_names=recognized_column_names,
                                        logger=logger,
                                        encoding=self.encoding,
                                        delimiter=options['delimiter'])
        for row in rows:
            line_read += 1
            email = self.get_column_value(row, email_column_name)
            # An email containing '@' is the minimum requirement for a row.
            if email is None or email.find('@') < 0:
                logger.warning('Missing or invalid email at row: %d; skipping', line_read)
                continue
            user = users.get(email)
            if user is None:
                user = user_sync.connector.helper.create_blank_user()
                user['email'] = email
                users[email] = user
            first_name = self.get_column_value(row, first_name_column_name)
            if first_name is not None:
                user['firstname'] = first_name
            else:
                logger.debug('No value firstname for: %s', email)
            last_name = self.get_column_value(row, last_name_column_name)
            if last_name is not None:
                user['lastname'] = last_name
            else:
                logger.debug('No value lastname for: %s', email)
            country = self.get_column_value(row, country_column_name)
            if country is not None:
                user['country'] = country.upper()
            # The groups column holds a comma-separated list of group names.
            groups = self.get_column_value(row, groups_column_name)
            if groups is not None:
                user['groups'].extend(groups.split(','))
            username = self.get_column_value(row, username_column_name)
            if username is None:
                username = email
            user['username'] = username
            identity_type = self.get_column_value(row, identity_type_column_name)
            if identity_type:
                try:
                    user['identity_type'] = user_sync.identity_type.parse_identity_type(identity_type)
                except user_sync.error.AssertionException as e:
                    # An unparseable identity type invalidates the whole user.
                    self.logger.warning('Skipping user %s: %s', username, e)
                    del users[email]
                    continue
            else:
                user['identity_type'] = self.user_identity_type
            domain = self.get_column_value(row, domain_column_name)
            if domain:
                user['domain'] = domain
            elif username != email:
                # Fall back to the email's domain when a separate username is used.
                user['domain'] = email[email.find('@') + 1:]
            # Preserve every recognized column verbatim as source attributes.
            sa = {}
            for col in recognized_column_names:
                sa[col] = self.get_column_value(row, col)
            user['source_attributes'] = sa
        return users
    def get_column_value(self, row, column_name):
        """
        Return the row's value for *column_name*; empty/missing values
        normalize to None.
        :type row: dict
        :type column_name: str
        """
        value = row.get(column_name)
        return value if value else None
| adobe-apiplatform/user-sync.py | user_sync/connector/directory_csv.py | Python | mit | 7,836 |
"""Test the demo_commands.py file."""
from memoization import cached
from sw_client.demo_commands import _get_movie, _set_max_id
def test_max_id():
    """Test the ``_set_max_id`` decorator's interaction with the cache."""
    @_set_max_id(max_id=10)
    @cached
    def _echo_id(_id: int) -> int:
        return _id

    # Start from an empty cache so the size/hit counts below are absolute.
    _echo_id.cache_clear()

    # These three ids are expected to share a single cache entry.
    _echo_id(0)
    _echo_id(10)
    _echo_id(1e7)
    assert _echo_id.cache_info().current_size == 1

    # This id produces a second, distinct entry.
    _echo_id(11)
    assert _echo_id.cache_info().current_size == 2

    # After one more call the cache bookkeeping shows three hits in total.
    _echo_id(1)
    assert _echo_id.cache_info().hits == 3
def test_get_movie():
    """Exercise _get_movie against the id-100 fixture."""
    film = _get_movie(_id=100)

    assert film.title == 'Lock, Stock and Two Smoking Barrels'
    assert film.budget == 1350000
    # Revenue/vote counts can only grow over time, hence the >= checks.
    assert film.revenue >= 28356188
    assert film.vote_count >= 4351
    assert film.studios[1] == 'The Steve Tisch Company'

    # Id 600 resolves to the same movie and must be served from the cache
    # (caching works with max=500), so exactly one cache hit is recorded.
    assert film == _get_movie(_id=600)
    assert _get_movie.cache_info().hits == 1
| KyleKing/My-Programming-Sketchbook | Assorted_Snippets/python/fake_data_wip/sw_tests.py | Python | mit | 1,026 |
"""
Notebook Tag
------------
This is a liquid-style tag to include a static html rendering of an IPython
notebook in a blog post.
Syntax
------
{% notebook filename.ipynb [ cells[start:end] language[language] ]%}
The file should be specified relative to the ``notebooks`` subdirectory of the
content directory. Optionally, this subdirectory can be specified in the
config file:
NOTEBOOK_DIR = 'notebooks'
The cells[start:end] statement is optional, and can be used to specify which
block of cells from the notebook to include.
The language statement is obvious and can be used to specify whether ipython2
or ipython3 syntax highlighting should be used.
Requirements
------------
- The plugin requires IPython version 1.0 or above. It no longer supports the
standalone nbconvert package, which has been deprecated.
Details
-------
Because the notebook relies on some rather extensive custom CSS, the use of
this plugin requires additional CSS to be inserted into the blog theme.
After typing "make html" when using the notebook tag, a file called
``_nb_header.html`` will be produced in the main directory. The content
of the file should be included in the header of the theme. An easy way
to accomplish this is to add the following lines within the header template
of the theme you use:
{% if EXTRA_HEADER %}
{{ EXTRA_HEADER }}
{% endif %}
and in your ``pelicanconf.py`` file, include the line:
EXTRA_HEADER = open('_nb_header.html').read().decode('utf-8')
this will insert the appropriate CSS. All efforts have been made to ensure
that this CSS will not override formats within the blog theme, but there may
still be some conflicts.
"""
import warnings
import re
import os
from functools import partial
from io import open
from .mdx_liquid_tags import LiquidTags
import IPython
IPYTHON_VERSION = IPython.version_info[0]
try:
import nbformat
except:
pass
if not IPYTHON_VERSION >= 1:
raise ValueError("IPython version 1.0+ required for notebook tag")
if IPYTHON_VERSION > 1:
warnings.warn("Pelican plugin is not designed to work with IPython "
"versions greater than 1.x. CSS styles have changed in "
"later releases.")
try:
from nbconvert.filters.highlight import _pygments_highlight
except ImportError:
try:
from IPython.nbconvert.filters.highlight import _pygments_highlight
except ImportError:
# IPython < 2.0
from IPython.nbconvert.filters.highlight import _pygment_highlight as _pygments_highlight
from pygments.formatters import HtmlFormatter
try:
from nbconvert.exporters import HTMLExporter
except ImportError:
from IPython.nbconvert.exporters import HTMLExporter
try:
from traitlets.config import Config
except ImportError:
from IPython.config import Config
try:
from nbconvert.preprocessors import Preprocessor
except ImportError:
try:
from IPython.nbconvert.preprocessors import Preprocessor
except ImportError:
# IPython < 2.0
from IPython.nbconvert.transformers import Transformer as Preprocessor
try:
from traitlets import Integer
except ImportError:
from IPython.utils.traitlets import Integer
from copy import deepcopy
#----------------------------------------------------------------------
# Some code that will be added to the header:
# Some of the following javascript/css include is adapted from
# IPython/nbconvert/templates/fullhtml.tpl, while some are custom tags
# specifically designed to make the results look good within the
# pelican-octopress theme.
JS_INCLUDE = r"""
<style type="text/css">
/* Overrides of notebook CSS for static HTML export */
div.entry-content {
overflow: visible;
padding: 8px;
}
.input_area {
padding: 0.2em;
}
a.heading-anchor {
white-space: normal;
}
.rendered_html
code {
font-size: .8em;
}
pre.ipynb {
color: black;
background: #f7f7f7;
border: none;
box-shadow: none;
margin-bottom: 0;
padding: 0;
margin: 0px;
font-size: 13px;
}
/* remove the prompt div from text cells */
div.text_cell .prompt {
display: none;
}
/* remove horizontal padding from text cells, */
/* so it aligns with outer body text */
div.text_cell_render {
padding: 0.5em 0em;
}
img.anim_icon{padding:0; border:0; vertical-align:middle; -webkit-box-shadow:none; -box-shadow:none}
div.collapseheader {
width=100%;
background-color:#d3d3d3;
padding: 2px;
cursor: pointer;
font-family:"Helvetica Neue",Helvetica,Arial,sans-serif;
}
</style>
<script type="text/x-mathjax-config">
MathJax.Hub.Config({
tex2jax: {
inlineMath: [['$','$'], ['\\(','\\)']],
processEscapes: true,
displayMath: [['$$','$$'], ["\\[","\\]"]]
}
});
</script>
<script type="text/javascript" async src="https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.2/MathJax.js?config=TeX-MML-AM_CHTML">
</script>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.10.2/jquery.min.js"></script>
<script type="text/javascript">
jQuery(document).ready(function($) {
$("div.collapseheader").click(function () {
$header = $(this).children("span").first();
$codearea = $(this).children(".input_area");
console.log($(this).children());
$codearea.slideToggle(500, function () {
$header.text(function () {
return $codearea.is(":visible") ? "Collapse Code" : "Expand Code";
});
});
});
});
</script>
"""
CSS_WRAPPER = """
<style type="text/css">
{0}
</style>
"""
#----------------------------------------------------------------------
# Create a custom preprocessor
class SliceIndex(Integer):
    """An integer trait that also accepts ``None`` (i.e. "no bound")."""
    default_value = None
    def validate(self, obj, value):
        # ``None`` passes straight through; anything else must satisfy
        # the ordinary Integer validation.
        if value is not None:
            return super(SliceIndex, self).validate(obj, value)
        return value
class SubCell(Preprocessor):
    """Preprocessor that keeps only the slice ``[start:end]`` of a notebook's cells."""
    start = SliceIndex(0, config=True,
                       help="first cell of notebook to be converted")
    end = SliceIndex(None, config=True,
                     help="last cell of notebook to be converted")
    def preprocess(self, nb, resources):
        # Operate on a deep copy so the caller's notebook stays untouched.
        clipped = deepcopy(nb)
        if IPYTHON_VERSION >= 3:
            clipped.cells = clipped.cells[self.start:self.end]
        else:
            # IPython < 3 nests cells inside worksheets.
            for sheet in clipped.worksheets:
                sheet.cells = sheet.cells[self.start:self.end]
        return clipped, resources
    call = preprocess  # IPython < 2.0 invoked transformers via ``call``
#----------------------------------------------------------------------
# Custom highlighter:
# instead of using class='highlight', use class='highlight-ipynb'
def custom_highlighter(source, language='ipython', metadata=None):
    """Highlight *source* with pygments, using the 'highlight-ipynb' CSS
    class (and an 'ipynb' class on the <pre>) so notebook styles do not
    clash with the blog theme's own 'highlight' class."""
    lang = language or 'ipython'
    formatter = HtmlFormatter(cssclass='highlight-ipynb')
    html = _pygments_highlight(source, formatter, lang)
    return html.replace('<pre>', '<pre class="ipynb">')
#----------------------------------------------------------------------
# Below is the pelican plugin code.
#
SYNTAX = "{% notebook /path/to/notebook.ipynb [ cells[start:end] ] [ language[language] ] %}"
FORMAT = re.compile(r"""^(\s+)?(?P<src>\S+)(\s+)?((cells\[)(?P<start>-?[0-9]*):(?P<end>-?[0-9]*)(\]))?(\s+)?((language\[)(?P<language>-?[a-z0-9\+\-]*)(\]))?(\s+)?$""")
@LiquidTags.register('notebook')
def notebook(preprocessor, tag, markup):
    """Render a {% notebook ... %} liquid tag.

    Converts the referenced .ipynb file (relative to the NOTEBOOK_DIR
    content subdirectory) to HTML, optionally restricted to
    cells[start:end] and with an explicit highlighting language, and
    returns the stashed HTML body.  On first use it also writes the
    notebook CSS/JS to _nb_header.html for inclusion in the theme.

    Raises ValueError for a malformed tag or a missing notebook file.
    """
    match = FORMAT.search(markup)
    if match:
        argdict = match.groupdict()
        src = argdict['src']
        start = argdict['start']
        end = argdict['end']
        language = argdict['language']
    else:
        raise ValueError("Error processing input, "
                         "expected syntax: {0}".format(SYNTAX))
    # Normalize the optional slice bounds: empty -> whole notebook.
    if start:
        start = int(start)
    else:
        start = 0
    if end:
        end = int(end)
    else:
        end = None
    language_applied_highlighter = partial(custom_highlighter, language=language)
    nb_dir = preprocessor.configs.getConfig('NOTEBOOK_DIR')
    nb_path = os.path.join('content', nb_dir, src)
    if not os.path.exists(nb_path):
        raise ValueError("File {0} could not be found".format(nb_path))
    # Create the custom notebook converter
    c = Config({'CSSHTMLHeaderTransformer':
                    {'enabled':True, 'highlight_class':'.highlight-ipynb'},
                'SubCell':
                    {'enabled':True, 'start':start, 'end':end}})
    # Pick a template matching the installed IPython major version, if the
    # corresponding custom template file exists next to the site.
    template_file = 'basic'
    if IPYTHON_VERSION >= 3:
        if os.path.exists('pelicanhtml_3.tpl'):
            template_file = 'pelicanhtml_3'
    elif IPYTHON_VERSION == 2:
        if os.path.exists('pelicanhtml_2.tpl'):
            template_file = 'pelicanhtml_2'
    else:
        if os.path.exists('pelicanhtml_1.tpl'):
            template_file = 'pelicanhtml_1'
    # The keyword for cell-slicing transformers was renamed in IPython 2.0.
    if IPYTHON_VERSION >= 2:
        subcell_kwarg = dict(preprocessors=[SubCell])
    else:
        subcell_kwarg = dict(transformers=[SubCell])
    exporter = HTMLExporter(config=c,
                            template_file=template_file,
                            filters={'highlight2html': language_applied_highlighter},
                            **subcell_kwarg)
    # read and parse the notebook
    with open(nb_path, encoding='utf-8') as f:
        nb_text = f.read()
    if IPYTHON_VERSION < 3:
        nb_json = IPython.nbformat.current.reads_json(nb_text)
    else:
        try:
            nb_json = nbformat.reads(nb_text, as_version=4)
        except Exception:
            # Was a bare ``except:``; narrowed so KeyboardInterrupt and
            # SystemExit propagate.  Also covers the case where the
            # top-level ``import nbformat`` failed (NameError).
            nb_json = IPython.nbformat.reads(nb_text, as_version=4)
    (body, resources) = exporter.from_notebook_node(nb_json)
    # if we haven't already saved the header, save it here.
    if not notebook.header_saved:
        print ("\n ** Writing styles to _nb_header.html: "
               "this should be included in the theme. **\n")
        header = '\n'.join(CSS_WRAPPER.format(css_line)
                           for css_line in resources['inlining']['css'])
        header += JS_INCLUDE
        with open('_nb_header.html', 'w') as f:
            f.write(header)
        notebook.header_saved = True
    # this will stash special characters so that they won't be transformed
    # by subsequent processes.
    body = preprocessor.configs.htmlStash.store(body, safe=True)
    return body
notebook.header_saved = False
#----------------------------------------------------------------------
# This import allows notebook to be a Pelican plugin
from liquid_tags import register
| howthebodyworks/pelican-plugins | liquid_tags/notebook.py | Python | agpl-3.0 | 10,596 |
"""
Write a function name_and_age that takes
as input the parameters name (a string) and age (a number)
and returns a string of the form "% is % years old."
Where the percents are the string forms of name and age.
"""
# Name and age formula
def name_and_age(name, age):
    """Return the sentence '<name> is <age> years old.'."""
    return '%s is %s years old.' % (name, age)
# Tests
def test(name, age):
    # Print the formatted sentence (Python 2 print statement).
    print name_and_age(name, age)
# Output
test("Pablo Garcia", 99)
test("Jimi Hendrix", 73)
test("Jimmy Page", 71)
#Pablo Garcia is 99 years old.
#Jimi Hendrix is 73 years old.
#Jimmy Page is 71 years old. | PableraShow/Learn-to-program-with-Python-guide | 02 - Functions, logic and conditionals/exercises/name-and-age.py | Python | mit | 592 |
# -*- coding: utf-8 -*-
__author__ = 'study_sun'
import requests
import sys
import urllib
reload(sys)
sys.setdefaultencoding('utf-8')
class SBDownloader(object):
    """Thin wrapper around ``requests``/``urllib`` for fetching pages and files."""

    def download(self, url, cookie=None, headers=None):
        """Fetch *url* and return the response body, or None on failure.

        When a cookie jar is supplied, HTTP 400 is also accepted: Xueqiu
        sometimes answers non-200 for downloads that in fact succeeded.
        (Consolidates the previously duplicated request logic and fixes
        the ``cookie == None`` identity comparison.)
        """
        if url is None:
            return None
        # ``cookies=None`` is equivalent to not passing cookies at all.
        response = requests.get(url, cookies=cookie, headers=headers)
        acceptable = {requests.codes.ok}
        if cookie is not None:
            # Xueqiu quirk: a 400 response can still carry a valid payload.
            acceptable.add(requests.codes.bad)
        if response.status_code in acceptable:
            return response.text
        return None

    def download_file(self, url, filename='stock_list.xls'):
        """Download *url* to *filename* and return the filename, or None.

        The default filename preserves the previous hard-coded behavior.
        """
        if url is None:
            return None
        urllib.urlretrieve(url, filename)
        return filename
| s6530085/FundSpider | spider_base/downloader.py | Python | mit | 1,040 |
#!/usr/bin/env python
'''
Project: Geothon (https://github.com/MBoustani/Geothon)
File: Vector/create_geojson_multipoint.py
Description: This code creates a geojson multipoint file from couple point data.
Author: Maziyar Boustani (github.com/MBoustani)
'''
try:
import ogr
except ImportError:
from osgeo import ogr
latitudes = [30, 30, 30]
longitudes = [10, 20, 30]

# One multipoint container to collect the individual points.
multipoint = ogr.Geometry(ogr.wkbMultiPoint)

# Wrap each coordinate pair in a point geometry and add it to the
# container.  Note OGR expects (x, y) == (longitude, latitude).
for lon, lat in zip(longitudes, latitudes):
    point = ogr.Geometry(ogr.wkbPoint)
    point.AddPoint(lon, lat)
    multipoint.AddGeometry(point)

# Serialize the whole multipoint to GeoJSON and show it.
geojson_multipoint = multipoint.ExportToJson()
print(geojson_multipoint)
| MBoustani/Geothon | Create Spatial File/Vector/create_geojson_multipoint.py | Python | apache-2.0 | 792 |
import random,math,string
from random import randint
target="11110000"
class Trial:
    """One candidate string in the genetic search.

    A Trial wraps a string value, can score itself against a target
    string and can mutate itself by one random single-character edit.
    """
    def __init__(self, seed):
        self.value = seed

    def score(self, target):
        """Return the fitness of this trial measured against *target*.

        Length mismatches are punished at -2 per missing/extra character;
        every position whose character matches the target earns +1.
        """
        score = (-2) * abs(len(self.value) - len(target))
        for i in range(min(len(self.value), len(target))):
            if self.value[i] == target[i]:
                score += 1
        return score

    def mutate(self):
        """Apply one random edit to self.value and return the new value.

        Edit mix: replace 50%, append 25%, prepend 15%, insert 10%.
        (Fixes the original's duplicated ``option < 75`` threshold, which
        made the prepend branch unreachable, and the ``< 125`` bound that
        made the final branch dead code.)
        """
        alphabet = []
        alphabet.extend(string.digits)
        # ascii_lowercase exists on both Python 2 and 3 (string.lowercase
        # was Python 2 only).
        alphabet.extend(string.ascii_lowercase)
        alphabet.extend(string.whitespace)
        alphabet.extend(", . ? ! * _ - '".split())
        ch = random.choice(alphabet)
        # Position for positional edits; may equal len(value) (end of string).
        location = randint(0, len(self.value))
        option = randint(0, 99)
        if option < 50:    # replace one character (50%)
            self.value = "%s%s%s" % (self.value[:location], ch, self.value[location + 1:])
        elif option < 75:  # append (25%)
            self.value = "%s%s" % (self.value, ch)
        elif option < 90:  # prepend (15%) -- was unreachable before the fix
            self.value = "%s%s" % (ch, self.value)
        else:              # insert at a random position (10%)
            self.value = "%s%s%s" % (self.value[:location], ch, self.value[location:])
        return self.value
class Generator:
    """Attempts to match a given string by genetic modification"""
    def __init__(self):
        # Best trial found so far and the history of every improvement.
        self.best=Trial("")
        self.history=[Trial("")]
        # Search knobs: population size, stall cutoff, and hard iteration cap.
        self.trials_per_generation=50
        self.idleGenerations=20
        self.maxGenerations=400
        self.target=""
    def nextGeneration(self):
        """Breed one generation of mutated copies of the current best trial."""
        #Generate a bunch of trails
        self.generation=[Trial(self.best.value) for x in range(self.trials_per_generation)]
        for trial in self.generation:
            trial.mutate()
            # Keep any mutant that strictly beats the current champion.
            if trial.score(self.target)>self.best.score(self.target):
                self.best=trial
                self.history.append(self.best)
    def EvolveToTarget(self,target):
        """Run the search until *target* is matched, progress stalls for
        idleGenerations generations, or maxGenerations is exhausted."""
        self.target=target;
        goal=Trial(target)
        lastImprovedGeneration=0
        # NOTE(review): starting at 0 means improvements whose score is still
        # negative are never reported below -- confirm this is intended.
        lastscore=0
        print "Working to target %s with a score of %s" % (goal.value,goal.score(target) )
        for i in range(self.maxGenerations): #limit number of generations, just in case
            #Stop when we get the target
            self.nextGeneration()
            newscore=self.best.score(self.target)
            if newscore>lastscore :
                lastImprovedGeneration=i
                print "Best in generation %i :%s scoring %s"%(i ,self.best.value,newscore)
                lastscore=newscore
            # A perfect match scores exactly the target's own self-score.
            if self.best.score(target)==goal.score(target):
                print "Target Reached"
                return
            elif i>self.idleGenerations+lastImprovedGeneration:
                print "Target not reached; Exiting after %s generations without improvement"%self.idleGenerations
                return
#Monitor to ensure we're actually improving with continuous generations
if __name__=="__main__":
    # Demo run: evolve towards a fixed phrase with a larger budget than
    # the Generator defaults provide.
    test=Generator()
    test.trials_per_generation=50
    test.idleGenerations=50
    test.maxGenerations=4000
    test.EvolveToTarget("hello world! you will be deconstructed")
| tekdemo/genetic-testing | stringmatch/stringmatch.py | Python | mit | 3,583 |
#! /usr/bin/python
# Project Euler 4: find the largest palindrome that is the product of
# two 3-digit numbers, and the two factors that produce it.
answerx = 0
answery = 0

def palindrom(num):
    """Return *num* with its decimal digits reversed."""
    return int(str(num)[::-1])

# Scan every pair of 3-digit factors; y starts at x so each unordered
# pair is considered once.  (The original range(100, 999) wrongly
# excluded 999, and carried dead y/x counter statements.)
for x in range(100, 1000):
    for y in range(x, 1000):
        product = x * y
        if product == palindrom(product) and product > answerx * answery:
            answerx, answery = x, y

print(answerx)
print(answery)
print(answerx * answery)
#!/usr/bin/env python
##~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
##~ Copyright (C) 2002-2004 TechGame Networks, LLC.
##~
##~ This library is free software; you can redistribute it and/or
##~ modify it under the terms of the BSD style License as found in the
##~ LICENSE file included with this distribution.
##
## Modified by Dirk Holtwick <holtwick@web.de>, 2007-2008
##~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
"""CSS-2.1 engine
Primary classes:
* CSSElementInterfaceAbstract
Provide a concrete implementation for the XML element model used.
* CSSCascadeStrategy
Implements the CSS-2.1 engine's attribute lookup rules.
* CSSParser
Parses CSS source forms into usable results using CSSBuilder and
CSSMutableSelector. You may want to override parseExternal()
* CSSBuilder (and CSSMutableSelector)
A concrete implementation for cssParser.CSSBuilderAbstract (and
cssParser.CSSSelectorAbstract) to provide usable results to
CSSParser requests.
Dependencies:
python 2.3 (or greater)
sets, cssParser, re (via cssParser)
"""
import sys
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#~ To replace any for with list comprehension
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def stopIter(value):
    # Raise StopIteration carrying *value* as its args; per the note above,
    # used to abort comprehension-based loops from within an expression.
    raise StopIteration(*value)
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#~ Imports
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
import copy
try:
set
except NameError:
from sets import Set as set
try:
from . import cssParser #python 3
except Exception:
import cssParser #python 2
try:
from . import cssSpecial #python 3
except Exception:
import cssSpecial #python 2
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#~ Constants / Variables / Etc.
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
CSSParseError = cssParser.CSSParseError
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#~ Definitions
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class CSSElementInterfaceAbstract(object):
    """Interface the CSS engine expects from XML/DOM element wrappers.
    Concrete subclasses adapt a specific element model; the cascade only
    calls the methods below.
    """
    def getAttr(self, name, default=NotImplemented):
        """Return the value of attribute *name*, or *default* when absent."""
        raise NotImplementedError('Subclass responsibility')
    def getIdAttr(self):
        # Convenience accessor: the element's "id" attribute ('' when absent).
        return self.getAttr('id', '')
    def getClassAttr(self):
        # Convenience accessor: the element's "class" attribute ('' when absent).
        return self.getAttr('class', '')
    def getInlineStyle(self):
        # Expected to return the element's inline style as a ruleset pair
        # indexable as [0]/[1], or a falsy value when there is none (see
        # CSSCascadeStrategy.findCSSRulesFor).
        raise NotImplementedError('Subclass responsibility')
    def matchesNode(self):
        raise NotImplementedError('Subclass responsibility')
    def inPseudoState(self, name, params=()):
        # True when the element is in the named pseudo-state (e.g. :hover).
        raise NotImplementedError('Subclass responsibility')
    def iterXMLParents(self):
        """Results must be compatible with CSSElementInterfaceAbstract"""
        raise NotImplementedError('Subclass responsibility')
    def getPreviousSibling(self):
        raise NotImplementedError('Subclass responsibility')
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class CSSCascadeStrategy(object):
    """Implements the CSS-2.1 cascade over three style sources.

    Each of ``author``, ``user`` and ``userAgenr`` (sic; the misspelled
    attribute name is part of the public interface and kept as-is) is a
    pair of rulesets indexable as [0]/[1], consulted in ascending
    precedence order by iterCSSRulesets().
    """
    author = None
    user = None
    userAgenr = None
    def __init__(self, author=None, user=None, userAgent=None):
        # Only override the class-level defaults when a source is supplied.
        if author is not None:
            self.author = author
        if user is not None:
            self.user = user
        if userAgent is not None:
            self.userAgenr = userAgent
    def copyWithUpdate(self, author=None, user=None, userAgent=None):
        """Return a new strategy, replacing only the sources given."""
        if author is None:
            author = self.author
        if user is None:
            user = self.user
        if userAgent is None:
            userAgent = self.userAgenr
        return self.__class__(author, user, userAgent)
    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    def iterCSSRulesets(self, inline=None):
        """Yield rulesets in ascending cascade priority."""
        if self.userAgenr is not None:
            yield self.userAgenr[0]
            yield self.userAgenr[1]
        if self.user is not None:
            yield self.user[0]
        if self.author is not None:
            yield self.author[0]
            yield self.author[1]
        if inline:
            yield inline[0]
            yield inline[1]
        if self.user is not None:
            # user[1] comes last -- presumably the user !important layer,
            # which CSS 2.1 ranks above everything else.
            yield self.user[1]
    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    def findStyleFor(self, element, attrName, default=NotImplemented):
        """Attempts to find the style setting for attrName in the CSSRulesets.
        Note: This method does not attempt to resolve rules that return
        "inherited", "default", or values that have units (including "%").
        This is left up to the client app to re-query the CSS in order to
        implement these semantics.
        """
        rule = self.findCSSRulesFor(element, attrName)
        return self._extractStyleForRule(rule, attrName, default)
    def findStylesForEach(self, element, attrNames, default=NotImplemented):
        """Attempts to find the style setting for attrName in the CSSRulesets.
        Note: This method does not attempt to resolve rules that return
        "inherited", "default", or values that have units (including "%").
        This is left up to the client app to re-query the CSS in order to
        implement these semantics.
        """
        rules = self.findCSSRulesForEach(element, attrNames)
        if sys.version[0] == '2':
            iteritems = rules.iteritems()
        else:
            iteritems = iter(rules.items())
        return [(attrName, self._extractStyleForRule(rule, attrName, default))
                for attrName, rule in iteritems]
    def findCSSRulesFor(self, element, attrName):
        """Collect and sort every matching rule for *attrName* on *element*."""
        rules = []
        inline = element.getInlineStyle()
        # The ruleset visits below are an unrolled iterCSSRulesets(inline)
        # (the original author found the generator too slow here).
        if self.userAgenr is not None:
            rules += self.userAgenr[0].findCSSRuleFor(element, attrName)
            rules += self.userAgenr[1].findCSSRuleFor(element, attrName)
        if self.user is not None:
            rules += self.user[0].findCSSRuleFor(element, attrName)
        if self.author is not None:
            rules += self.author[0].findCSSRuleFor(element, attrName)
            rules += self.author[1].findCSSRuleFor(element, attrName)
        if inline:
            rules += inline[0].findCSSRuleFor(element, attrName)
            rules += inline[1].findCSSRuleFor(element, attrName)
        if self.user is not None:
            rules += self.user[1].findCSSRuleFor(element, attrName)
        rules.sort()
        return rules
    def findCSSRulesForEach(self, element, attrNames):
        """Like findCSSRulesFor(), but resolves many attributes in one pass."""
        rules = dict((name, []) for name in attrNames)
        inline = element.getInlineStyle()
        for ruleset in self.iterCSSRulesets(inline):
            if sys.version[0] == '2':
                iteritems = rules.iteritems()
            else:
                iteritems = iter(rules.items())
            for attrName, attrRules in iteritems:
                attrRules += ruleset.findCSSRuleFor(element, attrName)
        # Bug fix: ``rules.itervalues()`` exists only on Python 2; mirror the
        # version check used above so this method also works on Python 3.
        if sys.version[0] == '2':
            itervalues = rules.itervalues()
        else:
            itervalues = iter(rules.values())
        for attrRules in itervalues:
            attrRules.sort()
        return rules
    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    def _extractStyleForRule(self, rule, attrName, default=NotImplemented):
        """Return the winning (last, highest-priority) value for *attrName*."""
        if rule:
            # rule is packed in a list to differentiate from "no rule" vs "rule
            # whose value evalutates as False"
            style = rule[-1][1]
            return style[attrName]
        elif default is not NotImplemented:
            return default
        raise LookupError("Could not find style for '%s' in %r" % (attrName, rule))
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#~ CSS Selectors
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class CSSSelectorBase(object):
    """Base selector: a (nsPrefix, namespace, name) triple plus a sequence
    of qualifiers, ordered by CSS 2.1 specificity."""
    inline = False
    _hash = None
    _specificity = None
    def __init__(self, completeName='*'):
        if not isinstance(completeName, tuple):
            # A bare tag name means: no namespace prefix, any namespace.
            completeName = (None, '*', completeName)
        self.completeName = completeName
    def _updateHash(self):
        # Called by immutable subclasses; mutable selectors fall back to
        # identity hashing in __hash__.
        self._hash = hash((self.fullName, self.specificity(), self.qualifiers))
    def __hash__(self):
        if self._hash is None:
            return object.__hash__(self)
        return self._hash
    def getNSPrefix(self):
        return self.completeName[0]
    nsPrefix = property(getNSPrefix)
    def getName(self):
        return self.completeName[2]
    name = property(getName)
    def getNamespace(self):
        return self.completeName[1]
    namespace = property(getNamespace)
    def getFullName(self):
        # (namespace, name) without the prefix.
        return self.completeName[1:3]
    fullName = property(getFullName)
    def __repr__(self):
        strArgs = (self.__class__.__name__,) + self.specificity() + (self.asString(),)
        return '<%s %d:%d:%d:%d %s >' % strArgs
    def __str__(self):
        return self.asString()
    def __cmp__(self, other):
        # NOTE(review): __cmp__ and the cmp builtin are Python 2 only;
        # sorting selectors on Python 3 would need rich comparisons -- confirm.
        result = cmp(self.specificity(), other.specificity())
        if result != 0:
            return result
        result = cmp(self.fullName, other.fullName)
        if result != 0:
            return result
        result = cmp(self.qualifiers, other.qualifiers)
        return result
    def specificity(self):
        # Lazily computed and cached.
        if self._specificity is None:
            self._specificity = self._calcSpecificity()
        return self._specificity
    def _calcSpecificity(self):
        """from http://www.w3.org/TR/CSS21/cascade.html#specificity"""
        hashCount = 0
        qualifierCount = 0
        elementCount = int(self.name != '*')
        for q in self.qualifiers:
            if q.isHash():
                hashCount += 1
            elif q.isClass():
                qualifierCount += 1
            elif q.isAttr():
                qualifierCount += 1
            elif q.isPseudo():
                elementCount += 1
            elif q.isCombiner():
                # Combined selectors contribute their own counts; the inline
                # flag i is deliberately ignored.
                i, h, q, e = q.selector.specificity()
                hashCount += h
                qualifierCount += q
                elementCount += e
        return self.inline, hashCount, qualifierCount, elementCount
    def matches(self, element=None):
        if element is None:
            return False
        # with CSSDOMElementInterface.matchesNode(self, (namespace, tagName)) replacement:
        if self.fullName[1] not in ('*', element.domElement.tagName):
            return False
        if self.fullName[0] not in (None, '', '*') and self.fullName[0] != element.domElement.namespaceURI:
            return False
        # All qualifiers must match; the for/else returns True when the loop
        # finishes without an early return (including the empty case).
        for qualifier in self.qualifiers:
            if not qualifier.matches(element):
                return False
        else:
            return True
    def asString(self):
        # Combinator qualifiers render before the element name, all others
        # after it.
        result = []
        if self.nsPrefix is not None:
            result.append('%s|%s' % (self.nsPrefix, self.name))
        else:
            result.append(self.name)
        for q in self.qualifiers:
            if q.isCombiner():
                result.insert(0, q.asString())
            else:
                result.append(q.asString())
        return ''.join(result)
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class CSSInlineSelector(CSSSelectorBase):
    # Selector for style="..." attribute declarations; inline=True puts it
    # first in the specificity tuple (see CSSSelectorBase._calcSpecificity).
    inline = True
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class CSSMutableSelector(CSSSelectorBase, cssParser.CSSSelectorAbstract):
    """Selector that is built up qualifier-by-qualifier during parsing."""

    # Shared empty default; _addQualifier replaces it with a per-instance list.
    qualifiers = []

    def asImmutable(self):
        """Freeze this selector (and all of its qualifiers)."""
        frozen = [qualifier.asImmutable() for qualifier in self.qualifiers]
        return CSSImmutableSelector(self.completeName, frozen)

    @classmethod
    def combineSelectors(klass, selectorA, op, selectorB):
        """Attach selectorA to selectorB via combinator op; return selectorB."""
        selectorB.addCombination(op, selectorA)
        return selectorB

    def addCombination(self, op, other):
        self._addQualifier(CSSSelectorCombinationQualifier(op, other))

    def add_hash_id(self, hashId):
        self._addQualifier(CSSSelectorHashQualifier(hashId))

    def add_class(self, class_):
        self._addQualifier(CSSSelectorClassQualifier(class_))

    def add_attribute(self, attrName):
        self._addQualifier(CSSSelectorAttributeQualifier(attrName))

    def add_attribute_operation(self, attrName, op, attrValue):
        self._addQualifier(CSSSelectorAttributeQualifier(attrName, op, attrValue))

    def add_pseudo(self, name):
        self._addQualifier(CSSSelectorPseudoQualifier(name))

    def add_pseudo_function(self, name, params):
        self._addQualifier(CSSSelectorPseudoQualifier(name, params))

    def _addQualifier(self, qualifier):
        # Never mutate the class-level empty list shared by all instances.
        if not self.qualifiers:
            self.qualifiers = [qualifier]
        else:
            self.qualifiers.append(qualifier)
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class CSSImmutableSelector(CSSSelectorBase):
    """Hashable, frozen selector; qualifiers are stored as a tuple."""

    def __init__(self, completeName='*', qualifiers=()):
        self.qualifiers = tuple(qualifiers)
        CSSSelectorBase.__init__(self, completeName)
        self._updateHash()

    @classmethod
    def fromSelector(klass, selector):
        """Build an immutable copy of any other selector."""
        return klass(selector.completeName, selector.qualifiers)
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#~ CSS Selector Qualifiers -- see CSSImmutableSelector
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class CSSSelectorQualifierBase(object):
    """Base qualifier: every type test defaults to False; each subclass
    overrides the one that applies.  Qualifiers are immutable by default
    (asImmutable returns self)."""
    def isHash(self):
        return False
    def isClass(self):
        return False
    def isAttr(self):
        return False
    def isPseudo(self):
        return False
    def isCombiner(self):
        return False
    def asImmutable(self):
        return self
    def __str__(self):
        # Delegates to asString(), which subclasses must provide.
        return self.asString()
class CSSSelectorHashQualifier(CSSSelectorQualifierBase):
    """An '#id' qualifier; matches on the element's id attribute."""

    def __init__(self, hashId):
        self.hashId = hashId

    def isHash(self):
        return True

    def __hash__(self):
        return hash((self.hashId,))

    def asString(self):
        return '#%s' % self.hashId

    def matches(self, element):
        return self.hashId == element.getIdAttr()
class CSSSelectorClassQualifier(CSSSelectorQualifierBase):
    """A '.class' qualifier; matches when classId appears in the element's
    whitespace-separated class attribute."""

    def __init__(self, classId):
        self.classId = classId

    def isClass(self):
        return True

    def __hash__(self):
        return hash((self.classId,))

    def asString(self):
        return '.%s' % self.classId

    def matches(self, element):
        attrValue = element.domElement.attributes.get('class')
        if attrValue is None:
            return False
        return self.classId in attrValue.value.split()
class CSSSelectorAttributeQualifier(CSSSelectorQualifierBase):
    """An '[attr]', '[attr=v]', '[attr~=v]' or '[attr|=v]' qualifier."""
    # Class-level defaults; __init__ only creates instance attributes when
    # the given values differ from these defaults.
    name, op, value = None, None, NotImplemented
    def __init__(self, attrName, op=None, attrValue=NotImplemented):
        self.name = attrName
        if op is not self.op:
            self.op = op
        if attrValue is not self.value:
            self.value = attrValue
    def isAttr(self):
        return True
    def __hash__(self):
        return hash((self.name, self.op, self.value))
    def asString(self):
        if self.value is NotImplemented:
            return '[%s]' % (self.name,)
        return '[%s%s%s]' % (self.name, self.op, self.value)
    def matches(self, element):
        if self.op is None:
            # Presence test: any value (even empty) matches.
            return element.getAttr(self.name, NotImplemented) != NotImplemented
        elif self.op == '=':
            return self.value == element.getAttr(self.name, NotImplemented)
        elif self.op == '~=':
            #return self.value in element.getAttr(self.name, '').split()
            attrValue = element.domElement.attributes.get(self.name)
            if attrValue is not None:
                return self.value in attrValue.value.split()
            return False
        elif self.op == '|=':
            #return self.value in element.getAttr(self.name, '').split('-')
            attrValue = element.domElement.attributes.get(self.name)
            if attrValue is not None:
                return self.value in attrValue.value.split('-')
            return False
        raise RuntimeError("Unknown operator %r for %r" % (self.op, self))
class CSSSelectorPseudoQualifier(CSSSelectorQualifierBase):
    """A ':pseudo' or ':pseudo(params)' qualifier."""

    def __init__(self, attrName, params=()):
        self.name = attrName
        self.params = tuple(params)

    def isPseudo(self):
        return True

    def __hash__(self):
        return hash((self.name, self.params))

    def asString(self):
        # Bugfix: the branches were inverted -- a pseudo *function* (one with
        # params, e.g. :lang(en)) must render as ':name(params)', while a
        # plain pseudo-class renders as ':name'.
        if self.params:
            return ':%s(%s)' % (self.name, self.params)
        else:
            return ':' + self.name

    def matches(self, element):
        # Pseudo-state (e.g. hover/first-child) is delegated to the element
        # interface.
        return element.inPseudoState(self.name, self.params)
class CSSSelectorCombinationQualifier(CSSSelectorQualifierBase):
    """Combinator qualifier: ' ' (descendant), '>' (child) or '+' (adjacent
    sibling), carrying the left-hand selector of the combination."""
    def __init__(self, op, selector):
        self.op = op
        self.selector = selector
    def isCombiner(self):
        return True
    def __hash__(self):
        return hash((self.op, self.selector))
    def asImmutable(self):
        return self.__class__(self.op, self.selector.asImmutable())
    def asString(self):
        return '%s%s' % (self.selector.asString(), self.op)
    def matches(self, element):
        if self.op == ' ':
            if element is not None:
                if element.matchesNode(self.selector.fullName):
                    try:
                        # stopIter is a module-level helper (defined elsewhere
                        # in this file) that raises StopIteration; the list
                        # comprehension is (ab)used as a "first ancestor
                        # matching a qualifier" search.
                        for parent in element.iterXMLParents():
                            [None for qualifier in self.selector.qualifiers if
                             qualifier.matches(parent) and stopIter((None,))]
                    except StopIteration:
                        return True
            return False
        elif self.op == '>':
            if element is not None:
                if element.matchesNode(self.selector.fullName):
                    if self.selector.qualifiers[0].matches(element):
                        return True
            return False
        elif self.op == '+':
            return self.selector.matches(element.getPreviousSibling())
        # NOTE(review): any other operator falls through and implicitly
        # returns None (falsy) -- confirm that is intended.
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#~ CSS Misc
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class CSSTerminalFunction(object):
    """A CSS function term, e.g. rgb(...) or counter(...), kept unevaluated."""

    def __init__(self, name, params):
        self.name = name
        self.params = params

    def __repr__(self):
        arglist = ', '.join(self.params)
        return '<CSS function: {0}({1})>'.format(self.name, arglist)
class CSSTerminalOperator(tuple):
    """An operator expression between CSS terms, stored as a tuple of its
    operands/operator as produced by CSSBuilder.combine_terms."""

    def __new__(klass, *args):
        return tuple.__new__(klass, args)

    def __repr__(self):
        return 'op%s' % (tuple.__repr__(self),)
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#~ CSS Objects
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class CSSDeclarations(dict):
    # Plain property-name -> value mapping; a distinct type so declaration
    # blocks can be told apart from ordinary dicts.
    pass
class CSSRuleset(dict):
    """Maps immutable selectors to CSSDeclarations."""
    def findCSSRulesFor(self, element, attrName):
        # Collect (selector, declarations) pairs whose selector matches the
        # element and whose declarations define attrName; ordering relies on
        # selector specificity comparison.
        if sys.version[0] == '2':
            iteritems = self.iteritems()
        else:
            iteritems = iter(self.items())
        ruleResults = [(nodeFilter, declarations) for nodeFilter, declarations in iteritems if
                       (attrName in declarations) and (nodeFilter.matches(element))]
        ruleResults.sort()
        return ruleResults
    def findCSSRuleFor(self, element, attrName):
        # rule is packed in a list to differentiate from "no rule" vs "rule
        # whose value evalutates as False"
        return self.findCSSRulesFor(element, attrName)[-1:]
    def mergeStyles(self, styles):
        " XXX Bugfix for use in PISA "
        if sys.version[0] == '2':
            iteritems = styles.iteritems()
        else:
            iteritems = iter(styles.items())
        for k, v in iteritems:
            if k in self and self[k]:
                # Copy before updating so a declarations object shared with
                # another ruleset is not mutated in place.
                self[k] = copy.copy(self[k])
                self[k].update(v)
            else:
                self[k] = v
class CSSInlineRuleset(CSSRuleset, CSSDeclarations):
    """Declarations from a style="..." attribute; acts as a one-selector
    ruleset where every declared property matches via CSSInlineSelector."""
    def findCSSRulesFor(self, element, attrName):
        if attrName in self:
            return [(CSSInlineSelector(), self)]
        return []
    def findCSSRuleFor(self, *args, **kw):
        # rule is packed in a list to differentiate from "no rule" vs "rule
        # whose value evalutates as False"
        return self.findCSSRulesFor(*args, **kw)[-1:]
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#~ CSS Builder
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class CSSBuilder(cssParser.CSSBuilderAbstract):
    """Callback target for cssParser: assembles parsed CSS into ruleset
    objects, optionally split into (normal, important) pairs when
    trackImportance is on."""
    RulesetFactory = CSSRuleset
    SelectorFactory = CSSMutableSelector
    MediumSetFactory = set
    DeclarationsFactory = CSSDeclarations
    TermFunctionFactory = CSSTerminalFunction
    TermOperatorFactory = CSSTerminalOperator
    xmlnsSynonyms = {}
    mediumSet = None
    trackImportance = True
    charset = None
    def __init__(self, mediumSet=mediumSet, trackImportance=trackImportance):
        # NOTE(review): the defaults are the class attributes captured at
        # class-definition time; mediumSet=None would make set(None) raise in
        # setMediumSet, so callers presumably always pass an iterable --
        # confirm.
        self.setMediumSet(mediumSet)
        self.setTrackImportance(trackImportance)
    def isValidMedium(self, mediums):
        # True when any requested medium intersects ours ('all' always wins).
        if not mediums:
            return False
        if 'all' in mediums:
            return True
        mediums = self.MediumSetFactory(mediums)
        return bool(self.getMediumSet().intersection(mediums))
    def getMediumSet(self):
        return self.mediumSet
    def setMediumSet(self, mediumSet):
        self.mediumSet = self.MediumSetFactory(mediumSet)
    def updateMediumSet(self, mediumSet):
        self.getMediumSet().update(mediumSet)
    def getTrackImportance(self):
        return self.trackImportance
    def setTrackImportance(self, trackImportance=True):
        self.trackImportance = trackImportance
    #~ helpers ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    def _pushState(self):
        # Shallow-copy __dict__ so state such as @namespace declarations is
        # scoped to the current stylesheet and restored by _popState().
        _restoreState = self.__dict__
        self.__dict__ = self.__dict__.copy()
        self._restoreState = _restoreState
        self.namespaces = {}
    def _popState(self):
        self.__dict__ = self._restoreState
    def _declarations(self, declarations, DeclarationsFactory=None):
        # Split (name, value, important) triples into (normal, important)
        # declaration mappings when importance tracking is on.
        DeclarationsFactory = DeclarationsFactory or self.DeclarationsFactory
        if self.trackImportance:
            normal, important = [], []
            for d in declarations:
                if d[-1]:
                    important.append(d[:-1])
                else:
                    normal.append(d[:-1])
            return DeclarationsFactory(normal), DeclarationsFactory(important)
        else:
            return DeclarationsFactory(declarations)
    def _xmlnsGetSynonym(self, uri):
        # Don't forget to substitute our namespace synonyms!
        return self.xmlnsSynonyms.get(uri or None, uri) or None
    #~ css results ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    def begin_stylesheet(self):
        self._pushState()
    def end_stylesheet(self):
        self._popState()
    def stylesheet(self, stylesheetElements, stylesheetImports):
        # XXX Updated for PISA
        if self.trackImportance:
            normal, important = self.RulesetFactory(), self.RulesetFactory()
            for normalStylesheet, importantStylesheet in stylesheetImports:
                normal.mergeStyles(normalStylesheet)
                important.mergeStyles(importantStylesheet)
            for normalStyleElement, importantStyleElement in stylesheetElements:
                normal.mergeStyles(normalStyleElement)
                important.mergeStyles(importantStyleElement)
            return normal, important
        else:
            result = self.RulesetFactory()
            for stylesheet in stylesheetImports:
                result.mergeStyles(stylesheet)
            for styleElement in stylesheetElements:
                result.mergeStyles(styleElement)
            return result
    def begin_inline(self):
        self._pushState()
    def end_inline(self):
        self._popState()
    def specialRules(self, declarations):
        return cssSpecial.parse_special_rules(declarations)
    def inline(self, declarations):
        declarations = self.specialRules(declarations)
        return self._declarations(declarations, CSSInlineRuleset)
    def ruleset(self, selectors, declarations):
        # XXX Modified for pisa!
        declarations = self.specialRules(declarations)
        # XXX Modified for pisa!
        if self.trackImportance:
            normalDecl, importantDecl = self._declarations(declarations)
            normal, important = self.RulesetFactory(), self.RulesetFactory()
            for s in selectors:
                s = s.asImmutable()
                if normalDecl:
                    normal[s] = normalDecl
                if importantDecl:
                    important[s] = importantDecl
            return normal, important
        else:
            declarations = self._declarations(declarations)
            result = [(s.asImmutable(), declarations) for s in selectors]
            return self.RulesetFactory(result)
    #~ css namespaces ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    def resolve_namespace_prefix(self, nsPrefix, name):
        if nsPrefix == '*':
            return (nsPrefix, '*', name)
        xmlns = self.namespaces.get(nsPrefix, None)
        xmlns = self._xmlnsGetSynonym(xmlns)
        return (nsPrefix, xmlns, name)
    #~ css @ directives ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    def at_charset(self, charset):
        self.charset = charset
    def at_import(self, import_, mediums, cssParser):
        if self.isValidMedium(mediums):
            return cssParser.parseExternal(import_)
        return None
    def at_namespace(self, nsprefix, uri):
        self.namespaces[nsprefix] = uri
    def at_media(self, mediums, ruleset):
        if self.isValidMedium(mediums):
            return ruleset
        return None
    def at_page(self, page, pseudopage, declarations):
        """
        This is overriden by xhtml2pdf.context.pisaCSSBuilder
        """
        return self.ruleset([self.selector('*')], declarations)
    def at_font_face(self, declarations):
        """
        This is overriden by xhtml2pdf.context.pisaCSSBuilder
        """
        return self.ruleset([self.selector('*')], declarations)
    def at_ident(self, atIdent, cssParser, src):
        # Unknown @-directives are left unconsumed.
        return src, NotImplemented
    #~ css selectors ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    def selector(self, name):
        return self.SelectorFactory(name)
    def combine_selectors(self, selectorA, op, selectorB):
        return self.SelectorFactory.combineSelectors(selectorA, op, selectorB)
    #~ css declarations ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    def property(self, name, value, important=False):
        if self.trackImportance:
            return (name, value, important)
        return (name, value)
    def combine_terms(self, termA, op, termB):
        if op in (',', ' '):
            if isinstance(termA, list):
                termA.append(termB)
                return termA
            return [termA, termB]
        elif op is None and termB is None:
            return [termA]
        else:
            if isinstance(termA, list):
                # Bind these "closer" than the list operators -- i.e. work on
                # the (recursively) last element of the list
                termA[-1] = self.combine_terms(termA[-1], op, termB)
                return termA
            return self.TermOperatorFactory(termA, op, termB)
    def term_ident(self, value):
        return value
    def term_number(self, value, units=None):
        if units:
            return value, units
        return value
    def term_rgb(self, value):
        return value
    def term_uri(self, value):
        return value
    def term_string(self, value):
        return value
    def term_unicode_range(self, value):
        return value
    def term_function(self, name, value):
        return self.TermFunctionFactory(name, value)
    def term_unknown(self, src):
        return src, NotImplemented
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#~ CSS Parser -- finally!
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class CSSParser(cssParser.CSSParser):
    """CSS parser wired to CSSBuilder; adds external stylesheet loading."""
    CSSBuilderFactory = CSSBuilder

    def __init__(self, css_builder=None, create=True, **kw):
        # Build a default CSSBuilder (with any extra keyword arguments) when
        # the caller does not supply one.
        if not css_builder and create:
            assert css_builder is None
            css_builder = self.createCSSBuilder(**kw)
        cssParser.CSSParser.__init__(self, css_builder)

    def createCSSBuilder(self, **kw):
        return self.CSSBuilderFactory(**kw)

    def parseExternal(self, cssResourceName):
        """Parse a stylesheet from a local file path.

        Raises RuntimeError when the path does not exist.
        """
        if os.path.isfile(cssResourceName):
            # Bugfix: the ``file`` builtin does not exist on Python 3;
            # ``open`` is equivalent on both Python 2 and 3.
            cssFile = open(cssResourceName, 'r')
            return self.parse_file(cssFile, True)
        raise RuntimeError("Cannot resolve external CSS file: \"%s\"" % cssResourceName)
| zulumarketing/html2pdf | xhtml2pdf/w3c/css.py | Python | apache-2.0 | 28,785 |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2016 Matt Martz <matt@sivel.net>
# Copyright (C) 2016 Rackspace US, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import runpy
import json
import os
import subprocess
import sys
from contextlib import contextmanager
from ansible.module_utils.six import reraise
from .utils import CaptureStd, find_executable, get_module_name_from_filename
class AnsibleModuleCallError(RuntimeError):
    # Raised by the fake AnsibleModule.__init__ to abort module execution
    # immediately after the argument spec has been captured.
    pass
class AnsibleModuleImportError(ImportError):
    # Importing/executing the module under inspection failed.
    pass
class AnsibleModuleNotInitialized(Exception):
    # The module ran to completion without ever instantiating AnsibleModule.
    pass
class _FakeAnsibleModuleInit:
def __init__(self):
self.args = tuple()
self.kwargs = {}
self.called = False
def __call__(self, *args, **kwargs):
self.args = args
self.kwargs = kwargs
self.called = True
raise AnsibleModuleCallError('AnsibleModuleCallError')
def _fake_load_params():
    # Replacement for basic._load_params so importing a module never tries to
    # read real module parameters from stdin/file.
    pass
@contextmanager
def setup_env(filename):
    """Temporarily neuter AnsibleModule.__init__ and _load_params.

    Within the context, instantiating AnsibleModule records the call on the
    yielded _FakeAnsibleModuleInit and raises AnsibleModuleCallError instead
    of running real module initialization.  On exit the original functions
    are restored and freshly-imported module_utils submodules are unloaded.
    """
    # Used to clean up imports later
    pre_sys_modules = list(sys.modules.keys())
    fake = _FakeAnsibleModuleInit()
    module = __import__('ansible.module_utils.basic').module_utils.basic
    _original_init = module.AnsibleModule.__init__
    _original_load_params = module._load_params
    setattr(module.AnsibleModule, '__init__', fake)
    setattr(module, '_load_params', _fake_load_params)
    try:
        yield fake
    finally:
        setattr(module.AnsibleModule, '__init__', _original_init)
        setattr(module, '_load_params', _original_load_params)
        # Clean up imports to prevent issues with mutable data being used in modules
        for k in list(sys.modules.keys()):
            # It's faster if we limit to items in ansible.module_utils
            # But if this causes problems later, we should remove it
            if k not in pre_sys_modules and k.startswith('ansible.module_utils.'):
                del sys.modules[k]
def get_ps_argument_spec(filename):
    """Return (argument_spec, args, kwargs) for a PowerShell module."""
    # This uses a very small skeleton of Ansible.Basic.AnsibleModule to return the argspec defined by the module. This
    # is pretty rudimentary and will probably require something better going forward.
    pwsh = find_executable('pwsh')
    if not pwsh:
        raise FileNotFoundError('Required program for PowerShell arg spec inspection "pwsh" not found.')
    script_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'ps_argspec.ps1')
    # NOTE(review): the helper script is executed directly rather than via the
    # located pwsh binary -- presumably it is executable with a pwsh shebang;
    # confirm.
    proc = subprocess.Popen([script_path, filename], stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=False)
    stdout, stderr = proc.communicate()
    if proc.returncode != 0:
        raise AnsibleModuleImportError(stderr.decode('utf-8'))
    kwargs = json.loads(stdout)
    # the validate-modules code expects the options spec to be under the argument_spec key not options as set in PS
    kwargs['argument_spec'] = kwargs.pop('options', {})
    return kwargs['argument_spec'], (), kwargs
def get_py_argument_spec(filename, collection):
    """Execute a Python module under a faked AnsibleModule and return
    (argument_spec, constructor args, constructor kwargs)."""
    name = get_module_name_from_filename(filename, collection)
    with setup_env(filename) as fake:
        try:
            with CaptureStd():
                runpy.run_module(name, run_name='__main__', alter_sys=True)
        except AnsibleModuleCallError:
            # Expected: raised by the fake __init__ once the spec is captured.
            pass
        except BaseException as e:
            # we want to catch all exceptions here, including sys.exit
            reraise(AnsibleModuleImportError, AnsibleModuleImportError('%s' % e), sys.exc_info()[2])
    if not fake.called:
        raise AnsibleModuleNotInitialized()
    try:
        try:
            # for ping kwargs == {'argument_spec':{'data':{'type':'str','default':'pong'}}, 'supports_check_mode':True}
            return fake.kwargs['argument_spec'], fake.args, fake.kwargs
        except KeyError:
            # Spec may have been passed positionally instead.
            return fake.args[0], fake.args, fake.kwargs
    except (TypeError, IndexError):
        return {}, (), {}
def get_argument_spec(filename, collection):
    """Extract a module's argument spec, dispatching on the file type."""
    if not filename.endswith('.py'):
        # Anything that is not Python is assumed to be a PowerShell module.
        return get_ps_argument_spec(filename)
    return get_py_argument_spec(filename, collection)
| kvar/ansible | test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/module_args.py | Python | gpl-3.0 | 4,786 |
#!/usr/bin/env python
#
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Acceleration module for semantic protobuf parsing."""
# pylint: disable=g-importing-member
from distutils.core import Extension
from distutils.core import setup
# pylint: enable=g-importing-member
# C source files compiled into the _semantic extension module.
SOURCES = ["accelerated.c"]
# Builds the accelerated semantic-protobuf parser as a C extension.
setup(
    name="grr",
    version="0.1",
    long_description="Semantic protobufs are smart protocol buffers.",
    license="Apache 2.0",
    author="Michael Cohen",
    ext_modules=[Extension(
        "_semantic",
        SOURCES,)],)
| google/grr | grr/core/accelerated/setup.py | Python | apache-2.0 | 1,089 |
import utils
import logging
import json
from bills import bill_ids_for, save_bill_search_state
from bill_info import fetch_bill, output_for_bill
from amendment_info import fetch_amendment
def run(options):
    """Fetch and store amendment data.

    Depending on *options*, fetches a single amendment (``amendment_id``),
    all amendments of one bill (``bill_id``), or every amendment for a
    congress (``congress``, defaulting to the current one).  Returns None on
    error or when there is nothing to do.
    """
    amendment_id = options.get('amendment_id', None)
    bill_id = options.get('bill_id', None)
    search_state = {}
    if amendment_id:
        amendment_type, number, congress = utils.split_bill_id(amendment_id)
        to_fetch = [amendment_id]
    elif bill_id:
        # first, crawl the bill so its amendment list is available
        bill_type, number, congress = utils.split_bill_id(bill_id)
        bill_status = fetch_bill(bill_id, options)
        if bill_status['ok']:
            bill = json.loads(utils.read(output_for_bill(bill_id, "json")))
            to_fetch = [x["amendment_id"] for x in bill["amendments"]]
        else:
            logging.error("Couldn't download information for that bill.")
            return None
    else:
        congress = options.get('congress', utils.current_congress())
        to_fetch = bill_ids_for(congress, utils.merge(options, {'amendments': True}), bill_states=search_state)
        if not to_fetch:
            if options.get("fast", False):
                # logging.warn is a deprecated alias for logging.warning
                logging.warning("No amendments changed.")
            else:
                logging.error("Error figuring out which amendments to download, aborting.")
            return None
    limit = options.get('limit', None)
    if limit:
        to_fetch = to_fetch[:int(limit)]
    if options.get('pages_only', False):
        return None
    logging.warning("Going to fetch %i amendments from congress #%s" % (len(to_fetch), congress))
    saved_amendments = utils.process_set(to_fetch, fetch_amendment, options)
    # keep record of the last state of all these amendments, for later fast-searching
    save_bill_search_state(saved_amendments, search_state)
| chriscondon/billtext | tasks/amendments.py | Python | cc0-1.0 | 1,859 |
# Copyright (C) 2015 Optiv, Inc. (brad.spengler@optiv.com)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from lib.cuckoo.common.abstracts import Signature
class NetworkDGA(Signature):
    """Heuristically scores DNS traffic for domain-generation-algorithm use."""
    name = "network_dga"
    description = "Likely use of Domain Generation Algorithm (DGA)"
    severity = 3
    categories = ["network"]
    authors = ["Optiv"]
    minimum = "1.3"
    def run(self):
        resolv_success = 0
        dga_score = 0
        if "network" in self.results:
            if "dns" in self.results["network"]:
                if len(self.results["network"]["dns"]) > 0:
                    for dns in self.results["network"]["dns"]:
                        for ans in dns["answers"]:
                            if ans["type"] == "NXDOMAIN":
                                # Only score bare second-level domains
                                # (exactly one dot in the request).
                                if dns["request"].count('.') == 1:
                                    num_cnt = sum(c.isdigit() for c in dns["request"])
                                    # whitelist domains with potentially the year in the name
                                    if num_cnt > 1 and "20" not in dns["request"]:
                                        dga_score += num_cnt
                                    if len(dns["request"]) > 20:
                                        dga_score += 2
                                    if len(dns["request"]) > 30:
                                        dga_score += 10
                                    # TLDs commonly seen in DGA campaigns.
                                    if dns["request"].endswith((".su", ".kz", ".cc", ".ws", ".tk", ".so", ".to")):
                                        dga_score += 2
                                    dga_score += 1
                            else:
                                resolv_success += 1
        # to deal with old malware with completely dead domains
        if not resolv_success:
            dga_score = 0
        else:
            # NOTE(review): integer division under Python 2 -- averages the
            # score per successfully resolved lookup; confirm intended.
            dga_score /= resolv_success
        if dga_score > 4:
            return True
| lixiangning888/whole_project | modules/signatures_orginal_20151110/network_dga.py | Python | lgpl-3.0 | 2,518 |
from gpaw.xc.libxc import LibXC
from gpaw.xc.lda import LDA
from gpaw.xc.gga import GGA
from gpaw.xc.mgga import MGGA
def XC(kernel, parameters=None):
    """Create XCFunctional object.

    kernel: XCKernel object or str
        Kernel object or name of functional.
    parameters: ndarray
        Parameters for BEE functional.

    Recognized names are: LDA, PW91, PBE, revPBE, RPBE, BLYP, HCTH407,
    TPSS, M06L, revTPSS, vdW-DF, vdW-DF2, EXX, PBE0, B3LYP, BEE,
    GLLBSC. One can also use equivalent libxc names, for example
    GGA_X_PBE+GGA_C_PBE is equivalent to PBE, and LDA_X to the LDA exchange.
    In this way one has access to all the functionals defined in libxc.
    See xc_funcs.h for the complete list. """
    if isinstance(kernel, str):
        name = kernel
        if name in ['vdW-DF', 'vdW-DF2', 'optPBE-vdW', 'optB88-vdW', 'C09-vdW']:
            from gpaw.xc.vdw import FFTVDWFunctional
            return FFTVDWFunctional(name)
        elif name in ['EXX', 'PBE0', 'B3LYP']:
            from gpaw.xc.hybrid import HybridXC
            return HybridXC(name)
        elif name in ['HSE03', 'HSE06']:
            from gpaw.xc.exx import EXX
            return EXX(name)
        elif name == 'BEE1':
            from gpaw.xc.bee import BEE1
            kernel = BEE1(parameters)
        elif name == 'BEE2':
            from gpaw.xc.bee import BEE2
            kernel = BEE2(parameters)
        elif name in ['BEEF-vdW', 'BEEF-1']:
            from gpaw.xc.bee import BEEVDWFunctional
            return BEEVDWFunctional('BEEF-vdW')
        elif name.startswith('GLLB'):
            from gpaw.xc.gllb.nonlocalfunctionalfactory import \
                NonLocalFunctionalFactory
            xc = NonLocalFunctionalFactory().get_functional_by_name(name)
            xc.print_functional()
            return xc
        elif name == 'LB94':
            from gpaw.xc.lb94 import LB94
            kernel = LB94()
        elif name.startswith('ODD_'):
            from ODD import ODDFunctional
            return ODDFunctional(name[4:])
        elif name.endswith('PZ-SIC'):
            try:
                from ODD import PerdewZungerSIC as SIC
                return SIC(xc=name[:-7])
            except Exception:
                # Bugfix: was a bare ``except:``, which also swallowed
                # KeyboardInterrupt/SystemExit.  Fall back to GPAW's own SIC
                # implementation when the optional ODD package is unusable.
                from gpaw.xc.sic import SIC
                return SIC(xc=name[:-7])
        elif name == 'TPSS' or name == 'M06L' or name == 'revTPSS':
            from gpaw.xc.kernel import XCKernel
            kernel = XCKernel(name)
        elif name.startswith('old'):
            from gpaw.xc.kernel import XCKernel
            kernel = XCKernel(name[3:])
        elif name == 'PPLDA':
            from gpaw.xc.lda import PurePythonLDAKernel
            kernel = PurePythonLDAKernel()
        elif name in ['pyPBE', 'pyPBEsol', 'pyRPBE', 'pyzvPBEsol']:
            from gpaw.xc.gga import PurePythonGGAKernel
            kernel = PurePythonGGAKernel(name)
        elif name == '2D-MGGA':
            from gpaw.xc.mgga import PurePython2DMGGAKernel
            kernel = PurePython2DMGGAKernel(name, parameters)
        else:
            # Any other name is handed to libxc verbatim.
            kernel = LibXC(kernel)
    # Wrap the kernel in the matching functional driver.
    if kernel.type == 'LDA':
        return LDA(kernel)
    elif kernel.type == 'GGA':
        return GGA(kernel)
    else:
        return MGGA(kernel)
| robwarm/gpaw-symm | gpaw/xc/__init__.py | Python | gpl-3.0 | 3,256 |
class FeatureExtractor:
    """Base class for feature extractors; subclasses implement the guess and
    feature generation hooks and set self._name."""
    def guesses(self, question):
        """
        Returns all of the guesses for a given question. If this depends on
        another system for generating guesses, it can return an empty list.
        """
        raise NotImplementedError
    @staticmethod
    def has_guess():
        # Subclasses that generate their own guesses override this to True.
        return False
    def namespace(self):
        # Presumably set by subclass constructors -- confirm.
        return self._name
    def features(self, question, candidate):
        """
        Given a question and a candidate, returns the features
        """
        raise NotImplementedError
    def set_metadata(self, answer, category, qnum, sent, token, guesses, fold):
        # Records per-example bookkeeping; ``category`` is accepted but
        # currently unused.
        self._correct = answer
        self._num_guesses = guesses
        self._qnum = qnum
        self._sent = sent
        self._token = token
        self._fold = fold
        self._id = '%i_%i_%i' % (self._qnum, self._sent, self._token)
    def set_num_guesses(self, num_guesses):
        # Intentionally a no-op hook (the bare ``None`` is an empty statement);
        # subclasses may override.
        None
    def vw_from_score(self, results):
        """
        Dictionary of feature key / value pairs
        """
        raise NotImplementedError
    def vw_from_title(self, title, text):
        raise NotImplementedError
    def name(self):
        return self._name
| EntilZha/qb | feature_extractor.py | Python | mit | 1,200 |
#!/usr/bin/env python
#
# test_codecencodings_jp.py
# Codec encoding tests for Japanese encodings.
#
from test import support
from test import test_multibytecodec_support
import unittest
class Test_CP932(test_multibytecodec_support.TestBase, unittest.TestCase):
    # Error-handling cases for the cp932 codec, including the mapping
    # differences between cp932 and plain shift_jis.
    encoding = 'cp932'
    tstring = test_multibytecodec_support.load_teststring('shift_jis')
    codectests = (
        # invalid bytes
        (b"abc\x81\x00\x81\x00\x82\x84", "strict", None),
        (b"abc\xf8", "strict", None),
        (b"abc\x81\x00\x82\x84", "replace", "abc\ufffd\uff44"),
        (b"abc\x81\x00\x82\x84\x88", "replace", "abc\ufffd\uff44\ufffd"),
        (b"abc\x81\x00\x82\x84", "ignore", "abc\uff44"),
        # sjis vs cp932
        (b"\\\x7e", "replace", "\\\x7e"),
        (b"\x81\x5f\x81\x61\x81\x7c", "replace", "\uff3c\u2225\uff0d"),
    )
class Test_EUC_JISX0213(test_multibytecodec_support.TestBase,
                        unittest.TestCase):
    # euc_jisx0213 codec: round-trip plus error-handler checks.
    encoding = 'euc_jisx0213'
    tstring = test_multibytecodec_support.load_teststring('euc_jisx0213')
    # Each entry is (input, error handler, expected output or None when the
    # conversion is expected to raise).
    codectests = (
        # invalid bytes
        (b"abc\x80\x80\xc1\xc4", "strict", None),
        (b"abc\xc8", "strict", None),
        (b"abc\x80\x80\xc1\xc4", "replace", "abc\ufffd\u7956"),
        (b"abc\x80\x80\xc1\xc4\xc8", "replace", "abc\ufffd\u7956\ufffd"),
        (b"abc\x80\x80\xc1\xc4", "ignore", "abc\u7956"),
        (b"abc\x8f\x83\x83", "replace", "abc\ufffd"),
        (b"\xc1\x64", "strict", None),
        (b"\xa1\xc0", "strict", "\uff3c"),
    )
    # (unicode text, expected encoding) for the XML charname-replace test.
    # FIX: the byte-string literal had been corrupted by HTML-entity decoding
    # (a bytes literal cannot contain non-ASCII characters, so the module no
    # longer compiled); restored the original ASCII entity references.
    xmlcharnametest = (
        "\xab\u211c\xbb = \u2329\u1234\u232a",
        b"\xa9\xa8&real;\xa9\xb2 = &lang;&#4660;&rang;"
    )
# Error-handling cases shared by the euc_jp-family codec tests:
# (input bytes, error handler, expected str or None when decoding must fail).
eucjp_commontests = (
    (b"abc\x80\x80\xc1\xc4", "strict", None),
    (b"abc\xc8", "strict", None),
    (b"abc\x80\x80\xc1\xc4", "replace", "abc\ufffd\u7956"),
    (b"abc\x80\x80\xc1\xc4\xc8", "replace", "abc\ufffd\u7956\ufffd"),
    (b"abc\x80\x80\xc1\xc4", "ignore", "abc\u7956"),
    (b"abc\x8f\x83\x83", "replace", "abc\ufffd"),
    (b"\xc1\x64", "strict", None),
)
class Test_EUC_JP_COMPAT(test_multibytecodec_support.TestBase,
                         unittest.TestCase):
    # euc_jp codec: shared error cases plus yen-sign/overline compatibility
    # mappings (U+00A5 -> 0x5C, U+203E -> 0x7E).
    encoding = 'euc_jp'
    tstring = test_multibytecodec_support.load_teststring('euc_jp')
    codectests = eucjp_commontests + (
        (b"\xa1\xc0\\", "strict", "\uff3c\\"),
        ("\xa5", "strict", b"\x5c"),
        ("\u203e", "strict", b"\x7e"),
    )
# Error-handling cases shared by the Shift JIS-family codec tests:
# (input bytes, error handler, expected str or None when decoding must fail).
shiftjis_commonenctests = (
    (b"abc\x80\x80\x82\x84", "strict", None),
    (b"abc\xf8", "strict", None),
    (b"abc\x80\x80\x82\x84", "replace", "abc\ufffd\uff44"),
    (b"abc\x80\x80\x82\x84\x88", "replace", "abc\ufffd\uff44\ufffd"),
    (b"abc\x80\x80\x82\x84def", "ignore", "abc\uff44def"),
)
class Test_SJIS_COMPAT(test_multibytecodec_support.TestBase, unittest.TestCase):
    # shift_jis codec: strict decoding of backslash/tilde and of the
    # 0x815F/0x8161/0x817C sequences.
    encoding = 'shift_jis'
    tstring = test_multibytecodec_support.load_teststring('shift_jis')
    codectests = shiftjis_commonenctests + (
        (b"\\\x7e", "strict", "\\\x7e"),
        (b"\x81\x5f\x81\x61\x81\x7c", "strict", "\uff3c\u2016\u2212"),
    )
class Test_SJISX0213(test_multibytecodec_support.TestBase, unittest.TestCase):
    # shift_jisx0213 codec: round-trip plus error-handler checks.
    encoding = 'shift_jisx0213'
    tstring = test_multibytecodec_support.load_teststring('shift_jisx0213')
    codectests = (
        # invalid bytes
        (b"abc\x80\x80\x82\x84", "strict", None),
        (b"abc\xf8", "strict", None),
        (b"abc\x80\x80\x82\x84", "replace", "abc\ufffd\uff44"),
        (b"abc\x80\x80\x82\x84\x88", "replace", "abc\ufffd\uff44\ufffd"),
        (b"abc\x80\x80\x82\x84def", "ignore", "abc\uff44def"),
        # sjis vs cp932
        (b"\\\x7e", "replace", "\xa5\u203e"),
        (b"\x81\x5f\x81\x61\x81\x7c", "replace", "\x5c\u2016\u2212"),
    )
    # (unicode text, expected encoding) for the XML charname-replace test.
    # FIX: the byte-string literal had been corrupted by HTML-entity decoding
    # (a bytes literal cannot contain non-ASCII characters, so the module no
    # longer compiled); restored the original ASCII entity references.
    xmlcharnametest = (
        "\xab\u211c\xbb = \u2329\u1234\u232a",
        b"\x85G&real;\x85Q = &lang;&#4660;&rang;"
    )
def test_main():
    """Run every Test_* class in this module via regrtest's helper."""
    support.run_unittest(__name__)
if __name__ == "__main__":
    test_main()
| MalloyPower/parsing-python | front-end/testsuite-python-lib/Python-3.1/Lib/test/test_codecencodings_jp.py | Python | mit | 3,940 |
# Author: Philippe Katz <philippe.katz@gmail.com>,
# Flavien garcia <flavien.garcia@free.fr>,
# Sylvain Takerkart <Sylvain.Takerkart@incm.cnrs-mrs.fr>
# License: BSD Style.
# Averages a region of interest from an averaged image created using average_trial process
# Imports
from neuroProcesses import * # Provides a hierarchy to get object's path
# Header
# Process declaration: GUI name, menu category, visibility and parameters.
name = _t_('Average Region')
category = _t_('Session Post-Analysis')
userLevel=0 # Always visible
# The parameters
signature=Signature(
    "input", ReadDiskItem( 'OI 2D+t Image' , ['NIFTI-1 image','gz compressed NIFTI-1 image'] ), # The path of the image to average
    "ROI", Choice(('Rectangular ROI (from coordinates)','corners')\
        ,('Binary mask (from image)','mask') ), # Way to average images. It can be with corners (Top left-hand and bottom right-hand) or with a mask (binary matrix)
    "corner0", String(), # The position of the top left-hand corner of the mask (x,y)
    "corner1", String(), # The position of the left-bottom corner of the mask (x,y)
    "input_mask", ReadDiskItem( 'OI Mask' , ['NIFTI-1 image','gz compressed NIFTI-1 image'] ), # The path of an existing mask
    "output_mask", WriteDiskItem( 'OI Mask' , ['NIFTI-1 image','gz compressed NIFTI-1 image'] ), # The path of the created mask
    "output", WriteDiskItem( 'OI Time Series', 'Text File' ), #
    )
def initSignature( self,inp ):
    """Rebuild the process signature when the ROI mode changes.

    In 'corners' mode the mask is built from two typed-in coordinates, so
    the corner and output_mask parameters are exposed; in 'mask' mode an
    existing binary-mask image is used instead.

    Parameters
    ----------
    inp : BrainVISA parameter type
        The parameter whose changes will be tracked to autocomplete the others
    """
    if self.ROI == 'corners': # If user wants to average over a rectangular ROI
        self.input_mask=None
        # New signature
        paramSignature=["input", ReadDiskItem( 'OI 2D+t Image' , ['NIFTI-1 image','gz compressed NIFTI-1 image'] ), # The path of the image to average
                        "ROI", Choice(('Rectangular ROI (from coordinates)','corners')\
                            ,('Binary mask (from image)','mask') ), # Way to average images. It can be with corners (Top left-hand and bottom right-hand) or with a mask (binary matrix)
                        "corner0", String(), # The position of the top left-hand corner of the mask (x,y)
                        "corner1", String(), # The position of the left-bottom corner of the mask (x,y)
                        "output_mask", WriteDiskItem( 'OI Mask' , ['NIFTI-1 image','gz compressed NIFTI-1 image'] ), # The path of the created mask
                        "output", WriteDiskItem( 'OI Time Series', 'Text File' ),
                        ]
        signature=Signature( *paramSignature )
        signature['input'].browseUserLevel=2 # Browse only visible for expert user
        signature['output_mask'].browseUserLevel=2 # Browse only visible for expert user
        signature['output'].browseUserLevel=2 # Browse only visible for expert user
        self.changeSignature( signature ) # Change of signature
        # Re-register the GUI links so outputs follow the corner fields.
        self.addLink( 'output_mask','corner0', self.initOutputMask ) # Change of output_mask if change on corner0
        self.addLink( 'output_mask','corner1', self.initOutputMask ) # Change of output_mask if change on corner1
        self.addLink( 'output','corner0', self.initOutput ) # Change of output if change on corner0
        self.addLink( 'output','corner1', self.initOutput ) # Change of output if change on corner1
    else: # If user wants to average with a binary mask
        self.corner0='(,)'
        self.corner1='(,)'
        # New signature
        paramSignature=["input", ReadDiskItem( 'OI 2D+t Image' , ['NIFTI-1 image','gz compressed NIFTI-1 image'] ),
                        "ROI", Choice(('Rectangular ROI (from coordinates)','corners')\
                            ,('Binary mask (from image)','mask') ), # Way to average images. It can be with corners (Top left-hand and bottom right-hand) or with a mask (binary matrix)
                        "input_mask", ReadDiskItem( 'OI Mask' , ['NIFTI-1 image','gz compressed NIFTI-1 image'] ),
                        "output", WriteDiskItem( 'OI Time Series', 'Text File' ),
                        ]
        signature=Signature( *paramSignature )
        signature['input'].browseUserLevel=2 # Browse only visible for expert user
        signature['input_mask'].browseUserLevel=2 # Browse only visible for expert user
        signature['output'].browseUserLevel=2 # Browse only visible for expert user
        self.changeSignature( signature ) # Change of signature
        self.addLink( 'output','input_mask', self.initOutput ) # Change of output if change on input_mask
def initOutputMask( self, inp ):
    """Autocomplete the output_mask parameter.

    Builds the hierarchy-attribute dictionary for the mask that will be
    created from the two typed-in corners; the corners are folded into a
    ``filename_variable`` of the form ``"x0_y0_x1_y1"``.

    Parameters
    ----------
    inp : BrainVISA parameter type
        The parameter whose changes will be tracked to autocomplete the others

    Returns
    -------
    dict
        Hierarchy attributes for the output mask (empty when the input or
        the corners are not set yet).
    """
    value={} # Value dictionary initialization
    if self.input is not None and self.corner0!='(,)' and self.corner1 != '(,)':
        # Key value autocompletion
        value=self.input.hierarchyAttributes()
        try:
            value['filename_variable']=str(eval(self.corner0)[0])+'_'+str(eval(self.corner0)[1])+'_'+str(eval(self.corner1)[0])+'_'+str(eval(self.corner1)[1])
        except Exception:
            # The corners may be partially typed (e.g. "(1,") while this GUI
            # link fires: leave filename_variable unset rather than aborting.
            # (Was a bare ``except:`` with a no-op ``None`` expression, which
            # also swallowed KeyboardInterrupt/SystemExit.)
            pass
    return value
def initOutput( self, inp ):
    """Autocomplete the output time-series parameter.

    Derives a filename_variable from the input image name plus either the
    corner coordinates ('corners' mode) or the input mask's name ('mask'
    mode), then picks the output type from the analysis directory prefix
    (glm_based / blank_based / raw).

    Parameters
    ----------
    inp : BrainVISA parameter type
        The parameter whose changes will be tracked to autocomplete the others
    """
    value=None # Value dictionary initialization
    result=None
    if self.input is not None:
        # Key value autocompletion
        value=self.input.hierarchyAttributes()
        # Fall back to the first-level analysis name when no second-level
        # analysis attribute exists.
        if 'secondlevel_analysis' not in value.keys() and 'firstlevel_analysis' in value.keys():
            value['secondlevel_analysis']=value['firstlevel_analysis']
        value['filename_variable']=None
        if self.ROI=='corners' and self.corner0!='(,)' and self.corner1 != '(,)':
            try:
                value['filename_variable']=os.path.split(self.input.fullPath())[1][:-4]+'_mask_'+str(eval(self.corner0)[0])+'_'+str(eval(self.corner0)[1])+'_'+str(eval(self.corner1)[0])+'_'+str(eval(self.corner1)[1])
            except:
                # NOTE(review): bare except with a no-op -- malformed corners
                # are silently ignored while the user is still typing.
                None
        elif self.ROI=='mask' and self.input_mask is not None:
            # Key value autocompletion
            value['filename_variable']=os.path.split(self.input.fullPath())[1][:-4]+'_mask_'+self.input_mask.hierarchyAttributes()['filename_variable']
        # The analysis directory name encodes which kind of time series to write.
        if (os.path.split(os.path.split(self.input.fullPath())[0])[1])[0:9]=='glm_based':
            result=WriteDiskItem( 'OI GLM Time Series' , 'Text file' ).findValue( value )
        elif (os.path.split(os.path.split(self.input.fullPath())[0])[1])[0:11]=='blank_based':
            result=WriteDiskItem( 'OI BKSD Time Series' , 'Text file' ).findValue( value )
        elif (os.path.split(os.path.split(self.input.fullPath())[0])[1])=='raw':
            result=WriteDiskItem( 'OI Blank Time Series', 'Text file').findValue( value )
    if result is not None:
        return result # While opening process, result is not created
    else:
        return value
def initialization( self ):
    """Parameters values initialization.

    Sets placeholder corner values, marks optional parameters, hides the
    browse buttons from non-expert users and wires the autocompletion links.
    """
    self.corner0='(,)' # corner0 initialization (placeholder meaning "unset")
    self.corner1='(,)' # corner1 initialization
    # Optional parameters
    self.setOptional('input_mask')
    self.setOptional('corner0')
    self.setOptional('corner1')
    self.setOptional('output_mask')
    self.signature['input'].browseUserLevel=2 # Browse only visible for expert user
    self.signature['input_mask'].browseUserLevel=2 # Browse only visible for expert user
    self.signature['output_mask'].browseUserLevel=2 # Browse only visible for expert user
    self.signature['output'].browseUserLevel=2 # Browse only visible for expert user
    self.addLink(None,'ROI',self.initSignature) # Change on signature if change on ROI
    self.addLink('output','ROI',self.initOutput)
    self.addLink('output_mask','input', self.initOutputMask) # Change of output_mask if change on input
    self.addLink('output','input', self.initOutput) # Change of output if change on input
def _parse_corner( corner_str, label ):
    """Parse a corner string of the form "(x,y)".

    Returns None when the field still holds its placeholder value '(,)',
    otherwise the evaluated 2-tuple.  Raises SyntaxError or TypeError with a
    message naming *label* when the string is present but malformed.
    """
    if corner_str == '(,)': # Field was never filled in
        return None
    try:
        corner=eval(corner_str) # Values recovery
    except SyntaxError:
        raise SyntaxError(label+' is not properly completed')
    try:
        if len(corner)==2:
            return corner
        else:
            raise SyntaxError(label+' is not properly completed')
    except TypeError: # corner has no len() (e.g. a bare number)
        raise TypeError(label+' is not properly completed')

def execution( self, context ):
    """The execution process.

    Averages a region of interest from an averaged image created using the
    average_trial process, delegating to
    oisession_postprocesses.average_region_process.

    Parameters
    ----------
    context : BrainVISA context
    """
    import oidata.oisession_postprocesses as oisession_postprocesses
    # Corner parsing was duplicated inline; it is now factored into
    # _parse_corner.  This also fixes a copy-paste bug where a *bottom
    # right-hand* corner of the wrong length was reported as
    # 'Top left-hand corner is not properly completed'.
    corner0=_parse_corner(self.corner0, 'Top left-hand corner')
    corner1=_parse_corner(self.corner1, 'Bottom right-hand corner')
    # Input_mask's path recuperation
    if self.input_mask is not None:
        input_mask=self.input_mask.fullPath()
    else:
        input_mask=None
    attributes=self.input.hierarchyAttributes() # Attributes recuperation
    # Keep the (possibly double) extension, e.g. '.nii.gz'.
    format=os.path.splitext(os.path.splitext(self.output.fullPath())[0])[1]+os.path.splitext(self.output.fullPath())[1]
    # Averages a region of interest from an averaged image created using average_trial process
    oisession_postprocesses.average_region_process(
        database=attributes['_database'],
        protocol=attributes['protocol'],
        subject=attributes['subject'],
        session='session_'+attributes['session_date'],
        analysis=os.path.split(os.path.split(self.input.fullPath())[0])[1], # Analysis name
        filename=os.path.basename(self.input.fullPath()),
        corner0=corner0,
        corner1=corner1,
        path_mask=input_mask,
        format=format,
        mode=True, # The database mode
        context=context # BrainVISA context
        )
| SylvainTakerkart/vobi_one | brainvisa/toolboxes/vobi_one/processes/Session Post-Analysis/average_region.py | Python | gpl-3.0 | 10,760 |
"""
Interop PyOpenCL
~~~~~~~~~~~~~~~~
"""
from bohrium_api import stack_info
from .bhary import get_base
from ._bh import get_data_pointer, set_data_pointer, get_device_context
def _import_pyopencl_module():
"""Help function to import PyOpenCL and checks that a OpenCL backend is present"""
try:
import pyopencl
except ImportError:
raise ImportError("Failed to import the `pyopencl` module, please install PyOpenCL")
if not stack_info.is_opencl_in_stack():
raise RuntimeError("No OpenCL device in the Bohrium stack! "
"Try defining the environment variable `BH_STACK=opencl`.")
if stack_info.is_proxy_in_stack():
raise RuntimeError("Cannot directly access the OpenCL device through a proxy.")
return pyopencl
def available():
    """Return True when PyOpenCL (and a usable OpenCL backend) is available."""
    try:
        _import_pyopencl_module()
        return True
    except (ImportError, RuntimeError):
        # Either PyOpenCL itself is missing, or no OpenCL backend is usable.
        # (The two previously separate except clauses had identical bodies.)
        return False
def get_context():
    """Return the PyOpenCL context wrapping Bohrium's active OpenCL context."""
    cl = _import_pyopencl_module()
    raw_handle = get_device_context()
    if raw_handle is None:
        raise RuntimeError("No OpenCL device in the Bohrium stack! Try defining the environment variable `BH_STACK=opencl`.")
    return cl.Context.from_int_ptr(raw_handle)
def get_buffer(bh_ary):
    """Return a OpenCL Buffer object wrapping the Bohrium array `ary`.
    Parameters
    ----------
    bh_ary : ndarray (Bohrium array)
        Must be a Bohrium base array
    Returns
    -------
    out : pyopencl.Buffer
    Notes
    -----
    Changing or deallocating `bh_ary` invalidates the returned pyopencl.Buffer!
    """
    if get_base(bh_ary) is not bh_ary:
        raise RuntimeError('`bh_ary` must be a base array and not a view')
    # NOTE(review): assert is stripped under ``python -O``; relies on the
    # array already being mmap-allocated -- confirm callers guarantee this.
    assert (bh_ary.bhc_mmap_allocated)
    pyopencl = _import_pyopencl_module()
    # Ask Bohrium for the device-side handle without copying data to host.
    cl_mem = get_data_pointer(get_base(bh_ary), copy2host=False, allocate=True)
    return pyopencl.Buffer.from_int_ptr(cl_mem)
def set_buffer(bh_ary, buffer):
    """Assign a OpenCL Buffer object to a Bohrium array `ary`.
    Parameters
    ----------
    bh_ary : ndarray (Bohrium array)
        Must be a Bohrium base array
    buffer : pyopencl.Buffer
        The PyOpenCL device buffer
    """
    if get_base(bh_ary) is not bh_ary:
        raise RuntimeError('`bh_ary` must be a base array and not a view')
    # Hand the raw pointer to Bohrium; host_ptr=False marks it as a
    # device-side (not host) address.
    set_data_pointer(get_base(bh_ary), buffer.int_ptr, host_ptr=False)
def get_array(bh_ary, queue):
    """Return a pyopencl.array.Array on `queue` backed by `bh_ary`'s existing
    device buffer (the buffer is wrapped, not copied)."""
    _import_pyopencl_module()
    from pyopencl import array as clarray
    return clarray.Array(queue, bh_ary.shape, bh_ary.dtype, data=get_buffer(bh_ary))
def kernel_info(opencl_kernel, queue):
    """Query work-group information for `opencl_kernel` on `queue`'s device.

    Returns a 4-tuple:
      - Max work-group size
      - Recommended work-group multiple
      - Local mem used by kernel
      - Private mem used by kernel
    """
    cl = _import_pyopencl_module()
    wg_info = cl.kernel_work_group_info
    device = queue.device
    query = opencl_kernel.get_work_group_info
    return (query(wg_info.WORK_GROUP_SIZE, device),
            query(wg_info.PREFERRED_WORK_GROUP_SIZE_MULTIPLE, device),
            query(wg_info.LOCAL_MEM_SIZE, device),
            query(wg_info.PRIVATE_MEM_SIZE, device))
def max_local_memory(opencl_device):
    """Return the maximum allowed local memory on *opencl_device*."""
    opencl = _import_pyopencl_module()
    return opencl_device.get_info(opencl.device_info.LOCAL_MEM_SIZE)
def type_np2opencl_str(np_type):
    """Convert a NumPy type (scalar type or dtype) to an OpenCL C type string.

    Returns "UNKNOWN" for unsupported types.
    """
    import numpy as np
    # FIX: ``np.bool`` (an alias of the builtin ``bool``) was deprecated in
    # NumPy 1.20 and removed in 1.24, so the old comparison raised
    # AttributeError on modern NumPy.  Match both the builtin and NumPy's
    # boolean scalar type to stay backward compatible.
    if np_type == bool or np_type == np.bool_:
        return "bool"
    # Ordered (np type, OpenCL string) pairs.  We scan with ``==`` rather
    # than a dict lookup because callers may pass dtype objects, which
    # compare equal to the scalar types but do not hash identically.
    for candidate, opencl_name in ((np.int8, "char"),
                                   (np.int16, "short"),
                                   (np.int32, "int"),
                                   (np.int64, "long"),
                                   (np.uint8, "uchar"),
                                   (np.uint16, "ushort"),
                                   (np.uint32, "uint"),
                                   (np.uint64, "ulong"),
                                   (np.float32, "float"),
                                   (np.float64, "double")):
        if np_type == candidate:
            return opencl_name
    return "UNKNOWN"
| madsbk/bohrium | bridge/npbackend/bohrium/interop_pyopencl.py | Python | apache-2.0 | 4,528 |
# -*- coding: utf8
"""Random Projection transformers.
Random Projections are a simple and computationally efficient way to
reduce the dimensionality of the data by trading a controlled amount
of accuracy (as additional variance) for faster processing times and
smaller model sizes.
The dimensions and distribution of Random Projections matrices are
controlled so as to preserve the pairwise distances between any two
samples of the dataset.
The main theoretical result behind the efficiency of random projection is the
`Johnson-Lindenstrauss lemma (quoting Wikipedia)
<https://en.wikipedia.org/wiki/Johnson%E2%80%93Lindenstrauss_lemma>`_:
In mathematics, the Johnson-Lindenstrauss lemma is a result
concerning low-distortion embeddings of points from high-dimensional
into low-dimensional Euclidean space. The lemma states that a small set
of points in a high-dimensional space can be embedded into a space of
much lower dimension in such a way that distances between the points are
nearly preserved. The map used for the embedding is at least Lipschitz,
and can even be taken to be an orthogonal projection.
"""
# Authors: Olivier Grisel <olivier.grisel@ensta.org>,
# Arnaud Joly <a.joly@ulg.ac.be>
# License: BSD 3 clause
import warnings
from abc import ABCMeta, abstractmethod
import numpy as np
import scipy.sparse as sp
from .base import BaseEstimator, TransformerMixin
from .utils import check_random_state
from .utils.extmath import safe_sparse_dot
from .utils.random import sample_without_replacement
from .utils.validation import check_array, check_is_fitted
from .utils.validation import _deprecate_positional_args
from .exceptions import DataDimensionalityWarning
__all__ = ["SparseRandomProjection",
"GaussianRandomProjection",
"johnson_lindenstrauss_min_dim"]
@_deprecate_positional_args
def johnson_lindenstrauss_min_dim(n_samples, *, eps=0.1):
    """Find a 'safe' number of components to randomly project to.

    The distortion introduced by a random projection `p` only changes the
    distance between two points by a factor (1 +- eps) in an euclidean space
    with good probability. The projection `p` is an eps-embedding as defined
    by:

    (1 - eps) ||u - v||^2 < ||p(u) - p(v)||^2 < (1 + eps) ||u - v||^2

    Where u and v are any rows taken from a dataset of shape (n_samples,
    n_features), eps is in ]0, 1[ and p is a projection by a random Gaussian
    N(0, 1) matrix of shape (n_components, n_features) (or a sparse
    Achlioptas matrix).

    The minimum number of components to guarantee the eps-embedding is
    given by:

    n_components >= 4 log(n_samples) / (eps^2 / 2 - eps^3 / 3)

    Note that the number of dimensions is independent of the original
    number of features but instead depends on the size of the dataset:
    the larger the dataset, the higher is the minimal dimensionality of
    an eps-embedding.

    Read more in the :ref:`User Guide <johnson_lindenstrauss>`.

    Parameters
    ----------
    n_samples : int or array-like of int
        Number of samples that should be a integer greater than 0. If an array
        is given, it will compute a safe number of components array-wise.
    eps : float or ndarray of shape (n_components,), dtype=float, \
            default=0.1
        Maximum distortion rate in the range (0,1 ) as defined by the
        Johnson-Lindenstrauss lemma. If an array is given, it will compute a
        safe number of components array-wise.

    Returns
    -------
    n_components : int or ndarray of int
        The minimal number of components to guarantee with good probability
        an eps-embedding with n_samples.

    Examples
    --------
    >>> johnson_lindenstrauss_min_dim(1e6, eps=0.5)
    663
    >>> johnson_lindenstrauss_min_dim(1e6, eps=[0.5, 0.1, 0.01])
    array([    663,   11841, 1112658])
    >>> johnson_lindenstrauss_min_dim([1e4, 1e5, 1e6], eps=0.1)
    array([ 7894,  9868, 11841])

    References
    ----------
    .. [1] https://en.wikipedia.org/wiki/Johnson%E2%80%93Lindenstrauss_lemma
    .. [2] Sanjoy Dasgupta and Anupam Gupta, 1999,
           "An elementary proof of the Johnson-Lindenstrauss Lemma."
           http://citeseer.ist.psu.edu/viewdoc/summary?doi=10.1.1.45.3654
    """
    eps = np.asarray(eps)
    n_samples = np.asarray(n_samples)
    if np.any(eps <= 0.0) or np.any(eps >= 1):
        raise ValueError(
            "The JL bound is defined for eps in ]0, 1[, got %r" % eps)
    # FIX: this check used to read ``np.any(n_samples) <= 0``, which reduces
    # n_samples to a single boolean *before* comparing with 0, so invalid
    # inputs such as [5, -1] were never rejected.  Compare element-wise first.
    if np.any(n_samples <= 0):
        raise ValueError(
            "The JL bound is defined for n_samples greater than zero, got %r"
            % n_samples)
    denominator = (eps ** 2 / 2) - (eps ** 3 / 3)
    return (4 * np.log(n_samples) / denominator).astype(int)
def _check_density(density, n_features):
"""Factorize density check according to Li et al."""
if density == 'auto':
density = 1 / np.sqrt(n_features)
elif density <= 0 or density > 1:
raise ValueError("Expected density in range ]0, 1], got: %r"
% density)
return density
def _check_input_size(n_components, n_features):
"""Factorize argument checking for random matrix generation."""
if n_components <= 0:
raise ValueError("n_components must be strictly positive, got %d" %
n_components)
if n_features <= 0:
raise ValueError("n_features must be strictly positive, got %d" %
n_features)
def _gaussian_random_matrix(n_components, n_features, random_state=None):
    """Build a dense random matrix with entries drawn i.i.d. from
    N(0, 1.0 / n_components).

    Read more in the :ref:`User Guide <gaussian_random_matrix>`.

    Parameters
    ----------
    n_components : int,
        Dimensionality of the target projection space.
    n_features : int,
        Dimensionality of the original source space.
    random_state : int, RandomState instance or None, default=None
        Seed or generator controlling the draw.
        Pass an int for reproducible output across multiple function calls.
        See :term:`Glossary <random_state>`.

    Returns
    -------
    components : ndarray of shape (n_components, n_features)
        The generated Gaussian random matrix.

    See Also
    --------
    GaussianRandomProjection
    """
    _check_input_size(n_components, n_features)
    rng = check_random_state(random_state)
    # Variance 1/n_components keeps projected pairwise distances unbiased.
    scale = 1.0 / np.sqrt(n_components)
    return rng.normal(loc=0.0, scale=scale, size=(n_components, n_features))
def _sparse_random_matrix(n_components, n_features, density='auto',
                          random_state=None):
    """Generalized Achlioptas random sparse matrix for random projection.

    Setting density to 1 / 3 will yield the original matrix by Dimitris
    Achlioptas while setting a lower value will yield the generalization
    by Ping Li et al.

    If we note :math:`s = 1 / density`, the components of the random matrix are
    drawn from:

      - -sqrt(s) / sqrt(n_components)   with probability 1 / 2s
      -  0                              with probability 1 - 1 / s
      - +sqrt(s) / sqrt(n_components)   with probability 1 / 2s

    Read more in the :ref:`User Guide <sparse_random_matrix>`.

    Parameters
    ----------
    n_components : int,
        Dimensionality of the target projection space.

    n_features : int,
        Dimensionality of the original source space.

    density : float or 'auto', default='auto'
        Ratio of non-zero component in the random projection matrix in the
        range `(0, 1]`

        If density = 'auto', the value is set to the minimum density
        as recommended by Ping Li et al.: 1 / sqrt(n_features).

        Use density = 1 / 3.0 if you want to reproduce the results from
        Achlioptas, 2001.

    random_state : int, RandomState instance or None, default=None
        Controls the pseudo random number generator used to generate the matrix
        at fit time.
        Pass an int for reproducible output across multiple function calls.
        See :term:`Glossary <random_state>`.

    Returns
    -------
    components : {ndarray, sparse matrix} of shape (n_components, n_features)
        The generated Gaussian random matrix. Sparse matrix will be of CSR
        format.

    See Also
    --------
    SparseRandomProjection

    References
    ----------
    .. [1] Ping Li, T. Hastie and K. W. Church, 2006,
           "Very Sparse Random Projections".
           https://web.stanford.edu/~hastie/Papers/Ping/KDD06_rp.pdf

    .. [2] D. Achlioptas, 2001, "Database-friendly random projections",
           http://www.cs.ucsc.edu/~optas/papers/jl.pdf
    """
    _check_input_size(n_components, n_features)
    density = _check_density(density, n_features)
    rng = check_random_state(random_state)
    if density == 1:
        # skip index generation if totally dense
        # (entries are +/-1 with equal probability, scaled by 1/sqrt(n_components))
        components = rng.binomial(1, 0.5, (n_components, n_features)) * 2 - 1
        return 1 / np.sqrt(n_components) * components
    else:
        # Generate location of non zero elements
        indices = []
        offset = 0
        indptr = [offset]
        for _ in range(n_components):
            # find the indices of the non-zero components for row i
            n_nonzero_i = rng.binomial(n_features, density)
            indices_i = sample_without_replacement(n_features, n_nonzero_i,
                                                   random_state=rng)
            indices.append(indices_i)
            offset += n_nonzero_i
            indptr.append(offset)
        indices = np.concatenate(indices)
        # Among non zero components the probability of the sign is 50%/50%
        data = rng.binomial(1, 0.5, size=np.size(indices)) * 2 - 1
        # build the CSR structure by concatenating the rows
        components = sp.csr_matrix((data, indices, indptr),
                                   shape=(n_components, n_features))
        return np.sqrt(1 / density) / np.sqrt(n_components) * components
class BaseRandomProjection(TransformerMixin, BaseEstimator, metaclass=ABCMeta):
    """Base class for random projections.

    Warning: This class should not be used directly.
    Use derived classes instead.
    """
    @abstractmethod
    def __init__(self, n_components='auto', *, eps=0.1, dense_output=False,
                 random_state=None):
        # Hyper-parameters are stored unmodified (scikit-learn convention);
        # all validation happens in ``fit``.
        self.n_components = n_components
        self.eps = eps
        self.dense_output = dense_output
        self.random_state = random_state
    @abstractmethod
    def _make_random_matrix(self, n_components, n_features):
        """Generate the random projection matrix.

        Parameters
        ----------
        n_components : int,
            Dimensionality of the target projection space.

        n_features : int,
            Dimensionality of the original source space.

        Returns
        -------
        components : {ndarray, sparse matrix} of shape \
                (n_components, n_features)
            The generated random matrix. Sparse matrix will be of CSR format.

        """
    def fit(self, X, y=None):
        """Generate a sparse random projection matrix.

        Parameters
        ----------
        X : {ndarray, sparse matrix} of shape (n_samples, n_features)
            Training set: only the shape is used to find optimal random
            matrix dimensions based on the theory referenced in the
            afore mentioned papers.

        y
            Ignored

        Returns
        -------
        self

        """
        X = self._validate_data(X, accept_sparse=['csr', 'csc'])
        n_samples, n_features = X.shape
        # 'auto': pick the smallest dimensionality that satisfies the
        # Johnson-Lindenstrauss bound for this number of samples.
        if self.n_components == 'auto':
            self.n_components_ = johnson_lindenstrauss_min_dim(
                n_samples=n_samples, eps=self.eps)
            if self.n_components_ <= 0:
                raise ValueError(
                    'eps=%f and n_samples=%d lead to a target dimension of '
                    '%d which is invalid' % (
                        self.eps, n_samples, self.n_components_))
            elif self.n_components_ > n_features:
                raise ValueError(
                    'eps=%f and n_samples=%d lead to a target dimension of '
                    '%d which is larger than the original space with '
                    'n_features=%d' % (self.eps, n_samples, self.n_components_,
                                       n_features))
        else:
            # Explicit n_components: validate it, but only warn (not raise)
            # when it exceeds n_features.
            if self.n_components <= 0:
                raise ValueError("n_components must be greater than 0, got %s"
                                 % self.n_components)
            elif self.n_components > n_features:
                warnings.warn(
                    "The number of components is higher than the number of"
                    " features: n_features < n_components (%s < %s)."
                    "The dimensionality of the problem will not be reduced."
                    % (n_features, self.n_components),
                    DataDimensionalityWarning)
            self.n_components_ = self.n_components
        # Generate a projection matrix of size [n_components, n_features]
        self.components_ = self._make_random_matrix(self.n_components_,
                                                    n_features)
        # Check contract
        assert self.components_.shape == (self.n_components_, n_features), (
            'An error has occurred the self.components_ matrix has '
            ' not the proper shape.')
        return self
    def transform(self, X):
        """Project the data by using matrix product with the random matrix

        Parameters
        ----------
        X : {ndarray, sparse matrix} of shape (n_samples, n_features)
            The input data to project into a smaller dimensional space.

        Returns
        -------
        X_new : {ndarray, sparse matrix} of shape (n_samples, n_components)
            Projected array.
        """
        X = check_array(X, accept_sparse=['csr', 'csc'])
        check_is_fitted(self)
        if X.shape[1] != self.components_.shape[1]:
            raise ValueError(
                'Impossible to perform projection:'
                'X at fit stage had a different number of features. '
                '(%s != %s)' % (X.shape[1], self.components_.shape[1]))
        # Project onto the transposed random matrix; dense_output controls
        # whether a sparse product is densified.
        X_new = safe_sparse_dot(X, self.components_.T,
                                dense_output=self.dense_output)
        return X_new
class GaussianRandomProjection(BaseRandomProjection):
    """Reduce dimensionality through Gaussian random projection.

    The components of the random matrix are drawn from N(0, 1 / n_components).

    Read more in the :ref:`User Guide <gaussian_random_matrix>`.

    .. versionadded:: 0.13

    Parameters
    ----------
    n_components : int or 'auto', default='auto'
        Dimensionality of the target projection space.

        n_components can be automatically adjusted according to the
        number of samples in the dataset and the bound given by the
        Johnson-Lindenstrauss lemma. In that case the quality of the
        embedding is controlled by the ``eps`` parameter.

        It should be noted that Johnson-Lindenstrauss lemma can yield
        very conservative estimated of the required number of components
        as it makes no assumption on the structure of the dataset.

    eps : float, default=0.1
        Parameter to control the quality of the embedding according to
        the Johnson-Lindenstrauss lemma when `n_components` is set to
        'auto'. The value should be strictly positive.

        Smaller values lead to better embedding and higher number of
        dimensions (n_components) in the target projection space.

    random_state : int, RandomState instance or None, default=None
        Controls the pseudo random number generator used to generate the
        projection matrix at fit time.
        Pass an int for reproducible output across multiple function calls.
        See :term:`Glossary <random_state>`.

    Attributes
    ----------
    n_components_ : int
        Concrete number of components computed when n_components="auto".

    components_ : ndarray of shape (n_components, n_features)
        Random matrix used for the projection.

    Examples
    --------
    >>> import numpy as np
    >>> from sklearn.random_projection import GaussianRandomProjection
    >>> rng = np.random.RandomState(42)
    >>> X = rng.rand(100, 10000)
    >>> transformer = GaussianRandomProjection(random_state=rng)
    >>> X_new = transformer.fit_transform(X)
    >>> X_new.shape
    (100, 3947)

    See Also
    --------
    SparseRandomProjection
    """
    @_deprecate_positional_args
    def __init__(self, n_components='auto', *, eps=0.1, random_state=None):
        super().__init__(
            n_components=n_components,
            eps=eps,
            dense_output=True,  # Gaussian matrices are dense ndarrays
            random_state=random_state)
    def _make_random_matrix(self, n_components, n_features):
        """ Generate the random projection matrix.

        Parameters
        ----------
        n_components : int,
            Dimensionality of the target projection space.

        n_features : int,
            Dimensionality of the original source space.

        Returns
        -------
        components : {ndarray, sparse matrix} of shape \
                (n_components, n_features)
            The generated random matrix. Sparse matrix will be of CSR format.

        """
        # Resolve the seed/generator once here so the abstract base stays
        # agnostic about random-state handling.
        random_state = check_random_state(self.random_state)
        return _gaussian_random_matrix(n_components,
                                       n_features,
                                       random_state=random_state)
class SparseRandomProjection(BaseRandomProjection):
"""Reduce dimensionality through sparse random projection.
Sparse random matrix is an alternative to dense random
projection matrix that guarantees similar embedding quality while being
much more memory efficient and allowing faster computation of the
projected data.
If we note `s = 1 / density` the components of the random matrix are
drawn from:
- -sqrt(s) / sqrt(n_components) with probability 1 / 2s
- 0 with probability 1 - 1 / s
- +sqrt(s) / sqrt(n_components) with probability 1 / 2s
Read more in the :ref:`User Guide <sparse_random_matrix>`.
.. versionadded:: 0.13
Parameters
----------
n_components : int or 'auto', default='auto'
Dimensionality of the target projection space.
n_components can be automatically adjusted according to the
number of samples in the dataset and the bound given by the
Johnson-Lindenstrauss lemma. In that case the quality of the
embedding is controlled by the ``eps`` parameter.
It should be noted that Johnson-Lindenstrauss lemma can yield
very conservative estimated of the required number of components
as it makes no assumption on the structure of the dataset.
density : float or 'auto', default='auto'
Ratio in the range (0, 1] of non-zero component in the random
projection matrix.
If density = 'auto', the value is set to the minimum density
as recommended by Ping Li et al.: 1 / sqrt(n_features).
Use density = 1 / 3.0 if you want to reproduce the results from
Achlioptas, 2001.
eps : float, default=0.1
Parameter to control the quality of the embedding according to
the Johnson-Lindenstrauss lemma when n_components is set to
'auto'. This value should be strictly positive.
Smaller values lead to better embedding and higher number of
dimensions (n_components) in the target projection space.
dense_output : bool, default=False
If True, ensure that the output of the random projection is a
dense numpy array even if the input and random projection matrix
are both sparse. In practice, if the number of components is
small the number of zero components in the projected data will
be very small and it will be more CPU and memory efficient to
use a dense representation.
If False, the projected data uses a sparse representation if
the input is sparse.
random_state : int, RandomState instance or None, default=None
Controls the pseudo random number generator used to generate the
projection matrix at fit time.
Pass an int for reproducible output across multiple function calls.
See :term:`Glossary <random_state>`.
Attributes
----------
n_components_ : int
Concrete number of components computed when n_components="auto".
components_ : sparse matrix of shape (n_components, n_features)
Random matrix used for the projection. Sparse matrix will be of CSR
format.
density_ : float in range 0.0 - 1.0
Concrete density computed from when density = "auto".
Examples
--------
>>> import numpy as np
>>> from sklearn.random_projection import SparseRandomProjection
>>> rng = np.random.RandomState(42)
>>> X = rng.rand(100, 10000)
>>> transformer = SparseRandomProjection(random_state=rng)
>>> X_new = transformer.fit_transform(X)
>>> X_new.shape
(100, 3947)
>>> # very few components are non-zero
>>> np.mean(transformer.components_ != 0)
0.0100...
See Also
--------
GaussianRandomProjection
References
----------
.. [1] Ping Li, T. Hastie and K. W. Church, 2006,
"Very Sparse Random Projections".
https://web.stanford.edu/~hastie/Papers/Ping/KDD06_rp.pdf
.. [2] D. Achlioptas, 2001, "Database-friendly random projections",
https://users.soe.ucsc.edu/~optas/papers/jl.pdf
"""
@_deprecate_positional_args
def __init__(self, n_components='auto', *, density='auto', eps=0.1,
dense_output=False, random_state=None):
super().__init__(
n_components=n_components,
eps=eps,
dense_output=dense_output,
random_state=random_state)
self.density = density
def _make_random_matrix(self, n_components, n_features):
""" Generate the random projection matrix
Parameters
----------
n_components : int
Dimensionality of the target projection space.
n_features : int
Dimensionality of the original source space.
Returns
-------
components : {ndarray, sparse matrix} of shape \
(n_components, n_features)
The generated random matrix. Sparse matrix will be of CSR format.
"""
random_state = check_random_state(self.random_state)
self.density_ = _check_density(self.density, n_features)
return _sparse_random_matrix(n_components,
n_features,
density=self.density_,
random_state=random_state)
| xuewei4d/scikit-learn | sklearn/random_projection.py | Python | bsd-3-clause | 23,301 |
"""
URL patterns for the views included in ``django.contrib.auth``.
Including these URLs (via the ``include()`` directive) will set up the
following patterns based at whatever URL prefix they are included
under:
* User login at ``login/``.
* User logout at ``logout/``.
* The two-step password change at ``password/change/`` and
``password/change/done/``.
* The four-step password reset at ``password/reset/``,
``password/reset/confirm/``, ``password/reset/complete/`` and
``password/reset/done/``.
The default registration backend already has an ``include()`` for
these URLs, so under the default setup it is not necessary to manually
include these views. Other backends may or may not include them;
consult a specific backend's documentation for details.
"""
from django.conf.urls import include
from django.conf.urls import patterns
from django.conf.urls import url
from django.core.urlresolvers import reverse_lazy
from django.contrib.auth import views as auth_views
# URL patterns for django.contrib.auth's function-based views.  Each entry
# passes extra view options as a dict and exposes a reversible ``auth_*``
# name used by templates and redirects below.
urlpatterns = [
    # Login / logout.
    url(r'^login/$',
        auth_views.login,
        {'template_name': 'registration/login.html'},
        name='auth_login'),
    url(r'^logout/$',
        auth_views.logout,
        {'template_name': 'registration/logout.html'},
        name='auth_logout'),
    # Two-step password change: form, then confirmation page.
    url(r'^password/change/$',
        auth_views.password_change,
        {'post_change_redirect': reverse_lazy('auth_password_change_done')},
        name='auth_password_change'),
    url(r'^password/change/done/$',
        auth_views.password_change_done,
        name='auth_password_change_done'),
    # Password reset: request form, completion page and "email sent" page.
    # The version-dependent confirm pattern is appended further below.
    url(r'^password/reset/$',
        auth_views.password_reset,
        {'post_reset_redirect': reverse_lazy('auth_password_reset_done')},
        name='auth_password_reset'),
    url(r'^password/reset/complete/$',
        auth_views.password_reset_complete,
        name='auth_password_reset_complete'),
    url(r'^password/reset/done/$',
        auth_views.password_reset_done,
        name='auth_password_reset_done'),
]
from django import get_version
from distutils.version import LooseVersion
# Django 1.6 changed the password-reset confirm link from a base-36 user id
# (``uidb36``) to a base-64 one (``uidb64``), so the URL pattern must be
# chosen according to the installed Django version.
if (LooseVersion(get_version()) >= LooseVersion('1.6')):
    urlpatterns += [
        url(r'^password/reset/confirm/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>.+)/$',
            auth_views.password_reset_confirm,
            {'post_reset_redirect': reverse_lazy('auth_password_reset_complete')},
            name='auth_password_reset_confirm')
    ]
else:
    # Legacy (pre-1.6) pattern: base-36 uid plus the timestamp-token pair.
    urlpatterns += [
        url(r'^password/reset/confirm/(?P<uidb36>[0-9A-Za-z]{1,13})-(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$',
            auth_views.password_reset_confirm,
            {'post_reset_redirect': reverse_lazy('auth_password_reset_complete')},
            name='auth_password_reset_confirm')
    ]
| torchingloom/django-registration | registration/auth_urls.py | Python | bsd-3-clause | 2,744 |
# -*- coding: utf-8 -*-
# Path auto-discovery ###################################
import os
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
#########################################################
# Debugging is off by default; enable locally only.
DEBUG = False
TEMPLATE_DEBUG = DEBUG
SITE_ID = 1
USE_I18N = True
TIME_ZONE = 'America/Sao_Paulo'
LANGUAGE_CODE = 'pt-br'
# Old-style (pre-Django 1.2) single-database settings.  The SQLite file path
# is relative, so it resolves against the current working directory — run
# management commands from the project root.
DATABASE_ENGINE = 'sqlite3'
DATABASE_NAME = 'data.db'
DATABASE_USER = ''
DATABASE_PASSWORD = ''
DATABASE_HOST = ''
DATABASE_PORT = ''
TEST_DATABASE_NAME = 'data.test.db'
# SECURITY: a secret key committed to version control should be considered
# compromised — rotate it and load it from the environment for any real
# deployment.
SECRET_KEY = '!cv0et@y@(13y#k2nw#af-q=avm9)67e_l!ia+_90f!9fz7285'
MEDIA_ROOT = os.path.join(PROJECT_ROOT, 'media')
MEDIA_URL = '/media/'
ADMIN_MEDIA_PREFIX = '/media/admin/'
TEMPLATE_DIRS = (
    os.path.join(PROJECT_ROOT, 'templates'),
)
ROOT_URLCONF = 'django_importer.urls'
INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    # Project
    'tasks',
)
| ricobl/django-importer | sample_project/settings.py | Python | lgpl-3.0 | 955 |
import unittest
from ..expinfo import *
class TestHelpers(unittest.TestCase):
    """Unit tests for the small helper functions in ``expinfo``."""

    def test_interval(self):
        # A valid [start, end] pair is normalised to floats.
        self.assertEqual(interval([1, 2]), [1., 2.])
        # Empty and reversed intervals are rejected.
        with self.assertRaises(AssertionError):
            interval([0, 0])
        with self.assertRaises(AssertionError):
            interval([0, -1])

    def test_check_markers(self):
        # A well-formed marker mapping is returned unchanged.
        expected = {1: 'left', 2: 'right', 3: 'right'}
        self.assertEqual(markers({1:'left', 2:'right', 3:'right'}), expected)
        # Non-integer keys and non-dict arguments are rejected.
        with self.assertRaises(ValueError):
            markers({'marker': 1})
        with self.assertRaises(TypeError):
            markers([1, 2])
class TestExpInfo(unittest.TestCase):
    """Tests for ``ExperimentInfo`` construction and ``add_expinfo``."""

    def setUp(self):
        # Motor-imagery experiment description shared by the tests below.
        self.ex_mi = Experiment(
            marker_to_class={1:'left', 2:'right'},
            trial_offset=[.5, 3], baseline_offset=[-1, 0], band=[8, 30],
            channels=['C3', 'C4'], paradigm='MI',
            test_folds=list(range(10)))
        self.expinfo = ExperimentInfo(
            ac_freq=50,
            amplifier='BioSemi ActiveTwo',
            lab='UT/HMI',
            subject='Fake',
            note='recording note',
            eeg_chan='C3 Cz C4'.split(),
            eog_chan='L R T B'.split(),
            emg_chan='Ex1 Ex2'.split(),
            ref_chan='Ma1 Ma2'.split(),
            experiments={'LR' : self.ex_mi})

    def test_experiment_info(self):
        # all_channels is the union of EEG, EOG, EMG and reference channels.
        expected = 'C3 Cz C4 L R T B Ex1 Ex2 Ma1 Ma2'.split()
        self.assertEqual(set(self.expinfo.all_channels), set(expected))

    def test_add_expinfo(self):
        # Feature labels must match the recording's channel list exactly.
        d_bad = DataSet(np.random.rand(5, 20), np.ones((1, 20)),
            feat_lab=['chann%d' % i for i in range(5)])
        d_good = DataSet(np.random.rand(11, 20), np.ones((1, 20)),
            feat_lab=self.expinfo.all_channels)
        # Matching labels are accepted without raising.
        d = add_expinfo(self.expinfo, d_good)
        # Mismatched labels are rejected.
        with self.assertRaises(ValueError):
            add_expinfo(self.expinfo, d_bad)
| wmvanvliet/psychic | psychic/tests/testexpinfo.py | Python | bsd-3-clause | 1,636 |
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# Lint as: python3
"""Tests for tensorboard.uploader.formatters."""
import os
import time
from unittest import mock
from tensorboard import test as tb_test
from tensorboard.uploader import formatters
from tensorboard.uploader.proto import experiment_pb2
from tensorboard.uploader import util
class TensorBoardExporterTest(tb_test.TestCase):
    """Tests for the experiment formatters (readable and JSON output).

    Covers how each formatter renders timestamps under UTC and non-UTC
    timezones and how missing experiment names/descriptions are shown.
    """

    def _format(self, formatter, experiment, experiment_url, timezone="UTC"):
        """Test helper that ensures formatting is done with known timezone."""
        try:
            # Pin the process timezone so timestamp rendering is
            # deterministic regardless of the host configuration.
            with mock.patch.dict(os.environ, {"TZ": timezone}):
                time.tzset()
                return formatter.format_experiment(experiment, experiment_url)
        finally:
            # Restore the host timezone even if formatting raises.
            time.tzset()

    def testReadableFormatterWithNonemptyNameAndDescription(self):
        """Readable output shows name, description and UTC timestamps."""
        experiment = experiment_pb2.Experiment(
            experiment_id="deadbeef",
            name="A name for the experiment",
            description="A description for the experiment",
            num_runs=2,
            num_tags=4,
            num_scalars=60,
            total_tensor_bytes=789,
            total_blob_bytes=1234,
        )
        # 981173106 = 2001-02-03 04:05:06 UTC; 1015218367 = 2002-03-04
        # 05:06:07 UTC.
        util.set_timestamp(experiment.create_time, 981173106)
        util.set_timestamp(experiment.update_time, 1015218367)
        experiment_url = "http://tensorboard.dev/deadbeef"
        formatter = formatters.ReadableFormatter()
        output = self._format(formatter, experiment, experiment_url)
        expected_lines = [
            "http://tensorboard.dev/deadbeef",
            "\tName A name for the experiment",
            "\tDescription A description for the experiment",
            "\tId deadbeef",
            "\tCreated 2001-02-03 04:05:06",
            "\tUpdated 2002-03-04 05:06:07",
            "\tRuns 2",
            "\tTags 4",
            "\tScalars 60",
            "\tTensor bytes 789",
            "\tBinary object bytes 1234",
        ]
        self.assertEqual(output.split("\n"), expected_lines)

    def testReadableFormatterWithNonUtcTimezone(self):
        """Readable timestamps shift with the configured timezone."""
        experiment = experiment_pb2.Experiment(
            experiment_id="deadbeef",
            name="A name for the experiment",
            description="A description for the experiment",
            num_runs=2,
            num_tags=4,
            num_scalars=60,
            total_tensor_bytes=0,
            total_blob_bytes=1234,
        )
        util.set_timestamp(experiment.create_time, 981173106)
        util.set_timestamp(experiment.update_time, 1015218367)
        experiment_url = "http://tensorboard.dev/deadbeef"
        formatter = formatters.ReadableFormatter()
        output = self._format(
            formatter,
            experiment,
            experiment_url,
            timezone="America/Los_Angeles",
        )
        # Timestamps below are the UTC fixtures shifted to US Pacific time.
        expected_lines = [
            "http://tensorboard.dev/deadbeef",
            "\tName A name for the experiment",
            "\tDescription A description for the experiment",
            "\tId deadbeef",
            "\tCreated 2001-02-02 20:05:06",
            "\tUpdated 2002-03-03 21:06:07",
            "\tRuns 2",
            "\tTags 4",
            "\tScalars 60",
            "\tTensor bytes 0",
            "\tBinary object bytes 1234",
        ]
        self.assertEqual(output.split("\n"), expected_lines)

    def testReadableFormatterWithEmptyNameAndDescription(self):
        """Missing name/description render as bracketed placeholders."""
        experiment = experiment_pb2.Experiment(
            experiment_id="deadbeef",
            # NOTE(cais): `name` and `description` are missing here.
            num_runs=2,
            num_tags=4,
            num_scalars=60,
            total_tensor_bytes=789,
            total_blob_bytes=1234,
        )
        util.set_timestamp(experiment.create_time, 981173106)
        util.set_timestamp(experiment.update_time, 1015218367)
        experiment_url = "http://tensorboard.dev/deadbeef"
        formatter = formatters.ReadableFormatter()
        output = self._format(formatter, experiment, experiment_url)
        expected_lines = [
            "http://tensorboard.dev/deadbeef",
            "\tName [No Name]",
            "\tDescription [No Description]",
            "\tId deadbeef",
            "\tCreated 2001-02-03 04:05:06",
            "\tUpdated 2002-03-04 05:06:07",
            "\tRuns 2",
            "\tTags 4",
            "\tScalars 60",
            "\tTensor bytes 789",
            "\tBinary object bytes 1234",
        ]
        self.assertEqual(output.split("\n"), expected_lines)

    def testJsonFormatterWithEmptyNameAndDescription(self):
        """Missing name/description render as empty JSON strings."""
        experiment = experiment_pb2.Experiment(
            experiment_id="deadbeef",
            # NOTE(cais): `name` and `description` are missing here.
            num_runs=2,
            num_tags=4,
            num_scalars=60,
            total_tensor_bytes=789,
            total_blob_bytes=1234,
        )
        util.set_timestamp(experiment.create_time, 981173106)
        util.set_timestamp(experiment.update_time, 1015218367)
        experiment_url = "http://tensorboard.dev/deadbeef"
        formatter = formatters.JsonFormatter()
        output = self._format(formatter, experiment, experiment_url)
        expected_lines = [
            "{",
            ' "url": "http://tensorboard.dev/deadbeef",',
            ' "name": "",',
            ' "description": "",',
            ' "id": "deadbeef",',
            ' "created": "2001-02-03T04:05:06Z",',
            ' "updated": "2002-03-04T05:06:07Z",',
            ' "runs": 2,',
            ' "tags": 4,',
            ' "scalars": 60,',
            ' "tensor_bytes": 789,',
            ' "binary_object_bytes": 1234',
            "}",
        ]
        self.assertEqual(output.split("\n"), expected_lines)

    def testJsonFormatterWithNonUtcTimezone(self):
        """JSON timestamps stay in UTC regardless of local timezone."""
        experiment = experiment_pb2.Experiment(
            experiment_id="deadbeef",
            # NOTE(cais): `name` and `description` are missing here.
            num_runs=2,
            num_tags=4,
            num_scalars=60,
            total_tensor_bytes=789,
            total_blob_bytes=1234,
        )
        util.set_timestamp(experiment.create_time, 981173106)
        util.set_timestamp(experiment.update_time, 1015218367)
        experiment_url = "http://tensorboard.dev/deadbeef"
        formatter = formatters.JsonFormatter()
        output = self._format(
            formatter,
            experiment,
            experiment_url,
            timezone="America/Los_Angeles",
        )
        expected_lines = [
            "{",
            ' "url": "http://tensorboard.dev/deadbeef",',
            ' "name": "",',
            ' "description": "",',
            ' "id": "deadbeef",',
            # NOTE(cais): Here we assert that the JsonFormat output is not
            # affected by the timezone.
            ' "created": "2001-02-03T04:05:06Z",',
            ' "updated": "2002-03-04T05:06:07Z",',
            ' "runs": 2,',
            ' "tags": 4,',
            ' "scalars": 60,',
            ' "tensor_bytes": 789,',
            ' "binary_object_bytes": 1234',
            "}",
        ]
        self.assertEqual(output.split("\n"), expected_lines)
if __name__ == "__main__":
tb_test.main()
| tensorflow/tensorboard | tensorboard/uploader/formatters_test.py | Python | apache-2.0 | 8,256 |
#!/usr/bin/python
from __future__ import absolute_import, division, print_function, unicode_literals
import argparse
import os
import subprocess
import btrgit.btrgit as app
HERE = os.path.dirname(__file__)
def make_readme_text():
    """Render README.template, substituting the CLI usage text."""
    usage = app.build_parser().format_help()
    banner = '<!-- This is generated by make-readme.py do not edit -->\n'
    with open('README.template') as template:
        body = template.read().format(
            usage=usage,
        )
    return banner + body
def backticks(command, stdin=None, shell=False):
    """Run *command* and return its decoded stdout (like shell backticks).

    Parameters:
        command: argument list (or a shell string when ``shell=True``).
        stdin: optional data fed to the process.  ``bytes`` is passed
            through unchanged; text is encoded as UTF-8, since the pipe
            opened by ``Popen`` is binary and ``communicate`` would
            otherwise fail on Python 3.
        shell: forwarded to ``subprocess.Popen``.

    Raises:
        Exception: if the process exits with a non-zero return code.
    """
    # The stdout/stdin pipes are binary, so text input must be encoded.
    if stdin is not None and not isinstance(stdin, bytes):
        stdin = stdin.encode('utf8')
    stdin_arg = subprocess.PIPE if stdin is not None else None
    process = subprocess.Popen(command, stdout=subprocess.PIPE, stdin=stdin_arg, shell=shell)
    result, _ = process.communicate(stdin)
    if process.returncode != 0:
        raise Exception('{!r} returned non-zero return code {!r}'.format(command, process.returncode))
    return result.decode('utf8')
def main():
    """Command-line entry point: write README.md (or dump it to stdout)."""
    parser = argparse.ArgumentParser(description='Write readme.md')
    parser.add_argument('--stdout', action='store_true', help='Write to standard out rather than README.md')
    args = parser.parse_args()
    rendered = make_readme_text()
    if not args.stdout:
        with open('README.md', 'w') as out_stream:
            out_stream.write(rendered)
    else:
        print(rendered, end='')
if __name__ == '__main__':
main()
| facetframer/btrgit | make-readme.py | Python | gpl-3.0 | 1,368 |
# -*- coding: utf-8 -*-
#
# SWF Sphinx Extension documentation build configuration file, created by
# sphinx-quickstart on Wed Sep 26 00:40:29 2012.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# Makes the in-tree ``sphinxcontrib.swf`` package importable without
# installing it first.
sys.path.insert(0, os.path.abspath('..'))

# -- General configuration -----------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
# ``sphinxcontrib.swf`` is the extension this project documents.
extensions = ['sphinx.ext.autodoc', 'sphinxcontrib.swf']

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix of source filenames.
source_suffix = '.rst'

# The encoding of source files.
#source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'SWF Sphinx Extension'
copyright = u'2012, Kay-Uwe (Kiwi) Lorenz'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1.1'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']

# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []

# -- Options for HTML output ---------------------------------------------------

# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
html_theme = 'default'

# Theme options are theme-specific and customize the look and feel of a theme
# further.  For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}

# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []

# The name for this set of Sphinx documents.  If None, it defaults to
# "<project> v<release> documentation".
#html_title = None

# A shorter title for the navigation bar.  Default is the same as html_title.
#html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None

# The name of an image file (within the static path) to use as favicon of the
# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}

# If false, no module index is generated.
#html_domain_indices = True

# If false, no index is generated.
#html_use_index = True

# If true, the index is split into individual pages for each letter.
#html_split_index = False

# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True

# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it.  The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''

# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None

# Output file base name for HTML help builder.
htmlhelp_basename = 'SWFSphinxExtensiondoc'

# -- Options for LaTeX output --------------------------------------------------

latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',

# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',

# Additional stuff for the LaTeX preamble.
#'preamble': '',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
  ('index', 'SWFSphinxExtension.tex', u'SWF Sphinx Extension Documentation',
   u'Kay-Uwe (Kiwi) Lorenz', 'manual'),
]

# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False

# If true, show page references after internal links.
#latex_show_pagerefs = False

# If true, show URL addresses after external links.
#latex_show_urls = False

# Documents to append as an appendix to all manuals.
#latex_appendices = []

# If false, no module index is generated.
#latex_domain_indices = True

# -- Options for manual page output --------------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'swfsphinxextension', u'SWF Sphinx Extension Documentation',
     [u'Kay-Uwe (Kiwi) Lorenz'], 1)
]

# If true, show URL addresses after external links.
#man_show_urls = False

# -- Options for Texinfo output ------------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
  ('index', 'SWFSphinxExtension', u'SWF Sphinx Extension Documentation',
   u'Kay-Uwe (Kiwi) Lorenz', 'SWFSphinxExtension', 'One line description of project.',
   'Miscellaneous'),
]

# Documents to append as an appendix to all manuals.
#texinfo_appendices = []

# If false, no module index is generated.
#texinfo_domain_indices = True

# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
def setup(app):
    """Sphinx extension hook: register the ``confval`` object type.

    Lets the documentation mark up and cross-reference configuration
    values and adds them to the general index.
    """
    app.add_object_type(
        'confval',
        'confval',
        objname='configuration value',
        indextemplate='pair: %s; configuration value')
| bboalimoe/ndn-cache-policy | docs/sphinx-contrib/swf/docs/conf.py | Python | gpl-3.0 | 8,111 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
import re
from crontab import CronTab
from getpass import getuser
from os.path import abspath, dirname
import sys
sys.path.append(abspath(dirname(abspath(__file__)) + '../../../'))
from core.utils.utils import text2int
ROBOT_DIR = '/home/vs/smarty'
class TestReminderSequenceFunctions(unittest.TestCase):
cron = None
    def setUp(self):
        # Leftover from the unittest template; not used by run_m.
        self.seq = range(10)
        # NOTE(review): this opens the *current user's real* crontab, so the
        # tests create actual cron entries — confirm this is intended.
        self.cron = CronTab(getuser())
    def run_m(self, request):
        """Parse a natural-language "remind me every ..." request into a cron job.

        Creates a new entry on ``self.cron`` whose command sends a jabber
        message, scheduling it according to the phrase that follows
        "remind me every": month/week/year shortcuts, weekday names, "day",
        numeric or worded "N hours/days/minutes", with optional
        "at HH:MM" / "at N o'clock" / "from HH:MM to HH:MM" qualifiers.
        Returns the configured job object; the crontab itself is not
        written here.
        """
        DAYS = {'sunday':'SUN'
            , 'monday':'MON'
            , 'tuesday':'TUE'
            , 'wednesday':'WED'
            , 'thursday':'THU'
            , 'friday':'FRI'
            , 'saturday':'SAT'}
        # Drop the leading command phrase; the remainder is the schedule.
        req = request.replace('remind me every', '', 1)
        #r = re.compile(re.escape('remind me every'), re.IGNORECASE)
        #req = r.sub('', request)
        # Extract the reminder text introduced by "by/with/to/of [message]".
        m = re.search('\s+?(by|with|to|of)\s+message\s+?(.+)', req)
        if m and m.group(2):
            msg = m.group(2)
        else:
            m = re.search('\s+?(by|with|to|of)\s+?(.+)', req)
            if m and m.group(2):
                msg = m.group(2)
            else:
                msg = 'This a reminder. Unfortunatelly I could not parse your message, \
but I guess you can remember what you wanted to do.'
        # NOTE(review): msg is interpolated into a shell command with only
        # double quotes stripped; other shell metacharacters pass through.
        job = self.cron.new(command='/usr/bin/python %s/core/cron/cronjob.py --uuid=%s --cmd="send jabber message" --arguments="%s"' % (ROBOT_DIR, '42d703da-0e4d-421b-a327-b176717e9df7', msg.replace('"', '')))
        # skip_other is set once one of the rules below has fully
        # configured the job's schedule.
        skip_other = False
        # --- "every month/week/year" shortcuts ---
        if req.strip().startswith('month'):
            job.minute.on(0)
            job.hour.on(0)
            job.dom.on(1)
            skip_other = True
        if req.strip().startswith('week'):
            job.minute.on(0)
            job.hour.on(0)
            job.dow.on(0)
            skip_other = True
        if req.strip().startswith('year'):
            job.dom.on(0)
            job.month.on(0)
            skip_other = True
        # --- "every <weekday> ..." schedules ---
        dow = False
        for dw, cron_day in DAYS.items():
            if req.strip().lower().startswith(dw):
                dow = True
                break
        if dow:
            job.dow.on(cron_day.upper())
            # replaced replace() because not worked with uppercase (Wednesday)
            #pattern = re.compile('^' + dw, flags=re.IGNORECASE)
            #pattern.sub("", req.strip())
            r = re.split(r'^' + dw, req.strip(), flags=re.IGNORECASE)
            if r and len(r) == 2:
                req = r.pop()
            if req.strip().startswith('at '):
                ################################################
                # every monday/tuesday/wednesday at 00:00
                ################################################
                time = re.search("[^0-9](\d{1,2})\so'clock", req)
                if time and time.group(1):
                    job.minute.on(0)
                    job.hour.on(time.group(1))
                    skip_other = True
                if not skip_other:
                    ################################################
                    # every monday/tuesday/wednesday at 00:00
                    ################################################
                    time = re.search('[^0-9](\d{1,2}):(\d{2})[^0-9]', req)
                    if time and time.group(1) and time.group(2):
                        job.minute.on(time.group(2))
                        job.hour.on(time.group(1))
                        skip_other = True
            ################################################
            # every monday/tuesday/wednesday hourly
            ################################################
            if not skip_other and req.strip().startswith('hourly'):
                #hourly
                job.minute.on(0)
                skip_other = True
            ################################################
            # every monday/tuesday/wednesday from 00:00 to 00:00
            ################################################
            elif not skip_other:
                #@todo
                #time = re.search('\s?from\s(\d{1,2}):(\d{2})\sto\s(\d{1,2}):(\d{2})[^0-9]+?', req.strip())
                time = re.search('\s?from\s(\d{1,2}):(\d{2})\sto\s(\d{1,2}):(\d{2})[^0-9]+', req)
                #@todo
                if time and time.group(1):
                    job.hour.during(time.group(1), time.group(3))
                    #todo every minute, every 5 minutes
                    job.minute.during(time.group(2), time.group(4)).every(5)
                    skip_other = True
                ################################################
                # every monday/tuesday/wednesday
                ################################################
                elif not skip_other:
                    job.minute.on(0)
                    #by default 10:00
                    job.hour.on(10)
                    skip_other = True
        # --- "every day ..." schedules ---
        if not skip_other and req.strip().startswith('day'):
            #cut day word
            req = req.replace('day', '', 1)
            if req.strip().startswith('at '):
                ################################################
                # every day at 00:00
                ################################################
                time = re.search("[^0-9](\d{1,2})\so'clock", req)
                if time and time.group(1):
                    job.minute.on(0)
                    job.hour.on(time.group(1))
                    skip_other = True
                if not skip_other:
                    ################################################
                    # every day at 00:00
                    ################################################
                    time = re.search('[^0-9](\d{1,2}):(\d{2})[^0-9]', req)
                    if time and time.group(1) and time.group(2):
                        job.minute.on(time.group(2))
                        job.hour.on(time.group(1))
                        skip_other = True
            ################################################
            # every day hourly
            ################################################
            if not skip_other and req.strip().startswith('hourly'):
                #hourly
                job.minute.on(0)
                skip_other = True
            ################################################
            # every day every 5 hours
            ################################################
            if not skip_other and req.strip().startswith('every'):
                req = req.replace('every', '', 1)
                hour = re.search('\s?(\d+)\s+(hour|hours|hs|h)', req)
                if hour and hour.group(1):
                    job.hour.every(hour.group(1))
                    skip_other = True
                else:
                    #if hour presents in human word : one, two etc.
                    hour = re.search('^\s?([a-zA-Z]+?)\s(hours|hour)', req)
                    if hour and hour.group(1):
                        h = text2int(hour.group(1))
                        job.hour.every(h)
                        job.minute.on(0)
                        skip_other = True
            ################################################
            # every day from 00:00 to 00:00
            ################################################
            elif not skip_other and req.strip().startswith('from'):
                #@todo
                time = re.search('^from\s(\d{1,2}):(\d{2})\sto\s(\d{1,2}):(\d{2})[^0-9]+', req.strip())
                #@todo
                if time and time.group(1):
                    job.hour.during(time.group(1), time.group(3))
                    #todo every minute, every 5 minutes
                    job.minute.during(time.group(2), time.group(4)).every(5)
                    skip_other = True
            ################################################
            # every day
            ################################################
            elif not skip_other:
                job.minute.on(0)
                #by default 10:00
                job.hour.on(10)
                skip_other = True
            else:
                pass
        # Bare "every ... with message" defaults to daily at 10:00.
        if not skip_other and req.strip().startswith('with message'):
            job.minute.on(0)
            #by default 10:00
            job.hour.on(10)
            skip_other = True
        if not skip_other and req.strip().startswith('hour'):
            #every hour
            job.minute.on(0)
            skip_other = True
        if not skip_other and req.strip().startswith('minute'):
            #every minute
            job.minute.every(1)
            skip_other = True
        # --- numeric/worded "every N hours" ---
        if not skip_other:
            ################################################
            # hours
            ################################################
            hour = re.search('^(\d+)\s+(hour|hours|hs|h)', req.strip())
            if hour and hour.group(1):
                job.hour.every(hour.group(1))
                skip_other = True
            else:
                #if hour presents in human word : one, two etc.
                hour = re.search('^([a-zA-Z]+?)\s(hours|hour)', req.strip())
                if hour and hour.group(1):
                    h = text2int(hour.group(1))
                    job.hour.every(h)
                    job.minute.on(0)
                    skip_other = True
        # --- numeric/worded "every N days" ---
        if not skip_other:
            #######################################################################################################
            # days
            #######################################################################################################
            day = re.search('^(\d+)\s+(days|day|d)', req.strip())
            if day and day.group(1):
                #remove the matched part of the string which describes number of days: ex. 10 days
                req = req.replace(day.group(0), '', 1)
                ################################################
                # days at 00:00
                ################################################
                if req.strip().startswith('at '):
                    req = req.replace('at', '', 1)
                    ################################################
                    # days at 8 o'clock
                    ################################################
                    time = re.search("^(\d{1,2})\so'clock", req.strip())
                    if time and time.group(1):
                        job.dow.every(day.group(1))
                        job.minute.on(0)
                        job.hour.on(time.group(1))
                        skip_other = True
                ################################################
                # days hourly
                ################################################
                if not skip_other and req.strip().startswith('hourly'):
                    #hourly
                    job.minute.on(0)
                    job.dow.every(day.group(1))
                    skip_other = True
                ################################################
                # days at 00:00
                ################################################
                if not skip_other:
                    time = re.search('^(\d{1,2}):(\d{2})[^0-9]', req.strip())
                    if time and time.group(1) and time.group(2):
                        job.dom.every(day.group(1))
                        job.minute.on(time.group(2))
                        job.hour.on(time.group(1))
                        skip_other = True
                ################################################
                # 10 days from 00:00 to 00:00
                ################################################
                if not skip_other and req.strip().startswith('from'):
                    #@todo
                    req = req.replace('from', '', 1)
                    time = re.search('^(\d{1,2}):(\d{2})\sto\s(\d{1,2}):(\d{2})[^0-9]+?', req.strip())
                    if time and time.group(1):
                        job.hour.during(time.group(1),time.group(3))
                        job.dom.every(day.group(1))
                        #todo every 5 minutes
                        job.minute.during(time.group(2), time.group(4)).every(5)
                        skip_other = True
            #################################################
            # every two days
            #################################################
            elif not skip_other:
                day = re.search('^\s?([a-zA-Z]+?)\s(days|day)' , req )
                if day and day.group(1):
                    d = text2int(day.group(1))
                    req = req.replace(day.group(0), '', 1)
                    ################################################
                    # ten days from 00:00 to 00:00
                    ################################################
                    if not skip_other and req.strip().startswith('from'):
                        time = re.search('^from\s(\d{1,2}):(\d{2})\sto\s(\d{1,2}):(\d{2})[^0-9]+?', req.strip())
                        if time and time.group(1):
                            job.hour.during(time.group(1),time.group(3))
                            job.dom.every(d)
                            #todo every 5 minutes
                            # remove from .. to and check for "every" 5 minutes
                            req = req.replace(day.group(0), '', 1)
                            req = req.replace(time.group(0), '', 1)
                            if req.strip().startswith('every'):
                                mins = re.search('^every\s(\d{1,2})[^0-9]+?(min|minute|minutes)', req.strip())
                                if mins and mins.group(0):
                                    job.minute.during(time.group(2), time.group(4)).every(mins.group(1))
                                    skip_other = True
                                #check once again but now we expect minutes as word not number
                                else:
                                    mins = re.search('^every\s([^0-9\s]+)\s?(min|minute|minutes)', req.strip())
                                    #if exists
                                    if mins and mins.group(1):
                                        m = text2int(mins.group(1))
                                        job.minute.during(time.group(2), time.group(4)).every(m)
                                        skip_other = True
                                    else:
                                        raise
                            # if not starts with "every"
                            else:
                                job.minute.during(time.group(2), time.group(4)).every(5)
                                skip_other = True
                    else:
                        job.dom.every(d)
                        job.minute.on(0)
                        job.hour.on(10) #by default 10:00
                        skip_other = True
                else:
                    print(req)
                    raise
                #job.minute.on(0)
                #job.hour.on(10) #by default 10:00
                #skip_other = True
                #job.dow.every(day.group(1))
                #skip_other = True
        # --- "every N minutes" fallback ---
        if not skip_other:
            #######################################################################################################
            # minutes
            #######################################################################################################
            min = re.search( '\s?(\d+)\s+(minutes|min|minute|m)', req )
            if min and min.group(1):
                job.minute.every(min.group(1))
            else:
                #if day presents in human word : one, two etc.
                min = re.search('^\s?([a-zA-Z]+?)\s(minutes|min|mins)' , req )
                if min and min.group(1):
                    m = text2int(min.group(1))
                    job.minute.every(m)
        #print(dir(job))
        return job
        #print(job.parse)
        #return cron.lines[-1]
        #return cron.render()
        #self.assertEqual(self.seq, range(10))
        #self.assertRaises(TypeError, random.shuffle, (1,2,3))
        #self.assertTrue(element in self.seq)
######################################################################################################
    def test_remind_me_every_day(self):
        """A plain daily reminder defaults to 10:00 ('0 10 * * *')."""
        req = ' remind me every day with message "every day reminder"'
        job = self.run_m(req)
        self.assertTrue(job.render().startswith('0 10 * * *'))
        #self.assertTrue(job.render().startswith('@daily'))
    def test_remind_me_every_10_hours(self):
        """Numeric hour interval maps to the '*/N' hour field."""
        req = 'remind me every 10 hours with message "every 10 hours reminder!"'
        job = self.run_m(req)
        self.assertTrue(job.render().startswith('* */10 * * *'))
def test_remind_me_every_ten_hours(self):
req = 'remind me every 10 hours with message "every 10 hours reminder!"'
job = self.run_m(req)
self.assertTrue(job.render().startswith('* */10 * * *'))
    def test_remind_me_every_hour(self):
        """A bare hourly reminder uses the '@hourly' cron shortcut."""
        req = 'remind me every hour with message "remind me every hour!"'
        job = self.run_m(req)
        self.assertTrue(job.render().startswith('@hourly'))
    def test_remind_me_every_10_minutes(self):
        """Numeric minute interval maps to the '*/N' minute field."""
        req = 'remind me every 10 minutes with message "remind me every 10 minutes!"'
        job = self.run_m(req)
        self.assertTrue(job.render().startswith('*/10 * * * *'))
    def test_remind_me_every_ten_minutes(self):
        """Spelled-out minute interval ("ten") maps to '*/10' as well."""
        req = 'remind me every ten minutes with message "remind me every ten minutes!"'
        job = self.run_m(req)
        self.assertTrue(job.render().startswith('*/10 * * * *'))
    def test_remind_me_every_minute(self):
        """An every-minute reminder renders as the all-wildcard schedule."""
        req = 'remind me every minute with message "remind me every minute!"'
        job = self.run_m(req)
        self.assertTrue(job.render().startswith('* * * * *'))
    def test_remind_me_every_day_at_time(self):
        """An explicit HH:MM overrides the 10:00 daily default."""
        req = 'remind me every day at 9:30 with message "remind me every day at 9:30!"'
        job = self.run_m(req)
        self.assertTrue(job.render().startswith('30 9 * * *'))
    def test_remind_me_every_10_days_at_10(self):
        """Day interval plus explicit time fills minute, hour and dom fields."""
        req = 'remind me every 10 days at 10:00 with message "remind me every 10 days at 10:00!"'
        job = self.run_m(req)
        self.assertTrue(job.render().startswith('0 10 */10 * *'))
    def test_remind_me_every_10_days_from_time_to_time(self):
        """A from/to window without a step defaults to every 5 minutes."""
        req = 'remind me every 10 days from 9:30 to 10:25 with message "remind me every 10 days from 9:30 to 10:25!"'
        job = self.run_m(req)
        self.assertTrue(job.render().startswith('25-30/5 9-10 */10 * *'))
        # Clean the job out of the crontab so later tests start fresh.
        self.cron.remove(job)
    def test_remind_me_every_day_from_time_to_time(self):
        """Daily from/to window also defaults to a 5-minute step."""
        req = 'remind me every day from 9:30 to 10:25 with message "remind me every day from 9:30 to 10:25!"'
        job = self.run_m(req)
        self.assertTrue(job.render().startswith('25-30/5 9-10 * * *'))
        # Clean the job out of the crontab so later tests start fresh.
        self.cron.remove(job)
def test_remind_me_every_monday_at(self):
req = 'remind me every monday at 10:00 with message "remind me every monday at 10:00 !"'
job = self.run_m(req)
print(job)
self.assertTrue(job.render().startswith('0 10 * * MON'))
    def test_remind_me_every_ten_days(self):
        """Spelled-out day interval without a time uses the 10:00 default."""
        req = 'remind me every ten days with message "remind me every ten days!"'
        job = self.run_m(req)
        # by default at 10:00
        self.assertTrue(job.render().startswith('0 10 */10 * *'))
    def test_remind_me_every_ten_days_from_time_to_time(self):
        """Explicit numeric 5-minute step inside a from/to window."""
        req = 'remind me every ten days from 9:30 to 10:25 every 5 min with message "remind me every ten days from 9:30 to 10:25 every 5 min!"'
        job = self.run_m(req)
        self.assertTrue(job.render().startswith('25-30/5 9-10 */10 * *'))
    def test_remind_me_every_ten_days_from_time_to_time_ev5(self):
        """Spelled-out ("five") minute step inside a from/to window."""
        req = 'remind me every ten days from 9:30 to 10:25 every five min with message "remind me every ten days from 9:30 to 10:25 every five min!"'
        job = self.run_m(req)
        self.assertTrue(job.render().startswith('25-30/5 9-10 */10 * *'))
    def test_remind_me_every_ten_days_from_time_to_time_ev10(self):
        """Numeric 10-minute step inside a from/to window."""
        req = 'remind me every ten days from 9:30 to 10:25 every 10 minutes with message "remind me every ten days from 9:30 to 10:25 every 10 minutes min!"'
        job = self.run_m(req)
        self.assertTrue(job.render().startswith('25-30/10 9-10 */10 * *'))
def test_remind_me_every_day_every_5_hours(self):
req = 'remind me every day every 5 hours with message "remind me every 10 days from 9:30 to 10:25!"'
job = self.run_m(req)
self.assertTrue(job.render().startswith('* */5 * * *'))
    def test_remind_me_every_Wednesday_at_time(self):
        """Capitalised weekday name with HH:MM maps to '30 13 * * WED'."""
        req = 'remind me every Wednesday at 13:30 with message "Do not forget to pick up kid from school"'
        job = self.run_m(req)
        self.assertTrue(job.render().startswith('30 13 * * WED'))
#n = Reaction(*{'reserved':''}, **{'req_obj':{'from':'jabber', 'request':'remind me every 2 minutes with "hey don\'t forget about pizza"', 'sender': 'youremail@gmail.com'}})
#n.run()
if __name__ == '__main__':
unittest.main()
| vsilent/smarty-bot | core/unittest/remind.py | Python | mit | 21,796 |
# -*- coding: utf-8 -*-
###############################################################################
#
# Copyright (C) 2004-2014 Pexego Sistemas Informáticos All Rights Reserved
# Copyright (C) 2015-2016 Comunitea Servicios Tecnológicos All Rights Reserved
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
{
"name": 'Maintenance',
"version": '8.0.1.0.0',
"author": 'Pexego',
"category": 'Generic Modules',
"description": """
This module provide: Maintenance functionality.
""",
"depends": [
'hr',
],
"init_xml": [],
'data': [
'security/maintenance_security.xml',
'security/ir.model.access.csv',
'wizard/cancel_maintenance_request_view.xml',
'views/maintenance_element_view.xml',
'views/maintenance_element_type_cron.xml',
'views/maintenance_type_view.xml',
'views/maintenance_request_sequence.xml',
'views/maintenance_request_view.xml',
'views/maintenance_order_sequence.xml',
'views/maintenance_order_view.xml',
],
'demo_xml': [],
'installable': True,
'active': False,
}
| Comunitea/CMNT_00040_2016_ELN_addons | maintenance/__openerp__.py | Python | agpl-3.0 | 1,851 |
""" addons.xml generator """
import hashlib
import md5
import os
class Generator:
    """
    Generates a new addons.xml file from each addons addon.xml file.
    Must be run from the root of the checked-out repo. Only handles
    single depth folder structure.
    """

    def __init__(self):
        # Build the aggregated addons.xml, then its md5 checksum.
        self._generate_addons_file()
        self._generate_md5_file()

    def _generate_addons_file(self):
        """Concatenate every addon's addon.xml into a single addons.xml."""
        addons = os.listdir(".")
        # Final addons text, assembled as unicode and encoded on save.
        addons_xml = u"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<addons>\n"
        for addon in addons:
            # Compute the path before the try so the except handler can
            # report it (it used to be unbound if isdir() raised first).
            _path = os.path.join(addon, "addon.xml")
            try:
                # Skip plain files and the .svn folder.
                if not os.path.isdir(addon) or addon == ".svn":
                    continue
                # BUG FIX: the file handle used to leak; close it via 'with'.
                with open(_path, "r") as addon_file:
                    xml_lines = addon_file.read().splitlines()
                addon_xml = ""
                for line in xml_lines:
                    # Skip each addon's own XML declaration line.
                    if line.find("<?xml") >= 0:
                        continue
                    # py2 builtin: decode the bytes line to unicode.
                    addon_xml += unicode(line.rstrip() + "\n", "utf-8")
                addons_xml += addon_xml.rstrip() + "\n\n"
            except Exception as e:
                # Missing or poorly formatted addon.xml — report and continue.
                print("Excluding %s for %s" % (_path, e))
        # Clean trailing whitespace and add the closing tag.
        addons_xml = addons_xml.strip() + u"\n</addons>\n"
        self._save_addons_file(addons_xml)

    def _save_addons_file(self, addons_xml):
        """Write the aggregated addons.xml to disk as UTF-8."""
        try:
            with open("addons.xml", "w") as out:
                out.write(addons_xml.encode("utf-8"))
        except Exception as e:
            print("An error occurred saving file\n%s" % (e,))

    def _generate_md5_file(self):
        """Write addons.xml.md5 containing the checksum of addons.xml."""
        try:
            # hashlib replaces the long-deprecated md5 module.
            m = hashlib.md5()
        except Exception as e:
            print("An error occurred creating md5 object\n%s" % (e,))
        else:
            try:
                # BUG FIX: both file handles used to leak.
                with open("addons.xml") as source:
                    m.update(source.read())
                with open("addons.xml.md5", "w") as out:
                    out.write(m.hexdigest())
            except Exception as e:
                print("An error occurred saving md5 file\n%s" % (e,))
if ( __name__ == "__main__" ):
# start
Generator() | poolbuzz/pbstream | zips/addons_xml_generator.py | Python | gpl-2.0 | 2,745 |
"""Defines an equlibration scheme with pH calculation."""
from __future__ import absolute_import
import numpy as np
from .Equilibrator import Equilibrator
from .Multiroot import Multiroot
# pylint: disable=W0232, E1101, W0201, E1103
# TODO: Pull constants from ionize.
# Physical Constants
k_water = 1E-14
lpm3 = 1000
faraday = 96485.34 # Faraday's const.[C/mol]
boltzmann = 8.617e-6 # EV/K
temperature = 25
temperature_K = temperature + 273.15
h_mobility = 362E-9/faraday
oh_mobility = 205E-9/faraday
h_diffusivity = h_mobility / 1 * boltzmann * (temperature_K)
oh_diffusivity = oh_mobility / -1 * boltzmann * (temperature_K)
class VariablepH(Equilibrator):
    """A solver that calculates pH at each time point."""
    # Private properties used during calculations
    _l_matrix = None  # per-ion acidity-constant products, aligned on valence
    _Q = None         # charge-neutrality polynomial coefficients (in cH)
    _PMat = None      # per-ion polynomial contributions, scaled by concentration
    _z0 = None        # full contiguous valence range (including 0)
    _z = None         # valence range with the neutral (0) state removed
    _index_0 = None   # index of valence 0 within _z0
    def __init__(self, state):
        """Attach to *state* and precompute the valence-aligned matrices."""
        super(VariablepH, self).__init__(state)
        self._prepare_state()
        self._prepare_arrays()
        self._multiroot = Multiroot()
    def _prepare_state(self):
        """Reset the state fields this equilibrator owns."""
        self.state.ionization_fraction = None
        self.state.water_conductivity = None
        self.state.water_diffusive_conductivity = None
        self.state.cH = None
    def _prepare_arrays(self):
        """Prepare arrays to solve problems during initialization."""
        self._set_z_index()
        self._set_l_matrix()
        self._set_Q()
        self._set_Pmat()
        self._set_absolute_mobility()
    def equilibrate(self):
        """Calculate equilibrium."""
        # Order matters: pH feeds ionization fractions, which feed the rest.
        self._calc_pH()
        self._calc_ionization_fraction()
        self._calc_mobility()
        self._calc_diffusivity()
        self._calc_molar_conductivity()
        self._calc_water_conductivity()
        self._calc_water_diffusive_conductivity()
    def _set_z_index(self):
        """Set the valence indices."""
        all_z = []
        for i in self.state.ions:
            all_z.extend(i._valence_zero())
        # Span the full contiguous valence range across all ions.
        self._z0 = range(min(all_z), max(all_z)+1)
        self._index_0 = self._z0.index(0)
        self._z0 = np.array(self._z0)
        # _z is _z0 with the neutral (zero-valence) entry removed.
        self._z = self._z0.tolist()
        self._z.pop(self._index_0)
        self._z = np.array(self._z)
    def _align_zero(self, value, z0):
        """Align ion properties with the zero of the matrix."""
        # Pad *value* so its zero-valence entry lands at self._index_0.
        local_index = z0.tolist().index(0)
        local_len = len(z0)
        pre_pad = self._index_0 - local_index
        post_pad = len(self._z0) - local_len - pre_pad
        return np.pad(value,
                      (pre_pad, post_pad),
                      'constant', constant_values=(0))
    def _set_absolute_mobility(self):
        """Build the absolute mobility matrix."""
        absolute_mobility = []
        for i in self.state.ions:
            absolute_mobility.append(self._align_zero(i.absolute_mobility(),
                                                      i._valence_zero()))
        self.state.absolute_mobility = np.array(absolute_mobility)
    def _set_l_matrix(self):
        """Build the L matrix."""
        # Set up the matrix of Ls, the multiplication
        # of acidity coefficients for each ion.
        self._l_matrix = []
        for i in self.state.ions:
            self._l_matrix.append(self._align_zero(i.acidity_product(ionic_strength=0), i._valence_zero()))
        self._l_matrix = np.array(self._l_matrix)
    def _set_Q(self):
        """Build the Q matrix for pH solving."""
        # Construct Q vector.
        self._Q = 1.
        for j in range(len(self.state.ions)):
            self._Q = np.convolve(self._Q, self._l_matrix[j, :])
        # Convolve with water dissociation.
        self._Q = np.convolve(self._Q, [-k_water, 0.0, 1.0])
    def _set_Pmat(self):
        """Build the Pmat Matrix for pH solving."""
        self._PMat = []
        for i in range(len(self.state.ions)):
            # Weight ion i's row by valence before taking the product.
            Mmod = self._l_matrix.copy()
            Mmod[i, :] *= self._z0
            Pi = 1.
            for k in range(len(self.state.ions)):
                Pi = np.convolve(Pi, Mmod[k, :])
            Pi = np.convolve([0.0, 1.0], Pi)  # Convolve with P2
            self._PMat.append(Pi)
        self._PMat = np.array(self._PMat, ndmin=2)[:, :, np.newaxis]
    def _calc_pH(self):
        """Solve the charge-neutrality polynomial for cH and store pH."""
        # Multiply P matrix by concentrations, and sum.
        P = np.sum(self._PMat *
                   np.array(self.state.concentrations)[:, np.newaxis, :], 0)
        # Construct polynomial. Change the shapes, then reverse order
        if P.shape[0] < self._Q.shape[0]:
            P = np.resize(P, (self._Q.shape[0], P.shape[1]))
        elif P.shape[0] > self._Q.shape[0]:
            self._Q.resize(P.shape[0])
        poly = (P + self._Q[:, np.newaxis])[::-1]
        # Previous cH seeds the root finder for faster convergence.
        self.state.cH = self._multiroot(poly, self.state.cH)
        self.state.pH = -np.log10(self.state.cH)
        if any(np.isnan(self.state.pH)):
            raise RuntimeError("Couldn't find correct pH.")
    def _calc_mobility(self):
        """Calculate effective mobility."""
        self.state.mobility = np.sum(self.state.ionization_fraction *
                                     self.state.absolute_mobility[:, :,
                                                                  np.newaxis],
                                     1)
    def _calc_diffusivity(self):
        """Calculate diffusivity."""
        # TODO: Check if this works for low ionization fraction
        self.state.diffusivity = (self.state.absolute_mobility[:,
                                                               :, np.newaxis] *
                                  self.state.ionization_fraction /
                                  (self._z[np.newaxis, :, np.newaxis])) * \
            boltzmann * (temperature_K)
        self.state.diffusivity = np.sum(self.state.diffusivity, 1)
    def _calc_molar_conductivity(self):
        """Calculate molar conductivity."""
        self.state.molar_conductivity = lpm3 * faraday * \
            np.sum(self._z[np.newaxis, :, np.newaxis] *
                   self.state.ionization_fraction *
                   self.state.absolute_mobility[:, :, np.newaxis], 1)
    def _calc_ionization_fraction(self):
        """Calculate ionization fraction."""
        # Calculate the numerator of the function for ionization fraction.
        ionization_fraction = self._l_matrix[:, :, np.newaxis] *\
            self.state.cH**self._z0[np.newaxis, :, np.newaxis]
        ionization_fraction /= np.sum(ionization_fraction, 1)[:, np.newaxis, :]
        # Filter out the uncharged state.
        self.state.ionization_fraction = np.delete(ionization_fraction,
                                                   self._index_0,
                                                   axis=1)
    def _calc_water_conductivity(self):
        # Conductivity contribution from H+ and OH- themselves.
        self.state.water_conductivity = (self.state.cH * h_mobility +
                                         k_water / self.state.cH * oh_mobility)
    def _calc_water_diffusive_conductivity(self):
        # Diffusive contribution; OH- term enters with opposite sign.
        self.state.water_diffusive_conductivity = \
            (self.state.cH * h_diffusivity -
             k_water/self.state.cH * oh_diffusivity) * faraday
| lewisamarshall/emigrate | emigrate/equilibration_schemes/VariablepH.py | Python | gpl-2.0 | 7,224 |
#!/usr/bin/env python
# -*- encoding:utf-8 -*-
from __future__ import absolute_import
import argparse
import inspect
__all__ = [
'ClimsonException',
'ValidateError',
'BaseCommand',
'add',
'make_option'
]
class ClimsonException(Exception):
    """Root of the climson exception hierarchy."""
    pass


class ValidateError(ClimsonException):
    """Raised when a command's argument validation fails."""

    def __init__(self, args, msg=None):
        self._args = args
        self._msg = msg

    def __str__(self):
        # Optional message first (empty string when absent), then the args.
        return 'ValidateError %s with args %s' % (self._msg or '', self._args)
class BaseCommand(object):
    """Base class for climson sub-commands.

    Subclasses set ``name``/``description``/``options`` and implement
    ``do_command`` (and optionally ``validate``).
    """

    name = None
    description = ''
    options = ()

    def __init__(self, optargs):
        self._optargs = optargs

    @property
    def optargs(self):
        """The parsed argument namespace this command was built with."""
        return self._optargs

    @classmethod
    def cleanup_args(cls, args_dict):
        """Strip the dispatcher's bookkeeping keys from *args_dict* in place."""
        args_dict.pop('__exec_func__', None)
        args_dict.pop('__subcommand__', None)
        return args_dict

    def execute(self):
        """Validate the parsed arguments and run the command."""
        parsed = self.optargs.__dict__
        self.cleanup_args(parsed)
        if not self.validate(**parsed):
            raise ValidateError(parsed)
        return self.do_command(**parsed)

    def validate(self, **kwargs):
        """
        Do custom validation with args and
        raise ValidateError or return False when validate failed.
        """
        return True

    def do_command(self, **kwargs):
        raise NotImplementedError('do_command in {}'.format(self))

    def __str__(self):
        return '<%s: (%s)>' % (self.name, self.description)
def add(parser, command_cls):
    """Register *command_cls* as a sub-command on *parser* and return *parser*.

    *parser* is expected to be an argparse subparsers object; the command
    class is stored so the dispatcher can instantiate and execute it later.
    """
    if not issubclass(command_cls, BaseCommand):
        raise ClimsonException(
            'InvalidValue: command cls '
            'must be extends BaseCommand, but {}'.format(command_cls))
    if not command_cls.name:
        raise ClimsonException('InvalidValue: Specify command_cls.name')

    sub = parser.add_parser(command_cls.name, help=command_cls.description)

    def _run(args):
        # Instantiate the command with the parsed namespace and execute it.
        return command_cls(args).execute()

    sub.set_defaults(__exec_func__=_run)
    for option in command_cls.options:
        sub.add_argument(*option['args'], **option['kwargs'])
    return parser
def make_option(*args, **kwargs):
    """Bundle argparse positional/keyword arguments for a command option."""
    return {'args': args, 'kwargs': kwargs}
| takumakanari/climson | climson/climson.py | Python | mit | 2,388 |
from django.contrib.auth import get_user_model
from django.core.urlresolvers import reverse
from django.test import TestCase
from simple_forums.notifications import models
from simple_forums.notifications.testing_utils import (
create_thread_notification)
from simple_forums.tests.testing_utils import create_thread
from simple_forums.utils import thread_detail_url
class AuthenticationTestCase(TestCase):
    """ Allows for easy authentication.

    Creates a known user in setUp and provides login() so subclasses
    can authenticate the test client in one call.
    """
    USERNAME = 'test'
    PASSWORD = 'test'
    def setUp(self, *args, **kwargs):
        """ Create a user for authentication. """
        self.user = get_user_model().objects.create_user(
            username=self.USERNAME,
            password=self.PASSWORD)
        return super(AuthenticationTestCase, self).setUp(*args, **kwargs)
    def login(self):
        """ Log in the test client """
        self.client.login(
            username=self.USERNAME,
            password=self.PASSWORD)
class TestThreadNotificationView(AuthenticationTestCase):
    """ Test view used to create/destroy new thread notifications """
    def test_delete(self):
        """ Test unfollowing a thread.
        If the 'follow' variable is false, the ThreadNotification
        instance for the current thread and user should be deleted.
        """
        self.login()
        thread = create_thread()
        create_thread_notification(
            user=self.user, thread=thread)
        # POSTing without a 'follow' key means "unfollow".
        data = {}
        success_url = thread_detail_url(thread=thread)
        url = reverse('simple-forums:follow-thread', kwargs={'pk': thread.pk})
        response = self.client.post(url, data)
        self.assertRedirects(response, success_url)
        self.assertEqual(0, models.ThreadNotification.objects.count())
    def test_duplicate_request(self):
        """ Test trying to create a duplicate notification instance.
        If a user already has notifications set up for a thread and they
        try to create another notification instance, nothing should
        happen.
        """
        self.login()
        thread = create_thread()
        create_thread_notification(
            user=self.user, thread=thread)
        data = {'follow': 'on'}
        success_url = thread_detail_url(thread=thread)
        url = reverse('simple-forums:follow-thread', kwargs={'pk': thread.pk})
        response = self.client.post(url, data)
        self.assertRedirects(response, success_url)
        # Still exactly one notification — no duplicate was created.
        self.assertEqual(1, models.ThreadNotification.objects.count())
    def test_get(self):
        """ Test a GET request.
        GET requests should not be allowed and should return a 405
        status code.
        """
        self.login()
        thread = create_thread()
        url = reverse('simple-forums:follow-thread', kwargs={'pk': thread.pk})
        response = self.client.get(url)
        self.assertEqual(405, response.status_code)
    def test_post_unauthenticated(self):
        """ Test sending an unauthenticated POST request.
        A POST request from an unauthenticated user should result in a
        403 status code.
        """
        thread = create_thread()
        url = reverse('simple-forums:follow-thread', kwargs={'pk': thread.pk})
        response = self.client.post(url, {})
        self.assertEqual(403, response.status_code)
    def test_post_valid_follow(self):
        """ Test POSTing valid data.
        If a POST request with valid data is submitted, a new
        ThreadNotification instance should be created.
        """
        self.login()
        thread = create_thread()
        data = {'follow': 'on'}
        success_url = thread_detail_url(thread=thread)
        url = reverse('simple-forums:follow-thread', kwargs={'pk': thread.pk})
        response = self.client.post(url, data)
        self.assertRedirects(response, success_url)
        self.assertEqual(1, models.ThreadNotification.objects.count())
        self.assertEqual(
            self.user, models.ThreadNotification.objects.get().user)
        self.assertEqual(
            thread, models.ThreadNotification.objects.get().thread)
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functions used to extract and analyze stacks. Faster than Python libs."""
# pylint: disable=g-bad-name
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import inspect
import threading
import six
# TODO(b/138203821): change to from ...util import ... once the bug is fixed.
from tensorflow.python import _tf_stack
# Generally such lookups should be done using `threading.local()`. See
# https://blogs.gnome.org/jamesh/2008/06/11/tls-python/ for a detailed
# explanation of why. However the transform stacks are expected to be empty
# when a thread is joined, so reusing the key does not introduce a correctness
# issue. Moreover, get_ident is faster than storing and retrieving a unique
# key in a thread local store.
if six.PY2:
import thread # pylint: disable=g-import-not-at-top
_get_thread_key = thread.get_ident
else:
_get_thread_key = threading.get_ident
_source_mapper_stacks = collections.defaultdict(list)
_source_filter_stacks = collections.defaultdict(list)
class StackTraceTransform(object):
  """Base class for stack trace transformation functions.

  Instances are context managers: entering pushes the instance onto the
  current thread's transform stack (recording the previous top as
  ``self.parent``), exiting pops it again.
  """
  _stack_dict = None  # Subclasses should override
  _thread_key = None

  def __enter__(self):
    self.reset()

    # Any given instance is assumed to be used by a single thread, which reduces
    # expensive thread local lookups.
    if self._thread_key is None:
      self._thread_key = _get_thread_key()
    else:
      assert self._thread_key == _get_thread_key(), 'Shared across threads?'

    stack = self._stack_dict[self._thread_key]
    if stack:
      self.parent = stack[-1]
    else:
      self.parent = None
    stack.append(self)
    return self

  def __exit__(self, unused_type, unused_value, unused_traceback):
    # LIFO discipline: the popped element must be this instance.
    top = self._stack_dict[self._thread_key].pop()
    assert top is self, 'Concurrent access?'

  def reset(self):
    # Hook for subclasses; invoked on every __enter__.
    pass
class StackTraceMapper(StackTraceTransform):
  """Allows remapping traceback information to different source code."""
  _stack_dict = _source_mapper_stacks

  def reset(self):
    # Invalidate any cached map each time the context is (re-)entered.
    self._effective_source_map = None

  def get_effective_source_map(self):
    """Returns a map (filename, lineno) -> (filename, lineno, function_name)."""
    raise NotImplementedError('subclasses need to override this')
class StackTraceFilter(StackTraceTransform):
  """Allows filtering traceback information by removing superfluous frames."""
  _stack_dict = _source_filter_stacks

  def reset(self):
    # Invalidate the cached filename set each time the context is entered.
    self._filtered_filenames = None

  def get_filtered_filenames(self):
    # Subclasses return the set of filenames whose frames should be dropped.
    raise NotImplementedError('subclasses need to override this')
class CurrentModuleFilter(StackTraceFilter):
  """Filters stack frames from the module where this is used (best effort)."""

  def __init__(self):
    filter_filename = None
    outer_f = None
    f = inspect.currentframe()
    try:
      if f is not None:
        # The current frame is __init__. The first outer frame should be the
        # caller.
        outer_f = f.f_back
        if outer_f is not None:
          filter_filename = inspect.getsourcefile(outer_f)
      # May remain None if frame introspection was unavailable.
      self._filename = filter_filename
    finally:
      # Avoid reference cycles, see:
      # https://docs.python.org/3.7/library/inspect.html#the-interpreter-stack
      del f
      del outer_f

  def get_filtered_filenames(self):
    # Lazily build the set: this module's file plus any parent filter's files.
    if self._filtered_filenames is None:
      self._filtered_filenames = frozenset((self._filename,))
      if self.parent is not None:
        self._filtered_filenames |= self.parent.get_filtered_filenames()
    return self._filtered_filenames
def extract_stack(limit=-1):
  """A lightweight, extensible re-implementation of traceback.extract_stack.

  NOTE(mrry): traceback.extract_stack eagerly retrieves the line of code for
  each stack frame using linecache, which results in an abundance of stat()
  calls. This implementation does not retrieve the code, and any consumer
  should apply _convert_stack to the result to obtain a traceback that can
  be formatted etc. using traceback methods.

  Args:
    limit: A limit on the number of frames to return.

  Returns:
    A sequence of FrameSummary objects (filename, lineno, name, line)
    corresponding to the call stack of the current thread.
  """
  # N.B ExtractStack in tf_stack.cc will drop this frame prior to
  # traversing the stack.
  # The defaultdicts create this thread's (possibly empty) stacks on demand.
  thread_key = _get_thread_key()
  return _tf_stack.extract_stack(
      limit,
      _source_mapper_stacks[thread_key],
      _source_filter_stacks[thread_key])
StackSummary = _tf_stack.StackSummary
FrameSummary = _tf_stack.FrameSummary
| karllessard/tensorflow | tensorflow/python/util/tf_stack.py | Python | apache-2.0 | 5,298 |
import os
import struct
import urllib.request
import zlib
import re
class PDFImage(object):
    """Parses a PNG image from disk or a URL and serializes it as a PDF
    image XObject through the owning session.

    NOTE(review): despite the generic name, only PNG framing is handled
    here (IHDR / PLTE / tRNS / IDAT / IEND chunks).
    """

    def __init__(self, session, path, name):
        self.pal = None              # hival for /Indexed colorspace (len(palette)//3 - 1)
        self.palette_length = None   # byte length of the palette stream
        self.session = session
        self.path = path
        self.name = name
        self.cursor = None
        self.width = -1
        self.height = -1
        self.size = None  # in bits
        self.number = None  # Object number
        self.bits_per_component = None
        self.palette = None
        self.colorspace = None  # 'Indexed', 'DeviceCMYK',
        self.transparent = None
        self.soft_mask = None
        self.filter = None
        self.decode = None
        self._get_metrics()

    def _set_number(self, number):
        """ Object number
        """
        self.number = number

    def _set_index(self, index=1):
        """ Image number
        """
        self.index = index

    def _set_cursor(self, cursor):
        self.cursor = cursor

    def _set_size(self, width=None, height=None):
        """Set the drawn size; a single dimension keeps the aspect ratio."""
        if width is not None and height is None:
            self.scale = width / float(self.width)
            self._set_scale()
        elif height is not None and width is None:
            self.scale = height / float(self.height)
            self._set_scale()
        elif width is None and height is None:
            self.scale = 1
            self._set_scale()
        else:
            # Both given: use them verbatim (aspect ratio not preserved).
            self.scale_width = int(width)
            self.scale_height = int(height)

    def _set_scale(self):
        self.scale_width = int(self.width * self.scale)
        self.scale_height = int(self.height * self.scale)

    def _open_file(self):
        """Open self.path (URL or file), retrying relative to project_dir."""
        if self.path.startswith(("http://", "https://")):
            self.file = urllib.request.urlopen(self.path)
        else:
            try:
                self.file = open(self.path, 'rb')
            except IOError:
                try:
                    path = os.path.join(self.session.project_dir, self.path)
                    self.file = open(path, 'rb')
                    self.path = path
                except IOError:  # was a bare except; narrowed to the open() failure
                    path = os.path.join(self.session.project_dir, self.path)
                    raise Exception("Can't open path: %s" % path)

    def _initialize(self):
        self._open_file()
        self.initial_data = self.file.read()
        self.file.close()
        if not self.initial_data:
            # BUG FIX: message used a comma (tuple arg) instead of formatting.
            raise Exception("Can't open image file: %s" % self.path)

    def _read(self):
        pass

    def _get_metrics(self):
        self._initialize()
        self._read()
        #self._set_scale()

    def _parse_image(self):
        """Parse the PNG chunk stream, filling image data, palette,
        transparency and (for RGBA/greyscale-alpha) a soft mask."""
        self.transparent = None
        self.palette = None
        image_data = b''
        self.file = None
        self._open_file()
        f = self.file
        # Skip the 8-byte PNG signature and the IHDR chunk length.
        f.read(12)
        if f.read(4) != b'IHDR':
            raise Exception('Image is broken')
        # Low 16 bits of width/height (images wider than 65535 unsupported).
        w = struct.unpack('>HH', f.read(4))[1]
        h = struct.unpack('>HH', f.read(4))[1]
        self.width = int(w)
        self.height = int(h)
        #self._set_scale()
        # Find bits per component
        self.bits_per_component = int.from_bytes(f.read(1), byteorder="big")
        if self.bits_per_component > 8:
            raise Exception('16 bit not supported')
        # Find ct (PNG color type)
        self.ct = int.from_bytes(f.read(1), byteorder="big")
        if self.ct == 0 or self.ct == 4:
            self.colorspace = 'DeviceGray'
            coord = 1
        elif self.ct == 2 or self.ct == 6:
            self.colorspace = 'DeviceRGB'
            coord = 3
        elif self.ct == 3:
            self.colorspace = 'Indexed'
            coord = 1
        else:
            raise Exception('Unknown color type: %s' % self.ct)
        f.read(4)
        self.decode = '/Predictor 15 /Colors %s /BitsPerComponent %s /Columns %s' \
                      % (coord, self.bits_per_component, self.width)
        test_n = 1
        while True:
            last_pos = f.tell()
            header = f.read(4)
            if header in (b'PLTE', b'tRNS', b'IDAT', b'IEND'):
                # Re-read the chunk length (low 16 bits) preceding the header.
                f.seek(last_pos - 4)
                test_n = struct.unpack('>HH', f.read(4))[1]
                f.read(4)
            if header == b'IHDR':
                pass
            elif header == b'PLTE':
                # BUG FIX: the palette used to be stored on a misspelled
                # attribute ('pallet'), so indexed PNGs always raised
                # 'Missing Palette' below. Also record the metadata that
                # _output() needs to emit the palette object.
                self.palette = f.read(test_n)
                self.pal = len(self.palette) // 3 - 1  # hival for /Indexed
                self.palette_filter = ''               # palette is stored raw
                self.palette_length = len(self.palette)
                f.read(4)
            elif header == b'tRNS':
                # Simple transparancy
                t = f.read(test_n)
                if self.ct == 0:
                    self.transparent = [int.from_bytes(t[1:2], byteorder="big"), ]
                elif self.ct == 2:
                    # BUG FIX: 'byteordre' kwarg typo raised TypeError here.
                    self.transparent = [int.from_bytes(t[1:2], byteorder="big"),
                                        int.from_bytes(t[3:4], byteorder="big"),
                                        int.from_bytes(t[5:6], byteorder="big")]
                else:
                    pos = t.find(b'\x00')
                    if pos != -1:
                        self.transparent = [pos, ]
                f.read(4)
            elif header == b'IDAT':
                image_data += f.read(test_n)
                f.read(4)
            elif header == b'IEND':
                break
            else:
                # Unknown chunk: advance one byte and resynchronize.
                f.seek(last_pos + 1)
        if self.colorspace == 'Indexed' and not self.palette:
            raise Exception('Missing Palette')
        self.file.close()
        if self.ct >= 4:
            # Color types 4/6 carry alpha: split into color + soft mask.
            image_data = zlib.decompress(image_data)
            color = bytearray()
            alpha = bytearray()
            if self.ct == 4:
                # Grey
                length = 2 * self.width
                for i in range(self.height):
                    pos = (1 + length) * i
                    # Keep the per-scanline filter byte in both streams.
                    color.append(image_data[pos])
                    alpha.append(image_data[pos])
                    line = bytearray(image_data[pos + 1: pos + 1 + length])
                    color += re.sub(b'(.).', lambda m: m.group(1), line, flags=re.DOTALL)
                    alpha += re.sub(b'.(.)', lambda m: m.group(1), line, flags=re.DOTALL)
            else:
                # RGB image
                length = 4 * self.width
                for i in range(self.height):
                    pos = (1 + length) * i
                    color.append(image_data[pos])
                    alpha.append(image_data[pos])
                    line = bytearray(image_data[pos + 1: pos + 1 + length])
                    color += re.sub(b'(.{3}).', lambda m: m.group(1), line, flags=re.DOTALL)
                    alpha += re.sub(b'.{3}(.)', lambda m: m.group(1), line, flags=re.DOTALL)
            image_data = zlib.compress(color)
            smdata = zlib.compress(alpha)
        else:
            smdata = None
        self.image_data = image_data
        self.size = len(self.image_data)
        self.filter = 'FlateDecode'
        if self.transparent is not None:
            # PDF /Mask wants "min max" pairs; we emit v v for exact matches.
            self.transparent_string = ''
            for i in range(0, len(self.transparent)):
                self.transparent_string += '%s %s ' % (self.transparent[i], self.transparent[i])
        if smdata is not None:
            self.soft_mask = PDFSoftMask(self.session, self.width, self.height, smdata, self.filter)

    def _draw(self, page):
        """Emit the Do operator placing the image at the cursor on *page*."""
        self.session._out('q %s 0 0 %s %s %s cm /I%d Do Q' %
                          (self.scale_width, self.scale_height,
                           self.cursor.x, (self.cursor.y_prime - self.scale_height),
                           self.index), page)
        self.cursor.x_plus(self.scale_width)
        self.cursor.y_plus(self.scale_height)

    def _output(self):
        """ Prompts the creating of image objects.
        """
        self.session._out('<</Type /XObject')
        self.session._out('/Subtype /Image')
        self.session._out('/Width %s' % self.width)
        self.session._out('/Height %s' % self.height)
        # BUG FIX: these used 'is' identity comparison against str literals,
        # which only worked by CPython interning accident.
        if self.colorspace == 'Indexed':
            # BUG FIX: the colorspace array was missing its closing ']'.
            # The palette object is written immediately after this one.
            self.session._out('/ColorSpace [/Indexed /DeviceRGB %s %s 0 R]' %
                              (self.pal, self.number + 1))
        else:
            self.session._out('/ColorSpace /%s' % self.colorspace)
            if self.colorspace == 'DeviceCMYK':
                self.session._out('/Decode [1 0 1 0 1 0 1 0]')
        self.session._out('/BitsPerComponent %s' % self.bits_per_component)
        if self.filter:
            self.session._out('/Filter /%s' % self.filter)
        if self.decode:
            self.session._out('/DecodeParms << %s >>' % self.decode)
        if self.transparent:
            self.session._out('/Mask [%s]' % self.transparent_string)
        if self.soft_mask:
            self.session._out('/SMask %s 0 R' % (self.number + 1))
        self.session._out('/Length %s >>' % self.size)
        self.session._put_stream(self.image_data)
        self.session._out('endobj')
        if self.colorspace == 'Indexed':
            self.session._out('<<%s /Length %s >>' % (self.palette_filter, self.palette_length))
            self.session._put_stream(self.palette)
            self.session._out('endobj')
        if isinstance(self.soft_mask, PDFImage):
            obj = self.session._add_object()
            self.soft_mask._set_number(obj.id)
            self.soft_mask._output()
class PDFSoftMask(PDFImage):
    """Grayscale alpha channel emitted as the /SMask of a parent image.

    Acts as a minimal PDFImage: it is never drawn or parsed on its own
    and only serializes itself as a DeviceGray Image XObject stream.
    """

    def __init__(self, session, width, height, data, imfilter):
        self.session = session
        self.width = width
        self.height = height
        self.image_data = data
        self.size = len(data)
        self.bits_per_component = 8
        self.colorspace = 'DeviceGray'
        self.filter = imfilter
        self.decode = ('/Predictor 15 /Colors 1 /BitsPerComponent 8 '
                       '/Columns %s' % self.width)
        self.path = None

    def draw(self):
        # Soft masks are referenced from their parent image, never placed.
        pass

    def parse_image(self):
        pass

    def _initialize(self):
        pass

    def _output(self):
        """Write the XObject dictionary and stream for this mask."""
        out = self.session._out
        out('<</Type /XObject')
        out('/Subtype /Image')
        out('/Width %s' % self.width)
        out('/Height %s' % self.height)
        out('/ColorSpace /%s' % self.colorspace)
        out('/BitsPerComponent %s' % self.bits_per_component)
        if self.filter:
            out('/Filter /%s' % self.filter)
        out('/DecodeParms << %s >>' % self.decode)
        out('/Length %s >>' % self.size)
        self.session._put_stream(self.image_data)
        out('endobj')
| katerina7479/pypdflite | pypdflite/pdfobjects/pdfimage.py | Python | mit | 10,585 |
#!/bin/env python2.7
"""Main executable for dbdnsd."""
import json
from argparse import ArgumentParser
from time import sleep
from twisted.internet import reactor
from twisted.names import dns, server
from DBResolver import DynamicDBResolver
from models import Base
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
# Command-line interface definition; parsed in the __main__ block below
# into the module-global `args` that dbdnsd() reads.
argument_parser = ArgumentParser(description='Dnsdbd is a dns server backed by a configured database')
argument_parser.add_argument('-c', '--config-file', help='The config file to be used', default='config.json',
                             dest='configfile')
# Database connection string in SQLAlchemy URL form.
argument_parser.add_argument('--db',
                             help='The DB string in SQLAlchemy format. Must be provided either via commandline or via config file')
# The listener options may each be repeated; they accumulate into lists.
argument_parser.add_argument('--tcp',
                             help='The TCP ports to be used, may be multiple', dest='tcp', action='append', type=int)
argument_parser.add_argument('--udp',
                             help='The UDP ports to be used, may be multiple', dest='udp', action='append', type=int)
argument_parser.add_argument('--unix',
                             help='The UNIX sockets to be used, may be multiple', dest='unix', action='append')
def dbdnsd(): # pragma: nocover
"""Main function for dbdnsd."""
config = {'listeners': []}
try:
with open(args.configfile, 'r') as configfile:
config = json.load(configfile)
except:
pass
if 'listeners' not in config or config['listeners'] is None:
config['listeners'] = {}
if args.tcp is not None:
for port in args.tcp:
config['listeners'].append({'type': 'tcp', 'port': port})
if args.udp is not None:
for port in args.udp:
config['listeners'].append({'type': 'udp', 'port': port})
if args.unix is not None:
for filename in args.unix:
config['listeners'].append({'type': 'unix', 'filename': filename})
if 'database' not in config or config['database'] is None:
print 'You must provide a database connection string'
raise SystemExit(1)
engine = create_engine(config['database'])
Base.metadata.bind = engine
Base.metadata.create_all()
session = sessionmaker(bind=engine)()
dnsreactor = get_reactor(session, config['listeners'])
dnsreactor.run()
sleep(40000)
def get_reactor(session, listener_config=()):
    """Create a twisted reactor serving DNS on each configured listener.

    :param session: SQLAlchemy session handed to the DynamicDBResolver.
    :param listener_config: iterable of listener dicts, each carrying a
        'type' key ('tcp', 'udp' or 'unix') plus 'port' or 'filename'.
        (The default is now a tuple: mutable default arguments are a
        classic Python pitfall.)
    :returns: the global twisted reactor, ready to ``run()``.
    """
    factory = server.DNSServerFactory(
        clients=[DynamicDBResolver(session)]
    )
    protocol = dns.DNSDatagramProtocol(controller=factory)
    for listener in listener_config:
        if listener['type'] == 'tcp':
            reactor.listenTCP(listener['port'], factory)
        elif listener['type'] == 'udp':
            # UDP listeners use the datagram protocol, not the factory.
            reactor.listenUDP(listener['port'], protocol)
        elif listener['type'] == 'unix':
            reactor.listenUNIX(listener['filename'], factory)
    return reactor
if __name__ == '__main__':
    # Parse CLI options into the module-global `args` consumed by dbdnsd().
    args = argument_parser.parse_args()
    raise SystemExit(dbdnsd())
| luaks/dbdnsd | dbdnsd/dbdnsd.py | Python | mit | 3,066 |
#!/usr/bin/env python
###############################################################################
# $Id$
#
# Project: GDAL2Tiles, Google Summer of Code 2007 & 2008
# Global Map Tiles Classes
# Purpose: Convert a raster into TMS tiles, create KML SuperOverlay EPSG:4326,
# generate a simple HTML viewers based on Google Maps and OpenLayers
# Author: Klokan Petr Pridal, klokan at klokan dot cz
# Web: http://www.klokan.cz/projects/gdal2tiles/
#
###############################################################################
# Copyright (c) 2008 Klokan Petr Pridal. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
###############################################################################
"""
globalmaptiles.py
Global Map Tiles as defined in Tile Map Service (TMS) Profiles
==============================================================
Functions necessary for generation of global tiles used on the web.
It contains classes implementing coordinate conversions for:
- GlobalMercator (based on EPSG:900913 = EPSG:3785)
for Google Maps, Yahoo Maps, Microsoft Maps compatible tiles
- GlobalGeodetic (based on EPSG:4326)
for OpenLayers Base Map and Google Earth compatible tiles
More info at:
http://wiki.osgeo.org/wiki/Tile_Map_Service_Specification
http://wiki.osgeo.org/wiki/WMS_Tiling_Client_Recommendation
http://msdn.microsoft.com/en-us/library/bb259689.aspx
http://code.google.com/apis/maps/documentation/overlays.html#Google_Maps_Coordinates
Created by Klokan Petr Pridal on 2008-07-03.
Google Summer of Code 2008, project GDAL2Tiles for OSGEO.
In case you use this class in your product, translate it to another language
or find it usefull for your project please let me know.
My email: klokan at klokan dot cz.
I would like to know where it was used.
Class is available under the open-source GDAL license (www.gdal.org).
"""
import math
class GlobalMercator(object):
    """
    TMS Global Mercator Profile
    ---------------------------

    Coordinate conversions for the Spherical Mercator tile pyramid
    (EPSG:900913 / EPSG:3785) used by Google Maps, Microsoft Virtual
    Earth, Yahoo Maps and similar web mapping applications:

        LatLon (WGS84)  <->  Meters (EPSG:900913)  <->  Pixels  <->  Tile

    Pixel and tile coordinates are in TMS notation (origin [0,0] in the
    bottom-left corner); helpers convert tile ids to the Google
    (top-left origin) and Microsoft QuadTree naming conventions.

    The world extent in EPSG:900913 is +/-20037508.342789244 meters,
    i.e. 2 * math.pi * 6378137 / 2.0 (half the Earth's circumference);
    latitudes beyond roughly +/-85.05112878 degrees are clipped off by
    the projection. At zoom 0 the whole extent fits one
    tileSize x tileSize tile; every deeper zoom level halves the
    resolution (meters/pixel).
    """

    def __init__(self, tileSize=256):
        "Initialize the TMS Global Mercator pyramid"
        self.tileSize = tileSize
        # Meters/pixel at zoom 0: 156543.03392804062 for 256 px tiles.
        self.initialResolution = 2 * math.pi * 6378137 / self.tileSize
        # Half of the Earth's circumference: 20037508.342789244 meters.
        self.originShift = 2 * math.pi * 6378137 / 2.0

    def LatLonToMeters(self, lat, lon):
        "Converts given lat/lon in WGS84 Datum to XY in Spherical Mercator EPSG:900913"
        mx = lon * self.originShift / 180.0
        my = math.log(math.tan((90 + lat) * math.pi / 360.0)) / (math.pi / 180.0)
        my = my * self.originShift / 180.0
        return mx, my

    def MetersToLatLon(self, mx, my):
        "Converts XY point from Spherical Mercator EPSG:900913 to lat/lon in WGS84 Datum"
        lon = (mx / self.originShift) * 180.0
        lat = (my / self.originShift) * 180.0
        # Inverse of the Mercator latitude stretch.
        lat = 180 / math.pi * (2 * math.atan(math.exp(lat * math.pi / 180.0)) - math.pi / 2.0)
        return lat, lon

    def PixelsToMeters(self, px, py, zoom):
        "Converts pixel coordinates in given zoom level of pyramid to EPSG:900913"
        res = self.Resolution(zoom)
        mx = px * res - self.originShift
        my = py * res - self.originShift
        return mx, my

    def MetersToPixels(self, mx, my, zoom):
        "Converts EPSG:900913 to pyramid pixel coordinates in given zoom level"
        res = self.Resolution(zoom)
        px = (mx + self.originShift) / res
        py = (my + self.originShift) / res
        return px, py

    def PixelsToTile(self, px, py):
        "Returns a tile covering region in given pixel coordinates"
        tx = int(math.ceil(px / float(self.tileSize)) - 1)
        ty = int(math.ceil(py / float(self.tileSize)) - 1)
        return tx, ty

    def PixelsToRaster(self, px, py, zoom):
        "Move the origin of pixel coordinates to top-left corner"
        mapSize = self.tileSize << zoom
        return px, mapSize - py

    def MetersToTile(self, mx, my, zoom):
        "Returns tile for given mercator coordinates"
        px, py = self.MetersToPixels(mx, my, zoom)
        return self.PixelsToTile(px, py)

    def TileBounds(self, tx, ty, zoom):
        "Returns bounds of the given tile in EPSG:900913 coordinates"
        minx, miny = self.PixelsToMeters(tx * self.tileSize, ty * self.tileSize, zoom)
        maxx, maxy = self.PixelsToMeters((tx + 1) * self.tileSize, (ty + 1) * self.tileSize, zoom)
        return (minx, miny, maxx, maxy)

    def TileLatLonBounds(self, tx, ty, zoom):
        "Returns bounds of the given tile in latitude/longitude using WGS84 datum"
        bounds = self.TileBounds(tx, ty, zoom)
        minLat, minLon = self.MetersToLatLon(bounds[0], bounds[1])
        maxLat, maxLon = self.MetersToLatLon(bounds[2], bounds[3])
        return (minLat, minLon, maxLat, maxLon)

    def Resolution(self, zoom):
        "Resolution (meters/pixel) for given zoom level (measured at Equator)"
        return self.initialResolution / (2 ** zoom)

    def ZoomForPixelSize(self, pixelSize):
        "Maximal scaledown zoom of the pyramid closest to the pixelSize."
        for i in range(30):
            if pixelSize > self.Resolution(i):
                return i - 1 if i != 0 else 0  # We don't want to scale up
        # Bug fix: previously fell through and returned None for very
        # small pixel sizes; report the deepest level considered instead.
        return 29

    def GoogleTile(self, tx, ty, zoom):
        "Converts TMS tile coordinates to Google Tile coordinates"
        # The coordinate origin moves from bottom-left to top-left.
        return tx, (2 ** zoom - 1) - ty

    def QuadTree(self, tx, ty, zoom):
        "Converts TMS tile coordinates to Microsoft QuadTree"
        quadKey = ""
        ty = (2 ** zoom - 1) - ty
        for i in range(zoom, 0, -1):
            digit = 0
            mask = 1 << (i - 1)
            if (tx & mask) != 0:
                digit += 1
            if (ty & mask) != 0:
                digit += 2
            quadKey += str(digit)
        return quadKey
#---------------------
class GlobalGeodetic(object):
    """
    TMS Global Geodetic Profile
    ---------------------------

    Coordinate conversions for tiles in the unprojected EPSG:4326
    profile ("Plate Carre"): latitude/longitude are used directly as
    planar XY coordinates, scaled to a pixel pyramid whose top level
    (zoom 0) maps the area [-180,-90,180,90] onto 512x256 pixels, i.e.
    two tiles side by side.

    Pixel and tile coordinates are in TMS notation (origin [0,0] in the
    bottom-left corner); such tiles are compatible with OpenLayers base
    maps and Google Earth.
    """

    def __init__(self, tileSize=256):
        self.tileSize = tileSize

    def LatLonToPixels(self, lat, lon, zoom):
        "Converts lat/lon to pixel coordinates in given zoom of the EPSG:4326 pyramid"
        # NOTE(review): lat feeds the x axis and lon the y axis here,
        # mirroring the original GDAL2Tiles code -- confirm the intended
        # axis order before relying on it.
        res = 180 / 256.0 / 2**zoom
        px = (180 + lat) / res
        py = (90 + lon) / res
        return px, py

    def PixelsToTile(self, px, py):
        "Returns coordinates of the tile covering region in pixel coordinates"
        tx = int(math.ceil(px / float(self.tileSize)) - 1)
        ty = int(math.ceil(py / float(self.tileSize)) - 1)
        return tx, ty

    def Resolution(self, zoom):
        "Resolution (arc/pixel) for given zoom level (measured at Equator)"
        return 180 / 256.0 / 2**zoom

    def TileBounds(self, tx, ty, zoom):
        "Returns bounds of the given tile in EPSG:4326 degrees"
        # Bug fix: 'self' was missing from the signature, so instance
        # calls raised a TypeError (tx received the instance).
        res = 180 / 256.0 / 2**zoom
        return (
            tx*256*res - 180,
            ty*256*res - 90,
            (tx+1)*256*res - 180,
            (ty+1)*256*res - 90
        )
if __name__ == "__main__":
import sys, os
def Usage(s = ""):
print "Usage: globalmaptiles.py [-profile 'mercator'|'geodetic'] zoomlevel lat lon [latmax lonmax]"
print
if s:
print s
print
print "This utility prints for given WGS84 lat/lon coordinates (or bounding box) the list of tiles"
print "covering specified area. Tiles are in the given 'profile' (default is Google Maps 'mercator')"
print "and in the given pyramid 'zoomlevel'."
print "For each tile several information is printed including bonding box in EPSG:900913 and WGS84."
sys.exit(1)
profile = 'mercator'
zoomlevel = None
lat, lon, latmax, lonmax = None, None, None, None
boundingbox = False
argv = sys.argv
i = 1
while i < len(argv):
arg = argv[i]
if arg == '-profile':
i = i + 1
profile = argv[i]
if zoomlevel is None:
zoomlevel = int(argv[i])
elif lat is None:
lat = float(argv[i])
elif lon is None:
lon = float(argv[i])
elif latmax is None:
latmax = float(argv[i])
elif lonmax is None:
lonmax = float(argv[i])
else:
Usage("ERROR: Too many parameters")
i = i + 1
if profile != 'mercator':
Usage("ERROR: Sorry, given profile is not implemented yet.")
if zoomlevel == None or lat == None or lon == None:
Usage("ERROR: Specify at least 'zoomlevel', 'lat' and 'lon'.")
if latmax is not None and lonmax is None:
Usage("ERROR: Both 'latmax' and 'lonmax' must be given.")
if latmax != None and lonmax != None:
if latmax < lat:
Usage("ERROR: 'latmax' must be bigger then 'lat'")
if lonmax < lon:
Usage("ERROR: 'lonmax' must be bigger then 'lon'")
boundingbox = (lon, lat, lonmax, latmax)
tz = zoomlevel
mercator = GlobalMercator()
mx, my = mercator.LatLonToMeters( lat, lon )
#print "Spherical Mercator (ESPG:900913) coordinates for lat/lon: "
#print (mx, my)
tminx, tminy = mercator.MetersToTile( mx, my, tz )
if boundingbox:
mx, my = mercator.LatLonToMeters( latmax, lonmax )
#print "Spherical Mercator (ESPG:900913) cooridnate for maxlat/maxlon: "
#print (mx, my)
tmaxx, tmaxy = mercator.MetersToTile( mx, my, tz )
else:
tmaxx, tmaxy = tminx, tminy
#print "x y z"
for ty in range(tminy, tmaxy+1):
for tx in range(tminx, tmaxx+1):
tilefilename = "%s/%s/%s" % (tz, tx, ty)
#print tilefilename, "( TileMapService: z / x / y )"
gx, gy = mercator.GoogleTile(tx, ty, tz)
xyz = '[' + str(gx) + ',' + str(gy) + ',' + str(tz) + '],'
sys.stdout.write(xyz)
'''
quadkey = mercator.QuadTree(tx, ty, tz)
print "\tQuadkey:", quadkey, '(',int(quadkey, 4),')'
bounds = mercator.TileBounds( tx, ty, tz)
print
print "\tEPSG:900913 Extent: ", bounds
wgsbounds = mercator.TileLatLonBounds( tx, ty, tz)
print "\tWGS84 Extent:", wgsbounds
print "\tgdalwarp -ts 256 256 -te %s %s %s %s %s %s_%s_%s.tif" % (
bounds[0], bounds[1], bounds[2], bounds[3], "<your-raster-file-in-epsg900913.ext>", tz, tx, ty)
'''
#print | LudvikAdamec/diplomova-prace | public/tileXYZgenerator/globalmaptiles.py | Python | apache-2.0 | 18,239 |
from Memory import *
from Giga import *
from Cube import *
# Operator table for the VM's binary quadruples: each entry maps an
# operator token to a two-argument function over the resolved operand
# values. The table replaces ~200 lines of near-identical if/elif
# branches that each re-implemented the operand fetch/store sequence.
_BINARY_OPS = {
    '+': lambda a, b: a + b,
    '-': lambda a, b: a - b,
    # NOTE(review): the original executed ``resultValue = valueVar1 =
    # valueVar2`` for '=', which stores the *second* operand; that exact
    # behavior is preserved -- confirm the operand order is intended.
    '=': lambda a, b: b,
    # Original coerced operands to float only for multiplication.
    '*': lambda a, b: float(a) * float(b),
    '/': lambda a, b: a / b,
    '<=': lambda a, b: a <= b,
    '>=': lambda a, b: a >= b,
    '<': lambda a, b: a < b,
    '>': lambda a, b: a > b,
    '==': lambda a, b: a == b,
    '!=': lambda a, b: a != b,
    '||': lambda a, b: a or b,
    '&&': lambda a, b: a and b,
}

def _resolveValue(address, globalMemory, activeMemory, constants):
    """Fetch the value stored at a virtual address.

    Addresses in [10000, 20000) live in global memory; every other
    address is resolved against the active memory.
    """
    if 10000 <= address < 20000:
        return globalMemory.getValueAtAddress(address, constants)
    return activeMemory.getValueAtAddress(address, constants)

def executeVirtualMachine(functions, quadruples, constants):
    """Interpret the quadruple list until an 'END' quadruple is reached.

    :param functions: function directory (currently unused; kept for
        interface compatibility with the caller).
    :param quadruples: list of dicts with keys 'op', 'var1', 'var2' and
        'result' (PRINT quadruples use 'resultado').
    :param constants: constant table consulted by the memory objects.
    """
    print("Virtual machine running...")
    countQuadruples = 0
    # constVarCount / tempVarCount / globalVarCount come from the
    # star-imported memory module.
    activeMemory = Memory('module', constVarCount, tempVarCount)
    globalMemory = Memory('main', globalVarCount, 0)
    while quadruples[countQuadruples]['op'] != 'END':
        quadruple = quadruples[countQuadruples]
        print(quadruple)
        op = quadruple['op']
        if op in _BINARY_OPS:
            left = _resolveValue(quadruple['var1'],
                                 globalMemory, activeMemory, constants)
            right = _resolveValue(quadruple['var2'],
                                  globalMemory, activeMemory, constants)
            activeMemory.storeValue(quadruple['result'],
                                    _BINARY_OPS[op](left, right))
        elif op == 'PRINT':
            # NOTE(review): PRINT quadruples use the key 'resultado'
            # (not 'result') -- kept as-is to match the generator side.
            print(_resolveValue(quadruple['resultado'],
                                globalMemory, activeMemory, constants))
        # TODO: ARR, ENDFUNC, GOTO, GOTOF, GOTOT, GOFUNC (GOSUB),
        # MEMORY (ERA), PARAM, RETURN and READ are not implemented yet.
        countQuadruples += 1
| sanchezz93/Giga-Compiler | Entrega 5/Machine.py | Python | mit | 8,699 |
#!/usr/bin/env python
import unittest
import boostertest
class TestUserDelete(boostertest.BoosterTestCase):
    """ Test the user-delete action """

    def setUp(self):
        """ Set the action and other commonly used fixture data """
        self.params = {}
        self.params['action'] = "user-delete"
        self.params['user-name'] = "justauser"
        # sample user
        self.user1 = {}
        self.user1['user-name'] = "samuel35"
        self.user1['description'] = "user description"
        self.user1['password'] = "samspass"
        self.user1['role-names'] = "app-user"
        self.user1['permissions'] = ""
        self.user1['collections'] = ""
        # keep track of created users for later teardown
        self.teardown_users = []

    def tearDown(self):
        """ Remove any created test users """
        params = {}
        params['action'] = "user-delete"
        for user in self.teardown_users:
            params['user-name'] = user
            response, body = self.booster.request(params)
            # 200 = deleted by this cleanup, 404 = already gone
            self.assertTrue(response.status in (200, 404))

    def test_basic_user_deletion_results_in_200(self):
        """ A successful user deletion should result in 200 """
        # create the user
        params = self.params
        params.update(self.user1)  # merge in user1 data
        params['action'] = "user-create"
        username = params['user-name']
        self.teardown_users.append(username)
        response, body = self.booster.request(params)
        err = response.get("x-booster-error", "none")
        self.assertEqual(response.status, 201)
        self.assertEqual(err, "none")
        # delete and assert
        params = {}
        params['action'] = "user-delete"
        params['user-name'] = username
        response, body = self.booster.request(params)
        err = response.get("x-booster-error", "none")
        self.assertEqual(response.status, 200)
        self.assertEqual(err, "none")

    def test_delete_nonexistent_user_results_in_404(self):
        """ Attempting to delete a non-existent user should return 404 """
        params = self.params
        params['user-name'] = "no-such-user-is-here-on-server-go-away"
        response, body = self.booster.request(params)
        err = response.get("x-booster-error", "")
        self.assertEqual(response.status, 404)
        # Bug fix: str.find() returns -1 when the substring is absent;
        # the old "!= 1" comparison was vacuously true.
        self.assertTrue(err.find("does not exist") != -1)

    def test_empty_user_name_results_in_404(self):
        """ A user-delete with empty user-name value should result in 500.

        NOTE(review): the method name says 404 but the assertion (and the
        original code) expects 500 -- confirm which status is intended.
        """
        params = self.params
        params['user-name'] = ""
        response, body = self.booster.request(params)
        err = response.get("x-booster-error", "none")
        self.assertEqual(response.status, 500)
        self.assertTrue(err.find("User '' does not exist") != -1)

    def test_delete_user_with_no_user_name_results_in_400(self):
        """ A user-delete with missing user-name should result in 400 """
        params = self.params
        del params['user-name']
        response, body = self.booster.request(params)
        err = response.get("x-booster-error", "")
        self.assertEqual(response.status, 400)
        self.assertTrue(err.find("valid set of arguments was not provided") != -1)
if __name__=="__main__":
unittest.main()
| codycollier/booster | test/test_user_delete.py | Python | apache-2.0 | 3,311 |
class AnnouncementPermissionsBackend(object):
    """Auth backend answering only the ``announcements.can_manage`` check.

    It never authenticates anyone itself (``authenticate`` always returns
    None, deferring to other backends); permission to manage announcements
    is granted to authenticated staff members only.
    """
    supports_object_permissions = True
    supports_anonymous_user = True

    def authenticate(self, **kwargs):
        # This backend performs no credential checking of its own.
        return None

    def has_perm(self, user, perm, obj=None):
        if perm != "announcements.can_manage":
            # Not our permission: abstain so other backends can answer.
            return None
        return user.is_authenticated() and user.is_staff
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2012 Citrix Systems, Inc.
# Copyright 2010 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Management class for host-related functions (start, reboot, etc).
"""
from nova.compute import task_states
from nova.compute import vm_states
from nova import context
from nova import exception
from nova.openstack.common import jsonutils
from nova.openstack.common import log as logging
from nova.virt.xenapi import pool_states
from nova.virt.xenapi import vm_utils
LOG = logging.getLogger(__name__)
class Host(object):
    """
    Implements host related operations (power actions, maintenance mode,
    enablement, uptime).
    """
    def __init__(self, session, virtapi):
        # XenAPI session for host/VM calls, plus nova's virt API bridge
        # back into the database layer.
        self._session = session
        self._virtapi = virtapi

    def host_power_action(self, _host, action):
        """Reboots or shuts down the host.

        :param action: "reboot" or "shutdown"; anything else raises KeyError.
        """
        args = {"action": jsonutils.dumps(action)}
        methods = {"reboot": "host_reboot", "shutdown": "host_shutdown"}
        response = call_xenhost(self._session, methods[action], args)
        return response.get("power_action", response)

    def host_maintenance_mode(self, host, mode):
        """Start/Stop host maintenance window. On start, it triggers
        guest VMs evacuation.

        :returns: 'off_maintenance' when mode is falsy, 'on_maintenance'
                  once every guest VM was migrated away.
        :raises exception.NoValidHost: if at least one VM could not be
                                       evacuated.
        """
        if not mode:
            return 'off_maintenance'
        # Candidate destinations: every pool member except this host.
        host_list = [host_ref for host_ref in
                     self._session.call_xenapi('host.get_all')
                     if host_ref != self._session.get_xenapi_host()]
        migrations_counter = vm_counter = 0
        ctxt = context.get_admin_context()
        for vm_ref, vm_rec in vm_utils.list_vms(self._session):
            for host_ref in host_list:
                try:
                    # Ensure only guest instances are migrated
                    uuid = vm_rec['other_config'].get('nova_uuid')
                    if not uuid:
                        name = vm_rec['name_label']
                        uuid = _uuid_find(self._virtapi, ctxt, host, name)
                        if not uuid:
                            # 'skip' + 'ping' concatenate to "skipping"
                            # (fixed: a stray space previously produced
                            # "skip ping").
                            msg = _('Instance %(name)s running on %(host)s'
                                    ' could not be found in the database:'
                                    ' assuming it is a worker VM and skip'
                                    'ping migration to a new host')
                            LOG.info(msg % locals())
                            continue
                    instance = self._virtapi.instance_get_by_uuid(ctxt, uuid)
                    vm_counter = vm_counter + 1
                    aggregate = self._virtapi.aggregate_get_by_host(
                        ctxt, host, key=pool_states.POOL_FLAG)
                    if not aggregate:
                        # Message typo fixed ("count not be" -> "could not be").
                        msg = _('Aggregate for host %(host)s could not be'
                                ' found.') % dict(host=host)
                        raise exception.NotFound(msg)
                    dest = _host_find(ctxt, self._session, aggregate[0],
                                      host_ref)
                    # Mark the instance as migrating before the actual move.
                    self._virtapi.instance_update(
                        ctxt, instance['uuid'],
                        {'host': dest,
                         'task_state': task_states.MIGRATING})
                    self._session.call_xenapi('VM.pool_migrate',
                                              vm_ref, host_ref, {})
                    migrations_counter = migrations_counter + 1
                    self._virtapi.instance_update(
                        ctxt, instance['uuid'],
                        {'vm_state': vm_states.ACTIVE})
                    break
                except self._session.XenAPI.Failure:
                    # Missing space fixed so the log reads
                    # "...VM %(vm_ref)s from %(host)s".
                    LOG.exception(_('Unable to migrate VM %(vm_ref)s'
                                    ' from %(host)s') % locals())
                    # Roll the instance record back onto the source host.
                    self._virtapi.instance_update(
                        ctxt, instance['uuid'],
                        {'host': host,
                         'vm_state': vm_states.ACTIVE})
        if vm_counter == migrations_counter:
            return 'on_maintenance'
        else:
            raise exception.NoValidHost(reason='Unable to find suitable '
                                               'host for VMs evacuation')

    def set_host_enabled(self, _host, enabled):
        """Sets the specified host's ability to accept new instances."""
        args = {"enabled": jsonutils.dumps(enabled)}
        response = call_xenhost(self._session, "set_host_enabled", args)
        return response.get("status", response)

    def get_host_uptime(self, _host):
        """Returns the result of calling "uptime" on the target host."""
        response = call_xenhost(self._session, "host_uptime", {})
        return response.get("uptime", response)
class HostState(object):
    """Tracks and caches status information for the XenServer host that
    backs this compute node.
    """

    def __init__(self, session):
        super(HostState, self).__init__()
        self._session = session
        self._stats = {}
        # Populate the cache right away so callers never see an empty dict
        # merely because no refresh has happened yet.
        self.update_status()

    def get_host_stats(self, refresh=False):
        """Return the cached host stats, refreshing them first if asked."""
        if refresh:
            self.update_status()
        return self._stats

    def update_status(self):
        """Query the xenhost plugin and rebuild the stats dictionary."""
        LOG.debug(_("Updating host stats"))
        data = call_xenhost(self._session, "host_data", {})
        if not data:
            # Keep the previous stats when the plugin returned nothing.
            return
        # Augment the plugin data with storage repository usage figures.
        sr_ref = vm_utils.safe_find_sr(self._session)
        self._session.call_xenapi("SR.scan", sr_ref)
        sr_rec = self._session.call_xenapi("SR.get_record", sr_ref)
        disk_total = int(sr_rec["physical_size"])
        disk_used = int(sr_rec["physical_utilisation"])
        data["disk_total"] = disk_total
        data["disk_used"] = disk_used
        data["disk_available"] = disk_total - disk_used
        data["supported_instances"] = to_supported_instances(
            data.get("host_capabilities")
        )
        # Flatten the nested memory dict into top-level keys.
        host_memory = data.get('host_memory', None)
        if host_memory:
            data["host_memory_total"] = host_memory.get('total', 0)
            data["host_memory_overhead"] = host_memory.get('overhead', 0)
            data["host_memory_free"] = host_memory.get('free', 0)
            data["host_memory_free_computed"] = host_memory.get(
                'free-computed', 0)
            del data['host_memory']
        data['hypervisor_hostname'] = data['host_hostname']
        self._stats = data
def to_supported_instances(host_capabilities):
    """Convert raw capability strings ("ostype-version-arch") into
    (arch, 'xapi', ostype) tuples, warning about and skipping any entry
    that does not split into exactly three parts."""
    supported = []
    for capability in (host_capabilities or []):
        pieces = capability.split("-")
        if len(pieces) != 3:
            LOG.warning(
                _("Failed to extract instance support from %s"), capability)
            continue
        ostype, _version, arch = pieces
        supported.append((arch, 'xapi', ostype))
    return supported
def call_xenhost(session, method, arg_dict):
    """Invoke *method* on the xenhost plugin and decode its JSON reply.

    Several host operations share this calling convention, so the plugin
    interaction is centralised here.

    :returns: '' for an empty reply, the decoded structure on success,
              None when the reply is not valid JSON, or the XenAPI error
              detail string when the call itself fails.
    """
    try:
        result = session.call_plugin('xenhost', method, args=arg_dict)
        return jsonutils.loads(result) if result else ''
    except ValueError:
        LOG.exception(_("Unable to get updated status"))
        return None
    except session.XenAPI.Failure as e:
        LOG.error(_("The call to %(method)s returned "
                    "an error: %(e)s.") % locals())
        return e.details[1]
def _uuid_find(virtapi, context, host, name_label):
"""Return instance uuid by name_label."""
for i in virtapi.instance_get_all_by_host(context, host):
if i.name == name_label:
return i['uuid']
return None
def _host_find(context, session, src_aggregate, dst):
    """Return the host from the xenapi host reference.

    :param src_aggregate: the aggregate that the compute host being put in
                          maintenance (source of VMs) belongs to
    :param dst: the hypervisor host reference (destination of VMs)
    :return: the compute host that manages dst
    :raises exception.NoValidHost: when dst's uuid does not appear in the
        aggregate's metadata
    """
    # NOTE: this would be a lot simpler if nova-compute stored
    # CONF.host in the XenServer host's other-config map.
    # TODO(armando-migliaccio): improve according the note above
    uuid = session.call_xenapi('host.get_record', dst)['uuid']
    # metadetails maps compute-host name -> xenapi host uuid; invert the
    # lookup by scanning for the matching uuid. (Python 2 iteritems().)
    for compute_host, host_uuid in src_aggregate.metadetails.iteritems():
        if host_uuid == uuid:
            return compute_host
    raise exception.NoValidHost(reason='Host %(host_uuid)s could not be found '
                                'from aggregate metadata: %(metadata)s.' %
                                {'host_uuid': uuid,
                                 'metadata': src_aggregate.metadetails})
| yrobla/nova | nova/virt/xenapi/host.py | Python | apache-2.0 | 9,653 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from enum import Enum
from six import with_metaclass
from azure.core import CaseInsensitiveEnumMeta
class AgentPoolType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """AgentPoolType represents types of an agent pool.
    """
    # AutoRest-generated constants; member lookup is case-insensitive via
    # CaseInsensitiveEnumMeta, and the string values are the wire format.
    VIRTUAL_MACHINE_SCALE_SETS = "VirtualMachineScaleSets"
    AVAILABILITY_SET = "AvailabilitySet"
class ContainerServiceStorageProfileTypes(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """Storage profile specifies what kind of storage used. Choose from StorageAccount and
    ManagedDisks. Leave it empty, we will choose for you based on the orchestrator choice.
    """
    # AutoRest-generated constants (case-insensitive via the metaclass).
    STORAGE_ACCOUNT = "StorageAccount"
    MANAGED_DISKS = "ManagedDisks"
class ContainerServiceVMSizeTypes(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """Size of agent VMs.
    """
    # AutoRest-generated list of Azure VM size names; values are the exact
    # strings used by the service. Grouped by VM series for readability.
    # A-series (entry level)
    STANDARD_A1 = "Standard_A1"
    STANDARD_A10 = "Standard_A10"
    STANDARD_A11 = "Standard_A11"
    STANDARD_A1_V2 = "Standard_A1_v2"
    STANDARD_A2 = "Standard_A2"
    STANDARD_A2_V2 = "Standard_A2_v2"
    STANDARD_A2_M_V2 = "Standard_A2m_v2"
    STANDARD_A3 = "Standard_A3"
    STANDARD_A4 = "Standard_A4"
    STANDARD_A4_V2 = "Standard_A4_v2"
    STANDARD_A4_M_V2 = "Standard_A4m_v2"
    STANDARD_A5 = "Standard_A5"
    STANDARD_A6 = "Standard_A6"
    STANDARD_A7 = "Standard_A7"
    STANDARD_A8 = "Standard_A8"
    STANDARD_A8_V2 = "Standard_A8_v2"
    STANDARD_A8_M_V2 = "Standard_A8m_v2"
    STANDARD_A9 = "Standard_A9"
    # B-series (burstable)
    STANDARD_B2_MS = "Standard_B2ms"
    STANDARD_B2_S = "Standard_B2s"
    STANDARD_B4_MS = "Standard_B4ms"
    STANDARD_B8_MS = "Standard_B8ms"
    # D-series (general purpose)
    STANDARD_D1 = "Standard_D1"
    STANDARD_D11 = "Standard_D11"
    STANDARD_D11_V2 = "Standard_D11_v2"
    STANDARD_D11_V2_PROMO = "Standard_D11_v2_Promo"
    STANDARD_D12 = "Standard_D12"
    STANDARD_D12_V2 = "Standard_D12_v2"
    STANDARD_D12_V2_PROMO = "Standard_D12_v2_Promo"
    STANDARD_D13 = "Standard_D13"
    STANDARD_D13_V2 = "Standard_D13_v2"
    STANDARD_D13_V2_PROMO = "Standard_D13_v2_Promo"
    STANDARD_D14 = "Standard_D14"
    STANDARD_D14_V2 = "Standard_D14_v2"
    STANDARD_D14_V2_PROMO = "Standard_D14_v2_Promo"
    STANDARD_D15_V2 = "Standard_D15_v2"
    STANDARD_D16_V3 = "Standard_D16_v3"
    STANDARD_D16_S_V3 = "Standard_D16s_v3"
    STANDARD_D1_V2 = "Standard_D1_v2"
    STANDARD_D2 = "Standard_D2"
    STANDARD_D2_V2 = "Standard_D2_v2"
    STANDARD_D2_V2_PROMO = "Standard_D2_v2_Promo"
    STANDARD_D2_V3 = "Standard_D2_v3"
    STANDARD_D2_S_V3 = "Standard_D2s_v3"
    STANDARD_D3 = "Standard_D3"
    STANDARD_D32_V3 = "Standard_D32_v3"
    STANDARD_D32_S_V3 = "Standard_D32s_v3"
    STANDARD_D3_V2 = "Standard_D3_v2"
    STANDARD_D3_V2_PROMO = "Standard_D3_v2_Promo"
    STANDARD_D4 = "Standard_D4"
    STANDARD_D4_V2 = "Standard_D4_v2"
    STANDARD_D4_V2_PROMO = "Standard_D4_v2_Promo"
    STANDARD_D4_V3 = "Standard_D4_v3"
    STANDARD_D4_S_V3 = "Standard_D4s_v3"
    STANDARD_D5_V2 = "Standard_D5_v2"
    STANDARD_D5_V2_PROMO = "Standard_D5_v2_Promo"
    STANDARD_D64_V3 = "Standard_D64_v3"
    STANDARD_D64_S_V3 = "Standard_D64s_v3"
    STANDARD_D8_V3 = "Standard_D8_v3"
    STANDARD_D8_S_V3 = "Standard_D8s_v3"
    # DS-series (premium-storage capable)
    STANDARD_DS1 = "Standard_DS1"
    STANDARD_DS11 = "Standard_DS11"
    STANDARD_DS11_V2 = "Standard_DS11_v2"
    STANDARD_DS11_V2_PROMO = "Standard_DS11_v2_Promo"
    STANDARD_DS12 = "Standard_DS12"
    STANDARD_DS12_V2 = "Standard_DS12_v2"
    STANDARD_DS12_V2_PROMO = "Standard_DS12_v2_Promo"
    STANDARD_DS13 = "Standard_DS13"
    STANDARD_DS13_2_V2 = "Standard_DS13-2_v2"
    STANDARD_DS13_4_V2 = "Standard_DS13-4_v2"
    STANDARD_DS13_V2 = "Standard_DS13_v2"
    STANDARD_DS13_V2_PROMO = "Standard_DS13_v2_Promo"
    STANDARD_DS14 = "Standard_DS14"
    STANDARD_DS14_4_V2 = "Standard_DS14-4_v2"
    STANDARD_DS14_8_V2 = "Standard_DS14-8_v2"
    STANDARD_DS14_V2 = "Standard_DS14_v2"
    STANDARD_DS14_V2_PROMO = "Standard_DS14_v2_Promo"
    STANDARD_DS15_V2 = "Standard_DS15_v2"
    STANDARD_DS1_V2 = "Standard_DS1_v2"
    STANDARD_DS2 = "Standard_DS2"
    STANDARD_DS2_V2 = "Standard_DS2_v2"
    STANDARD_DS2_V2_PROMO = "Standard_DS2_v2_Promo"
    STANDARD_DS3 = "Standard_DS3"
    STANDARD_DS3_V2 = "Standard_DS3_v2"
    STANDARD_DS3_V2_PROMO = "Standard_DS3_v2_Promo"
    STANDARD_DS4 = "Standard_DS4"
    STANDARD_DS4_V2 = "Standard_DS4_v2"
    STANDARD_DS4_V2_PROMO = "Standard_DS4_v2_Promo"
    STANDARD_DS5_V2 = "Standard_DS5_v2"
    STANDARD_DS5_V2_PROMO = "Standard_DS5_v2_Promo"
    # E-series (memory optimized)
    STANDARD_E16_V3 = "Standard_E16_v3"
    STANDARD_E16_S_V3 = "Standard_E16s_v3"
    STANDARD_E2_V3 = "Standard_E2_v3"
    STANDARD_E2_S_V3 = "Standard_E2s_v3"
    STANDARD_E32_16_S_V3 = "Standard_E32-16s_v3"
    STANDARD_E32_8_S_V3 = "Standard_E32-8s_v3"
    STANDARD_E32_V3 = "Standard_E32_v3"
    STANDARD_E32_S_V3 = "Standard_E32s_v3"
    STANDARD_E4_V3 = "Standard_E4_v3"
    STANDARD_E4_S_V3 = "Standard_E4s_v3"
    STANDARD_E64_16_S_V3 = "Standard_E64-16s_v3"
    STANDARD_E64_32_S_V3 = "Standard_E64-32s_v3"
    STANDARD_E64_V3 = "Standard_E64_v3"
    STANDARD_E64_S_V3 = "Standard_E64s_v3"
    STANDARD_E8_V3 = "Standard_E8_v3"
    STANDARD_E8_S_V3 = "Standard_E8s_v3"
    # F-series (compute optimized)
    STANDARD_F1 = "Standard_F1"
    STANDARD_F16 = "Standard_F16"
    STANDARD_F16_S = "Standard_F16s"
    STANDARD_F16_S_V2 = "Standard_F16s_v2"
    STANDARD_F1_S = "Standard_F1s"
    STANDARD_F2 = "Standard_F2"
    STANDARD_F2_S = "Standard_F2s"
    STANDARD_F2_S_V2 = "Standard_F2s_v2"
    STANDARD_F32_S_V2 = "Standard_F32s_v2"
    STANDARD_F4 = "Standard_F4"
    STANDARD_F4_S = "Standard_F4s"
    STANDARD_F4_S_V2 = "Standard_F4s_v2"
    STANDARD_F64_S_V2 = "Standard_F64s_v2"
    STANDARD_F72_S_V2 = "Standard_F72s_v2"
    STANDARD_F8 = "Standard_F8"
    STANDARD_F8_S = "Standard_F8s"
    STANDARD_F8_S_V2 = "Standard_F8s_v2"
    # G/GS-series (memory and storage optimized)
    STANDARD_G1 = "Standard_G1"
    STANDARD_G2 = "Standard_G2"
    STANDARD_G3 = "Standard_G3"
    STANDARD_G4 = "Standard_G4"
    STANDARD_G5 = "Standard_G5"
    STANDARD_GS1 = "Standard_GS1"
    STANDARD_GS2 = "Standard_GS2"
    STANDARD_GS3 = "Standard_GS3"
    STANDARD_GS4 = "Standard_GS4"
    STANDARD_GS4_4 = "Standard_GS4-4"
    STANDARD_GS4_8 = "Standard_GS4-8"
    STANDARD_GS5 = "Standard_GS5"
    STANDARD_GS5_16 = "Standard_GS5-16"
    STANDARD_GS5_8 = "Standard_GS5-8"
    # H-series (high performance compute)
    STANDARD_H16 = "Standard_H16"
    STANDARD_H16_M = "Standard_H16m"
    STANDARD_H16_MR = "Standard_H16mr"
    STANDARD_H16_R = "Standard_H16r"
    STANDARD_H8 = "Standard_H8"
    STANDARD_H8_M = "Standard_H8m"
    # L-series (storage optimized)
    STANDARD_L16_S = "Standard_L16s"
    STANDARD_L32_S = "Standard_L32s"
    STANDARD_L4_S = "Standard_L4s"
    STANDARD_L8_S = "Standard_L8s"
    # M-series (very large memory)
    STANDARD_M128_32_MS = "Standard_M128-32ms"
    STANDARD_M128_64_MS = "Standard_M128-64ms"
    STANDARD_M128_MS = "Standard_M128ms"
    STANDARD_M128_S = "Standard_M128s"
    STANDARD_M64_16_MS = "Standard_M64-16ms"
    STANDARD_M64_32_MS = "Standard_M64-32ms"
    STANDARD_M64_MS = "Standard_M64ms"
    STANDARD_M64_S = "Standard_M64s"
    # NC-series (GPU)
    STANDARD_NC12 = "Standard_NC12"
    STANDARD_NC12_S_V2 = "Standard_NC12s_v2"
    STANDARD_NC12_S_V3 = "Standard_NC12s_v3"
    STANDARD_NC24 = "Standard_NC24"
    STANDARD_NC24_R = "Standard_NC24r"
    STANDARD_NC24_RS_V2 = "Standard_NC24rs_v2"
    STANDARD_NC24_RS_V3 = "Standard_NC24rs_v3"
    STANDARD_NC24_S_V2 = "Standard_NC24s_v2"
    STANDARD_NC24_S_V3 = "Standard_NC24s_v3"
    STANDARD_NC6 = "Standard_NC6"
    STANDARD_NC6_S_V2 = "Standard_NC6s_v2"
    STANDARD_NC6_S_V3 = "Standard_NC6s_v3"
    # ND-series (GPU)
    STANDARD_ND12_S = "Standard_ND12s"
    STANDARD_ND24_RS = "Standard_ND24rs"
    STANDARD_ND24_S = "Standard_ND24s"
    STANDARD_ND6_S = "Standard_ND6s"
    # NV-series (GPU visualization)
    STANDARD_NV12 = "Standard_NV12"
    STANDARD_NV24 = "Standard_NV24"
    STANDARD_NV6 = "Standard_NV6"
class Count(with_metaclass(CaseInsensitiveEnumMeta, int, Enum)):
    """Number of masters (VMs) in the container service cluster. Allowed values are 1, 3, and 5. The
    default value is 1.
    """
    # int-backed enum (unlike the str-backed enums in this module).
    ONE = 1
    THREE = 3
    FIVE = 5
class LoadBalancerSku(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """The load balancer sku for the managed cluster.
    """
    # Note the lowercase wire values (matching is case-insensitive anyway).
    STANDARD = "standard"
    BASIC = "basic"
class NetworkMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """Network mode used for building Kubernetes network.
    """
    # AutoRest-generated constants (case-insensitive via the metaclass).
    TRANSPARENT = "transparent"
    BRIDGE = "bridge"
class NetworkPlugin(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """Network plugin used for building Kubernetes network.
    """
    # AutoRest-generated constants (case-insensitive via the metaclass).
    AZURE = "azure"
    KUBENET = "kubenet"
class NetworkPolicy(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """Network policy used for building Kubernetes network.
    """
    # AutoRest-generated constants (case-insensitive via the metaclass).
    CALICO = "calico"
    AZURE = "azure"
class OSType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """OsType to be used to specify os type. Choose from Linux and Windows. Default to Linux.
    """
    # AutoRest-generated constants (case-insensitive via the metaclass).
    LINUX = "Linux"
    WINDOWS = "Windows"
class OutboundType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """The outbound (egress) routing method.
    """
    # AutoRest-generated constants (case-insensitive via the metaclass).
    LOAD_BALANCER = "loadBalancer"
    USER_DEFINED_ROUTING = "userDefinedRouting"
class ResourceIdentityType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """The type of identity used for the managed cluster. Type 'SystemAssigned' will use an implicitly
    created identity in master components and an auto-created user assigned identity in MC_
    resource group in agent nodes. Type 'None' will not use MSI for the managed cluster, service
    principal will be used instead.
    """
    # AutoRest-generated constants (case-insensitive via the metaclass).
    SYSTEM_ASSIGNED = "SystemAssigned"
    NONE = "None"
class ScaleSetEvictionPolicy(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """ScaleSetEvictionPolicy to be used to specify eviction policy for Spot or low priority virtual
    machine scale set. Default to Delete.
    """
    # AutoRest-generated constants (case-insensitive via the metaclass).
    DELETE = "Delete"
    DEALLOCATE = "Deallocate"
class ScaleSetPriority(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
    """ScaleSetPriority to be used to specify virtual machine scale set priority. Default to regular.
    """
    # AutoRest-generated constants (case-insensitive via the metaclass).
    SPOT = "Spot"
    LOW = "Low"
    REGULAR = "Regular"
| Azure/azure-sdk-for-python | sdk/containerservice/azure-mgmt-containerservice/azure/mgmt/containerservice/v2020_02_01/models/_container_service_client_enums.py | Python | mit | 10,599 |
'''
Copyright (C) 2014 Parrot SA
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in
the documentation and/or other materials provided with the
distribution.
* Neither the name of Parrot nor the names
of its contributors may be used to endorse or promote products
derived from this software without specific prior written
permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
SUCH DAMAGE.
'''
from ARFuncs import *
import os
import shutil
import re
# Python 2 compatibility: alias range to the lazy xrange.
# NOTE(review): `sys` is not imported in this module directly -- it is
# presumably re-exported by the `from ARFuncs import *` above; confirm.
if sys.version_info < (3,):
    range = xrange
def Common_RemoveVersionsFromSo(rootSo, soext, depLibs):
    """Flatten a versioned shared object in place.

    Replaces the ``rootSo`` symlink with a copy of its target, then uses
    ``rpl`` to overwrite embedded versioned library names with their
    unversioned equivalents (padded with ``\\0`` escapes so the overall
    length is preserved).

    :param rootSo: path of the library symlink to process
    :param soext: shared-object extension (e.g. ``so``)
    :param depLibs: basenames of dependency libraries to rewrite as well
    :returns: True on success, False on any failure
    """
    # This script needs rpl to be in the path
    if not ARExistsInPath('rpl'):
        ARLog('rpl is needed to strip versioning informations from shared object files')
        return False
    outputName = rootSo
    inputName = rootSo
    # Can't work on a lib if it does not exists
    if not os.path.exists(inputName):
        ARLog('%(inputName)s does not exists' % locals())
        return False
    # If the lib is not a symlink to the main library, assume it was already stripped of versioning symbols
    # and remove input if different from output
    if os.path.exists(outputName) and not os.path.islink(outputName):
        # NOTE(review): inputName and outputName are both bound to rootSo
        # above, so `inputName is not outputName` can never be True and this
        # delete is dead code -- confirm the intended comparison.
        if inputName is not outputName:
            ARDeleteIfExists(inputName)
        return True
    DirName = os.path.dirname(inputName)
    # Remove symlink and copy acutal lib
    ActualName = os.readlink(inputName)
    ARDeleteIfExists(outputName)
    ARDeleteIfExists(inputName)
    shutil.copy2(os.path.join(DirName, ActualName), outputName)
    # NOTE(review): `os.path.basename(inputName) and depLibs` evaluates to
    # depLibs whenever the basename is a non-empty string, so only the
    # dependency names are processed and the root library's own basename is
    # ignored -- was `[os.path.basename(inputName)] + depLibs` intended?
    for BaseName in os.path.basename(inputName) and depLibs:
        # Find other names
        OtherNames = [ f for f in os.listdir(DirName) if BaseName in f and not f.endswith('.' + soext) ]
        # Iterate over other names
        for name in OtherNames:
            # Compute new string to replace
            lenDiff = len(name) - len(BaseName)
            newString = BaseName
            for i in range(lenDiff):
                # Pad with \0 escapes so the replacement keeps its length.
                newString = newString + r'\0'
            # Call rpl
            if not ARExecute('rpl -e %(name)s "%(newString)s" %(outputName)s' % locals()):
                ARLog('Error while running rpl')
                return False
    return True
| 149393437/ARSDKBuildUtils | Utils/Python/Common_RemoveVersionsFromSo.py | Python | bsd-3-clause | 3,402 |
# Basic RBC model with full depreciation (Alternate 1)
#
# Jesus Fernandez-Villaverde
# Haverford, July 3, 2013
import numpy as np
import math
import time
from numba import autojit
# - Start Inner Loop - #
# Expected argument types/shapes (as seen under numba autojit):
# - bbeta: float
# - nGridCapital: int64
# - gridCapitalNextPeriod: int64
# - mOutput: float (17820 x 5)
# - nProductivity: int64
# - vGridCapital: float (17820, )
# - mValueFunction: float (17820 x 5)
# - mPolicyFunction: float (17820 x 5)
@autojit
def innerloop(bbeta, nGridCapital, gridCapitalNextPeriod, mOutput, nProductivity, vGridCapital, expectedValueFunction, mValueFunction, mValueFunctionNew, mPolicyFunction):
    """Maximize the Bellman equation over next-period capital for every
    point of the capital grid, for one productivity state.

    Fills column nProductivity of mValueFunctionNew and mPolicyFunction in
    place and returns them. (NOTE(review): mValueFunction is accepted but
    never read here -- candidate for removal from the signature.)
    """
    for nCapital in xrange(nGridCapital):
        valueHighSoFar = -100000.0
        capitalChoice  = vGridCapital[0]
        # Monotonicity of the policy: resume the search at the best choice
        # found for the previous capital grid point.
        for nCapitalNextPeriod in xrange(gridCapitalNextPeriod, nGridCapital):
            consumption = mOutput[nCapital,nProductivity] - vGridCapital[nCapitalNextPeriod]
            valueProvisional = (1-bbeta)*np.log(consumption)+bbeta*expectedValueFunction[nCapitalNextPeriod,nProductivity];
            if valueProvisional > valueHighSoFar:
                valueHighSoFar = valueProvisional
                capitalChoice = vGridCapital[nCapitalNextPeriod]
                gridCapitalNextPeriod = nCapitalNextPeriod
            else:
                # Objective is single-peaked along the grid here, so stop
                # at the first decrease.
                break
        mValueFunctionNew[nCapital,nProductivity] = valueHighSoFar
        mPolicyFunction[nCapital,nProductivity] = capitalChoice
    return mValueFunctionNew, mPolicyFunction
def main_func():
    """Solve the stochastic RBC model with full depreciation by value
    function iteration; return (sup diff, iterations, value function,
    policy function). NOTE: Python 2 source (print statements, xrange).
    """
    # 1. Calibration
    aalpha = 1.0/3.0     # Elasticity of output w.r.t. capital
    bbeta  = 0.95        # Discount factor
    # Productivity values
    vProductivity = np.array([0.9792, 0.9896, 1.0000, 1.0106, 1.0212],float)
    # Transition matrix
    mTransition   = np.array([[0.9727, 0.0273, 0.0000, 0.0000, 0.0000],
                     [0.0041, 0.9806, 0.0153, 0.0000, 0.0000],
                     [0.0000, 0.0082, 0.9837, 0.0082, 0.0000],
                     [0.0000, 0.0000, 0.0153, 0.9806, 0.0041],
                     [0.0000, 0.0000, 0.0000, 0.0273, 0.9727]],float)
    ## 2. Steady State
    capitalSteadyState = (aalpha*bbeta)**(1/(1-aalpha))
    outputSteadyState = capitalSteadyState**aalpha
    consumptionSteadyState = outputSteadyState-capitalSteadyState
    print "Output = ", outputSteadyState, " Capital = ", capitalSteadyState, " Consumption = ", consumptionSteadyState
    # We generate the grid of capital
    vGridCapital = np.arange(0.5*capitalSteadyState,1.5*capitalSteadyState,0.00001)
    nGridCapital = len(vGridCapital)
    nGridProductivity = len(vProductivity)
    ## 3. Required matrices and vectors
    mOutput           = np.zeros((nGridCapital,nGridProductivity),dtype=float)
    mValueFunction    = np.zeros((nGridCapital,nGridProductivity),dtype=float)
    mValueFunctionNew = np.zeros((nGridCapital,nGridProductivity),dtype=float)
    mPolicyFunction   = np.zeros((nGridCapital,nGridProductivity),dtype=float)
    expectedValueFunction = np.zeros((nGridCapital,nGridProductivity),dtype=float)
    # 4. We pre-build output for each point in the grid
    for nProductivity in range(nGridProductivity):
        mOutput[:,nProductivity] = vProductivity[nProductivity]*(vGridCapital**aalpha)
    ## 5. Main iteration
    maxDifference = 10.0
    tolerance = 0.0000001
    iteration = 0
    # Local aliases hoisted out of the loop for interpreter speed.
    # NOTE(review): `log` is never used below (innerloop uses np.log).
    log = math.log
    zeros = np.zeros
    dot = np.dot
    while(maxDifference > tolerance):
        # Expected continuation value: E[V(k', z') | z] for every (k', z).
        expectedValueFunction = dot(mValueFunction,mTransition.T)
        for nProductivity in xrange(nGridProductivity):
            # We start from previous choice (monotonicity of policy function)
            gridCapitalNextPeriod = 0
            # - Start Inner Loop - #
            mValueFunctionNew, mPolicyFunction = innerloop(bbeta, nGridCapital, gridCapitalNextPeriod, mOutput, nProductivity, vGridCapital, expectedValueFunction, mValueFunction, mValueFunctionNew, mPolicyFunction)
            # - End Inner Loop - #
        maxDifference = (abs(mValueFunctionNew-mValueFunction)).max()
        mValueFunction    = mValueFunctionNew
        mValueFunctionNew = zeros((nGridCapital,nGridProductivity),dtype=float)
        iteration += 1
        if(iteration%10 == 0 or iteration == 1):
            print " Iteration = ", iteration, ", Sup Diff = ", maxDifference
    return (maxDifference, iteration, mValueFunction, mPolicyFunction)
if __name__ == '__main__':
# - Start Timer - #
t1=time.time()
# - Call Main Function - #
maxDiff, iterate, mValueF, mPolicyFunction = main_func()
# - End Timer - #
t2 = time.time()
print " Iteration = ", iterate, ", Sup Duff = ", maxDiff
print " "
print " My Check = ", mPolicyFunction[1000-1,3-1]
print " "
print "Elapse time = is ", t2-t1 | tomooinoue/Comparison-Programming-Languages-Economics | RBC_Python_Numba.py | Python | mit | 5,046 |
"""
Filters that accept a `CommandLineInterface` as argument.
"""
from __future__ import unicode_literals
from .base import Filter
# 'IsExiting' is defined below but was missing from this list, so it was
# silently dropped from `from ... import *` -- added (kept alphabetical).
__all__ = (
    'HasArg',
    'HasCompletions',
    'HasFocus',
    'HasSearch',
    'HasSelection',
    'HasValidationError',
    'IsAborting',
    'IsDone',
    'IsExiting',
    'IsMultiline',
    'IsReturning',
    'RendererHeightIsKnown',
)
class HasFocus(Filter):
    """
    Enable when the buffer with the given name has the focus.
    """
    def __init__(self, buffer_name):
        self.buffer_name = buffer_name

    def __call__(self, cli):
        focused_buffer = cli.focus_stack.current
        return focused_buffer == self.buffer_name

    def __repr__(self):
        return 'HasFocus(%r)' % self.buffer_name
class HasSelection(Filter):
    """
    Enable while the currently focused buffer has an active selection.
    """
    def __call__(self, cli):
        focused_buffer = cli.buffers[cli.focus_stack.current]
        return bool(focused_buffer.selection_state)

    def __repr__(self):
        return 'HasSelection()'
class HasCompletions(Filter):
    """
    Enable when the current buffer carries a completion state.
    """
    def __call__(self, cli):
        complete_state = cli.current_buffer.complete_state
        return complete_state is not None

    def __repr__(self):
        return 'HasCompletions()'
class IsMultiline(Filter):
    """
    Enable while the current buffer is in multiline mode.
    """
    def __call__(self, cli):
        current_buffer = cli.current_buffer
        return current_buffer.is_multiline()

    def __repr__(self):
        return 'IsMultiline()'
class HasValidationError(Filter):
    """
    Enable when the current buffer carries a validation error.
    """
    def __call__(self, cli):
        error = cli.current_buffer.validation_error
        return error is not None

    def __repr__(self):
        return 'HasValidationError()'
class HasArg(Filter):
    """
    Enable when the input processor carries an 'arg'.
    """
    def __call__(self, cli):
        arg = cli.input_processor.arg
        return arg is not None

    def __repr__(self):
        return 'HasArg()'
class HasSearch(Filter):
    """
    Incremental search is active.
    """
    def __call__(self, cli):
        # Direct passthrough of the CLI's searching flag.
        return cli.is_searching
    def __repr__(self):
        return 'HasSearch()'
class IsReturning(Filter):
    """
    When a return value has been set.
    """
    def __call__(self, cli):
        # Direct passthrough of the CLI's returning flag.
        return cli.is_returning
    def __repr__(self):
        return 'IsReturning()'
class IsAborting(Filter):
    """
    True when aborting. (E.g. Control-C pressed.)
    """
    def __call__(self, cli):
        # Direct passthrough of the CLI's aborting flag.
        return cli.is_aborting
    def __repr__(self):
        return 'IsAborting()'
class IsExiting(Filter):
    """
    True when exiting. (E.g. Control-D pressed.)
    """
    def __call__(self, cli):
        # Direct passthrough of the CLI's exiting flag.
        return cli.is_exiting
    def __repr__(self):
        return 'IsExiting()'
class IsDone(Filter):
    """
    True when the CLI is returning, aborting or exiting.
    """
    def __call__(self, cli):
        # Direct passthrough of the CLI's combined "done" flag.
        return cli.is_done
    def __repr__(self):
        return 'IsDone()'
class RendererHeightIsKnown(Filter):
    """
    Only True when the renderer knows it's real height.
    (On VT100 terminals, we have to wait for a CPR response, before we can be
    sure of the available height between the cursor position and the bottom of
    the terminal. And usually it's nicer to wait with drawing bottom toolbars
    until we receive the height, in order to avoid flickering -- first drawing
    somewhere in the middle, and then again at the bottom.)
    """
    def __call__(self, cli):
        # Direct passthrough of the renderer's flag.
        return cli.renderer.height_is_known
    def __repr__(self):
        return 'RendererHeightIsKnown()'
| jaseg/python-prompt-toolkit | prompt_toolkit/filters/cli.py | Python | bsd-3-clause | 3,504 |
# coding=utf-8
__author__ = "Gareth Coles"
import shlex
from system.decorators.log import deprecated
from system.decorators.ratelimit import RateLimitExceededError
from system.enums import CommandState
from system.events import general as events
from system.events.manager import EventManager
from system.logging.logger import getLogger
from system.singleton import Singleton
from system.translations import Translations
_ = Translations().get()
class CommandManager(object):
"""This is the command manager. It's in charge of tracking commands that
plugins wish to offer, and providing ways for plugins to offer methods
of providing authentication and permissions.
"""
__metaclass__ = Singleton
#: Storage for all the registered commands. ::
#:
#: commands = {
#: "command": {
#: "f": func(),
#: "permission": "plugin.command",
#: "owner": object
#: }
#: }
commands = {}
#: Storage for command aliases.
#:
#: aliases = {
#: "alias": "command"
#: }
aliases = {}
@property
@deprecated("Use the singular auth_handler instead")
def auth_handlers(self):
if self.auth_handler:
return [self.auth_handler]
return []
#: Storage for all the registered auth handler.
#:
#: Auth handlers are in charge of asserting whether users are logged in
#: or not, and identifying who they are logged in as.
auth_handler = None
#: Storage for the permissions handler. There may only ever be one of
#: these.
#:
#: Permissions handlers are in charge of asserting whether a user has
#: permission for a specified action. They work together with auth
#: handlers to determine this.
#: :type: plugins.auth.permissions_handler.permissionsHandler
perm_handler = None
#: Storage for the factory manager, to avoid function call overhead.
factory_manager = None
    def __init__(self):
        # Runs once per process: the class uses a Singleton metaclass.
        self.logger = getLogger("Commands")
        self.event_manager = EventManager()
    def set_factory_manager(self, factory_manager):
        """Set the factory manager.
        This should only ever be called by the factory manager itself.
        :param factory_manager: The factory manager
        :type factory_manager: Manager
        """
        # Stored as-is; no validation is performed here.
        self.factory_manager = factory_manager
    def register_command(self, command, handler, owner, permission=None,
                         aliases=None, default=False):
        """Register a command, provided it hasn't been registered already.
        The params should go like this.
        :param command: The command to register
        :param handler: The command handler
        :param owner: The plugin or object registering the command
        :param permission: The permission needed to run the command
        :param aliases: A list of aliases for the command being registered.
        :param default: Whether the command should be run when there is no
            permissions manager installed.
        :type command: str
        :type handler: function
        :type owner: PluginObject
        :type permission: str, None
        :type aliases: list, None
        :type default: bool
        :returns: Whether the command was registered or not
        :rtype: Boolean
        """
        # Fresh list per call; avoids the mutable-default-argument trap.
        if aliases is None:
            aliases = []
        # First registration wins: never overwrite an existing command.
        if command in self.commands:
            self.logger.warn(_("Object '%s' tried to register command '%s' but"
                               " it's already been registered by object '%s'.")
                             % (owner,
                                command,
                                self.commands[command]["owner"])
                             )
            return False
        self.logger.debug(_("Registering command: %s (%s)")
                          % (command, owner))
        commandobj = {
            "f": handler,
            "permission": permission,
            "owner": owner,
            "default": default
        }
        # Note: self.commands and self.aliases are class-level dicts, so this
        # registration is shared state (the class is a Singleton anyway).
        self.commands[command] = commandobj
        for alias in aliases:
            # Aliases are also first-come-first-served; clashes are skipped,
            # not treated as a failure of the whole registration.
            if alias in self.aliases:
                self.logger.warn(_("Failed to register command alias '%s' as "
                                   "it already belongs to another command.")
                                 % alias)
                continue
            self.logger.debug(_("Registering alias: %s -> %s (%s)")
                              % (alias, command, owner))
            self.aliases[alias] = command
        return True
    def unregister_commands_for_owner(self, owner):
        """Unregister all commands that have been registered by a certain
        object.
        This method checks instances, not types!
        :param owner: The owner to check for
        :type owner: object
        """
        # .items() returns a list on Python 2, so deleting from the dict
        # while looping over this snapshot is safe here. NOTE(review): a
        # Python 3 port would need list(self.commands.items()).
        current = self.commands.items()
        for key, value in current:
            # Identity comparison on purpose: only the exact registering
            # instance may unregister its commands.
            if owner is value["owner"]:
                del self.commands[key]
                self.logger.debug(_("Unregistered command: %s") % key)
                # Drop any aliases that pointed at the removed command.
                aliases = self.aliases.items()
                for k, v in aliases:
                    if v == key:
                        del self.aliases[k]
                        self.logger.debug(_("Unregistered alias: %s") % k)
def process_input(self, in_str, caller, source, protocol,
control_char=None, our_name=None):
"""Process a set of inputs, to check if there's a command there and
action it.
This is designed to be used from a protocol.
:param in_str: The entire message to parse
:param caller: The User that sent the message
:param source: The User or Channel that the message was sent to
:param protocol: The Protocol object the User belongs to
:param control_char: The control characters (prefix)
:param our_name: The name of the bot on Protocol
:type in_str: str
:type caller: User
:type source: User, Channel
:type protocol: Protocol
:type control_char: str
:type our_name: str
:return: Tuple containing CommandState representing the state of
the command, and either None or an Exception.
:rtype: tuple(CommandState, None or Exception)
"""
if control_char is None:
if hasattr(protocol, "control_chars"):
control_char = protocol.control_chars
else:
self.logger.debug("Protocol %s doesn't have a control "
"character sequence!" % protocol.name)
return CommandState.Error, NoControlCharacterException(
"Protocol %s doesn't have a control character sequence." %
protocol.name
)
if our_name is None:
if hasattr(protocol, "nickname"):
our_name = protocol.nickname
if our_name is not None:
control_char = control_char.replace("{NAME}", our_name)
control_char = control_char.replace("{NICK}", our_name)
if len(in_str) < len(control_char):
self.logger.trace("Control character sequence is longer than the "
"input string, so this cannot be a command.")
return CommandState.NotACommand, None
if in_str.lower().startswith(control_char.lower()): # It's a command!
# Remove the command char(s) from the start
replaced = in_str[len(control_char):]
split = replaced.split(None, 1)
if not split:
return False
command = split[0]
args = ""
if len(split) > 1:
args = split[1]
printable = "<%s:%s> %s" % (caller, source, in_str)
event = events.PreCommand(protocol, command, args, caller,
source, printable, in_str)
self.event_manager.run_callback("PreCommand", event)
if event.printable:
self.logger.info("%s | %s" % (protocol.name,
event.printable)
)
result = self.run_command(event.command, event.source,
event.target, protocol, event.args)
return result
self.logger.debug("Command not found.")
return CommandState.NotACommand, None
    def run_command(self, command, caller, source, protocol, args):
        """Run a command, provided it's been registered.

        Resolves aliases, shlex-parses the argument string, enforces
        permissions (or the "default" flag when no permissions manager is
        installed) and finally invokes the registered handler.

        :param command: The command, a string
        :param caller: Who ran the command
        :param source: Where they ran the command
        :param protocol: The protocol they're part of
        :param args: A list of arguments for the command

        :type command: str
        :type caller: User
        :type source: User
        :type protocol: Protocol
        :type args: list

        :return: Tuple containing CommandState representing the state of
            the command, and either None or an Exception.
        :rtype: tuple(CommandState, None or Exception)
        """
        if command not in self.commands:
            if command not in self.aliases:  # Get alias, if it exists
                # Unknown command: let plugins handle it via the
                # UnknownCommand event before reporting it as unknown.
                event = events.UnknownCommand(self, protocol, command, args,
                                              caller, source)
                self.event_manager.run_callback("UnknownCommand", event)

                if event.cancelled:
                    return CommandState.UnknownOverridden, None
                return CommandState.Unknown, None
            command = self.aliases[command]

        # Parse args
        raw_args = args
        try:
            lex = shlex.shlex(args, posix=True)
            lex.whitespace_split = True
            lex.quotes = '"'
            lex.commenters = ""
            parsed_args = list(lex)
        except ValueError:
            # e.g. unbalanced quotes; handlers get None and can fall back
            # to raw_args.
            parsed_args = None

        try:
            if self.commands[command]["permission"]:
                if not self.perm_handler:
                    # No permissions manager installed: only commands
                    # registered with default=True may run.
                    if not self.commands[command]["default"]:
                        return CommandState.NoPermission, None
                    try:
                        self.commands[command]["f"](protocol, caller,
                                                    source, command,
                                                    raw_args,
                                                    parsed_args)
                    except RateLimitExceededError:
                        # TODO: Proper decorator
                        return CommandState.RateLimited, None
                    except Exception as e:
                        self.logger.exception("Error running command")
                        return CommandState.Error, e
                else:
                    if self.perm_handler.check(self.commands
                                               [command]["permission"],
                                               caller, source, protocol):
                        try:
                            self.commands[command]["f"](protocol, caller,
                                                        source, command,
                                                        raw_args,
                                                        parsed_args)
                        except RateLimitExceededError:
                            # TODO: Proper decorator
                            return CommandState.RateLimited, None
                        except Exception as e:
                            self.logger.exception("Error running command")
                            return CommandState.Error, e
                    else:
                        return CommandState.NoPermission, None
            else:
                # No permission required; the outer try/except below
                # handles rate limiting and handler errors for this call.
                self.commands[command]["f"](protocol, caller, source, command,
                                            raw_args, parsed_args)
        except RateLimitExceededError:
            # TODO: Proper decorator
            return CommandState.RateLimited, None
        except Exception as e:
            self.logger.exception("Error running command")
            return CommandState.Error, e
        else:
            # Reached when the handler ran without raising.
            return CommandState.Success, None
    @deprecated("Use set_auth_handler instead")
    def add_auth_handler(self, handler):
        """Deprecated alias; delegates to :meth:`set_auth_handler`."""
        return self.set_auth_handler(handler)
def set_auth_handler(self, handler):
"""Add an auth handler, provided it hasn't already been added.
:param handler: The handler to add
:type handler: object
:returns: Whether the handler was added or not
:rtype: Boolean
"""
if self.auth_handler is None:
self.auth_handler = handler
return True
return False
def set_permissions_handler(self, handler):
"""Set the permissions handler, provided one hasn't already been set.
:param handler: The handler to set
:type handler: plugins.auth.permissions_handler.permissionsHandler
:returns: Whether the handler was set or not
:rtype: Boolean
"""
if self.perm_handler:
self.logger.warn(_("Two plugins are trying to provide permissions "
"handlers. Only the first will be used!"))
return False
self.perm_handler = handler
return True
class NoControlCharacterException(Exception):
    """Raised when a protocol has no control character (command prefix)
    sequence configured, so input cannot be checked for commands.
    """
    pass
| UltrosBot/Ultros | system/commands/manager.py | Python | artistic-2.0 | 13,601 |
import unittest
from mapbox.polyline.codec import PolylineCodec
class PolylineCodecTestCase(unittest.TestCase):
    """Round-trip tests for PolylineCodec.encode / .decode."""

    # Shared fixtures: a closed multi-point path, the worked example from
    # the polyline algorithm description, and a single coordinate.
    MULTI_POINTS = [
        (40.641, -8.654),
        (40.641, -8.654),
        (40.641, -8.656),
        (40.642, -8.656),
        (40.642, -8.655),
        (40.642, -8.655),
        (40.642, -8.655),
        (40.642, -8.653),
        (40.642, -8.653),
        (40.642, -8.653),
        (40.641, -8.653),
        (40.641, -8.654),
    ]
    MULTI_ENCODED = 'gu`wFnfys@???nKgE??gE?????oK????fE??fE'

    OFFICIAL_POINTS = [
        (38.500, -120.200),
        (40.700, -120.950),
        (43.252, -126.453),
    ]
    OFFICIAL_ENCODED = '_p~iF~ps|U_ulLnnqC_mqNvxq`@'

    SINGLE_POINTS = [(40.641, -8.653)]
    SINGLE_ENCODED = 'gu`wFf`ys@'

    def setUp(self):
        self.codec = PolylineCodec()

    def test_decode_multiple_points(self):
        self.assertEqual(self.codec.decode(self.MULTI_ENCODED),
                         self.MULTI_POINTS)

    def test_decode_official_example(self):
        self.assertEqual(self.codec.decode(self.OFFICIAL_ENCODED),
                         self.OFFICIAL_POINTS)

    def test_decode_single_point(self):
        self.assertEqual(self.codec.decode(self.SINGLE_ENCODED),
                         self.SINGLE_POINTS)

    def test_encode_multiple_points(self):
        self.assertEqual(self.codec.encode(self.MULTI_POINTS),
                         self.MULTI_ENCODED)

    def test_encode_official_example(self):
        self.assertEqual(self.codec.encode(self.OFFICIAL_POINTS),
                         self.OFFICIAL_ENCODED)

    def test_encode_single_point(self):
        self.assertEqual(self.codec.encode(self.SINGLE_POINTS),
                         self.SINGLE_ENCODED)
| perrygeo/mapbox-sdk-py | tests/test_polyline_codec.py | Python | mit | 1,999 |
class Solution(object):
    def singleNumber(self, nums):
        """Return the element of ``nums`` that appears exactly once, where
        every other element appears exactly twice.

        Uses XOR: paired values cancel (x ^ x == 0, x ^ 0 == x), leaving
        only the unique value. This is O(n) time and O(1) space, and fixes
        the previous sort-and-scan version, which raised IndexError by
        reading ``nums_sorted[i + 1]`` at the end of the list (e.g. for
        input [1, 1, 2]).

        :type nums: List[int]
        :rtype: int
        """
        result = 0
        for num in nums:
            result ^= num
        return result
#a = Solution()
#print a.singleNumber([1,5,4,1,4])
| SeisSparrow/Leetcode | python/136.py | Python | mit | 698 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import
import ast
import json
import operator
import re
import warnings
import numpy as np
import six
import pyarrow as pa
from pyarrow.lib import _pandas_api
from pyarrow.compat import (builtin_pickle, # noqa
PY2, zip_longest, Sequence, u_utf8)
_logical_type_map = {}


def get_logical_type_map():
    """Build (once) and return the Arrow type id -> pandas logical type
    name mapping. The map is cached in the module-level dict.
    """
    global _logical_type_map

    if _logical_type_map:
        return _logical_type_map

    _logical_type_map.update({
        pa.lib.Type_NA: 'empty',
        pa.lib.Type_BOOL: 'bool',
        pa.lib.Type_INT8: 'int8',
        pa.lib.Type_INT16: 'int16',
        pa.lib.Type_INT32: 'int32',
        pa.lib.Type_INT64: 'int64',
        pa.lib.Type_UINT8: 'uint8',
        pa.lib.Type_UINT16: 'uint16',
        pa.lib.Type_UINT32: 'uint32',
        pa.lib.Type_UINT64: 'uint64',
        pa.lib.Type_HALF_FLOAT: 'float16',
        pa.lib.Type_FLOAT: 'float32',
        pa.lib.Type_DOUBLE: 'float64',
        pa.lib.Type_DATE32: 'date',
        pa.lib.Type_DATE64: 'date',
        pa.lib.Type_TIME32: 'time',
        pa.lib.Type_TIME64: 'time',
        pa.lib.Type_BINARY: 'bytes',
        pa.lib.Type_FIXED_SIZE_BINARY: 'bytes',
        pa.lib.Type_STRING: 'unicode',
    })
    return _logical_type_map
def get_logical_type(arrow_type):
    """Return the pandas logical type name for an Arrow DataType."""
    mapped = get_logical_type_map().get(arrow_type.id)
    if mapped is not None:
        return mapped
    # Parametric types are not in the flat id map; handle them here.
    if isinstance(arrow_type, pa.lib.DictionaryType):
        return 'categorical'
    if isinstance(arrow_type, pa.lib.ListType):
        return 'list[{}]'.format(get_logical_type(arrow_type.value_type))
    if isinstance(arrow_type, pa.lib.TimestampType):
        return 'datetimetz' if arrow_type.tz is not None else 'datetime'
    if isinstance(arrow_type, pa.lib.Decimal128Type):
        return 'decimal'
    return 'object'
# numpy scalar type -> pandas logical type name (the text entries are
# Python-2/3 aware).
_numpy_logical_type_map = {
    np.bool_: 'bool',
    np.int8: 'int8',
    np.int16: 'int16',
    np.int32: 'int32',
    np.int64: 'int64',
    np.uint8: 'uint8',
    np.uint16: 'uint16',
    np.uint32: 'uint32',
    np.uint64: 'uint64',
    np.float32: 'float32',
    np.float64: 'float64',
    'datetime64[D]': 'date',
    np.unicode_: 'string' if not PY2 else 'unicode',
    np.bytes_: 'bytes' if not PY2 else 'string',
}


def get_logical_type_from_numpy(pandas_collection):
    """Infer the pandas logical type name for a pandas Series/Index."""
    dtype_type = pandas_collection.dtype.type
    if dtype_type in _numpy_logical_type_map:
        return _numpy_logical_type_map[dtype_type]

    if hasattr(pandas_collection.dtype, 'tz'):
        return 'datetimetz'
    # See https://github.com/pandas-dev/pandas/issues/24739
    if str(pandas_collection.dtype) == 'datetime64[ns]':
        return 'datetime64[ns]'
    inferred = _pandas_api.infer_dtype(pandas_collection)
    if inferred == 'string':
        return 'bytes' if PY2 else 'unicode'
    return inferred
def get_extension_dtype_info(column):
    """Return ``(physical_dtype, metadata)`` for a pandas column.

    ``metadata`` is an extra dict for categorical and tz-aware columns and
    None for everything else.
    """
    dtype = column.dtype

    if str(dtype) == 'category':
        # Indexes expose categorical accessors directly; Series via .cat.
        cats = getattr(column, 'cat', column)
        assert cats is not None
        return str(cats.codes.dtype), {
            'num_categories': len(cats.categories),
            'ordered': cats.ordered,
        }

    if hasattr(dtype, 'tz'):
        # tz-aware datetimes are physically stored as naive ns timestamps.
        return 'datetime64[ns]', {'timezone': pa.lib.tzinfo_to_string(dtype.tz)}

    return str(dtype), None
def get_column_metadata(column, name, arrow_type, field_name):
    """Construct the metadata for a given column

    Parameters
    ----------
    column : pandas.Series or pandas.Index
    name : str
    arrow_type : pyarrow.DataType
    field_name : str
        Equivalent to `name` when `column` is a `Series`, otherwise if `column`
        is a pandas Index then `field_name` will not be the same as `name`.
        This is the name of the field in the arrow Table's schema.

    Returns
    -------
    dict
    """
    logical_type = get_logical_type(arrow_type)

    string_dtype, extra_metadata = get_extension_dtype_info(column)
    if logical_type == 'decimal':
        # Decimals are held as Python objects; record precision/scale so
        # they can be reconstructed.
        string_dtype = 'object'
        extra_metadata = {
            'precision': arrow_type.precision,
            'scale': arrow_type.scale,
        }

    if not (name is None or isinstance(name, six.string_types)):
        raise TypeError(
            'Column name must be a string. Got column {} of type {}'.format(
                name, type(name).__name__
            )
        )

    assert field_name is None or isinstance(field_name, six.string_types), \
        str(type(field_name))

    return {
        'name': name,
        'field_name': 'None' if field_name is None else field_name,
        'pandas_type': logical_type,
        'numpy_type': string_dtype,
        'metadata': extra_metadata,
    }
def construct_metadata(df, column_names, index_levels, index_descriptors,
                       preserve_index, types):
    """Returns a dictionary containing enough metadata to reconstruct a pandas
    DataFrame as an Arrow Table, including index columns.

    Parameters
    ----------
    df : pandas.DataFrame
    column_names : List[str]
        Sanitized names for the data columns, parallel to ``df.columns``
    index_levels : List[pd.Index]
    index_descriptors : List[Dict]
    preserve_index : bool
    types : List[pyarrow.DataType]
        Arrow types for the data columns followed by the serialized index
        columns

    Returns
    -------
    dict
    """
    # Dict-shaped descriptors (e.g. RangeIndex) are metadata-only and have
    # no serialized column, hence no entry at the end of `types`.
    num_serialized_index_levels = len([descr for descr in index_descriptors
                                       if not isinstance(descr, dict)])
    # Use ntypes instead of Python shorthand notation [:-len(x)] as [:-0]
    # behaves differently to what we want.
    ntypes = len(types)
    df_types = types[:ntypes - num_serialized_index_levels]
    index_types = types[ntypes - num_serialized_index_levels:]

    column_metadata = []
    for col_name, sanitized_name, arrow_type in zip(df.columns, column_names,
                                                    df_types):
        metadata = get_column_metadata(df[col_name], name=sanitized_name,
                                       arrow_type=arrow_type,
                                       field_name=sanitized_name)
        column_metadata.append(metadata)

    index_column_metadata = []
    if preserve_index is not False:
        for level, arrow_type, descriptor in zip(index_levels, index_types,
                                                 index_descriptors):
            if isinstance(descriptor, dict):
                # The index is represented in a non-serialized fashion,
                # e.g. RangeIndex
                continue
            metadata = get_column_metadata(level, name=level.name,
                                           arrow_type=arrow_type,
                                           field_name=descriptor)
            index_column_metadata.append(metadata)

        column_indexes = []

        # One descriptor per level of the (possibly Multi-) column index.
        for level in getattr(df.columns, 'levels', [df.columns]):
            metadata = _get_simple_index_descriptor(level)
            column_indexes.append(metadata)
    else:
        index_descriptors = index_column_metadata = column_indexes = []

    return {
        b'pandas': json.dumps({
            'index_columns': index_descriptors,
            'column_indexes': column_indexes,
            'columns': column_metadata + index_column_metadata,
            'creator': {
                'library': 'pyarrow',
                'version': pa.__version__
            },
            'pandas_version': _pandas_api.version
        }).encode('utf8')
    }
def _get_simple_index_descriptor(level):
    """Describe one level of a DataFrame's column index as pandas metadata."""
    inferred_type = get_logical_type_from_numpy(level)
    physical_dtype, extra_metadata = get_extension_dtype_info(level)

    if 'mixed' in inferred_type:
        warnings.warn(
            "The DataFrame has column names of mixed type. They will be "
            "converted to strings and not roundtrip correctly.",
            UserWarning, stacklevel=4)
    elif inferred_type == 'unicode':
        assert not extra_metadata
        extra_metadata = {'encoding': 'UTF-8'}

    return {
        'name': level.name,
        'field_name': level.name,
        'pandas_type': inferred_type,
        'numpy_type': physical_dtype,
        'metadata': extra_metadata,
    }
def _column_name_to_strings(name):
    """Convert a column name (or level) to either a string or a recursive
    collection of strings.

    Parameters
    ----------
    name : str or tuple

    Returns
    -------
    value : str or tuple

    Examples
    --------
    >>> name = 'foo'
    >>> _column_name_to_strings(name)
    'foo'
    >>> name = ('foo', 'bar')
    >>> _column_name_to_strings(name)
    ('foo', 'bar')
    >>> import pandas as pd
    >>> name = (1, pd.Timestamp('2017-02-01 00:00:00'))
    >>> _column_name_to_strings(name)
    ('1', '2017-02-01 00:00:00')
    """
    if isinstance(name, six.string_types):
        return name
    if isinstance(name, six.binary_type):
        # XXX: should we assume that bytes in Python 3 are UTF-8?
        return name.decode('utf8')
    if isinstance(name, tuple):
        # Recurse into MultiIndex level tuples.
        return str(tuple(_column_name_to_strings(piece) for piece in name))
    if isinstance(name, Sequence):
        raise TypeError("Unsupported type for MultiIndex level")
    if name is None:
        return None
    return str(name)
def _index_level_name(index, i, column_names):
"""Return the name of an index level or a default name if `index.name` is
None or is already a column name.
Parameters
----------
index : pandas.Index
i : int
Returns
-------
name : str
"""
if index.name is not None and index.name not in column_names:
return index.name
else:
return '__index_level_{:d}__'.format(i)
def _get_columns_to_convert(df, schema, preserve_index, columns):
    """Assemble everything needed to convert a DataFrame's columns (and,
    optionally, its index levels) to Arrow.

    Parameters
    ----------
    df : pandas.DataFrame
    schema : pyarrow.Schema or None
        Optional schema used to select columns and coerce their types
    preserve_index : bool or None
        False drops the index entirely; None allows a RangeIndex to be
        stored as metadata only; otherwise every level is serialized
    columns : list or None
        Optional explicit subset of columns to convert
    """
    columns = _resolve_columns_of_interest(df, schema, columns)

    column_names = []

    index_levels = (
        _get_index_level_values(df.index) if preserve_index is not False
        else []
    )

    columns_to_convert = []
    convert_fields = []

    if not df.columns.is_unique:
        raise ValueError(
            'Duplicate column names found: {}'.format(list(df.columns))
        )

    for name in columns:
        col = df[name]
        name = _column_name_to_strings(name)

        if _pandas_api.is_sparse(col):
            raise TypeError(
                "Sparse pandas data (column {}) not supported.".format(name))

        if schema is not None:
            field = schema.field_by_name(name)
        else:
            field = None

        columns_to_convert.append(col)
        convert_fields.append(field)
        column_names.append(name)

    index_descriptors = []
    index_column_names = []
    for i, index_level in enumerate(index_levels):
        name = _index_level_name(index_level, i, column_names)
        if (isinstance(index_level, _pandas_api.pd.RangeIndex)
                and preserve_index is None):
            # A RangeIndex is described purely by metadata, not serialized.
            descr = _get_range_index_descriptor(index_level)
        else:
            columns_to_convert.append(index_level)
            convert_fields.append(None)
            descr = name
            index_column_names.append(name)
        index_descriptors.append(descr)

    all_names = column_names + index_column_names

    # all_names : all of the columns in the resulting table including the data
    # columns and serialized index columns
    # column_names : the names of the data columns
    # index_column_names : the names of the serialized index columns
    # index_descriptors : descriptions of each index to be used for
    # reconstruction
    # index_levels : the extracted index level values
    # columns_to_convert : assembled raw data (both data columns and indexes)
    # to be converted to Arrow format
    # columns_fields : specified column to use for coercion / casting
    # during serialization, if a Schema was provided
    return (all_names, column_names, index_column_names, index_descriptors,
            index_levels, columns_to_convert, convert_fields)
def _get_range_index_descriptor(level):
    """Describe a RangeIndex as metadata rather than serializing its values."""
    # public start/stop/step attributes added in pandas 0.25.0
    def _attr(name):
        return _pandas_api.get_rangeindex_attribute(level, name)

    return {
        'kind': 'range',
        'name': level.name,
        'start': _attr('start'),
        'stop': _attr('stop'),
        'step': _attr('step'),
    }
def _get_index_level_values(index):
n = len(getattr(index, 'levels', [index]))
return [index.get_level_values(i) for i in range(n)]
def _resolve_columns_of_interest(df, schema, columns):
if schema is not None and columns is not None:
raise ValueError('Schema and columns arguments are mutually '
'exclusive, pass only one of them')
elif schema is not None:
columns = schema.names
elif columns is not None:
columns = [c for c in columns if c in df.columns]
else:
columns = df.columns
return columns
def dataframe_to_types(df, preserve_index, columns=None):
    """Infer the Arrow types and pandas metadata for *df* without fully
    converting the data, returning ``(all_names, types, metadata)``.
    """
    (all_names,
     column_names,
     _,
     index_descriptors,
     index_columns,
     columns_to_convert,
     _) = _get_columns_to_convert(df, None, preserve_index, columns)

    types = []
    # If pandas knows type, skip conversion
    for c in columns_to_convert:
        values = c.values
        if _pandas_api.is_categorical(values):
            type_ = pa.array(c, from_pandas=True).type
        else:
            values, type_ = get_datetimetz_type(values, c.dtype, None)
            type_ = pa.lib._ndarray_to_arrow_type(values, type_)
            if type_ is None:
                # Type couldn't be inferred cheaply; fall back to a full
                # conversion to discover it.
                type_ = pa.array(c, from_pandas=True).type
        types.append(type_)

    metadata = construct_metadata(df, column_names, index_columns,
                                  index_descriptors, preserve_index, types)

    return all_names, types, metadata
def dataframe_to_arrays(df, schema, preserve_index, nthreads=1, columns=None,
                        safe=True):
    """Convert a DataFrame's columns (and serialized index levels) to Arrow
    arrays, returning ``(arrays, schema)`` with pandas metadata attached.

    Parameters
    ----------
    df : pandas.DataFrame
    schema : pyarrow.Schema or None
        Optional target schema used to coerce column types/nullability
    preserve_index : bool or None
    nthreads : int or None
        1 converts serially; >1 uses a thread pool; None picks
        heuristically from the DataFrame's size
    columns : list or None
    safe : bool
        Passed to ``pa.array`` to control unsafe casts
    """
    (all_names,
     column_names,
     index_column_names,
     index_descriptors,
     index_columns,
     columns_to_convert,
     convert_fields) = _get_columns_to_convert(df, schema, preserve_index,
                                               columns)

    # NOTE(wesm): If nthreads=None, then we use a heuristic to decide whether
    # using a thread pool is worth it. Currently the heuristic is whether the
    # nrows > 100 * ncols.
    if nthreads is None:
        nrows, ncols = len(df), len(df.columns)
        if nrows > ncols * 100:
            nthreads = pa.cpu_count()
        else:
            nthreads = 1

    def convert_column(col, field):
        # Convert one pandas column, honoring the target field's type and
        # nullability when a schema was supplied.
        if field is None:
            field_nullable = True
            type_ = None
        else:
            field_nullable = field.nullable
            type_ = field.type

        try:
            result = pa.array(col, type=type_, from_pandas=True, safe=safe)
        except (pa.ArrowInvalid,
                pa.ArrowNotImplementedError,
                pa.ArrowTypeError) as e:
            # Annotate the error with the offending column for debuggability.
            e.args += ("Conversion failed for column {0!s} with type {1!s}"
                       .format(col.name, col.dtype),)
            raise e
        if not field_nullable and result.null_count > 0:
            raise ValueError("Field {} was non-nullable but pandas column "
                             "had {} null values".format(str(field),
                                                         result.null_count))

        return result

    if nthreads == 1:
        arrays = [convert_column(c, f)
                  for c, f in zip(columns_to_convert, convert_fields)]
    else:
        from concurrent import futures
        with futures.ThreadPoolExecutor(nthreads) as executor:
            arrays = list(executor.map(convert_column, columns_to_convert,
                                       convert_fields))

    types = [x.type for x in arrays]

    if schema is not None:
        # add index columns
        index_types = types[len(column_names):]
        for name, type_ in zip(index_column_names, index_types):
            name = name if name is not None else 'None'
            schema = schema.append(pa.field(name, type_))
    else:
        # No schema given: build one from the converted arrays.
        fields = []
        for name, type_ in zip(all_names, types):
            name = name if name is not None else 'None'
            fields.append(pa.field(name, type_))
        schema = pa.schema(fields)

    metadata = construct_metadata(df, column_names, index_columns,
                                  index_descriptors, preserve_index,
                                  types)
    schema = schema.add_metadata(metadata)

    return arrays, schema
def get_datetimetz_type(values, dtype, type_):
    """Resolve the Arrow timestamp type for datetime64 values.

    Non-datetime values (or an already-supplied ``type_``) pass through
    unchanged; otherwise a tz-aware or naive timestamp type is built.
    """
    if values.dtype.type != np.datetime64:
        return values, type_

    if type_ is None:
        if _pandas_api.is_datetimetz(dtype):
            # If no user type passed, construct a tz-aware timestamp type
            type_ = pa.timestamp(dtype.unit, dtype.tz)
        else:
            # Trust the NumPy dtype
            type_ = pa.from_numpy_dtype(values.dtype)

    return values, type_
# ----------------------------------------------------------------------
# Converting pandas.DataFrame to a dict containing only NumPy arrays or other
# objects friendly to pyarrow.serialize
def dataframe_to_serialized_dict(frame):
    """Break a DataFrame into a dict of raw blocks and axes suitable for
    pyarrow.serialize. Categorical, tz-aware and object blocks carry extra
    annotations so _reconstruct_block can rebuild them faithfully.
    """
    import pandas.core.internals as _int
    block_manager = frame._data

    blocks = []
    axes = [ax for ax in block_manager.axes]

    for block in block_manager.blocks:
        values = block.values
        block_data = {}

        if isinstance(block, _int.DatetimeTZBlock):
            # Store the tz separately; the values are shipped tz-naive.
            block_data['timezone'] = pa.lib.tzinfo_to_string(values.tz)
            if hasattr(values, 'values'):
                values = values.values
        elif isinstance(block, _int.CategoricalBlock):
            # Categories + ordered flag are enough to rebuild from codes.
            block_data.update(dictionary=values.categories,
                              ordered=values.ordered)
            values = values.codes
        block_data.update(
            placement=block.mgr_locs.as_array,
            block=values
        )

        # If we are dealing with an object array, pickle it instead. Note that
        # we do not use isinstance here because _int.CategoricalBlock is a
        # subclass of _int.ObjectBlock.
        if type(block) == _int.ObjectBlock:
            block_data['object'] = None
            block_data['block'] = builtin_pickle.dumps(
                values, protocol=builtin_pickle.HIGHEST_PROTOCOL)

        blocks.append(block_data)

    return {
        'blocks': blocks,
        'axes': axes
    }
def serialized_dict_to_dataframe(data):
    """Rebuild a DataFrame from dataframe_to_serialized_dict's output."""
    import pandas.core.internals as _int

    blocks = [_reconstruct_block(raw) for raw in data['blocks']]
    manager = _int.BlockManager(blocks, data['axes'])
    return _pandas_api.data_frame(manager)
def _reconstruct_block(item):
    """Rebuild one pandas internal Block from its serialized dict (produced
    by dataframe_to_serialized_dict or pa.lib.table_to_blocks).
    """
    import pandas.core.internals as _int
    # Construct the individual blocks converting dictionary types to pandas
    # categorical types and Timestamps-with-timezones types to the proper
    # pandas Blocks
    block_arr = item['block']
    placement = item['placement']
    if 'dictionary' in item:
        cat = _pandas_api.categorical_type.from_codes(
            block_arr, categories=item['dictionary'],
            ordered=item['ordered'])
        block = _int.make_block(cat, placement=placement,
                                klass=_int.CategoricalBlock)
    elif 'timezone' in item:
        dtype = make_datetimetz(item['timezone'])
        block = _int.make_block(block_arr, placement=placement,
                                klass=_int.DatetimeTZBlock,
                                dtype=dtype)
    elif 'object' in item:
        # Object blocks were pickled on serialization; unpickle here.
        block = _int.make_block(builtin_pickle.loads(block_arr),
                                placement=placement, klass=_int.ObjectBlock)
    else:
        block = _int.make_block(block_arr, placement=placement)

    return block
def make_datetimetz(tz):
    """Build a nanosecond-resolution tz-aware pandas dtype from a serialized
    timezone string.
    """
    return _pandas_api.datetimetz_type('ns', tz=pa.lib.string_to_tzinfo(tz))
# ----------------------------------------------------------------------
# Converting pyarrow.Table efficiently to pandas.DataFrame
def table_to_blockmanager(options, table, categories=None,
                          ignore_metadata=False):
    """Convert an Arrow Table to a pandas BlockManager, reconstructing the
    row index and column index from the table's pandas metadata when it is
    present and not ignored.
    """
    from pandas.core.internals import BlockManager

    all_columns = []
    column_indexes = []
    pandas_metadata = table.schema.pandas_metadata

    if not ignore_metadata and pandas_metadata is not None:
        all_columns = pandas_metadata['columns']
        column_indexes = pandas_metadata.get('column_indexes', [])
        index_descriptors = pandas_metadata['index_columns']
        table = _add_any_metadata(table, pandas_metadata)
        # Pops serialized index columns off the table and rebuilds the index.
        table, index = _reconstruct_index(table, index_descriptors,
                                          all_columns)
    else:
        # No usable metadata: fall back to a positional RangeIndex.
        index = _pandas_api.pd.RangeIndex(table.num_rows)

    _check_data_column_metadata_consistency(all_columns)
    blocks = _table_to_blocks(options, table, pa.default_memory_pool(),
                              categories)
    columns = _deserialize_column_index(table, all_columns, column_indexes)

    axes = [columns, index]
    return BlockManager(blocks, axes)
def _check_data_column_metadata_consistency(all_columns):
# It can never be the case in a released version of pyarrow that
# c['name'] is None *and* 'field_name' is not a key in the column metadata,
# because the change to allow c['name'] to be None and the change to add
# 'field_name' are in the same release (0.8.0)
assert all(
(c['name'] is None and 'field_name' in c) or c['name'] is not None
for c in all_columns
)
def _deserialize_column_index(block_table, all_columns, column_indexes):
    """Rebuild the DataFrame's column Index (possibly a MultiIndex) from the
    table's column names plus serialized pandas metadata.
    """
    column_strings = [u_utf8(x) for x in block_table.column_names]
    if all_columns:
        # Map sanitized Arrow field names back to the original pandas names.
        columns_name_dict = {
            c.get('field_name', _column_name_to_strings(c['name'])): c['name']
            for c in all_columns
        }
        columns_values = [
            columns_name_dict.get(name, name) for name in column_strings
        ]
    else:
        columns_values = column_strings

    # If we're passed multiple column indexes then evaluate with
    # ast.literal_eval, since the column index values show up as a list of
    # tuples
    to_pair = ast.literal_eval if len(column_indexes) > 1 else lambda x: (x,)

    # Create the column index

    # Construct the base index
    if not columns_values:
        columns = _pandas_api.pd.Index(columns_values)
    else:
        columns = _pandas_api.pd.MultiIndex.from_tuples(
            list(map(to_pair, columns_values)),
            names=[col_index['name'] for col_index in column_indexes] or None,
        )

    # if we're reconstructing the index
    if len(column_indexes) > 0:
        columns = _reconstruct_columns_from_metadata(columns, column_indexes)

    # ARROW-1751: flatten a single level column MultiIndex for pandas 0.21.0
    columns = _flatten_single_level_multiindex(columns)

    return columns
def _reconstruct_index(table, index_descriptors, all_columns):
    """Pop serialized index columns out of *table* and rebuild the row index.

    Returns ``(table_without_index_columns, index)``.
    """
    # 0. 'field_name' is the name of the column in the arrow Table
    # 1. 'name' is the user-facing name of the column, that is, it came from
    #    pandas
    # 2. 'field_name' and 'name' differ for index columns
    # 3. We fall back on c['name'] for backwards compatibility
    field_name_to_metadata = {
        c.get('field_name', c['name']): c
        for c in all_columns
    }

    # Build up a list of index columns and names while removing those columns
    # from the original table
    index_arrays = []
    index_names = []
    result_table = table
    for descr in index_descriptors:
        if isinstance(descr, six.string_types):
            # String descriptor: the level was serialized as a column.
            result_table, index_level, index_name = _extract_index_level(
                table, result_table, descr, field_name_to_metadata)
            if index_level is None:
                # ARROW-1883: the serialized index column was not found
                continue
        elif descr['kind'] == 'range':
            # Dict descriptor: the level is metadata-only (RangeIndex).
            index_name = descr['name']
            index_level = _pandas_api.pd.RangeIndex(descr['start'],
                                                    descr['stop'],
                                                    step=descr['step'],
                                                    name=index_name)
            if len(index_level) != len(table):
                # Possibly the result of munged metadata
                continue
        else:
            raise ValueError("Unrecognized index kind: {0}"
                             .format(descr['kind']))
        index_arrays.append(index_level)
        index_names.append(index_name)

    pd = _pandas_api.pd

    # Reconstruct the row index
    if len(index_arrays) > 1:
        index = pd.MultiIndex.from_arrays(index_arrays, names=index_names)
    elif len(index_arrays) == 1:
        index = index_arrays[0]
        if not isinstance(index, pd.Index):
            # Box anything that wasn't boxed above
            index = pd.Index(index, name=index_names[0])
    else:
        index = pd.RangeIndex(table.num_rows)

    return result_table, index
def _extract_index_level(table, result_table, field_name,
                         field_name_to_metadata):
    """Pull one serialized index column out of *result_table*.

    Returns ``(result_table_without_column, index_values, index_name)``;
    the middle element is None when the column was removed by the user.
    """
    logical_name = field_name_to_metadata[field_name]['name']
    index_name = _backwards_compatible_index_name(field_name, logical_name)
    i = table.schema.get_field_index(field_name)

    if i == -1:
        # The serialized index column was removed by the user
        return table, None, None

    pd = _pandas_api.pd

    col = table.column(i)
    values = col.to_pandas()

    if hasattr(values, 'flags') and not values.flags.writeable:
        # ARROW-1054: in pandas 0.19.2, factorize will reject
        # non-writeable arrays when calling MultiIndex.from_arrays
        values = values.copy()

    if isinstance(col.type, pa.lib.TimestampType):
        index_level = (pd.Series(values).dt.tz_localize('utc')
                       .dt.tz_convert(col.type.tz))
    else:
        index_level = pd.Series(values, dtype=values.dtype)
    # Look the index up again: earlier removals may have shifted positions.
    result_table = result_table.remove_column(
        result_table.schema.get_field_index(field_name)
    )
    return result_table, index_level, index_name
def _backwards_compatible_index_name(raw_name, logical_name):
"""Compute the name of an index column that is compatible with older
versions of :mod:`pyarrow`.
Parameters
----------
raw_name : str
logical_name : str
Returns
-------
result : str
Notes
-----
* Part of :func:`~pyarrow.pandas_compat.table_to_blockmanager`
"""
# Part of table_to_blockmanager
if raw_name == logical_name and _is_generated_index_name(raw_name):
return None
else:
return logical_name
def _is_generated_index_name(name):
pattern = r'^__index_level_\d+__$'
return re.match(pattern, name) is not None
# pandas logical type name -> numpy dtype (or dtype-convertible value)
_pandas_logical_type_map = {
    'date': 'datetime64[D]',
    'unicode': np.unicode_,
    'bytes': np.bytes_,
    'string': np.str_,
    'empty': np.object_,
}


def _pandas_type_to_numpy_type(pandas_type):
    """Get the numpy dtype that corresponds to a pandas type.

    Parameters
    ----------
    pandas_type : str
        The result of a call to pandas.lib.infer_dtype.

    Returns
    -------
    dtype : np.dtype
        The dtype that corresponds to `pandas_type`.
    """
    direct = _pandas_logical_type_map.get(pandas_type)
    if direct is not None:
        return direct
    if 'mixed' in pandas_type:
        # catching 'mixed', 'mixed-integer' and 'mixed-integer-float'
        return np.object_
    return np.dtype(pandas_type)
def _get_multiindex_codes(mi):
    """Return the codes/labels of a MultiIndex, or None for other indexes."""
    if not isinstance(mi, _pandas_api.pd.MultiIndex):
        return None
    # compat for pandas < 0.24 (MI labels renamed to codes).
    return mi.codes if hasattr(mi, 'codes') else mi.labels
def _reconstruct_columns_from_metadata(columns, column_indexes):
    """Construct a pandas MultiIndex from `columns` and column index metadata
    in `column_indexes`.

    Parameters
    ----------
    columns : List[pd.Index]
        The columns coming from a pyarrow.Table
    column_indexes : List[Dict[str, str]]
        The column index metadata deserialized from the JSON schema metadata
        in a :class:`~pyarrow.Table`.

    Returns
    -------
    result : MultiIndex
        The index reconstructed using `column_indexes` metadata with levels of
        the correct type.

    Notes
    -----
    * Part of :func:`~pyarrow.pandas_compat.table_to_blockmanager`
    """
    pd = _pandas_api.pd
    # Get levels and labels, and provide sane defaults if the index has a
    # single level to avoid if/else spaghetti.
    levels = getattr(columns, 'levels', None) or [columns]

    labels = _get_multiindex_codes(columns) or [
        pd.RangeIndex(len(level)) for level in levels
    ]

    # Convert each level to the dtype provided in the metadata
    levels_dtypes = [
        (level, col_index.get('pandas_type', str(level.dtype)))
        for level, col_index in zip_longest(
            levels, column_indexes, fillvalue={}
        )
    ]

    new_levels = []
    encoder = operator.methodcaller('encode', 'UTF-8')

    for level, pandas_dtype in levels_dtypes:
        dtype = _pandas_type_to_numpy_type(pandas_dtype)

        # Since our metadata is UTF-8 encoded, Python turns things that were
        # bytes into unicode strings when json.loads-ing them. We need to
        # convert them back to bytes to preserve metadata.
        if dtype == np.bytes_:
            level = level.map(encoder)
        elif level.dtype != dtype:
            level = level.astype(dtype)

        new_levels.append(level)

    return pd.MultiIndex(new_levels, labels, names=columns.names)
def _table_to_blocks(options, block_table, memory_pool, categories):
    """Convert an Arrow table into pandas internals Block objects.

    Part of table_to_blockmanager: the C++ layer produces the raw block
    descriptions, which are then rebuilt into pandas Blocks via
    _reconstruct_block (defined above).
    """
    raw_blocks = pa.lib.table_to_blocks(
        options, block_table, memory_pool, categories)
    return [_reconstruct_block(raw) for raw in raw_blocks]
def _flatten_single_level_multiindex(index):
    """Collapse a one-level MultiIndex into a plain pd.Index.

    Any other index (including MultiIndexes with more than one level) is
    returned unchanged.  A code of -1 (missing label) maps to None.

    Raises
    ------
    ValueError
        If the one-level index contains duplicate entries.
    """
    pd = _pandas_api.pd
    if not (isinstance(index, pd.MultiIndex) and index.nlevels == 1):
        return index
    # Cheaply check that we do not somehow have duplicate column names
    if not index.is_unique:
        raise ValueError('Found non-unique column index')
    levels, = index.levels
    labels, = _get_multiindex_codes(index)
    values = [None if code == -1 else levels[code] for code in labels]
    return pd.Index(values, name=index.names[0])
def _add_any_metadata(table, pandas_metadata):
    """Re-apply pandas metadata (currently: timezones) onto `table` columns.

    Columns whose stored pandas_type is 'datetimetz' are rebuilt as
    timezone-aware timestamp arrays; all other columns pass through
    untouched.  Returns the original table when nothing needed changing.
    """
    modified_columns = {}
    modified_fields = {}
    schema = table.schema
    index_columns = pandas_metadata['index_columns']
    n_index_levels = len(index_columns)
    n_columns = len(pandas_metadata['columns']) - n_index_levels
    # Add time zones
    for i, col_meta in enumerate(pandas_metadata['columns']):
        raw_name = col_meta.get('field_name')
        if not raw_name:
            # deal with metadata written with arrow < 0.8
            raw_name = col_meta['name']
        if i >= n_columns:
            # index columns
            raw_name = index_columns[i - n_columns]
        if raw_name is None:
            raw_name = 'None'
        idx = schema.get_field_index(raw_name)
        if idx != -1:
            if col_meta['pandas_type'] == 'datetimetz':
                # Round-trip through pandas so the tz-aware type can be
                # attached to the values.
                col = table[idx]
                converted = col.to_pandas()
                tz = col_meta['metadata']['timezone']
                tz_aware_type = pa.timestamp('ns', tz=tz)
                with_metadata = pa.Array.from_pandas(converted,
                                                     type=tz_aware_type)
                modified_fields[idx] = pa.field(schema[idx].name,
                                                tz_aware_type)
                modified_columns[idx] = with_metadata
    if len(modified_columns) > 0:
        # Rebuild the table, swapping in the tz-aware columns by position.
        columns = []
        fields = []
        for i in range(len(table.schema)):
            if i in modified_columns:
                columns.append(modified_columns[i])
                fields.append(modified_fields[i])
            else:
                columns.append(table[i])
                fields.append(table.schema[i])
        return pa.Table.from_arrays(columns, schema=pa.schema(fields))
    else:
        return table
| majetideepak/arrow | python/pyarrow/pandas_compat.py | Python | apache-2.0 | 33,582 |
# Copyright (c) 2011-2012 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Module that handles interactions with a Validation Pool.
The validation pool is the set of commits that are ready to be validated i.e.
ready for the commit queue to try.
"""
import contextlib
import cPickle
import logging
import sys
import time
import urllib
from xml.dom import minidom
from chromite.buildbot import cbuildbot_results as results_lib
from chromite.buildbot import constants
from chromite.buildbot import lkgm_manager
from chromite.buildbot import manifest_version
from chromite.buildbot import portage_utilities
from chromite.lib import cros_build_lib
from chromite.lib import gerrit
from chromite.lib import git
from chromite.lib import gs
from chromite.lib import patch as cros_patch
# Third-party libraries bundled with chromite need to be listed after the
# first chromite import.
import digraph
# We import mox so that w/in ApplyPoolIntoRepo, if a mox exception is
# thrown, we don't cover it up.
try:
import mox
except ImportError:
mox = None
class TreeIsClosedException(Exception):
  """Raised when a submit is attempted while the tree is closed."""

  # Canned message shown when a commit is refused due to tree status.
  _MESSAGE = 'TREE IS CLOSED. PLEASE SET TO OPEN OR THROTTLED TO COMMIT'

  def __init__(self):
    Exception.__init__(self, self._MESSAGE)
class FailedToSubmitAllChangesException(Exception):
  """Raised when one or more changes could not be verified as submitted."""

  def __init__(self, changes):
    change_list = ' '.join(str(c) for c in changes)
    message = ('FAILED TO SUBMIT ALL CHANGES: Could not verify that changes '
               '%s were submitted' % change_list)
    super(FailedToSubmitAllChangesException, self).__init__(message)
class InternalCQError(cros_patch.PatchException):
  """Raised when the CQ itself hits an unexpected/unhandled error."""

  def __init__(self, patch, message):
    super(InternalCQError, self).__init__(patch, message=message)

  def __str__(self):
    return "Patch %s failed to apply due to a CQ issue: %s" % (self.patch,
                                                               self.message)
class NoMatchingChangeFoundException(Exception):
  """Raised if we try to apply a non-existent change.

  NOTE(review): not raised anywhere in this file's visible code; presumably
  thrown by callers when a requested change lookup comes back empty.
  """
class DependencyNotReadyForCommit(cros_patch.PatchException):
  """Exception thrown when a required dep isn't satisfied.

  Raised by _LookupUncommittedChanges when a dependency is neither committed
  nor in the allowed (limit_to) set of in-flight changes.
  """
  def __str__(self):
    # self.patch is set by the cros_patch.PatchException base class.
    return "%s isn't committed, or marked as Commit-Ready." % (self.patch,)
def _RunCommand(cmd, dryrun):
  """Run |cmd| unless dryrun is set; failures are logged and swallowed.

  Args:
    cmd: The command to execute, as an argv-style sequence.
    dryrun: If True, only log what would have been run.
  """
  if dryrun:
    logging.info('Would have run: %s', ' '.join(cmd))
  else:
    try:
      cros_build_lib.RunCommand(cmd)
    except cros_build_lib.RunCommandError:
      # Best effort: record the failure but keep going.
      cros_build_lib.Error('Command failed', exc_info=True)
class GerritHelperNotAvailable(gerrit.GerritException):
  """Exception thrown when a specific helper is requested but unavailable."""
  def __init__(self, remote=constants.EXTERNAL_REMOTE):
    gerrit.GerritException.__init__(self)
    # Stringify the pool so that serialization doesn't try serializing
    # the actual HelperPool.
    self.remote = remote
    # Setting .args explicitly keeps the exception picklable (the pickle
    # protocol re-invokes __init__ with these args).
    self.args = (remote,)
  def __str__(self):
    return (
        "Needed a remote=%s gerrit_helper, but one isn't allowed by this "
        "HelperPool instance.") % (self.remote,)
class HelperPool(object):
  """Pool of allowed GerritHelpers to be used by CQ/PatchSeries."""

  def __init__(self, cros_internal=None, cros=None):
    """Initialize this instance with the given handlers.

    Most likely you want the classmethod SimpleCreate which takes boolean
    options.

    If a given handler is None, then it's disabled; else the passed in
    object is used.
    """
    # Maps remote name -> helper instance (None when that remote is
    # disallowed for this pool).
    self.pool = {
        constants.EXTERNAL_REMOTE: cros,
        constants.INTERNAL_REMOTE: cros_internal,
    }

  @classmethod
  def SimpleCreate(cls, cros_internal=True, cros=True):
    """Classmethod helper for creating a HelperPool from boolean options.

    Args:
      cros_internal: If True, allow access to a GerritHelper for internal.
      cros: If True, allow access to a GerritHelper for external.

    Returns:
      An appropriately configured HelperPool instance.
    """
    external = (gerrit.GerritHelper.FromRemote(constants.EXTERNAL_REMOTE)
                if cros else None)
    internal = (gerrit.GerritHelper.FromRemote(constants.INTERNAL_REMOTE)
                if cros_internal else None)
    return cls(cros_internal=internal, cros=external)

  def ForChange(self, change):
    """Return the helper to use for a particular change.

    If no helper is configured, an Exception is raised.
    """
    return self.GetHelper(change.remote)

  def GetHelper(self, remote):
    """Return the helper to use for a given remote.

    If no helper is configured, an Exception is raised.
    """
    helper = self.pool.get(remote)
    if not helper:
      raise GerritHelperNotAvailable(remote)
    return helper

  def __iter__(self):
    # Yield only the helpers that are actually enabled.
    return iter(h for h in self.pool.itervalues() if h)
def _PatchWrapException(functor):
  """Decorator to intercept patch exceptions and wrap them.
  Specifically, for known/handled Exceptions, it intercepts and
  converts it into a DependencyError- via that, preserving the
  cause, while casting it into an easier to use form (one that can
  be chained in addition)."""
  def f(self, parent, *args, **kwds):
    try:
      return functor(self, parent, *args, **kwds)
    except gerrit.GerritException, e:
      if isinstance(e, gerrit.QueryNotSpecific):
        e = ("%s\nSuggest you use gerrit numbers instead (prefixed with a * "
             "if it's an internal change)." % e)
      new_exc = cros_patch.PatchException(parent, e)
      # Python 2 three-arg raise: re-raise the wrapper while preserving the
      # original traceback.
      raise new_exc.__class__, new_exc, sys.exc_info()[2]
    except cros_patch.PatchException, e:
      # If the failure already blames this same patch, let it propagate
      # untouched rather than double-wrapping it.
      if e.patch.id == parent.id:
        raise
      new_exc = cros_patch.DependencyError(parent, e)
      raise new_exc.__class__, new_exc, sys.exc_info()[2]
  # Make the wrapper masquerade as the wrapped method for introspection.
  f.__name__ = functor.__name__
  return f
class PatchSeries(object):
  """Class representing a set of patches applied to a single git repository."""
  def __init__(self, path, helper_pool=None, force_content_merging=False,
               forced_manifest=None, deps_filter_fn=None):
    # Manifest forced via the constructor; when None, Apply() computes one
    # per invocation (see _ApplyDecorator below).
    self.manifest = forced_manifest
    self._content_merging_projects = {}
    self.force_content_merging = force_content_merging
    if helper_pool is None:
      helper_pool = HelperPool.SimpleCreate(cros_internal=True, cros=True)
    self._helper_pool = helper_pool
    self._path = path
    if deps_filter_fn is None:
      deps_filter_fn = lambda x:x
    self.deps_filter_fn = deps_filter_fn
    self.applied = []
    self.failed = []
    self.failed_tot = {}
    # A mapping of ChangeId to exceptions if the patch failed against
    # ToT. Primarily used to keep the resolution/applying from going
    # down known bad paths.
    self._committed_cache = cros_patch.PatchCache()
    self._lookup_cache = cros_patch.PatchCache()
    self._change_deps_cache = {}
  def GetTrackingBranchForChange(self, change, for_gerrit=False):
    """Identify the branch to work against for this change.
    Args:
      for_gerrit: If True, give the shortened form; no refs/heads, no
        refs/remotes.
    """
    ref = self.manifest.GetProjectsLocalRevision(change.project)
    return git.StripRefs(ref) if for_gerrit else ref
  def GetGitRepoForChange(self, change):
    """Return the absolute path of the git repo for |change|'s project."""
    return self.manifest.GetProjectPath(change.project, True)
  def _IsContentMerging(self, change):
    """Discern if the given change has Content Merging enabled in gerrit.
    Note if the instance was created w/ force_content_merging=True,
    then this function will lie and always return True to avoid the
    admin-level access required of <=gerrit-2.1.
    Raises:
      AssertionError: If the gerrit helper requested is disallowed.
      GerritException: If there is a failure in querying gerrit.
    Returns:
      True if the change's project has content merging enabled, False if not.
    """
    if self.force_content_merging:
      return True
    return self.manifest.ProjectIsContentMerging(change.project)
  def ApplyChange(self, change, dryrun=False):
    """Apply |change| against the currently configured manifest."""
    # If we're in dryrun mode, then 3way is always allowed.
    # Otherwise, allow 3way only if the gerrit project allows it.
    trivial = False if dryrun else not self._IsContentMerging(change)
    return change.ApplyAgainstManifest(self.manifest, trivial=trivial)
  def _GetGerritPatch(self, change, query, parent_lookup=False):
    """Query the configured helpers looking for a given change.
    Args:
      change: A cros_patch.GitRepoPatch derivative that we're querying
        on behalf of.
      query: The ChangeId we're searching for.
      parent_lookup: If True, this means we're tracing out the git parents
        of the given change- as such limit the query purely to that
        project/branch.
    """
    # A leading '*' marks an internal-gerrit dependency.
    remote = constants.EXTERNAL_REMOTE
    if query.startswith('*'):
      remote = constants.INTERNAL_REMOTE
    helper = self._helper_pool.GetHelper(remote)
    query = query_text = cros_patch.FormatPatchDep(query, force_external=True)
    if parent_lookup:
      query_text = "project:%s AND branch:%s AND %s" % (
          change.project,
          self.GetTrackingBranchForChange(change, True),
          query_text)
    change = helper.QuerySingleRecord(query_text, must_match=True)
    # If the query was a gerrit number based query, check the projects/change-id
    # to see if we already have it locally, but couldn't map it since we didn't
    # know the gerrit number at the time of the initial injection.
    existing = self._lookup_cache[
        cros_patch.FormatChangeId(
            change.change_id, force_internal=change.internal, strict=False)]
    if query.isdigit() and existing is not None:
      if ((existing.project == change.project
           and existing.tracking_branch == change.tracking_branch)
          or not parent_lookup):
        # Now that the gerrit number is known, teach the cache the extra key
        # and prefer the already-known local instance.
        key = cros_patch.FormatGerritNumber(
            str(change.gerrit_number), force_internal=change.internal,
            strict=False)
        self._lookup_cache.InjectCustomKey(key, existing)
        return existing
    self.InjectLookupCache([change])
    if change.IsAlreadyMerged():
      self.InjectCommittedPatches([change])
    return change
  @_PatchWrapException
  def _LookupUncommittedChanges(self, parent, deps, parent_lookup=False,
                                limit_to=None):
    """Given a set of deps (changes), return unsatisfied dependencies.
    Args:
      parent: The change we're resolving for.
      deps: A sequence of dependencies for the parent that we need to identify
        as either merged, or needing resolving.
      parent_lookup: If True, this means we're trying to trace out the git
        parentage of a change, thus limit the lookup to the parents project
        and branch.
      limit_to: If non-None, then this must be a mapping (preferably a
        cros_patch.PatchCache for translation reasons) of which non-committed
        changes are allowed to be used for a transaction.
    Returns:
      A sequence of cros_patch.GitRepoPatch instances (or derivatives) that
      need to be resolved for this change to be mergable.
    """
    unsatisfied = []
    for dep in deps:
      if dep in self._committed_cache:
        continue
      dep_change = self._lookup_cache[dep]
      if parent_lookup and dep_change is not None:
        if not (parent.project == dep_change.project and
                self.GetTrackingBranchForChange(parent, True) ==
                self.GetTrackingBranchForChange(dep_change, True)):
          # TODO(build): In this scenario, the cache will get updated
          # with the new CL pulled from gerrit; this is questionable,
          # but there isn't a good answer here. Rare enough it's being
          # ignored either way.
          dep_change = None
      if dep_change is None:
        dep_change = self._GetGerritPatch(parent, dep,
                                          parent_lookup=parent_lookup)
      # getattr default handles objects that lack IsAlreadyMerged entirely.
      if getattr(dep_change, 'IsAlreadyMerged', lambda: False)():
        continue
      elif limit_to is not None and dep_change not in limit_to:
        raise DependencyNotReadyForCommit(dep_change)
      unsatisfied.append(dep_change)
    # Perform last minute custom filtering.
    return [x for x in unsatisfied if self.deps_filter_fn(x)]
  def CreateTransaction(self, change, limit_to=None):
    """Given a change, resolve it into a transaction.
    In this case, a transaction is defined as a group of commits that
    must land for the given change to be merged- specifically its
    parent deps, and its CQ-DEPEND.
    Args:
      change: A cros_patch.GitRepoPatch instance to generate a transaction
        for.
      limit_to: If non-None, limit the allowed uncommitted patches to
        what's in that container/mapping.
    Returns:
      A sequence of the necessary cros_patch.GitRepoPatch objects for
      this transaction.
    """
    plan, stack = [], cros_patch.PatchCache()
    self._ResolveChange(change, plan, stack, limit_to=limit_to)
    return plan
  def CreateTransactions(self, changes, limit_to=None):
    """Create a list of transactions from a list of changes.
    Args:
      changes: A list of cros_patch.GitRepoPatch instances to generate
        transactions for.
      limit_to: See CreateTransaction docs.
    Yields:
      (change, plan, exception) tuples for each given change.  On success
      |plan| holds the necessary GitRepoPatch objects and |exception| is
      None; on failure |plan| is empty and |exception| carries the
      cros_patch.PatchException explaining why resolution failed.
    """
    for change in changes:
      try:
        plan = self.CreateTransaction(change, limit_to=limit_to)
      except cros_patch.PatchException as exc:
        yield (change, (), exc)
      else:
        yield (change, plan, None)
  def _ResolveChange(self, change, plan, stack, limit_to=None):
    """Helper for resolving a node and its dependencies into the plan.
    No external code should call this; all internal code should invoke this
    rather than ResolveTransaction since this maintains the necessary stack
    tracking that is used to detect and handle cyclic dependencies.
    Raises:
      If the change couldn't be resolved, a DependencyError or
      cros_patch.PatchException can be raised.
    """
    if change in self._committed_cache:
      return
    if change in stack:
      # If the requested change is already in the stack, then immediately
      # return- it's a cycle (requires CQ-DEPEND for it to occur); if
      # the earlier resolution attempt succeeds, than implicitly this
      # attempt will.
      # TODO(ferringb,sosa): this check actually doesn't handle gerrit
      # change numbers; support for that is broken currently anyways,
      # but this is one of the spots that needs fixing for that support.
      return
    stack.Inject(change)
    try:
      self._PerformResolveChange(change, plan, stack, limit_to=limit_to)
    finally:
      stack.Remove(change)
  @_PatchWrapException
  def _GetDepsForChange(self, change):
    """Look up the gerrit/paladin deps for a change
    Raises:
      DependencyError: Thrown if there is an issue w/ the commits
        metadata (either couldn't find the parent, or bad CQ-DEPEND).
    Returns:
      A tuple of the change's GerritDependencies(), and PaladinDependencies()
    """
    # TODO(sosa, ferringb): Modify helper logic to allows deps to be specified
    # across different gerrit instances.
    val = self._change_deps_cache.get(change)
    if val is None:
      git_repo = self.GetGitRepoForChange(change)
      val = self._change_deps_cache[change] = (
          change.GerritDependencies(
              git_repo, self.GetTrackingBranchForChange(change)),
          change.PaladinDependencies(git_repo))
    return val
  def _PerformResolveChange(self, change, plan, stack, limit_to=None):
    """Resolve and ultimately add a change into the plan."""
    # Pull all deps up front, then process them. Simplifies flow, and
    # localizes the error closer to the cause.
    gdeps, pdeps = self._GetDepsForChange(change)
    gdeps = self._LookupUncommittedChanges(change, gdeps, limit_to=limit_to,
                                           parent_lookup=True)
    pdeps = self._LookupUncommittedChanges(change, pdeps, limit_to=limit_to)
    def _ProcessDeps(deps):
      for dep in deps:
        if dep in plan:
          continue
        try:
          self._ResolveChange(dep, plan, stack, limit_to=limit_to)
        except cros_patch.PatchException, e:
          # Python 2 three-arg raise: wrap the failure in a DependencyError
          # blaming |change| while keeping the original traceback.
          raise cros_patch.DependencyError, \
                cros_patch.DependencyError(change, e), \
                sys.exc_info()[2]
    # Git parents must land before the change itself; CQ-DEPEND deps land
    # after it (matching the order this appends to |plan|).
    _ProcessDeps(gdeps)
    plan.append(change)
    _ProcessDeps(pdeps)
  def InjectCommittedPatches(self, changes):
    """Record that the given patches are already committed.
    This is primarily useful for external code to notify this object
    that changes were applied to the tree outside its purview- specifically
    useful for dependency resolution."""
    self._committed_cache.Inject(*changes)
  def InjectLookupCache(self, changes):
    """Inject into the internal lookup cache the given changes, using them
    (rather than asking gerrit for them) as needed for dependencies.
    """
    self._lookup_cache.Inject(*changes)
  def FetchChanges(self, changes):
    """Fetch the git objects for each change into its local repository."""
    for change in changes:
      change.Fetch(self.GetGitRepoForChange(change))
  def _ApplyDecorator(functor):
    """Decorator for Apply that does appropriate self.manifest manipulation.
    Note this is implemented in this fashion so that we can be sure the
    instances manifest attribute is properly maintained, and so that we
    don't have to tell people "go look at docstring blah".
    """
    # pylint: disable=E0213,W0212,E1101,E1102
    def f(self, changes, **kwargs):
      manifest = kwargs.pop('manifest', None)
      # Wipe is used to track if we need to reset manifest to None, and
      # to identify if we already had a forced_manifest via __init__.
      wipe = self.manifest is None
      if manifest:
        if not wipe:
          raise ValueError("manifest can't be specified when one is forced "
                           "via __init__")
      elif wipe:
        manifest = git.ManifestCheckout.Cached(self._path)
      else:
        manifest = self.manifest
      try:
        self.manifest = manifest
        return functor(self, changes, **kwargs)
      finally:
        # Only clear the manifest if we computed it here; a forced manifest
        # from __init__ must survive across calls.
        if wipe:
          self.manifest = None
    f.__name__ = functor.__name__
    f.__doc__ = functor.__doc__
    return f
  @_ApplyDecorator
  def Apply(self, changes, dryrun=False, frozen=True,
            honor_ordering=False, changes_filter=None):
    """Applies changes from pool into the build root specified by the manifest.
    This method resolves each given change down into a set of transactions-
    the change and its dependencies- that must go in, then tries to apply
    the largest transaction first, working its way down.
    If a transaction cannot be applied, then it is rolled back
    in full- note that if a change is involved in multiple transactions,
    if an earlier attempt fails, that change can be retried in a new
    transaction if the failure wasn't caused by the patch being incompatible
    to ToT.
    Args:
      changes: A sequence of cros_patch.GitRepoPatch instances to resolve
        and apply.
      dryrun: If True, then content-merging is explicitly forced,
        and no modifications to gerrit will occur.
      frozen: If True, then resolving of the given changes is explicitly
        limited to just the passed in changes, or known committed changes.
        This is basically CQ/Paladin mode, used to limit the changes being
        pulled in/committed to just what we allow.
      honor_ordering: Apply normally will reorder the transactions it
        computes, trying the largest first, then degrading through smaller
        transactions if the larger of the two fails. If honor_ordering
        is False, then the ordering given via changes is preserved-
        this is mainly of use for cbuildbot induced patching, and shouldn't
        be used for CQ patching.
      changes_filter: If not None, must be a functor taking two arguments:
        series, changes; it must return the changes to work on.
        This is invoked after the initial changes have been fetched,
        thus this is a way for consumers to do last minute checking of the
        changes being inspected, and expand the changes if necessary.
        Primarily this is of use for cbuildbot patching when dealing w/
        uploaded/remote patches.
    Returns:
      A tuple of changes-applied, Exceptions for the changes that failed
      against ToT, and Exceptions that failed inflight;  These exceptions
      are cros_patch.PatchException instances.
    """
    # Prefetch the changes; we need accurate change_id/id's, which is
    # guaranteed via Fetch.
    self.FetchChanges(changes)
    if changes_filter:
      changes = changes_filter(self, changes)
    self.InjectLookupCache(changes)
    limit_to = cros_patch.PatchCache(changes) if frozen else None
    resolved, applied, failed = [], [], []
    for change, plan, ex in self.CreateTransactions(changes, limit_to=limit_to):
      if ex is not None:
        logging.info("Failed creating transaction for %s: %s", change, ex)
        failed.append(ex)
      else:
        resolved.append((change, plan))
        logging.info("Transaction for %s is %s.",
                     change, ', '.join(map(str, resolved[-1][-1])))
    if not resolved:
      # No work to do; either no changes were given to us, or all failed
      # to be resolved.
      return [], failed, []
    if not honor_ordering:
      # Sort by length, falling back to the order the changes were given to us.
      # This is done to prefer longer transactions (more painful to rebase)
      # over shorter transactions.
      position = dict((change, idx) for idx, change in enumerate(changes))
      def mk_key(data):
        ids = [x.id for x in data[1]]
        return -len(ids), position[data[0]]
      resolved.sort(key=mk_key)
    for inducing_change, transaction_changes in resolved:
      try:
        with self._Transaction(transaction_changes):
          logging.debug("Attempting transaction for %s: changes: %s",
                        inducing_change,
                        ', '.join(map(str, transaction_changes)))
          self._ApplyChanges(inducing_change, transaction_changes,
                             dryrun=dryrun)
      except cros_patch.PatchException, e:
        logging.info("Failed applying transaction for %s: %s",
                     inducing_change, e)
        failed.append(e)
      else:
        applied.extend(transaction_changes)
        self.InjectCommittedPatches(transaction_changes)
    # Uniquify while maintaining order.
    def _uniq(l):
      s = set()
      for x in l:
        if x not in s:
          yield x
          s.add(x)
    applied = list(_uniq(applied))
    # A patch that eventually applied in some transaction shouldn't be
    # reported as failed, even if an earlier transaction containing it failed.
    failed = [x for x in failed if x.patch not in applied]
    failed_tot = [x for x in failed if not x.inflight]
    failed_inflight = [x for x in failed if x.inflight]
    return applied, failed_tot, failed_inflight
  @contextlib.contextmanager
  def _Transaction(self, commits):
    """ContextManager used to rollback changes to a build root if necessary.
    Specifically, if an unhandled non system exception occurs, this context
    manager will roll back all relevant modifications to the git repos
    involved.
    Args:
      commits: A sequence of cros_patch.GitRepoPatch instances that compromise
        this transaction- this is used to identify exactly what may be changed,
        thus what needs to be tracked and rolled back if the transaction fails.
    """
    # First, the book keeping code; gather required data so we know what
    # to rollback to should this transaction fail.  Specifically, we track
    # what was checked out for each involved repo, and if it was a branch,
    # the sha1 of the branch; that information is enough to rewind us back
    # to the original repo state.
    project_state = set(map(self.GetGitRepoForChange, commits))
    resets, checkouts = [], []
    for project_dir in project_state:
      current_sha1 = git.RunGit(
          project_dir, ['rev-list', '-n1', 'HEAD']).output.strip()
      assert current_sha1
      result = git.RunGit(
          project_dir, ['symbolic-ref', 'HEAD'], error_code_ok=True)
      if result.returncode == 128: # Detached HEAD.
        checkouts.append((project_dir, current_sha1))
      elif result.returncode == 0:
        checkouts.append((project_dir, result.output.strip()))
        resets.append((project_dir, current_sha1))
      else:
        raise Exception(
            'Unexpected state from git symbolic-ref HEAD: exit %i\n'
            'stdout: %s\nstderr: %s'
            % (result.returncode, result.output, result.error))
    committed_cache = self._committed_cache.copy()
    try:
      yield
      # Reaching here means it was applied cleanly, thus return.
      return
    except (MemoryError, RuntimeError):
      # Skip transactional rollback; if these occur, at least via
      # the scenarios where they're *supposed* to be raised, we really
      # should let things fail hard here.
      raise
    except:
      # pylint: disable=W0702
      logging.info("Rewinding transaction: failed changes: %s .",
                   ', '.join(map(str, commits)))
      # Restore the original HEAD ref, then hard-reset branches back to
      # their recorded sha1s.
      for project_dir, ref in checkouts:
        git.RunGit(project_dir, ['checkout', ref])
      for project_dir, sha1 in resets:
        git.RunGit(project_dir, ['reset', '--hard', sha1])
      self._committed_cache = committed_cache
      raise
  @_PatchWrapException
  def _ApplyChanges(self, _inducing_change, changes, dryrun=False):
    """Apply a given ordered sequence of changes.
    Args:
      _inducing_change: The core GitRepoPatch instance that lead to this
        sequence of changes; basically what this transaction was computed from.
        Needs to be passed in so that the exception wrapping machinery can
        convert any failures, assigning blame appropriately.
      manifest: A ManifestCheckout instance representing what we're working on.
      changes: A ordered sequence of GitRepoPatch instances to apply.
      dryrun: Whether or not this is considered a production run.
    """
    # Bail immediately if we know one of the requisite patches won't apply.
    for change in changes:
      failure = self.failed_tot.get(change.id)
      if failure is not None:
        raise failure
    applied = []
    for change in changes:
      if change in self._committed_cache:
        continue
      try:
        self.ApplyChange(change, dryrun=dryrun)
      except cros_patch.PatchException, e:
        # Remember ToT failures so later transactions skip this patch
        # without re-trying it.
        if not e.inflight:
          self.failed_tot[change.id] = e
        raise
      applied.append(change)
      if hasattr(change, 'url'):
        s = '%s %s' % (change.owner, cros_patch.FormatGerritNumber(
            change.gerrit_number, force_internal=change.internal))
        cros_build_lib.PrintBuildbotLink(s, change.url)
    logging.debug('Done investigating changes.  Applied %s',
                  ' '.join([c.id for c in applied]))
  @classmethod
  def WorkOnSingleRepo(cls, git_repo, tracking_branch, **kwargs):
    """Classmethod to generate a PatchSeries that targets a single git repo.
    It does this via forcing a fake manifest, which in turn points
    tracking branch/paths/content-merging at what is passed through here.
    Args:
      git_repo: Absolute path to the git repository to operate upon.
      tracking_branch: Which tracking branch patches should apply against.
      kwargs: See PatchSeries.__init__ for the various optional args;
        note forced_manifest cannot be used here, and force_content_merging
        defaults to True in this usage.
    Returns:
      A PatchSeries instance w/ a forced manifest."""
    if 'forced_manifest' in kwargs:
      raise ValueError("RawPatchSeries doesn't allow a forced_manifest "
                       "argument.")
    merging = kwargs.setdefault('force_content_merging', True)
    kwargs['forced_manifest'] = _ManifestShim(
        git_repo, tracking_branch, content_merging=merging)
    return cls(git_repo, **kwargs)
class _ManifestShim(object):
"""Class used in conjunction with PatchSeries to support standalone git repos.
This works via duck typing; we match the 3 necessary methods that PatchSeries
uses."""
def __init__(self, path, tracking_branch, remote='origin',
content_merging=True):
self.path = path
self.tracking_branch = 'refs/remotes/%s/%s' % (remote, tracking_branch)
self.content_merging = content_merging
def GetProjectsLocalRevision(self, _project):
return self.tracking_branch
def GetProjectPath(self, _project, _absolute=False):
return self.path
def ProjectIsContentMerging(self, _project):
return self.content_merging
class ValidationFailedMessage(object):
  """Message indicating that changes failed to be validated."""

  def __init__(self, builder_name, build_log, tracebacks, internal):
    """Create a ValidationFailedMessage object.

    Args:
      builder_name: The URL-quoted name of the builder.
      build_log: The URL users should visit to see the build log.
      tracebacks: A list of results_lib.RecordedTraceback objects.
      internal: Whether this failure occurred on an internal builder.
    """
    self.builder_name = builder_name
    self.build_log = build_log
    # Stored as a tuple so the recorded failures are immutable.
    self.tracebacks = tuple(tracebacks)
    self.internal = internal

  def __str__(self):
    details = ['The %s stage failed: %s' % (t.failed_stage, t.exception)
               for t in self.tracebacks]
    if not details:
      details = ['cbuildbot failed']
    details.append('in %s' % (self.build_log,))
    return '%s: %s' % (urllib.unquote(self.builder_name), ' '.join(details))
class ValidationPool(object):
"""Class that handles interactions with a validation pool.
This class can be used to acquire a set of commits that form a pool of
commits ready to be validated and committed.
Usage: Use ValidationPool.AcquirePool -- a static
method that grabs the commits that are ready for validation.
"""
GLOBAL_DRYRUN = False
MAX_TIMEOUT = 60 * 60 * 4
SLEEP_TIMEOUT = 30
STATUS_URL = 'https://chromiumos-status.appspot.com/current?format=json'
STATUS_FAILED = manifest_version.BuilderStatus.STATUS_FAILED
STATUS_INFLIGHT = manifest_version.BuilderStatus.STATUS_INFLIGHT
STATUS_PASSED = manifest_version.BuilderStatus.STATUS_PASSED
STATUS_WAITING = 'waiting'
# The grace period (in seconds) before we reject a patch due to dependency
# errors.
REJECTION_GRACE_PERIOD = 30 * 60
def __init__(self, overlays, build_root, build_number, builder_name,
is_master, dryrun, changes=None, non_os_changes=None,
conflicting_changes=None, pre_cq=False, helper_pool=None):
"""Initializes an instance by setting default valuables to instance vars.
Generally use AcquirePool as an entry pool to a pool rather than this
method.
Args:
overlays: One of constants.VALID_OVERLAYS.
build_number: Build number for this validation attempt.
builder_name: Builder name on buildbot dashboard.
is_master: True if this is the master builder for the Commit Queue.
dryrun: If set to True, do not submit anything to Gerrit.
Optional Args:
changes: List of changes for this validation pool.
non_manifest_changes: List of changes that are part of this validation
pool but aren't part of the cros checkout.
changes_that_failed_to_apply_earlier: Changes that failed to apply but
we're keeping around because they conflict with other changes in
flight.
pre_cq: If set to True, this builder is verifying CLs before they go to
the commit queue.
helper_pool: A HelperPool instance. If not specified, a HelperPool
instance is created with full access to external and internal gerrit
instances; full access is used to allow cross gerrit dependencies
to be supported.
"""
if helper_pool is None:
helper_pool = HelperPool.SimpleCreate()
self.build_root = build_root
self._helper_pool = helper_pool
# These instances can be instantiated via both older, or newer pickle
# dumps. Thus we need to assert the given args since we may be getting
# a value we no longer like (nor work with).
if overlays not in constants.VALID_OVERLAYS:
raise ValueError("Unknown/unsupported overlay: %r" % (overlays,))
if not isinstance(build_number, int):
raise ValueError("Invalid build_number: %r" % (build_number,))
if not isinstance(builder_name, basestring):
raise ValueError("Invalid builder_name: %r" % (builder_name,))
for changes_name, changes_value in (
('changes', changes), ('non_os_changes', non_os_changes)):
if not changes_value:
continue
if not all(isinstance(x, cros_patch.GitRepoPatch) for x in changes_value):
raise ValueError(
'Invalid %s: all elements must be a GitRepoPatch derivative, got %r'
% (changes_name, changes_value))
if conflicting_changes and not all(
isinstance(x, cros_patch.PatchException)
for x in conflicting_changes):
raise ValueError(
'Invalid conflicting_changes: all elements must be a '
'cros_patch.PatchException derivative, got %r'
% (conflicting_changes,))
self.build_log = self.ConstructDashboardURL(overlays, pre_cq, builder_name,
str(build_number))
self.is_master = bool(is_master)
self.pre_cq = pre_cq
self.dryrun = bool(dryrun) or self.GLOBAL_DRYRUN
self.queue = 'A trybot' if pre_cq else 'The Commit Queue'
# See optional args for types of changes.
self.changes = changes or []
self.non_manifest_changes = non_os_changes or []
# Note, we hold onto these CLs since they conflict against our current CLs
# being tested; if our current ones succeed, we notify the user to deal
# w/ the conflict. If the CLs we're testing fail, then there is no
# reason we can't try these again in the next run.
self.changes_that_failed_to_apply_earlier = conflicting_changes or []
# Private vars only used for pickling.
self._overlays = overlays
self._build_number = build_number
self._builder_name = builder_name
self._patch_series = PatchSeries(self.build_root, helper_pool=helper_pool)
@staticmethod
def GetBuildDashboardForOverlays(overlays, trybot):
"""Discern the dashboard to use based on the given overlay."""
if trybot:
return constants.TRYBOT_DASHBOARD
if overlays in [constants.PRIVATE_OVERLAYS, constants.BOTH_OVERLAYS]:
return constants.BUILD_INT_DASHBOARD
return constants.BUILD_DASHBOARD
@classmethod
def ConstructDashboardURL(cls, overlays, trybot, builder_name, build_number,
stage=None):
"""Return the dashboard (buildbot) URL for this run
Args:
overlays: One of constants.VALID_OVERLAYS.
trybot: Boolean: is this a remote trybot?
builder_name: Builder name on buildbot dashboard.
build_number: Build number for this validation attempt.
stage: Link directly to a stage log, else use the general landing page.
Returns:
The fully formed URL
"""
build_dashboard = cls.GetBuildDashboardForOverlays(overlays, trybot)
url = '%s/builders/%s/builds/%s' % (build_dashboard, builder_name,
str(build_number))
if stage:
url += '/steps/%s/logs/stdio' % (stage,)
return url
@staticmethod
def GetGerritHelpersForOverlays(overlays):
"""Discern the allowed GerritHelpers to use based on the given overlay."""
# TODO(sosa): Remove False case once overlays logic has stabilized on TOT.
cros_internal = cros = False
if overlays in [constants.PUBLIC_OVERLAYS, constants.BOTH_OVERLAYS, False]:
cros = True
if overlays in [constants.PRIVATE_OVERLAYS, constants.BOTH_OVERLAYS]:
cros_internal = True
return HelperPool.SimpleCreate(cros_internal=cros_internal, cros=cros)
  def __reduce__(self):
    """Used for pickling to re-create validation pool."""
    # NOTE: this argument tuple must stay in sync with __init__'s signature;
    # unpickling invokes self.__class__(*args) with exactly these values.
    return (
        self.__class__,
        (
            self._overlays,
            self.build_root, self._build_number, self._builder_name,
            self.is_master, self.dryrun, self.changes,
            self.non_manifest_changes,
            self.changes_that_failed_to_apply_earlier,
            self.pre_cq))
@classmethod
def FilterNonMatchingChanges(cls, changes):
"""Filter out changes that don't actually match our query.
Generally, Gerrit should only return patches that match our query. However,
there are race conditions (bugs in Gerrit) where the final patch won't
match our query.
Here's an example problem that this code fixes: If the Pre-CQ launcher
picks up a CL while the CQ is committing the CL, it may catch a race
condition where a new patchset has been created and committed by the CQ,
but the CL is still treated as if it matches the query (which it doesn't,
anymore).
Arguments:
changes: List of changes to filter.
Returns:
List of changes that match our query.
"""
for change in changes:
# Check that the user (or chrome-bot) uploaded a new change under our
# feet while Gerrit was in the middle of answering our query.
for field, value in constants.DEFAULT_CQ_READY_FIELDS.iteritems():
if not change.HasApproval(field, value):
break
else:
yield change
@classmethod
def AcquirePreCQPool(cls, *args, **kwargs):
"""See ValidationPool.__init__ for arguments."""
kwargs.setdefault('pre_cq', True)
kwargs.setdefault('is_master', True)
return cls(*args, **kwargs)
  @classmethod
  def AcquirePool(cls, overlays, repo, build_number, builder_name,
                  dryrun=False, changes_query=None, check_tree_open=True,
                  change_filter=None):
    """Acquires the current pool from Gerrit.
    Polls Gerrit and checks for which change's are ready to be committed.
    Args:
      overlays: One of constants.VALID_OVERLAYS.
      repo: The repo used to sync, to filter projects, and to apply patches
        against.
      build_number: Corresponding build number for the build.
      builder_name: Builder name on buildbot dashboard.
      dryrun: Don't submit anything to gerrit.
      changes_query: The gerrit query to use to identify changes; if None,
        uses the internal defaults.
      check_tree_open: If True, only return when the tree is open.
      change_filter: If set, use change_filter(pool, changes,
        non_manifest_changes) to filter out unwanted patches.
    Returns:
      ValidationPool object.
    Raises:
      TreeIsClosedException: if the tree is closed.
    """
    if changes_query is None:
      changes_query = constants.DEFAULT_CQ_READY_QUERY
    if change_filter is None:
      change_filter = lambda _, x, y: (x, y)
    # We choose a longer wait here as we haven't committed to anything yet. By
    # doing this here we can reduce the number of builder cycles.
    end_time = time.time() + cls.MAX_TIMEOUT
    while True:
      time_left = end_time - time.time()
      # Wait until the tree opens.
      if check_tree_open and not cros_build_lib.TreeOpen(
          cls.STATUS_URL, cls.SLEEP_TIMEOUT, max_timeout=time_left):
        raise TreeIsClosedException()
      # Sync so that we are up-to-date on what is committed.
      repo.Sync()
      # Only master configurations should call this method.
      pool = ValidationPool(overlays, repo.directory, build_number,
                            builder_name, True, dryrun)
      # Iterate through changes from all gerrit instances we care about.
      for helper in cls.GetGerritHelpersForOverlays(overlays):
        # Reverse so the oldest-updated changes come first in the pool.
        raw_changes = helper.Query(changes_query, sort='lastUpdated')
        raw_changes.reverse()
        # Verify the results match the query, to prevent race conditions.
        if changes_query == constants.DEFAULT_CQ_READY_QUERY:
          raw_changes = cls.FilterNonMatchingChanges(raw_changes)
        changes, non_manifest_changes = ValidationPool._FilterNonCrosProjects(
            raw_changes, git.ManifestCheckout.Cached(repo.directory))
        pool.changes.extend(changes)
        pool.non_manifest_changes.extend(non_manifest_changes)
      # Filter out unwanted changes.
      pool.changes, pool.non_manifest_changes = change_filter(
          pool, pool.changes, pool.non_manifest_changes)
      # Stop when anything is ready, in dryrun mode, or once time runs out.
      if pool.changes or pool.non_manifest_changes or dryrun or time_left < 0:
        break
      logging.info('Waiting for new CLs (%d minutes left)...', time_left / 60)
      time.sleep(cls.SLEEP_TIMEOUT)
    return pool
  @classmethod
  def AcquirePoolFromManifest(cls, manifest, overlays, repo, build_number,
                              builder_name, is_master, dryrun):
    """Acquires the current pool from a given manifest.
    This function assumes that you have already synced to the given manifest.
    Args:
      manifest: path to the manifest where the pool resides.
      overlays: One of constants.VALID_OVERLAYS.
      repo: The repo used to filter projects and to apply patches against.
      build_number: Corresponding build number for the build.
      builder_name: Builder name on buildbot dashboard.
      is_master: Boolean that indicates whether this is a pool for a master.
        config or not.
      dryrun: Don't submit anything to gerrit.
    Returns:
      ValidationPool object.
    Raises:
      NoMatchingChangeFoundException: if a pending commit in the manifest
        cannot be found on any of the queried gerrit instances.
    """
    pool = ValidationPool(overlays, repo.directory, build_number, builder_name,
                          is_master, dryrun)
    manifest_dom = minidom.parse(manifest)
    pending_commits = manifest_dom.getElementsByTagName(
        lkgm_manager.PALADIN_COMMIT_ELEMENT)
    for pending_commit in pending_commits:
      project = pending_commit.getAttribute(lkgm_manager.PALADIN_PROJECT_ATTR)
      change = pending_commit.getAttribute(lkgm_manager.PALADIN_CHANGE_ID_ATTR)
      commit = pending_commit.getAttribute(lkgm_manager.PALADIN_COMMIT_ATTR)
      # Try each gerrit instance in turn; the first that knows the change
      # wins. The for/else fires only when no instance could resolve it.
      for helper in cls.GetGerritHelpersForOverlays(overlays):
        try:
          patch = helper.GrabPatchFromGerrit(project, change, commit)
          pool.changes.append(patch)
          break
        except gerrit.QueryHasNoResults:
          pass
      else:
        raise NoMatchingChangeFoundException(
            'Could not find change defined by %s' % pending_commit)
    return pool
@staticmethod
def _FilterNonCrosProjects(changes, manifest):
"""Filters changes to a tuple of relevant changes.
There are many code reviews that are not part of Chromium OS and/or
only relevant on a different branch. This method returns a tuple of (
relevant reviews in a manifest, relevant reviews not in the manifest). Note
that this function must be run while chromite is checked out in a
repo-managed checkout.
Args:
changes: List of GerritPatch objects.
manifest: The manifest to check projects/branches against.
Returns tuple of
relevant reviews in a manifest, relevant reviews not in the manifest.
"""
def IsCrosReview(change):
return (change.project.startswith('chromiumos') or
change.project.startswith('chromeos'))
# First we filter to only Chromium OS repositories.
changes = [c for c in changes if IsCrosReview(c)]
projects = manifest.projects
changes_in_manifest = []
changes_not_in_manifest = []
for change in changes:
patch_branch = 'refs/heads/%s' % change.tracking_branch
project_data = projects.get(change.project)
if project_data is not None and patch_branch == project_data['revision']:
changes_in_manifest.append(change)
continue
changes_not_in_manifest.append(change)
logging.info('Filtered change %s', change)
return changes_in_manifest, changes_not_in_manifest
@classmethod
def _FilterDependencyErrors(cls, errors):
"""Filter out ignorable DependencyError exceptions.
If a dependency isn't marked as ready, or a dependency fails to apply,
we only complain after REJECTION_GRACE_PERIOD has passed since the patch
was uploaded.
This helps in two situations:
1) If the developer is in the middle of marking a stack of changes as
ready, we won't reject their work until the grace period has passed.
2) If the developer marks a big circular stack of changes as ready, and
some change in the middle of the stack doesn't apply, the user will
get a chance to rebase their change before we mark all the changes as
'not ready'.
This function filters out dependency errors that can be ignored due to
the grace period.
Args:
errors: List of exceptions to filter.
Returns:
List of unfiltered exceptions.
"""
reject_timestamp = time.time() - cls.REJECTION_GRACE_PERIOD
results = []
for error in errors:
results.append(error)
if reject_timestamp < error.patch.approval_timestamp:
while error is not None:
if isinstance(error, cros_patch.DependencyError):
logging.info('Ignoring dependency errors for %s due to grace '
'period', error.patch)
results.pop()
break
error = getattr(error, 'error', None)
return results
  def ApplyPoolIntoRepo(self, manifest=None):
    """Applies changes from pool into the directory specified by the buildroot.
    This method applies changes in the order specified. It also respects
    dependency order.
    Returns:
      True if we managed to apply any changes.
    """
    try:
      # pylint: disable=E1123
      applied, failed_tot, failed_inflight = self._patch_series.Apply(
          self.changes, dryrun=self.dryrun, manifest=manifest)
    except (KeyboardInterrupt, RuntimeError, SystemExit):
      raise
    except Exception, e:
      # mox errors come from unit tests; never swallow those.
      if mox is not None and isinstance(e, mox.Error):
        raise
      # Stash a copy of the tb guts, since the next set of steps can
      # wipe it.
      exc = sys.exc_info()
      msg = (
          "Unhandled Exception occurred during CQ's Apply: %s\n"
          "Failing the entire series to prevent CQ from going into an "
          "infinite loop hanging on these CLs." % (e,))
      cros_build_lib.Error(
          "%s\nAffected Patches are: %s", msg,
          ', '.join('CL:%s' % x.gerrit_number_str for x in self.changes))
      try:
        self._HandleApplyFailure(
            [InternalCQError(patch, msg) for patch in self.changes])
      except Exception, e:
        if mox is None or not isinstance(e, mox.Error):
          # If it's not a mox error, let it fly.
          raise
      # Re-raise the original exception with its original traceback (py2
      # three-argument raise form).
      raise exc[0], exc[1], exc[2]
    if self.is_master:
      for change in applied:
        self._HandleApplySuccess(change)
    # Dependency errors within the rejection grace period are forgiven.
    failed_tot = self._FilterDependencyErrors(failed_tot)
    if failed_tot:
      logging.info(
          'The following changes could not cleanly be applied to ToT: %s',
          ' '.join([c.patch.id for c in failed_tot]))
      self._HandleApplyFailure(failed_tot)
    failed_inflight = self._FilterDependencyErrors(failed_inflight)
    if failed_inflight:
      logging.info(
          'The following changes could not cleanly be applied against the '
          'current stack of patches; if this stack fails, they will be tried '
          'in the next run. Inflight failed changes: %s',
          ' '.join([c.patch.id for c in failed_inflight]))
      self.changes_that_failed_to_apply_earlier.extend(failed_inflight)
    self.changes = applied
    return bool(self.changes)
@staticmethod
def Load(filename):
"""Loads the validation pool from the file."""
with open(filename, 'rb') as p_file:
return cPickle.load(p_file)
def Save(self, filename):
"""Serializes the validation pool."""
with open(filename, 'wb') as p_file:
cPickle.dump(self, p_file, protocol=cPickle.HIGHEST_PROTOCOL)
  # Note: All submit code, all gerrit code, and basically everything other
  # than patch resolution/applying needs to use .change_id from patch objects.
  # Basically all code from this point forward.
  def _SubmitChanges(self, changes, check_tree_open=True):
    """Submits given changes to Gerrit.
    Args:
      changes: GerritPatch's to submit.
      check_tree_open: Whether to check that the tree is open before submitting
        changes. If this is False, TreeIsClosedException will never be raised.
    Raises:
      TreeIsClosedException: if the tree is closed.
      FailedToSubmitAllChangesException: if we can't submit a change.
    """
    assert self.is_master, 'Non-master builder calling SubmitPool'
    assert not self.pre_cq, 'Trybot calling SubmitPool'
    changes_that_failed_to_submit = []
    # We use the default timeout here as while we want some robustness against
    # the tree status being red i.e. flakiness, we don't want to wait too long
    # as validation can become stale.
    if check_tree_open and not self.dryrun and not cros_build_lib.TreeOpen(
        self.STATUS_URL, self.SLEEP_TIMEOUT):
      raise TreeIsClosedException()
    for change in changes:
      was_change_submitted = False
      logging.info('Change %s will be submitted', change)
      try:
        self._SubmitChange(change)
        # Submission only "counts" once gerrit confirms the change is merged.
        was_change_submitted = self._helper_pool.ForChange(
            change).IsChangeCommitted(str(change.gerrit_number), self.dryrun)
      except cros_build_lib.RunCommandError:
        logging.error('gerrit review --submit failed for change.')
      finally:
        # NOTE: runs on success too; was_change_submitted distinguishes the
        # success path from the failure paths above.
        if not was_change_submitted:
          logging.error('Could not submit %s', str(change))
          self._HandleCouldNotSubmit(change)
          changes_that_failed_to_submit.append(change)
    if changes_that_failed_to_submit:
      raise FailedToSubmitAllChangesException(changes_that_failed_to_submit)
def _SubmitChange(self, change):
"""Submits patch using Gerrit Review."""
cmd = self._helper_pool.ForChange(change).GetGerritReviewCommand(
['--submit', '%s,%s' % (change.gerrit_number, change.patch_number)])
_RunCommand(cmd, self.dryrun)
def SubmitNonManifestChanges(self, check_tree_open=True):
"""Commits changes to Gerrit from Pool that aren't part of the checkout.
Args:
check_tree_open: Whether to check that the tree is open before submitting
changes. If this is False, TreeIsClosedException will never be raised.
Raises:
TreeIsClosedException: if the tree is closed.
FailedToSubmitAllChangesException: if we can't submit a change.
"""
self._SubmitChanges(self.non_manifest_changes,
check_tree_open=check_tree_open)
  def SubmitPool(self, check_tree_open=True):
    """Commits changes to Gerrit from Pool. This is only called by a master.
    Args:
      check_tree_open: Whether to check that the tree is open before submitting
        changes. If this is False, TreeIsClosedException will never be raised.
    Raises:
      TreeIsClosedException: if the tree is closed.
      FailedToSubmitAllChangesException: if we can't submit a change.
    """
    # Note that _SubmitChanges can throw an exception if it can't
    # submit all changes; in that particular case, don't mark the inflight
    # failures patches as failed in gerrit- some may apply next time we do
    # a CQ run (since the submit state has changed, we have no way of
    # knowing). They *likely* will still fail, but this approach tries
    # to minimize wasting the developers time.
    self._SubmitChanges(self.changes, check_tree_open=check_tree_open)
    if self.changes_that_failed_to_apply_earlier:
      # These CLs conflicted with the series we just submitted; tell their
      # owners to rebase now that the series is in.
      self._HandleApplyFailure(self.changes_that_failed_to_apply_earlier)
def _HandleApplyFailure(self, failures):
"""Handles changes that were not able to be applied cleanly.
Args:
changes: GerritPatch's to handle.
"""
for failure in failures:
logging.info('Change %s did not apply cleanly.', failure.patch)
if self.is_master:
self._HandleCouldNotApply(failure)
def _HandleCouldNotApply(self, failure):
"""Handler for when Paladin fails to apply a change.
This handler notifies set CodeReview-2 to the review forcing the developer
to re-upload a rebased change.
Args:
change: GerritPatch instance to operate upon.
"""
msg = '%(queue)s failed to apply your change in %(build_log)s .'
msg += ' %(failure)s'
self._SendNotification(failure.patch, msg, failure=failure)
self._helper_pool.ForChange(failure.patch).RemoveCommitReady(
failure.patch, dryrun=self.dryrun)
def HandleValidationTimeout(self):
"""Handles changes that timed out."""
logging.info('Validation timed out for all changes.')
for change in self.changes:
logging.info('Validation timed out for change %s.', change)
self._SendNotification(change,
'%(queue)s timed out while verifying your change in '
'%(build_log)s . This means that a supporting builder did not '
'finish building your change within the specified timeout. If you '
'believe this happened in error, just re-mark your commit as ready. '
'Your change will then get automatically retried.')
self._helper_pool.ForChange(change).RemoveCommitReady(
change, dryrun=self.dryrun)
  def _SendNotification(self, change, msg, **kwargs):
    """Post a Paladin comment on |change| built from template |msg|.
    Args:
      change: GerritPatch instance to comment on.
      msg: %-style format string; %(build_log)s and %(queue)s are always
        available, plus any keys supplied via kwargs.
      **kwargs: Extra substitutions for the message template.
    Raises:
      TypeError/ValueError: re-raised if |msg| cannot be interpolated, with
        the formatting context recorded in the new exception's message.
    """
    d = dict(build_log=self.build_log, queue=self.queue, **kwargs)
    try:
      msg %= d
    except (TypeError, ValueError), e:
      logging.error(
          "Generation of message %s for change %s failed: dict was %r, "
          "exception %s", msg, change, d, e)
      # Re-raise with the same exception type but a richer message so the
      # formatting context isn't lost.
      raise e.__class__(
          "Generation of message %s for change %s failed: dict was %r, "
          "exception %s" % (msg, change, d, e))
    PaladinMessage(msg, change, self._helper_pool.ForChange(change)).Send(
        self.dryrun)
def HandlePreCQSuccess(self):
"""Handler that is called when the Pre-CQ successfully verifies a change."""
msg = '%(queue)s successfully verified your change in %(build_log)s .'
for change in self.changes:
if self.GetPreCQStatus(change) != self.STATUS_PASSED:
self._SendNotification(change, msg)
self.UpdatePreCQStatus(change, self.STATUS_PASSED)
def _HandleCouldNotSubmit(self, change):
"""Handler that is called when Paladin can't submit a change.
This should be rare, but if an admin overrides the commit queue and commits
a change that conflicts with this change, it'll apply, build/validate but
receive an error when submitting.
Args:
change: GerritPatch instance to operate upon.
"""
self._SendNotification(change,
'%(queue)s failed to submit your change in %(build_log)s . '
'This can happen if you submitted your change or someone else '
'submitted a conflicting change while your change was being tested.')
self._helper_pool.ForChange(change).RemoveCommitReady(
change, dryrun=self.dryrun)
  @staticmethod
  def _FindSuspects(changes, messages):
    """Figure out what changes probably caused our failures.
    We use a fairly simplistic algorithm to calculate breakage: If you changed
    a package, and that package broke, you probably broke the build. If there
    were multiple changes to a broken package, we fail them all.
    Some safeguards are implemented to ensure that bad changes are kicked out:
      1) Changes to overlays (e.g. ebuilds, eclasses, etc.) are always kicked
         out if the build fails.
      2) If a package fails that nobody changed, we kick out all of the
         changes.
      3) If any failures occur that we can't explain, we kick out all of the
         changes.
    It is certainly possible to trick this algorithm: If one developer submits
    a change to libchromeos that breaks the power_manager, and another developer
    submits a change to the power_manager at the same time, only the
    power_manager change will be kicked out. That said, in that situation, the
    libchromeos change will likely be kicked out on the next run, thanks to
    safeguard #2 above.
    This function is intentionally static, and should be kept simple. If it
    starts getting complicated, we should move it to a different file.
    Args:
      changes: List of changes to examine.
      messages: A list of build failure messages from supporting builders.
    Returns:
      suspects: Set of changes that likely caused the failure.
    """
    suspects = set()
    blame_everything = False
    # If there were no internal failures, only kick out external changes.
    if any(message.internal for message in messages):
      candidates = changes
    else:
      candidates = [change for change in changes if not change.internal]
    for message in messages:
      for recorded_traceback in message.tracebacks:
        exception = recorded_traceback.exception
        blame_assigned = False
        if isinstance(exception, results_lib.PackageBuildFailure):
          # Blame every candidate change whose project produced one of the
          # failed packages.
          for package in exception.failed_packages:
            failed_projects = portage_utilities.FindWorkonProjects([package])
            for change in candidates:
              if change.project in failed_projects:
                blame_assigned = True
                suspects.add(change)
          # NOTE(review): this check is nested inside the PackageBuildFailure
          # branch, so tracebacks of any other exception type never set
          # blame_everything -- confirm that is the intended behavior given
          # safeguard #3 in the docstring.
          if not blame_assigned:
            blame_everything = True
    if blame_everything or not suspects:
      suspects = set(candidates)
    else:
      # Never treat changes to overlays as innocent.
      suspects.update(change for change in candidates
                      if '/overlays/' in change.project)
    return suspects
@staticmethod
def _CreateValidationFailureMessage(pre_cq, change, suspects, messages):
"""Create a message explaining why a validation failure occurred.
Args:
pre_cq: Whether this builder is a Pre-CQ builder.
change: The change we want to create a message for.
suspects: The set of suspect changes that we think broke the build.
messages: A list of build failure messages from supporting builders.
"""
# Build a list of error messages. We don't want to build a ridiculously
# long comment, as Gerrit will reject it. See http://crbug.com/236831
max_error_len = 20000 / max(1, len(messages))
msg = ['The following build(s) failed:']
for message in map(str, messages):
if len(message) > max_error_len:
message = message[:max_error_len] + '... (truncated)'
msg.append(message)
# Create a list of changes other than this one that might be guilty.
# Limit the number of suspects to 20 so that the list of suspects isn't
# ridiculously long.
max_suspects = 20
other_suspects = suspects - set([change])
if len(other_suspects) < max_suspects:
other_suspects_str = ', '.join(sorted(
'CL:%s' % x.gerrit_number_str for x in other_suspects))
else:
other_suspects_str = ('%d other changes. See the blamelist for more '
'details.' % (len(other_suspects),))
if change in suspects:
if other_suspects_str:
msg.append('Your change may have caused this failure. There are '
'also other changes that may be at fault: %s'
% other_suspects_str)
else:
msg.append('This failure was probably caused by your change.')
msg.append('Please check whether the failure is your fault. If your '
'change is not at fault, you may mark it as ready again.')
else:
if len(suspects) == 1:
msg.append('This failure was probably caused by %s'
% other_suspects_str)
else:
msg.append('One of the following changes is probably at fault: %s'
% other_suspects_str)
if not pre_cq:
msg.insert(
0, 'NOTE: The Commit Queue will retry your change automatically.')
return '\n\n'.join(msg)
  def HandleValidationFailure(self, messages):
    """Handles a list of validation failure messages from slave builders.
    This handler parses a list of failure messages from our list of builders
    and calculates which changes were likely responsible for the failure. The
    changes that were responsible for the failure have their Commit Ready bit
    stripped and the other changes are left marked as Commit Ready.
    Args:
      messages: A list of build failure messages from supporting builders.
        These must be ValidationFailedMessage objects.
    """
    changes = []
    for change in self.changes:
      # Ignore changes that were already verified.
      if self.pre_cq and self.GetPreCQStatus(change) == self.STATUS_PASSED:
        continue
      changes.append(change)
    # First, calculate which changes are likely at fault for the failure.
    suspects = self._FindSuspects(changes, messages)
    # Send out failure notifications for each change.
    for change in changes:
      msg = self._CreateValidationFailureMessage(self.pre_cq, change, suspects,
                                                 messages)
      self._SendNotification(change, '%(details)s', details=msg)
      # Only suspects lose their Commit Ready bit; innocent changes ride along
      # into the next run untouched.
      if change in suspects:
        self._helper_pool.ForChange(change).RemoveCommitReady(
            change, dryrun=self.dryrun)
      if self.pre_cq:
        # Mark the change as failed. If the Ready bit is still set, the change
        # will be retried automatically.
        self.UpdatePreCQStatus(change, self.STATUS_FAILED)
def GetValidationFailedMessage(self):
"""Returns message indicating these changes failed to be validated."""
logging.info('Validation failed for all changes.')
internal = self._overlays in [constants.PRIVATE_OVERLAYS,
constants.BOTH_OVERLAYS]
return ValidationFailedMessage(self._builder_name, self.build_log,
results_lib.Results.GetTracebacks(),
internal)
  def HandleCouldNotApply(self, change):
    """Handler for when Paladin fails to apply a change.
    This handler strips the Commit Ready bit forcing the developer
    to re-upload a rebased change, as theirs failed to apply cleanly.
    Args:
      change: GerritPatch instance to operate upon.
    """
    msg = '%(queue)s failed to apply your change in %(build_log)s . '
    # This is written this way to protect against bugs in CQ itself. We log
    # it both to the build output, and mark the change w/ it.
    extra_msg = getattr(change, 'apply_error_message', None)
    if extra_msg is None:
      logging.error(
          'Change %s was passed to HandleCouldNotApply without an appropriate '
          'apply_error_message set. Internal bug.', change)
      extra_msg = (
          'Internal CQ issue: extra error info was not given, Please contact '
          'the build team and ensure they are aware of this specific change '
          'failing.')
    msg += extra_msg
    self._SendNotification(change, msg)
    self._helper_pool.ForChange(change).RemoveCommitReady(
        change, dryrun=self.dryrun)
def _HandleApplySuccess(self, change):
"""Handler for when Paladin successfully applies a change.
This handler notifies a developer that their change is being tried as
part of a Paladin run defined by a build_log.
Args:
change: GerritPatch instance to operate upon.
"""
if self.pre_cq and self.GetPreCQStatus(change) == self.STATUS_PASSED:
return
msg = ('%(queue)s has picked up your change. '
'You can follow along at %(build_log)s .')
self._SendNotification(change, msg)
  def _GetPreCQStatusURL(self, change):
    """Return the Google Storage URL that holds |change|'s Pre-CQ status.
    NOTE(review): assumes gerrit_number and patch_number are strings
    (str.join would raise on ints) -- confirm against GerritPatch.
    """
    internal = 'int' if change.internal else 'ext'
    components = [constants.MANIFEST_VERSIONS_GS_URL, 'pre-cq',
                  internal, change.gerrit_number, change.patch_number]
    return '/'.join(components)
def GetPreCQStatus(self, change):
"""Get Pre-CQ status for |change|."""
ctx = gs.GSContext()
url = self._GetPreCQStatusURL(change)
try:
return ctx.Cat(url).output
except gs.GSNoSuchKey:
logging.debug('No status yet for %r', url)
return None
def UpdatePreCQStatus(self, change, status):
"""Update Google Storage URL for |change| with the Pre-CQ |status|."""
url = self._GetPreCQStatusURL(change)
ctx = gs.GSContext(dry_run=self.dryrun)
ctx.Copy('-', url, input=status)
  def CreateDisjointTransactions(self, manifest):
    """Create a list of disjoint transactions from the changes in the pool.
    Args:
      manifest: Manifest to use.
    Returns:
      A list of disjoint transactions. Each transaction can be tried
      independently, without involving patches from other transactions.
      Each change in the pool will included in exactly one of transactions,
      unless the patch does not apply for some reason.
    """
    helper_pool = HelperPool.SimpleCreate()
    patches = PatchSeries(self.build_root, forced_manifest=manifest,
                          helper_pool=helper_pool)
    edges, failed = {}, []
    for change, plan, ex in patches.CreateTransactions(self.changes,
                                                       limit_to=self.changes):
      if ex is not None:
        logging.info("Failed creating transaction for %s: %s", change, ex)
        failed.append(ex)
      else:
        # Mark everybody in the transaction as bidirectionally connected to the
        # others.
        for x in plan:
          edges.setdefault(x, set()).update(plan)
    # Dependency errors within the grace period are forgiven before notifying.
    failed = self._FilterDependencyErrors(failed)
    if failed:
      self._HandleApplyFailure(failed)
    # Each strongly connected component of the dependency graph is one
    # independently-testable transaction.
    return list(digraph.StronglyConnectedComponents(list(edges), edges))
class PaladinMessage():
  """An object that is used to send messages to developers about their changes.
  """
  # URL where Paladin documentation is stored.
  _PALADIN_DOCUMENTATION_URL = ('http://www.chromium.org/developers/'
                                'tree-sheriffs/sheriff-details-chromium-os/'
                                'commit-queue-overview')
  # Gerrit can't handle commands over 32768 bytes. See http://crbug.com/236831
  MAX_MESSAGE_LEN = 32000
  def __init__(self, message, patch, helper):
    """Store the (possibly truncated) message plus the patch and helper."""
    if len(message) > self.MAX_MESSAGE_LEN:
      message = message[:self.MAX_MESSAGE_LEN] + '... (truncated)'
    self.message = message
    self.patch = patch
    self.helper = helper
  def _ConstructPaladinMessage(self):
    """Adds any standard Paladin messaging to an existing message."""
    footer = ('\n\nCommit queue documentation: %s' %
              self._PALADIN_DOCUMENTATION_URL)
    return self.message + footer
  def Send(self, dryrun):
    """Sends the message to the developer."""
    # Gerrit requires that commit messages are enclosed in quotes, and that
    # any backslashes or quotes within these quotes are escaped.
    # See com.google.gerrit.sshd.CommandFactoryProvider#split.
    body = self._ConstructPaladinMessage()
    quoted = '"%s"' % body.replace('\\', '\\\\').replace('"', '\\"')
    target = '%s,%s' % (self.patch.gerrit_number, self.patch.patch_number)
    cmd = self.helper.GetGerritReviewCommand(['-m', quoted, target])
    _RunCommand(cmd, dryrun)
| windyuuy/opera | chromium/src/third_party/chromite/buildbot/validation_pool.py | Python | bsd-3-clause | 67,723 |
'''
Created on Jul 31, 2014
Implementation from:
http://craiglabenz.me/2013/06/12/how-i-made-django-admin-scale/
Used because Django admin was crazy slow
@author: u0490822
'''
from django.db import models, connections
from django.db.models.query import QuerySet
class FastCountQuerySet(QuerySet):
    '''
    QuerySet whose count() short-circuits full-table counts on MySQL by
    reading the approximate row count from SHOW TABLE STATUS instead of
    issuing a (slow) COUNT(*) over the whole table.
    '''
    def count(self):
        '''
        Override entire table count queries only. Any WHERE or other altering
        statements will default back to an actual COUNT query.
        '''
        # Reuse cached results when the queryset was already evaluated.
        # NOTE(review): relies on the private _result_cache/_iter attributes
        # of older Django QuerySets -- verify against the Django version used.
        if self._result_cache is not None and not self._iter:
            return len(self._result_cache)
        is_mysql = 'mysql' in connections[self.db].client.executable_name.lower()
        query = self.query
        # Only safe to approximate when nothing constrains or reshapes the
        # query (no WHERE/LIMIT/OFFSET/SELECT list/GROUP BY/HAVING/DISTINCT).
        if (is_mysql and not query.where and
            query.high_mark is None and
            query.low_mark == 0 and
            not query.select and
            not query.group_by and
            not query.having and
            not query.distinct):
            # If query has no constraints, we would be simply doing
            # "SELECT COUNT(*) FROM foo". Monkey patch so that we
            # get an approximation instead.
            cursor = connections[self.db].cursor()
            cursor.execute("SHOW TABLE STATUS LIKE %s",
                           (self.model._meta.db_table,))
            # Column index 4 of SHOW TABLE STATUS is "Rows" (approximate
            # for InnoDB tables).
            return cursor.fetchall()[0][4]
        else:
            return self.query.get_count(using=self.db)
class NoCountManager(models.Manager):
    """Manager whose querysets use the fast approximate count() path."""
    def get_query_set(self):
        # Hand back a FastCountQuerySet so .count() can avoid COUNT(*).
        return FastCountQuerySet(self.model, using=self._db)
#!/usr/bin/python
import json
import logging
import sys
from datetime import datetime
import csv
if __name__ == '__main__':
    _loggingLevel = logging.DEBUG  ## How much trace
    logger = logging.getLogger(__name__)
    logging.basicConfig(level=_loggingLevel)

    # Load the URLs known to altmetric into a set: membership tests are O(1).
    # The original built a dict and tested `url not in a.keys()`, which in
    # Python 2 scans a freshly-built list for every row (O(n) per lookup).
    altmetricFile = sys.argv[1]
    altmetric_urls = set()
    with open(altmetricFile) as afData:
        for line in afData:
            altmetric_urls.add(line.rstrip('\n'))

    # Re-emit each $-separated activity row, reordered and annotated with a
    # Y/N flag saying whether its URL appeared in the altmetric list.
    with open(sys.argv[2], 'rb') as csvfile:
        spamreader = csv.reader(csvfile, delimiter='$', quotechar='\'')
        for line in spamreader:
            post_id = line[0]  # renamed: `id` shadowed the builtin
            title = line[1]
            url = line[2]
            dateP = line[3]
            restP = line[4]
            actorId = line[5]
            actorUrl = line[6]
            actorDisplayName = line[7]
            verb = line[8]
            objectId = line[9]
            objectActorId = line[10]
            objectActorDisplayName = line[11]
            objectContent = line[12]
            in_altmetric = "Y" if url in altmetric_urls else "N"
            print("%s$%s$%s$%s$%s$%s$%s$%s$%s$%s$%s$%s$%s$%r" % (
                dateP, restP, post_id, title, in_altmetric, url, verb,
                actorDisplayName, actorId, actorUrl, objectId, objectActorId,
                objectActorDisplayName, objectContent))
| sinharrajesh/dbtools | google-plus-analysis/clarify.py | Python | apache-2.0 | 1,344 |
#!/usr/bin/env python
#-*- coding:utf-8 -*-
"""
Laudio - A webbased musicplayer
Copyright (C) 2010 Bernhard Posselt, bernhard.posselt@gmx.at
Laudio is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
Laudio is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Laudio. If not, see <http://www.gnu.org/licenses/>.
"""
from django.template import Context, Template
from django.conf import settings
from laudio.models import Settings
import os
class JavaScript(object):
    """Renders javascript sources through the Django template engine.

    This enables django template vars and template syntax in javascript
    files and manipulates the served js for different views."""
    def __init__(self, view, request):
        """Collect and render the javascript needed for a view.

        Keyword arguments:
        view -- can be: "library", "settings" or "playlist"; sets javascript
            according to those views
        request -- the current HttpRequest; used to honour per-user settings
        """
        self.view = view
        files = []
        # Check settings: the autoloader is only served for the library view,
        # and only when the user's (or the site's) settings enable it.
        try:
            config = Settings.objects.get(pk=1)
            if request.user.is_authenticated():
                if request.user.get_profile().showLib and self.view == "library":
                    files.append("func/autoload.js")
            else:
                if config.showLib and self.view == "library":
                    files.append("func/autoload.js")
        except (Settings.DoesNotExist, AttributeError):
            # BUG FIX: this previously read
            #   except Settings.DoesNotExist, AttributeError:
            # which (Python 2 comma form) bound the exception to the name
            # AttributeError instead of catching AttributeError. The tuple
            # form catches both exception types as originally intended.
            pass
        # Depending on the view, different js files are being included.
        # Paths are relative to src/javascript/.
        if self.view == "library":
            files.extend([
                "inc/includes.js",
                "ui/collection.js",
                "ui/controls.js",
                "ui/tablesorting.js",
                "ui/playlist.js",
                "ui/nav.js",
                "func/player.js",
                "func/search.js",
            ])
        elif self.view == "settings":
            files.extend([
                "inc/includes.js",
                "ui/settings.js",
            ])
        # Concatenate the sources; use a context manager so each file is
        # closed even on error (the original shadowed the builtin `file`
        # and leaked the handle if read() raised).
        content = ""
        for name in files:
            path = os.path.join(settings.INSTALL_DIR, "src/javascript/%s" % name)
            with open(path, 'r') as source:
                content += source.read()
        # Render the concatenated source as a Django template.
        tpl = Template(content)
        self.javascript = tpl.render(Context({}))
    def __str__(self):
        return self.javascript
| jgehring/Laudio | laudio/src/javascript.py | Python | gpl-3.0 | 3,237 |
#!/usr/bin/env python
import sys, os, tempfile
def main():
if len(sys.argv) != 2:
print "Please enter the listening port number of your app"
print "Usage: " + sys.argv[0] + " <port>"
return
fd, name = tempfile.mkstemp()
os.close(fd)
os.system("adb devices > " + name)
f = open(name, "r")
for line in f:
if "offline" in line:
print "Some AVDs are offline. Please try again."
f.close()
os.remove(name)
return
f.seek(0)
lport = sys.argv[1]
cnt = 0
for line in f:
if not "emulator" in line:
continue
emu_name = line.split()[0].strip()
port = str(int(emu_name.split("-")[1]) * 2)
cmd = "adb -s " + emu_name + " forward tcp:" + port + " tcp:" + lport
print cmd
os.system(cmd)
cnt = cnt + 1
if cnt == 0:
print "There is no AVD running"
f.close()
os.remove(name)
if __name__ == "__main__":
main()
| asarraf/GroupMessenger | Test Scripts/set_redir.py | Python | mit | 905 |
#!/usr/bin/env python
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2011 thomasv@gitorious
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import
import android
from electrum import SimpleConfig, Wallet, WalletStorage, format_satoshis, mnemonic_encode, mnemonic_decode
from electrum.bitcoin import is_valid
from electrum import util
from decimal import Decimal
import datetime, re
def modal_dialog(title, msg = None):
    """Show a blocking alert dialog with a single OK button."""
    droid.dialogCreateAlert(title,msg)
    droid.dialogSetPositiveButtonText('OK')
    droid.dialogShow()
    droid.dialogGetResponse()
    droid.dialogDismiss()

def modal_input(title, msg, value = None, etype=None):
    """Prompt for a value with OK/Cancel buttons.

    Returns the entered value on OK, None on Cancel. `etype` is the
    android input type (e.g. "number", "numberDecimal").
    """
    droid.dialogCreateInput(title, msg, value, etype)
    droid.dialogSetPositiveButtonText('OK')
    droid.dialogSetNegativeButtonText('Cancel')
    droid.dialogShow()
    response = droid.dialogGetResponse()
    result = response.result
    droid.dialogDismiss()
    if result is None:
        # droid sometimes returns no result at all; re-prompt until it does
        print "modal input: result is none"
        return modal_input(title, msg, value, etype)
    if result.get('which') == 'positive':
        return result.get('value')

def modal_question(q, msg, pos_text = 'OK', neg_text = 'Cancel'):
    """Yes/no dialog; True iff the positive button was pressed."""
    droid.dialogCreateAlert(q, msg)
    droid.dialogSetPositiveButtonText(pos_text)
    droid.dialogSetNegativeButtonText(neg_text)
    droid.dialogShow()
    response = droid.dialogGetResponse()
    result = response.result
    droid.dialogDismiss()
    if result is None:
        # same droid quirk as modal_input: retry on missing result
        print "modal question: result is none"
        return modal_question(q,msg, pos_text, neg_text)
    return result.get('which') == 'positive'

def edit_label(addr):
    """Prompt for a new wallet label for `addr`; empty input deletes it."""
    v = modal_input('Edit label',None,wallet.labels.get(addr))
    if v is not None:
        if v:
            wallet.labels[addr] = v
        else:
            if addr in wallet.labels.keys():
                wallet.labels.pop(addr)
        wallet.update_tx_history()
        wallet.save()
        droid.fullSetProperty("labelTextView", "text", v)
def select_from_contacts():
    """List the addressbook in a dialog.

    Returns the picked address, the string 'newcontact' when the user
    pressed the "New contact" button, or None on dismissal.
    """
    title = 'Contacts:'
    droid.dialogCreateAlert(title)
    l = []
    for i in range(len(wallet.addressbook)):
        addr = wallet.addressbook[i]
        label = wallet.labels.get(addr,addr)
        l.append( label )
    droid.dialogSetItems(l)
    droid.dialogSetPositiveButtonText('New contact')
    droid.dialogShow()
    response = droid.dialogGetResponse().result
    droid.dialogDismiss()
    if response.get('which') == 'positive':
        return 'newcontact'
    # 'item' is the index of the tapped entry, aligned with addressbook
    result = response.get('item')
    print result
    if result is not None:
        addr = wallet.addressbook[result]
        return addr

def select_from_addresses():
    """List the wallet's own addresses; return the picked one or None."""
    droid.dialogCreateAlert("Addresses:")
    l = []
    addresses = wallet.addresses()
    for i in range(len(addresses)):
        addr = addresses[i]
        label = wallet.labels.get(addr,addr)
        l.append( label )
    droid.dialogSetItems(l)
    droid.dialogShow()
    response = droid.dialogGetResponse()
    result = response.result.get('item')
    droid.dialogDismiss()
    if result is not None:
        addr = addresses[result]
        return addr
def protocol_name(p):
    """Return the display name for a one-letter protocol code.

    Unknown codes yield None, matching the fall-through of the
    original if-chain.
    """
    names = {'t': 'TCP', 'h': 'HTTP', 's': 'SSL', 'g': 'HTTPS'}
    return names.get(p)
def protocol_dialog(host, protocol, z):
    """Single-choice dialog to pick a protocol for `host`.

    `z` is the server's protocol->port mapping (or falsy for the full
    't'/'h'/'s'/'g' set). Returns the chosen protocol letter, or None
    on cancel/dismiss.
    """
    droid.dialogCreateAlert('Protocol',host)
    if z:
        protocols = z.keys()
    else:
        protocols = 'thsg'
    l = []
    current = protocols.index(protocol)
    for p in protocols:
        l.append(protocol_name(p))
    droid.dialogSetSingleChoiceItems(l, current)
    droid.dialogSetPositiveButtonText('OK')
    droid.dialogSetNegativeButtonText('Cancel')
    droid.dialogShow()
    response = droid.dialogGetResponse().result
    selected_item = droid.dialogGetSelectedItems().result
    droid.dialogDismiss()
    if not response: return
    if not selected_item: return
    if response.get('which') == 'positive':
        return protocols[selected_item[0]]
def make_layout(s, scrollable = False):
    """Wrap view-specific XML `s` in the standard page chrome.

    Adds the dark background and the "Electrum" title bar; when
    `scrollable` is true the content is additionally wrapped in a
    ScrollView. Returns the complete layout XML string.
    """
    content = """
      <LinearLayout 
        android:id="@+id/zz"
        android:layout_width="match_parent"
        android:layout_height="wrap_content" 
        android:background="#ff222222">
        <TextView
            android:id="@+id/textElectrum"
            android:text="Electrum"
            android:textSize="7pt"
            android:textColor="#ff4444ff"
            android:gravity="left"
            android:layout_height="wrap_content"
            android:layout_width="match_parent"
        />
    </LinearLayout>
    %s   """%s
    if scrollable:
        content = """
      <ScrollView 
        android:id="@+id/scrollview"
        android:layout_width="match_parent"
        android:layout_height="match_parent" >
      <LinearLayout
        android:orientation="vertical" 
        android:layout_width="match_parent"
        android:layout_height="wrap_content" >
      %s
      </LinearLayout>
      </ScrollView>
      """%content
    return """<?xml version="1.0" encoding="utf-8"?>
      <LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
        android:id="@+id/background"
        android:orientation="vertical" 
        android:layout_width="match_parent"
        android:layout_height="match_parent" 
        android:background="#ff000022">
      %s 
      </LinearLayout>"""%content

def main_layout():
    """Layout for the main screen: balance header + recent transactions."""
    return make_layout("""
        <TextView android:id="@+id/balanceTextView" 
                android:layout_width="match_parent"
                android:text=""
                android:textColor="#ffffffff"
                android:textAppearance="?android:attr/textAppearanceLarge" 
                android:padding="7dip"
                android:textSize="8pt"
                android:gravity="center_vertical|center_horizontal|left">
        </TextView>
        <TextView android:id="@+id/historyTextView" 
                android:layout_width="match_parent"
                android:layout_height="wrap_content" 
                android:text="Recent transactions"
                android:textAppearance="?android:attr/textAppearanceLarge" 
                android:gravity="center_vertical|center_horizontal|center">
        </TextView>
        %s """%get_history_layout(15),True)

def qr_layout(addr):
    """Layout showing `addr`, its QR bitmap (pre-rendered by make_bitmap
    to /sdcard/sl4a/qrcode.bmp) and its wallet label."""
    return make_layout("""
     <TextView android:id="@+id/addrTextView" 
                android:layout_width="match_parent"
                android:layout_height="50" 
                android:text="%s"
                android:textAppearance="?android:attr/textAppearanceLarge" 
                android:gravity="center_vertical|center_horizontal|center">
     </TextView> 
     <ImageView
        android:id="@+id/qrView"
        android:gravity="center"
        android:layout_width="match_parent"
        android:layout_height="350"
        android:antialias="false"
        android:src="file:///sdcard/sl4a/qrcode.bmp" /> 
     <TextView android:id="@+id/labelTextView" 
                android:layout_width="match_parent"
                android:layout_height="50" 
                android:text="%s"
                android:textAppearance="?android:attr/textAppearanceLarge" 
                android:gravity="center_vertical|center_horizontal|center">
     </TextView> 
     """%(addr,wallet.labels.get(addr,'')), True)

# Static layout for the "send" screen: recipient, label and amount fields.
payto_layout = make_layout("""
        <TextView android:id="@+id/recipientTextView" 
                android:layout_width="match_parent"
                android:layout_height="wrap_content" 
                android:text="Pay to:"
                android:textAppearance="?android:attr/textAppearanceLarge" 
                android:gravity="left">
        </TextView>
        <EditText android:id="@+id/recipient"
                android:layout_width="match_parent"
                android:layout_height="wrap_content" 
                android:tag="Tag Me" android:inputType="text">
        </EditText>
        <LinearLayout android:id="@+id/linearLayout1"
                android:layout_width="match_parent"
                android:layout_height="wrap_content">
                <Button android:id="@+id/buttonQR" android:layout_width="wrap_content"
                        android:layout_height="wrap_content" android:text="From QR code"></Button>
                <Button android:id="@+id/buttonContacts" android:layout_width="wrap_content"
                        android:layout_height="wrap_content" android:text="From Contacts"></Button>
        </LinearLayout>
        <TextView android:id="@+id/labelTextView" 
                android:layout_width="match_parent"
                android:layout_height="wrap_content" 
                android:text="Description:"
                android:textAppearance="?android:attr/textAppearanceLarge" 
                android:gravity="left">
        </TextView>
        <EditText android:id="@+id/label"
                android:layout_width="match_parent"
                android:layout_height="wrap_content" 
                android:tag="Tag Me" android:inputType="text">
        </EditText>
        <TextView android:id="@+id/amountLabelTextView" 
                android:layout_width="match_parent"
                android:layout_height="wrap_content" 
                android:text="Amount:"
                android:textAppearance="?android:attr/textAppearanceLarge" 
                android:gravity="left">
        </TextView>
        <EditText android:id="@+id/amount"
                android:layout_width="match_parent"
                android:layout_height="wrap_content" 
                android:tag="Tag Me" android:inputType="numberDecimal">
        </EditText>
        <LinearLayout android:layout_width="match_parent"
                android:layout_height="wrap_content" android:id="@+id/linearLayout1">
                <Button android:id="@+id/buttonPay" android:layout_width="wrap_content"
                        android:layout_height="wrap_content" android:text="Send"></Button>
        </LinearLayout>""",False)

# Static layout for the settings screen: a single ListView filled at runtime.
settings_layout = make_layout(""" <ListView
           android:id="@+id/myListView" 
           android:layout_width="match_parent"
           android:layout_height="wrap_content" />""")
def get_history_values(n):
    """Return up to `n` most-recent transactions as display tuples.

    Each tuple is (conf_marker, time_str, amount_str, label) where
    conf_marker is 'v' for confirmed and 'o' for unconfirmed; newest
    transaction first.
    """
    values = []
    h = wallet.get_tx_history()
    length = min(n, len(h))
    for i in range(length):
        tx_hash, conf, is_mine, value, fee, balance, timestamp = h[-i-1]
        try:
            dt = datetime.datetime.fromtimestamp( timestamp )
            # show only the time for today's transactions, the date otherwise
            if dt.date() == dt.today().date():
                time_str = str( dt.time() )
            else:
                time_str = str( dt.date() )
        except Exception:
            time_str = 'pending'
        conf_str = 'v' if conf else 'o'
        label, is_default_label = wallet.get_label(tx_hash)
        values.append((conf_str, '  ' + time_str, ' ' + format_satoshis(value,True), '  ' + label ))
    return values
def get_history_layout(n):
    """Build the TableLayout XML listing the last `n` transactions.

    One TableRow per transaction: confirmation marker (green when
    confirmed, red otherwise), time, amount and label.
    """
    row_template = """
        <TableRow>
          <TextView
            android:id="@+id/hl_%d_col1" 
            android:layout_column="0"
            android:text="%s"
            android:textColor="%s"
            android:padding="3" />
          <TextView
            android:id="@+id/hl_%d_col2" 
            android:layout_column="1"
            android:text="%s"
            android:padding="3" />
          <TextView
            android:id="@+id/hl_%d_col3" 
            android:layout_column="2"
            android:text="%s"
            android:padding="3" />
          <TextView
            android:id="@+id/hl_%d_col4" 
            android:layout_column="3"
            android:text="%s"
            android:padding="4" />
        </TableRow>"""
    rows = []
    for row_index, (marker, when, amount, label) in enumerate(get_history_values(n)):
        # green check for confirmed ('v'), red for pending
        marker_color = "#ff00ff00" if marker == 'v' else "#ffff0000"
        rows.append(row_template % (row_index, marker, marker_color,
                                    row_index, when,
                                    row_index, amount,
                                    row_index, label))
    return """
<TableLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="fill_parent"
    android:layout_height="wrap_content"
    android:stretchColumns="0,1,2,3">
    %s
    </TableLayout>""" % "".join(rows)
def set_history_layout(n):
    """Refresh the on-screen history table in place.

    Rewrites the text (and marker color) of the hl_* TextViews created
    by get_history_layout for the last `n` transactions.
    """
    for row_index, (marker, when, amount, label) in enumerate(get_history_values(n)):
        droid.fullSetProperty("hl_%d_col1" % row_index, "text", marker)
        # confirmed rows get a green marker, pending ones red
        if marker == 'v':
            droid.fullSetProperty("hl_%d_col1" % row_index, "textColor", "#ff00ff00")
        else:
            droid.fullSetProperty("hl_%d_col1" % row_index, "textColor", "#ffff0000")
        droid.fullSetProperty("hl_%d_col2" % row_index, "text", when)
        droid.fullSetProperty("hl_%d_col3" % row_index, "text", amount)
        droid.fullSetProperty("hl_%d_col4" % row_index, "text", label)
# Last status string shown in the balance header; used to detect changes.
status_text = ''

def update_layout():
    """Refresh the balance header and (when synced) the history table."""
    global status_text
    if not network.is_connected():
        text = "Not connected..."
    elif not wallet.up_to_date:
        text = "Synchronizing..."
    else:
        # c = confirmed balance, u = unconfirmed delta (shown in brackets)
        c, u = wallet.get_balance()
        text = "Balance:"+format_satoshis(c) 
        if u : text += '   [' + format_satoshis(u,True).strip() + ']'
    # vibrate if status changed
    if text != status_text:
        if status_text and network.is_connected() and wallet.up_to_date:
            droid.vibrate()
        status_text = text
    droid.fullSetProperty("balanceTextView", "text", status_text)
    if wallet.up_to_date:
        set_history_layout(15)
def pay_to(recipient, amount, fee, label):
    """Create, sign and broadcast a transaction.

    Prompts for the wallet password when encryption is on. Returns True
    on successful broadcast, None otherwise (errors are shown in dialogs).
    """
    if wallet.use_encryption:
        password  = droid.dialogGetPassword('Password').result
        if not password: return
    else:
        password = None
    droid.dialogCreateSpinnerProgress("Electrum", "signing transaction...")
    droid.dialogShow()
    try:
        tx = wallet.mktx( [(recipient, amount)], password, fee)
    except Exception as e:
        modal_dialog('error', e.message)
        droid.dialogDismiss()
        return
    if label: 
        wallet.labels[tx.hash()] = label
    droid.dialogDismiss()
    r, h = wallet.sendtx( tx )
    if r:
        modal_dialog('Payment sent', h)
        return True
    else:
        modal_dialog('Error', h)

def make_new_contact():
    """Scan a QR code and optionally add the decoded address to contacts.

    Accepts either a bare address or a bitcoin: URI.
    """
    code = droid.scanBarcode()
    r = code.result
    if r:
        data = r['extras']['SCAN_RESULT']
        if data:
            if re.match('^bitcoin:', data):
                address, _, _, _, _ = util.parse_URI(data)
            elif is_valid(data):
                address = data
            else:
                address = None
            if address:
                if modal_question('Add to contacts?', address):
                    wallet.add_contact(address)
            else:
                modal_dialog('Invalid address', data)

# Flag set by network callbacks; main_loop polls it to refresh the UI
# on its own thread.
do_refresh = False

def update_callback():
    """Network-thread callback: request a UI refresh and wake the loop."""
    global do_refresh
    print "gui callback", network.is_connected()
    do_refresh  = True
    droid.eventPost("refresh",'z')
def main_loop():
    """Event loop for the main screen.

    Polls droid events (1s timeout so refresh requests are noticed) and
    returns the name of the next screen to show ('send', 'receive',
    'contacts', 'settings') or 'quit'.
    """
    global do_refresh
    update_layout()
    out = None
    quitting = False
    while out is None:
        event = droid.eventWait(1000).result
        if event is None:
            # timeout: apply any refresh requested by network callbacks
            if do_refresh: 
                update_layout()
                do_refresh = False
            continue
        print "got event in main loop", repr(event)
        if event == 'OK': continue
        if event is None: continue
        if not event.get("name"): continue
        # request 2 taps before we exit
        if event["name"]=="key":
            if event["data"]["key"] == '4':
                if quitting:
                    out = 'quit'
                else: 
                    quitting = True
            else: quitting = False
        if event["name"]=="click":
            id=event["data"]["id"]
        elif event["name"]=="settings":
            out = 'settings'
        elif event["name"] in menu_commands:
            out = event["name"]
            if out == 'contacts':
                global contact_addr
                contact_addr = select_from_contacts()
                if contact_addr == 'newcontact':
                    make_new_contact()
                    contact_addr = None
                if not contact_addr:
                    out = None
            elif out == "receive":
                global receive_addr
                receive_addr = select_from_addresses()
                if receive_addr:
                    amount = modal_input('Amount', 'Amount you want receive. ', '', "numberDecimal")
                    if amount:
                        receive_addr = 'bitcoin:%s?amount=%s'%(receive_addr, amount)
                if not receive_addr:
                    out = None
    return out
def payto_loop():
    """Event loop for the send screen.

    Handles the Pay/Contacts/QR buttons; returns the next screen name
    ('main' after a successful payment or back-key press).
    """
    global recipient
    if recipient:
        droid.fullSetProperty("recipient","text",recipient)
        recipient = None
    out = None
    while out is None:
        event = droid.eventWait().result
        if not event: continue
        print "got event in payto loop", event
        if event == 'OK': continue
        if not event.get("name"): continue
        if event["name"] == "click":
            id = event["data"]["id"]
            if id=="buttonPay":
                droid.fullQuery()
                recipient = droid.fullQueryDetail("recipient").result.get('text')
                label  = droid.fullQueryDetail("label").result.get('text')
                amount = droid.fullQueryDetail('amount').result.get('text')
                if not is_valid(recipient):
                    modal_dialog('Error','Invalid Bitcoin address')
                    continue
                try:
                    # convert BTC to satoshis
                    amount = int( 100000000 * Decimal(amount) )
                except Exception:
                    modal_dialog('Error','Invalid amount')
                    continue
                result = pay_to(recipient, amount, wallet.fee, label)
                if result:
                    out = 'main'
            elif id=="buttonContacts":
                addr = select_from_contacts()
                droid.fullSetProperty("recipient","text",addr)
            elif id=="buttonQR":
                # fill the form from a scanned bitcoin: URI or bare address
                code = droid.scanBarcode()
                r = code.result
                if r:
                    data = r['extras']['SCAN_RESULT']
                    if data:
                        if re.match('^bitcoin:', data):
                            payto, amount, label, _, _ = util.parse_URI(data)
                            droid.fullSetProperty("recipient", "text",payto)
                            droid.fullSetProperty("amount", "text", amount)
                            droid.fullSetProperty("label", "text", label)
                        else:
                            droid.fullSetProperty("recipient", "text", data)
        elif event["name"] in menu_commands:
            out = event["name"]
        elif event["name"]=="key":
            if event["data"]["key"] == '4':
                out = 'main'
        #elif event["name"]=="screen":
        #    if event["data"]=="destroy":
        #        out = 'main'
    return out

# Screen-to-screen state shared by the event loops.
receive_addr = ''
contact_addr = ''
recipient = ''
def receive_loop():
    """Event loop for the receive (QR) screen; back key returns 'main'."""
    out = None
    while out is None:
        event = droid.eventWait().result
        print "got event", event
        if event["name"]=="key":
            if event["data"]["key"] == '4':
                out = 'main'
        elif event["name"]=="clipboard":
            droid.setClipboard(receive_addr)
            modal_dialog('Address copied to clipboard',receive_addr)
        elif event["name"]=="edit":
            edit_label(receive_addr)
    return out

def contacts_loop():
    """Event loop for the contact (QR) screen.

    Supports copy, relabel, pay-to (jumps to the send screen with the
    recipient prefilled) and delete.
    """
    global recipient
    out = None
    while out is None:
        event = droid.eventWait().result
        print "got event", event
        if event["name"]=="key":
            if event["data"]["key"] == '4':
                out = 'main'
        elif event["name"]=="clipboard":
            droid.setClipboard(contact_addr)
            modal_dialog('Address copied to clipboard',contact_addr)
        elif event["name"]=="edit":
            edit_label(contact_addr)
        elif event["name"]=="paytocontact":
            recipient = contact_addr
            out = 'send'
        elif event["name"]=="deletecontact":
            if modal_question('delete contact', contact_addr):
                out = 'main'
    return out
def server_dialog(servers):
    """Pick a server from the public list or type a private one.

    Returns the chosen hostname, or None on dismissal.
    """
    droid.dialogCreateAlert("Public servers")
    droid.dialogSetItems( servers.keys() )
    droid.dialogSetPositiveButtonText('Private server')
    droid.dialogShow()
    response = droid.dialogGetResponse().result
    droid.dialogDismiss()
    if not response: return
    if response.get('which') == 'positive':
        return modal_input('Private server', None)
    i = response.get('item')
    if i is not None:
        response = servers.keys()[i]
        return response

def show_seed():
    """Display the wallet seed (and its mnemonic) after a password check."""
    if wallet.use_encryption:
        password  = droid.dialogGetPassword('Seed').result
        if not password: return
    else:
        password = None
    try:
        seed = wallet.get_seed(password)
    except Exception:
        modal_dialog('error','incorrect password')
        return
    modal_dialog('Your seed is',seed)
    modal_dialog('Mnemonic code:', ' '.join(mnemonic_encode(seed)) )

def change_password_dialog():
    """Interactively change (or remove) the wallet password.

    Verifies the current password first; an empty new password leaves
    the wallet unencrypted. Returns True on success, None on abort/error.
    """
    if wallet.use_encryption:
        password  = droid.dialogGetPassword('Your wallet is encrypted').result
        if password is None: return
    else:
        password = None
    try:
        wallet.get_seed(password)
    except Exception:
        modal_dialog('error','incorrect password')
        return
    new_password  = droid.dialogGetPassword('Choose a password').result
    if new_password == None:
        return
    if new_password != '':
        password2  = droid.dialogGetPassword('Confirm new password').result
        if new_password != password2:
            modal_dialog('error','passwords do not match')
            return
    wallet.update_password(password, new_password)
    if new_password:
        modal_dialog('Password updated','your wallet is encrypted')
    else:
        modal_dialog('No password','your wallet is not encrypted')
    return True
def settings_loop():
    """Event loop for the settings screen.

    Shows a list of editable items (server, protocol, port, fee,
    password, seed) and applies changes; returns the next screen name.
    """

    def set_listview():
        # (Re)build the settings ListView from the current config values.
        host, port, p = network.default_server.split(':')
        fee = str( Decimal( wallet.fee)/100000000 )
        is_encrypted = 'yes' if wallet.use_encryption else 'no'
        protocol = protocol_name(p)
        droid.fullShow(settings_layout)
        droid.fullSetList("myListView",['Server: ' + host, 'Protocol: '+ protocol, 'Port: '+port, 'Transaction fee: '+fee, 'Password: '+is_encrypted, 'Seed'])

    set_listview()
    out = None
    while out is None:
        event = droid.eventWait()
        event = event.result
        print "got event", event
        if event == 'OK': continue
        if not event: continue
        servers = network.get_servers()
        name = event.get("name")
        if not name: continue
        if name == "itemclick":
            pos = event["data"]["position"]
            host, port, protocol = network.default_server.split(':')
            network_changed = False
            if pos == "0": #server
                host = server_dialog(servers)
                if host:
                    p = servers[host]
                    port = p[protocol]
                    network_changed = True
            elif pos == "1": #protocol
                if host in servers:
                    # NOTE(review): protocol_dialog can return None on
                    # cancel, which would make the lookup below fail --
                    # confirm intended behavior before changing.
                    protocol = protocol_dialog(host, protocol, servers[host])
                    z = servers[host]
                    port = z[protocol]
                    network_changed = True
            elif pos == "2": #port
                a_port = modal_input('Port number', 'If you use a public server, this field is set automatically when you set the protocol', port, "number")
                if a_port != port:
                    port = a_port
                    network_changed = True
            elif pos == "3": #fee
                fee = modal_input('Transaction fee', 'The fee will be this amount multiplied by the number of inputs in your transaction. ', str( Decimal( wallet.fee)/100000000 ), "numberDecimal")
                if fee:
                    try:
                        # convert BTC to satoshis
                        fee = int( 100000000 * Decimal(fee) )
                    except Exception:
                        modal_dialog('error','invalid fee value')
                    wallet.set_fee(fee)
                    set_listview()
            elif pos == "4":
                if change_password_dialog():
                    set_listview()
            elif pos == "5":
                show_seed()
            # apply any server/protocol/port change to the network layer
            if network_changed:
                proxy = None
                auto_connect = False
                try:
                    network.set_parameters(host, port, protocol, proxy, auto_connect)
                except Exception:
                    modal_dialog('error','invalid server')
                set_listview()
        elif name in menu_commands:
            out = event["name"]
        elif name == 'cancel':
            out = 'main'
        elif name == "key":
            if event["data"]["key"] == '4':
                out = 'main'
    return out
def add_menu(s):
    """Install the options menu appropriate for screen `s`.

    Menu items post droid events with the given name when tapped.
    """
    droid.clearOptionsMenu()
    if s == 'main':
        droid.addOptionsMenuItem("Send","send",None,"")
        droid.addOptionsMenuItem("Receive","receive",None,"")
        droid.addOptionsMenuItem("Contacts","contacts",None,"")
        droid.addOptionsMenuItem("Settings","settings",None,"")
    elif s == 'receive':
        droid.addOptionsMenuItem("Copy","clipboard",None,"")
        droid.addOptionsMenuItem("Label","edit",None,"")
    elif s == 'contacts':
        droid.addOptionsMenuItem("Copy","clipboard",None,"")
        droid.addOptionsMenuItem("Label","edit",None,"")
        droid.addOptionsMenuItem("Pay to","paytocontact",None,"")
        #droid.addOptionsMenuItem("Delete","deletecontact",None,"")

def make_bitmap(addr):
    """Render `addr` as a QR code bitmap at /sdcard/sl4a/qrcode.bmp.

    qr_layout() references that fixed path; a spinner is shown while
    rendering and dismissed even on failure.
    """
    # fixme: this is highly inefficient
    droid.dialogCreateSpinnerProgress("please wait")
    droid.dialogShow()
    try:
        import pyqrnative, bmp
        qr = pyqrnative.QRCode(4, pyqrnative.QRErrorCorrectLevel.L)
        qr.addData(addr)
        qr.make()
        k = qr.getModuleCount()
        # version-4 QR codes are always 33 modules wide
        assert k == 33
        bmp.save_qrcode(qr,"/sdcard/sl4a/qrcode.bmp")
    finally:
        droid.dialogDismiss()
# SL4A RPC proxy used by every UI helper in this module.
droid = android.Android()
# Event names that switch between top-level screens.
menu_commands = ["send", "receive", "settings", "contacts", "main"]

# Populated by ElectrumGui.__init__; module-level so the loop functions
# above can reach them.
wallet = None
network = None
class ElectrumGui:
    """Android (SL4A) front-end: wallet setup plus the screen state machine."""

    def __init__(self, config, _network):
        """Open or create/restore the wallet and hook network callbacks."""
        global wallet, network
        network = _network
        network.register_callback('updated', update_callback)
        network.register_callback('connected', update_callback)
        network.register_callback('disconnected', update_callback)
        network.register_callback('disconnecting', update_callback)
        
        storage = WalletStorage(config)
        if not storage.file_exists:
            action = self.restore_or_create()
            if not action: exit()

            wallet = Wallet(storage)
            if action == 'create':
                wallet.init_seed(None)
                self.show_seed()
                wallet.save_seed(None)
                # generate first addresses offline before going online
                wallet.synchronize()
            elif action == 'restore':
                seed = self.seed_dialog()
                if not seed:
                    exit()
                wallet.init_seed(str(seed))
                wallet.save_seed(None)
            else:
                exit()

            wallet.start_threads(network)

            if action == 'restore':
                if not self.restore_wallet():
                    exit()

            self.password_dialog()

        else:
            wallet = Wallet(storage)
            wallet.start_threads(network)

    def main(self, url):
        """Screen state machine: show each screen and run its event loop
        until the loop returns an unknown state ('quit')."""
        s = 'main'
        while True:
            add_menu(s)
            if s == 'main':
                droid.fullShow(main_layout())
                s = main_loop()
            elif s == 'send':
                droid.fullShow(payto_layout)
                s = payto_loop()
            elif s == 'receive':
                make_bitmap(receive_addr)
                droid.fullShow(qr_layout(receive_addr))
                s = receive_loop()
            elif s == 'contacts':
                make_bitmap(contact_addr)
                droid.fullShow(qr_layout(contact_addr))
                s = contacts_loop()
            elif s == 'settings':
                s = settings_loop()
            else:
                break

        droid.makeToast("Bye!")

    def restore_or_create(self):
        """Ask whether to create a new wallet or restore; None on cancel."""
        droid.dialogCreateAlert("Wallet not found","Do you want to create a new wallet, or restore an existing one?")
        droid.dialogSetPositiveButtonText('Create')
        droid.dialogSetNeutralButtonText('Restore')
        droid.dialogSetNegativeButtonText('Cancel')
        droid.dialogShow()
        response = droid.dialogGetResponse().result
        droid.dialogDismiss()
        if not response: return
        if response.get('which') == 'negative':
            return

        return 'restore' if response.get('which') == 'neutral' else 'create'

    def seed_dialog(self):
        """Obtain a seed via QR scan or typed mnemonic; return it as str."""
        if modal_question("Enter your seed","Input method",'QR Code', 'mnemonic'):
            code = droid.scanBarcode()
            r = code.result
            if r:
                seed = r['extras']['SCAN_RESULT']
            else:
                return
        else:
            m = modal_input('Mnemonic','please enter your code')
            try:
                seed = mnemonic_decode(m.split(' '))
            except Exception:
                modal_dialog('error: could not decode this seed')
                return

        return str(seed)

    def network_dialog(self):
        # No interactive network setup on Android; accept defaults.
        return True

    def show_seed(self):
        """Display the freshly created seed and its mnemonic encoding."""
        modal_dialog('Your seed is:', wallet.seed)
        modal_dialog('Mnemonic code:', ' '.join(mnemonic_encode(wallet.seed)) )

    def password_dialog(self):
        change_password_dialog()

    def restore_wallet(self):
        """Recover wallet history from the network.

        Returns False only when nothing was found and the user declines
        to keep the (empty) wallet.
        """

        msg = "recovering wallet..."
        droid.dialogCreateSpinnerProgress("Electrum", msg)
        droid.dialogShow()

        wallet.restore(lambda x: None)

        droid.dialogDismiss()
        droid.vibrate()

        if wallet.is_found():
            wallet.fill_addressbook()
            modal_dialog("recovery successful")
        else:
            if not modal_question("no transactions found for this seed","do you want to keep this wallet?"):
                return False

        return True
| mazaclub/electrum-nmc | gui/android.py | Python | gpl-3.0 | 31,265 |
#!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
"""
Make sure SWIG works when a VariantDir (or variant_dir) is used.
Test case courtesy Joe Maruszewski.
"""
import os.path
import sys
import TestSCons
# Functional test: drives a real scons build of a SWIG/C++ Python module
# from a VariantDir, then imports and runs it.
test = TestSCons.TestSCons()

# Skip cleanly when swig is not installed on this machine.
swig = test.where_is('swig')
if not swig:
    test.skip_test('Can not find installed "swig", skipping test.\n')

# swig-python expects specific filenames.
# the platform specific suffix won't necessarily work.
if sys.platform == 'win32':
    _dll = '.dll'
else:
    _dll   = '.so' 

test.subdir(['source'])

# Locate the Python used to run this test so the extension links
# against the matching headers and library.
python, python_include, python_libpath, python_lib = \
             test.get_platform_python_info()
Python_h = os.path.join(python_include, 'Python.h')
if not os.path.exists(Python_h):
    test.skip_test('Can not find %s, skipping test.\n' % Python_h)

# Top-level build: configures SWIG for python/c++ and builds the
# source/ tree into build/ via variant_dir.
test.write(['SConstruct'], """\
#
# Create the build environment.
#
env = Environment(CPPPATH = [".", r'%(python_include)s'],
                  CPPDEFINES = "NDEBUG",
                  SWIG = [r'%(swig)s'],
                  SWIGFLAGS = ["-python", "-c++"],
                  SWIGCXXFILESUFFIX = "_wrap.cpp",
                  LDMODULEPREFIX='_',
                  LDMODULESUFFIX='%(_dll)s',
                  LIBPATH=[r'%(python_libpath)s'],
                  LIBS='%(python_lib)s',
                  )
Export("env")

#
# Build the libraries.
#
SConscript("source/SConscript", variant_dir = "build")
""" % locals())

test.write(['source', 'SConscript'], """\
Import("env")
lib = env.SharedLibrary("_linalg",
                        "linalg.i",
                        SHLIBPREFIX = "",
                        SHLIBSUFFIX = ".pyd")
""")

# Minimal C++ class wrapped by the SWIG interface below.
test.write(['source', 'Vector.hpp'], """\
class Vector
{
 public:
  Vector(int size = 0) : _size(size)
  {
    _v = new double[_size];
    for (int i = 0; i < _size; ++i)
      _v[i] = 0.0;
  }

  ~Vector() { delete [] _v; }

  int size() const { return _size; }

  double&        operator[](int key)       { return _v[key]; }
  double const&  operator[](int key) const { return _v[key]; }

 private:
  int     _size;
  double* _v;
};
""")

# SWIG interface: wraps Vector and adds __str__/__iter__ on the Python side.
test.write(['source', 'linalg.i'], """\
%module linalg
%{
#include <sstream>
#include "Vector.hpp"
%}

class Vector
{
public:
  Vector(int n = 0);
  ~Vector();

  %extend
  {
    const char* __str__() { return "linalg.Vector()"; }

    %pythoncode %{
    def __iter__(self):
        for i in range(len(self)):
            yield self[i]
    %}
  }
};
""")

test.write(['source', 'test.py'], """\
#!/usr/bin/env python
import linalg


x = linalg.Vector(5)
print x

x[1] = 99.5
x[3] = 8.3
x[4] = 11.1


for i, v in enumerate(x):
    print "\tx[%d] = %g" % (i, v)

""")

# Build everything, then confirm a second run does no work.
test.run(arguments = '.')

test.up_to_date(arguments = '.')

test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| Distrotech/scons | test/SWIG/build-dir.py | Python | mit | 3,948 |
'''
This file is part of GEAR.
GEAR is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with this program. If not, see <http://www.gnu.org/licenses/lgpl.html>.
Author: Jeremie Passerin geerem@hotmail.com
Url: http://gear.jeremiepasserin.com
Date: 2010 / 11 / 15
'''
## @package gear.xsi.rig.component.tail_01
# @author Jeremie Passerin
#
##########################################################
# GLOBAL
##########################################################
import os
from gear.xsi import xsi, c, dynDispatch, XSIFactory, XSIMath
from gear.xsi.rig.component import MainComponent
import gear.xsi.ppg as ppg
import gear.xsi.parameter as par
import gear.xsi.primitive as pri
import gear.xsi.transform as tra
import gear.xsi.icon as icon
import gear.xsi.vector as vec
import gear.xsi.applyop as aop
##########################################################
# COMPONENT
##########################################################
## The main component class.
class Component(MainComponent):
    # =====================================================
    # OBJECTS
    # =====================================================
    ## Add all the objects needed to create the component.
    # @param self
    def addObjects(self):
        # Orientation vectors come from the guide's blade.
        self.normal = self.guide.blades["blade"].z
        self.binormal = self.guide.blades["blade"].x
        # One spring division per guide segment.
        self.division = len(self.guide.apos)-1
        # FK controlers ------------------------------------
        # One cube controler per segment, parented in a chain. Each cube
        # is as long as its segment and offset by half that length so it
        # spans from its guide position to the next one.
        self.fk_ctl = []
        parent = self.root
        for i, t in enumerate(tra.getChainTransform(self.guide.apos, self.normal, self.negate)):
            dist = vec.getDistance(self.guide.apos[i], self.guide.apos[i+1])
            # n_factor flips the offset for negated (mirrored) components.
            fk_ctl = self.addCtl(parent, "fk%s_ctl"%i, t, self.color_fk, "cube", w=dist, h=self.size*.25, d=self.size*.25, po=XSIMath.CreateVector3(dist*.5*self.n_factor,0,0))
            xsi.SetNeutralPose(fk_ctl, c.siTrn)
            par.setKeyableParameters(fk_ctl)
            par.addLocalParamToCollection(self.inv_params, fk_ctl, ["posx", "posy", "posz"])
            parent = fk_ctl
            self.fk_ctl.append(fk_ctl)
        xsi.SetNeutralPose(self.fk_ctl[0])
        # Chain -------------------------------------------
        # One single-segment 2D chain per division; the spring operator
        # is applied to each chain's effector in addOperators().
        parent = self.fk_ctl[0]
        self.chain = []
        for i in range(self.division):
            pos = [self.guide.apos[i], self.guide.apos[i+1]]
            chain = pri.add2DChain(parent, self.getName("spring%s"%i), pos, self.normal, self.negate, self.size*.25, True)
            self.addToGroup(chain.all, "hidden")
            # Reparent the effector under a dedicated null so the spring
            # operator can drive it independently of the chain solve.
            eff_ref = pri.addNull(chain.root, self.getName("eff%s_ref"%i), chain.eff.Kinematics.Global.Transform, self.size*.1)
            eff_ref.AddChild(chain.eff)
            self.addToGroup(eff_ref, "hidden")
            # The first bone of each chain drives the deformer (shadow).
            self.addShadow(chain.bones[0], i)
            self.chain.append(chain)
            parent = chain.bones[0]
        # Plot Reference ----------------------------------
        # Null chain following the solved bones; used as the source when
        # plotting the spring motion back onto the FK controlers.
        # ("#" in the name is presumably replaced per index by getName —
        # TODO confirm against MainComponent.)
        self.ref = pri.addNullChain(self.root, self.getName("#_ref"), self.guide.apos, self.normal, self.negate, self.size * .1)
        self.addToGroup(self.ref, "hidden")
        xsi.SetNeutralPose(self.ref[0])
    # =====================================================
    # PROPERTY
    # =====================================================
    ## Add parameters to the anim and setup properties to control the component.
    # @param self
    def addParameters(self):
        # Anim -------------------------------------------
        # Default parameter to get a better display in the keying panel
        self.pFullName = self.addAnimParam(self.fullName, c.siString, self.fullName, None, None, None, None, False, True, True)
        self.pSpeed = self.addAnimParam("speed", c.siDouble, self.settings["speed"], 0, None, 0, 1)
        self.pDamping = self.addAnimParam("damping", c.siDouble, self.settings["damping"], 0, None, 0, 1)
        # With per-division blending, the global blend defaults to 1 and
        # the stored blend value seeds each per-division parameter.
        if self.settings["multi_blend"]:
            default = 1
        else:
            default = self.settings["blend"]
        self.pMainBlend = self.addAnimParam("main_blend", c.siDouble, default, 0, None, 0, 1)
        if self.settings["multi_blend"]:
            self.pBlend = [ self.addAnimParam("blend_%s"%i, c.siDouble, self.settings["blend"], 0, None, 0, 1) for i in range(self.division)]
    ## Define the layout of the anim and setup properties.
    # @param self
    def addLayout(self):
        # Anim -------------------------------------------
        tab = self.anim_layout.addTab(self.name)
        group = tab.addGroup("Spring")
        row = group.addRow()
        row.addSpacer()
        row.addButton("plot", "Plot to Controler")
        group.addItem(self.pSpeed.ScriptName, "Speed")
        group.addItem(self.pDamping.ScriptName, "Damping")
        group.addItem(self.pMainBlend.ScriptName, "Global Blend")
        if self.settings["multi_blend"]:
            group = group.addGroup("Blend")
            for i, pBlend in enumerate(self.pBlend):
                group.addItem(pBlend.ScriptName, "Blend %s"%i)
    ## Define the logic of the anim and setup properties.
    # @param self
    def addLogic(self):
        self.anim_logic.addGlobalCode("import gear.xsi.rig.component.logic as logic\r\nreload(logic)")
        # The "plot" button bakes the simulated spring motion onto the
        # FK controlers via the logic helper module.
        self.anim_logic.addOnClicked("plot",
                                     "logic.plotSpringToControler('"+self.fullName+"', PPG.Inspected(0))")
        return
    # =====================================================
    # OPERATORS
    # =====================================================
    ## Apply operators, constraints, expressions to the hierarchy.\n
    # In order to keep the code clean and easier to debug,
    # we shouldn't create any new object in this method.
    # @param self
    def addOperators(self):
        for i, chain in enumerate(self.chain):
            # Spring operator on each chain effector.
            op = aop.sn_xfspring_op(chain.eff, 2)
            par.addExpression(op.Parameters("speed"), self.pSpeed.FullName)
            # Damping slider is scaled down (* .1) to a usable range.
            par.addExpression(op.Parameters("damping"), self.pDamping.FullName + "* .1")
            if self.settings["multi_blend"]:
                par.addExpression(op.Parameters("scale"), self.pMainBlend.FullName+" * "+self.pBlend[i].FullName)
            else:
                par.addExpression(op.Parameters("scale"), self.pMainBlend.FullName)
            chain.root.Kinematics.AddConstraint("Orientation", self.fk_ctl[i], False)
            # The plot-reference nulls follow the solved bones.
            self.ref[i].Kinematics.AddConstraint("Orientation", chain.bones[0])
        xsi.SetNeutralPose(self.ref[0])
    # =====================================================
    # CONNECTOR
    # =====================================================
    ## Set the relation beetween object from guide to rig.\n
    # @param self
    def setRelation(self):
        self.relatives["root"] = self.chain[0].bones[0]
        for i in range(1, len(self.chain)):
            self.relatives["%s_loc"%i] = self.chain[i].bones[0]
        # NOTE(review): the loop above already assigns this last index;
        # the line below re-assigns the same object (harmless, redundant).
        self.relatives["%s_loc"%(len(self.chain)-1)] = self.chain[-1].bones[0]
| miquelcampos/GEAR_mc | gear/xsi/rig/component/tail_01/__init__.py | Python | lgpl-3.0 | 7,585 |
"""
Django settings for drchrono project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'q+x9bat)@8dws7bue8x*q@yaa%%sn@c&%4#=9s6mno*@b#0ta4'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django_object_actions',
'uiforms',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'drchrono.urls'
WSGI_APPLICATION = 'drchrono.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
TEMPLATE_DIRS = (
os.path.join(BASE_DIR, 'templates'),
)
| bajubullet/drchrono | drchrono/settings.py | Python | mit | 2,156 |
from __future__ import absolute_import
from django.test import TestCase
from .models import Person
class SaveDeleteHookTests(TestCase):
    def test_basic(self):
        """The pre/post save and delete hooks fire in order and record
        their activity in ``data``.
        """
        person = Person(first_name="John", last_name="Smith")
        self.assertEqual(person.data, [])

        person.save()
        self.assertEqual(
            person.data,
            ["Before save", "After save"],
        )
        self.assertQuerysetEqual(
            Person.objects.all(), ["John Smith"], unicode
        )

        person.delete()
        self.assertEqual(
            person.data,
            ["Before save", "After save", "Before deletion", "After deletion"],
        )
        self.assertQuerysetEqual(Person.objects.all(), [])
| LethusTI/supportcenter | vendor/django/tests/modeltests/save_delete_hooks/tests.py | Python | gpl-3.0 | 761 |
# -*- coding: utf-8 -*-
# This file is part of beets.
# Copyright 2016, Adrian Sampson.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
"""This module implements a string formatter based on the standard PEP
292 string.Template class extended with function calls. Variables, as
with string.Template, are indicated with $ and functions are delimited
with %.
This module assumes that everything is Unicode: the template and the
substitution values. Bytestrings are not supported. Also, the templates
always behave like the ``safe_substitute`` method in the standard
library: unknown symbols are left intact.
This is sort of like a tiny, horrible degeneration of a real templating
engine like Jinja2 or Mustache.
"""
from __future__ import division, absolute_import, print_function
import re
import ast
import dis
import types
import sys
import six
# Characters with special meaning in the template syntax.
SYMBOL_DELIM = u'$'    # introduces a variable reference
FUNC_DELIM = u'%'      # introduces a function call
GROUP_OPEN = u'{'      # opens a symbol group or argument list
GROUP_CLOSE = u'}'     # closes a symbol group or argument list
ARG_SEP = u','         # separates function arguments
ESCAPE_CHAR = u'$'     # escapes the following special character
# Prefixes used to mangle template identifiers into the parameter names
# of the compiled Python function (see Template.translate).
VARIABLE_PREFIX = '__var_'
FUNCTION_PREFIX = '__func_'
class Environment(object):
    """The substitution context for a template: a mapping of symbol
    names to values and a mapping of function names to callables.
    """
    def __init__(self, values, functions):
        self.values = values
        self.functions = functions
# Code generation helpers.
def ex_lvalue(name):
    """A variable store expression: a name used as an assignment target.
    (The old docstring said "load", which was swapped with ex_rvalue's;
    this node carries an ``ast.Store`` context.)
    """
    return ast.Name(name, ast.Store())
def ex_rvalue(name):
    """A variable load expression: a name whose value is being read.
    (The old docstring said "store", which was swapped with ex_lvalue's;
    this node carries an ``ast.Load`` context.)
    """
    return ast.Name(name, ast.Load())
def ex_literal(val):
    """An int, float, long, bool, string, or None literal with the given
    value.
    """
    if val is None:
        return ast.Name('None', ast.Load())
    elif isinstance(val, bool):
        # bool must be tested before the integer types: bool is a
        # subclass of int, so the integer branch would otherwise capture
        # True/False. (The previous, unreachable bool branch also used
        # bytes(val), which yields b'\x01'-style junk on Python 3 rather
        # than the name 'True'/'False'.)
        return ast.Name(str(val), ast.Load())
    elif isinstance(val, six.integer_types + (float,)):
        # floats were advertised by the docstring but previously fell
        # through to TypeError; ast.Num handles both ints and floats.
        return ast.Num(val)
    elif isinstance(val, six.string_types):
        return ast.Str(val)
    raise TypeError(u'no literal for {0}'.format(type(val)))
def ex_varassign(name, expr):
    """Assign an expression into a single variable. The expression may
    either be an `ast.expr` object or a value to be used as a literal.
    """
    value = expr if isinstance(expr, ast.expr) else ex_literal(expr)
    return ast.Assign([ex_lvalue(name)], value)
def ex_call(func, args):
    """A function-call expression with only positional parameters. The
    function may be an expression or the name of a function. Each
    argument may be an expression or a value to be used as a literal.
    """
    if isinstance(func, six.string_types):
        func = ex_rvalue(func)
    # Coerce every non-AST argument into a literal node.
    args = [a if isinstance(a, ast.expr) else ex_literal(a) for a in args]
    # ast.Call lost its starargs/kwargs fields in Python 3.5.
    if sys.version_info[:2] < (3, 5):
        return ast.Call(func, args, [], None, None)
    return ast.Call(func, args, [])
def compile_func(arg_names, statements, name='_the_func', debug=False):
    """Compile a list of statements as the body of a function and return
    the resulting Python function. If `debug`, then print out the
    bytecode of the compiled function.
    Every argument is given a default of None so missing values do not
    raise at call time.
    """
    # The ast.arguments/FunctionDef signatures differ between Python 2
    # and Python 3, so build the definition per interpreter.
    if six.PY2:
        func_def = ast.FunctionDef(
            name=name.encode('utf-8'),
            args=ast.arguments(
                args=[ast.Name(n, ast.Param()) for n in arg_names],
                vararg=None,
                kwarg=None,
                defaults=[ex_literal(None) for _ in arg_names],
            ),
            body=statements,
            decorator_list=[],
        )
    else:
        func_def = ast.FunctionDef(
            name=name,
            args=ast.arguments(
                args=[ast.arg(arg=n, annotation=None) for n in arg_names],
                kwonlyargs=[],
                kw_defaults=[],
                defaults=[ex_literal(None) for _ in arg_names],
            ),
            body=statements,
            decorator_list=[],
        )
    # Wrap the def in a module, fill in source locations (required by
    # compile), and compile to a code object.
    mod = ast.Module([func_def])
    ast.fix_missing_locations(mod)
    prog = compile(mod, '<generated>', 'exec')
    # Debug: show bytecode.
    if debug:
        dis.dis(prog)
        for const in prog.co_consts:
            if isinstance(const, types.CodeType):
                dis.dis(const)
    # Execute the module body (a single def) and pull the function out
    # of the resulting local namespace.
    the_locals = {}
    exec(prog, {}, the_locals)
    return the_locals[name]
# AST nodes for the template language.
class Symbol(object):
    """A variable-substitution symbol in a template."""
    def __init__(self, ident, original):
        self.ident = ident
        self.original = original

    def __repr__(self):
        return u'Symbol(%s)' % repr(self.ident)

    def evaluate(self, env):
        """Evaluate the symbol in the environment, returning a Unicode
        string.
        """
        if self.ident not in env.values:
            # Unknown symbol: keep the original text intact.
            return self.original
        return env.values[self.ident]

    def translate(self):
        """Compile the variable lookup."""
        # Python 2 requires byte-string argument names in the generated
        # function.
        ident = self.ident.encode('utf-8') if six.PY2 else self.ident
        return [ex_rvalue(VARIABLE_PREFIX + ident)], set([ident]), set()
class Call(object):
    """A function call in a template."""
    def __init__(self, ident, args, original):
        self.ident = ident
        self.args = args
        self.original = original
    def __repr__(self):
        return u'Call(%s, %s, %s)' % (repr(self.ident), repr(self.args),
                                      repr(self.original))
    def evaluate(self, env):
        """Evaluate the function call in the environment, returning a
        Unicode string.
        """
        if self.ident in env.functions:
            # Arguments are themselves Expressions; render each first.
            arg_vals = [expr.evaluate(env) for expr in self.args]
            try:
                out = env.functions[self.ident](*arg_vals)
            except Exception as exc:
                # Function raised exception! Maybe inlining the name of
                # the exception will help debug.
                return u'<%s>' % six.text_type(exc)
            return six.text_type(out)
        else:
            # Unknown function: keep the original text intact.
            return self.original
    def translate(self):
        """Compile the function call."""
        varnames = set()
        # Python 2 requires byte-string argument names in the generated
        # function.
        if six.PY2:
            ident = self.ident.encode('utf-8')
        else:
            ident = self.ident
        funcnames = set([ident])
        arg_exprs = []
        for arg in self.args:
            subexprs, subvars, subfuncs = arg.translate()
            varnames.update(subvars)
            funcnames.update(subfuncs)
            # Create a subexpression that joins the result components of
            # the arguments. Generated code is equivalent to:
            #   u''.join(map(unicode, [<subexprs>]))
            arg_exprs.append(ex_call(
                ast.Attribute(ex_literal(u''), 'join', ast.Load()),
                [ex_call(
                    'map',
                    [
                        ex_rvalue(six.text_type.__name__),
                        ast.List(subexprs, ast.Load()),
                    ]
                )],
            ))
        # Call the mangled function name with the rendered arguments.
        subexpr_call = ex_call(
            FUNCTION_PREFIX + ident,
            arg_exprs
        )
        return [subexpr_call], varnames, funcnames
class Expression(object):
    """Top-level template construct: contains a list of text blobs,
    Symbols, and Calls.
    """
    def __init__(self, parts):
        self.parts = parts

    def __repr__(self):
        return u'Expression(%s)' % (repr(self.parts))

    def evaluate(self, env):
        """Evaluate the entire expression in the environment, returning
        a Unicode string.
        """
        rendered = [
            part if isinstance(part, six.string_types) else part.evaluate(env)
            for part in self.parts
        ]
        return u''.join(six.text_type(piece) for piece in rendered)

    def translate(self):
        """Compile the expression to a list of Python AST expressions, a
        set of variable names used, and a set of function names.
        """
        expressions = []
        varnames = set()
        funcnames = set()
        for part in self.parts:
            if isinstance(part, six.string_types):
                expressions.append(ex_literal(part))
                continue
            subexprs, subvars, subfuncs = part.translate()
            expressions.extend(subexprs)
            varnames.update(subvars)
            funcnames.update(subfuncs)
        return expressions, varnames, funcnames
# Parser.
class ParseError(Exception):
    """An error while parsing a template expression."""
    pass
class Parser(object):
    """Parses a template expression string. Instantiate the class with
    the template source and call ``parse_expression``. The ``pos`` field
    will indicate the character after the expression finished and
    ``parts`` will contain a list of Unicode strings, Symbols, and Calls
    reflecting the concatenated portions of the expression.
    This is a terrible, ad-hoc parser implementation based on a
    left-to-right scan with no lexing step to speak of; it's probably
    both inefficient and incorrect. Maybe this should eventually be
    replaced with a real, accepted parsing technique (PEG, parser
    generator, etc.).
    """
    def __init__(self, string, in_argument=False):
        """ Create a new parser.
        :param in_arguments: boolean that indicates the parser is to be
        used for parsing function arguments, ie. considering commas
        (`ARG_SEP`) a special character
        """
        self.string = string
        self.in_argument = in_argument
        self.pos = 0          # current scan position in `string`
        self.parts = []       # accumulated strings, Symbols, and Calls
    # Common parsing resources.
    special_chars = (SYMBOL_DELIM, FUNC_DELIM, GROUP_OPEN, GROUP_CLOSE,
                     ESCAPE_CHAR)
    # Matches the next special character, or the end of the string.
    special_char_re = re.compile(r'[%s]|\Z' %
                                 u''.join(re.escape(c) for c in special_chars))
    escapable_chars = (SYMBOL_DELIM, FUNC_DELIM, GROUP_CLOSE, ARG_SEP)
    terminator_chars = (GROUP_CLOSE,)
    def parse_expression(self):
        """Parse a template expression starting at ``pos``. Resulting
        components (Unicode strings, Symbols, and Calls) are added to
        the ``parts`` field, a list. The ``pos`` field is updated to be
        the next character after the expression.
        """
        # Append comma (ARG_SEP) to the list of special characters only when
        # parsing function arguments.
        extra_special_chars = ()
        special_char_re = self.special_char_re
        if self.in_argument:
            extra_special_chars = (ARG_SEP,)
            special_char_re = re.compile(
                r'[%s]|\Z' % u''.join(
                    re.escape(c) for c in
                    self.special_chars + extra_special_chars
                )
            )
        # Characters accumulated for the current literal text run.
        text_parts = []
        while self.pos < len(self.string):
            char = self.string[self.pos]
            if char not in self.special_chars + extra_special_chars:
                # A non-special character. Skip to the next special
                # character, treating the interstice as literal text.
                next_pos = (
                    special_char_re.search(
                        self.string[self.pos:]).start() + self.pos
                )
                text_parts.append(self.string[self.pos:next_pos])
                self.pos = next_pos
                continue
            if self.pos == len(self.string) - 1:
                # The last character can never begin a structure, so we
                # just interpret it as a literal character (unless it
                # terminates the expression, as with , and }).
                if char not in self.terminator_chars + extra_special_chars:
                    text_parts.append(char)
                    self.pos += 1
                break
            next_char = self.string[self.pos + 1]
            if char == ESCAPE_CHAR and next_char in (self.escapable_chars +
                                                     extra_special_chars):
                # An escaped special character ($$, $}, etc.). Note that
                # ${ is not an escape sequence: this is ambiguous with
                # the start of a symbol and it's not necessary (just
                # using { suffices in all cases).
                text_parts.append(next_char)
                self.pos += 2  # Skip the next character.
                continue
            # Shift all characters collected so far into a single string.
            if text_parts:
                self.parts.append(u''.join(text_parts))
                text_parts = []
            if char == SYMBOL_DELIM:
                # Parse a symbol.
                self.parse_symbol()
            elif char == FUNC_DELIM:
                # Parse a function call.
                self.parse_call()
            elif char in self.terminator_chars + extra_special_chars:
                # Template terminated.
                break
            elif char == GROUP_OPEN:
                # Start of a group has no meaning here; just pass
                # through the character.
                text_parts.append(char)
                self.pos += 1
            else:
                assert False
        # If any parsed characters remain, shift them into a string.
        if text_parts:
            self.parts.append(u''.join(text_parts))
    def parse_symbol(self):
        """Parse a variable reference (like ``$foo`` or ``${foo}``)
        starting at ``pos``. Possibly appends a Symbol object (or,
        failing that, text) to the ``parts`` field and updates ``pos``.
        The character at ``pos`` must, as a precondition, be ``$``.
        """
        assert self.pos < len(self.string)
        assert self.string[self.pos] == SYMBOL_DELIM
        if self.pos == len(self.string) - 1:
            # Last character.
            self.parts.append(SYMBOL_DELIM)
            self.pos += 1
            return
        next_char = self.string[self.pos + 1]
        start_pos = self.pos
        self.pos += 1
        if next_char == GROUP_OPEN:
            # A symbol like ${this}.
            self.pos += 1  # Skip opening.
            closer = self.string.find(GROUP_CLOSE, self.pos)
            if closer == -1 or closer == self.pos:
                # No closing brace found or identifier is empty.
                self.parts.append(self.string[start_pos:self.pos])
            else:
                # Closer found.
                ident = self.string[self.pos:closer]
                self.pos = closer + 1
                self.parts.append(Symbol(ident,
                                         self.string[start_pos:self.pos]))
        else:
            # A bare-word symbol.
            ident = self._parse_ident()
            if ident:
                # Found a real symbol.
                self.parts.append(Symbol(ident,
                                         self.string[start_pos:self.pos]))
            else:
                # A standalone $.
                self.parts.append(SYMBOL_DELIM)
    def parse_call(self):
        """Parse a function call (like ``%foo{bar,baz}``) starting at
        ``pos``. Possibly appends a Call object to ``parts`` and update
        ``pos``. The character at ``pos`` must be ``%``.
        """
        assert self.pos < len(self.string)
        assert self.string[self.pos] == FUNC_DELIM
        start_pos = self.pos
        self.pos += 1
        ident = self._parse_ident()
        if not ident:
            # No function name.
            self.parts.append(FUNC_DELIM)
            return
        if self.pos >= len(self.string):
            # Identifier terminates string.
            self.parts.append(self.string[start_pos:self.pos])
            return
        if self.string[self.pos] != GROUP_OPEN:
            # Argument list not opened.
            self.parts.append(self.string[start_pos:self.pos])
            return
        # Skip past opening brace and try to parse an argument list.
        self.pos += 1
        args = self.parse_argument_list()
        if self.pos >= len(self.string) or \
                self.string[self.pos] != GROUP_CLOSE:
            # Arguments unclosed.
            self.parts.append(self.string[start_pos:self.pos])
            return
        self.pos += 1  # Move past closing brace.
        self.parts.append(Call(ident, args, self.string[start_pos:self.pos]))
    def parse_argument_list(self):
        """Parse a list of arguments starting at ``pos``, returning a
        list of Expression objects. Does not modify ``parts``. Should
        leave ``pos`` pointing to a } character or the end of the
        string.
        """
        # Try to parse a subexpression in a subparser.
        expressions = []
        while self.pos < len(self.string):
            # Each argument is parsed by a fresh Parser with ARG_SEP
            # treated as a terminator (in_argument=True).
            subparser = Parser(self.string[self.pos:], in_argument=True)
            subparser.parse_expression()
            # Extract and advance past the parsed expression.
            expressions.append(Expression(subparser.parts))
            self.pos += subparser.pos
            if self.pos >= len(self.string) or \
                    self.string[self.pos] == GROUP_CLOSE:
                # Argument list terminated by EOF or closing brace.
                break
            # Only other way to terminate an expression is with ,.
            # Continue to the next argument.
            assert self.string[self.pos] == ARG_SEP
            self.pos += 1
        return expressions
    def _parse_ident(self):
        """Parse an identifier and return it (possibly an empty string).
        Updates ``pos``.
        """
        # \w* matches zero or more word characters, so this never fails.
        remainder = self.string[self.pos:]
        ident = re.match(r'\w*', remainder).group(0)
        self.pos += len(ident)
        return ident
def _parse(template):
    """Parse a top-level template string into an Expression. Any
    extraneous text is considered literal text.
    """
    parser = Parser(template)
    parser.parse_expression()
    # Whatever the parser did not consume becomes trailing literal text.
    leftover = parser.string[parser.pos:]
    if leftover:
        parser.parts.append(leftover)
    return Expression(parser.parts)
# External interface.
class Template(object):
    """A string template, including text, Symbols, and Calls.
    """
    def __init__(self, template):
        self.expr = _parse(template)
        self.original = template
        # Compile eagerly; substitute() uses this fast path and falls
        # back to the interpreter only on error.
        self.compiled = self.translate()
    def __eq__(self, other):
        # NOTE(review): assumes `other` is also a Template; comparing
        # with another type raises AttributeError instead of returning
        # NotImplemented.
        return self.original == other.original
    def interpret(self, values={}, functions={}):
        """Like `substitute`, but forces the interpreter (rather than
        the compiled version) to be used. The interpreter includes
        exception-handling code for missing variables and buggy template
        functions but is much slower.
        """
        return self.expr.evaluate(Environment(values, functions))
    def substitute(self, values={}, functions={}):
        """Evaluate the template given the values and functions.
        The mutable default arguments are safe here: they are only read,
        never mutated.
        """
        try:
            res = self.compiled(values, functions)
        except Exception:  # Handle any exceptions thrown by compiled version.
            res = self.interpret(values, functions)
        return res
    def translate(self):
        """Compile the template to a Python function."""
        expressions, varnames, funcnames = self.expr.translate()
        # Mangle the template identifiers into valid, collision-free
        # Python parameter names.
        argnames = []
        for varname in varnames:
            argnames.append(VARIABLE_PREFIX + varname)
        for funcname in funcnames:
            argnames.append(FUNCTION_PREFIX + funcname)
        # The generated function returns the list of rendered parts; the
        # wrapper below joins them into a single Unicode string.
        func = compile_func(
            argnames,
            [ast.Return(ast.List(expressions, ast.Load()))],
        )
        def wrapper_func(values={}, functions={}):
            # A missing variable or function raises KeyError here, which
            # substitute() catches to fall back to the interpreter.
            args = {}
            for varname in varnames:
                args[VARIABLE_PREFIX + varname] = values[varname]
            for funcname in funcnames:
                args[FUNCTION_PREFIX + funcname] = functions[funcname]
            parts = func(**args)
            return u''.join(parts)
        return wrapper_func
# Performance tests.
if __name__ == '__main__':
    import timeit
    # Compare the interpreted path against the compiled path on the same
    # template, values, and functions.
    _tmpl = Template(u'foo $bar %baz{foozle $bar barzle} $bar')
    _vars = {'bar': 'qux'}
    _funcs = {'baz': six.text_type.upper}
    interp_time = timeit.timeit('_tmpl.interpret(_vars, _funcs)',
                                'from __main__ import _tmpl, _vars, _funcs',
                                number=10000)
    print(interp_time)
    comp_time = timeit.timeit('_tmpl.substitute(_vars, _funcs)',
                              'from __main__ import _tmpl, _vars, _funcs',
                              number=10000)
    print(comp_time)
    print(u'Speedup:', interp_time / comp_time)
| clinton-hall/nzbToMedia | libs/common/beets/util/functemplate.py | Python | gpl-3.0 | 21,110 |
#!/usr/bin/python
# Generate the IPC server stubs for the "playlist" object.
import sys
# Make the shared waf build helpers importable.
sys.path.append('../waftools')
from genipc_server import build
build('playlist', 'xmms_playlist_t *')
| oneman/xmms2-oneman | src/xmms/playlist_ipc.py | Python | lgpl-2.1 | 133 |
'''
Use the Github API to get the most recent Gist for a list of users
Created on 5 Nov 2019
@author: si
'''
from datetime import datetime
import sys
import requests
class OctoGist:
    """Find and report the most recent public gist for GitHub users."""

    def __init__(self):
        self.base_url = 'https://api.github.com'
        # Maximum page size allowed by the GitHub API.
        self.items_per_page = 100
        self.gist_path = (f'{self.base_url}/users/'
                          '{username}'
                          f'/gists?per_page={self.items_per_page}'
                          )
        # A Session gives HTTP/1.1 keep-alive across requests.
        self.requests_session = requests.Session()
        self.get_headers = {'Accept': 'application/vnd.github.v3+json'}

    def go(self, usernames):
        """
        Report each user's latest public gist, newest first, then note
        users without any public gists.
        :param: usernames (str) comma separated list of user names
        """
        # Sort order doesn't exist on the per-user gist endpoint (only on
        # /search/), so find the latest entry by scanning all docs.
        target_field = 'created_at'  # or could be 'updated_at'
        target_users = usernames.split(',')
        latest_gist = {}
        for username in target_users:
            for gist_doc in self.gist_documents(username):
                if username not in latest_gist \
                or gist_doc[target_field] > latest_gist[username][target_field]:
                    latest_gist[username] = gist_doc
        # Overall sort across all users, newest first.
        one_gist_per_user = [(username, gist) for username, gist in latest_gist.items()]
        one_gist_per_user.sort(key=lambda g: g[1][target_field], reverse=True)
        for username, gist in one_gist_per_user:
            # description is optional
            gist_desc = f"said something about {gist['description']}" \
                        if gist['description'] else "wrote a gist"
            self.log(f"{username} @ {gist[target_field]} {gist_desc}")
        for username in target_users:
            if username not in latest_gist:
                self.log(f"{username} hasn't ever written a public gist")

    def gist_documents(self, username, max_docs=None):
        """
        Generator yielding (dict) as returned from github API
        :param: username (str)
        :param: max_docs (int or None) None for no limit
        """
        # Bug fix: the Accept header built in __init__ was never sent.
        r = self.requests_session.get(self.gist_path.format(username=username),
                                      headers=self.get_headers)
        if r.status_code != 200:
            self.log(f"Couldn't get gists for {username}", "ERROR")
            return
        docs_fetched = 0
        for d in r.json():
            docs_fetched += 1
            yield d
            if docs_fetched >= self.items_per_page:
                # this will only print once
                # TODO pagination
                msg = (f"TODO pagination not enabled so gists by user:{username} might have be "
                       f"skipped as they have written more than {self.items_per_page} gists."
                       )
                self.log(msg, "WARNING")
            # Bug fix: '>' here yielded max_docs + 1 documents; '>='
            # stops exactly at the requested limit.
            if max_docs is not None and docs_fetched >= max_docs:
                return

    def log(self, msg, level="INFO"):
        """
        Dependency injection ready logger.
        :param: msg (str)
        :param: level (str) , DEBUG, INFO, WARNING, ERROR, CRITICAL
        """
        log_time = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")
        level_just = level.ljust(10)
        line = f"{log_time} {level_just}{msg}"
        if level in ('ERROR', 'CRITICAL'):
            # Bug fix: sys.stderr.write doesn't append a newline the way
            # print() does, so add one explicitly.
            sys.stderr.write(line + "\n")
        else:
            print(line)
if __name__ == '__main__':
    # Exactly one argument: the comma separated list of usernames.
    if len(sys.argv) != 2:
        msg = "usage: python gogo_octogist.py <comma separated github usernames>\n"
        sys.stderr.write(msg)
        sys.exit(1)
    o = OctoGist()
    o.go(sys.argv[1])
| caffeinate/test-pylot | octo_gist/gogo_octogist.py | Python | mit | 3,870 |
#!/usr/bin/env python
from wsgiref.simple_server import make_server
import sys
import json
import traceback
import datetime
from multiprocessing import Process
from getopt import getopt, GetoptError
from jsonrpcbase import JSONRPCService, InvalidParamsError, KeywordError,\
JSONRPCError, ServerError, InvalidRequestError
from os import environ
from ConfigParser import ConfigParser
from biokbase import log
import biokbase.nexus
import requests as _requests
import urlparse as _urlparse
import random as _random
import os
# Environment variables naming the deployment config file and this
# service's section within it.
DEPLOY = 'KB_DEPLOYMENT_CONFIG'
SERVICE = 'KB_SERVICE_NAME'
# Note that the error fields do not match the 2.0 JSONRPC spec
def get_config_file():
    """Path of the deployment config file, taken from the
    KB_DEPLOYMENT_CONFIG environment variable (None when unset).
    """
    return environ.get(DEPLOY)
def get_service_name():
    """Service name taken from the KB_SERVICE_NAME environment variable
    (None when unset).
    """
    return environ.get(SERVICE)
def get_config():
    """Read this service's section of the deployment config into a plain
    dict, or return None when no config file is configured.
    """
    cfg_file = get_config_file()
    if not cfg_file:
        return None
    parser = ConfigParser()
    parser.read(cfg_file)
    section = get_service_name() or 'CompareGeneContent'
    return {name: value for name, value in parser.items(section)}
config = get_config()
from CompareGeneContent.CompareGeneContentImpl import CompareGeneContent
impl_CompareGeneContent = CompareGeneContent(config)
class JSONObjectEncoder(json.JSONEncoder):
    """JSON encoder that also serializes sets, frozensets, and any
    object exposing a toJSONable() method.
    """
    def default(self, obj):
        if isinstance(obj, (set, frozenset)):
            return list(obj)
        if hasattr(obj, 'toJSONable'):
            return obj.toJSONable()
        return json.JSONEncoder.default(self, obj)
# Dispatch registries mapping JSON-RPC method names to implementation
# info. sync_methods marks methods allowed to run synchronously; the
# *_async / *_check variants submit to and poll the async job service.
sync_methods = {}
async_run_methods = {}
async_check_methods = {}
async_run_methods['CompareGeneContent.compare_gene_content_async'] = ['CompareGeneContent', 'compare_gene_content']
async_check_methods['CompareGeneContent.compare_gene_content_check'] = ['CompareGeneContent', 'compare_gene_content']
sync_methods['CompareGeneContent.compare_gene_content'] = True
class AsyncJobServiceClient(object):
    # Minimal JSON-RPC 1.1 client for the KBase asynchronous job service.
    def __init__(self, timeout=30 * 60, token=None,
                 ignore_authrc=True, trust_all_ssl_certificates=False):
        # Service URL: environment variable wins, then the deploy config.
        url = environ.get('KB_JOB_SERVICE_URL', None)
        if url is None and config is not None:
            url = config.get('job-service-url')
        if url is None:
            raise ValueError('Neither \'job-service-url\' parameter is defined in '+
                    'configuration nor \'KB_JOB_SERVICE_URL\' variable is defined in system')
        scheme, _, _, _, _, _ = _urlparse.urlparse(url)
        if scheme not in ['http', 'https']:
            raise ValueError(url + " isn't a valid http url")
        self.url = url
        self.timeout = int(timeout)
        self._headers = dict()
        self.trust_all_ssl_certificates = trust_all_ssl_certificates
        # A token is mandatory: all async methods are authenticated.
        if token is None:
            raise ValueError('Authentication is required for async methods')
        self._headers['AUTHORIZATION'] = token
        if self.timeout < 1:
            raise ValueError('Timeout value must be at least 1 second')
    def _call(self, method, params, json_rpc_call_context = None):
        # Build the JSON-RPC 1.1 envelope; the random id is only used to
        # correlate the response.
        arg_hash = {'method': method,
                    'params': params,
                    'version': '1.1',
                    'id': str(_random.random())[2:]
                    }
        if json_rpc_call_context:
            arg_hash['context'] = json_rpc_call_context
        body = json.dumps(arg_hash, cls=JSONObjectEncoder)
        ret = _requests.post(self.url, data=body, headers=self._headers,
                             timeout=self.timeout,
                             verify=not self.trust_all_ssl_certificates)
        # 500 responses may carry a structured JSON-RPC error object.
        if ret.status_code == _requests.codes.server_error:
            if 'content-type' in ret.headers and ret.headers['content-type'] == 'application/json':
                err = json.loads(ret.text)
                if 'error' in err:
                    raise ServerError(**err['error'])
                else:
                    raise ServerError('Unknown', 0, ret.text)
            else:
                raise ServerError('Unknown', 0, ret.text)
        # Any other non-200 status becomes a requests HTTPError.
        if ret.status_code != _requests.codes.OK:
            ret.raise_for_status()
        resp = json.loads(ret.text)
        if 'result' not in resp:
            raise ServerError('Unknown', 0, 'An unknown server error occurred')
        return resp['result']
    def run_job(self, run_job_params, json_rpc_call_context = None):
        # Submit a job; returns the job id (first element of the result).
        return self._call('KBaseJobService.run_job', [run_job_params], json_rpc_call_context)[0]
    def check_job(self, job_id, json_rpc_call_context = None):
        # Poll a submitted job's status by id.
        return self._call('KBaseJobService.check_job', [job_id], json_rpc_call_context)[0]
class JSONRPCServiceCustom(JSONRPCService):
    def call(self, ctx, jsondata):
        """
        Calls jsonrpc service's method and returns its return value in a JSON
        string or None if there is none.
        Arguments:
        jsondata -- remote method call in jsonrpc format
        """
        result = self.call_py(ctx, jsondata)
        if result is not None:
            return json.dumps(result, cls=JSONObjectEncoder)
        # None means a notification: no response should be sent.
        return None
def _call_method(self, ctx, request):
"""Calls given method with given params and returns it value."""
method = self.method_data[request['method']]['method']
params = request['params']
result = None
try:
if isinstance(params, list):
# Does it have enough arguments?
if len(params) < self._man_args(method) - 1:
raise InvalidParamsError('not enough arguments')
# Does it have too many arguments?
if(not self._vargs(method) and len(params) >
self._max_args(method) - 1):
raise InvalidParamsError('too many arguments')
result = method(ctx, *params)
elif isinstance(params, dict):
# Do not accept keyword arguments if the jsonrpc version is
# not >=1.1.
if request['jsonrpc'] < 11:
raise KeywordError
result = method(ctx, **params)
else: # No params
result = method(ctx)
except JSONRPCError:
raise
except Exception as e:
# log.exception('method %s threw an exception' % request['method'])
# Exception was raised inside the method.
newerr = ServerError()
newerr.trace = traceback.format_exc()
newerr.data = e.__str__()
raise newerr
return result
def call_py(self, ctx, jsondata):
"""
Calls jsonrpc service's method and returns its return value in python
object format or None if there is none.
This method is same as call() except the return value is a python
object instead of JSON string. This method is mainly only useful for
debugging purposes.
"""
rdata = jsondata
# we already deserialize the json string earlier in the server code, no
# need to do it again
# try:
# rdata = json.loads(jsondata)
# except ValueError:
# raise ParseError
# set some default values for error handling
request = self._get_default_vals()
if isinstance(rdata, dict) and rdata:
# It's a single request.
self._fill_request(request, rdata)
respond = self._handle_request(ctx, request)
# Don't respond to notifications
if respond is None:
return None
return respond
elif isinstance(rdata, list) and rdata:
# It's a batch.
requests = []
responds = []
for rdata_ in rdata:
# set some default values for error handling
request_ = self._get_default_vals()
self._fill_request(request_, rdata_)
requests.append(request_)
for request_ in requests:
respond = self._handle_request(ctx, request_)
# Don't respond to notifications
if respond is not None:
responds.append(respond)
if responds:
return responds
# Nothing to respond.
return None
else:
# empty dict, list or wrong type
raise InvalidRequestError
def _handle_request(self, ctx, request):
"""Handles given request and returns its response."""
if self.method_data[request['method']].has_key('types'): # @IgnorePep8
self._validate_params_types(request['method'], request['params'])
result = self._call_method(ctx, request)
# Do not respond to notifications.
if request['id'] is None:
return None
respond = {}
self._fill_ver(request['jsonrpc'], respond)
respond['result'] = result
respond['id'] = request['id']
return respond
class MethodContext(dict):
    """Per-request call context: a dict of request metadata plus helpers
    that forward log messages (tagged with that metadata) to a logger."""

    def __init__(self, logger):
        # Every context starts with the full set of metadata keys unset.
        for key in ('client_ip', 'user_id', 'authenticated', 'token',
                    'module', 'method', 'call_id', 'rpc_context',
                    'provenance'):
            self[key] = None
        # Levels accepted verbatim by log_debug (numeric 7-9 or names).
        self._debug_levels = set([7, 8, 9, 'DEBUG', 'DEBUG2', 'DEBUG3'])
        self._logger = logger

    def log_err(self, message):
        """Log *message* at error severity."""
        self._log(log.ERR, message)

    def log_info(self, message):
        """Log *message* at info severity."""
        self._log(log.INFO, message)

    def log_debug(self, message, level=1):
        """Log a debug message; *level* is 1-3 (mapped to 7-9) or one of
        the recognized debug level names/numbers."""
        if level not in self._debug_levels:
            level = int(level)
            if not 1 <= level <= 3:
                raise ValueError("Illegal log level: " + str(level))
            level += 6
        self._log(level, message)

    def set_log_level(self, level):
        """Set the logger's level."""
        self._logger.set_log_level(level)

    def get_log_level(self):
        """Return the logger's current level."""
        return self._logger.get_log_level()

    def clear_log_level(self):
        """Reset any user-set log level on the logger."""
        self._logger.clear_user_log_level()

    def _log(self, level, message):
        # Tag each message with the call metadata held in this context.
        self._log_helper(level, message)

    def _log_helper(self, level, message):
        self._logger.log_message(level, message, self['client_ip'],
                                 self['user_id'], self['module'],
                                 self['method'], self['call_id'])
def getIPAddress(environ):
    """Best-effort client IP: honor X-Forwarded-For / X-Real-IP unless the
    config disables trusting those headers, else fall back to REMOTE_ADDR."""
    trust_x_headers = config is None or \
        config.get('dont_trust_x_ip_headers') != 'true'
    if trust_x_headers:
        forwarded = environ.get('HTTP_X_FORWARDED_FOR')
        if forwarded:
            # first entry in the chain is the originating client
            return forwarded.split(',')[0].strip()
        real_ip = environ.get('HTTP_X_REAL_IP')
        if real_ip:
            return real_ip.strip()
    return environ.get('REMOTE_ADDR')
class Application(object):
    # Wrap the wsgi handler in a class definition so that we can
    # do some initialization and avoid regenerating stuff over
    # and over
    def logcallback(self):
        # Keep the server log writing to the same file as the user log
        # whenever the user log's file changes.
        self.serverlog.set_log_file(self.userlog.get_log_file())
    def log(self, level, context, message):
        """Write *message* to the server log, tagged with the call context."""
        self.serverlog.log_message(level, message, context['client_ip'],
                                   context['user_id'], context['module'],
                                   context['method'], context['call_id'])
    def __init__(self):
        submod = get_service_name() or 'CompareGeneContent'
        self.userlog = log.log(
            submod, ip_address=True, authuser=True, module=True, method=True,
            call_id=True, changecallback=self.logcallback,
            config=get_config_file())
        self.serverlog = log.log(
            submod, ip_address=True, authuser=True, module=True, method=True,
            call_id=True, logfile=self.userlog.get_log_file())
        self.serverlog.set_log_level(6)
        self.rpc_service = JSONRPCServiceCustom()
        # method name -> 'required' | 'optional' | 'none'
        self.method_authentication = dict()
        self.rpc_service.add(impl_CompareGeneContent.compare_gene_content,
                             name='CompareGeneContent.compare_gene_content',
                             types=[basestring, basestring, basestring])
        self.method_authentication['CompareGeneContent.compare_gene_content'] = 'required'
        self.auth_client = biokbase.nexus.Client(
            config={'server': 'nexus.api.globusonline.org',
                    'verify_ssl': True,
                    'client': None,
                    'client_secret': None})
    def __call__(self, environ, start_response):
        """WSGI entry point: parse the JSON-RPC request, authenticate,
        dispatch (synchronously or via the async job service) and return
        the JSON response body."""
        # Context object, equivalent to the perl impl CallContext
        ctx = MethodContext(self.userlog)
        ctx['client_ip'] = getIPAddress(environ)
        status = '500 Internal Server Error'
        try:
            body_size = int(environ.get('CONTENT_LENGTH', 0))
        except (ValueError):
            body_size = 0
        if environ['REQUEST_METHOD'] == 'OPTIONS':
            # we basically do nothing and just return headers
            status = '200 OK'
            rpc_result = ""
        else:
            request_body = environ['wsgi.input'].read(body_size)
            try:
                req = json.loads(request_body)
            except ValueError as ve:
                err = {'error': {'code': -32700,
                                 'name': "Parse error",
                                 'message': str(ve),
                                 }
                       }
                rpc_result = self.process_error(err, ctx, {'version': '1.1'})
            else:
                ctx['module'], ctx['method'] = req['method'].split('.')
                ctx['call_id'] = req['id']
                ctx['rpc_context'] = {'call_stack': [{'time':self.now_in_utc(), 'method': req['method']}]}
                prov_action = {'service': ctx['module'], 'method': ctx['method'],
                               'method_params': req['params']}
                ctx['provenance'] = [prov_action]
                try:
                    token = environ.get('HTTP_AUTHORIZATION')
                    # parse out the method being requested and check if it
                    # has an authentication requirement
                    method_name = req['method']
                    # async wrappers map back to the underlying method for
                    # the authentication lookup
                    if method_name in async_run_methods:
                        method_name = async_run_methods[method_name][0] + "." + async_run_methods[method_name][1]
                    if method_name in async_check_methods:
                        method_name = async_check_methods[method_name][0] + "." + async_check_methods[method_name][1]
                    auth_req = self.method_authentication.get(method_name,
                                                              "none")
                    if auth_req != "none":
                        if token is None and auth_req == 'required':
                            err = ServerError()
                            err.data = "Authentication required for " + \
                                "CompareGeneContent but no authentication header was passed"
                            raise err
                        elif token is None and auth_req == 'optional':
                            pass
                        else:
                            try:
                                user, _, _ = \
                                    self.auth_client.validate_token(token)
                                ctx['user_id'] = user
                                ctx['authenticated'] = 1
                                ctx['token'] = token
                            except Exception, e:
                                # optional auth tolerates a bad token;
                                # required auth does not
                                if auth_req == 'required':
                                    err = ServerError()
                                    err.data = \
                                        "Token validation failed: %s" % e
                                    raise err
                    if (environ.get('HTTP_X_FORWARDED_FOR')):
                        self.log(log.INFO, ctx, 'X-Forwarded-For: ' +
                                 environ.get('HTTP_X_FORWARDED_FOR'))
                    method_name = req['method']
                    if method_name in async_run_methods or method_name in async_check_methods:
                        if method_name in async_run_methods:
                            orig_method_pair = async_run_methods[method_name]
                        else:
                            orig_method_pair = async_check_methods[method_name]
                        orig_method_name = orig_method_pair[0] + '.' + orig_method_pair[1]
                        # async execution is only allowed for methods that
                        # require authentication
                        if 'required' != self.method_authentication.get(orig_method_name, 'none'):
                            err = ServerError()
                            err.data = 'Async method ' + orig_method_name + ' should require ' + \
                                'authentication, but it has authentication level: ' + \
                                self.method_authentication.get(orig_method_name, 'none')
                            raise err
                        job_service_client = AsyncJobServiceClient(token = ctx['token'])
                        if method_name in async_run_methods:
                            run_job_params = {
                                'method': orig_method_name,
                                'params': req['params']}
                            if 'rpc_context' in ctx:
                                run_job_params['rpc_context'] = ctx['rpc_context']
                            job_id = job_service_client.run_job(run_job_params)
                            respond = {'version': '1.1', 'result': [job_id], 'id': req['id']}
                            rpc_result = json.dumps(respond, cls=JSONObjectEncoder)
                            status = '200 OK'
                        else:
                            job_id = req['params'][0]
                            job_state = job_service_client.check_job(job_id)
                            finished = job_state['finished']
                            if finished != 0 and 'error' in job_state and job_state['error'] is not None:
                                err = {'error': job_state['error']}
                                rpc_result = self.process_error(err, ctx, req, None)
                            else:
                                respond = {'version': '1.1', 'result': [job_state], 'id': req['id']}
                                rpc_result = json.dumps(respond, cls=JSONObjectEncoder)
                                status = '200 OK'
                    elif method_name in sync_methods or (method_name + '_async') not in async_run_methods:
                        self.log(log.INFO, ctx, 'start method')
                        rpc_result = self.rpc_service.call(ctx, req)
                        self.log(log.INFO, ctx, 'end method')
                        status = '200 OK'
                    else:
                        err = ServerError()
                        err.data = 'Method ' + method_name + ' cannot be run synchronously'
                        raise err
                except JSONRPCError as jre:
                    err = {'error': {'code': jre.code,
                                     'name': jre.message,
                                     'message': jre.data
                                     }
                           }
                    trace = jre.trace if hasattr(jre, 'trace') else None
                    rpc_result = self.process_error(err, ctx, req, trace)
                except Exception, e:
                    err = {'error': {'code': 0,
                                     'name': 'Unexpected Server Error',
                                     'message': 'An unexpected server error ' +
                                                'occurred',
                                     }
                           }
                    rpc_result = self.process_error(err, ctx, req,
                                                    traceback.format_exc())
        # print 'The request method was %s\n' % environ['REQUEST_METHOD']
        # print 'The environment dictionary is:\n%s\n' % pprint.pformat(environ) @IgnorePep8
        # print 'The request body was: %s' % request_body
        # print 'The result from the method call is:\n%s\n' % \
        #    pprint.pformat(rpc_result)
        if rpc_result:
            response_body = rpc_result
        else:
            response_body = ''
        response_headers = [
            ('Access-Control-Allow-Origin', '*'),
            ('Access-Control-Allow-Headers', environ.get(
                'HTTP_ACCESS_CONTROL_REQUEST_HEADERS', 'authorization')),
            ('content-type', 'application/json'),
            ('content-length', str(len(response_body)))]
        start_response(status, response_headers)
        return [response_body]
    def process_error(self, error, context, request, trace=None):
        """Fill in id/version fields from the request, attach the traceback
        when available, log it and return the JSON-encoded error response."""
        if trace:
            self.log(log.ERR, context, trace.split('\n')[0:-1])
        if 'id' in request:
            error['id'] = request['id']
        if 'version' in request:
            error['version'] = request['version']
            if 'error' not in error['error'] or error['error']['error'] is None:
                error['error']['error'] = trace
        elif 'jsonrpc' in request:
            error['jsonrpc'] = request['jsonrpc']
            error['error']['data'] = trace
        else:
            error['version'] = '1.0'
            error['error']['error'] = trace
        return json.dumps(error)
    def now_in_utc(self):
        """Return the current local time in ISO format with its UTC offset."""
        # Taken from http://stackoverflow.com/questions/3401428/how-to-get-an-isoformat-datetime-string-including-the-default-timezone
        dtnow = datetime.datetime.now()
        dtutcnow = datetime.datetime.utcnow()
        delta = dtnow - dtutcnow
        hh,mm = divmod((delta.days * 24*60*60 + delta.seconds + 30) // 60, 60)
        return "%s%+02d:%02d" % (dtnow.isoformat(), hh, mm)
# Module-level singleton used by both uwsgi and the standalone server below.
application = Application()
# This is the uwsgi application dictionary. On startup uwsgi will look
# for this dict and pull its configuration from here.
# This simply lists where to "mount" the application in the URL path
#
# This uwsgi module "magically" appears when running the app within
# uwsgi and is not available otherwise, so wrap an exception handler
# around it
#
# To run this server in uwsgi with 4 workers listening on port 9999 use:
# uwsgi -M -p 4 --http :9999 --wsgi-file _this_file_
# To run a using the single threaded python BaseHTTP service
# listening on port 9999 by default execute this file
#
try:
    import uwsgi
    # Before we do anything with the application, see if the
    # configs specify patching all std routines to be asynch
    # *ONLY* use this if you are going to wrap the service in
    # a wsgi container that has enabled gevent, such as
    # uwsgi with the --gevent option
    if config is not None and config.get('gevent_monkeypatch_all', False):
        print "Monkeypatching std libraries for async"
        from gevent import monkey
        monkey.patch_all()
    uwsgi.applications = {
        '': application
    }
except ImportError:
    # Not available outside of wsgi, ignore
    pass
# Handle to the child Process when the server is run with newprocess=True.
_proc = None
def start_server(host='localhost', port=0, newprocess=False):
    '''
    By default, will start the server on localhost on a system assigned port
    in the main thread. Excecution of the main thread will stay in the server
    main loop until interrupted. To run the server in a separate process, and
    thus allow the stop_server method to be called, set newprocess = True. This
    will also allow returning of the port number.'''
    global _proc
    if _proc:
        raise RuntimeError('server is already running')
    httpd = make_server(host, port, application)
    # port may have been 0 (system-assigned); read back the real one
    port = httpd.server_address[1]
    print "Listening on port %s" % port
    if newprocess:
        _proc = Process(target=httpd.serve_forever)
        # daemonize so the child dies with the parent process
        _proc.daemon = True
        _proc.start()
    else:
        httpd.serve_forever()
    return port
def stop_server():
    """Terminate the child process started via start_server(newprocess=True)."""
    global _proc
    _proc.terminate()
    _proc = None
def process_async_cli(input_file_path, output_file_path, token):
    """Execute one JSON-RPC request read from *input_file_path* and write
    the JSON response to *output_file_path*.

    Returns 0 on success, 500 if the response carries an error."""
    exit_code = 0
    with open(input_file_path) as data_file:
        req = json.load(data_file)
    # fill in defaults for optional JSON-RPC envelope fields
    if 'version' not in req:
        req['version'] = '1.1'
    if 'id' not in req:
        req['id'] = str(_random.random())[2:]
    ctx = MethodContext(application.userlog)
    if token:
        user, _, _ = application.auth_client.validate_token(token)
        ctx['user_id'] = user
        ctx['authenticated'] = 1
        ctx['token'] = token
    if 'context' in req:
        ctx['rpc_context'] = req['context']
    ctx['CLI'] = 1
    ctx['module'], ctx['method'] = req['method'].split('.')
    prov_action = {'service': ctx['module'], 'method': ctx['method'],
                   'method_params': req['params']}
    ctx['provenance'] = [prov_action]
    resp = None
    try:
        resp = application.rpc_service.call_py(ctx, req)
    except JSONRPCError as jre:
        trace = jre.trace if hasattr(jre, 'trace') else None
        resp = {'id': req['id'],
                'version': req['version'],
                'error': {'code': jre.code,
                          'name': jre.message,
                          'message': jre.data,
                          'error': trace}
                }
    except Exception, e:
        trace = traceback.format_exc()
        resp = {'id': req['id'],
                'version': req['version'],
                'error': {'code': 0,
                          'name': 'Unexpected Server Error',
                          'message': 'An unexpected server error occurred',
                          'error': trace}
                }
    if 'error' in resp:
        exit_code = 500
    with open(output_file_path, "w") as f:
        f.write(json.dumps(resp, cls=JSONObjectEncoder))
    return exit_code
if __name__ == "__main__":
    # CLI mode: <input.json> <output.json> [token | token-file]
    if len(sys.argv) >= 3 and len(sys.argv) <= 4 and os.path.isfile(sys.argv[1]):
        token = None
        if len(sys.argv) == 4:
            # the third argument may be a file containing the token
            if os.path.isfile(sys.argv[3]):
                with open(sys.argv[3]) as token_file:
                    token = token_file.read()
            else:
                token = sys.argv[3]
        sys.exit(process_async_cli(sys.argv[1], sys.argv[2], token))
    # Server mode: optional --port / --host flags
    try:
        opts, args = getopt(sys.argv[1:], "", ["port=", "host="])
    except GetoptError as err:
        # print help information and exit:
        print str(err)  # will print something like "option -a not recognized"
        sys.exit(2)
    port = 9999
    host = 'localhost'
    for o, a in opts:
        if o == '--port':
            port = int(a)
        elif o == '--host':
            host = a
            print "Host set to %s" % host
        else:
            assert False, "unhandled option"
    start_server(host=host, port=port)
    # print "Listening on port %s" % port
    # httpd = make_server( host, port, application)
    #
    # httpd.serve_forever()
| mdejongh/CompareGeneContent | lib/CompareGeneContent/CompareGeneContentServer.py | Python | mit | 26,989 |
# -*- coding: utf-8 -*-
u"""
==================================
Input and output (:mod:`scipy.io`)
==================================
.. currentmodule:: scipy.io
SciPy has many modules, classes, and functions available to read data
from and write data to a variety of file formats.
.. seealso:: `NumPy IO routines <https://www.numpy.org/devdocs/reference/routines.io.html>`__
MATLAB® files
=============
.. autosummary::
:toctree: generated/
loadmat - Read a MATLAB style mat file (version 4 through 7.1)
savemat - Write a MATLAB style mat file (version 4 through 7.1)
whosmat - List contents of a MATLAB style mat file (version 4 through 7.1)
IDL® files
==========
.. autosummary::
:toctree: generated/
readsav - Read an IDL 'save' file
Matrix Market files
===================
.. autosummary::
:toctree: generated/
mminfo - Query matrix info from Matrix Market formatted file
mmread - Read matrix from Matrix Market formatted file
mmwrite - Write matrix to Matrix Market formatted file
Unformatted Fortran files
===============================
.. autosummary::
:toctree: generated/
FortranFile - A file object for unformatted sequential Fortran files
FortranEOFError - Exception indicating the end of a well-formed file
FortranFormattingError - Exception indicating an inappropriate end
Netcdf
======
.. autosummary::
:toctree: generated/
netcdf_file - A file object for NetCDF data
netcdf_variable - A data object for the netcdf module
Harwell-Boeing files
====================
.. autosummary::
:toctree: generated/
hb_read -- read H-B file
hb_write -- write H-B file
Wav sound files (:mod:`scipy.io.wavfile`)
=========================================
.. module:: scipy.io.wavfile
.. autosummary::
:toctree: generated/
read
write
WavFileWarning
Arff files (:mod:`scipy.io.arff`)
=================================
.. module:: scipy.io.arff
.. autosummary::
:toctree: generated/
loadarff
MetaData
ArffError
ParseArffError
"""
from __future__ import division, print_function, absolute_import
# matfile read and write
from .matlab import loadmat, savemat, whosmat, byteordercodes
# netCDF file support
from .netcdf import netcdf_file, netcdf_variable
# Fortran file support
from ._fortran import FortranFile, FortranEOFError, FortranFormattingError
from .mmio import mminfo, mmread, mmwrite
from .idl import readsav
from .harwell_boeing import hb_read, hb_write
__all__ = [s for s in dir() if not s.startswith('_')]
from scipy._lib._testutils import PytestTester
test = PytestTester(__name__)
del PytestTester
| jor-/scipy | scipy/io/__init__.py | Python | bsd-3-clause | 2,636 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2010 Citrix Systems, Inc.
# Copyright 2011 Piston Cloud Computing, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Helper methods for operations related to the management of VM records and
their attributes like VDIs, VIFs, as well as their lookup functions.
"""
import json
import os
import pickle
import re
import sys
import tempfile
import time
import urllib
import uuid
from xml.dom import minidom
from nova import db
from nova import exception
from nova import flags
from nova.image import glance
from nova import log as logging
from nova import utils
from nova.compute import instance_types
from nova.compute import power_state
from nova.virt import disk
from nova.virt import images
from nova.virt.xenapi import HelperBase
from nova.virt.xenapi.volume_utils import StorageError
LOG = logging.getLogger("nova.virt.xenapi.vm_utils")
FLAGS = flags.FLAGS
flags.DEFINE_string('default_os_type', 'linux', 'Default OS type')
flags.DEFINE_integer('block_device_creation_timeout', 10,
                     'time to wait for a block device to be created')
flags.DEFINE_integer('max_kernel_ramdisk_size', 16 * 1024 * 1024,
                     'maximum size in bytes of kernel or ramdisk images')
# Map XenAPI power-state strings onto nova's power_state constants.
XENAPI_POWER_STATE = {
    'Halted': power_state.SHUTDOWN,
    'Running': power_state.RUNNING,
    'Paused': power_state.PAUSED,
    'Suspended': power_state.SUSPENDED,
    'Crashed': power_state.CRASHED}
# Disk geometry constants used when partitioning raw images.
SECTOR_SIZE = 512
MBR_SIZE_SECTORS = 63
MBR_SIZE_BYTES = MBR_SIZE_SECTORS * SECTOR_SIZE
# dom0 directory where guest kernel/ramdisk images are staged
KERNEL_DIR = '/boot/guest'
class ImageType:
    """
    Enumeration class for distinguishing different image types
        0 - kernel image (goes on dom0's filesystem)
        1 - ramdisk image (goes on dom0's filesystem)
        2 - disk image (local SR, partitioned by objectstore plugin)
        3 - raw disk image (local SR, NOT partitioned by plugin)
        4 - vhd disk image (local SR, NOT inspected by XS, PV assumed for
            linux, HVM assumed for Windows)
        5 - ISO disk image (local SR, NOT partitioned by plugin)
    """
    KERNEL = 0
    RAMDISK = 1
    DISK = 2
    DISK_RAW = 3
    DISK_VHD = 4
    DISK_ISO = 5
    _ids = (KERNEL, RAMDISK, DISK, DISK_RAW, DISK_VHD, DISK_ISO)

    KERNEL_STR = "kernel"
    RAMDISK_STR = "ramdisk"
    DISK_STR = "os"
    DISK_RAW_STR = "os_raw"
    DISK_VHD_STR = "vhd"
    DISK_ISO_STR = "iso"
    _strs = (KERNEL_STR, RAMDISK_STR, DISK_STR, DISK_RAW_STR, DISK_VHD_STR,
             DISK_ISO_STR)

    @classmethod
    def to_string(cls, image_type):
        """Return the string name for a numeric image type id, or None."""
        for type_id, type_str in zip(cls._ids, cls._strs):
            if type_id == image_type:
                return type_str
        return None

    @classmethod
    def from_string(cls, image_type_str):
        """Return the numeric id for an image type string name, or None."""
        for type_id, type_str in zip(cls._ids, cls._strs):
            if type_str == image_type_str:
                return type_id
        return None
class VMHelper(HelperBase):
"""
The class that wraps the helper methods together.
"""
@classmethod
def create_vm(cls, session, instance, kernel, ramdisk,
use_pv_kernel=False):
"""Create a VM record. Returns a Deferred that gives the new
VM reference.
the use_pv_kernel flag indicates whether the guest is HVM or PV
There are 3 scenarios:
1. Using paravirtualization, kernel passed in
2. Using paravirtualization, kernel within the image
3. Using hardware virtualization
"""
inst_type_id = instance.instance_type_id
instance_type = instance_types.get_instance_type(inst_type_id)
mem = str(long(instance_type['memory_mb']) * 1024 * 1024)
vcpus = str(instance_type['vcpus'])
rec = {
'actions_after_crash': 'destroy',
'actions_after_reboot': 'restart',
'actions_after_shutdown': 'destroy',
'affinity': '',
'blocked_operations': {},
'ha_always_run': False,
'ha_restart_priority': '',
'HVM_boot_params': {},
'HVM_boot_policy': '',
'is_a_template': False,
'memory_dynamic_min': mem,
'memory_dynamic_max': mem,
'memory_static_min': '0',
'memory_static_max': mem,
'memory_target': mem,
'name_description': '',
'name_label': instance.name,
'other_config': {'allowvssprovider': False},
'other_config': {},
'PCI_bus': '',
'platform': {'acpi': 'true', 'apic': 'true', 'pae': 'true',
'viridian': 'true', 'timeoffset': '0'},
'PV_args': '',
'PV_bootloader': '',
'PV_bootloader_args': '',
'PV_kernel': '',
'PV_legacy_args': '',
'PV_ramdisk': '',
'recommendations': '',
'tags': [],
'user_version': '0',
'VCPUs_at_startup': vcpus,
'VCPUs_max': vcpus,
'VCPUs_params': {},
'xenstore_data': {}}
# Complete VM configuration record according to the image type
# non-raw/raw with PV kernel/raw in HVM mode
if use_pv_kernel:
rec['platform']['nx'] = 'false'
if instance.kernel_id:
# 1. Kernel explicitly passed in, use that
rec['PV_args'] = 'root=/dev/xvda1'
rec['PV_kernel'] = kernel
rec['PV_ramdisk'] = ramdisk
else:
# 2. Use kernel within the image
rec['PV_bootloader'] = 'pygrub'
else:
# 3. Using hardware virtualization
rec['platform']['nx'] = 'true'
rec['HVM_boot_params'] = {'order': 'dc'}
rec['HVM_boot_policy'] = 'BIOS order'
LOG.debug(_('Created VM %s...'), instance.name)
vm_ref = session.call_xenapi('VM.create', rec)
instance_name = instance.name
LOG.debug(_('Created VM %(instance_name)s as %(vm_ref)s.') % locals())
return vm_ref
    @classmethod
    def ensure_free_mem(cls, session, instance):
        """Return True if the host has enough free memory for *instance*."""
        inst_type_id = instance.instance_type_id
        instance_type = instance_types.get_instance_type(inst_type_id)
        # memory_mb is megabytes; XenAPI reports free memory in bytes
        mem = long(instance_type['memory_mb']) * 1024 * 1024
        #get free memory from host
        host = session.get_xenapi_host()
        host_free_mem = long(session.get_xenapi().host.
                             compute_free_memory(host))
        return host_free_mem >= mem
@classmethod
def create_vbd(cls, session, vm_ref, vdi_ref, userdevice, bootable):
"""Create a VBD record. Returns a Deferred that gives the new
VBD reference."""
vbd_rec = {}
vbd_rec['VM'] = vm_ref
vbd_rec['VDI'] = vdi_ref
vbd_rec['userdevice'] = str(userdevice)
vbd_rec['bootable'] = bootable
vbd_rec['mode'] = 'RW'
vbd_rec['type'] = 'disk'
vbd_rec['unpluggable'] = True
vbd_rec['empty'] = False
vbd_rec['other_config'] = {}
vbd_rec['qos_algorithm_type'] = ''
vbd_rec['qos_algorithm_params'] = {}
vbd_rec['qos_supported_algorithms'] = []
LOG.debug(_('Creating VBD for VM %(vm_ref)s,'
' VDI %(vdi_ref)s ... ') % locals())
vbd_ref = session.call_xenapi('VBD.create', vbd_rec)
LOG.debug(_('Created VBD %(vbd_ref)s for VM %(vm_ref)s,'
' VDI %(vdi_ref)s.') % locals())
return vbd_ref
@classmethod
def create_cd_vbd(cls, session, vm_ref, vdi_ref, userdevice, bootable):
"""Create a VBD record. Returns a Deferred that gives the new
VBD reference specific to CDRom devices."""
vbd_rec = {}
vbd_rec['VM'] = vm_ref
vbd_rec['VDI'] = vdi_ref
vbd_rec['userdevice'] = str(userdevice)
vbd_rec['bootable'] = bootable
vbd_rec['mode'] = 'RO'
vbd_rec['type'] = 'CD'
vbd_rec['unpluggable'] = True
vbd_rec['empty'] = False
vbd_rec['other_config'] = {}
vbd_rec['qos_algorithm_type'] = ''
vbd_rec['qos_algorithm_params'] = {}
vbd_rec['qos_supported_algorithms'] = []
LOG.debug(_('Creating a CDROM-specific VBD for VM %(vm_ref)s,'
' VDI %(vdi_ref)s ... ') % locals())
vbd_ref = session.call_xenapi('VBD.create', vbd_rec)
LOG.debug(_('Created a CDROM-specific VBD %(vbd_ref)s '
' for VM %(vm_ref)s, VDI %(vdi_ref)s.') % locals())
return vbd_ref
    @classmethod
    def find_vbd_by_number(cls, session, vm_ref, number):
        """Get the VBD reference from the device number"""
        vbd_refs = session.get_xenapi().VM.get_VBDs(vm_ref)
        if vbd_refs:
            for vbd_ref in vbd_refs:
                try:
                    vbd_rec = session.get_xenapi().VBD.get_record(vbd_ref)
                    if vbd_rec['userdevice'] == str(number):
                        return vbd_ref
                except cls.XenAPI.Failure, exc:
                    # A record lookup may fail if the VBD disappears mid-scan;
                    # log it and keep checking the remaining VBDs.
                    LOG.exception(exc)
        raise StorageError(_('VBD not found in instance %s') % vm_ref)
    @classmethod
    def unplug_vbd(cls, session, vbd_ref):
        """Unplug VBD from VM"""
        try:
            vbd_ref = session.call_xenapi('VBD.unplug', vbd_ref)
        except cls.XenAPI.Failure, exc:
            LOG.exception(exc)
            # An already-detached device is treated as success (idempotent);
            # anything else is surfaced as a StorageError.
            if exc.details[0] != 'DEVICE_ALREADY_DETACHED':
                raise StorageError(_('Unable to unplug VBD %s') % vbd_ref)
    @classmethod
    def destroy_vbd(cls, session, vbd_ref):
        """Destroy VBD from host database"""
        try:
            task = session.call_xenapi('Async.VBD.destroy', vbd_ref)
            # block until the async destroy completes so callers can rely
            # on the VBD being gone
            session.wait_for_task(task)
        except cls.XenAPI.Failure, exc:
            LOG.exception(exc)
            raise StorageError(_('Unable to destroy VBD %s') % vbd_ref)
    @classmethod
    def destroy_vdi(cls, session, vdi_ref):
        """Destroy a VDI, raising StorageError on failure."""
        try:
            task = session.call_xenapi('Async.VDI.destroy', vdi_ref)
            # block until the async destroy completes
            session.wait_for_task(task)
        except cls.XenAPI.Failure, exc:
            LOG.exception(exc)
            raise StorageError(_('Unable to destroy VDI %s') % vdi_ref)
@classmethod
def create_vdi(cls, session, sr_ref, name_label, virtual_size, read_only):
"""Create a VDI record and returns its reference."""
vdi_ref = session.get_xenapi().VDI.create(
{'name_label': name_label,
'name_description': '',
'SR': sr_ref,
'virtual_size': str(virtual_size),
'type': 'User',
'sharable': False,
'read_only': read_only,
'xenstore_data': {},
'other_config': {},
'sm_config': {},
'tags': []})
LOG.debug(_('Created VDI %(vdi_ref)s (%(name_label)s,'
' %(virtual_size)s, %(read_only)s) on %(sr_ref)s.')
% locals())
return vdi_ref
@classmethod
def get_vdi_for_vm_safely(cls, session, vm_ref):
"""Retrieves the primary VDI for a VM"""
vbd_refs = session.get_xenapi().VM.get_VBDs(vm_ref)
for vbd in vbd_refs:
vbd_rec = session.get_xenapi().VBD.get_record(vbd)
# Convention dictates the primary VDI will be userdevice 0
if vbd_rec['userdevice'] == '0':
vdi_rec = session.get_xenapi().VDI.get_record(vbd_rec['VDI'])
return vbd_rec['VDI'], vdi_rec
raise exception.Error(_("No primary VDI found for"
"%(vm_ref)s") % locals())
    @classmethod
    def create_snapshot(cls, session, instance_id, vm_ref, label):
        """Creates Snapshot (Template) VM, Snapshot VBD, Snapshot VDI,
        Snapshot VHD"""
        #TODO(sirp): Add quiesce and VSS locking support when Windows support
        # is added
        LOG.debug(_("Snapshotting VM %(vm_ref)s with label '%(label)s'...")
                  % locals())
        vm_vdi_ref, vm_vdi_rec = cls.get_vdi_for_vm_safely(session, vm_ref)
        sr_ref = vm_vdi_rec["SR"]
        # remember the pre-snapshot parent so we can tell when the
        # post-snapshot VHD coalesce has finished
        original_parent_uuid = get_vhd_parent_uuid(session, vm_vdi_ref)
        task = session.call_xenapi('Async.VM.snapshot', vm_ref, label)
        template_vm_ref = session.wait_for_task(task, instance_id)
        template_vdi_rec = cls.get_vdi_for_vm_safely(session,
                                                     template_vm_ref)[1]
        template_vdi_uuid = template_vdi_rec["uuid"]
        LOG.debug(_('Created snapshot %(template_vm_ref)s from'
                    ' VM %(vm_ref)s.') % locals())
        parent_uuid = wait_for_vhd_coalesce(
            session, instance_id, sr_ref, vm_vdi_ref, original_parent_uuid)
        #TODO(sirp): we need to assert only one parent, not parents two deep
        template_vdi_uuids = {'image': parent_uuid,
                              'snap': template_vdi_uuid}
        return template_vm_ref, template_vdi_uuids
    @classmethod
    def get_sr_path(cls, session):
        """Return the path to our storage repository

        This is used when we're dealing with VHDs directly, either by taking
        snapshots or by restoring an image in the DISK_VHD format.
        """
        sr_ref = safe_find_sr(session)
        sr_rec = session.get_xenapi().SR.get_record(sr_ref)
        sr_uuid = sr_rec["uuid"]
        # SR contents live under <xenapi_sr_base_path>/<sr_uuid>
        return os.path.join(FLAGS.xenapi_sr_base_path, sr_uuid)
    @classmethod
    def upload_image(cls, context, session, instance, vdi_uuids, image_id):
        """ Requests that the Glance plugin bundle the specified VDIs and
        push them into Glance using the specified human-friendly name.
        """
        # NOTE(sirp): Currently we only support uploading images as VHD, there
        # is no RAW equivalent (yet)
        logging.debug(_("Asking xapi to upload %(vdi_uuids)s as"
                        " ID %(image_id)s") % locals())
        os_type = instance.os_type or FLAGS.default_os_type
        glance_host, glance_port = glance.pick_glance_api_server()
        params = {'vdi_uuids': vdi_uuids,
                  'image_id': image_id,
                  'glance_host': glance_host,
                  'glance_port': glance_port,
                  'sr_path': cls.get_sr_path(session),
                  'os_type': os_type,
                  'auth_token': getattr(context, 'auth_token', None)}
        # plugin arguments are marshalled via pickle (see download_vhd)
        kwargs = {'params': pickle.dumps(params)}
        task = session.async_call_plugin('glance', 'upload_vhd', kwargs)
        session.wait_for_task(task, instance.id)
@classmethod
def fetch_blank_disk(cls, session, instance_type_id):
# Size the blank harddrive to suit the machine type:
one_gig = 1024 * 1024 * 1024
req_type = instance_types.get_instance_type(instance_type_id)
req_size = req_type['local_gb']
LOG.debug("Creating blank HD of size %(req_size)d gigs"
% locals())
vdi_size = one_gig * req_size
LOG.debug("ISO vm create: Looking for the SR")
sr_ref = safe_find_sr(session)
vdi_ref = cls.create_vdi(session, sr_ref, 'blank HD', vdi_size, False)
return vdi_ref
@classmethod
def fetch_image(cls, context, session, instance, image, user_id,
project_id, image_type):
"""Fetch image from glance based on image type.
Returns: A single filename if image_type is KERNEL or RAMDISK
A list of dictionaries that describe VDIs, otherwise
"""
if image_type == ImageType.DISK_VHD:
return cls._fetch_image_glance_vhd(context,
session, instance, image, image_type)
else:
return cls._fetch_image_glance_disk(context,
session, instance, image, image_type)
    @classmethod
    def _fetch_image_glance_vhd(cls, context, session, instance, image,
                                image_type):
        """Tell glance to download an image and put the VHDs into the SR

        Returns: A list of dictionaries that describe VDIs
        """
        instance_id = instance.id
        LOG.debug(_("Asking xapi to fetch vhd image %(image)s")
                  % locals())
        sr_ref = safe_find_sr(session)
        # NOTE(sirp): The Glance plugin runs under Python 2.4
        # which does not have the `uuid` module. To work around this,
        # we generate the uuids here (under Python 2.6+) and
        # pass them as arguments
        uuid_stack = [str(uuid.uuid4()) for i in xrange(2)]
        glance_host, glance_port = glance.pick_glance_api_server()
        params = {'image_id': image,
                  'glance_host': glance_host,
                  'glance_port': glance_port,
                  'uuid_stack': uuid_stack,
                  'sr_path': cls.get_sr_path(session),
                  'auth_token': getattr(context, 'auth_token', None)}
        kwargs = {'params': pickle.dumps(params)}
        task = session.async_call_plugin('glance', 'download_vhd', kwargs)
        result = session.wait_for_task(task, instance_id)
        # 'download_vhd' will return a json encoded string containing
        # a list of dictionaries describing VDIs. The dictionary will
        # contain 'vdi_type' and 'vdi_uuid' keys. 'vdi_type' can be
        # 'os' or 'swap' right now.
        vdis = json.loads(result)
        for vdi in vdis:
            LOG.debug(_("xapi 'download_vhd' returned VDI of "
                        "type '%(vdi_type)s' with UUID '%(vdi_uuid)s'" % vdi))
        # rescan so XenServer picks up the newly-written VHDs
        cls.scan_sr(session, instance_id, sr_ref)
        # Pull out the UUID of the first VDI (which is the os VDI)
        os_vdi_uuid = vdis[0]['vdi_uuid']
        # Set the name-label to ease debugging
        vdi_ref = session.get_xenapi().VDI.get_by_uuid(os_vdi_uuid)
        primary_name_label = get_name_label_for_image(image)
        session.get_xenapi().VDI.set_name_label(vdi_ref, primary_name_label)
        cls._check_vdi_size(context, session, instance, os_vdi_uuid)
        return vdis
@classmethod
def _get_vdi_chain_size(cls, context, session, vdi_uuid):
"""Compute the total size of a VDI chain, starting with the specified
VDI UUID.
This will walk the VDI chain to the root, add the size of each VDI into
the total.
"""
size_bytes = 0
for vdi_rec in walk_vdi_chain(session, vdi_uuid):
cur_vdi_uuid = vdi_rec['uuid']
vdi_size_bytes = int(vdi_rec['physical_utilisation'])
LOG.debug(_('vdi_uuid=%(cur_vdi_uuid)s vdi_size_bytes='
'%(vdi_size_bytes)d' % locals()))
size_bytes += vdi_size_bytes
return size_bytes
    @classmethod
    def _check_vdi_size(cls, context, session, instance, vdi_uuid):
        """Raise ImageTooLarge when the VDI chain rooted at vdi_uuid exceeds
        the instance type's local_gb allowance."""
        size_bytes = cls._get_vdi_chain_size(context, session, vdi_uuid)
        # FIXME(jk0): this was copied directly from compute.manager.py, let's
        # refactor this to a common area
        instance_type_id = instance['instance_type_id']
        instance_type = db.instance_type_get(context,
                instance_type_id)
        allowed_size_gb = instance_type['local_gb']
        allowed_size_bytes = allowed_size_gb * 1024 * 1024 * 1024
        LOG.debug(_("image_size_bytes=%(size_bytes)d, allowed_size_bytes="
                    "%(allowed_size_bytes)d") % locals())
        if size_bytes > allowed_size_bytes:
            LOG.info(_("Image size %(size_bytes)d exceeded"
                       " instance_type allowed size "
                       "%(allowed_size_bytes)d")
                     % locals())
            raise exception.ImageTooLarge()
    @classmethod
    def _fetch_image_glance_disk(cls, context, session, instance, image,
                                 image_type):
        """Fetch the image from Glance

        NOTE:
        Unlike _fetch_image_glance_vhd, this method does not use the Glance
        plugin; instead, it streams the disks through domU to the VDI
        directly.

        Returns: A single filename if image_type is KERNEL_RAMDISK
                 A list of dictionaries that describe VDIs, otherwise
        """
        instance_id = instance.id
        # FIXME(sirp): Since the Glance plugin seems to be required for the
        # VHD disk, it may be worth using the plugin for both VHD and RAW and
        # DISK restores
        LOG.debug(_("Fetching image %(image)s") % locals())
        LOG.debug(_("Image Type: %s"), ImageType.to_string(image_type))
        # ISOs live in a dedicated content-type=iso SR; everything else goes
        # into the default local-storage SR.
        if image_type == ImageType.DISK_ISO:
            sr_ref = safe_find_iso_sr(session)
            LOG.debug(_("ISO: Found sr possibly containing the ISO image"))
        else:
            sr_ref = safe_find_sr(session)
        glance_client, image_id = glance.get_glance_client(context, image)
        glance_client.set_auth_token(getattr(context, 'auth_token', None))
        meta, image_file = glance_client.get_image(image_id)
        virtual_size = int(meta['size'])
        vdi_size = virtual_size
        LOG.debug(_("Size for image %(image)s:" +
                    "%(virtual_size)d") % locals())
        if image_type == ImageType.DISK:
            # Make room for MBR.
            vdi_size += MBR_SIZE_BYTES
        elif image_type in (ImageType.KERNEL, ImageType.RAMDISK) and \
             vdi_size > FLAGS.max_kernel_ramdisk_size:
            max_size = FLAGS.max_kernel_ramdisk_size
            raise exception.Error(
                _("Kernel/Ramdisk image is too large: %(vdi_size)d bytes, "
                  "max %(max_size)d bytes") % locals())
        name_label = get_name_label_for_image(image)
        vdi_ref = cls.create_vdi(session, sr_ref, name_label, vdi_size, False)
        # From this point we have a VDI on Xen host;
        # If anything goes wrong, we need to remember its uuid.
        try:
            filename = None
            vdi_uuid = session.get_xenapi().VDI.get_uuid(vdi_ref)
            # Stream the image bytes straight onto the attached VDI.
            with_vdi_attached_here(session, vdi_ref, False,
                                   lambda dev:
                                   _stream_disk(dev, image_type,
                                                virtual_size, image_file))
            if image_type in (ImageType.KERNEL, ImageType.RAMDISK):
                # We need to invoke a plugin for copying the
                # content of the VDI into the proper path.
                LOG.debug(_("Copying VDI %s to /boot/guest on dom0"), vdi_ref)
                fn = "copy_kernel_vdi"
                args = {}
                args['vdi-ref'] = vdi_ref
                # Let the plugin copy the correct number of bytes.
                args['image-size'] = str(vdi_size)
                task = session.async_call_plugin('glance', fn, args)
                filename = session.wait_for_task(task, instance_id)
                # Remove the VDI as it is not needed anymore.
                session.get_xenapi().VDI.destroy(vdi_ref)
                LOG.debug(_("Kernel/Ramdisk VDI %s destroyed"), vdi_ref)
                return [dict(vdi_type=ImageType.to_string(image_type),
                             vdi_uuid=None,
                             file=filename)]
            else:
                return [dict(vdi_type=ImageType.to_string(image_type),
                             vdi_uuid=vdi_uuid,
                             file=None)]
        except (cls.XenAPI.Failure, IOError, OSError) as e:
            # We look for XenAPI and OS failures.
            # NOTE(review): if VDI.get_uuid itself raises, `vdi_uuid` is
            # unbound here and this handler would NameError -- TODO confirm
            # and guard.
            LOG.exception(_("instance %s: Failed to fetch glance image"),
                          instance_id, exc_info=sys.exc_info())
            e.args = e.args + ([dict(vdi_type=ImageType.
                                     to_string(image_type),
                                     vdi_uuid=vdi_uuid,
                                     file=filename)],)
            raise e
    @classmethod
    def determine_disk_image_type(cls, instance, context):
        """Disk Image Types are used to determine where the kernel will reside
        within an image. To figure out which type we're dealing with, we use
        the following rules:

        1. If we're using Glance, we can use the image_type field to
           determine the image_type

        2. If we're not using Glance, then we need to deduce this based on
           whether a kernel_id is specified.
        """
        def log_disk_format(image_type):
            # Human-readable names for the log line only.
            pretty_format = {ImageType.KERNEL: 'KERNEL',
                             ImageType.RAMDISK: 'RAMDISK',
                             ImageType.DISK: 'DISK',
                             ImageType.DISK_RAW: 'DISK_RAW',
                             ImageType.DISK_VHD: 'DISK_VHD',
                             ImageType.DISK_ISO: 'DISK_ISO'}
            disk_format = pretty_format[image_type]
            image_ref = instance.image_ref
            instance_id = instance.id
            LOG.debug(_("Detected %(disk_format)s format for image "
                        "%(image_ref)s, instance %(instance_id)s") % locals())
        def determine_from_glance():
            # Map Glance's disk_format strings onto ImageType constants.
            glance_disk_format2nova_type = {
                'ami': ImageType.DISK,
                'aki': ImageType.KERNEL,
                'ari': ImageType.RAMDISK,
                'raw': ImageType.DISK_RAW,
                'vhd': ImageType.DISK_VHD,
                'iso': ImageType.DISK_ISO}
            image_ref = instance.image_ref
            glance_client, image_id = glance.get_glance_client(context,
                                                               image_ref)
            meta = glance_client.get_image_meta(image_id)
            disk_format = meta['disk_format']
            try:
                return glance_disk_format2nova_type[disk_format]
            except KeyError:
                raise exception.InvalidDiskFormat(disk_format=disk_format)
        def determine_from_instance():
            # NOTE(review): this helper is never called below -- rule 2
            # appears to be dead code; confirm before removing.
            if instance.kernel_id:
                return ImageType.DISK
            else:
                return ImageType.DISK_RAW
        image_type = determine_from_glance()
        log_disk_format(image_type)
        return image_type
@classmethod
def determine_is_pv(cls, session, instance_id, vdi_ref, disk_image_type,
os_type):
"""
Determine whether the VM will use a paravirtualized kernel or if it
will use hardware virtualization.
1. Glance (VHD): then we use `os_type`, raise if not set
2. Glance (DISK_RAW): use Pygrub to figure out if pv kernel is
available
3. Glance (DISK): pv is assumed
4. Glance (DISK_ISO): no pv is assumed
"""
LOG.debug(_("Looking up vdi %s for PV kernel"), vdi_ref)
if disk_image_type == ImageType.DISK_VHD:
# 1. VHD
if os_type == 'windows':
is_pv = False
else:
is_pv = True
elif disk_image_type == ImageType.DISK_RAW:
# 2. RAW
is_pv = with_vdi_attached_here(session, vdi_ref, True, _is_vdi_pv)
elif disk_image_type == ImageType.DISK:
# 3. Disk
is_pv = True
elif disk_image_type == ImageType.DISK_ISO:
# 4. ISO
is_pv = False
else:
raise exception.Error(_("Unknown image format %(disk_image_type)s")
% locals())
return is_pv
@classmethod
def lookup(cls, session, name_label):
"""Look the instance i up, and returns it if available"""
vm_refs = session.get_xenapi().VM.get_by_name_label(name_label)
n = len(vm_refs)
if n == 0:
return None
elif n > 1:
raise exception.InstanceExists(name=name_label)
else:
return vm_refs[0]
    @classmethod
    def lookup_vm_vdis(cls, session, vm_ref):
        """Look for the VDIs that are attached to the VM

        Returns a list of VDI refs, or None when none are reachable.
        """
        # Firstly we get the VBDs, then the VDIs.
        # TODO(Armando): do we leave the read-only devices?
        vbd_refs = session.get_xenapi().VM.get_VBDs(vm_ref)
        vdi_refs = []
        if vbd_refs:
            for vbd_ref in vbd_refs:
                try:
                    vdi_ref = session.get_xenapi().VBD.get_VDI(vbd_ref)
                    # Test valid VDI
                    record = session.get_xenapi().VDI.get_record(vdi_ref)
                    LOG.debug(_('VDI %s is still available'), record['uuid'])
                except cls.XenAPI.Failure, exc:
                    # VBDs without a usable VDI (e.g. empty drives) are
                    # logged and skipped rather than aborting the lookup.
                    LOG.exception(exc)
                else:
                    vdi_refs.append(vdi_ref)
        if len(vdi_refs) > 0:
            return vdi_refs
        else:
            return None
@classmethod
def preconfigure_instance(cls, session, instance, vdi_ref, network_info):
"""Makes alterations to the image before launching as part of spawn.
"""
# As mounting the image VDI is expensive, we only want do do it once,
# if at all, so determine whether it's required first, and then do
# everything
mount_required = False
key, net, metadata = _prepare_injectables(instance, network_info)
mount_required = key or net or metadata
if not mount_required:
return
with_vdi_attached_here(session, vdi_ref, False,
lambda dev: _mounted_processing(dev, key, net,
metadata))
@classmethod
def lookup_kernel_ramdisk(cls, session, vm):
vm_rec = session.get_xenapi().VM.get_record(vm)
if 'PV_kernel' in vm_rec and 'PV_ramdisk' in vm_rec:
return (vm_rec['PV_kernel'], vm_rec['PV_ramdisk'])
else:
return (None, None)
    @classmethod
    def compile_info(cls, record):
        """Fill record with VM status information"""
        LOG.info(_("(VM_UTILS) xenserver vm state -> |%s|"),
                 record['power_state'])
        LOG.info(_("(VM_UTILS) xenapi power_state -> |%s|"),
                 XENAPI_POWER_STATE[record['power_state']])
        # Memory fields come back from xapi in bytes; >> 10 converts to KB.
        return {'state': XENAPI_POWER_STATE[record['power_state']],
                'max_mem': long(record['memory_static_max']) >> 10,
                'mem': long(record['memory_dynamic_max']) >> 10,
                'num_cpu': record['VCPUs_max'],
                'cpu_time': 0}
    @classmethod
    def compile_diagnostics(cls, session, record):
        """Compile VM diagnostics data

        Returns a dict of metric-name -> value parsed from the host's RRD
        feed, or a single-entry error dict when the data is unavailable.
        """
        try:
            host = session.get_xenapi_host()
            host_ip = session.get_xenapi().host.get_record(host)["address"]
        except (cls.XenAPI.Failure, KeyError) as e:
            return {"Unable to retrieve diagnostics": e}
        try:
            diags = {}
            xml = get_rrd(host_ip, record["uuid"])
            if xml:
                rrd = minidom.parseString(xml)
                for i, node in enumerate(rrd.firstChild.childNodes):
                    # We don't want all of the extra garbage
                    # NOTE(review): indices 3..11 and child offset 6 appear
                    # tied to xapi's RRD XML layout -- confirm against a
                    # live RRD before changing.
                    if i >= 3 and i <= 11:
                        ref = node.childNodes
                        # Name and Value
                        if len(ref) > 6:
                            diags[ref[0].firstChild.data] = \
                                ref[6].firstChild.data
            return diags
        except cls.XenAPI.Failure as e:
            return {"Unable to retrieve diagnostics": e}
@classmethod
def scan_sr(cls, session, instance_id=None, sr_ref=None):
"""Scans the SR specified by sr_ref"""
if sr_ref:
LOG.debug(_("Re-scanning SR %s"), sr_ref)
task = session.call_xenapi('Async.SR.scan', sr_ref)
session.wait_for_task(task, instance_id)
@classmethod
def scan_default_sr(cls, session):
"""Looks for the system default SR and triggers a re-scan"""
sr_ref = find_sr(session)
session.call_xenapi('SR.scan', sr_ref)
def get_rrd(host, vm_uuid):
    """Return the VM RRD XML as a string, or None on I/O failure.

    :param host: address of the XenServer host serving the RRD feed
    :param vm_uuid: UUID of the VM whose RRD is requested
    """
    try:
        handle = urllib.urlopen("http://%s:%s@%s/vm_rrd?uuid=%s" % (
            FLAGS.xenapi_connection_username,
            FLAGS.xenapi_connection_password,
            host,
            vm_uuid))
        try:
            return handle.read()
        finally:
            # Always release the HTTP connection; it was previously leaked.
            handle.close()
    except IOError:
        return None
#TODO(sirp): This code comes from XS5.6 pluginlib.py, we should refactor to
# use that implmenetation
def get_vhd_parent(session, vdi_rec):
    """
    Returns the VHD parent of the given VDI record, as a (ref, rec) pair.
    Returns None if we're at the root of the tree.
    """
    # The storage manager records the parent link under sm_config.
    if 'vhd-parent' in vdi_rec['sm_config']:
        parent_uuid = vdi_rec['sm_config']['vhd-parent']
        parent_ref = session.get_xenapi().VDI.get_by_uuid(parent_uuid)
        parent_rec = session.get_xenapi().VDI.get_record(parent_ref)
        vdi_uuid = vdi_rec['uuid']
        LOG.debug(_("VHD %(vdi_uuid)s has parent %(parent_ref)s") % locals())
        return parent_ref, parent_rec
    else:
        return None
def get_vhd_parent_uuid(session, vdi_ref):
    """Return the UUID of vdi_ref's VHD parent, or None at the chain root."""
    vdi_rec = session.get_xenapi().VDI.get_record(vdi_ref)
    parent = get_vhd_parent(session, vdi_rec)
    if parent is None:
        return None
    _parent_ref, parent_rec = parent
    return parent_rec["uuid"]
def walk_vdi_chain(session, vdi_uuid):
    """Yield vdi_recs for each element in a VDI chain"""
    # TODO(jk0): perhaps make get_vhd_parent use this
    while True:
        vdi_ref = session.get_xenapi().VDI.get_by_uuid(vdi_uuid)
        vdi_rec = session.get_xenapi().VDI.get_record(vdi_ref)
        yield vdi_rec
        # Follow the storage manager's parent link; stop at the root.
        parent_uuid = vdi_rec['sm_config'].get('vhd-parent')
        if not parent_uuid:
            return
        vdi_uuid = parent_uuid
def wait_for_vhd_coalesce(session, instance_id, sr_ref, vdi_ref,
                          original_parent_uuid):
    """ Spin until the parent VHD is coalesced into its parent VHD

    Before coalesce:
        * original_parent_vhd
            * parent_vhd
                snapshot

    Atter coalesce:
        * parent_vhd
            snapshot

    Returns the UUID of the VDI's parent once it stabilises; raises
    exception.Error after xenapi_vhd_coalesce_max_attempts polls.
    """
    max_attempts = FLAGS.xenapi_vhd_coalesce_max_attempts
    # A dict is used so the nested closure can mutate the counter
    # (no `nonlocal` in Python 2).
    attempts = {'counter': 0}
    def _poll_vhds():
        attempts['counter'] += 1
        if attempts['counter'] > max_attempts:
            counter = attempts['counter']
            msg = (_("VHD coalesce attempts exceeded (%(counter)d >"
                     " %(max_attempts)d), giving up...") % locals())
            raise exception.Error(msg)
        # Re-scan so xapi notices coalesce progress before we re-read the
        # parent pointer.
        VMHelper.scan_sr(session, instance_id, sr_ref)
        parent_uuid = get_vhd_parent_uuid(session, vdi_ref)
        if original_parent_uuid and (parent_uuid != original_parent_uuid):
            LOG.debug(_("Parent %(parent_uuid)s doesn't match original parent"
                        " %(original_parent_uuid)s, waiting for coalesce...")
                      % locals())
        else:
            # Breakout of the loop (normally) and return the parent_uuid
            raise utils.LoopingCallDone(parent_uuid)
    loop = utils.LoopingCall(_poll_vhds)
    loop.start(FLAGS.xenapi_vhd_coalesce_poll_interval, now=True)
    parent_uuid = loop.wait()
    return parent_uuid
def get_vdi_for_vm_safely(session, vm_ref):
    """Return (vdi_ref, vdi_rec) for vm_ref's single attached VDI.

    Raises when the VM has no VDIs, or more than one, since callers rely
    on exactly one root disk being present.
    """
    vdi_refs = VMHelper.lookup_vm_vdis(session, vm_ref)
    if vdi_refs is None:
        raise Exception(_("No VDIs found for VM %s") % vm_ref)
    num_vdis = len(vdi_refs)
    if num_vdis != 1:
        # NOTE: added the missing space so the message reads
        # "VDIs (n) found" instead of "VDIs(n) found".
        raise exception.Error(_("Unexpected number of VDIs"
                                " (%(num_vdis)s) found"
                                " for VM %(vm_ref)s") % locals())
    vdi_ref = vdi_refs[0]
    vdi_rec = session.get_xenapi().VDI.get_record(vdi_ref)
    return vdi_ref, vdi_rec
def safe_find_sr(session):
    """Same as find_sr except raises a StorageRepositoryNotFound exception
    if the SR cannot be determined
    """
    sr_ref = find_sr(session)
    if sr_ref is not None:
        return sr_ref
    raise exception.StorageRepositoryNotFound()
def find_sr(session):
    """Return the storage repository to hold VM images

    Picks the SR flagged 'local-storage' that has a PBD on this host;
    returns None when no such SR exists.
    """
    host = session.get_xenapi_host()
    for sr_ref in session.get_xenapi().SR.get_all():
        sr_rec = session.get_xenapi().SR.get_record(sr_ref)
        # Only consider the host-local storage SR.
        if sr_rec['other_config'].get('i18n-key') != 'local-storage':
            continue
        for pbd_ref in sr_rec['PBDs']:
            pbd_rec = session.get_xenapi().PBD.get_record(pbd_ref)
            if pbd_rec['host'] == host:
                return sr_ref
    return None
def safe_find_iso_sr(session):
    """Same as find_iso_sr except raises a NotFound exception if SR
    cannot be determined
    """
    sr_ref = find_iso_sr(session)
    if sr_ref is not None:
        return sr_ref
    raise exception.NotFound(_('Cannot find SR of content-type ISO'))
def find_iso_sr(session):
    """Return the storage repository to hold ISO images

    An SR qualifies when its content_type is 'iso', its i18n-key is
    'local-storage-iso', and it has a PBD attached to this host.
    Returns None when no SR matches.
    """
    host = session.get_xenapi_host()
    sr_refs = session.get_xenapi().SR.get_all()
    for sr_ref in sr_refs:
        sr_rec = session.get_xenapi().SR.get_record(sr_ref)
        LOG.debug(_("ISO: looking at SR %(sr_rec)s") % locals())
        if not sr_rec['content_type'] == 'iso':
            LOG.debug(_("ISO: not iso content"))
            continue
        if not 'i18n-key' in sr_rec['other_config']:
            LOG.debug(_("ISO: iso content_type, no 'i18n-key' key"))
            continue
        if not sr_rec['other_config']['i18n-key'] == 'local-storage-iso':
            LOG.debug(_("ISO: iso content_type, i18n-key value not "
                        "'local-storage-iso'"))
            continue
        LOG.debug(_("ISO: SR MATCHing our criteria"))
        for pbd_ref in sr_rec['PBDs']:
            LOG.debug(_("ISO: ISO, looking to see if it is host local"))
            pbd_rec = session.get_xenapi().PBD.get_record(pbd_ref)
            pbd_rec_host = pbd_rec['host']
            # NOTE(review): this log interpolates %(host)s (our host) next
            # to the whole %(pbd_rec)s record -- probably meant
            # %(pbd_rec_host)s; confirm before changing the message.
            LOG.debug(_("ISO: PBD matching, want %(pbd_rec)s, have %(host)s") %
                      locals())
            if pbd_rec_host == host:
                LOG.debug(_("ISO: SR with local PBD"))
                return sr_ref
    return None
def remap_vbd_dev(dev):
    """Return the appropriate location for a plugged-in VBD device

    Ubuntu Maverick moved xvd? -> sd?. This is considered a bug and will be
    fixed in future versions:
        https://bugs.launchpad.net/ubuntu/+source/linux/+bug/684875

    For now, we work around it by just doing a string replace.
    """
    # NOTE(sirp): This hack can go away when we pull support for Maverick
    if not FLAGS.xenapi_remap_vbd_dev:
        return dev
    return dev.replace('xvd', FLAGS.xenapi_remap_vbd_dev_prefix)
def _wait_for_device(dev):
    """Block until /dev/<dev> appears; raise StorageError on timeout."""
    dev_path = '/dev/%s' % dev
    # Poll once per second up to the configured timeout.
    for _attempt in xrange(FLAGS.block_device_creation_timeout):
        if os.path.exists(dev_path):
            return
        time.sleep(1)
    raise StorageError(_('Timeout waiting for device %s to be created') % dev)
def with_vdi_attached_here(session, vdi_ref, read_only, f):
    """Temporarily plug vdi_ref into the VM running this code, call
    f(device_name) with the resulting block device, then unplug and
    destroy the VBD again.  Returns f's return value.
    """
    this_vm_ref = get_this_vm_ref(session)
    vbd_rec = {}
    vbd_rec['VM'] = this_vm_ref
    vbd_rec['VDI'] = vdi_ref
    vbd_rec['userdevice'] = 'autodetect'
    vbd_rec['bootable'] = False
    vbd_rec['mode'] = read_only and 'RO' or 'RW'
    vbd_rec['type'] = 'disk'
    vbd_rec['unpluggable'] = True
    vbd_rec['empty'] = False
    vbd_rec['other_config'] = {}
    vbd_rec['qos_algorithm_type'] = ''
    vbd_rec['qos_algorithm_params'] = {}
    vbd_rec['qos_supported_algorithms'] = []
    LOG.debug(_('Creating VBD for VDI %s ... '), vdi_ref)
    vbd_ref = session.get_xenapi().VBD.create(vbd_rec)
    LOG.debug(_('Creating VBD for VDI %s done.'), vdi_ref)
    try:
        LOG.debug(_('Plugging VBD %s ... '), vbd_ref)
        session.get_xenapi().VBD.plug(vbd_ref)
        LOG.debug(_('Plugging VBD %s done.'), vbd_ref)
        orig_dev = session.get_xenapi().VBD.get_device(vbd_ref)
        LOG.debug(_('VBD %(vbd_ref)s plugged as %(orig_dev)s') % locals())
        # Work around Maverick's xvd? -> sd? rename (see remap_vbd_dev).
        dev = remap_vbd_dev(orig_dev)
        if dev != orig_dev:
            LOG.debug(_('VBD %(vbd_ref)s plugged into wrong dev, '
                        'remapping to %(dev)s') % locals())
        if dev != 'autodetect':
            # NOTE(johannes): Unit tests will end up with a device called
            # 'autodetect' which obviously won't exist. It's not ideal,
            # but the alternatives were much messier
            _wait_for_device(dev)
        return f(dev)
    finally:
        # Tear down the VBD even if f raised; failures here are logged
        # and ignored so the original exception propagates.
        LOG.debug(_('Destroying VBD for VDI %s ... '), vdi_ref)
        vbd_unplug_with_retry(session, vbd_ref)
        ignore_failure(session.get_xenapi().VBD.destroy, vbd_ref)
        LOG.debug(_('Destroying VBD for VDI %s done.'), vdi_ref)
def vbd_unplug_with_retry(session, vbd_ref):
    """Call VBD.unplug on the given VBD, with a retry if we get
    DEVICE_DETACH_REJECTED.  For reasons which I don't understand, we're
    seeing the device still in use, even when all processes using the device
    should be dead."""
    # FIXME(sirp): We can use LoopingCall here w/o blocking sleep()
    # NOTE(review): there is no retry cap -- a VBD that is permanently
    # DEVICE_DETACH_REJECTED would loop forever; confirm whether a bound
    # is wanted.
    while True:
        try:
            session.get_xenapi().VBD.unplug(vbd_ref)
            LOG.debug(_('VBD.unplug successful first time.'))
            return
        except VMHelper.XenAPI.Failure, e:
            if (len(e.details) > 0 and
                e.details[0] == 'DEVICE_DETACH_REJECTED'):
                # Still busy: wait a second and retry.
                LOG.debug(_('VBD.unplug rejected: retrying...'))
                time.sleep(1)
                LOG.debug(_('Not sleeping anymore!'))
            elif (len(e.details) > 0 and
                  e.details[0] == 'DEVICE_ALREADY_DETACHED'):
                # Someone (or a prior retry) already unplugged it: done.
                LOG.debug(_('VBD.unplug successful eventually.'))
                return
            else:
                # Unexpected failure: log and give up without raising.
                LOG.error(_('Ignoring XenAPI.Failure in VBD.unplug: %s'),
                          e)
                return
def ignore_failure(func, *args, **kwargs):
    """Invoke func, logging and swallowing any XenAPI.Failure.

    Returns func's result, or None when the call failed.
    """
    try:
        result = func(*args, **kwargs)
    except VMHelper.XenAPI.Failure as e:
        LOG.error(_('Ignoring XenAPI.Failure %s'), e)
        return None
    return result
def get_this_vm_uuid():
    """Return the UUID (from the hypervisor) of the VM running this code."""
    # NOTE: use open() instead of the deprecated file() builtin.
    with open('/sys/hypervisor/uuid') as f:
        return f.readline().strip()
def get_this_vm_ref(session):
    """Return the XenAPI VM ref for the VM in which this code is running."""
    return session.get_xenapi().VM.get_by_uuid(get_this_vm_uuid())
def _is_vdi_pv(dev):
    """Return True when pygrub finds a Xen (paravirt) kernel on /dev/<dev>."""
    LOG.debug(_("Running pygrub against %s"), dev)
    output = os.popen('pygrub -qn /dev/%s' % dev)
    try:
        for line in output.readlines():
            # try to find kernel string
            m = re.search('(?<=kernel:)/.*(?:>)', line)
            if m and m.group(0).find('xen') != -1:
                LOG.debug(_("Found Xen kernel %s") % m.group(0))
                return True
        LOG.debug(_("No Xen kernel found. Booting HVM."))
        return False
    finally:
        # Close the pipe so the pygrub child is reaped; the early return
        # above previously leaked the open pipe/process.
        output.close()
def _stream_disk(dev, image_type, virtual_size, image_file):
    """Copy the raw image bytes from image_file onto /dev/<dev>.

    For ImageType.DISK images, a partition table is written first and the
    data is written after the MBR; other types start at offset 0.
    """
    offset = 0
    if image_type == ImageType.DISK:
        offset = MBR_SIZE_BYTES
        _write_partition(virtual_size, dev)
    # Take ownership of the device node so it can be opened for writing.
    utils.execute('chown', os.getuid(), '/dev/%s' % dev, run_as_root=True)
    with open('/dev/%s' % dev, 'wb') as f:
        f.seek(offset)
        for chunk in image_file:
            f.write(chunk)
def _write_partition(virtual_size, dev):
    """Write an MSDOS label plus one primary partition covering
    virtual_size bytes to /dev/<dev>."""
    dest = '/dev/%s' % dev
    primary_first = MBR_SIZE_SECTORS
    primary_last = MBR_SIZE_SECTORS + (virtual_size / SECTOR_SIZE) - 1
    LOG.debug(_('Writing partition table %(primary_first)d %(primary_last)d'
                ' to %(dest)s...') % locals())
    utils.execute('parted', '--script', dest, 'mklabel', 'msdos',
                  run_as_root=True)
    utils.execute('parted', '--script', dest, 'mkpart', 'primary',
                  '%ds' % primary_first,
                  '%ds' % primary_last,
                  run_as_root=True)
    LOG.debug(_('Writing partition table %s done.'), dest)
def get_name_label_for_image(image):
    """Return the human-friendly name-label used for a Glance-backed VDI."""
    # TODO(sirp): This should eventually be the URI for the Glance image
    label = _('Glance image %s') % image
    return label
def _mount_filesystem(dev_path, dir):
    """mounts the device specified by dev_path in dir

    Only Linux filesystem types are attempted.  Returns mount's stderr
    output -- the error text when the mount failed, otherwise whatever
    mount wrote to stderr (typically empty) on success.
    """
    try:
        out, err = utils.execute('mount',
                                 '-t', 'ext2,ext3,ext4,reiserfs',
                                 dev_path, dir, run_as_root=True)
    except exception.ProcessExecutionError as e:
        err = str(e)
    return err
def _find_guest_agent(base_dir, agent_rel_path):
    """Return True when a guest agent is present under base_dir.

    Tries to locate a guest agent at the path specified by agent_rel_path
    (fixes docstring typo "specificed").  When found, the image can
    reconfigure its network from xenstore data, so files in /etc need not
    be touched.
    """
    agent_path = os.path.join(base_dir, agent_rel_path)
    if os.path.isfile(agent_path):
        # The presence of the guest agent
        # file indicates that this instance can
        # reconfigure the network from xenstore data,
        # so manipulation of files in /etc is not
        # required
        # NOTE: added the missing space so the message no longer reads
        # "will not bemanipulated".
        LOG.info(_('XenServer tools installed in this '
                   'image are capable of network injection. '
                   'Networking files will not be '
                   'manipulated'))
        return True
    xe_daemon_filename = os.path.join(base_dir,
        'usr', 'sbin', 'xe-daemon')
    if os.path.isfile(xe_daemon_filename):
        LOG.info(_('XenServer tools are present '
                   'in this image but are not capable '
                   'of network injection'))
    else:
        LOG.info(_('XenServer tools are not '
                   'installed in this image'))
    return False
def _mounted_processing(device, key, net, metadata):
    """Callback which runs with the image VDI attached"""
    dev_path = '/dev/' + device + '1'  # NB: Partition 1 hardcoded
    tmpdir = tempfile.mkdtemp()
    try:
        # Mount only Linux filesystems, to avoid disturbing NTFS images
        err = _mount_filesystem(dev_path, tmpdir)
        if not err:
            try:
                # This try block ensures that the umount occurs
                if not _find_guest_agent(tmpdir, FLAGS.xenapi_agent_path):
                    # No agent present: inject key/net/metadata straight
                    # into the guest's filesystem.
                    LOG.info(_('Manipulating interface files '
                               'directly'))
                    disk.inject_data_into_fs(tmpdir, key, net, metadata,
                                             utils.execute)
            finally:
                utils.execute('umount', dev_path, run_as_root=True)
        else:
            # A failed mount is expected for non-Linux images; log and
            # carry on without injecting anything.
            LOG.info(_('Failed to mount filesystem (expected for '
                       'non-linux instances): %s') % err)
    finally:
        # remove temporary directory
        os.rmdir(tmpdir)
def _prepare_injectables(inst, networks_info):
    """
    prepares the ssh key and the network configuration file to be
    injected into the disk image

    Returns a (key, net, metadata) tuple; `net` is None when no network
    in networks_info is flagged for injection.
    """
    #do the import here - Cheetah.Template will be loaded
    #only if injection is performed
    from Cheetah import Template as t
    template = t.Template
    template_data = open(FLAGS.injected_network_template).read()
    metadata = inst['metadata']
    key = str(inst['key_data'])
    net = None
    if networks_info:
        ifc_num = -1
        interfaces_info = []
        have_injected_networks = False
        for (network_ref, info) in networks_info:
            # ifc_num counts every network so ethN names stay aligned with
            # device order, even for networks we skip below.
            ifc_num += 1
            if not network_ref['injected']:
                continue
            have_injected_networks = True
            ip_v4 = ip_v6 = None
            if 'ips' in info and len(info['ips']) > 0:
                ip_v4 = info['ips'][0]
            if 'ip6s' in info and len(info['ip6s']) > 0:
                ip_v6 = info['ip6s'][0]
            if len(info['dns']) > 0:
                dns = info['dns'][0]
            else:
                dns = ''
            interface_info = {'name': 'eth%d' % ifc_num,
                              'address': ip_v4 and ip_v4['ip'] or '',
                              'netmask': ip_v4 and ip_v4['netmask'] or '',
                              'gateway': info['gateway'],
                              'broadcast': info['broadcast'],
                              'dns': dns,
                              'address_v6': ip_v6 and ip_v6['ip'] or '',
                              'netmask_v6': ip_v6 and ip_v6['netmask'] or '',
                              'gateway_v6': ip_v6 and info['gateway6'] or '',
                              'use_ipv6': FLAGS.use_ipv6}
            interfaces_info.append(interface_info)
        if have_injected_networks:
            # Render the interfaces file from the configured template.
            net = str(template(template_data,
                               searchList=[{'interfaces': interfaces_info,
                                            'use_ipv6': FLAGS.use_ipv6}]))
    return key, net, metadata
| nii-cloud/dodai-compute | nova/virt/xenapi/vm_utils.py | Python | apache-2.0 | 49,055 |
# pyOCD debugger
# Copyright (c) 2015-2019 Arm Limited
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import argparse
import os
import sys
from time import (sleep, time)
from random import randrange
import math
import argparse
import traceback
import logging
from random import randrange
from pyocd.core.target import Target
from pyocd.coresight.cortex_m import CortexM
from pyocd.gdbserver.context_facade import GDBDebugContextFacade
from pyocd.core.helpers import ConnectHelper
from pyocd.utility.conversion import float32_to_u32, u32_to_float32
from pyocd.utility.mask import same
from pyocd.core import exceptions
from pyocd.core.memory_map import MemoryType
from pyocd.flash.file_programmer import FileProgrammer
from test_util import (
Test,
TestResult,
get_session_options,
get_target_test_params,
get_test_binary_path,
)
# Number of repetitions used to average each timed operation below.
TEST_COUNT = 20
class CortexTestResult(TestResult):
    """Test result that also records per-metric timing measurements."""

    METRICS = [
        "get_t_response",
        "step",
        "bp_add_remove",
        "get_reg_context",
        "set_reg_context",
        "run_halt",
        "gdb_step",
        ]

    def __init__(self):
        super(CortexTestResult, self).__init__(None, None, None)
        self.name = "cortex"
        # Seed every metric with a sentinel so missing timings stand out.
        self.times = dict((metric, -1.0) for metric in self.METRICS)
class CortexTest(Test):
    """Functional test exercising core Cortex-M debug operations."""

    def __init__(self):
        super(CortexTest, self).__init__("Cortex Test", cortex_test)

    def print_perf_info(self, result_list, output_file=None):
        """Print a timing table for every CortexTestResult in result_list."""
        cortex_results = [r for r in result_list
                          if isinstance(r, CortexTestResult)]
        print("\n\n------ Cortex Test Performance ------", file=output_file)
        print("", file=output_file)
        for result in cortex_results:
            print("Target {}:".format(result.board), file=output_file)
            for metric in CortexTestResult.METRICS:
                print("  {:<20} = {: 8.3f} ms".format(metric, result.times[metric] * 1000), file=output_file)
            print("", file=output_file)

    def run(self, board):
        """Run the test, converting any exception into a failed result."""
        try:
            outcome = self.test_function(board.unique_id)
        except Exception as exc:
            outcome = CortexTestResult()
            outcome.passed = False
            print("Exception %s when testing board %s" % (exc, board.unique_id))
            traceback.print_exc(file=sys.stdout)
        outcome.board = board
        outcome.test = self
        return outcome
def float_compare(f1, f2):
    """Return True when f1 and f2 differ by less than 1e-4 (absolute)."""
    delta = f1 - f2
    if delta < 0:
        delta = -delta
    return delta < 0.0001
def test_function(session, function):
    """Time `function` over TEST_COUNT flushed calls; return mean seconds."""
    probe = session.probe
    probe.flush()
    start = time()
    for _ in range(TEST_COUNT):
        function()
    probe.flush()
    elapsed = time() - start
    return elapsed / float(TEST_COUNT)
def cortex_test(board_id):
with ConnectHelper.session_with_chosen_probe(unique_id=board_id, **get_session_options()) as session:
board = session.board
target_type = board.target_type
binary_file = get_test_binary_path(board.test_binary)
test_params = get_target_test_params(session)
test_clock = test_params['test_clock']
addr_invalid = 0x3E000000 # Last 16MB of ARM SRAM region - typically empty
expect_invalid_access_to_fail = test_params['error_on_invalid_access']
memory_map = board.target.get_memory_map()
ram_region = memory_map.get_default_region_of_type(MemoryType.RAM)
rom_region = memory_map.get_boot_memory()
addr = ram_region.start
size = 0x502
addr_bin = rom_region.start
target = board.target
probe = session.probe
probe.set_clock(test_clock)
test_pass_count = 0
test_count = 0
result = CortexTestResult()
debugContext = target.get_target_context()
gdbFacade = GDBDebugContextFacade(debugContext)
print("\n\n----- FLASH NEW BINARY BEFORE TEST -----")
FileProgrammer(session).program(binary_file, base_address=addr_bin)
# Let the target run for a bit so it
# can initialize the watchdog if it needs to
target.resume()
sleep(0.2)
target.halt()
print("PROGRAMMING COMPLETE")
print("\n\n----- TESTING CORTEX-M PERFORMANCE -----")
test_time = test_function(session, gdbFacade.get_t_response)
result.times["get_t_response"] = test_time
print("Function get_t_response time: %f" % test_time)
# Step
test_time = test_function(session, target.step)
result.times["step"] = test_time
print("Function step time: %f" % test_time)
# Breakpoint
def set_remove_breakpoint():
target.set_breakpoint(0)
target.remove_breakpoint(0)
test_time = test_function(session, set_remove_breakpoint)
result.times["bp_add_remove"] = test_time
print("Add and remove breakpoint: %f" % test_time)
# get_register_context
test_time = test_function(session, gdbFacade.get_register_context)
result.times["get_reg_context"] = test_time
print("Function get_register_context: %f" % test_time)
# set_register_context
context = gdbFacade.get_register_context()
def set_register_context():
gdbFacade.set_register_context(context)
test_time = test_function(session, set_register_context)
result.times["set_reg_context"] = test_time
print("Function set_register_context: %f" % test_time)
# Run / Halt
def run_halt():
target.resume()
target.halt()
test_time = test_function(session, run_halt)
result.times["run_halt"] = test_time
print("Resume and halt: %f" % test_time)
# GDB stepping
def simulate_step():
target.step()
gdbFacade.get_t_response()
target.set_breakpoint(0)
target.resume()
target.halt()
gdbFacade.get_t_response()
target.remove_breakpoint(0)
test_time = test_function(session, simulate_step)
result.times["gdb_step"] = test_time
print("Simulated GDB step: %f" % test_time)
# Test passes if there are no exceptions
test_pass_count += 1
test_count += 1
print("TEST PASSED")
print("\n\n------ Testing Reset Types ------")
def reset_methods(fnc):
print("Hardware reset")
fnc(reset_type=Target.ResetType.HW)
print("Hardware reset (default=HW)")
target.selected_core.default_reset_type = Target.ResetType.HW
fnc(reset_type=None)
print("Software reset (default=SYSRESETREQ)")
target.selected_core.default_reset_type = Target.ResetType.SW_SYSRESETREQ
fnc(reset_type=None)
print("Software reset (default=VECTRESET)")
target.selected_core.default_reset_type = Target.ResetType.SW_VECTRESET
fnc(reset_type=None)
print("Software reset (default=emulated)")
target.selected_core.default_reset_type = Target.ResetType.SW_EMULATED
fnc(reset_type=None)
print("(Default) Software reset (SYSRESETREQ)")
target.selected_core.default_software_reset_type = Target.ResetType.SW_SYSRESETREQ
fnc(reset_type=Target.ResetType.SW)
print("(Default) Software reset (VECTRESET)")
target.selected_core.default_software_reset_type = Target.ResetType.SW_VECTRESET
fnc(reset_type=Target.ResetType.SW)
print("(Default) Software reset (emulated)")
target.selected_core.default_software_reset_type = Target.ResetType.SW_EMULATED
fnc(reset_type=Target.ResetType.SW)
print("Software reset (option=default)")
target.selected_core.default_reset_type = Target.ResetType.SW
target.selected_core.default_software_reset_type = Target.ResetType.SW_SYSRESETREQ
session.options['reset_type'] = 'default'
fnc(reset_type=None)
print("Software reset (option=hw)")
session.options['reset_type'] = 'hw'
fnc(reset_type=None)
print("Software reset (option=sw)")
session.options['reset_type'] = 'sw'
fnc(reset_type=None)
print("Software reset (option=sw_sysresetreq)")
session.options['reset_type'] = 'sw_sysresetreq'
fnc(reset_type=None)
print("Software reset (option=sw_vectreset)")
session.options['reset_type'] = 'sw_vectreset'
fnc(reset_type=None)
print("Software reset (option=sw_emulated)")
session.options['reset_type'] = 'sw_emulated'
fnc(reset_type=None)
reset_methods(target.reset)
# Test passes if there are no exceptions
test_pass_count += 1
test_count += 1
print("TEST PASSED")
print("\n\n------ Testing Reset Halt ------")
reset_methods(target.reset_and_halt)
# Test passes if there are no exceptions
test_pass_count += 1
test_count += 1
print("TEST PASSED")
print("\n\n------ Testing Register Read/Write ------")
print("Reading r0")
val = target.read_core_register('r0')
origR0 = val
rawVal = target.read_core_register_raw('r0')
test_count += 1
if val == rawVal:
test_pass_count += 1
print("TEST PASSED")
else:
print("TEST FAILED")
print("Writing r0")
target.write_core_register('r0', 0x12345678)
val = target.read_core_register('r0')
rawVal = target.read_core_register_raw('r0')
test_count += 1
if val == 0x12345678 and rawVal == 0x12345678:
test_pass_count += 1
print("TEST PASSED")
else:
print("TEST FAILED")
print("Raw writing r0")
target.write_core_register_raw('r0', 0x87654321)
val = target.read_core_register('r0')
rawVal = target.read_core_register_raw('r0')
test_count += 1
if val == 0x87654321 and rawVal == 0x87654321:
test_pass_count += 1
print("TEST PASSED")
else:
print("TEST FAILED")
print("Read/write r0, r1, r2, r3")
origRegs = target.read_core_registers_raw(['r0', 'r1', 'r2', 'r3'])
target.write_core_registers_raw(['r0', 'r1', 'r2', 'r3'], [1, 2, 3, 4])
vals = target.read_core_registers_raw(['r0', 'r1', 'r2', 'r3'])
passed = vals[0] == 1 and vals[1] == 2 and vals[2] == 3 and vals[3] == 4
test_count += 1
if passed:
test_pass_count += 1
print("TEST PASSED")
else:
print("TEST FAILED")
# Restore regs
origRegs[0] = origR0
target.write_core_registers_raw(['r0', 'r1', 'r2', 'r3'], origRegs)
print("Verify exception is raised while core is running")
target.resume()
try:
val = target.read_core_register('r0')
except exceptions.CoreRegisterAccessError:
passed = True
else:
passed = False
test_count += 1
if passed:
test_pass_count += 1
print("TEST PASSED")
else:
print("TEST FAILED")
print("Verify failure to write core register while running raises exception")
try:
target.write_core_register('r0', 0x1234)
except exceptions.CoreRegisterAccessError:
passed = True
else:
passed = False
test_count += 1
if passed:
test_pass_count += 1
print("TEST PASSED")
else:
print("TEST FAILED")
# Resume execution.
target.halt()
if target.selected_core.has_fpu:
print("Reading s0")
val = target.read_core_register('s0')
rawVal = target.read_core_register_raw('s0')
origRawS0 = rawVal
passed = isinstance(val, float) and isinstance(rawVal, int) \
and float32_to_u32(val) == rawVal
test_count += 1
if passed:
test_pass_count += 1
print("TEST PASSED")
else:
print("TEST FAILED")
print("Writing s0")
target.write_core_register('s0', math.pi)
val = target.read_core_register('s0')
rawVal = target.read_core_register_raw('s0')
passed = float_compare(val, math.pi) and float_compare(u32_to_float32(rawVal), math.pi)
test_count += 1
if passed:
test_pass_count += 1
print("TEST PASSED")
else:
print("TEST FAILED (%f==%f, 0x%08x->%f)" % (val, math.pi, rawVal, u32_to_float32(rawVal)))
print("Raw writing s0")
x = float32_to_u32(32.768)
target.write_core_register_raw('s0', x)
val = target.read_core_register('s0')
passed = float_compare(val, 32.768)
test_count += 1
if passed:
test_pass_count += 1
print("TEST PASSED")
else:
print("TEST FAILED (%f==%f)" % (val, 32.768))
print("Read/write s0, s1")
_1p1 = float32_to_u32(1.1)
_2p2 = float32_to_u32(2.2)
origRegs = target.read_core_registers_raw(['s0', 's1'])
target.write_core_registers_raw(['s0', 's1'], [_1p1, _2p2])
vals = target.read_core_registers_raw(['s0', 's1'])
s0 = target.read_core_register('s0')
s1 = target.read_core_register('s1')
passed = vals[0] == _1p1 and float_compare(s0, 1.1) \
and vals[1] == _2p2 and float_compare(s1, 2.2)
test_count += 1
if passed:
test_pass_count += 1
print("TEST PASSED")
else:
print("TEST FAILED (0x%08x==0x%08x, %f==%f, 0x%08x==0x%08x, %f==%f)" \
% (vals[0], _1p1, s0, 1.1, vals[1], _2p2, s1, 2.2))
# Restore s0
origRegs[0] = origRawS0
target.write_core_registers_raw(['s0', 's1'], origRegs)
print("Verify that all listed core registers can be accessed")
reg_count = 0
passed_reg_count = 0
for r in target.selected_core.core_registers.as_set:
try:
reg_count += 1
val = target.read_core_register(r.name)
target.write_core_register(r.name, val)
passed_reg_count += 1
except exceptions.CoreRegisterAccessError:
pass
test_count += 1
if passed_reg_count == reg_count:
test_pass_count += 1
print("TEST PASSED (%i registers)" % reg_count)
else:
print("TEST FAILED (%i registers, %i failed)" % (reg_count, reg_count - passed_reg_count))
print("\n\n------ Testing Invalid Memory Access Recovery ------")
memory_access_pass = True
try:
print("reading 0x1000 bytes at invalid address 0x%08x" % addr_invalid)
target.read_memory_block8(addr_invalid, 0x1000)
target.flush()
# If no exception is thrown the tests fails except on nrf51 where invalid addresses read as 0
if expect_invalid_access_to_fail:
print(" failed to get expected fault")
memory_access_pass = False
else:
print(" no fault as expected")
except exceptions.TransferFaultError as exc:
print(" got expected error: " + str(exc))
try:
print("reading 0x1000 bytes at invalid address 0x%08x" % (addr_invalid + 1))
target.read_memory_block8(addr_invalid + 1, 0x1000)
target.flush()
# If no exception is thrown the tests fails except on nrf51 where invalid addresses read as 0
if expect_invalid_access_to_fail:
print(" failed to get expected fault")
memory_access_pass = False
else:
print(" no fault as expected")
except exceptions.TransferFaultError as exc:
print(" got expected error: " + str(exc))
data = [0x00] * 0x1000
try:
print("writing 0x%08x bytes at invalid address 0x%08x" % (len(data), addr_invalid))
target.write_memory_block8(addr_invalid, data)
target.flush()
# If no exception is thrown the tests fails except on nrf51 where invalid addresses read as 0
if expect_invalid_access_to_fail:
print(" failed to get expected fault!")
memory_access_pass = False
else:
print(" no fault as expected")
except exceptions.TransferFaultError as exc:
print(" got expected error: " + str(exc))
data = [0x00] * 0x1000
try:
print("writing 0x%08x bytes at invalid address 0x%08x" % (len(data), addr_invalid + 1))
target.write_memory_block8(addr_invalid + 1, data)
target.flush()
# If no exception is thrown the tests fails except on nrf51 where invalid addresses read as 0
if expect_invalid_access_to_fail:
print(" failed to get expected fault!")
memory_access_pass = False
else:
print(" no fault as expected")
except exceptions.TransferFaultError as exc:
print(" got expected error: " + str(exc))
data = [randrange(0, 255) for x in range(size)]
print("r/w 0x%08x bytes at 0x%08x" % (size, addr))
target.write_memory_block8(addr, data)
block = target.read_memory_block8(addr, size)
if same(data, block):
print(" Aligned access pass")
else:
print(" Memory read does not match memory written")
memory_access_pass = False
data = [randrange(0, 255) for x in range(size)]
print("r/w 0x%08x bytes at 0x%08x" % (size, addr + 1))
target.write_memory_block8(addr + 1, data)
block = target.read_memory_block8(addr + 1, size)
if same(data, block):
print(" Unaligned access pass")
else:
print(" Unaligned memory read does not match memory written")
memory_access_pass = False
test_count += 1
if memory_access_pass:
test_pass_count += 1
print("TEST PASSED")
else:
print("TEST FAILED")
print("\n\n------ Testing Software Breakpoints ------")
test_passed = True
orig8x2 = target.read_memory_block8(addr, 2)
orig8 = target.read8(addr)
orig16 = target.read16(addr & ~1)
orig32 = target.read32(addr & ~3)
origAligned32 = target.read_memory_block32(addr & ~3, 1)
def test_filters():
test_passed = True
filtered = target.read_memory_block8(addr, 2)
if same(orig8x2, filtered):
print("2 byte unaligned passed")
else:
print("2 byte unaligned failed (read %x-%x, expected %x-%x)" % (filtered[0], filtered[1], orig8x2[0], orig8x2[1]))
test_passed = False
for now in (True, False):
filtered = target.read8(addr, now)
if not now:
filtered = filtered()
if filtered == orig8:
print("8-bit passed [now=%s]" % now)
else:
print("8-bit failed [now=%s] (read %x, expected %x)" % (now, filtered, orig8))
test_passed = False
filtered = target.read16(addr & ~1, now)
if not now:
filtered = filtered()
if filtered == orig16:
print("16-bit passed [now=%s]" % now)
else:
print("16-bit failed [now=%s] (read %x, expected %x)" % (now, filtered, orig16))
test_passed = False
filtered = target.read32(addr & ~3, now)
if not now:
filtered = filtered()
if filtered == orig32:
print("32-bit passed [now=%s]" % now)
else:
print("32-bit failed [now=%s] (read %x, expected %x)" % (now, filtered, orig32))
test_passed = False
filtered = target.read_memory_block32(addr & ~3, 1)
if same(filtered, origAligned32):
print("32-bit aligned passed")
else:
print("32-bit aligned failed (read %x, expected %x)" % (filtered[0], origAligned32[0]))
test_passed = False
return test_passed
print("Installed software breakpoint at 0x%08x" % addr)
target.set_breakpoint(addr, Target.BreakpointType.SW)
test_passed = test_filters() and test_passed
print("Removed software breakpoint")
target.remove_breakpoint(addr)
test_passed = test_filters() and test_passed
test_count += 1
if test_passed:
test_pass_count += 1
print("TEST PASSED")
else:
print("TEST FAILED")
print("\nTest Summary:")
print("Pass count %i of %i tests" % (test_pass_count, test_count))
if test_pass_count == test_count:
print("CORTEX TEST PASSED")
else:
print("CORTEX TEST FAILED")
target.reset()
result.passed = test_count == test_pass_count
return result
if __name__ == "__main__":
    # Stand-alone entry point: parse CLI flags, configure logging, run the test.
    cli_parser = argparse.ArgumentParser(description='pyOCD cpu test')
    cli_parser.add_argument('-d', '--debug', action="store_true", help='Enable debug logging')
    cli_args = cli_parser.parse_args()
    logging.basicConfig(level=logging.DEBUG if cli_args.debug else logging.INFO)
    cortex_test(None)
| mbedmicro/pyOCD | test/cortex_test.py | Python | apache-2.0 | 22,820 |
# -*- coding: utf-8 -*-
'''
Created on 17.10.2014
@author: Simon Gwerder
'''
import datetime
import json
import logging
from flask import Flask, session, send_from_directory, render_template, request, redirect, jsonify, Response
from flask_bootstrap import Bootstrap
from search.graphsearch import GraphSearch
from utilities import utils
from utilities.spellcorrect import SpellCorrect
from web.jsonpdeco import support_jsonp
try:
# The typical way to import flask-cors. From documentation!
from flask_cors import cross_origin
except ImportError:
# This allows examples to be run without installation.
import os
parentdir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
os.sys.path.insert(0, parentdir)
from flask_cors import cross_origin
websiteRdfGraph = None # global var is assigned in setRdfGraph()! Because there's no way to restart a running 'app' in FLASK!!!
websiteDataDate = datetime.date.today().strftime("%d.%m.%y") # will be assigned aswell
logger = logging.getLogger('VISITOR_LOGGER')
uniqueIP = {} # gathering all IP's that requested during this app's runtime
def setRdfGraph(rdfGraph, dataDate):
    """Install the RDF graph and its data date into the module-level globals.

    Flask offers no clean way to restart a running app, so the active graph
    is swapped in place via these module globals instead.
    """
    global websiteRdfGraph, websiteDataDate
    websiteRdfGraph = rdfGraph
    websiteDataDate = dataDate
def createApp():
    """Application factory: build the Flask app with Bootstrap support.

    The secret key is required for session storage (the per-visitor
    language choice is kept in the session).
    """
    app = Flask(__name__)
    app.config['SECRET_KEY'] = '#T0P#SECRET#'
    Bootstrap(app)
    #setRdfGraph()
    return app

# Module-level singleton app instance used by all route decorators below.
app = createApp()
def getLocale():
    """Return the visitor's UI language ('en' or 'de'), caching it in the session."""
    if 'language' in session:
        return session['language']
    best = request.accept_languages.best_match(['en', 'de'])
    if best is None or best == '':
        best = 'en'
    setLocale(best)
    return best


def setLocale(lang=None):
    """Store *lang* in the session, defaulting to English for empty/None input."""
    session['language'] = 'en' if (lang is None or lang == '') else lang
def addLiteralToLangDict(retDict, literalList):
    """Append English/German literals to the language-keyed dict, de-duplicated.

    Entries without a ``language`` attribute (i.e. values that are not RDF
    Literals) are skipped, as are languages other than 'en' and 'de'.
    Returns the (mutated) *retDict* for chaining.
    """
    for entry in literalList:
        lang = getattr(entry, 'language', None)
        if lang not in ('en', 'de'):
            continue
        bucket = retDict[str(lang)]
        bucket.append(utils.encode(entry))
        retDict[str(lang)] = utils.uniquifyList(bucket)
    return retDict
def putToUniqueIP(ipAddress):
    """Count a request from *ipAddress* in the module-level ``uniqueIP`` dict.

    Keys are IP address strings; values are the number of requests seen from
    that address during this app's runtime.
    """
    # dict.get with a default collapses the original branchy read-modify-write.
    uniqueIP[ipAddress] = uniqueIP.get(ipAddress, 0) + 1
def searchCall(query, lang=None):
    """Run a full thesaurus search for *query* in language *lang*.

    Returns None when no graph is loaded or the query is empty; otherwise
    the result list from GraphSearch.fullSearch. Falls back to the session
    locale when *lang* is not supplied.
    """
    if websiteRdfGraph is None or query is None or len(query) == 0:
        return None
    if lang is None:
        lang = getLocale()
    # Instantiate the search helper only once we know a search will happen
    # (the original built it unconditionally, even on the early-return path).
    return GraphSearch().fullSearch(websiteRdfGraph, query, lang)
@app.route('/favicon.ico', methods = ['GET'])
def favicon():
    """Serve the site favicon from the static directory."""
    return send_from_directory(utils.staticDir(), 'ico/favicon.ico', mimetype='image/vnd.microsoft.icon')

@app.route('/tagfinder_thesaurus.rdf', methods = ['GET'])
def tagfindergraph():
    """Serve the raw TagFinder thesaurus as RDF/XML (download counts as a visit)."""
    putToUniqueIP(request.remote_addr)
    return send_from_directory(utils.dataDir(), 'tagfinder_thesaurus.rdf', mimetype='application/rdf+xml')

@app.route('/opensearch.xml', methods = ['GET'])
def opensearch():
    """Serve the OpenSearch description document for browser search integration."""
    putToUniqueIP(request.remote_addr)
    return send_from_directory(utils.templatesDir(), 'opensearch.xml', mimetype='application/opensearchdescription+xml')

@app.route('/', methods = ['GET'])
@app.route('/index', methods = ['GET'])
def index():
    """Render the main search page."""
    return render_template('search.html', lang=getLocale(), dataDate=websiteDataDate)

@app.route('/setlang', methods = ['POST'])
def setLanguage():
    """Persist the language posted by the UI; always answers '200'."""
    setLocale(request.form["lang"])
    return '200'

@app.route('/getlang', methods = ['GET'])
def getLanguage():
    """Return the current session language as a JSON string."""
    return Response(json.dumps(getLocale()), mimetype='application/json')

@app.errorhandler(405)
def methodNotAllowed(e):
    """Render the custom 405 (method not allowed) page."""
    return render_template('405.html', lang=getLocale()), 405

@app.errorhandler(404)
def pageNotFound(e):
    """Render the custom 404 (not found) page."""
    return render_template('404.html', lang=getLocale()), 404
def getThumbnailLink(imageLink):
    """Derive a 200px thumbnail URL from a wiki image URL.

    None passes through unchanged. Wikimedia Commons links are rewritten to
    the OSM wiki's thumbnail mirror; '/images/' paths get '/images/thumb/'.
    """
    if imageLink is None:
        return None
    thumb = imageLink.replace('/images/', '/images/thumb/')
    thumb = thumb.replace('http://upload.wikimedia.org/wikipedia/commons/',
                          'http://wiki.openstreetmap.org/w/images/thumb/')
    fileName = thumb.rsplit('/', 1)[-1]
    return '%s/200px-%s' % (thumb, fileName)
@app.route('/search', methods = ['GET'])
def search():
    """Handle a UI search request and render the results page."""
    query = request.args.get('query', '')
    lang = request.args.get('lang', '')
    results = searchCall(query, lang)
    if results is None:
        return redirect('/')
    # Enrich every hit with a thumbnail variant of its depiction image.
    for hit in results:
        hit['thumbnail'] = getThumbnailLink(hit['depiction'])
    putToUniqueIP(request.remote_addr)
    logger.info('IP: ' + request.remote_addr + ", search: " + query + ", lang: " + lang)
    return render_template('search.html', lang=getLocale(), query=query, results=results)
@app.route('/apidoc', methods = ['GET'])
def api():
    """Render the API documentation page."""
    return render_template('apidoc.html', lang=getLocale())

@app.route('/about', methods = ['GET'])
def about():
    """Render the about page, including the thesaurus data date."""
    return render_template('about.html', lang=getLocale(), dataDate=websiteDataDate)
@app.route('/api/search', methods = ['GET'])
@cross_origin()
@support_jsonp
def apiSearch():
    """JSON search API: /api/search?query=...&lang=...&format=json_pretty.

    Returns the search result list as JSON (pretty-printed when
    format=json_pretty), or an empty JSON list when nothing matches.
    """
    query = request.args.get('query', '')
    lang = request.args.get('lang', '')
    prettyPrint = request.args.get('format', '')
    # NOTE(review): request.args.get never returns None here (the default is
    # ''), so this guard is dead as written; defaulting an *empty* lang to
    # 'en' may have been the intent — confirm before tightening to `not lang`.
    if lang is None:
        lang = 'en'
    searchResults = searchCall(query, lang)
    if searchResults is None:
        return jsonify([])
    if prettyPrint is not None and prettyPrint.lower() == 'json_pretty':
        jsonDump = json.dumps(searchResults, indent=3, sort_keys=True)
    else:
        jsonDump = json.dumps(searchResults)
    putToUniqueIP(request.remote_addr)
    logger.info('IP: ' + request.remote_addr + ", API search: " + query + ", lang: " + lang)
    return Response(jsonDump, mimetype='application/json')
@app.route('/suggest', methods = ['GET'])
def suggest():
    """Spelling suggestions for the UI autocomplete, in the session language."""
    query = request.args.get('query', '')
    spellCorrect = SpellCorrect()
    lang = getLocale()
    if lang == 'en':
        suggestions = spellCorrect.listSuggestionsEN(query)
    elif lang == 'de':
        suggestions = spellCorrect.listSuggestionsDE(query)
    else:
        suggestions = []
    return Response(json.dumps(suggestions), mimetype='application/json')
@app.route('/api/suggest', methods = ['GET'])
@cross_origin()
@support_jsonp
def apiSuggest():
    """JSON suggestion API: /api/suggest?query=...&lang=...&format=json_pretty."""
    spellCorrect = SpellCorrect()
    query = request.args.get('query', '')
    lang = request.args.get('lang', '')
    prettyPrint = request.args.get('format', '')
    if lang == 'en':
        suggestions = spellCorrect.listSuggestionsEN(query)
    elif lang == 'de':
        suggestions = spellCorrect.listSuggestionsDE(query)
    else:
        # Unknown/missing language: fall back to the language-agnostic list.
        suggestions = spellCorrect.listSuggestions(query)
    wantPretty = prettyPrint is not None and prettyPrint.lower() == 'json_pretty'
    if wantPretty:
        jsonDump = json.dumps(suggestions, indent=3, sort_keys=True)
    else:
        jsonDump = json.dumps(suggestions)
    putToUniqueIP(request.remote_addr)
    logger.info('IP: ' + request.remote_addr + ", API suggest: " + query + ", lang: " + lang)
    return Response(jsonDump, mimetype='application/json')
@app.route('/ossuggest', methods = ['GET'])
@cross_origin()
@support_jsonp
def osSuggest():
    """OpenSearch-style suggestion payload: [query, completions, [], []]."""
    query = request.args.get('query', '')
    lang = getLocale()
    spellCorrect = SpellCorrect()
    if lang == 'en':
        completions = spellCorrect.listSuggestionsEN(query)
    elif lang == 'de':
        completions = spellCorrect.listSuggestionsDE(query)
    else:
        completions = []
    payload = json.dumps([query, completions, [], []])
    return Response(payload, mimetype='application/json')
@app.route('/api/tag', methods = ['GET'])
@cross_origin()
@support_jsonp
def apiTag():
    """JSON tag API: look up a single OSM tag by key and optional value.

    /api/tag?key=...&value=...&format=json_pretty — a value of '*', empty, or
    absent means the key-only tag is looked up. Returns {} when no graph is
    loaded, the key is missing, or nothing matches.
    """
    prefLabel = None
    key = request.args.get('key','')
    if websiteRdfGraph is None or key is None or len(key) == 0:
        return jsonify({})
    value = request.args.get('value')
    # Build the SKOS prefLabel: 'key=value' for full tags, bare 'key' otherwise.
    if not value == '*' and not value is None and not len(value) == 0 :
        prefLabel = key + '=' + value
    else:
        prefLabel = key
    subject = websiteRdfGraph.getSubByPrefLabel(prefLabel)
    # subject will only be tag subject and not term subject, because of missing language
    if subject is None:
        return jsonify({})
    rawResults = { subject : { } } # add empty dictionary for the searchMeta
    graphSearch = GraphSearch()
    results = graphSearch.getSortedTagResults(websiteRdfGraph, rawResults)
    if len(results) < 1:
        return jsonify({})
    prettyPrint = request.args.get('format', '')
    if prettyPrint is not None and prettyPrint.lower() == 'json_pretty':
        jsonDump = json.dumps(results[0], indent=3, sort_keys=True)
    else:
        jsonDump = json.dumps(results[0])
    putToUniqueIP(request.remote_addr)
    logger.info('IP: ' + request.remote_addr + ", API tag: " + prefLabel)
    return Response(jsonDump, mimetype='application/json')
@app.route('/api/terms', methods = ['GET'])
@cross_origin()
@support_jsonp
def apiTerms():
    """JSON term API: related/broader/narrower vocabulary for a term.

    /api/terms?term=...&format=json_pretty — gathers every subject whose
    prefLabel, altLabel, broader or narrower label matches *term* in English
    or German, then collects those subjects' labels per language.
    """
    term = request.args.get('term', '')
    prettyPrint = request.args.get('format', '')
    if websiteRdfGraph is None or term is None or len(term) == 0:
        return jsonify({})
    # Subjects found here are term subjects only (never tag subjects) because
    # the lookups are language-qualified. The eight copy-pasted extend() calls
    # are replaced by a loop that preserves the original order exactly:
    # pref, alt, broader, narrower — each for 'en' then 'de'.
    listRelatedMatches = []
    lookups = (
        websiteRdfGraph.getSubByPrefLabelLang,
        websiteRdfGraph.getSubByAltLabelLang,
        websiteRdfGraph.getSubByBroaderLang,
        websiteRdfGraph.getSubByNarrowerLang,
    )
    for lookup in lookups:
        for lang in ('en', 'de'):
            listRelatedMatches.extend(lookup(term, lang))
    termRelated = { 'en' : [], 'de' : [] }
    termBroader = { 'en' : [], 'de' : [] }
    termNarrower = { 'en' : [], 'de' : [] }
    for relSubject in listRelatedMatches:
        termRelated = addLiteralToLangDict(termRelated, utils.genToList(websiteRdfGraph.getPrefLabel(relSubject)))
        termRelated = addLiteralToLangDict(termRelated, utils.genToList(websiteRdfGraph.getAltLabel(relSubject)))
        termBroader = addLiteralToLangDict(termBroader, utils.genToList(websiteRdfGraph.getBroader(relSubject)))
        termNarrower = addLiteralToLangDict(termNarrower, utils.genToList(websiteRdfGraph.getNarrower(relSubject)))
    retDict = {
        'termRelated': termRelated,
        'termBroader': termBroader,
        'termNarrower': termNarrower,
    }
    if prettyPrint is not None and prettyPrint.lower() == 'json_pretty':
        jsonDump = json.dumps(retDict, indent=3, sort_keys=True)
    else:
        jsonDump = json.dumps(retDict)
    putToUniqueIP(request.remote_addr)
    logger.info('IP: ' + request.remote_addr + ", API terms: " + term)
    return Response(jsonDump, mimetype='application/json')
@app.route('/api/uniqueips', methods = ['GET'])
@cross_origin()
@support_jsonp
def uniqueIPs():
    """Expose the per-IP request counters gathered during this runtime."""
    prettyPrint = request.args.get('format', '')
    wantPretty = prettyPrint is not None and prettyPrint.lower() == 'json_pretty'
    if wantPretty:
        jsonDump = json.dumps(uniqueIP, indent=3, sort_keys=True)
    else:
        jsonDump = json.dumps(uniqueIP)
    return Response(jsonDump, mimetype='application/json')
| geometalab/OSMTagFinder | OSMTagFinder/web/views.py | Python | mit | 11,951 |
import scrapy
import re
import urlparse
from restaurantScraper.items import MenuItem
class DmozSpider(scrapy.Spider):
    """Spider for justeat.in: walks the restaurant listing page, follows each
    restaurant's menu page, and yields one MenuItem per priced dish."""
    name = "justeat"
    allowed_domains = ["justeat.in"]
    start_urls = [
        "http://justeat.in/noida/restaurants/sector-25",
    ]

    def parse(self, response):
        menuUrl = re.compile('.*menu')
        if response.url == DmozSpider.start_urls[0]:
            # Listing page: queue every restaurant's menu URL for crawling.
            open_restaurants = response.xpath('//section[@id="OpenRestaurants"]')
            for section in open_restaurants:
                for restaurant in section.xpath('//article[@class="first"]'):
                    menu_url = restaurant.xpath('div[1]/div[1]/section[1]/h3[1]/a[1]/@href').extract()
                    menu_url = ''.join(menu_url).strip()
                    parsed_url = urlparse.urlparse(menu_url)
                    # Only follow fully-qualified URLs (scheme + host present).
                    if parsed_url.scheme and parsed_url.netloc:
                        yield scrapy.Request(menu_url, callback=self.parse)
        else:
            # Menu page: scrape the restaurant name, then each priced row.
            details = response.xpath('//div[@class="restInfoDetails"]')[0]
            rest_name = ''.join(details.xpath('h1/text()').extract()).strip()
            if menuUrl.match(response.url):
                for row in response.xpath('//tr[@class="prdLi1"]'):
                    name = ''.join(row.xpath('td[1]/span/text()').extract()).strip()
                    price = ''.join(row.xpath('td[5]/span/text()').extract()).strip()
                    try:
                        itemPrice = int(price)
                    except ValueError:
                        # Non-numeric price cell (e.g. empty) - skip the row.
                        continue
                    item = MenuItem()
                    item['name'] = name
                    # BUG FIX: store the parsed integer, not the raw string
                    # (the original computed itemPrice but saved 'price').
                    item['price'] = itemPrice
                    item['restName'] = rest_name
                    yield item
| vishalv2050/scrapy-restaurants | restaurantScraper/spiders/foodPanda_spider.py | Python | mit | 3,155 |
"""Data template classes for discovery used to generate additional data for setup."""
from __future__ import annotations
from collections.abc import Iterable
from dataclasses import dataclass
from typing import Any
from zwave_js_server.const import CommandClass
from zwave_js_server.const.command_class.meter import (
CURRENT_METER_TYPES,
ENERGY_TOTAL_INCREASING_METER_TYPES,
POWER_FACTOR_METER_TYPES,
POWER_METER_TYPES,
VOLTAGE_METER_TYPES,
ElectricScale,
MeterScaleType,
)
from zwave_js_server.const.command_class.multilevel_sensor import (
CO2_SENSORS,
CO_SENSORS,
CURRENT_SENSORS,
ENERGY_MEASUREMENT_SENSORS,
HUMIDITY_SENSORS,
ILLUMINANCE_SENSORS,
POWER_SENSORS,
PRESSURE_SENSORS,
SIGNAL_STRENGTH_SENSORS,
TEMPERATURE_SENSORS,
TIMESTAMP_SENSORS,
VOLTAGE_SENSORS,
MultilevelSensorType,
)
from zwave_js_server.model.node import Node as ZwaveNode
from zwave_js_server.model.value import Value as ZwaveValue, get_value_id
from zwave_js_server.util.command_class.meter import get_meter_scale_type
from zwave_js_server.util.command_class.multilevel_sensor import (
get_multilevel_sensor_type,
)
from .const import (
ENTITY_DESC_KEY_BATTERY,
ENTITY_DESC_KEY_CO,
ENTITY_DESC_KEY_CO2,
ENTITY_DESC_KEY_CURRENT,
ENTITY_DESC_KEY_ENERGY_MEASUREMENT,
ENTITY_DESC_KEY_ENERGY_TOTAL_INCREASING,
ENTITY_DESC_KEY_HUMIDITY,
ENTITY_DESC_KEY_ILLUMINANCE,
ENTITY_DESC_KEY_MEASUREMENT,
ENTITY_DESC_KEY_POWER,
ENTITY_DESC_KEY_POWER_FACTOR,
ENTITY_DESC_KEY_PRESSURE,
ENTITY_DESC_KEY_SIGNAL_STRENGTH,
ENTITY_DESC_KEY_TARGET_TEMPERATURE,
ENTITY_DESC_KEY_TEMPERATURE,
ENTITY_DESC_KEY_TIMESTAMP,
ENTITY_DESC_KEY_TOTAL_INCREASING,
ENTITY_DESC_KEY_VOLTAGE,
)
# Maps a sensor entity-description key to the Meter CC scale types that are
# discovered under it (consumed by NumericSensorDataTemplate.resolve_data).
METER_DEVICE_CLASS_MAP: dict[str, set[MeterScaleType]] = {
    ENTITY_DESC_KEY_CURRENT: CURRENT_METER_TYPES,
    ENTITY_DESC_KEY_VOLTAGE: VOLTAGE_METER_TYPES,
    ENTITY_DESC_KEY_ENERGY_TOTAL_INCREASING: ENERGY_TOTAL_INCREASING_METER_TYPES,
    ENTITY_DESC_KEY_POWER: POWER_METER_TYPES,
    ENTITY_DESC_KEY_POWER_FACTOR: POWER_FACTOR_METER_TYPES,
}

# Same mapping for the Multilevel Sensor CC: entity-description key -> the
# multilevel sensor types discovered under that key.
MULTILEVEL_SENSOR_DEVICE_CLASS_MAP: dict[str, set[MultilevelSensorType]] = {
    ENTITY_DESC_KEY_CO: CO_SENSORS,
    ENTITY_DESC_KEY_CO2: CO2_SENSORS,
    ENTITY_DESC_KEY_CURRENT: CURRENT_SENSORS,
    ENTITY_DESC_KEY_ENERGY_MEASUREMENT: ENERGY_MEASUREMENT_SENSORS,
    ENTITY_DESC_KEY_HUMIDITY: HUMIDITY_SENSORS,
    ENTITY_DESC_KEY_ILLUMINANCE: ILLUMINANCE_SENSORS,
    ENTITY_DESC_KEY_POWER: POWER_SENSORS,
    ENTITY_DESC_KEY_PRESSURE: PRESSURE_SENSORS,
    ENTITY_DESC_KEY_SIGNAL_STRENGTH: SIGNAL_STRENGTH_SENSORS,
    ENTITY_DESC_KEY_TEMPERATURE: TEMPERATURE_SENSORS,
    ENTITY_DESC_KEY_TIMESTAMP: TIMESTAMP_SENSORS,
    ENTITY_DESC_KEY_VOLTAGE: VOLTAGE_SENSORS,
}
@dataclass
class ZwaveValueID:
    """Class to represent a value ID."""

    # Value property name (e.g. 'currentValue') or numeric property id.
    property_: str | int
    # Command class the value belongs to.
    command_class: int
    # Endpoint index; None leaves the endpoint unspecified in the lookup.
    endpoint: int | None = None
    # Optional property key for compound values.
    property_key: str | int | None = None
class BaseDiscoverySchemaDataTemplate:
    """Base class for discovery schema data templates."""

    def resolve_data(self, value: ZwaveValue) -> Any:
        """
        Resolve helper class data for a discovered value.

        Can optionally be implemented by subclasses if input data needs to be
        transformed once discovered Value is available.
        """
        # pylint: disable=no-self-use
        return {}

    def values_to_watch(self, resolved_data: Any) -> Iterable[ZwaveValue]:
        """
        Return list of all ZwaveValues resolved by helper that should be watched.

        Should be implemented by subclasses only if there are values to watch.
        """
        # pylint: disable=no-self-use
        return []

    def value_ids_to_watch(self, resolved_data: Any) -> set[str]:
        """
        Return list of all Value IDs resolved by helper that should be watched.

        Not to be overwritten by subclasses.
        """
        # Drop falsy entries (unresolved values) before taking their IDs.
        return {val.value_id for val in self.values_to_watch(resolved_data) if val}

    @staticmethod
    def _get_value_from_id(
        node: ZwaveNode, value_id_obj: ZwaveValueID
    ) -> ZwaveValue | None:
        """Get a ZwaveValue from a node using a ZwaveValueDict."""
        # Translate the ZwaveValueID description into the node's value-ID
        # string, then look it up; returns None when the node lacks the value.
        value_id = get_value_id(
            node,
            value_id_obj.command_class,
            value_id_obj.property_,
            endpoint=value_id_obj.endpoint,
            property_key=value_id_obj.property_key,
        )
        return node.values.get(value_id)
@dataclass
class DynamicCurrentTempClimateDataTemplate(BaseDiscoverySchemaDataTemplate):
    """Data template class for Z-Wave JS Climate entities with dynamic current temps."""

    lookup_table: dict[str | int, ZwaveValueID]
    dependent_value: ZwaveValueID

    def resolve_data(self, value: ZwaveValue) -> dict[str, Any]:
        """Resolve helper class data for a discovered value."""
        node = value.node
        return {
            "lookup_table": {
                key: self._get_value_from_id(node, value_id)
                for key, value_id in self.lookup_table.items()
            },
            "dependent_value": self._get_value_from_id(node, self.dependent_value),
        }

    def values_to_watch(self, resolved_data: dict[str, Any]) -> Iterable[ZwaveValue]:
        """Return list of all ZwaveValues resolved by helper that should be watched."""
        watched = list(resolved_data["lookup_table"].values())
        watched.append(resolved_data["dependent_value"])
        return watched

    @staticmethod
    def current_temperature_value(resolved_data: dict[str, Any]) -> ZwaveValue | None:
        """Get current temperature ZwaveValue from resolved data."""
        dependent_value: ZwaveValue | None = resolved_data["dependent_value"]
        if not dependent_value or dependent_value.value is None:
            return None
        # The dependent value's state label encodes the lookup key before '-'.
        state_label = dependent_value.metadata.states[str(dependent_value.value)]
        lookup_key = state_label.split("-")[0]
        lookup_table: dict[str | int, ZwaveValue | None] = resolved_data["lookup_table"]
        return lookup_table.get(lookup_key)
class NumericSensorDataTemplate(BaseDiscoverySchemaDataTemplate):
    """Data template class for Z-Wave Sensor entities."""

    def resolve_data(self, value: ZwaveValue) -> str | None:
        """Resolve helper class data for a discovered value."""
        command_class = value.command_class
        if command_class == CommandClass.BATTERY:
            return ENTITY_DESC_KEY_BATTERY

        if command_class == CommandClass.METER:
            scale_type = get_meter_scale_type(value)
            # These are energy scales, but their units don't qualify for the
            # energy device class, so report a generic increasing total.
            if scale_type in (
                ElectricScale.PULSE_COUNT,
                ElectricScale.KILOVOLT_AMPERE_HOUR,
                ElectricScale.KILOVOLT_AMPERE_REACTIVE_HOUR,
            ):
                return ENTITY_DESC_KEY_TOTAL_INCREASING
            # Likewise a power scale whose unit doesn't fit the power class.
            if scale_type == ElectricScale.KILOVOLT_AMPERE_REACTIVE:
                return ENTITY_DESC_KEY_MEASUREMENT
            return next(
                (
                    key
                    for key, scale_types in METER_DEVICE_CLASS_MAP.items()
                    if scale_type in scale_types
                ),
                None,
            )

        if command_class == CommandClass.SENSOR_MULTILEVEL:
            sensor_type = get_multilevel_sensor_type(value)
            if sensor_type == MultilevelSensorType.TARGET_TEMPERATURE:
                return ENTITY_DESC_KEY_TARGET_TEMPERATURE
            return next(
                (
                    key
                    for key, sensor_types in MULTILEVEL_SENSOR_DEVICE_CLASS_MAP.items()
                    if sensor_type in sensor_types
                ),
                None,
            )

        return None
| Danielhiversen/home-assistant | homeassistant/components/zwave_js/discovery_data_template.py | Python | apache-2.0 | 7,990 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Arm(R) Ethos(TM)-N integration mean tests"""
import numpy as np
import tvm
from tvm import relay
from tvm.testing import requires_ethosn
from . import infrastructure as tei
def _get_model(shape, axis, keepdims, input_zp, input_sc, output_zp, output_sc, dtype):
    """Build a quantized mean model: var -> cast(int32) -> mean -> requantize.

    The requantize stage maps the int32 reduction result back to the
    caller-supplied quantization parameters and dtype.
    """
    input_var = relay.var("a", shape=shape, dtype=dtype)
    widened = relay.op.cast(input_var, "int32")
    reduced = relay.mean(widened, axis, keepdims)
    return relay.qnn.op.requantize(
        reduced,
        input_scale=relay.const(input_sc, "float32"),
        input_zero_point=relay.const(input_zp, "int32"),
        output_scale=relay.const(output_sc, "float32"),
        output_zero_point=relay.const(output_zp, "int32"),
        out_dtype=dtype,
    )
@requires_ethosn
def test_mean():
    """Run quantized mean over axes (1, 2) on host and NPU and compare."""
    np.random.seed(0)
    for input_shape in [(1, 7, 7, 2048), (1, 8, 8)]:
        test_inputs = {
            "a": tvm.nd.array(np.random.randint(0, high=255, size=input_shape, dtype="uint8")),
        }
        run_outputs = []
        for use_npu in (False, True):
            model = _get_model(input_shape, [1, 2], True, 128, 0.0784314, 128, 0.0784314, "uint8")
            mod = tei.make_module(model, [])
            run_outputs.append(tei.build_and_run(mod, test_inputs, 1, {}, npu=use_npu))
        tei.verify(run_outputs, "uint8", 1)
| dmlc/tvm | tests/python/contrib/test_ethosn/test_mean.py | Python | apache-2.0 | 2,066 |
# -*- coding: utf-8 -*-
"""
Created on Wed Jun 15 11:39:04 2016
@author: rahulmehra
"""
# Import the modules
import pandas as pd
from sklearn.preprocessing import LabelEncoder
import numpy as np
# Define a function to autoclean the pandas dataframe
def autoclean(x):
    """Clean a DataFrame in place and return it.

    For every column:
      * fill NaNs with the median (numeric) or the most frequent value
        (non-numeric),
      * for int64/float64 columns, replace outliers (more than 2.5
        standard deviations from the mean) with the column median,
      * if at least 5% of a numeric column is negative, flip the sign of
        the negative values (treated as data-entry errors),
      * label-encode object (string) columns as integers.
    """
    for column in x.columns:
        # Replace NaNs with the median or mode of the column depending on the column type
        try:
            x[column] = x[column].fillna(x[column].median())
        except TypeError:
            # BUG FIX: fillna(Series) aligns on the index, so the original
            # only filled the first few rows; use the single most frequent
            # value for every NaN instead.
            modes = x[column].mode()
            if not modes.empty:
                x[column] = x[column].fillna(modes.iloc[0])
        # Select the columns with type int and float
        if x[column].dtype == 'int64' or x[column].dtype == 'float64':
            mean = x[column].mean()
            # 2.5 standard deviations is the outlier cut-off
            std = 2.5 * x[column].std()
            # Hoisted out of the lambda: the median of the pre-replacement
            # column (apply sees the old series, so this matches the
            # original semantics while avoiding a per-row recomputation).
            median = x[column].median()
            # BUG FIX: the original tested abs(y - mean > std) -- the abs()
            # of a boolean -- which silently ignored low-side outliers.
            x[column] = x[column].apply(
                lambda y: median if abs(y - mean) > std else y)
            # Calculate the number of rows in dataframe
            n_rows = len(x.index)
            # Fraction of negative values in the column
            negative_perc = np.sum(x[column] < 0) / n_rows
            # Handle unreliable values (negatives in a mostly-positive
            # column): flip the sign when >= 5% of the values are negative.
            x[column] = x[column].apply(
                lambda y: -y if (y < 0 and negative_perc >= 0.05) else y)
        # Encode all strings with numerical equivalents
        if str(x[column].values.dtype) == 'object':
            column_encoder = LabelEncoder().fit(x[column].values)
            x[column] = column_encoder.transform(x[column].values)
    return x
| ccbrandenburg/financialanalyticsproject | iembdfa/DataCleaning.py | Python | mit | 1,698 |
"""deCONZ fan platform tests."""
from copy import deepcopy
import pytest
from homeassistant.components.deconz.gateway import get_gateway_from_config_entry
from homeassistant.components.fan import (
ATTR_SPEED,
DOMAIN as FAN_DOMAIN,
SERVICE_SET_SPEED,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
SPEED_HIGH,
SPEED_LOW,
SPEED_MEDIUM,
SPEED_OFF,
)
from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON, STATE_UNAVAILABLE
from .test_gateway import (
DECONZ_WEB_REQUEST,
mock_deconz_put_request,
setup_deconz_integration,
)
# Canned deCONZ REST "lights" payload describing one ceiling-fan device
# (type "Fan"); used to seed the mocked gateway state in the tests below.
# state["speed"] is the deCONZ integer speed (4 maps to SPEED_HIGH in the
# tests that consume this fixture).
FANS = {
    "1": {
        "etag": "432f3de28965052961a99e3c5494daf4",
        "hascolor": False,
        "manufacturername": "King Of Fans, Inc.",
        "modelid": "HDC52EastwindFan",
        "name": "Ceiling fan",
        "state": {
            "alert": "none",
            "bri": 254,
            "on": False,
            "reachable": True,
            "speed": 4,
        },
        "swversion": "0000000F",
        "type": "Fan",
        "uniqueid": "00:22:a3:00:00:27:8b:81-01",
    }
}
async def test_no_fans(hass, aioclient_mock):
    """Test that no fan entities are created."""
    await setup_deconz_integration(hass, aioclient_mock)
    assert not hass.states.async_all()
async def test_fans(hass, aioclient_mock):
    """Test that all supported fan entities are created.

    Covers: entity creation from the FANS fixture, websocket-pushed state
    changes, every fan service call (verified against the mocked REST
    requests), handling of an unsupported speed value, and teardown on
    config-entry unload/removal.
    """
    data = deepcopy(DECONZ_WEB_REQUEST)
    data["lights"] = deepcopy(FANS)
    config_entry = await setup_deconz_integration(
        hass, aioclient_mock, get_state_response=data
    )
    gateway = get_gateway_from_config_entry(hass, config_entry)
    assert len(hass.states.async_all()) == 2  # Light and fan
    assert hass.states.get("fan.ceiling_fan")
    # Test states
    # The fixture reports deCONZ speed 4, which surfaces as SPEED_HIGH.
    assert hass.states.get("fan.ceiling_fan").state == STATE_ON
    assert hass.states.get("fan.ceiling_fan").attributes["speed"] == SPEED_HIGH
    # Simulate the gateway pushing a "fan stopped" event over the websocket.
    state_changed_event = {
        "t": "event",
        "e": "changed",
        "r": "lights",
        "id": "1",
        "state": {"speed": 0},
    }
    gateway.api.event_handler(state_changed_event)
    await hass.async_block_till_done()
    assert hass.states.get("fan.ceiling_fan").state == STATE_OFF
    assert hass.states.get("fan.ceiling_fan").attributes["speed"] == SPEED_OFF
    # Test service calls
    # Each call below results in a PUT to /lights/1/state; the JSON body of
    # the n-th mocked request is available as aioclient_mock.mock_calls[n][2].
    mock_deconz_put_request(aioclient_mock, config_entry.data, "/lights/1/state")
    # Service turn on fan
    await hass.services.async_call(
        FAN_DOMAIN,
        SERVICE_TURN_ON,
        {ATTR_ENTITY_ID: "fan.ceiling_fan"},
        blocking=True,
    )
    assert aioclient_mock.mock_calls[1][2] == {"speed": 4}
    # Service turn off fan
    await hass.services.async_call(
        FAN_DOMAIN,
        SERVICE_TURN_OFF,
        {ATTR_ENTITY_ID: "fan.ceiling_fan"},
        blocking=True,
    )
    assert aioclient_mock.mock_calls[2][2] == {"speed": 0}
    # Service set fan speed to low (deCONZ speed 1)
    await hass.services.async_call(
        FAN_DOMAIN,
        SERVICE_SET_SPEED,
        {ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_SPEED: SPEED_LOW},
        blocking=True,
    )
    assert aioclient_mock.mock_calls[3][2] == {"speed": 1}
    # Service set fan speed to medium (deCONZ speed 2)
    await hass.services.async_call(
        FAN_DOMAIN,
        SERVICE_SET_SPEED,
        {ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_SPEED: SPEED_MEDIUM},
        blocking=True,
    )
    assert aioclient_mock.mock_calls[4][2] == {"speed": 2}
    # Service set fan speed to high (deCONZ speed 4)
    await hass.services.async_call(
        FAN_DOMAIN,
        SERVICE_SET_SPEED,
        {ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_SPEED: SPEED_HIGH},
        blocking=True,
    )
    assert aioclient_mock.mock_calls[5][2] == {"speed": 4}
    # Service set fan speed to off (deCONZ speed 0)
    await hass.services.async_call(
        FAN_DOMAIN,
        SERVICE_SET_SPEED,
        {ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_SPEED: SPEED_OFF},
        blocking=True,
    )
    assert aioclient_mock.mock_calls[6][2] == {"speed": 0}
    # Service set fan speed to unsupported value
    with pytest.raises(ValueError):
        await hass.services.async_call(
            FAN_DOMAIN,
            SERVICE_SET_SPEED,
            {ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_SPEED: "bad value"},
            blocking=True,
        )
    # Events with an unsupported speed gets converted to default speed "medium"
    # (deCONZ speed 3 has no direct SPEED_* mapping).
    state_changed_event = {
        "t": "event",
        "e": "changed",
        "r": "lights",
        "id": "1",
        "state": {"speed": 3},
    }
    gateway.api.event_handler(state_changed_event)
    await hass.async_block_till_done()
    assert hass.states.get("fan.ceiling_fan").state == STATE_ON
    assert hass.states.get("fan.ceiling_fan").attributes["speed"] == SPEED_MEDIUM
    # Unloading the entry keeps the entities but marks them unavailable.
    await hass.config_entries.async_unload(config_entry.entry_id)
    states = hass.states.async_all()
    assert len(hass.states.async_all()) == 2
    for state in states:
        assert state.state == STATE_UNAVAILABLE
    # Removing the entry removes the entities entirely.
    await hass.config_entries.async_remove(config_entry.entry_id)
    await hass.async_block_till_done()
    assert len(hass.states.async_all()) == 0
| partofthething/home-assistant | tests/components/deconz/test_fan.py | Python | apache-2.0 | 5,137 |
from math import log
from random import randrange
def isPrime(n):
    """Deterministic trial-division primality test.

    Returns True for primes, False otherwise (including every n < 2).
    """
    if n == 2 or n == 3:
        return True
    if n % 2 == 0 or n < 2:
        return False
    # Only odd divisors up to sqrt(n) need to be checked.
    for i in range(3, int(n ** 0.5) + 1, 2):
        if n % i == 0:
            return False
    return True


def generateLargePrime(k):
    """Return a random prime with exactly k bits.

    Makes at most 100*(log2(k)+1) random draws; if all fail, returns a
    failure-message string (original behavior, kept for compatibility --
    callers should check the return type).

    NOTE(review): randrange is a Mersenne Twister and is completely
    deterministic -- unusable for serious crypto purposes (use the
    `secrets` module for that).
    """
    r = 100 * (log(k, 2) + 1)  # maximum number of attempts
    r_ = r
    while r > 0:
        n = randrange(2 ** (k - 1), 2 ** k)
        r -= 1
        if isPrime(n):
            return n
    # BUG FIX: the original used Python 2 backtick (repr) syntax here,
    # which is a SyntaxError on Python 3.
    return "Failure after " + repr(r_) + " tries."
# print generateLargePrime(8)
| manikTharaka/al-go-rithms | math/prime_sieve/python/create_large_primes.py | Python | mit | 696 |
from sys import stdin

# Read N (count of numbers) and S (their required sum).
count, total = map(int, stdin.readline().split())
# A solution with gcd >= 2 exists iff every element can be at least 2,
# i.e. S >= 2*N: emit N-1 twos plus the remainder, all even.
if total / count >= 2:
    print("YES")
    parts = [2] * (count - 1) + [total - 2 * count + 2]
    print(" ".join(str(v) for v in parts))
    print(1)
else:
    print("NO")
| zuun77/givemegoogletshirts | codeforces/643-div2/q4.py | Python | apache-2.0 | 204 |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file '/home/colin/Projects/OpenCobolIDE/forms/dlg_preferences.ui'
#
# Created by: PyQt5 UI code generator 5.7
#
# WARNING! All changes made in this file will be lost!
from pyqode.qt import QtCore, QtGui, QtWidgets
class Ui_Dialog(object):
def setupUi(self, Dialog):
Dialog.setObjectName("Dialog")
Dialog.resize(1280, 1024)
Dialog.setMinimumSize(QtCore.QSize(500, 500))
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(":/ide-icons/rc/silex-192x192.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
Dialog.setWindowIcon(icon)
self.gridLayout_2 = QtWidgets.QGridLayout(Dialog)
self.gridLayout_2.setContentsMargins(6, 6, 6, 6)
self.gridLayout_2.setObjectName("gridLayout_2")
self.buttonBox = QtWidgets.QDialogButtonBox(Dialog)
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Cancel|QtWidgets.QDialogButtonBox.Ok|QtWidgets.QDialogButtonBox.Reset|QtWidgets.QDialogButtonBox.RestoreDefaults)
self.buttonBox.setObjectName("buttonBox")
self.gridLayout_2.addWidget(self.buttonBox, 6, 0, 1, 1)
self.widget = QtWidgets.QWidget(Dialog)
self.widget.setStyleSheet("")
self.widget.setObjectName("widget")
self.widget_2 = QtWidgets.QGridLayout(self.widget)
self.widget_2.setContentsMargins(0, 0, 0, 0)
self.widget_2.setSpacing(0)
self.widget_2.setObjectName("widget_2")
self.tabWidget = QtWidgets.QTabWidget(self.widget)
self.tabWidget.setStyleSheet("")
self.tabWidget.setTabPosition(QtWidgets.QTabWidget.North)
self.tabWidget.setTabShape(QtWidgets.QTabWidget.Rounded)
self.tabWidget.setUsesScrollButtons(True)
self.tabWidget.setDocumentMode(False)
self.tabWidget.setObjectName("tabWidget")
self.tabEditor = QtWidgets.QWidget()
self.tabEditor.setObjectName("tabEditor")
self.verticalLayout = QtWidgets.QVBoxLayout(self.tabEditor)
self.verticalLayout.setContentsMargins(0, 0, 0, 0)
self.verticalLayout.setObjectName("verticalLayout")
self.scrollArea = QtWidgets.QScrollArea(self.tabEditor)
self.scrollArea.setWidgetResizable(True)
self.scrollArea.setObjectName("scrollArea")
self.scrollAreaWidgetContents_2 = QtWidgets.QWidget()
self.scrollAreaWidgetContents_2.setGeometry(QtCore.QRect(0, 0, 1244, 921))
self.scrollAreaWidgetContents_2.setObjectName("scrollAreaWidgetContents_2")
self.verticalLayout_7 = QtWidgets.QVBoxLayout(self.scrollAreaWidgetContents_2)
self.verticalLayout_7.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_7.setObjectName("verticalLayout_7")
self.groupBox_3 = QtWidgets.QGroupBox(self.scrollAreaWidgetContents_2)
self.groupBox_3.setObjectName("groupBox_3")
self.formLayout = QtWidgets.QFormLayout(self.groupBox_3)
self.formLayout.setObjectName("formLayout")
self.label_10 = QtWidgets.QLabel(self.groupBox_3)
self.label_10.setObjectName("label_10")
self.formLayout.setWidget(0, QtWidgets.QFormLayout.LabelRole, self.label_10)
self.checkBoxViewLineNumber = QtWidgets.QCheckBox(self.groupBox_3)
self.checkBoxViewLineNumber.setText("")
self.checkBoxViewLineNumber.setChecked(True)
self.checkBoxViewLineNumber.setObjectName("checkBoxViewLineNumber")
self.formLayout.setWidget(0, QtWidgets.QFormLayout.FieldRole, self.checkBoxViewLineNumber)
self.label_11 = QtWidgets.QLabel(self.groupBox_3)
self.label_11.setObjectName("label_11")
self.formLayout.setWidget(1, QtWidgets.QFormLayout.LabelRole, self.label_11)
self.checkBoxHighlightCurrentLine = QtWidgets.QCheckBox(self.groupBox_3)
self.checkBoxHighlightCurrentLine.setText("")
self.checkBoxHighlightCurrentLine.setChecked(True)
self.checkBoxHighlightCurrentLine.setObjectName("checkBoxHighlightCurrentLine")
self.formLayout.setWidget(1, QtWidgets.QFormLayout.FieldRole, self.checkBoxHighlightCurrentLine)
self.label_12 = QtWidgets.QLabel(self.groupBox_3)
self.label_12.setObjectName("label_12")
self.formLayout.setWidget(2, QtWidgets.QFormLayout.LabelRole, self.label_12)
self.checkBoxHighlightWhitespaces = QtWidgets.QCheckBox(self.groupBox_3)
self.checkBoxHighlightWhitespaces.setText("")
self.checkBoxHighlightWhitespaces.setChecked(True)
self.checkBoxHighlightWhitespaces.setObjectName("checkBoxHighlightWhitespaces")
self.formLayout.setWidget(2, QtWidgets.QFormLayout.FieldRole, self.checkBoxHighlightWhitespaces)
self.label_13 = QtWidgets.QLabel(self.groupBox_3)
self.label_13.setObjectName("label_13")
self.formLayout.setWidget(3, QtWidgets.QFormLayout.LabelRole, self.label_13)
self.checkBoxShowErrors = QtWidgets.QCheckBox(self.groupBox_3)
self.checkBoxShowErrors.setText("")
self.checkBoxShowErrors.setObjectName("checkBoxShowErrors")
self.formLayout.setWidget(3, QtWidgets.QFormLayout.FieldRole, self.checkBoxShowErrors)
self.label_38 = QtWidgets.QLabel(self.groupBox_3)
self.label_38.setObjectName("label_38")
self.formLayout.setWidget(4, QtWidgets.QFormLayout.LabelRole, self.label_38)
self.cb_cursor_pos_in_bytes = QtWidgets.QCheckBox(self.groupBox_3)
self.cb_cursor_pos_in_bytes.setText("")
self.cb_cursor_pos_in_bytes.setObjectName("cb_cursor_pos_in_bytes")
self.formLayout.setWidget(4, QtWidgets.QFormLayout.FieldRole, self.cb_cursor_pos_in_bytes)
self.verticalLayout_7.addWidget(self.groupBox_3)
self.groupBox_11 = QtWidgets.QGroupBox(self.scrollAreaWidgetContents_2)
self.groupBox_11.setObjectName("groupBox_11")
self.horizontalLayout_18 = QtWidgets.QHBoxLayout(self.groupBox_11)
self.horizontalLayout_18.setObjectName("horizontalLayout_18")
self.horizontalLayout_14 = QtWidgets.QHBoxLayout()
self.horizontalLayout_14.setContentsMargins(0, -1, -1, -1)
self.horizontalLayout_14.setObjectName("horizontalLayout_14")
self.label_5 = QtWidgets.QLabel(self.groupBox_11)
self.label_5.setObjectName("label_5")
self.horizontalLayout_14.addWidget(self.label_5)
self.spin_box_margin_1 = QtWidgets.QSpinBox(self.groupBox_11)
self.spin_box_margin_1.setObjectName("spin_box_margin_1")
self.horizontalLayout_14.addWidget(self.spin_box_margin_1)
self.color_picker_1 = ColorPicker(self.groupBox_11)
self.color_picker_1.setText("")
self.color_picker_1.setObjectName("color_picker_1")
self.horizontalLayout_14.addWidget(self.color_picker_1)
self.horizontalLayout_18.addLayout(self.horizontalLayout_14)
self.horizontalLayout_15 = QtWidgets.QHBoxLayout()
self.horizontalLayout_15.setContentsMargins(0, -1, -1, -1)
self.horizontalLayout_15.setObjectName("horizontalLayout_15")
self.label_39 = QtWidgets.QLabel(self.groupBox_11)
self.label_39.setObjectName("label_39")
self.horizontalLayout_15.addWidget(self.label_39)
self.spin_box_margin_2 = QtWidgets.QSpinBox(self.groupBox_11)
self.spin_box_margin_2.setObjectName("spin_box_margin_2")
self.horizontalLayout_15.addWidget(self.spin_box_margin_2)
self.color_picker_2 = ColorPicker(self.groupBox_11)
self.color_picker_2.setText("")
self.color_picker_2.setObjectName("color_picker_2")
self.horizontalLayout_15.addWidget(self.color_picker_2)
self.horizontalLayout_18.addLayout(self.horizontalLayout_15)
self.horizontalLayout_16 = QtWidgets.QHBoxLayout()
self.horizontalLayout_16.setContentsMargins(0, -1, -1, -1)
self.horizontalLayout_16.setObjectName("horizontalLayout_16")
self.label_40 = QtWidgets.QLabel(self.groupBox_11)
self.label_40.setObjectName("label_40")
self.horizontalLayout_16.addWidget(self.label_40)
self.spin_box_margin_3 = QtWidgets.QSpinBox(self.groupBox_11)
self.spin_box_margin_3.setObjectName("spin_box_margin_3")
self.horizontalLayout_16.addWidget(self.spin_box_margin_3)
self.color_picker_3 = ColorPicker(self.groupBox_11)
self.color_picker_3.setText("")
self.color_picker_3.setObjectName("color_picker_3")
self.horizontalLayout_16.addWidget(self.color_picker_3)
self.horizontalLayout_18.addLayout(self.horizontalLayout_16)
self.horizontalLayout_17 = QtWidgets.QHBoxLayout()
self.horizontalLayout_17.setContentsMargins(0, -1, -1, -1)
self.horizontalLayout_17.setObjectName("horizontalLayout_17")
self.label_41 = QtWidgets.QLabel(self.groupBox_11)
self.label_41.setObjectName("label_41")
self.horizontalLayout_17.addWidget(self.label_41)
self.spin_box_margin_4 = QtWidgets.QSpinBox(self.groupBox_11)
self.spin_box_margin_4.setObjectName("spin_box_margin_4")
self.horizontalLayout_17.addWidget(self.spin_box_margin_4)
self.color_picker_4 = ColorPicker(self.groupBox_11)
self.color_picker_4.setText("")
self.color_picker_4.setObjectName("color_picker_4")
self.horizontalLayout_17.addWidget(self.color_picker_4)
self.horizontalLayout_18.addLayout(self.horizontalLayout_17)
spacerItem = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_18.addItem(spacerItem)
self.verticalLayout_7.addWidget(self.groupBox_11)
self.groupBox = QtWidgets.QGroupBox(self.scrollAreaWidgetContents_2)
self.groupBox.setObjectName("groupBox")
self.formLayout_7 = QtWidgets.QFormLayout(self.groupBox)
self.formLayout_7.setObjectName("formLayout_7")
self.label_7 = QtWidgets.QLabel(self.groupBox)
self.label_7.setObjectName("label_7")
self.formLayout_7.setWidget(0, QtWidgets.QFormLayout.LabelRole, self.label_7)
self.lineEditCommentIndicator = QtWidgets.QLineEdit(self.groupBox)
font = QtGui.QFont()
font.setFamily("Monospace")
self.lineEditCommentIndicator.setFont(font)
self.lineEditCommentIndicator.setObjectName("lineEditCommentIndicator")
self.formLayout_7.setWidget(0, QtWidgets.QFormLayout.FieldRole, self.lineEditCommentIndicator)
self.verticalLayout_7.addWidget(self.groupBox)
self.groupBox_2 = QtWidgets.QGroupBox(self.scrollAreaWidgetContents_2)
self.groupBox_2.setObjectName("groupBox_2")
self.formLayout_2 = QtWidgets.QFormLayout(self.groupBox_2)
self.formLayout_2.setObjectName("formLayout_2")
self.label = QtWidgets.QLabel(self.groupBox_2)
self.label.setObjectName("label")
self.formLayout_2.setWidget(1, QtWidgets.QFormLayout.LabelRole, self.label)
self.spinBoxEditorTabLen = QtWidgets.QSpinBox(self.groupBox_2)
self.spinBoxEditorTabLen.setMinimum(2)
self.spinBoxEditorTabLen.setMaximum(99)
self.spinBoxEditorTabLen.setProperty("value", 4)
self.spinBoxEditorTabLen.setObjectName("spinBoxEditorTabLen")
self.formLayout_2.setWidget(1, QtWidgets.QFormLayout.FieldRole, self.spinBoxEditorTabLen)
self.label_14 = QtWidgets.QLabel(self.groupBox_2)
self.label_14.setText("")
self.label_14.setObjectName("label_14")
self.formLayout_2.setWidget(2, QtWidgets.QFormLayout.LabelRole, self.label_14)
self.checkBoxEditorAutoIndent = QtWidgets.QCheckBox(self.groupBox_2)
self.checkBoxEditorAutoIndent.setChecked(True)
self.checkBoxEditorAutoIndent.setObjectName("checkBoxEditorAutoIndent")
self.formLayout_2.setWidget(2, QtWidgets.QFormLayout.FieldRole, self.checkBoxEditorAutoIndent)
self.label_6 = QtWidgets.QLabel(self.groupBox_2)
self.label_6.setText("")
self.label_6.setObjectName("label_6")
self.formLayout_2.setWidget(3, QtWidgets.QFormLayout.LabelRole, self.label_6)
self.checkBoxSmartBackspace = QtWidgets.QCheckBox(self.groupBox_2)
self.checkBoxSmartBackspace.setObjectName("checkBoxSmartBackspace")
self.formLayout_2.setWidget(3, QtWidgets.QFormLayout.FieldRole, self.checkBoxSmartBackspace)
self.verticalLayout_7.addWidget(self.groupBox_2)
self.groupBox_4 = QtWidgets.QGroupBox(self.scrollAreaWidgetContents_2)
self.groupBox_4.setObjectName("groupBox_4")
self.formLayout_4 = QtWidgets.QFormLayout(self.groupBox_4)
self.formLayout_4.setFieldGrowthPolicy(QtWidgets.QFormLayout.ExpandingFieldsGrow)
self.formLayout_4.setObjectName("formLayout_4")
self.label_2 = QtWidgets.QLabel(self.groupBox_4)
self.label_2.setObjectName("label_2")
self.formLayout_4.setWidget(2, QtWidgets.QFormLayout.LabelRole, self.label_2)
self.spinBoxEditorCCTriggerLen = QtWidgets.QSpinBox(self.groupBox_4)
self.spinBoxEditorCCTriggerLen.setProperty("value", 1)
self.spinBoxEditorCCTriggerLen.setObjectName("spinBoxEditorCCTriggerLen")
self.formLayout_4.setWidget(2, QtWidgets.QFormLayout.FieldRole, self.spinBoxEditorCCTriggerLen)
self.label_16 = QtWidgets.QLabel(self.groupBox_4)
self.label_16.setObjectName("label_16")
self.formLayout_4.setWidget(3, QtWidgets.QFormLayout.LabelRole, self.label_16)
self.horizontalLayout = QtWidgets.QHBoxLayout()
self.horizontalLayout.setContentsMargins(0, -1, -1, -1)
self.horizontalLayout.setObjectName("horizontalLayout")
self.rbLowerCaseKwds = QtWidgets.QRadioButton(self.groupBox_4)
self.rbLowerCaseKwds.setObjectName("rbLowerCaseKwds")
self.horizontalLayout.addWidget(self.rbLowerCaseKwds)
self.rbUpperCaseKwds = QtWidgets.QRadioButton(self.groupBox_4)
self.rbUpperCaseKwds.setChecked(True)
self.rbUpperCaseKwds.setObjectName("rbUpperCaseKwds")
self.horizontalLayout.addWidget(self.rbUpperCaseKwds)
self.formLayout_4.setLayout(3, QtWidgets.QFormLayout.FieldRole, self.horizontalLayout)
self.label_31 = QtWidgets.QLabel(self.groupBox_4)
self.label_31.setObjectName("label_31")
self.formLayout_4.setWidget(0, QtWidgets.QFormLayout.LabelRole, self.label_31)
self.comboCcFilterMode = QtWidgets.QComboBox(self.groupBox_4)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.comboCcFilterMode.sizePolicy().hasHeightForWidth())
self.comboCcFilterMode.setSizePolicy(sizePolicy)
self.comboCcFilterMode.setMinimumSize(QtCore.QSize(134, 0))
self.comboCcFilterMode.setObjectName("comboCcFilterMode")
self.comboCcFilterMode.addItem("")
self.comboCcFilterMode.addItem("")
self.formLayout_4.setWidget(0, QtWidgets.QFormLayout.FieldRole, self.comboCcFilterMode)
self.verticalLayout_7.addWidget(self.groupBox_4)
self.groupBox_10 = QtWidgets.QGroupBox(self.scrollAreaWidgetContents_2)
self.groupBox_10.setObjectName("groupBox_10")
self.formLayout_11 = QtWidgets.QFormLayout(self.groupBox_10)
self.formLayout_11.setFieldGrowthPolicy(QtWidgets.QFormLayout.ExpandingFieldsGrow)
self.formLayout_11.setObjectName("formLayout_11")
self.label_30 = QtWidgets.QLabel(self.groupBox_10)
self.label_30.setObjectName("label_30")
self.formLayout_11.setWidget(0, QtWidgets.QFormLayout.LabelRole, self.label_30)
self.comboBoxPreferredEOL = QtWidgets.QComboBox(self.groupBox_10)
self.comboBoxPreferredEOL.setObjectName("comboBoxPreferredEOL")
self.comboBoxPreferredEOL.addItem("")
self.comboBoxPreferredEOL.addItem("")
self.comboBoxPreferredEOL.addItem("")
self.comboBoxPreferredEOL.addItem("")
self.formLayout_11.setWidget(0, QtWidgets.QFormLayout.FieldRole, self.comboBoxPreferredEOL)
self.checkBoxAutodetectEOL = QtWidgets.QCheckBox(self.groupBox_10)
self.checkBoxAutodetectEOL.setChecked(True)
self.checkBoxAutodetectEOL.setObjectName("checkBoxAutodetectEOL")
self.formLayout_11.setWidget(1, QtWidgets.QFormLayout.FieldRole, self.checkBoxAutodetectEOL)
self.verticalLayout_7.addWidget(self.groupBox_10)
spacerItem1 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.verticalLayout_7.addItem(spacerItem1)
self.scrollArea.setWidget(self.scrollAreaWidgetContents_2)
self.verticalLayout.addWidget(self.scrollArea)
icon1 = QtGui.QIcon()
icon1.addPixmap(QtGui.QPixmap(":/ide-icons/rc/cobol-mimetype.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.tabWidget.addTab(self.tabEditor, icon1, "")
self.tabStyle = QtWidgets.QWidget()
self.tabStyle.setObjectName("tabStyle")
self.verticalLayout_3 = QtWidgets.QVBoxLayout(self.tabStyle)
self.verticalLayout_3.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_3.setObjectName("verticalLayout_3")
self.groupBox_5 = QtWidgets.QGroupBox(self.tabStyle)
self.groupBox_5.setObjectName("groupBox_5")
self.verticalLayout_2 = QtWidgets.QVBoxLayout(self.groupBox_5)
self.verticalLayout_2.setObjectName("verticalLayout_2")
self.horizontalLayout_3 = QtWidgets.QHBoxLayout()
self.horizontalLayout_3.setObjectName("horizontalLayout_3")
spacerItem2 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_3.addItem(spacerItem2)
self.radioButtonColorWhite = QtWidgets.QRadioButton(self.groupBox_5)
self.radioButtonColorWhite.setChecked(True)
self.radioButtonColorWhite.setObjectName("radioButtonColorWhite")
self.horizontalLayout_3.addWidget(self.radioButtonColorWhite)
self.radioButtonColorDark = QtWidgets.QRadioButton(self.groupBox_5)
self.radioButtonColorDark.setObjectName("radioButtonColorDark")
self.horizontalLayout_3.addWidget(self.radioButtonColorDark)
spacerItem3 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_3.addItem(spacerItem3)
self.verticalLayout_2.addLayout(self.horizontalLayout_3)
self.layoutIconTheme = QtWidgets.QFormLayout()
self.layoutIconTheme.setContentsMargins(-1, 0, -1, -1)
self.layoutIconTheme.setObjectName("layoutIconTheme")
self.lblIconTheme = QtWidgets.QLabel(self.groupBox_5)
self.lblIconTheme.setObjectName("lblIconTheme")
self.layoutIconTheme.setWidget(0, QtWidgets.QFormLayout.LabelRole, self.lblIconTheme)
self.comboBoxIconTheme = QtWidgets.QComboBox(self.groupBox_5)
self.comboBoxIconTheme.setObjectName("comboBoxIconTheme")
self.layoutIconTheme.setWidget(0, QtWidgets.QFormLayout.FieldRole, self.comboBoxIconTheme)
self.verticalLayout_2.addLayout(self.layoutIconTheme)
self.verticalLayout_3.addWidget(self.groupBox_5)
self.groupBox_6 = QtWidgets.QGroupBox(self.tabStyle)
self.groupBox_6.setObjectName("groupBox_6")
self.formLayout_3 = QtWidgets.QFormLayout(self.groupBox_6)
self.formLayout_3.setObjectName("formLayout_3")
self.label_3 = QtWidgets.QLabel(self.groupBox_6)
self.label_3.setObjectName("label_3")
self.formLayout_3.setWidget(0, QtWidgets.QFormLayout.LabelRole, self.label_3)
self.horizontalLayout_4 = QtWidgets.QHBoxLayout()
self.horizontalLayout_4.setObjectName("horizontalLayout_4")
self.fontComboBox = QtWidgets.QFontComboBox(self.groupBox_6)
self.fontComboBox.setFontFilters(QtWidgets.QFontComboBox.MonospacedFonts)
font = QtGui.QFont()
font.setFamily("Clean")
self.fontComboBox.setCurrentFont(font)
self.fontComboBox.setObjectName("fontComboBox")
self.horizontalLayout_4.addWidget(self.fontComboBox)
self.formLayout_3.setLayout(0, QtWidgets.QFormLayout.FieldRole, self.horizontalLayout_4)
self.label_4 = QtWidgets.QLabel(self.groupBox_6)
self.label_4.setObjectName("label_4")
self.formLayout_3.setWidget(1, QtWidgets.QFormLayout.LabelRole, self.label_4)
self.horizontalLayout_5 = QtWidgets.QHBoxLayout()
self.horizontalLayout_5.setObjectName("horizontalLayout_5")
self.spinBoxFontSize = QtWidgets.QSpinBox(self.groupBox_6)
self.spinBoxFontSize.setProperty("value", 10)
self.spinBoxFontSize.setObjectName("spinBoxFontSize")
self.horizontalLayout_5.addWidget(self.spinBoxFontSize)
self.formLayout_3.setLayout(1, QtWidgets.QFormLayout.FieldRole, self.horizontalLayout_5)
self.verticalLayout_3.addWidget(self.groupBox_6)
self.groupBox_7 = QtWidgets.QGroupBox(self.tabStyle)
self.groupBox_7.setObjectName("groupBox_7")
self.gridLayout_4 = QtWidgets.QGridLayout(self.groupBox_7)
self.gridLayout_4.setObjectName("gridLayout_4")
self.listWidgetColorSchemes = QtWidgets.QListWidget(self.groupBox_7)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.listWidgetColorSchemes.sizePolicy().hasHeightForWidth())
self.listWidgetColorSchemes.setSizePolicy(sizePolicy)
self.listWidgetColorSchemes.setObjectName("listWidgetColorSchemes")
self.gridLayout_4.addWidget(self.listWidgetColorSchemes, 0, 0, 1, 1)
self.plainTextEdit = CobolCodeEdit(self.groupBox_7)
self.plainTextEdit.setObjectName("plainTextEdit")
self.gridLayout_4.addWidget(self.plainTextEdit, 0, 1, 1, 1)
self.verticalLayout_3.addWidget(self.groupBox_7)
icon = QtGui.QIcon.fromTheme("applications-graphics")
self.tabWidget.addTab(self.tabStyle, icon, "")
self.tabCompiler = QtWidgets.QWidget()
self.tabCompiler.setObjectName("tabCompiler")
self.verticalLayout_11 = QtWidgets.QVBoxLayout(self.tabCompiler)
self.verticalLayout_11.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_11.setObjectName("verticalLayout_11")
self.scrollArea_5 = QtWidgets.QScrollArea(self.tabCompiler)
self.scrollArea_5.setWidgetResizable(True)
self.scrollArea_5.setObjectName("scrollArea_5")
self.scrollAreaWidgetContents_7 = QtWidgets.QWidget()
self.scrollAreaWidgetContents_7.setGeometry(QtCore.QRect(0, 0, 1224, 973))
self.scrollAreaWidgetContents_7.setObjectName("scrollAreaWidgetContents_7")
self.formLayout_6 = QtWidgets.QFormLayout(self.scrollAreaWidgetContents_7)
self.formLayout_6.setFieldGrowthPolicy(QtWidgets.QFormLayout.ExpandingFieldsGrow)
self.formLayout_6.setContentsMargins(9, 9, 9, 9)
self.formLayout_6.setSpacing(9)
self.formLayout_6.setObjectName("formLayout_6")
self.label_compiler_path = QtWidgets.QLabel(self.scrollAreaWidgetContents_7)
self.label_compiler_path.setObjectName("label_compiler_path")
self.formLayout_6.setWidget(0, QtWidgets.QFormLayout.LabelRole, self.label_compiler_path)
self.horizontalLayout_9 = QtWidgets.QHBoxLayout()
self.horizontalLayout_9.setContentsMargins(0, -1, -1, -1)
self.horizontalLayout_9.setObjectName("horizontalLayout_9")
self.lineEditCompilerPath = PathLineEdit(self.scrollAreaWidgetContents_7)
self.lineEditCompilerPath.setObjectName("lineEditCompilerPath")
self.horizontalLayout_9.addWidget(self.lineEditCompilerPath)
self.toolButtonCustomCompilerPath = QtWidgets.QToolButton(self.scrollAreaWidgetContents_7)
self.toolButtonCustomCompilerPath.setObjectName("toolButtonCustomCompilerPath")
self.horizontalLayout_9.addWidget(self.toolButtonCustomCompilerPath)
self.toolButtonCheckCompiler = QtWidgets.QPushButton(self.scrollAreaWidgetContents_7)
icon = QtGui.QIcon.fromTheme("emblem-checked")
self.toolButtonCheckCompiler.setIcon(icon)
self.toolButtonCheckCompiler.setObjectName("toolButtonCheckCompiler")
self.horizontalLayout_9.addWidget(self.toolButtonCheckCompiler)
self.formLayout_6.setLayout(0, QtWidgets.QFormLayout.FieldRole, self.horizontalLayout_9)
self.groupBoxEnvVars = QtWidgets.QGroupBox(self.scrollAreaWidgetContents_7)
self.groupBoxEnvVars.setObjectName("groupBoxEnvVars")
self.formLayout_12 = QtWidgets.QFormLayout(self.groupBoxEnvVars)
self.formLayout_12.setFieldGrowthPolicy(QtWidgets.QFormLayout.AllNonFixedFieldsGrow)
self.formLayout_12.setObjectName("formLayout_12")
self.PATH = PathLineEdit(self.groupBoxEnvVars)
self.PATH.setObjectName("PATH")
self.formLayout_12.setWidget(0, QtWidgets.QFormLayout.FieldRole, self.PATH)
self.COB_CONFIG_DIR = PathLineEdit(self.groupBoxEnvVars)
self.COB_CONFIG_DIR.setObjectName("COB_CONFIG_DIR")
self.formLayout_12.setWidget(1, QtWidgets.QFormLayout.FieldRole, self.COB_CONFIG_DIR)
self.COB_COPY_DIR = PathLineEdit(self.groupBoxEnvVars)
self.COB_COPY_DIR.setObjectName("COB_COPY_DIR")
self.formLayout_12.setWidget(2, QtWidgets.QFormLayout.FieldRole, self.COB_COPY_DIR)
self.COB_INCLUDE_PATH = PathLineEdit(self.groupBoxEnvVars)
self.COB_INCLUDE_PATH.setObjectName("COB_INCLUDE_PATH")
self.formLayout_12.setWidget(4, QtWidgets.QFormLayout.FieldRole, self.COB_INCLUDE_PATH)
self.COB_LIB_PATH = PathLineEdit(self.groupBoxEnvVars)
self.COB_LIB_PATH.setObjectName("COB_LIB_PATH")
self.formLayout_12.setWidget(5, QtWidgets.QFormLayout.FieldRole, self.COB_LIB_PATH)
self.cbPATH = QtWidgets.QCheckBox(self.groupBoxEnvVars)
self.cbPATH.setObjectName("cbPATH")
self.formLayout_12.setWidget(0, QtWidgets.QFormLayout.LabelRole, self.cbPATH)
self.cbCOB_CONFIG_DIR = QtWidgets.QCheckBox(self.groupBoxEnvVars)
self.cbCOB_CONFIG_DIR.setObjectName("cbCOB_CONFIG_DIR")
self.formLayout_12.setWidget(1, QtWidgets.QFormLayout.LabelRole, self.cbCOB_CONFIG_DIR)
self.cbCOB_COPY_DIR = QtWidgets.QCheckBox(self.groupBoxEnvVars)
self.cbCOB_COPY_DIR.setObjectName("cbCOB_COPY_DIR")
self.formLayout_12.setWidget(2, QtWidgets.QFormLayout.LabelRole, self.cbCOB_COPY_DIR)
self.cbCOB_INCLUDE_PATH = QtWidgets.QCheckBox(self.groupBoxEnvVars)
self.cbCOB_INCLUDE_PATH.setObjectName("cbCOB_INCLUDE_PATH")
self.formLayout_12.setWidget(4, QtWidgets.QFormLayout.LabelRole, self.cbCOB_INCLUDE_PATH)
self.cbCOB_LIB_PATH = QtWidgets.QCheckBox(self.groupBoxEnvVars)
self.cbCOB_LIB_PATH.setObjectName("cbCOB_LIB_PATH")
self.formLayout_12.setWidget(5, QtWidgets.QFormLayout.LabelRole, self.cbCOB_LIB_PATH)
self.formLayout_6.setWidget(1, QtWidgets.QFormLayout.FieldRole, self.groupBoxEnvVars)
self.cbAutoDetectSublmodules = QtWidgets.QCheckBox(self.scrollAreaWidgetContents_7)
self.cbAutoDetectSublmodules.setObjectName("cbAutoDetectSublmodules")
self.formLayout_6.setWidget(2, QtWidgets.QFormLayout.FieldRole, self.cbAutoDetectSublmodules)
self.labelVCVARS = QtWidgets.QLabel(self.scrollAreaWidgetContents_7)
self.labelVCVARS.setObjectName("labelVCVARS")
self.formLayout_6.setWidget(3, QtWidgets.QFormLayout.LabelRole, self.labelVCVARS)
self.horizontalLayout_8 = QtWidgets.QHBoxLayout()
self.horizontalLayout_8.setContentsMargins(0, -1, -1, -1)
self.horizontalLayout_8.setObjectName("horizontalLayout_8")
self.lineEditVCVARS = PathLineEdit(self.scrollAreaWidgetContents_7)
self.lineEditVCVARS.setObjectName("lineEditVCVARS")
self.horizontalLayout_8.addWidget(self.lineEditVCVARS)
self.toolButtonVCVARS = QtWidgets.QToolButton(self.scrollAreaWidgetContents_7)
self.toolButtonVCVARS.setObjectName("toolButtonVCVARS")
self.horizontalLayout_8.addWidget(self.toolButtonVCVARS)
self.combo_arch = QtWidgets.QComboBox(self.scrollAreaWidgetContents_7)
self.combo_arch.setEditable(False)
self.combo_arch.setObjectName("combo_arch")
self.combo_arch.addItem("")
self.combo_arch.addItem("")
self.horizontalLayout_8.addWidget(self.combo_arch)
self.formLayout_6.setLayout(3, QtWidgets.QFormLayout.FieldRole, self.horizontalLayout_8)
self.label_36 = QtWidgets.QLabel(self.scrollAreaWidgetContents_7)
self.label_36.setObjectName("label_36")
self.formLayout_6.setWidget(5, QtWidgets.QFormLayout.LabelRole, self.label_36)
self.lineEditOutputDirectory = PathLineEdit(self.scrollAreaWidgetContents_7)
self.lineEditOutputDirectory.setObjectName("lineEditOutputDirectory")
self.formLayout_6.setWidget(5, QtWidgets.QFormLayout.FieldRole, self.lineEditOutputDirectory)
self.cb_copy_runtime_dlls = QtWidgets.QCheckBox(self.scrollAreaWidgetContents_7)
self.cb_copy_runtime_dlls.setObjectName("cb_copy_runtime_dlls")
self.formLayout_6.setWidget(6, QtWidgets.QFormLayout.FieldRole, self.cb_copy_runtime_dlls)
self.label_32 = QtWidgets.QLabel(self.scrollAreaWidgetContents_7)
self.label_32.setObjectName("label_32")
self.formLayout_6.setWidget(7, QtWidgets.QFormLayout.LabelRole, self.label_32)
self.lineEditCobcExts = QtWidgets.QLineEdit(self.scrollAreaWidgetContents_7)
self.lineEditCobcExts.setObjectName("lineEditCobcExts")
self.formLayout_6.setWidget(7, QtWidgets.QFormLayout.FieldRole, self.lineEditCobcExts)
self.label_8 = QtWidgets.QLabel(self.scrollAreaWidgetContents_7)
self.label_8.setObjectName("label_8")
self.formLayout_6.setWidget(8, QtWidgets.QFormLayout.LabelRole, self.label_8)
self.comboBoxStandard = QtWidgets.QComboBox(self.scrollAreaWidgetContents_7)
self.comboBoxStandard.setObjectName("comboBoxStandard")
self.comboBoxStandard.addItem("")
self.comboBoxStandard.addItem("")
self.comboBoxStandard.addItem("")
self.comboBoxStandard.addItem("")
self.comboBoxStandard.addItem("")
self.comboBoxStandard.addItem("")
self.comboBoxStandard.addItem("")
self.comboBoxStandard.addItem("")
self.comboBoxStandard.addItem("")
self.comboBoxStandard.addItem("")
self.formLayout_6.setWidget(8, QtWidgets.QFormLayout.FieldRole, self.comboBoxStandard)
self.label_9 = QtWidgets.QLabel(self.scrollAreaWidgetContents_7)
self.label_9.setObjectName("label_9")
self.formLayout_6.setWidget(9, QtWidgets.QFormLayout.LabelRole, self.label_9)
self.checkBoxFreeFormat = QtWidgets.QCheckBox(self.scrollAreaWidgetContents_7)
self.checkBoxFreeFormat.setText("")
self.checkBoxFreeFormat.setObjectName("checkBoxFreeFormat")
self.formLayout_6.setWidget(9, QtWidgets.QFormLayout.FieldRole, self.checkBoxFreeFormat)
self.label_15 = QtWidgets.QLabel(self.scrollAreaWidgetContents_7)
self.label_15.setObjectName("label_15")
self.formLayout_6.setWidget(10, QtWidgets.QFormLayout.LabelRole, self.label_15)
self.gridLayout_5 = QtWidgets.QGridLayout()
self.gridLayout_5.setContentsMargins(0, 0, -1, -1)
self.gridLayout_5.setObjectName("gridLayout_5")
self.cb_ftrace = QtWidgets.QCheckBox(self.scrollAreaWidgetContents_7)
self.cb_ftrace.setObjectName("cb_ftrace")
self.gridLayout_5.addWidget(self.cb_ftrace, 2, 1, 1, 1)
self.cb_static = QtWidgets.QCheckBox(self.scrollAreaWidgetContents_7)
self.cb_static.setObjectName("cb_static")
self.gridLayout_5.addWidget(self.cb_static, 0, 1, 1, 1)
self.cb_g = QtWidgets.QCheckBox(self.scrollAreaWidgetContents_7)
self.cb_g.setObjectName("cb_g")
self.gridLayout_5.addWidget(self.cb_g, 0, 3, 1, 1)
self.cb_debugging_line = QtWidgets.QCheckBox(self.scrollAreaWidgetContents_7)
self.cb_debugging_line.setObjectName("cb_debugging_line")
self.gridLayout_5.addWidget(self.cb_debugging_line, 2, 3, 1, 1)
self.cb_ftraceall = QtWidgets.QCheckBox(self.scrollAreaWidgetContents_7)
self.cb_ftraceall.setObjectName("cb_ftraceall")
self.gridLayout_5.addWidget(self.cb_ftraceall, 2, 2, 1, 1)
self.cb_debug = QtWidgets.QCheckBox(self.scrollAreaWidgetContents_7)
self.cb_debug.setObjectName("cb_debug")
self.gridLayout_5.addWidget(self.cb_debug, 0, 2, 1, 1)
self.cb_w = QtWidgets.QCheckBox(self.scrollAreaWidgetContents_7)
self.cb_w.setObjectName("cb_w")
self.gridLayout_5.addWidget(self.cb_w, 0, 0, 1, 1)
self.cb_wall = QtWidgets.QCheckBox(self.scrollAreaWidgetContents_7)
self.cb_wall.setObjectName("cb_wall")
self.gridLayout_5.addWidget(self.cb_wall, 2, 0, 1, 1)
self.formLayout_6.setLayout(10, QtWidgets.QFormLayout.FieldRole, self.gridLayout_5)
self.label_35 = QtWidgets.QLabel(self.scrollAreaWidgetContents_7)
self.label_35.setObjectName("label_35")
self.formLayout_6.setWidget(12, QtWidgets.QFormLayout.LabelRole, self.label_35)
self.label_37 = QtWidgets.QLabel(self.scrollAreaWidgetContents_7)
self.label_37.setObjectName("label_37")
self.formLayout_6.setWidget(13, QtWidgets.QFormLayout.LabelRole, self.label_37)
self.horizontalLayout_12 = QtWidgets.QHBoxLayout()
self.horizontalLayout_12.setContentsMargins(0, -1, -1, -1)
self.horizontalLayout_12.setObjectName("horizontalLayout_12")
self.listWidgetCopyPaths = QtWidgets.QListWidget(self.scrollAreaWidgetContents_7)
self.listWidgetCopyPaths.setDragDropMode(QtWidgets.QAbstractItemView.InternalMove)
self.listWidgetCopyPaths.setObjectName("listWidgetCopyPaths")
self.horizontalLayout_12.addWidget(self.listWidgetCopyPaths)
self.verticalLayout_12 = QtWidgets.QVBoxLayout()
self.verticalLayout_12.setContentsMargins(0, -1, -1, -1)
self.verticalLayout_12.setObjectName("verticalLayout_12")
self.btAddAbsoluteCopyPath = QtWidgets.QToolButton(self.scrollAreaWidgetContents_7)
icon = QtGui.QIcon.fromTheme("folder-open")
self.btAddAbsoluteCopyPath.setIcon(icon)
self.btAddAbsoluteCopyPath.setObjectName("btAddAbsoluteCopyPath")
self.verticalLayout_12.addWidget(self.btAddAbsoluteCopyPath)
self.btAddRelativeCopyPath = QtWidgets.QToolButton(self.scrollAreaWidgetContents_7)
icon = QtGui.QIcon.fromTheme("list-add")
self.btAddRelativeCopyPath.setIcon(icon)
self.btAddRelativeCopyPath.setObjectName("btAddRelativeCopyPath")
self.verticalLayout_12.addWidget(self.btAddRelativeCopyPath)
self.btRemoveCopyPath = QtWidgets.QToolButton(self.scrollAreaWidgetContents_7)
icon2 = QtGui.QIcon()
icon2.addPixmap(QtGui.QPixmap(":/ide-icons/rc/list-remove.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.btRemoveCopyPath.setIcon(icon2)
self.btRemoveCopyPath.setObjectName("btRemoveCopyPath")
self.verticalLayout_12.addWidget(self.btRemoveCopyPath)
spacerItem4 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.verticalLayout_12.addItem(spacerItem4)
self.horizontalLayout_12.addLayout(self.verticalLayout_12)
self.formLayout_6.setLayout(13, QtWidgets.QFormLayout.FieldRole, self.horizontalLayout_12)
self.label_17 = QtWidgets.QLabel(self.scrollAreaWidgetContents_7)
self.label_17.setObjectName("label_17")
self.formLayout_6.setWidget(14, QtWidgets.QFormLayout.LabelRole, self.label_17)
self.horizontalLayout_10 = QtWidgets.QHBoxLayout()
self.horizontalLayout_10.setContentsMargins(0, -1, -1, -1)
self.horizontalLayout_10.setObjectName("horizontalLayout_10")
self.listWidgetLibPaths = QtWidgets.QListWidget(self.scrollAreaWidgetContents_7)
self.listWidgetLibPaths.setDragDropMode(QtWidgets.QAbstractItemView.InternalMove)
self.listWidgetLibPaths.setObjectName("listWidgetLibPaths")
self.horizontalLayout_10.addWidget(self.listWidgetLibPaths)
self.verticalLayout_4 = QtWidgets.QVBoxLayout()
self.verticalLayout_4.setContentsMargins(0, -1, -1, -1)
self.verticalLayout_4.setObjectName("verticalLayout_4")
self.toolButtonAddLibPath = QtWidgets.QToolButton(self.scrollAreaWidgetContents_7)
icon3 = QtGui.QIcon()
icon3.addPixmap(QtGui.QPixmap(":/ide-icons/rc/document-open.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.toolButtonAddLibPath.setIcon(icon3)
self.toolButtonAddLibPath.setObjectName("toolButtonAddLibPath")
self.verticalLayout_4.addWidget(self.toolButtonAddLibPath)
self.toolButtonAddRelativeLibPath = QtWidgets.QToolButton(self.scrollAreaWidgetContents_7)
icon4 = QtGui.QIcon()
icon4.addPixmap(QtGui.QPixmap(":/ide-icons/rc/list-add.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.toolButtonAddRelativeLibPath.setIcon(icon4)
self.toolButtonAddRelativeLibPath.setObjectName("toolButtonAddRelativeLibPath")
self.verticalLayout_4.addWidget(self.toolButtonAddRelativeLibPath)
self.toolButtonRemoveLibPath = QtWidgets.QToolButton(self.scrollAreaWidgetContents_7)
self.toolButtonRemoveLibPath.setIcon(icon2)
self.toolButtonRemoveLibPath.setObjectName("toolButtonRemoveLibPath")
self.verticalLayout_4.addWidget(self.toolButtonRemoveLibPath)
spacerItem5 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.verticalLayout_4.addItem(spacerItem5)
self.horizontalLayout_10.addLayout(self.verticalLayout_4)
self.formLayout_6.setLayout(14, QtWidgets.QFormLayout.FieldRole, self.horizontalLayout_10)
self.label_18 = QtWidgets.QLabel(self.scrollAreaWidgetContents_7)
self.label_18.setObjectName("label_18")
self.formLayout_6.setWidget(15, QtWidgets.QFormLayout.LabelRole, self.label_18)
self.lineEditLibs = QtWidgets.QLineEdit(self.scrollAreaWidgetContents_7)
self.lineEditLibs.setObjectName("lineEditLibs")
self.formLayout_6.setWidget(15, QtWidgets.QFormLayout.FieldRole, self.lineEditLibs)
self.horizontalLayout_20 = QtWidgets.QHBoxLayout()
self.horizontalLayout_20.setContentsMargins(0, 0, -1, -1)
self.horizontalLayout_20.setObjectName("horizontalLayout_20")
self.le_compiler_flags = QtWidgets.QLineEdit(self.scrollAreaWidgetContents_7)
self.le_compiler_flags.setObjectName("le_compiler_flags")
self.horizontalLayout_20.addWidget(self.le_compiler_flags)
self.btCompilerFlagsHelp = QtWidgets.QToolButton(self.scrollAreaWidgetContents_7)
self.btCompilerFlagsHelp.setObjectName("btCompilerFlagsHelp")
self.horizontalLayout_20.addWidget(self.btCompilerFlagsHelp)
self.formLayout_6.setLayout(12, QtWidgets.QFormLayout.FieldRole, self.horizontalLayout_20)
self.scrollArea_5.setWidget(self.scrollAreaWidgetContents_7)
self.verticalLayout_11.addWidget(self.scrollArea_5)
icon = QtGui.QIcon.fromTheme("exec")
self.tabWidget.addTab(self.tabCompiler, icon, "")
self.tabRun = QtWidgets.QWidget()
self.tabRun.setObjectName("tabRun")
self.verticalLayout_10 = QtWidgets.QVBoxLayout(self.tabRun)
self.verticalLayout_10.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_10.setObjectName("verticalLayout_10")
self.formLayout_13 = QtWidgets.QFormLayout()
self.formLayout_13.setContentsMargins(6, 6, 6, 6)
self.formLayout_13.setObjectName("formLayout_13")
self.lbl_external_terminal_command = QtWidgets.QLabel(self.tabRun)
self.lbl_external_terminal_command.setObjectName("lbl_external_terminal_command")
self.formLayout_13.setWidget(2, QtWidgets.QFormLayout.LabelRole, self.lbl_external_terminal_command)
self.lineEditRunTerm = QtWidgets.QLineEdit(self.tabRun)
self.lineEditRunTerm.setObjectName("lineEditRunTerm")
self.formLayout_13.setWidget(2, QtWidgets.QFormLayout.FieldRole, self.lineEditRunTerm)
self.label_42 = QtWidgets.QLabel(self.tabRun)
self.label_42.setObjectName("label_42")
self.formLayout_13.setWidget(3, QtWidgets.QFormLayout.LabelRole, self.label_42)
self.horizontalLayout_19 = QtWidgets.QHBoxLayout()
self.horizontalLayout_19.setContentsMargins(0, -1, -1, -1)
self.horizontalLayout_19.setObjectName("horizontalLayout_19")
self.edit_working_dir = PathLineEdit(self.tabRun)
self.edit_working_dir.setObjectName("edit_working_dir")
self.horizontalLayout_19.addWidget(self.edit_working_dir)
self.bt_working_dir = QtWidgets.QToolButton(self.tabRun)
self.bt_working_dir.setObjectName("bt_working_dir")
self.horizontalLayout_19.addWidget(self.bt_working_dir)
self.formLayout_13.setLayout(3, QtWidgets.QFormLayout.FieldRole, self.horizontalLayout_19)
self.checkBoxRunExtTerm = QtWidgets.QCheckBox(self.tabRun)
self.checkBoxRunExtTerm.setObjectName("checkBoxRunExtTerm")
self.formLayout_13.setWidget(1, QtWidgets.QFormLayout.FieldRole, self.checkBoxRunExtTerm)
self.label_43 = QtWidgets.QLabel(self.tabRun)
self.label_43.setObjectName("label_43")
self.formLayout_13.setWidget(1, QtWidgets.QFormLayout.LabelRole, self.label_43)
self.verticalLayout_10.addLayout(self.formLayout_13)
self.groupBox_12 = QtWidgets.QGroupBox(self.tabRun)
self.groupBox_12.setObjectName("groupBox_12")
self.horizontalLayout_13 = QtWidgets.QHBoxLayout(self.groupBox_12)
self.horizontalLayout_13.setObjectName("horizontalLayout_13")
self.tw_run_env = QtWidgets.QTableWidget(self.groupBox_12)
self.tw_run_env.setSelectionMode(QtWidgets.QAbstractItemView.SingleSelection)
self.tw_run_env.setSelectionBehavior(QtWidgets.QAbstractItemView.SelectRows)
self.tw_run_env.setObjectName("tw_run_env")
self.tw_run_env.setColumnCount(2)
self.tw_run_env.setRowCount(0)
item = QtWidgets.QTableWidgetItem()
self.tw_run_env.setHorizontalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.tw_run_env.setHorizontalHeaderItem(1, item)
self.tw_run_env.horizontalHeader().setCascadingSectionResizes(False)
self.tw_run_env.horizontalHeader().setDefaultSectionSize(300)
self.tw_run_env.horizontalHeader().setStretchLastSection(True)
self.tw_run_env.verticalHeader().setVisible(False)
self.horizontalLayout_13.addWidget(self.tw_run_env)
self.verticalLayout_14 = QtWidgets.QVBoxLayout()
self.verticalLayout_14.setObjectName("verticalLayout_14")
self.bt_add_run_env = QtWidgets.QPushButton(self.groupBox_12)
icon = QtGui.QIcon.fromTheme("list-add")
self.bt_add_run_env.setIcon(icon)
self.bt_add_run_env.setObjectName("bt_add_run_env")
self.verticalLayout_14.addWidget(self.bt_add_run_env)
self.bt_rm_run_env = QtWidgets.QPushButton(self.groupBox_12)
icon = QtGui.QIcon.fromTheme("list-remove")
self.bt_rm_run_env.setIcon(icon)
self.bt_rm_run_env.setObjectName("bt_rm_run_env")
self.verticalLayout_14.addWidget(self.bt_rm_run_env)
self.bt_clear_run_env = QtWidgets.QPushButton(self.groupBox_12)
icon = QtGui.QIcon.fromTheme("edit-clear")
self.bt_clear_run_env.setIcon(icon)
self.bt_clear_run_env.setObjectName("bt_clear_run_env")
self.verticalLayout_14.addWidget(self.bt_clear_run_env)
spacerItem6 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.verticalLayout_14.addItem(spacerItem6)
self.horizontalLayout_13.addLayout(self.verticalLayout_14)
self.verticalLayout_10.addWidget(self.groupBox_12)
self.verticalLayout_10.setStretch(1, 1)
icon = QtGui.QIcon.fromTheme("media-playback-start")
self.tabWidget.addTab(self.tabRun, icon, "")
self.tabSqlCobol = QtWidgets.QWidget()
self.tabSqlCobol.setObjectName("tabSqlCobol")
self.gridLayout = QtWidgets.QGridLayout(self.tabSqlCobol)
self.gridLayout.setContentsMargins(0, 0, 0, 0)
self.gridLayout.setObjectName("gridLayout")
self.scrollArea_3 = QtWidgets.QScrollArea(self.tabSqlCobol)
self.scrollArea_3.setWidgetResizable(True)
self.scrollArea_3.setObjectName("scrollArea_3")
self.scrollAreaWidgetContents_5 = QtWidgets.QWidget()
self.scrollAreaWidgetContents_5.setGeometry(QtCore.QRect(0, 0, 1244, 897))
self.scrollAreaWidgetContents_5.setObjectName("scrollAreaWidgetContents_5")
self.verticalLayout_9 = QtWidgets.QVBoxLayout(self.scrollAreaWidgetContents_5)
self.verticalLayout_9.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_9.setObjectName("verticalLayout_9")
self.stackedWidgetSQL = QtWidgets.QStackedWidget(self.scrollAreaWidgetContents_5)
self.stackedWidgetSQL.setObjectName("stackedWidgetSQL")
self.page = QtWidgets.QWidget()
self.page.setObjectName("page")
self.gridLayout_3 = QtWidgets.QGridLayout(self.page)
self.gridLayout_3.setContentsMargins(0, 0, 0, 0)
self.gridLayout_3.setObjectName("gridLayout_3")
self.groupBox_8 = QtWidgets.QGroupBox(self.page)
self.groupBox_8.setObjectName("groupBox_8")
self.formLayout_8 = QtWidgets.QFormLayout(self.groupBox_8)
self.formLayout_8.setFieldGrowthPolicy(QtWidgets.QFormLayout.ExpandingFieldsGrow)
self.formLayout_8.setObjectName("formLayout_8")
self.label_19 = QtWidgets.QLabel(self.groupBox_8)
self.label_19.setObjectName("label_19")
self.formLayout_8.setWidget(1, QtWidgets.QFormLayout.LabelRole, self.label_19)
self.verticalLayout_5 = QtWidgets.QVBoxLayout()
self.verticalLayout_5.setContentsMargins(0, -1, -1, 0)
self.verticalLayout_5.setObjectName("verticalLayout_5")
self.horizontalLayout_2 = QtWidgets.QHBoxLayout()
self.horizontalLayout_2.setContentsMargins(0, -1, -1, -1)
self.horizontalLayout_2.setObjectName("horizontalLayout_2")
self.lineEditDbpre = QtWidgets.QLineEdit(self.groupBox_8)
self.lineEditDbpre.setObjectName("lineEditDbpre")
self.horizontalLayout_2.addWidget(self.lineEditDbpre)
self.toolButtonDbpre = QtWidgets.QToolButton(self.groupBox_8)
self.toolButtonDbpre.setObjectName("toolButtonDbpre")
self.horizontalLayout_2.addWidget(self.toolButtonDbpre)
self.verticalLayout_5.addLayout(self.horizontalLayout_2)
self.labelDbpreVersion = QtWidgets.QLabel(self.groupBox_8)
self.labelDbpreVersion.setStyleSheet("font: oblique 9pt \"Cantarell\";")
self.labelDbpreVersion.setObjectName("labelDbpreVersion")
self.verticalLayout_5.addWidget(self.labelDbpreVersion)
self.formLayout_8.setLayout(1, QtWidgets.QFormLayout.FieldRole, self.verticalLayout_5)
self.label_20 = QtWidgets.QLabel(self.groupBox_8)
self.label_20.setObjectName("label_20")
self.formLayout_8.setWidget(2, QtWidgets.QFormLayout.LabelRole, self.label_20)
self.horizontalLayout_6 = QtWidgets.QHBoxLayout()
self.horizontalLayout_6.setContentsMargins(0, -1, -1, -1)
self.horizontalLayout_6.setObjectName("horizontalLayout_6")
self.lineEditCobmysqlapi = QtWidgets.QLineEdit(self.groupBox_8)
self.lineEditCobmysqlapi.setObjectName("lineEditCobmysqlapi")
self.horizontalLayout_6.addWidget(self.lineEditCobmysqlapi)
self.toolButtonCobMySqlApiPath = QtWidgets.QToolButton(self.groupBox_8)
self.toolButtonCobMySqlApiPath.setObjectName("toolButtonCobMySqlApiPath")
self.horizontalLayout_6.addWidget(self.toolButtonCobMySqlApiPath)
self.formLayout_8.setLayout(2, QtWidgets.QFormLayout.FieldRole, self.horizontalLayout_6)
self.label_21 = QtWidgets.QLabel(self.groupBox_8)
self.label_21.setObjectName("label_21")
self.formLayout_8.setWidget(3, QtWidgets.QFormLayout.LabelRole, self.label_21)
self.horizontalLayout_7 = QtWidgets.QHBoxLayout()
self.horizontalLayout_7.setContentsMargins(0, -1, -1, -1)
self.horizontalLayout_7.setObjectName("horizontalLayout_7")
self.lineEditDbpreFramework = QtWidgets.QLineEdit(self.groupBox_8)
self.lineEditDbpreFramework.setObjectName("lineEditDbpreFramework")
self.horizontalLayout_7.addWidget(self.lineEditDbpreFramework)
self.toolButtonDbpreFramework = QtWidgets.QToolButton(self.groupBox_8)
self.toolButtonDbpreFramework.setObjectName("toolButtonDbpreFramework")
self.horizontalLayout_7.addWidget(self.toolButtonDbpreFramework)
self.formLayout_8.setLayout(3, QtWidgets.QFormLayout.FieldRole, self.horizontalLayout_7)
self.label_33 = QtWidgets.QLabel(self.groupBox_8)
self.label_33.setObjectName("label_33")
self.formLayout_8.setWidget(0, QtWidgets.QFormLayout.LabelRole, self.label_33)
self.lineEditDbpreExts = QtWidgets.QLineEdit(self.groupBox_8)
self.lineEditDbpreExts.setObjectName("lineEditDbpreExts")
self.formLayout_8.setWidget(0, QtWidgets.QFormLayout.FieldRole, self.lineEditDbpreExts)
self.gridLayout_3.addWidget(self.groupBox_8, 0, 0, 1, 1)
self.groupBox_9 = QtWidgets.QGroupBox(self.page)
self.groupBox_9.setObjectName("groupBox_9")
self.formLayout_9 = QtWidgets.QFormLayout(self.groupBox_9)
self.formLayout_9.setFieldGrowthPolicy(QtWidgets.QFormLayout.ExpandingFieldsGrow)
self.formLayout_9.setObjectName("formLayout_9")
self.label_22 = QtWidgets.QLabel(self.groupBox_9)
self.label_22.setObjectName("label_22")
self.formLayout_9.setWidget(0, QtWidgets.QFormLayout.LabelRole, self.label_22)
self.lineEditDBHOST = QtWidgets.QLineEdit(self.groupBox_9)
self.lineEditDBHOST.setText("")
self.lineEditDBHOST.setObjectName("lineEditDBHOST")
self.formLayout_9.setWidget(0, QtWidgets.QFormLayout.FieldRole, self.lineEditDBHOST)
self.label_23 = QtWidgets.QLabel(self.groupBox_9)
self.label_23.setObjectName("label_23")
self.formLayout_9.setWidget(1, QtWidgets.QFormLayout.LabelRole, self.label_23)
self.lineEditDBUSER = QtWidgets.QLineEdit(self.groupBox_9)
self.lineEditDBUSER.setObjectName("lineEditDBUSER")
self.formLayout_9.setWidget(1, QtWidgets.QFormLayout.FieldRole, self.lineEditDBUSER)
self.label_24 = QtWidgets.QLabel(self.groupBox_9)
self.label_24.setObjectName("label_24")
self.formLayout_9.setWidget(2, QtWidgets.QFormLayout.LabelRole, self.label_24)
self.verticalLayout_6 = QtWidgets.QVBoxLayout()
self.verticalLayout_6.setContentsMargins(3, 3, 3, 3)
self.verticalLayout_6.setObjectName("verticalLayout_6")
self.lineEditDBPASSWD = QtWidgets.QLineEdit(self.groupBox_9)
self.lineEditDBPASSWD.setEchoMode(QtWidgets.QLineEdit.Password)
self.lineEditDBPASSWD.setObjectName("lineEditDBPASSWD")
self.verticalLayout_6.addWidget(self.lineEditDBPASSWD)
self.checkBoxShowDbPass = QtWidgets.QCheckBox(self.groupBox_9)
self.checkBoxShowDbPass.setObjectName("checkBoxShowDbPass")
self.verticalLayout_6.addWidget(self.checkBoxShowDbPass)
self.formLayout_9.setLayout(2, QtWidgets.QFormLayout.FieldRole, self.verticalLayout_6)
self.label_25 = QtWidgets.QLabel(self.groupBox_9)
self.label_25.setObjectName("label_25")
self.formLayout_9.setWidget(3, QtWidgets.QFormLayout.LabelRole, self.label_25)
self.lineEditDBNAME = QtWidgets.QLineEdit(self.groupBox_9)
self.lineEditDBNAME.setText("")
self.lineEditDBNAME.setObjectName("lineEditDBNAME")
self.formLayout_9.setWidget(3, QtWidgets.QFormLayout.FieldRole, self.lineEditDBNAME)
self.label_26 = QtWidgets.QLabel(self.groupBox_9)
self.label_26.setObjectName("label_26")
self.formLayout_9.setWidget(4, QtWidgets.QFormLayout.LabelRole, self.label_26)
self.lineEditDBPORT = QtWidgets.QLineEdit(self.groupBox_9)
self.lineEditDBPORT.setText("")
self.lineEditDBPORT.setObjectName("lineEditDBPORT")
self.formLayout_9.setWidget(4, QtWidgets.QFormLayout.FieldRole, self.lineEditDBPORT)
self.label_27 = QtWidgets.QLabel(self.groupBox_9)
self.label_27.setObjectName("label_27")
self.formLayout_9.setWidget(5, QtWidgets.QFormLayout.LabelRole, self.label_27)
self.lineEditDBSOCKET = QtWidgets.QLineEdit(self.groupBox_9)
self.lineEditDBSOCKET.setText("")
self.lineEditDBSOCKET.setObjectName("lineEditDBSOCKET")
self.formLayout_9.setWidget(5, QtWidgets.QFormLayout.FieldRole, self.lineEditDBSOCKET)
spacerItem7 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.formLayout_9.setItem(6, QtWidgets.QFormLayout.LabelRole, spacerItem7)
self.gridLayout_3.addWidget(self.groupBox_9, 1, 0, 1, 1)
self.stackedWidgetSQL.addWidget(self.page)
self.page_2 = QtWidgets.QWidget()
self.page_2.setObjectName("page_2")
self.formLayout_10 = QtWidgets.QFormLayout(self.page_2)
self.formLayout_10.setContentsMargins(0, 0, 0, 0)
self.formLayout_10.setObjectName("formLayout_10")
self.label_29 = QtWidgets.QLabel(self.page_2)
self.label_29.setObjectName("label_29")
self.formLayout_10.setWidget(1, QtWidgets.QFormLayout.LabelRole, self.label_29)
self.horizontalLayout_11 = QtWidgets.QHBoxLayout()
self.horizontalLayout_11.setObjectName("horizontalLayout_11")
self.lineEditESQLOC = QtWidgets.QLineEdit(self.page_2)
self.lineEditESQLOC.setObjectName("lineEditESQLOC")
self.horizontalLayout_11.addWidget(self.lineEditESQLOC)
self.toolButtonESQLOC = QtWidgets.QToolButton(self.page_2)
self.toolButtonESQLOC.setObjectName("toolButtonESQLOC")
self.horizontalLayout_11.addWidget(self.toolButtonESQLOC)
self.formLayout_10.setLayout(1, QtWidgets.QFormLayout.FieldRole, self.horizontalLayout_11)
self.label_34 = QtWidgets.QLabel(self.page_2)
self.label_34.setObjectName("label_34")
self.formLayout_10.setWidget(0, QtWidgets.QFormLayout.LabelRole, self.label_34)
self.lineEditesqlOcExts = QtWidgets.QLineEdit(self.page_2)
self.lineEditesqlOcExts.setObjectName("lineEditesqlOcExts")
self.formLayout_10.setWidget(0, QtWidgets.QFormLayout.FieldRole, self.lineEditesqlOcExts)
self.stackedWidgetSQL.addWidget(self.page_2)
self.verticalLayout_9.addWidget(self.stackedWidgetSQL)
self.scrollArea_3.setWidget(self.scrollAreaWidgetContents_5)
self.gridLayout.addWidget(self.scrollArea_3, 1, 0, 1, 1)
self.label_28 = QtWidgets.QLabel(self.tabSqlCobol)
self.label_28.setOpenExternalLinks(True)
self.label_28.setObjectName("label_28")
self.gridLayout.addWidget(self.label_28, 0, 0, 1, 1)
icon5 = QtGui.QIcon()
icon5.addPixmap(QtGui.QPixmap(":/ide-icons/rc/database.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.tabWidget.addTab(self.tabSqlCobol, icon5, "")
self.widget_2.addWidget(self.tabWidget, 0, 0, 1, 1)
self.gridLayout_2.addWidget(self.widget, 0, 0, 1, 1)
self.retranslateUi(Dialog)
self.tabWidget.setCurrentIndex(2)
self.stackedWidgetSQL.setCurrentIndex(0)
self.buttonBox.accepted.connect(Dialog.accept)
self.buttonBox.rejected.connect(Dialog.reject)
QtCore.QMetaObject.connectSlotsByName(Dialog)
    def retranslateUi(self, Dialog):
        """Install all user-visible strings on the preferences dialog.

        Auto-generated by pyuic5 from the Qt Designer .ui file. Called from
        setupUi and again whenever the application language changes, so every
        literal is routed through QCoreApplication.translate. NOTE: manual
        edits here are lost when the .ui file is recompiled.
        """
        _translate = QtCore.QCoreApplication.translate
        Dialog.setWindowTitle(_translate("Dialog", "Preferences"))
        self.widget.setAccessibleName(_translate("Dialog", "widget", "widget"))
        # --- "Editor" tab: view options ---
        self.groupBox_3.setTitle(_translate("Dialog", "View"))
        self.label_10.setText(_translate("Dialog", "Display line numbers:"))
        self.checkBoxViewLineNumber.setToolTip(_translate("Dialog", "Show/Hide line numbers"))
        self.checkBoxViewLineNumber.setStatusTip(_translate("Dialog", "Show/Hide line numbers"))
        self.label_11.setText(_translate("Dialog", "Highlight current line:"))
        self.checkBoxHighlightCurrentLine.setToolTip(_translate("Dialog", "Highlight caret line"))
        self.checkBoxHighlightCurrentLine.setStatusTip(_translate("Dialog", "Highlight caret line"))
        self.label_12.setText(_translate("Dialog", "Highlight whitespaces:"))
        self.checkBoxHighlightWhitespaces.setToolTip(_translate("Dialog", "Show visual whitespaces"))
        self.checkBoxHighlightWhitespaces.setStatusTip(_translate("Dialog", "Show visual whitespaces"))
        self.label_13.setText(_translate("Dialog", "Show errors:"))
        self.checkBoxShowErrors.setToolTip(_translate("Dialog", "Compile your code on the fly and show errors while you\'re typing"))
        self.label_38.setText(_translate("Dialog", "Show cursor position in bytes:"))
        self.cb_cursor_pos_in_bytes.setToolTip(_translate("Dialog", "<html><head/><body><p>Check this if you want to see the cursor position expressed in bytes instead of characters (encoding is then taken into account).</p></body></html>"))
        # --- "Editor" tab: margins ---
        self.groupBox_11.setTitle(_translate("Dialog", "Margins"))
        self.label_5.setText(_translate("Dialog", "Margin 1:"))
        self.label_39.setText(_translate("Dialog", "Margin 2:"))
        self.label_40.setText(_translate("Dialog", "Margin 3:"))
        self.label_41.setText(_translate("Dialog", "Margin 4:"))
        # --- "Editor" tab: comments ---
        self.groupBox.setTitle(_translate("Dialog", "Comments"))
        self.label_7.setText(_translate("Dialog", "Symbol"))
        self.lineEditCommentIndicator.setText(_translate("Dialog", "*>"))
        # --- "Editor" tab: indentation ---
        self.groupBox_2.setTitle(_translate("Dialog", "Indentation"))
        self.label.setText(_translate("Dialog", "Width:"))
        self.spinBoxEditorTabLen.setToolTip(_translate("Dialog", "Tab length (number of spaces)"))
        self.spinBoxEditorTabLen.setStatusTip(_translate("Dialog", "Tab length (number of spaces)"))
        self.checkBoxEditorAutoIndent.setToolTip(_translate("Dialog", "Enable/Disable automatic indentation"))
        self.checkBoxEditorAutoIndent.setStatusTip(_translate("Dialog", "Enable/Disable automatic indentation"))
        self.checkBoxEditorAutoIndent.setText(_translate("Dialog", "Automatic indentation"))
        self.checkBoxSmartBackspace.setToolTip(_translate("Dialog", "Backspace will act as shift+tab, i.e. it will eat as much spaces \n"
"as possible to get back to the previous indentation level."))
        self.checkBoxSmartBackspace.setText(_translate("Dialog", "Intelligent backspace"))
        # --- "Editor" tab: code completion ---
        self.groupBox_4.setTitle(_translate("Dialog", "Code completion"))
        self.label_2.setText(_translate("Dialog", "Trigger length:"))
        self.spinBoxEditorCCTriggerLen.setToolTip(_translate("Dialog", "Number of characters needed to trigger auto completion"))
        self.spinBoxEditorCCTriggerLen.setStatusTip(_translate("Dialog", "Number of characters needed to trigger auto completion"))
        self.label_16.setText(_translate("Dialog", "Proposed keywords:"))
        self.rbLowerCaseKwds.setToolTip(_translate("Dialog", "All proposed keywords are lower-case"))
        self.rbLowerCaseKwds.setText(_translate("Dialog", "&lower-case"))
        self.rbUpperCaseKwds.setToolTip(_translate("Dialog", "All proposed keywords are UPPER-CASE"))
        self.rbUpperCaseKwds.setText(_translate("Dialog", "&UPPER-CASE"))
        self.label_31.setText(_translate("Dialog", "Filter mode:"))
        self.comboCcFilterMode.setItemText(0, _translate("Dialog", "Prefix (faster)"))
        self.comboCcFilterMode.setItemText(1, _translate("Dialog", "Subsequence (smarter)"))
        # --- "Editor" tab: end-of-line handling ---
        self.groupBox_10.setTitle(_translate("Dialog", "EOL"))
        self.label_30.setText(_translate("Dialog", "Preferred EOL:"))
        self.comboBoxPreferredEOL.setItemText(0, _translate("Dialog", "System"))
        self.comboBoxPreferredEOL.setItemText(1, _translate("Dialog", "Linux"))
        self.comboBoxPreferredEOL.setItemText(2, _translate("Dialog", "Mac"))
        self.comboBoxPreferredEOL.setItemText(3, _translate("Dialog", "Windows"))
        self.checkBoxAutodetectEOL.setText(_translate("Dialog", "Auto detect EOL"))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.tabEditor), _translate("Dialog", "Editor"))
        # --- "Style" tab ---
        self.groupBox_5.setTitle(_translate("Dialog", "Application style"))
        self.radioButtonColorWhite.setToolTip(_translate("Dialog", "Use native style"))
        self.radioButtonColorWhite.setStatusTip(_translate("Dialog", "Use native style"))
        self.radioButtonColorWhite.setText(_translate("Dialog", "&Native"))
        self.radioButtonColorDark.setToolTip(_translate("Dialog", "Use a global dark style (using QDarkStyleSheet)"))
        self.radioButtonColorDark.setStatusTip(_translate("Dialog", "Use a global dark style (using QDarkStyleSheet)"))
        self.radioButtonColorDark.setText(_translate("Dialog", "Dark"))
        self.lblIconTheme.setText(_translate("Dialog", "Icon theme:"))
        self.groupBox_6.setTitle(_translate("Dialog", "Editor font"))
        self.label_3.setText(_translate("Dialog", "Editor font:"))
        self.fontComboBox.setToolTip(_translate("Dialog", "Change editor font"))
        self.fontComboBox.setStatusTip(_translate("Dialog", "Change editor font"))
        self.label_4.setText(_translate("Dialog", "Font size:"))
        self.spinBoxFontSize.setToolTip(_translate("Dialog", "Change editor font size"))
        self.spinBoxFontSize.setStatusTip(_translate("Dialog", "Change editor font size"))
        self.groupBox_7.setTitle(_translate("Dialog", "Editor color scheme"))
        self.listWidgetColorSchemes.setToolTip(_translate("Dialog", "Pygments color schemes."))
        self.listWidgetColorSchemes.setStatusTip(_translate("Dialog", "Pygments color schemes."))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.tabStyle), _translate("Dialog", "Style"))
        # --- "Compiler" tab ---
        self.label_compiler_path.setText(_translate("Dialog", "Compiler path:"))
        self.lineEditCompilerPath.setToolTip(_translate("Dialog", "<html><head/><body><p>GnuCOBOL compiler path (complete path or executable name if correct path is defined in PATH).</p></body></html>"))
        self.lineEditCompilerPath.setStatusTip(_translate("Dialog", "Full path to the GnuCOBOL compiler."))
        self.toolButtonCustomCompilerPath.setText(_translate("Dialog", "..."))
        self.toolButtonCheckCompiler.setText(_translate("Dialog", "Check compiler"))
        self.groupBoxEnvVars.setTitle(_translate("Dialog", "Environment variables"))
        self.cbPATH.setText(_translate("Dialog", "PATH:"))
        self.cbCOB_CONFIG_DIR.setText(_translate("Dialog", "COB_CONFIG_DIR:"))
        self.cbCOB_COPY_DIR.setText(_translate("Dialog", "COB_COPY_DIR:"))
        self.cbCOB_INCLUDE_PATH.setText(_translate("Dialog", "COB_INCLUDE_PATH:"))
        self.cbCOB_LIB_PATH.setText(_translate("Dialog", "COB_LIB_PATH:"))
        self.cbAutoDetectSublmodules.setToolTip(_translate("Dialog", "<html><head/><body><p>If checked, the IDE will recursively look for CALL PROGRAM statements to build a list of submodules that will be automatically compiled when you\'re compiling the main program.</p></body></html>"))
        self.cbAutoDetectSublmodules.setText(_translate("Dialog", "Auto-detect and compile submodules"))
        self.labelVCVARS.setText(_translate("Dialog", "VCVARSALL path:"))
        self.lineEditVCVARS.setToolTip(_translate("Dialog", "<html><head/><body><p>Path to VCVARSALL.bat. It is needed if you\'re using a GnuCOBOL compiler built with Visual Studio.</p><p><br/>The VCVARSALL.bat file is located under the VC directory of you Visual Studio installation. Make sure to use the same version as the one used to build the compiler!</p></body></html>"))
        self.toolButtonVCVARS.setText(_translate("Dialog", "..."))
        self.combo_arch.setItemText(0, _translate("Dialog", "x86"))
        self.combo_arch.setItemText(1, _translate("Dialog", "x64"))
        self.label_36.setText(_translate("Dialog", "Output directory:"))
        self.cb_copy_runtime_dlls.setText(_translate("Dialog", "Copy runtime dlls to output directory"))
        self.label_32.setText(_translate("Dialog", "Associated extensions:"))
        self.label_8.setText(_translate("Dialog", "Standard:"))
        self.comboBoxStandard.setItemText(0, _translate("Dialog", "default"))
        self.comboBoxStandard.setItemText(1, _translate("Dialog", "cobol2002"))
        self.comboBoxStandard.setItemText(2, _translate("Dialog", "cobol85"))
        self.comboBoxStandard.setItemText(3, _translate("Dialog", "ibm"))
        self.comboBoxStandard.setItemText(4, _translate("Dialog", "mvs"))
        self.comboBoxStandard.setItemText(5, _translate("Dialog", "bs2000"))
        self.comboBoxStandard.setItemText(6, _translate("Dialog", "mf"))
        self.comboBoxStandard.setItemText(7, _translate("Dialog", "cobol2014"))
        self.comboBoxStandard.setItemText(8, _translate("Dialog", "acu"))
        self.comboBoxStandard.setItemText(9, _translate("Dialog", "none"))
        self.label_9.setText(_translate("Dialog", "Free format:"))
        self.checkBoxFreeFormat.setToolTip(_translate("Dialog", "Code and compile with free format support"))
        self.label_15.setText(_translate("Dialog", "Compiler flags"))
        self.cb_ftrace.setToolTip(_translate("Dialog", "<html><head/><body><p>Generate trace code</p><p> - Executed SECTION/PARAGRAPH</p></body></html>"))
        self.cb_ftrace.setText(_translate("Dialog", "-ftrace"))
        self.cb_static.setToolTip(_translate("Dialog", "Link statically"))
        self.cb_static.setText(_translate("Dialog", "-static"))
        self.cb_g.setToolTip(_translate("Dialog", "Enable C compiler debug / stack check / trace"))
        self.cb_g.setText(_translate("Dialog", "-g"))
        self.cb_debugging_line.setToolTip(_translate("Dialog", "<html><head/><body><p>Enable debugging lines</p><p> - \'D\' in indicator column or floating >>D</p></body></html>"))
        self.cb_debugging_line.setText(_translate("Dialog", "-fdebugging-line"))
        self.cb_ftraceall.setToolTip(_translate("Dialog", "<html><head/><body><p>Generate trace code</p><p> - Executed SECTION/PARAGRAPH/STATEMENTS</p><p> - Turned on by -debug</p></body></html>"))
        self.cb_ftraceall.setText(_translate("Dialog", "-ftraceall"))
        self.cb_debug.setText(_translate("Dialog", "-debug"))
        self.cb_w.setToolTip(_translate("Dialog", "Enable all warnings"))
        self.cb_w.setText(_translate("Dialog", "-W"))
        self.cb_wall.setToolTip(_translate("Dialog", "Enable most warnings"))
        self.cb_wall.setText(_translate("Dialog", "-Wall"))
        self.label_35.setText(_translate("Dialog", "Extra compiler flags"))
        self.label_37.setText(_translate("Dialog", "Copybook paths:"))
        self.listWidgetCopyPaths.setToolTip(_translate("Dialog", "The list of copybooks paths. You can use drag & drop to reorder them."))
        self.btAddAbsoluteCopyPath.setText(_translate("Dialog", "..."))
        self.btAddRelativeCopyPath.setText(_translate("Dialog", "..."))
        self.btRemoveCopyPath.setText(_translate("Dialog", "..."))
        self.label_17.setText(_translate("Dialog", "Library paths:"))
        self.listWidgetLibPaths.setToolTip(_translate("Dialog", "The list of library paths. You can use drag & drop to reorder them."))
        self.toolButtonAddLibPath.setToolTip(_translate("Dialog", "Add an absolute library path"))
        self.toolButtonAddLibPath.setText(_translate("Dialog", "..."))
        self.toolButtonAddRelativeLibPath.setToolTip(_translate("Dialog", "Add a relative library path"))
        self.toolButtonAddRelativeLibPath.setText(_translate("Dialog", "..."))
        self.toolButtonRemoveLibPath.setText(_translate("Dialog", "..."))
        self.label_18.setText(_translate("Dialog", "Libraries"))
        self.lineEditLibs.setToolTip(_translate("Dialog", "<html><head/><body><p>Add the libraries you would like your programs to link with here (-l option)</p></body></html>"))
        self.le_compiler_flags.setToolTip(_translate("Dialog", "You can add other compiler flags here"))
        self.btCompilerFlagsHelp.setText(_translate("Dialog", "?"))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.tabCompiler), _translate("Dialog", "Compiler"))
        # --- "Run" tab ---
        self.lbl_external_terminal_command.setText(_translate("Dialog", "External terminal command:"))
        self.lineEditRunTerm.setToolTip(_translate("Dialog", "External terminal command (filename is appended at the end of the command)"))
        self.lineEditRunTerm.setStatusTip(_translate("Dialog", "External terminal command"))
        self.label_42.setText(_translate("Dialog", "Working Directory:"))
        self.bt_working_dir.setText(_translate("Dialog", "..."))
        self.checkBoxRunExtTerm.setText(_translate("Dialog", "Run in external terminal"))
        self.label_43.setText(_translate("Dialog", "Terminal mode:"))
        self.groupBox_12.setTitle(_translate("Dialog", "Environment"))
        item = self.tw_run_env.horizontalHeaderItem(0)
        item.setText(_translate("Dialog", "Key"))
        item = self.tw_run_env.horizontalHeaderItem(1)
        item.setText(_translate("Dialog", "Value"))
        self.bt_add_run_env.setText(_translate("Dialog", "Add "))
        self.bt_rm_run_env.setText(_translate("Dialog", "Remove"))
        self.bt_clear_run_env.setText(_translate("Dialog", "Clear"))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.tabRun), _translate("Dialog", "Run"))
        # --- "SQL COBOL" tab (dbpre / esqlOC integration) ---
        self.groupBox_8.setTitle(_translate("Dialog", "DBPRE Configuration"))
        self.label_19.setText(_translate("Dialog", "dbpre"))
        self.toolButtonDbpre.setText(_translate("Dialog", "..."))
        self.labelDbpreVersion.setText(_translate("Dialog", "invalid dbpre executable"))
        self.label_20.setText(_translate("Dialog", "cobmysqlapi:"))
        self.toolButtonCobMySqlApiPath.setText(_translate("Dialog", "..."))
        self.label_21.setText(_translate("Dialog", "Framework:"))
        self.toolButtonDbpreFramework.setText(_translate("Dialog", "..."))
        self.label_33.setText(_translate("Dialog", "Associated extensions:"))
        self.groupBox_9.setTitle(_translate("Dialog", "DB Connection Parameters"))
        self.label_22.setText(_translate("Dialog", "DBHOST:"))
        self.label_23.setText(_translate("Dialog", "DBUSER:"))
        self.label_24.setText(_translate("Dialog", "DBPASSWD:"))
        self.checkBoxShowDbPass.setText(_translate("Dialog", "Show password"))
        self.label_25.setText(_translate("Dialog", "DBNAME:"))
        self.label_26.setText(_translate("Dialog", "DBPORT:"))
        self.label_27.setText(_translate("Dialog", "DBSOCKET:"))
        self.label_29.setText(_translate("Dialog", "esqlOC folder:"))
        self.toolButtonESQLOC.setText(_translate("Dialog", "..."))
        self.label_34.setText(_translate("Dialog", "Associated extensions:"))
        self.label_28.setText(_translate("Dialog", "<html><head/><body><p align=\"center\">Read the <a href=\"http://opencobolide.readthedocs.org/en/latest/advanced.html#how-to-setup-dbpre-integration-with-opencobolide\"><span style=\" text-decoration: underline; color:#2980b9;\">guide</span></a> to get started</p></body></html>"))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.tabSqlCobol), _translate("Dialog", "SQL COBOL"))
from open_cobol_ide.view.editors import CobolCodeEdit
from open_cobol_ide.view.widgets import ColorPicker, PathLineEdit
from . import ide_rc | OpenCobolIDE/OpenCobolIDE | open_cobol_ide/view/forms/dlg_preferences_ui.py | Python | gpl-3.0 | 72,494 |
#!/usr/bin/env python
#
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=g-bad-name
# pylint: disable=unused-argument
# os_compat must be first to ensure timezones are UTC.
# pylint: disable=unused-import
# pylint: disable=g-bad-import-order
from google.appengine.tools import os_compat
# testutil must be imported before mock.
# pylint: disable=g-bad-import-order
from testlib import testutil
import base64
import collections
import datetime
import httplib
import math
import os
import time
import unittest
import mock
import mox
import json
from google.appengine.api import apiproxy_stub_map
from google.appengine.api import datastore
from google.appengine.api import datastore_errors
from google.appengine.api import taskqueue
from google.appengine.ext import db
from google.appengine.ext import key_range
from mapreduce import context
from mapreduce import control
from mapreduce import datastore_range_iterators as db_iters
from mapreduce import errors
from mapreduce import handlers
from mapreduce import hooks
from mapreduce import input_readers
from mapreduce import key_ranges
from mapreduce import map_job_context
from mapreduce import model
from mapreduce import operation
from mapreduce import output_writers
from mapreduce import parameters
from mapreduce import shard_life_cycle
from mapreduce import test_support
from mapreduce import util
from mapreduce.api import map_job
from google.appengine.ext.webapp import mock_webapp
# Default mapper parameters used by the tests below.
MAPPER_PARAMS = {"batch_size": 50}
# Shorthands for the done-callback parameter names declared on MapreduceSpec.
PARAM_DONE_CALLBACK = model.MapreduceSpec.PARAM_DONE_CALLBACK
PARAM_DONE_CALLBACK_QUEUE = model.MapreduceSpec.PARAM_DONE_CALLBACK_QUEUE
class TestHooks(hooks.Hooks):
  """Hooks implementation that records every enqueue call before delegating.

  Each enqueue_* override appends a (task, queue_name) tuple to the matching
  class-level list (shared across instances) and then enqueues the task for
  real, so tests can both observe and exercise the normal task flow.
  """

  enqueue_worker_task_calls = []
  enqueue_done_task_calls = []
  enqueue_controller_task_calls = []
  enqueue_kickoff_task_calls = []

  def __init__(self, mapper):
    super(TestHooks, self).__init__(mapper)

  def _record_and_enqueue(self, calls, task, queue_name):
    """Remember the call in the given list, then add the task for real."""
    calls.append((task, queue_name))
    task.add(queue_name=queue_name)

  def enqueue_worker_task(self, task, queue_name):
    self._record_and_enqueue(self.enqueue_worker_task_calls, task, queue_name)

  def enqueue_kickoff_task(self, task, queue_name):
    self._record_and_enqueue(self.enqueue_kickoff_task_calls, task, queue_name)

  def enqueue_done_task(self, task, queue_name):
    self._record_and_enqueue(self.enqueue_done_task_calls, task, queue_name)

  def enqueue_controller_task(self, task, queue_name):
    self._record_and_enqueue(self.enqueue_controller_task_calls, task,
                             queue_name)

  @classmethod
  def reset(cls):
    """Forget all recorded calls."""
    cls.enqueue_worker_task_calls = []
    cls.enqueue_done_task_calls = []
    cls.enqueue_controller_task_calls = []
    cls.enqueue_kickoff_task_calls = []
class TestKind(db.Model):
  """Minimal datastore model used as generic fixture data in the tests."""
  # Arbitrary string property; the default lets entities be put() unmodified.
  foobar = db.StringProperty(default="meep")
def TestMap(entity):
  """No-op map handler used where only a resolvable handler spec is needed."""
  return None
class MockTime(object):
  """Deterministic stand-in for the time module's time() function.

  The current mock timestamp lives in the class attribute `now` (seeded with
  the real clock at import time) and only moves when advance_time is called.
  """

  now = time.time()

  @staticmethod
  def time():
    """Return the current mock timestamp in seconds since the epoch."""
    return MockTime.now

  @staticmethod
  def advance_time(delta):
    """Shift the mock clock by delta seconds (negative moves it back)."""
    MockTime.now = MockTime.now + delta
class TestEntity(db.Model):
  """Datastore entity the mapper tests iterate over."""
  # Single integer payload; the default lets entities be put() unmodified.
  a = db.IntegerProperty(default=1)
class TestHandler(object):
  """Map handler that records the key of every entity it processes.

  Class attributes (shared by all instances):
    processed_keys: str() of each processed entity key, in call order.
    delay: mock-time seconds consumed per call via MockTime.advance_time.
  """

  processed_keys = []
  delay = 0

  def __call__(self, entity):
    """Record the entity's key, then advance the mocked clock by `delay`.

    Args:
      entity: entity to process.
    """
    self.processed_keys.append(str(entity.key()))
    MockTime.advance_time(self.delay)

  @staticmethod
  def reset():
    """Forget all recorded keys and zero out the per-call delay."""
    TestHandler.processed_keys = []
    TestHandler.delay = 0
class TestOperation(operation.Operation):
  """Operation that records the key of the entity it wraps when executed."""

  processed_keys = []

  def __init__(self, entity):
    self.entity = entity

  def __call__(self, cxt):
    """Append the wrapped entity's key; the context argument is ignored."""
    self.processed_keys.append(str(self.entity.key()))

  @classmethod
  def reset(cls):
    """Drop all recorded keys."""
    cls.processed_keys = []
def test_handler_raise_exception(entity):
"""Test handler function which always raises exception.
Raises:
ValueError: always
"""
raise ValueError()
def test_handler_raise_fail_job_exception(entity):
  """Map handler that always aborts the entire job.

  Raises:
    errors.FailJobError: unconditionally, for every entity.
  """
  raise errors.FailJobError()
def test_handler_yield_op(entity):
  """Map handler that emits two TestOperation instances for one entity."""
  for _ in (0, 1):
    yield TestOperation(entity)
def test_param_validator_success(params):
"""Test parameter validator that is successful."""
params["test"] = "good"
def test_param_validator_raise_exception(params):
"""Test parameter validator that fails."""
raise Exception("These params are bad")
def test_handler_yield_keys(entity):
  """Map handler that emits the processed entity's datastore key."""
  yield entity.key()
class InputReader(input_readers.DatastoreInputReader):
  """Test input reader which records number of yields.

  Class attributes:
    yields: total number of entities yielded across all instances.
    next_instance_id: monotonically increasing id handed to new instances.
  """
  yields = 0
  # Used to uniquely identify an input reader instance across serializations.
  next_instance_id = 0
  def __init__(self, iterator, instance_id=None):
    super(InputReader, self).__init__(iterator)
    # Stable id assigned at split time and preserved by to_json/from_json.
    self.instance_id = instance_id
  def __iter__(self):
    # Delegate iteration to the base reader, counting every yielded entity.
    for entity in input_readers.DatastoreInputReader.__iter__(self):
      InputReader.yields += 1
      yield entity
  @classmethod
  def split_input(cls, mapper_spec):
    """Split into the exact number of shards asked for."""
    shard_count = mapper_spec.shard_count
    query_spec = cls._get_query_spec(mapper_spec)
    # One full (unbounded) key range per requested shard, so every shard
    # sees the whole kind; fine for tests that only count processing.
    k_ranges = [key_ranges.KeyRangesFactory.create_from_list(
        [key_range.KeyRange()]) for _ in range(shard_count)]
    iters = [db_iters.RangeIteratorFactory.create_key_ranges_iterator(
        r, query_spec, cls._KEY_RANGE_ITER_CLS) for r in k_ranges]
    results = []
    for i in iters:
      results.append(cls(i, cls.next_instance_id))
      cls.next_instance_id += 1
    return results
  def to_json(self):
    # Serialize both the underlying iterator and our instance id.
    return {"iter": self._iter.to_json(),
            "instance_id": self.instance_id}
  @classmethod
  def from_json(cls, json):
    """Create new DatastoreInputReader from json, encoded by to_json.
    Args:
      json: json representation of DatastoreInputReader.
    Returns:
      an instance of DatastoreInputReader with all data deserialized from json.
    """
    return cls(db_iters.RangeIteratorFactory.from_json(json["iter"]),
               json["instance_id"])
  @classmethod
  def reset(cls):
    # Restore pristine class-level counters between tests.
    cls.yields = 0
    cls.next_instance_id = 0
class EmptyInputReader(input_readers.DatastoreInputReader):
  """Always returns nothing from input splits."""
  @classmethod
  def split_input(cls, mapper_spec):
    # Returning None signals the framework that there is no input at all,
    # so the job finishes without spawning any worker shards.
    return None
class TestOutputWriter(output_writers.OutputWriter):
  """Output writer that records its lifecycle events for assertions."""
  # Store lifecycle events (strings) in call order; shared across instances.
  events = []
  @classmethod
  def reset(cls):
    """Forget all recorded events."""
    cls.events = []
  @classmethod
  def validate(cls, mapper_spec):
    """Validate mapper params; fails when "fail_writer_validate" is set."""
    assert isinstance(mapper_spec, model.MapperSpec)
    if "fail_writer_validate" in mapper_spec.params:
      raise Exception("Failed Validation")
  @classmethod
  def init_job(cls, mapreduce_state):
    """Record job-level initialization."""
    assert isinstance(mapreduce_state, model.MapreduceState)
    cls.events.append("init_job")
  @classmethod
  def finalize_job(cls, mapreduce_state):
    """Record job-level finalization."""
    assert isinstance(mapreduce_state, model.MapreduceState)
    cls.events.append("finalize_job")
  @classmethod
  def create(cls, mr_spec, shard_number, shard_attempt, _writer_state=None):
    """Record writer creation for a shard and return a fresh writer."""
    cls.events.append("create-" + str(shard_number))
    return cls()
  def to_json(self):
    # This writer carries no per-instance state worth serializing.
    return {}
  @classmethod
  def from_json(cls, json_dict):
    return cls()
  def write(self, data):
    """Record a single written value; requires an active mapreduce context."""
    ctx = context.get()
    assert isinstance(ctx, context.Context)
    self.events.append("write-" + str(data))
  def finalize(self, ctx, shard_state):
    """Record per-shard finalization."""
    assert isinstance(ctx, context.Context)
    self.events.append("finalize-" + str(shard_state.shard_number))
  def _supports_slice_recovery(self, mapper_spec):
    # Opt in to slice recovery so _recover is exercised by the tests.
    return True
  def _recover(self, mr_spec, shard_number, shard_attempt):
    """Record recovery and hand back a brand new writer instance."""
    self.events.append("recover")
    return self.__class__()
class ShardLifeCycleOutputWriter(shard_life_cycle._ShardLifeCycle,
                                 TestOutputWriter):
  """OutputWriter implementing life cycle methods.

  Records begin/end events for shards and slices on top of the event
  recording inherited from TestOutputWriter.
  """
  def begin_shard(self, shard_ctx):
    assert isinstance(shard_ctx, map_job_context.ShardContext)
    self.events.append("begin_shard-%s" % shard_ctx.id)
  def end_shard(self, shard_ctx):
    assert isinstance(shard_ctx, map_job_context.ShardContext)
    self.events.append("end_shard-%s" % shard_ctx.id)
  def begin_slice(self, slice_ctx):
    assert isinstance(slice_ctx, map_job_context.SliceContext)
    self.events.append("begin_slice-%s" % slice_ctx.number)
  def end_slice(self, slice_ctx):
    assert isinstance(slice_ctx, map_job_context.SliceContext)
    self.events.append("end_slice-%s" % slice_ctx.number)
class UnfinalizableTestOutputWriter(TestOutputWriter):
  """An output writer where all calls to finalize fail."""
  def finalize(self, ctx, shard_state):
    # Always fail so tests can exercise the shard finalize error path.
    raise Exception("This will always break")
class MatchesContext(mox.Comparator):
  """Mox comparator matching context instances by selected attributes."""

  def __init__(self, **kwargs):
    self.kwargs = kwargs

  def equals(self, ctx):
    """Return True iff ctx's task_retry_count matches the expected value."""
    expected_retry_count = self.kwargs.get("task_retry_count", 0)
    return expected_retry_count == ctx.task_retry_count

  def __repr__(self):
    return "MatchesContext(%s)" % self.kwargs
# Fully qualified names used when assembling mapper specs in the tests.
ENTITY_KIND = "__main__.TestEntity"
MAPPER_HANDLER_SPEC = __name__ + "." + TestHandler.__name__
# Re-exported counter name for the number of mapper calls, for brevity.
COUNTER_MAPPER_CALLS = context.COUNTER_MAPPER_CALLS
class MapreduceHandlerTestBase(testutil.HandlerTestBase):
  """Base class for all mapreduce's HugeTaskHandler tests.

  Contains common fixture and utility methods.
  """
  def setUp(self):
    """Sets up the test harness."""
    testutil.HandlerTestBase.setUp(self)
    # Clear class-level state the fixtures accumulate between test methods.
    TestHandler.reset()
    TestOutputWriter.reset()
    TestHooks.reset()
  def find_task_by_name(self, tasks, name):
    """Find a task with given name.
    Args:
      tasks: iterable of tasks.
      name: a name to look for.
    Returns:
      task or None
    """
    for task in tasks:
      if task["name"] == name:
        return task
    return None
  def verify_shard_task(self, task, shard_id, slice_id=0, eta=None,
                        countdown=None, verify_spec=True, **kwargs):
    """Checks that all shard task properties have expected values.
    Args:
      task: task to check.
      shard_id: expected shard id.
      slice_id: expected slice_id.
      eta: expected task eta.
      countdown: expected task delay from now.
      verify_spec: check mapreduce_spec if True.
      **kwargs: Extra keyword arguments to pass to verify_mapreduce_spec.
    """
    expected_task_name = handlers.MapperWorkerCallbackHandler.get_task_name(
        shard_id, slice_id)
    self.assertEqual(expected_task_name, task["name"])
    self.assertEqual("POST", task["method"])
    self.assertEqual("/mapreduce/worker_callback/" + shard_id, task["url"])
    if eta:
      self.assertEqual(eta.strftime("%Y/%m/%d %H:%M:%S"), task["eta"])
    if countdown:
      # Allow slack between task creation and this check (10 seconds).
      expected_etc_sec = time.time() + countdown
      eta_sec = time.mktime(time.strptime(task["eta"], "%Y/%m/%d %H:%M:%S"))
      self.assertTrue(expected_etc_sec < eta_sec + 10)
    # Decode the HugeTask payload the same way the real handler would.
    request = mock_webapp.MockRequest()
    request.body = base64.b64decode(task["body"])
    request.headers = dict(task["headers"])
    payload = model.HugeTask.decode_payload(request)
    self.assertEqual(str(shard_id), payload["shard_id"])
    self.assertEqual(str(slice_id), payload["slice_id"])
    if verify_spec:
      self.assertTrue(payload["mapreduce_spec"])
      mapreduce_spec = model.MapreduceSpec.from_json_str(
          payload["mapreduce_spec"])
      self.verify_mapreduce_spec(mapreduce_spec, **kwargs)
  def verify_mapreduce_spec(self, mapreduce_spec, **kwargs):
    """Check all mapreduce spec properties to have expected values.
    Args:
      mapreduce_spec: mapreduce spec to check as MapreduceSpec.
      kwargs: expected property values. Checks for default property value if
      particular property is not specified.
    """
    self.assertTrue(mapreduce_spec)
    self.assertEquals(kwargs.get("mapper_handler_spec", MAPPER_HANDLER_SPEC),
                      mapreduce_spec.mapper.handler_spec)
    self.assertEquals(kwargs.get("output_writer_spec", None),
                      mapreduce_spec.mapper.output_writer_spec)
    self.assertEquals(
        ENTITY_KIND,
        mapreduce_spec.mapper.params["input_reader"]["entity_kind"])
    self.assertEquals(kwargs.get("shard_count", 8),
                      mapreduce_spec.mapper.shard_count)
    self.assertEquals(kwargs.get("hooks_class_name"),
                      mapreduce_spec.hooks_class_name)
  def verify_shard_state(self, shard_state, **kwargs):
    """Checks that all shard state properties have expected values.
    Args:
      shard_state: shard state to check.
      kwargs: expected property values. Checks for default property value if
      particular property is not specified.
    """
    self.assertTrue(shard_state)
    self.assertEqual(kwargs.get("active", True), shard_state.active)
    self.assertEqual(kwargs.get("processed", 0),
                     shard_state.counters_map.get(COUNTER_MAPPER_CALLS))
    self.assertEqual(kwargs.get("result_status", None),
                     shard_state.result_status)
    self.assertEqual(kwargs.get("slice_retries", 0),
                     shard_state.slice_retries)
    self.assertEqual(kwargs.get("retries", 0),
                     shard_state.retries)
    self.assertEqual(kwargs.get("input_finished", False),
                     shard_state.input_finished)
  def verify_mapreduce_state(self, mapreduce_state, **kwargs):
    """Checks mapreduce state to have expected property values.
    Args:
      mapreduce_state: mapreduce state to check.
      kwargs: expected property values. Checks for default property value if
      particular property is not specified.
    """
    self.assertTrue(mapreduce_state)
    self.assertTrue(
        mapreduce_state.chart_url.startswith("http://chart.apis.google.com/"),
        "Wrong chart url: " + mapreduce_state.chart_url)
    self.assertEquals(kwargs.get("active", True), mapreduce_state.active)
    self.assertEquals(kwargs.get("processed", 0),
                      mapreduce_state.counters_map.get(COUNTER_MAPPER_CALLS))
    self.assertEquals(kwargs.get("result_status", None),
                      mapreduce_state.result_status)
    mapreduce_spec = mapreduce_state.mapreduce_spec
    self.verify_mapreduce_spec(mapreduce_spec, **kwargs)
  def verify_controller_task(self, task, **kwargs):
    """Checks that all update status task properties have expected values.
    Args:
      task: task to check.
      kwargs: expected property values. Checks for default if property is not
      specified.
    """
    self.assertEqual("POST", task["method"])
    # Decode the HugeTask payload the same way the real handler would.
    request = mock_webapp.MockRequest()
    request.body = base64.b64decode(task["body"])
    request.headers = dict(task["headers"])
    payload = model.HugeTask.decode_payload(request)
    mapreduce_spec = model.MapreduceSpec.from_json_str(
        payload["mapreduce_spec"])
    self.verify_mapreduce_spec(mapreduce_spec, **kwargs)
    self.assertEqual(
        "/mapreduce/controller_callback/" + mapreduce_spec.mapreduce_id,
        task["url"])
  def create_mapreduce_spec(self,
                            mapreduce_id,
                            shard_count=8,
                            mapper_handler_spec=MAPPER_HANDLER_SPEC,
                            mapper_parameters=None,
                            hooks_class_name=None,
                            output_writer_spec=None,
                            input_reader_spec=None):
    """Create a new valid mapreduce_spec.

    Also stores a matching active MapreduceState in the datastore.
    Args:
      mapreduce_id: mapreduce id.
      shard_count: number of shards in the handlers.
      mapper_handler_spec: handler specification to use for handlers.
      hooks_class_name: fully qualified name of the hooks class.
    Returns:
      new MapreduceSpec.
    """
    params = {
        "input_reader": {
            "entity_kind": __name__ + "." + TestEntity.__name__
        },
    }
    if mapper_parameters is not None:
      params.update(mapper_parameters)
    mapper_spec = model.MapperSpec(
        mapper_handler_spec,
        input_reader_spec or __name__ + ".InputReader",
        params,
        shard_count,
        output_writer_spec=output_writer_spec)
    mapreduce_spec = model.MapreduceSpec(
        "my job", mapreduce_id, mapper_spec.to_json(),
        params=map_job.JobConfig._get_default_mr_params(),
        hooks_class_name=hooks_class_name)
    # Sanity-check the freshly built spec before returning it.
    self.verify_mapreduce_spec(mapreduce_spec,
                               shard_count=shard_count,
                               mapper_handler_spec=mapper_handler_spec,
                               hooks_class_name=hooks_class_name,
                               output_writer_spec=output_writer_spec)
    state = model.MapreduceState(
        key_name=mapreduce_id,
        last_poll_time=datetime.datetime.now())
    state.mapreduce_spec = mapreduce_spec
    state.active = True
    state.shard_count = shard_count
    state.active_shards = shard_count
    state.put()
    return mapreduce_spec
  def create_shard_state(self, mapreduce_id, shard_number):
    """Creates a new valid shard state.
    Args:
      mapreduce_id: mapreduce id to create state for as string.
      shard_number: shard number as int.
    Returns:
      new ShardState.
    """
    shard_state = model.ShardState.create_new(mapreduce_id, shard_number)
    self.verify_shard_state(shard_state)
    return shard_state
  def create_and_store_shard_state(self, mapreduce_id, shard_number):
    """Creates a new valid shard state and saves it into memcache.
    Args:
      mapreduce_id: mapreduce id to create state for as string.
      shard_number: shard number as int.
    Returns:
      new ShardState.
    """
    shard_state = self.create_shard_state(mapreduce_id, shard_number)
    shard_state.put()
    return shard_state
  def key(self, entity_id):
    """Create a key for TestEntity with specified id.

    Used to shorten expected data.
    Args:
      entity_id: entity id
    Returns:
      db.Key instance with specified id for TestEntity.
    """
    return db.Key.from_path("TestEntity", entity_id)
class StartJobHandlerTest(testutil.HandlerTestBase):
  """Test handlers.StartJobHandler.
  This class mostly tests request handling and parameter parsing aspect of the
  handler.
  """
  # Canned request values shared by all tests; setUp loads them into the
  # mocked request so individual tests only override what they exercise.
  NAME = "my_job"
  HANDLER_SPEC = MAPPER_HANDLER_SPEC
  ENTITY_KIND = __name__ + "." + TestEntity.__name__
  INPUT_READER_SPEC = ("mapreduce.input_readers."
                       "DatastoreInputReader")
  OUTPUT_WRITER_SPEC = __name__ + ".TestOutputWriter"
  SHARD_COUNT = "9"
  PROCESSING_RATE = "1234"
  QUEUE = "crazy-queue"
  def setUp(self):
    """Sets up the test harness with a fully populated start_job request."""
    super(StartJobHandlerTest, self).setUp()
    self.handler = handlers.StartJobHandler()
    self.handler.initialize(mock_webapp.MockRequest(),
                            mock_webapp.MockResponse())
    self.handler.request.path = "/mapreduce/command/start_job"
    self.handler.request.set("name", self.NAME)
    self.handler.request.set("mapper_handler", self.HANDLER_SPEC)
    self.handler.request.set("mapper_input_reader", self.INPUT_READER_SPEC)
    self.handler.request.set("mapper_output_writer", self.OUTPUT_WRITER_SPEC)
    self.handler.request.set("mapper_params.shard_count", self.SHARD_COUNT)
    self.handler.request.set("mapper_params.entity_kind",
                             self.ENTITY_KIND)
    self.handler.request.set("mapper_params.processing_rate",
                             self.PROCESSING_RATE)
    self.handler.request.set("mapper_params.queue_name", self.QUEUE)
    # The handler rejects non-AJAX requests (CSRF protection), so mark the
    # request as an XMLHttpRequest by default.
    self.handler.request.headers["X-Requested-With"] = "XMLHttpRequest"
  def testCSRF(self):
    """Tests that the handler only accepts AJAX requests."""
    del self.handler.request.headers["X-Requested-With"]
    self.handler.post()
    self.assertEquals(httplib.FORBIDDEN, self.handler.response.status)
  def testSmoke(self):
    """Verifies main execution path of starting scan over several entities."""
    for _ in range(100):
      TestEntity().put()
    self.handler.post()
    tasks = self.taskqueue.GetTasks(self.QUEUE)
    # Only kickoff task should be there.
    self.assertEquals(1, len(tasks))
    task_mr_id = test_support.decode_task_payload(tasks[0]).get("mapreduce_id")
    # Verify mr id is generated.
    mapreduce_id = self.handler.json_response["mapreduce_id"]
    self.assertTrue(mapreduce_id)
    self.assertEqual(task_mr_id, mapreduce_id)
    # Verify state is created.
    state = model.MapreduceState.get_by_job_id(
        self.handler.json_response["mapreduce_id"])
    self.assertTrue(state)
    self.assertTrue(state.active)
    # Shards are only scheduled by the later kickoff task, so none are
    # active yet at this point.
    self.assertEqual(0, state.active_shards)
    # Verify mapreduce spec.
    self.assertEqual(self.HANDLER_SPEC,
                     state.mapreduce_spec.mapper.handler_spec)
    self.assertEqual(self.NAME, state.mapreduce_spec.name)
    self.assertEqual(self.INPUT_READER_SPEC,
                     state.mapreduce_spec.mapper.input_reader_spec)
    self.assertEqual(self.OUTPUT_WRITER_SPEC,
                     state.mapreduce_spec.mapper.output_writer_spec)
    self.assertEqual(int(self.SHARD_COUNT),
                     state.mapreduce_spec.mapper.shard_count)
    self.assertEqual(int(self.PROCESSING_RATE),
                     state.mapreduce_spec.mapper.params["processing_rate"])
    self.assertEqual(self.QUEUE,
                     state.mapreduce_spec.params["queue_name"])
  def testOtherApp(self):
    """Verifies a job can be started against another (trusted) app's data."""
    apiproxy_stub_map.apiproxy.GetStub("datastore_v3").SetTrusted(True)
    self.handler.request.set("mapper_params._app", "otherapp")
    TestEntity(_app="otherapp").put()
    self.handler.post()
    # Verify state is created.
    state = model.MapreduceState.get_by_job_id(
        self.handler.json_response["mapreduce_id"])
    self.assertEqual("otherapp", state.app_id)
  def testRequiredParams(self):
    """Tests that required parameters are enforced."""
    TestEntity().put()
    self.handler.post()
    # Clear each required parameter in turn, expect a failure, then restore
    # it and move on to the next one.
    self.handler.request.set("name", None)
    self.assertRaises(errors.NotEnoughArgumentsError, self.handler.handle)
    self.handler.request.set("name", "my job")
    self.handler.request.set("mapper_input_reader", None)
    self.assertRaises(errors.NotEnoughArgumentsError, self.handler.handle)
    self.handler.request.set(
        "mapper_input_reader",
        "mapreduce.input_readers.DatastoreInputReader")
    self.handler.request.set("mapper_handler", None)
    self.assertRaises(errors.NotEnoughArgumentsError, self.handler.handle)
    self.handler.request.set("mapper_handler", MAPPER_HANDLER_SPEC)
    self.handler.request.set("mapper_params.entity_kind", None)
    self.assertRaises(input_readers.BadReaderParamsError, self.handler.handle)
    self.handler.request.set("mapper_params.entity_kind",
                             (__name__ + "." + TestEntity.__name__))
    self.handler.post()
  def testParameterValidationSuccess(self):
    """Tests validating user-supplied parameters."""
    TestEntity().put()
    self.handler.request.set("mapper_params.one", ["red", "blue"])
    self.handler.request.set("mapper_params.two", "green")
    self.handler.request.set("mapper_params_validator",
                             __name__ + ".test_param_validator_success")
    self.handler.post()
    state = model.MapreduceState.get_by_job_id(
        self.handler.json_response["mapreduce_id"])
    params = state.mapreduce_spec.mapper.params
    self.assertEquals(["red", "blue"], params["one"])
    self.assertEquals("green", params["two"])
    # From the validator function
    self.assertEquals("good", params["test"])
  def testMapreduceParameters(self):
    """Tests propagation of user-supplied mapreduce parameters."""
    TestEntity().put()
    self.handler.request.set("params.one", ["red", "blue"])
    self.handler.request.set("params.two", "green")
    self.handler.request.set("params_validator",
                             __name__ + ".test_param_validator_success")
    self.handler.post()
    state = model.MapreduceState.get_by_job_id(
        self.handler.json_response["mapreduce_id"])
    params = state.mapreduce_spec.params
    self.assertEquals(["red", "blue"], params["one"])
    self.assertEquals("green", params["two"])
    # From the validator function
    self.assertEquals("good", params["test"])
  def testParameterValidationFailure(self):
    """Tests when validating user-supplied parameters fails."""
    self.handler.request.set("mapper_params_validator",
                             __name__ + ".test_param_validator_raise_exception")
    try:
      self.handler.handle()
      self.fail()
    except Exception, e:
      self.assertEquals("These params are bad", str(e))
  def testParameterValidationUnknown(self):
    """Tests the user-supplied parameter validation function cannot be found."""
    self.handler.request.set("mapper_params_validator", "does_not_exist")
    self.assertRaises(ImportError, self.handler.handle)
  def testOutputWriterValidateFails(self):
    # TestOutputWriter.validate raises when fail_writer_validate is set.
    TestEntity().put()
    self.handler.request.set("mapper_output_writer",
                             __name__ + ".TestOutputWriter")
    self.handler.request.set("mapper_params.fail_writer_validate",
                             "true")
    self.assertRaises(Exception, self.handler.handle)
class StartJobHandlerFunctionalTest(testutil.HandlerTestBase):
  """Test _start_map function.
  Since this function is often called separately from the handler as well as
  by the handler directly.
  """
  NAME = "my_job"
  # NOTE(review): attribute name is misspelled ("HANLDER"); kept as-is since
  # renaming a class attribute is an interface change.
  HANLDER_SPEC = MAPPER_HANDLER_SPEC
  ENTITY_KIND = __name__ + "." + TestEntity.__name__
  INPUT_READER_SPEC = ("mapreduce.input_readers."
                       "DatastoreInputReader")
  OUTPUT_WRITER_SPEC = __name__ + ".TestOutputWriter"
  SHARD_COUNT = "9"
  QUEUE = "crazy-queue"
  # Default mapreduce params plus test-specific overrides; "base_path"
  # determines the URL prefix expected on the enqueued kickoff task.
  MAPREDUCE_SPEC_PARAMS = map_job.JobConfig._get_default_mr_params()
  MAPREDUCE_SPEC_PARAMS.update({"foo": "bar",
                                "base_path": "/foo",
                                "queue_name": QUEUE})
  HOOKS = __name__ + ".TestHooks"
  def setUp(self):
    super(StartJobHandlerFunctionalTest, self).setUp()
    self.mapper_spec = model.MapperSpec(
        handler_spec=self.HANLDER_SPEC,
        input_reader_spec=self.INPUT_READER_SPEC,
        params={"input_reader": {"entity_kind": self.ENTITY_KIND}},
        shard_count=self.SHARD_COUNT,
        output_writer_spec=self.OUTPUT_WRITER_SPEC)
    TestHooks.reset()
  def assertSuccess(self, mr_id, hooks_class_name=None):
    """Asserts _start_map persisted state and enqueued the kickoff task.

    Args:
      mr_id: the mapreduce id returned by _start_map.
      hooks_class_name: expected hooks class name recorded in the spec, if any.
    """
    # Create spec from test setup data.
    mapreduce_spec = model.MapreduceSpec(
        name=self.NAME,
        mapreduce_id=mr_id,
        mapper_spec=self.mapper_spec.to_json(),
        params=self.MAPREDUCE_SPEC_PARAMS,
        hooks_class_name=hooks_class_name)
    # Verify state.
    state = model.MapreduceState.get_by_job_id(mr_id)
    self.assertEqual(mapreduce_spec, state.mapreduce_spec)
    self.assertTrue(state.active)
    self.assertEqual(0, state.active_shards)
    # Verify task.
    tasks = self.taskqueue.GetTasks(self.QUEUE)
    self.assertEquals(1, len(tasks))
    task = tasks[0]
    task_mr_id = test_support.decode_task_payload(task).get("mapreduce_id")
    self.assertEqual(mr_id, task_mr_id)
    # Check task headers.
    headers = dict(task["headers"])
    self.assertEqual(mr_id, headers[util._MR_ID_TASK_HEADER])
    # Bug fix: this used to be assertTrue(headers["Host"], self.host), which
    # passes the expected host as the failure *message* and only checks that
    # the header is non-empty. Compare for equality, as the sibling
    # KickOffJobHandlerTest.assertSuccess does.
    self.assertEqual(self.host, headers["Host"])
    self.assertEqual("/foo/kickoffjob_callback/" + mapreduce_spec.mapreduce_id,
                     task["url"])
  def testSmoke(self):
    # Plain start: state persisted, kickoff task enqueued.
    mr_id = handlers.StartJobHandler._start_map(
        self.NAME, self.mapper_spec,
        mapreduce_params=self.MAPREDUCE_SPEC_PARAMS,
        queue_name=self.QUEUE)
    self.assertSuccess(mr_id)
  def testStartWithOpenedTxn(self):
    # Starting inside a caller-opened XG transaction must work.
    @db.transactional(xg=True)
    def txn():
      # Four dummy entities to fill transaction.
      for _ in range(4):
        TestEntity().put()
      return handlers.StartJobHandler._start_map(
          self.NAME, self.mapper_spec,
          mapreduce_params=self.MAPREDUCE_SPEC_PARAMS,
          queue_name=self.QUEUE,
          in_xg_transaction=True)
    mr_id = txn()
    self.assertSuccess(mr_id)
  def testStartWithIndependentTxn(self):
    """Tests MR uses independent txn."""
    # Tests MR txn doesn't interfere with outer one.
    @db.transactional()
    def txn():
      # Put a dummy entity to fill the transaction.
      TestEntity().put()
      return handlers.StartJobHandler._start_map(
          self.NAME, self.mapper_spec,
          mapreduce_params=self.MAPREDUCE_SPEC_PARAMS,
          queue_name=self.QUEUE,
          in_xg_transaction=False)
    mr_id = txn()
    self.assertSuccess(mr_id)
  def testWithHooks(self):
    # Hooks class is recorded in the spec and used to enqueue the kickoff.
    mr_id = handlers.StartJobHandler._start_map(
        self.NAME, self.mapper_spec,
        mapreduce_params=self.MAPREDUCE_SPEC_PARAMS,
        queue_name=self.QUEUE,
        hooks_class_name=self.HOOKS)
    self.assertSuccess(mr_id, self.HOOKS)
    self.assertEqual(1, len(TestHooks.enqueue_kickoff_task_calls))
  def testOtherApp(self):
    # _app is propagated into the persisted MapreduceState.
    mr_id = handlers.StartJobHandler._start_map(
        self.NAME, self.mapper_spec,
        mapreduce_params=self.MAPREDUCE_SPEC_PARAMS,
        queue_name=self.QUEUE,
        _app="otherapp")
    self.assertSuccess(mr_id)
    state = model.MapreduceState.get_by_job_id(mr_id)
    self.assertEqual("otherapp", state.app_id)
  def testHandlerUnknown(self):
    """Tests when the handler function cannot be found."""
    self.mapper_spec.handler_spec = "does_not_exists"
    self.assertRaises(ImportError, handlers.StartJobHandler._start_map,
                      self.NAME, self.mapper_spec, self.MAPREDUCE_SPEC_PARAMS,
                      queue_name=self.QUEUE)
  def testInputReaderUnknown(self):
    """Tests when the input reader function cannot be found."""
    self.mapper_spec.input_reader_spec = "does_not_exists"
    self.assertRaises(ImportError, handlers.StartJobHandler._start_map,
                      self.NAME, self.mapper_spec, self.MAPREDUCE_SPEC_PARAMS,
                      queue_name=self.QUEUE)
  def testInvalidOutputWriter(self):
    """Tests setting output writer parameter."""
    self.mapper_spec.output_writer_spec = "does_not_exists"
    self.assertRaises(ImportError, handlers.StartJobHandler._start_map,
                      self.NAME, self.mapper_spec, self.MAPREDUCE_SPEC_PARAMS,
                      queue_name=self.QUEUE)
class KickOffJobHandlerTest(testutil.HandlerTestBase):
  """Test handlers.KickOffJobHandler."""
  NAME = "my_job"
  # NOTE(review): attribute name is misspelled ("HANLDER"); kept as-is since
  # renaming a class attribute is an interface change.
  HANLDER_SPEC = MAPPER_HANDLER_SPEC
  ENTITY_KIND = __name__ + "." + TestEntity.__name__
  INPUT_READER_SPEC = __name__ + ".InputReader"
  OUTPUT_WRITER_SPEC = __name__ + ".TestOutputWriter"
  SHARD_COUNT = "9"
  QUEUE = "crazy-queue"
  MAPREDUCE_SPEC_PARAMS = map_job.JobConfig._get_default_mr_params()
  MAPREDUCE_SPEC_PARAMS.update({"foo": "bar",
                                "base_path": parameters.config.BASE_PATH,
                                "queue_name": QUEUE})
  HOOKS = __name__ + ".TestHooks"
  def setUp(self):
    super(KickOffJobHandlerTest, self).setUp()
    TestHooks.reset()
    InputReader.reset()
    TestOutputWriter.reset()
    # Snapshot reschedule so tests that stub it out (testDropGracefully) can
    # be undone in tearDown.
    self._original_reschedule = handlers.ControllerCallbackHandler.reschedule
  def tearDown(self):
    super(KickOffJobHandlerTest, self).tearDown()
    handlers.ControllerCallbackHandler.reschedule = self._original_reschedule
  def createDummyHandler(self):
    """Creates a KickOffJobHandler wired to a mocked request/response."""
    self.handler = handlers.KickOffJobHandler()
    self.mapreduce_id = "foo_id"
    request = mock_webapp.MockRequest()
    request.headers["X-AppEngine-QueueName"] = self.QUEUE
    request.path = "/mapreduce/kickoff_callback/" + self.mapreduce_id
    self.handler.initialize(request,
                            mock_webapp.MockResponse())
  def testInvalidMRState(self):
    # A missing or inactive MapreduceState must be a no-op (no tasks queued).
    self.createDummyHandler()
    # No mr_state exists.
    self.handler.request.set("mapreduce_id", self.mapreduce_id)
    self.handler.post()
    self.assertEqual(0, len(self.taskqueue.GetTasks(self.QUEUE)))
    # mr_state is not active.
    state = model.MapreduceState.create_new(self.mapreduce_id)
    state.active = False
    state.put()
    self.handler.post()
    self.assertEqual(0, len(self.taskqueue.GetTasks(self.QUEUE)))
  def setUpValidState(self, hooks_class_name=None):
    """Starts a real mapreduce over 10 entities; stores its id in self.mr_id.

    Args:
      hooks_class_name: optional fully qualified hooks class name.
    """
    self.mapper_spec = model.MapperSpec(
        handler_spec=self.HANLDER_SPEC,
        input_reader_spec=self.INPUT_READER_SPEC,
        params={"entity_kind": self.ENTITY_KIND},
        shard_count=self.SHARD_COUNT,
        output_writer_spec=self.OUTPUT_WRITER_SPEC)
    for _ in range(10):
      TestEntity().put()
    # Use StartJobHandler for setup.
    self.mr_id = handlers.StartJobHandler._start_map(
        self.NAME, self.mapper_spec,
        mapreduce_params=self.MAPREDUCE_SPEC_PARAMS,
        queue_name=self.QUEUE,
        hooks_class_name=hooks_class_name)
  def assertSuccess(self):
    """Asserts kickoff created shard states and worker/controller tasks."""
    # Verify states and that input reader split has been called.
    state = model.MapreduceState.get_by_job_id(self.mr_id)
    self.assertTrue(state.active)
    # Bug fix: this was assertTrue(int(self.SHARD_COUNT),
    # state.active_shards); assertTrue treats the second argument as the
    # failure *message*, so the intended comparison was never made (the
    # correct form appears in testGetInputReaders below).
    self.assertEqual(int(self.SHARD_COUNT), state.active_shards)
    shard_states = list(model.ShardState.find_all_by_mapreduce_state(state))
    self.assertEqual(int(self.SHARD_COUNT), len(shard_states))
    for ss in shard_states:
      self.assertTrue(ss.active)
    # Verify tasks.
    tasks = self.taskqueue.GetTasks(self.QUEUE)
    worker_tasks = 0
    controller_tasks = 0
    for task in tasks:
      self.assertEqual(self.QUEUE, task["queue_name"])
      # Check task headers.
      headers = dict(task["headers"])
      self.assertEqual(self.mr_id, headers[util._MR_ID_TASK_HEADER])
      self.assertEqual(self.host, headers["Host"])
      if task["url"].startswith("/mapreduce/worker_callback"):
        worker_tasks += 1
      if task["url"].startswith("/mapreduce/controller_callback"):
        controller_tasks += 1
    self.assertEqual(int(self.SHARD_COUNT), worker_tasks)
    self.assertEqual(1, controller_tasks)
  def testSmoke(self):
    self.setUpValidState()
    test_support.execute_all_tasks(self.taskqueue, self.QUEUE)
    self.assertSuccess()
    # Verify output writers has been created.
    self.assertEqual(
        ["init_job", "create-0", "create-1", "create-2", "create-3",
         "create-4", "create-5", "create-6", "create-7", "create-8"],
        TestOutputWriter.events)
  def testDropGracefully(self):
    # If kickoff keeps failing, the job must be failed and cleaned up rather
    # than retried forever.
    self.setUpValidState()
    def always_fail(*args, **kwds):
      raise Exception("Raise an exception for test.")
    handlers.ControllerCallbackHandler.reschedule = always_fail
    test_support.execute_until_empty(self.taskqueue, self.QUEUE)
    # Final state is set correctly.
    state = model.MapreduceState.get_by_job_id(self.mr_id)
    self.assertFalse(state.active)
    self.assertEqual(model.MapreduceState.RESULT_FAILED, state.result_status)
    # Abort command is issued.
    self.assertTrue(model.MapreduceControl.get_key_by_job_id(self.mr_id))
    # If a shard was started, then it has been aborted.
    shard_states = list(model.ShardState.find_all_by_mapreduce_state(state))
    for ss in shard_states:
      self.assertFalse(ss.active)
      self.assertEqual(model.ShardState.RESULT_ABORTED, ss.result_status)
    # Cleanup was called.
    trash = list(model._HugeTaskPayload.all().ancestor(state).run())
    self.assertFalse(trash)
  def testWithHooks(self):
    self.setUpValidState(self.HOOKS)
    test_support.execute_all_tasks(self.taskqueue, self.QUEUE)
    self.assertSuccess()
    self.assertEqual(1, len(TestHooks.enqueue_controller_task_calls))
    self.assertEqual(int(self.SHARD_COUNT),
                     len(TestHooks.enqueue_worker_task_calls))
  def testNoInput(self):
    # A reader producing no input should complete the job immediately.
    self.INPUT_READER_SPEC = __name__ + ".EmptyInputReader"
    self.setUpValidState()
    test_support.execute_all_tasks(self.taskqueue, self.QUEUE)
    state = model.MapreduceState.get_by_job_id(self.mr_id)
    self.assertFalse(state.active)
    self.assertEqual(model.MapreduceState.RESULT_SUCCESS, state.result_status)
  def testGetInputReaders(self):
    self.createDummyHandler()
    self.setUpValidState()
    state = model.MapreduceState.get_by_job_id(self.mr_id)
    readers, serialized_readers_entity = (
        self.handler._get_input_readers(state))
    self.assertEqual(int(self.SHARD_COUNT), len(readers))
    self.assertEqual(int(self.SHARD_COUNT), state.active_shards)
    self.assertEqual(int(self.SHARD_COUNT),
                     state.mapreduce_spec.mapper.shard_count)
    self.assertTrue(self.handler._save_states(state, serialized_readers_entity))
    # Call again to test idempotency.
    new_readers, new_serialized_readers_entity = (
        self.handler._get_input_readers(state))
    # Serialized new readers should be the same as serialized old ones.
    self.assertEqual(serialized_readers_entity.payload,
                     new_serialized_readers_entity.payload)
    self.assertEqual(json.dumps([i.to_json_str() for i in new_readers]),
                     serialized_readers_entity.payload)
  def testSaveState(self):
    self.createDummyHandler()
    self.setUpValidState()
    state = model.MapreduceState.get_by_job_id(self.mr_id)
    _, serialized_readers_entity = (
        self.handler._get_input_readers(state))
    self.assertTrue(self.handler._save_states(state, serialized_readers_entity))
    # Call again to test idempotency.
    self.assertEqual(
        None, self.handler._save_states(state, serialized_readers_entity))
  def testScheduleShards(self):
    self.createDummyHandler()
    self.setUpValidState()
    self.taskqueue.FlushQueue(self.QUEUE)
    state = model.MapreduceState.get_by_job_id(self.mr_id)
    readers, _ = (
        self.handler._get_input_readers(state))
    self.handler._schedule_shards(state.mapreduce_spec, readers, self.QUEUE,
                                  "/foo", state)
    shard_states = list(model.ShardState.find_all_by_mapreduce_state(state))
    self.assertEqual(int(self.SHARD_COUNT), len(shard_states))
    for ss in shard_states:
      self.assertTrue(ss.active)
    # Verify output writers has been created.
    self.assertEqual(
        ["create-0", "create-1", "create-2", "create-3",
         "create-4", "create-5", "create-6", "create-7", "create-8"],
        TestOutputWriter.events)
    # Verify tasks.
    tasks = self.taskqueue.GetTasks(self.QUEUE)
    worker_tasks = 0
    for task, ss in zip(tasks, shard_states):
      self.assertEqual("/foo/worker_callback/" + ss.shard_id, task["url"])
      worker_tasks += 1
    self.assertEqual(int(self.SHARD_COUNT), worker_tasks)
    # Call again to test idempotency.
    self.handler._schedule_shards(state.mapreduce_spec, readers, self.QUEUE,
                                  "/foo", state)
    self.assertEqual(int(self.SHARD_COUNT),
                     len(self.taskqueue.GetTasks(self.QUEUE)))
    new_shard_states = model.ShardState.find_all_by_mapreduce_state(state)
    for o, n in zip(shard_states, new_shard_states):
      self.assertEqual(o, n)
class MapperWorkerCallbackHandlerLeaseTest(testutil.HandlerTestBase):
  """Test lease related logics of handlers.MapperWorkerCallbackHandler.
  These tests creates a WorkerHandler for the same shard.
  WorkerHandler gets a payload that's (in)consistent with datastore's
  ShardState in some way.
  """
  # This shard's number.
  SHARD_NUMBER = 1
  # Current slice id in datastore.
  CURRENT_SLICE_ID = 3
  CURRENT_REQUEST_ID = "20150131a"
  # Request id from the previous slice execution.
  PREVIOUS_REQUEST_ID = "19991231a"
  def setUp(self):
    super(MapperWorkerCallbackHandlerLeaseTest, self).setUp()
    os.environ["REQUEST_LOG_ID"] = self.CURRENT_REQUEST_ID
    self.mr_spec = None
    self._init_job()
    self.shard_id = None
    self.shard_state = None
    self._init_shard()
    self.mr_state = None
    self._init_mr_state()
    self._original_duration = parameters.config._SLICE_DURATION_SEC
    # Make sure handler can process at most one entity and thus
    # shard will still be in active state after one call.
    parameters.config._SLICE_DURATION_SEC = 0
  def tearDown(self):
    parameters.config._SLICE_DURATION_SEC = self._original_duration
    super(MapperWorkerCallbackHandlerLeaseTest, self).tearDown()
  def _init_job(self, handler_spec=MAPPER_HANDLER_SPEC):
    """Init job specs.

    Args:
      handler_spec: fully qualified name of the map handler to use.
    """
    mapper_spec = model.MapperSpec(
        handler_spec=handler_spec,
        input_reader_spec=__name__ + "." + InputReader.__name__,
        params={"input_reader": {"entity_kind": ENTITY_KIND}},
        shard_count=self.SHARD_NUMBER)
    self.mr_spec = model.MapreduceSpec(
        name="mapreduce_name",
        mapreduce_id="mapreduce_id",
        mapper_spec=mapper_spec.to_json(),
        params=map_job.JobConfig._get_default_mr_params())
  def _init_shard(self):
    """Init shard state at CURRENT_SLICE_ID."""
    self.shard_state = model.ShardState.create_new(
        self.mr_spec.mapreduce_id,
        shard_number=self.SHARD_NUMBER)
    self.shard_state.slice_id = self.CURRENT_SLICE_ID
    self.shard_state.put()
    self.shard_id = self.shard_state.shard_id
  def _init_mr_state(self):
    """Persist a MapreduceState matching self.mr_spec."""
    self.mr_state = model.MapreduceState.create_new(mapreduce_id="mapreduce_id")
    self.mr_state.mapreduce_spec = self.mr_spec
    self.mr_state.put()
  def _create_handler(self, slice_id=CURRENT_SLICE_ID):
    """Create a handler instance with payload for a particular slice.

    Args:
      slice_id: slice id carried by the task payload; vary it to simulate
        stale or future tasks relative to the datastore ShardState.

    Returns:
      A (handler, transient_shard_state) tuple ready for post().
    """
    # Reset map handler.
    TestHandler.reset()
    # Create input reader and test entity.
    InputReader.reset()
    TestEntity().put()
    TestEntity().put()
    reader_iter = db_iters.RangeIteratorFactory.create_key_ranges_iterator(
        key_ranges.KeyRangesFactory.create_from_list([key_range.KeyRange()]),
        model.QuerySpec("ENTITY_KIND", model_class_path=ENTITY_KIND),
        db_iters.KeyRangeModelIterator)
    # Create worker handler.
    handler = handlers.MapperWorkerCallbackHandler()
    request = mock_webapp.MockRequest()
    request.headers[model.HugeTask.PAYLOAD_VERSION_HEADER] = (
        model.HugeTask.PAYLOAD_VERSION)
    request.headers["X-AppEngine-QueueName"] = "default"
    request.headers[util._MR_ID_TASK_HEADER] = self.mr_spec.mapreduce_id
    request.headers[util._MR_SHARD_ID_TASK_HEADER] = self.shard_id
    handler.initialize(request, mock_webapp.MockResponse())
    # Create transient shard state.
    tstate = model.TransientShardState(
        base_path="base_path",
        mapreduce_spec=self.mr_spec,
        shard_id=self.shard_state.shard_id,
        slice_id=slice_id,
        input_reader=InputReader(reader_iter),
        initial_input_reader=InputReader(reader_iter))
    # Set request according to transient shard state.
    worker_params = tstate.to_dict()
    for param_name in worker_params:
      handler.request.set(param_name, worker_params[param_name])
    return handler, tstate
  def assertNoEffect(self, no_shard_state=False):
    """Assert shard state and taskqueue didn't change.

    Args:
      no_shard_state: when True, the shard state is expected to be absent.
    """
    stub = apiproxy_stub_map.apiproxy.GetStub("taskqueue")
    self.assertEqual(0, len(stub.GetTasks("default")))
    shard_state = model.ShardState.get_by_shard_id(self.shard_state.shard_id)
    if not no_shard_state:
      assert shard_state
    if shard_state:
      # sync auto_now field
      shard_state.update_time = self.shard_state.update_time
      self.assertEqual(str(self.shard_state), str(shard_state))
  def testStateNotFound(self):
    handler, _ = self._create_handler()
    self.shard_state.delete()
    handler.post()
    self.assertNoEffect(no_shard_state=True)
    self.assertEqual(None, model.ShardState.get_by_shard_id(self.shard_id))
  def testStateNotActive(self):
    handler, _ = self._create_handler()
    self.shard_state.active = False
    self.shard_state.put()
    handler.post()
    self.assertNoEffect()
  def testOldTask(self):
    # A task for an already-completed slice must be dropped silently.
    handler, _ = self._create_handler(slice_id=self.CURRENT_SLICE_ID - 1)
    handler.post()
    self.assertNoEffect()
  def testFutureTask(self):
    # A task ahead of the datastore slice id is retried via 503.
    handler, _ = self._create_handler(slice_id=self.CURRENT_SLICE_ID + 1)
    handler.post()
    self.assertEqual(httplib.SERVICE_UNAVAILABLE, handler.response.status)
  def testLeaseHasNotEnd(self):
    self.shard_state.slice_start_time = datetime.datetime.now()
    self.shard_state.put()
    handler, _ = self._create_handler()
    with mock.patch("datetime.datetime", autospec=True) as dt:
      # One millisecond after.
      dt.now.return_value = (self.shard_state.slice_start_time +
                             datetime.timedelta(milliseconds=1))
      self.assertEqual(
          math.ceil(parameters._LEASE_DURATION_SEC),
          handler._wait_time(self.shard_state,
                             parameters._LEASE_DURATION_SEC))
      handler.post()
      self.assertEqual(httplib.SERVICE_UNAVAILABLE, handler.response.status)
  def testRequestHasNotEnd(self):
    # Previous request's lease has timed out but the request has not.
    now = datetime.datetime.now()
    old = (now -
           datetime.timedelta(seconds=parameters._LEASE_DURATION_SEC + 1))
    self.shard_state.slice_start_time = old
    self.shard_state.slice_request_id = self.PREVIOUS_REQUEST_ID
    self.shard_state.put()
    handler, _ = self._create_handler()
    # Lease has ended.
    # Bug fix: the now-provider used to be passed as assertEqual's *msg*
    # argument instead of as _wait_time's third parameter (compare the
    # correct call a few lines below).
    self.assertEqual(0,
                     handler._wait_time(self.shard_state,
                                        parameters._LEASE_DURATION_SEC,
                                        lambda: now))
    # Logs API doesn't think the request has ended.
    self.assertFalse(handler._has_old_request_ended(self.shard_state))
    # Request has not timed out.
    self.assertTrue(handler._wait_time(
        self.shard_state,
        parameters._MAX_LEASE_DURATION_SEC,
        lambda: now))
    handler.post()
    self.assertEqual(httplib.SERVICE_UNAVAILABLE, handler.response.status)
  def testRequestHasTimedOut(self):
    slice_start_time = datetime.datetime(2000, 1, 1)
    self.shard_state.slice_start_time = slice_start_time
    self.shard_state.slice_request_id = self.PREVIOUS_REQUEST_ID
    self.shard_state.put()
    handler, tstate = self._create_handler()
    # Lease has ended.
    self.assertEqual(0,
                     handler._wait_time(self.shard_state,
                                        parameters._LEASE_DURATION_SEC))
    # Logs API doesn't think the request has ended.
    self.assertFalse(handler._has_old_request_ended(self.shard_state))
    # But request has timed out.
    self.assertEqual(0, handler._wait_time(
        self.shard_state, parameters._MAX_LEASE_DURATION_SEC))
    # acquire lease should succeed.
    handler._try_acquire_lease(self.shard_state, tstate)
    shard_state = model.ShardState.get_by_shard_id(self.shard_id)
    self.assertTrue(shard_state.active)
    self.assertEqual(self.CURRENT_SLICE_ID, shard_state.slice_id)
    self.assertEqual(self.CURRENT_REQUEST_ID, shard_state.slice_request_id)
    self.assertTrue(shard_state.slice_start_time > slice_start_time)
  def testContentionWhenAcquireLease(self):
    # Shard has moved on AFTER we got shard state.
    self.shard_state.slice_id += 1
    self.shard_state.put()
    # Revert in memory shard state.
    self.shard_state.slice_id -= 1
    handler, tstate = self._create_handler()
    self.assertEqual(
        handler._TASK_DIRECTIVE.RETRY_TASK,
        # Use old shard state.
        handler._try_acquire_lease(self.shard_state, tstate))
  def testAcquireLeaseSuccess(self):
    # lease acquired a long time ago.
    slice_start_time = datetime.datetime(2000, 1, 1)
    self.shard_state.slice_start_time = slice_start_time
    self.shard_state.slice_request_id = self.PREVIOUS_REQUEST_ID
    self.shard_state.put()
    handler, tstate = self._create_handler()
    with mock.patch("google.appengine.api"
                    ".logservice.fetch") as fetch:
      # Logs API reports the previous request as finished, so the lease is
      # free to take.
      mock_request_log = mock.Mock()
      mock_request_log.finished = True
      fetch.return_value = [mock_request_log]
      handler._try_acquire_lease(self.shard_state, tstate)
    shard_state = model.ShardState.get_by_shard_id(self.shard_id)
    self.assertTrue(shard_state.active)
    self.assertEqual(self.CURRENT_SLICE_ID, shard_state.slice_id)
    self.assertEqual(self.CURRENT_REQUEST_ID, shard_state.slice_request_id)
    self.assertTrue(shard_state.slice_start_time > slice_start_time)
    self.assertEqual(shard_state, self.shard_state)
  def testLeaseFreedOnSuccess(self):
    self.shard_state.slice_start_time = datetime.datetime(2000, 1, 1)
    self.shard_state.slice_request_id = self.PREVIOUS_REQUEST_ID
    self.shard_state.put()
    handler, _ = self._create_handler()
    with mock.patch("google.appengine.api"
                    ".logservice.fetch") as fetch:
      mock_request_log = mock.Mock()
      mock_request_log.finished = True
      fetch.return_value = [mock_request_log]
      handler.post()
    shard_state = model.ShardState.get_by_shard_id(self.shard_id)
    self.assertTrue(shard_state.active)
    # Slice moved on.
    self.assertEquals(self.CURRENT_SLICE_ID + 1, shard_state.slice_id)
    # Lease is freed.
    self.assertFalse(shard_state.slice_start_time)
    self.assertFalse(shard_state.slice_request_id)
    stub = apiproxy_stub_map.apiproxy.GetStub("taskqueue")
    self.assertEqual(1, len(stub.GetTasks("default")))
  def testLeaseFreedOnSliceRetry(self):
    # Reinitialize with faulty map function.
    self._init_job(__name__ + "." + test_handler_raise_exception.__name__)
    self._init_shard()
    handler, _ = self._create_handler()
    handler.post()
    self.assertEqual(httplib.SERVICE_UNAVAILABLE, handler.response.status)
    shard_state = model.ShardState.get_by_shard_id(self.shard_id)
    self.assertTrue(shard_state.active)
    # Slice stays the same.
    self.assertEquals(self.CURRENT_SLICE_ID, shard_state.slice_id)
    # Lease is freed.
    self.assertFalse(shard_state.slice_start_time)
    self.assertFalse(shard_state.slice_request_id)
    # Slice retry is increased.
    self.assertEqual(self.shard_state.slice_retries + 1,
                     shard_state.slice_retries)
  def testLeaseFreedOnTaskqueueUnavailable(self):
    handler, _ = self._create_handler()
    with mock.patch("mapreduce"
                    ".handlers.MapperWorkerCallbackHandler._add_task") as add:
      add.side_effect = taskqueue.Error
      self.assertRaises(taskqueue.Error, handler.post)
    # No new task in taskqueue.
    stub = apiproxy_stub_map.apiproxy.GetStub("taskqueue")
    self.assertEqual(0, len(stub.GetTasks("default")))
    shard_state = model.ShardState.get_by_shard_id(self.shard_state.shard_id)
    self.assertTrue(shard_state.acquired_once)
    # Besides these fields, all other fields should be the same.
    shard_state.acquired_once = self.shard_state.acquired_once
    shard_state.update_time = self.shard_state.update_time
    self.assertEqual(str(self.shard_state), str(shard_state))
class MapperWorkerCallbackHandlerTest(MapreduceHandlerTestBase):
"""Test handlers.MapperWorkerCallbackHandler."""
  def setUp(self):
    """Sets up the test harness."""
    MapreduceHandlerTestBase.setUp(self)
    # Snapshot globals/class attributes that individual tests monkeypatch so
    # tearDown can restore them.
    self.original_task_add = taskqueue.Task.add
    self.original_slice_duration = parameters.config._SLICE_DURATION_SEC
    self.original_task_max_data_processing_attempts = (
        parameters.config.TASK_MAX_DATA_PROCESSING_ATTEMPTS)
    self.original_supports_slice_recovery = (
        TestOutputWriter._supports_slice_recovery)
    self.init()
  def tearDown(self):
    """Restores everything setUp snapshotted and clears injected faults."""
    handlers._TEST_INJECTED_FAULTS.clear()
    taskqueue.Task.add = self.original_task_add
    parameters.config._SLICE_DURATION_SEC = self.original_slice_duration
    parameters.config.TASK_MAX_DATA_PROCESSING_ATTEMPTS = (
        self.original_task_max_data_processing_attempts)
    TestOutputWriter._supports_slice_recovery = (
        self.original_supports_slice_recovery)
    MapreduceHandlerTestBase.tearDown(self)
  def init(self,
           mapper_handler_spec=MAPPER_HANDLER_SPEC,
           mapper_parameters=None,
           hooks_class_name=None,
           output_writer_spec=None,
           shard_count=8,
           mr_params=None,
           shard_retries=0):
    """Init everything needed for testing worker callbacks.

    Builds a mapreduce spec, a stored shard state, a transient shard state,
    and a MapperWorkerCallbackHandler wired to a mocked request carrying the
    transient state.

    Args:
      mapper_handler_spec: handler specification to use in test.
      mapper_parameters: mapper parameters dict to merge into the spec.
      hooks_class_name: fully qualified name of the hooks class to use in test.
      output_writer_spec: fully qualified name of the output writer class.
      shard_count: number of shards recorded in the mapreduce spec.
      mr_params: extra mapreduce-level params to merge into the spec.
      shard_retries: number of prior retries recorded on the shard state.
    """
    InputReader.reset()
    self.mapreduce_id = "mapreduce0"
    self.mapreduce_spec = self.create_mapreduce_spec(
        self.mapreduce_id,
        shard_count,
        mapper_handler_spec=mapper_handler_spec,
        hooks_class_name=hooks_class_name,
        output_writer_spec=output_writer_spec,
        mapper_parameters=mapper_parameters)
    if mr_params:
      self.mapreduce_spec.params.update(mr_params)
    self.shard_number = 1
    self.slice_id = 0
    self.shard_state = self.create_and_store_shard_state(
        self.mapreduce_id, self.shard_number)
    self.shard_state.retries = shard_retries
    self.shard_state.slice_id = self.slice_id
    self.shard_state.put()
    self.shard_id = self.shard_state.shard_id
    output_writer = None
    if self.mapreduce_spec.mapper.output_writer_class():
      output_writer_cls = self.mapreduce_spec.mapper.output_writer_class()
      output_writer = output_writer_cls.create(self.mapreduce_spec,
                                               self.shard_number,
                                               shard_retries + 1)
    reader_iter = db_iters.RangeIteratorFactory.create_key_ranges_iterator(
        key_ranges.KeyRangesFactory.create_from_list([key_range.KeyRange()]),
        model.QuerySpec("ENTITY_KIND", model_class_path=ENTITY_KIND),
        db_iters.KeyRangeModelIterator)
    self.transient_state = model.TransientShardState(
        "/mapreduce",
        self.mapreduce_spec,
        self.shard_id,
        self.slice_id,
        InputReader(reader_iter),
        InputReader(reader_iter),
        output_writer=output_writer,
        retries=shard_retries)
    self.handler = handlers.MapperWorkerCallbackHandler()
    self.handler._time = MockTime.time
    request = mock_webapp.MockRequest()
    request.headers["X-AppEngine-QueueName"] = "default"
    request.headers["X-AppEngine-TaskName"] = "foo-task-name"
    request.path = "/mapreduce/worker_callback/" + self.shard_id
    request.headers[util._MR_ID_TASK_HEADER] = self.mapreduce_id
    request.headers[util._MR_SHARD_ID_TASK_HEADER] = self.shard_id
    request.headers[model.HugeTask.PAYLOAD_VERSION_HEADER] = (
        model.HugeTask.PAYLOAD_VERSION)
    self.request = request
    self.response = mock_webapp.MockResponse()
    self.handler.initialize(self.request, self.response)
    # Load the serialized transient state into the mocked request, exactly
    # as the taskqueue payload would.
    worker_params = self.transient_state.to_dict()
    for param_name in worker_params:
      self.handler.request.set(param_name, worker_params[param_name])
  def _handle_request(self, expect_finalize=True):
    """Handles request and optionally finalizes the output stream.

    Args:
      expect_finalize: if True, expect the first post() to leave the shard
        active with exactly one continuation task enqueued for the finalize
        slice, then run that second slice and expect the shard to complete.
        If False, expect the first post() to enqueue nothing.
    """
    self.assertEquals(0, len(self.taskqueue.GetTasks("default")))
    self.handler.post()
    if not expect_finalize:
      self.assertEquals(0, len(self.taskqueue.GetTasks("default")))
      return
    # We processed all the input but we still need to finalize (on a separate
    # slice).
    shard_state = model.ShardState.get_by_shard_id(self.shard_id)
    self.assertTrue(shard_state.input_finished)
    self.assertTrue(shard_state.active)
    tasks = self.taskqueue.GetTasks("default")
    self.assertEquals(1, len(tasks))
    self.verify_shard_task(
        tasks[0], self.shard_id, slice_id=1, verify_spec=False)
    self.taskqueue.FlushQueue("default")
    # Run the finalize slice; afterwards the shard must be inactive.
    self.handler.request.set("slice_id", 1)
    self.handler.post()
    shard_state = model.ShardState.get_by_shard_id(self.shard_id)
    self.assertFalse(shard_state.active)
  def testDecodingPayloadFailed(self):
    """Shard is marked failed when the task payload cannot be decoded."""
    shard_state = model.ShardState.get_by_shard_id(self.shard_id)
    self.assertTrue(shard_state.active)
    # Remove the payload version header so the handler rejects the task.
    del self.request.headers[model.HugeTask.PAYLOAD_VERSION_HEADER]
    self.handler.initialize(self.request, self.response)
    self._handle_request(expect_finalize=False)
    shard_state = model.ShardState.get_by_shard_id(self.shard_id)
    self.assertFalse(shard_state.active)
    self.assertEqual(model.ShardState.RESULT_FAILED, shard_state.result_status)
  def testSmoke(self):
    """Test main execution path of entity scanning.

    No processing rate limit.
    """
    e1 = TestEntity()
    e1.put()
    e2 = TestEntity()
    e2.put()
    self._handle_request()
    self.assertEquals([str(e1.key()), str(e2.key())],
                      TestHandler.processed_keys)
    # we should have finished
    self.verify_shard_state(
        model.ShardState.get_by_shard_id(self.shard_id),
        active=False, processed=2, input_finished=True,
        result_status=model.ShardState.RESULT_SUCCESS)
    # No continuation task remains after completion.
    tasks = self.taskqueue.GetTasks("default")
    self.assertEquals(0, len(tasks))
def testMaintainLCNoOp(self):
shard_ctx = mock.MagicMock()
slice_ctx = mock.MagicMock()
# Do nothing if the object doesn't implement shard_life_cycle interface.
self.handler._maintain_LC(object(), 1, shard_ctx=shard_ctx,
slice_ctx=slice_ctx)
def testMaintainLCBeginShard(self):
obj = mock.Mock(spec=shard_life_cycle._ShardLifeCycle)
shard_ctx = mock.MagicMock()
slice_ctx = mock.MagicMock()
self.handler._maintain_LC(obj, 0, shard_ctx=shard_ctx,
slice_ctx=slice_ctx)
self.assertEqual(
obj.mock_calls,
[mock.call.begin_shard(shard_ctx),
mock.call.begin_slice(slice_ctx)])
def testMaintainLCEndShard(self):
obj = mock.Mock(spec=shard_life_cycle._ShardLifeCycle)
shard_ctx = mock.MagicMock()
slice_ctx = mock.MagicMock()
self.handler._maintain_LC(obj, 0, begin_slice=False, last_slice=True,
shard_ctx=shard_ctx, slice_ctx=slice_ctx)
self.assertEqual(
obj.mock_calls,
[mock.call.end_slice(slice_ctx),
mock.call.end_shard(shard_ctx)])
def testMaintainLCBeginSlice(self):
obj = mock.Mock(spec=shard_life_cycle._ShardLifeCycle)
slice_ctx = mock.MagicMock()
shard_ctx = mock.MagicMock()
self.handler._maintain_LC(obj, 1, slice_ctx=slice_ctx,
shard_ctx=shard_ctx)
self.assertEqual(
obj.mock_calls,
[mock.call.begin_slice(slice_ctx)])
def testMaintainLCEndSlice(self):
obj = mock.Mock(spec=shard_life_cycle._ShardLifeCycle)
slice_ctx = mock.MagicMock()
shard_ctx = mock.MagicMock()
self.handler._maintain_LC(obj, 1, begin_slice=False,
shard_ctx=shard_ctx, slice_ctx=slice_ctx)
self.assertEqual(
obj.mock_calls,
[mock.call.end_slice(slice_ctx)])
def testLCBeginSliceCallOrdering(self):
parent = mock.MagicMock()
parent.handler = mock.Mock(spec=shard_life_cycle._ShardLifeCycle)
parent.input_reader = mock.Mock(spec=shard_life_cycle._ShardLifeCycle)
parent.output_writer = mock.Mock(spec=shard_life_cycle._ShardLifeCycle)
TState = collections.namedtuple(
"TState", ["handler", "input_reader", "output_writer"])
tstate = TState(parent.handler, parent.input_reader, parent.output_writer)
self.handler._lc_start_slice(tstate, 42)
parent.assert_has_calls([mock.call.output_writer.begin_slice(None),
mock.call.input_reader.begin_slice(None),
mock.call.handler.begin_slice(None)])
def testLCEndSliceCallOrdering(self):
parent = mock.MagicMock()
parent.handler = mock.Mock(spec=shard_life_cycle._ShardLifeCycle)
parent.input_reader = mock.Mock(spec=shard_life_cycle._ShardLifeCycle)
parent.output_writer = mock.Mock(spec=shard_life_cycle._ShardLifeCycle)
TState = collections.namedtuple(
"TState", ["handler", "input_reader", "output_writer"])
tstate = TState(parent.handler, parent.input_reader, parent.output_writer)
self.handler._lc_end_slice(tstate, 42)
parent.assert_has_calls([mock.call.handler.end_slice(None),
mock.call.input_reader.end_slice(None),
mock.call.output_writer.end_slice(None)])
  def testCompletedState(self):
    """An already-completed shard ignores its input and enqueues nothing."""
    self.shard_state.input_finished = True
    self.shard_state.active = False
    self.shard_state.put()
    e1 = TestEntity()
    e1.put()
    self._handle_request(expect_finalize=False)
    # completed state => no data processed
    self.assertEquals([], TestHandler.processed_keys)
    self.verify_shard_state(
        model.ShardState.get_by_shard_id(self.shard_id),
        active=False,
        input_finished=True)
    self.assertEquals(0, len(self.taskqueue.GetTasks("default")))
  def testAllInputProcessedStopsProcessing(self):
    """A shard whose input is finished skips reading and completes itself."""
    self.shard_state.input_finished = True
    self.shard_state.put()
    e1 = TestEntity()
    e1.put()
    self.handler.post()
    # completed state => no data processed
    self.assertEquals([], TestHandler.processed_keys)
    # but shard is done now
    self.verify_shard_state(
        model.ShardState.get_by_shard_id(self.shard_id),
        active=False,
        result_status=model.ShardState.RESULT_SUCCESS,
        input_finished=True)
  def testShardStateCollision(self):
    """An injected shard-state write collision leaves stored state untouched."""
    handlers._TEST_INJECTED_FAULTS.add("worker_active_state_collision")
    e1 = TestEntity()
    e1.put()
    self._handle_request(expect_finalize=False)
    # Data will still be processed
    self.assertEquals([str(e1.key())], TestHandler.processed_keys)
    # Shard state should not be overridden, i.e. left active.
    self.verify_shard_state(
        model.ShardState.get_by_shard_id(self.shard_id), active=True)
    self.assertEquals(0, len(self.taskqueue.GetTasks("default")))
  def testNoShardState(self):
    """Correct handling of missing shard state."""
    self.shard_state.delete()
    e1 = TestEntity()
    e1.put()
    self._handle_request(expect_finalize=False)
    # no state => no data processed
    self.assertEquals([], TestHandler.processed_keys)
    # The task is dropped: nothing is re-enqueued.
    self.assertEquals(0, len(self.taskqueue.GetTasks("default")))
  def testNoData(self):
    """Test no data to scan case."""
    self.verify_shard_state(
        model.ShardState.get_by_shard_id(self.shard_id), active=True)
    self._handle_request()
    self.assertEquals([], TestHandler.processed_keys)
    # Even with empty input the shard still finalizes successfully.
    self.verify_shard_state(
        model.ShardState.get_by_shard_id(self.shard_id),
        active=False, input_finished=True,
        result_status=model.ShardState.RESULT_SUCCESS)
  def testUserAbort(self):
    """Tests a user-initiated abort of the shard."""
    # Be sure to have an output writer for the abort step so we can confirm
    # that the finalize() method is never called.
    self.init(__name__ + ".test_handler_yield_keys",
              output_writer_spec=__name__ + ".UnfinalizableTestOutputWriter")
    model.MapreduceControl.abort(self.mapreduce_id, force_writes=True)
    self._handle_request(expect_finalize=False)
    # The worker observes the abort command and stops without finalizing.
    self.verify_shard_state(
        model.ShardState.get_by_shard_id(self.shard_id),
        active=False,
        result_status=model.ShardState.RESULT_ABORTED)
  def testLifeCycle(self):
    """Tests life cycle methods are called."""
    self.init(__name__ + ".test_handler_yield_keys",
              output_writer_spec=__name__ + ".ShardLifeCycleOutputWriter")
    e1 = TestEntity()
    e1.put()
    self._handle_request()
    # Expected order: writer creation, shard begin, the data slice (0),
    # the empty finalize slice (1), shard end, then finalize.
    expected_events = [
        "create-1",
        "begin_shard-mapreduce0-1",
        "begin_slice-0",
        "write-agd0ZXN0YXBwchALEgpUZXN0RW50aXR5GAEM",
        "end_slice-0",
        "begin_slice-1",
        "end_slice-1",
        "end_shard-mapreduce0-1",
        "finalize-1"
    ]
    self.assertEquals(expected_events, ShardLifeCycleOutputWriter.events)
  def testLongProcessingShouldStartAnotherSlice(self):
    """Long scan.

    If scanning takes too long, it should be paused, and new continuation task
    should be spawned.
    """
    e1 = TestEntity()
    e1.put()
    e2 = TestEntity()
    e2.put()
    # Each handler call advances the mock clock past the slice budget.
    TestHandler.delay = parameters.config._SLICE_DURATION_SEC + 10
    self.handler.post()
    # only first entity should be processed
    self.assertEquals([str(e1.key())], TestHandler.processed_keys)
    # slice should be still active
    self.verify_shard_state(
        model.ShardState.get_by_shard_id(self.shard_id),
        processed=1)
    # new task should be spawned
    tasks = self.taskqueue.GetTasks("default")
    self.assertEquals(1, len(tasks))
    self.verify_shard_task(tasks[0], self.shard_id, self.slice_id + 1)
    # No rate limit configured, so the continuation runs immediately.
    self.assertEquals(tasks[0]["eta_delta"], "0:00:00 ago")
  def testLimitingRate(self):
    """Test not enough quota to process everything in this slice."""
    e1 = TestEntity()
    e1.put()
    e2 = TestEntity()
    e2.put()
    e3 = TestEntity()
    e3.put()
    # Every time the handler is called, it increases time by this amount.
    TestHandler.delay = parameters.config._SLICE_DURATION_SEC/2 - 1
    # handler should be called twice.
    self.init(mapper_parameters={
        "processing_rate": 10.0/parameters.config._SLICE_DURATION_SEC},
        shard_count=5)
    self.handler.post()
    self.assertEquals(2, len(TestHandler.processed_keys))
    self.verify_shard_state(
        model.ShardState.get_by_shard_id(self.shard_id),
        active=True, processed=2,
        result_status=None)
    # The continuation task is delayed to honor the processing rate.
    tasks = self.taskqueue.GetTasks("default")
    self.assertEquals(1, len(tasks))
    self.assertEquals(tasks[0]["eta_delta"], "0:00:02 from now")
  def testLongProcessDataWithAllowCheckpoint(self):
    """Tests that process_datum works with input_readers.ALLOW_CHECKPOINT."""
    self.handler._start_time = 0
    # _process_datum returns False for the ALLOW_CHECKPOINT marker,
    # signalling that the slice may checkpoint at this point.
    self.assertFalse(self.handler._process_datum(input_readers.ALLOW_CHECKPOINT,
                                                 None,
                                                 None,
                                                 None))
  def testScheduleSlice(self):
    """_schedule_slice enqueues one worker task carrying the MR headers."""
    self.handler._schedule_slice(
        self.shard_state,
        model.TransientShardState(
            "/mapreduce", self.mapreduce_spec,
            self.shard_id, 123, mock.Mock(), mock.Mock()))
    tasks = self.taskqueue.GetTasks("default")
    self.assertEquals(1, len(tasks))
    # Verify task headers.
    headers = dict(tasks[0]["headers"])
    self.assertEqual(self.mapreduce_id, headers[util._MR_ID_TASK_HEADER])
    self.assertEqual(self.shard_id, headers[util._MR_SHARD_ID_TASK_HEADER])
    self.assertEqual(self.host, headers["Host"])
    self.verify_shard_task(tasks[0], self.shard_id, 123)
  def testScheduleSlice_Eta(self):
    """_schedule_slice honors an explicit task eta."""
    eta = datetime.datetime.utcnow() + datetime.timedelta(hours=1)
    self.handler._schedule_slice(
        self.shard_state,
        model.TransientShardState(
            "/mapreduce", self.mapreduce_spec,
            self.shard_id, 123, mock.Mock(), mock.Mock()),
        eta=eta)
    tasks = self.taskqueue.GetTasks("default")
    self.assertEquals(1, len(tasks))
    self.verify_shard_task(tasks[0], self.shard_id, 123, eta=eta)
  def testScheduleSlice_Countdown(self):
    """_schedule_slice honors an explicit task countdown."""
    countdown = 60 * 60
    self.handler._schedule_slice(
        self.shard_state,
        model.TransientShardState(
            "/mapreduce", self.mapreduce_spec,
            self.shard_id, 123, mock.Mock(), mock.Mock()),
        countdown=countdown)
    tasks = self.taskqueue.GetTasks("default")
    self.assertEquals(1, len(tasks))
    self.verify_shard_task(tasks[0], self.shard_id, 123, countdown=countdown)
  def testScheduleSlice_QueuePreserved(self):
    """Tests that _schedule_slice will enqueue tasks on the calling queue."""
    # The current queue name is taken from the task execution environment.
    os.environ["HTTP_X_APPENGINE_QUEUENAME"] = "crazy-queue"
    try:
      self.handler._schedule_slice(
          self.shard_state,
          model.TransientShardState(
              "/mapreduce", self.mapreduce_spec,
              self.shard_id, 123, mock.Mock(), mock.Mock()))
      tasks = self.taskqueue.GetTasks("crazy-queue")
      self.assertEquals(1, len(tasks))
      self.verify_shard_task(tasks[0], self.shard_id, 123)
    finally:
      del os.environ["HTTP_X_APPENGINE_QUEUENAME"]
  def testScheduleSlice_TombstoneErrors(self):
    """Tests when the scheduled slice already exists."""
    self.handler._schedule_slice(self.shard_state, self.transient_state)
    # This catches the exception.
    self.handler._schedule_slice(self.shard_state, self.transient_state)
    # The task won't re-enqueue because it has the same name.
    tasks = self.taskqueue.GetTasks("default")
    self.assertEquals(1, len(tasks))
  def testScheduleSlice_Hooks(self):
    """Test _schedule_slice method with a hooks class installed."""
    hooks_class_name = __name__ + '.' + TestHooks.__name__
    self.init(hooks_class_name=hooks_class_name)
    self.handler._schedule_slice(self.shard_state, self.transient_state)
    # The hook must have been invoked exactly once, with the worker task.
    self.assertEquals(1, len(self.taskqueue.GetTasks("default")))
    self.assertEquals(1, len(TestHooks.enqueue_worker_task_calls))
    task, queue_name = TestHooks.enqueue_worker_task_calls[0]
    self.assertEquals("/mapreduce/worker_callback/" + self.shard_state.shard_id,
                      task.url)
    self.assertEquals("default", queue_name)
  def testScheduleSlice_RaisingHooks(self):
    """Test _schedule_slice method with an empty hooks class installed.

    The installed hooks class will raise NotImplementedError in response to
    all method calls.
    """
    hooks_class_name = hooks.__name__ + '.' + hooks.Hooks.__name__
    self.init(hooks_class_name=hooks_class_name)
    self.handler._schedule_slice(
        self.shard_state,
        model.TransientShardState(
            "/mapreduce", self.mapreduce_spec,
            self.shard_id, 123, mock.Mock(), mock.Mock()))
    # Despite the hooks raising, the task must still be enqueued normally.
    tasks = self.taskqueue.GetTasks("default")
    self.assertEquals(1, len(tasks))
    self.verify_shard_task(tasks[0], self.shard_id, 123,
                           hooks_class_name=hooks_class_name)
  def testDatastoreExceptionInHandler(self):
    """Test when a handler can't save state to datastore."""
    self.init(__name__ + ".test_handler_yield_keys")
    TestEntity().put()
    original_method = datastore.PutAsync
    datastore.PutAsync = mock.MagicMock(side_effect=datastore_errors.Timeout())
    # Tests that handler doesn't abort task for datastore errors.
    # Unfortunately they still increase TaskExecutionCount.
    for _ in range(parameters.config.TASK_MAX_DATA_PROCESSING_ATTEMPTS):
      self.assertRaises(datastore_errors.Timeout, self.handler.post)
      self.verify_shard_state(
          model.ShardState.get_by_shard_id(self.shard_id),
          active=True,
          processed=0)
    # Restore datastore; the retried slice should now run to completion.
    datastore.PutAsync = original_method
    self._handle_request()
    self.verify_shard_state(
        model.ShardState.get_by_shard_id(self.shard_id),
        active=False, input_finished=True,
        result_status=model.ShardState.RESULT_SUCCESS,
        processed=1)
  def testTaskqueueExceptionInHandler(self):
    """Test when a handler can't reach taskqueue."""
    self.init(__name__ + ".test_handler_yield_keys")
    # Force enqueue another task.
    parameters.config._SLICE_DURATION_SEC = 0
    TestEntity().put()
    taskqueue.Task.add = mock.MagicMock(side_effect=taskqueue.TransientError)
    # Tests that handler doesn't abort task for taskqueue errors.
    # Unfortunately they still increase TaskExecutionCount.
    for _ in range(parameters.config.TASK_MAX_DATA_PROCESSING_ATTEMPTS):
      self.assertRaises(taskqueue.TransientError, self.handler.post)
      self.verify_shard_state(
          model.ShardState.get_by_shard_id(self.shard_id),
          active=True,
          processed=0)
    # Restore taskqueue; the retried slice should now make progress.
    taskqueue.Task.add = self.original_task_add
    self.handler.post()
    self.verify_shard_state(
        model.ShardState.get_by_shard_id(self.shard_id),
        active=True,
        result_status=None,
        processed=1)
  def testSliceRecoveryNotCalledWithNoOutputWriter(self):
    """Slice recovery is skipped entirely when there is no output writer."""
    shard_state = model.ShardState.get_by_shard_id(self.shard_id)
    # This slice had acquired shard lock in previous attempt.
    shard_state.acquired_once = True
    shard_state.put()
    self._handle_request()
    # The shard completes normally without a recovery slice.
    self.verify_shard_state(
        model.ShardState.get_by_shard_id(self.shard_id),
        active=False, input_finished=True,
        result_status=model.ShardState.RESULT_SUCCESS,
        processed=0)
  def testSliceRecoveryNotCalledWithOutputWriter(self):
    """Test when output writer doesn't support slice recovery."""
    self.init(output_writer_spec=__name__ + ".TestOutputWriter")
    TestOutputWriter._supports_slice_recovery = lambda self, spec: False
    shard_state = model.ShardState.get_by_shard_id(self.shard_id)
    # This slice had acquired shard lock in previous attempt.
    shard_state.acquired_once = True
    shard_state.put()
    self._handle_request()
    self.verify_shard_state(
        model.ShardState.get_by_shard_id(self.shard_id),
        active=False, input_finished=True,
        result_status=model.ShardState.RESULT_SUCCESS)
    # The writer's recover path must never have been taken.
    self.assertFalse("recover" in TestOutputWriter.events)
  def testSliceRecoveryNotCalledWithOutputWriter2(self):
    """Test when the slice isn't a retry."""
    # acquired_once is left False, so no recovery is needed.
    self.init(output_writer_spec=__name__ + ".TestOutputWriter")
    self._handle_request()
    self.verify_shard_state(
        model.ShardState.get_by_shard_id(self.shard_id),
        active=False, input_finished=True,
        result_status=model.ShardState.RESULT_SUCCESS)
    self.assertFalse("recover" in TestOutputWriter.events)
  def testSliceRecoveryCalled(self):
    """A retried slice with a recovery-capable writer inserts a recovery slice."""
    output_writer_spec = __name__ + ".TestOutputWriter"
    self.init(output_writer_spec=output_writer_spec)
    shard_state = model.ShardState.get_by_shard_id(self.shard_id)
    # This slice had acquired shard lock in previous attempt.
    shard_state.acquired_once = True
    shard_state.put()
    self.handler.post()
    # Recovery consumes an extra slice: the shard stays active and the
    # continuation task is scheduled two slice ids ahead.
    self.verify_shard_state(
        model.ShardState.get_by_shard_id(self.shard_id),
        active=True,
        result_status=None,
        slice_id=self.slice_id+2)
    tasks = self.taskqueue.GetTasks("default")
    self.assertEqual(1, len(tasks))
    self.verify_shard_task(tasks[0], self.shard_id, self.slice_id+2,
                           output_writer_spec=output_writer_spec)
    self.assertTrue("recover" in TestOutputWriter.events)
  def testSliceRecoveryFailed(self):
    """Test that slice recovery failures are retried like all other failures."""
    self.init(output_writer_spec=__name__ + ".TestOutputWriter")
    def _raise(self):
      raise Exception("Raise an exception on intention.")
    TestOutputWriter._supports_slice_recovery = _raise
    shard_state = model.ShardState.get_by_shard_id(self.shard_id)
    # This slice had acquired shard lock in previous attempt.
    shard_state.acquired_once = True
    shard_state.put()
    self.handler.post()
    # Slice gets retried.
    self.assertEqual(httplib.SERVICE_UNAVAILABLE, self.handler.response.status)
    self.verify_shard_state(
        model.ShardState.get_by_shard_id(self.shard_id),
        active=True,
        result_status=None,
        slice_retries=1)
  def testSliceAndShardRetries(self):
    """Test when a handler throws a non fatal exception."""
    self.init(__name__ + ".test_handler_raise_exception")
    TestEntity().put()
    # First time, the task gets retried.
    self._handle_request(expect_finalize=False)
    self.assertEqual(httplib.SERVICE_UNAVAILABLE, self.handler.response.status)
    self.verify_shard_state(
        model.ShardState.get_by_shard_id(self.shard_id),
        active=True,
        processed=0,
        slice_retries=1)
    # After the Nth attempt on slice, we retry the shard.
    shard_state = model.ShardState.get_by_shard_id(self.shard_id)
    shard_state.slice_retries = (
        parameters.config.TASK_MAX_DATA_PROCESSING_ATTEMPTS)
    shard_state.put()
    # TODO(user): fix
    self.handler.post()
    # Shard retry resets slice_retries and increments retries.
    self.verify_shard_state(
        model.ShardState.get_by_shard_id(self.shard_id),
        active=True,
        result_status=None,
        processed=0,
        slice_retries=0,
        retries=1)
    # TODO(user): test MR jobs that only allow slice or shard retry when
    # it is configurable per job.
  def testShardRetryFailed(self):
    """Test when shard retry failed."""
    # Start the shard already at the maximum shard-attempt count.
    self.init(__name__ + ".test_handler_raise_exception",
              shard_retries=parameters.config.SHARD_MAX_ATTEMPTS)
    TestEntity().put()
    # Slice attempts have exhausted.
    shard_state = model.ShardState.get_by_shard_id(self.shard_id)
    shard_state.slice_retries = (
        parameters.config.TASK_MAX_DATA_PROCESSING_ATTEMPTS)
    shard_state.put()
    self._handle_request(expect_finalize=False)
    # With both slice and shard attempts exhausted the shard fails for good.
    self.verify_shard_state(
        model.ShardState.get_by_shard_id(self.shard_id),
        active=False,
        result_status=model.ShardState.RESULT_FAILED,
        processed=1,
        slice_retries=parameters.config.TASK_MAX_DATA_PROCESSING_ATTEMPTS,
        retries=parameters.config.SHARD_MAX_ATTEMPTS)
  def testShardRetryInitiatedAtBeginning(self):
    """Test shard retry can be initiated at the beginning of a slice."""
    self.init(__name__ + ".test_handler_yield_keys")
    TestEntity().put()
    # Slice has been attempted before.
    shard_state = model.ShardState.get_by_shard_id(self.shard_id)
    shard_state.acquired_once = True
    shard_state.put()
    # Disable slice retry.
    # NOTE(review): this mutates module-level config and is not visibly
    # restored here — presumably tearDown resets it; confirm.
    parameters.config.TASK_MAX_DATA_PROCESSING_ATTEMPTS = 1
    self.handler.post()
    self.verify_shard_state(
        model.ShardState.get_by_shard_id(self.shard_id),
        active=True,
        result_status=None,
        processed=0,
        slice_retries=0,
        # Retried once.
        retries=1)
  def testSuccessfulSliceRetryClearsSliceRetriesCount(self):
    """A slice that finally succeeds resets slice_retries back to zero."""
    self.init(__name__ + ".test_handler_yield_op")
    TestEntity().put()
    TestEntity().put()
    # Force enqueue another task.
    parameters.config._SLICE_DURATION_SEC = 0
    # Pretend the slice had already failed almost to the limit.
    shard_state = model.ShardState.get_by_shard_id(self.shard_id)
    shard_state.slice_retries = (
        parameters.config.TASK_MAX_DATA_PROCESSING_ATTEMPTS - 1)
    shard_state.put()
    self.handler.post()
    self.verify_shard_state(
        model.ShardState.get_by_shard_id(self.shard_id),
        active=True,
        processed=1,
        slice_retries=0)
  def testExceptionInHandler(self):
    """Test behavior when handler throws exception."""
    self.init(__name__ + ".test_handler_raise_exception")
    TestEntity().put()
    # Stub out context._set
    m = mox.Mox()
    m.StubOutWithMock(context.Context, "_set", use_mock_anything=True)
    # Record calls
    context.Context._set(mox.IsA(context.Context))
    m.ReplayAll()
    try: # test, verify
      self.handler.post()
      # The failing slice is reported back as a retryable 503.
      self.assertEqual(httplib.SERVICE_UNAVAILABLE,
                       self.handler.response.status)
      # slice should be still active
      shard_state = model.ShardState.get_by_shard_id(self.shard_id)
      self.verify_shard_state(shard_state, processed=0, slice_retries=1)
      # mapper calls counter should not be incremented
      self.assertEquals(0, shard_state.counters_map.get(
          context.COUNTER_MAPPER_CALLS))
      # new task should not be spawned
      tasks = self.taskqueue.GetTasks("default")
      self.assertEquals(0, len(tasks))
      m.VerifyAll()
    finally:
      m.UnsetStubs()
  def testFailJobExceptionInHandler(self):
    """Test behavior when the handler raises a fail-job exception."""
    self.init(__name__ + ".test_handler_raise_fail_job_exception")
    TestEntity().put()
    # Stub out context._set
    m = mox.Mox()
    m.StubOutWithMock(context.Context, "_set", use_mock_anything=True)
    # Record calls
    context.Context._set(mox.IsA(context.Context))
    m.ReplayAll()
    try: # test, verify
      self.handler.post()
      # slice should not be active
      shard_state = model.ShardState.get_by_shard_id(self.shard_id)
      self.verify_shard_state(
          shard_state,
          processed=1,
          active=False,
          result_status = model.ShardState.RESULT_FAILED)
      # Unlike a transient error, the mapper call is still counted.
      self.assertEquals(1, shard_state.counters_map.get(
          context.COUNTER_MAPPER_CALLS))
      # new task should not be spawned
      tasks = self.taskqueue.GetTasks("default")
      self.assertEquals(0, len(tasks))
      m.VerifyAll()
    finally:
      m.UnsetStubs()
  def testContext(self):
    """Test proper context initialization."""
    # The task retry count must flow from the request header into the Context.
    self.handler.request.headers["X-AppEngine-TaskExecutionCount"] = 5
    TestEntity().put()
    m = mox.Mox()
    m.StubOutWithMock(context.Context, "_set", use_mock_anything=True)
    context.Context._set(MatchesContext(task_retry_count=5))
    m.ReplayAll()
    try: # test, verify
      self.handler.post()
      m.VerifyAll()
    finally:
      m.UnsetStubs()
  def testContextFlush(self):
    """Test context handling."""
    TestEntity().put()
    # Stub out context
    m = mox.Mox()
    m.StubOutWithMock(context.Context, "_set", use_mock_anything=True)
    m.StubOutWithMock(context.Context, "flush", use_mock_anything=True)
    # Record calls: the context must be installed and then flushed.
    context.Context._set(mox.IsA(context.Context))
    context.get().flush()
    m.ReplayAll()
    try: # test, verify
      self.handler.post()
      # 1 entity should be processed
      self.assertEquals(1, len(TestHandler.processed_keys))
      m.VerifyAll()
    finally:
      m.UnsetStubs()
  def testOperationYield(self):
    """Test yielding operations from handler."""
    self.init(__name__ + ".test_handler_yield_op")
    e1 = TestEntity().put()
    e2 = TestEntity().put()
    self._handle_request()
    # Each entity yields two operations, hence each key appears twice.
    self.assertEquals([str(e1), str(e1), str(e2), str(e2)],
                      TestOperation.processed_keys)
  def testOutputWriter(self):
    """Output writer sees create, one write per entity key, then finalize."""
    self.init(__name__ + ".test_handler_yield_keys",
              output_writer_spec=__name__ + ".TestOutputWriter")
    e1 = TestEntity().put()
    e2 = TestEntity().put()
    self._handle_request()
    self.assertEquals(
        ["create-1",
         "write-" + str(e1),
         "write-" + str(e2),
         "finalize-1",
        ], TestOutputWriter.events)
class ControllerCallbackHandlerTest(MapreduceHandlerTestBase):
"""Test handlers.ControllerCallbackHandler."""
  def setUp(self):
    """Sets up the test harness."""
    MapreduceHandlerTestBase.setUp(self)
    # Persist a fresh mapreduce state with a 3-shard spec and a done-callback
    # configured on a non-default queue.
    self.mapreduce_state = model.MapreduceState.create_new()
    self.mapreduce_state.put()
    self.mapreduce_id = self.mapreduce_state.key().name()
    mapreduce_spec = self.create_mapreduce_spec(self.mapreduce_id, 3)
    mapreduce_spec.params[PARAM_DONE_CALLBACK] = "/fin"
    mapreduce_spec.params[PARAM_DONE_CALLBACK_QUEUE] = "crazy-queue"
    mapreduce_spec.params['base_path'] = parameters.config.BASE_PATH
    self.mapreduce_state.mapreduce_spec = mapreduce_spec
    self.mapreduce_state.chart_url = "http://chart.apis.google.com/chart?"
    self.mapreduce_state.active = True
    self.mapreduce_state.active_shards = 3
    self.mapreduce_state.put()
    # Build the controller handler and a mock request that carries the task
    # headers the handler expects from the taskqueue environment.
    self.handler = handlers.ControllerCallbackHandler()
    self.handler._time = MockTime.time
    request = mock_webapp.MockRequest()
    request.headers["X-AppEngine-QueueName"] = "default"
    request.headers["X-AppEngine-TaskName"] = "foo-task-name"
    request.headers[util._MR_ID_TASK_HEADER] = self.mapreduce_id
    request.headers[model.HugeTask.PAYLOAD_VERSION_HEADER] = (
        model.HugeTask.PAYLOAD_VERSION)
    request.path = "/mapreduce/controller_callback"
    self.request = request
    self.response = mock_webapp.MockResponse()
    self.handler.initialize(self.request, self.response)
    self.handler.request.set("mapreduce_spec", mapreduce_spec.to_json_str())
    self.handler.request.set("serial_id", "1234")
    self.verify_mapreduce_state(self.mapreduce_state, shard_count=3)
  def verify_done_task(self):
    """Asserts a single done-callback POST to /fin on the configured queue."""
    tasks = self.taskqueue.GetTasks("crazy-queue")
    self.assertEquals(1, len(tasks))
    task = tasks[0]
    self.assertTrue(task)
    self.assertEquals("/fin", task["url"])
    self.assertEquals("POST", task["method"])
    headers = dict(task["headers"])
    self.assertEquals(self.mapreduce_id, headers["Mapreduce-Id"])
    self.assertEqual(self.host, headers["Host"])
  def testStateUpdateIsCmpAndSet(self):
    """Verify updating model.MapreduceState is cmp and set."""
    # Create shard states for 3 finished shards.
    shard_states = []
    for i in range(3):
      shard_state = self.create_shard_state(self.mapreduce_id, i)
      shard_state.active = False
      shard_state.put()
      shard_states.append(shard_state)
    # MapreduceState.active is changed to False by another duplicated running
    # controller task.
    mapreduce_state = model.MapreduceState.get_by_key_name(self.mapreduce_id)
    mapreduce_state.active = False
    mapreduce_state.result_status = model.MapreduceState.RESULT_SUCCESS
    mapreduce_state.put()
    # Invoke controller handler with stale mapreduce_state and shard_states.
    mapreduce_state.active = True
    mapreduce_state.result_status = None
    for s in shard_states:
      s.active = True
    self.handler._update_state_from_shard_states(mapreduce_state,
                                                 shard_states,
                                                 None)
    # Make sure we didn't overwrite active or result_status.
    mapreduce_state = model.MapreduceState.get_by_key_name(self.mapreduce_id)
    self.verify_mapreduce_state(
        mapreduce_state,
        shard_count=3,
        active=False,
        result_status=model.MapreduceState.RESULT_SUCCESS)
    # New controller task will drop itself because it detected that
    # mapreduce_state.active is False.
    # It will enqueue a finalizejob callback to cleanup garbage.
    self.handler.post()
    tasks = self.taskqueue.GetTasks("default")
    self.assertEqual(1, len(tasks))
    self.assertEqual("/mapreduce/finalizejob_callback/" + self.mapreduce_id,
                     tasks[0]["url"])
  def testDecodingPayloadFailed(self):
    """An undecodable controller task fails the job and all its shards."""
    for i in range(3):
      self.create_and_store_shard_state(self.mapreduce_id, i)
    # Remove the payload version header so the handler can't decode the task.
    del self.request.headers[model.HugeTask.PAYLOAD_VERSION_HEADER]
    self.handler.initialize(self.request, self.response)
    self.handler.post()
    state = model.MapreduceState.get_by_job_id(self.mapreduce_id)
    self.assertFalse(state.active)
    self.assertEqual(model.MapreduceState.RESULT_FAILED, state.result_status)
    # Every shard must be marked failed as well.
    shard_states = model.ShardState.find_all_by_mapreduce_state(state)
    for ss in shard_states:
      self.assertFalse(ss.active)
      self.assertEqual(model.ShardState.RESULT_FAILED, ss.result_status)
  def testDecodingPayloadFailedIdempotency(self):
    """Dropping an undecodable controller task is safe to repeat."""
    for i in range(3):
      self.create_and_store_shard_state(self.mapreduce_id, i)
    # Set one shard state to failed as if the drop_gracefully logic has
    # been run once but failed.
    state = model.MapreduceState.get_by_job_id(self.mapreduce_id)
    shard_states = list(model.ShardState.find_all_by_mapreduce_state(state))
    shard_states[0].set_for_failure()
    shard_states[0].put()
    del self.request.headers[model.HugeTask.PAYLOAD_VERSION_HEADER]
    self.handler.initialize(self.request, self.response)
    # Post twice: the second run must be a no-op, not an error.
    self.handler.post()
    self.handler.post()
    state = model.MapreduceState.get_by_job_id(self.mapreduce_id)
    self.assertFalse(state.active)
    self.assertEqual(model.MapreduceState.RESULT_FAILED, state.result_status)
    shard_states = model.ShardState.find_all_by_mapreduce_state(state)
    for ss in shard_states:
      self.assertFalse(ss.active)
      self.assertEqual(model.ShardState.RESULT_FAILED, ss.result_status)
  def testSmoke(self):
    """Verify main execution path.

    Should aggregate all data from all shards correctly.
    """
    # check that chart_url is updated.
    self.mapreduce_state.chart_url = ""
    self.mapreduce_state.put()
    for i in range(3):
      shard_state = self.create_shard_state(self.mapreduce_id, i)
      shard_state.counters_map.increment(
          COUNTER_MAPPER_CALLS, i * 2 + 1) # 1, 3, 5
      # We should have mapreduce active even some (not all)
      # shards are not active
      if i == 0:
        shard_state.active = False
      shard_state.put()
    self.handler.post()
    mapreduce_state = model.MapreduceState.get_by_key_name(self.mapreduce_id)
    # we should have 1 + 3 + 5 = 9 elements processed
    self.verify_mapreduce_state(mapreduce_state, processed=9, shard_count=3)
    self.assertEquals(0, mapreduce_state.failed_shards)
    self.assertEquals(0, mapreduce_state.aborted_shards)
    # new task should be spawned
    tasks = self.taskqueue.GetTasks("default")
    self.assertEquals(1, len(tasks))
    # Every controller task must carry the MR id and Host headers.
    for task in tasks:
      headers = dict(task["headers"])
      self.assertEqual(self.mapreduce_id, headers[util._MR_ID_TASK_HEADER])
      self.assertEqual(self.host, headers["Host"])
    self.verify_controller_task(tasks[0], shard_count=3)
  def testMissingShardState(self):
    """Correct handling of missing shard state."""
    self.handler.post()
    # With no shard states the whole job is marked failed.
    mapreduce_state = model.MapreduceState.get_by_key_name(self.mapreduce_id)
    self.verify_mapreduce_state(mapreduce_state, active=False, shard_count=3,
                                result_status=model.ShardState.RESULT_FAILED)
    self.assertEquals(0, mapreduce_state.failed_shards)
    self.assertEquals(0, mapreduce_state.aborted_shards)
    # Abort signal should be present.
    self.assertEquals(
        model.MapreduceControl.ABORT,
        db.get(model.MapreduceControl.get_key_by_job_id(
            self.mapreduce_id)).command)
    tasks = self.taskqueue.GetTasks("default")
    # Finalize task should be spawned.
    self.assertEquals(1, len(tasks))
    self.assertEquals("/mapreduce/finalizejob_callback/" + self.mapreduce_id,
                      tasks[0]["url"])
    # Done Callback task should be spawned
    self.verify_done_task()
  def testAllShardsAreDone(self):
    """Mapreduce should become inactive when all shards have finished."""
    for i in range(3):
      shard_state = self.create_shard_state(self.mapreduce_id, i)
      shard_state.counters_map.increment(
          COUNTER_MAPPER_CALLS, i * 2 + 1) # 1, 3, 5
      shard_state.active = False
      shard_state.result_status = model.ShardState.RESULT_SUCCESS
      shard_state.put()
    self.handler.post()
    mapreduce_state = model.MapreduceState.get_by_key_name(self.mapreduce_id)
    self.verify_mapreduce_state(
        mapreduce_state, processed=9, active=False, shard_count=3,
        result_status=model.MapreduceState.RESULT_SUCCESS)
    self.assertEquals(0, mapreduce_state.failed_shards)
    self.assertEquals(0, mapreduce_state.aborted_shards)
    tasks = self.taskqueue.GetTasks("default")
    # Finalize task should be spawned.
    self.assertEquals(1, len(tasks))
    self.assertEquals("/mapreduce/finalizejob_callback/" + self.mapreduce_id,
                      tasks[0]["url"])
    headers = dict(tasks[0]["headers"])
    self.assertEqual(self.mapreduce_id, headers[util._MR_ID_TASK_HEADER])
    self.assertEqual(self.host, headers["Host"])
    # Done Callback task should be spawned
    self.verify_done_task()
    # Shard states are kept for status reporting, not garbage collected here.
    self.assertEquals(3, len(list(
        model.ShardState.find_all_by_mapreduce_state(mapreduce_state))))
  def testShardsDoneFinalizeOutputWriter(self):
    """Output writer's finalize_job is invoked once all shards succeed."""
    # Point the mapper spec at the test double writer so its recorded
    # events can be asserted on after the controller runs.
    self.mapreduce_state.mapreduce_spec.mapper.output_writer_spec = (
        __name__ + "." + TestOutputWriter.__name__)
    self.mapreduce_state.put()
    self.handler.request.set("mapreduce_spec",
                             self.mapreduce_state.mapreduce_spec.to_json_str())
    for i in range(3):
      shard_state = self.create_shard_state(self.mapreduce_id, i)
      shard_state.counters_map.increment(
          COUNTER_MAPPER_CALLS, i * 2 + 1)  # 1, 3, 5
      shard_state.active = False
      shard_state.result_status = model.ShardState.RESULT_SUCCESS
      shard_state.put()
    self.handler.post()
    # Exactly one writer event: the job-level finalization hook.
    self.assertEquals(["finalize_job"], TestOutputWriter.events)
  def testShardsDoneWithHooks(self):
    """Completion uses the hooks class to enqueue the done-callback task."""
    self.mapreduce_state.mapreduce_spec.hooks_class_name = (
        __name__ + '.' + TestHooks.__name__)
    self.mapreduce_state.put()
    self.handler.request.set("mapreduce_spec",
                             self.mapreduce_state.mapreduce_spec.to_json_str())
    for i in range(3):
      shard_state = self.create_shard_state(self.mapreduce_id, i)
      shard_state.active = False
      shard_state.result_status = model.ShardState.RESULT_SUCCESS
      shard_state.put()
    self.handler.post()
    # The hook recorded exactly one enqueue_done_task call, and it targeted
    # the queue/url configured elsewhere in this test module.
    self.assertEquals(1, len(TestHooks.enqueue_done_task_calls))
    task, queue_name = TestHooks.enqueue_done_task_calls[0]
    self.assertEquals('crazy-queue', queue_name)
    self.assertEquals('/fin', task.url)
  def testShardFailure(self):
    """Tests that when one shard fails the job will be aborted."""
    # Shard 0 fails; shards 1 and 2 are still active, so the job cannot
    # finalize yet -- it should instead issue an abort command.
    for i in range(3):
      shard_state = self.create_shard_state(self.mapreduce_id, i)
      if i == 0:
        shard_state.result_status = model.ShardState.RESULT_FAILED
        shard_state.active = False
      else:
        shard_state.result_status = model.ShardState.RESULT_SUCCESS
        shard_state.active = True
      shard_state.put()
    self.handler.post()
    # Job stays active while it waits for the remaining shards to stop.
    mapreduce_state = model.MapreduceState.get_by_key_name(self.mapreduce_id)
    self.verify_mapreduce_state(
        mapreduce_state, active=True, shard_count=3)
    self.assertEquals(1, mapreduce_state.failed_shards)
    self.assertEquals(0, mapreduce_state.aborted_shards)
    # new task should be spawned
    tasks = self.taskqueue.GetTasks("default")
    self.assertEquals(1, len(tasks))
    self.verify_controller_task(tasks[0], shard_count=3)
    # Abort signal should be present.
    self.assertEquals(
        model.MapreduceControl.ABORT,
        db.get(model.MapreduceControl.get_key_by_job_id(
            self.mapreduce_id)).command)
  def testShardFailureAllDone(self):
    """Tests that individual shard failure affects the job outcome.

    With one failed, one aborted and one successful shard (all inactive),
    the job finalizes immediately with RESULT_FAILED.
    """
    for i in range(3):
      shard_state = self.create_shard_state(self.mapreduce_id, i)
      shard_state.active = False
      if i == 0:
        shard_state.result_status = model.ShardState.RESULT_FAILED
      elif i == 1:
        shard_state.result_status = model.ShardState.RESULT_ABORTED
      else:
        shard_state.result_status = model.ShardState.RESULT_SUCCESS
      shard_state.put()
    self.handler.post()
    mapreduce_state = model.MapreduceState.get_by_key_name(self.mapreduce_id)
    self.verify_mapreduce_state(
        mapreduce_state, active=False, shard_count=3,
        result_status=model.ShardState.RESULT_FAILED)
    self.assertEquals(1, mapreduce_state.failed_shards)
    self.assertEquals(1, mapreduce_state.aborted_shards)
    tasks = self.taskqueue.GetTasks("default")
    # Finalize task should be spawned.
    self.assertEquals(1, len(tasks))
    self.assertEquals("/mapreduce/finalizejob_callback/" + self.mapreduce_id,
                      tasks[0]["url"])
    # Done Callback task should be spawned
    self.verify_done_task()
    # Shard states survive finalization.
    self.assertEquals(3, len(list(
        model.ShardState.find_all_by_mapreduce_state(mapreduce_state))))
  def testUserAbort(self):
    """Tests that user abort will stop the job.

    Phase 1: abort requested while shards are active -> job stays active.
    Phase 2: repeated controller callbacks remain no-ops.
    Phase 3: once all shards stop, the job finalizes as RESULT_ABORTED.
    """
    for i in range(3):
      shard_state = self.create_shard_state(self.mapreduce_id, i)
      shard_state.active = True
      shard_state.put()
    model.MapreduceControl.abort(self.mapreduce_id)
    self.handler.post()
    mapreduce_state = model.MapreduceState.get_by_key_name(self.mapreduce_id)
    self.verify_mapreduce_state(
        mapreduce_state, active=True, shard_count=3)
    self.assertEquals(0, mapreduce_state.failed_shards)
    self.assertEquals(0, mapreduce_state.aborted_shards)
    # new task should be spawned
    tasks = self.taskqueue.GetTasks("default")
    self.assertEquals(1, len(tasks))
    self.verify_controller_task(tasks[0], shard_count=3)
    self.taskqueue.FlushQueue("default")
    # Repeated calls to callback closure while the shards are active will
    # result in a no op. As the controller waits for the shards to finish.
    self.handler.post()
    mapreduce_state = model.MapreduceState.get_by_key_name(self.mapreduce_id)
    self.verify_mapreduce_state(
        mapreduce_state, active=True, shard_count=3)
    self.assertEquals(0, mapreduce_state.failed_shards)
    self.assertEquals(0, mapreduce_state.aborted_shards)
    tasks = self.taskqueue.GetTasks("default")
    self.assertEquals(1, len(tasks))
    self.verify_controller_task(tasks[0], shard_count=3)
    self.taskqueue.FlushQueue("default")
    # Force all shards to completion state (success, success, or abort).
    shard_state_list = list(
        model.ShardState.find_all_by_mapreduce_state(mapreduce_state))
    self.assertEquals(3, len(shard_state_list))
    shard_state_list[0].active = False
    shard_state_list[0].result_status = model.ShardState.RESULT_SUCCESS
    shard_state_list[1].active = False
    shard_state_list[1].result_status = model.ShardState.RESULT_SUCCESS
    shard_state_list[2].active = False
    shard_state_list[2].result_status = model.ShardState.RESULT_ABORTED
    db.put(shard_state_list)
    self.handler.post()
    mapreduce_state = model.MapreduceState.get_by_key_name(self.mapreduce_id)
    self.verify_mapreduce_state(
        mapreduce_state, active=False, shard_count=3,
        result_status=model.MapreduceState.RESULT_ABORTED)
    self.assertEquals(1, mapreduce_state.aborted_shards)
    tasks = self.taskqueue.GetTasks("default")
    # Finalize task should be spawned.
    self.assertEquals(1, len(tasks))
    self.assertEquals("/mapreduce/finalizejob_callback/" + self.mapreduce_id,
                      tasks[0]["url"])
    # Done Callback task should be spawned
    self.verify_done_task()
  def testScheduleQueueName(self):
    """Tests that the calling queue name is preserved on schedule calls."""
    # Simulate the task queue service invoking us from a custom queue.
    os.environ["HTTP_X_APPENGINE_QUEUENAME"] = "crazy-queue"
    try:
      self.mapreduce_state.put()
      for i in range(3):
        shard_state = self.create_shard_state(self.mapreduce_id, i)
        shard_state.put()
      self.handler.post()
      # new task should be spawned on the calling queue
      tasks = self.taskqueue.GetTasks("crazy-queue")
      self.assertEquals(1, len(tasks))
      self.verify_controller_task(tasks[0], shard_count=3)
    finally:
      # Always restore the environment so other tests are unaffected.
      del os.environ["HTTP_X_APPENGINE_QUEUENAME"]
class CleanUpJobTest(testutil.HandlerTestBase):
  """Tests cleaning up jobs via the clean_up_job command handler."""
  def setUp(self):
    """Sets up the test harness.

    Starts a real 4-shard mapreduce over TestKind and wires a
    CleanUpJobHandler to a mock request/response pair, including the
    XSRF-protection header the handler requires.
    """
    testutil.HandlerTestBase.setUp(self)
    TestKind().put()
    self.mapreduce_id = control.start_map(
        "my job 1",
        "__main__.TestMap",
        "mapreduce.input_readers.DatastoreInputReader",
        {"entity_kind": "__main__.TestKind"},
        4)
    self.handler = handlers.CleanUpJobHandler()
    self.handler.initialize(mock_webapp.MockRequest(),
                            mock_webapp.MockResponse())
    self.handler.request.path = "/mapreduce/command/clean_up_job"
    self.handler.request.headers["X-Requested-With"] = "XMLHttpRequest"
  def KickOffMapreduce(self):
    """Executes pending kickoff task."""
    test_support.execute_all_tasks(self.taskqueue)
  def testCSRF(self):
    """Test that we check the X-Requested-With header."""
    del self.handler.request.headers["X-Requested-With"]
    self.handler.post()
    self.assertEquals(httplib.FORBIDDEN, self.handler.response.status)
  def testBasic(self):
    """Tests cleaning up the job.

    Note: This cleans up a running mapreduce, but that's okay because
    the prohibition against doing so is done on the client side.
    """
    self.KickOffMapreduce()
    key = model.MapreduceState.get_key_by_job_id(self.mapreduce_id)
    self.assertTrue(db.get(key))
    self.handler.request.set("mapreduce_id", self.mapreduce_id)
    self.handler.post()
    # Handler responds with a JSON status message.
    result = json.loads(self.handler.response.out.getvalue())
    self.assertEquals({"status": ("Job %s successfully cleaned up." %
                                  self.mapreduce_id) },
                      result)
    # Both the job state and all shard states must be gone.
    state = model.MapreduceState.get_by_job_id(self.mapreduce_id)
    self.assertFalse(state)
    self.assertFalse(list(model.ShardState.find_all_by_mapreduce_state(state)))
# Allow running this test module directly from the command line.
if __name__ == "__main__":
  unittest.main()
| potatolondon/potato-mapreduce | test/mapreduce/handlers_test.py | Python | apache-2.0 | 102,043 |
# module pyparsing.py
#
# Copyright (c) 2003-2008 Paul T. McGuire
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
#from __future__ import generators
__doc__ = \
"""
pyparsing module - Classes and methods to define and execute parsing grammars
The pyparsing module is an alternative approach to creating and executing simple grammars,
vs. the traditional lex/yacc approach, or the use of regular expressions. With pyparsing, you
don't need to learn a new syntax for defining grammars or matching expressions - the parsing module
provides a library of classes that you use to construct the grammar directly in Python.
Here is a program to parse "Hello, World!" (or any greeting of the form "<salutation>, <addressee>!")::
from pyparsing import Word, alphas
# define grammar of a greeting
greet = Word( alphas ) + "," + Word( alphas ) + "!"
hello = "Hello, World!"
print hello, "->", greet.parseString( hello )
The program outputs the following::
Hello, World! -> ['Hello', ',', 'World', '!']
The Python representation of the grammar is quite readable, owing to the self-explanatory
class names, and the use of '+', '|' and '^' operators.
The parsed results returned from parseString() can be accessed as a nested list, a dictionary, or an
object with named attributes.
The pyparsing module handles some of the problems that are typically vexing when writing text parsers:
- extra or missing whitespace (the above program will also handle "Hello,World!", "Hello , World !", etc.)
- quoted strings
- embedded comments
"""
__version__ = "1.5.0"
__versionTime__ = "28 May 2008 10:05"
__author__ = "Paul McGuire <ptmcg@users.sourceforge.net>"
import string
from weakref import ref as wkref
import copy,sys
import warnings
import re
import sre_constants
import xml.sax.saxutils
#~ sys.stderr.write( "testing pyparsing module, version %s, %s\n" % (__version__,__versionTime__ ) )
__all__ = [
'And', 'CaselessKeyword', 'CaselessLiteral', 'CharsNotIn', 'Combine', 'Dict', 'Each', 'Empty',
'FollowedBy', 'Forward', 'GoToColumn', 'Group', 'Keyword', 'LineEnd', 'LineStart', 'Literal',
'MatchFirst', 'NoMatch', 'NotAny', 'OneOrMore', 'OnlyOnce', 'Optional', 'Or',
'ParseBaseException', 'ParseElementEnhance', 'ParseException', 'ParseExpression', 'ParseFatalException',
'ParseResults', 'ParseSyntaxException', 'ParserElement', 'QuotedString', 'RecursiveGrammarException',
'Regex', 'SkipTo', 'StringEnd', 'StringStart', 'Suppress', 'Token', 'TokenConverter', 'Upcase',
'White', 'Word', 'WordEnd', 'WordStart', 'ZeroOrMore',
'alphanums', 'alphas', 'alphas8bit', 'anyCloseTag', 'anyOpenTag', 'cStyleComment', 'col',
'commaSeparatedList', 'commonHTMLEntity', 'countedArray', 'cppStyleComment', 'dblQuotedString',
'dblSlashComment', 'delimitedList', 'dictOf', 'downcaseTokens', 'empty', 'getTokensEndLoc', 'hexnums',
'htmlComment', 'javaStyleComment', 'keepOriginalText', 'line', 'lineEnd', 'lineStart', 'lineno',
'makeHTMLTags', 'makeXMLTags', 'matchOnlyAtCol', 'matchPreviousExpr', 'matchPreviousLiteral',
'nestedExpr', 'nullDebugAction', 'nums', 'oneOf', 'opAssoc', 'operatorPrecedence', 'printables',
'punc8bit', 'pythonStyleComment', 'quotedString', 'removeQuotes', 'replaceHTMLEntity',
'replaceWith', 'restOfLine', 'sglQuotedString', 'srange', 'stringEnd',
'stringStart', 'traceParseAction', 'unicodeString', 'upcaseTokens', 'withAttribute',
'indentedBlock',
]
"""
Detect if we are running version 3.X and make appropriate changes
Robert A. Clark
"""
if sys.version_info[0] > 2:
_PY3K = True
_MAX_INT = sys.maxsize
basestring = str
else:
_PY3K = False
_MAX_INT = sys.maxint
# On Python 2, str() of a unicode object can raise UnicodeEncodeError, so
# _ustr falls back to unicode(); on Python 3 plain str already handles this.
if not _PY3K:
    def _ustr(obj):
        """Drop-in replacement for str(obj) that tries to be Unicode friendly. It first tries
           str(obj). If that fails with a UnicodeEncodeError, then it tries unicode(obj). It
           then < returns the unicode object | encodes it with the default encoding | ... >.
        """
        try:
            # If this works, then _ustr(obj) has the same behaviour as str(obj), so
            # it won't break any existing code.
            return str(obj)
        except UnicodeEncodeError:
            # The Python docs (http://docs.python.org/ref/customization.html#l2h-182)
            # state that "The return value must be a string object". However, does a
            # unicode object (being a subclass of basestring) count as a "string
            # object"?
            # If so, then return a unicode object:
            return unicode(obj)
            # Else encode it... but how? There are many choices... :)
            # Replace unprintables with escape codes?
            #return unicode(obj).encode(sys.getdefaultencoding(), 'backslashreplace_errors')
            # Replace unprintables with question marks?
            #return unicode(obj).encode(sys.getdefaultencoding(), 'replace')
            # ...
else:
    _ustr = str
def _str2dict(strg):
return dict( [(c,0) for c in strg] )
#~ return set( [c for c in strg] )
class _Constants(object):
    """Empty namespace class; instances serve as bags of named constants
    (e.g. the opAssoc object defined elsewhere in this module)."""
    pass
# Common character classes used when defining Word expressions.
# string.lowercase/uppercase were renamed ascii_lowercase/ascii_uppercase in Python 3.
if not _PY3K:
    alphas = string.lowercase + string.uppercase
else:
    alphas = string.ascii_lowercase + string.ascii_uppercase
nums = string.digits
hexnums = nums + "ABCDEFabcdef"
alphanums = alphas + nums
_bslash = "\\"
# All printable, non-whitespace ASCII characters.
printables = "".join( [ c for c in string.printable if c not in string.whitespace ] )
class ParseBaseException(Exception):
    """Base exception class for all parsing runtime exceptions.

    Carries the parse location (loc), the failure message (msg), the string
    being parsed (pstr) and the ParserElement that raised (parserElement).
    The lineno/col/column/line attributes are computed lazily on access.
    """
    __slots__ = ( "loc","msg","pstr","parserElement" )
    # Performance tuning: a *lot* of these get constructed during parsing,
    # so keep __init__ as small and fast as possible.
    def __init__( self, pstr, loc=0, msg=None, elem=None ):
        self.loc = loc
        if msg is None:
            # Single-argument form: pstr is actually the message.
            self.msg = pstr
            self.pstr = ""
        else:
            self.msg = msg
            self.pstr = pstr
        self.parserElement = elem

    def __getattr__( self, aname ):
        """Lazily computed attributes:
            - lineno - the line number of the exception text
            - col/column - the column number of the exception text
            - line - the line containing the exception text
        """
        if aname == "lineno":
            return lineno( self.loc, self.pstr )
        if aname in ("col", "column"):
            return col( self.loc, self.pstr )
        if aname == "line":
            return line( self.loc, self.pstr )
        raise AttributeError(aname)

    def __str__( self ):
        return "%s (at char %d), (line:%d, col:%d)" % \
                ( self.msg, self.loc, self.lineno, self.column )
    def __repr__( self ):
        return _ustr(self)
    def markInputline( self, markerString = ">!<" ):
        """Extracts the exception line from the input string, and marks
           the location of the exception with a special symbol.
        """
        line_str = self.line
        line_column = self.column - 1
        if markerString:
            line_str = line_str[:line_column] + markerString + line_str[line_column:]
        return line_str.strip()
class ParseException(ParseBaseException):
    """exception thrown when parse expressions don't match class;
       this is the "recoverable" failure used for normal backtracking.
       supported attributes by name are:
        - lineno - returns the line number of the exception text
        - col - returns the column number of the exception text
        - line - returns the line containing the exception text
    """
    pass
class ParseFatalException(ParseBaseException):
    """user-throwable exception thrown when inconsistent parse content
       is found; stops all parsing immediately (no backtracking)"""
    pass
class ParseSyntaxException(ParseFatalException):
    """just like ParseFatalException, but thrown internally when an
       ErrorStop indicates that parsing is to stop immediately because
       an unbacktrackable syntax error has been found"""
    def __init__(self, pe):
        # Copy all fields from the triggering ParseException so location
        # and message information are preserved.
        super(ParseSyntaxException, self).__init__(
                                        pe.pstr, pe.loc, pe.msg, pe.parserElement)
#~ class ReparseException(ParseBaseException):
#~ """Experimental class - parse actions can raise this exception to cause
#~ pyparsing to reparse the input string:
#~ - with a modified input string, and/or
#~ - with a modified start location
#~ Set the values of the ReparseException in the constructor, and raise the
#~ exception in a parse action to cause pyparsing to use the new string/location.
#~ Setting the values as None causes no change to be made.
#~ """
#~ def __init_( self, newstring, restartLoc ):
#~ self.newParseText = newstring
#~ self.reparseLoc = restartLoc
class RecursiveGrammarException(Exception):
    """Exception thrown by validate() if the grammar could be improperly
    recursive; carries the chain of parse elements forming the cycle."""
    def __init__( self, parseElementList ):
        # Keep the offending element chain for diagnostics.
        self.parseElementTrace = parseElementList

    def __str__( self ):
        return "RecursiveGrammarException: %s" % (self.parseElementTrace,)
class _ParseResultsWithOffset(object):
    """Internal pairing of a named token value with its positional offset
    inside the owning ParseResults; behaves like a 2-tuple (value, offset)."""
    def __init__(self,p1,p2):
        self.tup = (p1,p2)
    def __getitem__(self,i):
        return self.tup[i]
    def __repr__(self):
        return repr(self.tup)
class ParseResults(object):
    """Structured parse results, to provide multiple means of access to the parsed data:
       - as a list (len(results))
       - by list index (results[0], results[1], etc.)
       - by attribute (results.<resultsName>)

       Internally keeps a plain list of tokens (__toklist) plus a dict
       (__tokdict) mapping results names to [(value, offset)] entries.
       """
    __slots__ = ( "__toklist", "__tokdict", "__doinit", "__name", "__parent", "__accumNames", "__weakref__" )
    def __new__(cls, toklist, name=None, asList=True, modal=True ):
        # Wrapping an existing ParseResults returns it unchanged; the
        # __doinit flag makes sure __init__ only initializes fresh objects.
        if isinstance(toklist, cls):
            return toklist
        retobj = object.__new__(cls)
        retobj.__doinit = True
        return retobj

    # Performance tuning: we construct a *lot* of these, so keep this
    # constructor as small and fast as possible
    def __init__( self, toklist, name=None, asList=True, modal=True ):
        if self.__doinit:
            self.__doinit = False
            self.__name = None
            self.__parent = None
            self.__accumNames = {}
            if isinstance(toklist, list):
                self.__toklist = toklist[:]
            else:
                self.__toklist = [toklist]
            self.__tokdict = dict()
        # this line is related to debugging the asXML bug
        #~ asList = False
        if name:
            # non-modal names accumulate all matches instead of only the last
            if not modal:
                self.__accumNames[name] = 0
            if isinstance(name,int):
                name = _ustr(name) # will always return a str, but use _ustr for consistency
            self.__name = name
            if not toklist in (None,'',[]):
                if isinstance(toklist,basestring):
                    toklist = [ toklist ]
                if asList:
                    if isinstance(toklist,ParseResults):
                        self[name] = _ParseResultsWithOffset(toklist.copy(),-1)
                    else:
                        self[name] = _ParseResultsWithOffset(ParseResults(toklist[0]),-1)
                    self[name].__name = name
                else:
                    try:
                        self[name] = toklist[0]
                    except (KeyError,TypeError):
                        self[name] = toklist

    def __getitem__( self, i ):
        if isinstance( i, (int,slice) ):
            return self.__toklist[i]
        else:
            # string key: modal names return only the last match,
            # accumulating names return all matches as a ParseResults
            if i not in self.__accumNames:
                return self.__tokdict[i][-1][0]
            else:
                return ParseResults([ v[0] for v in self.__tokdict[i] ])

    def __setitem__( self, k, v ):
        if isinstance(v,_ParseResultsWithOffset):
            self.__tokdict[k] = self.__tokdict.get(k,list()) + [v]
            sub = v[0]
        elif isinstance(k,int):
            self.__toklist[k] = v
            sub = v
        else:
            self.__tokdict[k] = self.__tokdict.get(k,list()) + [_ParseResultsWithOffset(v,0)]
            sub = v
        if isinstance(sub,ParseResults):
            # weak ref avoids a parent<->child reference cycle
            sub.__parent = wkref(self)

    def __delitem__( self, i ):
        if isinstance(i,(int,slice)):
            mylen = len( self.__toklist )
            del self.__toklist[i]
            # convert int to slice
            if isinstance(i, int):
                if i < 0:
                    i += mylen
                i = slice(i, i+1)
            # get removed indices
            removed = list(range(*i.indices(mylen)))
            removed.reverse()
            # fixup indices in token dictionary
            for name in self.__tokdict:
                occurrences = self.__tokdict[name]
                for j in removed:
                    for k, (value, position) in enumerate(occurrences):
                        occurrences[k] = _ParseResultsWithOffset(value, position - (position > j))
        else:
            del self.__tokdict[i]

    def __contains__( self, k ):
        return k in self.__tokdict

    def __len__( self ): return len( self.__toklist )
    def __bool__(self): return len( self.__toklist ) > 0
    __nonzero__ = __bool__
    def __iter__( self ): return iter( self.__toklist )
    def __reversed__( self ): return iter( reversed(self.__toklist) )
    def keys( self ):
        """Returns all named result keys."""
        return self.__tokdict.keys()

    def pop( self, index=-1 ):
        """Removes and returns item at specified index (default=last).
           Will work with either numeric indices or dict-key indicies."""
        ret = self[index]
        del self[index]
        return ret

    def get(self, key, defaultValue=None):
        """Returns named result matching the given key, or if there is no
           such name, then returns the given defaultValue or None if no
           defaultValue is specified."""
        if key in self:
            return self[key]
        else:
            return defaultValue

    def insert( self, index, insStr ):
        """Inserts new element at location index in the list of parsed tokens."""
        self.__toklist.insert(index, insStr)
        # fixup indices in token dictionary
        for name in self.__tokdict:
            occurrences = self.__tokdict[name]
            for k, (value, position) in enumerate(occurrences):
                # BUGFIX: was "position > j" with j undefined (NameError when any
                # named results exist); offsets at or past the insertion point
                # must shift by one, mirroring the fixup loop in __delitem__.
                occurrences[k] = _ParseResultsWithOffset(value, position + (position > index))

    def items( self ):
        """Returns all named result keys and values as a list of tuples."""
        return [(k,self[k]) for k in self.__tokdict]

    def values( self ):
        """Returns all named result values."""
        return [ v[-1][0] for v in self.__tokdict.values() ]

    def __getattr__( self, name ):
        if name not in self.__slots__:
            if name in self.__tokdict:
                if name not in self.__accumNames:
                    return self.__tokdict[name][-1][0]
                else:
                    return ParseResults([ v[0] for v in self.__tokdict[name] ])
            else:
                # unknown names read as empty string, not AttributeError
                return ""
        # a slot name that has not been assigned yet
        return None

    def __add__( self, other ):
        ret = self.copy()
        ret += other
        return ret

    def __iadd__( self, other ):
        if other.__tokdict:
            offset = len(self.__toklist)
            # negative offsets are sentinels and stay negative; others shift
            addoffset = ( lambda a: (a<0 and offset) or (a+offset) )
            otheritems = other.__tokdict.items()
            otherdictitems = [(k, _ParseResultsWithOffset(v[0],addoffset(v[1])) )
                                for (k,vlist) in otheritems for v in vlist]
            for k,v in otherdictitems:
                self[k] = v
                if isinstance(v[0],ParseResults):
                    v[0].__parent = wkref(self)
        self.__toklist += other.__toklist
        self.__accumNames.update( other.__accumNames )
        del other
        return self

    def __repr__( self ):
        return "(%s, %s)" % ( repr( self.__toklist ), repr( self.__tokdict ) )

    def __str__( self ):
        out = "["
        sep = ""
        for i in self.__toklist:
            if isinstance(i, ParseResults):
                out += sep + _ustr(i)
            else:
                out += sep + repr(i)
            sep = ", "
        out += "]"
        return out

    def _asStringList( self, sep='' ):
        out = []
        for item in self.__toklist:
            if out and sep:
                out.append(sep)
            if isinstance( item, ParseResults ):
                out += item._asStringList()
            else:
                out.append( _ustr(item) )
        return out

    def asList( self ):
        """Returns the parse results as a nested list of matching tokens, all converted to strings."""
        out = []
        for res in self.__toklist:
            if isinstance(res,ParseResults):
                out.append( res.asList() )
            else:
                out.append( res )
        return out

    def asDict( self ):
        """Returns the named parse results as dictionary."""
        return dict( self.items() )

    def copy( self ):
        """Returns a new copy of a ParseResults object."""
        ret = ParseResults( self.__toklist )
        ret.__tokdict = self.__tokdict.copy()
        ret.__parent = self.__parent
        ret.__accumNames.update( self.__accumNames )
        ret.__name = self.__name
        return ret

    def asXML( self, doctag=None, namedItemsOnly=False, indent="", formatted=True ):
        """Returns the parse results as XML. Tags are created for tokens and lists that have defined results names."""
        nl = "\n"
        out = []
        # invert the name->(value,offset) mapping to offset->name
        namedItems = dict( [ (v[1],k) for (k,vlist) in self.__tokdict.items()
                                                            for v in vlist ] )
        nextLevelIndent = indent + "  "
        # collapse out indents if formatting is not desired
        if not formatted:
            indent = ""
            nextLevelIndent = ""
            nl = ""
        selfTag = None
        if doctag is not None:
            selfTag = doctag
        else:
            if self.__name:
                selfTag = self.__name
        if not selfTag:
            if namedItemsOnly:
                return ""
            else:
                selfTag = "ITEM"
        out += [ nl, indent, "<", selfTag, ">" ]
        worklist = self.__toklist
        for i,res in enumerate(worklist):
            if isinstance(res,ParseResults):
                if i in namedItems:
                    out += [ res.asXML(namedItems[i],
                                        namedItemsOnly and doctag is None,
                                        nextLevelIndent,
                                        formatted)]
                else:
                    out += [ res.asXML(None,
                                        namedItemsOnly and doctag is None,
                                        nextLevelIndent,
                                        formatted)]
            else:
                # individual token, see if there is a name for it
                resTag = None
                if i in namedItems:
                    resTag = namedItems[i]
                if not resTag:
                    if namedItemsOnly:
                        continue
                    else:
                        resTag = "ITEM"
                xmlBodyText = xml.sax.saxutils.escape(_ustr(res))
                out += [ nl, nextLevelIndent, "<", resTag, ">",
                                                xmlBodyText,
                                                "</", resTag, ">" ]
        out += [ nl, indent, "</", selfTag, ">" ]
        return "".join(out)

    def __lookup(self,sub):
        # reverse lookup: find the results name under which sub is stored
        for k,vlist in self.__tokdict.items():
            for v,loc in vlist:
                if sub is v:
                    return k
        return None

    def getName(self):
        """Returns the results name for this token expression."""
        if self.__name:
            return self.__name
        elif self.__parent:
            par = self.__parent()
            if par:
                return par.__lookup(self)
            else:
                return None
        elif (len(self) == 1 and
               len(self.__tokdict) == 1 and
               self.__tokdict.values()[0][0][1] in (0,-1)):
            return self.__tokdict.keys()[0]
        else:
            return None

    def dump(self,indent='',depth=0):
        """Diagnostic method for listing out the contents of a ParseResults.
           Accepts an optional indent argument so that this string can be embedded
           in a nested display of other data."""
        out = []
        out.append( indent+_ustr(self.asList()) )
        keys = self.items()
        keys.sort()
        for k,v in keys:
            if out:
                out.append('\n')
            out.append( "%s%s- %s: " % (indent,('  '*depth), k) )
            if isinstance(v,ParseResults):
                if v.keys():
                    #~ out.append('\n')
                    out.append( v.dump(indent,depth+1) )
                    #~ out.append('\n')
                else:
                    out.append(_ustr(v))
            else:
                out.append(_ustr(v))
        #~ out.append('\n')
        return "".join(out)

    # add support for pickle protocol
    def __getstate__(self):
        return ( self.__toklist,
                 ( self.__tokdict.copy(),
                   # weakrefs cannot be pickled; store the referent (or None)
                   self.__parent is not None and self.__parent() or None,
                   self.__accumNames,
                   self.__name ) )

    def __setstate__(self,state):
        self.__toklist = state[0]
        self.__tokdict, \
        par, \
        inAccumNames, \
        self.__name = state[1]
        self.__accumNames = {}
        self.__accumNames.update(inAccumNames)
        if par is not None:
            self.__parent = wkref(par)
        else:
            self.__parent = None
def col (loc,strg):
    """Returns current column within a string, counting newlines as line separators.
   The first column is number 1.

   Note: the default parsing behavior is to expand tabs in the input string
   before starting the parsing process.  See L{I{ParserElement.parseString}<ParserElement.parseString>} for more information
   on parsing strings containing <TAB>s, and suggested methods to maintain a
   consistent view of the parsed string, the parse location, and line and column
   positions within the parsed string.
   """
    # Standing on a newline counts as column 1 of the next line.
    if loc < len(strg) and strg[loc] == '\n':
        return 1
    return loc - strg.rfind("\n", 0, loc)
def lineno(loc,strg):
    """Returns current line number within a string, counting newlines as line separators.
   The first line is number 1.

   Note: the default parsing behavior is to expand tabs in the input string
   before starting the parsing process.  See L{I{ParserElement.parseString}<ParserElement.parseString>} for more information
   on parsing strings containing <TAB>s, and suggested methods to maintain a
   consistent view of the parsed string, the parse location, and line and column
   positions within the parsed string.
   """
    # One plus the number of newlines preceding loc.
    return 1 + strg.count("\n", 0, loc)
def line( loc, strg ):
    """Returns the line of text containing loc within a string, counting newlines as line separators.
       """
    # Line runs from just past the previous newline to the next one (or EOS).
    start = strg.rfind("\n", 0, loc) + 1
    end = strg.find("\n", loc)
    if end > 0:
        return strg[start:end]
    return strg[start:]
# Default debug callbacks installed by ParserElement.setDebug(): called when a
# match is attempted, succeeds, or raises, respectively.
def _defaultStartDebugAction( instring, loc, expr ):
    print ("Match " + _ustr(expr) + " at loc " + _ustr(loc) + "(%d,%d)" % ( lineno(loc,instring), col(loc,instring) ))
def _defaultSuccessDebugAction( instring, startloc, endloc, expr, toks ):
    print ("Matched " + _ustr(expr) + " -> " + str(toks.asList()))
def _defaultExceptionDebugAction( instring, loc, expr, exc ):
    print ("Exception raised:" + _ustr(exc))
def nullDebugAction(*args):
    """'Do-nothing' debug action, to suppress debugging output during parsing."""
    pass
class ParserElement(object):
"""Abstract base level parser element class."""
DEFAULT_WHITE_CHARS = " \n\t\r"
    def setDefaultWhitespaceChars( chars ):
        """Overrides the default whitespace chars used by all subsequently
           created ParserElement instances (existing instances keep the
           whitespace set they copied at construction time).
        """
        ParserElement.DEFAULT_WHITE_CHARS = chars
    setDefaultWhitespaceChars = staticmethod(setDefaultWhitespaceChars)
    def __init__( self, savelist=False ):
        """Initialize the common state shared by every parser expression.

        savelist -- whether results from this element are saved as a list
                    (stored in saveAsList; consumed by subclasses).
        """
        self.parseAction = list()       # callables run on a successful match
        self.failAction = None          # callable run when this element fails
        #~ self.name = "<unknown>"  # don't define self.name, let subclasses try/except upcall
        self.strRepr = None             # cached string representation
        self.resultsName = None
        self.saveAsList = savelist
        self.skipWhitespace = True
        # copy of the class-wide default, so later changes to the default
        # do not silently affect this instance
        self.whiteChars = ParserElement.DEFAULT_WHITE_CHARS
        self.copyDefaultWhiteChars = True
        self.mayReturnEmpty = False # used when checking for left-recursion
        self.keepTabs = False
        self.ignoreExprs = list()       # expressions (e.g. comments) to skip over
        self.debug = False
        self.streamlined = False
        self.mayIndexError = True # used to optimize exception handling for subclasses that don't advance parse index
        self.errmsg = ""
        self.modalResults = True # used to mark results names as modal (report only last) or cumulative (list all)
        self.debugActions = ( None, None, None ) #custom debug actions
        self.re = None                  # compiled regex, for regex-backed subclasses
        self.callPreparse = True # used to avoid redundant calls to preParse
        self.callDuringTry = False
    def copy( self ):
        """Make a copy of this ParserElement.  Useful for defining different parse actions
           for the same parsing pattern, using copies of the original parse element."""
        # Shallow copy, then re-copy the mutable lists so the copy's parse
        # actions / ignore expressions can diverge from the original's.
        cpy = copy.copy( self )
        cpy.parseAction = self.parseAction[:]
        cpy.ignoreExprs = self.ignoreExprs[:]
        if self.copyDefaultWhiteChars:
            # pick up the *current* default, not the value frozen at __init__
            cpy.whiteChars = ParserElement.DEFAULT_WHITE_CHARS
        return cpy
    def setName( self, name ):
        """Define name for this expression, for use in debugging.
           Returns self, to allow chained construction."""
        self.name = name
        self.errmsg = "Expected " + self.name
        # keep any pre-built exception's message in sync with the new name
        if hasattr(self,"exception"):
            self.exception.msg = self.errmsg
        return self
    def setResultsName( self, name, listAllMatches=False ):
        """Define name for referencing matching tokens as a nested attribute
           of the returned parse results.
           NOTE: this returns a *copy* of the original ParserElement object;
           this is so that the client can define a basic element, such as an
           integer, and reference it in multiple places with different names.

           listAllMatches -- when True the name accumulates every match
           instead of reporting only the last one (non-modal results).
        """
        newself = self.copy()
        newself.resultsName = name
        newself.modalResults = not listAllMatches
        return newself
def setBreak(self,breakFlag = True):
"""Method to invoke the Python pdb debugger when this element is
about to be parsed. Set breakFlag to True to enable, False to
disable.
"""
if breakFlag:
_parseMethod = self._parse
def breaker(instring, loc, doActions=True, callPreParse=True):
import pdb
pdb.set_trace()
_parseMethod( instring, loc, doActions, callPreParse )
breaker._originalParseMethod = _parseMethod
self._parse = breaker
else:
if hasattr(self._parse,"_originalParseMethod"):
self._parse = self._parse._originalParseMethod
return self
    def _normalizeParseActionArgs( f ):
        """Internal method used to decorate parse actions that take fewer than 3 arguments,
           so that all parse actions can be called as f(s,l,t).

           Uses code-object introspection (co_argcount / co_flags) to count
           f's positional parameters, then wraps f so the extra leading
           arguments are dropped.  Callables with *args pass through as-is.
        """
        STAR_ARGS = 4   # CO_VARARGS flag bit on a code object
        try:
            restore = None
            if isinstance(f,type):
                # for a class, inspect its __init__ (the action is the class itself)
                restore = f
                f = f.__init__
            if not _PY3K:
                codeObj = f.func_code
            else:
                # NOTE(review): plain Py3 functions expose __code__, not .code;
                # this attribute lookup presumably falls through to the
                # AttributeError handler below on Py3 -- confirm intended.
                codeObj = f.code
            if codeObj.co_flags & STAR_ARGS:
                return f
            numargs = codeObj.co_argcount
            if not _PY3K:
                if hasattr(f,"im_self"):
                    numargs -= 1    # bound method: don't count self
            else:
                if hasattr(f,"__self__"):
                    numargs -= 1
            if restore:
                f = restore
        except AttributeError:
            try:
                if not _PY3K:
                    call_im_func_code = f.__call__.im_func.func_code
                else:
                    call_im_func_code = f.__code__
                # not a function, must be a callable object, get info from the
                # im_func binding of its bound __call__ method
                if call_im_func_code.co_flags & STAR_ARGS:
                    return f
                numargs = call_im_func_code.co_argcount
                if not _PY3K:
                    if hasattr(f.__call__,"im_self"):
                        numargs -= 1
                else:
                    if hasattr(f.__call__,"__self__"):
                        # NOTE(review): deliberately subtracts 0 here, unlike the
                        # parallel branches -- on Py3, __code__.co_argcount of a
                        # plain function already excludes self; verify intent
                        # before "fixing".
                        numargs -= 0
            except AttributeError:
                if not _PY3K:
                    call_func_code = f.__call__.func_code
                else:
                    call_func_code = f.__call__.__code__
                # not a bound method, get info directly from __call__ method
                if call_func_code.co_flags & STAR_ARGS:
                    return f
                numargs = call_func_code.co_argcount
                if not _PY3K:
                    if hasattr(f.__call__,"im_self"):
                        numargs -= 1
                else:
                    if hasattr(f.__call__,"__self__"):
                        numargs -= 1
        #~ print ("adding function %s with %d args" % (f.func_name,numargs))
        if numargs == 3:
            return f
        else:
            # wrap f so it can always be invoked as f(s, l, t)
            if numargs > 3:
                def tmp(s,l,t):
                    return f(f.__call__.__self__, s,l,t)
            if numargs == 2:
                def tmp(s,l,t):
                    return f(l,t)
            elif numargs == 1:
                def tmp(s,l,t):
                    return f(t)
            else: #~ numargs == 0:
                def tmp(s,l,t):
                    return f()
            # preserve the wrapped callable's identity metadata where possible
            try:
                tmp.__name__ = f.__name__
            except (AttributeError,TypeError):
                # no need for special handling if attribute doesnt exist
                pass
            try:
                tmp.__doc__ = f.__doc__
            except (AttributeError,TypeError):
                # no need for special handling if attribute doesnt exist
                pass
            try:
                tmp.__dict__.update(f.__dict__)
            except (AttributeError,TypeError):
                # no need for special handling if attribute doesnt exist
                pass
            return tmp
    _normalizeParseActionArgs = staticmethod(_normalizeParseActionArgs)
    def setParseAction( self, *fns, **kwargs ):
        """Define action to perform when successfully matching parse element definition.
           Parse action fn is a callable method with 0-3 arguments, called as fn(s,loc,toks),
           fn(loc,toks), fn(toks), or just fn(), where:
            - s   = the original string being parsed (see note below)
            - loc = the location of the matching substring
            - toks = a list of the matched tokens, packaged as a ParseResults object
           If the functions in fns modify the tokens, they can return them as the return
           value from fn, and the modified list of tokens will replace the original.
           Otherwise, fn does not need to return any value.
           Note: the default parsing behavior is to expand tabs in the input string
           before starting the parsing process.  See L{I{parseString}<parseString>} for more information
           on parsing strings containing <TAB>s, and suggested methods to maintain a
           consistent view of the parsed string, the parse location, and line and column
           positions within the parsed string.
        """
        # each action is wrapped so that it can always be invoked as fn(s,loc,toks)
        self.parseAction = list(map(self._normalizeParseActionArgs, list(fns)))
        # callDuringTry=True also runs the actions during lookahead/alternation attempts
        self.callDuringTry = ("callDuringTry" in kwargs and kwargs["callDuringTry"])
        return self
    def addParseAction( self, *fns, **kwargs ):
        """Add parse action to expression's list of parse actions. See L{I{setParseAction}<setParseAction>}."""
        self.parseAction += list(map(self._normalizeParseActionArgs, list(fns)))
        self.callDuringTry = self.callDuringTry or ("callDuringTry" in kwargs and kwargs["callDuringTry"])
        return self
    def setFailAction( self, fn ):
        """Define action to perform if parsing fails at this expression.
           Fail action fn is a callable function that takes the arguments
           fn(s,loc,expr,err) where:
            - s = string being parsed
            - loc = location where expression match was attempted and failed
            - expr = the parse expression that failed
            - err = the exception thrown
           The function returns no value.  It may throw ParseFatalException
           if it is desired to stop parsing immediately."""
        self.failAction = fn
        return self
    def _skipIgnorables( self, instring, loc ):
        # Advance loc past any text matched by expressions registered via
        # ignore(); repeat passes until a full sweep matches nothing, since
        # one ignorable may expose another.
        exprsFound = True
        while exprsFound:
            exprsFound = False
            for e in self.ignoreExprs:
                try:
                    while 1:
                        loc,dummy = e._parse( instring, loc )
                        exprsFound = True
                except ParseException:
                    pass
        return loc
    def preParse( self, instring, loc ):
        # Default pre-parse step: skip ignorable expressions, then any
        # leading whitespace characters, and return the adjusted location.
        if self.ignoreExprs:
            loc = self._skipIgnorables( instring, loc )
        if self.skipWhitespace:
            wt = self.whiteChars
            instrlen = len(instring)
            while loc < instrlen and instring[loc] in wt:
                loc += 1
        return loc
    def parseImpl( self, instring, loc, doActions=True ):
        # Default implementation matches nothing and consumes no input;
        # subclasses override with their actual matching logic.
        return loc, []
    def postParse( self, instring, loc, tokenlist ):
        # Default post-parse hook: pass tokens through unchanged.
        return tokenlist
    #~ @profile
    def _parseNoCache( self, instring, loc, doActions=True, callPreParse=True ):
        # Core match driver: pre-parse (whitespace/ignorables), run
        # parseImpl, post-parse, then apply parse actions.  The debugging
        # path duplicates the normal path so the hot path stays lean.
        debugging = ( self.debug ) #and doActions )
        if debugging or self.failAction:
            #~ print ("Match",self,"at loc",loc,"(%d,%d)" % ( lineno(loc,instring), col(loc,instring) ))
            if (self.debugActions[0] ):
                self.debugActions[0]( instring, loc, self )
            if callPreParse and self.callPreparse:
                preloc = self.preParse( instring, loc )
            else:
                preloc = loc
            tokensStart = loc
            try:
                try:
                    loc,tokens = self.parseImpl( instring, preloc, doActions )
                except IndexError:
                    # ran off the end of the input while matching
                    raise ParseException( instring, len(instring), self.errmsg, self )
            except ParseBaseException, err:
                #~ print ("Exception raised:", err)
                if self.debugActions[2]:
                    self.debugActions[2]( instring, tokensStart, self, err )
                if self.failAction:
                    self.failAction( instring, tokensStart, self, err )
                raise
        else:
            if callPreParse and self.callPreparse:
                preloc = self.preParse( instring, loc )
            else:
                preloc = loc
            tokensStart = loc
            # mayIndexError elements can raise IndexError at end-of-string;
            # convert that to a ParseException for uniform error handling
            if self.mayIndexError or loc >= len(instring):
                try:
                    loc,tokens = self.parseImpl( instring, preloc, doActions )
                except IndexError:
                    raise ParseException( instring, len(instring), self.errmsg, self )
            else:
                loc,tokens = self.parseImpl( instring, preloc, doActions )
        tokens = self.postParse( instring, loc, tokens )
        retTokens = ParseResults( tokens, self.resultsName, asList=self.saveAsList, modal=self.modalResults )
        if self.parseAction and (doActions or self.callDuringTry):
            if debugging:
                try:
                    for fn in self.parseAction:
                        tokens = fn( instring, tokensStart, retTokens )
                        if tokens is not None:
                            # action returned replacement tokens
                            retTokens = ParseResults( tokens,
                                                      self.resultsName,
                                                      asList=self.saveAsList and isinstance(tokens,(ParseResults,list)),
                                                      modal=self.modalResults )
                except ParseBaseException, err:
                    #~ print "Exception raised in user parse action:", err
                    if (self.debugActions[2] ):
                        self.debugActions[2]( instring, tokensStart, self, err )
                    raise
            else:
                for fn in self.parseAction:
                    tokens = fn( instring, tokensStart, retTokens )
                    if tokens is not None:
                        retTokens = ParseResults( tokens,
                                                  self.resultsName,
                                                  asList=self.saveAsList and isinstance(tokens,(ParseResults,list)),
                                                  modal=self.modalResults )
        if debugging:
            #~ print ("Matched",self,"->",retTokens.asList())
            if (self.debugActions[1] ):
                self.debugActions[1]( instring, tokensStart, loc, self, retTokens )
        return loc, retTokens
    def tryParse( self, instring, loc ):
        # Attempt a match without running parse actions; returns only the
        # post-match location.  Used for lookahead tests.
        try:
            return self._parse( instring, loc, doActions=False )[0]
        except ParseFatalException:
            raise ParseException( instring, loc, self.errmsg, self)
    # this method gets repeatedly called during backtracking with the same arguments -
    # we can cache these arguments and save ourselves the trouble of re-parsing the contained expression
    def _parseCache( self, instring, loc, doActions=True, callPreParse=True ):
        # Packrat-memoized variant of _parseNoCache: both successful results
        # and raised parse exceptions are cached and replayed.
        lookup = (self,instring,loc,callPreParse,doActions)
        if lookup in ParserElement._exprArgCache:
            value = ParserElement._exprArgCache[ lookup ]
            if isinstance(value,Exception):
                # replay the cached failure
                raise value
            return value
        else:
            try:
                value = self._parseNoCache( instring, loc, doActions, callPreParse )
                # cache a copy of the tokens, since callers may mutate them
                ParserElement._exprArgCache[ lookup ] = (value[0],value[1].copy())
                return value
            except ParseBaseException, pe:
                ParserElement._exprArgCache[ lookup ] = pe
                raise
    # by default parsing is uncached; enablePackrat() rebinds _parse to _parseCache
    _parse = _parseNoCache
    # argument cache for optimizing repeated calls when backtracking through recursive expressions
    _exprArgCache = {}
    def resetCache():
        ParserElement._exprArgCache.clear()
    resetCache = staticmethod(resetCache)
    _packratEnabled = False
    def enablePackrat():
        """Enables "packrat" parsing, which adds memoizing to the parsing logic.
           Repeated parse attempts at the same string location (which happens
           often in many complex grammars) can immediately return a cached value,
           instead of re-executing parsing/validating code.  Memoizing is done of
           both valid results and parsing exceptions.
           This speedup may break existing programs that use parse actions that
           have side-effects.  For this reason, packrat parsing is disabled when
           you first import pyparsing.  To activate the packrat feature, your
           program must call the class method ParserElement.enablePackrat().  If
           your program uses psyco to "compile as you go", you must call
           enablePackrat before calling psyco.full().  If you do not do this,
           Python will crash.  For best results, call enablePackrat() immediately
           after importing pyparsing.
        """
        if not ParserElement._packratEnabled:
            ParserElement._packratEnabled = True
            ParserElement._parse = ParserElement._parseCache
    enablePackrat = staticmethod(enablePackrat)
    def parseString( self, instring, parseAll=False ):
        """Execute the parse expression with the given string.
           This is the main interface to the client code, once the complete
           expression has been built.

           If you want the grammar to require that the entire input string be
           successfully parsed, then set parseAll to True (equivalent to ending
           the grammar with StringEnd()).

           Note: parseString implicitly calls expandtabs() on the input string,
           in order to report proper column numbers in parse actions.
           If the input string contains tabs and
           the grammar uses parse actions that use the loc argument to index into the
           string being parsed, you can ensure you have a consistent view of the input
           string by:
            - calling parseWithTabs on your grammar before calling parseString
              (see L{I{parseWithTabs}<parseWithTabs>})
            - define your parse action using the full (s,loc,toks) signature, and
              reference the input string using the parse action's s argument
            - explictly expand the tabs in your input string before calling
              parseString
        """
        # clear the packrat cache: it is keyed on string identity, which is
        # not valid across separate parse runs
        ParserElement.resetCache()
        if not self.streamlined:
            self.streamline()
            #~ self.saveAsList = True
        for e in self.ignoreExprs:
            e.streamline()
        if not self.keepTabs:
            instring = instring.expandtabs()
        loc, tokens = self._parse( instring, 0 )
        if parseAll:
            # require the match to consume the remainder of the input
            StringEnd()._parse( instring, loc )
        return tokens
    def scanString( self, instring, maxMatches=_MAX_INT ):
        """Scan the input string for expression matches.  Each match will return the
           matching tokens, start location, and end location.  May be called with optional
           maxMatches argument, to clip scanning after 'n' matches are found.

           Note that the start and end locations are reported relative to the string
           being parsed.  See L{I{parseString}<parseString>} for more information on parsing
           strings with embedded tabs."""
        if not self.streamlined:
            self.streamline()
        for e in self.ignoreExprs:
            e.streamline()

        if not self.keepTabs:
            instring = _ustr(instring).expandtabs()
        instrlen = len(instring)
        loc = 0
        # hoist method lookups out of the scanning loop
        preparseFn = self.preParse
        parseFn = self._parse
        ParserElement.resetCache()
        matches = 0
        while loc <= instrlen and matches < maxMatches:
            try:
                preloc = preparseFn( instring, loc )
                nextLoc,tokens = parseFn( instring, preloc, callPreParse=False )
            except ParseException:
                # no match here: slide forward one character and retry
                loc = preloc+1
            else:
                matches += 1
                yield tokens, preloc, nextLoc
                loc = nextLoc
    def transformString( self, instring ):
        """Extension to scanString, to modify matching text with modified tokens that may
           be returned from a parse action.  To use transformString, define a grammar and
           attach a parse action to it that modifies the returned token list.
           Invoking transformString() on a target string will then scan for matches,
           and replace the matched text patterns according to the logic in the parse
           action.  transformString() returns the resulting transformed string."""
        out = []
        lastE = 0
        # force preservation of <TAB>s, to minimize unwanted transformation of string, and to
        # keep string locs straight between transformString and scanString
        self.keepTabs = True
        for t,s,e in self.scanString( instring ):
            # copy the unmatched text preceding this match verbatim
            out.append( instring[lastE:s] )
            if t:
                if isinstance(t,ParseResults):
                    out += t.asList()
                elif isinstance(t,list):
                    out += t
                else:
                    out.append(t)
            lastE = e
        out.append(instring[lastE:])
        return "".join(map(_ustr,out))
    def searchString( self, instring, maxMatches=_MAX_INT ):
        """Another extension to scanString, simplifying the access to the tokens found
           to match the given parse expression.  May be called with optional
           maxMatches argument, to clip searching after 'n' matches are found.
        """
        return ParseResults([ t for t,s,e in self.scanString( instring, maxMatches ) ])
    def __add__(self, other ):
        """Implementation of + operator - returns And"""
        # bare strings are promoted to Literal matchers in all operators below
        if isinstance( other, basestring ):
            other = Literal( other )
        if not isinstance( other, ParserElement ):
            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
                    SyntaxWarning, stacklevel=2)
            return None
        return And( [ self, other ] )
    def __radd__(self, other ):
        """Implementation of + operator when left operand is not a ParserElement"""
        if isinstance( other, basestring ):
            other = Literal( other )
        if not isinstance( other, ParserElement ):
            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
                    SyntaxWarning, stacklevel=2)
            return None
        return other + self
    def __sub__(self, other):
        """Implementation of - operator, returns And with error stop"""
        if isinstance( other, basestring ):
            other = Literal( other )
        if not isinstance( other, ParserElement ):
            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
                    SyntaxWarning, stacklevel=2)
            return None
        # the _ErrorStop marker makes a failure after this point fatal
        # (no backtracking), improving error reporting
        return And( [ self, And._ErrorStop(), other ] )
    def __rsub__(self, other ):
        """Implementation of - operator when left operand is not a ParserElement"""
        if isinstance( other, basestring ):
            other = Literal( other )
        if not isinstance( other, ParserElement ):
            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
                    SyntaxWarning, stacklevel=2)
            return None
        return other - self
    def __mul__(self,other):
        """Implementation of * operator:
           expr*n is equivalent to n copies of expr And'ed together;
           expr*(m,n) matches at least m and at most n copies;
           expr*(m,None) matches m or more; expr*(None,n) matches 0 to n.
        """
        if isinstance(other,int):
            minElements, optElements = other,0
        elif isinstance(other,tuple):
            # normalize the tuple to exactly (min, max)
            if len(other)==0:
                other = (None,None)
            elif len(other)==1:
                other = (other[0],None)
            if len(other)==2:
                if other[0] is None:
                    other = (0, other[1])
                if isinstance(other[0],int) and other[1] is None:
                    # open-ended repetition: (n, None)
                    if other[0] == 0:
                        return ZeroOrMore(self)
                    if other[0] == 1:
                        return OneOrMore(self)
                    else:
                        return self*other[0] + ZeroOrMore(self)
                elif isinstance(other[0],int) and isinstance(other[1],int):
                    minElements, optElements = other
                    optElements -= minElements
                else:
                    raise TypeError("cannot multiply 'ParserElement' and ('%s','%s') objects", type(other[0]),type(other[1]))
            else:
                raise TypeError("can only multiply 'ParserElement' and int or (int,int) objects")
        else:
            raise TypeError("cannot multiply 'ParserElement' and '%s' objects", type(other))

        if minElements < 0:
            raise ValueError("cannot multiply ParserElement by negative value")
        if optElements < 0:
            raise ValueError("second tuple value must be greater or equal to first tuple value")
        if minElements == optElements == 0:
            raise ValueError("cannot multiply ParserElement by 0 or (0,0)")

        if (optElements):
            # build the optional tail as nested Optionals so that partial
            # matches up to the maximum count are accepted
            def makeOptionalList(n):
                if n>1:
                    return Optional(self + makeOptionalList(n-1))
                else:
                    return Optional(self)
            if minElements:
                if minElements == 1:
                    ret = self + makeOptionalList(optElements)
                else:
                    ret = And([self]*minElements) + makeOptionalList(optElements)
            else:
                ret = makeOptionalList(optElements)
        else:
            if minElements == 1:
                ret = self
            else:
                ret = And([self]*minElements)
        return ret
    def __rmul__(self, other):
        return self.__mul__(other)
    def __or__(self, other ):
        """Implementation of | operator - returns MatchFirst"""
        if isinstance( other, basestring ):
            other = Literal( other )
        if not isinstance( other, ParserElement ):
            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
                    SyntaxWarning, stacklevel=2)
            return None
        return MatchFirst( [ self, other ] )
    def __ror__(self, other ):
        """Implementation of | operator when left operand is not a ParserElement"""
        if isinstance( other, basestring ):
            other = Literal( other )
        if not isinstance( other, ParserElement ):
            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
                    SyntaxWarning, stacklevel=2)
            return None
        return other | self
    def __xor__(self, other ):
        """Implementation of ^ operator - returns Or"""
        if isinstance( other, basestring ):
            other = Literal( other )
        if not isinstance( other, ParserElement ):
            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
                    SyntaxWarning, stacklevel=2)
            return None
        return Or( [ self, other ] )
    def __rxor__(self, other ):
        """Implementation of ^ operator when left operand is not a ParserElement"""
        if isinstance( other, basestring ):
            other = Literal( other )
        if not isinstance( other, ParserElement ):
            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
                    SyntaxWarning, stacklevel=2)
            return None
        return other ^ self
    def __and__(self, other ):
        """Implementation of & operator - returns Each"""
        if isinstance( other, basestring ):
            other = Literal( other )
        if not isinstance( other, ParserElement ):
            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
                    SyntaxWarning, stacklevel=2)
            return None
        return Each( [ self, other ] )
    def __rand__(self, other ):
        """Implementation of & operator when left operand is not a ParserElement"""
        if isinstance( other, basestring ):
            other = Literal( other )
        if not isinstance( other, ParserElement ):
            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
                    SyntaxWarning, stacklevel=2)
            return None
        return other & self
    def __invert__( self ):
        """Implementation of ~ operator - returns NotAny"""
        return NotAny( self )
    def __call__(self, name):
        """Shortcut for setResultsName, with listAllMatches=default::
             userdata = Word(alphas).setResultsName("name") + Word(nums+"-").setResultsName("socsecno")
           could be written as::
             userdata = Word(alphas)("name") + Word(nums+"-")("socsecno")
        """
        return self.setResultsName(name)
    def suppress( self ):
        """Suppresses the output of this ParserElement; useful to keep punctuation from
           cluttering up returned output.
        """
        return Suppress( self )
    def leaveWhitespace( self ):
        """Disables the skipping of whitespace before matching the characters in the
           ParserElement's defined pattern.  This is normally only used internally by
           the pyparsing module, but may be needed in some whitespace-sensitive grammars.
        """
        self.skipWhitespace = False
        return self
    def setWhitespaceChars( self, chars ):
        """Overrides the default whitespace chars
        """
        self.skipWhitespace = True
        self.whiteChars = chars
        # this element now owns its whitespace definition; do not inherit
        # the class-level default when copied
        self.copyDefaultWhiteChars = False
        return self
    def parseWithTabs( self ):
        """Overrides default behavior to expand <TAB>s to spaces before parsing the input string.
           Must be called before parseString when the input grammar contains elements that
           match <TAB> characters."""
        self.keepTabs = True
        return self
    def ignore( self, other ):
        """Define expression to be ignored (e.g., comments) while doing pattern
           matching; may be called repeatedly, to define multiple comment or other
           ignorable patterns.
        """
        if isinstance( other, Suppress ):
            # avoid registering the same Suppress expression twice
            if other not in self.ignoreExprs:
                self.ignoreExprs.append( other )
        else:
            self.ignoreExprs.append( Suppress( other ) )
        return self
    def setDebugActions( self, startAction, successAction, exceptionAction ):
        """Enable display of debugging messages while doing pattern matching."""
        # any action left as None falls back to the module default
        self.debugActions = (startAction or _defaultStartDebugAction,
                             successAction or _defaultSuccessDebugAction,
                             exceptionAction or _defaultExceptionDebugAction)
        self.debug = True
        return self
    def setDebug( self, flag=True ):
        """Enable display of debugging messages while doing pattern matching.
           Set flag to True to enable, False to disable."""
        if flag:
            self.setDebugActions( _defaultStartDebugAction, _defaultSuccessDebugAction, _defaultExceptionDebugAction )
        else:
            self.debug = False
        return self
    def __str__( self ):
        return self.name
    def __repr__( self ):
        return _ustr(self)
    def streamline( self ):
        # Mark the element as streamlined and drop any cached string
        # representation; subclasses extend this with real flattening.
        self.streamlined = True
        self.strRepr = None
        return self
    def checkRecursion( self, parseElementList ):
        # Default: leaf elements cannot be left-recursive; containers override.
        pass
    def validate( self, validateTrace=[] ):
        """Check defined expressions for valid structure, check for infinite recursive definitions."""
        self.checkRecursion( [] )
    def parseFile( self, file_or_filename ):
        """Execute the parse expression on the given file or filename.
           If a filename is specified (instead of a file object),
           the entire file is opened, read, and closed before parsing.
        """
        try:
            file_contents = file_or_filename.read()
        except AttributeError:
            f = open(file_or_filename, "rb")
            file_contents = f.read()
            f.close()
        return self.parseString(file_contents)
    def getException(self):
        # Build a fresh ParseException pre-loaded with this element's errmsg.
        return ParseException("",0,self.errmsg,self)
    def __getattr__(self,aname):
        # Lazily create the reusable exception object on first access;
        # parseImpl implementations mutate and re-raise this instance.
        if aname == "myException":
            self.myException = ret = self.getException();
            return ret;
        else:
            raise AttributeError("no such attribute " + aname)
    def __eq__(self,other):
        # Comparing against a string tests whether the expression matches
        # that string in its entirety.
        if isinstance(other, basestring):
            try:
                (self + StringEnd()).parseString(_ustr(other))
                return True
            except ParseBaseException:
                return False
        else:
            return super(ParserElement,self)==other
    def __hash__(self):
        # hash by identity, consistent with default object equality
        return hash(id(self))
    def __req__(self,other):
        return self == other
class Token(ParserElement):
    """Abstract ParserElement subclass, for defining atomic matching patterns."""
    def __init__( self ):
        # tokens never accumulate sub-results, so savelist is always False
        super(Token,self).__init__( savelist=False )
        #self.myException = ParseException("",0,"",self)
    def setName(self, name):
        # refresh the error message so failures report the new name
        s = super(Token,self).setName(name)
        self.errmsg = "Expected " + self.name
        #s.myException.msg = self.errmsg
        return s
class Empty(Token):
    """A zero-width token: always matches, consuming no input."""
    def __init__(self):
        super(Empty, self).__init__()
        # matching nothing can never index past the end of the input,
        # and by definition yields an empty result
        self.mayIndexError = False
        self.mayReturnEmpty = True
        self.name = "Empty"
class NoMatch(Token):
    """A token that can never match: parseImpl raises unconditionally."""
    def __init__(self):
        super(NoMatch, self).__init__()
        self.name = "NoMatch"
        self.errmsg = "Unmatchable token"
        self.mayIndexError = False
        self.mayReturnEmpty = True

    def parseImpl(self, instring, loc, doActions=True):
        # reuse the element's cached exception, pointing it at the
        # current location and input before raising
        failure = self.myException
        failure.loc = loc
        failure.pstr = instring
        raise failure
class Literal(Token):
    """Token to exactly match a specified string."""
    def __init__( self, matchString ):
        super(Literal,self).__init__()
        self.match = matchString
        self.matchLen = len(matchString)
        try:
            self.firstMatchChar = matchString[0]
        except IndexError:
            # an empty match string is degenerate; degrade to Empty
            warnings.warn("null string passed to Literal; use Empty() instead",
                            SyntaxWarning, stacklevel=2)
            self.__class__ = Empty
        self.name = '"%s"' % _ustr(self.match)
        self.errmsg = "Expected " + self.name
        self.mayReturnEmpty = False
        #self.myException.msg = self.errmsg
        self.mayIndexError = False

    # Performance tuning: this routine gets called a *lot*
    # if this is a single character match string  and the first character matches,
    # short-circuit as quickly as possible, and avoid calling startswith
    #~ @profile
    def parseImpl( self, instring, loc, doActions=True ):
        if (instring[loc] == self.firstMatchChar and
            (self.matchLen==1 or instring.startswith(self.match,loc)) ):
            return loc+self.matchLen, self.match
        #~ raise ParseException( instring, loc, self.errmsg )
        exc = self.myException
        exc.loc = loc
        exc.pstr = instring
        raise exc
# short alias used internally when building helper expressions
_L = Literal
class Keyword(Token):
    """Token to exactly match a specified string as a keyword, that is, it must be
       immediately followed by a non-keyword character.  Compare with Literal::
         Literal("if") will match the leading 'if' in 'ifAndOnlyIf'.
         Keyword("if") will not; it will only match the leading 'if in 'if x=1', or 'if(y==2)'
       Accepts two optional constructor arguments in addition to the keyword string:
       identChars is a string of characters that would be valid identifier characters,
       defaulting to all alphanumerics + "_" and "$"; caseless allows case-insensitive
       matching, default is False.
    """
    DEFAULT_KEYWORD_CHARS = alphanums+"_$"

    def __init__( self, matchString, identChars=DEFAULT_KEYWORD_CHARS, caseless=False ):
        super(Keyword,self).__init__()
        self.match = matchString
        self.matchLen = len(matchString)
        try:
            self.firstMatchChar = matchString[0]
        except IndexError:
            warnings.warn("null string passed to Keyword; use Empty() instead",
                            SyntaxWarning, stacklevel=2)
        self.name = '"%s"' % self.match
        self.errmsg = "Expected " + self.name
        self.mayReturnEmpty = False
        #self.myException.msg = self.errmsg
        self.mayIndexError = False
        self.caseless = caseless
        if caseless:
            # store an uppercased copy for case-insensitive comparison;
            # the original-case match string is still what gets returned
            self.caselessmatch = matchString.upper()
            identChars = identChars.upper()
        self.identChars = _str2dict(identChars)

    def parseImpl( self, instring, loc, doActions=True ):
        # a keyword matches only when it is not embedded in a longer
        # identifier: the characters on both sides must be non-ident chars
        if self.caseless:
            if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and
                 (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) and
                 (loc == 0 or instring[loc-1].upper() not in self.identChars) ):
                return loc+self.matchLen, self.match
        else:
            if (instring[loc] == self.firstMatchChar and
                (self.matchLen==1 or instring.startswith(self.match,loc)) and
                (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen] not in self.identChars) and
                (loc == 0 or instring[loc-1] not in self.identChars) ):
                return loc+self.matchLen, self.match
        #~ raise ParseException( instring, loc, self.errmsg )
        exc = self.myException
        exc.loc = loc
        exc.pstr = instring
        raise exc

    def copy(self):
        # copies revert to the class-level default ident chars
        c = super(Keyword,self).copy()
        c.identChars = Keyword.DEFAULT_KEYWORD_CHARS
        return c

    def setDefaultKeywordChars( chars ):
        """Overrides the default Keyword chars
        """
        Keyword.DEFAULT_KEYWORD_CHARS = chars
    setDefaultKeywordChars = staticmethod(setDefaultKeywordChars)
class CaselessLiteral(Literal):
    """Token to match a specified string, ignoring case of letters.
       Note: the matched results will always be in the case of the given
       match string, NOT the case of the input text.
    """
    def __init__( self, matchString ):
        # self.match is stored uppercased for comparison in parseImpl
        super(CaselessLiteral,self).__init__( matchString.upper() )
        # Preserve the defining literal.
        self.returnString = matchString
        self.name = "'%s'" % self.returnString
        self.errmsg = "Expected " + self.name
        #self.myException.msg = self.errmsg

    def parseImpl( self, instring, loc, doActions=True ):
        if instring[ loc:loc+self.matchLen ].upper() == self.match:
            # return the literal as originally defined, not as found in input
            return loc+self.matchLen, self.returnString
        #~ raise ParseException( instring, loc, self.errmsg )
        exc = self.myException
        exc.loc = loc
        exc.pstr = instring
        raise exc
class CaselessKeyword(Keyword):
    """Caseless version of Keyword: matches the keyword ignoring case, but
       still requires a non-identifier character to follow."""
    def __init__( self, matchString, identChars=Keyword.DEFAULT_KEYWORD_CHARS ):
        super(CaselessKeyword,self).__init__( matchString, identChars, caseless=True )

    def parseImpl( self, instring, loc, doActions=True ):
        if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and
             (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) ):
            return loc+self.matchLen, self.match
        #~ raise ParseException( instring, loc, self.errmsg )
        exc = self.myException
        exc.loc = loc
        exc.pstr = instring
        raise exc
class Word(Token):
    """Token for matching words composed of allowed character sets.
       Defined with string containing all allowed initial characters,
       an optional string containing allowed body characters (if omitted,
       defaults to the initial character set), and an optional minimum,
       maximum, and/or exact length.  The default value for min is 1 (a
       minimum value < 1 is not valid); the default values for max and exact
       are 0, meaning no maximum or exact length restriction.
    """
    def __init__( self, initChars, bodyChars=None, min=1, max=0, exact=0, asKeyword=False ):
        super(Word,self).__init__()
        self.initCharsOrig = initChars
        self.initChars = _str2dict(initChars)
        if bodyChars :
            self.bodyCharsOrig = bodyChars
            self.bodyChars = _str2dict(bodyChars)
        else:
            self.bodyCharsOrig = initChars
            self.bodyChars = _str2dict(initChars)

        self.maxSpecified = max > 0

        if min < 1:
            raise ValueError("cannot specify a minimum length < 1; use Optional(Word()) if zero-length word is permitted")

        self.minLen = min

        if max > 0:
            self.maxLen = max
        else:
            self.maxLen = _MAX_INT

        if exact > 0:
            self.maxLen = exact
            self.minLen = exact

        self.name = _ustr(self)
        self.errmsg = "Expected " + self.name
        #self.myException.msg = self.errmsg
        self.mayIndexError = False
        self.asKeyword = asKeyword

        # When no length limits are given and the char sets are regex-safe,
        # precompile an equivalent regular expression for fast matching.
        if ' ' not in self.initCharsOrig+self.bodyCharsOrig and (min==1 and max==0 and exact==0):
            if self.bodyCharsOrig == self.initCharsOrig:
                self.reString = "[%s]+" % _escapeRegexRangeChars(self.initCharsOrig)
            elif len(self.bodyCharsOrig) == 1:
                self.reString = "%s[%s]*" % \
                                      (re.escape(self.initCharsOrig),
                                      _escapeRegexRangeChars(self.bodyCharsOrig),)
            else:
                self.reString = "[%s][%s]*" % \
                                      (_escapeRegexRangeChars(self.initCharsOrig),
                                      _escapeRegexRangeChars(self.bodyCharsOrig),)
            if self.asKeyword:
                self.reString = r"\b"+self.reString+r"\b"
            try:
                self.re = re.compile( self.reString )
            except:
                # pattern could not be compiled; fall back to the
                # character-by-character matching path in parseImpl
                self.re = None

    def parseImpl( self, instring, loc, doActions=True ):
        # fast path: use the precompiled regex when one was built
        if self.re:
            result = self.re.match(instring,loc)
            if not result:
                exc = self.myException
                exc.loc = loc
                exc.pstr = instring
                raise exc

            loc = result.end()
            return loc,result.group()

        if not(instring[ loc ] in self.initChars):
            #~ raise ParseException( instring, loc, self.errmsg )
            exc = self.myException
            exc.loc = loc
            exc.pstr = instring
            raise exc
        start = loc
        loc += 1
        instrlen = len(instring)
        bodychars = self.bodyChars
        maxloc = start + self.maxLen
        maxloc = min( maxloc, instrlen )
        while loc < maxloc and instring[loc] in bodychars:
            loc += 1

        throwException = False
        if loc - start < self.minLen:
            throwException = True
        if self.maxSpecified and loc < instrlen and instring[loc] in bodychars:
            # there are more body chars beyond the max length: not a clean word
            throwException = True
        if self.asKeyword:
            # keyword mode: must not be flanked by body characters
            if (start>0 and instring[start-1] in bodychars) or (loc<instrlen and instring[loc] in bodychars):
                throwException = True

        if throwException:
            #~ raise ParseException( instring, loc, self.errmsg )
            exc = self.myException
            exc.loc = loc
            exc.pstr = instring
            raise exc

        return loc, instring[start:loc]

    def __str__( self ):
        try:
            return super(Word,self).__str__()
        except:
            pass

        if self.strRepr is None:

            def charsAsStr(s):
                # abbreviate long character sets in the repr
                if len(s)>4:
                    return s[:4]+"..."
                else:
                    return s

            if ( self.initCharsOrig != self.bodyCharsOrig ):
                self.strRepr = "W:(%s,%s)" % ( charsAsStr(self.initCharsOrig), charsAsStr(self.bodyCharsOrig) )
            else:
                self.strRepr = "W:(%s)" % charsAsStr(self.initCharsOrig)

        return self.strRepr
class Regex(Token):
    """Token for matching strings that match a given regular expression.
       Defined with string specifying the regular expression in a form recognized by the inbuilt Python re module.
    """
    def __init__( self, pattern, flags=0):
        """The parameters pattern and flags are passed to the re.compile() function as-is. See the Python re module for an explanation of the acceptable patterns and flags."""
        super(Regex,self).__init__()

        if len(pattern) == 0:
            warnings.warn("null string passed to Regex; use Empty() instead",
                    SyntaxWarning, stacklevel=2)

        self.pattern = pattern
        self.flags = flags

        try:
            self.re = re.compile(self.pattern, self.flags)
            self.reString = self.pattern
        except sre_constants.error:
            warnings.warn("invalid pattern (%s) passed to Regex" % pattern,
                SyntaxWarning, stacklevel=2)
            raise

        self.name = _ustr(self)
        self.errmsg = "Expected " + self.name
        #self.myException.msg = self.errmsg
        self.mayIndexError = False
        self.mayReturnEmpty = True

    def parseImpl( self, instring, loc, doActions=True ):
        result = self.re.match(instring,loc)
        if not result:
            exc = self.myException
            exc.loc = loc
            exc.pstr = instring
            raise exc

        loc = result.end()
        d = result.groupdict()
        ret = ParseResults(result.group())
        if d:
            # expose named groups as named results on the ParseResults
            for k in d:
                ret[k] = d[k]
        return loc,ret

    def __str__( self ):
        try:
            return super(Regex,self).__str__()
        except:
            pass

        if self.strRepr is None:
            self.strRepr = "Re:(%s)" % repr(self.pattern)

        return self.strRepr
class QuotedString(Token):
"""Token for matching strings that are delimited by quoting characters.
"""
def __init__( self, quoteChar, escChar=None, escQuote=None, multiline=False, unquoteResults=True, endQuoteChar=None):
"""
Defined with the following parameters:
- quoteChar - string of one or more characters defining the quote delimiting string
- escChar - character to escape quotes, typically backslash (default=None)
- escQuote - special quote sequence to escape an embedded quote string (such as SQL's "" to escape an embedded ") (default=None)
- multiline - boolean indicating whether quotes can span multiple lines (default=False)
- unquoteResults - boolean indicating whether the matched text should be unquoted (default=True)
- endQuoteChar - string of one or more characters defining the end of the quote delimited string (default=None => same as quoteChar)
"""
super(QuotedString,self).__init__()
# remove white space from quote chars - wont work anyway
quoteChar = quoteChar.strip()
if len(quoteChar) == 0:
warnings.warn("quoteChar cannot be the empty string",SyntaxWarning,stacklevel=2)
raise SyntaxError()
if endQuoteChar is None:
endQuoteChar = quoteChar
else:
endQuoteChar = endQuoteChar.strip()
if len(endQuoteChar) == 0:
warnings.warn("endQuoteChar cannot be the empty string",SyntaxWarning,stacklevel=2)
raise SyntaxError()
self.quoteChar = quoteChar
self.quoteCharLen = len(quoteChar)
self.firstQuoteChar = quoteChar[0]
self.endQuoteChar = endQuoteChar
self.endQuoteCharLen = len(endQuoteChar)
self.escChar = escChar
self.escQuote = escQuote
self.unquoteResults = unquoteResults
if multiline:
self.flags = re.MULTILINE | re.DOTALL
self.pattern = r'%s(?:[^%s%s]' % \
( re.escape(self.quoteChar),
_escapeRegexRangeChars(self.endQuoteChar[0]),
(escChar is not None and _escapeRegexRangeChars(escChar) or '') )
else:
self.flags = 0
self.pattern = r'%s(?:[^%s\n\r%s]' % \
( re.escape(self.quoteChar),
_escapeRegexRangeChars(self.endQuoteChar[0]),
(escChar is not None and _escapeRegexRangeChars(escChar) or '') )
if len(self.endQuoteChar) > 1:
self.pattern += (
'|(?:' + ')|(?:'.join(["%s[^%s]" % (re.escape(self.endQuoteChar[:i]),
_escapeRegexRangeChars(self.endQuoteChar[i]))
for i in range(len(self.endQuoteChar)-1,0,-1)]) + ')'
)
if escQuote:
self.pattern += (r'|(?:%s)' % re.escape(escQuote))
if escChar:
self.pattern += (r'|(?:%s.)' % re.escape(escChar))
self.escCharReplacePattern = re.escape(self.escChar)+"(.)"
self.pattern += (r')*%s' % re.escape(self.endQuoteChar))
try:
self.re = re.compile(self.pattern, self.flags)
self.reString = self.pattern
except sre_constants.error:
warnings.warn("invalid pattern (%s) passed to Regex" % self.pattern,
SyntaxWarning, stacklevel=2)
raise
self.name = _ustr(self)
self.errmsg = "Expected " + self.name
#self.myException.msg = self.errmsg
self.mayIndexError = False
self.mayReturnEmpty = True
    def parseImpl( self, instring, loc, doActions=True ):
        # Cheap single-character check first, so the compiled regex only
        # runs when the opening quote character is present at loc.
        result = instring[loc] == self.firstQuoteChar and self.re.match(instring,loc) or None
        if not result:
            # reuse the pre-built exception object rather than constructing
            # a new ParseException on every failed match (perf optimization)
            exc = self.myException
            exc.loc = loc
            exc.pstr = instring
            raise exc
        loc = result.end()
        ret = result.group()
        if self.unquoteResults:
            # strip off quotes
            ret = ret[self.quoteCharLen:-self.endQuoteCharLen]
            if isinstance(ret,basestring):
                # replace escaped characters
                if self.escChar:
                    ret = re.sub(self.escCharReplacePattern,"\g<1>",ret)
                # replace escaped quotes
                if self.escQuote:
                    ret = ret.replace(self.escQuote, self.endQuoteChar)
        return loc, ret
    def __str__( self ):
        """Return the element's name if one was set; otherwise build (and
        cache in self.strRepr) a generic description of the quote chars."""
        try:
            return super(QuotedString,self).__str__()
        except:
            # base-class __str__ may fail before full initialization;
            # fall through to the generated description below
            pass
        if self.strRepr is None:
            self.strRepr = "quoted string, starting with %s ending with %s" % (self.quoteChar, self.endQuoteChar)
        return self.strRepr
class CharsNotIn(Token):
    """Token for matching words composed of characters *not* in a given set.
       Defined with string containing all disallowed characters, and an optional
       minimum, maximum, and/or exact length.  The default value for min is 1 (a
       minimum value < 1 is not valid); the default values for max and exact
       are 0, meaning no maximum or exact length restriction.
    """
    def __init__( self, notChars, min=1, max=0, exact=0 ):
        super(CharsNotIn,self).__init__()
        self.skipWhitespace = False
        self.notChars = notChars
        if min < 1:
            raise ValueError("cannot specify a minimum length < 1; use Optional(CharsNotIn()) if zero-length char group is permitted")
        self.minLen = min
        if max > 0:
            self.maxLen = max
        else:
            # 0 means "no maximum": use the sentinel upper bound
            self.maxLen = _MAX_INT
        if exact > 0:
            # exact length overrides both min and max
            self.maxLen = exact
            self.minLen = exact
        self.name = _ustr(self)
        self.errmsg = "Expected " + self.name
        self.mayReturnEmpty = ( self.minLen == 0 )
        #self.myException.msg = self.errmsg
        self.mayIndexError = False
    def parseImpl( self, instring, loc, doActions=True ):
        # fail fast if the very first character is disallowed
        if instring[loc] in self.notChars:
            #~ raise ParseException( instring, loc, self.errmsg )
            exc = self.myException
            exc.loc = loc
            exc.pstr = instring
            raise exc
        start = loc
        loc += 1
        # hoist the attribute lookup out of the scanning loop (perf)
        notchars = self.notChars
        maxlen = min( start+self.maxLen, len(instring) )
        while loc < maxlen and \
                (instring[loc] not in notchars):
            loc += 1
        if loc - start < self.minLen:
            #~ raise ParseException( instring, loc, self.errmsg )
            exc = self.myException
            exc.loc = loc
            exc.pstr = instring
            raise exc
        return loc, instring[start:loc]
    def __str__( self ):
        try:
            return super(CharsNotIn, self).__str__()
        except:
            pass
        if self.strRepr is None:
            # abbreviate long exclusion sets in the repr
            if len(self.notChars) > 4:
                self.strRepr = "!W:(%s...)" % self.notChars[:4]
            else:
                self.strRepr = "!W:(%s)" % self.notChars
        return self.strRepr
class White(Token):
    """Special matching class for matching whitespace.  Normally, whitespace is ignored
       by pyparsing grammars.  This class is included when some whitespace structures
       are significant.  Define with a string containing the whitespace characters to be
       matched; default is " \\t\\n".  Also takes optional min, max, and exact arguments,
       as defined for the Word class."""
    # printable mnemonic for each matchable whitespace character (used for self.name)
    whiteStrs = {
        " " : "<SPC>",
        "\t": "<TAB>",
        "\n": "<LF>",
        "\r": "<CR>",
        "\f": "<FF>",
        }
    def __init__(self, ws=" \t\r\n", min=1, max=0, exact=0):
        super(White,self).__init__()
        self.matchWhite = ws
        # the chars this token matches must not also be skipped as whitespace
        self.setWhitespaceChars( "".join([c for c in self.whiteChars if c not in self.matchWhite]) )
        #~ self.leaveWhitespace()
        self.name = ("".join([White.whiteStrs[c] for c in self.matchWhite]))
        self.mayReturnEmpty = True
        self.errmsg = "Expected " + self.name
        #self.myException.msg = self.errmsg
        self.minLen = min
        if max > 0:
            self.maxLen = max
        else:
            # 0 means unbounded
            self.maxLen = _MAX_INT
        if exact > 0:
            # exact length overrides both min and max
            self.maxLen = exact
            self.minLen = exact
    def parseImpl( self, instring, loc, doActions=True ):
        if not(instring[ loc ] in self.matchWhite):
            #~ raise ParseException( instring, loc, self.errmsg )
            exc = self.myException
            exc.loc = loc
            exc.pstr = instring
            raise exc
        start = loc
        loc += 1
        maxloc = start + self.maxLen
        maxloc = min( maxloc, len(instring) )
        # consume up to maxLen contiguous matching whitespace characters
        while loc < maxloc and instring[loc] in self.matchWhite:
            loc += 1
        if loc - start < self.minLen:
            #~ raise ParseException( instring, loc, self.errmsg )
            exc = self.myException
            exc.loc = loc
            exc.pstr = instring
            raise exc
        return loc, instring[start:loc]
class _PositionToken(Token):
    """Abstract base for zero-width tokens that match a *position* in the
    input (line/column/word boundaries) rather than any text."""
    def __init__( self ):
        super(_PositionToken,self).__init__()
        self.name=self.__class__.__name__
        # position tokens consume no input, so they always "return empty"
        self.mayReturnEmpty = True
        self.mayIndexError = False
class GoToColumn(_PositionToken):
    """Token to advance to a specific column of input text; useful for tabular report scraping."""
    def __init__( self, colno ):
        super(GoToColumn,self).__init__()
        self.col = colno
    def preParse( self, instring, loc ):
        # skip whitespace/ignorables until the target column (or non-space) is reached
        if col(loc,instring) != self.col:
            instrlen = len(instring)
            if self.ignoreExprs:
                loc = self._skipIgnorables( instring, loc )
            while loc < instrlen and instring[loc].isspace() and col( loc, instring ) != self.col :
                loc += 1
        return loc
    def parseImpl( self, instring, loc, doActions=True ):
        thiscol = col( loc, instring )
        # cannot move backwards to an earlier column
        if thiscol > self.col:
            raise ParseException( instring, loc, "Text not in expected column", self )
        newloc = loc + self.col - thiscol
        # the skipped-over text is returned as the matched token
        ret = instring[ loc: newloc ]
        return newloc, ret
class LineStart(_PositionToken):
    """Matches if current position is at the beginning of a line within the parse string"""
    def __init__( self ):
        super(LineStart,self).__init__()
        # do not skip newlines as whitespace - they are what we are looking for
        self.setWhitespaceChars( " \t" )
        self.errmsg = "Expected start of line"
        #self.myException.msg = self.errmsg
    def preParse( self, instring, loc ):
        preloc = super(LineStart,self).preParse(instring,loc)
        # step over a newline so parsing resumes at the start of the next line
        if instring[preloc] == "\n":
            loc += 1
        return loc
    def parseImpl( self, instring, loc, doActions=True ):
        # match at string start, after leading whitespace-only prefix,
        # or immediately following a newline
        if not( loc==0 or
            (loc == self.preParse( instring, 0 )) or
            (instring[loc-1] == "\n") ): #col(loc, instring) != 1:
            #~ raise ParseException( instring, loc, "Expected start of line" )
            exc = self.myException
            exc.loc = loc
            exc.pstr = instring
            raise exc
        return loc, []
class LineEnd(_PositionToken):
    """Matches if current position is at the end of a line within the parse string"""
    def __init__( self ):
        super(LineEnd,self).__init__()
        # newlines must not be skipped as whitespace - they are the match target
        self.setWhitespaceChars( " \t" )
        self.errmsg = "Expected end of line"
        #self.myException.msg = self.errmsg
    def parseImpl( self, instring, loc, doActions=True ):
        if loc<len(instring):
            if instring[loc] == "\n":
                # consume the newline and return it as the matched token
                return loc+1, "\n"
            else:
                #~ raise ParseException( instring, loc, "Expected end of line" )
                exc = self.myException
                exc.loc = loc
                exc.pstr = instring
                raise exc
        elif loc == len(instring):
            # end of string also counts as end of line; note loc advances past EOS
            return loc+1, []
        else:
            exc = self.myException
            exc.loc = loc
            exc.pstr = instring
            raise exc
class StringStart(_PositionToken):
    """Matches if current position is at the beginning of the parse string"""
    def __init__( self ):
        super(StringStart,self).__init__()
        self.errmsg = "Expected start of text"
        #self.myException.msg = self.errmsg
    def parseImpl( self, instring, loc, doActions=True ):
        if loc != 0:
            # see if entire string up to here is just whitespace and ignoreables
            if loc != self.preParse( instring, 0 ):
                #~ raise ParseException( instring, loc, "Expected start of text" )
                exc = self.myException
                exc.loc = loc
                exc.pstr = instring
                raise exc
        return loc, []
class StringEnd(_PositionToken):
    """Matches if current position is at the end of the parse string"""
    def __init__( self ):
        super(StringEnd,self).__init__()
        self.errmsg = "Expected end of text"
        #self.myException.msg = self.errmsg
    def parseImpl( self, instring, loc, doActions=True ):
        if loc < len(instring):
            #~ raise ParseException( instring, loc, "Expected end of text" )
            exc = self.myException
            exc.loc = loc
            exc.pstr = instring
            raise exc
        elif loc == len(instring):
            # note: loc advances past the end of string on a match
            return loc+1, []
        elif loc > len(instring):
            return loc, []
        else:
            # NOTE(review): unreachable - the three branches above cover all
            # orderings of loc vs len(instring); kept for byte-compatibility
            exc = self.myException
            exc.loc = loc
            exc.pstr = instring
            raise exc
class WordStart(_PositionToken):
    """Matches if the current position is at the beginning of a Word, and
       is not preceded by any character in a given set of wordChars
       (default=printables). To emulate the \\b behavior of regular expressions,
       use WordStart(alphanums). WordStart will also match at the beginning of
       the string being parsed, or at the beginning of a line.
    """
    def __init__(self, wordChars = printables):
        super(WordStart,self).__init__()
        # dict gives O(1) membership testing for the word-character set
        self.wordChars = _str2dict(wordChars)
        self.errmsg = "Not at the start of a word"
    def parseImpl(self, instring, loc, doActions=True ):
        # loc 0 is always a word start; otherwise require a non-word char
        # before loc and a word char at loc
        if loc != 0:
            if (instring[loc-1] in self.wordChars or
                instring[loc] not in self.wordChars):
                exc = self.myException
                exc.loc = loc
                exc.pstr = instring
                raise exc
        return loc, []
class WordEnd(_PositionToken):
    """Matches if the current position is at the end of a Word, and
       is not followed by any character in a given set of wordChars
       (default=printables). To emulate the \\b behavior of regular expressions,
       use WordEnd(alphanums). WordEnd will also match at the end of
       the string being parsed, or at the end of a line.
    """
    def __init__(self, wordChars = printables):
        super(WordEnd,self).__init__()
        # dict gives O(1) membership testing for the word-character set
        self.wordChars = _str2dict(wordChars)
        # whitespace before this position is significant - do not skip it
        self.skipWhitespace = False
        self.errmsg = "Not at the end of a word"
    def parseImpl(self, instring, loc, doActions=True ):
        instrlen = len(instring)
        # end of string always qualifies; otherwise require a non-word char
        # at loc and a word char immediately before it
        if instrlen>0 and loc<instrlen:
            if (instring[loc] in self.wordChars or
                instring[loc-1] not in self.wordChars):
                #~ raise ParseException( instring, loc, "Expected end of word" )
                exc = self.myException
                exc.loc = loc
                exc.pstr = instring
                raise exc
        return loc, []
class ParseExpression(ParserElement):
    """Abstract subclass of ParserElement, for combining and post-processing parsed tokens."""
    def __init__( self, exprs, savelist = False ):
        super(ParseExpression,self).__init__(savelist)
        # normalize exprs to a list of ParserElements; a bare string becomes a Literal
        if isinstance( exprs, list ):
            self.exprs = exprs
        elif isinstance( exprs, basestring ):
            self.exprs = [ Literal( exprs ) ]
        else:
            self.exprs = [ exprs ]
        self.callPreparse = False
    def __getitem__( self, i ):
        return self.exprs[i]
    def append( self, other ):
        # invalidate the cached string repr when the expression list changes
        self.exprs.append( other )
        self.strRepr = None
        return self
    def leaveWhitespace( self ):
        """Extends leaveWhitespace defined in base class, and also invokes leaveWhitespace on
           all contained expressions."""
        self.skipWhitespace = False
        # copy sub-expressions first so shared elements elsewhere are unaffected
        self.exprs = [ e.copy() for e in self.exprs ]
        for e in self.exprs:
            e.leaveWhitespace()
        return self
    def ignore( self, other ):
        # propagate the newly-added ignore expression to all sub-expressions
        if isinstance( other, Suppress ):
            if other not in self.ignoreExprs:
                super( ParseExpression, self).ignore( other )
                for e in self.exprs:
                    e.ignore( self.ignoreExprs[-1] )
        else:
            super( ParseExpression, self).ignore( other )
            for e in self.exprs:
                e.ignore( self.ignoreExprs[-1] )
        return self
    def __str__( self ):
        try:
            return super(ParseExpression,self).__str__()
        except:
            pass
        if self.strRepr is None:
            self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.exprs) )
        return self.strRepr
    def streamline( self ):
        super(ParseExpression,self).streamline()
        for e in self.exprs:
            e.streamline()
        # collapse nested And's of the form And( And( And( a,b), c), d) to And( a,b,c,d )
        # but only if there are no parse actions or resultsNames on the nested And's
        # (likewise for Or's and MatchFirst's)
        if ( len(self.exprs) == 2 ):
            other = self.exprs[0]
            if ( isinstance( other, self.__class__ ) and
                  not(other.parseAction) and
                  other.resultsName is None and
                  not other.debug ):
                self.exprs = other.exprs[:] + [ self.exprs[1] ]
                self.strRepr = None
                self.mayReturnEmpty |= other.mayReturnEmpty
                self.mayIndexError  |= other.mayIndexError
            other = self.exprs[-1]
            if ( isinstance( other, self.__class__ ) and
                  not(other.parseAction) and
                  other.resultsName is None and
                  not other.debug ):
                self.exprs = self.exprs[:-1] + other.exprs[:]
                self.strRepr = None
                self.mayReturnEmpty |= other.mayReturnEmpty
                self.mayIndexError  |= other.mayIndexError
        return self
    def setResultsName( self, name, listAllMatches=False ):
        ret = super(ParseExpression,self).setResultsName(name,listAllMatches)
        return ret
    def validate( self, validateTrace=[] ):
        # NOTE(review): mutable default arg is safe here only because the
        # list is copied (validateTrace[:]) before use
        tmp = validateTrace[:]+[self]
        for e in self.exprs:
            e.validate(tmp)
        self.checkRecursion( [] )
class And(ParseExpression):
    """Requires all given ParseExpressions to be found in the given order.
       Expressions may be separated by whitespace.
       May be constructed using the '+' operator.
    """
    # singleton marker element: everything after it in an And must match,
    # or a ParseSyntaxException (non-backtrackable) is raised
    class _ErrorStop(Empty):
        def __new__(cls,*args,**kwargs):
            return And._ErrorStop.instance
    _ErrorStop.instance = Empty()
    _ErrorStop.instance.leaveWhitespace()
    def __init__( self, exprs, savelist = True ):
        super(And,self).__init__(exprs, savelist)
        # the sequence can match empty only if every element can
        self.mayReturnEmpty = True
        for e in self.exprs:
            if not e.mayReturnEmpty:
                self.mayReturnEmpty = False
                break
        # whitespace handling is delegated to the first element
        self.setWhitespaceChars( exprs[0].whiteChars )
        self.skipWhitespace = exprs[0].skipWhitespace
        self.callPreparse = True
    def parseImpl( self, instring, loc, doActions=True ):
        # pass False as last arg to _parse for first element, since we already
        # pre-parsed the string as part of our And pre-parsing
        loc, resultlist = self.exprs[0]._parse( instring, loc, doActions, callPreParse=False )
        errorStop = False
        for e in self.exprs[1:]:
            if e is And._ErrorStop.instance:
                errorStop = True
                continue
            if errorStop:
                # past the error stop: convert match failures into
                # non-backtrackable syntax errors
                try:
                    loc, exprtokens = e._parse( instring, loc, doActions )
                except ParseBaseException, pe:
                    raise ParseSyntaxException(pe)
                except IndexError, ie:
                    raise ParseSyntaxException( ParseException(instring, len(instring), self.errmsg, self) )
            else:
                loc, exprtokens = e._parse( instring, loc, doActions )
            if exprtokens or exprtokens.keys():
                resultlist += exprtokens
        return loc, resultlist
    def __iadd__(self, other ):
        if isinstance( other, basestring ):
            other = Literal( other )
        return self.append( other ) #And( [ self, other ] )
    def checkRecursion( self, parseElementList ):
        subRecCheckList = parseElementList[:] + [ self ]
        for e in self.exprs:
            e.checkRecursion( subRecCheckList )
            # once a mandatory (non-empty) element is seen, later elements
            # cannot be reached at the same input position
            if not e.mayReturnEmpty:
                break
    def __str__( self ):
        if hasattr(self,"name"):
            return self.name
        if self.strRepr is None:
            self.strRepr = "{" + " ".join( [ _ustr(e) for e in self.exprs ] ) + "}"
        return self.strRepr
class Or(ParseExpression):
    """Requires that at least one ParseExpression is found.
       If two expressions match, the expression that matches the longest string will be used.
       May be constructed using the '^' operator.
    """
    def __init__( self, exprs, savelist = False ):
        super(Or,self).__init__(exprs, savelist)
        # the alternation can match empty if any alternative can
        self.mayReturnEmpty = False
        for e in self.exprs:
            if e.mayReturnEmpty:
                self.mayReturnEmpty = True
                break
    def parseImpl( self, instring, loc, doActions=True ):
        # two-pass "longest match": tryParse every alternative (no actions),
        # remember the one that advanced furthest, then re-parse it for real
        maxExcLoc = -1
        maxMatchLoc = -1
        maxException = None
        for e in self.exprs:
            try:
                loc2 = e.tryParse( instring, loc )
            except ParseException, err:
                # track the failure that got furthest, for best error reporting
                if err.loc > maxExcLoc:
                    maxException = err
                    maxExcLoc = err.loc
            except IndexError:
                if len(instring) > maxExcLoc:
                    maxException = ParseException(instring,len(instring),e.errmsg,self)
                    maxExcLoc = len(instring)
            else:
                if loc2 > maxMatchLoc:
                    maxMatchLoc = loc2
                    maxMatchExp = e
        if maxMatchLoc < 0:
            if maxException is not None:
                raise maxException
            else:
                raise ParseException(instring, loc, "no defined alternatives to match", self)
        return maxMatchExp._parse( instring, loc, doActions )
    def __ixor__(self, other ):
        if isinstance( other, basestring ):
            other = Literal( other )
        return self.append( other ) #Or( [ self, other ] )
    def __str__( self ):
        if hasattr(self,"name"):
            return self.name
        if self.strRepr is None:
            self.strRepr = "{" + " ^ ".join( [ _ustr(e) for e in self.exprs ] ) + "}"
        return self.strRepr
    def checkRecursion( self, parseElementList ):
        subRecCheckList = parseElementList[:] + [ self ]
        for e in self.exprs:
            e.checkRecursion( subRecCheckList )
class MatchFirst(ParseExpression):
    """Requires that at least one ParseExpression is found.
       If two expressions match, the first one listed is the one that will match.
       May be constructed using the '|' operator.
    """
    def __init__( self, exprs, savelist = False ):
        super(MatchFirst,self).__init__(exprs, savelist)
        if exprs:
            # the alternation can match empty if any alternative can
            self.mayReturnEmpty = False
            for e in self.exprs:
                if e.mayReturnEmpty:
                    self.mayReturnEmpty = True
                    break
        else:
            self.mayReturnEmpty = True
    def parseImpl( self, instring, loc, doActions=True ):
        # first alternative to match wins - return immediately on success
        maxExcLoc = -1
        maxException = None
        for e in self.exprs:
            try:
                ret = e._parse( instring, loc, doActions )
                return ret
            except ParseException, err:
                # remember the failure that advanced furthest for error reporting
                if err.loc > maxExcLoc:
                    maxException = err
                    maxExcLoc = err.loc
            except IndexError:
                if len(instring) > maxExcLoc:
                    maxException = ParseException(instring,len(instring),e.errmsg,self)
                    maxExcLoc = len(instring)
        # only got here if no expression matched, raise exception for match that made it the furthest
        else:
            if maxException is not None:
                raise maxException
            else:
                raise ParseException(instring, loc, "no defined alternatives to match", self)
    def __ior__(self, other ):
        if isinstance( other, basestring ):
            other = Literal( other )
        return self.append( other ) #MatchFirst( [ self, other ] )
    def __str__( self ):
        if hasattr(self,"name"):
            return self.name
        if self.strRepr is None:
            self.strRepr = "{" + " | ".join( [ _ustr(e) for e in self.exprs ] ) + "}"
        return self.strRepr
    def checkRecursion( self, parseElementList ):
        subRecCheckList = parseElementList[:] + [ self ]
        for e in self.exprs:
            e.checkRecursion( subRecCheckList )
class Each(ParseExpression):
    """Requires all given ParseExpressions to be found, but in any order.
       Expressions may be separated by whitespace.
       May be constructed using the '&' operator.
    """
    def __init__( self, exprs, savelist = True ):
        super(Each,self).__init__(exprs, savelist)
        # can match empty only if every element can
        self.mayReturnEmpty = True
        for e in self.exprs:
            if not e.mayReturnEmpty:
                self.mayReturnEmpty = False
                break
        self.skipWhitespace = True
        # defer partitioning of exprs into required/optional groups until first parse
        self.initExprGroups = True
    def parseImpl( self, instring, loc, doActions=True ):
        if self.initExprGroups:
            # classify sub-expressions once: plain Optionals, repeating
            # optionals (ZeroOrMore), repeating required (OneOrMore), and the rest
            self.optionals = [ e.expr for e in self.exprs if isinstance(e,Optional) ]
            self.multioptionals = [ e.expr for e in self.exprs if isinstance(e,ZeroOrMore) ]
            self.multirequired = [ e.expr for e in self.exprs if isinstance(e,OneOrMore) ]
            self.required = [ e for e in self.exprs if not isinstance(e,(Optional,ZeroOrMore,OneOrMore)) ]
            self.required += self.multirequired
            self.initExprGroups = False
        tmpLoc = loc
        tmpReqd = self.required[:]
        tmpOpt  = self.optionals[:]
        matchOrder = []
        # repeatedly try the remaining expressions until a full pass matches nothing
        keepMatching = True
        while keepMatching:
            tmpExprs = tmpReqd + tmpOpt + self.multioptionals + self.multirequired
            failed = []
            for e in tmpExprs:
                try:
                    tmpLoc = e.tryParse( instring, tmpLoc )
                except ParseException:
                    failed.append(e)
                else:
                    matchOrder.append(e)
                    if e in tmpReqd:
                        tmpReqd.remove(e)
                    elif e in tmpOpt:
                        tmpOpt.remove(e)
            if len(failed) == len(tmpExprs):
                keepMatching = False
        if tmpReqd:
            missing = ", ".join( [ _ustr(e) for e in tmpReqd ] )
            raise ParseException(instring,loc,"Missing one or more required elements (%s)" % missing )
        # add any unmatched Optionals, in case they have default values defined
        matchOrder += list(e for e in self.exprs if isinstance(e,Optional) and e.expr in tmpOpt)
        # re-parse for real (with actions) in the order the elements matched
        resultlist = []
        for e in matchOrder:
            loc,results = e._parse(instring,loc,doActions)
            resultlist.append(results)
        # merge named results; duplicated keys are accumulated, not overwritten
        finalResults = ParseResults([])
        for r in resultlist:
            dups = {}
            for k in r.keys():
                if k in finalResults.keys():
                    tmp = ParseResults(finalResults[k])
                    tmp += ParseResults(r[k])
                    dups[k] = tmp
            finalResults += ParseResults(r)
            for k,v in dups.items():
                finalResults[k] = v
        return loc, finalResults
    def __str__( self ):
        if hasattr(self,"name"):
            return self.name
        if self.strRepr is None:
            self.strRepr = "{" + " & ".join( [ _ustr(e) for e in self.exprs ] ) + "}"
        return self.strRepr
    def checkRecursion( self, parseElementList ):
        subRecCheckList = parseElementList[:] + [ self ]
        for e in self.exprs:
            e.checkRecursion( subRecCheckList )
class ParseElementEnhance(ParserElement):
    """Abstract subclass of ParserElement, for combining and post-processing parsed tokens."""
    def __init__( self, expr, savelist=False ):
        super(ParseElementEnhance,self).__init__(savelist)
        # a bare string is promoted to a Literal
        if isinstance( expr, basestring ):
            expr = Literal(expr)
        self.expr = expr
        self.strRepr = None
        if expr is not None:
            # inherit parsing characteristics from the wrapped expression
            self.mayIndexError = expr.mayIndexError
            self.mayReturnEmpty = expr.mayReturnEmpty
            self.setWhitespaceChars( expr.whiteChars )
            self.skipWhitespace = expr.skipWhitespace
            self.saveAsList = expr.saveAsList
            self.callPreparse = expr.callPreparse
            self.ignoreExprs.extend(expr.ignoreExprs)
    def parseImpl( self, instring, loc, doActions=True ):
        if self.expr is not None:
            return self.expr._parse( instring, loc, doActions, callPreParse=False )
        else:
            # expr may be None for an unassigned Forward
            raise ParseException("",loc,self.errmsg,self)
    def leaveWhitespace( self ):
        self.skipWhitespace = False
        # copy before mutating so shared instances elsewhere are unaffected
        self.expr = self.expr.copy()
        if self.expr is not None:
            self.expr.leaveWhitespace()
        return self
    def ignore( self, other ):
        # propagate the newly-added ignore expression down to the wrapped expr
        if isinstance( other, Suppress ):
            if other not in self.ignoreExprs:
                super( ParseElementEnhance, self).ignore( other )
                if self.expr is not None:
                    self.expr.ignore( self.ignoreExprs[-1] )
        else:
            super( ParseElementEnhance, self).ignore( other )
            if self.expr is not None:
                self.expr.ignore( self.ignoreExprs[-1] )
        return self
    def streamline( self ):
        super(ParseElementEnhance,self).streamline()
        if self.expr is not None:
            self.expr.streamline()
        return self
    def checkRecursion( self, parseElementList ):
        # direct left-recursion check: seeing self again means infinite recursion
        if self in parseElementList:
            raise RecursiveGrammarException( parseElementList+[self] )
        subRecCheckList = parseElementList[:] + [ self ]
        if self.expr is not None:
            self.expr.checkRecursion( subRecCheckList )
    def validate( self, validateTrace=[] ):
        tmp = validateTrace[:]+[self]
        if self.expr is not None:
            self.expr.validate(tmp)
        self.checkRecursion( [] )
    def __str__( self ):
        try:
            return super(ParseElementEnhance,self).__str__()
        except:
            pass
        if self.strRepr is None and self.expr is not None:
            self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.expr) )
        return self.strRepr
class FollowedBy(ParseElementEnhance):
    """Lookahead matching of the given parse expression.  FollowedBy
    does *not* advance the parsing position within the input string, it only
    verifies that the specified parse expression matches at the current
    position.  FollowedBy always returns a null token list."""
    def __init__( self, expr ):
        super(FollowedBy,self).__init__(expr)
        # a lookahead consumes no input, so it can always "match empty"
        self.mayReturnEmpty = True
    def parseImpl( self, instring, loc, doActions=True ):
        # propagate the exception on failure; on success, discard the tokens
        # and report the original location so nothing is consumed
        self.expr.tryParse( instring, loc )
        return loc, []
class NotAny(ParseElementEnhance):
    """Lookahead to disallow matching with the given parse expression.  NotAny
    does *not* advance the parsing position within the input string, it only
    verifies that the specified parse expression does *not* match at the current
    position.  Also, NotAny does *not* skip over leading whitespace. NotAny
    always returns a null token list.  May be constructed using the '~' operator."""
    def __init__( self, expr ):
        super(NotAny,self).__init__(expr)
        #~ self.leaveWhitespace()
        self.skipWhitespace = False  # do NOT use self.leaveWhitespace(), don't want to propagate to exprs
        self.mayReturnEmpty = True
        self.errmsg = "Found unwanted token, "+_ustr(self.expr)
        #self.myException = ParseException("",0,self.errmsg,self)
    def parseImpl( self, instring, loc, doActions=True ):
        # success and failure are inverted: a match of expr is *our* failure
        try:
            self.expr.tryParse( instring, loc )
        except (ParseException,IndexError):
            pass
        else:
            #~ raise ParseException(instring, loc, self.errmsg )
            exc = self.myException
            exc.loc = loc
            exc.pstr = instring
            raise exc
        return loc, []
    def __str__( self ):
        if hasattr(self,"name"):
            return self.name
        if self.strRepr is None:
            self.strRepr = "~{" + _ustr(self.expr) + "}"
        return self.strRepr
class ZeroOrMore(ParseElementEnhance):
    """Optional repetition of zero or more of the given expression."""
    def __init__( self, expr ):
        super(ZeroOrMore,self).__init__(expr)
        # zero repetitions is a valid (empty) match
        self.mayReturnEmpty = True
    def parseImpl( self, instring, loc, doActions=True ):
        tokens = []
        try:
            loc, tokens = self.expr._parse( instring, loc, doActions, callPreParse=False )
            hasIgnoreExprs = ( len(self.ignoreExprs) > 0 )
            # keep matching until the expression fails; the exception ends the loop
            while 1:
                if hasIgnoreExprs:
                    preloc = self._skipIgnorables( instring, loc )
                else:
                    preloc = loc
                loc, tmptokens = self.expr._parse( instring, preloc, doActions )
                if tmptokens or tmptokens.keys():
                    tokens += tmptokens
        except (ParseException,IndexError):
            # normal loop termination - return what has been accumulated
            pass
        return loc, tokens
    def __str__( self ):
        if hasattr(self,"name"):
            return self.name
        if self.strRepr is None:
            self.strRepr = "[" + _ustr(self.expr) + "]..."
        return self.strRepr
    def setResultsName( self, name, listAllMatches=False ):
        ret = super(ZeroOrMore,self).setResultsName(name,listAllMatches)
        # repeated matches should accumulate as a list under the results name
        ret.saveAsList = True
        return ret
class OneOrMore(ParseElementEnhance):
    """Repetition of one or more of the given expression."""
    def parseImpl( self, instring, loc, doActions=True ):
        # must be at least one - the first match is outside the try, so its
        # failure propagates to the caller
        loc, tokens = self.expr._parse( instring, loc, doActions, callPreParse=False )
        try:
            hasIgnoreExprs = ( len(self.ignoreExprs) > 0 )
            # keep matching until the expression fails; the exception ends the loop
            while 1:
                if hasIgnoreExprs:
                    preloc = self._skipIgnorables( instring, loc )
                else:
                    preloc = loc
                loc, tmptokens = self.expr._parse( instring, preloc, doActions )
                if tmptokens or tmptokens.keys():
                    tokens += tmptokens
        except (ParseException,IndexError):
            # normal loop termination - return what has been accumulated
            pass
        return loc, tokens
    def __str__( self ):
        if hasattr(self,"name"):
            return self.name
        if self.strRepr is None:
            self.strRepr = "{" + _ustr(self.expr) + "}..."
        return self.strRepr
    def setResultsName( self, name, listAllMatches=False ):
        ret = super(OneOrMore,self).setResultsName(name,listAllMatches)
        # repeated matches should accumulate as a list under the results name
        ret.saveAsList = True
        return ret
class _NullToken(object):
def __bool__(self):
return False
__nonzero__ = __bool__
def __str__(self):
return ""
_optionalNotMatched = _NullToken()
class Optional(ParseElementEnhance):
    """Optional matching of the given expression.
       A default return string can also be specified, if the optional expression
       is not found.
    """
    def __init__( self, exprs, default=_optionalNotMatched ):
        super(Optional,self).__init__( exprs, savelist=False )
        # _optionalNotMatched sentinel distinguishes "no default given"
        # from an explicit default of None/""/etc.
        self.defaultValue = default
        self.mayReturnEmpty = True
    def parseImpl( self, instring, loc, doActions=True ):
        try:
            loc, tokens = self.expr._parse( instring, loc, doActions, callPreParse=False )
        except (ParseException,IndexError):
            if self.defaultValue is not _optionalNotMatched:
                # substitute the configured default, preserving any results name
                if self.expr.resultsName:
                    tokens = ParseResults([ self.defaultValue ])
                    tokens[self.expr.resultsName] = self.defaultValue
                else:
                    tokens = [ self.defaultValue ]
            else:
                tokens = []
        return loc, tokens
    def __str__( self ):
        if hasattr(self,"name"):
            return self.name
        if self.strRepr is None:
            self.strRepr = "[" + _ustr(self.expr) + "]"
        return self.strRepr
class SkipTo(ParseElementEnhance):
    """Token for skipping over all undefined text until the matched expression is found.
       If include is set to true, the matched expression is also consumed.  The ignore
       argument is used to define grammars (typically quoted strings and comments) that
       might contain false matches.
    """
    def __init__( self, other, include=False, ignore=None ):
        super( SkipTo, self ).__init__( other )
        if ignore is not None:
            # copy before adding the ignore expr, so the caller's element is untouched
            self.expr = self.expr.copy()
            self.expr.ignore(ignore)
        self.mayReturnEmpty = True
        self.mayIndexError = False
        self.includeMatch = include
        self.asList = False
        self.errmsg = "No match found for "+_ustr(self.expr)
        #self.myException = ParseException("",0,self.errmsg,self)
    def parseImpl( self, instring, loc, doActions=True ):
        # advance one character at a time until the target expression matches
        startLoc = loc
        instrlen = len(instring)
        expr = self.expr
        while loc <= instrlen:
            try:
                loc = expr._skipIgnorables( instring, loc )
                # probe only (doActions=False); re-parse below if includeMatch
                expr._parse( instring, loc, doActions=False, callPreParse=False )
                if self.includeMatch:
                    skipText = instring[startLoc:loc]
                    loc,mat = expr._parse(instring,loc,doActions,callPreParse=False)
                    if mat:
                        skipRes = ParseResults( skipText )
                        skipRes += mat
                        return loc, [ skipRes ]
                    else:
                        return loc, [ skipText ]
                else:
                    return loc, [ instring[startLoc:loc] ]
            except (ParseException,IndexError):
                loc += 1
        # target never matched anywhere in the remaining input
        exc = self.myException
        exc.loc = loc
        exc.pstr = instring
        raise exc
class Forward(ParseElementEnhance):
    """Forward declaration of an expression to be defined later -
       used for recursive grammars, such as algebraic infix notation.
       When the expression is known, it is assigned to the Forward variable using the '<<' operator.

       Note: take care when assigning to Forward not to overlook precedence of operators.
       Specifically, '|' has a lower precedence than '<<', so that::
          fwdExpr << a | b | c
       will actually be evaluated as::
          (fwdExpr << a) | b | c
       thereby leaving b and c out as parseable alternatives.  It is recommended that you
       explicitly group the values inserted into the Forward::
          fwdExpr << (a | b | c)
    """
    def __init__( self, other=None ):
        super(Forward,self).__init__( other, savelist=False )
    def __lshift__( self, other ):
        """Assign the deferred expression and copy its parsing characteristics.

        Returns self (not None) so the result of '<<' is a usable element -
        e.g. chaining or (fwd << expr).setName(...) - matching the behavior
        of later pyparsing releases."""
        if isinstance( other, basestring ):
            other = Literal(other)
        self.expr = other
        self.mayReturnEmpty = other.mayReturnEmpty
        self.strRepr = None
        # mirror the wrapped expression's parse attributes onto this Forward
        self.mayIndexError = self.expr.mayIndexError
        self.mayReturnEmpty = self.expr.mayReturnEmpty
        self.setWhitespaceChars( self.expr.whiteChars )
        self.skipWhitespace = self.expr.skipWhitespace
        self.saveAsList = self.expr.saveAsList
        self.ignoreExprs.extend(self.expr.ignoreExprs)
        # bug fix: previously returned None, which broke any use of the
        # value of a '<<' assignment; returning self is backward-compatible
        return self
    def leaveWhitespace( self ):
        self.skipWhitespace = False
        return self
    def streamline( self ):
        # guard against infinite recursion through the deferred expression
        if not self.streamlined:
            self.streamlined = True
            if self.expr is not None:
                self.expr.streamline()
        return self
    def validate( self, validateTrace=[] ):
        # only recurse if this Forward hasn't been visited yet (cycle guard)
        if self not in validateTrace:
            tmp = validateTrace[:]+[self]
            if self.expr is not None:
                self.expr.validate(tmp)
        self.checkRecursion([])
    def __str__( self ):
        if hasattr(self,"name"):
            return self.name
        # temporarily swap in a non-recursing class so a self-referential
        # grammar doesn't recurse forever while building the repr
        self.__class__ = _ForwardNoRecurse
        try:
            if self.expr is not None:
                retString = _ustr(self.expr)
            else:
                retString = "None"
        finally:
            self.__class__ = Forward
        return "Forward: "+retString
    def copy(self):
        if self.expr is not None:
            return super(Forward,self).copy()
        else:
            # an unassigned Forward copies as a new Forward deferring to this one
            ret = Forward()
            ret << self
            return ret
class _ForwardNoRecurse(Forward):
    """Helper class swapped in by Forward.__str__ to stop repr recursion
    on self-referential grammars."""
    def __str__( self ):
        return "..."
class TokenConverter(ParseElementEnhance):
    """Abstract subclass of ParseExpression, for converting parsed results."""
    def __init__( self, expr, savelist=False ):
        super(TokenConverter,self).__init__( expr )#, savelist )
        # converters replace the token list wholesale; no list-nesting needed
        self.saveAsList = False
class Upcase(TokenConverter):
    """Converter to upper case all matching tokens.

    Deprecated - use the upcaseTokens parse action instead."""
    def __init__(self, *args):
        super(Upcase,self).__init__(*args)
        warnings.warn("Upcase class is deprecated, use upcaseTokens parse action instead",
                       DeprecationWarning,stacklevel=2)
    def postParse( self, instring, loc, tokenlist ):
        # uses string.upper from the (Python 2) string module
        return list(map( string.upper, tokenlist ))
class Combine(TokenConverter):
    """Converter to concatenate all matching tokens to a single string.
       By default, the matching patterns must also be contiguous in the input string;
       this can be disabled by specifying 'adjacent=False' in the constructor.
    """
    def __init__( self, expr, joinString="", adjacent=True ):
        super(Combine,self).__init__( expr )
        # suppress whitespace-stripping in contained parse expressions, but re-enable it on the Combine itself
        if adjacent:
            self.leaveWhitespace()
        self.adjacent = adjacent
        self.skipWhitespace = True
        self.joinString = joinString
    def ignore( self, other ):
        # in adjacent mode, ignorables apply only at this level,
        # not inside the combined expression
        if self.adjacent:
            ParserElement.ignore(self, other)
        else:
            super( Combine, self).ignore( other )
        return self
    def postParse( self, instring, loc, tokenlist ):
        # join all matched tokens into one string, preserving modal behavior
        retToks = tokenlist.copy()
        del retToks[:]
        retToks += ParseResults([ "".join(tokenlist._asStringList(self.joinString)) ], modal=self.modalResults)
        if self.resultsName and len(retToks.keys())>0:
            return [ retToks ]
        else:
            return retToks
class Group(TokenConverter):
    """Converter to return the matched tokens as a list - useful for returning tokens of ZeroOrMore and OneOrMore expressions."""
    def __init__( self, expr ):
        super(Group,self).__init__( expr )
        self.saveAsList = True
    def postParse( self, instring, loc, tokenlist ):
        # wrap the matched tokens in one extra nesting level
        return [ tokenlist ]
class Dict(TokenConverter):
    """Converter to return a repetitive expression as a list, but also as a dictionary.
       Each element can also be referenced using the first token in the expression as its key.
       Useful for tabular report scraping when the first column can be used as a item key.
    """
    def __init__( self, exprs ):
        super(Dict,self).__init__( exprs )
        self.saveAsList = True
    def postParse( self, instring, loc, tokenlist ):
        # register each sub-token group under its first token as a results key
        for i,tok in enumerate(tokenlist):
            if len(tok) == 0:
                continue
            ikey = tok[0]
            if isinstance(ikey,int):
                # integer keys are converted to strings for dict-style access
                ikey = _ustr(tok[0]).strip()
            if len(tok)==1:
                # key with no value
                tokenlist[ikey] = _ParseResultsWithOffset("",i)
            elif len(tok)==2 and not isinstance(tok[1],ParseResults):
                # simple key/value pair
                tokenlist[ikey] = _ParseResultsWithOffset(tok[1],i)
            else:
                # multi-token value: store everything after the key
                dictvalue = tok.copy() #ParseResults(i)
                del dictvalue[0]
                if len(dictvalue)!= 1 or (isinstance(dictvalue,ParseResults) and dictvalue.keys()):
                    tokenlist[ikey] = _ParseResultsWithOffset(dictvalue,i)
                else:
                    tokenlist[ikey] = _ParseResultsWithOffset(dictvalue[0],i)
        if self.resultsName:
            return [ tokenlist ]
        else:
            return tokenlist
class Suppress(TokenConverter):
    """Converter that matches its expression but drops the tokens from the results."""
    def postParse( self, instring, loc, tokenlist ):
        # discard everything that was matched
        return []
    def suppress( self ):
        # already suppressed - nothing further to do
        return self
class OnlyOnce(object):
    """Parse-action wrapper that lets the wrapped action fire only once.

    A second invocation raises ParseException; call reset() to re-arm.
    """
    def __init__(self, methodCall):
        self.callable = ParserElement._normalizeParseActionArgs(methodCall)
        self.called = False
    def __call__(self, s, l, t):
        if self.called:
            raise ParseException(s, l, "")
        # mark as called only after the action succeeds, so an action that
        # raises may be retried
        results = self.callable(s, l, t)
        self.called = True
        return results
    def reset(self):
        # re-arm so the wrapped action may run again
        self.called = False
def traceParseAction(f):
    """Decorator for debugging parse actions.

    Logs entry, exit value, and any raised exception of the wrapped parse
    action to stderr, then delegates to the original callable.
    """
    f = ParserElement._normalizeParseActionArgs(f)
    def z(*paArgs):
        # the last three args are always (s, l, t); an extra leading arg
        # means the parse action is a bound method - include its class name
        thisFunc = f.func_name
        s,l,t = paArgs[-3:]
        if len(paArgs)>3:
            thisFunc = paArgs[0].__class__.__name__ + '.' + thisFunc
        sys.stderr.write( ">>entering %s(line: '%s', %d, %s)\n" % (thisFunc,line(l,s),l,t) )
        try:
            ret = f(*paArgs)
        except Exception, exc:
            sys.stderr.write( "<<leaving %s (exception: %s)\n" % (thisFunc,exc) )
            raise
        sys.stderr.write( "<<leaving %s (ret: %s)\n" % (thisFunc,ret) )
        return ret
    try:
        z.__name__ = f.__name__
    except AttributeError:
        pass
    return z
#
# global helpers
#
def delimitedList( expr, delim=",", combine=False ):
    """Define a list of one or more *expr* separated by *delim* (default ',').

    With combine=False (the default), the delimiters are suppressed and the
    element tokens are returned individually, with intervening whitespace
    and comments allowed.  With combine=True, the whole match - delimiters
    included - is returned as one combined token string.
    """
    listName = "%s [%s %s]..." % (_ustr(expr), _ustr(delim), _ustr(expr))
    if combine:
        combined = Combine(expr + ZeroOrMore(delim + expr))
        return combined.setName(listName)
    return (expr + ZeroOrMore(Suppress(delim) + expr)).setName(listName)
def countedArray( expr ):
    """Helper to define a counted list of expressions.
    This helper defines a pattern of the form::
        integer expr expr expr...
    where the leading integer tells how many expr expressions follow.
    The matched tokens returns the array of expr tokens as a list - the leading count token is suppressed.
    """
    arrayExpr = Forward()
    def countFieldParseAction(s,l,t):
        # once the count has been parsed, re-target the Forward to exactly
        # n copies of expr (or to the empty expression when n == 0)
        n = int(t[0])
        arrayExpr << (n and Group(And([expr]*n)) or Group(empty))
        # return [] so the count token itself is suppressed from the results
        return []
    return ( Word(nums).setName("arrayLen").setParseAction(countFieldParseAction, callDuringTry=True) + arrayExpr )
def _flatten(L):
if type(L) is not list: return [L]
if L == []: return L
return _flatten(L[0]) + _flatten(L[1:])
def matchPreviousLiteral(expr):
    """Helper to define an expression that is indirectly defined from
    the tokens matched in a previous expression, that is, it looks
    for a 'repeat' of a previous expression.  For example::
        first = Word(nums)
        second = matchPreviousLiteral(first)
        matchExpr = first + ":" + second
    will match "1:1", but not "1:2".  Because this matches a
    previous literal, will also match the leading "1:1" in "1:10".
    If this is not desired, use matchPreviousExpr.
    Do *not* use with packrat parsing enabled.
    """
    rep = Forward()
    def copyTokenToRepeater(s,l,t):
        # after expr matches, re-point the Forward at literal(s) for the
        # exact text that was just matched
        if t:
            if len(t) == 1:
                rep << t[0]
            else:
                # flatten t tokens
                tflat = _flatten(t.asList())
                rep << And( [ Literal(tt) for tt in tflat ] )
        else:
            # expr matched nothing -> the repeat matches the empty string
            rep << Empty()
    expr.addParseAction(copyTokenToRepeater, callDuringTry=True)
    return rep
def matchPreviousExpr(expr):
    """Helper to define an expression that is indirectly defined from
    the tokens matched in a previous expression, that is, it looks
    for a 'repeat' of a previous expression.  For example::
        first = Word(nums)
        second = matchPreviousExpr(first)
        matchExpr = first + ":" + second
    will match "1:1", but not "1:2".  Because this matches by
    expressions, will *not* match the leading "1:1" in "1:10";
    the expressions are evaluated first, and then compared, so
    "1" is compared with "10".
    Do *not* use with packrat parsing enabled.
    """
    rep = Forward()
    e2 = expr.copy()
    rep << e2
    def copyTokenToRepeater(s,l,t):
        # remember the tokens from the first match...
        matchTokens = _flatten(t.asList())
        def mustMatchTheseTokens(s,l,t):
            # ...and require the repeat to produce exactly the same tokens
            theseTokens = _flatten(t.asList())
            if theseTokens != matchTokens:
                raise ParseException("",0,"")
        rep.setParseAction( mustMatchTheseTokens, callDuringTry=True )
    expr.addParseAction(copyTokenToRepeater, callDuringTry=True)
    return rep
def _escapeRegexRangeChars(s):
    # Escape the characters that are special inside a regex character
    # class (backslash, ^, -, ]) and make newline/tab visible as \n and \t.
    for special in r"\^-]":
        s = s.replace(special, "\\" + special)
    s = s.replace("\n", r"\n")
    s = s.replace("\t", r"\t")
    return _ustr(s)
def oneOf( strs, caseless=False, useRegex=True ):
    """Helper to quickly define a set of alternative Literals, and makes sure to do
    longest-first testing when there is a conflict, regardless of the input order,
    but returns a MatchFirst for best performance.

    Parameters:
     - strs - a string of space-delimited literals, or a list of string literals
     - caseless - (default=False) - treat all literals as caseless
     - useRegex - (default=True) - as an optimization, will generate a Regex
       object; otherwise, will generate a MatchFirst object (if caseless=True, or
       if creating a Regex raises an exception)
    """
    if caseless:
        isequal = ( lambda a,b: a.upper() == b.upper() )
        masks = ( lambda a,b: b.upper().startswith(a.upper()) )
        parseElementClass = CaselessLiteral
    else:
        isequal = ( lambda a,b: a == b )
        masks = ( lambda a,b: b.startswith(a) )
        parseElementClass = Literal

    if isinstance(strs,(list,tuple)):
        symbols = strs[:]
    elif isinstance(strs,basestring):
        symbols = strs.split()
    else:
        warnings.warn("Invalid argument to oneOf, expected string or list",
                SyntaxWarning, stacklevel=2)
        # NOTE(review): 'symbols' is undefined past this point, so an
        # invalid argument still fails with NameError below - kept as-is
        # to preserve the existing warn-then-fail behavior.

    # reorder so that no symbol is hidden behind an earlier prefix of
    # itself, and drop exact duplicates
    i = 0
    while i < len(symbols)-1:
        cur = symbols[i]
        for j,other in enumerate(symbols[i+1:]):
            if ( isequal(other, cur) ):
                del symbols[i+j+1]
                break
            elif ( masks(cur, other) ):
                # 'other' would be masked by its prefix 'cur' - move it earlier
                del symbols[i+j+1]
                symbols.insert(i,other)
                cur = other
                break
        else:
            i += 1

    if not caseless and useRegex:
        try:
            if len(symbols)==len("".join(symbols)):
                # all alternatives are single characters - a character class suffices
                return Regex( "[%s]" % "".join( [ _escapeRegexRangeChars(sym) for sym in symbols] ) )
            else:
                return Regex( "|".join( [ re.escape(sym) for sym in symbols] ) )
        except Exception:
            # was a bare 'except:'; narrowed so that KeyboardInterrupt and
            # SystemExit are no longer swallowed by the MatchFirst fallback
            warnings.warn("Exception creating Regex for oneOf, building MatchFirst",
                    SyntaxWarning, stacklevel=2)

    # last resort, just use MatchFirst
    return MatchFirst( [ parseElementClass(sym) for sym in symbols ] )
def dictOf( key, value ):
    """Build a Dict expression from separate key and value patterns.

    Shorthand for Dict(ZeroOrMore(Group(key + value))).  The key pattern
    may carry suppressed delimiters/punctuation, leaving just the key
    text; the value pattern may define named results that appear in the
    Dict entries.
    """
    return Dict(ZeroOrMore(Group(key + value)))
# convenience constants for positional expressions
empty = Empty().setName("empty")
lineStart = LineStart().setName("lineStart")
lineEnd = LineEnd().setName("lineEnd")
stringStart = StringStart().setName("stringStart")
stringEnd = StringEnd().setName("stringEnd")

# building blocks used by srange() to parse regex-style [] character sets:
# an escaped punctuation char, a \0x.. hex escape, a \0.. octal escape,
# any other single printable, and a 'lo-hi' range of the above
_escapedPunc = Word( _bslash, r"\[]-*.$+^?()~ ", exact=2 ).setParseAction(lambda s,l,t:t[0][1])
_printables_less_backslash = "".join([ c for c in printables if c not in r"\]" ])
_escapedHexChar = Combine( Suppress(_bslash + "0x") + Word(hexnums) ).setParseAction(lambda s,l,t:unichr(int(t[0],16)))
_escapedOctChar = Combine( Suppress(_bslash) + Word("0","01234567") ).setParseAction(lambda s,l,t:unichr(int(t[0],8)))
_singleChar = _escapedPunc | _escapedHexChar | _escapedOctChar | Word(_printables_less_backslash,exact=1)
_charRange = Group(_singleChar + Suppress("-") + _singleChar)
_reBracketExpr = Literal("[") + Optional("^").setResultsName("negate") + Group( OneOrMore( _charRange | _singleChar ) ).setResultsName("body") + "]"
# expand a (lo, hi) range pair into the full run of characters; pass
# single characters through unchanged
_expanded = lambda p: (isinstance(p,ParseResults) and ''.join([ unichr(c) for c in range(ord(p[0]),ord(p[1])+1) ]) or p)
def srange(s):
    r"""Helper to easily define string ranges for use in Word construction.  Borrows
    syntax from regexp '[]' string range definitions::
       srange("[0-9]")   -> "0123456789"
       srange("[a-z]")   -> "abcdefghijklmnopqrstuvwxyz"
       srange("[a-z$_]") -> "abcdefghijklmnopqrstuvwxyz$_"
    The input string must be enclosed in []'s, and the returned string is the expanded
    character set joined into a single string.
    The values enclosed in the []'s may be::
       a single character
       an escaped character with a leading backslash (such as \- or \])
       an escaped hex character with a leading '\0x' (\0x21, which is a '!' character)
       an escaped octal character with a leading '\0' (\041, which is a '!' character)
       a range of any of the above, separated by a dash ('a-z', etc.)
       any combination of the above ('aeiouy', 'a-zA-Z0-9_$', etc.)
    """
    try:
        return "".join([_expanded(part) for part in _reBracketExpr.parseString(s).body])
    except Exception:
        # was a bare 'except:'; malformed input still yields "", but
        # KeyboardInterrupt/SystemExit now propagate instead of being eaten
        return ""
def matchOnlyAtCol(n):
    """Return a parse action that fails unless the match begins at column *n*."""
    def _checkColumn(strg, locn, toks):
        # col() gives the 1-based column of locn within strg
        if col(locn, strg) != n:
            raise ParseException(strg, locn, "matched token not at column %d" % n)
    return _checkColumn
def replaceWith(replStr):
    """Return a parse action that replaces the matched tokens with *replStr*.

    Especially useful with transformString().
    """
    # ignore all parse-action arguments; always yield the replacement
    return lambda *args: [replStr]
def removeQuotes(s, l, t):
    """Parse action that strips the enclosing quote characters from a
    matched quoted string.  To use::
        quotedString.setParseAction( removeQuotes )
    """
    quoted = t[0]
    return quoted[1:-1]
def upcaseTokens(s, l, t):
    """Helper parse action to convert tokens to upper case."""
    return [_ustr(tok).upper() for tok in t]
def downcaseTokens(s, l, t):
    """Helper parse action to convert tokens to lower case."""
    return [_ustr(tok).lower() for tok in t]
def keepOriginalText(s,startLoc,t):
    """Helper parse action to preserve original parsed text,
    overriding any nested parse actions."""
    try:
        endloc = getTokensEndLoc()
    except ParseException:
        raise ParseFatalException("incorrect usage of keepOriginalText - may only be called as a parse action")
    # replace whatever nested parse actions produced with the raw input slice
    del t[:]
    t += ParseResults(s[startLoc:endloc])
    return t
def getTokensEndLoc():
    """Method to be called from within a parse action to determine the end
    location of the parsed tokens."""
    import inspect
    fstack = inspect.stack()
    try:
        # search up the stack (through intervening argument normalizers) for correct calling routine
        for f in fstack[2:]:
            if f[3] == "_parseNoCache":
                # read the parser's local 'loc' straight out of its frame
                endloc = f[0].f_locals["loc"]
                return endloc
        else:
            # for-else: no _parseNoCache frame found anywhere on the stack
            raise ParseFatalException("incorrect usage of getTokensEndLoc - may only be called from within a parse action")
    finally:
        # break the reference cycle created by holding stack frames
        del fstack
def _makeTags(tagStr, xml):
    """Internal helper to construct opening and closing tag expressions, given a tag name"""
    if isinstance(tagStr,basestring):
        resname = tagStr
        # HTML tag names match case-insensitively; XML ones do not
        tagStr = Keyword(tagStr, caseless=not xml)
    else:
        resname = tagStr.name
    tagAttrName = Word(alphas,alphanums+"_-:")
    if (xml):
        # XML: attribute values must be double-quoted and '=' is required
        tagAttrValue = dblQuotedString.copy().setParseAction( removeQuotes )
        openTag = Suppress("<") + tagStr + \
                Dict(ZeroOrMore(Group( tagAttrName + Suppress("=") + tagAttrValue ))) + \
                Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">")
    else:
        # HTML: values may be quoted or bare, attribute names are lowercased,
        # and the '=value' part is optional
        printablesLessRAbrack = "".join( [ c for c in printables if c not in ">" ] )
        tagAttrValue = quotedString.copy().setParseAction( removeQuotes ) | Word(printablesLessRAbrack)
        openTag = Suppress("<") + tagStr + \
                Dict(ZeroOrMore(Group( tagAttrName.setParseAction(downcaseTokens) + \
                Optional( Suppress("=") + tagAttrValue ) ))) + \
                Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">")
    closeTag = Combine(_L("</") + tagStr + ">")
    # results names like "startTagName"/"endTagName", camel-cased from the tag
    openTag = openTag.setResultsName("start"+"".join(resname.replace(":"," ").title().split())).setName("<%s>" % tagStr)
    closeTag = closeTag.setResultsName("end"+"".join(resname.replace(":"," ").title().split())).setName("</%s>" % tagStr)
    return openTag, closeTag
def makeHTMLTags(tagStr):
    """Helper to construct opening and closing tag expressions for HTML, given a tag name"""
    return _makeTags(tagStr, xml=False)
def makeXMLTags(tagStr):
    """Helper to construct opening and closing tag expressions for XML, given a tag name"""
    return _makeTags(tagStr, xml=True)
def withAttribute(*args,**attrDict):
    """Helper to create a validating parse action to be used with start tags created
    with makeXMLTags or makeHTMLTags.  Use withAttribute to qualify a starting tag
    with a required attribute value, to avoid false matches on common tags such as
    <TD> or <DIV>.
    Call withAttribute with a series of attribute names and values.  Specify the list
    of filter attributes names and values as:
     - keyword arguments, as in (class="Customer",align="right"), or
     - a list of name-value tuples, as in ( ("ns1:class", "Customer"), ("ns2:align","right") )
    For attribute names with a namespace prefix, you must use the second form.  Attribute
    names are matched insensitive to upper/lower case.
    To verify that the attribute exists, but without specifying a value, pass
    withAttribute.ANY_VALUE as the value.
    """
    # positional tuples win over keyword arguments when both are supplied
    if args:
        attrs = args[:]
    else:
        attrs = attrDict.items()
    # normalize to a list of (name, value) pairs
    attrs = [(k,v) for k,v in attrs]
    def pa(s,l,tokens):
        for attrName,attrValue in attrs:
            if attrName not in tokens:
                raise ParseException(s,l,"no matching attribute " + attrName)
            if attrValue != withAttribute.ANY_VALUE and tokens[attrName] != attrValue:
                raise ParseException(s,l,"attribute '%s' has value '%s', must be '%s'" %
                                    (attrName, tokens[attrName], attrValue))
    return pa
# sentinel accepted by withAttribute(): require the attribute to exist,
# but accept any value
withAttribute.ANY_VALUE = object()

# associativity constants for operatorPrecedence()
opAssoc = _Constants()
opAssoc.LEFT = object()
opAssoc.RIGHT = object()
def operatorPrecedence( baseExpr, opList ):
    """Helper method for constructing grammars of expressions made up of
    operators working in a precedence hierarchy.  Operators may be unary or
    binary, left- or right-associative.  Parse actions can also be attached
    to operator expressions.

    Parameters:
     - baseExpr - expression representing the most basic element for the nested
     - opList - list of tuples, one for each operator precedence level in the
       expression grammar; each tuple is of the form
       (opExpr, numTerms, rightLeftAssoc, parseAction), where:
        - opExpr is the pyparsing expression for the operator;
          may also be a string, which will be converted to a Literal;
          if numTerms is 3, opExpr is a tuple of two expressions, for the
          two operators separating the 3 terms
        - numTerms is the number of terms for this operator (must
          be 1, 2, or 3)
        - rightLeftAssoc is the indicator whether the operator is
          right or left associative, using the pyparsing-defined
          constants opAssoc.RIGHT and opAssoc.LEFT.
        - parseAction is the parse action to be associated with
          expressions matching this operator expression (the
          parse action tuple member may be omitted)
    """
    ret = Forward()
    # innermost level: the base element or a parenthesized full expression
    lastExpr = baseExpr | ( Suppress('(') + ret + Suppress(')') )
    # build one Forward per precedence level, from tightest to loosest binding
    for i,operDef in enumerate(opList):
        # pad with None so a 3-tuple (no parse action) unpacks cleanly
        opExpr,arity,rightLeftAssoc,pa = (operDef + (None,))[:4]
        if arity == 3:
            if opExpr is None or len(opExpr) != 2:
                raise ValueError("if numterms=3, opExpr must be a tuple or list of two expressions")
            opExpr1, opExpr2 = opExpr
        thisExpr = Forward()#.setName("expr%d" % i)
        if rightLeftAssoc == opAssoc.LEFT:
            # left-assoc: operand, then one-or-more operator/operand repeats
            if arity == 1:
                matchExpr = FollowedBy(lastExpr + opExpr) + Group( lastExpr + OneOrMore( opExpr ) )
            elif arity == 2:
                if opExpr is not None:
                    matchExpr = FollowedBy(lastExpr + opExpr + lastExpr) + Group( lastExpr + OneOrMore( opExpr + lastExpr ) )
                else:
                    # None operator means juxtaposition (e.g. implied multiplication)
                    matchExpr = FollowedBy(lastExpr+lastExpr) + Group( lastExpr + OneOrMore(lastExpr) )
            elif arity == 3:
                matchExpr = FollowedBy(lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr) + \
                            Group( lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr )
            else:
                raise ValueError("operator must be unary (1), binary (2), or ternary (3)")
        elif rightLeftAssoc == opAssoc.RIGHT:
            # right-assoc: recurse into this same level on the right-hand side
            if arity == 1:
                # try to avoid LR with this extra test
                if not isinstance(opExpr, Optional):
                    opExpr = Optional(opExpr)
                matchExpr = FollowedBy(opExpr.expr + thisExpr) + Group( opExpr + thisExpr )
            elif arity == 2:
                if opExpr is not None:
                    matchExpr = FollowedBy(lastExpr + opExpr + thisExpr) + Group( lastExpr + OneOrMore( opExpr + thisExpr ) )
                else:
                    matchExpr = FollowedBy(lastExpr + thisExpr) + Group( lastExpr + OneOrMore( thisExpr ) )
            elif arity == 3:
                matchExpr = FollowedBy(lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr) + \
                            Group( lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr )
            else:
                raise ValueError("operator must be unary (1), binary (2), or ternary (3)")
        else:
            raise ValueError("operator must indicate right or left associativity")
        if pa:
            matchExpr.setParseAction( pa )
        # this level matches either its own operator form or anything tighter
        thisExpr << ( matchExpr | lastExpr )
        lastExpr = thisExpr
    ret << lastExpr
    return ret
# pre-built quoted-string expressions; the regexes allow doubled quotes,
# \x.. hex escapes, and backslash escapes within the string body
dblQuotedString = Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\x[0-9a-fA-F]+)|(?:\\.))*"').setName("string enclosed in double quotes")
sglQuotedString = Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\x[0-9a-fA-F]+)|(?:\\.))*'").setName("string enclosed in single quotes")
quotedString = Regex(r'''(?:"(?:[^"\n\r\\]|(?:"")|(?:\\x[0-9a-fA-F]+)|(?:\\.))*")|(?:'(?:[^'\n\r\\]|(?:'')|(?:\\x[0-9a-fA-F]+)|(?:\\.))*')''').setName("quotedString using single or double quotes")
# a quoted string with a leading 'u' prefix (Python unicode literal style)
unicodeString = Combine(_L('u') + quotedString.copy())
def nestedExpr(opener="(", closer=")", content=None, ignoreExpr=quotedString):
    """Helper method for defining nested lists enclosed in opening and closing
    delimiters ("(" and ")" are the default).

    Parameters:
     - opener - opening character for a nested list (default="("); can also be a pyparsing expression
     - closer - closing character for a nested list (default=")"); can also be a pyparsing expression
     - content - expression for items within the nested lists (default=None)
     - ignoreExpr - expression for ignoring opening and closing delimiters (default=quotedString)
    If an expression is not provided for the content argument, the nested
    expression will capture all whitespace-delimited content between delimiters
    as a list of separate values.
    Use the ignoreExpr argument to define expressions that may contain
    opening or closing characters that should not be treated as opening
    or closing characters for nesting, such as quotedString or a comment
    expression.  Specify multiple expressions using an Or or MatchFirst.
    The default is quotedString, but if no expressions are to be ignored,
    then pass None for this argument.
    """
    if opener == closer:
        raise ValueError("opening and closing strings cannot be the same")
    if content is None:
        # build a default content expression: any run of characters that are
        # not delimiters or whitespace (and not part of an ignored expression)
        if isinstance(opener,basestring) and isinstance(closer,basestring):
            if ignoreExpr is not None:
                content = (Combine(OneOrMore(~ignoreExpr +
                                CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS,exact=1))
                            ).setParseAction(lambda t:t[0].strip()))
            else:
                content = (empty+CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS).setParseAction(lambda t:t[0].strip()))
        else:
            raise ValueError("opening and closing arguments must be strings if no content expression is given")
    # the expression is recursive: a group may contain nested groups
    ret = Forward()
    if ignoreExpr is not None:
        ret << Group( Suppress(opener) + ZeroOrMore( ignoreExpr | ret | content ) + Suppress(closer) )
    else:
        ret << Group( Suppress(opener) + ZeroOrMore( ret | content ) + Suppress(closer) )
    return ret
def indentedBlock(blockStatementExpr, indentStack, indent=True):
    """Helper method for defining space-delimited indentation blocks, such as
    those used to define block statements in Python source code.

    Parameters:
     - blockStatementExpr - expression defining syntax of statement that
       is repeated within the indented block
     - indentStack - list created by caller to manage indentation stack
       (multiple statementWithIndentedBlock expressions within a single grammar
       should share a common indentStack)
     - indent - boolean indicating whether block must be indented beyond the
       the current level; set to False for block of left-most statements
       (default=True)
    A valid block must contain at least one blockStatement.
    """
    def checkPeerIndent(s,l,t):
        # statements within a block must all start at the same column
        if l >= len(s): return
        curCol = col(l,s)
        if curCol != indentStack[-1]:
            if curCol > indentStack[-1]:
                raise ParseFatalException(s,l,"illegal nesting")
            raise ParseException(s,l,"not a peer entry")
    def checkSubIndent(s,l,t):
        # entering a block: column must be strictly deeper; push it
        curCol = col(l,s)
        if curCol > indentStack[-1]:
            indentStack.append( curCol )
        else:
            raise ParseException(s,l,"not a subentry")
    def checkUnindent(s,l,t):
        # leaving a block: column must fall back to an enclosing level; pop
        if l >= len(s): return
        curCol = col(l,s)
        if not(indentStack and curCol < indentStack[-1] and curCol <= indentStack[-2]):
            raise ParseException(s,l,"not an unindent")
        indentStack.pop()
    NL = OneOrMore(LineEnd().setWhitespaceChars("\t ").suppress())
    INDENT = Empty() + Empty().setParseAction(checkSubIndent)
    PEER = Empty().setParseAction(checkPeerIndent)
    UNDENT = Empty().setParseAction(checkUnindent)
    if indent:
        smExpr = Group( Optional(NL) +
            FollowedBy(blockStatementExpr) +
            INDENT + (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) + UNDENT)
    else:
        smExpr = Group( Optional(NL) +
            (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) )
    # allow backslash line-continuations inside block statements
    blockStatementExpr.ignore("\\" + LineEnd())
    return smExpr
# 8-bit (Latin-1) letter and punctuation ranges
alphas8bit = srange(r"[\0xc0-\0xd6\0xd8-\0xf6\0xf8-\0xff]")
punc8bit = srange(r"[\0xa1-\0xbf\0xd7\0xf7]")

# matches any HTML open/close tag, plus a small set of common entities
anyOpenTag,anyCloseTag = makeHTMLTags(Word(alphas,alphanums+"_:"))
commonHTMLEntity = Combine(_L("&") + oneOf("gt lt amp nbsp quot").setResultsName("entity") +";")
_htmlEntityMap = dict(zip("gt lt amp nbsp quot".split(),"><& '"))
replaceHTMLEntity = lambda t : t.entity in _htmlEntityMap and _htmlEntityMap[t.entity] or None

# it's easy to get these comment structures wrong - they're very common, so may as well make them available
cStyleComment = Regex(r"/\*(?:[^*]*\*+)+?/").setName("C style comment")
htmlComment = Regex(r"<!--[\s\S]*?-->")
restOfLine = Regex(r".*").leaveWhitespace()
dblSlashComment = Regex(r"\/\/(\\\n|.)*").setName("// comment")
cppStyleComment = Regex(r"/(?:\*(?:[^*]*\*+)+?/|/[^\n]*(?:\n[^\n]*)*?(?:(?<!\\)|\Z))").setName("C++ style comment")
javaStyleComment = cppStyleComment
pythonStyleComment = Regex(r"#.*").setName("Python style comment")

# one item of a comma-separated list: everything up to a comma or end of line
_noncomma = "".join( [ c for c in printables if c != "," ] )
_commasepitem = Combine(OneOrMore(Word(_noncomma) +
                    Optional( Word(" \t") +
                            ~Literal(",") + ~LineEnd() ) ) ).streamline().setName("commaItem")
commaSeparatedList = delimitedList( Optional( quotedString | _commasepitem, default="") ).setName("commaSeparatedList")
if __name__ == "__main__":

    def test( teststring ):
        """Parse *teststring* with simpleSQL and dump tokens/fields to stdout."""
        try:
            tokens = simpleSQL.parseString( teststring )
            tokenlist = tokens.asList()
            print (teststring + "->" + str(tokenlist))
            print ("tokens = " + str(tokens))
            print ("tokens.columns = " + str(tokens.columns))
            print ("tokens.tables = " + str(tokens.tables))
            print (tokens.asXML("SQL",True))
        except ParseBaseException,err:
            # show the offending line with a caret under the error column
            print (teststring + "->")
            print (err.line)
            print (" "*(err.column-1) + "^")
            print (err)
        print()

    # minimal SELECT ... FROM ... grammar used by the smoke tests below
    selectToken = CaselessLiteral( "select" )
    fromToken = CaselessLiteral( "from" )

    ident = Word( alphas, alphanums + "_$" )
    columnName = delimitedList( ident, ".", combine=True ).setParseAction( upcaseTokens )
    columnNameList = Group( delimitedList( columnName ) )#.setName("columns")
    tableName = delimitedList( ident, ".", combine=True ).setParseAction( upcaseTokens )
    tableNameList = Group( delimitedList( tableName ) )#.setName("tables")
    simpleSQL = ( selectToken + \
                ( '*' | columnNameList ).setResultsName( "columns" ) + \
                fromToken + \
                tableNameList.setResultsName( "tables" ) )

    # valid statements first, then deliberately malformed ones
    test( "SELECT * from XYZZY, ABC" )
    test( "select * from SYS.XYZZY" )
    test( "Select A from Sys.dual" )
    test( "Select AA,BB,CC from Sys.dual" )
    test( "Select A, B, C from Sys.dual" )
    test( "Select A, B, C from Sys.dual" )
    test( "Xelect A, B, C from Sys.dual" )
    test( "Select A, B, C frox Sys.dual" )
    test( "Select" )
    test( "Select ^^^ frox Sys.dual" )
    test( "Select A, B, C from Sys.dual, Table2 " )
| eberle1080/tesserae-ng | website/graphite/thirdparty/pyparsing.py | Python | bsd-2-clause | 147,580 |
from django.core.mail import mail_admins
from django.utils.translation import ugettext as _
from django.shortcuts import get_object_or_404, render
from django.http import HttpResponseRedirect
from django.contrib.auth.models import User
from django.contrib.auth import authenticate, login
from django.contrib.auth.decorators import login_required
from django.contrib import messages
from django.core.urlresolvers import reverse
from django.contrib.sites.shortcuts import get_current_site
from django.conf import settings
from datetime import datetime, timedelta
from django_redis import get_redis_connection
from .models import (Invitation, UserProfile,
EmailCandidate)
from .forms import (FollowForm, RegisterForm, UpdateProfileForm,
EmailCandidateForm, QuestionForm,
FollowerActionForm)
from ..question.models import (QuestionMeta, Question)
from django.http import HttpResponseBadRequest, JsonResponse
from ..follow.models import UserFollow
from ..notification.utils import notify
from ..notification.decorators import delete_notification
from ..story.models import Story
from ..account.signals import follower_count_changed
from ..account.forms import NotificationPreferencesForm
from resmin.utils import paginated, send_email_from_template
redis = get_redis_connection('default')
@delete_notification
def profile(request, username=None, listing='public', action=None):
    """Render a user's profile page.

    Handles three concerns: showing the user's stories (unless either side
    has blocked the other), posting a question to the user (POST), and the
    follow/unfollow/block/unblock flow selected via *action*.  Falls back
    to the logged-in user when *username* is None.
    """
    user = get_object_or_404(User, username=username, is_active=True) if \
        username else request.user

    user_is_blocked_me, user_is_blocked_by_me,\
        i_am_follower_of_user, have_pending_follow_request \
        = False, False, False, False

    if request.user.is_authenticated():
        # NOTE(review): both flags are computed from the identical
        # expression user.is_blocked_by(request.user); one direction is
        # probably meant to be request.user.is_blocked_by(user) - confirm
        # against the UserProfile API before relying on these values.
        user_is_blocked_me = user.is_blocked_by(request.user)
        user_is_blocked_by_me = user.is_blocked_by(request.user)
        i_am_follower_of_user = request.user.is_following(user)
        have_pending_follow_request = \
            request.user.has_pending_follow_request(user)

    ctx = {'profile_user': user,
           'listing': listing,
           'user_is_blocked_by_me': user_is_blocked_by_me,
           'user_is_blocked_me': user_is_blocked_me,
           'have_pending_follow_request': have_pending_follow_request,
           'i_am_follower_of_user': i_am_follower_of_user}

    # If there are not blocks, fill ctx with answers
    if not (user_is_blocked_me or user_is_blocked_by_me):
        stories = Story.objects.build(
            frm=user, requested_user=request.user, listing=listing)
        ctx['stories'] = paginated(request, stories,
                                   settings.STORIES_PER_PAGE)

    # question form: bound on POST (may redirect on success), blank otherwise
    if request.POST:
        question_form = QuestionForm(request.POST, questioner=request.user,
                                     questionee=user)
        if question_form.is_valid():
            question_form.save()
            messages.success(request, _('Your question sent to user.'))
            return HttpResponseRedirect(user.get_absolute_url())
    else:
        question_form = QuestionForm(questioner=request.user,
                                     questionee=user)

    if action:
        # follow/unfollow/block/unblock flow; on a valid POST the form
        # applies the action and redirects back to the profile page
        ctx['action'] = action
        follow_form = FollowForm(follower=request.user,
                                 target=user,
                                 action=action)
        if request.POST:
            follow_form = FollowForm(request.POST,
                                     follower=request.user,
                                     target=user,
                                     action=action)
            if follow_form.is_valid():
                follow_form.save()
                if action == 'follow':
                    messages.success(
                        request, _('Follow request sent to user'))
                elif action == 'unfollow':
                    messages.success(
                        request, _('You are not a follower anymore'))
                elif action == 'block':
                    messages.success(
                        request, _('You have blocked this user'))
                elif action == 'unblock':
                    messages.success(
                        request, _('You have unblocked this user'))
                return HttpResponseRedirect(user.get_absolute_url())
        ctx['follow_form'] = follow_form

    ctx['question_form'] = question_form
    return render(request, "auth/user_detail.html", ctx)
@login_required
def pending_follow_requests(request):
    """List follow requests awaiting action by the current user."""
    requests_qs = UserFollow.objects.filter(
        target=request.user, status=UserFollow.PENDING)
    # the site is only needed by the template when there is nothing pending
    current_site = None if requests_qs else get_current_site(request)
    return render(request,
                  'auth/pending_follow_requests.html',
                  {'pending_follow_requests': requests_qs,
                   'site': current_site})
@login_required
def pending_follow_request_action(request):
    """Accept, accept-restricted, or decline a pending follow request.

    Expects POST data:
      - pk: primary key of a UserFollow row targeting the current user
      - action: one of 'accept', 'accept-restricted', 'decline'

    Returns a JSON response describing the outcome.  (The original code
    referenced an undefined name ``JsonRsponse`` and fell through without
    a response for non-POST requests and for the accept-restricted branch.)
    """
    if request.method != 'POST':
        return JsonResponse({'success': False,
                             'message': 'Invalid data'})
    try:
        # POST.get() may return None; int() then raises TypeError
        frpk = int(request.POST.get('pk'))
    except (TypeError, ValueError):
        return JsonResponse({'errMsg': 'Invalid data'}, status=400)
    action = request.POST.get('action')
    follow_request = get_object_or_404(
        UserFollow, pk=frpk, target=request.user)
    if action in ('accept', 'accept-restricted'):
        follow_request.status = (
            UserFollow.FOLLOWING if action == 'accept'
            else UserFollow.FOLLOWING_RESTRICTED)
        follow_request.save()
        follower_count_changed.send(sender=request.user)
        notify(ntype_slug='user_accepted_my_follow_request',
               sub=follow_request.target,
               obj=follow_request,
               recipient=follow_request.follower,
               url=follow_request.target.get_absolute_url())
        return JsonResponse({'success': True})
    elif action == 'decline':
        follow_request.delete()
        return JsonResponse({'success': True})
    return JsonResponse({'success': False,
                         'message': 'Invalid data'})
@login_required
def update_profile(request):
    """Edit the current user's profile; redirect back to it on success."""
    profile = get_object_or_404(UserProfile, user=request.user)
    form = UpdateProfileForm(instance=profile)
    if request.POST:
        # re-bind with submitted data (FILES carries the avatar upload)
        form = UpdateProfileForm(
            request.POST, request.FILES, instance=profile)
        if form.is_valid():
            profile = form.save(commit=False)
            profile.user = request.user
            profile.save()
            messages.success(request, _('Your profile updated'))
            return HttpResponseRedirect(
                reverse('profile',
                        kwargs={'username': request.user.username}))
    # avatar question is optional; settings may reference a missing meta
    try:
        avatar_questionmeta = QuestionMeta.objects.get(
            id=settings.AVATAR_QUESTIONMETA_ID)
    except QuestionMeta.DoesNotExist:
        avatar_questionmeta = None
    return render(
        request,
        "auth/update_profile.html",
        {'form': form,
         'profile_user': request.user,
         'avatar_questionmeta': avatar_questionmeta})
def register(request):
    """Sign up a new user, log them in, and seed them with an invitation."""
    # blank form pre-filled with the invitation key from the query string
    form = RegisterForm(initial={'key': request.GET.get("key", None)})
    if request.POST:
        form = RegisterForm(request.POST)
        if form.is_valid():
            user = form.save()
            # re-fetch through authenticate() so the session backend is set
            user = authenticate(username=form.cleaned_data['username'],
                                password=form.cleaned_data['pass_1'])
            login(request, user)
            Invitation.objects.create(owner=user)
            messages.success(request, _('Registration complete, wellcome :)'))
            mail_admins('User %s registered.' % user,
                        'Seems, we\'re doing things well...')
            return HttpResponseRedirect("/")
    return render(request, 'auth/register.html', {'form': form})
@login_required
def invitations(request):
    """Show the current user's invitations, least-used first."""
    invs = Invitation.objects.filter(
        owner=request.user).order_by("used_count")
    context = {'site': get_current_site(request),
               'profile_user': request.user,
               'invs': invs}
    return render(request, 'auth/invitations.html', context)
@login_required
def followers(request, username):
    """List *username*'s active followers.

    A POST applies a bulk follower action (see FollowerActionForm) before
    the listing is rendered.
    """
    if request.method == 'POST':
        action_form = FollowerActionForm(request.POST, username=username)
        if action_form.is_valid():
            result = action_form.save()
            if result:
                # save() returns a human-readable message when there is
                # something to warn the user about
                messages.warning(request, result)
    else:
        action_form = FollowerActionForm(username=username)
    user = get_object_or_404(User, username=username)
    user_follows = UserFollow.objects\
        .filter(target=user, follower__is_active=True)\
        .prefetch_related('follower__userprofile')
    return render(
        request,
        'auth/followers.html',
        {'profile_user': user,
         'action_form': action_form,
         'user_follows': paginated(request, user_follows,
                                   settings.QUESTIONS_PER_PAGE)})
@login_required
def followings(request, username):
    """List active users that *username* is following."""
    profile_user = get_object_or_404(User, username=username)
    follows = (UserFollow.objects
               .filter(follower=profile_user, target__is_active=True)
               .prefetch_related('follower__userprofile'))
    return render(request,
                  'auth/followings.html',
                  {'user': request.user,
                   'profile_user': profile_user,
                   'user_follows': follows})
@login_required
def hof(request):
    """Hall of fame: the 40 most-liked profiles."""
    top_profiles = UserProfile.objects.order_by('-like_count')[:40]
    return render(request, 'auth/hof.html', {'profiles': top_profiles})
@login_required
def notification_preferences(request):
    """Show and update the current user's notification preferences."""
    if request.method == 'POST':
        form = NotificationPreferencesForm(request.POST, user=request.user)
        if form.is_valid():
            form.save()
    else:
        form = NotificationPreferencesForm(user=request.user)
    # on an invalid POST the bound form re-renders with its errors
    return render(request,
                  'auth/notification_preferences.html',
                  {'profile_user': request.user,
                   'form': form})
@login_required
def email(request, key=None):
    """Email confirmation flow.

    Without *key*: show/handle the form that creates an EmailCandidate and
    sends a confirmation mail.  With *key*: confirm the matching candidate
    and copy its address onto the user.  Candidates older than ~6 months
    are purged on every call.
    """
    # NOTE(review): utcnow() is naive; this assumes created_at is stored
    # as naive UTC too - confirm against the model and USE_TZ setting.
    EmailCandidate.objects.filter(
        created_at__lte=datetime.utcnow() - timedelta(days=6*30)).delete()
    if key:
        try:
            email = EmailCandidate.objects.get(key=key)
        except EmailCandidate.DoesNotExist:
            email = None
        if email:
            user = email.owner
            user.email = email.email
            user.save()
            messages.success(request, _('Your email confirmed :)'))
            mail_admins('User %s left his email: %s' % (
                user, user.email), 'Seems, we\'re doing things well...')
            return HttpResponseRedirect("/")
        else:
            # unknown (or already purged) confirmation key
            return render(request, 'auth/email_form.html', {
                'key_wrong': True})
    else:
        if request.POST:
            form = EmailCandidateForm(request.POST)
            if form.is_valid():
                candidate = form.save(commit=False)
                candidate.owner = request.user
                candidate.save()
                # mail the confirmation link to the address being claimed
                send_email_from_template(
                    'confirmation',
                    [candidate.email],
                    {'domain': get_current_site(request),
                     'candidate': candidate})
                return render(request, 'auth/email_form.html', {
                    'profile_user': request.user})
            else:
                return render(request, 'auth/email_form.html', {
                    'form': form,
                    'profile_user': request.user})
        else:
            return render(request, 'auth/email_form.html', {
                'form': EmailCandidateForm,
                'profile_user': request.user})
| Resmin/Resmin | resmin/apps/account/views.py | Python | gpl-3.0 | 11,996 |
from rest_framework import serializers
from .models import Book
class BookSerializer(serializers.ModelSerializer):
    """Serializes Book instances; `owner` is read-only and rendered as the
    owning user's username (it is set server-side, never from the payload)."""
    owner = serializers.ReadOnlyField(source='owner.username')
    class Meta:
        model = Book
        fields = ( 'id', 'owner', 'ISBN', 'title', 'author', 'requested', 'active')
| Guin-/bookswap | bookswap/books/serializers.py | Python | mit | 303 |
def extractExperimentserialWordpressCom(item):
    '''
    Parser for 'experimentserial.wordpress.com'

    Returns a release message for recognised tags, None for previews or
    items without volume/chapter info, and False for unrecognised tags.
    '''
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or "preview" in item['title'].lower():
        return None
    # (feed tag, series name, translation type)
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
| fake-name/ReadableWebProxy | WebMirror/management/rss_parser_funcs/feed_parse_extractExperimentserialWordpressCom.py | Python | bsd-3-clause | 572 |
#
# Copyright (C) 2009-2011 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
# Red Hat Author(s): Chris Lumens <clumens@redhat.com>
# David Cantrell <dcantrell@redhat.com>
#
import _ped
import unittest
from tests.baseclass import *
# One class per method, multiple tests per class. For these simple methods,
# that seems like good organization. More complicated methods may require
# multiple classes and their own test suite.
class DiskTypeNewTestCase(unittest.TestCase):
    """_ped.DiskType objects cannot be constructed from Python."""
    def runTest(self):
        # You're not allowed to create a new DiskType object by hand.
        self.assertRaises(TypeError, _ped.DiskType)
class DiskTypeGetSetTestCase(RequiresDiskTypes):
    """DiskType attributes are read-only and correctly typed."""
    def runTest(self):
        # All attributes are read-only.
        for name in self.disktype.keys():
            t = self.disktype[name]
            self.assertRaises(AttributeError, setattr, t, "name", "fakename")
            self.assertRaises(AttributeError, setattr, t, "features", 47)
            self.assertTrue(isinstance(t.name, str))
            self.assertEquals(t.name, name)
            # Python 2: the feature bitmask is exposed as a long.
            self.assertTrue(isinstance(t.features, long))
class DiskTypeCheckFeatureTestCase(RequiresDiskTypes):
    """check_feature() reports the expected feature set per label type."""
    def runTest(self):
        # The following types have no features [that libparted supports]
        for name in ['aix', 'sun', 'bsd', 'loop']:
            self.assertFalse(self.disktype[name].check_feature(_ped.DISK_TYPE_EXTENDED))
            self.assertFalse(self.disktype[name].check_feature(_ped.DISK_TYPE_PARTITION_NAME))
        # The following types support DISK_TYPE_EXTENDED
        for name in ['msdos']:
            self.assertTrue(self.disktype[name].check_feature(_ped.DISK_TYPE_EXTENDED))
            self.assertFalse(self.disktype[name].check_feature(_ped.DISK_TYPE_PARTITION_NAME))
        # The following types support DISK_TYPE_PARTITION_NAME
        for name in ['amiga', 'gpt', 'mac', 'pc98']:
            self.assertFalse(self.disktype[name].check_feature(_ped.DISK_TYPE_EXTENDED))
            self.assertTrue(self.disktype[name].check_feature(_ped.DISK_TYPE_PARTITION_NAME))
        # The following types support all features
        for name in ['dvh']:
            self.assertTrue(self.disktype[name].check_feature(_ped.DISK_TYPE_EXTENDED))
            self.assertTrue(self.disktype[name].check_feature(_ped.DISK_TYPE_PARTITION_NAME))
class DiskTypeStrTestCase(RequiresDiskTypes):
    """str(DiskType) matches the canonical 'name ... features: N' format
    (features: 1 = extended, 2 = partition names, 3 = both, 0 = none)."""
    def runTest(self):
        self.assertEquals(str(self.disktype['msdos']), '_ped.DiskType instance --\n  name: msdos  features: 1')
        self.assertEquals(str(self.disktype['aix']), '_ped.DiskType instance --\n  name: aix  features: 0')
        self.assertEquals(str(self.disktype['sun']), '_ped.DiskType instance --\n  name: sun  features: 0')
        self.assertEquals(str(self.disktype['amiga']), '_ped.DiskType instance --\n  name: amiga  features: 2')
        self.assertEquals(str(self.disktype['gpt']), '_ped.DiskType instance --\n  name: gpt  features: 2')
        self.assertEquals(str(self.disktype['mac']), '_ped.DiskType instance --\n  name: mac  features: 2')
        self.assertEquals(str(self.disktype['bsd']), '_ped.DiskType instance --\n  name: bsd  features: 0')
        self.assertEquals(str(self.disktype['pc98']), '_ped.DiskType instance --\n  name: pc98  features: 2')
        self.assertEquals(str(self.disktype['loop']), '_ped.DiskType instance --\n  name: loop  features: 0')
        self.assertEquals(str(self.disktype['dvh']), '_ped.DiskType instance --\n  name: dvh  features: 3')
| bbqlinux/pyparted | tests/test__ped_disktype.py | Python | gpl-2.0 | 4,410 |
# coding=utf-8
import sys
from collections import OrderedDict
"""
PatternCount(Text, Pattern)
count ← 0
for i ← 0 to |Text| − |Pattern|
if Text(i, |Pattern|) = Pattern
count ← count + 1
return count
"""
def count_pattern(dna, pattern):
    """Count overlapping occurrences of *pattern* in *dna*.

    Both strings are upper-cased first, so matching is case-insensitive.
    """
    haystack = dna.upper()
    needle = pattern.upper()
    total = 0
    pos = haystack.find(needle)
    while pos != -1:
        total += 1
        # Advance by one (not len(needle)) so overlapping hits count too.
        pos = haystack.find(needle, pos + 1)
    return total
"""
FrequentWords(Text, k)
FrequentPatterns ← an empty set
for i ← 0 to |Text| − k
Pattern ← the k-mer Text(i, k)
Count(i) ← PatternCount(Text, Pattern)
maxCount ← maximum value in array Count
for i ← 0 to |Text| − k
if Count(i) = maxCount
add Text(i, k) to FrequentPatterns
return FrequentPatterns
"""
def get_most_freq_n_mer(dna, n):
    """Return ``(most_frequent_n_mers, max_count)`` for the upper-cased dna.

    All ties are returned, in first-seen order.  The winning count and each
    winning n-mer are also printed (kept for CLI compatibility).

    Fixes: dropped the shadowed builtin name ``dict``, the
    ``dict.__contains__``/``str.upper(dna)``/``str.__len__`` dunder calls,
    and uses ``dict.get`` for the tally.
    """
    dna = dna.upper()
    counts = {}
    # Tally every overlapping n-mer.
    for index in range(len(dna) - n + 1):
        n_mer = dna[index: index + n]
        counts[n_mer] = counts.get(n_mer, 0) + 1
    max_count = 0
    most_freq_n_mers = []
    for n_mer, count in counts.items():
        if count > max_count:
            most_freq_n_mers = [n_mer]
            max_count = count
        elif count == max_count:
            most_freq_n_mers.append(n_mer)
    print('Most Frequent n-mer Count: %d' % max_count)
    for n_mer in most_freq_n_mers:
        print(n_mer)
    return most_freq_n_mers, max_count
def get_reverse_complement(dna):
    """Return the reverse complement of *dna* (input is upper-cased).

    Raises Exception('Invalid DNA base.') for characters outside A/C/G/T.
    """
    mapping = {'A': 'T', 'T': 'A', 'C': 'G', 'G': 'C'}
    complement_bases = []
    for base in dna.upper():
        try:
            complement_bases.append(mapping[base])
        except KeyError:
            raise Exception('Invalid DNA base.')
    return ''.join(reversed(complement_bases))
'''
CODE CHALLENGE: Solve the Pattern Matching Problem.
Input: Two strings, Pattern and Genome.
Output: A collection of space-separated integers specifying all starting positions where Pattern appears
as a substring of Genome.
'''
def match_pattern(dna, pattern, match_rc=False):
    """Print and return every 0-based start index (as strings) at which
    *pattern* -- or, when *match_rc* is true, its reverse complement --
    occurs in *dna*.  Matching is case-insensitive."""
    text = dna.upper()
    needle = pattern.upper()
    needle_rc = get_reverse_complement(needle)
    locations = []
    for start in range(len(text) - len(needle) + 1):
        window = text[start:start + len(needle)]
        if window == needle or (match_rc and window == needle_rc):
            locations.append(str(start))
    print(", ".join(locations))
    return locations
'''
Clump Finding Problem: Find patterns forming clumps in a string.
Input: A string Genome, and integers k, L, and t.
Output: All distinct k-mers forming (L, t)-clumps in Genome.
Definition of Clump:
Given integers L and t, a k-mer Pattern forms an (L, t)-clump inside a (longer) string Genome
if there is an interval of Genome of length L in which this k-mer appears at least t times.
(This definition assumes that the k-mer completely fits within the interval.)
'''
def find_clump(dna, k, t, L):
    """Return all distinct k-mers forming (L, t)-clumps in dna, sorted.

    A k-mer forms an (L, t)-clump if it occurs at least t times inside
    some window of length L of dna.
    """
    # Count the k-mers of the first window once...
    s = dna[0: L]
    counts = _count_frequencies(s, k)
    frequent_set = set()
    for pattern, count in iter(counts.items()):
        if count >= t:
            frequent_set.add(pattern)
    # ...then slide the window one base at a time: decrement the k-mer
    # leaving on the left, increment the one entering on the right, and
    # re-scan the tallies for new clump members.
    for index in range(1, len(dna)-L+1):
        begin = dna[index - 1: index - 1 + k]
        counts[begin] -= 1
        end = dna[index + L - k: index + L]
        increase_count(counts, end)
        for pattern, count in iter(counts.items()):
            if count >= t:
                frequent_set.add(pattern)
    clumps = sorted(list(frequent_set))
    return clumps
def _count_frequencies(dna, k):
    """Tally every overlapping k-mer of *dna* into a {k-mer: count} dict."""
    counts = {}
    for start in range(len(dna) - k + 1):
        kmer = dna[start: start + k]
        if kmer in counts:
            counts[kmer] += 1
        else:
            counts[kmer] = 1
    return counts
def increase_count(dict, pattern):
    """Increment dict[pattern], creating the key at 1 when absent.

    NOTE: the first parameter shadows the builtin ``dict``; the name is
    kept unchanged so existing keyword callers keep working.
    """
    dict[pattern] = dict.get(pattern, 0) + 1
_base_to_number_dict = {'A': 0, 'C': 1, 'G': 2, 'T': 3}
_number_to_base_table = ['A', 'C', 'G', 'T']
def pattern_to_number(pattern):
    """Encode a DNA k-mer as its base-4 integer (A=0, C=1, G=2, T=3)."""
    digit_of = {'A': 0, 'C': 1, 'G': 2, 'T': 3}
    number = 0
    for base in pattern:
        number = number * 4 + digit_of[base]
    return number
def number_to_pattern(number, k):
    """Decode a base-4 integer back into its length-k DNA pattern.

    Bug fix: the original used ``number /= 4``, which is *float* division
    on Python 3, so the digit lookup broke; ``divmod`` keeps everything
    integral on both Python 2 and 3.
    """
    bases = []
    for _ in range(k):
        number, remainder = divmod(number, 4)
        bases.append('ACGT'[remainder])
    # Digits were produced least-significant first; reverse for the pattern.
    return ''.join(reversed(bases))
def compute_frequencies(text, k):
    """Return the 4**k frequency array of all k-mers in *text*.

    Slot i holds the count of the k-mer whose pattern_to_number() is i.
    """
    frequency_array = [0] * 4**k
    for index in range(0, len(text) - k + 1):
        pattern = text[index:index+k]
        array_index = pattern_to_number(pattern)
        frequency_array[array_index] += 1
    return frequency_array
def find_clump_efficient(dna, k, t, L):
    """Find (L, t)-clump k-mers using a 4**k frequency array.

    Same result as find_clump, but clump membership is checked
    incrementally (only the k-mer whose count just changed), avoiding the
    per-shift full re-scan of the tallies.
    """
    # Seed counts from the first window of length L.
    s = dna[0: L]
    frequency_array = [0] * 4**k
    frequent_set = set()
    for index in range(0, len(s) - k + 1):
        pattern = s[index:index+k]
        array_index = pattern_to_number(pattern)
        frequency_array[array_index] += 1
        if frequency_array[array_index] >= t:
            frequent_set.add(pattern)
    for index in range(1, len(dna)-L+1):
        # k-mer leaving the window on the left...
        begin = dna[index - 1: index - 1 + k]
        frequency_array[pattern_to_number(begin)] -=1
        # ...and the k-mer entering on the right.
        end = dna[index + L - k: index + L]
        end_index = pattern_to_number(end)
        frequency_array[end_index] += 1
        if frequency_array[end_index] >= t:
            frequent_set.add(end)
    clumps = sorted(list(frequent_set))
    return clumps
| Bioinformanics/ucsd-bioinformatics-1 | Week1/week1_utility.py | Python | gpl-3.0 | 5,648 |
from mergeRUCBSampler import mergeRUCBSampler
from RelativeThompsonSampler import RelativeThompsonSampler
from RelativeConfidenceSampler import RelativeConfidenceSampler
from RelativeUCBSampler import RelativeUCBSampler
from SAVAGESampler import SAVAGESampler
from BeatTheMeanSampler import BeatTheMeanSampler
from BaselineSampler import BaselineSampler | redreamality/learning-to-rank | lerot/sampler/__init__.py | Python | gpl-3.0 | 353 |
# -*- coding: ascii -*-
u"""
:Copyright:
Copyright 2014 - 2021
Andr\xe9 Malo or his licensors, as applicable
:License:
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
===============================
Tests for gensaschema._config
===============================
Tests for gensaschema._config
"""
__author__ = u"Andr\xe9 Malo"
import os as _os
import tempfile as _tempfile
from gensaschema import _config
# pylint: disable = protected-access
def fixture(name):
    """ Find fixture """
    # Fixtures live in a 'fixtures' directory next to this test module.
    base = _os.path.dirname(_os.path.normpath(_os.path.abspath(__file__)))
    return _os.path.join(base, 'fixtures', name)
def test_init():
    """ Config initialization works as expected """
    # config1.schema mixes bare names, "alias = table" lines,
    # schema-qualified names and a [schemas] section.
    inst = _config.Config.from_file(fixture('config1.schema'))
    assert inst.tables == [
        ('Yo', 'Yo'),
        ('some', 'table'),
        ('somethingElse', 'somethingElse'),
        ('y', 'x.y'),
        ('a', 'b.c'),
    ]
    assert inst.schemas == {'foo': 'bar'}
    # The raw file lines are kept verbatim for later round-tripping.
    assert inst._lines == [
        '# This is a comment. I love comments.\n', '#\n', '\n', 'Yo\n',
        'some = table\n', 'somethingElse\n', 'x.y\n', 'a = b.c\n', '\n',
        '[schemas]\n', 'foo = bar\n',
    ]
def test_dump():
    """ Config dumps properly """
    inst = _config.Config(tables=[
        ('Yo', 'Yo'),
        ('some', 'table'),
        ('somethingElse', 'somethingElse'),
        ('y', 'x.y'),
        ('a', 'b.c'),
    ], schemas={'foo': 'bar'})
    fp = _tempfile.TemporaryFile(mode="w+")
    inst.dump(fp)
    fp.seek(0, 0)
    # A Config built from scratch must dump the canonical header comment
    # followed by normalized "name = table" lines and the schema section.
    assert fp.read() == """
# This is a comment. I love comments.
#
# This files contains table names, one per line
# Comments and empty lines are ignored
#
# If the table name contains a dot, the first part is treated as
# schema name.
#
# If the table variable should be treated differently, use:
#
# name = table
#
# The basename of this file (modulo .schema extension) is used as
# basename for the python file.
Yo = Yo
some = table
somethingElse = somethingElse
y = x.y
a = b.c
[schemas]
foo = bar
""".strip() + '\n'
    fp.close()
| ndparker/gensaschema | tests/unit/test_config.py | Python | apache-2.0 | 2,566 |
import logging
from gettext import gettext as _
from pulp.plugins.importer import Importer
from pulp.common.config import read_json_config
from pulp.server.exceptions import PulpCodedException
from pulp_puppet.common import constants
from pulp_puppet.plugins.importers import configuration, upload, copier
from pulp_puppet.plugins.importers.directory import SynchronizeWithDirectory
from pulp_puppet.plugins.importers.forge import SynchronizeWithPuppetForge
# The platform currently doesn't support automatic loading of conf files when the plugin
# uses entry points. The current thinking is that the conf files will be named the same as
# the plugin and put in a conf.d type of location. For now, this implementation will assume
# that's the final solution and the plugin will attempt to load the file itself in the
# entry_point method.
CONF_FILENAME = 'server/plugins.conf.d/%s.json' % constants.IMPORTER_TYPE_ID
_logger = logging.getLogger(__name__)
def entry_point():
    """
    Entry point that pulp platform uses to load the importer

    :return: importer class and its config
    :rtype: Importer, {}
    """
    # The platform does not auto-load conf files for entry-point plugins
    # yet, so the importer reads its own JSON config (see module comment).
    plugin_config = read_json_config(CONF_FILENAME)
    return PuppetModuleImporter, plugin_config
class PuppetModuleImporter(Importer):
    """Pulp importer plugin for puppet modules (directory or Forge feeds)."""
    def __init__(self):
        super(PuppetModuleImporter, self).__init__()
        # Strategy object of the sync currently in flight (None when idle);
        # kept so cancel_sync_repo() can forward a cancel request to it.
        self.sync_method = None
        self.sync_cancelled = False
    @classmethod
    def metadata(cls):
        """Advertise the importer id, display name and handled unit types."""
        return {
            'id': constants.IMPORTER_TYPE_ID,
            'display_name': _('Puppet Importer'),
            'types': [constants.TYPE_PUPPET_MODULE]
        }
    def validate_config(self, repo, config):
        """Delegate importer config validation to the configuration module."""
        return configuration.validate(config)
    def sync_repo(self, repo, sync_conduit, config):
        """Synchronize the repository; returns the final sync report."""
        self.sync_cancelled = False
        # Supports two methods of synchronization.
        # 1. Synchronize with a directory containing a pulp manifest and puppet modules.
        # 2. Synchronize with Puppet Forge.
        # When the feed URL references a PULP_MANIFEST, the directory synchronization
        # method is used. Otherwise, the puppet forge synchronization method is used.
        # synchronize with a directory
        self.sync_method = SynchronizeWithDirectory(repo, sync_conduit, config)
        report = self.sync_method()
        # When fetching the PULP_MANIFEST is not successful, it's assumed that the
        # feed points to a puppet forge instance and the synchronization is retried
        # using puppet forge method.
        if report.metadata_state == constants.STATE_FAILED:
            self.sync_method = SynchronizeWithPuppetForge(repo, sync_conduit, config)
            report = self.sync_method()
        self.sync_method = None
        return report.build_final_report()
    def import_units(self, source_repo, dest_repo, import_conduit, config, units=None):
        """Copy the given units into the destination repository."""
        return copier.copy_units(import_conduit, units)
    def upload_unit(self, repo, type_id, unit_key, metadata, file_path, conduit, config):
        """Import a client-uploaded unit file into the repository."""
        report = upload.handle_uploaded_unit(repo, type_id, unit_key, metadata, file_path,
                                             conduit)
        return report
    def cancel_sync_repo(self):
        """
        Cancel a running repository synchronization operation.
        """
        self.sync_cancelled = True
        if self.sync_method is None:
            return
        self.sync_method.cancel()
    def is_sync_cancelled(self):
        """
        Hook into the plugin to check if a cancel request has been issued for a sync.

        :return: True if the sync should stop running; False otherwise
        :rtype: bool
        """
        return self.sync_cancelled
| ammaritiz/pulp_puppet | pulp_puppet_plugins/pulp_puppet/plugins/importers/importer.py | Python | gpl-2.0 | 3,712 |
"""
These URL patterns are included in two different ways in the main urls.py, with
an extra argument present in one case. Thus, there are two different ways for
each name to resolve and Django must distinguish the possibilities based on the
argument list.
"""
from django.conf.urls import url
from .views import empty_view
urlpatterns = [
    # 'value' is a required kwarg here...
    url(r'^part/(?P<value>\w+)/$', empty_view, name="part"),
    # ...but optional here: the whole non-capturing group may be absent.
    url(r'^part2/(?:(?P<value>\w+)/)?$', empty_view, name="part2"),
]
| yephper/django | tests/urlpatterns_reverse/included_urls2.py | Python | bsd-3-clause | 489 |
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.utils import getdate, cstr, flt
from frappe import _, _dict
from erpnext.accounts.utils import get_account_currency
def execute(filters=None):
	"""Report entry point: return (columns, data) for the General Ledger.

	``filters`` is the frappe._dict supplied by the report UI.
	"""
	# Cache every Account's is_group flag for the validations below.
	account_details = {}
	for acc in frappe.db.sql("""select name, is_group from tabAccount""", as_dict=1):
		account_details.setdefault(acc.name, acc)
	validate_filters(filters, account_details)
	validate_party(filters)
	filters = set_account_currency(filters)
	columns = get_columns(filters)
	res = get_result(filters, account_details)
	return columns, res
def validate_filters(filters, account_details):
	"""Reject missing/contradictory filter combinations via frappe.throw."""
	if not filters.get('company'):
		frappe.throw(_('{0} is mandatory').format(_('Company')))
	if filters.get("account") and not account_details.get(filters.account):
		frappe.throw(_("Account {0} does not exists").format(filters.account))
	# Grouping by account only makes sense for group (non-leaf) accounts.
	if filters.get("account") and filters.get("group_by_account") \
		and account_details[filters.account].is_group == 0:
		frappe.throw(_("Can not filter based on Account, if grouped by Account"))
	if filters.get("voucher_no") and filters.get("group_by_voucher"):
		frappe.throw(_("Can not filter based on Voucher No, if grouped by Voucher"))
	if filters.from_date > filters.to_date:
		frappe.throw(_("From Date must be before To Date"))
def validate_party(filters):
	"""Require party_type when party is given and check the party exists."""
	party_type, party = filters.get("party_type"), filters.get("party")
	if party:
		if not party_type:
			frappe.throw(_("To filter based on Party, select Party Type first"))
		elif not frappe.db.exists(party_type, party):
			frappe.throw(_("Invalid {0}: {1}").format(party_type, party))
def set_account_currency(filters):
	"""Resolve company and account currency onto *filters*.

	When an account/party filter is present and its currency differs from
	the company currency, show_in_account_currency is switched on so the
	report adds the account-currency debit/credit columns.
	"""
	if not (filters.get("account") or filters.get("party")):
		return filters
	else:
		filters["company_currency"] = frappe.db.get_value("Company", filters.company, "default_currency")
		account_currency = None
		if filters.get("account"):
			account_currency = get_account_currency(filters.account)
		elif filters.get("party"):
			# Prefer the currency already used on this party's GL entries.
			gle_currency = frappe.db.get_value("GL Entry", {"party_type": filters.party_type,
				"party": filters.party, "company": filters.company}, "account_currency")
			if gle_currency:
				account_currency = gle_currency
			else:
				# Employee records have no default_currency field.
				account_currency = None if filters.party_type == "Employee" else \
					frappe.db.get_value(filters.party_type, filters.party, "default_currency")
		filters["account_currency"] = account_currency or filters.company_currency
		if filters.account_currency != filters.company_currency:
			filters["show_in_account_currency"] = 1
		return filters
def get_columns(filters):
	"""Column definitions ("Label:Fieldtype:Width").

	The account-currency debit/credit pair is inserted after the base
	debit/credit columns only when filters request it.
	"""
	columns = [
		_("Posting Date") + ":Date:90", _("Account") + ":Link/Account:200",
		_("Debit") + ":Float:100", _("Credit") + ":Float:100"
	]
	if filters.get("show_in_account_currency"):
		columns += [
			_("Debit") + " (" + filters.account_currency + ")" + ":Float:100",
			_("Credit") + " (" + filters.account_currency + ")" + ":Float:100"
		]
	columns += [
		_("Voucher Type") + "::120", _("Voucher No") + ":Dynamic Link/"+_("Voucher Type")+":160",
		_("Against Account") + "::120", _("Party Type") + "::80", _("Party") + "::150",
		_("Project") + ":Link/Project:100", _("Cost Center") + ":Link/Cost Center:100",
		_("Against Voucher Type") + "::120", _("Against Voucher") + ":Dynamic Link/"+_("Against Voucher Type")+":160",
		_("Remarks") + "::400"
	]
	return columns
def get_result(filters, account_details):
	"""Assemble the report rows: GL entries framed by opening/total/closing."""
	gl_entries = get_gl_entries(filters)
	data = get_data_with_opening_closing(filters, account_details, gl_entries)
	result = get_result_as_list(data, filters)
	return result
def get_gl_entries(filters):
	"""Fetch GL entries for the company, applying the dynamic conditions.

	Grouping is either per individual entry ("group by name") or collapsed
	per voucher when the group_by_voucher filter is set.
	"""
	# Account-currency sums are only selected when explicitly requested.
	select_fields = """, sum(debit_in_account_currency) as debit_in_account_currency,
		sum(credit_in_account_currency) as credit_in_account_currency""" \
		if filters.get("show_in_account_currency") else ""
	group_by_condition = "group by voucher_type, voucher_no, account, cost_center" \
		if filters.get("group_by_voucher") else "group by name"
	gl_entries = frappe.db.sql("""
		select
			posting_date, account, party_type, party,
			sum(debit) as debit, sum(credit) as credit,
			voucher_type, voucher_no, cost_center, project,
			against_voucher_type, against_voucher,
			remarks, against, is_opening {select_fields}
		from `tabGL Entry`
		where company=%(company)s {conditions}
		{group_by_condition}
		order by posting_date, account"""\
		.format(select_fields=select_fields, conditions=get_conditions(filters),
			group_by_condition=group_by_condition), filters, as_dict=1)
	return gl_entries
def get_conditions(filters):
	"""Build the dynamic WHERE fragment for get_gl_entries.

	Filter values are bound via %(name)s placeholders at execution time;
	only the integer lft/rgt tree bounds (read from the Account table, not
	from user input) are interpolated directly.
	"""
	conditions = []
	if filters.get("account"):
		# Include the whole subtree of the selected (group) account.
		lft, rgt = frappe.db.get_value("Account", filters["account"], ["lft", "rgt"])
		conditions.append("""account in (select name from tabAccount
			where lft>=%s and rgt<=%s and docstatus<2)""" % (lft, rgt))
	if filters.get("voucher_no"):
		conditions.append("voucher_no=%(voucher_no)s")
	if filters.get("party_type"):
		conditions.append("party_type=%(party_type)s")
	if filters.get("party"):
		conditions.append("party=%(party)s")
	# Without account/party/grouping, opening rows are not needed, so the
	# date window can be applied directly in SQL.
	if not (filters.get("account") or filters.get("party") or filters.get("group_by_account")):
		conditions.append("posting_date >=%(from_date)s")
	if filters.get("project"):
		conditions.append("project=%(project)s")
	from frappe.desk.reportview import build_match_conditions
	match_conditions = build_match_conditions("GL Entry")
	if match_conditions: conditions.append(match_conditions)
	return "and {}".format(" and ".join(conditions)) if conditions else ""
def get_data_with_opening_closing(filters, account_details, gl_entries):
	"""Frame entries with opening/total/closing rows and append the overall
	net closing balance row (debit side when positive, else credit side).

	With group_by_account, each account gets its own opening/entries/
	total/closing section separated by blank rows.
	"""
	data = []
	gle_map = initialize_gle_map(gl_entries)
	totals, entries = get_accountwise_gle(filters, gl_entries, gle_map)
	# Opening for filtered account
	data.append(totals.opening)
	if filters.get("group_by_account"):
		for acc, acc_dict in gle_map.items():
			if acc_dict.entries:
				# opening
				data.append({})
				data.append(acc_dict.totals.opening)
				data += acc_dict.entries
				# totals
				data.append(acc_dict.totals.total)
				# closing
				data.append(acc_dict.totals.closing)
				data.append({})
	else:
		data += entries
		# totals
		data.append(totals.total)
		# closing
		data.append(totals.closing)
	#total closing
	total_closing = totals.total_closing
	total_debit = totals.closing.get('debit', 0)
	total_credit = totals.closing.get('credit', 0)
	debit_in_account_currency = totals.closing.get('debit_in_account_currency', 0)
	credit_in_account_currency = totals.closing.get('credit_in_account_currency', 0)
	total_amount = total_debit - total_credit
	# Net balance is reported on the debit side if positive, credit otherwise.
	if total_amount > 0:
		total_closing['debit'] = total_amount
		total_closing['debit_in_account_currency'] = debit_in_account_currency - credit_in_account_currency
	else:
		total_closing['credit'] = abs(total_amount)
		total_closing['credit_in_account_currency'] = abs(debit_in_account_currency - credit_in_account_currency)
	data.append(totals.total_closing)
	return data
def get_totals_dict():
	"""Return fresh opening/total/closing/total_closing accumulator rows.

	Each row carries a quoted label in the 'account' column plus zeroed
	debit/credit amounts in both company and account currency.
	"""
	def _get_debit_credit_dict(label):
		return _dict(
			account = "'{0}'".format(label),
			debit = 0.0,
			credit = 0.0,
			debit_in_account_currency = 0.0,
			credit_in_account_currency = 0.0
		)
	return _dict(
		opening = _get_debit_credit_dict(_('Opening')),
		total = _get_debit_credit_dict(_('Total')),
		closing = _get_debit_credit_dict(_('Closing (Opening + Total)')),
		total_closing = _get_debit_credit_dict(_('Closing Balance (Dr - Cr)'))
	)
def initialize_gle_map(gl_entries):
	"""Map each distinct account to fresh totals plus an empty entry list."""
	gle_map = frappe._dict()
	for gle in gl_entries:
		gle_map.setdefault(gle.account, _dict(totals = get_totals_dict(), entries = []))
	return gle_map
def get_accountwise_gle(filters, gl_entries, gle_map):
	"""Accumulate opening/total/closing figures per account and overall.

	Entries dated before from_date (or flagged is_opening) feed the
	opening and closing accumulators; in-period entries feed total and
	closing and are collected for display (per account when grouping).
	"""
	totals = get_totals_dict()
	entries = []
	def update_value_in_dict(data, key, gle):
		data[key].debit += flt(gle.debit)
		data[key].credit += flt(gle.credit)
		data[key].debit_in_account_currency += flt(gle.debit_in_account_currency)
		data[key].credit_in_account_currency += flt(gle.credit_in_account_currency)
	from_date, to_date = getdate(filters.from_date), getdate(filters.to_date)
	for gle in gl_entries:
		if gle.posting_date < from_date or cstr(gle.is_opening) == "Yes":
			update_value_in_dict(gle_map[gle.account].totals, 'opening', gle)
			update_value_in_dict(totals, 'opening', gle)
			update_value_in_dict(gle_map[gle.account].totals, 'closing', gle)
			update_value_in_dict(totals, 'closing', gle)
		elif gle.posting_date <= to_date:
			update_value_in_dict(gle_map[gle.account].totals, 'total', gle)
			update_value_in_dict(totals, 'total', gle)
			if filters.get("group_by_account"):
				gle_map[gle.account].entries.append(gle)
			else:
				entries.append(gle)
			update_value_in_dict(gle_map[gle.account].totals, 'closing', gle)
			update_value_in_dict(totals, 'closing', gle)
	return totals, entries
def get_result_as_list(data, filters):
	"""Flatten GL entry dicts into positional report rows.

	Column order mirrors get_columns: the two account-currency amounts are
	inserted after debit/credit only when filters request them; missing
	keys become None.
	"""
	base_keys = ("posting_date", "account", "debit", "credit")
	currency_keys = ("debit_in_account_currency", "credit_in_account_currency")
	tail_keys = ("voucher_type", "voucher_no", "against", "party_type", "party",
		"project", "cost_center", "against_voucher_type", "against_voucher",
		"remarks")
	include_account_currency = bool(filters.get("show_in_account_currency"))
	result = []
	for entry in data:
		row = [entry.get(key) for key in base_keys]
		if include_account_currency:
			row.extend(entry.get(key) for key in currency_keys)
		row.extend(entry.get(key) for key in tail_keys)
		result.append(row)
	return result
| tfroehlich82/erpnext | erpnext/accounts/report/general_ledger/general_ledger.py | Python | gpl-3.0 | 9,442 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Scholar2Text - Python App for converting Scholarly PDFs to Text
#
# Converts a single PDF to text
#
# Author: Casey McLaughlin
# License: GPLv2 (see LICENSE.md)
#
import re, os, sys, nltk, numpy
def combineNarrative(txt):
    '''Join the narrative paragraphs of txt, dropping auxiliary blocks.

    Paragraphs are delimited by blank lines; each surviving paragraph is
    collapsed onto a single line.
    '''
    narrative = (p for p in txt.split("\n\n") if isNarrative(p))
    return "\n\n".join(re.sub(r"\n", ' ', p) for p in narrative)
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def isNarrative(paragraph):
    '''Attempts to determine if a given paragraph is part of the narrative.

    Heuristic cascade: headings containing exempt words pass, very short
    one-liners and copyright lines fail, and paragraphs whose lines are
    too short on average (likely tables/captions) fail.

    Fixes: "' background'" had a leading space so it could never match a
    token; the duplicate 'conclusion' entry and the unused blackStartWords
    variable were removed.
    '''
    #Words that indicate typical heading phrases and therefore
    #exempt from further scrutiny
    exemptWords = ('methods', 'method', 'abstract', 'result', 'results',
                   'conclusion', 'analysis', 'aim', 'sample', 'procedures',
                   'population', 'measurement instruments', 'discussion',
                   'background', 'ethics', 'demographics',
                   'introduction')
    #These words almost always signify auxiliary text within one-liners
    blackwords = ('', '\xa9', 'copyright') #xa9 is copyright symbol
    # ------
    #Strip surrounding whitespace
    paragraph = paragraph.strip()
    #Split into lines
    lines = list(w.strip() for w in paragraph.split("\n") if w.strip() != '')
    #If there were just a bunch of empty lines, return false
    if len(lines) == 0:
        return False
    #Get set of words in paragraph
    nltkObj = nltk.Text(nltk.word_tokenize(paragraph))
    paragraphWordSet = set(w.lower() for w in nltkObj)
    #Tests...
    #If single line or two lines and contains exempt word, return true
    if len(lines) <= 1 and len(set(exemptWords).intersection(paragraphWordSet)) > 0:
        return True
    #If single line and less than 35, return false
    if len(lines) == 1 and len(paragraph) < 35:
        return False
    #If single line or two lines and contains black word, return false
    if len(lines) <= 2 and len(set(blackwords).intersection(paragraphWordSet)) > 0:
        return False
    #If average line length is less than 20, return false
    if numpy.mean(list(len(l) for l in lines)) < 20:
        return False
    return True
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def runCli():
    '''Command-line entry: print the combined narrative of the text file
    named in argv[1].  (Python 2 print statements.)'''
    try:
        inFile = os.path.abspath(sys.argv[1])
        with open(inFile) as f:
            content = f.read()
        print combineNarrative(content)
    except IOError:
        # inFile is always bound here: IOError can only come from open().
        print "Could not read from file: " + inFile
    except IndexError:
        # sys.argv[1] missing.
        print "You must specify a filename"
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
if __name__ == "__main__":
runCli() | idiginfo/scholar2text | python/scholarlynarr.py | Python | gpl-2.0 | 3,048 |
from __future__ import unicode_literals
from django.apps import AppConfig
class CommentsConfig(AppConfig):
    """Django application configuration for the comments app."""
    name = 'comments'
| bigtree6688/dockerfiles | django_demo/demo/comments/apps.py | Python | gpl-3.0 | 132 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.