repo_name | path | copies | size | content | license | var_hash | doc_hash | line_mean | line_max | alpha_frac | autogenerated
stringlengths 6-100 | stringlengths 4-294 | stringlengths 1-5 | stringlengths 4-6 | stringlengths 606-896k | stringclasses 15 values | int64 -9,223,186,179,200,150,000 to 9,223,291,175B | int64 -9,223,304,365,658,930,000 to 9,223,309,051B | float64 3.5-99.8 | int64 13-999 | float64 0.25-0.97 | bool 1 class
---|---|---|---|---|---|---|---|---|---|---|---
fxtentacle/phantomjs | src/qt/qtbase/util/local_database/dateconverter.py | 105 | 5318 | #!/usr/bin/env python
#############################################################################
##
## Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
## Contact: http://www.qt-project.org/legal
##
## This file is part of the test suite of the Qt Toolkit.
##
## $QT_BEGIN_LICENSE:LGPL$
## Commercial License Usage
## Licensees holding valid commercial Qt licenses may use this file in
## accordance with the commercial license agreement provided with the
## Software or, alternatively, in accordance with the terms contained in
## a written agreement between you and Digia. For licensing terms and
## conditions see http://qt.digia.com/licensing. For further information
## use the contact form at http://qt.digia.com/contact-us.
##
## GNU Lesser General Public License Usage
## Alternatively, this file may be used under the terms of the GNU Lesser
## General Public License version 2.1 as published by the Free Software
## Foundation and appearing in the file LICENSE.LGPL included in the
## packaging of this file. Please review the following information to
## ensure the GNU Lesser General Public License version 2.1 requirements
## will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
##
## In addition, as a special exception, Digia gives you certain additional
## rights. These rights are described in the Digia Qt LGPL Exception
## version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
##
## GNU General Public License Usage
## Alternatively, this file may be used under the terms of the GNU
## General Public License version 3.0 as published by the Free Software
## Foundation and appearing in the file LICENSE.GPL included in the
## packaging of this file. Please review the following information to
## ensure the GNU General Public License version 3.0 requirements will be
## met: http://www.gnu.org/copyleft/gpl.html.
##
##
## $QT_END_LICENSE$
##
#############################################################################
import re
def _convert_pattern(pattern):
# patterns from http://www.unicode.org/reports/tr35/#Date_Format_Patterns
qt_regexps = {
r"yyy{3,}" : "yyyy", # more that three digits hence convert to four-digit year
r"L" : "M", # stand-alone month names. not supported.
r"g{1,}": "", # modified julian day. not supported.
r"S{1,}" : "", # fractional seconds. not supported.
r"A{1,}" : "" # milliseconds in day. not supported.
}
qt_patterns = {
"G" : "", "GG" : "", "GGG" : "", "GGGG" : "", "GGGGG" : "", # Era. not supported.
"y" : "yyyy", # four-digit year without leading zeroes
"Q" : "", "QQ" : "", "QQQ" : "", "QQQQ" : "", # quarter. not supported.
"q" : "", "qq" : "", "qqq" : "", "qqqq" : "", # quarter. not supported.
"MMMMM" : "MMM", # narrow month name.
"LLLLL" : "MMM", # stand-alone narrow month name.
"l" : "", # special symbol for chinese leap month. not supported.
"w" : "", "W" : "", # week of year/month. not supported.
"D" : "", "DD" : "", "DDD" : "", # day of year. not supported.
"F" : "", # day of week in month. not supported.
"E" : "ddd", "EE" : "ddd", "EEE" : "ddd", "EEEEE" : "ddd", "EEEE" : "dddd", # day of week
"e" : "ddd", "ee" : "ddd", "eee" : "ddd", "eeeee" : "ddd", "eeee" : "dddd", # local day of week
"c" : "ddd", "cc" : "ddd", "ccc" : "ddd", "ccccc" : "ddd", "cccc" : "dddd", # stand-alone local day of week
"a" : "AP", # AM/PM
"K" : "h", # Hour 0-11
"k" : "H", # Hour 1-24
"j" : "", # special reserved symbol.
"z" : "t", "zz" : "t", "zzz" : "t", "zzzz" : "t", # timezone
"Z" : "t", "ZZ" : "t", "ZZZ" : "t", "ZZZZ" : "t", # timezone
"v" : "t", "vv" : "t", "vvv" : "t", "vvvv" : "t", # timezone
"V" : "t", "VV" : "t", "VVV" : "t", "VVVV" : "t" # timezone
}
if pattern in qt_patterns:
return qt_patterns[pattern]
for r,v in qt_regexps.items():
pattern = re.sub(r, v, pattern)
return pattern
def convert_date(input):
result = ""
patterns = "GyYuQqMLlwWdDFgEecahHKkjmsSAzZvV"
last = ""
inquote = 0
chars_to_strip = " -"
for c in input:
if c == "'":
inquote = inquote + 1
if inquote % 2 == 0:
if c in patterns:
if not last:
last = c
else:
if c in last:
last += c
else:
# pattern changed
converted = _convert_pattern(last)
result += converted
if not converted:
result = result.rstrip(chars_to_strip)
last = c
continue
if last:
# pattern ended
converted = _convert_pattern(last)
result += converted
if not converted:
result = result.rstrip(chars_to_strip)
last = ""
result += c
if last:
converted = _convert_pattern(last)
result += converted
if not converted:
result = result.rstrip(chars_to_strip)
return result.lstrip(chars_to_strip)
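# A minimal sanity check of the conversion above (hypothetical usage; the Qt
# locale scripts normally import convert_date rather than running this file):
if __name__ == '__main__':
    # day-of-week and four-digit-year CLDR patterns are rewritten; literals
    # such as ',' and ' ' pass through unchanged
    assert convert_date("EEEE, d MMMM y") == "dddd, d MMMM yyyy"
    # "a" (the AM/PM marker) maps to Qt's "AP"
    assert convert_date("h:mm a") == "h:mm AP"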
| bsd-3-clause | 4,522,112,350,333,505,000 | -8,577,283,111,753,953,000 | 43.316667 | 115 | 0.544566 | false |
vicky2135/lucious | tests/integration/offer/test_availability.py | 2 | 4516 | import datetime
from decimal import Decimal as D
from django.test import TestCase
from oscar.apps.offer import models
from oscar.core.compat import get_user_model
from oscar.test.factories import (
OrderDiscountFactory, UserFactory, create_order)
User = get_user_model()
class TestAPerUserConditionalOffer(TestCase):
def setUp(self):
self.offer = models.ConditionalOffer(max_user_applications=1)
self.user = UserFactory()
def test_is_available_with_no_applications(self):
self.assertTrue(self.offer.is_available())
def test_max_applications_is_correct_when_no_applications(self):
self.assertEqual(1, self.offer.get_max_applications(self.user))
def test_max_applications_is_correct_when_equal_applications(self):
order = create_order(user=self.user)
OrderDiscountFactory(
order=order, offer_id=self.offer.id, frequency=1)
self.assertEqual(0, self.offer.get_max_applications(self.user))
def test_max_applications_is_correct_when_more_applications(self):
order = create_order(user=self.user)
OrderDiscountFactory(
order=order, offer_id=self.offer.id, frequency=5)
self.assertEqual(0, self.offer.get_max_applications(self.user))
class TestADateBasedConditionalOffer(TestCase):
def setUp(self):
self.start = datetime.date(2011, 1, 1)
self.end = datetime.date(2011, 2, 1)
self.offer = models.ConditionalOffer(start_datetime=self.start,
end_datetime=self.end)
def test_is_available_during_date_range(self):
test = datetime.date(2011, 1, 10)
self.assertTrue(self.offer.is_available(test_date=test))
def test_is_inactive_before_date_range(self):
test = datetime.date(2010, 3, 10)
self.assertFalse(self.offer.is_available(test_date=test))
def test_is_inactive_after_date_range(self):
test = datetime.date(2011, 3, 10)
self.assertFalse(self.offer.is_available(test_date=test))
def test_is_active_on_end_datetime(self):
self.assertTrue(self.offer.is_available(test_date=self.end))
class TestAConsumptionFrequencyBasedConditionalOffer(TestCase):
def setUp(self):
self.offer = models.ConditionalOffer(max_global_applications=4)
def test_is_available_with_no_applications(self):
self.assertTrue(self.offer.is_available())
def test_is_available_with_fewer_applications_than_max(self):
self.offer.num_applications = 3
self.assertTrue(self.offer.is_available())
def test_is_inactive_with_equal_applications_to_max(self):
self.offer.num_applications = 4
self.assertFalse(self.offer.is_available())
def test_is_inactive_with_more_applications_than_max(self):
self.offer.num_applications = 5
self.assertFalse(self.offer.is_available())
def test_restricts_number_of_applications_correctly_with_no_applications(self):
self.assertEqual(4, self.offer.get_max_applications())
def test_restricts_number_of_applications_correctly_with_fewer_applications_than_max(self):
self.offer.num_applications = 3
self.assertEqual(1, self.offer.get_max_applications())
def test_restricts_number_of_applications_correctly_with_more_applications_than_max(self):
self.offer.num_applications = 5
self.assertEqual(0, self.offer.get_max_applications())
class TestCappedDiscountConditionalOffer(TestCase):
def setUp(self):
self.offer = models.ConditionalOffer(
max_discount=D('100.00'),
total_discount=D('0.00'))
def test_is_available_when_below_threshold(self):
self.assertTrue(self.offer.is_available())
def test_is_inactive_when_on_threshold(self):
self.offer.total_discount = self.offer.max_discount
self.assertFalse(self.offer.is_available())
def test_is_inactive_when_above_threshold(self):
self.offer.total_discount = self.offer.max_discount + D('10.00')
self.assertFalse(self.offer.is_available())
class TestASuspendedOffer(TestCase):
def setUp(self):
self.offer = models.ConditionalOffer(
status=models.ConditionalOffer.SUSPENDED)
def test_is_unavailable(self):
self.assertFalse(self.offer.is_available())
def test_lists_suspension_as_an_availability_restriction(self):
restrictions = self.offer.availability_restrictions()
self.assertEqual(1, len(restrictions))
| bsd-3-clause | 3,979,560,450,449,526,300 | 5,594,462,735,300,759,000 | 35.128 | 95 | 0.694641 | false |
sumit4iit/django-guardian | guardian/admin.py | 9 | 14646 | from django import forms
from django.conf import settings
from django.conf.urls.defaults import patterns, url
from django.contrib import admin
from django.contrib import messages
from django.contrib.admin.widgets import FilteredSelectMultiple
from django.core.urlresolvers import reverse
from django.shortcuts import render_to_response, get_object_or_404, redirect
from django.template import RequestContext
from django.utils.datastructures import SortedDict
from django.utils.translation import ugettext, ugettext_lazy as _
from guardian.forms import UserObjectPermissionsForm
from guardian.forms import GroupObjectPermissionsForm
from guardian.shortcuts import get_perms
from guardian.shortcuts import get_users_with_perms
from guardian.shortcuts import get_groups_with_perms
from guardian.shortcuts import get_perms_for_model
from guardian.models import User, Group
class AdminUserObjectPermissionsForm(UserObjectPermissionsForm):
"""
Extends :form:`UserObjectPermissionsForm`. It only overrides the
``get_obj_perms_field_widget`` method so that it returns a
``django.contrib.admin.widgets.FilteredSelectMultiple`` widget.
"""
def get_obj_perms_field_widget(self):
return FilteredSelectMultiple(_("Permissions"), False)
class AdminGroupObjectPermissionsForm(GroupObjectPermissionsForm):
"""
Extends :form:`GroupObjectPermissionsForm`. It only overrides the
``get_obj_perms_field_widget`` method so that it returns a
``django.contrib.admin.widgets.FilteredSelectMultiple`` widget.
"""
def get_obj_perms_field_widget(self):
return FilteredSelectMultiple(_("Permissions"), False)
class GuardedModelAdmin(admin.ModelAdmin):
"""
Extends ``django.contrib.admin.ModelAdmin`` class. Provides some extra
views for object permissions management at admin panel. It also changes
default ``change_form_template`` option to
``'admin/guardian/model/change_form.html'`` which is required for proper
url (object permissions related) being shown at the model pages.
**Extra options**
``GuardedModelAdmin.obj_perms_manage_template``
*Default*: ``admin/guardian/model/obj_perms_manage.html``
``GuardedModelAdmin.obj_perms_manage_user_template``
*Default*: ``admin/guardian/model/obj_perms_manage_user.html``
``GuardedModelAdmin.obj_perms_manage_group_template``
*Default*: ``admin/guardian/model/obj_perms_manage_group.html``
``GuardedModelAdmin.user_can_access_owned_objects_only``
*Default*: ``False``
If set to ``True``, ``request.user`` is used to filter out objects
the user doesn't own (checking the ``user`` field of the model; the
field name may be overridden by the ``user_owned_objects_field``
option).
.. note::
Please remember that this will **NOT** affect superusers!
Admins would still see all items.
``GuardedModelAdmin.user_owned_objects_field``
*Default*: ``user``
**Usage example**
Just use :admin:`GuardedModelAdmin` instead of
``django.contrib.admin.ModelAdmin``.
.. code-block:: python
from django.contrib import admin
from guardian.admin import GuardedModelAdmin
from myapp.models import Author
class AuthorAdmin(GuardedModelAdmin):
pass
admin.site.register(Author, AuthorAdmin)
"""
change_form_template = \
'admin/guardian/model/change_form.html'
obj_perms_manage_template = \
'admin/guardian/model/obj_perms_manage.html'
obj_perms_manage_user_template = \
'admin/guardian/model/obj_perms_manage_user.html'
obj_perms_manage_group_template = \
'admin/guardian/model/obj_perms_manage_group.html'
user_can_access_owned_objects_only = False
user_owned_objects_field = 'user'
def queryset(self, request):
qs = super(GuardedModelAdmin, self).queryset(request)
if self.user_can_access_owned_objects_only and \
not request.user.is_superuser:
filters = {self.user_owned_objects_field: request.user}
qs = qs.filter(**filters)
return qs
def get_urls(self):
"""
Extends standard admin model urls with the following:
- ``.../permissions/``
- ``.../permissions/user-manage/<user_id>/``
- ``.../permissions/group-manage/<group_id>/``
.. note::
``...`` above are standard, instance detail url (i.e.
``/admin/flatpages/1/``)
"""
urls = super(GuardedModelAdmin, self).get_urls()
info = self.model._meta.app_label, self.model._meta.module_name
myurls = patterns('',
url(r'^(?P<object_pk>.+)/permissions/$',
view=self.admin_site.admin_view(self.obj_perms_manage_view),
name='%s_%s_permissions' % info),
url(r'^(?P<object_pk>.+)/permissions/user-manage/(?P<user_id>\-?\d+)/$',
view=self.admin_site.admin_view(
self.obj_perms_manage_user_view),
name='%s_%s_permissions_manage_user' % info),
url(r'^(?P<object_pk>.+)/permissions/group-manage/(?P<group_id>\-?\d+)/$',
view=self.admin_site.admin_view(
self.obj_perms_manage_group_view),
name='%s_%s_permissions_manage_group' % info),
)
return myurls + urls
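# For a hypothetical model myapp.Author, the first pattern above would be
# reversible as reverse('admin:myapp_author_permissions', args=[obj.pk]),
# resolving to ".../myapp/author/<pk>/permissions/" (this example assumes
# the default 'admin' site namespace).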
def get_obj_perms_base_context(self, request, obj):
"""
Returns context dictionary with common admin and object permissions
related content.
"""
context = {
'adminform': {'model_admin': self},
'object': obj,
'app_label': self.model._meta.app_label,
'opts': self.model._meta,
'original': hasattr(obj, '__unicode__') and obj.__unicode__() or\
str(obj),
'has_change_permission': self.has_change_permission(request, obj),
'model_perms': get_perms_for_model(obj),
'title': _("Object permissions"),
}
return context
def obj_perms_manage_view(self, request, object_pk):
"""
Main object permissions view. Presents all users and groups with any
object permissions for the current model *instance*. Users or groups
without object permissions for the related *instance* are **not**
shown. To add or manage a user or group, use the links or forms
presented within the page.
"""
obj = get_object_or_404(self.queryset(request), pk=object_pk)
users_perms = SortedDict(
get_users_with_perms(obj, attach_perms=True,
with_group_users=False))
users_perms.keyOrder.sort(key=lambda user: user.username)
groups_perms = SortedDict(
get_groups_with_perms(obj, attach_perms=True))
groups_perms.keyOrder.sort(key=lambda group: group.name)
if request.method == 'POST' and 'submit_manage_user' in request.POST:
user_form = UserManage(request.POST)
group_form = GroupManage()
info = (
self.admin_site.name,
self.model._meta.app_label,
self.model._meta.module_name
)
if user_form.is_valid():
user_id = user_form.cleaned_data['user'].id
url = reverse(
'%s:%s_%s_permissions_manage_user' % info,
args=[obj.pk, user_id]
)
return redirect(url)
elif request.method == 'POST' and 'submit_manage_group' in request.POST:
user_form = UserManage()
group_form = GroupManage(request.POST)
info = (
self.admin_site.name,
self.model._meta.app_label,
self.model._meta.module_name
)
if group_form.is_valid():
group_id = group_form.cleaned_data['group'].id
url = reverse(
'%s:%s_%s_permissions_manage_group' % info,
args=[obj.pk, group_id]
)
return redirect(url)
else:
user_form = UserManage()
group_form = GroupManage()
context = self.get_obj_perms_base_context(request, obj)
context['users_perms'] = users_perms
context['groups_perms'] = groups_perms
context['user_form'] = user_form
context['group_form'] = group_form
return render_to_response(self.get_obj_perms_manage_template(),
context, RequestContext(request, current_app=self.admin_site.name))
def get_obj_perms_manage_template(self):
"""
Returns the main object permissions admin template. May be overridden
if you need to change it dynamically.
.. note::
If ``INSTALLED_APPS`` contains ``grappelli`` this function would
return ``"admin/guardian/grappelli/obj_perms_manage.html"``.
"""
if 'grappelli' in settings.INSTALLED_APPS:
return 'admin/guardian/contrib/grappelli/obj_perms_manage.html'
return self.obj_perms_manage_template
def obj_perms_manage_user_view(self, request, object_pk, user_id):
"""
Manages the selected user's permissions for the current object.
"""
user = get_object_or_404(User, id=user_id)
obj = get_object_or_404(self.queryset(request), pk=object_pk)
form_class = self.get_obj_perms_manage_user_form()
form = form_class(user, obj, request.POST or None)
if request.method == 'POST' and form.is_valid():
form.save_obj_perms()
msg = ugettext("Permissions saved.")
messages.success(request, msg)
info = (
self.admin_site.name,
self.model._meta.app_label,
self.model._meta.module_name
)
url = reverse(
'%s:%s_%s_permissions_manage_user' % info,
args=[obj.pk, user.id]
)
return redirect(url)
context = self.get_obj_perms_base_context(request, obj)
context['user_obj'] = user
context['user_perms'] = get_perms(user, obj)
context['form'] = form
return render_to_response(self.get_obj_perms_manage_user_template(),
context, RequestContext(request, current_app=self.admin_site.name))
def get_obj_perms_manage_user_template(self):
"""
Returns the admin template for managing a user's object permissions.
May be overridden if you need to change it dynamically.
.. note::
If ``INSTALLED_APPS`` contains ``grappelli`` this function would
return ``"admin/guardian/grappelli/obj_perms_manage_user.html"``.
"""
if 'grappelli' in settings.INSTALLED_APPS:
return 'admin/guardian/contrib/grappelli/obj_perms_manage_user.html'
return self.obj_perms_manage_user_template
def get_obj_perms_manage_user_form(self):
"""
Returns form class for user object permissions management. By default
:form:`AdminUserObjectPermissionsForm` is returned.
"""
return AdminUserObjectPermissionsForm
def obj_perms_manage_group_view(self, request, object_pk, group_id):
"""
Manages the selected group's permissions for the current object.
"""
group = get_object_or_404(Group, id=group_id)
obj = get_object_or_404(self.queryset(request), pk=object_pk)
form_class = self.get_obj_perms_manage_group_form()
form = form_class(group, obj, request.POST or None)
if request.method == 'POST' and form.is_valid():
form.save_obj_perms()
msg = ugettext("Permissions saved.")
messages.success(request, msg)
info = (
self.admin_site.name,
self.model._meta.app_label,
self.model._meta.module_name
)
url = reverse(
'%s:%s_%s_permissions_manage_group' % info,
args=[obj.pk, group.id]
)
return redirect(url)
context = self.get_obj_perms_base_context(request, obj)
context['group_obj'] = group
context['group_perms'] = get_perms(group, obj)
context['form'] = form
return render_to_response(self.get_obj_perms_manage_group_template(),
context, RequestContext(request, current_app=self.admin_site.name))
def get_obj_perms_manage_group_template(self):
"""
Returns the admin template for managing a group's object permissions.
May be overridden if you need to change it dynamically.
.. note::
If ``INSTALLED_APPS`` contains ``grappelli`` this function would
return ``"admin/guardian/grappelli/obj_perms_manage_group.html"``.
"""
if 'grappelli' in settings.INSTALLED_APPS:
return 'admin/guardian/contrib/grappelli/obj_perms_manage_group.html'
return self.obj_perms_manage_group_template
def get_obj_perms_manage_group_form(self):
"""
Returns form class for group object permissions management. By default
:form:`AdminGroupObjectPermissionsForm` is returned.
"""
return AdminGroupObjectPermissionsForm
class UserManage(forms.Form):
user = forms.RegexField(label=_("Username"), max_length=30,
regex=r'^[\w.@+-]+$',
error_messages = {
'invalid': _("This value may contain only letters, numbers and "
"@/./+/-/_ characters."),
'does_not_exist': _("This user does not exist")})
def clean_user(self):
"""
Returns ``User`` instance based on the given username.
"""
username = self.cleaned_data['user']
try:
user = User.objects.get(username=username)
return user
except User.DoesNotExist:
raise forms.ValidationError(
self.fields['user'].error_messages['does_not_exist'])
class GroupManage(forms.Form):
group = forms.CharField(max_length=80, error_messages={'does_not_exist':
_("This group does not exist")})
def clean_group(self):
"""
Returns ``Group`` instance based on the given group name.
"""
name = self.cleaned_data['group']
try:
group = Group.objects.get(name=name)
return group
except Group.DoesNotExist:
raise forms.ValidationError(
self.fields['group'].error_messages['does_not_exist'])
| bsd-2-clause | -9,015,401,245,945,846,000 | -7,131,302,707,731,951,000 | 37.340314 | 86 | 0.608289 | false |
gautam1858/tensorflow | tensorflow/contrib/learn/python/learn/estimators/svm.py | 42 | 9389 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Support Vector Machine (SVM) Estimator (deprecated).
This module and all its submodules are deprecated. See
[contrib/learn/README.md](https://www.tensorflow.org/code/tensorflow/contrib/learn/README.md)
for migration instructions.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib import layers
from tensorflow.contrib.framework import deprecated
from tensorflow.contrib.framework import deprecated_arg_values
from tensorflow.contrib.learn.python.learn.estimators import estimator
from tensorflow.contrib.learn.python.learn.estimators import head as head_lib
from tensorflow.contrib.learn.python.learn.estimators import linear
from tensorflow.contrib.learn.python.learn.estimators import prediction_key
from tensorflow.contrib.linear_optimizer.python import sdca_optimizer
def _as_iterable(preds, output):
for pred in preds:
yield pred[output]
class SVM(estimator.Estimator):
"""Support Vector Machine (SVM) model for binary classification.
THIS CLASS IS DEPRECATED. See
[contrib/learn/README.md](https://www.tensorflow.org/code/tensorflow/contrib/learn/README.md)
for general migration instructions.
Currently, only linear SVMs are supported. For the underlying optimization
problem, the `SDCAOptimizer` is used. For performance and convergence tuning,
the num_loss_partitions parameter passed to `SDCAOptimizer` (see `__init__()`
method), should be set to (#concurrent train ops per worker) x (#workers). If
num_loss_partitions is larger or equal to this value, convergence is
guaranteed but becomes slower as num_loss_partitions increases. If it is set
to a smaller value, the optimizer is more aggressive in reducing the global
loss but convergence is not guaranteed. The recommended value in an
`Estimator` (where there is one process per worker) is the number of workers
running the train steps. It defaults to 1 (single machine).
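For instance, with 4 workers each running 2 concurrent train ops, the
recommended setting would be num_loss_partitions = 2 x 4 = 8.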
Example:
```python
real_feature_column = real_valued_column(...)
sparse_feature_column = sparse_column_with_hash_bucket(...)
estimator = SVM(
example_id_column='example_id',
feature_columns=[real_feature_column, sparse_feature_column],
l2_regularization=10.0)
# Input builders
def input_fn_train: # returns x, y
...
def input_fn_eval: # returns x, y
...
estimator.fit(input_fn=input_fn_train)
estimator.evaluate(input_fn=input_fn_eval)
estimator.predict(x=x)
```
Input of `fit` and `evaluate` should have following features, otherwise there
will be a `KeyError`:
a feature with `key=example_id_column` whose value is a `Tensor` of dtype
string.
if `weight_column_name` is not `None`, a feature with
`key=weight_column_name` whose value is a `Tensor`.
for each `column` in `feature_columns`:
- if `column` is a `SparseColumn`, a feature with `key=column.name`
whose `value` is a `SparseTensor`.
- if `column` is a `RealValuedColumn, a feature with `key=column.name`
whose `value` is a `Tensor`.
"""
def __init__(self,
example_id_column,
feature_columns,
weight_column_name=None,
model_dir=None,
l1_regularization=0.0,
l2_regularization=0.0,
num_loss_partitions=1,
kernels=None,
config=None,
feature_engineering_fn=None):
"""Constructs an `SVM` estimator object.
Args:
example_id_column: A string defining the feature column name representing
example ids. Used to initialize the underlying optimizer.
feature_columns: An iterable containing all the feature columns used by
the model. All items in the set should be instances of classes derived
from `FeatureColumn`.
weight_column_name: A string defining feature column name representing
weights. It is used to down weight or boost examples during training. It
will be multiplied by the loss of the example.
model_dir: Directory to save model parameters, graph and etc. This can
also be used to load checkpoints from the directory into a estimator to
continue training a previously saved model.
l1_regularization: L1-regularization parameter. Refers to global L1
regularization (across all examples).
l2_regularization: L2-regularization parameter. Refers to global L2
regularization (across all examples).
num_loss_partitions: number of partitions of the (global) loss function
optimized by the underlying optimizer (SDCAOptimizer).
kernels: A list of kernels for the SVM. Currently, no kernels are
supported. Reserved for future use for non-linear SVMs.
config: RunConfig object to configure the runtime settings.
feature_engineering_fn: Feature engineering function. Takes features and
labels which are the output of `input_fn` and
returns features and labels which will be fed
into the model.
Raises:
ValueError: if kernels passed is not None.
"""
if kernels is not None:
raise ValueError("Kernel SVMs are not currently supported.")
optimizer = sdca_optimizer.SDCAOptimizer(
example_id_column=example_id_column,
num_loss_partitions=num_loss_partitions,
symmetric_l1_regularization=l1_regularization,
symmetric_l2_regularization=l2_regularization)
self._feature_columns = feature_columns
chief_hook = linear._SdcaUpdateWeightsHook() # pylint: disable=protected-access
super(SVM, self).__init__(
model_fn=linear.sdca_model_fn,
model_dir=model_dir,
config=config,
params={
"head": head_lib.binary_svm_head(
weight_column_name=weight_column_name,
enable_centered_bias=False),
"feature_columns": feature_columns,
"optimizer": optimizer,
"weight_column_name": weight_column_name,
"update_weights_hook": chief_hook,
},
feature_engineering_fn=feature_engineering_fn)
@deprecated_arg_values(
estimator.AS_ITERABLE_DATE, estimator.AS_ITERABLE_INSTRUCTIONS,
as_iterable=False)
def predict_classes(self, x=None, input_fn=None, batch_size=None,
as_iterable=True):
"""Runs inference to determine the predicted class."""
key = prediction_key.PredictionKey.CLASSES
preds = super(SVM, self).predict(
x=x,
input_fn=input_fn,
batch_size=batch_size,
outputs=[key],
as_iterable=as_iterable)
if as_iterable:
return _as_iterable(preds, output=key)
return preds[key]
@deprecated_arg_values(
estimator.AS_ITERABLE_DATE, estimator.AS_ITERABLE_INSTRUCTIONS,
as_iterable=False)
def predict_proba(self, x=None, input_fn=None, batch_size=None, outputs=None,
as_iterable=True):
"""Runs inference to determine the class probability predictions."""
key = prediction_key.PredictionKey.PROBABILITIES
preds = super(SVM, self).predict(
x=x,
input_fn=input_fn,
batch_size=batch_size,
outputs=[key],
as_iterable=as_iterable)
if as_iterable:
return _as_iterable(preds, output=key)
return preds[key]
# pylint: enable=protected-access
@deprecated("2017-03-25", "Please use Estimator.export_savedmodel() instead.")
def export(self, export_dir, signature_fn=None,
input_fn=None, default_batch_size=1,
exports_to_keep=None):
"""See BaseEstimator.export."""
return self.export_with_defaults(
export_dir=export_dir,
signature_fn=signature_fn,
input_fn=input_fn,
default_batch_size=default_batch_size,
exports_to_keep=exports_to_keep)
@deprecated("2017-03-25", "Please use Estimator.export_savedmodel() instead.")
def export_with_defaults(
self,
export_dir,
signature_fn=None,
input_fn=None,
default_batch_size=1,
exports_to_keep=None):
"""Same as BaseEstimator.export, but uses some defaults."""
def default_input_fn(unused_estimator, examples):
return layers.parse_feature_columns_from_examples(
examples, self._feature_columns)
return super(SVM, self).export(export_dir=export_dir,
signature_fn=signature_fn,
input_fn=input_fn or default_input_fn,
default_batch_size=default_batch_size,
exports_to_keep=exports_to_keep)
| apache-2.0 | -5,362,882,620,450,832,000 | -1,987,637,339,683,949,000 | 40.915179 | 95 | 0.673767 | false |
fmacias64/spyre | setup.py | 3 | 1217 | from setuptools import setup, find_packages
setup(
name='DataSpyre',
version='0.2.0',
description='Spyre makes it easy to build interactive web applications, and requires no knowledge of HTML, CSS, or Javascript.',
url='https://github.com/adamhajari/spyre',
author='Adam Hajari',
author_email='adam@nextbigsound.com',
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Framework :: CherryPy',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'Environment :: Web Environment',
'Topic :: Scientific/Engineering',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
],
keywords='web application template data visualization',
include_package_data = True, # include everything in source control
packages = ['spyre'], # include all packages under src
package_data = {
'': ['*.js','*.css','*.html'],
'public': ['js/*.js','css/*.css'],
},
install_requires=[
"numpy",
"pandas",
"cherrypy",
"jinja2",
"matplotlib",
]
)
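# Standard setuptools workflow (not specific to this project): build a source
# distribution with "python setup.py sdist", or install locally by running
# "pip install ." from the repository root.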
| mit | -7,877,962,106,221,243,000 | -1,114,610,179,148,254,500 | 32.805556 | 132 | 0.59244 | false |
mikemow/youtube-dl | youtube_dl/extractor/ellentv.py | 107 | 2708 | # coding: utf-8
from __future__ import unicode_literals
import json
from .common import InfoExtractor
from ..utils import (
ExtractorError,
)
class EllenTVIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?(?:ellentv|ellentube)\.com/videos/(?P<id>[a-z0-9_-]+)'
_TEST = {
'url': 'http://www.ellentv.com/videos/0-ipq1gsai/',
'md5': '8e3c576bf2e9bfff4d76565f56f94c9c',
'info_dict': {
'id': '0_ipq1gsai',
'ext': 'mp4',
'title': 'Fast Fingers of Fate',
'description': 'md5:587e79fbbd0d73b148bc596d99ce48e6',
'timestamp': 1428035648,
'upload_date': '20150403',
'uploader_id': 'batchUser',
}
}
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(
'http://widgets.ellentube.com/videos/%s' % video_id,
video_id)
partner_id = self._search_regex(
r"var\s+partnerId\s*=\s*'([^']+)", webpage, 'partner id')
kaltura_id = self._search_regex(
[r'id="kaltura_player_([^"]+)"',
r"_wb_entry_id\s*:\s*'([^']+)",
r'data-kaltura-entry-id="([^"]+)'],
webpage, 'kaltura id')
return self.url_result('kaltura:%s:%s' % (partner_id, kaltura_id), 'Kaltura')
class EllenTVClipsIE(InfoExtractor):
IE_NAME = 'EllenTV:clips'
_VALID_URL = r'https?://(?:www\.)?ellentv\.com/episodes/(?P<id>[a-z0-9_-]+)'
_TEST = {
'url': 'http://www.ellentv.com/episodes/meryl-streep-vanessa-hudgens/',
'info_dict': {
'id': 'meryl-streep-vanessa-hudgens',
'title': 'Meryl Streep, Vanessa Hudgens',
},
'playlist_mincount': 7,
}
def _real_extract(self, url):
playlist_id = self._match_id(url)
webpage = self._download_webpage(url, playlist_id)
playlist = self._extract_playlist(webpage)
return {
'_type': 'playlist',
'id': playlist_id,
'title': self._og_search_title(webpage),
'entries': self._extract_entries(playlist)
}
def _extract_playlist(self, webpage):
json_string = self._search_regex(r'playerView.addClips\(\[\{(.*?)\}\]\);', webpage, 'json')
try:
return json.loads("[{" + json_string + "}]")
except ValueError as ve:
raise ExtractorError('Failed to download JSON', cause=ve)
def _extract_entries(self, playlist):
return [
self.url_result(
'kaltura:%s:%s' % (item['kaltura_partner_id'], item['kaltura_entry_id']),
'Kaltura')
for item in playlist]
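# A standalone sanity check of the _VALID_URL pattern above (hypothetical;
# youtube-dl itself resolves the id via InfoExtractor._match_id):
if __name__ == '__main__':
    import re
    m = re.match(EllenTVIE._VALID_URL,
                 'http://www.ellentv.com/videos/0-ipq1gsai/')
    assert m and m.group('id') == '0-ipq1gsai'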
| unlicense | -6,047,428,086,753,971,000 | 1,626,319,364,783,986,200 | 31.238095 | 99 | 0.536928 | false |
xorstream/unicorn | qemu/header_gen.py | 5 | 105836 | #!/usr/bin/python
# Unicorn Emulator Engine
# By Dang Hoang Vu & Nguyen Anh Quynh
# syntax: ./header_gen.py <arm|aarch64|x86|name>
import sys
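# Each entry in the symbols tuple below names a QEMU function or variable
# that must exist once per emulated architecture. For every target named on
# the command line, the script is expected to emit one
# "#define SYMBOL SYMBOL_<target>" line per entry (assumed output format,
# inferred from Unicorn's generated qemu/<arch>.h headers), so that several
# targets can be linked into one binary without symbol clashes, e.g.:
#   #define arm_release arm_release_aarch64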
symbols = (
'arm_release',
'aarch64_tb_set_jmp_target',
'ppc_tb_set_jmp_target',
'use_idiv_instructions_rt',
'tcg_target_deposit_valid',
'helper_power_down',
'check_exit_request',
'address_space_unregister',
'tb_invalidate_phys_page_fast',
'phys_mem_clean',
'tb_cleanup',
'memory_map',
'memory_map_ptr',
'memory_unmap',
'memory_free',
'free_code_gen_buffer',
'helper_raise_exception',
'tcg_enabled',
'tcg_exec_init',
'memory_register_types',
'cpu_exec_init_all',
'vm_start',
'resume_all_vcpus',
'a15_l2ctlr_read',
'a64_translate_init',
'aa32_generate_debug_exceptions',
'aa64_cacheop_access',
'aa64_daif_access',
'aa64_daif_write',
'aa64_dczid_read',
'aa64_fpcr_read',
'aa64_fpcr_write',
'aa64_fpsr_read',
'aa64_fpsr_write',
'aa64_generate_debug_exceptions',
'aa64_zva_access',
'aarch64_banked_spsr_index',
'aarch64_restore_sp',
'aarch64_save_sp',
'accel_find',
'accel_init_machine',
'accel_type',
'access_with_adjusted_size',
'add128',
'add16_sat',
'add16_usat',
'add192',
'add8_sat',
'add8_usat',
'add_cpreg_to_hashtable',
'add_cpreg_to_list',
'addFloat128Sigs',
'addFloat32Sigs',
'addFloat64Sigs',
'addFloatx80Sigs',
'add_qemu_ldst_label',
'address_space_access_valid',
'address_space_destroy',
'address_space_destroy_dispatch',
'address_space_get_flatview',
'address_space_init',
'address_space_init_dispatch',
'address_space_lookup_region',
'address_space_map',
'address_space_read',
'address_space_rw',
'address_space_translate',
'address_space_translate_for_iotlb',
'address_space_translate_internal',
'address_space_unmap',
'address_space_update_topology',
'address_space_update_topology_pass',
'address_space_write',
'addrrange_contains',
'addrrange_end',
'addrrange_equal',
'addrrange_intersection',
'addrrange_intersects',
'addrrange_make',
'adjust_endianness',
'all_helpers',
'alloc_code_gen_buffer',
'alloc_entry',
'always_true',
'arm1026_initfn',
'arm1136_initfn',
'arm1136_r2_initfn',
'arm1176_initfn',
'arm11mpcore_initfn',
'arm926_initfn',
'arm946_initfn',
'arm_ccnt_enabled',
'arm_cp_read_zero',
'arm_cp_reset_ignore',
'arm_cpu_do_interrupt',
'arm_cpu_exec_interrupt',
'arm_cpu_finalizefn',
'arm_cpu_get_phys_page_debug',
'arm_cpu_handle_mmu_fault',
'arm_cpu_initfn',
'arm_cpu_list',
'cpu_loop_exit',
'arm_cpu_post_init',
'arm_cpu_realizefn',
'arm_cpu_register_gdb_regs_for_features',
'arm_cpu_register_types',
'cpu_resume_from_signal',
'arm_cpus',
'arm_cpu_set_pc',
'arm_cp_write_ignore',
'arm_current_el',
'arm_dc_feature',
'arm_debug_excp_handler',
'arm_debug_target_el',
'arm_el_is_aa64',
'arm_env_get_cpu',
'arm_excp_target_el',
'arm_excp_unmasked',
'arm_feature',
'arm_generate_debug_exceptions',
'gen_intermediate_code',
'gen_intermediate_code_pc',
'arm_gen_test_cc',
'arm_gt_ptimer_cb',
'arm_gt_vtimer_cb',
'arm_handle_psci_call',
'arm_is_psci_call',
'arm_is_secure',
'arm_is_secure_below_el3',
'arm_ldl_code',
'arm_lduw_code',
'arm_log_exception',
'arm_reg_read',
'arm_reg_reset',
'arm_reg_write',
'restore_state_to_opc',
'arm_rmode_to_sf',
'arm_singlestep_active',
'tlb_fill',
'tlb_flush',
'tlb_flush_page',
'tlb_set_page',
'arm_translate_init',
'arm_v7m_class_init',
'arm_v7m_cpu_do_interrupt',
'ats_access',
'ats_write',
'bad_mode_switch',
'bank_number',
'bitmap_zero_extend',
'bp_wp_matches',
'breakpoint_invalidate',
'build_page_bitmap',
'bus_add_child',
'bus_class_init',
'bus_info',
'bus_unparent',
'cache_block_ops_cp_reginfo',
'cache_dirty_status_cp_reginfo',
'cache_test_clean_cp_reginfo',
'call_recip_estimate',
'can_merge',
'capacity_increase',
'ccsidr_read',
'check_ap',
'check_breakpoints',
'check_watchpoints',
'cho',
'clear_bit',
'clz32',
'clz64',
'cmp_flatrange_addr',
'code_gen_alloc',
'commonNaNToFloat128',
'commonNaNToFloat16',
'commonNaNToFloat32',
'commonNaNToFloat64',
'commonNaNToFloatx80',
'compute_abs_deadline',
'cond_name',
'configure_accelerator',
'container_get',
'container_info',
'container_register_types',
'contextidr_write',
'core_log_global_start',
'core_log_global_stop',
'core_memory_listener',
'cortexa15_cp_reginfo',
'cortex_a15_initfn',
'cortexa8_cp_reginfo',
'cortex_a8_initfn',
'cortexa9_cp_reginfo',
'cortex_a9_initfn',
'cortex_m3_initfn',
'count_cpreg',
'countLeadingZeros32',
'countLeadingZeros64',
'cp_access_ok',
'cpacr_write',
'cpreg_field_is_64bit',
'cp_reginfo',
'cpreg_key_compare',
'cpreg_make_keylist',
'cp_reg_reset',
'cpreg_to_kvm_id',
'cpsr_read',
'cpsr_write',
'cptype_valid',
'cpu_abort',
'cpu_arm_exec',
'cpu_arm_gen_code',
'cpu_arm_init',
'cpu_breakpoint_insert',
'cpu_breakpoint_remove',
'cpu_breakpoint_remove_all',
'cpu_breakpoint_remove_by_ref',
'cpu_can_do_io',
'cpu_can_run',
'cpu_class_init',
'cpu_common_class_by_name',
'cpu_common_exec_interrupt',
'cpu_common_get_arch_id',
'cpu_common_get_memory_mapping',
'cpu_common_get_paging_enabled',
'cpu_common_has_work',
'cpu_common_initfn',
'cpu_common_noop',
'cpu_common_parse_features',
'cpu_common_realizefn',
'cpu_common_reset',
'cpu_dump_statistics',
'cpu_exec_init',
'cpu_flush_icache_range',
'cpu_gen_init',
'cpu_get_clock',
'cpu_get_real_ticks',
'cpu_get_tb_cpu_state',
'cpu_handle_debug_exception',
'cpu_handle_guest_debug',
'cpu_inb',
'cpu_inl',
'cpu_interrupt',
'cpu_interrupt_handler',
'cpu_inw',
'cpu_io_recompile',
'cpu_is_stopped',
'cpu_ldl_code',
'cpu_ldub_code',
'cpu_lduw_code',
'cpu_memory_rw_debug',
'cpu_mmu_index',
'cpu_outb',
'cpu_outl',
'cpu_outw',
'cpu_physical_memory_clear_dirty_range',
'cpu_physical_memory_get_clean',
'cpu_physical_memory_get_dirty',
'cpu_physical_memory_get_dirty_flag',
'cpu_physical_memory_is_clean',
'cpu_physical_memory_is_io',
'cpu_physical_memory_map',
'cpu_physical_memory_range_includes_clean',
'cpu_physical_memory_reset_dirty',
'cpu_physical_memory_rw',
'cpu_physical_memory_set_dirty_flag',
'cpu_physical_memory_set_dirty_range',
'cpu_physical_memory_unmap',
'cpu_physical_memory_write_rom',
'cpu_physical_memory_write_rom_internal',
'cpu_register',
'cpu_register_types',
'cpu_restore_state',
'cpu_restore_state_from_tb',
'cpu_single_step',
'cpu_tb_exec',
'cpu_tlb_reset_dirty_all',
'cpu_to_be64',
'cpu_to_le32',
'cpu_to_le64',
'cpu_type_info',
'cpu_unassigned_access',
'cpu_watchpoint_address_matches',
'cpu_watchpoint_insert',
'cpu_watchpoint_remove',
'cpu_watchpoint_remove_all',
'cpu_watchpoint_remove_by_ref',
'crc32c_table',
'create_new_memory_mapping',
'csselr_write',
'cto32',
'ctr_el0_access',
'ctz32',
'ctz64',
'dacr_write',
'dbgbcr_write',
'dbgbvr_write',
'dbgwcr_write',
'dbgwvr_write',
'debug_cp_reginfo',
'debug_frame',
'debug_lpae_cp_reginfo',
'define_arm_cp_regs',
'define_arm_cp_regs_with_opaque',
'define_debug_regs',
'define_one_arm_cp_reg',
'define_one_arm_cp_reg_with_opaque',
'deposit32',
'deposit64',
'deregister_tm_clones',
'device_class_base_init',
'device_class_init',
'device_finalize',
'device_get_realized',
'device_initfn',
'device_post_init',
'device_reset',
'device_set_realized',
'device_type_info',
'disas_arm_insn',
'disas_coproc_insn',
'disas_dsp_insn',
'disas_iwmmxt_insn',
'disas_neon_data_insn',
'disas_neon_ls_insn',
'disas_thumb2_insn',
'disas_thumb_insn',
'disas_vfp_insn',
'disas_vfp_v8_insn',
'do_arm_semihosting',
'do_clz16',
'do_clz8',
'do_constant_folding',
'do_constant_folding_2',
'do_constant_folding_cond',
'do_constant_folding_cond2',
'do_constant_folding_cond_32',
'do_constant_folding_cond_64',
'do_constant_folding_cond_eq',
'do_fcvt_f16_to_f32',
'do_fcvt_f32_to_f16',
'do_ssat',
'do_usad',
'do_usat',
'do_v7m_exception_exit',
'dummy_c15_cp_reginfo',
'dummy_func',
'dummy_section',
'_DYNAMIC',
'_edata',
'_end',
'end_list',
'eq128',
'ErrorClass_lookup',
'error_copy',
'error_exit',
'error_get_class',
'error_get_pretty',
'error_setg_file_open',
'estimateDiv128To64',
'estimateSqrt32',
'excnames',
'excp_is_internal',
'extended_addresses_enabled',
'extended_mpu_ap_bits',
'extract32',
'extract64',
'extractFloat128Exp',
'extractFloat128Frac0',
'extractFloat128Frac1',
'extractFloat128Sign',
'extractFloat16Exp',
'extractFloat16Frac',
'extractFloat16Sign',
'extractFloat32Exp',
'extractFloat32Frac',
'extractFloat32Sign',
'extractFloat64Exp',
'extractFloat64Frac',
'extractFloat64Sign',
'extractFloatx80Exp',
'extractFloatx80Frac',
'extractFloatx80Sign',
'fcse_write',
'find_better_copy',
'find_default_machine',
'find_desc_by_name',
'find_first_bit',
'find_paging_enabled_cpu',
'find_ram_block',
'find_ram_offset',
'find_string',
'find_type',
'_fini',
'flatrange_equal',
'flatview_destroy',
'flatview_init',
'flatview_insert',
'flatview_lookup',
'flatview_ref',
'flatview_simplify',
'flatview_unref',
'float128_add',
'float128_compare',
'float128_compare_internal',
'float128_compare_quiet',
'float128_default_nan',
'float128_div',
'float128_eq',
'float128_eq_quiet',
'float128_is_quiet_nan',
'float128_is_signaling_nan',
'float128_le',
'float128_le_quiet',
'float128_lt',
'float128_lt_quiet',
'float128_maybe_silence_nan',
'float128_mul',
'float128_rem',
'float128_round_to_int',
'float128_scalbn',
'float128_sqrt',
'float128_sub',
'float128ToCommonNaN',
'float128_to_float32',
'float128_to_float64',
'float128_to_floatx80',
'float128_to_int32',
'float128_to_int32_round_to_zero',
'float128_to_int64',
'float128_to_int64_round_to_zero',
'float128_unordered',
'float128_unordered_quiet',
'float16_default_nan',
'float16_is_quiet_nan',
'float16_is_signaling_nan',
'float16_maybe_silence_nan',
'float16ToCommonNaN',
'float16_to_float32',
'float16_to_float64',
'float32_abs',
'float32_add',
'float32_chs',
'float32_compare',
'float32_compare_internal',
'float32_compare_quiet',
'float32_default_nan',
'float32_div',
'float32_eq',
'float32_eq_quiet',
'float32_exp2',
'float32_exp2_coefficients',
'float32_is_any_nan',
'float32_is_infinity',
'float32_is_neg',
'float32_is_quiet_nan',
'float32_is_signaling_nan',
'float32_is_zero',
'float32_is_zero_or_denormal',
'float32_le',
'float32_le_quiet',
'float32_log2',
'float32_lt',
'float32_lt_quiet',
'float32_max',
'float32_maxnum',
'float32_maxnummag',
'float32_maybe_silence_nan',
'float32_min',
'float32_minmax',
'float32_minnum',
'float32_minnummag',
'float32_mul',
'float32_muladd',
'float32_rem',
'float32_round_to_int',
'float32_scalbn',
'float32_set_sign',
'float32_sqrt',
'float32_squash_input_denormal',
'float32_sub',
'float32ToCommonNaN',
'float32_to_float128',
'float32_to_float16',
'float32_to_float64',
'float32_to_floatx80',
'float32_to_int16',
'float32_to_int16_round_to_zero',
'float32_to_int32',
'float32_to_int32_round_to_zero',
'float32_to_int64',
'float32_to_int64_round_to_zero',
'float32_to_uint16',
'float32_to_uint16_round_to_zero',
'float32_to_uint32',
'float32_to_uint32_round_to_zero',
'float32_to_uint64',
'float32_to_uint64_round_to_zero',
'float32_unordered',
'float32_unordered_quiet',
'float64_abs',
'float64_add',
'float64_chs',
'float64_compare',
'float64_compare_internal',
'float64_compare_quiet',
'float64_default_nan',
'float64_div',
'float64_eq',
'float64_eq_quiet',
'float64_is_any_nan',
'float64_is_infinity',
'float64_is_neg',
'float64_is_quiet_nan',
'float64_is_signaling_nan',
'float64_is_zero',
'float64_le',
'float64_le_quiet',
'float64_log2',
'float64_lt',
'float64_lt_quiet',
'float64_max',
'float64_maxnum',
'float64_maxnummag',
'float64_maybe_silence_nan',
'float64_min',
'float64_minmax',
'float64_minnum',
'float64_minnummag',
'float64_mul',
'float64_muladd',
'float64_rem',
'float64_round_to_int',
'float64_scalbn',
'float64_set_sign',
'float64_sqrt',
'float64_squash_input_denormal',
'float64_sub',
'float64ToCommonNaN',
'float64_to_float128',
'float64_to_float16',
'float64_to_float32',
'float64_to_floatx80',
'float64_to_int16',
'float64_to_int16_round_to_zero',
'float64_to_int32',
'float64_to_int32_round_to_zero',
'float64_to_int64',
'float64_to_int64_round_to_zero',
'float64_to_uint16',
'float64_to_uint16_round_to_zero',
'float64_to_uint32',
'float64_to_uint32_round_to_zero',
'float64_to_uint64',
'float64_to_uint64_round_to_zero',
'float64_trunc_to_int',
'float64_unordered',
'float64_unordered_quiet',
'float_raise',
'floatx80_add',
'floatx80_compare',
'floatx80_compare_internal',
'floatx80_compare_quiet',
'floatx80_default_nan',
'floatx80_div',
'floatx80_eq',
'floatx80_eq_quiet',
'floatx80_is_quiet_nan',
'floatx80_is_signaling_nan',
'floatx80_le',
'floatx80_le_quiet',
'floatx80_lt',
'floatx80_lt_quiet',
'floatx80_maybe_silence_nan',
'floatx80_mul',
'floatx80_rem',
'floatx80_round_to_int',
'floatx80_scalbn',
'floatx80_sqrt',
'floatx80_sub',
'floatx80ToCommonNaN',
'floatx80_to_float128',
'floatx80_to_float32',
'floatx80_to_float64',
'floatx80_to_int32',
'floatx80_to_int32_round_to_zero',
'floatx80_to_int64',
'floatx80_to_int64_round_to_zero',
'floatx80_unordered',
'floatx80_unordered_quiet',
'flush_icache_range',
'format_string',
'fp_decode_rm',
'frame_dummy',
'free_range',
'fstat64',
'futex_wait',
'futex_wake',
'gen_aa32_ld16s',
'gen_aa32_ld16u',
'gen_aa32_ld32u',
'gen_aa32_ld64',
'gen_aa32_ld8s',
'gen_aa32_ld8u',
'gen_aa32_st16',
'gen_aa32_st32',
'gen_aa32_st64',
'gen_aa32_st8',
'gen_adc',
'gen_adc_CC',
'gen_add16',
'gen_add_carry',
'gen_add_CC',
'gen_add_datah_offset',
'gen_add_data_offset',
'gen_addq',
'gen_addq_lo',
'gen_addq_msw',
'gen_arm_parallel_addsub',
'gen_arm_shift_im',
'gen_arm_shift_reg',
'gen_bx',
'gen_bx_im',
'gen_clrex',
'generate_memory_topology',
'generic_timer_cp_reginfo',
'gen_exception',
'gen_exception_insn',
'gen_exception_internal',
'gen_exception_internal_insn',
'gen_exception_return',
'gen_goto_tb',
'gen_helper_access_check_cp_reg',
'gen_helper_add_saturate',
'gen_helper_add_setq',
'gen_helper_clear_pstate_ss',
'gen_helper_clz32',
'gen_helper_clz64',
'gen_helper_clz_arm',
'gen_helper_cpsr_read',
'gen_helper_cpsr_write',
'gen_helper_crc32_arm',
'gen_helper_crc32c',
'gen_helper_crypto_aese',
'gen_helper_crypto_aesmc',
'gen_helper_crypto_sha1_3reg',
'gen_helper_crypto_sha1h',
'gen_helper_crypto_sha1su1',
'gen_helper_crypto_sha256h',
'gen_helper_crypto_sha256h2',
'gen_helper_crypto_sha256su0',
'gen_helper_crypto_sha256su1',
'gen_helper_double_saturate',
'gen_helper_exception_internal',
'gen_helper_exception_with_syndrome',
'gen_helper_get_cp_reg',
'gen_helper_get_cp_reg64',
'gen_helper_get_r13_banked',
'gen_helper_get_user_reg',
'gen_helper_iwmmxt_addcb',
'gen_helper_iwmmxt_addcl',
'gen_helper_iwmmxt_addcw',
'gen_helper_iwmmxt_addnb',
'gen_helper_iwmmxt_addnl',
'gen_helper_iwmmxt_addnw',
'gen_helper_iwmmxt_addsb',
'gen_helper_iwmmxt_addsl',
'gen_helper_iwmmxt_addsw',
'gen_helper_iwmmxt_addub',
'gen_helper_iwmmxt_addul',
'gen_helper_iwmmxt_adduw',
'gen_helper_iwmmxt_align',
'gen_helper_iwmmxt_avgb0',
'gen_helper_iwmmxt_avgb1',
'gen_helper_iwmmxt_avgw0',
'gen_helper_iwmmxt_avgw1',
'gen_helper_iwmmxt_bcstb',
'gen_helper_iwmmxt_bcstl',
'gen_helper_iwmmxt_bcstw',
'gen_helper_iwmmxt_cmpeqb',
'gen_helper_iwmmxt_cmpeql',
'gen_helper_iwmmxt_cmpeqw',
'gen_helper_iwmmxt_cmpgtsb',
'gen_helper_iwmmxt_cmpgtsl',
'gen_helper_iwmmxt_cmpgtsw',
'gen_helper_iwmmxt_cmpgtub',
'gen_helper_iwmmxt_cmpgtul',
'gen_helper_iwmmxt_cmpgtuw',
'gen_helper_iwmmxt_insr',
'gen_helper_iwmmxt_macsw',
'gen_helper_iwmmxt_macuw',
'gen_helper_iwmmxt_maddsq',
'gen_helper_iwmmxt_madduq',
'gen_helper_iwmmxt_maxsb',
'gen_helper_iwmmxt_maxsl',
'gen_helper_iwmmxt_maxsw',
'gen_helper_iwmmxt_maxub',
'gen_helper_iwmmxt_maxul',
'gen_helper_iwmmxt_maxuw',
'gen_helper_iwmmxt_minsb',
'gen_helper_iwmmxt_minsl',
'gen_helper_iwmmxt_minsw',
'gen_helper_iwmmxt_minub',
'gen_helper_iwmmxt_minul',
'gen_helper_iwmmxt_minuw',
'gen_helper_iwmmxt_msbb',
'gen_helper_iwmmxt_msbl',
'gen_helper_iwmmxt_msbw',
'gen_helper_iwmmxt_muladdsl',
'gen_helper_iwmmxt_muladdsw',
'gen_helper_iwmmxt_muladdswl',
'gen_helper_iwmmxt_mulshw',
'gen_helper_iwmmxt_mulslw',
'gen_helper_iwmmxt_muluhw',
'gen_helper_iwmmxt_mululw',
'gen_helper_iwmmxt_packsl',
'gen_helper_iwmmxt_packsq',
'gen_helper_iwmmxt_packsw',
'gen_helper_iwmmxt_packul',
'gen_helper_iwmmxt_packuq',
'gen_helper_iwmmxt_packuw',
'gen_helper_iwmmxt_rorl',
'gen_helper_iwmmxt_rorq',
'gen_helper_iwmmxt_rorw',
'gen_helper_iwmmxt_sadb',
'gen_helper_iwmmxt_sadw',
'gen_helper_iwmmxt_setpsr_nz',
'gen_helper_iwmmxt_shufh',
'gen_helper_iwmmxt_slll',
'gen_helper_iwmmxt_sllq',
'gen_helper_iwmmxt_sllw',
'gen_helper_iwmmxt_sral',
'gen_helper_iwmmxt_sraq',
'gen_helper_iwmmxt_sraw',
'gen_helper_iwmmxt_srll',
'gen_helper_iwmmxt_srlq',
'gen_helper_iwmmxt_srlw',
'gen_helper_iwmmxt_subnb',
'gen_helper_iwmmxt_subnl',
'gen_helper_iwmmxt_subnw',
'gen_helper_iwmmxt_subsb',
'gen_helper_iwmmxt_subsl',
'gen_helper_iwmmxt_subsw',
'gen_helper_iwmmxt_subub',
'gen_helper_iwmmxt_subul',
'gen_helper_iwmmxt_subuw',
'gen_helper_iwmmxt_unpackhb',
'gen_helper_iwmmxt_unpackhl',
'gen_helper_iwmmxt_unpackhsb',
'gen_helper_iwmmxt_unpackhsl',
'gen_helper_iwmmxt_unpackhsw',
'gen_helper_iwmmxt_unpackhub',
'gen_helper_iwmmxt_unpackhul',
'gen_helper_iwmmxt_unpackhuw',
'gen_helper_iwmmxt_unpackhw',
'gen_helper_iwmmxt_unpacklb',
'gen_helper_iwmmxt_unpackll',
'gen_helper_iwmmxt_unpacklsb',
'gen_helper_iwmmxt_unpacklsl',
'gen_helper_iwmmxt_unpacklsw',
'gen_helper_iwmmxt_unpacklub',
'gen_helper_iwmmxt_unpacklul',
'gen_helper_iwmmxt_unpackluw',
'gen_helper_iwmmxt_unpacklw',
'gen_helper_neon_abd_f32',
'gen_helper_neon_abdl_s16',
'gen_helper_neon_abdl_s32',
'gen_helper_neon_abdl_s64',
'gen_helper_neon_abdl_u16',
'gen_helper_neon_abdl_u32',
'gen_helper_neon_abdl_u64',
'gen_helper_neon_abd_s16',
'gen_helper_neon_abd_s32',
'gen_helper_neon_abd_s8',
'gen_helper_neon_abd_u16',
'gen_helper_neon_abd_u32',
'gen_helper_neon_abd_u8',
'gen_helper_neon_abs_s16',
'gen_helper_neon_abs_s8',
'gen_helper_neon_acge_f32',
'gen_helper_neon_acgt_f32',
'gen_helper_neon_addl_saturate_s32',
'gen_helper_neon_addl_saturate_s64',
'gen_helper_neon_addl_u16',
'gen_helper_neon_addl_u32',
'gen_helper_neon_add_u16',
'gen_helper_neon_add_u8',
'gen_helper_neon_ceq_f32',
'gen_helper_neon_ceq_u16',
'gen_helper_neon_ceq_u32',
'gen_helper_neon_ceq_u8',
'gen_helper_neon_cge_f32',
'gen_helper_neon_cge_s16',
'gen_helper_neon_cge_s32',
'gen_helper_neon_cge_s8',
'gen_helper_neon_cge_u16',
'gen_helper_neon_cge_u32',
'gen_helper_neon_cge_u8',
'gen_helper_neon_cgt_f32',
'gen_helper_neon_cgt_s16',
'gen_helper_neon_cgt_s32',
'gen_helper_neon_cgt_s8',
'gen_helper_neon_cgt_u16',
'gen_helper_neon_cgt_u32',
'gen_helper_neon_cgt_u8',
'gen_helper_neon_cls_s16',
'gen_helper_neon_cls_s32',
'gen_helper_neon_cls_s8',
'gen_helper_neon_clz_u16',
'gen_helper_neon_clz_u8',
'gen_helper_neon_cnt_u8',
'gen_helper_neon_fcvt_f16_to_f32',
'gen_helper_neon_fcvt_f32_to_f16',
'gen_helper_neon_hadd_s16',
'gen_helper_neon_hadd_s32',
'gen_helper_neon_hadd_s8',
'gen_helper_neon_hadd_u16',
'gen_helper_neon_hadd_u32',
'gen_helper_neon_hadd_u8',
'gen_helper_neon_hsub_s16',
'gen_helper_neon_hsub_s32',
'gen_helper_neon_hsub_s8',
'gen_helper_neon_hsub_u16',
'gen_helper_neon_hsub_u32',
'gen_helper_neon_hsub_u8',
'gen_helper_neon_max_s16',
'gen_helper_neon_max_s32',
'gen_helper_neon_max_s8',
'gen_helper_neon_max_u16',
'gen_helper_neon_max_u32',
'gen_helper_neon_max_u8',
'gen_helper_neon_min_s16',
'gen_helper_neon_min_s32',
'gen_helper_neon_min_s8',
'gen_helper_neon_min_u16',
'gen_helper_neon_min_u32',
'gen_helper_neon_min_u8',
'gen_helper_neon_mull_p8',
'gen_helper_neon_mull_s16',
'gen_helper_neon_mull_s8',
'gen_helper_neon_mull_u16',
'gen_helper_neon_mull_u8',
'gen_helper_neon_mul_p8',
'gen_helper_neon_mul_u16',
'gen_helper_neon_mul_u8',
'gen_helper_neon_narrow_high_u16',
'gen_helper_neon_narrow_high_u8',
'gen_helper_neon_narrow_round_high_u16',
'gen_helper_neon_narrow_round_high_u8',
'gen_helper_neon_narrow_sat_s16',
'gen_helper_neon_narrow_sat_s32',
'gen_helper_neon_narrow_sat_s8',
'gen_helper_neon_narrow_sat_u16',
'gen_helper_neon_narrow_sat_u32',
'gen_helper_neon_narrow_sat_u8',
'gen_helper_neon_narrow_u16',
'gen_helper_neon_narrow_u8',
'gen_helper_neon_negl_u16',
'gen_helper_neon_negl_u32',
'gen_helper_neon_paddl_u16',
'gen_helper_neon_paddl_u32',
'gen_helper_neon_padd_u16',
'gen_helper_neon_padd_u8',
'gen_helper_neon_pmax_s16',
'gen_helper_neon_pmax_s8',
'gen_helper_neon_pmax_u16',
'gen_helper_neon_pmax_u8',
'gen_helper_neon_pmin_s16',
'gen_helper_neon_pmin_s8',
'gen_helper_neon_pmin_u16',
'gen_helper_neon_pmin_u8',
'gen_helper_neon_pmull_64_hi',
'gen_helper_neon_pmull_64_lo',
'gen_helper_neon_qabs_s16',
'gen_helper_neon_qabs_s32',
'gen_helper_neon_qabs_s8',
'gen_helper_neon_qadd_s16',
'gen_helper_neon_qadd_s32',
'gen_helper_neon_qadd_s64',
'gen_helper_neon_qadd_s8',
'gen_helper_neon_qadd_u16',
'gen_helper_neon_qadd_u32',
'gen_helper_neon_qadd_u64',
'gen_helper_neon_qadd_u8',
'gen_helper_neon_qdmulh_s16',
'gen_helper_neon_qdmulh_s32',
'gen_helper_neon_qneg_s16',
'gen_helper_neon_qneg_s32',
'gen_helper_neon_qneg_s8',
'gen_helper_neon_qrdmulh_s16',
'gen_helper_neon_qrdmulh_s32',
'gen_helper_neon_qrshl_s16',
'gen_helper_neon_qrshl_s32',
'gen_helper_neon_qrshl_s64',
'gen_helper_neon_qrshl_s8',
'gen_helper_neon_qrshl_u16',
'gen_helper_neon_qrshl_u32',
'gen_helper_neon_qrshl_u64',
'gen_helper_neon_qrshl_u8',
'gen_helper_neon_qshl_s16',
'gen_helper_neon_qshl_s32',
'gen_helper_neon_qshl_s64',
'gen_helper_neon_qshl_s8',
'gen_helper_neon_qshl_u16',
'gen_helper_neon_qshl_u32',
'gen_helper_neon_qshl_u64',
'gen_helper_neon_qshl_u8',
'gen_helper_neon_qshlu_s16',
'gen_helper_neon_qshlu_s32',
'gen_helper_neon_qshlu_s64',
'gen_helper_neon_qshlu_s8',
'gen_helper_neon_qsub_s16',
'gen_helper_neon_qsub_s32',
'gen_helper_neon_qsub_s64',
'gen_helper_neon_qsub_s8',
'gen_helper_neon_qsub_u16',
'gen_helper_neon_qsub_u32',
'gen_helper_neon_qsub_u64',
'gen_helper_neon_qsub_u8',
'gen_helper_neon_qunzip16',
'gen_helper_neon_qunzip32',
'gen_helper_neon_qunzip8',
'gen_helper_neon_qzip16',
'gen_helper_neon_qzip32',
'gen_helper_neon_qzip8',
'gen_helper_neon_rhadd_s16',
'gen_helper_neon_rhadd_s32',
'gen_helper_neon_rhadd_s8',
'gen_helper_neon_rhadd_u16',
'gen_helper_neon_rhadd_u32',
'gen_helper_neon_rhadd_u8',
'gen_helper_neon_rshl_s16',
'gen_helper_neon_rshl_s32',
'gen_helper_neon_rshl_s64',
'gen_helper_neon_rshl_s8',
'gen_helper_neon_rshl_u16',
'gen_helper_neon_rshl_u32',
'gen_helper_neon_rshl_u64',
'gen_helper_neon_rshl_u8',
'gen_helper_neon_shl_s16',
'gen_helper_neon_shl_s32',
'gen_helper_neon_shl_s64',
'gen_helper_neon_shl_s8',
'gen_helper_neon_shl_u16',
'gen_helper_neon_shl_u32',
'gen_helper_neon_shl_u64',
'gen_helper_neon_shl_u8',
'gen_helper_neon_subl_u16',
'gen_helper_neon_subl_u32',
'gen_helper_neon_sub_u16',
'gen_helper_neon_sub_u8',
'gen_helper_neon_tbl',
'gen_helper_neon_tst_u16',
'gen_helper_neon_tst_u32',
'gen_helper_neon_tst_u8',
'gen_helper_neon_unarrow_sat16',
'gen_helper_neon_unarrow_sat32',
'gen_helper_neon_unarrow_sat8',
'gen_helper_neon_unzip16',
'gen_helper_neon_unzip8',
'gen_helper_neon_widen_s16',
'gen_helper_neon_widen_s8',
'gen_helper_neon_widen_u16',
'gen_helper_neon_widen_u8',
'gen_helper_neon_zip16',
'gen_helper_neon_zip8',
'gen_helper_pre_hvc',
'gen_helper_pre_smc',
'gen_helper_qadd16',
'gen_helper_qadd8',
'gen_helper_qaddsubx',
'gen_helper_qsub16',
'gen_helper_qsub8',
'gen_helper_qsubaddx',
'gen_helper_rbit',
'gen_helper_recpe_f32',
'gen_helper_recpe_u32',
'gen_helper_recps_f32',
'gen_helper_rintd',
'gen_helper_rintd_exact',
'gen_helper_rints',
'gen_helper_rints_exact',
'gen_helper_ror_cc',
'gen_helper_rsqrte_f32',
'gen_helper_rsqrte_u32',
'gen_helper_rsqrts_f32',
'gen_helper_sadd16',
'gen_helper_sadd8',
'gen_helper_saddsubx',
'gen_helper_sar_cc',
'gen_helper_sdiv',
'gen_helper_sel_flags',
'gen_helper_set_cp_reg',
'gen_helper_set_cp_reg64',
'gen_helper_set_neon_rmode',
'gen_helper_set_r13_banked',
'gen_helper_set_rmode',
'gen_helper_set_user_reg',
'gen_helper_shadd16',
'gen_helper_shadd8',
'gen_helper_shaddsubx',
'gen_helper_shl_cc',
'gen_helper_shr_cc',
'gen_helper_shsub16',
'gen_helper_shsub8',
'gen_helper_shsubaddx',
'gen_helper_ssat',
'gen_helper_ssat16',
'gen_helper_ssub16',
'gen_helper_ssub8',
'gen_helper_ssubaddx',
'gen_helper_sub_saturate',
'gen_helper_sxtb16',
'gen_helper_uadd16',
'gen_helper_uadd8',
'gen_helper_uaddsubx',
'gen_helper_udiv',
'gen_helper_uhadd16',
'gen_helper_uhadd8',
'gen_helper_uhaddsubx',
'gen_helper_uhsub16',
'gen_helper_uhsub8',
'gen_helper_uhsubaddx',
'gen_helper_uqadd16',
'gen_helper_uqadd8',
'gen_helper_uqaddsubx',
'gen_helper_uqsub16',
'gen_helper_uqsub8',
'gen_helper_uqsubaddx',
'gen_helper_usad8',
'gen_helper_usat',
'gen_helper_usat16',
'gen_helper_usub16',
'gen_helper_usub8',
'gen_helper_usubaddx',
'gen_helper_uxtb16',
'gen_helper_v7m_mrs',
'gen_helper_v7m_msr',
'gen_helper_vfp_absd',
'gen_helper_vfp_abss',
'gen_helper_vfp_addd',
'gen_helper_vfp_adds',
'gen_helper_vfp_cmpd',
'gen_helper_vfp_cmped',
'gen_helper_vfp_cmpes',
'gen_helper_vfp_cmps',
'gen_helper_vfp_divd',
'gen_helper_vfp_divs',
'gen_helper_vfp_fcvtds',
'gen_helper_vfp_fcvt_f16_to_f32',
'gen_helper_vfp_fcvt_f16_to_f64',
'gen_helper_vfp_fcvt_f32_to_f16',
'gen_helper_vfp_fcvt_f64_to_f16',
'gen_helper_vfp_fcvtsd',
'gen_helper_vfp_get_fpscr',
'gen_helper_vfp_maxnumd',
'gen_helper_vfp_maxnums',
'gen_helper_vfp_maxs',
'gen_helper_vfp_minnumd',
'gen_helper_vfp_minnums',
'gen_helper_vfp_mins',
'gen_helper_vfp_muladdd',
'gen_helper_vfp_muladds',
'gen_helper_vfp_muld',
'gen_helper_vfp_muls',
'gen_helper_vfp_negd',
'gen_helper_vfp_negs',
'gen_helper_vfp_set_fpscr',
'gen_helper_vfp_shtod',
'gen_helper_vfp_shtos',
'gen_helper_vfp_sitod',
'gen_helper_vfp_sitos',
'gen_helper_vfp_sltod',
'gen_helper_vfp_sltos',
'gen_helper_vfp_sqrtd',
'gen_helper_vfp_sqrts',
'gen_helper_vfp_subd',
'gen_helper_vfp_subs',
'gen_helper_vfp_toshd_round_to_zero',
'gen_helper_vfp_toshs_round_to_zero',
'gen_helper_vfp_tosid',
'gen_helper_vfp_tosis',
'gen_helper_vfp_tosizd',
'gen_helper_vfp_tosizs',
'gen_helper_vfp_tosld',
'gen_helper_vfp_tosld_round_to_zero',
'gen_helper_vfp_tosls',
'gen_helper_vfp_tosls_round_to_zero',
'gen_helper_vfp_touhd_round_to_zero',
'gen_helper_vfp_touhs_round_to_zero',
'gen_helper_vfp_touid',
'gen_helper_vfp_touis',
'gen_helper_vfp_touizd',
'gen_helper_vfp_touizs',
'gen_helper_vfp_tould',
'gen_helper_vfp_tould_round_to_zero',
'gen_helper_vfp_touls',
'gen_helper_vfp_touls_round_to_zero',
'gen_helper_vfp_uhtod',
'gen_helper_vfp_uhtos',
'gen_helper_vfp_uitod',
'gen_helper_vfp_uitos',
'gen_helper_vfp_ultod',
'gen_helper_vfp_ultos',
'gen_helper_wfe',
'gen_helper_wfi',
'gen_hvc',
'gen_intermediate_code_internal',
'gen_intermediate_code_internal_a64',
'gen_iwmmxt_address',
'gen_iwmmxt_shift',
'gen_jmp',
'gen_load_and_replicate',
'gen_load_exclusive',
'gen_logic_CC',
'gen_logicq_cc',
'gen_lookup_tb',
'gen_mov_F0_vreg',
'gen_mov_F1_vreg',
'gen_mov_vreg_F0',
'gen_muls_i64_i32',
'gen_mulu_i64_i32',
'gen_mulxy',
'gen_neon_add',
'gen_neon_addl',
'gen_neon_addl_saturate',
'gen_neon_bsl',
'gen_neon_dup_high16',
'gen_neon_dup_low16',
'gen_neon_dup_u8',
'gen_neon_mull',
'gen_neon_narrow',
'gen_neon_narrow_op',
'gen_neon_narrow_sats',
'gen_neon_narrow_satu',
'gen_neon_negl',
'gen_neon_rsb',
'gen_neon_shift_narrow',
'gen_neon_subl',
'gen_neon_trn_u16',
'gen_neon_trn_u8',
'gen_neon_unarrow_sats',
'gen_neon_unzip',
'gen_neon_widen',
'gen_neon_zip',
'gen_new_label',
'gen_nop_hint',
'gen_op_iwmmxt_addl_M0_wRn',
'gen_op_iwmmxt_addnb_M0_wRn',
'gen_op_iwmmxt_addnl_M0_wRn',
'gen_op_iwmmxt_addnw_M0_wRn',
'gen_op_iwmmxt_addsb_M0_wRn',
'gen_op_iwmmxt_addsl_M0_wRn',
'gen_op_iwmmxt_addsw_M0_wRn',
'gen_op_iwmmxt_addub_M0_wRn',
'gen_op_iwmmxt_addul_M0_wRn',
'gen_op_iwmmxt_adduw_M0_wRn',
'gen_op_iwmmxt_andq_M0_wRn',
'gen_op_iwmmxt_avgb0_M0_wRn',
'gen_op_iwmmxt_avgb1_M0_wRn',
'gen_op_iwmmxt_avgw0_M0_wRn',
'gen_op_iwmmxt_avgw1_M0_wRn',
'gen_op_iwmmxt_cmpeqb_M0_wRn',
'gen_op_iwmmxt_cmpeql_M0_wRn',
'gen_op_iwmmxt_cmpeqw_M0_wRn',
'gen_op_iwmmxt_cmpgtsb_M0_wRn',
'gen_op_iwmmxt_cmpgtsl_M0_wRn',
'gen_op_iwmmxt_cmpgtsw_M0_wRn',
'gen_op_iwmmxt_cmpgtub_M0_wRn',
'gen_op_iwmmxt_cmpgtul_M0_wRn',
'gen_op_iwmmxt_cmpgtuw_M0_wRn',
'gen_op_iwmmxt_macsw_M0_wRn',
'gen_op_iwmmxt_macuw_M0_wRn',
'gen_op_iwmmxt_maddsq_M0_wRn',
'gen_op_iwmmxt_madduq_M0_wRn',
'gen_op_iwmmxt_maxsb_M0_wRn',
'gen_op_iwmmxt_maxsl_M0_wRn',
'gen_op_iwmmxt_maxsw_M0_wRn',
'gen_op_iwmmxt_maxub_M0_wRn',
'gen_op_iwmmxt_maxul_M0_wRn',
'gen_op_iwmmxt_maxuw_M0_wRn',
'gen_op_iwmmxt_minsb_M0_wRn',
'gen_op_iwmmxt_minsl_M0_wRn',
'gen_op_iwmmxt_minsw_M0_wRn',
'gen_op_iwmmxt_minub_M0_wRn',
'gen_op_iwmmxt_minul_M0_wRn',
'gen_op_iwmmxt_minuw_M0_wRn',
'gen_op_iwmmxt_movq_M0_wRn',
'gen_op_iwmmxt_movq_wRn_M0',
'gen_op_iwmmxt_mulshw_M0_wRn',
'gen_op_iwmmxt_mulslw_M0_wRn',
'gen_op_iwmmxt_muluhw_M0_wRn',
'gen_op_iwmmxt_mululw_M0_wRn',
'gen_op_iwmmxt_orq_M0_wRn',
'gen_op_iwmmxt_packsl_M0_wRn',
'gen_op_iwmmxt_packsq_M0_wRn',
'gen_op_iwmmxt_packsw_M0_wRn',
'gen_op_iwmmxt_packul_M0_wRn',
'gen_op_iwmmxt_packuq_M0_wRn',
'gen_op_iwmmxt_packuw_M0_wRn',
'gen_op_iwmmxt_sadb_M0_wRn',
'gen_op_iwmmxt_sadw_M0_wRn',
'gen_op_iwmmxt_set_cup',
'gen_op_iwmmxt_set_mup',
'gen_op_iwmmxt_setpsr_nz',
'gen_op_iwmmxt_subnb_M0_wRn',
'gen_op_iwmmxt_subnl_M0_wRn',
'gen_op_iwmmxt_subnw_M0_wRn',
'gen_op_iwmmxt_subsb_M0_wRn',
'gen_op_iwmmxt_subsl_M0_wRn',
'gen_op_iwmmxt_subsw_M0_wRn',
'gen_op_iwmmxt_subub_M0_wRn',
'gen_op_iwmmxt_subul_M0_wRn',
'gen_op_iwmmxt_subuw_M0_wRn',
'gen_op_iwmmxt_unpackhb_M0_wRn',
'gen_op_iwmmxt_unpackhl_M0_wRn',
'gen_op_iwmmxt_unpackhsb_M0',
'gen_op_iwmmxt_unpackhsl_M0',
'gen_op_iwmmxt_unpackhsw_M0',
'gen_op_iwmmxt_unpackhub_M0',
'gen_op_iwmmxt_unpackhul_M0',
'gen_op_iwmmxt_unpackhuw_M0',
'gen_op_iwmmxt_unpackhw_M0_wRn',
'gen_op_iwmmxt_unpacklb_M0_wRn',
'gen_op_iwmmxt_unpackll_M0_wRn',
'gen_op_iwmmxt_unpacklsb_M0',
'gen_op_iwmmxt_unpacklsl_M0',
'gen_op_iwmmxt_unpacklsw_M0',
'gen_op_iwmmxt_unpacklub_M0',
'gen_op_iwmmxt_unpacklul_M0',
'gen_op_iwmmxt_unpackluw_M0',
'gen_op_iwmmxt_unpacklw_M0_wRn',
'gen_op_iwmmxt_xorq_M0_wRn',
'gen_rev16',
'gen_revsh',
'gen_rfe',
'gen_sar',
'gen_sbc_CC',
'gen_sbfx',
'gen_set_CF_bit31',
'gen_set_condexec',
'gen_set_cpsr',
'gen_set_label',
'gen_set_pc_im',
'gen_set_psr',
'gen_set_psr_im',
'gen_shl',
'gen_shr',
'gen_smc',
'gen_smul_dual',
'gen_srs',
'gen_ss_advance',
'gen_step_complete_exception',
'gen_store_exclusive',
'gen_storeq_reg',
'gen_sub_carry',
'gen_sub_CC',
'gen_subq_msw',
'gen_swap_half',
'gen_thumb2_data_op',
'gen_thumb2_parallel_addsub',
'gen_ubfx',
'gen_vfp_abs',
'gen_vfp_add',
'gen_vfp_cmp',
'gen_vfp_cmpe',
'gen_vfp_div',
'gen_vfp_F1_ld0',
'gen_vfp_F1_mul',
'gen_vfp_F1_neg',
'gen_vfp_ld',
'gen_vfp_mrs',
'gen_vfp_msr',
'gen_vfp_mul',
'gen_vfp_neg',
'gen_vfp_shto',
'gen_vfp_sito',
'gen_vfp_slto',
'gen_vfp_sqrt',
'gen_vfp_st',
'gen_vfp_sub',
'gen_vfp_tosh',
'gen_vfp_tosi',
'gen_vfp_tosiz',
'gen_vfp_tosl',
'gen_vfp_touh',
'gen_vfp_toui',
'gen_vfp_touiz',
'gen_vfp_toul',
'gen_vfp_uhto',
'gen_vfp_uito',
'gen_vfp_ulto',
'get_arm_cp_reginfo',
'get_clock',
'get_clock_realtime',
'get_constraint_priority',
'get_float_exception_flags',
'get_float_rounding_mode',
'get_fpstatus_ptr',
'get_level1_table_address',
'get_mem_index',
'get_next_param_value',
'get_opt_name',
'get_opt_value',
'get_page_addr_code',
'get_param_value',
'get_phys_addr',
'get_phys_addr_lpae',
'get_phys_addr_mpu',
'get_phys_addr_v5',
'get_phys_addr_v6',
'get_system_memory',
'get_ticks_per_sec',
'g_list_insert_sorted_merged',
'_GLOBAL_OFFSET_TABLE_',
'gt_cntfrq_access',
'gt_cnt_read',
'gt_cnt_reset',
'gt_counter_access',
'gt_ctl_write',
'gt_cval_write',
'gt_get_countervalue',
'gt_pct_access',
'gt_ptimer_access',
'gt_recalc_timer',
'gt_timer_access',
'gt_tval_read',
'gt_tval_write',
'gt_vct_access',
'gt_vtimer_access',
'guest_phys_blocks_free',
'guest_phys_blocks_init',
'handle_vcvt',
'handle_vminmaxnm',
'handle_vrint',
'handle_vsel',
'has_help_option',
'have_bmi1',
'have_bmi2',
'hcr_write',
'helper_access_check_cp_reg',
'helper_add_saturate',
'helper_add_setq',
'helper_add_usaturate',
'helper_be_ldl_cmmu',
'helper_be_ldq_cmmu',
'helper_be_ldq_mmu',
'helper_be_ldsl_mmu',
'helper_be_ldsw_mmu',
'helper_be_ldul_mmu',
'helper_be_lduw_mmu',
'helper_be_ldw_cmmu',
'helper_be_stl_mmu',
'helper_be_stq_mmu',
'helper_be_stw_mmu',
'helper_clear_pstate_ss',
'helper_clz_arm',
'helper_cpsr_read',
'helper_cpsr_write',
'helper_crc32_arm',
'helper_crc32c',
'helper_crypto_aese',
'helper_crypto_aesmc',
'helper_crypto_sha1_3reg',
'helper_crypto_sha1h',
'helper_crypto_sha1su1',
'helper_crypto_sha256h',
'helper_crypto_sha256h2',
'helper_crypto_sha256su0',
'helper_crypto_sha256su1',
'helper_dc_zva',
'helper_double_saturate',
'helper_exception_internal',
'helper_exception_return',
'helper_exception_with_syndrome',
'helper_get_cp_reg',
'helper_get_cp_reg64',
'helper_get_r13_banked',
'helper_get_user_reg',
'helper_iwmmxt_addcb',
'helper_iwmmxt_addcl',
'helper_iwmmxt_addcw',
'helper_iwmmxt_addnb',
'helper_iwmmxt_addnl',
'helper_iwmmxt_addnw',
'helper_iwmmxt_addsb',
'helper_iwmmxt_addsl',
'helper_iwmmxt_addsw',
'helper_iwmmxt_addub',
'helper_iwmmxt_addul',
'helper_iwmmxt_adduw',
'helper_iwmmxt_align',
'helper_iwmmxt_avgb0',
'helper_iwmmxt_avgb1',
'helper_iwmmxt_avgw0',
'helper_iwmmxt_avgw1',
'helper_iwmmxt_bcstb',
'helper_iwmmxt_bcstl',
'helper_iwmmxt_bcstw',
'helper_iwmmxt_cmpeqb',
'helper_iwmmxt_cmpeql',
'helper_iwmmxt_cmpeqw',
'helper_iwmmxt_cmpgtsb',
'helper_iwmmxt_cmpgtsl',
'helper_iwmmxt_cmpgtsw',
'helper_iwmmxt_cmpgtub',
'helper_iwmmxt_cmpgtul',
'helper_iwmmxt_cmpgtuw',
'helper_iwmmxt_insr',
'helper_iwmmxt_macsw',
'helper_iwmmxt_macuw',
'helper_iwmmxt_maddsq',
'helper_iwmmxt_madduq',
'helper_iwmmxt_maxsb',
'helper_iwmmxt_maxsl',
'helper_iwmmxt_maxsw',
'helper_iwmmxt_maxub',
'helper_iwmmxt_maxul',
'helper_iwmmxt_maxuw',
'helper_iwmmxt_minsb',
'helper_iwmmxt_minsl',
'helper_iwmmxt_minsw',
'helper_iwmmxt_minub',
'helper_iwmmxt_minul',
'helper_iwmmxt_minuw',
'helper_iwmmxt_msbb',
'helper_iwmmxt_msbl',
'helper_iwmmxt_msbw',
'helper_iwmmxt_muladdsl',
'helper_iwmmxt_muladdsw',
'helper_iwmmxt_muladdswl',
'helper_iwmmxt_mulshw',
'helper_iwmmxt_mulslw',
'helper_iwmmxt_muluhw',
'helper_iwmmxt_mululw',
'helper_iwmmxt_packsl',
'helper_iwmmxt_packsq',
'helper_iwmmxt_packsw',
'helper_iwmmxt_packul',
'helper_iwmmxt_packuq',
'helper_iwmmxt_packuw',
'helper_iwmmxt_rorl',
'helper_iwmmxt_rorq',
'helper_iwmmxt_rorw',
'helper_iwmmxt_sadb',
'helper_iwmmxt_sadw',
'helper_iwmmxt_setpsr_nz',
'helper_iwmmxt_shufh',
'helper_iwmmxt_slll',
'helper_iwmmxt_sllq',
'helper_iwmmxt_sllw',
'helper_iwmmxt_sral',
'helper_iwmmxt_sraq',
'helper_iwmmxt_sraw',
'helper_iwmmxt_srll',
'helper_iwmmxt_srlq',
'helper_iwmmxt_srlw',
'helper_iwmmxt_subnb',
'helper_iwmmxt_subnl',
'helper_iwmmxt_subnw',
'helper_iwmmxt_subsb',
'helper_iwmmxt_subsl',
'helper_iwmmxt_subsw',
'helper_iwmmxt_subub',
'helper_iwmmxt_subul',
'helper_iwmmxt_subuw',
'helper_iwmmxt_unpackhb',
'helper_iwmmxt_unpackhl',
'helper_iwmmxt_unpackhsb',
'helper_iwmmxt_unpackhsl',
'helper_iwmmxt_unpackhsw',
'helper_iwmmxt_unpackhub',
'helper_iwmmxt_unpackhul',
'helper_iwmmxt_unpackhuw',
'helper_iwmmxt_unpackhw',
'helper_iwmmxt_unpacklb',
'helper_iwmmxt_unpackll',
'helper_iwmmxt_unpacklsb',
'helper_iwmmxt_unpacklsl',
'helper_iwmmxt_unpacklsw',
'helper_iwmmxt_unpacklub',
'helper_iwmmxt_unpacklul',
'helper_iwmmxt_unpackluw',
'helper_iwmmxt_unpacklw',
'helper_ldb_cmmu',
'helper_ldb_mmu',
'helper_ldl_cmmu',
'helper_ldl_mmu',
'helper_ldq_cmmu',
'helper_ldq_mmu',
'helper_ldw_cmmu',
'helper_ldw_mmu',
'helper_le_ldl_cmmu',
'helper_le_ldq_cmmu',
'helper_le_ldq_mmu',
'helper_le_ldsl_mmu',
'helper_le_ldsw_mmu',
'helper_le_ldul_mmu',
'helper_le_lduw_mmu',
'helper_le_ldw_cmmu',
'helper_le_stl_mmu',
'helper_le_stq_mmu',
'helper_le_stw_mmu',
'helper_msr_i_pstate',
'helper_neon_abd_f32',
'helper_neon_abdl_s16',
'helper_neon_abdl_s32',
'helper_neon_abdl_s64',
'helper_neon_abdl_u16',
'helper_neon_abdl_u32',
'helper_neon_abdl_u64',
'helper_neon_abd_s16',
'helper_neon_abd_s32',
'helper_neon_abd_s8',
'helper_neon_abd_u16',
'helper_neon_abd_u32',
'helper_neon_abd_u8',
'helper_neon_abs_s16',
'helper_neon_abs_s8',
'helper_neon_acge_f32',
'helper_neon_acge_f64',
'helper_neon_acgt_f32',
'helper_neon_acgt_f64',
'helper_neon_addl_saturate_s32',
'helper_neon_addl_saturate_s64',
'helper_neon_addl_u16',
'helper_neon_addl_u32',
'helper_neon_add_u16',
'helper_neon_add_u8',
'helper_neon_ceq_f32',
'helper_neon_ceq_u16',
'helper_neon_ceq_u32',
'helper_neon_ceq_u8',
'helper_neon_cge_f32',
'helper_neon_cge_s16',
'helper_neon_cge_s32',
'helper_neon_cge_s8',
'helper_neon_cge_u16',
'helper_neon_cge_u32',
'helper_neon_cge_u8',
'helper_neon_cgt_f32',
'helper_neon_cgt_s16',
'helper_neon_cgt_s32',
'helper_neon_cgt_s8',
'helper_neon_cgt_u16',
'helper_neon_cgt_u32',
'helper_neon_cgt_u8',
'helper_neon_cls_s16',
'helper_neon_cls_s32',
'helper_neon_cls_s8',
'helper_neon_clz_u16',
'helper_neon_clz_u8',
'helper_neon_cnt_u8',
'helper_neon_fcvt_f16_to_f32',
'helper_neon_fcvt_f32_to_f16',
'helper_neon_hadd_s16',
'helper_neon_hadd_s32',
'helper_neon_hadd_s8',
'helper_neon_hadd_u16',
'helper_neon_hadd_u32',
'helper_neon_hadd_u8',
'helper_neon_hsub_s16',
'helper_neon_hsub_s32',
'helper_neon_hsub_s8',
'helper_neon_hsub_u16',
'helper_neon_hsub_u32',
'helper_neon_hsub_u8',
'helper_neon_max_s16',
'helper_neon_max_s32',
'helper_neon_max_s8',
'helper_neon_max_u16',
'helper_neon_max_u32',
'helper_neon_max_u8',
'helper_neon_min_s16',
'helper_neon_min_s32',
'helper_neon_min_s8',
'helper_neon_min_u16',
'helper_neon_min_u32',
'helper_neon_min_u8',
'helper_neon_mull_p8',
'helper_neon_mull_s16',
'helper_neon_mull_s8',
'helper_neon_mull_u16',
'helper_neon_mull_u8',
'helper_neon_mul_p8',
'helper_neon_mul_u16',
'helper_neon_mul_u8',
'helper_neon_narrow_high_u16',
'helper_neon_narrow_high_u8',
'helper_neon_narrow_round_high_u16',
'helper_neon_narrow_round_high_u8',
'helper_neon_narrow_sat_s16',
'helper_neon_narrow_sat_s32',
'helper_neon_narrow_sat_s8',
'helper_neon_narrow_sat_u16',
'helper_neon_narrow_sat_u32',
'helper_neon_narrow_sat_u8',
'helper_neon_narrow_u16',
'helper_neon_narrow_u8',
'helper_neon_negl_u16',
'helper_neon_negl_u32',
'helper_neon_paddl_u16',
'helper_neon_paddl_u32',
'helper_neon_padd_u16',
'helper_neon_padd_u8',
'helper_neon_pmax_s16',
'helper_neon_pmax_s8',
'helper_neon_pmax_u16',
'helper_neon_pmax_u8',
'helper_neon_pmin_s16',
'helper_neon_pmin_s8',
'helper_neon_pmin_u16',
'helper_neon_pmin_u8',
'helper_neon_pmull_64_hi',
'helper_neon_pmull_64_lo',
'helper_neon_qabs_s16',
'helper_neon_qabs_s32',
'helper_neon_qabs_s64',
'helper_neon_qabs_s8',
'helper_neon_qadd_s16',
'helper_neon_qadd_s32',
'helper_neon_qadd_s64',
'helper_neon_qadd_s8',
'helper_neon_qadd_u16',
'helper_neon_qadd_u32',
'helper_neon_qadd_u64',
'helper_neon_qadd_u8',
'helper_neon_qdmulh_s16',
'helper_neon_qdmulh_s32',
'helper_neon_qneg_s16',
'helper_neon_qneg_s32',
'helper_neon_qneg_s64',
'helper_neon_qneg_s8',
'helper_neon_qrdmulh_s16',
'helper_neon_qrdmulh_s32',
'helper_neon_qrshl_s16',
'helper_neon_qrshl_s32',
'helper_neon_qrshl_s64',
'helper_neon_qrshl_s8',
'helper_neon_qrshl_u16',
'helper_neon_qrshl_u32',
'helper_neon_qrshl_u64',
'helper_neon_qrshl_u8',
'helper_neon_qshl_s16',
'helper_neon_qshl_s32',
'helper_neon_qshl_s64',
'helper_neon_qshl_s8',
'helper_neon_qshl_u16',
'helper_neon_qshl_u32',
'helper_neon_qshl_u64',
'helper_neon_qshl_u8',
'helper_neon_qshlu_s16',
'helper_neon_qshlu_s32',
'helper_neon_qshlu_s64',
'helper_neon_qshlu_s8',
'helper_neon_qsub_s16',
'helper_neon_qsub_s32',
'helper_neon_qsub_s64',
'helper_neon_qsub_s8',
'helper_neon_qsub_u16',
'helper_neon_qsub_u32',
'helper_neon_qsub_u64',
'helper_neon_qsub_u8',
'helper_neon_qunzip16',
'helper_neon_qunzip32',
'helper_neon_qunzip8',
'helper_neon_qzip16',
'helper_neon_qzip32',
'helper_neon_qzip8',
'helper_neon_rbit_u8',
'helper_neon_rhadd_s16',
'helper_neon_rhadd_s32',
'helper_neon_rhadd_s8',
'helper_neon_rhadd_u16',
'helper_neon_rhadd_u32',
'helper_neon_rhadd_u8',
'helper_neon_rshl_s16',
'helper_neon_rshl_s32',
'helper_neon_rshl_s64',
'helper_neon_rshl_s8',
'helper_neon_rshl_u16',
'helper_neon_rshl_u32',
'helper_neon_rshl_u64',
'helper_neon_rshl_u8',
'helper_neon_shl_s16',
'helper_neon_shl_s32',
'helper_neon_shl_s64',
'helper_neon_shl_s8',
'helper_neon_shl_u16',
'helper_neon_shl_u32',
'helper_neon_shl_u64',
'helper_neon_shl_u8',
'helper_neon_sqadd_u16',
'helper_neon_sqadd_u32',
'helper_neon_sqadd_u64',
'helper_neon_sqadd_u8',
'helper_neon_subl_u16',
'helper_neon_subl_u32',
'helper_neon_sub_u16',
'helper_neon_sub_u8',
'helper_neon_tbl',
'helper_neon_tst_u16',
'helper_neon_tst_u32',
'helper_neon_tst_u8',
'helper_neon_unarrow_sat16',
'helper_neon_unarrow_sat32',
'helper_neon_unarrow_sat8',
'helper_neon_unzip16',
'helper_neon_unzip8',
'helper_neon_uqadd_s16',
'helper_neon_uqadd_s32',
'helper_neon_uqadd_s64',
'helper_neon_uqadd_s8',
'helper_neon_widen_s16',
'helper_neon_widen_s8',
'helper_neon_widen_u16',
'helper_neon_widen_u8',
'helper_neon_zip16',
'helper_neon_zip8',
'helper_pre_hvc',
'helper_pre_smc',
'helper_qadd16',
'helper_qadd8',
'helper_qaddsubx',
'helper_qsub16',
'helper_qsub8',
'helper_qsubaddx',
'helper_rbit',
'helper_recpe_f32',
'helper_recpe_f64',
'helper_recpe_u32',
'helper_recps_f32',
'helper_ret_ldb_cmmu',
'helper_ret_ldsb_mmu',
'helper_ret_ldub_mmu',
'helper_ret_stb_mmu',
'helper_rintd',
'helper_rintd_exact',
'helper_rints',
'helper_rints_exact',
'helper_ror_cc',
'helper_rsqrte_f32',
'helper_rsqrte_f64',
'helper_rsqrte_u32',
'helper_rsqrts_f32',
'helper_sadd16',
'helper_sadd8',
'helper_saddsubx',
'helper_sar_cc',
'helper_sdiv',
'helper_sel_flags',
'helper_set_cp_reg',
'helper_set_cp_reg64',
'helper_set_neon_rmode',
'helper_set_r13_banked',
'helper_set_rmode',
'helper_set_user_reg',
'helper_shadd16',
'helper_shadd8',
'helper_shaddsubx',
'helper_shl_cc',
'helper_shr_cc',
'helper_shsub16',
'helper_shsub8',
'helper_shsubaddx',
'helper_ssat',
'helper_ssat16',
'helper_ssub16',
'helper_ssub8',
'helper_ssubaddx',
'helper_stb_mmu',
'helper_stl_mmu',
'helper_stq_mmu',
'helper_stw_mmu',
'helper_sub_saturate',
'helper_sub_usaturate',
'helper_sxtb16',
'helper_uadd16',
'helper_uadd8',
'helper_uaddsubx',
'helper_udiv',
'helper_uhadd16',
'helper_uhadd8',
'helper_uhaddsubx',
'helper_uhsub16',
'helper_uhsub8',
'helper_uhsubaddx',
'helper_uqadd16',
'helper_uqadd8',
'helper_uqaddsubx',
'helper_uqsub16',
'helper_uqsub8',
'helper_uqsubaddx',
'helper_usad8',
'helper_usat',
'helper_usat16',
'helper_usub16',
'helper_usub8',
'helper_usubaddx',
'helper_uxtb16',
'helper_v7m_mrs',
'helper_v7m_msr',
'helper_vfp_absd',
'helper_vfp_abss',
'helper_vfp_addd',
'helper_vfp_adds',
'helper_vfp_cmpd',
'helper_vfp_cmped',
'helper_vfp_cmpes',
'helper_vfp_cmps',
'helper_vfp_divd',
'helper_vfp_divs',
'helper_vfp_fcvtds',
'helper_vfp_fcvt_f16_to_f32',
'helper_vfp_fcvt_f16_to_f64',
'helper_vfp_fcvt_f32_to_f16',
'helper_vfp_fcvt_f64_to_f16',
'helper_vfp_fcvtsd',
'helper_vfp_get_fpscr',
'helper_vfp_maxd',
'helper_vfp_maxnumd',
'helper_vfp_maxnums',
'helper_vfp_maxs',
'helper_vfp_mind',
'helper_vfp_minnumd',
'helper_vfp_minnums',
'helper_vfp_mins',
'helper_vfp_muladdd',
'helper_vfp_muladds',
'helper_vfp_muld',
'helper_vfp_muls',
'helper_vfp_negd',
'helper_vfp_negs',
'helper_vfp_set_fpscr',
'helper_vfp_shtod',
'helper_vfp_shtos',
'helper_vfp_sitod',
'helper_vfp_sitos',
'helper_vfp_sltod',
'helper_vfp_sltos',
'helper_vfp_sqrtd',
'helper_vfp_sqrts',
'helper_vfp_sqtod',
'helper_vfp_sqtos',
'helper_vfp_subd',
'helper_vfp_subs',
'helper_vfp_toshd',
'helper_vfp_toshd_round_to_zero',
'helper_vfp_toshs',
'helper_vfp_toshs_round_to_zero',
'helper_vfp_tosid',
'helper_vfp_tosis',
'helper_vfp_tosizd',
'helper_vfp_tosizs',
'helper_vfp_tosld',
'helper_vfp_tosld_round_to_zero',
'helper_vfp_tosls',
'helper_vfp_tosls_round_to_zero',
'helper_vfp_tosqd',
'helper_vfp_tosqs',
'helper_vfp_touhd',
'helper_vfp_touhd_round_to_zero',
'helper_vfp_touhs',
'helper_vfp_touhs_round_to_zero',
'helper_vfp_touid',
'helper_vfp_touis',
'helper_vfp_touizd',
'helper_vfp_touizs',
'helper_vfp_tould',
'helper_vfp_tould_round_to_zero',
'helper_vfp_touls',
'helper_vfp_touls_round_to_zero',
'helper_vfp_touqd',
'helper_vfp_touqs',
'helper_vfp_uhtod',
'helper_vfp_uhtos',
'helper_vfp_uitod',
'helper_vfp_uitos',
'helper_vfp_ultod',
'helper_vfp_ultos',
'helper_vfp_uqtod',
'helper_vfp_uqtos',
'helper_wfe',
'helper_wfi',
'hex2decimal',
'hw_breakpoint_update',
'hw_breakpoint_update_all',
'hw_watchpoint_update',
'hw_watchpoint_update_all',
'_init',
'init_cpreg_list',
'init_lists',
'input_type_enum',
'int128_2_64',
'int128_add',
'int128_addto',
'int128_and',
'int128_eq',
'int128_ge',
'int128_get64',
'int128_gt',
'int128_le',
'int128_lt',
'int128_make64',
'int128_max',
'int128_min',
'int128_ne',
'int128_neg',
'int128_nz',
'int128_rshift',
'int128_sub',
'int128_subfrom',
'int128_zero',
'int16_to_float32',
'int16_to_float64',
'int32_to_float128',
'int32_to_float32',
'int32_to_float64',
'int32_to_floatx80',
'int64_to_float128',
'int64_to_float32',
'int64_to_float64',
'int64_to_floatx80',
'invalidate_and_set_dirty',
'invalidate_page_bitmap',
'io_mem_read',
'io_mem_write',
'io_readb',
'io_readl',
'io_readq',
'io_readw',
'iotlb_to_region',
'io_writeb',
'io_writel',
'io_writeq',
'io_writew',
'is_a64',
'is_help_option',
'isr_read',
'is_valid_option_list',
'iwmmxt_load_creg',
'iwmmxt_load_reg',
'iwmmxt_store_creg',
'iwmmxt_store_reg',
'__jit_debug_descriptor',
'__jit_debug_register_code',
'kvm_to_cpreg_id',
'last_ram_offset',
'ldl_be_p',
'ldl_be_phys',
'ldl_he_p',
'ldl_le_p',
'ldl_le_phys',
'ldl_phys',
'ldl_phys_internal',
'ldq_be_p',
'ldq_be_phys',
'ldq_he_p',
'ldq_le_p',
'ldq_le_phys',
'ldq_phys',
'ldq_phys_internal',
'ldst_name',
'ldub_p',
'ldub_phys',
'lduw_be_p',
'lduw_be_phys',
'lduw_he_p',
'lduw_le_p',
'lduw_le_phys',
'lduw_phys',
'lduw_phys_internal',
'le128',
'linked_bp_matches',
'listener_add_address_space',
'load_cpu_offset',
'load_reg',
'load_reg_var',
'log_cpu_state',
'lpae_cp_reginfo',
'lt128',
'machine_class_init',
'machine_finalize',
'machine_info',
'machine_initfn',
'machine_register_types',
'machvirt_init',
'machvirt_machine_init',
'maj',
'mapping_conflict',
'mapping_contiguous',
'mapping_have_same_region',
'mapping_merge',
'mem_add',
'mem_begin',
'mem_commit',
'memory_access_is_direct',
'memory_access_size',
'memory_init',
'memory_listener_match',
'memory_listener_register',
'memory_listener_unregister',
'memory_map_init',
'memory_mapping_filter',
'memory_mapping_list_add_mapping_sorted',
'memory_mapping_list_add_merge_sorted',
'memory_mapping_list_free',
'memory_mapping_list_init',
'memory_region_access_valid',
'memory_region_add_subregion',
'memory_region_add_subregion_common',
'memory_region_add_subregion_overlap',
'memory_region_big_endian',
'memory_region_clear_pending',
'memory_region_del_subregion',
'memory_region_destructor_alias',
'memory_region_destructor_none',
'memory_region_destructor_ram',
'memory_region_destructor_ram_from_ptr',
'memory_region_dispatch_read',
'memory_region_dispatch_read1',
'memory_region_dispatch_write',
'memory_region_escape_name',
'memory_region_finalize',
'memory_region_find',
'memory_region_get_addr',
'memory_region_get_alignment',
'memory_region_get_container',
'memory_region_get_fd',
'memory_region_get_may_overlap',
'memory_region_get_priority',
'memory_region_get_ram_addr',
'memory_region_get_ram_ptr',
'memory_region_get_size',
'memory_region_info',
'memory_region_init',
'memory_region_init_alias',
'memory_region_initfn',
'memory_region_init_io',
'memory_region_init_ram',
'memory_region_init_ram_ptr',
'memory_region_init_reservation',
'memory_region_is_iommu',
'memory_region_is_logging',
'memory_region_is_mapped',
'memory_region_is_ram',
'memory_region_is_rom',
'memory_region_is_romd',
'memory_region_is_skip_dump',
'memory_region_is_unassigned',
'memory_region_name',
'memory_region_need_escape',
'memory_region_oldmmio_read_accessor',
'memory_region_oldmmio_write_accessor',
'memory_region_present',
'memory_region_read_accessor',
'memory_region_readd_subregion',
'memory_region_ref',
'memory_region_resolve_container',
'memory_region_rom_device_set_romd',
'memory_region_section_get_iotlb',
'memory_region_set_address',
'memory_region_set_alias_offset',
'memory_region_set_enabled',
'memory_region_set_readonly',
'memory_region_set_skip_dump',
'memory_region_size',
'memory_region_to_address_space',
'memory_region_transaction_begin',
'memory_region_transaction_commit',
'memory_region_unref',
'memory_region_update_container_subregions',
'memory_region_write_accessor',
'memory_region_wrong_endianness',
'memory_try_enable_merging',
'module_call_init',
'module_load',
'mpidr_cp_reginfo',
'mpidr_read',
'msr_mask',
'mul128By64To192',
'mul128To256',
'mul64To128',
'muldiv64',
'neon_2rm_is_float_op',
'neon_2rm_sizes',
'neon_3r_sizes',
'neon_get_scalar',
'neon_load_reg',
'neon_load_reg64',
'neon_load_scratch',
'neon_ls_element_type',
'neon_reg_offset',
'neon_store_reg',
'neon_store_reg64',
'neon_store_scratch',
'new_ldst_label',
'next_list',
'normalizeFloat128Subnormal',
'normalizeFloat16Subnormal',
'normalizeFloat32Subnormal',
'normalizeFloat64Subnormal',
'normalizeFloatx80Subnormal',
'normalizeRoundAndPackFloat128',
'normalizeRoundAndPackFloat32',
'normalizeRoundAndPackFloat64',
'normalizeRoundAndPackFloatx80',
'not_v6_cp_reginfo',
'not_v7_cp_reginfo',
'not_v8_cp_reginfo',
'object_child_foreach',
'object_class_foreach',
'object_class_foreach_tramp',
'object_class_get_list',
'object_class_get_list_tramp',
'object_class_get_parent',
'object_deinit',
'object_dynamic_cast',
'object_finalize',
'object_finalize_child_property',
'object_get_child_property',
'object_get_link_property',
'object_get_root',
'object_initialize_with_type',
'object_init_with_type',
'object_instance_init',
'object_new_with_type',
'object_post_init_with_type',
'object_property_add_alias',
'object_property_add_link',
'object_property_add_uint16_ptr',
'object_property_add_uint32_ptr',
'object_property_add_uint64_ptr',
'object_property_add_uint8_ptr',
'object_property_allow_set_link',
'object_property_del',
'object_property_del_all',
'object_property_find',
'object_property_get',
'object_property_get_bool',
'object_property_get_int',
'object_property_get_link',
'object_property_get_qobject',
'object_property_get_str',
'object_property_get_type',
'object_property_is_child',
'object_property_set',
'object_property_set_description',
'object_property_set_link',
'object_property_set_qobject',
'object_release_link_property',
'object_resolve_abs_path',
'object_resolve_child_property',
'object_resolve_link',
'object_resolve_link_property',
'object_resolve_partial_path',
'object_resolve_path',
'object_resolve_path_component',
'object_resolve_path_type',
'object_set_link_property',
'object_unparent',
'omap_cachemaint_write',
'omap_cp_reginfo',
'omap_threadid_write',
'omap_ticonfig_write',
'omap_wfi_write',
'op_bits',
'open_modeflags',
'op_to_mov',
'op_to_movi',
'output_type_enum',
'packFloat128',
'packFloat16',
'packFloat32',
'packFloat64',
'packFloatx80',
'page_find',
'page_find_alloc',
'page_flush_tb',
'page_flush_tb_1',
'page_init',
'page_size_init',
'par',
'parse_array',
'parse_error',
'parse_escape',
'parse_keyword',
'parse_literal',
'parse_object',
'parse_optional',
'parse_option_bool',
'parse_option_number',
'parse_option_size',
'parse_pair',
'parser_context_free',
'parser_context_new',
'parser_context_peek_token',
'parser_context_pop_token',
'parser_context_restore',
'parser_context_save',
'parse_str',
'parse_type_bool',
'parse_type_int',
'parse_type_number',
'parse_type_size',
'parse_type_str',
'parse_value',
'par_write',
'patch_reloc',
'phys_map_node_alloc',
'phys_map_node_reserve',
'phys_mem_alloc',
'phys_mem_set_alloc',
'phys_page_compact',
'phys_page_compact_all',
'phys_page_find',
'phys_page_set',
'phys_page_set_level',
'phys_section_add',
'phys_section_destroy',
'phys_sections_free',
'pickNaN',
'pickNaNMulAdd',
'pmccfiltr_write',
'pmccntr_read',
'pmccntr_sync',
'pmccntr_write',
'pmccntr_write32',
'pmcntenclr_write',
'pmcntenset_write',
'pmcr_write',
'pmintenclr_write',
'pmintenset_write',
'pmovsr_write',
'pmreg_access',
'pmsav5_cp_reginfo',
'pmsav5_data_ap_read',
'pmsav5_data_ap_write',
'pmsav5_insn_ap_read',
'pmsav5_insn_ap_write',
'pmuserenr_write',
'pmxevtyper_write',
'print_type_bool',
'print_type_int',
'print_type_number',
'print_type_size',
'print_type_str',
'propagateFloat128NaN',
'propagateFloat32MulAddNaN',
'propagateFloat32NaN',
'propagateFloat64MulAddNaN',
'propagateFloat64NaN',
'propagateFloatx80NaN',
'property_get_alias',
'property_get_bool',
'property_get_str',
'property_get_uint16_ptr',
'property_get_uint32_ptr',
'property_get_uint64_ptr',
'property_get_uint8_ptr',
'property_release_alias',
'property_release_bool',
'property_release_str',
'property_resolve_alias',
'property_set_alias',
'property_set_bool',
'property_set_str',
'pstate_read',
'pstate_write',
'pxa250_initfn',
'pxa255_initfn',
'pxa260_initfn',
'pxa261_initfn',
'pxa262_initfn',
'pxa270a0_initfn',
'pxa270a1_initfn',
'pxa270b0_initfn',
'pxa270b1_initfn',
'pxa270c0_initfn',
'pxa270c5_initfn',
'qapi_dealloc_end_implicit_struct',
'qapi_dealloc_end_list',
'qapi_dealloc_end_struct',
'qapi_dealloc_get_visitor',
'qapi_dealloc_next_list',
'qapi_dealloc_pop',
'qapi_dealloc_push',
'qapi_dealloc_start_implicit_struct',
'qapi_dealloc_start_list',
'qapi_dealloc_start_struct',
'qapi_dealloc_start_union',
'qapi_dealloc_type_bool',
'qapi_dealloc_type_enum',
'qapi_dealloc_type_int',
'qapi_dealloc_type_number',
'qapi_dealloc_type_size',
'qapi_dealloc_type_str',
'qapi_dealloc_visitor_cleanup',
'qapi_dealloc_visitor_new',
'qapi_free_boolList',
'qapi_free_ErrorClassList',
'qapi_free_int16List',
'qapi_free_int32List',
'qapi_free_int64List',
'qapi_free_int8List',
'qapi_free_intList',
'qapi_free_numberList',
'qapi_free_strList',
'qapi_free_uint16List',
'qapi_free_uint32List',
'qapi_free_uint64List',
'qapi_free_uint8List',
'qapi_free_X86CPUFeatureWordInfo',
'qapi_free_X86CPUFeatureWordInfoList',
'qapi_free_X86CPURegister32List',
'qbool_destroy_obj',
'qbool_from_int',
'qbool_get_int',
'qbool_type',
'qbus_create',
'qbus_create_inplace',
'qbus_finalize',
'qbus_initfn',
'qbus_realize',
'qdev_create',
'qdev_get_type',
'qdev_register_types',
'qdev_set_parent_bus',
'qdev_try_create',
'qdict_add_key',
'qdict_array_split',
'qdict_clone_shallow',
'qdict_del',
'qdict_destroy_obj',
'qdict_entry_key',
'qdict_entry_value',
'qdict_extract_subqdict',
'qdict_find',
'qdict_first',
'qdict_flatten',
'qdict_flatten_qdict',
'qdict_flatten_qlist',
'qdict_get',
'qdict_get_bool',
'qdict_get_double',
'qdict_get_int',
'qdict_get_obj',
'qdict_get_qdict',
'qdict_get_qlist',
'qdict_get_str',
'qdict_get_try_bool',
'qdict_get_try_int',
'qdict_get_try_str',
'qdict_haskey',
'qdict_has_prefixed_entries',
'qdict_iter',
'qdict_join',
'qdict_new',
'qdict_next',
'qdict_next_entry',
'qdict_put_obj',
'qdict_size',
'qdict_type',
'qemu_clock_get_us',
'qemu_clock_ptr',
'qemu_clocks',
'qemu_get_cpu',
'qemu_get_guest_memory_mapping',
'qemu_get_guest_simple_memory_mapping',
'qemu_get_ram_block',
'qemu_get_ram_block_host_ptr',
'qemu_get_ram_fd',
'qemu_get_ram_ptr',
'qemu_host_page_mask',
'qemu_host_page_size',
'qemu_init_vcpu',
'qemu_ld_helpers',
'qemu_log_close',
'qemu_log_enabled',
'qemu_log_flush',
'qemu_loglevel_mask',
'qemu_log_vprintf',
'qemu_oom_check',
'qemu_parse_fd',
'qemu_ram_addr_from_host',
'qemu_ram_addr_from_host_nofail',
'qemu_ram_alloc',
'qemu_ram_alloc_from_ptr',
'qemu_ram_foreach_block',
'qemu_ram_free',
'qemu_ram_free_from_ptr',
'qemu_ram_ptr_length',
'qemu_ram_remap',
'qemu_ram_setup_dump',
'qemu_ram_unset_idstr',
'qemu_real_host_page_size',
'qemu_st_helpers',
'qemu_tcg_init_vcpu',
'qemu_try_memalign',
'qentry_destroy',
'qerror_human',
'qerror_report',
'qerror_report_err',
'qfloat_destroy_obj',
'qfloat_from_double',
'qfloat_get_double',
'qfloat_type',
'qint_destroy_obj',
'qint_from_int',
'qint_get_int',
'qint_type',
'qlist_append_obj',
'qlist_copy',
'qlist_copy_elem',
'qlist_destroy_obj',
'qlist_empty',
'qlist_entry_obj',
'qlist_first',
'qlist_iter',
'qlist_new',
'qlist_next',
'qlist_peek',
'qlist_pop',
'qlist_size',
'qlist_size_iter',
'qlist_type',
'qmp_input_end_implicit_struct',
'qmp_input_end_list',
'qmp_input_end_struct',
'qmp_input_get_next_type',
'qmp_input_get_object',
'qmp_input_get_visitor',
'qmp_input_next_list',
'qmp_input_optional',
'qmp_input_pop',
'qmp_input_push',
'qmp_input_start_implicit_struct',
'qmp_input_start_list',
'qmp_input_start_struct',
'qmp_input_type_bool',
'qmp_input_type_int',
'qmp_input_type_number',
'qmp_input_type_str',
'qmp_input_visitor_cleanup',
'qmp_input_visitor_new',
'qmp_input_visitor_new_strict',
'qmp_output_add_obj',
'qmp_output_end_list',
'qmp_output_end_struct',
'qmp_output_first',
'qmp_output_get_qobject',
'qmp_output_get_visitor',
'qmp_output_last',
'qmp_output_next_list',
'qmp_output_pop',
'qmp_output_push_obj',
'qmp_output_start_list',
'qmp_output_start_struct',
'qmp_output_type_bool',
'qmp_output_type_int',
'qmp_output_type_number',
'qmp_output_type_str',
'qmp_output_visitor_cleanup',
'qmp_output_visitor_new',
'qobject_decref',
'qobject_to_qbool',
'qobject_to_qdict',
'qobject_to_qfloat',
'qobject_to_qint',
'qobject_to_qlist',
'qobject_to_qstring',
'qobject_type',
'qstring_append',
'qstring_append_chr',
'qstring_append_int',
'qstring_destroy_obj',
'qstring_from_escaped_str',
'qstring_from_str',
'qstring_from_substr',
'qstring_get_length',
'qstring_get_str',
'qstring_new',
'qstring_type',
'ram_block_add',
'ram_size',
'range_compare',
'range_covers_byte',
'range_get_last',
'range_merge',
'ranges_can_merge',
'raw_read',
'raw_write',
'rcon',
'read_raw_cp_reg',
'recip_estimate',
'recip_sqrt_estimate',
'register_cp_regs_for_features',
'register_multipage',
'register_subpage',
'register_tm_clones',
'register_types_object',
'regnames',
'render_memory_region',
'reset_all_temps',
'reset_temp',
'rol32',
'rol64',
'ror32',
'ror64',
'roundAndPackFloat128',
'roundAndPackFloat16',
'roundAndPackFloat32',
'roundAndPackFloat64',
'roundAndPackFloatx80',
'roundAndPackInt32',
'roundAndPackInt64',
'roundAndPackUint64',
'round_to_inf',
'run_on_cpu',
's0',
'S0',
's1',
'S1',
'sa1100_initfn',
'sa1110_initfn',
'save_globals',
'scr_write',
'sctlr_write',
'set_bit',
'set_bits',
'set_default_nan_mode',
'set_feature',
'set_float_detect_tininess',
'set_float_exception_flags',
'set_float_rounding_mode',
'set_flush_inputs_to_zero',
'set_flush_to_zero',
'set_swi_errno',
'sextract32',
'sextract64',
'shift128ExtraRightJamming',
'shift128Right',
'shift128RightJamming',
'shift32RightJamming',
'shift64ExtraRightJamming',
'shift64RightJamming',
'shifter_out_im',
'shortShift128Left',
'shortShift192Left',
'simple_mpu_ap_bits',
'size_code_gen_buffer',
'softmmu_lock_user',
'softmmu_lock_user_string',
'softmmu_tget32',
'softmmu_tget8',
'softmmu_tput32',
'softmmu_unlock_user',
'sort_constraints',
'sp_el0_access',
'spsel_read',
'spsel_write',
'start_list',
'stb_p',
'stb_phys',
'stl_be_p',
'stl_be_phys',
'stl_he_p',
'stl_le_p',
'stl_le_phys',
'stl_phys',
'stl_phys_internal',
'stl_phys_notdirty',
'store_cpu_offset',
'store_reg',
'store_reg_bx',
'store_reg_from_load',
'stq_be_p',
'stq_be_phys',
'stq_he_p',
'stq_le_p',
'stq_le_phys',
'stq_phys',
'string_input_get_visitor',
'string_input_visitor_cleanup',
'string_input_visitor_new',
'strongarm_cp_reginfo',
'strstart',
'strtosz',
'strtosz_suffix',
'stw_be_p',
'stw_be_phys',
'stw_he_p',
'stw_le_p',
'stw_le_phys',
'stw_phys',
'stw_phys_internal',
'sub128',
'sub16_sat',
'sub16_usat',
'sub192',
'sub8_sat',
'sub8_usat',
'subFloat128Sigs',
'subFloat32Sigs',
'subFloat64Sigs',
'subFloatx80Sigs',
'subpage_accepts',
'subpage_init',
'subpage_ops',
'subpage_read',
'subpage_register',
'subpage_write',
'suffix_mul',
'swap_commutative',
'swap_commutative2',
'switch_mode',
'switch_v7m_sp',
'syn_aa32_bkpt',
'syn_aa32_hvc',
'syn_aa32_smc',
'syn_aa32_svc',
'syn_breakpoint',
'sync_globals',
'syn_cp14_rrt_trap',
'syn_cp14_rt_trap',
'syn_cp15_rrt_trap',
'syn_cp15_rt_trap',
'syn_data_abort',
'syn_fp_access_trap',
'syn_insn_abort',
'syn_swstep',
'syn_uncategorized',
'syn_watchpoint',
'syscall_err',
'system_bus_class_init',
'system_bus_info',
't2ee_cp_reginfo',
'table_logic_cc',
'target_parse_constraint',
'target_words_bigendian',
'tb_add_jump',
'tb_alloc',
'tb_alloc_page',
'tb_check_watchpoint',
'tb_find_fast',
'tb_find_pc',
'tb_find_slow',
'tb_flush',
'tb_flush_jmp_cache',
'tb_free',
'tb_gen_code',
'tb_hash_remove',
'tb_invalidate_phys_addr',
'tb_invalidate_phys_page_range',
'tb_invalidate_phys_range',
'tb_jmp_cache_hash_func',
'tb_jmp_cache_hash_page',
'tb_jmp_remove',
'tb_link_page',
'tb_page_remove',
'tb_phys_hash_func',
'tb_phys_invalidate',
'tb_reset_jump',
'tb_set_jmp_target',
'tcg_accel_class_init',
'tcg_accel_type',
'tcg_add_param_i32',
'tcg_add_param_i64',
'tcg_add_target_add_op_defs',
'tcg_allowed',
'tcg_canonicalize_memop',
'tcg_commit',
'tcg_cond_to_jcc',
'tcg_constant_folding',
'tcg_const_i32',
'tcg_const_i64',
'tcg_const_local_i32',
'tcg_const_local_i64',
'tcg_context_init',
'tcg_cpu_address_space_init',
'tcg_cpu_exec',
'tcg_current_code_size',
'tcg_dump_info',
'tcg_dump_ops',
'tcg_exec_all',
'tcg_find_helper',
'tcg_func_start',
'tcg_gen_abs_i32',
'tcg_gen_add2_i32',
'tcg_gen_add_i32',
'tcg_gen_add_i64',
'tcg_gen_addi_i32',
'tcg_gen_addi_i64',
'tcg_gen_andc_i32',
'tcg_gen_and_i32',
'tcg_gen_and_i64',
'tcg_gen_andi_i32',
'tcg_gen_andi_i64',
'tcg_gen_br',
'tcg_gen_brcond_i32',
'tcg_gen_brcond_i64',
'tcg_gen_brcondi_i32',
'tcg_gen_bswap16_i32',
'tcg_gen_bswap32_i32',
'tcg_gen_callN',
'tcg_gen_code',
'tcg_gen_code_common',
'tcg_gen_code_search_pc',
'tcg_gen_concat_i32_i64',
'tcg_gen_debug_insn_start',
'tcg_gen_deposit_i32',
'tcg_gen_exit_tb',
'tcg_gen_ext16s_i32',
'tcg_gen_ext16u_i32',
'tcg_gen_ext32s_i64',
'tcg_gen_ext32u_i64',
'tcg_gen_ext8s_i32',
'tcg_gen_ext8u_i32',
'tcg_gen_ext_i32_i64',
'tcg_gen_extu_i32_i64',
'tcg_gen_goto_tb',
'tcg_gen_ld_i32',
'tcg_gen_ld_i64',
'tcg_gen_ldst_op_i32',
'tcg_gen_ldst_op_i64',
'tcg_gen_movcond_i32',
'tcg_gen_movcond_i64',
'tcg_gen_mov_i32',
'tcg_gen_mov_i64',
'tcg_gen_movi_i32',
'tcg_gen_movi_i64',
'tcg_gen_mul_i32',
'tcg_gen_muls2_i32',
'tcg_gen_mulu2_i32',
'tcg_gen_neg_i32',
'tcg_gen_neg_i64',
'tcg_gen_not_i32',
'tcg_gen_op0',
'tcg_gen_op1i',
'tcg_gen_op2_i32',
'tcg_gen_op2_i64',
'tcg_gen_op2i_i32',
'tcg_gen_op2i_i64',
'tcg_gen_op3_i32',
'tcg_gen_op3_i64',
'tcg_gen_op4_i32',
'tcg_gen_op4i_i32',
'tcg_gen_op4ii_i32',
'tcg_gen_op4ii_i64',
'tcg_gen_op5ii_i32',
'tcg_gen_op6_i32',
'tcg_gen_op6i_i32',
'tcg_gen_op6i_i64',
'tcg_gen_orc_i32',
'tcg_gen_or_i32',
'tcg_gen_or_i64',
'tcg_gen_ori_i32',
'tcg_gen_qemu_ld_i32',
'tcg_gen_qemu_ld_i64',
'tcg_gen_qemu_st_i32',
'tcg_gen_qemu_st_i64',
'tcg_gen_rotl_i32',
'tcg_gen_rotli_i32',
'tcg_gen_rotr_i32',
'tcg_gen_rotri_i32',
'tcg_gen_sar_i32',
'tcg_gen_sari_i32',
'tcg_gen_setcond_i32',
'tcg_gen_shl_i32',
'tcg_gen_shl_i64',
'tcg_gen_shli_i32',
'tcg_gen_shli_i64',
'tcg_gen_shr_i32',
'tcg_gen_shifti_i64',
'tcg_gen_shr_i64',
'tcg_gen_shri_i32',
'tcg_gen_shri_i64',
'tcg_gen_st_i32',
'tcg_gen_st_i64',
'tcg_gen_sub_i32',
'tcg_gen_sub_i64',
'tcg_gen_subi_i32',
'tcg_gen_trunc_i64_i32',
'tcg_gen_trunc_shr_i64_i32',
'tcg_gen_xor_i32',
'tcg_gen_xor_i64',
'tcg_gen_xori_i32',
'tcg_get_arg_str_i32',
'tcg_get_arg_str_i64',
'tcg_get_arg_str_idx',
'tcg_global_mem_new_i32',
'tcg_global_mem_new_i64',
'tcg_global_mem_new_internal',
'tcg_global_reg_new_i32',
'tcg_global_reg_new_i64',
'tcg_global_reg_new_internal',
'tcg_handle_interrupt',
'tcg_init',
'tcg_invert_cond',
'tcg_la_bb_end',
'tcg_la_br_end',
'tcg_la_func_end',
'tcg_liveness_analysis',
'tcg_malloc',
'tcg_malloc_internal',
'tcg_op_defs_org',
'tcg_opt_gen_mov',
'tcg_opt_gen_movi',
'tcg_optimize',
'tcg_out16',
'tcg_out32',
'tcg_out64',
'tcg_out8',
'tcg_out_addi',
'tcg_out_branch',
'tcg_out_brcond32',
'tcg_out_brcond64',
'tcg_out_bswap32',
'tcg_out_bswap64',
'tcg_out_call',
'tcg_out_cmp',
'tcg_out_ext16s',
'tcg_out_ext16u',
'tcg_out_ext32s',
'tcg_out_ext32u',
'tcg_out_ext8s',
'tcg_out_ext8u',
'tcg_out_jmp',
'tcg_out_jxx',
'tcg_out_label',
'tcg_out_ld',
'tcg_out_modrm',
'tcg_out_modrm_offset',
'tcg_out_modrm_sib_offset',
'tcg_out_mov',
'tcg_out_movcond32',
'tcg_out_movcond64',
'tcg_out_movi',
'tcg_out_op',
'tcg_out_pop',
'tcg_out_push',
'tcg_out_qemu_ld',
'tcg_out_qemu_ld_direct',
'tcg_out_qemu_ld_slow_path',
'tcg_out_qemu_st',
'tcg_out_qemu_st_direct',
'tcg_out_qemu_st_slow_path',
'tcg_out_reloc',
'tcg_out_rolw_8',
'tcg_out_setcond32',
'tcg_out_setcond64',
'tcg_out_shifti',
'tcg_out_st',
'tcg_out_tb_finalize',
'tcg_out_tb_init',
'tcg_out_tlb_load',
'tcg_out_vex_modrm',
'tcg_patch32',
'tcg_patch8',
'tcg_pcrel_diff',
'tcg_pool_reset',
'tcg_prologue_init',
'tcg_ptr_byte_diff',
'tcg_reg_alloc',
'tcg_reg_alloc_bb_end',
'tcg_reg_alloc_call',
'tcg_reg_alloc_mov',
'tcg_reg_alloc_movi',
'tcg_reg_alloc_op',
'tcg_reg_alloc_start',
'tcg_reg_free',
'tcg_reg_sync',
'tcg_set_frame',
'tcg_set_nop',
'tcg_swap_cond',
'tcg_target_callee_save_regs',
'tcg_target_call_iarg_regs',
'tcg_target_call_oarg_regs',
'tcg_target_const_match',
'tcg_target_init',
'tcg_target_qemu_prologue',
'tcg_target_reg_alloc_order',
'tcg_temp_alloc',
'tcg_temp_free_i32',
'tcg_temp_free_i64',
'tcg_temp_free_internal',
'tcg_temp_local_new_i32',
'tcg_temp_local_new_i64',
'tcg_temp_new_i32',
'tcg_temp_new_i64',
'tcg_temp_new_internal',
'tcg_temp_new_internal_i32',
'tcg_temp_new_internal_i64',
'tdb_hash',
'teecr_write',
'teehbr_access',
'temp_allocate_frame',
'temp_dead',
'temps_are_copies',
'temp_save',
'temp_sync',
'tgen_arithi',
'tgen_arithr',
'thumb2_logic_op',
'ti925t_initfn',
'tlb_add_large_page',
'tlb_flush_entry',
'tlbi_aa64_asid_is_write',
'tlbi_aa64_asid_write',
'tlbi_aa64_vaa_is_write',
'tlbi_aa64_vaa_write',
'tlbi_aa64_va_is_write',
'tlbi_aa64_va_write',
'tlbiall_is_write',
'tlbiall_write',
'tlbiasid_is_write',
'tlbiasid_write',
'tlbimvaa_is_write',
'tlbimvaa_write',
'tlbimva_is_write',
'tlbimva_write',
'tlb_is_dirty_ram',
'tlb_protect_code',
'tlb_reset_dirty_range',
'tlb_reset_dirty_range_all',
'tlb_set_dirty',
'tlb_set_dirty1',
'tlb_unprotect_code_phys',
'tlb_vaddr_to_host',
'token_get_type',
'token_get_value',
'token_is_escape',
'token_is_keyword',
'token_is_operator',
'tokens_append_from_iter',
'to_qiv',
'to_qov',
'tosa_init',
'tosa_machine_init',
'tswap32',
'tswap64',
'type_class_get_size',
'type_get_by_name',
'type_get_parent',
'type_has_parent',
'type_initialize',
'type_initialize_interface',
'type_is_ancestor',
'type_new',
'type_object_get_size',
'type_register_internal',
'type_table_add',
'type_table_get',
'type_table_lookup',
'uint16_to_float32',
'uint16_to_float64',
'uint32_to_float32',
'uint32_to_float64',
'uint64_to_float128',
'uint64_to_float32',
'uint64_to_float64',
'unassigned_io_ops',
'unassigned_io_read',
'unassigned_io_write',
'unassigned_mem_accepts',
'unassigned_mem_ops',
'unassigned_mem_read',
'unassigned_mem_write',
'update_spsel',
'v6_cp_reginfo',
'v6k_cp_reginfo',
'v7_cp_reginfo',
'v7mp_cp_reginfo',
'v7m_pop',
'v7m_push',
'v8_cp_reginfo',
'v8_el2_cp_reginfo',
'v8_el3_cp_reginfo',
'v8_el3_no_el2_cp_reginfo',
'vapa_cp_reginfo',
'vbar_write',
'vfp_exceptbits_from_host',
'vfp_exceptbits_to_host',
'vfp_get_fpcr',
'vfp_get_fpscr',
'vfp_get_fpsr',
'vfp_reg_offset',
'vfp_set_fpcr',
'vfp_set_fpscr',
'vfp_set_fpsr',
'visit_end_implicit_struct',
'visit_end_list',
'visit_end_struct',
'visit_end_union',
'visit_get_next_type',
'visit_next_list',
'visit_optional',
'visit_start_implicit_struct',
'visit_start_list',
'visit_start_struct',
'visit_start_union',
'vmsa_cp_reginfo',
'vmsa_tcr_el1_write',
'vmsa_ttbcr_raw_write',
'vmsa_ttbcr_reset',
'vmsa_ttbcr_write',
'vmsa_ttbr_write',
'write_cpustate_to_list',
'write_list_to_cpustate',
'write_raw_cp_reg',
'X86CPURegister32_lookup',
'x86_op_defs',
'xpsr_read',
'xpsr_write',
'xscale_cpar_write',
'xscale_cp_reginfo'
)
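
# A minimal usage sketch (hypothetical; this helper is not part of the
# original file). Given one of the symbol tuples in this module and a
# target suffix, it emits one '#define' alias per symbol so that each
# architecture's build gets uniquely named globals. The function name
# and the exact output format are illustrative assumptions, not the
# generator's actual interface.
def emit_symbol_defines(names, arch):
    return '\n'.join('#define %s %s_%s' % (n, n, arch) for n in names)

# Symbols specific to the 32-bit ARM target, in addition to the common
# list above.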
arm_symbols = (
'ARM_REGS_STORAGE_SIZE',
)
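
# Symbols specific to the AArch64 target.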
aarch64_symbols = (
'ARM64_REGS_STORAGE_SIZE',
'arm64_release',
'arm64_reg_reset',
'arm64_reg_read',
'arm64_reg_write',
'gen_a64_set_pc_im',
'aarch64_cpu_register_types',
'helper_udiv64',
'helper_sdiv64',
'helper_cls64',
'helper_cls32',
'helper_rbit64',
'helper_vfp_cmps_a64',
'helper_vfp_cmpes_a64',
'helper_vfp_cmpd_a64',
'helper_vfp_cmped_a64',
'helper_vfp_mulxs',
'helper_vfp_mulxd',
'helper_simd_tbl',
'helper_neon_ceq_f64',
'helper_neon_cge_f64',
'helper_neon_cgt_f64',
'helper_recpsf_f32',
'helper_recpsf_f64',
'helper_rsqrtsf_f32',
'helper_rsqrtsf_f64',
'helper_neon_addlp_s8',
'helper_neon_addlp_u8',
'helper_neon_addlp_s16',
'helper_neon_addlp_u16',
'helper_frecpx_f32',
'helper_frecpx_f64',
'helper_fcvtx_f64_to_f32',
'helper_crc32_64',
'helper_crc32c_64',
'aarch64_cpu_do_interrupt',
)
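
# Symbols specific to the MIPS target.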
mips_symbols = (
'cpu_mips_exec',
'cpu_mips_get_random',
'cpu_mips_get_count',
'cpu_mips_store_count',
'cpu_mips_store_compare',
'cpu_mips_start_count',
'cpu_mips_stop_count',
'mips_machine_init',
'cpu_mips_kseg0_to_phys',
'cpu_mips_phys_to_kseg0',
'cpu_mips_kvm_um_phys_to_kseg0',
'mips_cpu_register_types',
'cpu_mips_init',
'cpu_state_reset',
'helper_msa_andi_b',
'helper_msa_ori_b',
'helper_msa_nori_b',
'helper_msa_xori_b',
'helper_msa_bmnzi_b',
'helper_msa_bmzi_b',
'helper_msa_bseli_b',
'helper_msa_shf_df',
'helper_msa_and_v',
'helper_msa_or_v',
'helper_msa_nor_v',
'helper_msa_xor_v',
'helper_msa_bmnz_v',
'helper_msa_bmz_v',
'helper_msa_bsel_v',
'helper_msa_addvi_df',
'helper_msa_subvi_df',
'helper_msa_ceqi_df',
'helper_msa_clei_s_df',
'helper_msa_clei_u_df',
'helper_msa_clti_s_df',
'helper_msa_clti_u_df',
'helper_msa_maxi_s_df',
'helper_msa_maxi_u_df',
'helper_msa_mini_s_df',
'helper_msa_mini_u_df',
'helper_msa_ldi_df',
'helper_msa_slli_df',
'helper_msa_srai_df',
'helper_msa_srli_df',
'helper_msa_bclri_df',
'helper_msa_bseti_df',
'helper_msa_bnegi_df',
'helper_msa_sat_s_df',
'helper_msa_sat_u_df',
'helper_msa_srari_df',
'helper_msa_srlri_df',
'helper_msa_binsli_df',
'helper_msa_binsri_df',
'helper_msa_sll_df',
'helper_msa_sra_df',
'helper_msa_srl_df',
'helper_msa_bclr_df',
'helper_msa_bset_df',
'helper_msa_bneg_df',
'helper_msa_addv_df',
'helper_msa_subv_df',
'helper_msa_max_s_df',
'helper_msa_max_u_df',
'helper_msa_min_s_df',
'helper_msa_min_u_df',
'helper_msa_max_a_df',
'helper_msa_min_a_df',
'helper_msa_ceq_df',
'helper_msa_clt_s_df',
'helper_msa_clt_u_df',
'helper_msa_cle_s_df',
'helper_msa_cle_u_df',
'helper_msa_add_a_df',
'helper_msa_adds_a_df',
'helper_msa_adds_s_df',
'helper_msa_adds_u_df',
'helper_msa_ave_s_df',
'helper_msa_ave_u_df',
'helper_msa_aver_s_df',
'helper_msa_aver_u_df',
'helper_msa_subs_s_df',
'helper_msa_subs_u_df',
'helper_msa_subsus_u_df',
'helper_msa_subsuu_s_df',
'helper_msa_asub_s_df',
'helper_msa_asub_u_df',
'helper_msa_mulv_df',
'helper_msa_div_s_df',
'helper_msa_div_u_df',
'helper_msa_mod_s_df',
'helper_msa_mod_u_df',
'helper_msa_dotp_s_df',
'helper_msa_dotp_u_df',
'helper_msa_srar_df',
'helper_msa_srlr_df',
'helper_msa_hadd_s_df',
'helper_msa_hadd_u_df',
'helper_msa_hsub_s_df',
'helper_msa_hsub_u_df',
'helper_msa_mul_q_df',
'helper_msa_mulr_q_df',
'helper_msa_sld_df',
'helper_msa_maddv_df',
'helper_msa_msubv_df',
'helper_msa_dpadd_s_df',
'helper_msa_dpadd_u_df',
'helper_msa_dpsub_s_df',
'helper_msa_dpsub_u_df',
'helper_msa_binsl_df',
'helper_msa_binsr_df',
'helper_msa_madd_q_df',
'helper_msa_msub_q_df',
'helper_msa_maddr_q_df',
'helper_msa_msubr_q_df',
'helper_msa_splat_df',
'helper_msa_pckev_df',
'helper_msa_pckod_df',
'helper_msa_ilvl_df',
'helper_msa_ilvr_df',
'helper_msa_ilvev_df',
'helper_msa_ilvod_df',
'helper_msa_vshf_df',
'helper_msa_sldi_df',
'helper_msa_splati_df',
'helper_msa_copy_s_df',
'helper_msa_copy_u_df',
'helper_msa_insert_df',
'helper_msa_insve_df',
'helper_msa_ctcmsa',
'helper_msa_cfcmsa',
'helper_msa_move_v',
'helper_msa_fill_df',
'helper_msa_nlzc_df',
'helper_msa_nloc_df',
'helper_msa_pcnt_df',
'helper_msa_fcaf_df',
'helper_msa_fcun_df',
'helper_msa_fceq_df',
'helper_msa_fcueq_df',
'helper_msa_fclt_df',
'helper_msa_fcult_df',
'helper_msa_fcle_df',
'helper_msa_fcule_df',
'helper_msa_fsaf_df',
'helper_msa_fsun_df',
'helper_msa_fseq_df',
'helper_msa_fsueq_df',
'helper_msa_fslt_df',
'helper_msa_fsult_df',
'helper_msa_fsle_df',
'helper_msa_fsule_df',
'helper_msa_fcor_df',
'helper_msa_fcune_df',
'helper_msa_fcne_df',
'helper_msa_fsor_df',
'helper_msa_fsune_df',
'helper_msa_fsne_df',
'helper_msa_fadd_df',
'helper_msa_fsub_df',
'helper_msa_fmul_df',
'helper_msa_fdiv_df',
'helper_msa_fmadd_df',
'helper_msa_fmsub_df',
'helper_msa_fexp2_df',
'helper_msa_fexdo_df',
'helper_msa_ftq_df',
'helper_msa_fmin_df',
'helper_msa_fmin_a_df',
'helper_msa_fmax_df',
'helper_msa_fmax_a_df',
'helper_msa_fclass_df',
'helper_msa_ftrunc_s_df',
'helper_msa_ftrunc_u_df',
'helper_msa_fsqrt_df',
'helper_msa_frsqrt_df',
'helper_msa_frcp_df',
'helper_msa_frint_df',
'helper_msa_flog2_df',
'helper_msa_fexupl_df',
'helper_msa_fexupr_df',
'helper_msa_ffql_df',
'helper_msa_ffqr_df',
'helper_msa_ftint_s_df',
'helper_msa_ftint_u_df',
'helper_msa_ffint_s_df',
'helper_msa_ffint_u_df',
'helper_paddsb',
'helper_paddusb',
'helper_paddsh',
'helper_paddush',
'helper_paddb',
'helper_paddh',
'helper_paddw',
'helper_psubsb',
'helper_psubusb',
'helper_psubsh',
'helper_psubush',
'helper_psubb',
'helper_psubh',
'helper_psubw',
'helper_pshufh',
'helper_packsswh',
'helper_packsshb',
'helper_packushb',
'helper_punpcklwd',
'helper_punpckhwd',
'helper_punpcklhw',
'helper_punpckhhw',
'helper_punpcklbh',
'helper_punpckhbh',
'helper_pavgh',
'helper_pavgb',
'helper_pmaxsh',
'helper_pminsh',
'helper_pmaxub',
'helper_pminub',
'helper_pcmpeqw',
'helper_pcmpgtw',
'helper_pcmpeqh',
'helper_pcmpgth',
'helper_pcmpeqb',
'helper_pcmpgtb',
'helper_psllw',
'helper_psrlw',
'helper_psraw',
'helper_psllh',
'helper_psrlh',
'helper_psrah',
'helper_pmullh',
'helper_pmulhh',
'helper_pmulhuh',
'helper_pmaddhw',
'helper_pasubub',
'helper_biadd',
'helper_pmovmskb',
'helper_absq_s_ph',
'helper_absq_s_qb',
'helper_absq_s_w',
'helper_addqh_ph',
'helper_addqh_r_ph',
'helper_addqh_r_w',
'helper_addqh_w',
'helper_adduh_qb',
'helper_adduh_r_qb',
'helper_subqh_ph',
'helper_subqh_r_ph',
'helper_subqh_r_w',
'helper_subqh_w',
'helper_addq_ph',
'helper_addq_s_ph',
'helper_addq_s_w',
'helper_addu_ph',
'helper_addu_qb',
'helper_addu_s_ph',
'helper_addu_s_qb',
'helper_subq_ph',
'helper_subq_s_ph',
'helper_subq_s_w',
'helper_subu_ph',
'helper_subu_qb',
'helper_subu_s_ph',
'helper_subu_s_qb',
'helper_subuh_qb',
'helper_subuh_r_qb',
'helper_addsc',
'helper_addwc',
'helper_modsub',
'helper_raddu_w_qb',
'helper_precr_qb_ph',
'helper_precrq_qb_ph',
'helper_precr_sra_ph_w',
'helper_precr_sra_r_ph_w',
'helper_precrq_ph_w',
'helper_precrq_rs_ph_w',
'helper_precrqu_s_qb_ph',
'helper_precequ_ph_qbl',
'helper_precequ_ph_qbr',
'helper_precequ_ph_qbla',
'helper_precequ_ph_qbra',
'helper_preceu_ph_qbl',
'helper_preceu_ph_qbr',
'helper_preceu_ph_qbla',
'helper_preceu_ph_qbra',
'helper_shll_qb',
'helper_shrl_qb',
'helper_shra_qb',
'helper_shra_r_qb',
'helper_shll_ph',
'helper_shll_s_ph',
'helper_shll_s_w',
'helper_shra_r_w',
'helper_shrl_ph',
'helper_shra_ph',
'helper_shra_r_ph',
'helper_muleu_s_ph_qbl',
'helper_muleu_s_ph_qbr',
'helper_mulq_rs_ph',
'helper_mul_ph',
'helper_mul_s_ph',
'helper_mulq_s_ph',
'helper_muleq_s_w_phl',
'helper_muleq_s_w_phr',
'helper_mulsaq_s_w_ph',
'helper_mulsa_w_ph',
'helper_dpau_h_qbl',
'helper_dpau_h_qbr',
'helper_dpsu_h_qbl',
'helper_dpsu_h_qbr',
'helper_dpa_w_ph',
'helper_dpax_w_ph',
'helper_dps_w_ph',
'helper_dpsx_w_ph',
'helper_dpaq_s_w_ph',
'helper_dpaqx_s_w_ph',
'helper_dpsq_s_w_ph',
'helper_dpsqx_s_w_ph',
'helper_dpaqx_sa_w_ph',
'helper_dpsqx_sa_w_ph',
'helper_dpaq_sa_l_w',
'helper_dpsq_sa_l_w',
'helper_maq_s_w_phl',
'helper_maq_s_w_phr',
'helper_maq_sa_w_phl',
'helper_maq_sa_w_phr',
'helper_mulq_s_w',
'helper_mulq_rs_w',
'helper_bitrev',
'helper_insv',
'helper_cmpgu_eq_qb',
'helper_cmpgu_lt_qb',
'helper_cmpgu_le_qb',
'helper_cmpu_eq_qb',
'helper_cmpu_lt_qb',
'helper_cmpu_le_qb',
'helper_cmp_eq_ph',
'helper_cmp_lt_ph',
'helper_cmp_le_ph',
'helper_pick_qb',
'helper_pick_ph',
'helper_packrl_ph',
'helper_extr_w',
'helper_extr_r_w',
'helper_extr_rs_w',
'helper_extr_s_h',
'helper_extp',
'helper_extpdp',
'helper_shilo',
'helper_mthlip',
'cpu_wrdsp',
'helper_wrdsp',
'cpu_rddsp',
'helper_rddsp',
'helper_raise_exception_err',
'helper_clo',
'helper_clz',
'helper_muls',
'helper_mulsu',
'helper_macc',
'helper_macchi',
'helper_maccu',
'helper_macchiu',
'helper_msac',
'helper_msachi',
'helper_msacu',
'helper_msachiu',
'helper_mulhi',
'helper_mulhiu',
'helper_mulshi',
'helper_mulshiu',
'helper_bitswap',
'helper_ll',
'helper_sc',
'helper_swl',
'helper_swr',
'helper_lwm',
'helper_swm',
'helper_mfc0_mvpcontrol',
'helper_mfc0_mvpconf0',
'helper_mfc0_mvpconf1',
'helper_mfc0_random',
'helper_mfc0_tcstatus',
'helper_mftc0_tcstatus',
'helper_mfc0_tcbind',
'helper_mftc0_tcbind',
'helper_mfc0_tcrestart',
'helper_mftc0_tcrestart',
'helper_mfc0_tchalt',
'helper_mftc0_tchalt',
'helper_mfc0_tccontext',
'helper_mftc0_tccontext',
'helper_mfc0_tcschedule',
'helper_mftc0_tcschedule',
'helper_mfc0_tcschefback',
'helper_mftc0_tcschefback',
'helper_mfc0_count',
'helper_mftc0_entryhi',
'helper_mftc0_cause',
'helper_mftc0_status',
'helper_mfc0_lladdr',
'helper_mfc0_watchlo',
'helper_mfc0_watchhi',
'helper_mfc0_debug',
'helper_mftc0_debug',
'helper_mtc0_index',
'helper_mtc0_mvpcontrol',
'helper_mtc0_vpecontrol',
'helper_mttc0_vpecontrol',
'helper_mftc0_vpecontrol',
'helper_mftc0_vpeconf0',
'helper_mtc0_vpeconf0',
'helper_mttc0_vpeconf0',
'helper_mtc0_vpeconf1',
'helper_mtc0_yqmask',
'helper_mtc0_vpeopt',
'helper_mtc0_entrylo0',
'helper_mtc0_tcstatus',
'helper_mttc0_tcstatus',
'helper_mtc0_tcbind',
'helper_mttc0_tcbind',
'helper_mtc0_tcrestart',
'helper_mttc0_tcrestart',
'helper_mtc0_tchalt',
'helper_mttc0_tchalt',
'helper_mtc0_tccontext',
'helper_mttc0_tccontext',
'helper_mtc0_tcschedule',
'helper_mttc0_tcschedule',
'helper_mtc0_tcschefback',
'helper_mttc0_tcschefback',
'helper_mtc0_entrylo1',
'helper_mtc0_context',
'helper_mtc0_pagemask',
'helper_mtc0_pagegrain',
'helper_mtc0_wired',
'helper_mtc0_srsconf0',
'helper_mtc0_srsconf1',
'helper_mtc0_srsconf2',
'helper_mtc0_srsconf3',
'helper_mtc0_srsconf4',
'helper_mtc0_hwrena',
'helper_mtc0_count',
'helper_mtc0_entryhi',
'helper_mttc0_entryhi',
'helper_mtc0_compare',
'helper_mtc0_status',
'helper_mttc0_status',
'helper_mtc0_intctl',
'helper_mtc0_srsctl',
'helper_mtc0_cause',
'helper_mttc0_cause',
'helper_mftc0_epc',
'helper_mftc0_ebase',
'helper_mtc0_ebase',
'helper_mttc0_ebase',
'helper_mftc0_configx',
'helper_mtc0_config0',
'helper_mtc0_config2',
'helper_mtc0_config4',
'helper_mtc0_config5',
'helper_mtc0_lladdr',
'helper_mtc0_watchlo',
'helper_mtc0_watchhi',
'helper_mtc0_xcontext',
'helper_mtc0_framemask',
'helper_mtc0_debug',
'helper_mttc0_debug',
'helper_mtc0_performance0',
'helper_mtc0_taglo',
'helper_mtc0_datalo',
'helper_mtc0_taghi',
'helper_mtc0_datahi',
'helper_mftgpr',
'helper_mftlo',
'helper_mfthi',
'helper_mftacx',
'helper_mftdsp',
'helper_mttgpr',
'helper_mttlo',
'helper_mtthi',
'helper_mttacx',
'helper_mttdsp',
'helper_dmt',
'helper_emt',
'helper_dvpe',
'helper_evpe',
'helper_fork',
'helper_yield',
'r4k_helper_tlbinv',
'r4k_helper_tlbinvf',
'r4k_helper_tlbwi',
'r4k_helper_tlbwr',
'r4k_helper_tlbp',
'r4k_helper_tlbr',
'helper_tlbwi',
'helper_tlbwr',
'helper_tlbp',
'helper_tlbr',
'helper_tlbinv',
'helper_tlbinvf',
'helper_di',
'helper_ei',
'helper_eret',
'helper_deret',
'helper_rdhwr_cpunum',
'helper_rdhwr_synci_step',
'helper_rdhwr_cc',
'helper_rdhwr_ccres',
'helper_pmon',
'helper_wait',
'mips_cpu_do_unaligned_access',
'mips_cpu_unassigned_access',
'ieee_rm',
'helper_cfc1',
'helper_ctc1',
'ieee_ex_to_mips',
'helper_float_sqrt_d',
'helper_float_sqrt_s',
'helper_float_cvtd_s',
'helper_float_cvtd_w',
'helper_float_cvtd_l',
'helper_float_cvtl_d',
'helper_float_cvtl_s',
'helper_float_cvtps_pw',
'helper_float_cvtpw_ps',
'helper_float_cvts_d',
'helper_float_cvts_w',
'helper_float_cvts_l',
'helper_float_cvts_pl',
'helper_float_cvts_pu',
'helper_float_cvtw_s',
'helper_float_cvtw_d',
'helper_float_roundl_d',
'helper_float_roundl_s',
'helper_float_roundw_d',
'helper_float_roundw_s',
'helper_float_truncl_d',
'helper_float_truncl_s',
'helper_float_truncw_d',
'helper_float_truncw_s',
'helper_float_ceill_d',
'helper_float_ceill_s',
'helper_float_ceilw_d',
'helper_float_ceilw_s',
'helper_float_floorl_d',
'helper_float_floorl_s',
'helper_float_floorw_d',
'helper_float_floorw_s',
'helper_float_abs_d',
'helper_float_abs_s',
'helper_float_abs_ps',
'helper_float_chs_d',
'helper_float_chs_s',
'helper_float_chs_ps',
'helper_float_maddf_s',
'helper_float_maddf_d',
'helper_float_msubf_s',
'helper_float_msubf_d',
'helper_float_max_s',
'helper_float_max_d',
'helper_float_maxa_s',
'helper_float_maxa_d',
'helper_float_min_s',
'helper_float_min_d',
'helper_float_mina_s',
'helper_float_mina_d',
'helper_float_rint_s',
'helper_float_rint_d',
'helper_float_class_s',
'helper_float_class_d',
'helper_float_recip_d',
'helper_float_recip_s',
'helper_float_rsqrt_d',
'helper_float_rsqrt_s',
'helper_float_recip1_d',
'helper_float_recip1_s',
'helper_float_recip1_ps',
'helper_float_rsqrt1_d',
'helper_float_rsqrt1_s',
'helper_float_rsqrt1_ps',
'helper_float_add_d',
'helper_float_add_s',
'helper_float_add_ps',
'helper_float_sub_d',
'helper_float_sub_s',
'helper_float_sub_ps',
'helper_float_mul_d',
'helper_float_mul_s',
'helper_float_mul_ps',
'helper_float_div_d',
'helper_float_div_s',
'helper_float_div_ps',
'helper_float_madd_d',
'helper_float_madd_s',
'helper_float_madd_ps',
'helper_float_msub_d',
'helper_float_msub_s',
'helper_float_msub_ps',
'helper_float_nmadd_d',
'helper_float_nmadd_s',
'helper_float_nmadd_ps',
'helper_float_nmsub_d',
'helper_float_nmsub_s',
'helper_float_nmsub_ps',
'helper_float_recip2_d',
'helper_float_recip2_s',
'helper_float_recip2_ps',
'helper_float_rsqrt2_d',
'helper_float_rsqrt2_s',
'helper_float_rsqrt2_ps',
'helper_float_addr_ps',
'helper_float_mulr_ps',
'helper_cmp_d_f',
'helper_cmpabs_d_f',
'helper_cmp_d_un',
'helper_cmpabs_d_un',
'helper_cmp_d_eq',
'helper_cmpabs_d_eq',
'helper_cmp_d_ueq',
'helper_cmpabs_d_ueq',
'helper_cmp_d_olt',
'helper_cmpabs_d_olt',
'helper_cmp_d_ult',
'helper_cmpabs_d_ult',
'helper_cmp_d_ole',
'helper_cmpabs_d_ole',
'helper_cmp_d_ule',
'helper_cmpabs_d_ule',
'helper_cmp_d_sf',
'helper_cmpabs_d_sf',
'helper_cmp_d_ngle',
'helper_cmpabs_d_ngle',
'helper_cmp_d_seq',
'helper_cmpabs_d_seq',
'helper_cmp_d_ngl',
'helper_cmpabs_d_ngl',
'helper_cmp_d_lt',
'helper_cmpabs_d_lt',
'helper_cmp_d_nge',
'helper_cmpabs_d_nge',
'helper_cmp_d_le',
'helper_cmpabs_d_le',
'helper_cmp_d_ngt',
'helper_cmpabs_d_ngt',
'helper_cmp_s_f',
'helper_cmpabs_s_f',
'helper_cmp_s_un',
'helper_cmpabs_s_un',
'helper_cmp_s_eq',
'helper_cmpabs_s_eq',
'helper_cmp_s_ueq',
'helper_cmpabs_s_ueq',
'helper_cmp_s_olt',
'helper_cmpabs_s_olt',
'helper_cmp_s_ult',
'helper_cmpabs_s_ult',
'helper_cmp_s_ole',
'helper_cmpabs_s_ole',
'helper_cmp_s_ule',
'helper_cmpabs_s_ule',
'helper_cmp_s_sf',
'helper_cmpabs_s_sf',
'helper_cmp_s_ngle',
'helper_cmpabs_s_ngle',
'helper_cmp_s_seq',
'helper_cmpabs_s_seq',
'helper_cmp_s_ngl',
'helper_cmpabs_s_ngl',
'helper_cmp_s_lt',
'helper_cmpabs_s_lt',
'helper_cmp_s_nge',
'helper_cmpabs_s_nge',
'helper_cmp_s_le',
'helper_cmpabs_s_le',
'helper_cmp_s_ngt',
'helper_cmpabs_s_ngt',
'helper_cmp_ps_f',
'helper_cmpabs_ps_f',
'helper_cmp_ps_un',
'helper_cmpabs_ps_un',
'helper_cmp_ps_eq',
'helper_cmpabs_ps_eq',
'helper_cmp_ps_ueq',
'helper_cmpabs_ps_ueq',
'helper_cmp_ps_olt',
'helper_cmpabs_ps_olt',
'helper_cmp_ps_ult',
'helper_cmpabs_ps_ult',
'helper_cmp_ps_ole',
'helper_cmpabs_ps_ole',
'helper_cmp_ps_ule',
'helper_cmpabs_ps_ule',
'helper_cmp_ps_sf',
'helper_cmpabs_ps_sf',
'helper_cmp_ps_ngle',
'helper_cmpabs_ps_ngle',
'helper_cmp_ps_seq',
'helper_cmpabs_ps_seq',
'helper_cmp_ps_ngl',
'helper_cmpabs_ps_ngl',
'helper_cmp_ps_lt',
'helper_cmpabs_ps_lt',
'helper_cmp_ps_nge',
'helper_cmpabs_ps_nge',
'helper_cmp_ps_le',
'helper_cmpabs_ps_le',
'helper_cmp_ps_ngt',
'helper_cmpabs_ps_ngt',
'helper_r6_cmp_d_af',
'helper_r6_cmp_d_un',
'helper_r6_cmp_d_eq',
'helper_r6_cmp_d_ueq',
'helper_r6_cmp_d_lt',
'helper_r6_cmp_d_ult',
'helper_r6_cmp_d_le',
'helper_r6_cmp_d_ule',
'helper_r6_cmp_d_saf',
'helper_r6_cmp_d_sun',
'helper_r6_cmp_d_seq',
'helper_r6_cmp_d_sueq',
'helper_r6_cmp_d_slt',
'helper_r6_cmp_d_sult',
'helper_r6_cmp_d_sle',
'helper_r6_cmp_d_sule',
'helper_r6_cmp_d_or',
'helper_r6_cmp_d_une',
'helper_r6_cmp_d_ne',
'helper_r6_cmp_d_sor',
'helper_r6_cmp_d_sune',
'helper_r6_cmp_d_sne',
'helper_r6_cmp_s_af',
'helper_r6_cmp_s_un',
'helper_r6_cmp_s_eq',
'helper_r6_cmp_s_ueq',
'helper_r6_cmp_s_lt',
'helper_r6_cmp_s_ult',
'helper_r6_cmp_s_le',
'helper_r6_cmp_s_ule',
'helper_r6_cmp_s_saf',
'helper_r6_cmp_s_sun',
'helper_r6_cmp_s_seq',
'helper_r6_cmp_s_sueq',
'helper_r6_cmp_s_slt',
'helper_r6_cmp_s_sult',
'helper_r6_cmp_s_sle',
'helper_r6_cmp_s_sule',
'helper_r6_cmp_s_or',
'helper_r6_cmp_s_une',
'helper_r6_cmp_s_ne',
'helper_r6_cmp_s_sor',
'helper_r6_cmp_s_sune',
'helper_r6_cmp_s_sne',
'helper_msa_ld_df',
'helper_msa_st_df',
'no_mmu_map_address',
'fixed_mmu_map_address',
'r4k_map_address',
'mips_cpu_get_phys_page_debug',
'mips_cpu_handle_mmu_fault',
'cpu_mips_translate_address',
'exception_resume_pc',
'mips_cpu_do_interrupt',
'mips_cpu_exec_interrupt',
'r4k_invalidate_tlb',
'helper_absq_s_ob',
'helper_absq_s_qh',
'helper_absq_s_pw',
'helper_adduh_ob',
'helper_adduh_r_ob',
'helper_subuh_ob',
'helper_subuh_r_ob',
'helper_addq_pw',
'helper_addq_qh',
'helper_addq_s_pw',
'helper_addq_s_qh',
'helper_addu_ob',
'helper_addu_qh',
'helper_addu_s_ob',
'helper_addu_s_qh',
'helper_subq_pw',
'helper_subq_qh',
'helper_subq_s_pw',
'helper_subq_s_qh',
'helper_subu_ob',
'helper_subu_qh',
'helper_subu_s_ob',
'helper_subu_s_qh',
'helper_raddu_l_ob',
'helper_precr_ob_qh',
'helper_precr_sra_qh_pw',
'helper_precr_sra_r_qh_pw',
'helper_precrq_ob_qh',
'helper_precrq_qh_pw',
'helper_precrq_rs_qh_pw',
'helper_precrq_pw_l',
'helper_precrqu_s_ob_qh',
'helper_preceq_pw_qhl',
'helper_preceq_pw_qhr',
'helper_preceq_pw_qhla',
'helper_preceq_pw_qhra',
'helper_precequ_qh_obl',
'helper_precequ_qh_obr',
'helper_precequ_qh_obla',
'helper_precequ_qh_obra',
'helper_preceu_qh_obl',
'helper_preceu_qh_obr',
'helper_preceu_qh_obla',
'helper_preceu_qh_obra',
'helper_shll_ob',
'helper_shrl_ob',
'helper_shra_ob',
'helper_shra_r_ob',
'helper_shll_qh',
'helper_shll_s_qh',
'helper_shrl_qh',
'helper_shra_qh',
'helper_shra_r_qh',
'helper_shll_pw',
'helper_shll_s_pw',
'helper_shra_pw',
'helper_shra_r_pw',
'helper_muleu_s_qh_obl',
'helper_muleu_s_qh_obr',
'helper_mulq_rs_qh',
'helper_muleq_s_pw_qhl',
'helper_muleq_s_pw_qhr',
'helper_mulsaq_s_w_qh',
'helper_dpau_h_obl',
'helper_dpau_h_obr',
'helper_dpsu_h_obl',
'helper_dpsu_h_obr',
'helper_dpa_w_qh',
'helper_dpaq_s_w_qh',
'helper_dps_w_qh',
'helper_dpsq_s_w_qh',
'helper_dpaq_sa_l_pw',
'helper_dpsq_sa_l_pw',
'helper_mulsaq_s_l_pw',
'helper_maq_s_w_qhll',
'helper_maq_s_w_qhlr',
'helper_maq_s_w_qhrl',
'helper_maq_s_w_qhrr',
'helper_maq_sa_w_qhll',
'helper_maq_sa_w_qhlr',
'helper_maq_sa_w_qhrl',
'helper_maq_sa_w_qhrr',
'helper_maq_s_l_pwl',
'helper_maq_s_l_pwr',
'helper_dmadd',
'helper_dmaddu',
'helper_dmsub',
'helper_dmsubu',
'helper_dinsv',
'helper_cmpgu_eq_ob',
'helper_cmpgu_lt_ob',
'helper_cmpgu_le_ob',
'helper_cmpu_eq_ob',
'helper_cmpu_lt_ob',
'helper_cmpu_le_ob',
'helper_cmp_eq_qh',
'helper_cmp_lt_qh',
'helper_cmp_le_qh',
'helper_cmp_eq_pw',
'helper_cmp_lt_pw',
'helper_cmp_le_pw',
'helper_cmpgdu_eq_ob',
'helper_cmpgdu_lt_ob',
'helper_cmpgdu_le_ob',
'helper_pick_ob',
'helper_pick_qh',
'helper_pick_pw',
'helper_packrl_pw',
'helper_dextr_w',
'helper_dextr_r_w',
'helper_dextr_rs_w',
'helper_dextr_l',
'helper_dextr_r_l',
'helper_dextr_rs_l',
'helper_dextr_s_h',
'helper_dextp',
'helper_dextpdp',
'helper_dshilo',
'helper_dmthlip',
'helper_dclo',
'helper_dclz',
'helper_dbitswap',
'helper_lld',
'helper_scd',
'helper_sdl',
'helper_sdr',
'helper_ldm',
'helper_sdm',
'helper_dmfc0_tcrestart',
'helper_dmfc0_tchalt',
'helper_dmfc0_tccontext',
'helper_dmfc0_tcschedule',
'helper_dmfc0_tcschefback',
'helper_dmfc0_lladdr',
'helper_dmfc0_watchlo',
'helper_dmtc0_entrylo0',
'helper_dmtc0_entrylo1',
'mips_reg_reset',
'mips_reg_read',
'mips_reg_write',
'mips_tcg_init',
'mips_cpu_list',
'mips_release',
'MIPS64_REGS_STORAGE_SIZE',
'MIPS_REGS_STORAGE_SIZE'
)
sparc_symbols = (
'cpu_sparc_exec',
'helper_compute_psr',
'helper_compute_C_icc',
'cpu_sparc_init',
'cpu_sparc_set_id',
'sparc_cpu_register_types',
'helper_fadds',
'helper_faddd',
'helper_faddq',
'helper_fsubs',
'helper_fsubd',
'helper_fsubq',
'helper_fmuls',
'helper_fmuld',
'helper_fmulq',
'helper_fdivs',
'helper_fdivd',
'helper_fdivq',
'helper_fsmuld',
'helper_fdmulq',
'helper_fnegs',
'helper_fitos',
'helper_fitod',
'helper_fitoq',
'helper_fdtos',
'helper_fstod',
'helper_fqtos',
'helper_fstoq',
'helper_fqtod',
'helper_fdtoq',
'helper_fstoi',
'helper_fdtoi',
'helper_fqtoi',
'helper_fabss',
'helper_fsqrts',
'helper_fsqrtd',
'helper_fsqrtq',
'helper_fcmps',
'helper_fcmpd',
'helper_fcmpes',
'helper_fcmped',
'helper_fcmpq',
'helper_fcmpeq',
'helper_ldfsr',
'helper_debug',
'helper_udiv_cc',
'helper_sdiv_cc',
'helper_taddcctv',
'helper_tsubcctv',
'sparc_cpu_do_interrupt',
'helper_check_align',
'helper_ld_asi',
'helper_st_asi',
'helper_cas_asi',
'helper_ldqf',
'helper_stqf',
'sparc_cpu_unassigned_access',
'sparc_cpu_do_unaligned_access',
'sparc_cpu_handle_mmu_fault',
'dump_mmu',
'sparc_cpu_get_phys_page_debug',
'sparc_reg_reset',
'sparc_reg_read',
'sparc_reg_write',
'gen_intermediate_code_init',
'cpu_set_cwp',
'cpu_get_psr',
'cpu_put_psr',
'cpu_cwp_inc',
'cpu_cwp_dec',
'helper_save',
'helper_restore')
if __name__ == '__main__':
arch = sys.argv[1]
print("/* Autogen header for Unicorn Engine - DONOT MODIFY */")
print("#ifndef UNICORN_AUTOGEN_%s_H" %arch.upper())
print("#define UNICORN_AUTOGEN_%s_H" %arch.upper())
for s in symbols:
print("#define %s %s_%s" %(s, s, arch))
if 'arm' in arch:
for s in arm_symbols:
print("#define %s %s_%s" %(s, s, arch))
if 'aarch64' in arch:
for s in aarch64_symbols:
print("#define %s %s_%s" %(s, s, arch))
if 'mips' in arch:
for s in mips_symbols:
print("#define %s %s_%s" %(s, s, arch))
if 'sparc' in arch:
for s in sparc_symbols:
print("#define %s %s_%s" %(s, s, arch))
print("#endif")
| gpl-2.0 | 2,159,049,785,083,688,200 | -5,052,414,780,174,093,000 | 24.908446 | 65 | 0.596886 | false |
GoeGaming/lutris | lutris/runners/steam.py | 1 | 6910 | import os
import time
import subprocess
from lutris.runners.runner import Runner
from lutris.gui.dialogs import NoticeDialog
from lutris.thread import LutrisThread
from lutris.util.log import logger
from lutris.util import system
from lutris.util.steam import (get_path_from_appmanifest, read_config,
get_default_acf, to_vdf)
def shutdown():
"""Cleanly quit Steam"""
logger.debug("Shutting down Steam")
if is_running():
subprocess.call(['steam', '-shutdown'])
def get_steam_pid():
"""Return pid of Steam process"""
return system.get_pid('steam$')
def kill():
"""Force quit Steam"""
system.kill_pid(get_steam_pid())
def is_running():
"""Checks if Steam is running"""
return bool(get_steam_pid())
class steam(Runner):
""" Runs Steam for Linux games """
human_name = "Steam"
platform = "Steam for Linux"
game_options = [
{
"option": 'appid',
'label': "Application ID",
"type": "string",
'help': ("The application ID can be retrieved from the game's "
"page at steampowered.com. Example: 235320 is the "
"app ID for <i>Original War</i> in: \n"
"http://store.steampowered.com/app/<b>235320</b>/")
}
]
runner_options = [
{
'option': 'quit_steam_on_exit',
'label': "Stop Steam after game exits",
'type': 'bool',
'default': False,
'help': ("Quit Steam after the game has quit\n"
"(only if it was started by Lutris)")
}
]
system_options_override = [
{
'option': 'disable_runtime',
'default': True,
}
]
def __init__(self, config=None):
super(steam, self).__init__(config)
self.own_game_remove_method = "Remove game data (through Steam)"
self.no_game_remove_warning = True
self.original_steampid = None
@property
def browse_dir(self):
"""Return the path to open with the Browse Files action."""
if not self.is_installed():
installed = self.install_dialog()
if not installed:
return False
return self.game_path
@property
def steam_config(self):
"""Return the "Steam" part of Steam's config.vdf as a dict"""
if not self.steam_data_dir:
return
return read_config(self.steam_data_dir)
@property
def game_path(self):
appid = self.game_config.get('appid')
for apps_path in self.get_steamapps_dirs():
game_path = get_path_from_appmanifest(apps_path, appid)
if game_path:
return game_path
logger.warning("Data path for SteamApp %s not found.", appid)
@property
def steam_exe(self):
"""Return Steam exe's path"""
return 'steam'
@property
def steam_data_dir(self):
"""Return dir where Steam files lie"""
candidates = (
"~/.local/share/Steam/",
"~/.local/share/steam/",
"~/.steam/",
"~/.Steam/",
)
for candidate in candidates:
path = os.path.expanduser(candidate)
if os.path.exists(path):
return path
def get_game_path_from_appid(self, appid):
"""Return the game directory"""
for apps_path in self.get_steamapps_dirs():
game_path = get_path_from_appmanifest(apps_path, appid)
if game_path:
return game_path
logger.warning("Data path for SteamApp %s not found.", appid)
def get_steamapps_dirs(self):
"""Return a list of the Steam library main + custom folders."""
dirs = []
# Main steamapps dir
if self.steam_data_dir:
main_dir = os.path.join(self.steam_data_dir, 'SteamApps')
main_dir = system.fix_path_case(main_dir)
if main_dir:
dirs.append(main_dir)
# Custom dirs
steam_config = self.steam_config
if steam_config:
i = 1
while ('BaseInstallFolder_%s' % i) in steam_config:
path = steam_config['BaseInstallFolder_%s' % i] + '/SteamApps'
path = system.fix_path_case(path)
if path:
dirs.append(path)
i += 1
return dirs
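    # Illustrative return value of get_steamapps_dirs() (paths are
    # assumptions, not guarantees):
    #   ['/home/user/.local/share/Steam/SteamApps',
    #    '/mnt/games/SteamApps']
    # i.e. the main library first, then one entry per
    # BaseInstallFolder_N found in Steam's config.vdf.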
def install(self):
message = "Steam for Linux installation is not handled by Lutris.\n" \
"Please go to " \
"<a href='http://steampowered.com'>http://steampowered.com</a>" \
" or install Steam with the package provided by your distribution."
NoticeDialog(message)
def is_installed(self):
return bool(system.find_executable(self.steam_exe))
def install_game(self, appid):
logger.debug("Installing steam game %s", appid)
acf_data = get_default_acf(appid, appid)
acf_content = to_vdf(acf_data)
steamapps_dirs = self.get_steamapps_dirs()
acf_path = os.path.join(steamapps_dirs[0], "appmanifest_%s.acf" % appid)
with open(acf_path, "w") as acf_file:
acf_file.write(acf_content)
if is_running():
shutdown()
time.sleep(5)
else:
logger.debug("Steam not running")
subprocess.Popen(["steam", "steam://preload/%s" % appid])
    def prelaunch(self):
        from lutris.runners import winesteam
        if winesteam.is_running():
            # Ask for a clean shutdown first, then fall back to killing
            # the process if Steam is still up.
            winesteam.shutdown()
            time.sleep(2)
            if winesteam.is_running():
                logger.info("Steam does not shutdown, killing it...")
                winesteam.kill()
                time.sleep(2)
                if winesteam.is_running():
                    logger.error("Failed to shutdown Steam for Windows :(")
                    return False
        else:
            logger.debug("winesteam not running")
def play(self):
# Get current steam pid to act as the root pid instead of lutris
self.original_steampid = get_steam_pid()
appid = self.game_config.get('appid')
return {
'command': [self.steam_exe, 'steam://rungameid/%s' % appid],
'rootpid': self.original_steampid
}
def stop(self):
if self.runner_config.get('quit_steam_on_exit') \
and not self.original_steampid:
shutdown()
def remove_game_data(self, **kwargs):
if not self.is_installed():
installed = self.install_dialog()
if not installed:
return False
appid = self.game_config.get('appid')
logger.debug("Launching Wine Steam uninstall of game %s" % appid)
command = [self.steam_exe, 'steam://uninstall/%s' % appid]
thread = LutrisThread(command, runner=self)
thread.start()
| gpl-3.0 | -767,053,506,306,585,600 | -1,497,304,380,084,324,900 | 32.062201 | 80 | 0.554414 | false |
woddx/privacyidea | tests/test_lib_tokens_motp.py | 3 | 7378 | """
This test file tests the lib.tokens.motptoken
This depends on lib.tokenclass
"""
from .base import MyTestCase
from privacyidea.lib.tokens.motptoken import MotpTokenClass
from privacyidea.lib.tokens.mOTP import mTimeOtp
from privacyidea.models import Token
from privacyidea.lib.resolver import save_resolver
from privacyidea.lib.realm import set_realm
from privacyidea.lib.user import User
PWFILE = "tests/testdata/passwords"
class MotpTokenTestCase(MyTestCase):
otppin = "topsecret"
motppin = "1234"
serial1 = "ser1"
serial2 = "ser2"
resolvername1 = "resolver1"
resolvername2 = "Resolver2"
resolvername3 = "reso3"
realm1 = "realm1"
realm2 = "realm2"
def test_00_create_user_realm(self):
rid = save_resolver({"resolver": self.resolvername1,
"type": "passwdresolver",
"fileName": PWFILE})
self.assertTrue(rid > 0, rid)
(added, failed) = set_realm(self.realm1,
[self.resolvername1])
self.assertTrue(len(failed) == 0)
self.assertTrue(len(added) == 1)
user = User(login="root",
realm=self.realm1,
resolver=self.resolvername1)
user_str = "%s" % user
self.assertTrue(user_str == "<root.resolver1@realm1>", user_str)
self.assertFalse(user.is_empty())
self.assertTrue(User().is_empty())
user_repr = "%r" % user
expected = "User(login='root', realm='realm1', resolver='resolver1')"
self.assertTrue(user_repr == expected, user_repr)
def test_01_create_token(self):
db_token = Token(self.serial1, tokentype="motp")
db_token.save()
token = MotpTokenClass(db_token)
token.update({"otpkey": "909a4d4ba980b2c6",
"motppin": self.motppin,
"pin": self.otppin})
self.assertTrue(token.token.serial == self.serial1, token)
self.assertTrue(token.token.tokentype == "motp", token.token.tokentype)
self.assertTrue(token.type == "motp", token)
class_prefix = token.get_class_prefix()
self.assertTrue(class_prefix == "PIMO", class_prefix)
self.assertTrue(token.get_class_type() == "motp", token)
def test_02_check_password(self):
db_token = Token.query.filter(Token.serial == self.serial1).first()
token = MotpTokenClass(db_token)
# Wrong OTP value
r = token.check_otp("aba73b")
self.assertTrue(r == -1, r)
# check pin+otp:
token.set_pin(self.otppin)
r = token.authenticate("%saba73b" % self.otppin)
self.assertTrue(r[0], r)
self.assertTrue(r[1] == -1, r)
def test_03_enroll_genkey(self):
db_token = Token(self.serial2, tokentype="motp")
db_token.save()
token = MotpTokenClass(db_token)
token.update({"genkey": "1",
"motppin": self.motppin,
"pin": self.otppin})
db_token = Token.query.filter(Token.serial == self.serial2).first()
token = MotpTokenClass(db_token)
# check that the userpin is set
self.assertTrue(token.token.user_pin, token.token.user_pin)
# check that the otp value is set
self.assertTrue(token.token.key_enc, token.token.key_enc)
def test_16_init_detail(self):
db_token = Token.query.filter_by(serial=self.serial2).first()
token = MotpTokenClass(db_token)
token.add_init_details("otpkey", "11223344556677889900")
token.set_user(User(login="cornelius",
realm=self.realm1))
token.save()
self.assertTrue(token.token.resolver_type == "passwdresolver",
token.token.resolver_type)
self.assertTrue(token.token.resolver == self.resolvername1,
token.token.resolver)
self.assertTrue(token.token.user_id == "1000",
token.token.user_id)
user_object = token.get_user()
self.assertTrue(user_object.login == "cornelius",
user_object)
self.assertTrue(user_object.resolver == self.resolvername1,
user_object)
detail = token.get_init_detail()
self.assertTrue("otpkey" in detail, detail)
# but the otpkey must not be written to token.token.info (DB)
# As this only writes the OTPkey to the internal init_details dict
self.assertTrue("otpkey" not in token.token.get_info(),
token.token.get_info())
# Now get the Token2 URL, which we only
# get, if a user is specified.
detail = token.get_init_detail(user=User("cornelius",
self.realm1))
self.assertTrue("otpkey" in detail, detail)
otpkey = detail.get("otpkey")
self.assertTrue("img" in otpkey, otpkey)
self.assertTrue("motpurl" in detail, detail)
motpurl = detail.get("motpurl").get("value")
self.assertTrue(motpurl == 'motp://privacyidea:mylabel?'
'secret=11223344556677889900', motpurl)
self.assertRaises(Exception, token.set_init_details, "unvalid value")
token.set_init_details({"detail1": "value1"})
self.assertTrue("detail1" in token.get_init_details(),
token.get_init_details())
def test_04_class_methods(self):
db_token = Token.query.filter(Token.serial == self.serial1).first()
token = MotpTokenClass(db_token)
info = token.get_class_info()
self.assertTrue(info.get("title") == "mOTP Token", info.get(
'title'))
info = token.get_class_info("title")
self.assertTrue(info == "mOTP Token", info)
def test_05_test_vector(self):
# Testvector from
# https://github.com/neush/otpn900/blob/master/src/test_motp.c
key = "0123456789abcdef"
epoch = [ 129612120, 129612130, 0, 4, 129612244, 129612253]
pins = ["6666", "6666", "1", "1", "77777777", "77777777"]
otps = ["6ed4e4", "502a59", "bd94a4", "fb596e", "7abf75", "4d4ac4"]
i = 0
motp1 = mTimeOtp(key=key, pin=pins[0])
for e in epoch:
pin = pins[i]
otp = otps[i]
sotp = motp1.calcOtp(e, key, pin)
self.assertTrue(sotp == otp, "%s==%s" % (sotp, otp))
i += 1
def test_06_reuse_otp_value(self):
key = "0123456789abcdef"
db_token = Token("motp002", tokentype="motp")
db_token.save()
token = MotpTokenClass(db_token)
token.update({"otpkey": key,
"motppin": "6666",
"pin": "test"})
self.assertTrue(token.token.tokentype == "motp", token.token.tokentype)
self.assertTrue(token.type == "motp", token)
class_prefix = token.get_class_prefix()
self.assertTrue(class_prefix == "PIMO", class_prefix)
self.assertTrue(token.get_class_type() == "motp", token)
# Correct OTP value
r = token.check_otp("6ed4e4", options={"initTime": 129612120})
self.assertTrue(r == 129612120, r)
# Check the same value again
r = token.check_otp("6ed4e4", options={"initTime": 129612120})
self.assertTrue(r == -1, r)
| agpl-3.0 | -2,982,024,834,001,035,000 | -7,206,385,818,940,207,000 | 38.244681 | 79 | 0.583763 | false |
VisionInternet/visionLiveSDK-Python | setup.py | 1 | 1303 | # -*- coding: utf-8 -*-
##############################################################################
#
# Python visionLive API
# Copyright 2017 Vision Internet (http://www.visioninternet.com)
#
##############################################################################
from setuptools import setup, find_packages
import os
import re
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
def get_version():
init = read(os.path.join('visionLiveSDK', '__init__.py'))
return re.search("__version__ = '([0-9.]*)'", init).group(1)
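# get_version() above expects visionLiveSDK/__init__.py to contain a
# line of the form (the version number shown is an assumption):
#
#   __version__ = '1.0'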
setup(name='visionLiveSDK',
version=get_version(),
description='SDK for visionLive API ',
long_description=read('README.md'),
author='Vision Internet',
author_email='yding@visioninternet.com',
url='https://github.com/VisionInternet/',
packages=find_packages(),
classifiers=[
        'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
],
license='AGPLv3+',
use_2to3=True,
) | gpl-3.0 | -293,329,438,904,112,900 | 4,844,932,858,029,863,000 | 33.315789 | 93 | 0.561013 | false |
zjutjsj1004/third | boost/tools/build/src/tools/rc.py | 1 | 7483 | # Status: being ported by Steven Watanabe
# Base revision: 47077
#
# Copyright (C) Andre Hentz 2003. Permission to copy, use, modify, sell and
# distribute this software is granted provided this copyright notice appears in
# all copies. This software is provided "as is" without express or implied
# warranty, and with no claim as to its suitability for any purpose.
#
# Copyright (c) 2006 Rene Rivera.
#
# Copyright (c) 2008 Steven Watanabe
#
# Use, modification and distribution is subject to the Boost Software
# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
# http://www.boost.org/LICENSE_1_0.txt)
##import type ;
##import generators ;
##import feature ;
##import errors ;
##import scanner ;
##import toolset : flags ;
import os.path
import re
import bjam
from b2.build import type, toolset, generators, scanner, feature
from b2.exceptions import AlreadyDefined
from b2.tools import builtin
from b2.util import regex
from b2.build.toolset import flags
from b2.manager import get_manager
from b2.util import utility
__debug = None
def debug():
global __debug
if __debug is None:
__debug = "--debug-configuration" in bjam.variable("ARGV")
return __debug
type.register('RC', ['rc'])
def init():
pass
def configure (command = None, condition = None, options = None):
"""
Configures a new resource compilation command specific to a condition,
usually a toolset selection condition. The possible options are:
* <rc-type>(rc|windres) - Indicates the type of options the command
accepts.
Even though the arguments are all optional, only when a command, condition,
and at minimum the rc-type option are given will the command be configured.
    This is so that callers don't have to check auto-configuration values
    before calling this, while still getting build failures when the
    resource compiler can't be found.
"""
rc_type = feature.get_values('<rc-type>', options)
if rc_type:
assert(len(rc_type) == 1)
rc_type = rc_type[0]
if command and condition and rc_type:
flags('rc.compile.resource', '.RC', condition, command)
flags('rc.compile.resource', '.RC_TYPE', condition, rc_type.lower())
flags('rc.compile.resource', 'DEFINES', [], ['<define>'])
flags('rc.compile.resource', 'INCLUDES', [], ['<include>'])
if debug():
print 'notice: using rc compiler ::', condition, '::', command
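# Hedged usage sketch for configure(); the toolset condition and the
# windres command are made-up examples, and only the '<rc-type>' values
# 'rc' and 'windres' are meaningful here:
#
#   configure(command='windres',
#             condition=['<toolset>gcc'],
#             options=['<rc-type>windres'])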
engine = get_manager().engine()
class RCAction:
"""Class representing bjam action defined from Python.
The function must register the action to execute."""
def __init__(self, action_name, function):
self.action_name = action_name
self.function = function
def __call__(self, targets, sources, property_set):
if self.function:
self.function(targets, sources, property_set)
# FIXME: What is the proper way to dispatch actions?
def rc_register_action(action_name, function = None):
global engine
if engine.actions.has_key(action_name):
raise AlreadyDefined("Bjam action %s is already defined" % action_name)
engine.actions[action_name] = RCAction(action_name, function)
def rc_compile_resource(targets, sources, properties):
rc_type = bjam.call('get-target-variable', targets, '.RC_TYPE')
global engine
engine.set_update_action('rc.compile.resource.' + rc_type, targets, sources, properties)
rc_register_action('rc.compile.resource', rc_compile_resource)
engine.register_action(
'rc.compile.resource.rc',
'"$(.RC)" -l 0x409 "-U$(UNDEFS)" "-D$(DEFINES)" -I"$(>:D)" -I"$(<:D)" -I"$(INCLUDES)" -fo "$(<)" "$(>)"')
engine.register_action(
'rc.compile.resource.windres',
'"$(.RC)" "-U$(UNDEFS)" "-D$(DEFINES)" -I"$(>:D)" -I"$(<:D)" -I"$(INCLUDES)" -o "$(<)" -i "$(>)"')
# FIXME: this was originally declared quietly
engine.register_action(
'compile.resource.null',
'as /dev/null -o "$(<)"')
# Since it's a common practice to write
# exe hello : hello.cpp hello.rc
# we change the name of object created from RC file, to
# avoid conflict with hello.cpp.
# The reason we generate OBJ and not RES, is that gcc does not
# seem to like RES files, but works OK with OBJ.
# See http://article.gmane.org/gmane.comp.lib.boost.build/5643/
#
# Using 'register-c-compiler' adds the build directory to INCLUDES
# FIXME: switch to generators
builtin.register_c_compiler('rc.compile.resource', ['RC'], ['OBJ(%_res)'], [])
__angle_include_re = "#include[ ]*<([^<]+)>"
# Register scanner for resources
class ResScanner(scanner.Scanner):
    def __init__(self, includes):
        scanner.Scanner.__init__(self)
        self.includes = includes
    def pattern(self):
        return "(([^ ]+[ ]+(BITMAP|CURSOR|FONT|ICON|MESSAGETABLE|RT_MANIFEST)" +\
               "[ ]+([^ \"]+|\"[^\"]+\"))|(#include[ ]*(<[^<]+>|\"[^\"]+\")))"
def process(self, target, matches, binding):
binding = binding[0]
angle = regex.transform(matches, "#include[ ]*<([^<]+)>")
quoted = regex.transform(matches, "#include[ ]*\"([^\"]+)\"")
res = regex.transform(matches,
"[^ ]+[ ]+(BITMAP|CURSOR|FONT|ICON|MESSAGETABLE|RT_MANIFEST)" +\
"[ ]+(([^ \"]+)|\"([^\"]+)\")", [3, 4])
# Icons and other includes may referenced as
#
# IDR_MAINFRAME ICON "res\\icon.ico"
#
# so we have to replace double backslashes to single ones.
res = [ re.sub(r'\\\\', '/', match) for match in res if match is not None ]
        # CONSIDER: the new scoping rule seems to defeat "on target" variables.
g = bjam.call('get-target-variable', target, 'HDRGRIST')[0]
b = os.path.normpath(os.path.dirname(binding))
# Attach binding of including file to included targets.
# When target is directly created from virtual target
# this extra information is unnecessary. But in other
# cases, it allows to distinguish between two headers of the
# same name included from different places.
# We don't need this extra information for angle includes,
# since they should not depend on including file (we can't
# get literal "." in include path).
g2 = g + "#" + b
g = "<" + g + ">"
g2 = "<" + g2 + ">"
angle = [g + x for x in angle]
quoted = [g2 + x for x in quoted]
res = [g2 + x for x in res]
all = angle + quoted
bjam.call('mark-included', target, all)
engine = get_manager().engine()
engine.add_dependency(target, res)
bjam.call('NOCARE', all + res)
engine.set_target_variable(angle, 'SEARCH', [utility.get_value(inc) for inc in self.includes])
engine.set_target_variable(quoted, 'SEARCH', [b + utility.get_value(inc) for inc in self.includes])
engine.set_target_variable(res, 'SEARCH', [b + utility.get_value(inc) for inc in self.includes])
# Just propagate current scanner to includes, in a hope
# that includes do not change scanners.
get_manager().scanners().propagate(self, angle + quoted)
scanner.register(ResScanner, 'include')
type.set_scanner('RC', ResScanner)
| mit | 5,984,805,575,517,093,000 | 2,203,266,767,415,078,100 | 36.178571 | 109 | 0.616865 | false |
mdaus/nitro | modules/python/nitf/samples/nitf_extract.py | 2 | 2822 | #!/usr/bin/env python
"""
* =========================================================================
* This file is part of NITRO
* =========================================================================
*
* (C) Copyright 2004 - 2016, MDA Information Systems LLC
*
* NITRO is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this program; if not, If not,
* see <http://www.gnu.org/licenses/>.
*
*
"""
from nitf import *
import os, sys, logging, glob
logging.basicConfig(level=logging.INFO, stream=sys.stdout,
format='%(asctime)s %(levelname)s %(message)s')
def extract_image(subheader, index, imageReader, outDir=None, baseName=None):
window = SubWindow()
window.numRows = subheader['numRows'].intValue()
window.numCols = subheader['numCols'].intValue()
window.bandList = list(range(subheader.getBandCount()))
nbpp = subheader['numBitsPerPixel'].intValue()
bandData = imageReader.read(window)
if not outDir: outDir = os.getcwd()
if not baseName: baseName = os.path.basename(os.tempnam())
outNames = []
for band, data in enumerate(bandData):
outName = '%s_%d__%d_x_%d_%d_band_%d.out' % (
baseName, index, window.numRows, window.numCols, nbpp, band)
outName = os.path.join(outDir, outName)
f = open(outName, 'wb')
f.write(data)
f.close()
outNames.append(outName)
logging.info('Wrote band data to file %s' % outName)
return outNames
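# Output-name sketch: for band 0 of a 1024x512, 8 bits-per-pixel image
# segment 0 extracted from an input named 'foo.ntf' (an assumed name),
# the format string above yields:
#
#   foo.ntf_0__1024_x_512_8_band_0.out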
def extract_images(fileName, outDir=None):
if not outDir: outDir = os.getcwd()
if not os.path.exists(outDir): os.makedirs(outDir)
handle = IOHandle(fileName)
reader = Reader()
record = reader.read(handle)
logging.info('Dumping file: %s' % fileName)
for i, segment in enumerate(record.getImages()):
logging.info('--- Image [%d] ---' % i)
imReader = reader.newImageReader(i)
extract_image(segment.subheader, i, imReader, outDir, os.path.basename(fileName))
handle.close()
if __name__ == '__main__':
for arg in sys.argv[1:]:
if os.path.isfile(arg):
extract_images(arg)
elif os.path.isdir(arg):
map(extract_images, glob.glob(os.path.join(arg, '*.ntf')) + glob.glob(os.path.join(arg, '*.NTF')))
| lgpl-3.0 | 6,135,002,898,815,960,000 | -8,463,996,040,692,731,000 | 33.414634 | 110 | 0.620836 | false |
boorad/bigcouch | couchjs/scons/scons-local-2.0.1/SCons/Tool/mslink.py | 61 | 10682 | """SCons.Tool.mslink
Tool-specific initialization for the Microsoft linker.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/mslink.py 5134 2010/08/16 23:02:40 bdeegan"
import os.path
import SCons.Action
import SCons.Defaults
import SCons.Errors
import SCons.Platform.win32
import SCons.Tool
import SCons.Tool.msvc
import SCons.Tool.msvs
import SCons.Util
from MSCommon import msvc_setup_env_once, msvc_exists
def pdbGenerator(env, target, source, for_signature):
try:
return ['/PDB:%s' % target[0].attributes.pdb, '/DEBUG']
except (AttributeError, IndexError):
return None
def _dllTargets(target, source, env, for_signature, paramtp):
listCmd = []
dll = env.FindIxes(target, '%sPREFIX' % paramtp, '%sSUFFIX' % paramtp)
if dll: listCmd.append("/out:%s"%dll.get_string(for_signature))
implib = env.FindIxes(target, 'LIBPREFIX', 'LIBSUFFIX')
if implib: listCmd.append("/implib:%s"%implib.get_string(for_signature))
return listCmd
def _dllSources(target, source, env, for_signature, paramtp):
listCmd = []
deffile = env.FindIxes(source, "WINDOWSDEFPREFIX", "WINDOWSDEFSUFFIX")
for src in source:
# Check explicitly for a non-None deffile so that the __cmp__
# method of the base SCons.Util.Proxy class used for some Node
# proxies doesn't try to use a non-existent __dict__ attribute.
if deffile and src == deffile:
# Treat this source as a .def file.
listCmd.append("/def:%s" % src.get_string(for_signature))
else:
# Just treat it as a generic source file.
listCmd.append(src)
return listCmd
def windowsShlinkTargets(target, source, env, for_signature):
return _dllTargets(target, source, env, for_signature, 'SHLIB')
def windowsShlinkSources(target, source, env, for_signature):
return _dllSources(target, source, env, for_signature, 'SHLIB')
def _windowsLdmodTargets(target, source, env, for_signature):
"""Get targets for loadable modules."""
return _dllTargets(target, source, env, for_signature, 'LDMODULE')
def _windowsLdmodSources(target, source, env, for_signature):
"""Get sources for loadable modules."""
return _dllSources(target, source, env, for_signature, 'LDMODULE')
def _dllEmitter(target, source, env, paramtp):
"""Common implementation of dll emitter."""
SCons.Tool.msvc.validate_vars(env)
extratargets = []
extrasources = []
dll = env.FindIxes(target, '%sPREFIX' % paramtp, '%sSUFFIX' % paramtp)
no_import_lib = env.get('no_import_lib', 0)
if not dll:
raise SCons.Errors.UserError('A shared library should have exactly one target with the suffix: %s' % env.subst('$%sSUFFIX' % paramtp))
insert_def = env.subst("$WINDOWS_INSERT_DEF")
if not insert_def in ['', '0', 0] and \
not env.FindIxes(source, "WINDOWSDEFPREFIX", "WINDOWSDEFSUFFIX"):
# append a def file to the list of sources
extrasources.append(
env.ReplaceIxes(dll,
'%sPREFIX' % paramtp, '%sSUFFIX' % paramtp,
"WINDOWSDEFPREFIX", "WINDOWSDEFSUFFIX"))
version_num, suite = SCons.Tool.msvs.msvs_parse_version(env.get('MSVS_VERSION', '6.0'))
if version_num >= 8.0 and env.get('WINDOWS_INSERT_MANIFEST', 0):
# MSVC 8 automatically generates .manifest files that must be installed
extratargets.append(
env.ReplaceIxes(dll,
'%sPREFIX' % paramtp, '%sSUFFIX' % paramtp,
"WINDOWSSHLIBMANIFESTPREFIX", "WINDOWSSHLIBMANIFESTSUFFIX"))
if 'PDB' in env and env['PDB']:
pdb = env.arg2nodes('$PDB', target=target, source=source)[0]
extratargets.append(pdb)
target[0].attributes.pdb = pdb
if not no_import_lib and \
not env.FindIxes(target, "LIBPREFIX", "LIBSUFFIX"):
# Append an import library to the list of targets.
extratargets.append(
env.ReplaceIxes(dll,
'%sPREFIX' % paramtp, '%sSUFFIX' % paramtp,
"LIBPREFIX", "LIBSUFFIX"))
# and .exp file is created if there are exports from a DLL
extratargets.append(
env.ReplaceIxes(dll,
'%sPREFIX' % paramtp, '%sSUFFIX' % paramtp,
"WINDOWSEXPPREFIX", "WINDOWSEXPSUFFIX"))
return (target+extratargets, source+extrasources)
def windowsLibEmitter(target, source, env):
return _dllEmitter(target, source, env, 'SHLIB')
def ldmodEmitter(target, source, env):
"""Emitter for loadable modules.
Loadable modules are identical to shared libraries on Windows, but building
them is subject to different parameters (LDMODULE*).
"""
return _dllEmitter(target, source, env, 'LDMODULE')
def prog_emitter(target, source, env):
SCons.Tool.msvc.validate_vars(env)
extratargets = []
exe = env.FindIxes(target, "PROGPREFIX", "PROGSUFFIX")
if not exe:
raise SCons.Errors.UserError("An executable should have exactly one target with the suffix: %s" % env.subst("$PROGSUFFIX"))
version_num, suite = SCons.Tool.msvs.msvs_parse_version(env.get('MSVS_VERSION', '6.0'))
if version_num >= 8.0 and env.get('WINDOWS_INSERT_MANIFEST', 0):
# MSVC 8 automatically generates .manifest files that have to be installed
extratargets.append(
env.ReplaceIxes(exe,
"PROGPREFIX", "PROGSUFFIX",
"WINDOWSPROGMANIFESTPREFIX", "WINDOWSPROGMANIFESTSUFFIX"))
if 'PDB' in env and env['PDB']:
pdb = env.arg2nodes('$PDB', target=target, source=source)[0]
extratargets.append(pdb)
target[0].attributes.pdb = pdb
return (target+extratargets,source)
def RegServerFunc(target, source, env):
if 'register' in env and env['register']:
ret = regServerAction([target[0]], [source[0]], env)
if ret:
raise SCons.Errors.UserError("Unable to register %s" % target[0])
else:
print "Registered %s sucessfully" % target[0]
return ret
return 0
regServerAction = SCons.Action.Action("$REGSVRCOM", "$REGSVRCOMSTR")
regServerCheck = SCons.Action.Action(RegServerFunc, None)
shlibLinkAction = SCons.Action.Action('${TEMPFILE("$SHLINK $SHLINKFLAGS $_SHLINK_TARGETS $_LIBDIRFLAGS $_LIBFLAGS $_PDB $_SHLINK_SOURCES")}')
compositeShLinkAction = shlibLinkAction + regServerCheck
ldmodLinkAction = SCons.Action.Action('${TEMPFILE("$LDMODULE $LDMODULEFLAGS $_LDMODULE_TARGETS $_LIBDIRFLAGS $_LIBFLAGS $_PDB $_LDMODULE_SOURCES")}')
compositeLdmodAction = ldmodLinkAction + regServerCheck
def generate(env):
"""Add Builders and construction variables for ar to an Environment."""
SCons.Tool.createSharedLibBuilder(env)
SCons.Tool.createProgBuilder(env)
env['SHLINK'] = '$LINK'
env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS /dll')
env['_SHLINK_TARGETS'] = windowsShlinkTargets
env['_SHLINK_SOURCES'] = windowsShlinkSources
env['SHLINKCOM'] = compositeShLinkAction
env.Append(SHLIBEMITTER = [windowsLibEmitter])
env['LINK'] = 'link'
env['LINKFLAGS'] = SCons.Util.CLVar('/nologo')
env['_PDB'] = pdbGenerator
env['LINKCOM'] = '${TEMPFILE("$LINK $LINKFLAGS /OUT:$TARGET.windows $_LIBDIRFLAGS $_LIBFLAGS $_PDB $SOURCES.windows")}'
env.Append(PROGEMITTER = [prog_emitter])
env['LIBDIRPREFIX']='/LIBPATH:'
env['LIBDIRSUFFIX']=''
env['LIBLINKPREFIX']=''
env['LIBLINKSUFFIX']='$LIBSUFFIX'
env['WIN32DEFPREFIX'] = ''
env['WIN32DEFSUFFIX'] = '.def'
env['WIN32_INSERT_DEF'] = 0
env['WINDOWSDEFPREFIX'] = '${WIN32DEFPREFIX}'
env['WINDOWSDEFSUFFIX'] = '${WIN32DEFSUFFIX}'
env['WINDOWS_INSERT_DEF'] = '${WIN32_INSERT_DEF}'
env['WIN32EXPPREFIX'] = ''
env['WIN32EXPSUFFIX'] = '.exp'
env['WINDOWSEXPPREFIX'] = '${WIN32EXPPREFIX}'
env['WINDOWSEXPSUFFIX'] = '${WIN32EXPSUFFIX}'
env['WINDOWSSHLIBMANIFESTPREFIX'] = ''
env['WINDOWSSHLIBMANIFESTSUFFIX'] = '${SHLIBSUFFIX}.manifest'
env['WINDOWSPROGMANIFESTPREFIX'] = ''
env['WINDOWSPROGMANIFESTSUFFIX'] = '${PROGSUFFIX}.manifest'
env['REGSVRACTION'] = regServerCheck
env['REGSVR'] = os.path.join(SCons.Platform.win32.get_system_root(),'System32','regsvr32')
env['REGSVRFLAGS'] = '/s '
env['REGSVRCOM'] = '$REGSVR $REGSVRFLAGS ${TARGET.windows}'
# Set-up ms tools paths
msvc_setup_env_once(env)
# Loadable modules are on Windows the same as shared libraries, but they
# are subject to different build parameters (LDMODULE* variables).
# Therefore LDMODULE* variables correspond as much as possible to
# SHLINK*/SHLIB* ones.
SCons.Tool.createLoadableModuleBuilder(env)
env['LDMODULE'] = '$SHLINK'
env['LDMODULEPREFIX'] = '$SHLIBPREFIX'
env['LDMODULESUFFIX'] = '$SHLIBSUFFIX'
env['LDMODULEFLAGS'] = '$SHLINKFLAGS'
env['_LDMODULE_TARGETS'] = _windowsLdmodTargets
env['_LDMODULE_SOURCES'] = _windowsLdmodSources
env['LDMODULEEMITTER'] = [ldmodEmitter]
env['LDMODULECOM'] = compositeLdmodAction
def exists(env):
return msvc_exists()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| apache-2.0 | 893,465,057,959,295,100 | -4,278,977,760,664,162,300 | 39.157895 | 149 | 0.666729 | false |
mancoast/CPythonPyc_test | fail/321_test_urllib2_localnet.py | 51 | 21528 | #!/usr/bin/env python3
import os
import email
import urllib.parse
import urllib.request
import http.server
import unittest
import hashlib
from test import support
threading = support.import_module('threading')
here = os.path.dirname(__file__)
# Self-signed cert file for 'localhost'
CERT_localhost = os.path.join(here, 'keycert.pem')
# Self-signed cert file for 'fakehostname'
CERT_fakehostname = os.path.join(here, 'keycert2.pem')
# Loopback http server infrastructure
class LoopbackHttpServer(http.server.HTTPServer):
"""HTTP server w/ a few modifications that make it useful for
loopback testing purposes.
"""
def __init__(self, server_address, RequestHandlerClass):
http.server.HTTPServer.__init__(self,
server_address,
RequestHandlerClass)
# Set the timeout of our listening socket really low so
# that we can stop the server easily.
self.socket.settimeout(0.1)
def get_request(self):
"""HTTPServer method, overridden."""
request, client_address = self.socket.accept()
# It's a loopback connection, so setting the timeout
# really low shouldn't affect anything, but should make
# deadlocks less likely to occur.
request.settimeout(10.0)
return (request, client_address)
class LoopbackHttpServerThread(threading.Thread):
"""Stoppable thread that runs a loopback http server."""
def __init__(self, request_handler):
threading.Thread.__init__(self)
self._stop_server = False
self.ready = threading.Event()
request_handler.protocol_version = "HTTP/1.0"
self.httpd = LoopbackHttpServer(("127.0.0.1", 0),
request_handler)
#print "Serving HTTP on %s port %s" % (self.httpd.server_name,
# self.httpd.server_port)
self.port = self.httpd.server_port
def stop(self):
"""Stops the webserver if it's currently running."""
# Set the stop flag.
self._stop_server = True
self.join()
self.httpd.server_close()
def run(self):
self.ready.set()
while not self._stop_server:
self.httpd.handle_request()
# Authentication infrastructure
class DigestAuthHandler:
"""Handler for performing digest authentication."""
def __init__(self):
self._request_num = 0
self._nonces = []
self._users = {}
self._realm_name = "Test Realm"
self._qop = "auth"
def set_qop(self, qop):
self._qop = qop
def set_users(self, users):
assert isinstance(users, dict)
self._users = users
def set_realm(self, realm):
self._realm_name = realm
def _generate_nonce(self):
self._request_num += 1
nonce = hashlib.md5(str(self._request_num).encode("ascii")).hexdigest()
self._nonces.append(nonce)
return nonce
def _create_auth_dict(self, auth_str):
first_space_index = auth_str.find(" ")
auth_str = auth_str[first_space_index+1:]
parts = auth_str.split(",")
auth_dict = {}
for part in parts:
name, value = part.split("=")
name = name.strip()
if value[0] == '"' and value[-1] == '"':
value = value[1:-1]
else:
value = value.strip()
auth_dict[name] = value
return auth_dict
def _validate_auth(self, auth_dict, password, method, uri):
final_dict = {}
final_dict.update(auth_dict)
final_dict["password"] = password
final_dict["method"] = method
final_dict["uri"] = uri
HA1_str = "%(username)s:%(realm)s:%(password)s" % final_dict
HA1 = hashlib.md5(HA1_str.encode("ascii")).hexdigest()
HA2_str = "%(method)s:%(uri)s" % final_dict
HA2 = hashlib.md5(HA2_str.encode("ascii")).hexdigest()
final_dict["HA1"] = HA1
final_dict["HA2"] = HA2
response_str = "%(HA1)s:%(nonce)s:%(nc)s:" \
"%(cnonce)s:%(qop)s:%(HA2)s" % final_dict
response = hashlib.md5(response_str.encode("ascii")).hexdigest()
return response == auth_dict["response"]
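    # Reference sketch of the RFC 2617 digest computed above:
    #   HA1      = MD5(username:realm:password)
    #   HA2      = MD5(method:uri)
    #   response = MD5(HA1:nonce:nc:cnonce:qop:HA2)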
def _return_auth_challenge(self, request_handler):
request_handler.send_response(407, "Proxy Authentication Required")
request_handler.send_header("Content-Type", "text/html")
request_handler.send_header(
'Proxy-Authenticate', 'Digest realm="%s", '
'qop="%s",'
'nonce="%s", ' % \
(self._realm_name, self._qop, self._generate_nonce()))
# XXX: Not sure if we're supposed to add this next header or
# not.
#request_handler.send_header('Connection', 'close')
request_handler.end_headers()
request_handler.wfile.write(b"Proxy Authentication Required.")
return False
def handle_request(self, request_handler):
"""Performs digest authentication on the given HTTP request
handler. Returns True if authentication was successful, False
otherwise.
If no users have been set, then digest auth is effectively
disabled and this method will always return True.
"""
if len(self._users) == 0:
return True
if "Proxy-Authorization" not in request_handler.headers:
return self._return_auth_challenge(request_handler)
else:
auth_dict = self._create_auth_dict(
request_handler.headers["Proxy-Authorization"]
)
if auth_dict["username"] in self._users:
password = self._users[ auth_dict["username"] ]
else:
return self._return_auth_challenge(request_handler)
if not auth_dict.get("nonce") in self._nonces:
return self._return_auth_challenge(request_handler)
else:
self._nonces.remove(auth_dict["nonce"])
auth_validated = False
# MSIE uses short_path in its validation, but Python's
# urllib.request uses the full path, so we're going to see if
# either of them works here.
for path in [request_handler.path, request_handler.short_path]:
if self._validate_auth(auth_dict,
password,
request_handler.command,
path):
auth_validated = True
if not auth_validated:
return self._return_auth_challenge(request_handler)
return True
# Proxy test infrastructure
class FakeProxyHandler(http.server.BaseHTTPRequestHandler):
"""This is a 'fake proxy' that makes it look like the entire
    internet has gone down due to a sudden zombie invasion. Its main
    utility is in providing us with authentication support for
testing.
"""
def __init__(self, digest_auth_handler, *args, **kwargs):
# This has to be set before calling our parent's __init__(), which will
# try to call do_GET().
self.digest_auth_handler = digest_auth_handler
http.server.BaseHTTPRequestHandler.__init__(self, *args, **kwargs)
def log_message(self, format, *args):
# Uncomment the next line for debugging.
# sys.stderr.write(format % args)
pass
def do_GET(self):
(scm, netloc, path, params, query, fragment) = urllib.parse.urlparse(
self.path, "http")
self.short_path = path
if self.digest_auth_handler.handle_request(self):
self.send_response(200, "OK")
self.send_header("Content-Type", "text/html")
self.end_headers()
self.wfile.write(bytes("You've reached %s!<BR>" % self.path,
"ascii"))
self.wfile.write(b"Our apologies, but our server is down due to "
b"a sudden zombie invasion.")
# Test cases
class ProxyAuthTests(unittest.TestCase):
URL = "http://localhost"
USER = "tester"
PASSWD = "test123"
REALM = "TestRealm"
def setUp(self):
super(ProxyAuthTests, self).setUp()
self.digest_auth_handler = DigestAuthHandler()
self.digest_auth_handler.set_users({self.USER: self.PASSWD})
self.digest_auth_handler.set_realm(self.REALM)
def create_fake_proxy_handler(*args, **kwargs):
return FakeProxyHandler(self.digest_auth_handler, *args, **kwargs)
self.server = LoopbackHttpServerThread(create_fake_proxy_handler)
self.server.start()
self.server.ready.wait()
proxy_url = "http://127.0.0.1:%d" % self.server.port
handler = urllib.request.ProxyHandler({"http" : proxy_url})
self.proxy_digest_handler = urllib.request.ProxyDigestAuthHandler()
self.opener = urllib.request.build_opener(
handler, self.proxy_digest_handler)
def tearDown(self):
self.server.stop()
super(ProxyAuthTests, self).tearDown()
def test_proxy_with_bad_password_raises_httperror(self):
self.proxy_digest_handler.add_password(self.REALM, self.URL,
self.USER, self.PASSWD+"bad")
self.digest_auth_handler.set_qop("auth")
self.assertRaises(urllib.error.HTTPError,
self.opener.open,
self.URL)
def test_proxy_with_no_password_raises_httperror(self):
self.digest_auth_handler.set_qop("auth")
self.assertRaises(urllib.error.HTTPError,
self.opener.open,
self.URL)
def test_proxy_qop_auth_works(self):
self.proxy_digest_handler.add_password(self.REALM, self.URL,
self.USER, self.PASSWD)
self.digest_auth_handler.set_qop("auth")
result = self.opener.open(self.URL)
while result.read():
pass
result.close()
def test_proxy_qop_auth_int_works_or_throws_urlerror(self):
self.proxy_digest_handler.add_password(self.REALM, self.URL,
self.USER, self.PASSWD)
self.digest_auth_handler.set_qop("auth-int")
try:
result = self.opener.open(self.URL)
except urllib.error.URLError:
# It's okay if we don't support auth-int, but we certainly
# shouldn't receive any kind of exception here other than
# a URLError.
result = None
if result:
while result.read():
pass
result.close()
def GetRequestHandler(responses):
class FakeHTTPRequestHandler(http.server.BaseHTTPRequestHandler):
server_version = "TestHTTP/"
requests = []
headers_received = []
port = 80
def do_GET(self):
body = self.send_head()
while body:
done = self.wfile.write(body)
body = body[done:]
def do_POST(self):
content_length = self.headers["Content-Length"]
post_data = self.rfile.read(int(content_length))
self.do_GET()
self.requests.append(post_data)
def send_head(self):
FakeHTTPRequestHandler.headers_received = self.headers
self.requests.append(self.path)
response_code, headers, body = responses.pop(0)
self.send_response(response_code)
for (header, value) in headers:
self.send_header(header, value % {'port':self.port})
if body:
self.send_header("Content-type", "text/plain")
self.end_headers()
return body
self.end_headers()
def log_message(self, *args):
pass
return FakeHTTPRequestHandler
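# The 'responses' argument is a list of (status_code, headers, body)
# tuples, consumed one per request; header values may interpolate the
# server port. A sketch mirroring the tests below ('/elsewhere' is an
# invented path):
#
#   GetRequestHandler([
#       (302, [("Location", "http://localhost:%(port)s/elsewhere")], ""),
#       (200, [], b"payload"),
#   ])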
class TestUrlopen(unittest.TestCase):
"""Tests urllib.request.urlopen using the network.
These tests are not exhaustive. Assuming that testing using files does a
good job overall of some of the basic interface features. There are no
tests exercising the optional 'data' and 'proxies' arguments. No tests
for transparent redirection have been written.
"""
def setUp(self):
super(TestUrlopen, self).setUp()
self.server = None
def tearDown(self):
if self.server is not None:
self.server.stop()
super(TestUrlopen, self).tearDown()
def urlopen(self, url, data=None, **kwargs):
l = []
f = urllib.request.urlopen(url, data, **kwargs)
try:
# Exercise various methods
l.extend(f.readlines(200))
l.append(f.readline())
l.append(f.read(1024))
l.append(f.read())
finally:
f.close()
return b"".join(l)
def start_server(self, responses=None):
if responses is None:
responses = [(200, [], b"we don't care")]
handler = GetRequestHandler(responses)
self.server = LoopbackHttpServerThread(handler)
self.server.start()
self.server.ready.wait()
port = self.server.port
handler.port = port
return handler
def start_https_server(self, responses=None, certfile=CERT_localhost):
if not hasattr(urllib.request, 'HTTPSHandler'):
self.skipTest('ssl support required')
from test.ssl_servers import make_https_server
if responses is None:
responses = [(200, [], b"we care a bit")]
handler = GetRequestHandler(responses)
server = make_https_server(self, certfile=certfile, handler_class=handler)
handler.port = server.port
return handler
def test_redirection(self):
expected_response = b"We got here..."
responses = [
(302, [("Location", "http://localhost:%(port)s/somewhere_else")],
""),
(200, [], expected_response)
]
handler = self.start_server(responses)
data = self.urlopen("http://localhost:%s/" % handler.port)
self.assertEqual(data, expected_response)
self.assertEqual(handler.requests, ["/", "/somewhere_else"])
def test_chunked(self):
expected_response = b"hello world"
chunked_start = (
b'a\r\n'
b'hello worl\r\n'
b'1\r\n'
b'd\r\n'
b'0\r\n'
)
response = [(200, [("Transfer-Encoding", "chunked")], chunked_start)]
handler = self.start_server(response)
data = self.urlopen("http://localhost:%s/" % handler.port)
self.assertEqual(data, expected_response)
def test_404(self):
expected_response = b"Bad bad bad..."
handler = self.start_server([(404, [], expected_response)])
try:
self.urlopen("http://localhost:%s/weeble" % handler.port)
except urllib.error.URLError as f:
data = f.read()
f.close()
else:
self.fail("404 should raise URLError")
self.assertEqual(data, expected_response)
self.assertEqual(handler.requests, ["/weeble"])
def test_200(self):
expected_response = b"pycon 2008..."
handler = self.start_server([(200, [], expected_response)])
data = self.urlopen("http://localhost:%s/bizarre" % handler.port)
self.assertEqual(data, expected_response)
self.assertEqual(handler.requests, ["/bizarre"])
def test_200_with_parameters(self):
expected_response = b"pycon 2008..."
handler = self.start_server([(200, [], expected_response)])
data = self.urlopen("http://localhost:%s/bizarre" % handler.port,
b"get=with_feeling")
self.assertEqual(data, expected_response)
self.assertEqual(handler.requests, ["/bizarre", b"get=with_feeling"])
def test_https(self):
handler = self.start_https_server()
data = self.urlopen("https://localhost:%s/bizarre" % handler.port)
self.assertEqual(data, b"we care a bit")
def test_https_with_cafile(self):
handler = self.start_https_server(certfile=CERT_localhost)
import ssl
# Good cert
data = self.urlopen("https://localhost:%s/bizarre" % handler.port,
cafile=CERT_localhost)
self.assertEqual(data, b"we care a bit")
# Bad cert
with self.assertRaises(urllib.error.URLError) as cm:
self.urlopen("https://localhost:%s/bizarre" % handler.port,
cafile=CERT_fakehostname)
# Good cert, but mismatching hostname
handler = self.start_https_server(certfile=CERT_fakehostname)
with self.assertRaises(ssl.CertificateError) as cm:
self.urlopen("https://localhost:%s/bizarre" % handler.port,
cafile=CERT_fakehostname)
def test_sending_headers(self):
handler = self.start_server()
req = urllib.request.Request("http://localhost:%s/" % handler.port,
headers={"Range": "bytes=20-39"})
urllib.request.urlopen(req)
self.assertEqual(handler.headers_received["Range"], "bytes=20-39")
def test_basic(self):
handler = self.start_server()
open_url = urllib.request.urlopen("http://localhost:%s" % handler.port)
for attr in ("read", "close", "info", "geturl"):
self.assertTrue(hasattr(open_url, attr), "object returned from "
"urlopen lacks the %s attribute" % attr)
try:
self.assertTrue(open_url.read(), "calling 'read' failed")
finally:
open_url.close()
def test_info(self):
handler = self.start_server()
try:
open_url = urllib.request.urlopen(
"http://localhost:%s" % handler.port)
info_obj = open_url.info()
self.assertIsInstance(info_obj, email.message.Message,
"object returned by 'info' is not an "
"instance of email.message.Message")
self.assertEqual(info_obj.get_content_subtype(), "plain")
finally:
self.server.stop()
def test_geturl(self):
# Make sure same URL as opened is returned by geturl.
handler = self.start_server()
open_url = urllib.request.urlopen("http://localhost:%s" % handler.port)
url = open_url.geturl()
self.assertEqual(url, "http://localhost:%s" % handler.port)
def test_bad_address(self):
# Make sure proper exception is raised when connecting to a bogus
# address.
self.assertRaises(IOError,
# Given that both VeriSign and various ISPs have in
# the past or are presently hijacking various invalid
# domain name requests in an attempt to boost traffic
# to their own sites, finding a domain name to use
# for this test is difficult. RFC2606 leads one to
# believe that '.invalid' should work, but experience
# seemed to indicate otherwise. Single character
# TLDs are likely to remain invalid, so this seems to
# be the best choice. The trailing '.' prevents a
# related problem: The normal DNS resolver appends
# the domain names from the search path if there is
# no '.' the end and, and if one of those domains
# implements a '*' rule a result is returned.
# However, none of this will prevent the test from
# failing if the ISP hijacks all invalid domain
# requests. The real solution would be to be able to
# parameterize the framework with a mock resolver.
urllib.request.urlopen,
"http://sadflkjsasf.i.nvali.d./")
def test_iteration(self):
expected_response = b"pycon 2008..."
handler = self.start_server([(200, [], expected_response)])
data = urllib.request.urlopen("http://localhost:%s" % handler.port)
for line in data:
self.assertEqual(line, expected_response)
def test_line_iteration(self):
lines = [b"We\n", b"got\n", b"here\n", b"verylong " * 8192 + b"\n"]
expected_response = b"".join(lines)
handler = self.start_server([(200, [], expected_response)])
data = urllib.request.urlopen("http://localhost:%s" % handler.port)
for index, line in enumerate(data):
self.assertEqual(line, lines[index],
"Fetched line number %s doesn't match expected:\n"
" Expected length was %s, got %s" %
(index, len(lines[index]), len(line)))
self.assertEqual(index + 1, len(lines))
@support.reap_threads
def test_main():
support.run_unittest(ProxyAuthTests, TestUrlopen)
if __name__ == "__main__":
test_main()
| gpl-3.0 | -2,235,116,854,253,178,400 | 2,564,913,551,817,089,000 | 37.170213 | 82 | 0.570559 | false |
lixt/lily2-gem5 | src/cpu/o3/FuncUnitConfig.py | 49 | 4259 | # Copyright (c) 2010 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Copyright (c) 2006-2007 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Kevin Lim
from m5.SimObject import SimObject
from m5.params import *
from FuncUnit import *
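# Each FU description below lists the operation classes the unit executes.
# In gem5's FuncUnit model, opLat is the operation latency in cycles and
# issueLat the number of cycles before the unit can accept another op of
# that class (both default to 1 when omitted); count is the number of units.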
class IntALU(FUDesc):
opList = [ OpDesc(opClass='IntAlu') ]
count = 6
class IntMultDiv(FUDesc):
opList = [ OpDesc(opClass='IntMult', opLat=3),
OpDesc(opClass='IntDiv', opLat=20, issueLat=19) ]
count=2
class FP_ALU(FUDesc):
opList = [ OpDesc(opClass='FloatAdd', opLat=2),
OpDesc(opClass='FloatCmp', opLat=2),
OpDesc(opClass='FloatCvt', opLat=2) ]
count = 4
class FP_MultDiv(FUDesc):
opList = [ OpDesc(opClass='FloatMult', opLat=4),
OpDesc(opClass='FloatDiv', opLat=12, issueLat=12),
OpDesc(opClass='FloatSqrt', opLat=24, issueLat=24) ]
count = 2
class SIMD_Unit(FUDesc):
opList = [ OpDesc(opClass='SimdAdd'),
OpDesc(opClass='SimdAddAcc'),
OpDesc(opClass='SimdAlu'),
OpDesc(opClass='SimdCmp'),
OpDesc(opClass='SimdCvt'),
OpDesc(opClass='SimdMisc'),
OpDesc(opClass='SimdMult'),
OpDesc(opClass='SimdMultAcc'),
OpDesc(opClass='SimdShift'),
OpDesc(opClass='SimdShiftAcc'),
OpDesc(opClass='SimdSqrt'),
OpDesc(opClass='SimdFloatAdd'),
OpDesc(opClass='SimdFloatAlu'),
OpDesc(opClass='SimdFloatCmp'),
OpDesc(opClass='SimdFloatCvt'),
OpDesc(opClass='SimdFloatDiv'),
OpDesc(opClass='SimdFloatMisc'),
OpDesc(opClass='SimdFloatMult'),
OpDesc(opClass='SimdFloatMultAcc'),
OpDesc(opClass='SimdFloatSqrt') ]
count = 4
class ReadPort(FUDesc):
opList = [ OpDesc(opClass='MemRead') ]
count = 0
class WritePort(FUDesc):
opList = [ OpDesc(opClass='MemWrite') ]
count = 0
class RdWrPort(FUDesc):
opList = [ OpDesc(opClass='MemRead'), OpDesc(opClass='MemWrite') ]
count = 4
class IprPort(FUDesc):
opList = [ OpDesc(opClass='IprAccess', opLat = 3, issueLat = 3) ]
count = 1
| bsd-3-clause | 582,122,282,226,071,800 | 4,506,466,734,746,516,000 | 39.951923 | 72 | 0.689129 | false |
Honry/crosswalk-test-suite | stability/stability-lowresource-android-tests/lowresource/Webapp_Operations_UnderLowDisk.py | 7 | 9653 | #!/usr/bin/env python
# coding=utf-8
#
# Copyright (c) 2015 Intel Corporation.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of works must retain the original copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the original copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Intel Corporation nor the names of its contributors
# may be used to endorse or promote products derived from this work without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY INTEL CORPORATION "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL INTEL CORPORATION BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors:
# Li, Hao<haox.li@intel.com>
import unittest
import os
import sys
import commands
import shutil
import time
import subprocess
import glob
from TestApp import *
reload(sys)
sys.setdefaultencoding('utf-8')
SCRIPT_PATH = os.path.realpath(__file__)
ConstPath = os.path.dirname(SCRIPT_PATH)
appsrc = ConstPath + "/../testapp/helloworld"
approot = ConstPath + "/helloworld"
app_tools_dir = os.environ.get('CROSSWALK_APP_TOOLS_CACHE_DIR')
installed_app_list = []
def setUp():
global device, apptools, crosswalkzip
#device = 'E6OKCY411012'
device = os.environ.get('DEVICE_ID')
global device_abi
device_abi = getDeviceCpuAbi(device)
if not device:
print 'Get env error\n'
sys.exit(1)
if not app_tools_dir:
print ("Not find CROSSWALK_APP_TOOLS_CACHE_DIR\n")
sys.exit(1)
# app tools commend
apptools = "crosswalk-pkg"
if os.system(apptools) != 0:
apptools = app_tools_dir + "/crosswalk-app-tools/src/crosswalk-pkg"
# crosswalk lib
zips = glob.glob(os.path.join(app_tools_dir, "crosswalk-*.zip"))
if len(zips) == 0:
print ("Not find crosswalk zip in CROSSWALK_APP_TOOLS_CACHE_DIR\n")
sys.exit(1)
# latest version
zips.sort(reverse = True)
crosswalkzip = zips[0]
def getFreeDiskSize(device):
# Disk size: M
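    # e.g. a df value of "1.5G" is returned as 1536 (MB); other values such
    # as "800.0M" are parsed as plain megabytes (this assumes df reports a
    # one-character unit suffix)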
cmd = "%s -s %s shell df|grep %s |awk -F \" \" '{print $4}'" % (ADB_CMD, device, "/data")
(return_code, output) = doCMD(cmd)
for line in output:
if line.endswith("G"):
# 1G = 1024M
return int(float(line[0:-1]) * 1024)
else:
return int(float(line[0:-1]))
def getDeviceCpuAbi(device):
cmd = "%s -s %s shell getprop|grep \"\[ro.product.cpu.abi\]\"" % (ADB_CMD, device)
(return_code, output) = doCMD(cmd)
for line in output:
if "x86" in line:
return "x86"
else:
return "arm"
def getFileSize(filepath):
filesize = 0
if os.path.exists(filepath):
filesize = float(os.stat(filepath).st_size)
# size: M
filesize = filesize/1024/1024
else:
print "-->> %s does not exists" % filepath
return filesize
def createAPK(appname):
action_status = True
# Remove existed manifest.json
if os.path.exists(appsrc + "/manifest.json"):
os.remove(appsrc + "/manifest.json")
# build apk
cmd = "%s --crosswalk=%s --platforms=android --android=%s --targets=%s -m " \
"\"{\\\"name\\\": \\\"%s\\\", \\\"start_url\\\": \\\"index.html\\\", \\\"xwalk_package_id\\\": \\\"org.xwalk.%s\\\"}\" %s" % \
(apptools,
crosswalkzip,
"embedded",
device_abi,
appname,
appname,
appsrc)
(return_code, output) = doCMD(cmd)
if return_code == 0:
print "-->> org.xwalk.%s success to build." % appname
cmd = "mv *.apk %s/%s.apk" % (approot, appname)
(return_code, output) = doCMD(cmd)
else:
print "-->> org.xwalk.%s fail to build." % appname
action_status = False
return action_status
def deleteAPK(testapp):
cmd = "rm -rf %s" % (testapp.location)
(return_code, output) = doCMD(cmd)
if return_code == 0:
print "-->> %s success to delete." % testapp.location
return True
else:
print "-->> %s fail to delete." % testapp.location
return False
def cleanWork():
cmd = "rm -rf %s" % (appsrc + "/*.temp.mp4")
(return_code, output) = doCMD(cmd)
cmd = "rm -rf %s" % (approot)
(return_code, output) = doCMD(cmd)
    for i in range(len(installed_app_list)):
        installed_app_list[i].uninstall()
def makeLowDisk():
cleanWork()
action_status = False
if not os.path.exists(approot):
cmd = "mkdir %s" % approot
(return_code, output) = doCMD(cmd)
videofile = appsrc + "/res/w3c/movie_300.mp4"
videosize = getFileSize(videofile)
if videosize <= 0:
print "-->> Lack pre-condition resource files"
return False
tmpreadystate = [False, False, False]
    global installed_app_list
while not action_status:
freesize = getFreeDiskSize(device)
if (freesize >= 1024) and not tmpreadystate[0]:
# make app size: 500M
count = int((500 - videosize)/videosize)
for i in range(count):
cmd = "cp %s %s " % (videofile, appsrc + "/video" + str(i) +".temp.mp4")
(return_code, output) = doCMD(cmd)
tmpreadystate[0] = True
elif (freesize >= 512) and (freesize < 1024) and not tmpreadystate[1]:
# clean appsrc
if tmpreadystate[0]:
cmd = "rm -rf %s/*.temp.mp4" % (appsrc)
(return_code, output) = doCMD(cmd)
# make app size: 100M
count = int((100 - videosize)/videosize)
for i in range(count):
cmd = "cp %s %s " % (videofile, appsrc + "/video" + str(i) +".temp.mp4")
(return_code, output) = doCMD(cmd)
tmpreadystate[1] = True
elif (freesize < 512) and not tmpreadystate[2]:
# clean appsrc
cmd = "rm -rf %s/*.temp.mp4" % (appsrc)
(return_code, output) = doCMD(cmd)
tmpreadystate[2] = True
appname = "helloworld%s" % int(time.time())
if createAPK(appname):
apkname = appname[0].upper() + appname[1:]
apkpath = approot + "/" + appname + ".apk"
testapp = TestApp(device, apkpath,
"org.xwalk." + appname, apkname + "Activity")
            # If the app already exists, go to the next one
            if not testapp.isInstalled():
                # if the app fails to install, delete the package, return True
if not testapp.install():
action_status = True
deleteAPK(testapp)
                # tmpreadystate[2] == True means the free disk is too small
                # to install the test app, so uninstall the last one to
                # keep more free disk
                if len(installed_app_list) > 0 and tmpreadystate[2]:
                    testapp = installed_app_list.pop(-1)
testapp.uninstall()
deleteAPK(testapp)
else:
                installed_app_list.append(testapp)
else:
break
return action_status
class TestStabilityInLowDiskFunctions(unittest.TestCase):
def test_app_repeatedly_in_lowdisk(self):
setUp()
if makeLowDisk():
testapp = TestApp(device, ConstPath + "/../testapp/lowresourcetest.apk",
"org.xwalk.lowresourcetest", "LowresourcetestActivity")
if testapp.isInstalled():
testapp.uninstall()
for i in range(20):
if testapp.install() and testapp.launch():
switchresult = False
for i in range(2):
time.sleep(1)
# swtich app
switchresult = testapp.switch()
if switchresult:
time.sleep(1)
if testapp.stop() and testapp.uninstall():
time.sleep(1)
else:
testapp.uninstall()
cleanWork()
self.assertTrue(False)
else:
testapp.uninstall()
cleanWork()
self.assertTrue(False)
else:
testapp.uninstall()
cleanWork()
self.assertTrue(False)
testapp.uninstall()
cleanWork()
self.assertTrue(True)
else:
print "-->> Test envrionment fail to set up"
cleanWork()
self.assertTrue(False)
if __name__ == '__main__':
unittest.main()
| bsd-3-clause | -3,939,190,024,695,348,000 | 6,664,746,193,966,965,000 | 33.848375 | 136 | 0.56656 | false |
staslev/incubator-beam | sdks/python/apache_beam/testing/test_pipeline_test.py | 9 | 4209 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Unit test for the TestPipeline class"""
import logging
import unittest
from hamcrest.core.assert_that import assert_that as hc_assert_that
from hamcrest.core.base_matcher import BaseMatcher
from apache_beam.internal import pickler
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.testing.test_pipeline import TestPipeline
# A simple matcher that is used for testing extra options appending.
class SimpleMatcher(BaseMatcher):
def _matches(self, item):
return True
class TestPipelineTest(unittest.TestCase):
TEST_CASE = {'options':
['--test-pipeline-options', '--job=mockJob --male --age=1'],
'expected_list': ['--job=mockJob', '--male', '--age=1'],
'expected_dict': {'job': 'mockJob',
'male': True,
'age': 1}}
# Used for testing pipeline option creation.
class TestParsingOptions(PipelineOptions):
@classmethod
def _add_argparse_args(cls, parser):
parser.add_argument('--job', action='store', help='mock job')
parser.add_argument('--male', action='store_true', help='mock gender')
parser.add_argument('--age', action='store', type=int, help='mock age')
def test_option_args_parsing(self):
test_pipeline = TestPipeline(argv=self.TEST_CASE['options'])
self.assertListEqual(
sorted(test_pipeline.get_full_options_as_args()),
sorted(self.TEST_CASE['expected_list']))
def test_empty_option_args_parsing(self):
test_pipeline = TestPipeline()
self.assertListEqual([],
test_pipeline.get_full_options_as_args())
def test_create_test_pipeline_options(self):
test_pipeline = TestPipeline(argv=self.TEST_CASE['options'])
test_options = PipelineOptions(test_pipeline.get_full_options_as_args())
self.assertDictContainsSubset(self.TEST_CASE['expected_dict'],
test_options.get_all_options())
EXTRA_OPT_CASES = [
{'options': {'name': 'Mark'},
'expected': ['--name=Mark']},
{'options': {'student': True},
'expected': ['--student']},
{'options': {'student': False},
'expected': []},
{'options': {'name': 'Mark', 'student': True},
'expected': ['--name=Mark', '--student']}
]
def test_append_extra_options(self):
test_pipeline = TestPipeline()
for case in self.EXTRA_OPT_CASES:
opt_list = test_pipeline.get_full_options_as_args(**case['options'])
self.assertListEqual(sorted(opt_list), sorted(case['expected']))
def test_append_verifier_in_extra_opt(self):
extra_opt = {'matcher': SimpleMatcher()}
opt_list = TestPipeline().get_full_options_as_args(**extra_opt)
_, value = opt_list[0].split('=', 1)
matcher = pickler.loads(value)
self.assertTrue(isinstance(matcher, BaseMatcher))
hc_assert_that(None, matcher)
def test_get_option(self):
name, value = ('job', 'mockJob')
test_pipeline = TestPipeline()
test_pipeline.options_list = ['--%s=%s' % (name, value)]
self.assertEqual(test_pipeline.get_option(name), value)
def test_skip_IT(self):
test_pipeline = TestPipeline(is_integration_test=True)
test_pipeline.run()
# Note that this will never be reached since it should be skipped above.
self.fail()
if __name__ == '__main__':
logging.getLogger().setLevel(logging.INFO)
unittest.main()
| apache-2.0 | -2,563,458,111,849,821,000 | -7,493,938,004,308,992,000 | 36.580357 | 79 | 0.665954 | false |
serialdoom/ansible | lib/ansible/plugins/lookup/ini.py | 4 | 4178 | # (c) 2015, Yannig Perre <yannig.perre(at)gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from io import StringIO
import os
import re
try:
# python2
import ConfigParser as configparser
except ImportError:
# python3
import configparser
from ansible.errors import AnsibleError
from ansible.plugins.lookup import LookupBase
from ansible.module_utils._text import to_bytes, to_text
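# Typical playbook usage (illustrative values):
#   lookup('ini', 'username section=integration file=users.ini')
#   lookup('ini', 'user.* section=integration file=users.ini re=true')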
def _parse_params(term):
'''Safely split parameter term to preserve spaces'''
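    # e.g. 'val section=databases file=my file.ini' is split into
    # ['val', 'section=databases', 'file=my file.ini'], preserving the
    # space inside the file name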
keys = ['key', 'section', 'file', 're']
params = {}
for k in keys:
params[k] = ''
thiskey = 'key'
for idp,phrase in enumerate(term.split()):
for k in keys:
if ('%s=' % k) in phrase:
thiskey = k
if idp == 0 or not params[thiskey]:
params[thiskey] = phrase
else:
params[thiskey] += ' ' + phrase
rparams = [params[x] for x in keys if params[x]]
return rparams
class LookupModule(LookupBase):
def read_properties(self, filename, key, dflt, is_regexp):
config = StringIO()
current_cfg_file = open(to_bytes(filename, errors='surrogate_or_strict'), 'rb')
config.write(u'[java_properties]\n' + to_text(current_cfg_file.read(), errors='surrogate_or_strict'))
config.seek(0, os.SEEK_SET)
self.cp.readfp(config)
return self.get_value(key, 'java_properties', dflt, is_regexp)
def read_ini(self, filename, key, section, dflt, is_regexp):
self.cp.readfp(open(to_bytes(filename, errors='surrogate_or_strict')))
return self.get_value(key, section, dflt, is_regexp)
def get_value(self, key, section, dflt, is_regexp):
# Retrieve all values from a section using a regexp
if is_regexp:
return [v for k, v in self.cp.items(section) if re.match(key, k)]
value = None
# Retrieve a single value
try:
value = self.cp.get(section, key)
except configparser.NoOptionError:
return dflt
return value
def run(self, terms, variables=None, **kwargs):
basedir = self.get_basedir(variables)
self.basedir = basedir
self.cp = configparser.ConfigParser()
ret = []
for term in terms:
params = _parse_params(term)
key = params[0]
paramvals = {
'file' : 'ansible.ini',
're' : False,
'default' : None,
'section' : "global",
'type' : "ini",
}
# parameters specified?
try:
for param in params[1:]:
name, value = param.split('=')
assert(name in paramvals)
paramvals[name] = value
except (ValueError, AssertionError) as e:
raise AnsibleError(e)
path = self.find_file_in_search_path(variables, 'files', paramvals['file'])
if paramvals['type'] == "properties":
var = self.read_properties(path, key, paramvals['default'], paramvals['re'])
else:
var = self.read_ini(path, key, paramvals['section'], paramvals['default'], paramvals['re'])
if var is not None:
if type(var) is list:
for v in var:
ret.append(v)
else:
ret.append(var)
return ret
| gpl-3.0 | -1,703,833,748,818,080,000 | -4,466,033,317,497,300,000 | 32.693548 | 109 | 0.586166 | false |
shaistaansari/django | django/contrib/staticfiles/handlers.py | 581 | 2328 | from django.conf import settings
from django.contrib.staticfiles import utils
from django.contrib.staticfiles.views import serve
from django.core.handlers.wsgi import WSGIHandler, get_path_info
from django.utils.six.moves.urllib.parse import urlparse
from django.utils.six.moves.urllib.request import url2pathname
class StaticFilesHandler(WSGIHandler):
"""
WSGI middleware that intercepts calls to the static files directory, as
defined by the STATIC_URL setting, and serves those files.
"""
# May be used to differentiate between handler types (e.g. in a
# request_finished signal)
handles_files = True
def __init__(self, application):
self.application = application
self.base_url = urlparse(self.get_base_url())
super(StaticFilesHandler, self).__init__()
def get_base_url(self):
utils.check_settings()
return settings.STATIC_URL
def _should_handle(self, path):
"""
Checks if the path should be handled. Ignores the path if:
* the host is provided as part of the base_url
* the request's path isn't under the media path (or equal)
"""
return path.startswith(self.base_url[2]) and not self.base_url[1]
def file_path(self, url):
"""
Returns the relative path to the media file on disk for the given URL.
"""
relative_url = url[len(self.base_url[2]):]
return url2pathname(relative_url)
def serve(self, request):
"""
Actually serves the request path.
"""
return serve(request, self.file_path(request.path), insecure=True)
def get_response(self, request):
from django.http import Http404
if self._should_handle(request.path):
try:
return self.serve(request)
except Http404 as e:
if settings.DEBUG:
from django.views import debug
return debug.technical_404_response(request, e)
return super(StaticFilesHandler, self).get_response(request)
def __call__(self, environ, start_response):
if not self._should_handle(get_path_info(environ)):
return self.application(environ, start_response)
return super(StaticFilesHandler, self).__call__(environ, start_response)
| bsd-3-clause | 2,438,573,436,390,772,000 | 2,283,489,281,853,570,600 | 35.375 | 80 | 0.649914 | false |
dscho/hg | hgext/zeroconf/Zeroconf.py | 1 | 58442 | from __future__ import absolute_import, print_function
""" Multicast DNS Service Discovery for Python, v0.12
Copyright (C) 2003, Paul Scott-Murphy
This module provides a framework for the use of DNS Service Discovery
using IP multicast. It has been tested against the JRendezvous
implementation from <a href="http://strangeberry.com">StrangeBerry</a>,
and against the mDNSResponder from Mac OS X 10.3.8.
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, see
<http://www.gnu.org/licenses/>.
"""
"""0.12 update - allow selection of binding interface
typo fix - Thanks A. M. Kuchlingi
removed all use of word 'Rendezvous' - this is an API change"""
"""0.11 update - correction to comments for addListener method
support for new record types seen from OS X
- IPv6 address
- hostinfo
ignore unknown DNS record types
fixes to name decoding
works alongside other processes using port 5353 (e.g. Mac OS X)
tested against Mac OS X 10.3.2's mDNSResponder
corrections to removal of list entries for service browser"""
"""0.10 update - Jonathon Paisley contributed these corrections:
always multicast replies, even when query is unicast
correct a pointer encoding problem
can now write records in any order
traceback shown on failure
better TXT record parsing
server is now separate from name
can cancel a service browser
modified some unit tests to accommodate these changes"""
"""0.09 update - remove all records on service unregistration
fix DOS security problem with readName"""
"""0.08 update - changed licensing to LGPL"""
"""0.07 update - faster shutdown on engine
pointer encoding of outgoing names
ServiceBrowser now works
new unit tests"""
"""0.06 update - small improvements with unit tests
added defined exception types
new style objects
fixed hostname/interface problem
fixed socket timeout problem
fixed addServiceListener() typo bug
using select() for socket reads
tested on Debian unstable with Python 2.2.2"""
"""0.05 update - ensure case insensitivity on domain names
support for unicast DNS queries"""
"""0.04 update - added some unit tests
added __ne__ adjuncts where required
ensure names end in '.local.'
timeout on receiving socket for clean shutdown"""
__author__ = "Paul Scott-Murphy"
__email__ = "paul at scott dash murphy dot com"
__version__ = "0.12"
import itertools
import select
import socket
import string
import struct
import threading
import time
import traceback
__all__ = ["Zeroconf", "ServiceInfo", "ServiceBrowser"]
# hook for threads
globals()['_GLOBAL_DONE'] = 0
# Some timing constants
_UNREGISTER_TIME = 125
_CHECK_TIME = 175
_REGISTER_TIME = 225
_LISTENER_TIME = 200
_BROWSER_TIME = 500
# Some DNS constants
_MDNS_ADDR = '224.0.0.251'
_MDNS_PORT = 5353
_DNS_PORT = 53
_DNS_TTL = 60 * 60 # one hour default TTL
_MAX_MSG_TYPICAL = 1460 # unused
_MAX_MSG_ABSOLUTE = 8972
_FLAGS_QR_MASK = 0x8000 # query response mask
_FLAGS_QR_QUERY = 0x0000 # query
_FLAGS_QR_RESPONSE = 0x8000 # response
_FLAGS_AA = 0x0400 # Authoritative answer
_FLAGS_TC = 0x0200 # Truncated
_FLAGS_RD = 0x0100 # Recursion desired
_FLAGS_RA = 0x8000 # Recursion available
_FLAGS_Z = 0x0040 # Zero
_FLAGS_AD = 0x0020 # Authentic data
_FLAGS_CD = 0x0010 # Checking disabled
_CLASS_IN = 1
_CLASS_CS = 2
_CLASS_CH = 3
_CLASS_HS = 4
_CLASS_NONE = 254
_CLASS_ANY = 255
_CLASS_MASK = 0x7FFF
_CLASS_UNIQUE = 0x8000
_TYPE_A = 1
_TYPE_NS = 2
_TYPE_MD = 3
_TYPE_MF = 4
_TYPE_CNAME = 5
_TYPE_SOA = 6
_TYPE_MB = 7
_TYPE_MG = 8
_TYPE_MR = 9
_TYPE_NULL = 10
_TYPE_WKS = 11
_TYPE_PTR = 12
_TYPE_HINFO = 13
_TYPE_MINFO = 14
_TYPE_MX = 15
_TYPE_TXT = 16
_TYPE_AAAA = 28
_TYPE_SRV = 33
_TYPE_ANY = 255
# Mapping constants to names
_CLASSES = { _CLASS_IN : "in",
_CLASS_CS : "cs",
_CLASS_CH : "ch",
_CLASS_HS : "hs",
_CLASS_NONE : "none",
_CLASS_ANY : "any" }
_TYPES = { _TYPE_A : "a",
_TYPE_NS : "ns",
_TYPE_MD : "md",
_TYPE_MF : "mf",
_TYPE_CNAME : "cname",
_TYPE_SOA : "soa",
_TYPE_MB : "mb",
_TYPE_MG : "mg",
_TYPE_MR : "mr",
_TYPE_NULL : "null",
_TYPE_WKS : "wks",
_TYPE_PTR : "ptr",
_TYPE_HINFO : "hinfo",
_TYPE_MINFO : "minfo",
_TYPE_MX : "mx",
_TYPE_TXT : "txt",
_TYPE_AAAA : "quada",
_TYPE_SRV : "srv",
_TYPE_ANY : "any" }
# utility functions
def currentTimeMillis():
"""Current system time in milliseconds"""
return time.time() * 1000
# Exceptions
class NonLocalNameException(Exception):
pass
class NonUniqueNameException(Exception):
pass
class NamePartTooLongException(Exception):
pass
class AbstractMethodException(Exception):
pass
class BadTypeInNameException(Exception):
pass
class BadDomainName(Exception):
def __init__(self, pos):
Exception.__init__(self, "at position %s" % pos)
class BadDomainNameCircular(BadDomainName):
pass
# implementation classes
class DNSEntry(object):
"""A DNS entry"""
def __init__(self, name, type, clazz):
self.key = string.lower(name)
self.name = name
self.type = type
self.clazz = clazz & _CLASS_MASK
self.unique = (clazz & _CLASS_UNIQUE) != 0
def __eq__(self, other):
"""Equality test on name, type, and class"""
if isinstance(other, DNSEntry):
return (self.name == other.name and self.type == other.type and
self.clazz == other.clazz)
return 0
def __ne__(self, other):
"""Non-equality test"""
return not self.__eq__(other)
def getClazz(self, clazz):
"""Class accessor"""
try:
return _CLASSES[clazz]
except KeyError:
return "?(%s)" % (clazz)
def getType(self, type):
"""Type accessor"""
try:
return _TYPES[type]
except KeyError:
return "?(%s)" % (type)
def toString(self, hdr, other):
"""String representation with additional information"""
result = ("%s[%s,%s" %
(hdr, self.getType(self.type), self.getClazz(self.clazz)))
if self.unique:
result += "-unique,"
else:
result += ","
result += self.name
if other is not None:
result += ",%s]" % (other)
else:
result += "]"
return result
class DNSQuestion(DNSEntry):
"""A DNS question entry"""
def __init__(self, name, type, clazz):
if not name.endswith(".local."):
raise NonLocalNameException(name)
DNSEntry.__init__(self, name, type, clazz)
def answeredBy(self, rec):
"""Returns true if the question is answered by the record"""
return (self.clazz == rec.clazz and
(self.type == rec.type or self.type == _TYPE_ANY) and
self.name == rec.name)
def __repr__(self):
"""String representation"""
return DNSEntry.toString(self, "question", None)
class DNSRecord(DNSEntry):
"""A DNS record - like a DNS entry, but has a TTL"""
def __init__(self, name, type, clazz, ttl):
DNSEntry.__init__(self, name, type, clazz)
self.ttl = ttl
self.created = currentTimeMillis()
def __eq__(self, other):
"""Tests equality as per DNSRecord"""
if isinstance(other, DNSRecord):
return DNSEntry.__eq__(self, other)
return 0
def suppressedBy(self, msg):
"""Returns true if any answer in a message can suffice for the
information held in this record."""
for record in msg.answers:
if self.suppressedByAnswer(record):
return 1
return 0
def suppressedByAnswer(self, other):
"""Returns true if another record has same name, type and class,
and if its TTL is at least half of this record's."""
if self == other and other.ttl > (self.ttl / 2):
return 1
return 0
def getExpirationTime(self, percent):
"""Returns the time at which this record will have expired
by a certain percentage."""
return self.created + (percent * self.ttl * 10)
def getRemainingTTL(self, now):
"""Returns the remaining TTL in seconds."""
return max(0, (self.getExpirationTime(100) - now) / 1000)
def isExpired(self, now):
"""Returns true if this record has expired."""
return self.getExpirationTime(100) <= now
def isStale(self, now):
"""Returns true if this record is at least half way expired."""
return self.getExpirationTime(50) <= now
def resetTTL(self, other):
"""Sets this record's TTL and created time to that of
another record."""
self.created = other.created
self.ttl = other.ttl
def write(self, out):
"""Abstract method"""
raise AbstractMethodException
def toString(self, other):
"""String representation with additional information"""
arg = ("%s/%s,%s" %
(self.ttl, self.getRemainingTTL(currentTimeMillis()), other))
return DNSEntry.toString(self, "record", arg)
class DNSAddress(DNSRecord):
"""A DNS address record"""
def __init__(self, name, type, clazz, ttl, address):
DNSRecord.__init__(self, name, type, clazz, ttl)
self.address = address
def write(self, out):
"""Used in constructing an outgoing packet"""
out.writeString(self.address, len(self.address))
def __eq__(self, other):
"""Tests equality on address"""
if isinstance(other, DNSAddress):
return self.address == other.address
return 0
def __repr__(self):
"""String representation"""
try:
return socket.inet_ntoa(self.address)
except Exception:
return self.address
class DNSHinfo(DNSRecord):
"""A DNS host information record"""
def __init__(self, name, type, clazz, ttl, cpu, os):
DNSRecord.__init__(self, name, type, clazz, ttl)
self.cpu = cpu
self.os = os
def write(self, out):
"""Used in constructing an outgoing packet"""
out.writeString(self.cpu, len(self.cpu))
out.writeString(self.os, len(self.os))
def __eq__(self, other):
"""Tests equality on cpu and os"""
if isinstance(other, DNSHinfo):
return self.cpu == other.cpu and self.os == other.os
return 0
def __repr__(self):
"""String representation"""
return self.cpu + " " + self.os
class DNSPointer(DNSRecord):
"""A DNS pointer record"""
def __init__(self, name, type, clazz, ttl, alias):
DNSRecord.__init__(self, name, type, clazz, ttl)
self.alias = alias
def write(self, out):
"""Used in constructing an outgoing packet"""
out.writeName(self.alias)
def __eq__(self, other):
"""Tests equality on alias"""
if isinstance(other, DNSPointer):
return self.alias == other.alias
return 0
def __repr__(self):
"""String representation"""
return self.toString(self.alias)
class DNSText(DNSRecord):
"""A DNS text record"""
def __init__(self, name, type, clazz, ttl, text):
DNSRecord.__init__(self, name, type, clazz, ttl)
self.text = text
def write(self, out):
"""Used in constructing an outgoing packet"""
out.writeString(self.text, len(self.text))
def __eq__(self, other):
"""Tests equality on text"""
if isinstance(other, DNSText):
return self.text == other.text
return 0
def __repr__(self):
"""String representation"""
if len(self.text) > 10:
return self.toString(self.text[:7] + "...")
else:
return self.toString(self.text)
class DNSService(DNSRecord):
"""A DNS service record"""
def __init__(self, name, type, clazz, ttl, priority, weight, port, server):
DNSRecord.__init__(self, name, type, clazz, ttl)
self.priority = priority
self.weight = weight
self.port = port
self.server = server
def write(self, out):
"""Used in constructing an outgoing packet"""
out.writeShort(self.priority)
out.writeShort(self.weight)
out.writeShort(self.port)
out.writeName(self.server)
def __eq__(self, other):
"""Tests equality on priority, weight, port and server"""
if isinstance(other, DNSService):
return (self.priority == other.priority and
self.weight == other.weight and
self.port == other.port and
self.server == other.server)
return 0
def __repr__(self):
"""String representation"""
return self.toString("%s:%s" % (self.server, self.port))
class DNSIncoming(object):
"""Object representation of an incoming DNS packet"""
def __init__(self, data):
"""Constructor from string holding bytes of packet"""
self.offset = 0
self.data = data
self.questions = []
self.answers = []
self.numquestions = 0
self.numanswers = 0
self.numauthorities = 0
self.numadditionals = 0
self.readHeader()
self.readQuestions()
self.readOthers()
def readHeader(self):
"""Reads header portion of packet"""
format = '!HHHHHH'
length = struct.calcsize(format)
info = struct.unpack(format,
self.data[self.offset:self.offset + length])
self.offset += length
self.id = info[0]
self.flags = info[1]
self.numquestions = info[2]
self.numanswers = info[3]
self.numauthorities = info[4]
self.numadditionals = info[5]
def readQuestions(self):
"""Reads questions section of packet"""
format = '!HH'
length = struct.calcsize(format)
for i in range(0, self.numquestions):
name = self.readName()
info = struct.unpack(format,
self.data[self.offset:self.offset + length])
self.offset += length
try:
question = DNSQuestion(name, info[0], info[1])
self.questions.append(question)
except NonLocalNameException:
pass
def readInt(self):
"""Reads an integer from the packet"""
format = '!I'
length = struct.calcsize(format)
info = struct.unpack(format,
self.data[self.offset:self.offset + length])
self.offset += length
return info[0]
def readCharacterString(self):
"""Reads a character string from the packet"""
length = ord(self.data[self.offset])
self.offset += 1
return self.readString(length)
def readString(self, len):
"""Reads a string of a given length from the packet"""
format = '!' + str(len) + 's'
length = struct.calcsize(format)
info = struct.unpack(format,
self.data[self.offset:self.offset + length])
self.offset += length
return info[0]
def readUnsignedShort(self):
"""Reads an unsigned short from the packet"""
format = '!H'
length = struct.calcsize(format)
info = struct.unpack(format,
self.data[self.offset:self.offset + length])
self.offset += length
return info[0]
def readOthers(self):
"""Reads answers, authorities and additionals section of the packet"""
format = '!HHiH'
length = struct.calcsize(format)
n = self.numanswers + self.numauthorities + self.numadditionals
for i in range(0, n):
domain = self.readName()
info = struct.unpack(format,
self.data[self.offset:self.offset + length])
self.offset += length
rec = None
if info[0] == _TYPE_A:
rec = DNSAddress(domain, info[0], info[1], info[2],
self.readString(4))
elif info[0] == _TYPE_CNAME or info[0] == _TYPE_PTR:
rec = DNSPointer(domain, info[0], info[1], info[2],
self.readName())
elif info[0] == _TYPE_TXT:
rec = DNSText(domain, info[0], info[1], info[2],
self.readString(info[3]))
elif info[0] == _TYPE_SRV:
rec = DNSService(domain, info[0], info[1], info[2],
self.readUnsignedShort(),
self.readUnsignedShort(),
self.readUnsignedShort(),
self.readName())
elif info[0] == _TYPE_HINFO:
rec = DNSHinfo(domain, info[0], info[1], info[2],
self.readCharacterString(),
self.readCharacterString())
elif info[0] == _TYPE_AAAA:
rec = DNSAddress(domain, info[0], info[1], info[2],
self.readString(16))
else:
# Try to ignore types we don't know about
# this may mean the rest of the name is
# unable to be parsed, and may show errors
# so this is left for debugging. New types
# encountered need to be parsed properly.
#
#print "UNKNOWN TYPE = " + str(info[0])
#raise BadTypeInNameException
self.offset += info[3]
if rec is not None:
self.answers.append(rec)
def isQuery(self):
"""Returns true if this is a query"""
return (self.flags & _FLAGS_QR_MASK) == _FLAGS_QR_QUERY
def isResponse(self):
"""Returns true if this is a response"""
return (self.flags & _FLAGS_QR_MASK) == _FLAGS_QR_RESPONSE
def readUTF(self, offset, len):
"""Reads a UTF-8 string of a given length from the packet"""
return self.data[offset:offset + len].decode('utf-8')
def readName(self):
"""Reads a domain name from the packet"""
result = ''
off = self.offset
next = -1
first = off
while True:
len = ord(self.data[off])
off += 1
if len == 0:
break
t = len & 0xC0
if t == 0x00:
result = ''.join((result, self.readUTF(off, len) + '.'))
off += len
elif t == 0xC0:
if next < 0:
next = off + 1
off = ((len & 0x3F) << 8) | ord(self.data[off])
if off >= first:
raise BadDomainNameCircular(off)
first = off
else:
raise BadDomainName(off)
if next >= 0:
self.offset = next
else:
self.offset = off
return result
class DNSOutgoing(object):
"""Object representation of an outgoing packet"""
def __init__(self, flags, multicast=1):
self.finished = 0
self.id = 0
self.multicast = multicast
self.flags = flags
self.names = {}
self.data = []
self.size = 12
self.questions = []
self.answers = []
self.authorities = []
self.additionals = []
def addQuestion(self, record):
"""Adds a question"""
self.questions.append(record)
def addAnswer(self, inp, record):
"""Adds an answer"""
if not record.suppressedBy(inp):
self.addAnswerAtTime(record, 0)
def addAnswerAtTime(self, record, now):
"""Adds an answer if if does not expire by a certain time"""
if record is not None:
if now == 0 or not record.isExpired(now):
self.answers.append((record, now))
def addAuthoritativeAnswer(self, record):
"""Adds an authoritative answer"""
self.authorities.append(record)
def addAdditionalAnswer(self, record):
"""Adds an additional answer"""
self.additionals.append(record)
def writeByte(self, value):
"""Writes a single byte to the packet"""
format = '!c'
self.data.append(struct.pack(format, chr(value)))
self.size += 1
def insertShort(self, index, value):
"""Inserts an unsigned short in a certain position in the packet"""
format = '!H'
self.data.insert(index, struct.pack(format, value))
self.size += 2
def writeShort(self, value):
"""Writes an unsigned short to the packet"""
format = '!H'
self.data.append(struct.pack(format, value))
self.size += 2
def writeInt(self, value):
"""Writes an unsigned integer to the packet"""
format = '!I'
self.data.append(struct.pack(format, int(value)))
self.size += 4
def writeString(self, value, length):
"""Writes a string to the packet"""
format = '!' + str(length) + 's'
self.data.append(struct.pack(format, value))
self.size += length
def writeUTF(self, s):
"""Writes a UTF-8 string of a given length to the packet"""
utfstr = s.encode('utf-8')
length = len(utfstr)
if length > 64:
raise NamePartTooLongException
self.writeByte(length)
self.writeString(utfstr, length)
def writeName(self, name):
"""Writes a domain name to the packet"""
try:
# Find existing instance of this name in packet
#
index = self.names[name]
except KeyError:
# No record of this name already, so write it
# out as normal, recording the location of the name
# for future pointers to it.
#
self.names[name] = self.size
parts = name.split('.')
if parts[-1] == '':
parts = parts[:-1]
for part in parts:
self.writeUTF(part)
self.writeByte(0)
return
# An index was found, so write a pointer to it
#
self.writeByte((index >> 8) | 0xC0)
self.writeByte(index)
def writeQuestion(self, question):
"""Writes a question to the packet"""
self.writeName(question.name)
self.writeShort(question.type)
self.writeShort(question.clazz)
def writeRecord(self, record, now):
"""Writes a record (answer, authoritative answer, additional) to
the packet"""
self.writeName(record.name)
self.writeShort(record.type)
if record.unique and self.multicast:
self.writeShort(record.clazz | _CLASS_UNIQUE)
else:
self.writeShort(record.clazz)
if now == 0:
self.writeInt(record.ttl)
else:
self.writeInt(record.getRemainingTTL(now))
index = len(self.data)
# Adjust size for the short we will write before this record
#
self.size += 2
record.write(self)
self.size -= 2
length = len(''.join(self.data[index:]))
self.insertShort(index, length) # Here is the short we adjusted for
def packet(self):
"""Returns a string containing the packet's bytes
No further parts should be added to the packet once this
is done."""
if not self.finished:
self.finished = 1
for question in self.questions:
self.writeQuestion(question)
for answer, time_ in self.answers:
self.writeRecord(answer, time_)
for authority in self.authorities:
self.writeRecord(authority, 0)
for additional in self.additionals:
self.writeRecord(additional, 0)
self.insertShort(0, len(self.additionals))
self.insertShort(0, len(self.authorities))
self.insertShort(0, len(self.answers))
self.insertShort(0, len(self.questions))
self.insertShort(0, self.flags)
if self.multicast:
self.insertShort(0, 0)
else:
self.insertShort(0, self.id)
return ''.join(self.data)
class DNSCache(object):
"""A cache of DNS entries"""
def __init__(self):
self.cache = {}
def add(self, entry):
"""Adds an entry"""
try:
list = self.cache[entry.key]
except KeyError:
list = self.cache[entry.key] = []
list.append(entry)
def remove(self, entry):
"""Removes an entry"""
try:
list = self.cache[entry.key]
list.remove(entry)
except KeyError:
pass
def get(self, entry):
"""Gets an entry by key. Will return None if there is no
matching entry."""
try:
list = self.cache[entry.key]
return list[list.index(entry)]
except (KeyError, ValueError):
return None
def getByDetails(self, name, type, clazz):
"""Gets an entry by details. Will return None if there is
no matching entry."""
entry = DNSEntry(name, type, clazz)
return self.get(entry)
def entriesWithName(self, name):
"""Returns a list of entries whose key matches the name."""
try:
return self.cache[name]
except KeyError:
return []
def entries(self):
"""Returns a list of all entries"""
try:
return list(itertools.chain.from_iterable(self.cache.values()))
except Exception:
return []
class Engine(threading.Thread):
"""An engine wraps read access to sockets, allowing objects that
need to receive data from sockets to be called back when the
sockets are ready.
A reader needs a handle_read() method, which is called when the socket
it is interested in is ready for reading.
Writers are not implemented here, because we only send short
packets.
"""
def __init__(self, zeroconf):
threading.Thread.__init__(self)
self.zeroconf = zeroconf
self.readers = {} # maps socket to reader
self.timeout = 5
self.condition = threading.Condition()
self.start()
def run(self):
while not globals()['_GLOBAL_DONE']:
rs = self.getReaders()
if len(rs) == 0:
# No sockets to manage, but we wait for the timeout
# or addition of a socket
#
self.condition.acquire()
self.condition.wait(self.timeout)
self.condition.release()
else:
try:
rr, wr, er = select.select(rs, [], [], self.timeout)
for sock in rr:
try:
self.readers[sock].handle_read()
except Exception:
if not globals()['_GLOBAL_DONE']:
traceback.print_exc()
except Exception:
pass
def getReaders(self):
self.condition.acquire()
result = self.readers.keys()
self.condition.release()
return result
def addReader(self, reader, socket):
self.condition.acquire()
self.readers[socket] = reader
self.condition.notify()
self.condition.release()
def delReader(self, socket):
self.condition.acquire()
del self.readers[socket]
self.condition.notify()
self.condition.release()
def notify(self):
self.condition.acquire()
self.condition.notify()
self.condition.release()
class Listener(object):
"""A Listener is used by this module to listen on the multicast
group to which DNS messages are sent, allowing the implementation
to cache information as it arrives.
It requires registration with an Engine object in order to have
the read() method called when a socket is available for reading."""
def __init__(self, zeroconf):
self.zeroconf = zeroconf
self.zeroconf.engine.addReader(self, self.zeroconf.socket)
def handle_read(self):
data, (addr, port) = self.zeroconf.socket.recvfrom(_MAX_MSG_ABSOLUTE)
self.data = data
msg = DNSIncoming(data)
if msg.isQuery():
# Always multicast responses
#
if port == _MDNS_PORT:
self.zeroconf.handleQuery(msg, _MDNS_ADDR, _MDNS_PORT)
# If it's not a multicast query, reply via unicast
# and multicast
#
elif port == _DNS_PORT:
self.zeroconf.handleQuery(msg, addr, port)
self.zeroconf.handleQuery(msg, _MDNS_ADDR, _MDNS_PORT)
else:
self.zeroconf.handleResponse(msg)
class Reaper(threading.Thread):
"""A Reaper is used by this module to remove cache entries that
have expired."""
def __init__(self, zeroconf):
threading.Thread.__init__(self)
self.zeroconf = zeroconf
self.start()
def run(self):
while True:
self.zeroconf.wait(10 * 1000)
if globals()['_GLOBAL_DONE']:
return
now = currentTimeMillis()
for record in self.zeroconf.cache.entries():
if record.isExpired(now):
self.zeroconf.updateRecord(now, record)
self.zeroconf.cache.remove(record)
class ServiceBrowser(threading.Thread):
"""Used to browse for a service of a specific type.
The listener object will have its addService() and
removeService() methods called when this browser
discovers changes in the services availability."""
def __init__(self, zeroconf, type, listener):
"""Creates a browser for a specific type"""
threading.Thread.__init__(self)
self.zeroconf = zeroconf
self.type = type
self.listener = listener
self.services = {}
self.nexttime = currentTimeMillis()
self.delay = _BROWSER_TIME
self.list = []
self.done = 0
self.zeroconf.addListener(self, DNSQuestion(self.type, _TYPE_PTR,
_CLASS_IN))
self.start()
def updateRecord(self, zeroconf, now, record):
"""Callback invoked by Zeroconf when new information arrives.
Updates information required by browser in the Zeroconf cache."""
if record.type == _TYPE_PTR and record.name == self.type:
expired = record.isExpired(now)
try:
oldrecord = self.services[record.alias.lower()]
if not expired:
oldrecord.resetTTL(record)
else:
del self.services[record.alias.lower()]
callback = (lambda x:
self.listener.removeService(x, self.type, record.alias))
self.list.append(callback)
return
except Exception:
if not expired:
self.services[record.alias.lower()] = record
callback = (lambda x:
self.listener.addService(x, self.type, record.alias))
self.list.append(callback)
expires = record.getExpirationTime(75)
if expires < self.nexttime:
self.nexttime = expires
def cancel(self):
self.done = 1
self.zeroconf.notifyAll()
def run(self):
while True:
event = None
now = currentTimeMillis()
if len(self.list) == 0 and self.nexttime > now:
self.zeroconf.wait(self.nexttime - now)
if globals()['_GLOBAL_DONE'] or self.done:
return
now = currentTimeMillis()
if self.nexttime <= now:
out = DNSOutgoing(_FLAGS_QR_QUERY)
out.addQuestion(DNSQuestion(self.type, _TYPE_PTR, _CLASS_IN))
for record in self.services.values():
if not record.isExpired(now):
out.addAnswerAtTime(record, now)
self.zeroconf.send(out)
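                # back off exponentially between repeated queries, capped
                # at 20 seconds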
self.nexttime = now + self.delay
self.delay = min(20 * 1000, self.delay * 2)
if len(self.list) > 0:
event = self.list.pop(0)
if event is not None:
event(self.zeroconf)
class ServiceInfo(object):
"""Service information"""
def __init__(self, type, name, address=None, port=None, weight=0,
priority=0, properties=None, server=None):
"""Create a service description.
type: fully qualified service type name
name: fully qualified service name
address: IP address as unsigned short, network byte order
port: port that the service runs on
weight: weight of the service
priority: priority of the service
properties: dictionary of properties (or a string holding the bytes for
the text field)
server: fully qualified name for service host (defaults to name)"""
if not name.endswith(type):
raise BadTypeInNameException
self.type = type
self.name = name
self.address = address
self.port = port
self.weight = weight
self.priority = priority
if server:
self.server = server
else:
self.server = name
self.setProperties(properties)
def setProperties(self, properties):
"""Sets properties and text of this info from a dictionary"""
if isinstance(properties, dict):
self.properties = properties
list = []
result = ''
for key in properties:
value = properties[key]
if value is None:
suffix = ''
elif isinstance(value, str):
suffix = value
elif isinstance(value, int):
if value:
suffix = 'true'
else:
suffix = 'false'
else:
suffix = ''
list.append('='.join((key, suffix)))
for item in list:
result = ''.join((result, struct.pack('!c', chr(len(item))),
item))
self.text = result
else:
self.text = properties
def setText(self, text):
"""Sets properties and text given a text field"""
self.text = text
try:
result = {}
end = len(text)
index = 0
strs = []
while index < end:
length = ord(text[index])
index += 1
strs.append(text[index:index + length])
index += length
for s in strs:
eindex = s.find('=')
if eindex == -1:
# No equals sign at all
key = s
value = 0
else:
key = s[:eindex]
value = s[eindex + 1:]
if value == 'true':
value = 1
elif value == 'false' or not value:
value = 0
# Only update non-existent properties
if key and result.get(key) is None:
result[key] = value
self.properties = result
except Exception:
traceback.print_exc()
self.properties = None
def getType(self):
"""Type accessor"""
return self.type
def getName(self):
"""Name accessor"""
if self.type is not None and self.name.endswith("." + self.type):
return self.name[:len(self.name) - len(self.type) - 1]
return self.name
def getAddress(self):
"""Address accessor"""
return self.address
def getPort(self):
"""Port accessor"""
return self.port
def getPriority(self):
"""Priority accessor"""
return self.priority
def getWeight(self):
"""Weight accessor"""
return self.weight
def getProperties(self):
"""Properties accessor"""
return self.properties
def getText(self):
"""Text accessor"""
return self.text
def getServer(self):
"""Server accessor"""
return self.server
def updateRecord(self, zeroconf, now, record):
"""Updates service information from a DNS record"""
if record is not None and not record.isExpired(now):
if record.type == _TYPE_A:
#if record.name == self.name:
if record.name == self.server:
self.address = record.address
elif record.type == _TYPE_SRV:
if record.name == self.name:
self.server = record.server
self.port = record.port
self.weight = record.weight
self.priority = record.priority
#self.address = None
self.updateRecord(zeroconf, now,
zeroconf.cache.getByDetails(self.server,
_TYPE_A, _CLASS_IN))
elif record.type == _TYPE_TXT:
if record.name == self.name:
self.setText(record.text)
def request(self, zeroconf, timeout):
"""Returns true if the service could be discovered on the
network, and updates this object with details discovered.
"""
now = currentTimeMillis()
delay = _LISTENER_TIME
next = now + delay
last = now + timeout
result = 0
try:
zeroconf.addListener(self, DNSQuestion(self.name, _TYPE_ANY,
_CLASS_IN))
while (self.server is None or self.address is None or
self.text is None):
if last <= now:
return 0
if next <= now:
out = DNSOutgoing(_FLAGS_QR_QUERY)
out.addQuestion(DNSQuestion(self.name, _TYPE_SRV,
_CLASS_IN))
out.addAnswerAtTime(
zeroconf.cache.getByDetails(self.name,
_TYPE_SRV,
_CLASS_IN),
now)
out.addQuestion(DNSQuestion(self.name, _TYPE_TXT,
_CLASS_IN))
out.addAnswerAtTime(
zeroconf.cache.getByDetails(self.name, _TYPE_TXT,
_CLASS_IN),
now)
if self.server is not None:
out.addQuestion(
DNSQuestion(self.server, _TYPE_A, _CLASS_IN))
out.addAnswerAtTime(
zeroconf.cache.getByDetails(self.server, _TYPE_A,
_CLASS_IN),
now)
zeroconf.send(out)
next = now + delay
delay = delay * 2
zeroconf.wait(min(next, last) - now)
now = currentTimeMillis()
result = 1
finally:
zeroconf.removeListener(self)
return result
def __eq__(self, other):
"""Tests equality of service name"""
if isinstance(other, ServiceInfo):
return other.name == self.name
return 0
def __ne__(self, other):
"""Non-equality test"""
return not self.__eq__(other)
def __repr__(self):
"""String representation"""
result = ("service[%s,%s:%s," %
(self.name, socket.inet_ntoa(self.getAddress()), self.port))
if self.text is None:
result += "None"
else:
if len(self.text) < 20:
result += self.text
else:
result += self.text[:17] + "..."
result += "]"
return result
class Zeroconf(object):
"""Implementation of Zeroconf Multicast DNS Service Discovery
Supports registration, unregistration, queries and browsing.
"""
def __init__(self, bindaddress=None):
"""Creates an instance of the Zeroconf class, establishing
multicast communications, listening and reaping threads."""
globals()['_GLOBAL_DONE'] = 0
if bindaddress is None:
self.intf = socket.gethostbyname(socket.gethostname())
else:
self.intf = bindaddress
self.group = ('', _MDNS_PORT)
self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
try:
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
except Exception:
# SO_REUSEADDR should be equivalent to SO_REUSEPORT for
# multicast UDP sockets (p 731, "TCP/IP Illustrated,
# Volume 2"), but some BSD-derived systems require
# SO_REUSEPORT to be specified explicitly. Also, not all
# versions of Python have SO_REUSEPORT available. So
# if you're on a BSD-based system, and haven't upgraded
# to Python 2.3 yet, you may find this library doesn't
# work as expected.
#
pass
self.socket.setsockopt(socket.SOL_IP, socket.IP_MULTICAST_TTL, "\xff")
self.socket.setsockopt(socket.SOL_IP, socket.IP_MULTICAST_LOOP, "\x01")
try:
self.socket.bind(self.group)
except Exception:
# Some versions of linux raise an exception even though
# SO_REUSEADDR and SO_REUSEPORT have been set, so ignore it
pass
self.socket.setsockopt(socket.SOL_IP, socket.IP_ADD_MEMBERSHIP,
socket.inet_aton(_MDNS_ADDR) + socket.inet_aton('0.0.0.0'))
self.listeners = []
self.browsers = []
self.services = {}
self.servicetypes = {}
self.cache = DNSCache()
self.condition = threading.Condition()
self.engine = Engine(self)
self.listener = Listener(self)
self.reaper = Reaper(self)
def isLoopback(self):
return self.intf.startswith("127.0.0.1")
def isLinklocal(self):
return self.intf.startswith("169.254.")
def wait(self, timeout):
"""Calling thread waits for a given number of milliseconds or
until notified."""
self.condition.acquire()
self.condition.wait(timeout / 1000)
self.condition.release()
def notifyAll(self):
"""Notifies all waiting threads"""
self.condition.acquire()
self.condition.notifyAll()
self.condition.release()
def getServiceInfo(self, type, name, timeout=3000):
"""Returns network's service information for a particular
name and type, or None if no service matches by the timeout,
which defaults to 3 seconds."""
info = ServiceInfo(type, name)
if info.request(self, timeout):
return info
return None
def addServiceListener(self, type, listener):
"""Adds a listener for a particular service type. This object
will then have its updateRecord method called when information
arrives for that type."""
self.removeServiceListener(listener)
self.browsers.append(ServiceBrowser(self, type, listener))
def removeServiceListener(self, listener):
"""Removes a listener from the set that is currently listening."""
for browser in self.browsers:
if browser.listener == listener:
browser.cancel()
del browser
def registerService(self, info, ttl=_DNS_TTL):
"""Registers service information to the network with a default TTL
of 60 seconds. Zeroconf will then respond to requests for
information for that service. The name of the service may be
changed if needed to make it unique on the network."""
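        # Registration announces the full DNS-SD record set -- PTR (type ->
        # instance name), SRV (instance -> host:port), TXT (properties) and
        # A (host -> address) -- three times, _REGISTER_TIME ms apart.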
self.checkService(info)
self.services[info.name.lower()] = info
if info.type in self.servicetypes:
self.servicetypes[info.type] += 1
else:
self.servicetypes[info.type] = 1
now = currentTimeMillis()
nexttime = now
i = 0
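        # Announce the new registration three times, _REGISTER_TIME ms apart,
        # in line with the usual mDNS practice of repeating announcements so
        # that peers which missed one still hear it.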
while i < 3:
if now < nexttime:
self.wait(nexttime - now)
now = currentTimeMillis()
continue
out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA)
out.addAnswerAtTime(DNSPointer(info.type, _TYPE_PTR,
_CLASS_IN, ttl, info.name), 0)
out.addAnswerAtTime(
DNSService(
info.name, _TYPE_SRV,
_CLASS_IN, ttl, info.priority, info.weight, info.port,
info.server),
0)
out.addAnswerAtTime(
DNSText(info.name, _TYPE_TXT, _CLASS_IN, ttl, info.text),
0)
if info.address:
out.addAnswerAtTime(DNSAddress(info.server, _TYPE_A,
_CLASS_IN, ttl, info.address), 0)
self.send(out)
i += 1
nexttime += _REGISTER_TIME
def unregisterService(self, info):
"""Unregister a service."""
try:
del self.services[info.name.lower()]
if self.servicetypes[info.type] > 1:
self.servicetypes[info.type] -= 1
else:
del self.servicetypes[info.type]
except KeyError:
pass
now = currentTimeMillis()
nexttime = now
i = 0
while i < 3:
if now < nexttime:
self.wait(nexttime - now)
now = currentTimeMillis()
continue
out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA)
out.addAnswerAtTime(
DNSPointer(info.type, _TYPE_PTR, _CLASS_IN, 0, info.name), 0)
out.addAnswerAtTime(
DNSService(info.name, _TYPE_SRV,
_CLASS_IN, 0, info.priority, info.weight, info.port,
                    info.server),
0)
out.addAnswerAtTime(DNSText(info.name, _TYPE_TXT,
_CLASS_IN, 0, info.text), 0)
if info.address:
out.addAnswerAtTime(DNSAddress(info.server, _TYPE_A,
_CLASS_IN, 0, info.address), 0)
self.send(out)
i += 1
nexttime += _UNREGISTER_TIME
def unregisterAllServices(self):
"""Unregister all registered services."""
if len(self.services) > 0:
now = currentTimeMillis()
nexttime = now
i = 0
while i < 3:
if now < nexttime:
self.wait(nexttime - now)
now = currentTimeMillis()
continue
out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA)
for info in self.services.values():
out.addAnswerAtTime(DNSPointer(info.type, _TYPE_PTR,
_CLASS_IN, 0, info.name), 0)
out.addAnswerAtTime(
DNSService(info.name, _TYPE_SRV,
_CLASS_IN, 0, info.priority, info.weight,
info.port, info.server),
0)
out.addAnswerAtTime(DNSText(info.name, _TYPE_TXT,
_CLASS_IN, 0, info.text), 0)
if info.address:
out.addAnswerAtTime(DNSAddress(info.server, _TYPE_A,
_CLASS_IN, 0, info.address), 0)
self.send(out)
i += 1
nexttime += _UNREGISTER_TIME
def checkService(self, info):
"""Checks the network for a unique service name, modifying the
ServiceInfo passed in if it is not unique."""
now = currentTimeMillis()
nexttime = now
i = 0
while i < 3:
for record in self.cache.entriesWithName(info.type):
if (record.type == _TYPE_PTR and not record.isExpired(now) and
record.alias == info.name):
                    if (info.name.find('.') < 0):
                        info.name = ("%s.[%s:%d].%s" %
                            (info.name, info.address, info.port, info.type))
self.checkService(info)
return
raise NonUniqueNameException
if now < nexttime:
self.wait(nexttime - now)
now = currentTimeMillis()
continue
out = DNSOutgoing(_FLAGS_QR_QUERY | _FLAGS_AA)
self.debug = out
out.addQuestion(DNSQuestion(info.type, _TYPE_PTR, _CLASS_IN))
out.addAuthoritativeAnswer(DNSPointer(info.type, _TYPE_PTR,
_CLASS_IN, _DNS_TTL, info.name))
self.send(out)
i += 1
nexttime += _CHECK_TIME
def addListener(self, listener, question):
"""Adds a listener for a given question. The listener will have
its updateRecord method called when information is available to
answer the question."""
now = currentTimeMillis()
self.listeners.append(listener)
if question is not None:
for record in self.cache.entriesWithName(question.name):
if question.answeredBy(record) and not record.isExpired(now):
listener.updateRecord(self, now, record)
self.notifyAll()
def removeListener(self, listener):
"""Removes a listener."""
try:
self.listeners.remove(listener)
self.notifyAll()
except Exception:
pass
def updateRecord(self, now, rec):
"""Used to notify listeners of new information that has updated
a record."""
for listener in self.listeners:
listener.updateRecord(self, now, rec)
self.notifyAll()
def handleResponse(self, msg):
"""Deal with incoming response packets. All answers
are held in the cache, and listeners are notified."""
now = currentTimeMillis()
for record in msg.answers:
expired = record.isExpired(now)
if record in self.cache.entries():
if expired:
self.cache.remove(record)
else:
entry = self.cache.get(record)
if entry is not None:
entry.resetTTL(record)
record = entry
else:
self.cache.add(record)
self.updateRecord(now, record)
def handleQuery(self, msg, addr, port):
"""Deal with incoming query packets. Provides a response if
possible."""
out = None
# Support unicast client responses
#
if port != _MDNS_PORT:
out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA, 0)
for question in msg.questions:
out.addQuestion(question)
for question in msg.questions:
if question.type == _TYPE_PTR:
if question.name == "_services._dns-sd._udp.local.":
for stype in self.servicetypes.keys():
if out is None:
out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA)
out.addAnswer(msg,
DNSPointer(
"_services._dns-sd._udp.local.",
_TYPE_PTR, _CLASS_IN,
_DNS_TTL, stype))
for service in self.services.values():
if question.name == service.type:
if out is None:
out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA)
out.addAnswer(msg, DNSPointer(service.type, _TYPE_PTR,
_CLASS_IN, _DNS_TTL, service.name))
else:
try:
if out is None:
out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA)
# Answer A record queries for any service addresses we know
if question.type == _TYPE_A or question.type == _TYPE_ANY:
for service in self.services.values():
if service.server == question.name.lower():
out.addAnswer(msg,
DNSAddress(question.name, _TYPE_A,
_CLASS_IN | _CLASS_UNIQUE,
_DNS_TTL, service.address))
service = self.services.get(question.name.lower(), None)
if not service: continue
if (question.type == _TYPE_SRV or
question.type == _TYPE_ANY):
out.addAnswer(msg,
DNSService(question.name, _TYPE_SRV,
_CLASS_IN | _CLASS_UNIQUE,
_DNS_TTL, service.priority,
service.weight, service.port,
service.server))
if (question.type == _TYPE_TXT or
question.type == _TYPE_ANY):
out.addAnswer(msg, DNSText(question.name, _TYPE_TXT,
_CLASS_IN | _CLASS_UNIQUE, _DNS_TTL, service.text))
if question.type == _TYPE_SRV:
out.addAdditionalAnswer(
DNSAddress(service.server, _TYPE_A,
_CLASS_IN | _CLASS_UNIQUE,
_DNS_TTL, service.address))
except Exception:
traceback.print_exc()
if out is not None and out.answers:
out.id = msg.id
self.send(out, addr, port)
def send(self, out, addr=_MDNS_ADDR, port=_MDNS_PORT):
"""Sends an outgoing packet."""
# This is a quick test to see if we can parse the packets we generate
#temp = DNSIncoming(out.packet())
try:
self.socket.sendto(out.packet(), 0, (addr, port))
except Exception:
# Ignore this, it may be a temporary loss of network connection
pass
def close(self):
"""Ends the background threads, and prevent this instance from
servicing further queries."""
if globals()['_GLOBAL_DONE'] == 0:
globals()['_GLOBAL_DONE'] = 1
self.notifyAll()
self.engine.notify()
self.unregisterAllServices()
self.socket.setsockopt(socket.SOL_IP, socket.IP_DROP_MEMBERSHIP,
socket.inet_aton(_MDNS_ADDR) + socket.inet_aton('0.0.0.0'))
self.socket.close()
# Test a few module features, including service registration, service
# query (for ZOE), and service unregistration.
if __name__ == '__main__':
print("Multicast DNS Service Discovery for Python, version", __version__)
r = Zeroconf()
print("1. Testing registration of a service...")
desc = {'version':'0.10','a':'test value', 'b':'another value'}
info = ServiceInfo("_http._tcp.local.",
"My Service Name._http._tcp.local.",
socket.inet_aton("127.0.0.1"), 1234, 0, 0, desc)
print(" Registering service...")
r.registerService(info)
print(" Registration done.")
print("2. Testing query of service information...")
print(" Getting ZOE service:",
str(r.getServiceInfo("_http._tcp.local.", "ZOE._http._tcp.local.")))
print(" Query done.")
print("3. Testing query of own service...")
print(" Getting self:",
str(r.getServiceInfo("_http._tcp.local.",
"My Service Name._http._tcp.local.")))
print(" Query done.")
print("4. Testing unregister of service information...")
r.unregisterService(info)
print(" Unregister done.")
r.close()
| gpl-2.0 | -2,214,631,821,735,984,600 | 7,106,141,910,092,217,000 | 33.786905 | 80 | 0.53465 | false |
xrg/django-static-gitified | tests/urls.py | 91 | 1189 | from django.conf.urls import patterns, include
urlpatterns = patterns('',
# test_client modeltest urls
(r'^test_client/', include('modeltests.test_client.urls')),
(r'^test_client_regress/', include('regressiontests.test_client_regress.urls')),
# File upload test views
(r'^file_uploads/', include('regressiontests.file_uploads.urls')),
# Always provide the auth system login and logout views
(r'^accounts/login/$', 'django.contrib.auth.views.login', {'template_name': 'login.html'}),
(r'^accounts/logout/$', 'django.contrib.auth.views.logout'),
# test urlconf for {% url %} template tag
(r'^url_tag/', include('regressiontests.templates.urls')),
# django built-in views
(r'^views/', include('regressiontests.views.urls')),
# test urlconf for middleware tests
(r'^middleware/', include('regressiontests.middleware.urls')),
# admin widget tests
(r'widget_admin/', include('regressiontests.admin_widgets.urls')),
# admin custom URL tests
(r'^custom_urls/', include('regressiontests.admin_custom_urls.urls')),
# admin scripts tests
(r'^admin_scripts/', include('regressiontests.admin_scripts.urls')),
)
| bsd-3-clause | -7,902,141,370,102,427,000 | 4,035,789,695,079,124,500 | 35.030303 | 95 | 0.679563 | false |
Komzpa/omim | 3party/Alohalytics/tests/googletest/xcode/Scripts/versiongenerate.py | 3088 | 4536 | #!/usr/bin/env python
#
# Copyright 2008, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""A script to prepare version informtion for use the gtest Info.plist file.
This script extracts the version information from the configure.ac file and
uses it to generate a header file containing the same information. The
#defines in this header file will be included in during the generation of
the Info.plist of the framework, giving the correct value to the version
shown in the Finder.
This script makes the following assumptions (these are faults of the script,
not problems with the Autoconf):
1. The AC_INIT macro will be contained within the first 1024 characters
of configure.ac
2. The version string will be 3 integers separated by periods and will be
     surrounded by square brackets, "[" and "]" (e.g. [1.0.1]). The first
segment represents the major version, the second represents the minor
version and the third represents the fix version.
3. No ")" character exists between the opening "(" and closing ")" of
AC_INIT, including in comments and character strings.
"""
import sys
import re
# Read the command line argument (the output directory for Version.h)
if (len(sys.argv) < 3):
print "Usage: versiongenerate.py input_dir output_dir"
sys.exit(1)
else:
input_dir = sys.argv[1]
output_dir = sys.argv[2]
# Read the first 1024 characters of the configure.ac file
config_file = open("%s/configure.ac" % input_dir, 'r')
buffer_size = 1024
opening_string = config_file.read(buffer_size)
config_file.close()
# Extract the version string from the AC_INIT macro
# The following init_expression means:
# Extract three integers separated by periods and surrounded by square
# brackets (e.g. "[1.0.1]") between "AC_INIT(" and ")". Do not be greedy
# (*? is the non-greedy flag) since that would pull in everything between
# the first "(" and the last ")" in the file.
version_expression = re.compile(r"AC_INIT\(.*?\[(\d+)\.(\d+)\.(\d+)\].*?\)",
re.DOTALL)
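# Illustrative example (assumed configure.ac contents, not verified here):
# for a line like AC_INIT([Google C++ Testing Framework], [1.6.0], ...)
# the three captured groups would be "1", "6" and "0".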
version_values = version_expression.search(opening_string)
major_version = version_values.group(1)
minor_version = version_values.group(2)
fix_version = version_values.group(3)
# Write the version information to a header file to be included in the
# Info.plist file.
file_data = """//
// DO NOT MODIFY THIS FILE (but you can delete it)
//
// This file is autogenerated by the versiongenerate.py script. This script
// is executed in a "Run Script" build phase when creating gtest.framework. This
// header file is not used during compilation of C-source. Rather, it simply
// defines some version strings for substitution in the Info.plist. Because of
// this, we are not restricted to C-syntax nor are we using include guards.
//
#define GTEST_VERSIONINFO_SHORT %s.%s
#define GTEST_VERSIONINFO_LONG %s.%s.%s
""" % (major_version, minor_version, major_version, minor_version, fix_version)
version_file = open("%s/Version.h" % output_dir, 'w')
version_file.write(file_data)
version_file.close()
| apache-2.0 | 3,855,333,370,444,227,000 | -1,635,965,717,663,996,400 | 44.36 | 80 | 0.737654 | false |
aganezov/bg | bg/distances.py | 1 | 1464 | # -*- coding: utf-8 -*-
import networkx as nx
def get_all_cycles(breakpoint_graph):
visited = set()
cycles = []
for vertex in breakpoint_graph.nodes():
if vertex in visited:
continue
try:
cycle = nx.find_cycle(breakpoint_graph.bg, vertex)
new = False
for v1, v2, dir in cycle:
if v1 not in visited:
new = True
visited.add(v1)
if new:
cycles.append(cycle)
        except nx.NetworkXNoCycle:
            # nx.find_cycle found no cycle reachable from this vertex
            pass
return cycles
def get_all_paths(breakpoint_graph):
ccs = []
for cc in breakpoint_graph.connected_components_subgraphs(copy=False):
if any(map(lambda vertex: vertex.is_irregular_vertex, cc.nodes())):
ccs.append(cc)
continue
return ccs
def scj(breakpoint_graph):
number_of_genes = len([v for v in breakpoint_graph.nodes() if v.is_regular_vertex]) / 2
cycles = get_all_cycles(breakpoint_graph=breakpoint_graph)
two_cycles = [cycle for cycle in cycles if len(cycle) == 2]
adjacency_graph_two_cycles = [cycle for cycle in two_cycles if all(map(lambda c_entry: c_entry[0].is_regular_vertex, cycle))]
adjacency_graph_paths = get_all_paths(breakpoint_graph=breakpoint_graph)
number_of_paths = len(adjacency_graph_paths)
return int(2 * number_of_genes - 2 * len(adjacency_graph_two_cycles) - number_of_paths)
single_cut_and_join_distance = scj
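# The value computed by scj() mirrors the adjacency-graph formula for the
# single-cut-or-join distance,
#     d_SCJ = 2*N - 2*C_2 - P,
# with N the number of genes, C_2 the number of adjacency 2-cycles and P the
# number of paths (cf. Feijao and Meidanis, "SCJ: a breakpoint-like distance
# that simplifies several rearrangement problems").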
| mit | 3,494,672,603,890,986,500 | -6,909,416,838,241,666,000 | 31.533333 | 129 | 0.612022 | false |
psf/black | src/black/numerics.py | 1 | 1843 | """
Formatting numeric literals.
"""
from blib2to3.pytree import Leaf
def format_hex(text: str) -> str:
"""
Formats a hexadecimal string like "0x12B3"
"""
before, after = text[:2], text[2:]
return f"{before}{after.upper()}"
def format_scientific_notation(text: str) -> str:
"""Formats a numeric string utilizing scentific notation"""
before, after = text.split("e")
sign = ""
if after.startswith("-"):
after = after[1:]
sign = "-"
elif after.startswith("+"):
after = after[1:]
before = format_float_or_int_string(before)
return f"{before}e{sign}{after}"
def format_long_or_complex_number(text: str) -> str:
"""Formats a long or complex string like `10L` or `10j`"""
number = text[:-1]
suffix = text[-1]
# Capitalize in "2L" because "l" looks too similar to "1".
if suffix == "l":
suffix = "L"
return f"{format_float_or_int_string(number)}{suffix}"
def format_float_or_int_string(text: str) -> str:
"""Formats a float string like "1.0"."""
if "." not in text:
return text
before, after = text.split(".")
return f"{before or 0}.{after or 0}"
def normalize_numeric_literal(leaf: Leaf) -> None:
"""Normalizes numeric (float, int, and complex) literals.
All letters used in the representation are normalized to lowercase (except
in Python 2 long literals).
"""
text = leaf.value.lower()
if text.startswith(("0o", "0b")):
# Leave octal and binary literals alone.
pass
elif text.startswith("0x"):
text = format_hex(text)
elif "e" in text:
text = format_scientific_notation(text)
elif text.endswith(("j", "l")):
text = format_long_or_complex_number(text)
else:
text = format_float_or_int_string(text)
leaf.value = text
| mit | 6,638,736,832,500,751,000 | 8,067,030,393,803,246,000 | 27.353846 | 78 | 0.607705 | false |
xcgoner/dist-mxnet | python/mxnet/autograd.py | 7 | 14381 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# coding: utf-8
"""Autograd for NDArray."""
from __future__ import absolute_import
from __future__ import division
from threading import Lock
import traceback
import ctypes
from ctypes import c_int, c_void_p, CFUNCTYPE, POINTER, cast
from .base import _LIB, check_call, string_types
from .base import mx_uint, NDArrayHandle, c_array, MXCallbackList, SymbolHandle
from .ndarray import NDArray
from .symbol import _GRAD_REQ_MAP, Symbol
def set_recording(is_recording): #pylint: disable=redefined-outer-name
"""Set status to recording/not recording. When recording, graph will be constructed
for gradient computation.
Parameters
----------
is_recording: bool
Returns
-------
previous state before this set.
"""
prev = ctypes.c_int()
check_call(_LIB.MXAutogradSetIsRecording(
ctypes.c_int(is_recording), ctypes.byref(prev)))
return bool(prev.value)
def set_training(train_mode): #pylint: disable=redefined-outer-name
"""Set status to training/predicting. This affects ctx.is_train in operator
running context. For example, Dropout will drop inputs randomly when
train_mode=True while simply passing through if train_mode=False.
Parameters
----------
train_mode: bool
Returns
-------
previous state before this set.
"""
prev = ctypes.c_int()
check_call(_LIB.MXAutogradSetIsTraining(
ctypes.c_int(train_mode), ctypes.byref(prev)))
return bool(prev.value)
def is_recording():
"""Get status on recording/not recording.
Returns
-------
Current state of recording.
"""
curr = ctypes.c_bool()
check_call(_LIB.MXAutogradIsRecording(ctypes.byref(curr)))
return curr.value
def is_training():
"""Get status on training/predicting.
Returns
-------
Current state of training/predicting.
"""
curr = ctypes.c_bool()
check_call(_LIB.MXAutogradIsTraining(ctypes.byref(curr)))
return curr.value
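# Illustrative use of the four state helpers above (a minimal sketch; the
# name ``prev`` is ours, not part of the API):
#
#   prev = set_recording(True)   # start taping operations onto the graph
#   assert is_recording()
#   ...                          # run the forward computation here
#   set_recording(prev)          # restore whatever state was active before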
class _RecordingStateScope(object):
"""Scope for managing training state.
Example::
with _RecordingStateScope(True, True):
y = model(x)
backward([y])
"""
def __init__(self, is_record, train_mode): #pylint: disable=redefined-outer-name
self._enter_is_record = is_record
self._enter_train_mode = train_mode
self._prev_is_record = None
self._prev_train_mode = None
def __enter__(self):
if self._enter_is_record is not None:
self._prev_is_record = set_recording(self._enter_is_record)
if self._enter_train_mode is not None:
self._prev_train_mode = set_training(self._enter_train_mode)
def __exit__(self, ptype, value, trace):
if self._enter_is_record is not None and self._prev_is_record != self._enter_is_record:
set_recording(self._prev_is_record)
if self._enter_train_mode is not None and self._prev_train_mode != self._enter_train_mode:
set_training(self._prev_train_mode)
def record(train_mode=True): #pylint: disable=redefined-outer-name
"""Returns an autograd recording scope context to be used in 'with' statement
and captures code that needs gradients to be calculated.
.. note:: When forwarding with train_mode=False, the corresponding backward
should also use train_mode=False, otherwise gradient is undefined.
Example::
with autograd.record():
y = model(x)
backward([y])
metric.update(...)
optim.step(...)
Parameters
----------
train_mode: bool, default True
Whether the forward pass is in training or predicting mode. This controls the behavior
of some layers such as Dropout, BatchNorm.
"""
return _RecordingStateScope(True, train_mode)
def pause(train_mode=False): #pylint: disable=redefined-outer-name
"""Returns a scope context to be used in 'with' statement for codes that do not need
gradients to be calculated.
Example::
with autograd.record():
y = model(x)
backward([y])
with autograd.pause():
# testing, IO, gradient updates...
Parameters
----------
train_mode: bool, default False
Whether to do forward for training or predicting.
"""
return _RecordingStateScope(False, train_mode)
def train_mode():
"""Returns a scope context to be used in 'with' statement
in which forward pass behavior is set to training mode,
without changing the recording states.
Example::
y = model(x)
with autograd.train_mode():
y = dropout(y)
"""
return _RecordingStateScope(None, True)
def predict_mode():
"""Returns a scope context to be used in 'with' statement
in which forward pass behavior is set to inference mode,
without changing the recording states.
Example::
with autograd.record():
y = model(x)
with autograd.predict_mode():
y = sampling(y)
backward([y])
"""
return _RecordingStateScope(None, False)
def mark_variables(variables, gradients, grad_reqs='write'):
"""Mark NDArrays as variables to compute gradient for autograd.
Parameters
----------
variables: NDArray or list of NDArray
gradients: NDArray or list of NDArray
grad_reqs: str or list of str
"""
if isinstance(variables, NDArray):
assert isinstance(gradients, NDArray)
variables = [variables]
gradients = [gradients]
variable_handles = []
gradient_handles = []
for var, gradvar in zip(variables, gradients):
variable_handles.append(var.handle)
gradient_handles.append(gradvar.handle)
if isinstance(grad_reqs, string_types):
grad_reqs = [_GRAD_REQ_MAP[grad_reqs]]*len(variables)
else:
grad_reqs = [_GRAD_REQ_MAP[i] for i in grad_reqs]
check_call(_LIB.MXAutogradMarkVariables(
len(variable_handles),
c_array(NDArrayHandle, variable_handles),
c_array(mx_uint, grad_reqs),
c_array(NDArrayHandle, gradient_handles)))
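# Minimal end-to-end sketch tying mark_variables, record and backward
# together (illustrative names; assumes ``mx.nd`` is imported as usual):
#
#   x = mx.nd.ones((2, 2))
#   dx = mx.nd.zeros_like(x)
#   mark_variables(x, dx)        # gradient of x will be written into dx
#   with record():
#       y = (x * x).sum()
#   backward(y)                  # afterwards dx holds dy/dx = 2*x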
def backward(heads, head_grads=None, retain_graph=False, train_mode=True): #pylint: disable=redefined-outer-name
"""Compute the gradients of heads w.r.t previously marked variables.
Parameters
----------
heads: NDArray or list of NDArray
Output NDArray(s)
head_grads: NDArray or list of NDArray or None
Gradients with respect to heads.
train_mode: bool, optional
Whether to do backward for training or predicting.
"""
if isinstance(heads, NDArray):
assert head_grads is None or isinstance(head_grads, NDArray)
heads = [heads]
head_grads = [head_grads] if head_grads is not None else None
output_handles = []
for arr in heads:
output_handles.append(arr.handle)
if head_grads is None:
check_call(_LIB.MXAutogradBackwardEx(
len(output_handles),
c_array(NDArrayHandle, output_handles),
ctypes.c_void_p(0),
ctypes.c_int(retain_graph),
ctypes.c_int(train_mode)))
return
ograd_handles = []
for arr in head_grads:
if arr is not None:
ograd_handles.append(arr.handle)
else:
ograd_handles.append(NDArrayHandle(0))
assert len(ograd_handles) == len(output_handles), \
"heads and head_grads must have the same length"
check_call(_LIB.MXAutogradBackwardEx(
len(output_handles),
c_array(NDArrayHandle, output_handles),
c_array(NDArrayHandle, ograd_handles),
ctypes.c_int(retain_graph),
ctypes.c_int(train_mode)))
def get_symbol(x):
"""Retrieve recorded computation history as `Symbol`.
Parameters
----------
x : NDArray
Array representing the head of computation graph.
Returns
-------
Symbol
The retrieved Symbol.
"""
hdl = SymbolHandle()
check_call(_LIB.MXAutogradGetSymbol(x.handle, ctypes.byref(hdl)))
return Symbol(hdl)
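# Sketch: after a recorded forward pass, the taped history behind an output
# array can be exported as a static graph (illustrative only):
#
#   with record():
#       y = x * 2 + 1
#   sym = get_symbol(y)          # Symbol describing the recorded computation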
class Function(object):
"""User-defined differentiable function.
Function allows defining both forward and backward computation for
    custom operators. During gradient computation, the user-defined
backward function will be used instead of the default chain-rule.
You can also cast to numpy array and back for some operations in
forward and backward.
For example, a stable sigmoid function can be defined as::
class sigmoid(Function):
def forward(self, x):
y = 1 / (1 + mx.nd.exp(-x))
self.save_for_backward(y)
return y
def backward(self, dy):
# backward takes as many inputs as forward's return value,
# and returns as many NDArrays as forward's arguments.
y, = self.saved_tensors
return y * (1-y)
"""
_bwd_functype = CFUNCTYPE(c_int, c_int, c_int, POINTER(c_void_p),
POINTER(c_int), c_int, c_void_p)
_del_functype = CFUNCTYPE(c_int, c_void_p)
class _Registry(object):
"""CustomOp registry."""
def __init__(self):
self.ref_holder = {}
self.counter = 0
self.lock = Lock()
def inc(self):
"""Get index for new entry."""
self.lock.acquire()
cur = self.counter
self.counter += 1
self.lock.release()
return cur
_registry = _Registry()
def __init__(self):
self._used = False
self.saved_tensors = ()
def save_for_backward(self, *args):
self.saved_tensors = args
def __call__(self, *inputs):
assert not self._used, \
"Each Function instance can only be called once. "\
"Please create another instance."
self._used = True
prev_recording = set_recording(False)
outputs = self.forward(*inputs)
set_recording(prev_recording)
if not prev_recording:
return outputs
ret_outputs = outputs
if isinstance(outputs, NDArray):
outputs = (outputs,)
key = Function._registry.inc()
def backward_entry(num_ograds, num_igrads, ptrs, reqs, is_train, _):
"""entry point for backward."""
# pylint: disable=W0613
try:
output_grads = [NDArray(ctypes.cast(i, NDArrayHandle), writable=False) \
for i in ptrs[:num_ograds]]
input_grads = [NDArray(ctypes.cast(i, NDArrayHandle), writable=True) \
for i in ptrs[num_ograds:num_ograds+num_igrads]]
reqs = [reqs[i] for i in range(num_igrads)]
rets = self.backward(*output_grads)
if isinstance(rets, NDArray):
rets = (rets,)
assert len(rets) == len(input_grads), \
"%s.backward must return exactly the same number " \
"of NDArrays as the number of NDArrays arguments to forward." \
"Expecting %d got %d"%(self.__class__.name, len(input_grads), len(rets))
for igrad, ret, req in zip(input_grads, rets, reqs):
assert isinstance(ret, NDArray), \
"autograd.Function.backward must return NDArrays, not %s"%type(ret)
                    if req == 0:  # null: no gradient requested for this input
                        continue
                    elif req == 1 or req == 2:  # write or inplace
                        igrad[:] = ret
                    elif req == 3:  # add: accumulate into the existing gradient
                        igrad[:] += ret
except Exception: # pylint: disable=broad-except
print('Error in Function.backward: %s' % traceback.format_exc())
return False
return True
def delete_entry(_):
"""C Callback for CustomFunction::delete"""
try:
del Function._registry.ref_holder[key]
except Exception: # pylint: disable=broad-except
print('Error in autograd.Function.delete: %s' % traceback.format_exc())
return False
return True
input_handles = [x.handle for x in inputs]
output_handles = [x.handle for x in outputs]
callbacks = [Function._bwd_functype(backward_entry),
Function._del_functype(delete_entry)]
callbacks = [cast(i, CFUNCTYPE(c_int)) for i in callbacks]
context = MXCallbackList(c_int(len(callbacks)),
cast(c_array(CFUNCTYPE(c_int), callbacks),
POINTER(CFUNCTYPE(c_int))),
cast(c_array(c_void_p, [None]*len(callbacks)),
POINTER(c_void_p)))
check_call(_LIB.MXCustomFunctionRecord(
c_int(len(inputs)),
c_array(NDArrayHandle, input_handles),
c_int(len(outputs)),
c_array(NDArrayHandle, output_handles),
ctypes.byref(context)))
Function._registry.ref_holder[key] = context
return ret_outputs
def forward(self, *inputs):
"""Forward computation."""
raise NotImplementedError
def backward(self, *output_grads):
"""Backward computation.
Takes as many inputs as forward's outputs,
and returns as many NDArrays as forward's inputs.
"""
raise NotImplementedError
| apache-2.0 | -2,455,686,734,164,137,500 | -6,728,962,928,501,191,000 | 32.444186 | 112 | 0.60427 | false |
prakashpp/trytond-customs-value | tests/test_product.py | 2 | 2895 | # -*- coding: utf-8 -*-
import sys
import os
import unittest
import trytond.tests.test_tryton
from trytond.tests.test_tryton import POOL, DB_NAME, USER, CONTEXT
from trytond.transaction import Transaction
from decimal import Decimal
DIR = os.path.abspath(os.path.normpath(os.path.join(
__file__, '..', '..', '..', '..', '..', 'trytond'
)))
if os.path.isdir(DIR):
sys.path.insert(0, os.path.dirname(DIR))
class TestBase(unittest.TestCase):
"""
Base Test Case
"""
def setUp(self):
"""
Set up data used in the tests.
        This method is called before each test function execution.
"""
trytond.tests.test_tryton.install_module('customs_value')
self.Product = POOL.get('product.product')
self.Template = POOL.get('product.template')
self.Uom = POOL.get('product.uom')
def create_product_template(self):
"""
Creates default product template
"""
self.uom, = self.Uom.search([('name', '=', 'Unit')])
return self.Template.create([{
'name': 'product',
'list_price': Decimal('20'),
'cost_price': Decimal('5'),
'default_uom': self.uom.id,
}])[0]
class TestProduct(TestBase):
'''
Test Product
'''
def test0010_check_product_custom_value(self):
"""
Check custom value for product
"""
with Transaction().start(DB_NAME, USER, context=CONTEXT):
template = self.create_product_template()
product, = self.Product.create([{
'template': template,
}])
self.assertEqual(
product.use_list_price_as_customs_value, True
)
self.assertEqual(product.customs_value_used, product.list_price)
product, = self.Product.create([{
'template': template,
'customs_value': Decimal('50'),
'use_list_price_as_customs_value': False
}])
self.assertEqual(
product.use_list_price_as_customs_value, False
)
self.assertEqual(product.customs_value_used, product.customs_value)
product, = self.Product.create([{
'template': template,
'customs_value': Decimal('50'),
'use_list_price_as_customs_value': True
}])
self.assertEqual(
product.use_list_price_as_customs_value, True
)
self.assertEqual(product.customs_value_used, product.list_price)
def suite():
"""
Define suite
"""
test_suite = trytond.tests.test_tryton.suite()
test_suite.addTests(
unittest.TestLoader().loadTestsFromTestCase(TestProduct)
)
return test_suite
if __name__ == '__main__':
unittest.TextTestRunner(verbosity=2).run(suite())
| bsd-3-clause | 8,772,305,857,833,849,000 | -7,286,141,495,559,006,000 | 26.056075 | 79 | 0.562003 | false |
ecanzonieri/pyleus | examples/apparent_temperature/apparent_temperature/wind_speed_generator.py | 9 | 1135 | from __future__ import absolute_import
import logging
from collections import namedtuple
import random
from apparent_temperature.measure_generator import MeasureGeneratorSpout
log = logging.getLogger('wind_speed_generator')
WindSpeedMeasure = namedtuple(
"WindSpeedMeasure",
"id_sensor timestamp wind_speed")
class WindSpeedSpout(MeasureGeneratorSpout):
OUTPUT_FIELDS = WindSpeedMeasure
SENSORS = {
1042: (15, 8),
1077: (8, 6),
1078: (3, 7),
1079: (8, 5),
1082: (11, 4),
1126: (28, 9),
1156: (22, 5),
1178: (12, 12),
1201: (34, 18),
1234: (12, 5),
1312: (0, 12),
1448: (20, 8),
2089: (6, 6),
}
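    # Each sensor id above maps to the (mean, standard deviation) pair fed
    # to random.normalvariate in measure(); negative draws are clamped to 0.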
def measure(self, *args):
return max(0, random.normalvariate(*args))
def log(self, measure):
log.debug("id: {0}, time: {1}, wind-speed: {2} mph"
.format(*measure))
if __name__ == '__main__':
logging.basicConfig(
level=logging.DEBUG,
filename='/tmp/apparent_temperature_wind_speed.log',
filemode='a',
)
WindSpeedSpout().run()
| apache-2.0 | -7,816,038,791,187,324,000 | 8,206,060,737,355,108,000 | 20.826923 | 72 | 0.567401 | false |
rorasa/KeeTerm | Crypto/SelfTest/Cipher/test_XOR.py | 119 | 2538 | # -*- coding: utf-8 -*-
#
# SelfTest/Cipher/XOR.py: Self-test for the XOR "cipher"
#
# Written in 2008 by Dwayne C. Litzenberger <dlitz@dlitz.net>
#
# ===================================================================
# The contents of this file are dedicated to the public domain. To
# the extent that dedication to the public domain is not available,
# everyone is granted a worldwide, perpetual, royalty-free,
# non-exclusive license to exercise all rights associated with the
# contents of this file for any purpose whatsoever.
# No rights are reserved.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ===================================================================
"""Self-test suite for Crypto.Cipher.XOR"""
import unittest
__revision__ = "$Id$"
from Crypto.Util.py3compat import *
# This is a list of (plaintext, ciphertext, key) tuples.
test_data = [
# Test vectors written from scratch. (Nobody posts XOR test vectors on the web? How disappointing.)
('01', '01',
'00',
'zero key'),
('0102040810204080', '0003050911214181',
'01',
'1-byte key'),
('0102040810204080', 'cda8c8a2dc8a8c2a',
'ccaa',
'2-byte key'),
('ff'*64, 'fffefdfcfbfaf9f8f7f6f5f4f3f2f1f0efeeedecebeae9e8e7e6e5e4e3e2e1e0'*2,
'000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f',
'32-byte key'),
]
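# Quick illustration of what the vectors above encode (a sketch; XOR is a toy
# "cipher" exercised only by these self-tests, never for real security):
#
#   from Crypto.Cipher import XOR
#   cipher = XOR.new(b'\xcc\xaa')
#   cipher.encrypt(b'\x01\x02\x04\x08')   # -> b'\xcd\xa8\xc8\xa2'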
class TruncationSelfTest(unittest.TestCase):
def runTest(self):
"""33-byte key (should raise ValueError under current implementation)"""
# Crypto.Cipher.XOR previously truncated its inputs at 32 bytes. Now
# it should raise a ValueError if the length is too long.
self.assertRaises(ValueError, XOR.new, "x"*33)
def get_tests(config={}):
global XOR
from Crypto.Cipher import XOR
from common import make_stream_tests
return make_stream_tests(XOR, "XOR", test_data) + [TruncationSelfTest()]
if __name__ == '__main__':
import unittest
suite = lambda: unittest.TestSuite(get_tests())
unittest.main(defaultTest='suite')
# vim:set ts=4 sw=4 sts=4 expandtab:
| mit | -2,425,192,003,238,474,000 | 2,495,582,448,126,315,000 | 34.25 | 105 | 0.662727 | false |
madan96/sympy | sympy/solvers/tests/test_solvers.py | 2 | 69392 | from sympy import (
Abs, And, Derivative, Dummy, Eq, Float, Function, Gt, I, Integral,
LambertW, Lt, Matrix, Or, Piecewise, Poly, Q, Rational, S, Symbol,
Wild, acos, asin, atan, atanh, cos, cosh, diff, erf, erfinv, erfc,
erfcinv, exp, im, log, pi, re, sec, sin,
sinh, solve, solve_linear, sqrt, sstr, symbols, sympify, tan, tanh,
root, simplify, atan2, arg, Mul, SparseMatrix, ask, Tuple, nsolve, oo)
from sympy.core.compatibility import range
from sympy.core.function import nfloat
from sympy.solvers import solve_linear_system, solve_linear_system_LU, \
solve_undetermined_coeffs
from sympy.solvers.solvers import _invert, unrad, checksol, posify, _ispow, \
det_quick, det_perm, det_minor, _simple_dens, check_assumptions
from sympy.physics.units import cm
from sympy.polys.rootoftools import CRootOf
from sympy.utilities.pytest import slow, XFAIL, SKIP, raises, skip, ON_TRAVIS
from sympy.utilities.randtest import verify_numerically as tn
from sympy.abc import a, b, c, d, k, h, p, x, y, z, t, q, m
def NS(e, n=15, **options):
return sstr(sympify(e).evalf(n, **options), full_prec=True)
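# NS evaluates an expression to n significant digits and returns its string
# form, e.g. NS(pi, 5) == '3.1416'; the tests compare numeric results as
# strings to make precision explicit.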
def test_swap_back():
f, g = map(Function, 'fg')
fx, gx = f(x), g(x)
assert solve([fx + y - 2, fx - gx - 5], fx, y, gx) == \
{fx: gx + 5, y: -gx - 3}
assert solve(fx + gx*x - 2, [fx, gx]) == {fx: 2, gx: 0}
assert solve(fx + gx**2*x - y, [fx, gx]) == [{fx: y - gx**2*x}]
assert solve([f(1) - 2, x + 2]) == [{x: -2, f(1): 2}]
def guess_solve_strategy(eq, symbol):
try:
solve(eq, symbol)
return True
except (TypeError, NotImplementedError):
return False
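# Note: guess_solve_strategy above simply reports whether solve() accepts the
# input; the "# == GS_*" comments in the tests below are the names of
# strategy constants from an older implementation, kept only as references.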
def test_guess_poly():
# polynomial equations
assert guess_solve_strategy( S(4), x ) # == GS_POLY
assert guess_solve_strategy( x, x ) # == GS_POLY
assert guess_solve_strategy( x + a, x ) # == GS_POLY
assert guess_solve_strategy( 2*x, x ) # == GS_POLY
assert guess_solve_strategy( x + sqrt(2), x) # == GS_POLY
assert guess_solve_strategy( x + 2**Rational(1, 4), x) # == GS_POLY
assert guess_solve_strategy( x**2 + 1, x ) # == GS_POLY
assert guess_solve_strategy( x**2 - 1, x ) # == GS_POLY
assert guess_solve_strategy( x*y + y, x ) # == GS_POLY
assert guess_solve_strategy( x*exp(y) + y, x) # == GS_POLY
assert guess_solve_strategy(
(x - y**3)/(y**2*sqrt(1 - y**2)), x) # == GS_POLY
def test_guess_poly_cv():
# polynomial equations via a change of variable
assert guess_solve_strategy( sqrt(x) + 1, x ) # == GS_POLY_CV_1
assert guess_solve_strategy(
x**Rational(1, 3) + sqrt(x) + 1, x ) # == GS_POLY_CV_1
assert guess_solve_strategy( 4*x*(1 - sqrt(x)), x ) # == GS_POLY_CV_1
# polynomial equation multiplying both sides by x**n
assert guess_solve_strategy( x + 1/x + y, x ) # == GS_POLY_CV_2
def test_guess_rational_cv():
# rational functions
assert guess_solve_strategy( (x + 1)/(x**2 + 2), x) # == GS_RATIONAL
assert guess_solve_strategy(
(x - y**3)/(y**2*sqrt(1 - y**2)), y) # == GS_RATIONAL_CV_1
# rational functions via the change of variable y -> x**n
assert guess_solve_strategy( (sqrt(x) + 1)/(x**Rational(1, 3) + sqrt(x) + 1), x ) \
#== GS_RATIONAL_CV_1
def test_guess_transcendental():
#transcendental functions
assert guess_solve_strategy( exp(x) + 1, x ) # == GS_TRANSCENDENTAL
assert guess_solve_strategy( 2*cos(x) - y, x ) # == GS_TRANSCENDENTAL
assert guess_solve_strategy(
exp(x) + exp(-x) - y, x ) # == GS_TRANSCENDENTAL
assert guess_solve_strategy(3**x - 10, x) # == GS_TRANSCENDENTAL
assert guess_solve_strategy(-3**x + 10, x) # == GS_TRANSCENDENTAL
assert guess_solve_strategy(a*x**b - y, x) # == GS_TRANSCENDENTAL
def test_solve_args():
# equation container, issue 5113
ans = {x: -3, y: 1}
eqs = (x + 5*y - 2, -3*x + 6*y - 15)
assert all(solve(container(eqs), x, y) == ans for container in
(tuple, list, set, frozenset))
assert solve(Tuple(*eqs), x, y) == ans
# implicit symbol to solve for
assert set(solve(x**2 - 4)) == set([S(2), -S(2)])
assert solve([x + y - 3, x - y - 5]) == {x: 4, y: -1}
assert solve(x - exp(x), x, implicit=True) == [exp(x)]
# no symbol to solve for
assert solve(42) == []
assert solve([1, 2]) == []
# duplicate symbols removed
assert solve((x - 3, y + 2), x, y, x) == {x: 3, y: -2}
# unordered symbols
# only 1
assert solve(y - 3, set([y])) == [3]
# more than 1
assert solve(y - 3, set([x, y])) == [{y: 3}]
# multiple symbols: take the first linear solution
assert solve(x + y - 3, [x, y]) == [{x: 3 - y}]
# unless it is an undetermined coefficients system
assert solve(a + b*x - 2, [a, b]) == {a: 2, b: 0}
args = (a + b)*x - b**2 + 2, a, b
assert solve(*args) == \
[(-sqrt(2), sqrt(2)), (sqrt(2), -sqrt(2))]
assert solve(*args, set=True) == \
([a, b], set([(-sqrt(2), sqrt(2)), (sqrt(2), -sqrt(2))]))
assert solve(*args, dict=True) == \
[{b: sqrt(2), a: -sqrt(2)}, {b: -sqrt(2), a: sqrt(2)}]
eq = a*x**2 + b*x + c - ((x - h)**2 + 4*p*k)/4/p
flags = dict(dict=True)
assert solve(eq, [h, p, k], exclude=[a, b, c], **flags) == \
[{k: c - b**2/(4*a), h: -b/(2*a), p: 1/(4*a)}]
flags.update(dict(simplify=False))
assert solve(eq, [h, p, k], exclude=[a, b, c], **flags) == \
[{k: (4*a*c - b**2)/(4*a), h: -b/(2*a), p: 1/(4*a)}]
# failing undetermined system
assert solve(a*x + b**2/(x + 4) - 3*x - 4/x, a, b) == \
[{a: (-b**2*x + 3*x**3 + 12*x**2 + 4*x + 16)/(x**2*(x + 4))}]
# failed single equation
assert solve(1/(1/x - y + exp(y))) == []
raises(
NotImplementedError, lambda: solve(exp(x) + sin(x) + exp(y) + sin(y)))
# failed system
# -- when no symbols given, 1 fails
assert solve([y, exp(x) + x]) == [{x: -LambertW(1), y: 0}]
# both fail
assert solve(
(exp(x) - x, exp(y) - y)) == [{x: -LambertW(-1), y: -LambertW(-1)}]
# -- when symbols given
solve([y, exp(x) + x], x, y) == [(-LambertW(1), 0)]
# symbol is a number
assert solve(x**2 - pi, pi) == [x**2]
# no equations
assert solve([], [x]) == []
# overdetermined system
# - nonlinear
assert solve([(x + y)**2 - 4, x + y - 2]) == [{x: -y + 2}]
# - linear
assert solve((x + y - 2, 2*x + 2*y - 4)) == {x: -y + 2}
def test_solve_polynomial1():
assert solve(3*x - 2, x) == [Rational(2, 3)]
assert solve(Eq(3*x, 2), x) == [Rational(2, 3)]
assert set(solve(x**2 - 1, x)) == set([-S(1), S(1)])
assert set(solve(Eq(x**2, 1), x)) == set([-S(1), S(1)])
assert solve(x - y**3, x) == [y**3]
rx = root(x, 3)
assert solve(x - y**3, y) == [
rx, -rx/2 - sqrt(3)*I*rx/2, -rx/2 + sqrt(3)*I*rx/2]
a11, a12, a21, a22, b1, b2 = symbols('a11,a12,a21,a22,b1,b2')
assert solve([a11*x + a12*y - b1, a21*x + a22*y - b2], x, y) == \
{
x: (a22*b1 - a12*b2)/(a11*a22 - a12*a21),
y: (a11*b2 - a21*b1)/(a11*a22 - a12*a21),
}
solution = {y: S.Zero, x: S.Zero}
assert solve((x - y, x + y), x, y ) == solution
assert solve((x - y, x + y), (x, y)) == solution
assert solve((x - y, x + y), [x, y]) == solution
assert set(solve(x**3 - 15*x - 4, x)) == set([
-2 + 3**Rational(1, 2),
S(4),
-2 - 3**Rational(1, 2)
])
assert set(solve((x**2 - 1)**2 - a, x)) == \
set([sqrt(1 + sqrt(a)), -sqrt(1 + sqrt(a)),
sqrt(1 - sqrt(a)), -sqrt(1 - sqrt(a))])
def test_solve_polynomial2():
assert solve(4, x) == []
def test_solve_polynomial_cv_1a():
"""
    Test solving equations that can be converted to a polynomial equation
using the change of variable y -> x**Rational(p, q)
"""
assert solve( sqrt(x) - 1, x) == [1]
assert solve( sqrt(x) - 2, x) == [4]
assert solve( x**Rational(1, 4) - 2, x) == [16]
assert solve( x**Rational(1, 3) - 3, x) == [27]
assert solve(sqrt(x) + x**Rational(1, 3) + x**Rational(1, 4), x) == [0]
def test_solve_polynomial_cv_1b():
assert set(solve(4*x*(1 - a*sqrt(x)), x)) == set([S(0), 1/a**2])
assert set(solve(x*(root(x, 3) - 3), x)) == set([S(0), S(27)])
def test_solve_polynomial_cv_2():
"""
    Test solving equations that can be converted to a polynomial equation
    by multiplying both sides of the equation by x**m
"""
assert solve(x + 1/x - 1, x) in \
[[ Rational(1, 2) + I*sqrt(3)/2, Rational(1, 2) - I*sqrt(3)/2],
[ Rational(1, 2) - I*sqrt(3)/2, Rational(1, 2) + I*sqrt(3)/2]]
def test_quintics_1():
f = x**5 - 110*x**3 - 55*x**2 + 2310*x + 979
s = solve(f, check=False)
for root in s:
res = f.subs(x, root.n()).n()
assert tn(res, 0)
f = x**5 - 15*x**3 - 5*x**2 + 10*x + 20
s = solve(f)
for root in s:
assert root.func == CRootOf
# if one uses solve to get the roots of a polynomial that has a CRootOf
# solution, make sure that the use of nfloat during the solve process
# doesn't fail. Note: if you want numerical solutions to a polynomial
# it is *much* faster to use nroots to get them than to solve the
# equation only to get RootOf solutions which are then numerically
# evaluated. So for eq = x**5 + 3*x + 7 do Poly(eq).nroots() rather
# than [i.n() for i in solve(eq)] to get the numerical roots of eq.
assert nfloat(solve(x**5 + 3*x**3 + 7)[0], exponent=False) == \
CRootOf(x**5 + 3*x**3 + 7, 0).n()
def test_highorder_poly():
# just testing that the uniq generator is unpacked
sol = solve(x**6 - 2*x + 2)
assert all(isinstance(i, CRootOf) for i in sol) and len(sol) == 6
@slow
def test_quintics_2():
f = x**5 + 15*x + 12
s = solve(f, check=False)
for root in s:
res = f.subs(x, root.n()).n()
assert tn(res, 0)
f = x**5 - 15*x**3 - 5*x**2 + 10*x + 20
s = solve(f)
for root in s:
assert root.func == CRootOf
def test_solve_rational():
"""Test solve for rational functions"""
assert solve( ( x - y**3 )/( (y**2)*sqrt(1 - y**2) ), x) == [y**3]
def test_solve_nonlinear():
assert solve(x**2 - y**2, x, y) == [{x: -y}, {x: y}]
assert solve(x**2 - y**2/exp(x), x, y) == [{x: 2*LambertW(y/2)}]
assert solve(x**2 - y**2/exp(x), y, x) == [{y: -x*sqrt(exp(x))}, {y: x*sqrt(exp(x))}]
def test_issue_8666():
x = symbols('x')
assert solve(Eq(x**2 - 1/(x**2 - 4), 4 - 1/(x**2 - 4)), x) == []
assert solve(Eq(x + 1/x, 1/x), x) == []
def test_issue_7228():
assert solve(4**(2*(x**2) + 2*x) - 8, x) == [-Rational(3, 2), S.Half]
def test_issue_7190():
assert solve(log(x-3) + log(x+3), x) == [sqrt(10)]
def test_linear_system():
x, y, z, t, n = symbols('x, y, z, t, n')
assert solve([x - 1, x - y, x - 2*y, y - 1], [x, y]) == []
assert solve([x - 1, x - y, x - 2*y, x - 1], [x, y]) == []
assert solve([x - 1, x - 1, x - y, x - 2*y], [x, y]) == []
assert solve([x + 5*y - 2, -3*x + 6*y - 15], x, y) == {x: -3, y: 1}
M = Matrix([[0, 0, n*(n + 1), (n + 1)**2, 0],
[n + 1, n + 1, -2*n - 1, -(n + 1), 0],
[-1, 0, 1, 0, 0]])
assert solve_linear_system(M, x, y, z, t) == \
{x: -t - t/n, z: -t - t/n, y: 0}
assert solve([x + y + z + t, -z - t], x, y, z, t) == {x: -y, z: -t}
def test_linear_system_function():
a = Function('a')
assert solve([a(0, 0) + a(0, 1) + a(1, 0) + a(1, 1), -a(1, 0) - a(1, 1)],
a(0, 0), a(0, 1), a(1, 0), a(1, 1)) == {a(1, 0): -a(1, 1), a(0, 0): -a(0, 1)}
def test_linear_systemLU():
n = Symbol('n')
M = Matrix([[1, 2, 0, 1], [1, 3, 2*n, 1], [4, -1, n**2, 1]])
assert solve_linear_system_LU(M, [x, y, z]) == {z: -3/(n**2 + 18*n),
x: 1 - 12*n/(n**2 + 18*n),
y: 6*n/(n**2 + 18*n)}
# Note: multiple solutions exist for some of these equations, so the tests
# should be expected to break if the implementation of the solver changes
# in such a way that a different branch is chosen
def test_solve_transcendental():
from sympy.abc import a, b
assert solve(exp(x) - 3, x) == [log(3)]
assert set(solve((a*x + b)*(exp(x) - 3), x)) == set([-b/a, log(3)])
assert solve(cos(x) - y, x) == [-acos(y) + 2*pi, acos(y)]
assert solve(2*cos(x) - y, x) == [-acos(y/2) + 2*pi, acos(y/2)]
assert solve(Eq(cos(x), sin(x)), x) == [-3*pi/4, pi/4]
assert set(solve(exp(x) + exp(-x) - y, x)) in [set([
log(y/2 - sqrt(y**2 - 4)/2),
log(y/2 + sqrt(y**2 - 4)/2),
]), set([
log(y - sqrt(y**2 - 4)) - log(2),
log(y + sqrt(y**2 - 4)) - log(2)]),
set([
log(y/2 - sqrt((y - 2)*(y + 2))/2),
log(y/2 + sqrt((y - 2)*(y + 2))/2)])]
assert solve(exp(x) - 3, x) == [log(3)]
assert solve(Eq(exp(x), 3), x) == [log(3)]
assert solve(log(x) - 3, x) == [exp(3)]
assert solve(sqrt(3*x) - 4, x) == [Rational(16, 3)]
assert solve(3**(x + 2), x) == []
assert solve(3**(2 - x), x) == []
assert solve(x + 2**x, x) == [-LambertW(log(2))/log(2)]
ans = solve(3*x + 5 + 2**(-5*x + 3), x)
assert len(ans) == 1 and ans[0].expand() == \
-Rational(5, 3) + LambertW(-10240*root(2, 3)*log(2)/3)/(5*log(2))
assert solve(5*x - 1 + 3*exp(2 - 7*x), x) == \
[Rational(1, 5) + LambertW(-21*exp(Rational(3, 5))/5)/7]
assert solve(2*x + 5 + log(3*x - 2), x) == \
[Rational(2, 3) + LambertW(2*exp(-Rational(19, 3))/3)/2]
assert solve(3*x + log(4*x), x) == [LambertW(Rational(3, 4))/3]
assert set(solve((2*x + 8)*(8 + exp(x)), x)) == set([S(-4), log(8) + pi*I])
eq = 2*exp(3*x + 4) - 3
ans = solve(eq, x) # this generated a failure in flatten
assert len(ans) == 3 and all(eq.subs(x, a).n(chop=True) == 0 for a in ans)
assert solve(2*log(3*x + 4) - 3, x) == [(exp(Rational(3, 2)) - 4)/3]
assert solve(exp(x) + 1, x) == [pi*I]
eq = 2*(3*x + 4)**5 - 6*7**(3*x + 9)
result = solve(eq, x)
ans = [(log(2401) + 5*LambertW(-log(7**(7*3**Rational(1, 5)/5))))/(3*log(7))/-1]
assert result == ans
# it works if expanded, too
assert solve(eq.expand(), x) == result
assert solve(z*cos(x) - y, x) == [-acos(y/z) + 2*pi, acos(y/z)]
assert solve(z*cos(2*x) - y, x) == [-acos(y/z)/2 + pi, acos(y/z)/2]
assert solve(z*cos(sin(x)) - y, x) == [
asin(acos(y/z) - 2*pi) + pi, -asin(acos(y/z)) + pi,
-asin(acos(y/z) - 2*pi), asin(acos(y/z))]
assert solve(z*cos(x), x) == [pi/2, 3*pi/2]
# issue 4508
assert solve(y - b*x/(a + x), x) in [[-a*y/(y - b)], [a*y/(b - y)]]
assert solve(y - b*exp(a/x), x) == [a/log(y/b)]
# issue 4507
assert solve(y - b/(1 + a*x), x) in [[(b - y)/(a*y)], [-((y - b)/(a*y))]]
# issue 4506
assert solve(y - a*x**b, x) == [(y/a)**(1/b)]
# issue 4505
assert solve(z**x - y, x) == [log(y)/log(z)]
# issue 4504
assert solve(2**x - 10, x) == [log(10)/log(2)]
# issue 6744
assert solve(x*y) == [{x: 0}, {y: 0}]
assert solve([x*y]) == [{x: 0}, {y: 0}]
assert solve(x**y - 1) == [{x: 1}, {y: 0}]
assert solve([x**y - 1]) == [{x: 1}, {y: 0}]
assert solve(x*y*(x**2 - y**2)) == [{x: 0}, {x: -y}, {x: y}, {y: 0}]
assert solve([x*y*(x**2 - y**2)]) == [{x: 0}, {x: -y}, {x: y}, {y: 0}]
#issue 4739
assert solve(exp(log(5)*x) - 2**x, x) == [0]
# misc
# make sure that the right variables is picked up in tsolve
raises(NotImplementedError, lambda: solve((exp(x) + 1)**x - 2))
# shouldn't generate a GeneratorsNeeded error in _tsolve when the NaN is generated
    # for eq_down. Actual answers, as determined numerically, are approx. +/- 0.83
raises(NotImplementedError, lambda:
solve(sinh(x)*sinh(sinh(x)) + cosh(x)*cosh(sinh(x)) - 3))
# watch out for recursive loop in tsolve
raises(NotImplementedError, lambda: solve((x + 2)**y*x - 3, x))
# issue 7245
assert solve(sin(sqrt(x))) == [0, pi**2]
# issue 7602
a, b = symbols('a, b', real=True, negative=False)
assert str(solve(Eq(a, 0.5 - cos(pi*b)/2), b)) == \
'[-0.318309886183791*acos(-2.0*a + 1.0) + 2.0, 0.318309886183791*acos(-2.0*a + 1.0)]'
def test_solve_for_functions_derivatives():
t = Symbol('t')
x = Function('x')(t)
y = Function('y')(t)
a11, a12, a21, a22, b1, b2 = symbols('a11,a12,a21,a22,b1,b2')
soln = solve([a11*x + a12*y - b1, a21*x + a22*y - b2], x, y)
assert soln == {
x: (a22*b1 - a12*b2)/(a11*a22 - a12*a21),
y: (a11*b2 - a21*b1)/(a11*a22 - a12*a21),
}
assert solve(x - 1, x) == [1]
assert solve(3*x - 2, x) == [Rational(2, 3)]
soln = solve([a11*x.diff(t) + a12*y.diff(t) - b1, a21*x.diff(t) +
a22*y.diff(t) - b2], x.diff(t), y.diff(t))
assert soln == { y.diff(t): (a11*b2 - a21*b1)/(a11*a22 - a12*a21),
x.diff(t): (a22*b1 - a12*b2)/(a11*a22 - a12*a21) }
assert solve(x.diff(t) - 1, x.diff(t)) == [1]
assert solve(3*x.diff(t) - 2, x.diff(t)) == [Rational(2, 3)]
eqns = set((3*x - 1, 2*y - 4))
assert solve(eqns, set((x, y))) == { x: Rational(1, 3), y: 2 }
x = Symbol('x')
f = Function('f')
F = x**2 + f(x)**2 - 4*x - 1
assert solve(F.diff(x), diff(f(x), x)) == [(-x + 2)/f(x)]
# Mixed cased with a Symbol and a Function
x = Symbol('x')
y = Function('y')(t)
soln = solve([a11*x + a12*y.diff(t) - b1, a21*x +
a22*y.diff(t) - b2], x, y.diff(t))
assert soln == { y.diff(t): (a11*b2 - a21*b1)/(a11*a22 - a12*a21),
x: (a22*b1 - a12*b2)/(a11*a22 - a12*a21) }
def test_issue_3725():
f = Function('f')
F = x**2 + f(x)**2 - 4*x - 1
e = F.diff(x)
assert solve(e, f(x).diff(x)) in [[(2 - x)/f(x)], [-((x - 2)/f(x))]]
def test_issue_3870():
a, b, c, d = symbols('a b c d')
A = Matrix(2, 2, [a, b, c, d])
B = Matrix(2, 2, [0, 2, -3, 0])
C = Matrix(2, 2, [1, 2, 3, 4])
assert solve(A*B - C, [a, b, c, d]) == {a: 1, b: -S(1)/3, c: 2, d: -1}
assert solve([A*B - C], [a, b, c, d]) == {a: 1, b: -S(1)/3, c: 2, d: -1}
assert solve(Eq(A*B, C), [a, b, c, d]) == {a: 1, b: -S(1)/3, c: 2, d: -1}
assert solve([A*B - B*A], [a, b, c, d]) == {a: d, b: -S(2)/3*c}
assert solve([A*C - C*A], [a, b, c, d]) == {a: d - c, b: S(2)/3*c}
assert solve([A*B - B*A, A*C - C*A], [a, b, c, d]) == {a: d, b: 0, c: 0}
assert solve([Eq(A*B, B*A)], [a, b, c, d]) == {a: d, b: -S(2)/3*c}
assert solve([Eq(A*C, C*A)], [a, b, c, d]) == {a: d - c, b: S(2)/3*c}
assert solve([Eq(A*B, B*A), Eq(A*C, C*A)], [a, b, c, d]) == {a: d, b: 0, c: 0}
def test_solve_linear():
w = Wild('w')
assert solve_linear(x, x) == (0, 1)
assert solve_linear(x, exclude=[x]) == (0, 1)
assert solve_linear(x, symbols=[w]) == (0, 1)
assert solve_linear(x, y - 2*x) in [(x, y/3), (y, 3*x)]
assert solve_linear(x, y - 2*x, exclude=[x]) == (y, 3*x)
assert solve_linear(3*x - y, 0) in [(x, y/3), (y, 3*x)]
assert solve_linear(3*x - y, 0, [x]) == (x, y/3)
assert solve_linear(3*x - y, 0, [y]) == (y, 3*x)
assert solve_linear(x**2/y, 1) == (y, x**2)
assert solve_linear(w, x) in [(w, x), (x, w)]
assert solve_linear(cos(x)**2 + sin(x)**2 + 2 + y) == \
(y, -2 - cos(x)**2 - sin(x)**2)
assert solve_linear(cos(x)**2 + sin(x)**2 + 2 + y, symbols=[x]) == (0, 1)
assert solve_linear(Eq(x, 3)) == (x, 3)
assert solve_linear(1/(1/x - 2)) == (0, 0)
assert solve_linear((x + 1)*exp(-x), symbols=[x]) == (x, -1)
assert solve_linear((x + 1)*exp(x), symbols=[x]) == ((x + 1)*exp(x), 1)
assert solve_linear(x*exp(-x**2), symbols=[x]) == (x, 0)
assert solve_linear(0**x - 1) == (0**x - 1, 1)
assert solve_linear(1 + 1/(x - 1)) == (x, 0)
eq = y*cos(x)**2 + y*sin(x)**2 - y # = y*(1 - 1) = 0
assert solve_linear(eq) == (0, 1)
eq = cos(x)**2 + sin(x)**2 # = 1
assert solve_linear(eq) == (0, 1)
raises(ValueError, lambda: solve_linear(Eq(x, 3), 3))
def test_solve_undetermined_coeffs():
assert solve_undetermined_coeffs(a*x**2 + b*x**2 + b*x + 2*c*x + c + 1, [a, b, c], x) == \
{a: -2, b: 2, c: -1}
# Test that rational functions work
assert solve_undetermined_coeffs(a/x + b/(x + 1) - (2*x + 1)/(x**2 + x), [a, b], x) == \
{a: 1, b: 1}
# Test cancellation in rational functions
assert solve_undetermined_coeffs(((c + 1)*a*x**2 + (c + 1)*b*x**2 +
(c + 1)*b*x + (c + 1)*2*c*x + (c + 1)**2)/(c + 1), [a, b, c], x) == \
{a: -2, b: 2, c: -1}
def test_solve_inequalities():
x = Symbol('x')
system = [Lt(x**2 - 2, 0), Gt(x**2 - 1, 0)]
assert solve(system) == \
And(Or(And(Lt(-sqrt(2), x), Lt(x, -1)),
And(Lt(1, x), Lt(x, sqrt(2)))), Eq(0, 0))
x = Symbol('x', real=True)
system = [Lt(x**2 - 2, 0), Gt(x**2 - 1, 0)]
assert solve(system) == \
Or(And(Lt(-sqrt(2), x), Lt(x, -1)), And(Lt(1, x), Lt(x, sqrt(2))))
# issue 6627, 3448
assert solve((x - 3)/(x - 2) < 0, x) == And(Lt(2, x), Lt(x, 3))
assert solve(x/(x + 1) > 1, x) == And(Lt(-oo, x), Lt(x, -1))
assert solve(sin(x) > S.Half) == And(pi/6 < x, x < 5*pi/6)
def test_issue_4793():
assert solve(1/x) == []
assert solve(x*(1 - 5/x)) == [5]
assert solve(x + sqrt(x) - 2) == [1]
assert solve(-(1 + x)/(2 + x)**2 + 1/(2 + x)) == []
assert solve(-x**2 - 2*x + (x + 1)**2 - 1) == []
assert solve((x/(x + 1) + 3)**(-2)) == []
assert solve(x/sqrt(x**2 + 1), x) == [0]
assert solve(exp(x) - y, x) == [log(y)]
assert solve(exp(x)) == []
assert solve(x**2 + x + sin(y)**2 + cos(y)**2 - 1, x) in [[0, -1], [-1, 0]]
eq = 4*3**(5*x + 2) - 7
ans = solve(eq, x)
assert len(ans) == 5 and all(eq.subs(x, a).n(chop=True) == 0 for a in ans)
assert solve(log(x**2) - y**2/exp(x), x, y, set=True) == \
([y], set([
(-sqrt(exp(x)*log(x**2)),),
(sqrt(exp(x)*log(x**2)),)]))
assert solve(x**2*z**2 - z**2*y**2) == [{x: -y}, {x: y}, {z: 0}]
assert solve((x - 1)/(1 + 1/(x - 1))) == []
assert solve(x**(y*z) - x, x) == [1]
raises(NotImplementedError, lambda: solve(log(x) - exp(x), x))
raises(NotImplementedError, lambda: solve(2**x - exp(x) - 3))
def test_PR1964():
# issue 5171
assert solve(sqrt(x)) == solve(sqrt(x**3)) == [0]
assert solve(sqrt(x - 1)) == [1]
# issue 4462
a = Symbol('a')
assert solve(-3*a/sqrt(x), x) == []
# issue 4486
assert solve(2*x/(x + 2) - 1, x) == [2]
# issue 4496
assert set(solve((x**2/(7 - x)).diff(x))) == set([S(0), S(14)])
# issue 4695
f = Function('f')
assert solve((3 - 5*x/f(x))*f(x), f(x)) == [5*x/3]
# issue 4497
assert solve(1/root(5 + x, 5) - 9, x) == [-295244/S(59049)]
assert solve(sqrt(x) + sqrt(sqrt(x)) - 4) == [(-S.Half + sqrt(17)/2)**4]
assert set(solve(Poly(sqrt(exp(x)) + sqrt(exp(-x)) - 4))) in \
[
set([log((-sqrt(3) + 2)**2), log((sqrt(3) + 2)**2)]),
set([2*log(-sqrt(3) + 2), 2*log(sqrt(3) + 2)]),
set([log(-4*sqrt(3) + 7), log(4*sqrt(3) + 7)]),
]
assert set(solve(Poly(exp(x) + exp(-x) - 4))) == \
set([log(-sqrt(3) + 2), log(sqrt(3) + 2)])
assert set(solve(x**y + x**(2*y) - 1, x)) == \
set([(-S.Half + sqrt(5)/2)**(1/y), (-S.Half - sqrt(5)/2)**(1/y)])
assert solve(exp(x/y)*exp(-z/y) - 2, y) == [(x - z)/log(2)]
assert solve(
x**z*y**z - 2, z) in [[log(2)/(log(x) + log(y))], [log(2)/(log(x*y))]]
# if you do inversion too soon then multiple roots (as for the following)
# will be missed, e.g. if exp(3*x) = exp(3) -> 3*x = 3
E = S.Exp1
assert solve(exp(3*x) - exp(3), x) in [
[1, log(E*(-S.Half - sqrt(3)*I/2)), log(E*(-S.Half + sqrt(3)*I/2))],
[1, log(-E/2 - sqrt(3)*E*I/2), log(-E/2 + sqrt(3)*E*I/2)],
]
# coverage test
p = Symbol('p', positive=True)
assert solve((1/p + 1)**(p + 1)) == []
def test_issue_5197():
x = Symbol('x', real=True)
assert solve(x**2 + 1, x) == []
n = Symbol('n', integer=True, positive=True)
assert solve((n - 1)*(n + 2)*(2*n - 1), n) == [1]
x = Symbol('x', positive=True)
y = Symbol('y')
assert solve([x + 5*y - 2, -3*x + 6*y - 15], x, y) == []
# not {x: -3, y: 1} b/c x is positive
# The solution following should not contain (-sqrt(2), sqrt(2))
assert solve((x + y)*n - y**2 + 2, x, y) == [(sqrt(2), -sqrt(2))]
y = Symbol('y', positive=True)
# The solution following should not contain {y: -x*exp(x/2)}
assert solve(x**2 - y**2/exp(x), y, x) == [{y: x*exp(x/2)}]
assert solve(x**2 - y**2/exp(x), x, y) == [{x: 2*LambertW(y/2)}]
x, y, z = symbols('x y z', positive=True)
assert solve(z**2*x**2 - z**2*y**2/exp(x), y, x, z) == [{y: x*exp(x/2)}]
def test_checking():
assert set(
solve(x*(x - y/x), x, check=False)) == set([sqrt(y), S(0), -sqrt(y)])
assert set(solve(x*(x - y/x), x, check=True)) == set([sqrt(y), -sqrt(y)])
    # {x: 0, y: 4} sets a denominator to 0 in the following, so the system should return no solution
assert solve((1/(1/x + 2), 1/(y - 3) - 1)) == []
    # 0 sets the denominator of 1/x to zero, so no solution is returned
assert solve(1/(1/x + 2)) == []
def test_issue_4671_4463_4467():
assert solve((sqrt(x**2 - 1) - 2)) in ([sqrt(5), -sqrt(5)],
[-sqrt(5), sqrt(5)])
assert solve((2**exp(y**2/x) + 2)/(x**2 + 15), y) == [
-sqrt(x)*sqrt(-log(log(2)) + log(log(2) + I*pi)),
sqrt(x)*sqrt(-log(log(2)) + log(log(2) + I*pi))]
C1, C2 = symbols('C1 C2')
f = Function('f')
assert solve(C1 + C2/x**2 - exp(-f(x)), f(x)) == [log(x**2/(C1*x**2 + C2))]
a = Symbol('a')
E = S.Exp1
assert solve(1 - log(a + 4*x**2), x) in (
[-sqrt(-a + E)/2, sqrt(-a + E)/2],
[sqrt(-a + E)/2, -sqrt(-a + E)/2]
)
assert solve(log(a**(-3) - x**2)/a, x) in (
[-sqrt(-1 + a**(-3)), sqrt(-1 + a**(-3))],
[sqrt(-1 + a**(-3)), -sqrt(-1 + a**(-3))],)
assert solve(1 - log(a + 4*x**2), x) in (
[-sqrt(-a + E)/2, sqrt(-a + E)/2],
[sqrt(-a + E)/2, -sqrt(-a + E)/2],)
assert set(solve((
a**2 + 1) * (sin(a*x) + cos(a*x)), x)) == set([-pi/(4*a), 3*pi/(4*a)])
assert solve(3 - (sinh(a*x) + cosh(a*x)), x) == [log(3)/a]
assert set(solve(3 - (sinh(a*x) + cosh(a*x)**2), x)) == \
set([log(-2 + sqrt(5))/a, log(-sqrt(2) + 1)/a,
log(-sqrt(5) - 2)/a, log(1 + sqrt(2))/a])
assert solve(atan(x) - 1) == [tan(1)]
def test_issue_5132():
r, t = symbols('r,t')
assert set(solve([r - x**2 - y**2, tan(t) - y/x], [x, y])) == \
set([(
-sqrt(r*cos(t)**2), -1*sqrt(r*cos(t)**2)*tan(t)),
(sqrt(r*cos(t)**2), sqrt(r*cos(t)**2)*tan(t))])
assert solve([exp(x) - sin(y), 1/y - 3], [x, y]) == \
[(log(sin(S(1)/3)), S(1)/3)]
assert solve([exp(x) - sin(y), 1/exp(y) - 3], [x, y]) == \
[(log(-sin(log(3))), -log(3))]
assert set(solve([exp(x) - sin(y), y**2 - 4], [x, y])) == \
set([(log(-sin(2)), -S(2)), (log(sin(2)), S(2))])
eqs = [exp(x)**2 - sin(y) + z**2, 1/exp(y) - 3]
assert solve(eqs, set=True) == \
([x, y], set([
(log(-sqrt(-z**2 - sin(log(3)))), -log(3)),
(log(sqrt(-z**2 - sin(log(3)))), -log(3))]))
assert solve(eqs, x, z, set=True) == \
([x], set([
(log(-sqrt(-z**2 + sin(y))),),
(log(sqrt(-z**2 + sin(y))),)]))
assert set(solve(eqs, x, y)) == \
set([
(log(-sqrt(-z**2 - sin(log(3)))), -log(3)),
(log(sqrt(-z**2 - sin(log(3)))), -log(3))])
assert set(solve(eqs, y, z)) == \
set([
(-log(3), -sqrt(-exp(2*x) - sin(log(3)))),
(-log(3), sqrt(-exp(2*x) - sin(log(3))))])
eqs = [exp(x)**2 - sin(y) + z, 1/exp(y) - 3]
assert solve(eqs, set=True) == ([x, y], set(
[
(log(-sqrt(-z - sin(log(3)))), -log(3)),
(log(sqrt(-z - sin(log(3)))), -log(3))]))
assert solve(eqs, x, z, set=True) == ([x], set(
[
(log(-sqrt(-z + sin(y))),),
(log(sqrt(-z + sin(y))),)]))
assert set(solve(eqs, x, y)) == set(
[
(log(-sqrt(-z - sin(log(3)))), -log(3)),
(log(sqrt(-z - sin(log(3)))), -log(3))])
assert solve(eqs, z, y) == \
[(-exp(2*x) - sin(log(3)), -log(3))]
assert solve((sqrt(x**2 + y**2) - sqrt(10), x + y - 4), set=True) == (
[x, y], set([(S(1), S(3)), (S(3), S(1))]))
assert set(solve((sqrt(x**2 + y**2) - sqrt(10), x + y - 4), x, y)) == \
set([(S(1), S(3)), (S(3), S(1))])
def test_issue_5335():
lam, a0, conc = symbols('lam a0 conc')
eqs = [lam + 2*y - a0*(1 - x/2)*x - 0.005*x/2*x,
a0*(1 - x/2)*x - 1*y - 0.743436700916726*y,
x + y - conc]
sym = [x, y, a0]
# there are 4 solutions but only two are valid
assert len(solve(eqs, sym, manual=True, minimal=True, simplify=False)) == 2
@SKIP("Hangs")
def _test_issue_5335_float():
# gives ZeroDivisionError: polynomial division
lam, a0, conc = symbols('lam a0 conc')
eqs = [lam + 2*y - a0*(1 - x/2)*x - 0.005*x/2*x,
a0*(1 - x/2)*x - 1*y - 0.743436700916726*y,
x + y - conc]
sym = [x, y, a0]
assert len(
solve(eqs, sym, rational=False, check=False, simplify=False)) == 2
def test_issue_5767():
assert set(solve([x**2 + y + 4], [x])) == \
set([(-sqrt(-y - 4),), (sqrt(-y - 4),)])
def test_polysys():
assert set(solve([x**2 + 2/y - 2, x + y - 3], [x, y])) == \
set([(S(1), S(2)), (1 + sqrt(5), 2 - sqrt(5)),
(1 - sqrt(5), 2 + sqrt(5))])
assert solve([x**2 + y - 2, x**2 + y]) == []
# the ordering should be whatever the user requested
assert solve([x**2 + y - 3, x - y - 4], (x, y)) != solve([x**2 +
y - 3, x - y - 4], (y, x))
@slow
def test_unrad1():
raises(NotImplementedError, lambda:
unrad(sqrt(x) + sqrt(x + 1) + sqrt(1 - sqrt(x)) + 3))
raises(NotImplementedError, lambda:
unrad(sqrt(x) + (x + 1)**Rational(1, 3) + 2*sqrt(y)))
s = symbols('s', cls=Dummy)
    # checkers to deal with the possibility of the answer coming
    # back with a sign change (cf issue 5203)
def check(rv, ans):
assert bool(rv[1]) == bool(ans[1])
if ans[1]:
return s_check(rv, ans)
e = rv[0].expand()
a = ans[0].expand()
return e in [a, -a] and rv[1] == ans[1]
def s_check(rv, ans):
# get the dummy
rv = list(rv)
d = rv[0].atoms(Dummy)
reps = list(zip(d, [s]*len(d)))
        # replace each dummy with s
rv = (rv[0].subs(reps).expand(), [rv[1][0].subs(reps), rv[1][1].subs(reps)])
ans = (ans[0].subs(reps).expand(), [ans[1][0].subs(reps), ans[1][1].subs(reps)])
return str(rv[0]) in [str(ans[0]), str(-ans[0])] and \
str(rv[1]) == str(ans[1])
assert check(unrad(sqrt(x)),
(x, []))
assert check(unrad(sqrt(x) + 1),
(x - 1, []))
assert check(unrad(sqrt(x) + root(x, 3) + 2),
(s**3 + s**2 + 2, [s, s**6 - x]))
assert check(unrad(sqrt(x)*root(x, 3) + 2),
(x**5 - 64, []))
assert check(unrad(sqrt(x) + (x + 1)**Rational(1, 3)),
(x**3 - (x + 1)**2, []))
assert check(unrad(sqrt(x) + sqrt(x + 1) + sqrt(2*x)),
(-2*sqrt(2)*x - 2*x + 1, []))
assert check(unrad(sqrt(x) + sqrt(x + 1) + 2),
(16*x - 9, []))
assert check(unrad(sqrt(x) + sqrt(x + 1) + sqrt(1 - x)),
(5*x**2 - 4*x, []))
assert check(unrad(a*sqrt(x) + b*sqrt(x) + c*sqrt(y) + d*sqrt(y)),
((a*sqrt(x) + b*sqrt(x))**2 - (c*sqrt(y) + d*sqrt(y))**2, []))
assert check(unrad(sqrt(x) + sqrt(1 - x)),
(2*x - 1, []))
assert check(unrad(sqrt(x) + sqrt(1 - x) - 3),
(x**2 - x + 16, []))
assert check(unrad(sqrt(x) + sqrt(1 - x) + sqrt(2 + x)),
(5*x**2 - 2*x + 1, []))
assert unrad(sqrt(x) + sqrt(1 - x) + sqrt(2 + x) - 3) in [
(25*x**4 + 376*x**3 + 1256*x**2 - 2272*x + 784, []),
(25*x**8 - 476*x**6 + 2534*x**4 - 1468*x**2 + 169, [])]
assert unrad(sqrt(x) + sqrt(1 - x) + sqrt(2 + x) - sqrt(1 - 2*x)) == \
(41*x**4 + 40*x**3 + 232*x**2 - 160*x + 16, []) # orig root at 0.487
assert check(unrad(sqrt(x) + sqrt(x + 1)), (S(1), []))
eq = sqrt(x) + sqrt(x + 1) + sqrt(1 - sqrt(x))
assert check(unrad(eq),
(16*x**2 - 9*x, []))
assert set(solve(eq, check=False)) == set([S(0), S(9)/16])
assert solve(eq) == []
# but this one really does have those solutions
assert set(solve(sqrt(x) - sqrt(x + 1) + sqrt(1 - sqrt(x)))) == \
set([S.Zero, S(9)/16])
assert check(unrad(sqrt(x) + root(x + 1, 3) + 2*sqrt(y), y),
(S('2*sqrt(x)*(x + 1)**(1/3) + x - 4*y + (x + 1)**(2/3)'), []))
assert check(unrad(sqrt(x/(1 - x)) + (x + 1)**Rational(1, 3)),
(x**5 - x**4 - x**3 + 2*x**2 + x - 1, []))
assert check(unrad(sqrt(x/(1 - x)) + 2*sqrt(y), y),
(4*x*y + x - 4*y, []))
assert check(unrad(sqrt(x)*sqrt(1 - x) + 2, x),
(x**2 - x + 4, []))
# http://tutorial.math.lamar.edu/
# Classes/Alg/SolveRadicalEqns.aspx#Solve_Rad_Ex2_a
assert solve(Eq(x, sqrt(x + 6))) == [3]
assert solve(Eq(x + sqrt(x - 4), 4)) == [4]
assert solve(Eq(1, x + sqrt(2*x - 3))) == []
assert set(solve(Eq(sqrt(5*x + 6) - 2, x))) == set([-S(1), S(2)])
assert set(solve(Eq(sqrt(2*x - 1) - sqrt(x - 4), 2))) == set([S(5), S(13)])
assert solve(Eq(sqrt(x + 7) + 2, sqrt(3 - x))) == [-6]
# http://www.purplemath.com/modules/solverad.htm
assert solve((2*x - 5)**Rational(1, 3) - 3) == [16]
assert set(solve(x + 1 - root(x**4 + 4*x**3 - x, 4))) == \
set([-S(1)/2, -S(1)/3])
assert set(solve(sqrt(2*x**2 - 7) - (3 - x))) == set([-S(8), S(2)])
assert solve(sqrt(2*x + 9) - sqrt(x + 1) - sqrt(x + 4)) == [0]
assert solve(sqrt(x + 4) + sqrt(2*x - 1) - 3*sqrt(x - 1)) == [5]
assert solve(sqrt(x)*sqrt(x - 7) - 12) == [16]
assert solve(sqrt(x - 3) + sqrt(x) - 3) == [4]
assert solve(sqrt(9*x**2 + 4) - (3*x + 2)) == [0]
assert solve(sqrt(x) - 2 - 5) == [49]
assert solve(sqrt(x - 3) - sqrt(x) - 3) == []
assert solve(sqrt(x - 1) - x + 7) == [10]
assert solve(sqrt(x - 2) - 5) == [27]
assert solve(sqrt(17*x - sqrt(x**2 - 5)) - 7) == [3]
assert solve(sqrt(x) - sqrt(x - 1) + sqrt(sqrt(x))) == []
# don't posify the expression in unrad and do use _mexpand
z = sqrt(2*x + 1)/sqrt(x) - sqrt(2 + 1/x)
p = posify(z)[0]
assert solve(p) == []
assert solve(z) == []
assert solve(z + 6*I) == [-S(1)/11]
assert solve(p + 6*I) == []
# issue 8622
assert unrad((root(x + 1, 5) - root(x, 3))) == (
x**5 - x**3 - 3*x**2 - 3*x - 1, [])
# issue #8679
assert check(unrad(x + root(x, 3) + root(x, 3)**2 + sqrt(y), x),
(s**3 + s**2 + s + sqrt(y), [s, s**3 - x]))
# for coverage
assert check(unrad(sqrt(x) + root(x, 3) + y),
(s**3 + s**2 + y, [s, s**6 - x]))
assert solve(sqrt(x) + root(x, 3) - 2) == [1]
raises(NotImplementedError, lambda:
solve(sqrt(x) + root(x, 3) + root(x + 1, 5) - 2))
# fails through a different code path
raises(NotImplementedError, lambda: solve(-sqrt(2) + cosh(x)/x))
# unrad some
assert solve(sqrt(x + root(x, 3))+root(x - y, 5), y) == [
x + (x**(S(1)/3) + x)**(S(5)/2)]
assert check(unrad(sqrt(x) - root(x + 1, 3)*sqrt(x + 2) + 2),
(s**10 + 8*s**8 + 24*s**6 - 12*s**5 - 22*s**4 - 160*s**3 - 212*s**2 -
192*s - 56, [s, s**2 - x]))
e = root(x + 1, 3) + root(x, 3)
assert unrad(e) == (2*x + 1, [])
eq = (sqrt(x) + sqrt(x + 1) + sqrt(1 - x) - 6*sqrt(5)/5)
assert check(unrad(eq),
(15625*x**4 + 173000*x**3 + 355600*x**2 - 817920*x + 331776, []))
assert check(unrad(root(x, 4) + root(x, 4)**3 - 1),
(s**3 + s - 1, [s, s**4 - x]))
assert check(unrad(root(x, 2) + root(x, 2)**3 - 1),
(x**3 + 2*x**2 + x - 1, []))
assert unrad(x**0.5) is None
assert check(unrad(t + root(x + y, 5) + root(x + y, 5)**3),
(s**3 + s + t, [s, s**5 - x - y]))
assert check(unrad(x + root(x + y, 5) + root(x + y, 5)**3, y),
(s**3 + s + x, [s, s**5 - x - y]))
assert check(unrad(x + root(x + y, 5) + root(x + y, 5)**3, x),
(s**5 + s**3 + s - y, [s, s**5 - x - y]))
assert check(unrad(root(x - 1, 3) + root(x + 1, 5) + root(2, 5)),
(s**5 + 5*2**(S(1)/5)*s**4 + s**3 + 10*2**(S(2)/5)*s**3 +
10*2**(S(3)/5)*s**2 + 5*2**(S(4)/5)*s + 4, [s, s**3 - x + 1]))
raises(NotImplementedError, lambda:
unrad((root(x, 2) + root(x, 3) + root(x, 4)).subs(x, x**5 - x + 1)))
# the simplify flag should be reset to False for unrad results;
# if it's not then this next test will take a long time
assert solve(root(x, 3) + root(x, 5) - 2) == [1]
eq = (sqrt(x) + sqrt(x + 1) + sqrt(1 - x) - 6*sqrt(5)/5)
assert check(unrad(eq),
((5*x - 4)*(3125*x**3 + 37100*x**2 + 100800*x - 82944), []))
ans = S('''
[4/5, -1484/375 + 172564/(140625*(114*sqrt(12657)/78125 +
12459439/52734375)**(1/3)) +
4*(114*sqrt(12657)/78125 + 12459439/52734375)**(1/3)]''')
assert solve(eq) == ans
# duplicate radical handling
assert check(unrad(sqrt(x + root(x + 1, 3)) - root(x + 1, 3) - 2),
(s**3 - s**2 - 3*s - 5, [s, s**3 - x - 1]))
# cov post-processing
e = root(x**2 + 1, 3) - root(x**2 - 1, 5) - 2
assert check(unrad(e),
(s**5 - 10*s**4 + 39*s**3 - 80*s**2 + 80*s - 30,
[s, s**3 - x**2 - 1]))
e = sqrt(x + root(x + 1, 2)) - root(x + 1, 3) - 2
assert check(unrad(e),
(s**6 - 2*s**5 - 7*s**4 - 3*s**3 + 26*s**2 + 40*s + 25,
[s, s**3 - x - 1]))
assert check(unrad(e, _reverse=True),
(s**6 - 14*s**5 + 73*s**4 - 187*s**3 + 276*s**2 - 228*s + 89,
[s, s**2 - x - sqrt(x + 1)]))
# this one needs r0, r1 reversal to work
assert check(unrad(sqrt(x + sqrt(root(x, 3) - 1)) - root(x, 6) - 2),
(s**12 - 2*s**8 - 8*s**7 - 8*s**6 + s**4 + 8*s**3 + 23*s**2 +
32*s + 17, [s, s**6 - x]))
# is this needed?
#assert unrad(root(cosh(x), 3)/x*root(x + 1, 5) - 1) == (
# x**15 - x**3*cosh(x)**5 - 3*x**2*cosh(x)**5 - 3*x*cosh(x)**5 - cosh(x)**5, [])
raises(NotImplementedError, lambda:
unrad(sqrt(cosh(x)/x) + root(x + 1,3)*sqrt(x) - 1))
assert unrad(S('(x+y)**(2*y/3) + (x+y)**(1/3) + 1')) is None
assert check(unrad(S('(x+y)**(2*y/3) + (x+y)**(1/3) + 1'), x),
(s**(2*y) + s + 1, [s, s**3 - x - y]))
    # This tests two things: that if full unrad is attempted and fails,
    # the solution should still be found; also it tests the use of the
    # keyword `composite`
assert len(solve(sqrt(y)*x + x**3 - 1, x)) == 3
assert len(solve(-512*y**3 + 1344*(x + 2)**(S(1)/3)*y**2 -
1176*(x + 2)**(S(2)/3)*y - 169*x + 686, y, _unrad=False)) == 3
# watch out for when the cov doesn't involve the symbol of interest
eq = S('-x + (7*y/8 - (27*x/2 + 27*sqrt(x**2)/2)**(1/3)/3)**3 - 1')
assert solve(eq, y) == [
4*2**(S(2)/3)*(27*x + 27*sqrt(x**2))**(S(1)/3)/21 - (-S(1)/2 -
sqrt(3)*I/2)*(-6912*x/343 + sqrt((-13824*x/343 - S(13824)/343)**2)/2 -
S(6912)/343)**(S(1)/3)/3, 4*2**(S(2)/3)*(27*x + 27*sqrt(x**2))**(S(1)/3)/21 -
(-S(1)/2 + sqrt(3)*I/2)*(-6912*x/343 + sqrt((-13824*x/343 -
S(13824)/343)**2)/2 - S(6912)/343)**(S(1)/3)/3, 4*2**(S(2)/3)*(27*x +
27*sqrt(x**2))**(S(1)/3)/21 - (-6912*x/343 + sqrt((-13824*x/343 -
S(13824)/343)**2)/2 - S(6912)/343)**(S(1)/3)/3]
eq = root(x + 1, 3) - (root(x, 3) + root(x, 5))
assert check(unrad(eq),
(3*s**13 + 3*s**11 + s**9 - 1, [s, s**15 - x]))
assert check(unrad(eq - 2),
(3*s**13 + 3*s**11 + 6*s**10 + s**9 + 12*s**8 + 6*s**6 + 12*s**5 +
12*s**3 + 7, [s, s**15 - x]))
assert check(unrad(root(x, 3) - root(x + 1, 4)/2 + root(x + 2, 3)),
(4096*s**13 + 960*s**12 + 48*s**11 - s**10 - 1728*s**4,
[s, s**4 - x - 1])) # orig expr has two real roots: -1, -.389
assert check(unrad(root(x, 3) + root(x + 1, 4) - root(x + 2, 3)/2),
(343*s**13 + 2904*s**12 + 1344*s**11 + 512*s**10 - 1323*s**9 -
3024*s**8 - 1728*s**7 + 1701*s**5 + 216*s**4 - 729*s, [s, s**4 - x -
1])) # orig expr has one real root: -0.048
assert check(unrad(root(x, 3)/2 - root(x + 1, 4) + root(x + 2, 3)),
(729*s**13 - 216*s**12 + 1728*s**11 - 512*s**10 + 1701*s**9 -
3024*s**8 + 1344*s**7 + 1323*s**5 - 2904*s**4 + 343*s, [s, s**4 - x -
1])) # orig expr has 2 real roots: -0.91, -0.15
assert check(unrad(root(x, 3)/2 - root(x + 1, 4) + root(x + 2, 3) - 2),
(729*s**13 + 1242*s**12 + 18496*s**10 + 129701*s**9 + 388602*s**8 +
453312*s**7 - 612864*s**6 - 3337173*s**5 - 6332418*s**4 - 7134912*s**3
- 5064768*s**2 - 2111913*s - 398034, [s, s**4 - x - 1]))
# orig expr has 1 real root: 19.53
ans = solve(sqrt(x) + sqrt(x + 1) -
sqrt(1 - x) - sqrt(2 + x))
assert len(ans) == 1 and NS(ans[0])[:4] == '0.73'
# the fence optimization problem
# https://github.com/sympy/sympy/issues/4793#issuecomment-36994519
F = Symbol('F')
eq = F - (2*x + 2*y + sqrt(x**2 + y**2))
ans = 2*F/7 - sqrt(2)*F/14
X = solve(eq, x, check=False)
for xi in reversed(X): # reverse since currently, ans is the 2nd one
Y = solve((x*y).subs(x, xi).diff(y), y, simplify=False, check=False)
if any((a - ans).expand().is_zero for a in Y):
break
else:
assert None # no answer was found
assert solve(sqrt(x + 1) + root(x, 3) - 2) == S('''
[(-11/(9*(47/54 + sqrt(93)/6)**(1/3)) + 1/3 + (47/54 +
sqrt(93)/6)**(1/3))**3]''')
assert solve(sqrt(sqrt(x + 1)) + x**Rational(1, 3) - 2) == S('''
[(-sqrt(-2*(-1/16 + sqrt(6913)/16)**(1/3) + 6/(-1/16 +
sqrt(6913)/16)**(1/3) + 17/2 + 121/(4*sqrt(-6/(-1/16 +
sqrt(6913)/16)**(1/3) + 2*(-1/16 + sqrt(6913)/16)**(1/3) + 17/4)))/2 +
sqrt(-6/(-1/16 + sqrt(6913)/16)**(1/3) + 2*(-1/16 +
sqrt(6913)/16)**(1/3) + 17/4)/2 + 9/4)**3]''')
assert solve(sqrt(x) + root(sqrt(x) + 1, 3) - 2) == S('''
[(-(81/2 + 3*sqrt(741)/2)**(1/3)/3 + (81/2 + 3*sqrt(741)/2)**(-1/3) +
2)**2]''')
eq = S('''
-x + (1/2 - sqrt(3)*I/2)*(3*x**3/2 - x*(3*x**2 - 34)/2 + sqrt((-3*x**3
+ x*(3*x**2 - 34) + 90)**2/4 - 39304/27) - 45)**(1/3) + 34/(3*(1/2 -
sqrt(3)*I/2)*(3*x**3/2 - x*(3*x**2 - 34)/2 + sqrt((-3*x**3 + x*(3*x**2
- 34) + 90)**2/4 - 39304/27) - 45)**(1/3))''')
assert check(unrad(eq),
(-s*(-s**6 + sqrt(3)*s**6*I - 153*2**(S(2)/3)*3**(S(1)/3)*s**4 +
51*12**(S(1)/3)*s**4 - 102*2**(S(2)/3)*3**(S(5)/6)*s**4*I - 1620*s**3 +
1620*sqrt(3)*s**3*I + 13872*18**(S(1)/3)*s**2 - 471648 +
471648*sqrt(3)*I), [s, s**3 - 306*x - sqrt(3)*sqrt(31212*x**2 -
165240*x + 61484) + 810]))
assert solve(eq) == [] # not other code errors
@slow
def test_unrad_slow():
    # this has roots with multiplicity > 1; there should be no
    # repeats in the roots obtained, however
eq = (sqrt(1 + sqrt(1 - 4*x**2)) - x*((1 + sqrt(1 + 2*sqrt(1 - 4*x**2)))))
assert solve(eq) == [S.Half]
@XFAIL
def test_unrad_fail():
# this only works if we check real_root(eq.subs(x, S(1)/3))
# but checksol doesn't work like that
assert solve(root(x**3 - 3*x**2, 3) + 1 - x) == [S(1)/3]
assert solve(root(x + 1, 3) + root(x**2 - 2, 5) + 1) == [
-1, -1 + CRootOf(x**5 + x**4 + 5*x**3 + 8*x**2 + 10*x + 5, 0)**3]
def test_checksol():
x, y, r, t = symbols('x, y, r, t')
eq = r - x**2 - y**2
dict_var_soln = {y: - sqrt(r) / sqrt(tan(t)**2 + 1),
x: -sqrt(r)*tan(t)/sqrt(tan(t)**2 + 1)}
assert checksol(eq, dict_var_soln) == True
def test__invert():
assert _invert(x - 2) == (2, x)
assert _invert(2) == (2, 0)
assert _invert(exp(1/x) - 3, x) == (1/log(3), x)
assert _invert(exp(1/x + a/x) - 3, x) == ((a + 1)/log(3), x)
assert _invert(a, x) == (a, 0)
def test_issue_4463():
assert solve(-a*x + 2*x*log(x), x) == [exp(a/2)]
assert solve(a/x + exp(x/2), x) == [2*LambertW(-a/2)]
assert solve(x**x) == []
assert solve(x**x - 2) == [exp(LambertW(log(2)))]
assert solve(((x - 3)*(x - 2))**((x - 3)*(x - 4))) == [2]
assert solve(
(a/x + exp(x/2)).diff(x), x) == [4*LambertW(sqrt(2)*sqrt(a)/4)]
def test_issue_5114():
a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r = symbols('a:r')
# there is no 'a' in the equation set but this is how the
# problem was originally posed
syms = a, b, c, f, h, k, n
eqs = [b + r/d - c/d,
c*(1/d + 1/e + 1/g) - f/g - r/d,
f*(1/g + 1/i + 1/j) - c/g - h/i,
h*(1/i + 1/l + 1/m) - f/i - k/m,
k*(1/m + 1/o + 1/p) - h/m - n/p,
n*(1/p + 1/q) - k/p]
assert len(solve(eqs, syms, manual=True, check=False, simplify=False)) == 1
def test_issue_5849():
I1, I2, I3, I4, I5, I6 = symbols('I1:7')
dI1, dI4, dQ2, dQ4, Q2, Q4 = symbols('dI1,dI4,dQ2,dQ4,Q2,Q4')
e = (
I1 - I2 - I3,
I3 - I4 - I5,
I4 + I5 - I6,
-I1 + I2 + I6,
-2*I1 - 2*I3 - 2*I5 - 3*I6 - dI1/2 + 12,
-I4 + dQ4,
-I2 + dQ2,
2*I3 + 2*I5 + 3*I6 - Q2,
I4 - 2*I5 + 2*Q4 + dI4
)
ans = [{
dQ4: I3 - I5,
dI1: -4*I2 - 8*I3 - 4*I5 - 6*I6 + 24,
I4: I3 - I5,
dQ2: I2,
Q2: 2*I3 + 2*I5 + 3*I6,
I1: I2 + I3,
Q4: -I3/2 + 3*I5/2 - dI4/2}]
v = I1, I4, Q2, Q4, dI1, dI4, dQ2, dQ4
assert solve(e, *v, **dict(manual=True, check=False)) == ans
assert solve(e, *v, **dict(manual=True)) == []
# the matrix solver (tested below) doesn't like this because it produces
# a zero row in the matrix. Is this related to issue 4551?
assert [ei.subs(
ans[0]) for ei in e] == [0, 0, I3 - I6, -I3 + I6, 0, 0, 0, 0, 0]
def test_issue_5849_matrix():
    '''Same as test_issue_5849 but solved with the matrix solver.'''
I1, I2, I3, I4, I5, I6 = symbols('I1:7')
dI1, dI4, dQ2, dQ4, Q2, Q4 = symbols('dI1,dI4,dQ2,dQ4,Q2,Q4')
e = (
I1 - I2 - I3,
I3 - I4 - I5,
I4 + I5 - I6,
-I1 + I2 + I6,
-2*I1 - 2*I3 - 2*I5 - 3*I6 - dI1/2 + 12,
-I4 + dQ4,
-I2 + dQ2,
2*I3 + 2*I5 + 3*I6 - Q2,
I4 - 2*I5 + 2*Q4 + dI4
)
assert solve(e, I1, I4, Q2, Q4, dI1, dI4, dQ2, dQ4) == {
dI4: -I3 + 3*I5 - 2*Q4,
dI1: -4*I2 - 8*I3 - 4*I5 - 6*I6 + 24,
dQ2: I2,
I1: I2 + I3,
Q2: 2*I3 + 2*I5 + 3*I6,
dQ4: I3 - I5,
I4: I3 - I5}
def test_issue_5901():
f, g, h = map(Function, 'fgh')
a = Symbol('a')
D = Derivative(f(x), x)
G = Derivative(g(a), a)
assert solve(f(x) + f(x).diff(x), f(x)) == \
[-D]
assert solve(f(x) - 3, f(x)) == \
[3]
assert solve(f(x) - 3*f(x).diff(x), f(x)) == \
[3*D]
assert solve([f(x) - 3*f(x).diff(x)], f(x)) == \
{f(x): 3*D}
assert solve([f(x) - 3*f(x).diff(x), f(x)**2 - y + 4], f(x), y) == \
[{f(x): 3*D, y: 9*D**2 + 4}]
assert solve(-f(a)**2*g(a)**2 + f(a)**2*h(a)**2 + g(a).diff(a),
h(a), g(a), set=True) == \
([g(a)], set([
(-sqrt(h(a)**2*f(a)**2 + G)/f(a),),
(sqrt(h(a)**2*f(a)**2+ G)/f(a),)]))
args = [f(x).diff(x, 2)*(f(x) + g(x)) - g(x)**2 + 2, f(x), g(x)]
assert set(solve(*args)) == \
set([(-sqrt(2), sqrt(2)), (sqrt(2), -sqrt(2))])
eqs = [f(x)**2 + g(x) - 2*f(x).diff(x), g(x)**2 - 4]
assert solve(eqs, f(x), g(x), set=True) == \
([f(x), g(x)], set([
(-sqrt(2*D - 2), S(2)),
(sqrt(2*D - 2), S(2)),
(-sqrt(2*D + 2), -S(2)),
(sqrt(2*D + 2), -S(2))]))
    # the underlying problem was in solve_linear, which was not masking off
    # anything but a Mul or Add; it now raises an error if it gets anything
    # but a symbol, and solve handles the substitutions necessary so that
    # solve_linear won't raise this error
raises(
ValueError, lambda: solve_linear(f(x) + f(x).diff(x), symbols=[f(x)]))
assert solve_linear(f(x) + f(x).diff(x), symbols=[x]) == \
(f(x) + Derivative(f(x), x), 1)
assert solve_linear(f(x) + Integral(x, (x, y)), symbols=[x]) == \
(f(x) + Integral(x, (x, y)), 1)
assert solve_linear(f(x) + Integral(x, (x, y)) + x, symbols=[x]) == \
(x + f(x) + Integral(x, (x, y)), 1)
assert solve_linear(f(y) + Integral(x, (x, y)) + x, symbols=[x]) == \
(x, -f(y) - Integral(x, (x, y)))
assert solve_linear(x - f(x)/a + (f(x) - 1)/a, symbols=[x]) == \
(x, 1/a)
assert solve_linear(x + Derivative(2*x, x)) == \
(x, -2)
assert solve_linear(x + Integral(x, y), symbols=[x]) == \
(x, 0)
assert solve_linear(x + Integral(x, y) - 2, symbols=[x]) == \
(x, 2/(y + 1))
assert set(solve(x + exp(x)**2, exp(x))) == \
set([-sqrt(-x), sqrt(-x)])
assert solve(x + exp(x), x, implicit=True) == \
[-exp(x)]
assert solve(cos(x) - sin(x), x, implicit=True) == []
assert solve(x - sin(x), x, implicit=True) == \
[sin(x)]
assert solve(x**2 + x - 3, x, implicit=True) == \
[-x**2 + 3]
assert solve(x**2 + x - 3, x**2, implicit=True) == \
[-x + 3]
def test_issue_5912():
assert set(solve(x**2 - x - 0.1, rational=True)) == \
set([S(1)/2 + sqrt(35)/10, -sqrt(35)/10 + S(1)/2])
ans = solve(x**2 - x - 0.1, rational=False)
assert len(ans) == 2 and all(a.is_Number for a in ans)
ans = solve(x**2 - x - 0.1)
assert len(ans) == 2 and all(a.is_Number for a in ans)
def test_float_handling():
def test(e1, e2):
return len(e1.atoms(Float)) == len(e2.atoms(Float))
assert solve(x - 0.5, rational=True)[0].is_Rational
assert solve(x - 0.5, rational=False)[0].is_Float
assert solve(x - S.Half, rational=False)[0].is_Rational
assert solve(x - 0.5, rational=None)[0].is_Float
assert solve(x - S.Half, rational=None)[0].is_Rational
assert test(nfloat(1 + 2*x), 1.0 + 2.0*x)
for contain in [list, tuple, set]:
ans = nfloat(contain([1 + 2*x]))
assert type(ans) is contain and test(list(ans)[0], 1.0 + 2.0*x)
k, v = list(nfloat({2*x: [1 + 2*x]}).items())[0]
assert test(k, 2*x) and test(v[0], 1.0 + 2.0*x)
assert test(nfloat(cos(2*x)), cos(2.0*x))
assert test(nfloat(3*x**2), 3.0*x**2)
assert test(nfloat(3*x**2, exponent=True), 3.0*x**2.0)
assert test(nfloat(exp(2*x)), exp(2.0*x))
assert test(nfloat(x/3), x/3.0)
assert test(nfloat(x**4 + 2*x + cos(S(1)/3) + 1),
x**4 + 2.0*x + 1.94495694631474)
# don't call nfloat if there is no solution
tot = 100 + c + z + t
assert solve(((.7 + c)/tot - .6, (.2 + z)/tot - .3, t/tot - .1)) == []
def test_check_assumptions():
x = symbols('x', positive=True)
assert solve(x**2 - 1) == [1]
assert check_assumptions(1, x) == True
def test_issue_6056():
assert solve(tanh(x + 3)*tanh(x - 3) - 1) == []
assert set([simplify(w) for w in solve(tanh(x - 1)*tanh(x + 1) + 1)]) == set([
-log(2)/2 + log(1 - I),
-log(2)/2 + log(-1 - I),
-log(2)/2 + log(1 + I),
-log(2)/2 + log(-1 + I),])
assert set([simplify(w) for w in solve((tanh(x + 3)*tanh(x - 3) + 1)**2)]) == set([
-log(2)/2 + log(1 - I),
-log(2)/2 + log(-1 - I),
-log(2)/2 + log(1 + I),
-log(2)/2 + log(-1 + I),])
def test_issue_6060():
x = Symbol('x')
absxm3 = Piecewise(
(x - 3, S(0) <= x - 3),
(3 - x, S(0) > x - 3)
)
y = Symbol('y')
assert solve(absxm3 - y, x) == [
Piecewise((-y + 3, y > 0), (S.NaN, True)),
Piecewise((y + 3, 0 <= y), (S.NaN, True))
]
y = Symbol('y', positive=True)
assert solve(absxm3 - y, x) == [-y + 3, y + 3]
def test_issue_5673():
eq = -x + exp(exp(LambertW(log(x)))*LambertW(log(x)))
assert checksol(eq, x, 2) is True
assert checksol(eq, x, 2, numerical=False) is None
def test_exclude():
R, C, Ri, Vout, V1, Vminus, Vplus, s = \
symbols('R, C, Ri, Vout, V1, Vminus, Vplus, s')
Rf = symbols('Rf', positive=True) # to eliminate Rf = 0 soln
eqs = [C*V1*s + Vplus*(-2*C*s - 1/R),
Vminus*(-1/Ri - 1/Rf) + Vout/Rf,
C*Vplus*s + V1*(-C*s - 1/R) + Vout/R,
-Vminus + Vplus]
assert solve(eqs, exclude=s*C*R) == [
{
Rf: Ri*(C*R*s + 1)**2/(C*R*s),
Vminus: Vplus,
V1: 2*Vplus + Vplus/(C*R*s),
Vout: C*R*Vplus*s + 3*Vplus + Vplus/(C*R*s)},
{
Vplus: 0,
Vminus: 0,
V1: 0,
Vout: 0},
]
    # TODO: Investigate why solution [0] is currently preferred over [1].
assert solve(eqs, exclude=[Vplus, s, C]) in [[{
Vminus: Vplus,
V1: Vout/2 + Vplus/2 + sqrt((Vout - 5*Vplus)*(Vout - Vplus))/2,
R: (Vout - 3*Vplus - sqrt(Vout**2 - 6*Vout*Vplus + 5*Vplus**2))/(2*C*Vplus*s),
Rf: Ri*(Vout - Vplus)/Vplus,
}, {
Vminus: Vplus,
V1: Vout/2 + Vplus/2 - sqrt((Vout - 5*Vplus)*(Vout - Vplus))/2,
R: (Vout - 3*Vplus + sqrt(Vout**2 - 6*Vout*Vplus + 5*Vplus**2))/(2*C*Vplus*s),
Rf: Ri*(Vout - Vplus)/Vplus,
}], [{
Vminus: Vplus,
Vout: (V1**2 - V1*Vplus - Vplus**2)/(V1 - 2*Vplus),
Rf: Ri*(V1 - Vplus)**2/(Vplus*(V1 - 2*Vplus)),
R: Vplus/(C*s*(V1 - 2*Vplus)),
}]]
def test_high_order_roots():
s = x**5 + 4*x**3 + 3*x**2 + S(7)/4
assert set(solve(s)) == set(Poly(s*4, domain='ZZ').all_roots())
def test_minsolve_linear_system():
def count(dic):
return len([x for x in dic.values() if x == 0])
assert count(solve([x + y + z, y + z + a + t], particular=True, quick=True)) \
== 3
assert count(solve([x + y + z, y + z + a + t], particular=True, quick=False)) \
== 3
assert count(solve([x + y + z, y + z + a], particular=True, quick=True)) == 1
assert count(solve([x + y + z, y + z + a], particular=True, quick=False)) == 2
def test_real_roots():
# cf. issue 6650
x = Symbol('x', real=True)
assert len(solve(x**5 + x**3 + 1)) == 1
def test_issue_6528():
eqs = [
327600995*x**2 - 37869137*x + 1809975124*y**2 - 9998905626,
895613949*x**2 - 273830224*x*y + 530506983*y**2 - 10000000000]
    # two expressions encountered are > 1400 ops long so, if this hangs,
    # it is likely because simplification is being done
assert len(solve(eqs, y, x, check=False)) == 4
def test_overdetermined():
x = symbols('x', real=True)
eqs = [Abs(4*x - 7) - 5, Abs(3 - 8*x) - 1]
assert solve(eqs, x) == [(S.Half,)]
assert solve(eqs, x, manual=True) == [(S.Half,)]
assert solve(eqs, x, manual=True, check=False) == [(S.Half,), (S(3),)]
def test_issue_6605():
x = symbols('x')
assert solve(4**(x/2) - 2**(x/3)) == [0, 3*I*pi/log(2)]
# while the first one passed, this one failed
x = symbols('x', real=True)
assert solve(5**(x/2) - 2**(x/3)) == [0]
b = sqrt(6)*sqrt(log(2))/sqrt(log(5))
assert solve(5**(x/2) - 2**(3/x)) == [-b, b]
def test__ispow():
assert _ispow(x**2)
assert not _ispow(x)
assert not _ispow(True)
def test_issue_6644():
eq = -sqrt((m - q)**2 + (-m/(2*q) + S(1)/2)**2) + sqrt((-m**2/2 - sqrt(
4*m**4 - 4*m**2 + 8*m + 1)/4 - S(1)/4)**2 + (m**2/2 - m - sqrt(
4*m**4 - 4*m**2 + 8*m + 1)/4 - S(1)/4)**2)
sol = solve(eq, q, simplify=False, check=False)
assert len(sol) == 5
def test_issue_6752():
assert solve([a**2 + a, a - b], [a, b]) == [(-1, -1), (0, 0)]
assert solve([a**2 + a*c, a - b], [a, b]) == [(0, 0), (-c, -c)]
def test_issue_6792():
assert solve(x*(x - 1)**2*(x + 1)*(x**6 - x + 1)) == [
-1, 0, 1, CRootOf(x**6 - x + 1, 0), CRootOf(x**6 - x + 1, 1),
CRootOf(x**6 - x + 1, 2), CRootOf(x**6 - x + 1, 3),
CRootOf(x**6 - x + 1, 4), CRootOf(x**6 - x + 1, 5)]
def test_issues_6819_6820_6821_6248_8692():
# issue 6821
x, y = symbols('x y', real=True)
assert solve(abs(x + 3) - 2*abs(x - 3)) == [1, 9]
assert solve([abs(x) - 2, arg(x) - pi], x) == [(-2,), (2,)]
assert set(solve(abs(x - 7) - 8)) == set([-S(1), S(15)])
# issue 8692
assert solve(Eq(Abs(x + 1) + Abs(x**2 - 7), 9), x) == [
-S(1)/2 + sqrt(61)/2, -sqrt(69)/2 + S(1)/2]
# issue 7145
assert solve(2*abs(x) - abs(x - 1)) == [-1, Rational(1, 3)]
x = symbols('x')
assert solve([re(x) - 1, im(x) - 2], x) == [
{re(x): 1, x: 1 + 2*I, im(x): 2}]
# check for 'dict' handling of solution
eq = sqrt(re(x)**2 + im(x)**2) - 3
assert solve(eq) == solve(eq, x)
i = symbols('i', imaginary=True)
assert solve(abs(i) - 3) == [-3*I, 3*I]
raises(NotImplementedError, lambda: solve(abs(x) - 3))
w = symbols('w', integer=True)
assert solve(2*x**w - 4*y**w, w) == solve((x/y)**w - 2, w)
x, y = symbols('x y', real=True)
assert solve(x + y*I + 3) == {y: 0, x: -3}
# issue 2642
assert solve(x*(1 + I)) == [0]
x, y = symbols('x y', imaginary=True)
assert solve(x + y*I + 3 + 2*I) == {x: -2*I, y: 3*I}
x = symbols('x', real=True)
assert solve(x + y + 3 + 2*I) == {x: -3, y: -2*I}
# issue 6248
f = Function('f')
assert solve(f(x + 1) - f(2*x - 1)) == [2]
assert solve(log(x + 1) - log(2*x - 1)) == [2]
x = symbols('x')
assert solve(2**x + 4**x) == [I*pi/log(2)]
def test_issue_6989():
f = Function('f')
assert solve(Eq(-f(x), Piecewise((1, x > 0), (0, True))), f(x)) == \
[Piecewise((-1, x > 0), (0, True))]
def test_lambert_multivariate():
from sympy.abc import a, x, y
from sympy.solvers.bivariate import _filtered_gens, _lambert, _solve_lambert
assert _filtered_gens(Poly(x + 1/x + exp(x) + y), x) == set([x, exp(x)])
assert _lambert(x, x) == []
assert solve((x**2 - 2*x + 1).subs(x, log(x) + 3*x)) == [LambertW(3*S.Exp1)/3]
assert solve((x**2 - 2*x + 1).subs(x, (log(x) + 3*x)**2 - 1)) == \
[LambertW(3*exp(-sqrt(2)))/3, LambertW(3*exp(sqrt(2)))/3]
assert solve((x**2 - 2*x - 2).subs(x, log(x) + 3*x)) == \
[LambertW(3*exp(1 + sqrt(3)))/3, LambertW(3*exp(-sqrt(3) + 1))/3]
assert solve(x*log(x) + 3*x + 1, x) == [exp(-3 + LambertW(-exp(3)))]
eq = (x*exp(x) - 3).subs(x, x*exp(x))
assert solve(eq) == [LambertW(3*exp(-LambertW(3)))]
# coverage test
raises(NotImplementedError, lambda: solve(x - sin(x)*log(y - x), x))
# if sign is unknown then only this one solution is obtained
assert solve(3*log(a**(3*x + 5)) + a**(3*x + 5), x) == [
-((log(a**5) + LambertW(S(1)/3))/(3*log(a)))]
p = symbols('p', positive=True)
_13 = S(1)/3
_56 = S(5)/6
_53 = S(5)/3
assert solve(3*log(p**(3*x + 5)) + p**(3*x + 5), x) == [
log((-3**_13 - 3**_56*I)*LambertW(_13)**_13/(2*p**_53))/log(p),
log((-3**_13 + 3**_56*I)*LambertW(_13)**_13/(2*p**_53))/log(p),
log((3*LambertW(_13)/p**5)**(1/(3*log(p))))]
# check collection
assert solve(3*log(a**(3*x + 5)) + b*log(a**(3*x + 5)) + a**(3*x + 5), x) == [
-((log(a**5) + LambertW(1/(b + 3)))/(3*log(a)))]
eq = 4*2**(2*p + 3) - 2*p - 3
assert _solve_lambert(eq, p, _filtered_gens(Poly(eq), p)) == [
-S(3)/2 - LambertW(-4*log(2))/(2*log(2))]
# issue 4271
assert solve((a/x + exp(x/2)).diff(x, 2), x) == [
6*LambertW(root(-1, 3)*root(a, 3)/3)]
assert solve((log(x) + x).subs(x, x**2 + 1)) == [
-I*sqrt(-LambertW(1) + 1), sqrt(-1 + LambertW(1))]
# these only give one of the solutions (see XFAIL below)
assert solve(x**3 - 3**x, x) == [-3/log(3)*LambertW(-log(3)/3),
-3*LambertW(-log(3)/3, -1)/log(3)]
# replacing 3 with 2 in the above solution gives 2
assert solve(x**2 - 2**x, x) == [2, -2*LambertW(-log(2)/2, -1)/log(2)]
assert solve(-x**2 + 2**x, x) == [2, -2*LambertW(-log(2)/2, -1)/log(2)]
assert solve(3**cos(x) - cos(x)**3) == [
acos(-3*LambertW(-log(3)/3)/log(3)),
acos(-3*LambertW(-log(3)/3, -1)/log(3))]
@XFAIL
def test_other_lambert():
from sympy.abc import x
assert solve(3*sin(x) - x*sin(3), x) == [3]
assert set(solve(3*log(x) - x*log(3))) == set(
[3, -3*LambertW(-log(3)/3)/log(3)])
a = S(6)/5
assert set(solve(x**a - a**x)) == set(
[a, -a*LambertW(-log(a)/a)/log(a)])
assert set(solve(3**cos(x) - cos(x)**3)) == set(
[acos(3), acos(-3*LambertW(-log(3)/3)/log(3))])
assert set(solve(x**2 - 2**x)) == set(
[2, -2/log(2)*LambertW(log(2)/2)])
def test_rewrite_trig():
assert solve(sin(x) + tan(x)) == [0, -pi, pi, 2*pi]
assert solve(sin(x) + sec(x)) == [
-2*atan(-S.Half + sqrt(2)*sqrt(1 - sqrt(3)*I)/2 + sqrt(3)*I/2),
2*atan(S.Half - sqrt(2)*sqrt(1 + sqrt(3)*I)/2 + sqrt(3)*I/2), 2*atan(S.Half
+ sqrt(2)*sqrt(1 + sqrt(3)*I)/2 + sqrt(3)*I/2), 2*atan(S.Half -
sqrt(3)*I/2 + sqrt(2)*sqrt(1 - sqrt(3)*I)/2)]
assert solve(sinh(x) + tanh(x)) == [0, I*pi]
# issue 6157
assert solve(2*sin(x) - cos(x), x) == [-2*atan(2 + sqrt(5)),
-2*atan(-sqrt(5) + 2)]
@XFAIL
def test_rewrite_trigh():
# if this import passes then the test below should also pass
from sympy import sech
assert solve(sinh(x) + sech(x)) == [
2*atanh(-S.Half + sqrt(5)/2 - sqrt(-2*sqrt(5) + 2)/2),
2*atanh(-S.Half + sqrt(5)/2 + sqrt(-2*sqrt(5) + 2)/2),
2*atanh(-sqrt(5)/2 - S.Half + sqrt(2 + 2*sqrt(5))/2),
2*atanh(-sqrt(2 + 2*sqrt(5))/2 - sqrt(5)/2 - S.Half)]
def test_uselogcombine():
eq = z - log(x) + log(y/(x*(-1 + y**2/x**2)))
assert solve(eq, x, force=True) == [-sqrt(y*(y - exp(z))), sqrt(y*(y - exp(z)))]
assert solve(log(x + 3) + log(1 + 3/x) - 3) in [
[-3 + sqrt(-12 + exp(3))*exp(S(3)/2)/2 + exp(3)/2,
-sqrt(-12 + exp(3))*exp(S(3)/2)/2 - 3 + exp(3)/2],
[-3 + sqrt(-36 + (-exp(3) + 6)**2)/2 + exp(3)/2,
-3 - sqrt(-36 + (-exp(3) + 6)**2)/2 + exp(3)/2],
]
assert solve(log(exp(2*x) + 1) + log(-tanh(x) + 1) - log(2)) == []
def test_atan2():
assert solve(atan2(x, 2) - pi/3, x) == [2*sqrt(3)]
def test_errorinverses():
assert solve(erf(x) - y, x) == [erfinv(y)]
assert solve(erfinv(x) - y, x) == [erf(y)]
assert solve(erfc(x) - y, x) == [erfcinv(y)]
assert solve(erfcinv(x) - y, x) == [erfc(y)]
def test_issue_2725():
R = Symbol('R')
eq = sqrt(2)*R*sqrt(1/(R + 1)) + (R + 1)*(sqrt(2)*sqrt(1/(R + 1)) - 1)
sol = solve(eq, R, set=True)[1]
assert sol == set([(S(5)/3 + (-S(1)/2 - sqrt(3)*I/2)*(S(251)/27 +
sqrt(111)*I/9)**(S(1)/3) + 40/(9*((-S(1)/2 - sqrt(3)*I/2)*(S(251)/27 +
sqrt(111)*I/9)**(S(1)/3))),), (S(5)/3 + 40/(9*(S(251)/27 +
sqrt(111)*I/9)**(S(1)/3)) + (S(251)/27 + sqrt(111)*I/9)**(S(1)/3),)])
def test_issue_5114_6611():
# See that it doesn't hang; this solves in about 2 seconds.
# Also check that the solution is relatively small.
# Note: the system in issue 6611 solves in about 5 seconds and has
# an op-count of 138336 (with simplify=False).
b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r = symbols('b:r')
eqs = Matrix([
[b - c/d + r/d], [c*(1/g + 1/e + 1/d) - f/g - r/d],
[-c/g + f*(1/j + 1/i + 1/g) - h/i], [-f/i + h*(1/m + 1/l + 1/i) - k/m],
[-h/m + k*(1/p + 1/o + 1/m) - n/p], [-k/p + n*(1/q + 1/p)]])
v = Matrix([f, h, k, n, b, c])
ans = solve(list(eqs), list(v), simplify=False)
    # If time is taken to simplify then the 2617 below becomes
    # 1168 and the time is about 50 seconds instead of 2.
assert sum([s.count_ops() for s in ans.values()]) <= 2617
def test_det_quick():
m = Matrix(3, 3, symbols('a:9'))
assert m.det() == det_quick(m) # calls det_perm
m[0, 0] = 1
assert m.det() == det_quick(m) # calls det_minor
m = Matrix(3, 3, list(range(9)))
assert m.det() == det_quick(m) # defaults to .det()
# make sure they work with Sparse
s = SparseMatrix(2, 2, (1, 2, 1, 4))
assert det_perm(s) == det_minor(s) == s.det()
def test_piecewise():
# if no symbol is given the piecewise detection must still work
assert solve(Piecewise((x - 2, Gt(x, 2)), (2 - x, True)) - 3) == [-1, 5]
def test_real_imag_splitting():
a, b = symbols('a b', real=True)
assert solve(sqrt(a**2 + b**2) - 3, a) == \
[-sqrt(-b**2 + 9), sqrt(-b**2 + 9)]
a, b = symbols('a b', imaginary=True)
assert solve(sqrt(a**2 + b**2) - 3, a) == []
def test_issue_7110():
y = -2*x**3 + 4*x**2 - 2*x + 5
assert any(ask(Q.real(i)) for i in solve(y))
def test_units():
assert solve(1/x - 1/(2*cm)) == [2*cm]
def test_issue_7547():
A, B, V = symbols('A,B,V')
eq1 = Eq(630.26*(V - 39.0)*V*(V + 39) - A + B, 0)
eq2 = Eq(B, 1.36*10**8*(V - 39))
eq3 = Eq(A, 5.75*10**5*V*(V + 39.0))
sol = Matrix(nsolve(Tuple(eq1, eq2, eq3), [A, B, V], (0, 0, 0)))
assert str(sol) == str(Matrix(
[['4442890172.68209'],
['4289299466.1432'],
['70.5389666628177']]))
def test_issue_7895():
r = symbols('r', real=True)
assert solve(sqrt(r) - 2) == [4]
def test_issue_2777():
# the equations represent two circles
x, y = symbols('x y', real=True)
e1, e2 = sqrt(x**2 + y**2) - 10, sqrt(y**2 + (-x + 10)**2) - 3
a, b = 191/S(20), 3*sqrt(391)/20
ans = [(a, -b), (a, b)]
assert solve((e1, e2), (x, y)) == ans
assert solve((e1, e2/(x - a)), (x, y)) == []
# make the 2nd circle's radius be -3
e2 += 6
assert solve((e1, e2), (x, y)) == []
assert solve((e1, e2), (x, y), check=False) == ans
def test_issue_7322():
number = 5.62527e-35
assert solve(x - number, x)[0] == number
def test_nsolve():
raises(ValueError, lambda: nsolve(x, (-1, 1), method='bisect'))
raises(TypeError, lambda: nsolve((x - y + 3,x + y,z - y),(x,y,z),(-50,50)))
raises(TypeError, lambda: nsolve((x + y, x - y), (0, 1)))
def test_issue_8587():
f = Piecewise((2*x**2, And(S(0) < x, x < 1)), (2, True))
assert solve(f - 1) == [1/sqrt(2)]
def test_high_order_multivariate():
assert len(solve(a*x**3 - x + 1, x)) == 3
assert len(solve(a*x**4 - x + 1, x)) == 4
assert solve(a*x**5 - x + 1, x) == [] # incomplete solution allowed
raises(NotImplementedError, lambda:
solve(a*x**5 - x + 1, x, incomplete=False))
# result checking must always consider the denominator and CRootOf
# must be checked, too
d = x**5 - x + 1
assert solve(d*(1 + 1/d)) == [CRootOf(d + 1, i) for i in range(5)]
d = x - 1
assert solve(d*(2 + 1/d)) == [S.Half]
def test_base_0_exp_0():
assert solve(0**x - 1) == [0]
assert solve(0**(x - 2) - 1) == [2]
assert solve(S('x*(1/x**0 - x)', evaluate=False)) == \
[0, 1]
def test__simple_dens():
assert _simple_dens(1/x**0, [x]) == set()
assert _simple_dens(1/x**y, [x]) == set([x**y])
assert _simple_dens(1/root(x, 3), [x]) == set([x])
def test_issue_8755():
    # This tests two things: that if full unrad is attempted and fails,
    # the solution should still be found; also it tests the use of the
    # keyword `composite`.
assert len(solve(sqrt(y)*x + x**3 - 1, x)) == 3
assert len(solve(-512*y**3 + 1344*(x + 2)**(S(1)/3)*y**2 -
1176*(x + 2)**(S(2)/3)*y - 169*x + 686, y, _unrad=False)) == 3
@slow
def test_issue_8828():
x1 = 0
y1 = -620
r1 = 920
x2 = 126
y2 = 276
x3 = 51
y3 = 205
r3 = 104
v = x, y, z
f1 = (x - x1)**2 + (y - y1)**2 - (r1 - z)**2
f2 = (x2 - x)**2 + (y2 - y)**2 - z**2
f3 = (x - x3)**2 + (y - y3)**2 - (r3 - z)**2
F = f1,f2,f3
g1 = sqrt((x - x1)**2 + (y - y1)**2) + z - r1
g2 = f2
g3 = sqrt((x - x3)**2 + (y - y3)**2) + z - r3
G = g1,g2,g3
A = solve(F, v)
B = solve(G, v)
C = solve(G, v, manual=True)
p, q, r = [set([tuple(i.evalf(2) for i in j) for j in R]) for R in [A, B, C]]
assert p == q == r
def test_issue_2840_8155():
assert solve(sin(3*x) + sin(6*x)) == [
0, -pi, pi, 2*pi, -2*I*log(-(-1)**(S(1)/9)), -2*I*log(-(-1)**(S(2)/9)),
-2*I*log((-1)**(S(7)/9)), -2*I*log((-1)**(S(8)/9)), -2*I*log(-S(1)/2 -
sqrt(3)*I/2), -2*I*log(-S(1)/2 + sqrt(3)*I/2), -2*I*log(S(1)/2 -
sqrt(3)*I/2), -2*I*log(S(1)/2 + sqrt(3)*I/2), -2*I*log(-sqrt(3)/2 - I/2),
-2*I*log(-sqrt(3)/2 + I/2), -2*I*log(sqrt(3)/2 - I/2),
-2*I*log(sqrt(3)/2 + I/2), -2*I*log(-sin(pi/18) - I*cos(pi/18)),
-2*I*log(-sin(pi/18) + I*cos(pi/18)), -2*I*log(sin(pi/18) -
I*cos(pi/18)), -2*I*log(sin(pi/18) + I*cos(pi/18)),
-2*I*log(exp(-2*I*pi/9)), -2*I*log(exp(-I*pi/9)),
-2*I*log(exp(I*pi/9)), -2*I*log(exp(2*I*pi/9))]
assert solve(2*sin(x) - 2*sin(2*x)) == [
0, -pi, pi, -2*I*log(-sqrt(3)/2 - I/2), -2*I*log(-sqrt(3)/2 + I/2),
-2*I*log(sqrt(3)/2 - I/2), -2*I*log(sqrt(3)/2 + I/2)]
def test_issue_9567():
assert solve(1 + 1/(x - 1)) == [0]
| bsd-3-clause | 2,814,971,367,022,690,300 | -4,148,917,813,281,054,000 | 37.636971 | 94 | 0.48203 | false |
ebmdatalab/openprescribing | openprescribing/dmd/forms.py | 1 | 2175 | from django import forms
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Layout, Field, ButtonHolder, Submit
from crispy_forms.bootstrap import InlineCheckboxes
obj_types_choices = [
("vtm", "VTMs"),
("vmp", "VMPs"),
("amp", "AMPs"),
("vmpp", "VMPPs"),
("ampp", "AMPPs"),
]
include_choices = [
("unavailable", "Unavailable items"),
("invalid", "Invalid items"),
("no_bnf_code", "Items with no BNF code"),
]
class SearchForm(forms.Form):
q = forms.CharField(
label="Query, SNOMED code, GTIN, or BNF code/prefix", min_length=3
)
obj_types = forms.MultipleChoiceField(
label="Search...",
choices=obj_types_choices,
required=False,
initial=[tpl[0] for tpl in obj_types_choices],
)
include = forms.MultipleChoiceField(
label="Include...",
choices=include_choices,
required=False,
help_text="Unavailable items are not available to be prescribed and/or have been discontinued.",
)
# This is only used in tests
max_results_per_obj_type = forms.IntegerField(
required=False, widget=forms.HiddenInput()
)
def __init__(self, *args, **kwargs):
super(SearchForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.form_method = "GET"
self.helper.layout = Layout(
Field("q"),
InlineCheckboxes("obj_types"),
InlineCheckboxes("include"),
ButtonHolder(Submit("submit", "Search")),
)
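# Illustrative usage sketch (not part of the original module; the field values
# shown are assumptions): the form is normally bound to request.GET, e.g.
#   form = SearchForm({"q": "amoxicillin", "obj_types": ["vmp", "amp"]})
#   if form.is_valid():
#       q = form.cleaned_data["q"]  # the query string, validated to >= 3 chars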
class AdvancedSearchForm(forms.Form):
search = forms.CharField(required=True, widget=forms.HiddenInput())
include = forms.MultipleChoiceField(
label="Include...",
choices=include_choices,
required=False,
help_text="Unavailable items are not available to be prescribed and/or have been discontinued.",
)
def __init__(self, *args, **kwargs):
super(AdvancedSearchForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.form_method = "GET"
self.helper.layout = Layout(Field("search"), InlineCheckboxes("include"))
| mit | -8,910,239,025,201,059,000 | -5,885,210,944,085,778,000 | 30.071429 | 104 | 0.622069 | false |
maas/maas | src/maasserver/fields.py | 1 | 29287 | # Copyright 2012-2017 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
"""Custom model and form fields."""
__all__ = [
"CIDRField",
"EditableBinaryField",
"Field",
"HostListFormField",
"IPListFormField",
"IPv4CIDRField",
"MAC",
"MACAddressField",
"MACAddressFormField",
"MODEL_NAME_VALIDATOR",
"NodeChoiceField",
"register_mac_type",
"VerboseRegexValidator",
"VersionedTextFileField",
"validate_mac",
]
from copy import deepcopy
from json import dumps, loads
import re
from django import forms
from django.core.exceptions import ObjectDoesNotExist, ValidationError
from django.core.validators import RegexValidator, URLValidator
from django.db import connections
from django.db.models import BinaryField, CharField
from django.db.models import Field as _BrokenField
from django.db.models import GenericIPAddressField, IntegerField, Q, URLField
from django.utils.deconstruct import deconstructible
from django.utils.encoding import force_text
from django.utils.translation import ugettext_lazy as _
from netaddr import AddrFormatError, IPAddress, IPNetwork
import psycopg2.extensions
from maasserver.models.versionedtextfile import VersionedTextFile
from maasserver.utils.dns import validate_domain_name, validate_hostname
from maasserver.utils.orm import get_one, validate_in_transaction
from provisioningserver.utils import typed
# Validator for the name attribute of model entities.
MODEL_NAME_VALIDATOR = RegexValidator(r"^\w[ \w-]*$")
class Field(_BrokenField):
"""Django's `Field` has a mutable default argument, hence is broken.
This fixes it.
"""
def __init__(self, *args, validators=None, **kwargs):
kwargs["validators"] = [] if validators is None else validators
super().__init__(*args, **kwargs)
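# A minimal sketch of the general hazard this guards against (illustrative
# assumption, not part of the original module): with a shared mutable default
# such as ``validators=[]``, two field instances would end up sharing one
# list, so
#   f1, f2 = SomeBrokenField(), SomeBrokenField()
#   f1.validators.append(some_validator)  # would also show up in f2.validators
# Passing ``validators=None`` and building a fresh list per instance avoids it.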
MAC_RE = re.compile(
r"^\s*("
r"([0-9a-fA-F]{1,2}:){5}[0-9a-fA-F]{1,2}|"
r"([0-9a-fA-F]{1,2}-){5}[0-9a-fA-F]{1,2}|"
r"([0-9a-fA-F]{3,4}.){2}[0-9a-fA-F]{3,4}"
r")\s*$"
)
MAC_ERROR_MSG = "'%(value)s' is not a valid MAC address."
class VerboseRegexValidator(RegexValidator):
"""A verbose `RegexValidator`.
This `RegexValidator` includes the checked value in the rendered error
message when the validation fails.
"""
    # Set a bogus code to circumvent Django's attempt to re-interpret a
    # validator's error message using the message of the field it is
    # attached to.
code = "bogus-code"
def __call__(self, value):
"""Validates that the input matches the regular expression."""
if not self.regex.search(force_text(value)):
raise ValidationError(
self.message % {"value": value}, code=self.code
)
mac_validator = VerboseRegexValidator(regex=MAC_RE, message=MAC_ERROR_MSG)
def validate_mac(value):
"""Django validator for a MAC."""
if isinstance(value, MAC):
value = value.get_raw()
mac_validator(value)
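# Illustrative examples (not in the original source): MAC_RE accepts the three
# common notations, so all of these pass validation:
#   validate_mac("00:1a:2b:3c:4d:5e")
#   validate_mac("00-1a-2b-3c-4d-5e")
#   validate_mac("001a.2b3c.4d5e")
# while validate_mac("00:1a:2b") raises ValidationError with the value shown
# in the message.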
class StrippedCharField(forms.CharField):
"""A CharField that will strip surrounding whitespace before validation."""
def clean(self, value):
value = self.to_python(value).strip()
return super().clean(value)
class UnstrippedCharField(forms.CharField):
"""A version of forms.CharField that never strips the whitespace.
    Django 1.9 introduced a `strip` argument that controls stripping of
    whitespace and defaults to True, thus breaking compatibility with
    1.8 and earlier.
"""
def __init__(self, *args, **kwargs):
# Instead of relying on a version check, we check for CharField
# constructor having a strip kwarg instead.
parent_init = super().__init__
if "strip" in parent_init.__code__.co_varnames:
parent_init(*args, strip=False, **kwargs)
else:
# In Django versions that do not support strip, False was the
# default.
parent_init(*args, **kwargs)
class VerboseRegexField(forms.CharField):
def __init__(self, regex, message, *args, **kwargs):
"""A field that validates its value with a regular expression.
:param regex: Either a string or a compiled regular expression object.
:param message: Error message to use when the validation fails.
"""
super().__init__(*args, **kwargs)
self.validators.append(
VerboseRegexValidator(regex=regex, message=message)
)
class MACAddressFormField(VerboseRegexField):
"""Form field type: MAC address."""
def __init__(self, *args, **kwargs):
super().__init__(regex=MAC_RE, message=MAC_ERROR_MSG, *args, **kwargs)
class MACAddressField(Field):
"""Model field type: MAC address."""
description = "MAC address"
default_validators = [validate_mac]
def db_type(self, *args, **kwargs):
return "macaddr"
def to_python(self, value):
return MAC(value)
def from_db_value(self, value, expression, connection, context):
return MAC(value)
def get_prep_value(self, value):
value = super().get_prep_value(value)
# Convert empty string to None.
if not value:
return None
return value
class MAC:
"""A MAC address represented as a database value.
PostgreSQL supports MAC addresses as a native type. They show up
client-side as this class. It is essentially a wrapper for a string.
This NEVER represents a null or empty MAC address.
"""
def __new__(cls, value):
"""Return `None` if `value` is `None` or the empty string."""
if value is None:
return None
elif isinstance(value, (bytes, str)):
return None if len(value) == 0 else super().__new__(cls)
else:
return super().__new__(cls)
def __init__(self, value):
"""Wrap a MAC address, or None, into a `MAC`.
:param value: A MAC address, in the form of a string or a `MAC`;
or None.
"""
# The wrapped attribute is stored as self._wrapped, following
# ISQLQuote's example.
if isinstance(value, MAC):
self._wrapped = value._wrapped
elif isinstance(value, bytes):
self._wrapped = value.decode("ascii")
elif isinstance(value, str):
self._wrapped = value
else:
raise TypeError("expected MAC or string, got: %r" % (value,))
def __conform__(self, protocol):
"""Tell psycopg2 that this type implements the adapter protocol."""
        # The psycopg2 docs say to check that the protocol is ISQLQuote,
# but not what to do if it isn't.
assert protocol == psycopg2.extensions.ISQLQuote, (
"Unsupported psycopg2 adapter protocol: %s" % protocol
)
return self
def getquoted(self):
"""Render this object in SQL.
This is part of psycopg2's adapter protocol.
"""
return "'%s'::macaddr" % self._wrapped
def get_raw(self):
"""Return the wrapped value."""
return self._wrapped
@property
def raw(self):
"""The MAC address as a string."""
return self._wrapped
@classmethod
def parse(cls, value, cur):
"""Turn a value as received from the database into a MAC."""
return cls(value)
def __repr__(self):
"""Represent the MAC as a string."""
return "<MAC %s>" % self._wrapped
def __str__(self):
"""Represent the MAC as a Unicode string."""
return self._wrapped
def __bytes__(self):
return self._wrapped.encode("ascii")
def __eq__(self, other):
"""Two `MAC`s are equal if they wrap the same value.
        A MAC is also equal to the value it wraps. This is non-commutative,
but it supports Django code that compares input values to various
kinds of "null" or "empty."
"""
if isinstance(other, MAC):
return self._wrapped == other._wrapped
else:
return self._wrapped == other
def __ne__(self, other):
return not (self == other)
def __hash__(self):
return hash(self._wrapped)
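# Illustrative semantics (assumption, not part of the original module):
#   MAC(None) is None and MAC("") is None       # __new__ returns None for these
#   MAC("00:11:22:33:44:55") == "00:11:22:33:44:55"         # equal to raw string
#   str(MAC(b"00:11:22:33:44:55")) == "00:11:22:33:44:55"   # bytes are decoded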
def register_mac_type(cursor):
"""Register our `MAC` type with psycopg2 and Django."""
# This is standard, but not built-in, magic to register a type in
# psycopg2: execute a query that returns a field of the corresponding
# database type, then get its oid out of the cursor, use that to create
# a "typecaster" in psycopg (by calling new_type(), confusingly!), then
# register that type in psycopg.
cursor.execute("SELECT NULL::macaddr")
oid = cursor.description[0][1]
mac_caster = psycopg2.extensions.new_type((oid,), "macaddr", MAC.parse)
psycopg2.extensions.register_type(mac_caster)
# Now do the same for the type array-of-MACs. The "typecaster" created
# for MAC is passed in; it gets used for parsing an individual element
# of an array's text representation as received from the database.
cursor.execute("SELECT '{}'::macaddr[]")
oid = cursor.description[0][1]
psycopg2.extensions.register_type(
psycopg2.extensions.new_array_type((oid,), "macaddr", mac_caster)
)
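# A hedged usage sketch (the signal wiring below is an assumption, not
# necessarily how MAAS hooks it up): registration must happen once per
# database connection, e.g.
#   from django.db.backends.signals import connection_created
#   def _register(sender, connection, **kwargs):
#       with connection.cursor() as cursor:
#           register_mac_type(cursor)
#   connection_created.connect(_register)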
class JSONObjectField(Field):
"""A field that will store any jsonizable python object."""
def to_python(self, value):
"""db -> python: json load."""
assert not isinstance(value, bytes)
if value is not None:
if isinstance(value, str):
try:
return loads(value)
except ValueError:
pass
return value
else:
return None
def from_db_value(self, value, expression, connection, context):
return self.to_python(value)
def get_db_prep_value(self, value, connection=None, prepared=False):
"""python -> db: json dump.
Keys are sorted when dumped to guarantee stable output. DB field can
guarantee uniqueness and be queried (the same dict makes the same
JSON).
"""
if value is not None:
return dumps(deepcopy(value), sort_keys=True)
else:
return None
def get_internal_type(self):
return "TextField"
def formfield(self, form_class=None, **kwargs):
"""Return a plain `forms.Field` here to avoid "helpful" conversions.
Django's base model field defaults to returning a `CharField`, which
means that anything that's not character data gets smooshed to text by
        `CharField.to_python` in forms (via the woefully named `smart_text`).
This is not helpful.
"""
if form_class is None:
form_class = forms.Field
return super().formfield(form_class=form_class, **kwargs)
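# Illustrative round trip (assumption, not part of the original module):
#   field = JSONObjectField()
#   field.get_db_prep_value({"b": 1, "a": 2})  # '{"a": 2, "b": 1}' -- keys sorted
#   field.to_python('{"a": 2, "b": 1}')        # {'a': 2, 'b': 1}
# Sorted keys mean two equal dicts always serialize to the same JSON text, so
# the column can back a uniqueness constraint and be queried by equality.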
class XMLField(Field):
"""A field for storing xml natively.
This is not like the removed Django XMLField which just added basic python
level checking on top of a text column.
    Really, inserts should be wrapped like `XMLPARSE(DOCUMENT value)`, but that
    is hard to do from Django, so we rely on Postgres supporting casting from char.
"""
description = "XML document or fragment"
def db_type(self, connection):
return "xml"
class EditableBinaryField(BinaryField):
"""An editable binary field.
An editable version of Django's BinaryField.
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.editable = True
def deconstruct(self):
# Override deconstruct not to fail on the removal of the 'editable'
# field: the Django migration module assumes the field has its default
# value (False).
return Field.deconstruct(self)
class LargeObjectFile:
"""Large object file.
Proxy the access from this object to psycopg2.
"""
def __init__(self, oid=0, field=None, instance=None, block_size=(1 << 16)):
self.oid = oid
self.field = field
self.instance = instance
self.block_size = block_size
self._lobject = None
def __getattr__(self, name):
if self._lobject is None:
raise IOError("LargeObjectFile is not opened.")
return getattr(self._lobject, name)
def __enter__(self, *args, **kwargs):
return self
def __exit__(self, *args, **kwargs):
self.close()
def __iter__(self):
return self
@typed
def write(self, data: bytes):
"""Write `data` to the underlying large object.
This exists so that type annotations can be enforced.
"""
self._lobject.write(data)
def open(
self, mode="rwb", new_file=None, using="default", connection=None
):
"""Opens the internal large object instance."""
if "b" not in mode:
raise ValueError("Large objects must be opened in binary mode.")
if connection is None:
connection = connections[using]
validate_in_transaction(connection)
self._lobject = connection.connection.lobject(
self.oid, mode, 0, new_file
)
self.oid = self._lobject.oid
return self
def unlink(self):
"""Removes the large object."""
if self._lobject is None:
# Need to open the lobject so we get a reference to it in the
# database, to perform the unlink.
self.open()
self.close()
self._lobject.unlink()
self._lobject = None
self.oid = 0
def __next__(self):
r = self.read(self.block_size)
if len(r) == 0:
raise StopIteration
return r
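# A minimal usage sketch (assumption; exact call sites in MAAS may differ).
# Large-object I/O must run inside a transaction, which open() checks via
# validate_in_transaction:
#   with LargeObjectFile().open("wb") as f:    # oid == 0 creates a new object
#       f.write(b"some binary content")
#   # f.oid now identifies the stored object for later reads:
#   with LargeObjectFile(f.oid).open("rb") as f:
#       data = b"".join(chunk for chunk in f)  # iterates in block_size chunks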
class LargeObjectDescriptor:
"""LargeObjectField descriptor."""
def __init__(self, field):
self.field = field
def __get__(self, instance, type=None):
if instance is None:
return self
return instance.__dict__[self.field.name]
def __set__(self, instance, value):
value = self.field.to_python(value)
if value is not None:
if not isinstance(value, LargeObjectFile):
value = LargeObjectFile(value, self.field, instance)
instance.__dict__[self.field.name] = value
class LargeObjectField(IntegerField):
"""A field that stores large amounts of data into postgres large object
storage.
Internally the field on the model is an `oid` field, that returns a proxy
to the referenced large object.
"""
def __init__(self, *args, **kwargs):
self.block_size = kwargs.pop("block_size", 1 << 16)
super().__init__(*args, **kwargs)
@property
def validators(self):
# No validation. IntegerField will add incorrect validation. This
# removes that validation.
return []
def db_type(self, connection):
"""Returns the database column data type for LargeObjectField."""
# oid is the column type postgres uses to reference a large object
return "oid"
def contribute_to_class(self, cls, name):
"""Set the descriptor for the large object."""
super().contribute_to_class(cls, name)
setattr(cls, self.name, LargeObjectDescriptor(self))
def get_db_prep_value(self, value, connection=None, prepared=False):
"""python -> db: `oid` value"""
if value is None:
return None
if isinstance(value, LargeObjectFile):
if value.oid > 0:
return value.oid
raise AssertionError(
"LargeObjectFile's oid must be greater than 0."
)
raise AssertionError(
"Invalid LargeObjectField value (expected LargeObjectFile): '%s'"
% repr(value)
)
def to_python(self, value):
"""db -> python: `LargeObjectFile`"""
if value is None:
return None
elif isinstance(value, LargeObjectFile):
return value
elif isinstance(value, int):
return LargeObjectFile(value, self, self.model, self.block_size)
raise AssertionError(
"Invalid LargeObjectField value (expected integer): '%s'"
% repr(value)
)
class CIDRField(Field):
description = "PostgreSQL CIDR field"
def parse_cidr(self, value):
try:
return str(IPNetwork(value).cidr)
except AddrFormatError as e:
raise ValidationError(str(e)) from e
def db_type(self, connection):
return "cidr"
def get_prep_value(self, value):
if value is None or value == "":
return None
return self.parse_cidr(value)
def from_db_value(self, value, expression, connection, context):
if value is None:
return value
return self.parse_cidr(value)
def to_python(self, value):
if value is None or value == "":
return None
if isinstance(value, IPNetwork):
return str(value)
if not value:
return value
return self.parse_cidr(value)
def formfield(self, **kwargs):
defaults = {"form_class": forms.CharField}
defaults.update(kwargs)
return super().formfield(**defaults)
class IPv4CIDRField(CIDRField):
"""IPv4-only CIDR"""
def get_prep_value(self, value):
if value is None or value == "":
return None
return self.to_python(value)
def to_python(self, value):
if value is None or value == "":
return None
else:
try:
cidr = IPNetwork(value)
except AddrFormatError:
raise ValidationError(
"Invalid network: %(cidr)s", params={"cidr": value}
)
if cidr.cidr.version != 4:
raise ValidationError(
"%(cidr)s: Only IPv4 networks supported.",
params={"cidr": value},
)
return str(cidr.cidr)
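# Normalization examples (illustrative, not in the original module):
#   CIDRField().to_python("10.0.0.1/8")         # -> "10.0.0.0/8" (host bits dropped)
#   IPv4CIDRField().to_python("2001:db8::/32")  # raises ValidationError (IPv6)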
class IPListFormField(forms.CharField):
"""Accepts a space/comma separated list of IP addresses.
This field normalizes the list to a space-separated list.
"""
separators = re.compile(r"[,\s]+")
def clean(self, value):
if value is None:
return None
else:
ips = re.split(self.separators, value)
ips = [ip.strip() for ip in ips if ip != ""]
for ip in ips:
try:
GenericIPAddressField().clean(ip, model_instance=None)
except ValidationError:
raise ValidationError(
"Invalid IP address: %s; provide a list of "
"space-separated IP addresses" % ip
)
return " ".join(ips)
class HostListFormField(forms.CharField):
"""Accepts a space/comma separated list of hostnames or IP addresses.
This field normalizes the list to a space-separated list.
"""
separators = re.compile(r"[,\s]+")
# Regular expressions to sniff out things that look like IP addresses;
# additional and more robust validation ought to be done to make sure.
pt_ipv4 = r"(?: \d{1,3} [.] \d{1,3} [.] \d{1,3} [.] \d{1,3} )"
pt_ipv6 = r"(?: (?: [\da-fA-F]+ :+)+ (?: [\da-fA-F]+ | %s )+ )" % pt_ipv4
pt_ip = re.compile(r"^ (?: %s | %s ) $" % (pt_ipv4, pt_ipv6), re.VERBOSE)
def clean(self, value):
if value is None:
return None
else:
values = map(str.strip, self.separators.split(value))
values = (value for value in values if len(value) != 0)
values = map(self._clean_addr_or_host, values)
return " ".join(values)
def _clean_addr_or_host(self, value):
looks_like_ip = self.pt_ip.match(value) is not None
if looks_like_ip:
return self._clean_addr(value)
elif ":" in value:
# This is probably an IPv6 address. It's definitely not a
# hostname.
return self._clean_addr(value)
else:
return self._clean_host(value)
def _clean_addr(self, addr):
try:
addr = IPAddress(addr)
except AddrFormatError as error:
message = str(error) # netaddr has good messages.
message = message[:1].upper() + message[1:] + "."
raise ValidationError(message)
else:
return str(addr)
def _clean_host(self, host):
try:
validate_hostname(host)
except ValidationError as error:
raise ValidationError("Invalid hostname: " + error.message)
else:
return host
class SubnetListFormField(forms.CharField):
"""Accepts a space/comma separated list of hostnames, Subnets or IPs.
This field normalizes the list to a space-separated list.
"""
separators = re.compile(r"[,\s]+")
# Regular expressions to sniff out things that look like IP addresses;
# additional and more robust validation ought to be done to make sure.
pt_ipv4 = r"(?: \d{1,3} [.] \d{1,3} [.] \d{1,3} [.] \d{1,3} )"
pt_ipv6 = r"(?: (|[0-9A-Fa-f]{1,4}) [:] (|[0-9A-Fa-f]{1,4}) [:] (.*))"
pt_ip = re.compile(r"^ (?: %s | %s ) $" % (pt_ipv4, pt_ipv6), re.VERBOSE)
pt_subnet = re.compile(
r"^ (?: %s | %s ) \/\d+$" % (pt_ipv4, pt_ipv6), re.VERBOSE
)
def clean(self, value):
if value is None:
return None
else:
values = map(str.strip, self.separators.split(value))
values = (value for value in values if len(value) != 0)
values = map(self._clean_addr_or_host, values)
return " ".join(values)
def _clean_addr_or_host(self, value):
looks_like_ip = self.pt_ip.match(value) is not None
looks_like_subnet = self.pt_subnet.match(value) is not None
if looks_like_subnet:
return self._clean_subnet(value)
elif looks_like_ip:
return self._clean_addr(value)
else:
return self._clean_host(value)
def _clean_addr(self, value):
try:
addr = IPAddress(value)
except ValueError:
return
except AddrFormatError:
raise ValidationError("Invalid IP address: %s." % value)
else:
return str(addr)
def _clean_subnet(self, value):
try:
cidr = IPNetwork(value)
except AddrFormatError:
raise ValidationError("Invalid network: %s." % value)
else:
return str(cidr)
def _clean_host(self, host):
try:
validate_hostname(host)
except ValidationError as error:
raise ValidationError("Invalid hostname: " + error.message)
else:
return host
class CaseInsensitiveChoiceField(forms.ChoiceField):
"""ChoiceField that allows the input to be case insensitive."""
def to_python(self, value):
if value not in self.empty_values:
value = value.lower()
return super().to_python(value)
class SpecifierOrModelChoiceField(forms.ModelChoiceField):
"""ModelChoiceField which is also able to accept input in the format
of a specifiers string.
"""
def to_python(self, value):
try:
return super().to_python(value)
except ValidationError as e:
if isinstance(value, str):
object_id = self.queryset.get_object_id(value)
if object_id is None:
obj = get_one(
self.queryset.filter_by_specifiers(value),
exception_class=ValidationError,
)
if obj is not None:
return obj
else:
try:
return self.queryset.get(id=object_id)
except ObjectDoesNotExist:
                    # Raising a ValidationError here makes the API return
                    # a bad request instead of an internal server error.
raise ValidationError("None found with id=%s." % value)
raise e
class DomainNameField(CharField):
"""Custom Django field that strips whitespace and trailing '.' characters
from DNS domain names before validating and saving to the database. Also,
validates that the domain name is valid according to RFCs 952 and 1123.
(Note that this field type should NOT be used for hostnames, since the set
of valid hostnames is smaller than the set of valid domain names.)
"""
def __init__(self, *args, **kwargs):
validators = kwargs.pop("validators", [])
validators.append(validate_domain_name)
kwargs["validators"] = validators
super().__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
del kwargs["validators"]
return name, path, args, kwargs
# Here we are using (abusing?) the to_python() function to coerce and
# normalize this type. Django does not have a function intended purely
# to normalize before saving to the database, so to_python() is the next
# closest alternative. For more information, see:
# https://docs.djangoproject.com/en/1.6/ref/forms/validation/
# https://code.djangoproject.com/ticket/6362
def to_python(self, value):
value = super().to_python(value)
if value is None:
return None
value = value.strip().rstrip(".")
return value
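# Example (illustrative): to_python(" maas.example.com. ") returns
# "maas.example.com" -- surrounding whitespace and the trailing root-label
# dot are stripped before the domain-name validator runs.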
class NodeChoiceField(forms.ModelChoiceField):
def __init__(self, queryset, *args, **kwargs):
super().__init__(queryset=queryset.distinct(), *args, **kwargs)
def clean(self, value):
if not value:
return None
# Avoid circular imports
from maasserver.models.node import Node
if isinstance(value, Node):
if value not in self.queryset:
raise ValidationError(
"Select a valid choice. "
"%s is not one of the available choices." % value.system_id
)
return value
try:
return self.queryset.get(Q(system_id=value) | Q(hostname=value))
except Node.DoesNotExist:
raise ValidationError(
"Select a valid choice. "
"%s is not one of the available choices." % value
)
def to_python(self, value):
# Avoid circular imports
from maasserver.models.node import Node
try:
return self.queryset.get(Q(system_id=value) | Q(hostname=value))
except Node.DoesNotExist:
raise ValidationError(
"Select a valid choice. "
"%s is not one of the available choices." % value
)
class VersionedTextFileField(forms.ModelChoiceField):
def __init__(self, *args, **kwargs):
super().__init__(queryset=None, *args, **kwargs)
def clean(self, value):
if self.initial is None:
if value is None:
raise ValidationError("Must be given a value")
# Create a new VersionedTextFile if one doesn't exist
if isinstance(value, dict):
return VersionedTextFile.objects.create(**value)
else:
return VersionedTextFile.objects.create(data=value)
elif value is None:
return self.initial
else:
# Create and return a new VersionedTextFile linked to the previous
# VersionedTextFile
if isinstance(value, dict):
return self.initial.update(**value)
else:
return self.initial.update(value)
@deconstructible
class URLOrPPAValidator(URLValidator):
message = _("Enter a valid repository URL or PPA location.")
ppa_re = (
r"ppa:" + URLValidator.hostname_re + r"/" + URLValidator.hostname_re
)
def __call__(self, value):
match = re.search(URLOrPPAValidator.ppa_re, force_text(value))
# If we don't have a PPA location, let URLValidator do its job.
if not match:
super().__call__(value)
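# Example (illustrative): 'ppa:launchpad-user/ppa-name' matches ppa_re and
# is accepted as-is, while 'http://archive.example.com/ubuntu' contains no
# PPA locator and falls through to URLValidator.__call__.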
class URLOrPPAFormField(forms.URLField):
widget = forms.URLInput
default_error_messages = {
"invalid": _("Enter a valid repository URL or PPA location.")
}
default_validators = [URLOrPPAValidator()]
def to_python(self, value):
# Call grandparent method (CharField) to get string value.
value = super(forms.URLField, self).to_python(value)
# If it's a PPA locator, return it, else run URL pythonator.
match = re.search(URLOrPPAValidator.ppa_re, value)
return value if match else super().to_python(value)
class URLOrPPAField(URLField):
default_validators = [URLOrPPAValidator()]
description = _("URLOrPPAField")
# Copied from URLField, with modified form_class.
def formfield(self, **kwargs):
defaults = {"form_class": URLOrPPAFormField}
defaults.update(kwargs)
return super(URLField, self).formfield(**defaults)
| agpl-3.0 | -1,164,930,634,722,064,400 | 1,326,503,342,397,666,800 | 31.83296 | 79 | 0.599515 | false |
jckarter/swift | utils/swift_build_support/swift_build_support/build_graph.py | 13 | 6374 | # swift_build_support/build_graph.py ----------------------------*- python -*-
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2017 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ----------------------------------------------------------------------------
#
# This is a simple implementation of an acyclic build graph. We require no
# cycles, so we just perform a reverse post order traversal to get a topological
# ordering. We check during the reverse post order traversal that we do not
# visit any node multiple times.
#
# Nodes are assumed to be a product's class.
#
# ----------------------------------------------------------------------------
def _get_po_ordered_nodes(root, invertedDepMap):
# Then setup our worklist/visited node set.
worklist = [root]
visitedNodes = set([])
# TODO: Can we unify po_ordered_nodes and visitedNodes in some way?
po_ordered_nodes = []
# Until we no longer have nodes to visit...
while not len(worklist) == 0:
# First grab the last element of the worklist. If we have already
# visited this node, just pop it and skip it.
#
# DISCUSSION: Consider the following build graph:
#
# A -> [C, B]
# B -> [C]
#
# In this case, we will most likely get the following worklist
# before actually processing anything:
#
# A, C, B, C
#
# In this case, we want to ignore the initial C pushed onto the
# worklist by visiting A since we will have visited C already due to
# the edge from B -> C.
node = worklist[-1]
if node in visitedNodes:
worklist.pop()
continue
# Then grab the dependents of our node.
deps = invertedDepMap.get(node, set([]))
assert(isinstance(deps, set))
# Then visit those and see if we have not visited any of them. Push
# any such nodes onto the worklist and continue. If we have already
# visited all of our dependents, then we can actually process this
# node.
foundDep = False
for d in deps:
if d not in visitedNodes:
foundDep = True
worklist.append(d)
if foundDep:
continue
# Now process the node by popping it off the worklist, adding it to
# the visited nodes set, and append it to the po_ordered_nodes in
# its final position.
worklist.pop()
visitedNodes.add(node)
po_ordered_nodes.append(node)
return po_ordered_nodes
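# Example (illustrative): for the graph in the DISCUSSION above
# (A -> [C, B], B -> [C]) one valid post order is [C, B, A]; reversing it
# yields the schedule A, B, C, i.e. every node before its dependents.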
class BuildDAG(object):
def __init__(self):
self.root = None
# A map from a node to a list of nodes that depend on the given node.
#
# NOTE: This is an inverted dependency map implying that the root will
# be a "final element" of the graph.
self.invertedDepMap = {}
def add_edge(self, pred, succ):
self.invertedDepMap.setdefault(pred, set([succ])) \
.add(succ)
def set_root(self, root):
# Assert that we always only have one root.
assert(self.root is None)
self.root = root
def produce_schedule(self):
# Grab the root and make sure it is not None
root = self.root
assert(root is not None)
# Then perform a post order traversal from root using our inverted
# dependency map to compute a list of our nodes in post order.
#
# NOTE: The index of each node in this list is the post order number of
# the node.
po_ordered_nodes = _get_po_ordered_nodes(root, self.invertedDepMap)
        # Ok, we have our post order list. We want to provide our user with a
        # reverse post order, so we take the reversed list and construct a
        # dictionary from an enumeration of it. This gives us a dictionary
        # mapping each node to its reverse post order number.
rpo_ordered_nodes = list(reversed(po_ordered_nodes))
node_to_rpot_map = dict((y, x) for x, y in enumerate(rpo_ordered_nodes))
# Now before we return our rpo_ordered_nodes and our node_to_rpot_map, lets
# verify that we didn't find any cycles. We can do this by traversing
# our dependency graph in reverse post order and making sure all
# dependencies of each node we visit has a later reverse post order
# number than the node we are checking.
for n, node in enumerate(rpo_ordered_nodes):
for dep in self.invertedDepMap.get(node, []):
if node_to_rpot_map[dep] < n:
print('n: {}. node: {}.'.format(n, node))
print('dep: {}.'.format(dep))
print('inverted dependency map: {}'.format(self.invertedDepMap))
print('rpo ordered nodes: {}'.format(rpo_ordered_nodes))
print('rpo node to rpo number map: {}'.format(node_to_rpot_map))
raise RuntimeError('Found cycle in build graph!')
return (rpo_ordered_nodes, node_to_rpot_map)
def produce_scheduled_build(input_product_classes):
"""For a given a subset input_input_product_classes of
all_input_product_classes, compute a topological ordering of the
input_input_product_classes + topological closures that respects the
dependency graph.
"""
dag = BuildDAG()
worklist = list(input_product_classes)
visited = set(input_product_classes)
# Construct the DAG.
while len(worklist) > 0:
entry = worklist.pop()
deps = entry.get_dependencies()
if len(deps) == 0:
dag.set_root(entry)
for d in deps:
dag.add_edge(d, entry)
if d not in visited:
worklist.append(d)
visited = visited.union(deps)
# Then produce the schedule.
schedule = dag.produce_schedule()
# Finally check that all of our input_product_classes are in the schedule.
if len(set(input_product_classes) - set(schedule[0])) != 0:
raise RuntimeError('Found disconnected graph?!')
return schedule
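# Illustrative usage sketch -- not part of the original module. The product
# classes below are hypothetical stand-ins that satisfy the
# get_dependencies() protocol expected by produce_scheduled_build().
if __name__ == '__main__':
    class _LLVM(object):
        @classmethod
        def get_dependencies(cls):
            return []

    class _Swift(object):
        @classmethod
        def get_dependencies(cls):
            return [_LLVM]

    ordered, rpo_numbers = produce_scheduled_build([_Swift])
    # Dependencies are scheduled before their dependents: [_LLVM, _Swift].
    assert ordered == [_LLVM, _Swift]
    assert rpo_numbers[_LLVM] < rpo_numbers[_Swift]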
| apache-2.0 | -6,310,877,604,621,007,000 | -4,487,098,343,949,574,700 | 37.865854 | 84 | 0.602134 | false |
Jgarcia-IAS/SAT | openerp/addons/account_payment/account_payment.py | 212 | 19161 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import logging
import time
from openerp.osv import fields, osv
_logger = logging.getLogger(__name__)
class payment_mode(osv.osv):
_name= 'payment.mode'
_description= 'Payment Mode'
_columns = {
'name': fields.char('Name', required=True, help='Mode of Payment'),
'bank_id': fields.many2one('res.partner.bank', "Bank account",
required=True,help='Bank Account for the Payment Mode'),
'journal': fields.many2one('account.journal', 'Journal', required=True,
domain=[('type', 'in', ('bank','cash'))], help='Bank or Cash Journal for the Payment Mode'),
'company_id': fields.many2one('res.company', 'Company',required=True),
'partner_id':fields.related('company_id','partner_id',type='many2one',relation='res.partner',string='Partner',store=True,),
}
_defaults = {
'company_id': lambda self,cr,uid,c: self.pool.get('res.users').browse(cr, uid, uid, c).company_id.id
}
def suitable_bank_types(self, cr, uid, payment_code=None, context=None):
"""Return the codes of the bank type that are suitable
for the given payment type code"""
if not payment_code:
return []
cr.execute(""" SELECT pb.state
FROM res_partner_bank pb
JOIN payment_mode pm ON (pm.bank_id = pb.id)
WHERE pm.id = %s """, [payment_code])
return [x[0] for x in cr.fetchall()]
def onchange_company_id (self, cr, uid, ids, company_id=False, context=None):
result = {}
if company_id:
partner_id = self.pool.get('res.company').browse(cr, uid, company_id, context=context).partner_id.id
result['partner_id'] = partner_id
return {'value': result}
class payment_order(osv.osv):
_name = 'payment.order'
_description = 'Payment Order'
_rec_name = 'reference'
_order = 'id desc'
#dead code
def get_wizard(self, type):
_logger.warning("No wizard found for the payment type '%s'.", type)
return None
def _total(self, cursor, user, ids, name, args, context=None):
if not ids:
return {}
res = {}
for order in self.browse(cursor, user, ids, context=context):
if order.line_ids:
res[order.id] = reduce(lambda x, y: x + y.amount, order.line_ids, 0.0)
else:
res[order.id] = 0.0
return res
_columns = {
'date_scheduled': fields.date('Scheduled Date', states={'done':[('readonly', True)]}, help='Select a date if you have chosen Preferred Date to be fixed.'),
'reference': fields.char('Reference', required=1, states={'done': [('readonly', True)]}, copy=False),
'mode': fields.many2one('payment.mode', 'Payment Mode', select=True, required=1, states={'done': [('readonly', True)]}, help='Select the Payment Mode to be applied.'),
'state': fields.selection([
('draft', 'Draft'),
('cancel', 'Cancelled'),
('open', 'Confirmed'),
('done', 'Done')], 'Status', select=True, copy=False,
            help='When an order is placed the status is \'Draft\'.\n Once the bank is confirmed the status is set to \'Confirmed\'.\n Once the order is paid the status is \'Done\'.'),
'line_ids': fields.one2many('payment.line', 'order_id', 'Payment lines', states={'done': [('readonly', True)]}),
'total': fields.function(_total, string="Total", type='float'),
'user_id': fields.many2one('res.users', 'Responsible', required=True, states={'done': [('readonly', True)]}),
'date_prefered': fields.selection([
('now', 'Directly'),
('due', 'Due date'),
('fixed', 'Fixed date')
], "Preferred Date", change_default=True, required=True, states={'done': [('readonly', True)]}, help="Choose an option for the Payment Order:'Fixed' stands for a date specified by you.'Directly' stands for the direct execution.'Due date' stands for the scheduled date of execution."),
'date_created': fields.date('Creation Date', readonly=True),
'date_done': fields.date('Execution Date', readonly=True),
'company_id': fields.related('mode', 'company_id', type='many2one', relation='res.company', string='Company', store=True, readonly=True),
}
_defaults = {
'user_id': lambda self,cr,uid,context: uid,
'state': 'draft',
'date_prefered': 'due',
'date_created': lambda *a: time.strftime('%Y-%m-%d'),
'reference': lambda self,cr,uid,context: self.pool.get('ir.sequence').get(cr, uid, 'payment.order'),
}
def set_to_draft(self, cr, uid, ids, *args):
self.write(cr, uid, ids, {'state': 'draft'})
self.create_workflow(cr, uid, ids)
return True
def action_open(self, cr, uid, ids, *args):
ir_seq_obj = self.pool.get('ir.sequence')
for order in self.read(cr, uid, ids, ['reference']):
if not order['reference']:
reference = ir_seq_obj.get(cr, uid, 'payment.order')
self.write(cr, uid, order['id'], {'reference':reference})
return True
def set_done(self, cr, uid, ids, *args):
self.write(cr, uid, ids, {'date_done': time.strftime('%Y-%m-%d')})
self.signal_workflow(cr, uid, ids, 'done')
return True
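    # Changing ``date_prefered`` cascades to the payment lines: 'fixed'
    # stamps ``date_scheduled`` on every line, 'due' copies each line's
    # maturity date, and 'now' clears the date so the bank pays directly.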
def write(self, cr, uid, ids, vals, context=None):
if context is None:
context = {}
payment_line_obj = self.pool.get('payment.line')
payment_line_ids = []
if (vals.get('date_prefered', False) == 'fixed' and not vals.get('date_scheduled', False)) or vals.get('date_scheduled', False):
for order in self.browse(cr, uid, ids, context=context):
for line in order.line_ids:
payment_line_ids.append(line.id)
payment_line_obj.write(cr, uid, payment_line_ids, {'date': vals.get('date_scheduled', False)}, context=context)
elif vals.get('date_prefered', False) == 'due':
vals.update({'date_scheduled': False})
for order in self.browse(cr, uid, ids, context=context):
for line in order.line_ids:
payment_line_obj.write(cr, uid, [line.id], {'date': line.ml_maturity_date}, context=context)
elif vals.get('date_prefered', False) == 'now':
vals.update({'date_scheduled': False})
for order in self.browse(cr, uid, ids, context=context):
for line in order.line_ids:
payment_line_ids.append(line.id)
payment_line_obj.write(cr, uid, payment_line_ids, {'date': False}, context=context)
return super(payment_order, self).write(cr, uid, ids, vals, context=context)
class payment_line(osv.osv):
_name = 'payment.line'
_description = 'Payment Line'
def translate(self, orig):
return {
"due_date": "date_maturity",
"reference": "ref"}.get(orig, orig)
def _info_owner(self, cr, uid, ids, name=None, args=None, context=None):
result = {}
for line in self.browse(cr, uid, ids, context=context):
owner = line.order_id.mode.bank_id.partner_id
result[line.id] = self._get_info_partner(cr, uid, owner, context=context)
return result
def _get_info_partner(self,cr, uid, partner_record, context=None):
if not partner_record:
return False
st = partner_record.street or ''
st1 = partner_record.street2 or ''
zip = partner_record.zip or ''
city = partner_record.city or ''
zip_city = zip + ' ' + city
cntry = partner_record.country_id and partner_record.country_id.name or ''
return partner_record.name + "\n" + st + " " + st1 + "\n" + zip_city + "\n" +cntry
def _info_partner(self, cr, uid, ids, name=None, args=None, context=None):
result = {}
for line in self.browse(cr, uid, ids, context=context):
result[line.id] = False
if not line.partner_id:
break
result[line.id] = self._get_info_partner(cr, uid, line.partner_id, context=context)
return result
#dead code
def select_by_name(self, cr, uid, ids, name, args, context=None):
if not ids: return {}
partner_obj = self.pool.get('res.partner')
cr.execute("""SELECT pl.id, ml.%s
FROM account_move_line ml
INNER JOIN payment_line pl
ON (ml.id = pl.move_line_id)
WHERE pl.id IN %%s"""% self.translate(name),
(tuple(ids),))
res = dict(cr.fetchall())
if name == 'partner_id':
partner_name = {}
for p_id, p_name in partner_obj.name_get(cr, uid,
filter(lambda x:x and x != 0,res.values()), context=context):
partner_name[p_id] = p_name
for id in ids:
if id in res and partner_name:
res[id] = (res[id],partner_name[res[id]])
else:
res[id] = (False,False)
else:
for id in ids:
res.setdefault(id, (False, ""))
return res
def _amount(self, cursor, user, ids, name, args, context=None):
if not ids:
return {}
currency_obj = self.pool.get('res.currency')
if context is None:
context = {}
res = {}
for line in self.browse(cursor, user, ids, context=context):
ctx = context.copy()
ctx['date'] = line.order_id.date_done or time.strftime('%Y-%m-%d')
res[line.id] = currency_obj.compute(cursor, user, line.currency.id,
line.company_currency.id,
line.amount_currency, context=ctx)
return res
def _get_currency(self, cr, uid, context=None):
user_obj = self.pool.get('res.users')
currency_obj = self.pool.get('res.currency')
user = user_obj.browse(cr, uid, uid, context=context)
if user.company_id:
return user.company_id.currency_id.id
else:
return currency_obj.search(cr, uid, [('rate', '=', 1.0)])[0]
def _get_date(self, cr, uid, context=None):
if context is None:
context = {}
payment_order_obj = self.pool.get('payment.order')
date = False
if context.get('order_id') and context['order_id']:
order = payment_order_obj.browse(cr, uid, context['order_id'], context=context)
if order.date_prefered == 'fixed':
date = order.date_scheduled
else:
date = time.strftime('%Y-%m-%d')
return date
def _get_ml_inv_ref(self, cr, uid, ids, *a):
res = {}
for id in self.browse(cr, uid, ids):
res[id.id] = False
if id.move_line_id:
if id.move_line_id.invoice:
res[id.id] = id.move_line_id.invoice.id
return res
def _get_ml_maturity_date(self, cr, uid, ids, *a):
res = {}
for id in self.browse(cr, uid, ids):
if id.move_line_id:
res[id.id] = id.move_line_id.date_maturity
else:
res[id.id] = False
return res
def _get_ml_created_date(self, cr, uid, ids, *a):
res = {}
for id in self.browse(cr, uid, ids):
if id.move_line_id:
res[id.id] = id.move_line_id.date_created
else:
res[id.id] = False
return res
_columns = {
'name': fields.char('Your Reference', required=True),
        'communication': fields.char('Communication', required=True, help="Used as the message between the ordering customer and the current company. Depicts 'What do you want to say to the recipient about this order?'"),
'communication2': fields.char('Communication 2', help='The successor message of Communication.'),
'move_line_id': fields.many2one('account.move.line', 'Entry line', domain=[('reconcile_id', '=', False), ('account_id.type', '=', 'payable')], help='This Entry Line will be referred for the information of the ordering customer.'),
'amount_currency': fields.float('Amount in Partner Currency', digits=(16, 2),
required=True, help='Payment amount in the partner currency'),
'currency': fields.many2one('res.currency','Partner Currency', required=True),
'company_currency': fields.many2one('res.currency', 'Company Currency', readonly=True),
'bank_id': fields.many2one('res.partner.bank', 'Destination Bank Account'),
'order_id': fields.many2one('payment.order', 'Order', required=True,
ondelete='cascade', select=True),
'partner_id': fields.many2one('res.partner', string="Partner", required=True, help='The Ordering Customer'),
'amount': fields.function(_amount, string='Amount in Company Currency',
type='float',
help='Payment amount in the company currency'),
'ml_date_created': fields.function(_get_ml_created_date, string="Effective Date",
type='date', help="Invoice Effective Date"),
'ml_maturity_date': fields.function(_get_ml_maturity_date, type='date', string='Due Date'),
'ml_inv_ref': fields.function(_get_ml_inv_ref, type='many2one', relation='account.invoice', string='Invoice Ref.'),
'info_owner': fields.function(_info_owner, string="Owner Account", type="text", help='Address of the Main Partner'),
'info_partner': fields.function(_info_partner, string="Destination Account", type="text", help='Address of the Ordering Customer.'),
'date': fields.date('Payment Date', help="If no payment date is specified, the bank will treat this payment line directly"),
'create_date': fields.datetime('Created', readonly=True),
'state': fields.selection([('normal','Free'), ('structured','Structured')], 'Communication Type', required=True),
'bank_statement_line_id': fields.many2one('account.bank.statement.line', 'Bank statement line'),
'company_id': fields.related('order_id', 'company_id', type='many2one', relation='res.company', string='Company', store=True, readonly=True),
}
_defaults = {
'name': lambda obj, cursor, user, context: obj.pool.get('ir.sequence'
).get(cursor, user, 'payment.line'),
'state': 'normal',
'currency': _get_currency,
'company_currency': _get_currency,
'date': _get_date,
}
_sql_constraints = [
('name_uniq', 'UNIQUE(name)', 'The payment line name must be unique!'),
]
def onchange_move_line(self, cr, uid, ids, move_line_id, payment_type, date_prefered, date_scheduled, currency=False, company_currency=False, context=None):
data = {}
move_line_obj = self.pool.get('account.move.line')
data['amount_currency'] = data['communication'] = data['partner_id'] = data['bank_id'] = data['amount'] = False
if move_line_id:
line = move_line_obj.browse(cr, uid, move_line_id, context=context)
data['amount_currency'] = line.amount_residual_currency
res = self.onchange_amount(cr, uid, ids, data['amount_currency'], currency,
company_currency, context)
if res:
data['amount'] = res['value']['amount']
data['partner_id'] = line.partner_id.id
temp = line.currency_id and line.currency_id.id or False
if not temp:
if line.invoice:
data['currency'] = line.invoice.currency_id.id
else:
data['currency'] = temp
# calling onchange of partner and updating data dictionary
temp_dict = self.onchange_partner(cr, uid, ids, line.partner_id.id, payment_type)
data.update(temp_dict['value'])
data['communication'] = line.ref
if date_prefered == 'now':
#no payment date => immediate payment
data['date'] = False
elif date_prefered == 'due':
data['date'] = line.date_maturity
elif date_prefered == 'fixed':
data['date'] = date_scheduled
return {'value': data}
def onchange_amount(self, cr, uid, ids, amount, currency, cmpny_currency, context=None):
if (not amount) or (not cmpny_currency):
return {'value': {'amount': False}}
res = {}
currency_obj = self.pool.get('res.currency')
company_amount = currency_obj.compute(cr, uid, currency, cmpny_currency, amount)
res['amount'] = company_amount
return {'value': res}
def onchange_partner(self, cr, uid, ids, partner_id, payment_type, context=None):
data = {}
partner_obj = self.pool.get('res.partner')
payment_mode_obj = self.pool.get('payment.mode')
data['info_partner'] = data['bank_id'] = False
if partner_id:
part_obj = partner_obj.browse(cr, uid, partner_id, context=context)
partner = part_obj.name or ''
data['info_partner'] = self._get_info_partner(cr, uid, part_obj, context=context)
if part_obj.bank_ids and payment_type:
bank_type = payment_mode_obj.suitable_bank_types(cr, uid, payment_type, context=context)
for bank in part_obj.bank_ids:
if bank.state in bank_type:
data['bank_id'] = bank.id
break
return {'value': data}
def fields_get(self, cr, uid, fields=None, context=None, write_access=True, attributes=None):
res = super(payment_line, self).fields_get(cr, uid, fields, context, write_access, attributes)
if 'communication2' in res:
res['communication2'].setdefault('states', {})
res['communication2']['states']['structured'] = [('readonly', True)]
res['communication2']['states']['normal'] = [('readonly', False)]
return res
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 4,183,097,846,940,907,500 | 7,322,777,428,538,392,000 | 45.848411 | 296 | 0.57951 | false |
willthames/ansible-modules-core | cloud/amazon/ec2_key.py | 26 | 7232 | #!/usr/bin/python
# -*- coding: utf-8 -*-
DOCUMENTATION = '''
---
module: ec2_key
version_added: "1.5"
short_description: maintain an ec2 key pair.
description:
- maintains ec2 key pairs. This module has a dependency on python-boto >= 2.5
options:
name:
description:
- Name of the key pair.
required: true
key_material:
description:
- Public key material.
required: false
region:
description:
- the EC2 region to use
required: false
default: null
aliases: []
state:
description:
- create or delete keypair
required: false
default: 'present'
aliases: []
wait:
description:
- Wait for the specified action to complete before returning.
required: false
default: false
aliases: []
version_added: "1.6"
wait_timeout:
description:
- How long before wait gives up, in seconds
required: false
default: 300
aliases: []
version_added: "1.6"
extends_documentation_fragment: aws
author: Vincent Viallet
'''
EXAMPLES = '''
# Note: None of these examples set aws_access_key, aws_secret_key, or region.
# It is assumed that their matching environment variables are set.
# Creates a new ec2 key pair named `example` if not present, returns generated
# private key
- name: example ec2 key
ec2_key:
name: example
# Creates a new ec2 key pair named `example2` if not present using provided key
# material. This could use the 'file' lookup plugin to pull this off disk.
- name: example2 ec2 key
ec2_key:
name: example2
key_material: 'ssh-rsa AAAAxyz...== me@example.com'
state: present
# Creates a new ec2 key pair named `example3` if not present using provided key
# material
- name: example3 ec2 key
ec2_key:
name: example3
key_material: "{{ item }}"
with_file: /path/to/public_key.id_rsa.pub
# Removes ec2 key pair by name
- name: remove example key
ec2_key:
name: example
state: absent
'''
import random
import string
import sys
import time
try:
    import boto.ec2
except ImportError:
    print "failed=True msg='boto required for this module'"
    sys.exit(1)
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(dict(
name=dict(required=True),
key_material=dict(required=False),
state = dict(default='present', choices=['present', 'absent']),
wait = dict(type='bool', default=False),
wait_timeout = dict(default=300),
)
)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
)
name = module.params['name']
state = module.params.get('state')
key_material = module.params.get('key_material')
wait = module.params.get('wait')
wait_timeout = int(module.params.get('wait_timeout'))
changed = False
ec2 = ec2_connect(module)
# find the key if present
key = ec2.get_key_pair(name)
# Ensure requested key is absent
if state == 'absent':
if key:
'''found a match, delete it'''
try:
key.delete()
if wait:
start = time.time()
action_complete = False
while (time.time() - start) < wait_timeout:
if not ec2.get_key_pair(name):
action_complete = True
break
time.sleep(1)
if not action_complete:
module.fail_json(msg="timed out while waiting for the key to be removed")
except Exception, e:
module.fail_json(msg="Unable to delete key pair '%s' - %s" % (key, e))
else:
key = None
changed = True
else:
'''no match found, no changes required'''
# Ensure requested key is present
elif state == 'present':
if key:
# existing key found
if key_material:
# EC2's fingerprints are non-trivial to generate, so push this key
# to a temporary name and make ec2 calculate the fingerprint for us.
#
# http://blog.jbrowne.com/?p=23
# https://forums.aws.amazon.com/thread.jspa?messageID=352828
# find an unused name
test = 'empty'
while test:
randomchars = [random.choice(string.ascii_letters + string.digits) for x in range(0,10)]
tmpkeyname = "ansible-" + ''.join(randomchars)
test = ec2.get_key_pair(tmpkeyname)
# create tmp key
tmpkey = ec2.import_key_pair(tmpkeyname, key_material)
# get tmp key fingerprint
tmpfingerprint = tmpkey.fingerprint
# delete tmp key
tmpkey.delete()
if key.fingerprint != tmpfingerprint:
if not module.check_mode:
key.delete()
key = ec2.import_key_pair(name, key_material)
if wait:
start = time.time()
action_complete = False
while (time.time() - start) < wait_timeout:
if ec2.get_key_pair(name):
action_complete = True
break
time.sleep(1)
if not action_complete:
module.fail_json(msg="timed out while waiting for the key to be re-created")
                changed = True
# if the key doesn't exist, create it now
else:
'''no match found, create it'''
if not module.check_mode:
if key_material:
'''We are providing the key, need to import'''
key = ec2.import_key_pair(name, key_material)
else:
'''
No material provided, let AWS handle the key creation and
retrieve the private key
'''
key = ec2.create_key_pair(name)
if wait:
start = time.time()
action_complete = False
while (time.time() - start) < wait_timeout:
if ec2.get_key_pair(name):
action_complete = True
break
time.sleep(1)
if not action_complete:
module.fail_json(msg="timed out while waiting for the key to be created")
changed = True
if key:
data = {
'name': key.name,
'fingerprint': key.fingerprint
}
if key.material:
data.update({'private_key': key.material})
module.exit_json(changed=changed, key=data)
else:
module.exit_json(changed=changed, key=None)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
main()
| gpl-3.0 | -3,351,605,290,186,760,700 | 3,850,756,241,919,674,000 | 29.905983 | 108 | 0.530973 | false |
davidgardenier/frbpoppy | tests/lognlogs/local.py | 1 | 1611 | """Check the log N log F slope of a local population."""
import numpy as np
import matplotlib.pyplot as plt
from frbpoppy import CosmicPopulation, Survey, SurveyPopulation
from frbpoppy.population import unpickle
from tests.convenience import plot_aa_style, rel_path
MAKE = True
if MAKE:
population = CosmicPopulation.simple(1e5, generate=True)
survey = Survey('perfect')
surv_pop = SurveyPopulation(population, survey)
surv_pop.name = 'lognlogflocal'
surv_pop.save()
else:
surv_pop = unpickle('lognlogflocal')
# Get parameter
parms = surv_pop.frbs.fluence
min_p = min(parms)
max_p = max(parms)
# Bin up
min_f = np.log10(min(parms))
max_f = np.log10(max(parms))
log_bins = np.logspace(min_f, max_f, 50)
hist, edges = np.histogram(parms, bins=log_bins)
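# Reversed cumulative sum gives N(>S): the number of bursts whose fluence
# exceeds each bin edge.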
n_gt_s = np.cumsum(hist[::-1])[::-1]
# Calculate alpha
alpha, alpha_err, norm = surv_pop.frbs.calc_logn_logs(parameter='fluence',
min_p=min_p,
max_p=max_p)
print(alpha, alpha_err, norm)
xs = 10**((np.log10(edges[:-1]) + np.log10(edges[1:])) / 2)
xs = xs[xs >= min_p]
xs = xs[xs <= max_p]
ys = [norm*x**(alpha) for x in xs]
plot_aa_style()
fig = plt.figure()
ax = fig.add_subplot(111)
plt.step(edges[:-1], n_gt_s, where='post')
plt.plot(xs, ys, linestyle='--',
label=rf'$\alpha$ = {alpha:.3} $\pm$ {round(abs(alpha_err), 2)}')
plt.xlabel('Fluence (Jy ms)')
plt.ylabel(r'N(${>}Fluence$)')
plt.xscale('log')
plt.yscale('log')
plt.legend()
plt.tight_layout()
plt.savefig(rel_path('plots/logn_logf_local.pdf'))
| mit | -601,291,705,964,014,100 | 6,451,380,474,483,181,000 | 26.305085 | 74 | 0.628181 | false |
Baumelbi/IntroPython2016 | students/crobison/session04/file_lab.py | 3 | 1359 | # Charles Robison
# 2016.10.21
# File Lab
#!/usr/bin/env python
import os
cwd = os.getcwd()
# write a program which prints the full path to all files
# in the current directory, one per line
for item in os.listdir(cwd):
print(cwd + "/" + item)
# write a program which copies a file from a source to a
# destination (without using shutil or the OS copy command)
file = open('file_lab01.txt', 'r')
file_text = file.read()
file_new = open('file_lab02.txt', 'w')
file_new.write(file_text)
file.close()
file_new.close()
# advanced: make it work for any size file: i.e. don’t read
# the entire contents of the file into memory at once.
with open('file_lab01.txt', 'r') as r, open('file_lab02.txt', 'w') as w:
    for line in r:
        w.write(line)
# the with statement closes both files automatically once the block ends
# You will find the list I generated in the first class of all
# the students in the class, and what programming languages they
# have used in the past.
# Write a little script that reads that file, and generates a list
# of all the languages that have been used.
languages = []
with open('students_test.txt', 'r') as infile:
for line in infile:
        if ':' not in line:
            continue
        # Each line looks like "student: lang1, lang2"; split once into the
        # name and the language list, then collect each language.
        student, langs = line.split(':', 1)
        for language in langs.split(','):
            language = language.strip()
            if language:
                languages.append(language)
print(languages)
| unlicense | -4,087,238,899,123,598,000 | -4,430,138,330,147,929,000 | 27.87234 | 71 | 0.680914 | false |
flwh/KK_mt6589_iq451 | prebuilts/python/linux-x86/2.7.5/lib/python2.7/lib2to3/btm_utils.py | 374 | 10011 | "Utility functions used by the btm_matcher module"
from . import pytree
from .pgen2 import grammar, token
from .pygram import pattern_symbols, python_symbols
syms = pattern_symbols
pysyms = python_symbols
tokens = grammar.opmap
token_labels = token
TYPE_ANY = -1
TYPE_ALTERNATIVES = -2
TYPE_GROUP = -3
class MinNode(object):
"""This class serves as an intermediate representation of the
pattern tree during the conversion to sets of leaf-to-root
subpatterns"""
def __init__(self, type=None, name=None):
self.type = type
self.name = name
self.children = []
self.leaf = False
self.parent = None
self.alternatives = []
self.group = []
def __repr__(self):
return str(self.type) + ' ' + str(self.name)
def leaf_to_root(self):
"""Internal method. Returns a characteristic path of the
pattern tree. This method must be run for all leaves until the
linear subpatterns are merged into a single"""
node = self
subp = []
while node:
if node.type == TYPE_ALTERNATIVES:
node.alternatives.append(subp)
if len(node.alternatives) == len(node.children):
#last alternative
subp = [tuple(node.alternatives)]
node.alternatives = []
node = node.parent
continue
else:
node = node.parent
subp = None
break
if node.type == TYPE_GROUP:
node.group.append(subp)
#probably should check the number of leaves
if len(node.group) == len(node.children):
subp = get_characteristic_subpattern(node.group)
node.group = []
node = node.parent
continue
else:
node = node.parent
subp = None
break
if node.type == token_labels.NAME and node.name:
#in case of type=name, use the name instead
subp.append(node.name)
else:
subp.append(node.type)
node = node.parent
return subp
def get_linear_subpattern(self):
"""Drives the leaf_to_root method. The reason that
leaf_to_root must be run multiple times is because we need to
reject 'group' matches; for example the alternative form
(a | b c) creates a group [b c] that needs to be matched. Since
matching multiple linear patterns overcomes the automaton's
capabilities, leaf_to_root merges each group into a single
choice based on 'characteristic'ity,
i.e. (a|b c) -> (a|b) if b more characteristic than c
Returns: The most 'characteristic'(as defined by
get_characteristic_subpattern) path for the compiled pattern
tree.
"""
for l in self.leaves():
subp = l.leaf_to_root()
if subp:
return subp
def leaves(self):
"Generator that returns the leaves of the tree"
for child in self.children:
for x in child.leaves():
yield x
if not self.children:
yield self
def reduce_tree(node, parent=None):
"""
Internal function. Reduces a compiled pattern tree to an
intermediate representation suitable for feeding the
automaton. This also trims off any optional pattern elements(like
[a], a*).
"""
new_node = None
#switch on the node type
if node.type == syms.Matcher:
#skip
node = node.children[0]
    if node.type == syms.Alternatives:
#2 cases
if len(node.children) <= 2:
#just a single 'Alternative', skip this node
new_node = reduce_tree(node.children[0], parent)
else:
#real alternatives
new_node = MinNode(type=TYPE_ALTERNATIVES)
#skip odd children('|' tokens)
for child in node.children:
if node.children.index(child)%2:
continue
reduced = reduce_tree(child, new_node)
if reduced is not None:
new_node.children.append(reduced)
elif node.type == syms.Alternative:
if len(node.children) > 1:
new_node = MinNode(type=TYPE_GROUP)
for child in node.children:
reduced = reduce_tree(child, new_node)
if reduced:
new_node.children.append(reduced)
if not new_node.children:
# delete the group if all of the children were reduced to None
new_node = None
else:
new_node = reduce_tree(node.children[0], parent)
elif node.type == syms.Unit:
if (isinstance(node.children[0], pytree.Leaf) and
node.children[0].value == '('):
#skip parentheses
return reduce_tree(node.children[1], parent)
if ((isinstance(node.children[0], pytree.Leaf) and
node.children[0].value == '[')
or
(len(node.children)>1 and
hasattr(node.children[1], "value") and
node.children[1].value == '[')):
#skip whole unit if its optional
return None
leaf = True
details_node = None
alternatives_node = None
has_repeater = False
repeater_node = None
has_variable_name = False
for child in node.children:
if child.type == syms.Details:
leaf = False
details_node = child
elif child.type == syms.Repeater:
has_repeater = True
repeater_node = child
elif child.type == syms.Alternatives:
alternatives_node = child
if hasattr(child, 'value') and child.value == '=': # variable name
has_variable_name = True
#skip variable name
if has_variable_name:
#skip variable name, '='
name_leaf = node.children[2]
if hasattr(name_leaf, 'value') and name_leaf.value == '(':
# skip parenthesis
name_leaf = node.children[3]
else:
name_leaf = node.children[0]
#set node type
if name_leaf.type == token_labels.NAME:
#(python) non-name or wildcard
if name_leaf.value == 'any':
new_node = MinNode(type=TYPE_ANY)
else:
if hasattr(token_labels, name_leaf.value):
new_node = MinNode(type=getattr(token_labels, name_leaf.value))
else:
new_node = MinNode(type=getattr(pysyms, name_leaf.value))
elif name_leaf.type == token_labels.STRING:
#(python) name or character; remove the apostrophes from
#the string value
name = name_leaf.value.strip("'")
if name in tokens:
new_node = MinNode(type=tokens[name])
else:
new_node = MinNode(type=token_labels.NAME, name=name)
elif name_leaf.type == syms.Alternatives:
new_node = reduce_tree(alternatives_node, parent)
#handle repeaters
if has_repeater:
if repeater_node.children[0].value == '*':
#reduce to None
new_node = None
elif repeater_node.children[0].value == '+':
                #reduce to a single occurrence, i.e. do nothing
pass
else:
#TODO: handle {min, max} repeaters
                raise NotImplementedError
#add children
if details_node and new_node is not None:
for child in details_node.children[1:-1]:
#skip '<', '>' markers
reduced = reduce_tree(child, new_node)
if reduced is not None:
new_node.children.append(reduced)
if new_node:
new_node.parent = parent
return new_node
def get_characteristic_subpattern(subpatterns):
"""Picks the most characteristic from a list of linear patterns
Current order used is:
names > common_names > common_chars
"""
if not isinstance(subpatterns, list):
return subpatterns
if len(subpatterns)==1:
return subpatterns[0]
# first pick out the ones containing variable names
subpatterns_with_names = []
subpatterns_with_common_names = []
common_names = ['in', 'for', 'if' , 'not', 'None']
subpatterns_with_common_chars = []
common_chars = "[]().,:"
for subpattern in subpatterns:
if any(rec_test(subpattern, lambda x: type(x) is str)):
if any(rec_test(subpattern,
lambda x: isinstance(x, str) and x in common_chars)):
subpatterns_with_common_chars.append(subpattern)
elif any(rec_test(subpattern,
lambda x: isinstance(x, str) and x in common_names)):
subpatterns_with_common_names.append(subpattern)
else:
subpatterns_with_names.append(subpattern)
if subpatterns_with_names:
subpatterns = subpatterns_with_names
elif subpatterns_with_common_names:
subpatterns = subpatterns_with_common_names
elif subpatterns_with_common_chars:
subpatterns = subpatterns_with_common_chars
# of the remaining subpatterns pick out the longest one
return max(subpatterns, key=len)
def rec_test(sequence, test_func):
"""Tests test_func on all items of sequence and items of included
sub-iterables"""
for x in sequence:
if isinstance(x, (list, tuple)):
for y in rec_test(x, test_func):
yield y
else:
yield test_func(x)
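# Illustrative example (not part of the original module):
#
#   get_characteristic_subpattern([[100, 'if'], [100, '('], [100, 'my_name']])
#   # -> [100, 'my_name']: a subpattern carrying a variable name beats ones
#   # containing only a common name ('if') or a common char ('(').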
| gpl-2.0 | -6,496,995,156,116,963,000 | 2,113,215,254,155,776,300 | 34.374558 | 83 | 0.551194 | false |
40223121/w17 | static/Brython3.1.1-20150328-091302/Lib/_thread.py | 740 | 4879 | """Drop-in replacement for the thread module.
Meant to be used as a brain-dead substitute so that threaded code does
not need to be rewritten for when the thread module is not present.
Suggested usage is::
try:
import _thread
except ImportError:
import _dummy_thread as _thread
"""
# Exports only things specified by thread documentation;
# skipping obsolete synonyms allocate(), start_new(), exit_thread().
__all__ = ['error', 'start_new_thread', 'exit', 'get_ident', 'allocate_lock',
'interrupt_main', 'LockType']
# A dummy value
TIMEOUT_MAX = 2**31
# NOTE: this module can be imported early in the extension building process,
# and so top level imports of other modules should be avoided. Instead, all
# imports are done when needed on a function-by-function basis. Since threads
# are disabled, the import lock should not be an issue anyway (??).
error = RuntimeError
def start_new_thread(function, args, kwargs={}):
"""Dummy implementation of _thread.start_new_thread().
Compatibility is maintained by making sure that ``args`` is a
tuple and ``kwargs`` is a dictionary. If an exception is raised
and it is SystemExit (which can be done by _thread.exit()) it is
caught and nothing is done; all other exceptions are printed out
by using traceback.print_exc().
If the executed function calls interrupt_main the KeyboardInterrupt will be
raised when the function returns.
"""
if type(args) != type(tuple()):
raise TypeError("2nd arg must be a tuple")
if type(kwargs) != type(dict()):
raise TypeError("3rd arg must be a dict")
global _main
_main = False
try:
function(*args, **kwargs)
except SystemExit:
pass
except:
import traceback
traceback.print_exc()
_main = True
global _interrupt
if _interrupt:
_interrupt = False
raise KeyboardInterrupt
def exit():
"""Dummy implementation of _thread.exit()."""
raise SystemExit
def get_ident():
"""Dummy implementation of _thread.get_ident().
Since this module should only be used when _threadmodule is not
available, it is safe to assume that the current process is the
only thread. Thus a constant can be safely returned.
"""
return -1
def allocate_lock():
"""Dummy implementation of _thread.allocate_lock()."""
return LockType()
def stack_size(size=None):
"""Dummy implementation of _thread.stack_size()."""
if size is not None:
raise error("setting thread stack size not supported")
return 0
class LockType(object):
"""Class implementing dummy implementation of _thread.LockType.
Compatibility is maintained by maintaining self.locked_status
which is a boolean that stores the state of the lock. Pickling of
the lock, though, should not be done since if the _thread module is
then used with an unpickled ``lock()`` from here problems could
occur from this class not having atomic methods.
"""
def __init__(self):
self.locked_status = False
def acquire(self, waitflag=None, timeout=-1):
"""Dummy implementation of acquire().
For blocking calls, self.locked_status is automatically set to
True and returned appropriately based on value of
``waitflag``. If it is non-blocking, then the value is
actually checked and not set if it is already acquired. This
is all done so that threading.Condition's assert statements
aren't triggered and throw a little fit.
"""
if waitflag is None or waitflag:
self.locked_status = True
return True
else:
if not self.locked_status:
self.locked_status = True
return True
else:
if timeout > 0:
import time
time.sleep(timeout)
return False
__enter__ = acquire
def __exit__(self, typ, val, tb):
self.release()
def release(self):
"""Release the dummy lock."""
# XXX Perhaps shouldn't actually bother to test? Could lead
# to problems for complex, threaded code.
if not self.locked_status:
raise error
self.locked_status = False
return True
def locked(self):
return self.locked_status
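# Illustrative sketch (not part of the original module): the dummy lock
# follows the same protocol as a real _thread lock, so single-threaded
# code runs unchanged:
#
#   lock = allocate_lock()
#   with lock:                 # __enter__ is acquire()
#       assert lock.locked()
#   assert not lock.locked()   # __exit__ called release()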
# Used to signal that interrupt_main was called in a "thread"
_interrupt = False
# True when not executing in a "thread"
_main = True
def interrupt_main():
"""Set _interrupt flag to True to have start_new_thread raise
KeyboardInterrupt upon exiting."""
if _main:
raise KeyboardInterrupt
else:
global _interrupt
_interrupt = True
# Brython-specific to avoid circular references between threading and _threading_local
class _local:
    pass
| gpl-3.0 | -3,501,668,755,631,358,500 | 3,845,049,746,755,233,000 | 30.483871 | 86 | 0.651363 | false |
VanirAOSP/external_chromium_org | tools/perf/metrics/statistics_unittest.py | 23 | 6400 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
import random
from metrics import statistics
def Relax(samples, iterations=10):
"""Lloyd relaxation in 1D.
Keeps the position of the first and last sample.
"""
for _ in xrange(0, iterations):
voronoi_boundaries = []
for i in xrange(1, len(samples)):
voronoi_boundaries.append((samples[i] + samples[i-1]) * 0.5)
relaxed_samples = []
relaxed_samples.append(samples[0])
for i in xrange(1, len(samples)-1):
relaxed_samples.append(
(voronoi_boundaries[i-1] + voronoi_boundaries[i]) * 0.5)
relaxed_samples.append(samples[-1])
samples = relaxed_samples
return samples
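# Example (illustrative): the interior samples drift toward uniform spacing
# while the endpoints stay put, e.g. Relax([0.0, 0.1, 0.2, 1.0]) approaches
# [0.0, 1/3, 2/3, 1.0] after enough iterations.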
class StatisticsUnitTest(unittest.TestCase):
def testNormalizeSamples(self):
samples = []
normalized_samples, scale = statistics.NormalizeSamples(samples)
self.assertEquals(normalized_samples, samples)
self.assertEquals(scale, 1.0)
samples = [0.0, 0.0]
normalized_samples, scale = statistics.NormalizeSamples(samples)
self.assertEquals(normalized_samples, samples)
self.assertEquals(scale, 1.0)
samples = [0.0, 1.0/3.0, 2.0/3.0, 1.0]
normalized_samples, scale = statistics.NormalizeSamples(samples)
self.assertEquals(normalized_samples, [1.0/8.0, 3.0/8.0, 5.0/8.0, 7.0/8.0])
self.assertEquals(scale, 0.75)
samples = [1.0/8.0, 3.0/8.0, 5.0/8.0, 7.0/8.0]
normalized_samples, scale = statistics.NormalizeSamples(samples)
self.assertEquals(normalized_samples, samples)
self.assertEquals(scale, 1.0)
def testDiscrepancyRandom(self):
"""Tests NormalizeSamples and Discrepancy with random samples.
Generates 10 sets of 10 random samples, computes the discrepancy,
    relaxes the samples using Lloyd's algorithm in 1D, and computes the
discrepancy of the relaxed samples. Discrepancy of the relaxed samples
must be less than or equal to the discrepancy of the original samples.
"""
random.seed(1234567)
for _ in xrange(0, 10):
samples = []
num_samples = 10
clock = 0.0
samples.append(clock)
for _ in xrange(1, num_samples):
clock += random.random()
samples.append(clock)
samples = statistics.NormalizeSamples(samples)[0]
d = statistics.Discrepancy(samples)
relaxed_samples = Relax(samples)
d_relaxed = statistics.Discrepancy(relaxed_samples)
self.assertTrue(d_relaxed <= d)
def testDiscrepancyAnalytic(self):
"""Computes discrepancy for sample sets with known statistics."""
interval_multiplier = 100000
samples = []
d = statistics.Discrepancy(samples, interval_multiplier)
self.assertEquals(d, 1.0)
samples = [0.5]
d = statistics.Discrepancy(samples, interval_multiplier)
self.assertEquals(round(d), 1.0)
samples = [0.0, 1.0]
d = statistics.Discrepancy(samples, interval_multiplier)
self.assertAlmostEquals(round(d, 2), 1.0)
samples = [0.5, 0.5, 0.5]
d = statistics.Discrepancy(samples, interval_multiplier)
self.assertAlmostEquals(d, 1.0)
samples = [1.0/8.0, 3.0/8.0, 5.0/8.0, 7.0/8.0]
d = statistics.Discrepancy(samples, interval_multiplier)
self.assertAlmostEquals(round(d, 2), 0.25)
samples = [0.0, 1.0/3.0, 2.0/3.0, 1.0]
d = statistics.Discrepancy(samples, interval_multiplier)
self.assertAlmostEquals(round(d, 2), 0.5)
samples = statistics.NormalizeSamples(samples)[0]
d = statistics.Discrepancy(samples, interval_multiplier)
self.assertAlmostEquals(round(d, 2), 0.25)
time_stamps_a = [0, 1, 2, 3, 5, 6]
time_stamps_b = [0, 1, 2, 3, 5, 7]
time_stamps_c = [0, 2, 3, 4]
time_stamps_d = [0, 2, 3, 4, 5]
d_abs_a = statistics.FrameDiscrepancy(time_stamps_a, True,
interval_multiplier)
d_abs_b = statistics.FrameDiscrepancy(time_stamps_b, True,
interval_multiplier)
d_abs_c = statistics.FrameDiscrepancy(time_stamps_c, True,
interval_multiplier)
d_abs_d = statistics.FrameDiscrepancy(time_stamps_d, True,
interval_multiplier)
d_rel_a = statistics.FrameDiscrepancy(time_stamps_a, False,
interval_multiplier)
d_rel_b = statistics.FrameDiscrepancy(time_stamps_b, False,
interval_multiplier)
d_rel_c = statistics.FrameDiscrepancy(time_stamps_c, False,
interval_multiplier)
d_rel_d = statistics.FrameDiscrepancy(time_stamps_d, False,
interval_multiplier)
self.assertTrue(d_abs_a < d_abs_b)
self.assertTrue(d_rel_a < d_rel_b)
self.assertTrue(d_rel_d < d_rel_c)
self.assertEquals(round(d_abs_d, 2), round(d_abs_c, 2))
def testPercentile(self):
# The 50th percentile is the median value.
self.assertEquals(3, statistics.Percentile([4, 5, 1, 3, 2], 50))
self.assertEquals(2.5, statistics.Percentile([5, 1, 3, 2], 50))
# When the list of values is empty, 0 is returned.
self.assertEquals(0, statistics.Percentile([], 50))
# When the given percentage is very low, the lowest value is given.
self.assertEquals(1, statistics.Percentile([2, 1, 5, 4, 3], 5))
# When the given percentage is very high, the highest value is given.
self.assertEquals(5, statistics.Percentile([5, 2, 4, 1, 3], 95))
# Linear interpolation between closest ranks is used. Using the example
# from <http://en.wikipedia.org/wiki/Percentile>:
self.assertEquals(27.5, statistics.Percentile([15, 20, 35, 40, 50], 40))
def testArithmeticMean(self):
# The ArithmeticMean function computes the simple average.
self.assertAlmostEquals(40/3.0, statistics.ArithmeticMean([10, 10, 20], 3))
self.assertAlmostEquals(15.0, statistics.ArithmeticMean([10, 20], 2))
# Both lists of values or single values can be given for either argument.
self.assertAlmostEquals(40/3.0, statistics.ArithmeticMean(40, [1, 1, 1]))
# If the 'count' is zero, then zero is returned.
self.assertEquals(0, statistics.ArithmeticMean(4.0, 0))
self.assertEquals(0, statistics.ArithmeticMean(4.0, []))
| bsd-3-clause | 5,734,171,458,428,654,000 | 6,781,525,138,139,979,000 | 39 | 79 | 0.653281 | false |
ocefpaf/cartopy | lib/cartopy/tests/crs/test_transverse_mercator.py | 1 | 4985 | # (C) British Crown Copyright 2013 - 2019, Met Office
#
# This file is part of cartopy.
#
# cartopy is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# cartopy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with cartopy. If not, see <https://www.gnu.org/licenses/>.
"""
Tests for the Transverse Mercator projection, including OSGB and OSNI.
"""
from __future__ import (absolute_import, division, print_function)
import numpy as np
import pytest
import cartopy.crs as ccrs
@pytest.mark.parametrize('approx', [True, False])
class TestTransverseMercator(object):
def setup_class(self):
self.point_a = (-3.474083, 50.727301)
self.point_b = (0.5, 50.5)
self.src_crs = ccrs.PlateCarree()
def test_default(self, approx):
proj = ccrs.TransverseMercator(approx=approx)
res = proj.transform_point(*self.point_a, src_crs=self.src_crs)
np.testing.assert_array_almost_equal(res,
(-245269.53181, 5627508.74355),
decimal=5)
res = proj.transform_point(*self.point_b, src_crs=self.src_crs)
np.testing.assert_array_almost_equal(res, (35474.63566645,
5596583.41949901))
def test_osgb_vals(self, approx):
proj = ccrs.TransverseMercator(central_longitude=-2,
central_latitude=49,
scale_factor=0.9996012717,
false_easting=400000,
false_northing=-100000,
globe=ccrs.Globe(datum='OSGB36',
ellipse='airy'),
approx=approx)
res = proj.transform_point(*self.point_a, src_crs=self.src_crs)
np.testing.assert_array_almost_equal(res, (295971.28668, 93064.27666),
decimal=5)
res = proj.transform_point(*self.point_b, src_crs=self.src_crs)
np.testing.assert_array_almost_equal(res, (577274.98380, 69740.49227),
decimal=5)
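# (The projection parameters above are those of the British National Grid,
# EPSG:27700: central meridian -2, latitude of origin 49, k=0.9996012717,
# false easting/northing 400000/-100000 on the Airy ellipsoid.)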
def test_nan(self, approx):
if not approx:
pytest.xfail('Proj does not return NaN correctly with etmerc.')
proj = ccrs.TransverseMercator(approx=approx)
res = proj.transform_point(0.0, float('nan'), src_crs=self.src_crs)
assert np.all(np.isnan(res))
res = proj.transform_point(float('nan'), 0.0, src_crs=self.src_crs)
assert np.all(np.isnan(res))
class TestOSGB(object):
def setup_class(self):
self.point_a = (-3.474083, 50.727301)
self.point_b = (0.5, 50.5)
self.src_crs = ccrs.PlateCarree()
self.nan = float('nan')
@pytest.mark.parametrize('approx', [True, False])
def test_default(self, approx):
proj = ccrs.OSGB(approx=approx)
res = proj.transform_point(*self.point_a, src_crs=self.src_crs)
np.testing.assert_array_almost_equal(res, (295971.28668, 93064.27666),
decimal=5)
res = proj.transform_point(*self.point_b, src_crs=self.src_crs)
np.testing.assert_array_almost_equal(res, (577274.98380, 69740.49227),
decimal=5)
def test_nan(self):
proj = ccrs.OSGB(approx=True)
res = proj.transform_point(0.0, float('nan'), src_crs=self.src_crs)
assert np.all(np.isnan(res))
res = proj.transform_point(float('nan'), 0.0, src_crs=self.src_crs)
assert np.all(np.isnan(res))
class TestOSNI(object):
def setup_class(self):
self.point_a = (-6.826286, 54.725116)
self.src_crs = ccrs.PlateCarree()
self.nan = float('nan')
@pytest.mark.parametrize('approx', [True, False])
def test_default(self, approx):
proj = ccrs.OSNI(approx=approx)
res = proj.transform_point(*self.point_a, src_crs=self.src_crs)
np.testing.assert_array_almost_equal(
res, (275614.26762651594, 386984.206429612),
decimal=0 if ccrs.PROJ4_VERSION < (5, 0, 0) else 6)
def test_nan(self):
proj = ccrs.OSNI(approx=True)
res = proj.transform_point(0.0, float('nan'), src_crs=self.src_crs)
assert np.all(np.isnan(res))
res = proj.transform_point(float('nan'), 0.0, src_crs=self.src_crs)
assert np.all(np.isnan(res))
| lgpl-3.0 | -8,031,038,299,114,109,000 | 4,724,787,581,374,706,000 | 41.606838 | 78 | 0.583952 | false |
devGregA/code | build/lib.linux-x86_64-2.7/scrapy/commands/crawl.py | 8 | 2610 | import os
from scrapy.command import ScrapyCommand
from scrapy.utils.conf import arglist_to_dict
from scrapy.exceptions import UsageError
class Command(ScrapyCommand):
requires_project = True
def syntax(self):
return "[options] <spider>"
def short_desc(self):
return "Run a spider"
def add_options(self, parser):
ScrapyCommand.add_options(self, parser)
parser.add_option("-a", dest="spargs", action="append", default=[], metavar="NAME=VALUE",
help="set spider argument (may be repeated)")
parser.add_option("-o", "--output", metavar="FILE",
help="dump scraped items into FILE (use - for stdout)")
parser.add_option("-t", "--output-format", metavar="FORMAT",
help="format to use for dumping items with -o")
def process_options(self, args, opts):
ScrapyCommand.process_options(self, args, opts)
try:
opts.spargs = arglist_to_dict(opts.spargs)
except ValueError:
raise UsageError("Invalid -a value, use -a NAME=VALUE", print_help=False)
if opts.output:
if opts.output == '-':
self.settings.set('FEED_URI', 'stdout:', priority='cmdline')
else:
self.settings.set('FEED_URI', opts.output, priority='cmdline')
valid_output_formats = (
list(self.settings.getdict('FEED_EXPORTERS').keys()) +
list(self.settings.getdict('FEED_EXPORTERS_BASE').keys())
)
if not opts.output_format:
opts.output_format = os.path.splitext(opts.output)[1].replace(".", "")
if opts.output_format not in valid_output_formats:
raise UsageError("Unrecognized output format '%s', set one"
" using the '-t' switch or as a file extension"
" from the supported list %s" % (opts.output_format,
tuple(valid_output_formats)))
self.settings.set('FEED_FORMAT', opts.output_format, priority='cmdline')
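# e.g. a hypothetical 'scrapy crawl spider -o items.json' leaves -t unset,
# so the format above falls back to the output file extension: 'json'.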
def run(self, args, opts):
if len(args) < 1:
raise UsageError()
elif len(args) > 1:
raise UsageError("running 'scrapy crawl' with more than one spider is no longer supported")
spname = args[0]
crawler = self.crawler_process.create_crawler()
spider = crawler.spiders.create(spname, **opts.spargs)
crawler.crawl(spider)
self.crawler_process.start()
| bsd-3-clause | 3,420,976,028,429,563,400 | -199,152,144,977,495,100 | 42.5 | 103 | 0.569349 | false |
edsu/tweepy | tests/test_cursors.py | 44 | 1861 | from tweepy import Cursor
from .config import create_auth
from .config import TweepyTestCase, username, use_replay, tape
import six
if six.PY3:
import unittest
else:
import unittest2 as unittest
class TweepyCursorTests(TweepyTestCase):
@tape.use_cassette('testidcursoritems.json')
def testidcursoritems(self):
items = list(Cursor(self.api.user_timeline).items(25))
self.assertEqual(len(items), 25)
@tape.use_cassette('testidcursorpages.json')
def testidcursorpages(self):
pages = list(Cursor(self.api.user_timeline).pages(5))
self.assertEqual(len(pages), 5)
@tape.use_cassette('testcursorcursoritems.json')
def testcursorcursoritems(self):
items = list(Cursor(self.api.friends_ids).items(10))
self.assertEqual(len(items), 10)
items = list(Cursor(self.api.followers_ids, username).items(10))
self.assertEqual(len(items), 10)
@tape.use_cassette('testcursorcursorpages.json')
def testcursorcursorpages(self):
pages = list(Cursor(self.api.friends_ids).pages(1))
self.assert_(len(pages) == 1)
pages = list(Cursor(self.api.followers_ids, username).pages(1))
self.assert_(len(pages) == 1)
@tape.use_cassette('testcursorsetstartcursor.json')
def testcursorsetstartcursor(self):
c = Cursor(self.api.friends_ids, cursor=123456)
self.assertEqual(c.iterator.next_cursor, 123456)
self.assertFalse('cursor' in c.iterator.kargs)
@tape.use_cassette('testcursornext.json')
def testcursornext(self):
"""
Test cursor.next() behavior, id being passed correctly.
Regression test for issue #518
"""
cursor = Cursor(self.api.user_timeline, id='twitter').items(5)
status = cursor.next()
self.assertEquals(status.user.screen_name, 'twitter')
| mit | 1,448,405,128,780,930,000 | 9,061,512,954,599,228,000 | 32.836364 | 72 | 0.675981 | false |
cytec/SickRage | lib/hachoir_parser/container/riff.py | 86 | 16938 | # -*- coding: UTF-8 -*-
"""
RIFF parser, able to parse:
* AVI video container
* WAV audio container
* CDA file
Documents:
- libavformat source code from ffmpeg library
http://ffmpeg.mplayerhq.hu/
- Video for Windows Programmer's Guide
http://www.opennet.ru/docs/formats/avi.txt
- What is an animated cursor?
http://www.gdgsoft.com/anituner/help/aniformat.htm
Authors:
* Aurélien Jacobs
* Mickaël KENIKSSI

* Victor Stinner
Changelog:
* 2007-03-30: support ACON (animated icons)
* 2006-08-08: merge AVI, WAV and CDA parsers into RIFF parser
* 2006-08-03: creation of CDA parser by Mickaël KENIKSSI
* 2005-06-21: creation of WAV parser by Victor Stinner
* 2005-06-08: creation of AVI parser by Victor Stinner and Aurélien Jacobs
Thanks to:
* Wojtek Kaniewski (wojtekka AT logonet.com.pl) for his CDA file
format information
"""
from hachoir_parser import Parser
from hachoir_core.field import (FieldSet, ParserError,
UInt8, UInt16, UInt32, Enum,
Bit, NullBits, NullBytes,
RawBytes, String, PaddingBytes,
SubFile)
from hachoir_core.tools import alignValue, humanDuration
from hachoir_core.endian import LITTLE_ENDIAN
from hachoir_core.text_handler import filesizeHandler, textHandler
from hachoir_parser.video.fourcc import audio_codec_name, video_fourcc_name
from hachoir_parser.image.ico import IcoFile
from datetime import timedelta
def parseText(self):
yield String(self, "text", self["size"].value,
strip=" \0", truncate="\0",
charset="ISO-8859-1")
def parseRawFormat(self, size):
yield RawBytes(self, "raw_format", size)
def parseVideoFormat(self, size):
yield UInt32(self, "video_size", "Video format: Size")
yield UInt32(self, "width", "Video format: Width")
yield UInt32(self, "height", "Video format: Height")
yield UInt16(self, "panes", "Video format: Panes")
yield UInt16(self, "depth", "Video format: Depth")
yield UInt32(self, "tag1", "Video format: Tag1")
yield UInt32(self, "img_size", "Video format: Image size")
yield UInt32(self, "xpels_meter", "Video format: XPelsPerMeter")
yield UInt32(self, "ypels_meter", "Video format: YPelsPerMeter")
yield UInt32(self, "clr_used", "Video format: ClrUsed")
yield UInt32(self, "clr_important", "Video format: ClrImportant")
def parseAudioFormat(self, size):
yield Enum(UInt16(self, "codec", "Audio format: Codec id"), audio_codec_name)
yield UInt16(self, "channel", "Audio format: Channels")
yield UInt32(self, "sample_rate", "Audio format: Sample rate")
yield UInt32(self, "bit_rate", "Audio format: Bit rate")
yield UInt16(self, "block_align", "Audio format: Block align")
if size >= 16:
yield UInt16(self, "bits_per_sample", "Audio format: Bits per sample")
if size >= 18:
yield UInt16(self, "ext_size", "Audio format: Size of extra information")
if size >= 28: # and self["a_channel"].value > 2
yield UInt16(self, "reserved", "Audio format: ")
yield UInt32(self, "channel_mask", "Audio format: channels placement bitmask")
yield UInt32(self, "subformat", "Audio format: Subformat id")
def parseAVIStreamFormat(self):
size = self["size"].value
strtype = self["../stream_hdr/stream_type"].value
TYPE_HANDLER = {
"vids": (parseVideoFormat, 40),
"auds": (parseAudioFormat, 16)
}
handler = parseRawFormat
if strtype in TYPE_HANDLER:
info = TYPE_HANDLER[strtype]
if info[1] <= size:
handler = info[0]
for field in handler(self, size):
yield field
def parseAVIStreamHeader(self):
if self["size"].value != 56:
raise ParserError("Invalid stream header size")
yield String(self, "stream_type", 4, "Stream type four character code", charset="ASCII")
field = String(self, "fourcc", 4, "Stream four character code", strip=" \0", charset="ASCII")
if self["stream_type"].value == "vids":
yield Enum(field, video_fourcc_name, lambda text: text.upper())
else:
yield field
yield UInt32(self, "flags", "Stream flags")
yield UInt16(self, "priority", "Stream priority")
yield String(self, "language", 2, "Stream language", charset="ASCII", strip="\0")
yield UInt32(self, "init_frames", "InitialFrames")
yield UInt32(self, "scale", "Time scale")
yield UInt32(self, "rate", "Divide by scale to give frame rate")
yield UInt32(self, "start", "Stream start time (unit: rate/scale)")
yield UInt32(self, "length", "Stream length (unit: rate/scale)")
yield UInt32(self, "buf_size", "Suggested buffer size")
yield UInt32(self, "quality", "Stream quality")
yield UInt32(self, "sample_size", "Size of samples")
yield UInt16(self, "left", "Destination rectangle (left)")
yield UInt16(self, "top", "Destination rectangle (top)")
yield UInt16(self, "right", "Destination rectangle (right)")
yield UInt16(self, "bottom", "Destination rectangle (bottom)")
class RedBook(FieldSet):
"""
RedBook offset parser, used in CD audio (.cda) file
"""
def createFields(self):
yield UInt8(self, "frame")
yield UInt8(self, "second")
yield UInt8(self, "minute")
yield PaddingBytes(self, "notused", 1)
def formatSerialNumber(field):
"""
Format a disc serial number.
Eg. 0x00085C48 => "0008-5C48"
"""
sn = field.value
return "%04X-%04X" % (sn >> 16, sn & 0xFFFF)
def parseCDDA(self):
"""
HSG address format: number of 1/75 second
HSG offset = (minute*60 + second)*75 + frame + 150 (from RB offset)
HSG length = (minute*60 + second)*75 + frame (from RB length)
"""
yield UInt16(self, "cda_version", "CD file version (currently 1)")
yield UInt16(self, "track_no", "Number of track")
yield textHandler(UInt32(self, "disc_serial", "Disc serial number"),
formatSerialNumber)
yield UInt32(self, "hsg_offset", "Track offset (HSG format)")
yield UInt32(self, "hsg_length", "Track length (HSG format)")
yield RedBook(self, "rb_offset", "Track offset (Red-book format)")
yield RedBook(self, "rb_length", "Track length (Red-book format)")
def parseWAVFormat(self):
size = self["size"].value
if size not in (16, 18):
self.warning("Format with size of %s bytes is not supported!" % size)
yield Enum(UInt16(self, "codec", "Audio codec"), audio_codec_name)
yield UInt16(self, "nb_channel", "Number of audio channel")
yield UInt32(self, "sample_per_sec", "Sample per second")
yield UInt32(self, "byte_per_sec", "Average byte per second")
yield UInt16(self, "block_align", "Block align")
yield UInt16(self, "bit_per_sample", "Bits per sample")
def parseWAVFact(self):
yield UInt32(self, "nb_sample", "Number of samples in audio stream")
def parseAviHeader(self):
yield UInt32(self, "microsec_per_frame", "Microsecond per frame")
yield UInt32(self, "max_byte_per_sec", "Maximum byte per second")
yield NullBytes(self, "reserved", 4)
# Flags
yield NullBits(self, "reserved[]", 4)
yield Bit(self, "has_index")
yield Bit(self, "must_use_index")
yield NullBits(self, "reserved[]", 2)
yield Bit(self, "is_interleaved")
yield NullBits(self, "reserved[]", 2)
yield Bit(self, "trust_cktype")
yield NullBits(self, "reserved[]", 4)
yield Bit(self, "was_capture_file")
yield Bit(self, "is_copyrighted")
yield NullBits(self, "reserved[]", 14)
yield UInt32(self, "total_frame", "Total number of frames in the video")
yield UInt32(self, "init_frame", "Initial frame (used in interleaved video)")
yield UInt32(self, "nb_stream", "Number of streams")
yield UInt32(self, "sug_buf_size", "Suggested buffer size")
yield UInt32(self, "width", "Width in pixel")
yield UInt32(self, "height", "Height in pixel")
yield UInt32(self, "scale")
yield UInt32(self, "rate")
yield UInt32(self, "start")
yield UInt32(self, "length")
def parseODML(self):
yield UInt32(self, "total_frame", "Real number of frame of OpenDML video")
padding = self["size"].value - 4
if 0 < padding:
yield NullBytes(self, "padding[]", padding)
class AVIIndexEntry(FieldSet):
size = 16*8
def createFields(self):
yield String(self, "tag", 4, "Tag", charset="ASCII")
yield UInt32(self, "flags")
yield UInt32(self, "start", "Offset from start of movie data")
yield UInt32(self, "length")
def parseIndex(self):
while not self.eof:
yield AVIIndexEntry(self, "index[]")
class Chunk(FieldSet):
TAG_INFO = {
# This dictionary is edited by RiffFile.validate()
"LIST": ("list[]", None, "Sub-field list"),
"JUNK": ("junk[]", None, "Junk (padding)"),
# Metadata
"INAM": ("title", parseText, "Document title"),
"IART": ("artist", parseText, "Artist"),
"ICMT": ("comment", parseText, "Comment"),
"ICOP": ("copyright", parseText, "Copyright"),
"IENG": ("author", parseText, "Author"),
"ICRD": ("creation_date", parseText, "Creation date"),
"ISFT": ("producer", parseText, "Producer"),
"IDIT": ("datetime", parseText, "Date time"),
# TODO: see below
# "strn": Stream description
# TWOCC code, movie/field[]/tag.value[2:4]:
# "db": "Uncompressed video frame",
# "dc": "Compressed video frame",
# "wb": "Audio data",
# "pc": "Palette change"
}
subtag_info = {
"INFO": ("info", "File informations"),
"hdrl": ("headers", "Headers"),
"strl": ("stream[]", "Stream header list"),
"movi": ("movie", "Movie stream"),
"odml": ("odml", "ODML"),
}
def __init__(self, *args, **kw):
FieldSet.__init__(self, *args, **kw)
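# RIFF framing: an 8-byte header (4-byte tag + 32-bit size) followed by the
# payload, padded to an even byte count -- hence alignValue(size, 2).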
self._size = (8 + alignValue(self["size"].value, 2)) * 8
tag = self["tag"].value
if tag in self.TAG_INFO:
self.tag_info = self.TAG_INFO[tag]
if tag == "LIST":
subtag = self["subtag"].value
if subtag in self.subtag_info:
info = self.subtag_info[subtag]
self.tag_info = (info[0], None, info[1])
self._name = self.tag_info[0]
self._description = self.tag_info[2]
else:
self.tag_info = ("field[]", None, None)
def createFields(self):
yield String(self, "tag", 4, "Tag", charset="ASCII")
yield filesizeHandler(UInt32(self, "size", "Size"))
if not self["size"].value:
return
if self["tag"].value == "LIST":
yield String(self, "subtag", 4, "Sub-tag", charset="ASCII")
handler = self.tag_info[1]
while 8 < (self.size - self.current_size)/8:
field = self.__class__(self, "field[]")
yield field
if (field.size/8) % 2 != 0:
yield UInt8(self, "padding[]", "Padding")
else:
handler = self.tag_info[1]
if handler:
for field in handler(self):
yield field
else:
yield RawBytes(self, "raw_content", self["size"].value)
padding = self.seekBit(self._size)
if padding:
yield padding
def createDescription(self):
tag = self["tag"].display
return u"Chunk (tag %s)" % tag
class ChunkAVI(Chunk):
TAG_INFO = Chunk.TAG_INFO.copy()
TAG_INFO.update({
"strh": ("stream_hdr", parseAVIStreamHeader, "Stream header"),
"strf": ("stream_fmt", parseAVIStreamFormat, "Stream format"),
"avih": ("avi_hdr", parseAviHeader, "AVI header"),
"idx1": ("index", parseIndex, "Stream index"),
"dmlh": ("odml_hdr", parseODML, "ODML header"),
})
class ChunkCDDA(Chunk):
TAG_INFO = Chunk.TAG_INFO.copy()
TAG_INFO.update({
'fmt ': ("cdda", parseCDDA, "CD audio informations"),
})
class ChunkWAVE(Chunk):
TAG_INFO = Chunk.TAG_INFO.copy()
TAG_INFO.update({
'fmt ': ("format", parseWAVFormat, "Audio format"),
'fact': ("nb_sample", parseWAVFact, "Number of samples"),
'data': ("audio_data", None, "Audio stream data"),
})
def parseAnimationHeader(self):
yield UInt32(self, "hdr_size", "Size of header (36 bytes)")
if self["hdr_size"].value != 36:
self.warning("Animation header with unknown size (%s)" % self["size"].value)
yield UInt32(self, "nb_frame", "Number of unique Icons in this cursor")
yield UInt32(self, "nb_step", "Number of Blits before the animation cycles")
yield UInt32(self, "cx")
yield UInt32(self, "cy")
yield UInt32(self, "bit_count")
yield UInt32(self, "planes")
yield UInt32(self, "jiffie_rate", "Default Jiffies (1/60th of a second) if rate chunk not present")
yield Bit(self, "is_icon")
yield NullBits(self, "padding", 31)
def parseAnimationSequence(self):
while not self.eof:
yield UInt32(self, "icon[]")
def formatJiffie(field):
sec = float(field.value) / 60
return humanDuration(timedelta(seconds=sec))
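# e.g. a stored rate of 90 jiffies is 90/60.0 = 1.5 seconds per animation step.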
def parseAnimationRate(self):
while not self.eof:
yield textHandler(UInt32(self, "rate[]"), formatJiffie)
def parseIcon(self):
yield SubFile(self, "icon_file", self["size"].value, parser_class=IcoFile)
class ChunkACON(Chunk):
TAG_INFO = Chunk.TAG_INFO.copy()
TAG_INFO.update({
'anih': ("anim_hdr", parseAnimationHeader, "Animation header"),
'seq ': ("anim_seq", parseAnimationSequence, "Animation sequence"),
'rate': ("anim_rate", parseAnimationRate, "Animation sequence"),
'icon': ("icon[]", parseIcon, "Icon"),
})
class RiffFile(Parser):
PARSER_TAGS = {
"id": "riff",
"category": "container",
"file_ext": ("avi", "cda", "wav", "ani"),
"min_size": 16*8,
"mime": (u"video/x-msvideo", u"audio/x-wav", u"audio/x-cda"),
# FIXME: Use regex "RIFF.{4}(WAVE|CDDA|AVI )"
"magic": (
("AVI LIST", 8*8),
("WAVEfmt ", 8*8),
("CDDAfmt ", 8*8),
("ACONanih", 8*8),
),
"description": "Microsoft RIFF container"
}
VALID_TYPES = {
"WAVE": (ChunkWAVE, u"audio/x-wav", u"Microsoft WAVE audio", ".wav"),
"CDDA": (ChunkCDDA, u"audio/x-cda", u"Microsoft Windows audio CD file (cda)", ".cda"),
"AVI ": (ChunkAVI, u"video/x-msvideo", u"Microsoft AVI video", ".avi"),
"ACON": (ChunkACON, u"image/x-ani", u"Microsoft Windows animated cursor", ".ani"),
}
endian = LITTLE_ENDIAN
def validate(self):
if self.stream.readBytes(0, 4) != "RIFF":
return "Wrong signature"
if self["type"].value not in self.VALID_TYPES:
return "Unknown RIFF content type"
return True
def createFields(self):
yield String(self, "signature", 4, "AVI header (RIFF)", charset="ASCII")
yield filesizeHandler(UInt32(self, "filesize", "File size"))
yield String(self, "type", 4, "Content type (\"AVI \", \"WAVE\", ...)", charset="ASCII")
# Choose chunk type depending on file type
try:
chunk_cls = self.VALID_TYPES[self["type"].value][0]
except KeyError:
chunk_cls = Chunk
# Parse all chunks up to filesize
while self.current_size < self["filesize"].value*8+8:
yield chunk_cls(self, "chunk[]")
if not self.eof:
yield RawBytes(self, "padding[]", (self.size-self.current_size)/8)
def createMimeType(self):
try:
return self.VALID_TYPES[self["type"].value][1]
except KeyError:
return None
def createDescription(self):
tag = self["type"].value
if tag == "AVI ":
desc = u"Microsoft AVI video"
if "headers/avi_hdr" in self:
header = self["headers/avi_hdr"]
desc += ": %ux%u pixels" % (header["width"].value, header["height"].value)
microsec = header["microsec_per_frame"].value
if microsec:
desc += ", %.1f fps" % (1000000.0 / microsec)
if "total_frame" in header and header["total_frame"].value:
delta = timedelta(microseconds=float(header["total_frame"].value) * microsec)
desc += ", " + humanDuration(delta)
return desc
else:
try:
return self.VALID_TYPES[tag][2]
except KeyError:
return u"Microsoft RIFF container"
def createContentSize(self):
size = (self["filesize"].value + 8) * 8
return min(size, self.stream.size)
def createFilenameSuffix(self):
try:
return self.VALID_TYPES[self["type"].value][3]
except KeyError:
return ".riff"
| gpl-3.0 | 7,779,239,212,187,020,000 | 3,347,375,131,289,763,000 | 37.574032 | 103 | 0.604582 | false |
imply/chuu | build/android/pylib/device_stats_monitor.py | 31 | 4305 | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Utilities for iotop/top style profiling for android."""
import collections
import json
import os
import subprocess
import sys
import urllib
import constants
import io_stats_parser
class DeviceStatsMonitor(object):
"""Class for collecting device stats such as IO/CPU usage.
Args:
adb: Instance of AndroidCommands.
hz: Frequency at which to sample device stats.
"""
DEVICE_PATH = constants.TEST_EXECUTABLE_DIR + '/device_stats_monitor'
PROFILE_PATH = (constants.DEVICE_PERF_OUTPUT_DIR +
'/device_stats_monitor.profile')
RESULT_VIEWER_PATH = os.path.abspath(os.path.join(
os.path.dirname(os.path.realpath(__file__)), 'device_stats_monitor.html'))
def __init__(self, adb, hz, build_type):
self._adb = adb
host_path = os.path.abspath(os.path.join(
constants.DIR_SOURCE_ROOT, 'out', build_type, 'device_stats_monitor'))
self._adb.PushIfNeeded(host_path, DeviceStatsMonitor.DEVICE_PATH)
self._hz = hz
def Start(self):
"""Starts device stats monitor on the device."""
self._adb.SetProtectedFileContents(DeviceStatsMonitor.PROFILE_PATH, '')
self._process = subprocess.Popen(
['adb', 'shell', '%s --hz=%d %s' % (
DeviceStatsMonitor.DEVICE_PATH, self._hz,
DeviceStatsMonitor.PROFILE_PATH)])
def StopAndCollect(self, output_path):
"""Stops monitoring and saves results.
Args:
output_path: Path to save results.
Returns:
String of URL to load results in browser.
"""
assert self._process
self._adb.KillAll(DeviceStatsMonitor.DEVICE_PATH)
self._process.wait()
profile = self._adb.GetFileContents(DeviceStatsMonitor.PROFILE_PATH)
results = collections.defaultdict(list)
last_io_stats = None
last_cpu_stats = None
for line in profile:
if ' mmcblk0 ' in line:
stats = io_stats_parser.ParseIoStatsLine(line)
if last_io_stats:
results['sectors_read'].append(stats.num_sectors_read -
last_io_stats.num_sectors_read)
results['sectors_written'].append(stats.num_sectors_written -
last_io_stats.num_sectors_written)
last_io_stats = stats
elif line.startswith('cpu '):
stats = self._ParseCpuStatsLine(line)
if last_cpu_stats:
results['user'].append(stats.user - last_cpu_stats.user)
results['nice'].append(stats.nice - last_cpu_stats.nice)
results['system'].append(stats.system - last_cpu_stats.system)
results['idle'].append(stats.idle - last_cpu_stats.idle)
results['iowait'].append(stats.iowait - last_cpu_stats.iowait)
results['irq'].append(stats.irq - last_cpu_stats.irq)
results['softirq'].append(stats.softirq- last_cpu_stats.softirq)
last_cpu_stats = stats
units = {
'sectors_read': 'sectors',
'sectors_written': 'sectors',
'user': 'jiffies',
'nice': 'jiffies',
'system': 'jiffies',
'idle': 'jiffies',
'iowait': 'jiffies',
'irq': 'jiffies',
'softirq': 'jiffies',
}
with open(output_path, 'w') as f:
f.write('display(%d, %s, %s);' % (self._hz, json.dumps(results), units))
return 'file://%s?results=file://%s' % (
DeviceStatsMonitor.RESULT_VIEWER_PATH, urllib.quote(output_path))
@staticmethod
def _ParseCpuStatsLine(line):
"""Parses a line of cpu stats into a CpuStats named tuple."""
# Field definitions: http://www.linuxhowtos.org/System/procstat.htm
cpu_stats = collections.namedtuple('CpuStats',
['device',
'user',
'nice',
'system',
'idle',
'iowait',
'irq',
'softirq',
])
fields = line.split()
return cpu_stats._make([fields[0]] + [int(f) for f in fields[1:8]])
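# Usage illustration with a made-up /proc/stat line (not executed anywhere):
def _cpu_stats_example():
    line = 'cpu  175906 62448 71521 4073623 117105 12 2579'
    # -> CpuStats(device='cpu', user=175906, nice=62448, system=71521,
    #             idle=4073623, iowait=117105, irq=12, softirq=2579)
    return DeviceStatsMonitor._ParseCpuStatsLine(line)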
| bsd-3-clause | -910,832,750,237,919,900 | 2,427,504,071,661,594,000 | 36.112069 | 80 | 0.586063 | false |
anryko/ansible | lib/ansible/plugins/action/nxos_file_copy.py | 9 | 22728 | #
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import copy
import hashlib
import os
import re
import time
from ansible.errors import AnsibleError
from ansible.module_utils._text import to_text, to_bytes, to_native
from ansible.module_utils.common import validation
from ansible.module_utils.connection import Connection
from ansible.plugins.action import ActionBase
from ansible.utils.display import Display
from ansible.module_utils.compat.paramiko import paramiko
from ansible.module_utils import six
try:
from scp import SCPClient
HAS_SCP = True
except ImportError:
HAS_SCP = False
try:
import pexpect
HAS_PEXPECT = True
except ImportError:
HAS_PEXPECT = False
display = Display()
class ActionModule(ActionBase):
def process_playbook_values(self):
''' Get playbook values and perform input validation '''
argument_spec = dict(
vrf=dict(type='str', default='management'),
connect_ssh_port=dict(type='int', default=22),
file_system=dict(type='str', default='bootflash:'),
file_pull=dict(type='bool', default=False),
file_pull_timeout=dict(type='int', default=300),
file_pull_compact=dict(type='bool', default=False),
file_pull_kstack=dict(type='bool', default=False),
local_file=dict(type='path'),
local_file_directory=dict(type='path'),
remote_file=dict(type='path'),
remote_scp_server=dict(type='str'),
remote_scp_server_user=dict(type='str'),
remote_scp_server_password=dict(no_log=True),
)
playvals = {}
# Process key value pairs from playbook task
for key in argument_spec.keys():
playvals[key] = self._task.args.get(key, argument_spec[key].get('default'))
if playvals[key] is None:
continue
option_type = argument_spec[key].get('type', 'str')
try:
if option_type == 'str':
playvals[key] = validation.check_type_str(playvals[key])
elif option_type == 'int':
playvals[key] = validation.check_type_int(playvals[key])
elif option_type == 'bool':
playvals[key] = validation.check_type_bool(playvals[key])
elif option_type == 'path':
playvals[key] = validation.check_type_path(playvals[key])
else:
raise AnsibleError('Unrecognized type <{0}> for playbook parameter <{1}>'.format(option_type, key))
except (TypeError, ValueError) as e:
raise AnsibleError("argument %s is of type %s and we were unable to convert to %s: %s"
% (key, type(playvals[key]), option_type, to_native(e)))
# Validate playbook dependencies
if playvals['file_pull']:
if playvals.get('remote_file') is None:
raise AnsibleError('Playbook parameter <remote_file> required when <file_pull> is True')
if playvals.get('remote_scp_server') is None:
raise AnsibleError('Playbook parameter <remote_scp_server> required when <file_pull> is True')
if playvals['remote_scp_server'] or \
playvals['remote_scp_server_user']:
if None in (playvals['remote_scp_server'],
playvals['remote_scp_server_user']):
params = '<remote_scp_server>, <remote_scp_server_user>'
raise AnsibleError('Playbook parameters {0} must be set together'.format(params))
return playvals
def check_library_dependencies(self, file_pull):
if file_pull:
if not HAS_PEXPECT:
msg = 'library pexpect is required when file_pull is True but does not appear to be '
msg += 'installed. It can be installed using `pip install pexpect`'
raise AnsibleError(msg)
else:
if paramiko is None:
msg = 'library paramiko is required when file_pull is False but does not appear to be '
msg += 'installed. It can be installed using `pip install paramiko`'
raise AnsibleError(msg)
if not HAS_SCP:
msg = 'library scp is required when file_pull is False but does not appear to be '
msg += 'installed. It can be installed using `pip install scp`'
raise AnsibleError(msg)
def md5sum_check(self, dst, file_system):
command = 'show file {0}{1} md5sum'.format(file_system, dst)
remote_filehash = self.conn.exec_command(command)
remote_filehash = to_bytes(remote_filehash, errors='surrogate_or_strict')
local_file = self.playvals['local_file']
try:
with open(local_file, 'rb') as f:
filecontent = f.read()
except (OSError, IOError) as exc:
raise AnsibleError('Error reading the file: {0}'.format(to_text(exc)))
filecontent = to_bytes(filecontent, errors='surrogate_or_strict')
local_filehash = hashlib.md5(filecontent).hexdigest()
if local_filehash == remote_filehash:
return True
else:
return False
def remote_file_exists(self, remote_file, file_system):
command = 'dir {0}/{1}'.format(file_system, remote_file)
body = self.conn.exec_command(command)
if 'No such file' in body:
return False
else:
return self.md5sum_check(remote_file, file_system)
def verify_remote_file_exists(self, dst, file_system):
command = 'dir {0}/{1}'.format(file_system, dst)
body = self.conn.exec_command(command)
if 'No such file' in body:
return 0
return body.split()[0].strip()
def local_file_exists(self, file):
return os.path.isfile(file)
def get_flash_size(self, file_system):
command = 'dir {0}'.format(file_system)
body = self.conn.exec_command(command)
match = re.search(r'(\d+) bytes free', body)
if match:
bytes_free = match.group(1)
return int(bytes_free)
match = re.search(r'No such file or directory', body)
if match:
raise AnsibleError('Invalid nxos filesystem {0}'.format(file_system))
else:
raise AnsibleError('Unable to determine size of filesystem {0}'.format(file_system))
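# For reference, a made-up tail of NX-OS 'dir bootflash:' output that the
# '(\d+) bytes free' pattern above would match:
#
#     4710793216 bytes used
#     1386217472 bytes free
#     6097010688 bytes total
#
# get_flash_size() would return 1386217472 for this output.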
def enough_space(self, file, file_system):
flash_size = self.get_flash_size(file_system)
file_size = os.path.getsize(file)
if file_size > flash_size:
return False
return True
def transfer_file_to_device(self, remote_file):
timeout = self.socket_timeout
local_file = self.playvals['local_file']
file_system = self.playvals['file_system']
if not self.enough_space(local_file, file_system):
raise AnsibleError('Could not transfer file. Not enough space on device.')
# frp = full_remote_path, flp = full_local_path
frp = '{0}{1}'.format(file_system, remote_file)
flp = os.path.join(os.path.abspath(local_file))
try:
self.conn.copy_file(source=flp, destination=frp, proto='scp', timeout=timeout)
except Exception as exc:
self.results['failed'] = True
self.results['msg'] = ('Exception received : %s' % exc)
def file_push(self):
local_file = self.playvals['local_file']
remote_file = self.playvals['remote_file'] or os.path.basename(local_file)
file_system = self.playvals['file_system']
if not self.local_file_exists(local_file):
raise AnsibleError('Local file {0} not found'.format(local_file))
remote_file = remote_file or os.path.basename(local_file)
remote_exists = self.remote_file_exists(remote_file, file_system)
if not remote_exists:
self.results['changed'] = True
file_exists = False
else:
self.results['transfer_status'] = 'No Transfer: File already copied to remote device.'
file_exists = True
if not self.play_context.check_mode and not file_exists:
self.transfer_file_to_device(remote_file)
self.results['transfer_status'] = 'Sent: File copied to remote device.'
self.results['local_file'] = local_file
if remote_file is None:
remote_file = os.path.basename(local_file)
self.results['remote_file'] = remote_file
def copy_file_from_remote(self, local, local_file_directory, file_system):
self.results['failed'] = False
nxos_hostname = self.play_context.remote_addr
nxos_username = self.play_context.remote_user
nxos_password = self.play_context.password
port = self.playvals['connect_ssh_port']
# Build copy command components that will be used to initiate copy from the nxos device.
cmdroot = 'copy scp://'
ruser = self.playvals['remote_scp_server_user'] + '@'
rserver = self.playvals['remote_scp_server']
rfile = self.playvals['remote_file'] + ' '
vrf = ' vrf ' + self.playvals['vrf']
local_dir_root = '/'
if self.playvals['file_pull_compact']:
compact = ' compact '
else:
compact = ''
if self.playvals['file_pull_kstack']:
kstack = ' use-kstack '
else:
kstack = ''
def process_outcomes(session, timeout=None):
if timeout is None:
timeout = 10
outcome = {}
outcome['user_response_required'] = False
outcome['password_prompt_detected'] = False
outcome['existing_file_with_same_name'] = False
outcome['final_prompt_detected'] = False
outcome['copy_complete'] = False
outcome['expect_timeout'] = False
outcome['error'] = False
outcome['error_data'] = None
# Possible outcomes key:
# 0) - Are you sure you want to continue connecting (yes/no)
# 1) - Password: or @servers's password:
# 2) - Warning: There is already a file existing with this name. Do you want to overwrite (y/n)?[n]
# 3) - Timeout conditions
# 4) - No space on nxos device file_system
# 5) - Username/Password or file permission issues
# 6) - File does not exist on remote scp server
# 7) - invalid nxos command
# 8) - compact option not supported
# 9) - compaction attempt failed
# 10) - other failures like attempting to compact non image file
# 11) - failure to resolve hostname
# 12) - Too many authentication failures
# 13) - Copy to / from this server not permitted
# 14) - Copy completed without issues
# 15) - nxos_router_prompt#
# 16) - pexpect timeout
possible_outcomes = [r'sure you want to continue connecting \(yes/no\)\? ',
'(?i)Password: ',
'file existing with this name',
'timed out',
'(?i)No space.*#',
'(?i)Permission denied.*#',
'(?i)No such file.*#',
'.*Invalid command.*#',
'Compaction is not supported on this platform.*#',
'Compact of.*failed.*#',
'(?i)Failed.*#',
'(?i)Could not resolve hostname',
'(?i)Too many authentication failures',
r'(?i)Copying to\/from this server name is not permitted',
'(?i)Copy complete',
r'#\s',
pexpect.TIMEOUT]
index = session.expect(possible_outcomes, timeout=timeout)
# Each index maps to items in possible_outcomes
if index == 0:
outcome['user_response_required'] = True
return outcome
elif index == 1:
outcome['password_prompt_detected'] = True
return outcome
elif index == 2:
outcome['existing_file_with_same_name'] = True
return outcome
elif index in [3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13]:
before = session.before.strip().replace(' \x08', '')
after = session.after.strip().replace(' \x08', '')
outcome['error'] = True
outcome['error_data'] = 'COMMAND {0} ERROR {1}'.format(before, after)
return outcome
elif index == 14:
outcome['copy_complete'] = True
return outcome
elif index == 15:
outcome['final_prompt_detected'] = True
return outcome
elif index == 16:
# The before property will contain all text up to the expected string pattern.
# The after string will contain the text that was matched by the expected pattern.
outcome['expect_timeout'] = True
outcome['error_data'] = 'Expect Timeout error occurred: BEFORE {0} AFTER {1}'.format(session.before, session.after)
return outcome
else:
outcome['error'] = True
outcome['error_data'] = 'Unrecognized error occurred: BEFORE {0} AFTER {1}'.format(session.before, session.after)
return outcome
return outcome
# Spawn pexpect connection to NX-OS device.
nxos_session = pexpect.spawn('ssh ' + nxos_username + '@' + nxos_hostname + ' -p' + str(port))
# There might be multiple user_response_required prompts or intermittent timeouts
# spawning the expect session so loop up to 24 times during the spawn process.
max_attempts = 24
for connect_attempt in range(max_attempts):
outcome = process_outcomes(nxos_session)
if outcome['user_response_required']:
nxos_session.sendline('yes')
continue
if outcome['password_prompt_detected']:
time.sleep(3)
nxos_session.sendline(nxos_password)
continue
if outcome['final_prompt_detected']:
break
if outcome['error'] or outcome['expect_timeout']:
# Error encountered, try to spawn expect session n more times up to max_attempts - 1
if connect_attempt < max_attempts:
outcome['error'] = False
outcome['expect_timeout'] = False
nxos_session.close()
nxos_session = pexpect.spawn('ssh ' + nxos_username + '@' + nxos_hostname + ' -p' + str(port))
continue
self.results['failed'] = True
outcome['error_data'] = re.sub(nxos_password, '', outcome['error_data'])
self.results['error_data'] = 'Failed to spawn expect session! ' + outcome['error_data']
nxos_session.close()
return
else:
# The before property will contain all text up to the expected string pattern.
# The after string will contain the text that was matched by the expected pattern.
msg = 'After {0} attempts, failed to spawn pexpect session to {1} '
msg += 'BEFORE: {2}, AFTER: {3}'
error_msg = msg.format(connect_attempt, nxos_hostname, nxos_session.before, nxos_session.after)
error_msg = re.sub(nxos_password, '', error_msg)
nxos_session.close()
raise AnsibleError(error_msg)
# Create local file directory under NX-OS filesystem if
# local_file_directory playbook parameter is set.
if local_file_directory:
dir_array = local_file_directory.split('/')
for each in dir_array:
if each:
mkdir_cmd = 'mkdir ' + local_dir_root + each
nxos_session.sendline(mkdir_cmd)
outcome = process_outcomes(nxos_session)
if outcome['error'] or outcome['expect_timeout']:
self.results['mkdir_cmd'] = mkdir_cmd
self.results['failed'] = True
outcome['error_data'] = re.sub(nxos_password, '', outcome['error_data'])
self.results['error_data'] = outcome['error_data']
return
local_dir_root += each + '/'
# Initiate file copy
copy_cmd = (cmdroot + ruser + rserver + rfile + file_system + local_dir_root + local + compact + vrf + kstack)
self.results['copy_cmd'] = copy_cmd
nxos_session.sendline(copy_cmd)
for copy_attempt in range(6):
outcome = process_outcomes(nxos_session, self.playvals['file_pull_timeout'])
if outcome['user_response_required']:
nxos_session.sendline('yes')
continue
if outcome['password_prompt_detected']:
if self.playvals.get('remote_scp_server_password'):
nxos_session.sendline(self.playvals['remote_scp_server_password'])
else:
err_msg = 'Remote scp server {0} requires a password.'.format(rserver)
err_msg += ' Set the <remote_scp_server_password> playbook parameter or configure nxos device for passwordless scp'
raise AnsibleError(err_msg)
continue
if outcome['existing_file_with_same_name']:
nxos_session.sendline('y')
continue
if outcome['copy_complete']:
self.results['transfer_status'] = 'Received: File copied/pulled to nxos device from remote scp server.'
break
if outcome['error'] or outcome['expect_timeout']:
self.results['failed'] = True
outcome['error_data'] = re.sub(nxos_password, '', outcome['error_data'])
if self.playvals.get('remote_scp_server_password'):
outcome['error_data'] = re.sub(self.playvals['remote_scp_server_password'], '', outcome['error_data'])
self.results['error_data'] = outcome['error_data']
nxos_session.close()
return
else:
# The before property will contain all text up to the expected string pattern.
# The after string will contain the text that was matched by the expected pattern.
msg = 'After {0} attempts, failed to copy file to {1} '
msg += 'BEFORE: {2}, AFTER: {3}, CMD: {4}'
error_msg = msg.format(copy_attempt, nxos_hostname, nxos_session.before, nxos_session.after, copy_cmd)
error_msg = re.sub(nxos_password, '', error_msg)
if self.playvals.get('remote_scp_server_password'):
error_msg = re.sub(self.playvals['remote_scp_server_password'], '', error_msg)
nxos_session.close()
raise AnsibleError(error_msg)
nxos_session.close()
def file_pull(self):
local_file = self.playvals['local_file']
remote_file = self.playvals['remote_file']
file_system = self.playvals['file_system']
# Note: This is the local file directory on the remote nxos device.
local_file_dir = self.playvals['local_file_directory']
local_file = local_file or self.playvals['remote_file'].split('/')[-1]
if not self.play_context.check_mode:
self.copy_file_from_remote(local_file, local_file_dir, file_system)
if not self.results['failed']:
self.results['changed'] = True
self.results['remote_file'] = remote_file
if local_file_dir:
dir = local_file_dir
else:
dir = ''
self.results['local_file'] = file_system + dir + '/' + local_file
self.results['remote_scp_server'] = self.playvals['remote_scp_server']
# This is the main run method for the action plugin to copy files
def run(self, tmp=None, task_vars=None):
socket_path = None
self.play_context = copy.deepcopy(self._play_context)
self.results = super(ActionModule, self).run(task_vars=task_vars)
if self.play_context.connection.split('.')[-1] != 'network_cli':
# Plugin is supported only with network_cli
self.results['failed'] = True
self.results['msg'] = 'Connection type must be fully qualified name for network_cli connection type, got %s' % self.play_context.connection
return self.results
# Get playbook values
self.playvals = self.process_playbook_values()
file_pull = self.playvals['file_pull']
self.check_library_dependencies(file_pull)
if socket_path is None:
socket_path = self._connection.socket_path
self.conn = Connection(socket_path)
# Call get_capabilities() to start the connection to the device.
self.conn.get_capabilities()
self.socket_timeout = self.conn.get_option('persistent_command_timeout')
# This action plugin support two modes of operation.
# - file_pull is False - Push files from the ansible controller to nxos switch.
# - file_pull is True - Initiate copy from the device to pull files to the nxos switch.
self.results['transfer_status'] = 'No Transfer'
self.results['file_system'] = self.playvals['file_system']
if file_pull:
self.file_pull()
else:
self.file_push()
return self.results
| gpl-3.0 | -2,685,361,830,262,120,000 | 4,348,838,441,171,857,400 | 44.456 | 151 | 0.573258 | false |
lorin/umdinst | umdinst/setup/verifyperms.py | 1 | 3064 | #!/usr/bin/env python
import os
from stat import *
def parent(path):
"""Return the parent of the specified directory"""
# Remove the trailing slash, if any
if path.endswith('/'):
return os.path.dirname(path[:-1])
else:
return os.path.dirname(path)
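# e.g. parent('/usr/local/') == parent('/usr/local') == '/usr'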
def create_and_set_permissions(dirname,perms):
"""Create a directory and set permissions"""
try:
os.stat(dirname) # Will raise OSError if the file doesn't exist
if not os.path.isdir(dirname):
raise ValueError, "%s was specified as a directory, but it is not a directory" % mode
except OSError:
print "Directory %s does not exist, creating" % dirname
#os.mkdir(dirname)
os.makedirs(dirname)
# Set permissions on the directory directory
os.chmod(dirname,perms)
def check_parent_permissions(dirname,perms):
"""Check the permissions of the directory its parents to
make sure the appropriate permissions are there."""
pathname = dirname
while(pathname != ''):
st = os.stat(pathname)
dirperms = S_IMODE(st[ST_MODE])
if (dirperms & perms) < perms:
#raise ValueError, pathname + " does not have the right permissions."
print '''Error: %s does not have the right permissions.
The installation mode specified requires the installation directory
to be readable and executable by other users. Change the permission setting,
or change the installation directory with the '--prefix' option.''' % pathname
import sys
sys.exit(-1)
pathname = parent(pathname)
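# Equivalent positive form of the bitmask test above (sketch only): a
# directory passes when every required mode bit is present in its mode.
def _has_required_perms(path, required):
    mode = S_IMODE(os.stat(path)[ST_MODE])
    return (mode & required) == required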
def check(installdir):
# Sanity check, make sure that the directories up to this point have
# the appropriate permissions
#
permissions_are_ok = True
# User and group read execute
grp_rx_perms = S_IRWXU | S_IRGRP | S_IXGRP
# All user read execute
all_rx_perms = S_IRWXU | S_IRGRP | S_IXGRP | S_IROTH | S_IXOTH
# The parent directories may not actually all have user write access
# Some systems don't allow all users to access all parent directories,
# So we change it to checking for group accesses.
check_perms = grp_rx_perms & ~S_IWUSR
try:
check_parent_permissions(parent(installdir),check_perms)
except OSError, e:
# I'm going to be naive here and assume that errno's are the same across
# all systems. Hopefully it's a POSIX thing.
if e.errno==2: # No such file or directory
raise ValueError, "There is a problem with the value for 'installdir' in config.py: " + installdir
else:
permissions_are_ok = False
#create_and_set_permissions(installdir,all_rx_perms)
#check_parent_permissions(parent(logfiledir),check_perms)
#allperms = S_IRWXU | S_IRWXG | S_IRWXO
#create_and_set_permissions(logfiledir,allperms)
if not permissions_are_ok:
print "Warning: Cannot confirm that permissions are set correctly."
print "Please check to make sure that subjects have read access to umdinst/setup and umdinst/bin, and write accesss to umdinst/.data"
| bsd-3-clause | 6,261,257,376,952,887,000 | -4,458,307,654,632,435,000 | 38.282051 | 141 | 0.679178 | false |
hydraplatform/hydra-base | unittests/test_templates.py | 1 | 37663 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# (c) Copyright 2013 to 2017 University of Manchester
#
# HydraPlatform is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# HydraPlatform is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with HydraPlatform. If not, see <http://www.gnu.org/licenses/>
#
import server
from lxml import etree
from hydra_base import config
from hydra_base.lib.objects import JSONObject
from hydra_base import *
import logging
from hydra_base.exceptions import HydraError
import util
import datetime
log = logging.getLogger(__name__)
class TemplatesTest(server.SoapServerTest):
"""
Test for templates
"""
def set_template(self, template):
if template is None:
self.template = self.test_add_template()
else:
self.template = template
def get_template(self):
if hasattr(self, 'template'):
try:
get_template(self.template.id)
return self.template
except:
self.template = self.test_add_template()
else:
self.template = self.test_add_template()
return self.template
def test_add_xml(self):
template_file = open('template.xml', 'r')
file_contents = template_file.read()
new_tmpl = JSONObject(import_template_xml(file_contents))
assert new_tmpl is not None, "Adding template from XML was not successful!"
assert len(new_tmpl.templatetypes) == 2
for ta in new_tmpl.templatetypes[0].typeattrs:
assert ta.data_type == 'scalar'
assert new_tmpl.templatetypes[0].typeattrs[-1].properties is not None
assert eval(new_tmpl.templatetypes[0].typeattrs[-1].properties)['template_property'] == "Test property from template"
return new_tmpl
def test_get_xml(self):
xml_tmpl = self.test_add_xml()
db_template = get_template_as_xml(xml_tmpl.id)
assert db_template is not None
template_xsd_path = config.get('templates', 'template_xsd_path')
xmlschema_doc = etree.parse(template_xsd_path)
xmlschema = etree.XMLSchema(xmlschema_doc)
xml_tree = etree.fromstring(db_template)
xmlschema.assertValid(xml_tree)
def test_get_dict(self):
#Upload the xml file initally to avoid having to manage 2 template files
xml_tmpl = self.test_add_xml()
template_dict = get_template_as_dict(xml_tmpl.id)
#Error that there's already a template with this name.
self.assertRaises(HydraError, import_template_dict, template_dict, allow_update=False)
typename = template_dict['template']['templatetypes'][0]['name']
template_dict['template']['templatetypes'][0].name = typename + "_updated"
#Finds a template with this name and updates it to mirror this dict.
#This includes deleting types if they're not in this dict.
#Changing the name of a type has this effect, as a new template does not have
#any reference to existing types in Hydra.
updated_template = JSONObject(import_template_dict(template_dict))
assert updated_template['templatetypes'][-1]['name'] == typename + "_updated"
#Now put it back to the original name so other tests will work
template_dict['template']['templatetypes'][0].name = typename
updated_template = JSONObject(import_template_dict(template_dict))
assert updated_template['templatetypes'][-1]['name'] == typename
def test_add_template(self):
link_attr_1 = self.create_attr("link_attr_1", dimension='Pressure')
link_attr_2 = self.create_attr("link_attr_2", dimension='Speed')
node_attr_1 = self.create_attr("node_attr_1", dimension='Volume')
node_attr_2 = self.create_attr("node_attr_2", dimension='Speed')
net_attr_1 = self.create_attr("net_attr_2", dimension='Speed')
template = JSONObject()
template.name = 'Test template @ %s'%datetime.datetime.now()
layout = {}
layout['groups'] = '<groups>...</groups>'
template.layout = layout
template.templatetypes = []
#**********************
#type 1 #
#**********************
type1 = JSONObject()
type1.name = "Node type"
type1.alias = "Node type alias"
type1.resource_type = 'NODE'
type1.typeattrs = []
tattr_1 = JSONObject()
tattr_1.attr_id = node_attr_1.id
tattr_1.description = "Type attribute 1 description"
tattr_1.properties = {'test_property': "test property add type"}
tattr_1.data_restriction = {'LESSTHAN': 10, 'NUMPLACES': 1}
type1.typeattrs.append(tattr_1)
tattr_2 = JSONObject()
tattr_2.attr_id = node_attr_2.id
tattr_2.description = "Type attribute 2 description"
tattr_2.data_restriction = {'INCREASING': None}
type1.typeattrs.append(tattr_2)
template.templatetypes.append(type1)
#**********************
#type 2 #
#**********************
type2 = JSONObject()
type2.name = "Link type"
type2.alias = "Link type alias"
type2.resource_type = 'LINK'
type2.typeattrs = []
tattr_1 = JSONObject()
tattr_1.attr_id = link_attr_1.id
type2.typeattrs.append(tattr_1)
tattr_2 = JSONObject()
tattr_2.attr_id = link_attr_2.id
type2.typeattrs.append(tattr_2)
template.templatetypes.append(type2)
#**********************
#type 3 #
#**********************
type3 = JSONObject()
type3.name = "Network Type"
type3.alias = "Network Type alias"
type3.resource_type = 'NETWORK'
type3.typeattrs = []
tattr_3 = JSONObject()
tattr_3.attr_id = net_attr_1.id
tattr_3.data_restriction = {}
type3.typeattrs.append(tattr_3)
template.templatetypes.append(type3)
new_template_i = add_template(template)
#TODO: HACK to load the attr
for tt in new_template_i.templatetypes:
for ta in tt.typeattrs:
ta.attr
new_template_j = JSONObject(new_template_i)
assert new_template_j.name == template.name, "Names are not the same!"
assert str(new_template_j.layout) == str(template.layout), "Layouts are not the same!"
assert new_template_j.id is not None, "New Template has no ID!"
assert new_template_j.id > 0, "New Template has incorrect ID!"
assert len(new_template_j.templatetypes) == 3, "Resource types did not add correctly"
for t in new_template_j.templatetypes[0].typeattrs:
assert t.attr_id in (node_attr_1.id, node_attr_2.id), "Node types were not added correctly!"
for t in new_template_j.templatetypes[1].typeattrs:
assert t.attr_id in (link_attr_1.id, link_attr_2.id), "Link types were not added correctly!"
return new_template_j
def test_update_template(self):
attr_1 = self.create_attr("link_attr_1", dimension='Pressure')
attr_2 = self.create_attr("link_attr_2", dimension='Speed')
attr_3 = self.create_attr("node_attr_1", dimension='Volume')
template = JSONObject()
template.name = 'Test Template @ %s'%datetime.datetime.now()
template.templatetypes = []
type_1 = JSONObject()
type_1.name = "Node type 2"
type_1.alias = "Node type 2 alias"
type_1.resource_type = 'NODE'
type_1.typeattrs = []
type_2 = JSONObject()
type_2.name = "Link type 2"
type_2.alias = "Link type 2 alias"
type_2.resource_type = 'LINK'
type_2.typeattrs = []
tattr_1 = JSONObject()
tattr_1.attr_id = attr_1.id
tattr_1.unit = 'bar'
tattr_1.description = "typeattr description 1"
tattr_1.properties = {"test_property": "property value"}
type_1.typeattrs.append(tattr_1)
tattr_2 = JSONObject()
tattr_2.attr_id = attr_2.id
tattr_2.unit = 'mph'
tattr_2.description = "typeattr description 2"
type_2.typeattrs.append(tattr_2)
template.templatetypes.append(type_1)
template.templatetypes.append(type_2)
new_template_i = add_template(template)
new_template_j = JSONObject(new_template_i)
assert new_template_j.name == template.name, "Names are not the same!"
assert new_template_j.id is not None, "New Template has no ID!"
assert new_template_j.id > 0, "New Template has incorrect ID!"
assert len(new_template_j.templatetypes) == 2, "Resource types did not add correctly"
assert len(new_template_j.templatetypes[0].typeattrs) == 1, "Resource type attrs did not add correctly"
assert new_template_j.templatetypes[0].typeattrs[0].unit == 'bar'
#update the name of one of the types
new_template_j.templatetypes[0].name = "Test type 3"
updated_type_id = new_template_j.templatetypes[0].id
#add an template attr to one of the types
tattr_3 = JSONObject()
tattr_3.attr_id = attr_3.id
tattr_3.description = "updated typeattr description 1"
tattr_3.properties = {"test_property_of_added_type": "property value"}
new_template_j.templatetypes[0].typeattrs.append(tattr_3)
updated_template_i = update_template(new_template_j)
updated_template_j = JSONObject(updated_template_i)
assert updated_template_j.name == template.name, "Names are not the same!"
updated_type = None
for tmpltype in new_template_j.templatetypes:
if tmpltype.id == updated_type_id:
updated_type = tmpltype
break
assert updated_type.name == "Test type 3", "Resource types did not update correctly"
assert len(updated_type.typeattrs) == 2, "Resource type template attr did not update correctly"
#Test that when setting a unit on a type attr, it matches the dimension of its attr
#In this case, setting m^3(Volume) fails as the attr has a dimension of 'Pressure'
updated_template_j.templatetypes[0].typeattrs[0].unit = 'm^3'
self.assertRaises(HydraError, update_template, updated_template_j)
def test_delete_template(self):
network = self.create_network_with_data()
new_template = self.test_add_template()
retrieved_template_i = get_template(new_template.id)
assert retrieved_template_i is not None
retrieved_template_j = JSONObject(retrieved_template_i)
apply_template_to_network(retrieved_template_j.id, network.id)
updated_network = get_network(network.id, user_id=self.user_id)
assert len(updated_network.types) == 2
expected_net_type = None
for t in new_template.templatetypes:
if t.resource_type == 'NETWORK':
expected_net_type = t.id
network_type = updated_network.types[1].type_id
assert expected_net_type == network_type
delete_template(new_template.id)
self.assertRaises(HydraError, get_template, new_template.id)
network_deleted_templatetypes = get_network(network.id, user_id=self.user_id)
assert len(network_deleted_templatetypes.types) == 1
def test_add_type(self):
template = self.get_template()
attr_1 = self.create_attr("link_attr_1", dimension='Pressure')
attr_2 = self.create_attr("link_attr_2", dimension='Speed')
attr_3 = self.create_attr("node_attr_1", dimension='Volume')
templatetype = JSONObject()
templatetype.name = "Test type name @ %s"%(datetime.datetime.now())
templatetype.alias = "%s alias" % templatetype.name
templatetype.resource_type = 'LINK'
templatetype.template_id = template.id
templatetype.layout = {"color": "red", "shapefile": "blah.shp"}
templatetype.typeattrs = []
tattr_1 = JSONObject()
tattr_1.attr_id = attr_1.id
tattr_1.description = "added type description 1"
tattr_1.properties = {"add_type_test_property": "property value"}
templatetype.typeattrs.append(tattr_1)
tattr_2 = JSONObject()
tattr_2.attr_id = attr_2.id
tattr_1.description = "added type description 2"
templatetype.typeattrs.append(tattr_2)
tattr_3 = JSONObject()
tattr_3.attr_id = attr_3.id
templatetype.typeattrs.append(tattr_3)
new_type_i = add_templatetype(templatetype)
new_type_j = JSONObject(new_type_i)
assert new_type_j.name == templatetype.name, "Names are not the same!"
assert new_type_j.alias == templatetype.alias, "Aliases are not the same!"
assert json.loads(new_type_j.layout) == templatetype.layout, "Layouts are not the same!"
assert new_type_j.id is not None, "New type has no ID!"
assert new_type_j.id > 0, "New type has incorrect ID!"
assert len(new_type_j.typeattrs) == 3, "Resource type attrs did not add correctly"
return new_type_j
def test_update_type(self):
template = self.get_template()
attr_1 = self.create_attr("link_attr_1", dimension='Pressure')
attr_2 = self.create_attr("link_attr_2", dimension='Speed')
attr_3 = self.create_attr("node_attr_1", dimension='Volume')
templatetype = JSONObject()
templatetype.name = "Test type name @ %s" % (datetime.datetime.now())
templatetype.alias = templatetype.name + " alias"
templatetype.template_id = self.get_template().id
templatetype.resource_type = 'NODE'
templatetype.template_id = template.id
tattr_1 = JSONObject()
tattr_1.attr_id = attr_1.id
tattr_2 = JSONObject()
tattr_2.attr_id = attr_2.id
templatetype.typeattrs = [tattr_1, tattr_2]
new_type_i = add_templatetype(templatetype)
new_type_j = JSONObject(new_type_i)
assert new_type_j.name == templatetype.name, "Names are not the same!"
assert new_type_j.alias == templatetype.alias, "Aliases are not the same!"
        assert new_type_j.id is not None, "New type has no ID!"
assert new_type_j.id > 0, "New type has incorrect ID!"
assert len(new_type_j.typeattrs) == 2, "Resource type attrs did not add correctly"
new_type_j.name = "Updated type name @ %s"%(datetime.datetime.now())
new_type_j.alias = templatetype.name + " alias"
new_type_j.resource_type = 'NODE'
tattr_3 = JSONObject()
tattr_3.attr_id = attr_3.id
tattr_3.description = "Descripton of added typeattr"
tattr_3.properties = {"update_type_test_property": "property value"}
new_type_j.typeattrs.append(tattr_3)
new_type_j.typeattrs[0].description = "Updated typeattr description"
updated_type_i = update_templatetype(new_type_j)
updated_type_j = JSONObject(updated_type_i)
assert new_type_j.name == updated_type_j.name, "Names are not the same!"
assert new_type_j.alias == updated_type_j.alias, "Aliases are not the same!"
        assert new_type_j.id == updated_type_j.id, "type ids do not match!"
assert new_type_j.id > 0, "New type has incorrect ID!"
assert new_type_j.typeattrs[0].description == "Updated typeattr description"
assert new_type_j.typeattrs[-1].properties['update_type_test_property'] == "property value"
assert len(updated_type_j.typeattrs) == 3, "Template type attrs did not update correctly"
def test_delete_type(self):
new_template = self.test_add_template()
retrieved_template = get_template(new_template.id)
assert retrieved_template is not None
templatetype = new_template.templatetypes[0]
delete_templatetype(templatetype.id)
updated_template = JSONObject(get_template(new_template.id))
for tmpltype in updated_template.templatetypes:
assert tmpltype.id != templatetype.id
def test_get_type(self):
new_type = self.get_template().templatetypes[0]
new_type = get_templatetype(new_type.id)
assert new_type is not None, "Resource type attrs not retrived by ID!"
def test_get_type_by_name(self):
new_type = self.get_template().templatetypes[0]
new_type = get_templatetype_by_name(new_type.template_id, new_type.name)
assert new_type is not None, "Resource type attrs not retrived by name!"
def test_add_typeattr(self):
attr_1 = self.create_attr("link_attr_1", dimension='Pressure')
attr_2 = self.create_attr("link_attr_2", dimension='Speed')
attr_3 = self.create_attr("node_attr_1", dimension='Volume')
templatetype = JSONObject()
templatetype.name = "Test type name @ %s"%(datetime.datetime.now())
templatetype.alias = templatetype.name + " alias"
templatetype.template_id = self.get_template().id
templatetype.resource_type = 'NODE'
tattr_1 = JSONObject()
tattr_1.attr_id = attr_1.id
tattr_2 = JSONObject()
tattr_2.attr_id = attr_2.id
tattr_2.description = "Description of typeattr from test_add_typeattr"
tattr_2.properties = {"test_property":"property value"}
templatetype.typeattrs = [tattr_1, tattr_2]
new_type = JSONObject(add_templatetype(templatetype))
tattr_3 = JSONObject()
tattr_3.attr_id = attr_3.id
tattr_3.type_id = new_type.id
tattr_3.description = "Description of additional typeattr from test_add_typeattr"
tattr_3.properties = {"add_typeattr_test_property": "property value"}
log.info("Adding Test Type attribute")
add_typeattr(tattr_3)
updated_type = JSONObject(get_templatetype(new_type.id,user_id=self.user_id))
assert len(updated_type.typeattrs) == 3, "Type attr did not add correctly"
assert eval(updated_type.typeattrs[-1].properties)['add_typeattr_test_property'] == "property value"
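        # Added note (not in the original test): `properties` comes back as a
        # stringified dict here, hence the eval(); json.loads would be a safer
        # parse if the service guaranteed strict JSON.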
def test_delete_typeattr(self):
template = self.test_add_template()
attr_1 = self.create_attr("link_attr_1", dimension='Pressure')
attr_2 = self.create_attr("link_attr_2", dimension='Speed')
templatetype = JSONObject()
templatetype.name = "Test type name @ %s"%(datetime.datetime.now())
templatetype.alias = templatetype.name + " alias"
templatetype.resource_type = 'NODE'
templatetype.template_id = template.id
tattr_1 = JSONObject()
tattr_1.attr_id = attr_1.id
tattr_2 = JSONObject()
tattr_2.attr_id = attr_2.id
templatetype.typeattrs = [tattr_1, tattr_2]
new_type = JSONObject(add_templatetype(templatetype))
tattr_2.type_id = new_type.id
delete_typeattr(tattr_2)
updated_type = JSONObject(get_templatetype(new_type.id))
log.info(len(updated_type.typeattrs))
        assert len(updated_type.typeattrs) == 1, "Resource type attr did not delete correctly"
def test_get_templates(self):
self.get_template()
templates = [JSONObject(t) for t in get_templates()]
for t in templates:
for typ in t.templatetypes:
assert typ.resource_type is not None
assert len(templates) > 0, "Templates were not retrieved!"
def test_get_template(self):
template = self.get_template()
new_template = JSONObject(get_template(template.id))
assert new_template.name == template.name, "Names are not the same! Retrieval by ID did not work!"
def test_get_template_by_name_good(self):
template = self.get_template()
new_template = JSONObject(get_template_by_name(template.name))
assert new_template.name == template.name, "Names are not the same! Retrieval by name did not work!"
def test_get_template_by_name_bad(self):
new_template = get_template_by_name("Not a template!")
assert new_template is None
def test_add_resource_type(self):
template = self.get_template()
types = template.templatetypes
type_name = types[0].name
type_id = types[0].id
project = self.create_project('test')
network = JSONObject()
nnodes = 3
nlinks = 2
x = [0, 0, 1]
y = [0, 1, 0]
network.nodes = []
network.links = []
for i in range(nnodes):
node = JSONObject()
node.id = i * -1
node.name = 'Node ' + str(i)
node.description = 'Test node ' + str(i)
node.x = x[i]
node.y = y[i]
type_summary = JSONObject()
type_summary.template_id = template.id
type_summary.template_name = template.name
type_summary.id = type_id
type_summary.name = type_name
node.types = [type_summary]
network.nodes.append(node)
for i in range(nlinks):
link = JSONObject()
link.id = i * -1
link.name = 'Link ' + str(i)
link.description = 'Test link ' + str(i)
link.node_1_id = network.nodes[i].id
link.node_2_id = network.nodes[i + 1].id
network.links.append(link)
network.project_id = project.id
network.name = 'Test @ %s'%(datetime.datetime.now())
network.description = 'A network for SOAP unit tests.'
net_summary = add_network(network, user_id=self.user_id)
new_net = get_network(net_summary.id, user_id=self.user_id)
for node in new_net.nodes:
            assert node.types is not None and node.types[0].type_name == "Node type", "type was not added correctly!"
def test_find_matching_resource_types(self):
network = self.create_network_with_data()
node_to_check = network.nodes[0]
matching_types_i = get_matching_resource_types('NODE', node_to_check.id)
matching_types_j = [JSONObject(i) for i in matching_types_i]
assert len(matching_types_j) > 0, "No types returned!"
matching_type_ids = []
for tmpltype in matching_types_j:
matching_type_ids.append(tmpltype.id)
assert node_to_check.types[0].id in matching_type_ids, "TemplateType ID not found in matching types!"
def test_assign_type_to_resource(self):
network = self.create_network_with_data()
template = self.get_template()
templatetype = template.templatetypes[0]
node_to_assign = network.nodes[0]
result = JSONObject(assign_type_to_resource(templatetype.id, 'NODE', node_to_assign.id))
node = get_node(node_to_assign.id)
assert node.types is not None, \
'Assigning type did not work properly.'
assert str(result.id) in [str(x.type_id) for x in node.types]
def test_remove_type_from_resource(self):
network = self.create_network_with_data()
template = self.get_template()
templatetype = template.templatetypes[0]
node_to_assign = network.nodes[0]
result1_i = assign_type_to_resource(templatetype.id, 'NODE', node_to_assign.id)
result1_j = JSONObject(result1_i)
node_j = JSONObject(get_node(node_to_assign.id))
assert node_j.types is not None, \
'Assigning type did not work properly.'
assert str(result1_j.id) in [str(x.type_id) for x in node_j.types]
remove_result = remove_type_from_resource(templatetype.id,
'NODE',
node_to_assign.id)
print remove_result
assert remove_result == 'OK'
updated_node_j = JSONObject(get_node(node_to_assign.id))
assert updated_node_j.types is None or str(result1_j.id) not in [str(x.type_id) for x in updated_node_j.types]
def test_create_template_from_network(self):
network = self.create_network_with_data()
net_template = get_network_as_xml_template(network.id)
assert net_template is not None
template_xsd_path = config.get('templates', 'template_xsd_path')
xmlschema_doc = etree.parse(template_xsd_path)
xmlschema = etree.XMLSchema(xmlschema_doc)
xml_tree = etree.fromstring(net_template)
xmlschema.assertValid(xml_tree)
def test_apply_template_to_network(self):
net_to_update = self.create_network_with_data()
template = self.get_template()
#Test the links as it's easier
empty_links = []
for l in net_to_update.links:
if l.types is None:
empty_links.append(l.id)
#Add the resource attributes to the links, so we can guarantee
#that these links will match those in the template.
for t in template.templatetypes:
if t.resource_type == 'LINK':
link_type = t
break
link_ra_1 = JSONObject(dict(
attr_id=link_type.typeattrs[0].attr_id
))
link_ra_2 = JSONObject(dict(
attr_id=link_type.typeattrs[1].attr_id
))
for link in net_to_update.links:
if link.types is None:
link.attributes.append(link_ra_1)
link.attributes.append(link_ra_2)
network = update_network(net_to_update, user_id=self.user_id)
for n in network.nodes:
assert len(n.types) == 1
assert n.types[0].name == 'Default Node'
apply_template_to_network(template.id, network.id)
network = get_network(network.id, user_id=self.user_id)
assert len(network.types) == 2
assert network.types[1].type_name == 'Network Type'
for l in network.links:
if l.id in empty_links:
assert l.types is not None
assert len(l.types) == 1
assert l.types[0].type_name == 'Link type'
        #The assignment of the template hasn't affected the nodes
#as they do not have the appropriate attributes.
for n in network.nodes:
assert len(n.types) == 1
assert n.types[0].template_name == 'Default Template'
def test_apply_template_to_network_twice(self):
net_to_update = self.create_network_with_data()
template = self.get_template()
#Test the links as it's easier
empty_links = []
for l in net_to_update.links:
if l.types is None:
empty_links.append(l.id)
#Add the resource attributes to the links, so we can guarantee
#that these links will match those in the template.
for t in template.templatetypes:
if t.resource_type == 'LINK':
link_type = t
break
link_ra_1 = JSONObject(dict(
attr_id=link_type.typeattrs[0].attr_id
))
link_ra_2 = JSONObject(dict(
attr_id=link_type.typeattrs[1].attr_id
))
for link in net_to_update.links:
if link.types is None:
link.attributes.append(link_ra_1)
link.attributes.append(link_ra_2)
network = update_network(net_to_update, user_id=self.user_id)
for n in network.nodes:
assert len(n.types) == 1
assert n.types[0].name == 'Default Node'
apply_template_to_network(template.id, network.id)
apply_template_to_network(template.id, network.id)
network = get_network(network.id, user_id=self.user_id)
assert len(network.types) == 2
assert network.types[1].name == 'Network Type'
for l in network.links:
if l.id in empty_links:
assert l.types is not None
                assert len(l.types) == 1
assert l.types[0].name == 'Link type'
for n in network.nodes:
assert len(n.types) == 1
assert n.types[0].template_name == 'Default Template'
def test_remove_template_from_network(self):
network = self.create_network_with_data()
template_id = network.types[0].template_id
#Test the links as it's easier
empty_links = []
for l in network.links:
if l.types is None or len(l.types) == 0:
empty_links.append(l.id)
for n in network.nodes:
assert len(n.types) == 1
assert n.types[0].type_name == "Default Node"
apply_template_to_network(template_id, network.id)
remove_template_from_network(network.id, template_id, 'N')
network_2 = get_network(network.id, user_id=self.user_id)
assert len(network_2.types) == 0
for l in network_2.links:
if l.id in empty_links:
assert len(l.types) == 0
for n in network_2.nodes:
assert len(n.types) == 0
def test_remove_template_and_attributes_from_network(self):
network = self.create_network_with_data()
template = self.get_template()
#Test the links as it's easier
empty_links = []
for l in network.links:
if l.types is None:
empty_links.append(l.id)
for n in network.nodes:
assert len(n.types) == 1
assert n.types[0].type_name == 'Default Node'
network_1 = get_network(network.id, user_id=self.user_id)
assert len(network_1.types) == 1
apply_template_to_network(template.id, network.id)
network_3 = get_network(network.id, user_id=self.user_id)
assert len(network_3.types) == 2
remove_template_from_network(network.id, template.id, 'Y')
network_2 = get_network(network.id, user_id=self.user_id)
assert len(network_2.types) == 1
link_attrs = []
for tt in template.templatetypes:
if tt.resource_type != 'LINK':
continue
for ta in tt.typeattrs:
                attr_id = ta.attr_id
                if attr_id not in link_attrs:
                    link_attrs.append(attr_id)
for l in network_2.links:
if l.id in empty_links:
assert l.types is None
if l.attributes is not None:
for a in l.attributes:
assert a.attr_id not in link_attrs
for tt in template.templatetypes:
if tt.resource_type != 'NODE':
continue
for ta in tt.typeattrs:
                attr_id = ta.attr_id
                if attr_id not in link_attrs:
                    link_attrs.append(attr_id)
for n in network_2.nodes:
assert len(n.types) == 1
if n.attributes is not None:
for a in n.attributes:
assert a.attr_id not in link_attrs
def test_validate_attr(self):
network = self.create_network_with_data()
scenario = network.scenarios[0]
rs_ids = [rs.resource_attr_id for rs in scenario.resourcescenarios]
template_id = network.nodes[0].types[0].template_id
for n in network.nodes:
node_type = get_templatetype(n.types[0].id)
for ra in n.attributes:
for attr in node_type.typeattrs:
if ra.attr_id == attr.attr_id and ra.id in rs_ids and attr.data_restriction is not None:
# logging.info("Validating RA %s in scenario %s", ra.id, scenario.id)
error = validate_attr(ra.id, scenario.id, template_id)
assert error.ref_id == n.id
def test_validate_attrs(self):
network = self.create_network_with_data()
scenario = network.scenarios[0]
ra_ids = []
for rs in scenario.resourcescenarios:
ra_ids.append(rs.resource_attr_id)
template_id = network.types[0].template_id
errors = validate_attrs(ra_ids, scenario.id, template_id)
assert len(errors) > 0
def test_validate_scenario(self):
network = self.create_network_with_data()
scenario = network.scenarios[0]
template_id = network.nodes[0].types[0].template_id
errors = validate_scenario(scenario.id,template_id)
assert len(errors) > 0
def test_validate_network(self):
network = self.create_network_with_data(use_existing_template=False)
util.update_template(network.types[0].template_id)
scenario = network.scenarios[0]
template = network.nodes[0].types[0]
        #Validate the network without data. The network is built from the
        #template in these unit tests, but the template defines net_attr_d,
        #which the network does not have, so expect exactly one error.
        errors1 = validate_network(network['id'], template['template_id'])
        assert len(errors1) == 1
        assert errors1[0].find('net_attr_d') > 0
#Validate the network with data. Should fail as one of the attributes (node_attr_3)
#is specified as being a 'Cost' in the template but 'Speed' is the dimension
#of the dataset used. In addition, node_attr_1 specified a unit of 'm^3'
#whereas the timeseries in the data is in 'cm^3', so each will fail on unit
#mismatch also.
errors2 = validate_network(network['id'], template['template_id'],
scenario['id'])
assert len(errors2) > 0
#every node should have an associated error, plus the network error from above
        assert len(errors2) == len(network['nodes']) * 2 + 1
for err in errors2[1:]:
try:
assert err.startswith("Unit mismatch")
except AssertionError:
assert err.startswith("Dimension mismatch")
def test_type_compatibility(self):
"""
        Check the function that tests whether two types are compatible -- i.e.
        whether the overlapping attributes are specified with the same unit.
        Change the unit associated with an attribute for types in two identical
        templates, and test. There should be 1 error returned.
        Then test comparison of identical types. No errors should be returned.
"""
template_1 = self.test_add_template()
template_2 = self.test_add_template()
diff_type_1_id = None
same_type_1_id = None
for typ in template_1.templatetypes:
if typ.typeattrs:
for ta in typ.typeattrs:
if ta.attr.name == 'node_attr_1':
diff_type_1_id = typ.id
ta.unit = "m^3"
elif ta.attr.name == 'link_attr_1':
same_type_1_id = typ.id
updated_template_1 = JSONObject(update_template(template_1))
diff_type_2_id = None
same_type_2_id = None
for typ in template_2.templatetypes:
if typ.typeattrs:
for ta in typ.typeattrs:
if ta.attr.name == 'node_attr_1':
diff_type_2_id = typ.id
ta.unit = "cm^3"
elif ta.attr.name == 'link_attr_1':
same_type_2_id = typ.id
#Before updating template 2, check compatibility of types, where T1 has
#a unit, but t2 does not.
errors_diff = check_type_compatibility(diff_type_1_id, diff_type_2_id)
assert len(errors_diff) == 1
errors_same = check_type_compatibility(same_type_1_id, same_type_2_id)
assert len(errors_same) == 0
#Now update T2 so that the types have conflicting units.
updated_template_2 = JSONObject(update_template(template_2))
errors_diff = check_type_compatibility(diff_type_1_id, diff_type_2_id)
assert len(errors_diff) == 1
errors_same = check_type_compatibility(same_type_1_id, same_type_2_id)
assert len(errors_same) == 0
print updated_template_1
for typ in updated_template_1.templatetypes:
if typ.typeattrs:
for ta in typ.typeattrs:
if ta.attr.name == 'node_attr_1':
ta.unit = None
#Update template 1 now so that it has no unit, but template 2 does.
        updated_template_1_a = update_template(updated_template_1)
errors_diff = check_type_compatibility(diff_type_1_id, diff_type_2_id)
assert len(errors_diff) == 1
errors_same = check_type_compatibility(same_type_1_id, same_type_2_id)
assert len(errors_same) == 0
if __name__ == '__main__':
server.run()
| lgpl-3.0 | -6,006,100,232,963,779,000 | -1,477,107,045,160,496,600 | 35.31919 | 125 | 0.598757 | false |
mlperf/training_results_v0.7 | Fujitsu/benchmarks/transformer/implementations/implementation_closed/fairseq/data/monolingual_dataset.py | 4 | 2728 | # Copyright (c) 2017-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the LICENSE file in
# the root directory of this source tree. An additional grant of patent rights
# can be found in the PATENTS file in the same directory.
import numpy as np
import torch
from . import data_utils, FairseqDataset
def collate(samples, pad_idx, eos_idx):
if len(samples) == 0:
return {}
def merge(key):
return data_utils.collate_tokens(
[s[key] for s in samples], pad_idx, eos_idx, left_pad=False,
)
return {
'id': torch.LongTensor([s['id'] for s in samples]),
'ntokens': sum(len(s['target']) for s in samples),
'net_input': {
'src_tokens': merge('source'),
},
'target': merge('target'),
}
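# Added note (not in upstream fairseq): collate() returns a mini-batch dict
# with right-padded (left_pad=False) 'src_tokens' and 'target' tensors of
# shape (batch, max_len), plus the original example ids and the total target
# token count, which is typically used to normalise the loss.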
class MonolingualDataset(FairseqDataset):
"""A wrapper around torch.utils.data.Dataset for monolingual data."""
def __init__(self, dataset, sizes, vocab, shuffle):
self.dataset = dataset
self.sizes = np.array(sizes)
self.vocab = vocab
self.shuffle = shuffle
def __getitem__(self, index):
source, target = self.dataset[index]
return {'id': index, 'source': source, 'target': target}
def __len__(self):
return len(self.dataset)
def collater(self, samples):
"""Merge a list of samples to form a mini-batch."""
return collate(samples, self.vocab.pad(), self.vocab.eos())
def get_dummy_batch(self, num_tokens, max_positions, tgt_len=128):
assert isinstance(max_positions, float) or isinstance(max_positions, int)
tgt_len = min(tgt_len, max_positions)
bsz = num_tokens // tgt_len
target = self.vocab.dummy_sentence(tgt_len + 1)
source, target = target[:-1], target[1:]
return self.collater([
{'id': i, 'source': source, 'target': target}
for i in range(bsz)
])
def num_tokens(self, index):
"""Return an example's length (number of tokens), used for batching."""
source, target = self.dataset[index]
return len(source)
def ordered_indices(self, seed=None):
"""Ordered indices for batching."""
if self.shuffle:
order = [np.random.permutation(len(self))]
else:
order = [np.arange(len(self))]
order.append(np.flip(self.sizes, 0))
return np.lexsort(order)
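    # Added note (not in upstream fairseq): np.lexsort treats the *last* key
    # in `order` as the primary sort key, so the shuffled (or arange) order
    # above only breaks ties between entries whose size-key values are equal:
    #   np.lexsort([np.array([0, 1, 2]), np.array([3, 1, 2])])  # -> [1, 2, 0]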
def valid_size(self, index, max_positions):
"""Check if an example's size is valid according to max_positions."""
assert isinstance(max_positions, float) or isinstance(max_positions, int)
return self.sizes[index] <= max_positions
| apache-2.0 | -7,845,898,102,229,530,000 | 5,466,516,937,641,860,000 | 32.679012 | 81 | 0.611437 | false |
AliLozano/django-guardian | docs/conf.py | 16 | 7163 | # -*- coding: utf-8 -*-
#
# django-guardian documentation build configuration file, created by
# sphinx-quickstart on Thu Feb 18 23:18:28 2010.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__))))
os.environ['DJANGO_SETTINGS_MODULE'] = 'guardian.testsettings'
ANONYMOUS_USER_ID = -1 # Required by guardian
guardian = __import__('guardian')
# -- General configuration -----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'exts']
try:
import rst2pdf
if rst2pdf.version >= '0.16':
extensions.append('rst2pdf.pdfbuilder')
except ImportError:
print "[NOTE] In order to build PDF you need rst2pdf with version >=0.16"
autoclass_content = "both"
# Add any paths that contain templates here, relative to this directory.
templates_path = ['templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'django-guardian'
copyright = u'2010, Lukasz Balcerzak'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = guardian.get_version()
# The full version, including alpha/beta/rc tags.
release = guardian.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = ['build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
#html_theme = 'default'
# Theme URL: https://github.com/coordt/ADCtheme/
html_theme = 'ADCtheme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ['theme']
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_use_modindex = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'guardiandoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'guardian.tex', u'guardian Documentation',
u'Lukasz Balcerzak', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True
pdf_documents = [
('index', u'django-guardian', u'Documentation for django-guardian',
u'Lukasz Balcerzak'),
]
pdf_stylesheets = ['sphinx','kerning','a4']
pdf_break_level = 2
pdf_inline_footnotes = True
#pdf_extensions = ['vectorpdf', 'dotted_toc']
| bsd-2-clause | 4,875,501,447,365,800,000 | -2,221,521,016,904,436,500 | 31.707763 | 80 | 0.711992 | false |
wooga/airflow | tests/providers/presto/hooks/test_presto.py | 5 | 4331 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import unittest
from unittest import mock
from unittest.mock import patch
from prestodb.transaction import IsolationLevel
from airflow.models import Connection
from airflow.providers.presto.hooks.presto import PrestoHook
class TestPrestoHookConn(unittest.TestCase):
def setUp(self):
super().setUp()
self.connection = Connection(
login='login',
password='password',
host='host',
schema='hive',
)
class UnitTestPrestoHook(PrestoHook):
conn_name_attr = 'presto_conn_id'
self.db_hook = UnitTestPrestoHook()
self.db_hook.get_connection = mock.Mock()
self.db_hook.get_connection.return_value = self.connection
@patch('airflow.providers.presto.hooks.presto.prestodb.auth.BasicAuthentication')
@patch('airflow.providers.presto.hooks.presto.prestodb.dbapi.connect')
def test_get_conn(self, mock_connect, mock_basic_auth):
self.db_hook.get_conn()
mock_connect.assert_called_once_with(catalog='hive', host='host', port=None, http_scheme='http',
schema='hive', source='airflow', user='login', isolation_level=0,
auth=mock_basic_auth('login', 'password'))
class TestPrestoHook(unittest.TestCase):
def setUp(self):
super().setUp()
self.cur = mock.MagicMock()
self.conn = mock.MagicMock()
self.conn.cursor.return_value = self.cur
conn = self.conn
class UnitTestPrestoHook(PrestoHook):
conn_name_attr = 'test_conn_id'
def get_conn(self):
return conn
def get_isolation_level(self):
return IsolationLevel.READ_COMMITTED
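        # Added note (not in the original test): overriding
        # get_isolation_level() keeps this stub hook from consulting a real
        # Airflow connection when the DbApiHook machinery asks for it.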
self.db_hook = UnitTestPrestoHook()
@patch('airflow.hooks.dbapi_hook.DbApiHook.insert_rows')
def test_insert_rows(self, mock_insert_rows):
table = "table"
rows = [("hello",),
("world",)]
target_fields = None
commit_every = 10
self.db_hook.insert_rows(table, rows, target_fields, commit_every)
mock_insert_rows.assert_called_once_with(table, rows, None, 10)
def test_get_first_record(self):
statement = 'SQL'
result_sets = [('row1',), ('row2',)]
self.cur.fetchone.return_value = result_sets[0]
self.assertEqual(result_sets[0], self.db_hook.get_first(statement))
self.conn.close.assert_called_once_with()
self.cur.close.assert_called_once_with()
self.cur.execute.assert_called_once_with(statement)
def test_get_records(self):
statement = 'SQL'
result_sets = [('row1',), ('row2',)]
self.cur.fetchall.return_value = result_sets
self.assertEqual(result_sets, self.db_hook.get_records(statement))
self.conn.close.assert_called_once_with()
self.cur.close.assert_called_once_with()
self.cur.execute.assert_called_once_with(statement)
def test_get_pandas_df(self):
statement = 'SQL'
column = 'col'
result_sets = [('row1',), ('row2',)]
self.cur.description = [(column,)]
self.cur.fetchall.return_value = result_sets
df = self.db_hook.get_pandas_df(statement)
self.assertEqual(column, df.columns[0])
self.assertEqual(result_sets[0][0], df.values.tolist()[0][0])
self.assertEqual(result_sets[1][0], df.values.tolist()[1][0])
self.cur.execute.assert_called_once_with(statement, None)
| apache-2.0 | -2,757,493,962,851,200,500 | -5,181,873,128,884,037,000 | 34.5 | 110 | 0.643269 | false |
davemehringer/keplerpp | apps/plotter_mayavi.py | 1 | 10321 | # plotter_mayavi.py
# Copyright (C) 2016 David Mehringer
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from mayavi.mlab import *
import numpy as np
import os
import random
import sys
import threading
import time
def do_colors():
global colors
colors = (np.random.random((nbodies, 4))*255).astype(np.uint16)
colors[:,-1] = 255 # No transparency
for c in colors:
red = c[0]
green = c[1]
blue = c[2]
sum = red + green + blue
if sum < 384:
while True:
red = (np.random.random((1))*255).astype(np.uint16)[0]
green = (np.random.random((1))*255).astype(np.uint16)[0]
blue = (np.random.random((1))*255).astype(np.uint16)[0]
sum = red + green + blue
if sum >= 384:
c[:] = [red, green, blue, 255]
break
norm_colors.append(tuple(c[0:3]/255.0))
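# Added note (not in the original script): mayavi's LUT wants RGBA rows in
# the 0-255 range (used via lut.table in plot_positions), while plot3d and
# text3d take float RGB tuples in 0-1, hence the parallel `colors` table and
# `norm_colors` list built above.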
def do_labels():
if texts == []:
show_labels()
elif name_toggle or tag_toggle:
modify_labels()
else:
hide_labels()
def do_tags():
global tag_map
global tag_list
n = len(tag_chars)
tag_set_as_list = list(tag_chars)
# if n bodies > n tags, not all bodies will have tags
for i in xrange(nbodies):
if i < n:
z = tag_set_as_list[i % n]
tag = str(z)
tag_map[tag] = i
tag_list.append(tag)
else:
tag_list.append("")
def hide_labels():
global texts
gcf().scene.disable_render = True
for t in texts:
t.remove()
gcf().scene.disable_render = False
texts = []
def hide_paths():
global paths
if not paths:
return
gcf().scene.disable_render = True
for p in paths:
p.remove()
gcf().scene.disable_render = False
paths = []
def init():
global lines
global pts
global colors
global texts
global names
global positions
# first read gets body names
read()
# second read gets initial positions
read()
do_colors()
plot_positions(True, True)
do_tags()
def key_intercept(vtk_obj, event):
global name_toggle
global path_toggle
global scale_factor
global tag_toggle
global cur_key_event
global tag_map
keypress = vtk_obj.GetKeyCode()
if keypress == '>':
scale_factor *= 2
plot_positions(False, True)
hide_paths()
elif keypress == '<':
scale_factor /= 2
plot_positions(False, True)
hide_paths()
elif keypress == 'o':
path_toggle = not path_toggle
if path_toggle:
plot_paths()
else:
hide_paths()
elif keypress == 'n' or keypress == 't':
if keypress == 'n':
name_toggle = not name_toggle
elif keypress == 't':
tag_toggle = not tag_toggle
do_labels()
elif keypress in tag_map:
global center_id
center_id = tag_map[keypress]
replot_all(False)
def modify_labels():
global texts
gcf().scene.disable_render = True
for i in xrange(nbodies):
if name_toggle and tag_toggle:
texts[i].text = names[i] + " " + tag_list[i]
elif name_toggle:
texts[i].text = names[i]
elif tag_toggle:
texts[i].text = tag_list[i]
gcf().scene.disable_render = False
@animate(delay=50)
def myanim():
global count
global done
global positions
    while True:
        if not done:
            read()
            replot_all(False)
        # yield on every pass so the animator retains control of the event
        # loop even after the data pipe reports "end"
        yield
def picker_callback(picker):
""" Picker callback: this get called when on pick events.
"""
global pts
if picker.actor in pts.actor.actors:
glyph_points = pts.glyph.glyph_source.glyph_source.output.points.to_array()
# Find which data point corresponds to the point picked:
# we have to account for the fact that each data point is
# represented by a glyph with several points
point_id = picker.point_id/glyph_points.shape[0]
# If the no points have been selected, we have '-1'
if point_id != -1:
# Retrieve the coordinnates coorresponding to that data
# point
global center_id
center_id = point_id
#clf()
replot_all(False)
def plot_paths():
if not path_toggle:
return
(x, y, z) = rel_positions(True)
if x.shape[1] < 2:
return
gcf().scene.disable_render = True
global paths
(azimuth, elevation, distance, focalpoint) = view()
tr = scale_factor/4.0
n = x.shape[1]
if not paths:
        paths = []
        # one fresh buffer per axis: sharing a single zeros array here would
        # let the y and z assignments overwrite x; the trailing NaNs stop
        # plot3d from drawing the unused tail of the 10000-sample buffers
        for i in xrange(nbodies):
            xx = np.zeros([10000])
            xx[:n] = x[i]
            xx[n:] = None
            yy = np.zeros([10000])
            yy[:n] = y[i]
            yy[n:] = None
            zz = np.zeros([10000])
            zz[:n] = z[i]
            zz[n:] = None
path = plot3d(
xx, yy, zz, color=norm_colors[i],
tube_radius=tr
)
paths.append(path)
else:
for i in xrange(nbodies):
if i != center_id:
xx = paths[i].mlab_source.x
xx[:n] = x[i]
paths[i].mlab_source.x = xx
yy = paths[i].mlab_source.y
yy[:n] = y[i]
paths[i].mlab_source.y = yy
zz = paths[i].mlab_source.z
zz[:n] = z[i]
paths[i].mlab_source.z = zz
view(azimuth=azimuth, elevation=elevation, distance=distance, focalpoint=focalpoint)
gcf().scene.disable_render = False
def plot_positions(reset_zoom, rescale_data):
gcf().scene.disable_render = True
global pts
(azimuth, elevation, distance, focalpoint) = view()
(x, y, z) = rel_positions(False)
if pts and not rescale_data:
pts.mlab_source.x = x
pts.mlab_source.y = y
pts.mlab_source.z = z
else:
if pts:
pts.remove()
pts = points3d(
x, y, z, np.arange(nbodies), scale_factor=scale_factor,
reset_zoom=reset_zoom
)
pts.glyph.color_mode = 'color_by_scalar' # Color by scalar
pts.glyph.scale_mode = 'scale_by_vector' # scale by vector
# Set look-up table and redraw
pts.module_manager.scalar_lut_manager.lut.table = colors
view(azimuth=azimuth, elevation=elevation, distance=distance, focalpoint=focalpoint)
gcf().scene.disable_render = False
def read():
global count
global nbodies
global done
global positions
global names
x = f.readline().rstrip()
if x == "end":
f.close()
done = True
return positions
elif x[0].isalpha():
names = np.array(x.split())
return
data = np.fromstring(x, sep=" ")
if count == 0:
nbodies = len(data)/3
data = data.reshape(nbodies, 3)
if count == 0:
positions = np.expand_dims(data, axis=2)
else:
positions = np.append(positions, np.expand_dims(data, axis=2), axis=2)
count += 1
def rel_positions(all_steps):
    """Positions relative to the centre body, converted to AU.

    all_steps=True returns the full history; False only the latest step."""
    if all_steps:
        rp = (positions[:, :] - positions[center_id, :])/au
    else:
        rp = (positions[:, :, -1] - positions[center_id, :, -1])/au
return (rp[:,0], rp[:,1], rp[:,2])
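# Added note (not in the original script): `positions` is shaped
# (nbodies, 3, nsteps), so the subtraction in rel_positions broadcasts the
# centre body's coordinates across all bodies; dividing by `au` converts the
# positions to astronomical units (the 1.495e8 constant suggests the
# simulator emits kilometres).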
def replot_all(reset_zoom):
if path_toggle:
plot_paths()
plot_positions(reset_zoom, False)
if name_toggle or tag_toggle:
update_labels()
def show_labels():
(x, y, z) = rel_positions(False)
(azimuth, elevation, distance, focalpoint) = view()
#scale = 0.005*distance
scale = 0.007*distance
x += 2*scale
y += 2*scale
z += 2*scale
gcf().scene.disable_render = True
global texts
texts = []
for i in xrange(nbodies):
if name_toggle and tag_toggle:
text = names[i] + " " + tag_list[i]
elif name_toggle:
text = names[i]
elif tag_toggle:
text = tag_list[i]
xx = text3d(
x[i], y[i], z[i], text,
scale=scale, color=norm_colors[i]
)
texts.append(xx)
gcf().scene.disable_render = False
def update_labels():
"""animate text labels to follow objects"""
if not name_toggle and not tag_toggle:
return
(x, y, z) = rel_positions(False)
gcf().scene.disable_render = True
for (tt, xx, yy, zz) in zip(texts, x, y, z):
tt.position = (xx, yy, zz)
gcf().scene.disable_render = False
tag_chars = set([
'0','1','2','4','5','6','7','8','9','b','d','g','h','i','k',
'm','q','r','u','v','x','y','z','A','B','C','D','E','G','H',
'I','J','K','M','N','O','Q','R','T','U','V','X','Y','Z'
])
picker = gcf().on_mouse_pick(picker_callback)
# Decrease the tolerance, so that we can more easily select a precise
# point.
picker.tolerance = 0.01
pipename = '/tmp/fifo_kepler'
if not os.path.exists(pipename):
os.mkfifo(pipename)
f = open(pipename, 'r')
nbodies = 0
names = []
#centeridx = -1
plotlines = True
showtext = False
update = True
live = True
lastidx = -1
scale_factor = 0.03
center_id = 0
tag_map = {}
tag_list = []
gcf().scene.interactor.add_observer('KeyPressEvent', key_intercept);
colors = []
norm_colors = []
pts = None
paths = None
texts = []
done = False
positions = np.zeros([nbodies, 3])
count = 0
name_toggle = False
path_toggle = False
tag_toggle = False
cur_count = count
au = 1.495e8
figure(gcf(), bgcolor=(0, 0, 0))
init()
#t = threading.Thread(target=read_loop)
#t.start()
myanim()
| gpl-3.0 | 5,133,624,821,959,193,000 | -911,278,535,322,937,000 | 26.670241 | 88 | 0.556148 | false |
xchaoinfo/python | burun/0007/codes/find_keywords_in_diary.py | 78 | 1185 | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Date: 13-03-15
# Author: Liang
import os
import re
# set diaries path
diaries_path = "diaries/"
diaries = os.listdir(diaries_path)
# set stop words to make informative keywords
stop_words = open("Stop Words.txt", 'r').read()
stop_words_list = stop_words.split(" ")
# Find top 5 keywords in a txt
def find_keywords(words):
words_dictionary = {}
for word in words:
if word.lower() not in words_dictionary and word.lower() not in stop_words_list:
# Put word in dictionary
words_dictionary[word] = 0
for item in words:
if item == word:
words_dictionary[word] += 1
    # Find the 5 keywords with the highest frequency
keywords = sorted(
words_dictionary, key=words_dictionary.__getitem__, reverse=True)[0:5]
return keywords
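# A shorter equivalent sketch (added for illustration; not part of the
# original exercise -- this Counter-based helper is hypothetical):
# from collections import Counter
# def find_keywords_counter(words):
#     counts = Counter(w for w in words if w.lower() not in stop_words_list)
#     return [w for w, _ in counts.most_common(5)]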
for diary in diaries:
    # Read each diary file as UTF-8 text
with open(diaries_path + diary, "r", encoding='utf-8', errors='ignore') as content:
diary_words_list = re.findall(r"[\w']+", content.read())
print("The keywords of diary " + diary + " is: ", end="")
print(find_keywords(diary_words_list))
| mit | 7,429,494,649,736,690,000 | -8,563,889,626,720,947,000 | 30.184211 | 88 | 0.621097 | false |
prutseltje/ansible | lib/ansible/modules/network/aci/aci_interface_policy_leaf_policy_group.py | 4 | 18307 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Bruno Calogero <brunocalogero@hotmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: aci_interface_policy_leaf_policy_group
short_description: Manage fabric interface policy leaf policy groups (infra:AccBndlGrp, infra:AccPortGrp)
description:
- Manage fabric interface policy leaf policy groups on Cisco ACI fabrics.
notes:
- When using the module please select the appropriate link_aggregation_type (lag_type).
C(link) for Port Channel(PC), C(node) for Virtual Port Channel(VPC) and C(leaf) for Leaf Access Port Policy Group.
- More information about the internal APIC classes B(infra:AccBndlGrp) and B(infra:AccPortGrp) from
L(the APIC Management Information Model reference,https://developer.cisco.com/docs/apic-mim-ref/).
author:
- Bruno Calogero (@brunocalogero)
version_added: '2.5'
options:
policy_group:
description:
- Name of the leaf policy group to be added/deleted.
aliases: [ name, policy_group_name ]
description:
description:
- Description for the leaf policy group to be created.
aliases: [ descr ]
lag_type:
description:
- Selector for the type of leaf policy group we want to create.
- C(leaf) for Leaf Access Port Policy Group
- C(link) for Port Channel (PC)
- C(node) for Virtual Port Channel (VPC)
aliases: [ lag_type_name ]
choices: [ leaf, link, node ]
link_level_policy:
description:
- Choice of link_level_policy to be used as part of the leaf policy group to be created.
aliases: [ link_level_policy_name ]
cdp_policy:
description:
- Choice of cdp_policy to be used as part of the leaf policy group to be created.
aliases: [ cdp_policy_name ]
mcp_policy:
description:
- Choice of mcp_policy to be used as part of the leaf policy group to be created.
aliases: [ mcp_policy_name ]
lldp_policy:
description:
- Choice of lldp_policy to be used as part of the leaf policy group to be created.
aliases: [ lldp_policy_name ]
stp_interface_policy:
description:
- Choice of stp_interface_policy to be used as part of the leaf policy group to be created.
aliases: [ stp_interface_policy_name ]
egress_data_plane_policing_policy:
description:
- Choice of egress_data_plane_policing_policy to be used as part of the leaf policy group to be created.
aliases: [ egress_data_plane_policing_policy_name ]
ingress_data_plane_policing_policy:
description:
- Choice of ingress_data_plane_policing_policy to be used as part of the leaf policy group to be created.
aliases: [ ingress_data_plane_policing_policy_name ]
priority_flow_control_policy:
description:
- Choice of priority_flow_control_policy to be used as part of the leaf policy group to be created.
aliases: [ priority_flow_control_policy_name ]
fibre_channel_interface_policy:
description:
- Choice of fibre_channel_interface_policy to be used as part of the leaf policy group to be created.
aliases: [ fibre_channel_interface_policy_name ]
slow_drain_policy:
description:
- Choice of slow_drain_policy to be used as part of the leaf policy group to be created.
aliases: [ slow_drain_policy_name ]
port_channel_policy:
description:
- Choice of port_channel_policy to be used as part of the leaf policy group to be created.
aliases: [ port_channel_policy_name ]
monitoring_policy:
description:
- Choice of monitoring_policy to be used as part of the leaf policy group to be created.
aliases: [ monitoring_policy_name ]
storm_control_interface_policy:
description:
- Choice of storm_control_interface_policy to be used as part of the leaf policy group to be created.
aliases: [ storm_control_interface_policy_name ]
l2_interface_policy:
description:
- Choice of l2_interface_policy to be used as part of the leaf policy group to be created.
aliases: [ l2_interface_policy_name ]
port_security_policy:
description:
- Choice of port_security_policy to be used as part of the leaf policy group to be created.
aliases: [ port_security_policy_name ]
aep:
description:
- Choice of attached_entity_profile (AEP) to be used as part of the leaf policy group to be created.
aliases: [ aep_name ]
state:
description:
- Use C(present) or C(absent) for adding or removing.
- Use C(query) for listing an object or multiple objects.
choices: [ absent, present, query ]
default: present
extends_documentation_fragment: aci
'''
# FIXME: Add query examples
EXAMPLES = r'''
- name: Create a Port Channel (PC) Interface Policy Group
aci_interface_policy_leaf_policy_group:
host: apic
username: admin
password: SomeSecretPassword
policy_group: policygroupname
description: policygroupname description
lag_type: link
link_level_policy: whateverlinklevelpolicy
fibre_channel_interface_policy: whateverfcpolicy
state: present
- name: Create a Virtual Port Channel (VPC) Interface Policy Group (no description)
aci_interface_policy_leaf_policy_group:
host: apic
username: admin
password: SomeSecretPassword
policy_group: policygroupname
lag_type: node
link_level_policy: whateverlinklevelpolicy
fibre_channel_interface_policy: whateverfcpolicy
state: present
- name: Create a Leaf Access Port Policy Group (no description)
aci_interface_policy_leaf_policy_group:
host: apic
username: admin
password: SomeSecretPassword
policy_group: policygroupname
lag_type: leaf
link_level_policy: whateverlinklevelpolicy
fibre_channel_interface_policy: whateverfcpolicy
state: present
- name: Delete an Interface policy Leaf Policy Group
aci_interface_policy_leaf_policy_group:
host: apic
username: admin
password: SomeSecretPassword
policy_group: policygroupname
lag_type: type_name
state: absent
'''
RETURN = r'''
current:
description: The existing configuration from the APIC after the module has finished
returned: success
type: list
sample:
[
{
"fvTenant": {
"attributes": {
"descr": "Production environment",
"dn": "uni/tn-production",
"name": "production",
"nameAlias": "",
"ownerKey": "",
"ownerTag": ""
}
}
}
]
error:
description: The error information as returned from the APIC
returned: failure
type: dict
sample:
{
"code": "122",
"text": "unknown managed object class foo"
}
raw:
description: The raw output returned by the APIC REST API (xml or json)
returned: parse error
type: string
sample: '<?xml version="1.0" encoding="UTF-8"?><imdata totalCount="1"><error code="122" text="unknown managed object class foo"/></imdata>'
sent:
description: The actual/minimal configuration pushed to the APIC
returned: info
type: list
sample:
{
"fvTenant": {
"attributes": {
"descr": "Production environment"
}
}
}
previous:
description: The original configuration from the APIC before the module has started
returned: info
type: list
sample:
[
{
"fvTenant": {
"attributes": {
"descr": "Production",
"dn": "uni/tn-production",
"name": "production",
"nameAlias": "",
"ownerKey": "",
"ownerTag": ""
}
}
}
]
proposed:
description: The assembled configuration from the user-provided parameters
returned: info
type: dict
sample:
{
"fvTenant": {
"attributes": {
"descr": "Production environment",
"name": "production"
}
}
}
filter_string:
description: The filter string used for the request
returned: failure or debug
type: string
sample: ?rsp-prop-include=config-only
method:
description: The HTTP method used for the request to the APIC
returned: failure or debug
type: string
sample: POST
response:
description: The HTTP response from the APIC
returned: failure or debug
type: string
sample: OK (30 bytes)
status:
description: The HTTP status from the APIC
returned: failure or debug
type: int
sample: 200
url:
description: The HTTP url used for the request to the APIC
returned: failure or debug
type: string
sample: https://10.11.12.13/api/mo/uni/tn-production.json
'''
from ansible.module_utils.network.aci.aci import ACIModule, aci_argument_spec
from ansible.module_utils.basic import AnsibleModule
def main():
argument_spec = aci_argument_spec()
argument_spec.update(
policy_group=dict(type='str', aliases=['name', 'policy_group_name']), # Not required for querying all objects
description=dict(type='str', aliases=['descr']),
# NOTE: Since this module needs to include both infra:AccBndlGrp (for PC and VPC) and infra:AccPortGrp (for leaf access port policy group):
# NOTE: I'll allow the user to make the choice here (link(PC), node(VPC), leaf(leaf-access port policy group))
lag_type=dict(type='str', aliases=['lag_type_name'], choices=['leaf', 'link', 'node']), # Not required for querying all objects
link_level_policy=dict(type='str', aliases=['link_level_policy_name']),
cdp_policy=dict(type='str', aliases=['cdp_policy_name']),
mcp_policy=dict(type='str', aliases=['mcp_policy_name']),
lldp_policy=dict(type='str', aliases=['lldp_policy_name']),
stp_interface_policy=dict(type='str', aliases=['stp_interface_policy_name']),
egress_data_plane_policing_policy=dict(type='str', aliases=['egress_data_plane_policing_policy_name']),
ingress_data_plane_policing_policy=dict(type='str', aliases=['ingress_data_plane_policing_policy_name']),
priority_flow_control_policy=dict(type='str', aliases=['priority_flow_control_policy_name']),
fibre_channel_interface_policy=dict(type='str', aliases=['fibre_channel_interface_policy_name']),
slow_drain_policy=dict(type='str', aliases=['slow_drain_policy_name']),
port_channel_policy=dict(type='str', aliases=['port_channel_policy_name']),
monitoring_policy=dict(type='str', aliases=['monitoring_policy_name']),
storm_control_interface_policy=dict(type='str', aliases=['storm_control_interface_policy_name']),
l2_interface_policy=dict(type='str', aliases=['l2_interface_policy_name']),
port_security_policy=dict(type='str', aliases=['port_security_policy_name']),
aep=dict(type='str', aliases=['aep_name']),
state=dict(type='str', default='present', choices=['absent', 'present', 'query']),
)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
required_if=[
['state', 'absent', ['lag_type', 'policy_group']],
['state', 'present', ['lag_type', 'policy_group']],
],
)
policy_group = module.params['policy_group']
description = module.params['description']
lag_type = module.params['lag_type']
link_level_policy = module.params['link_level_policy']
cdp_policy = module.params['cdp_policy']
mcp_policy = module.params['mcp_policy']
lldp_policy = module.params['lldp_policy']
stp_interface_policy = module.params['stp_interface_policy']
egress_data_plane_policing_policy = module.params['egress_data_plane_policing_policy']
ingress_data_plane_policing_policy = module.params['ingress_data_plane_policing_policy']
priority_flow_control_policy = module.params['priority_flow_control_policy']
fibre_channel_interface_policy = module.params['fibre_channel_interface_policy']
slow_drain_policy = module.params['slow_drain_policy']
port_channel_policy = module.params['port_channel_policy']
monitoring_policy = module.params['monitoring_policy']
storm_control_interface_policy = module.params['storm_control_interface_policy']
l2_interface_policy = module.params['l2_interface_policy']
port_security_policy = module.params['port_security_policy']
aep = module.params['aep']
state = module.params['state']
if lag_type == 'leaf':
aci_class_name = 'infraAccPortGrp'
dn_name = 'accportgrp'
class_config_dict = dict(
name=policy_group,
descr=description,
)
    elif lag_type in ('link', 'node'):
aci_class_name = 'infraAccBndlGrp'
dn_name = 'accbundle'
class_config_dict = dict(
name=policy_group,
descr=description,
lagT=lag_type,
)
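    # Added note (not in the original module): per the module docs above,
    # 'leaf' maps to the APIC class infra:AccPortGrp, while 'link' (PC) and
    # 'node' (VPC) both map to infra:AccBndlGrp, with the lagT attribute
    # distinguishing a Port Channel from a Virtual Port Channel.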
aci = ACIModule(module)
aci.construct_url(
root_class=dict(
aci_class=aci_class_name,
aci_rn='infra/funcprof/{0}-{1}'.format(dn_name, policy_group),
filter_target='eq({0}.name, "{1}")'.format(aci_class_name, policy_group),
module_object=policy_group,
),
child_classes=[
'infraRsAttEntP',
'infraRsCdpIfPol',
'infraRsFcIfPol',
'infraRsHIfPol',
'infraRsL2IfPol',
'infraRsL2PortSecurityPol',
'infraRsLacpPol',
'infraRsLldpIfPol',
'infraRsMcpIfPol',
'infraRsMonIfInfraPol',
'infraRsQosEgressDppIfPol',
'infraRsQosIngressDppIfPol',
'infraRsQosPfcIfPol',
'infraRsQosSdIfPol',
'infraRsStormctrlIfPol',
'infraRsStpIfPol',
],
)
aci.get_existing()
if state == 'present':
aci.payload(
aci_class=aci_class_name,
class_config=class_config_dict,
child_configs=[
dict(
infraRsAttEntP=dict(
attributes=dict(
tDn='uni/infra/attentp-{0}'.format(aep),
),
),
),
dict(
infraRsCdpIfPol=dict(
attributes=dict(
tnCdpIfPolName=cdp_policy,
),
),
),
dict(
infraRsFcIfPol=dict(
attributes=dict(
tnFcIfPolName=fibre_channel_interface_policy,
),
),
),
dict(
infraRsHIfPol=dict(
attributes=dict(
tnFabricHIfPolName=link_level_policy,
),
),
),
dict(
infraRsL2IfPol=dict(
attributes=dict(
tnL2IfPolName=l2_interface_policy,
),
),
),
dict(
infraRsL2PortSecurityPol=dict(
attributes=dict(
tnL2PortSecurityPolName=port_security_policy,
),
),
),
dict(
infraRsLacpPol=dict(
attributes=dict(
tnLacpLagPolName=port_channel_policy,
),
),
),
dict(
infraRsLldpIfPol=dict(
attributes=dict(
tnLldpIfPolName=lldp_policy,
),
),
),
dict(
infraRsMcpIfPol=dict(
attributes=dict(
tnMcpIfPolName=mcp_policy,
),
),
),
dict(
infraRsMonIfInfraPol=dict(
attributes=dict(
tnMonInfraPolName=monitoring_policy,
),
),
),
dict(
infraRsQosEgressDppIfPol=dict(
attributes=dict(
tnQosDppPolName=egress_data_plane_policing_policy,
),
),
),
dict(
infraRsQosIngressDppIfPol=dict(
attributes=dict(
tnQosDppPolName=ingress_data_plane_policing_policy,
),
),
),
dict(
infraRsQosPfcIfPol=dict(
attributes=dict(
tnQosPfcIfPolName=priority_flow_control_policy,
),
),
),
dict(
infraRsQosSdIfPol=dict(
attributes=dict(
tnQosSdIfPolName=slow_drain_policy,
),
),
),
dict(
infraRsStormctrlIfPol=dict(
attributes=dict(
tnStormctrlIfPolName=storm_control_interface_policy,
),
),
),
dict(
infraRsStpIfPol=dict(
attributes=dict(
tnStpIfPolName=stp_interface_policy,
),
),
),
],
)
aci.get_diff(aci_class=aci_class_name)
aci.post_config()
elif state == 'absent':
aci.delete_config()
aci.exit_json()
if __name__ == "__main__":
main()
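# Hedged usage sketch: this file is the tail of an ACI interface policy
# group module, so an invoking task would look roughly like the following
# (the module name and all values are assumptions, not taken from this file):
#
#   - name: Add a leaf interface policy group
#     aci_interface_policy_leaf_policy_group:
#       policy_group: my_policy_group
#       lag_type: leaf
#       aep: my_aep
#       state: present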
| gpl-3.0 | 9,190,524,760,191,193,000 | -6,221,304,108,207,841,000 | 35.108481 | 147 | 0.580106 | false |
mlperf/training_results_v0.7 | Fujitsu/benchmarks/resnet/implementations/implementation_open/mxnet/3rdparty/tvm/vta/tests/python/integration/test_benchmark_gemm.py | 2 | 12873 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import tvm
import numpy as np
from tvm.contrib import util
import vta.testing
from vta.testing import simulator
def test_gemm():
def run_gemm_packed(env, remote, batch_size, channel, block):
data_shape = (batch_size // env.BATCH,
channel // env.BLOCK_IN,
env.BATCH,
env.BLOCK_IN)
weight_shape = (channel // env.BLOCK_OUT,
channel // env.BLOCK_IN,
env.BLOCK_OUT,
env.BLOCK_IN)
res_shape = (batch_size // env.BATCH,
channel // env.BLOCK_OUT,
env.BATCH,
env.BLOCK_OUT)
        # To compute the number of ops, use a 2x factor for each FMA (multiply + add)
num_ops = 2 * channel * channel * batch_size
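        # Worked example (illustrative): for batch=channel=128 this is
        # 2 * 128 * 128 * 128 = 4,194,304 ops per GEMM call.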
ko = tvm.reduce_axis((0, channel // env.BLOCK_IN), name='ko')
ki = tvm.reduce_axis((0, env.BLOCK_IN), name='ki')
data = tvm.placeholder(data_shape,
name="data",
dtype=env.inp_dtype)
weight = tvm.placeholder(weight_shape,
name="weight",
dtype=env.wgt_dtype)
data_buf = tvm.compute(data_shape,
lambda *i: data(*i),
"data_buf")
weight_buf = tvm.compute(weight_shape,
lambda *i: weight(*i),
"weight_buf")
res_gem = tvm.compute(res_shape,
lambda bo, co, bi, ci: tvm.sum(
data_buf[bo, ko, bi, ki].astype(env.acc_dtype) *
weight_buf[co, ko, ci, ki].astype(env.acc_dtype),
axis=[ko, ki]),
name="res_gem")
res_shf = tvm.compute(res_shape,
lambda *i: res_gem(*i)>>8,
name="res_shf")
res_max = tvm.compute(res_shape,
lambda *i: tvm.max(res_shf(*i), 0),
"res_max") #relu
res_min = tvm.compute(res_shape,
lambda *i: tvm.min(res_max(*i), (1<<(env.INP_WIDTH-1))-1),
"res_min") #relu
res = tvm.compute(res_shape,
lambda *i: res_min(*i).astype(env.inp_dtype),
name="res")
def verify(s, check_correctness=True):
mod = vta.build(s, [data, weight, res],
"ext_dev", env.target_host, name="gemm")
temp = util.tempdir()
mod.save(temp.relpath("gemm.o"))
remote.upload(temp.relpath("gemm.o"))
f = remote.load_module("gemm.o")
# verify
ctx = remote.ext_dev(0)
# Data in original format
data_orig = np.random.randint(
-128, 128, size=(batch_size, channel)).astype(data.dtype)
weight_orig = np.random.randint(
-128, 128, size=(channel, channel)).astype(weight.dtype)
data_packed = data_orig.reshape(
batch_size // env.BATCH, env.BATCH,
channel // env.BLOCK_IN, env.BLOCK_IN).transpose((0, 2, 1, 3))
weight_packed = weight_orig.reshape(
channel // env.BLOCK_OUT, env.BLOCK_OUT,
channel // env.BLOCK_IN, env.BLOCK_IN).transpose((0, 2, 1, 3))
res_np = np.zeros(res_shape).astype(res.dtype)
data_arr = tvm.nd.array(data_packed, ctx)
weight_arr = tvm.nd.array(weight_packed, ctx)
res_arr = tvm.nd.array(res_np, ctx)
res_ref = np.zeros(res_shape).astype(env.acc_dtype)
for b in range(batch_size // env.BATCH):
for i in range(channel // env.BLOCK_OUT):
for j in range(channel // env.BLOCK_IN):
res_ref[b,i,:] += np.dot(data_packed[b,j,:].astype(env.acc_dtype),
weight_packed[i,j].T.astype(env.acc_dtype))
res_ref = np.right_shift(res_ref, 8)
res_ref = np.clip(res_ref, 0, (1<<(env.INP_WIDTH-1))-1).astype(res.dtype)
time_f = f.time_evaluator("gemm", ctx, number=20)
if env.TARGET in ["sim", "tsim"]:
simulator.clear_stats()
cost = time_f(data_arr, weight_arr, res_arr)
if env.TARGET in ["sim", "tsim"]:
stats = simulator.stats()
print("Execution statistics:")
for k, v in stats.items():
print("\t{:<16}: {:>16}".format(k, v))
res_unpack = res_arr.asnumpy().reshape(batch_size // env.BATCH,
channel // env.BLOCK_OUT,
env.BATCH,
env.BLOCK_OUT)
if check_correctness:
tvm.testing.assert_allclose(res_unpack, res_ref)
return cost
def run_schedule(load_inp,
load_wgt,
gemm,
alu,
store_out,
print_ir,
check_correctness):
s = tvm.create_schedule(res.op)
s[data_buf].set_scope(env.inp_scope)
s[weight_buf].set_scope(env.wgt_scope)
s[res_gem].set_scope(env.acc_scope)
s[res_shf].set_scope(env.acc_scope)
s[res_min].set_scope(env.acc_scope)
s[res_max].set_scope(env.acc_scope)
if block:
bblock = block // env.BATCH
iblock = block // env.BLOCK_IN
oblock = block // env.BLOCK_OUT
xbo, xco, xbi, xci = s[res].op.axis
xb1, xco1, xb2, xco2 = s[res].tile(xbo, xco, bblock, oblock)
store_pt = xb2
s[res_gem].compute_at(s[res], xco1)
s[res_shf].compute_at(s[res], xco1)
s[res_min].compute_at(s[res], xco1)
s[res_max].compute_at(s[res], xco1)
xbo, xco, xbi, xci = s[res_gem].op.axis
# Compute one line at a time
ko1, ko2 = s[res_gem].split(ko, iblock)
s[res_gem].reorder(ko1, ko2, xbo, xco, xbi, xci, ki)
s[data_buf].compute_at(s[res_gem], ko1)
s[weight_buf].compute_at(s[res_gem], ko1)
# Use VTA instructions
s[data_buf].pragma(s[data_buf].op.axis[0], load_inp)
s[weight_buf].pragma(s[weight_buf].op.axis[0], load_wgt)
s[res_gem].tensorize(xbi, gemm)
s[res_shf].pragma(s[res_shf].op.axis[0], alu)
s[res_min].pragma(s[res_min].op.axis[0], alu)
s[res_max].pragma(s[res_max].op.axis[0], alu)
s[res].pragma(store_pt, store_out)
else:
xbo, xco, xbi, xci = s[res_gem].op.axis
s[res_gem].reorder(ko, xbo, xco, xbi, xci, ki)
# Use VTA instructions
s[data_buf].pragma(s[data_buf].op.axis[0], load_inp)
s[weight_buf].pragma(s[weight_buf].op.axis[0], load_wgt)
s[res_gem].tensorize(xbi, gemm)
s[res_shf].pragma(s[res_shf].op.axis[0], alu)
s[res_min].pragma(s[res_min].op.axis[0], alu)
s[res_max].pragma(s[res_max].op.axis[0], alu)
s[res].pragma(s[res].op.axis[0], store_out)
if print_ir:
print(tvm.lower(s, [data, weight, res], simple_mode=True))
return verify(s, check_correctness)
def gemm_normal(print_ir):
mock = env.mock
print("----- GEMM GOPS End-to-End Test-------")
def run_test(header, print_ir, check_correctness):
cost = run_schedule(
env.dma_copy, env.dma_copy, env.gemm, env.alu, env.dma_copy,
print_ir, check_correctness)
gops = (num_ops / cost.mean) / float(10 ** 9)
print(header)
print("\tTime cost = %g sec/op, %g GOPS" % (cost.mean, gops))
with vta.build_config():
run_test("NORMAL", print_ir, True)
def gemm_unittest(print_ir):
mock = env.mock
print("----- GEMM Unit Test-------")
def run_test(header, print_ir):
cost = run_schedule(
mock.dma_copy, mock.dma_copy, env.gemm, mock.alu, mock.dma_copy,
print_ir, False)
gops = (num_ops / cost.mean) / float(10 ** 9)
print(header)
print("\tTime cost = %g sec/op, %g GOPS" % (cost.mean, gops))
with vta.build_config():
run_test("NORMAL", print_ir)
def alu_unittest(print_ir):
mock = env.mock
print("----- ALU Unit Test-------")
def run_test(header, print_ir):
cost = run_schedule(
mock.dma_copy, mock.dma_copy, mock.gemm, env.alu, mock.dma_copy,
print_ir, False)
gops = (num_ops / cost.mean) / float(10 ** 9)
print(header)
print("\tTime cost = %g sec/op, %g GOPS" % (cost.mean, gops))
with vta.build_config():
run_test("NORMAL", print_ir)
print("")
def load_inp_unittest(print_ir):
mock = env.mock
print("----- LoadInp Unit Test-------")
def run_test(header, print_ir):
cost = run_schedule(
env.dma_copy, mock.dma_copy, mock.gemm, mock.alu, mock.dma_copy, print_ir, False)
gops = (num_ops / cost.mean) / float(10 ** 9)
bandwith = (batch_size * channel * env.INP_WIDTH / cost.mean) / float(10 ** 9)
print(header)
print("\tTime cost = %g sec/op, %g GOPS, bandwidth=%g Gbits" % (
cost.mean, gops, bandwith))
with vta.build_config():
run_test("NORMAL", print_ir)
print("")
def load_wgt_unittest(print_ir):
mock = env.mock
print("----- LoadWgt Unit Test-------")
def run_test(header, print_ir):
cost = run_schedule(
mock.dma_copy, env.dma_copy, mock.gemm, mock.alu, mock.dma_copy, print_ir, False)
gops = (num_ops / cost.mean) / float(10 ** 9)
bandwith = (channel * channel * env.WGT_WIDTH / cost.mean) / float(10 ** 9)
print(header)
print("\tTime cost = %g sec/op, %g GOPS, bandwidth=%g Gbits" % (
cost.mean, gops, bandwith))
with vta.build_config():
run_test("NORMAL", print_ir)
print("")
def store_out_unittest(print_ir):
mock = env.mock
print("----- StoreOut Unit Test-------")
def run_test(header, print_ir):
cost = run_schedule(
mock.dma_copy, mock.dma_copy, mock.gemm, mock.alu, env.dma_copy,
print_ir, False)
gops = (num_ops / cost.mean) / float(10 ** 9)
bandwith = (batch_size * channel * env.OUT_WIDTH / cost.mean) / float(10 ** 9)
print(header)
print("\tTime cost = %g sec/op, %g GOPS, bandwidth=%g Gbits" % (
cost.mean, gops, bandwith))
with vta.build_config():
run_test("NORMAL", print_ir)
print("")
gemm_normal(False)
gemm_unittest(False)
alu_unittest(False)
def _run(env, remote):
print("========GEMM 128=========")
run_gemm_packed(env, remote, 128, 128, 128)
vta.testing.run(_run)
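    # Hedged extension sketch (not exercised here): other packed sizes can be
    # benchmarked the same way, provided they are multiples of the env block
    # dimensions, e.g.:
    #   run_gemm_packed(env, remote, 256, 256, 128)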
if __name__ == "__main__":
test_gemm()
| apache-2.0 | -131,401,926,299,793,140 | -1,085,062,898,923,021,800 | 44.648936 | 101 | 0.478754 | false |
adedayo/intellij-community | python/lib/Lib/site-packages/django/template/loader_tags.py | 73 | 10511 | from django.template.base import TemplateSyntaxError, TemplateDoesNotExist, Variable
from django.template.base import Library, Node, TextNode
from django.template.context import Context
from django.template.defaulttags import token_kwargs
from django.template.loader import get_template
from django.conf import settings
from django.utils.safestring import mark_safe
register = Library()
BLOCK_CONTEXT_KEY = 'block_context'
class ExtendsError(Exception):
pass
class BlockContext(object):
def __init__(self):
# Dictionary of FIFO queues.
self.blocks = {}
def add_blocks(self, blocks):
for name, block in blocks.iteritems():
if name in self.blocks:
self.blocks[name].insert(0, block)
else:
self.blocks[name] = [block]
def pop(self, name):
try:
return self.blocks[name].pop()
except (IndexError, KeyError):
return None
def push(self, name, block):
self.blocks[name].append(block)
def get_block(self, name):
try:
return self.blocks[name][-1]
except (IndexError, KeyError):
return None
class BlockNode(Node):
def __init__(self, name, nodelist, parent=None):
self.name, self.nodelist, self.parent = name, nodelist, parent
def __repr__(self):
return "<Block Node: %s. Contents: %r>" % (self.name, self.nodelist)
def render(self, context):
block_context = context.render_context.get(BLOCK_CONTEXT_KEY)
context.push()
if block_context is None:
context['block'] = self
result = self.nodelist.render(context)
else:
push = block = block_context.pop(self.name)
if block is None:
block = self
# Create new block so we can store context without thread-safety issues.
block = BlockNode(block.name, block.nodelist)
block.context = context
context['block'] = block
result = block.nodelist.render(context)
if push is not None:
block_context.push(self.name, push)
context.pop()
return result
def super(self):
render_context = self.context.render_context
if (BLOCK_CONTEXT_KEY in render_context and
render_context[BLOCK_CONTEXT_KEY].get_block(self.name) is not None):
return mark_safe(self.render(self.context))
return ''
class ExtendsNode(Node):
must_be_first = True
def __init__(self, nodelist, parent_name, parent_name_expr, template_dirs=None):
self.nodelist = nodelist
self.parent_name, self.parent_name_expr = parent_name, parent_name_expr
self.template_dirs = template_dirs
self.blocks = dict([(n.name, n) for n in nodelist.get_nodes_by_type(BlockNode)])
def __repr__(self):
if self.parent_name_expr:
return "<ExtendsNode: extends %s>" % self.parent_name_expr.token
return '<ExtendsNode: extends "%s">' % self.parent_name
def get_parent(self, context):
if self.parent_name_expr:
self.parent_name = self.parent_name_expr.resolve(context)
parent = self.parent_name
if not parent:
error_msg = "Invalid template name in 'extends' tag: %r." % parent
if self.parent_name_expr:
error_msg += " Got this from the '%s' variable." % self.parent_name_expr.token
raise TemplateSyntaxError(error_msg)
if hasattr(parent, 'render'):
return parent # parent is a Template object
return get_template(parent)
def render(self, context):
compiled_parent = self.get_parent(context)
if BLOCK_CONTEXT_KEY not in context.render_context:
context.render_context[BLOCK_CONTEXT_KEY] = BlockContext()
block_context = context.render_context[BLOCK_CONTEXT_KEY]
# Add the block nodes from this node to the block context
block_context.add_blocks(self.blocks)
# If this block's parent doesn't have an extends node it is the root,
# and its block nodes also need to be added to the block context.
for node in compiled_parent.nodelist:
# The ExtendsNode has to be the first non-text node.
if not isinstance(node, TextNode):
if not isinstance(node, ExtendsNode):
blocks = dict([(n.name, n) for n in
compiled_parent.nodelist.get_nodes_by_type(BlockNode)])
block_context.add_blocks(blocks)
break
# Call Template._render explicitly so the parser context stays
# the same.
return compiled_parent._render(context)
class BaseIncludeNode(Node):
def __init__(self, *args, **kwargs):
self.extra_context = kwargs.pop('extra_context', {})
self.isolated_context = kwargs.pop('isolated_context', False)
super(BaseIncludeNode, self).__init__(*args, **kwargs)
def render_template(self, template, context):
values = dict([(name, var.resolve(context)) for name, var
in self.extra_context.iteritems()])
if self.isolated_context:
return template.render(Context(values))
context.update(values)
output = template.render(context)
context.pop()
return output
class ConstantIncludeNode(BaseIncludeNode):
def __init__(self, template_path, *args, **kwargs):
super(ConstantIncludeNode, self).__init__(*args, **kwargs)
try:
t = get_template(template_path)
self.template = t
except:
if settings.TEMPLATE_DEBUG:
raise
self.template = None
def render(self, context):
if not self.template:
return ''
return self.render_template(self.template, context)
class IncludeNode(BaseIncludeNode):
def __init__(self, template_name, *args, **kwargs):
super(IncludeNode, self).__init__(*args, **kwargs)
self.template_name = template_name
def render(self, context):
try:
template_name = self.template_name.resolve(context)
template = get_template(template_name)
return self.render_template(template, context)
except TemplateSyntaxError:
if settings.TEMPLATE_DEBUG:
raise
return ''
except:
return '' # Fail silently for invalid included templates.
def do_block(parser, token):
"""
Define a block that can be overridden by child templates.
"""
bits = token.contents.split()
if len(bits) != 2:
raise TemplateSyntaxError("'%s' tag takes only one argument" % bits[0])
block_name = bits[1]
# Keep track of the names of BlockNodes found in this template, so we can
# check for duplication.
try:
if block_name in parser.__loaded_blocks:
raise TemplateSyntaxError("'%s' tag with name '%s' appears more than once" % (bits[0], block_name))
parser.__loaded_blocks.append(block_name)
except AttributeError: # parser.__loaded_blocks isn't a list yet
parser.__loaded_blocks = [block_name]
nodelist = parser.parse(('endblock', 'endblock %s' % block_name))
parser.delete_first_token()
return BlockNode(block_name, nodelist)
def do_extends(parser, token):
"""
Signal that this template extends a parent template.
This tag may be used in two ways: ``{% extends "base" %}`` (with quotes)
uses the literal value "base" as the name of the parent template to extend,
or ``{% extends variable %}`` uses the value of ``variable`` as either the
name of the parent template to extend (if it evaluates to a string) or as
    the parent template itself (if it evaluates to a Template object).
"""
bits = token.split_contents()
if len(bits) != 2:
raise TemplateSyntaxError("'%s' takes one argument" % bits[0])
parent_name, parent_name_expr = None, None
if bits[1][0] in ('"', "'") and bits[1][-1] == bits[1][0]:
parent_name = bits[1][1:-1]
else:
parent_name_expr = parser.compile_filter(bits[1])
nodelist = parser.parse()
if nodelist.get_nodes_by_type(ExtendsNode):
raise TemplateSyntaxError("'%s' cannot appear more than once in the same template" % bits[0])
return ExtendsNode(nodelist, parent_name, parent_name_expr)
def do_include(parser, token):
"""
Loads a template and renders it with the current context. You can pass
additional context using keyword arguments.
Example::
{% include "foo/some_include" %}
{% include "foo/some_include" with bar="BAZZ!" baz="BING!" %}
Use the ``only`` argument to exclude the current context when rendering
the included template::
{% include "foo/some_include" only %}
{% include "foo/some_include" with bar="1" only %}
"""
bits = token.split_contents()
if len(bits) < 2:
raise TemplateSyntaxError("%r tag takes at least one argument: the name of the template to be included." % bits[0])
options = {}
remaining_bits = bits[2:]
while remaining_bits:
option = remaining_bits.pop(0)
if option in options:
raise TemplateSyntaxError('The %r option was specified more '
'than once.' % option)
if option == 'with':
value = token_kwargs(remaining_bits, parser, support_legacy=False)
if not value:
raise TemplateSyntaxError('"with" in %r tag needs at least '
'one keyword argument.' % bits[0])
elif option == 'only':
value = True
else:
raise TemplateSyntaxError('Unknown argument for %r tag: %r.' %
(bits[0], option))
options[option] = value
isolated_context = options.get('only', False)
namemap = options.get('with', {})
path = bits[1]
if path[0] in ('"', "'") and path[-1] == path[0]:
return ConstantIncludeNode(path[1:-1], extra_context=namemap,
isolated_context=isolated_context)
return IncludeNode(parser.compile_filter(bits[1]), extra_context=namemap,
isolated_context=isolated_context)
register.tag('block', do_block)
register.tag('extends', do_extends)
register.tag('include', do_include)
| apache-2.0 | 1,302,282,493,516,306,400 | 5,025,807,555,099,383,000 | 38.074349 | 123 | 0.610503 | false |
Lokke/eden | tests/unit_tests/modules/s3/s3gis/GeoRSSLayer.py | 43 | 2220 |
s3gis_tests = load_module("tests.unit_tests.modules.s3.s3gis")
def test_GeoRSSLayer():
    # Run with debug enabled so errors surface directly; without debug,
    # errors are turned into session warnings and the layer is skipped
    # altogether, leaving no data structure to check.
url = "test://test_GeoRSS"
current.request.utcnow = datetime.datetime.now()
test_utils.clear_table(db, db.gis_cache)
db.gis_cache.insert(
modified_on = datetime.datetime.now(),
source = url
)
db.commit()
current.session.s3.debug = True
s3gis_tests.layer_test(
db,
db.gis_layer_georss,
dict(
name = "Test GeoRSS",
description = "Test GeoRSS layer",
enabled = True,
created_on = datetime.datetime.now(),
modified_on = datetime.datetime.now(),
url = url,
),
"S3.gis.layers_georss",
[
{
"marker_height": 34,
"marker_image": u"gis_marker.image.marker_red.png",
"marker_width": 20,
"name": u"Test GeoRSS",
"url": u"/eden/gis/cache_feed.geojson?cache.source=test://test_GeoRSS"
}
],
session = session,
request = request,
)
test_utils = local_import("test_utils")
s3gis = local_import("s3.s3gis")
def test_no_cached_copy_available():
test_utils.clear_table(db, db.gis_cache)
current.request.utcnow = datetime.datetime.now()
current.session.s3.debug = True
gis = s3gis.GIS()
with s3gis_tests.InsertedRecord(
db,
db.gis_layer_georss,
dict(
name = "Test GeoRSS",
description = "Test GeoRSS layer",
enabled = True,
created_on = datetime.datetime.now(),
modified_on = datetime.datetime.now(),
url = "test://test_GeoRSS",
)
):
with s3gis_tests.ExpectedException(Exception):
gis.show_map(
window = True,
catalogue_toolbar = True,
toolbar = True,
search = True,
catalogue_layers = True,
projection = 900913,
)
| mit | 1,274,376,514,554,722,600 | -7,641,845,870,086,981,000 | 29 | 86 | 0.533333 | false |
cloudbase/nova | nova/tests/unit/api/openstack/compute/test_limits.py | 3 | 9591 | # Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests dealing with HTTP rate-limiting.
"""
import mock
from oslo_serialization import jsonutils
from oslo_utils import encodeutils
from six.moves import http_client as httplib
from six.moves import StringIO
from nova.api.openstack.compute import limits as limits_v21
from nova.api.openstack.compute import views
from nova.api.openstack import wsgi
import nova.context
from nova import exception
from nova import test
from nova.tests.unit.api.openstack import fakes
from nova.tests.unit import matchers
class BaseLimitTestSuite(test.NoDBTestCase):
"""Base test suite which provides relevant stubs and time abstraction."""
def setUp(self):
super(BaseLimitTestSuite, self).setUp()
self.time = 0.0
self.absolute_limits = {}
def stub_get_project_quotas(context, project_id, usages=True):
return {k: dict(limit=v)
for k, v in self.absolute_limits.items()}
self.stubs.Set(nova.quota.QUOTAS, "get_project_quotas",
stub_get_project_quotas)
def _get_time(self):
"""Return the "time" according to this test suite."""
return self.time
class LimitsControllerTestV21(BaseLimitTestSuite):
"""Tests for `limits.LimitsController` class."""
limits_controller = limits_v21.LimitsController
def setUp(self):
"""Run before each test."""
super(LimitsControllerTestV21, self).setUp()
self.controller = wsgi.Resource(self.limits_controller())
self.ctrler = self.limits_controller()
def _get_index_request(self, accept_header="application/json",
tenant_id=None):
"""Helper to set routing arguments."""
request = fakes.HTTPRequest.blank('', version='2.1')
if tenant_id:
request = fakes.HTTPRequest.blank('/?tenant_id=%s' % tenant_id,
version='2.1')
request.accept = accept_header
request.environ["wsgiorg.routing_args"] = (None, {
"action": "index",
"controller": "",
})
context = nova.context.RequestContext('testuser', 'testproject')
request.environ["nova.context"] = context
return request
def test_empty_index_json(self):
# Test getting empty limit details in JSON.
request = self._get_index_request()
response = request.get_response(self.controller)
expected = {
"limits": {
"rate": [],
"absolute": {},
},
}
body = jsonutils.loads(response.body)
self.assertEqual(expected, body)
def test_index_json(self):
self._test_index_json()
def test_index_json_by_tenant(self):
self._test_index_json('faketenant')
def _test_index_json(self, tenant_id=None):
# Test getting limit details in JSON.
request = self._get_index_request(tenant_id=tenant_id)
context = request.environ["nova.context"]
if tenant_id is None:
tenant_id = context.project_id
self.absolute_limits = {
'ram': 512,
'instances': 5,
'cores': 21,
'key_pairs': 10,
'floating_ips': 10,
'security_groups': 10,
'security_group_rules': 20,
}
expected = {
"limits": {
"rate": [],
"absolute": {
"maxTotalRAMSize": 512,
"maxTotalInstances": 5,
"maxTotalCores": 21,
"maxTotalKeypairs": 10,
"maxTotalFloatingIps": 10,
"maxSecurityGroups": 10,
"maxSecurityGroupRules": 20,
},
},
}
def _get_project_quotas(context, project_id, usages=True):
return {k: dict(limit=v) for k, v in self.absolute_limits.items()}
with mock.patch('nova.quota.QUOTAS.get_project_quotas') as \
get_project_quotas:
get_project_quotas.side_effect = _get_project_quotas
response = request.get_response(self.controller)
body = jsonutils.loads(response.body)
self.assertEqual(expected, body)
get_project_quotas.assert_called_once_with(context, tenant_id,
usages=False)
class FakeHttplibSocket(object):
"""Fake `httplib.HTTPResponse` replacement."""
def __init__(self, response_string):
"""Initialize new `FakeHttplibSocket`."""
self._buffer = StringIO(response_string)
def makefile(self, _mode, _other):
"""Returns the socket's internal buffer."""
return self._buffer
class FakeHttplibConnection(object):
"""Fake `httplib.HTTPConnection`."""
def __init__(self, app, host):
"""Initialize `FakeHttplibConnection`."""
self.app = app
self.host = host
def request(self, method, path, body="", headers=None):
"""Requests made via this connection actually get translated and routed
        into our WSGI app; we then wait for the response and turn it back into
an `httplib.HTTPResponse`.
"""
if not headers:
headers = {}
req = fakes.HTTPRequest.blank(path)
req.method = method
req.headers = headers
req.host = self.host
req.body = encodeutils.safe_encode(body)
resp = str(req.get_response(self.app))
resp = "HTTP/1.0 %s" % resp
sock = FakeHttplibSocket(resp)
self.http_response = httplib.HTTPResponse(sock)
self.http_response.begin()
def getresponse(self):
"""Return our generated response from the request."""
return self.http_response
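# Minimal usage sketch for the fake connection (the `app` object here is a
# hypothetical WSGI application):
#   conn = FakeHttplibConnection(app, 'localhost')
#   conn.request('GET', '/limits')
#   response = conn.getresponse()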
class LimitsViewBuilderTest(test.NoDBTestCase):
def setUp(self):
super(LimitsViewBuilderTest, self).setUp()
self.view_builder = views.limits.ViewBuilder()
self.rate_limits = []
self.absolute_limits = {"metadata_items": 1,
"injected_files": 5,
"injected_file_content_bytes": 5}
def test_build_limits(self):
expected_limits = {"limits": {
"rate": [],
"absolute": {"maxServerMeta": 1,
"maxImageMeta": 1,
"maxPersonality": 5,
"maxPersonalitySize": 5}}}
output = self.view_builder.build(self.absolute_limits)
self.assertThat(output, matchers.DictMatches(expected_limits))
def test_build_limits_empty_limits(self):
expected_limits = {"limits": {"rate": [],
"absolute": {}}}
abs_limits = {}
output = self.view_builder.build(abs_limits)
self.assertThat(output, matchers.DictMatches(expected_limits))
class LimitsPolicyEnforcementV21(test.NoDBTestCase):
def setUp(self):
super(LimitsPolicyEnforcementV21, self).setUp()
self.controller = limits_v21.LimitsController()
def test_limits_index_policy_failed(self):
rule_name = "os_compute_api:limits"
self.policy.set_rules({rule_name: "project:non_fake"})
req = fakes.HTTPRequest.blank('')
exc = self.assertRaises(
exception.PolicyNotAuthorized,
self.controller.index, req=req)
self.assertEqual(
"Policy doesn't allow %s to be performed." % rule_name,
exc.format_message())
class LimitsControllerTestV236(BaseLimitTestSuite):
def setUp(self):
super(LimitsControllerTestV236, self).setUp()
self.controller = limits_v21.LimitsController()
self.req = fakes.HTTPRequest.blank("/?tenant_id=faketenant",
version='2.36')
def test_index_filtered(self):
absolute_limits = {
'ram': 512,
'instances': 5,
'cores': 21,
'key_pairs': 10,
'floating_ips': 10,
'security_groups': 10,
'security_group_rules': 20,
}
def _get_project_quotas(context, project_id, usages=True):
return {k: dict(limit=v) for k, v in absolute_limits.items()}
with mock.patch('nova.quota.QUOTAS.get_project_quotas') as \
get_project_quotas:
get_project_quotas.side_effect = _get_project_quotas
response = self.controller.index(self.req)
expected_response = {
"limits": {
"rate": [],
"absolute": {
"maxTotalRAMSize": 512,
"maxTotalInstances": 5,
"maxTotalCores": 21,
"maxTotalKeypairs": 10,
},
},
}
self.assertEqual(expected_response, response)
| apache-2.0 | 3,231,358,713,042,255,000 | -2,388,188,164,314,536,000 | 33.624549 | 79 | 0.575435 | false |
SnappleCap/oh-mainline | vendor/packages/Django/django/core/management/commands/testserver.py | 307 | 2006 | from django.core.management.base import BaseCommand
from optparse import make_option
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--noinput', action='store_false', dest='interactive', default=True,
help='Tells Django to NOT prompt the user for input of any kind.'),
make_option('--addrport', action='store', dest='addrport',
type='string', default='',
help='port number or ipaddr:port to run the server on'),
make_option('--ipv6', '-6', action='store_true', dest='use_ipv6', default=False,
help='Tells Django to use a IPv6 address.'),
)
help = 'Runs a development server with data from the given fixture(s).'
args = '[fixture ...]'
requires_model_validation = False
def handle(self, *fixture_labels, **options):
from django.core.management import call_command
from django.db import connection
verbosity = int(options.get('verbosity'))
interactive = options.get('interactive')
addrport = options.get('addrport')
# Create a test database.
db_name = connection.creation.create_test_db(verbosity=verbosity, autoclobber=not interactive)
# Import the fixture data into the test database.
call_command('loaddata', *fixture_labels, **{'verbosity': verbosity})
# Run the development server. Turn off auto-reloading because it causes
# a strange error -- it causes this handle() method to be called
# multiple times.
shutdown_message = '\nServer stopped.\nNote that the test database, %r, has not been deleted. You can explore it on your own.' % db_name
use_threading = connection.features.test_db_allows_multiple_connections
call_command('runserver',
addrport=addrport,
shutdown_message=shutdown_message,
use_reloader=False,
use_ipv6=options['use_ipv6'],
use_threading=use_threading
)
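# Usage sketch (standard manage.py invocation; fixture names illustrative):
#   python manage.py testserver fixture1 fixture2 --addrport 7000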
| agpl-3.0 | -2,078,632,578,741,604,600 | 7,089,713,207,672,292,000 | 43.577778 | 144 | 0.647557 | false |
Timmenem/micropython | ports/cc3200/tools/update-wipy.py | 67 | 7022 | #!/usr/bin/env python
"""
The WiPy firmware update script. Transmits the specified firmware file
over FTP, and then resets the WiPy and optionally verifies that the software
was correctly updated.
Usage:
./update-wipy.py --file "path_to_mcuimg.bin" --verify
Or:
python update-wipy.py --file "path_to_mcuimg.bin"
"""
import sys
import argparse
import time
import socket
from ftplib import FTP
from telnetlib import Telnet
def print_exception(e):
print ('Exception: {}, on line {}'.format(e, sys.exc_info()[-1].tb_lineno))
def ftp_directory_exists(ftpobj, directory_name):
filelist = []
ftpobj.retrlines('LIST',filelist.append)
for f in filelist:
if f.split()[-1] == directory_name:
return True
return False
def transfer_file(args):
with FTP(args.ip, timeout=20) as ftp:
print ('FTP connection established')
if '230' in ftp.login(args.user, args.password):
print ('Login successful')
if '250' in ftp.cwd('/flash'):
if not ftp_directory_exists(ftp, 'sys'):
print ('/flash/sys directory does not exist')
if not '550' in ftp.mkd('sys'):
print ('/flash/sys directory created')
else:
print ('Error: cannot create /flash/sys directory')
return False
if '250' in ftp.cwd('sys'):
print ("Entered '/flash/sys' directory")
with open(args.file, "rb") as fwfile:
print ('Firmware image found, initiating transfer...')
if '226' in ftp.storbinary("STOR " + 'mcuimg.bin', fwfile, 512):
print ('File transfer complete')
return True
else:
print ('Error: file transfer failed')
else:
print ('Error: cannot enter /flash/sys directory')
else:
print ('Error: cannot enter /flash directory')
else:
print ('Error: ftp login failed')
return False
def reset_board(args):
success = False
try:
tn = Telnet(args.ip, timeout=5)
print("Connected via Telnet, trying to login now")
if b'Login as:' in tn.read_until(b"Login as:", timeout=5):
tn.write(bytes(args.user, 'ascii') + b"\r\n")
if b'Password:' in tn.read_until(b"Password:", timeout=5):
# needed because of internal implementation details of the WiPy's telnet server
time.sleep(0.2)
tn.write(bytes(args.password, 'ascii') + b"\r\n")
if b'Type "help()" for more information.' in tn.read_until(b'Type "help()" for more information.', timeout=5):
print("Telnet login succeeded")
tn.write(b'\r\x03\x03') # ctrl-C twice: interrupt any running program
time.sleep(1)
tn.write(b'\r\x02') # ctrl-B: enter friendly REPL
if b'Type "help()" for more information.' in tn.read_until(b'Type "help()" for more information.', timeout=5):
tn.write(b"import machine\r\n")
tn.write(b"machine.reset()\r\n")
time.sleep(2)
print("Reset performed")
success = True
else:
print("Error: cannot enter friendly REPL")
else:
print("Error: telnet login failed")
except Exception as e:
print_exception(e)
finally:
try:
tn.close()
except Exception as e:
pass
return success
def verify_update(args):
success = False
firmware_tag = ''
def find_tag (tag):
if tag in firmware_tag:
print("Verification passed")
return True
else:
print("Error: verification failed, the git tag doesn't match")
return False
retries = 0
while True:
try:
# Specify a longer time out value here because the board has just been
# reset and the wireless connection might not be fully established yet
tn = Telnet(args.ip, timeout=10)
print("Connected via telnet again, lets check the git tag")
break
except socket.timeout:
if retries < 5:
print("Timeout while connecting via telnet, retrying...")
retries += 1
else:
print('Error: Telnet connection timed out!')
return False
try:
firmware_tag = tn.read_until (b'with CC3200')
        # str.rstrip() strips a character *set*, not a suffix, so it can eat
        # extra trailing path characters; slice off the exact suffix instead.
        tag_file_path = args.file[:-len('mcuimg.bin')] + 'genhdr/mpversion.h'
if args.tag is not None:
success = find_tag(bytes(args.tag, 'ascii'))
else:
with open(tag_file_path) as tag_file:
for line in tag_file:
bline = bytes(line, 'ascii')
if b'MICROPY_GIT_HASH' in bline:
bline = bline.lstrip(b'#define MICROPY_GIT_HASH ').replace(b'"', b'').replace(b'\r', b'').replace(b'\n', b'')
success = find_tag(bline)
break
except Exception as e:
print_exception(e)
finally:
try:
tn.close()
except Exception as e:
pass
return success
def main():
cmd_parser = argparse.ArgumentParser(description='Update the WiPy firmware with the specified image file')
cmd_parser.add_argument('-f', '--file', default=None, help='the path of the firmware file')
cmd_parser.add_argument('-u', '--user', default='micro', help='the username')
cmd_parser.add_argument('-p', '--password', default='python', help='the login password')
cmd_parser.add_argument('--ip', default='192.168.1.1', help='the ip address of the WiPy')
cmd_parser.add_argument('--verify', action='store_true', help='verify that the update succeeded')
cmd_parser.add_argument('-t', '--tag', default=None, help='git tag of the firmware image')
args = cmd_parser.parse_args()
result = 1
try:
if args.file is None:
raise ValueError('the image file path must be specified')
if transfer_file(args):
if reset_board(args):
if args.verify:
print ('Waiting for the WiFi connection to come up again...')
# this time is to allow the system's wireless network card to
# connect to the WiPy again.
time.sleep(5)
if verify_update(args):
result = 0
else:
result = 0
except Exception as e:
print_exception(e)
finally:
sys.exit(result)
if __name__ == "__main__":
main()
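# Usage sketch with explicit tag verification (paths/tags illustrative):
#   python update-wipy.py --file build/mcuimg.bin --verify --tag v1.8.7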
| mit | -4,601,430,266,902,442,000 | 8,769,710,319,794,710,000 | 34.11 | 133 | 0.533751 | false |
linsalrob/EdwardsLab | mongodb/load_models.py | 1 | 1140 | """
Create a mongo database if it doesn't exist and load a bunch of data into it.
We need a directory with one or more JSON files in it. We look for JSON on the end of the filename.
e.g. python load_models.py -d /data/Genotype-Phenotype-Modeling/models/Citrobacter/Citrobacter/models/ -n fba_models -c citrobacter
"""
import os
import sys
import argparse
import json
from pymongo import MongoClient
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="Load some data from a directory of JSON files")
parser.add_argument('-d', help='Directory of files', required=True)
parser.add_argument('-n', help='Database name', required=True)
parser.add_argument('-c', help='Collection name', required=True)
args = parser.parse_args()
client = MongoClient()
db = client[args.n]
coll = db[args.c]
for f in os.listdir(args.d):
if f.lower().endswith('.json'):
sys.stderr.write("Loading file " + f + "\n")
text = json.load(open(os.path.join(args.d, f)))
obj = {'file_name' : os.path.join(args.d, f), 'content' : text}
coll.insert(obj)
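# Verification sketch (hedged; db/collection names match the example above):
#   from pymongo import MongoClient
#   coll = MongoClient()['fba_models']['citrobacter']
#   print coll.count(), 'documents loaded'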
| mit | 1,197,647,704,250,072,300 | 6,860,068,281,596,276,000 | 32.529412 | 131 | 0.658772 | false |
neuhofmo/RecBlast | RecBlastUtils.py | 1 | 10025 | #! /usr/bin/env python2
# A set of tools, functions, aliases and more used in RecBlast.
import os
import tarfile
import zipfile
from time import strftime, sleep
import re
import subprocess
from Bio import Entrez
import shutil
Entrez.email = "recblast@gmail.com"
Entrez.tool = "RecBlast"
TEMP_FILES_PATH = os.getcwd()
def prepare_files(items, file_name, user_id, files_path=TEMP_FILES_PATH):
"""Receives a list of items and a file to write them to, then writes them to file and returns the file path."""
full_path = join_folder(files_path, "_".join([user_id, file_name]))
# items = list(set(items)) # make the list unique # unnecessary
with open(full_path, 'w') as f:
for item in items:
f.write("{}\n".format(item)) # improved efficiency
return full_path
def file_to_string(file_name):
"""Reads a file (file_name) and returns the text in it as a string."""
with open(file_name, 'r') as f:
text = f.read()
# delete original file
os.remove(file_name)
return text
def remove_commas(file_name):
"""Replaces commas with newlines in a file."""
with open(file_name, 'r') as f:
text = f.read()
text = replace(text, ',', '\n')
with open(file_name, 'w') as f: # now writing
f.write(text)
return file_name
# def zip_results(fasta_output_path, csv_rbh_output_filename, csv_strict_output_filename, csv_ns_output_filename,
# output_path):
def zip_results(fasta_output_path, zip_list, output_path):
"""
Receives a folder containing fasta sequences and a csv file, adds them all to zip.
:param fasta_output_path:
:param csv_rbh_output_filename:
:param csv_strict_output_filename:
:param csv_ns_output_filename:
:param output_path:
:return:
"""
zip_file = join_folder(output_path, "output.zip")
fastas = [join_folder(fasta_output_path, x) for x in os.listdir(fasta_output_path)]
bname = os.path.basename # for efficiency
with zipfile.ZipFile(zip_file, mode='w') as zf:
# adding all fasta files
for fasta in fastas:
zf.write(fasta, bname(fasta))
# zf.write(csv_file_path, os.path.basename(csv_file_path)) # add csv file
# add csv files
for f_to_zip in zip_list:
zf.write(f_to_zip, bname(f_to_zip))
# zf.write(csv_rbh_output_filename, os.path.basename(csv_rbh_output_filename)) # add csv file
# zf.write(csv_strict_output_filename, os.path.basename(csv_strict_output_filename)) # add csv file
# zf.write(csv_ns_output_filename, os.path.basename(csv_ns_output_filename)) # add csv file
return zip_file
# debugging function
def debug_s(debug_string, to_debug):
"""
Receives a string and prints it, with a timestamp.
:param debug_string: a string to print
:param to_debug: boolean flag: True means print, False - ignore.
:return:
"""
if to_debug:
print "DEBUG {0}: {1}".format(strftime('%H:%M:%S'), debug_string)
def create_folder_if_needed(path):
"""
Receives a path and creates a folder when needed (if it doesn't already exist).
"""
if os.path.exists(path):
print "{} dir exists".format(path)
else:
print "{} dir does not exist. Creating dir.".format(path)
os.mkdir(path)
def file_len(fname):
"""Return the file length in lines."""
with open(fname) as f:
for i, l in enumerate(f):
pass
return i + 1
def targz_folder(archive_name, folder):
"""
Returns True after
:param archive_name:
:param folder:
:return:
"""
with tarfile.open(archive_name, "w:gz") as tar:
tar.add(folder, arcname=os.path.basename(folder))
return True
def cleanup(path, storage_folder, run_id):
"""
Performs tar and gzip on sets of files produced by the program.
Then deletes the files and folders.
:param path: # the run_folder
:param storage_folder: # the main folder, in which the entire run_folder will be stored
:param run_id: # the folder containing the first blast results
:return:
"""
# compress all files in path:
# fasta_path
path_archive = join_folder(storage_folder, "{}.all.tar.gz".format(run_id))
if targz_folder(path_archive, path): # compress run_folder
shutil.rmtree(path) # delete run folder
return True
def write_blast_run_script(command_line, write_folder):
"""Writing a blast run script, and giving it run permissions."""
# script_path = "/tmp/blastp_run.sh" # default script location
script_path = join_folder(write_folder, "blastp_run.sh") # script location
with open(script_path, 'w') as script:
# script.write("#! /bin/tcsh\n")
script.write("#! /bin/bash\n")
script.write("# The script is designed to run the following blastp command from RecBlast\n")
script.write(command_line)
# run permissions for the script:
os.chmod(script_path, 0751)
return script_path
def write_sort_command_script(filename_to_sort, sorted_filename, write_folder):
"""Writing a sort uniq script to edit the gene csv file."""
# script_path = "/tmp/sort_script.sh" # default script location
script_path = join_folder(write_folder, "sort_script.sh") # script location
with open(script_path, 'w') as script:
# script.write("#! /bin/tcsh\n")
script.write("#! /bin/bash\n")
script.write("# The script is designed to run sort, uniq command from RecBlast\n")
command_line = "cat {0} | sort | uniq > {1}.temp; " \
"echo 'gene_id,gene_name,uniprot_id' > {1}; cat {1}.temp >> {1}; " \
"rm {1}.temp\n".format(filename_to_sort, sorted_filename)
# changed to make sure the title only comes after the genes
script.write(command_line)
# run permissions for the script:
os.chmod(script_path, 0751)
return script_path
def merge_two_dicts(x, y):
"""Given two dicts, merge them into a new dict as a shallow copy."""
z = x.copy()
z.update(y)
return z
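# e.g. merge_two_dicts({'a': 1}, {'b': 2}) -> {'a': 1, 'b': 2}; on duplicate
# keys the second dict's values win, and neither input is modified.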
def is_number(s):
"""The function determines if a string is a number or a text. Returns True if it's a number. """
try:
int(s)
return True
except ValueError:
return False
def blastdb_exit():
"""Exiting if we can't find the $BLASTDB on the local machine"""
print("$BLASTDB was not found! Please set the blast DB path to the right location.")
print("Make sure blast+ is installed correctly.")
exit(1)
def exists_not_empty(path):
"""Receives a file path and checks if it exists and not empty."""
if os.path.exists(path) and os.stat(path).st_size > 0:
return True
else:
return False
def subset_db(tax_id, gi_file_path, db_path, big_db, run_anyway, DEBUG, debug, attempt_no=0):
"""
Subsets a big blast database into a smaller one based on tax_id.
The function connects to entrez and retrieves gi identifiers of sequences with the same tax_id.
:param tax_id: The tax_id (string)
:param gi_file_path: file path of the gi_list file we are creating
:param db_path: the new db path
    :param big_db: the large source database we are about to subset
    :param run_anyway: run on NR if unable to subset
    :param DEBUG: A boolean flag: True for debug prints, False for quiet run.
    :param debug: A function call to provide debug prints.
    :param attempt_no: counter for the attempts in connecting to Entrez (connects up to 10 times).
:return:
"""
# connecting to ENTREZ protein DB
try:
handle = Entrez.esearch(db="protein", term="txid{}[ORGN]".format(tax_id), retmode="xml", retmax=10000000)
record = Entrez.read(handle)
except Exception, e: # DB connection exception
print "Error connecting to server, trying again..."
print "Error: {}".format(e)
debug("Error connecting to server, trying again...\n")
# sleeping in case it's a temporary database problem
sleep_period = 180
print "restarting attempt in {} seconds...".format(sleep_period)
sleep(sleep_period)
# counting the number of attempts to connect.
attempt_no += 1
if attempt_no >= 10: # If too many:
print "Tried connecting to Entrez DB more than 10 times. Check your connection or try again later."
exit(1)
# try again (recursive until max)
return subset_db(tax_id, gi_file_path, db_path, big_db, run_anyway, DEBUG, debug, attempt_no)
assert int(record["Count"]) == len(record["IdList"]), "Did not fetch all sequences!" # make sure we got it all...
# writing a gi list file
with open(gi_file_path, 'w') as gi_file:
gi_file.write("\n".join(record["IdList"]) + "\n")
# the new target database path
create_folder_if_needed(os.path.join(db_path, tax_id))
target_db = os.path.join(db_path, tax_id, "db")
aliastool_command = ["blastdb_aliastool", "-gilist", gi_file_path, "-db", big_db, "-dbtype", "prot", "-out",
target_db] # TODO: test that blastdb_aliastool works for the user
try:
subprocess.check_call(aliastool_command)
print("Created DB subset from nr protein for {}".format(tax_id))
return target_db
except subprocess.CalledProcessError:
print("Problem with creating DB for tax_id {} from nr.".format(tax_id))
if run_anyway:
print("Running with the heavy nr option. Do some stretches, it might be a long run.")
return big_db
print("Aborting.\n"
"If you want to run the program anyway against the entire nr "
"(which is significantly slower than the default run, please use the --run_even_if_no_db_found flag.")
exit(1)
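# Hedged call sketch (paths are hypothetical; tax id 562 is E. coli):
#   target_db = subset_db('562', '/tmp/562.gi', '/data/blastdb', 'nr',
#                         run_anyway=False, DEBUG=True, debug=lambda s: None)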
# for efficiency
strip = str.strip
split = str.split
replace = str.replace
re_search = re.search
re_sub = re.sub
re_match = re.match
upper = str.upper
lower = str.lower
join_folder = os.path.join
| mit | -7,121,937,065,067,037,000 | 6,336,635,697,247,333,000 | 35.587591 | 118 | 0.6399 | false |
andrei-karalionak/ggrc-core | src/ggrc_workflows/migrations/versions/20160104135243_13e52f6a9deb_add_finished_verified_dates_to_cycle_.py | 7 | 1120 | # Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""Add finished/verified dates to cycle tasks
Revision ID: 13e52f6a9deb
Revises: 18bdb0671010
Create Date: 2016-01-04 13:52:43.017848
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = '13e52f6a9deb'
down_revision = '18bdb0671010'
def upgrade():
op.add_column('cycle_task_group_object_tasks', sa.Column('finished_date', sa.DateTime(), nullable=True))
op.add_column('cycle_task_group_object_tasks', sa.Column('verified_date', sa.DateTime(), nullable=True))
op.execute("""
UPDATE cycle_task_group_object_tasks
SET finished_date = updated_at
WHERE status = "Finished"
""")
op.execute("""
UPDATE cycle_task_group_object_tasks
SET verified_date = updated_at, finished_date = updated_at
WHERE status = "Verified"
""")
def downgrade():
op.drop_column('cycle_task_group_object_tasks', 'verified_date')
op.drop_column('cycle_task_group_object_tasks', 'finished_date')
| apache-2.0 | -5,629,059,039,330,839,000 | 8,742,864,918,987,523,000 | 30.111111 | 106 | 0.715179 | false |
seanfisk/lsf-ibutils | docs/source/conf.py | 1 | 8703 | # -*- coding: utf-8 -*-
#
# This file is based upon the file generated by sphinx-quickstart. However,
# where sphinx-quickstart hardcodes values in this file that you input, this
# file has been changed to pull from your module's metadata module.
#
# This file is execfile()d with the current directory set to its containing
# dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import os
import sys
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('../..'))
# Import project metadata
from lsf_ibutils import metadata
# -- General configuration ----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx',
'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.viewcode']
# show todos
todo_include_todos = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = metadata.project
copyright = metadata.copyright
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = metadata.version
# The full version, including alpha/beta/rc tags.
release = metadata.version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output --------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'nature'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = metadata.project_no_spaces + 'doc'
# -- Options for LaTeX output -------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author,
# documentclass [howto/manual]).
latex_documents = [
('index', metadata.project_no_spaces + '.tex',
metadata.project + ' Documentation', metadata.authors_string,
'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output -------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', metadata.package, metadata.project + ' Documentation',
metadata.authors_string, 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', metadata.project_no_spaces,
metadata.project + ' Documentation', metadata.authors_string,
metadata.project_no_spaces, metadata.description, 'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
'python': ('http://docs.python.org/', None),
}
# Extra local configuration. This is useful for placing the class description
# in the class docstring and the __init__ parameter documentation in the
# __init__ docstring. See
# <http://sphinx-doc.org/ext/autodoc.html#confval-autoclass_content> for more
# information.
autoclass_content = 'both'
| mit | 3,759,148,028,698,055,700 | -4,536,350,193,862,916,600 | 31.473881 | 79 | 0.708376 | false |
chitr/neutron | neutron/db/migration/alembic_migrations/versions/1955efc66455_weight_scheduler.py | 47 | 1036 | # Copyright 2015 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""weight_scheduler
Revision ID: 1955efc66455
Revises: 35a0f3365720
Create Date: 2015-03-12 22:11:37.607390
"""
# revision identifiers, used by Alembic.
revision = '1955efc66455'
down_revision = '35a0f3365720'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('agents',
sa.Column('load', sa.Integer(),
server_default='0', nullable=False))
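# Applied through the usual alembic flow, e.g. (command name assumed from
# standard neutron tooling):
#   neutron-db-manage upgrade 1955efc66455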
| apache-2.0 | -8,723,583,333,923,340,000 | 5,034,230,891,804,296,000 | 28.6 | 78 | 0.698842 | false |
lkl/lkl-linux-2.6 | tools/perf/scripts/python/syscall-counts-by-pid.py | 944 | 1744 | # system call counts, by pid
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Displays system-wide system call totals, broken down by syscall.
# If a [comm] arg is specified, only syscalls called by [comm] are displayed.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
usage = "perf trace -s syscall-counts-by-pid.py [comm]\n";
for_comm = None
if len(sys.argv) > 2:
sys.exit(usage)
if len(sys.argv) > 1:
for_comm = sys.argv[1]
syscalls = autodict()
def trace_begin():
pass
def trace_end():
print_syscall_totals()
def raw_syscalls__sys_enter(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
id, args):
if for_comm is not None:
if common_comm != for_comm:
return
try:
syscalls[common_comm][common_pid][id] += 1
except TypeError:
syscalls[common_comm][common_pid][id] = 1
def print_syscall_totals():
if for_comm is not None:
print "\nsyscall events for %s:\n\n" % (for_comm),
else:
print "\nsyscall events by comm/pid:\n\n",
print "%-40s %10s\n" % ("comm [pid]/syscalls", "count"),
print "%-40s %10s\n" % ("----------------------------------------", \
"----------"),
comm_keys = syscalls.keys()
for comm in comm_keys:
pid_keys = syscalls[comm].keys()
for pid in pid_keys:
print "\n%s [%d]\n" % (comm, pid),
id_keys = syscalls[comm][pid].keys()
for id, val in sorted(syscalls[comm][pid].iteritems(), \
key = lambda(k, v): (v, k), reverse = True):
print " %-38d %10d\n" % (id, val),
| gpl-2.0 | -2,512,015,498,150,808,600 | 2,958,397,528,170,757,600 | 26.25 | 77 | 0.610092 | false |
vmpstr/trace-viewer | third_party/closure_linter/closure_linter/common/simplefileflags.py | 285 | 5107 | #!/usr/bin/env python
#
# Copyright 2008 The Closure Linter Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Determines the list of files to be checked from command line arguments."""
__author__ = ('robbyw@google.com (Robert Walker)',
'ajp@google.com (Andy Perelson)')
import glob
import os
import re
import gflags as flags
FLAGS = flags.FLAGS
flags.DEFINE_multistring(
'recurse',
None,
'Recurse in to the subdirectories of the given path',
short_name='r')
flags.DEFINE_list(
'exclude_directories',
    ['_demos'],
'Exclude the specified directories (only applicable along with -r or '
'--presubmit)',
short_name='e')
flags.DEFINE_list(
'exclude_files',
    ['deps.js'],
'Exclude the specified files',
short_name='x')
def MatchesSuffixes(filename, suffixes):
"""Returns whether the given filename matches one of the given suffixes.
Args:
filename: Filename to check.
suffixes: Sequence of suffixes to check.
Returns:
Whether the given filename matches one of the given suffixes.
"""
suffix = filename[filename.rfind('.'):]
return suffix in suffixes
def _GetUserSpecifiedFiles(argv, suffixes):
"""Returns files to be linted, specified directly on the command line.
Can handle the '*' wildcard in filenames, but no other wildcards.
Args:
argv: Sequence of command line arguments. The second and following arguments
are assumed to be files that should be linted.
suffixes: Expected suffixes for the file type being checked.
Returns:
A sequence of files to be linted.
"""
files = argv[1:] or []
all_files = []
lint_files = []
# Perform any necessary globs.
for f in files:
if f.find('*') != -1:
for result in glob.glob(f):
all_files.append(result)
else:
all_files.append(f)
for f in all_files:
if MatchesSuffixes(f, suffixes):
lint_files.append(f)
return lint_files
def _GetRecursiveFiles(suffixes):
"""Returns files to be checked specified by the --recurse flag.
Args:
suffixes: Expected suffixes for the file type being checked.
Returns:
A list of files to be checked.
"""
lint_files = []
  # Perform any requested recursion
if FLAGS.recurse:
for start in FLAGS.recurse:
for root, subdirs, files in os.walk(start):
for f in files:
if MatchesSuffixes(f, suffixes):
lint_files.append(os.path.join(root, f))
return lint_files
def GetAllSpecifiedFiles(argv, suffixes):
"""Returns all files specified by the user on the commandline.
Args:
argv: Sequence of command line arguments. The second and following arguments
are assumed to be files that should be linted.
suffixes: Expected suffixes for the file type
Returns:
A list of all files specified directly or indirectly (via flags) on the
command line by the user.
"""
files = _GetUserSpecifiedFiles(argv, suffixes)
if FLAGS.recurse:
files += _GetRecursiveFiles(suffixes)
return FilterFiles(files)
def FilterFiles(files):
"""Filters the list of files to be linted be removing any excluded files.
Filters out files excluded using --exclude_files and --exclude_directories.
Args:
files: Sequence of files that needs filtering.
Returns:
Filtered list of files to be linted.
"""
num_files = len(files)
ignore_dirs_regexs = []
for ignore in FLAGS.exclude_directories:
ignore_dirs_regexs.append(re.compile(r'(^|[\\/])%s[\\/]' % ignore))
result_files = []
for f in files:
add_file = True
for exclude in FLAGS.exclude_files:
if f.endswith('/' + exclude) or f == exclude:
add_file = False
break
for ignore in ignore_dirs_regexs:
if ignore.search(f):
# Break out of ignore loop so we don't add to
# filtered files.
add_file = False
break
if add_file:
# Convert everything to absolute paths so we can easily remove duplicates
# using a set.
result_files.append(os.path.abspath(f))
skipped = num_files - len(result_files)
if skipped:
print 'Skipping %d file(s).' % skipped
return set(result_files)
def GetFileList(argv, file_type, suffixes):
"""Parse the flags and return the list of files to check.
Args:
    argv: Sequence of command line arguments.
    file_type: Name of the file type being checked (unused in this function).
    suffixes: Sequence of acceptable suffixes for the file type.
Returns:
The list of files to check.
"""
return sorted(GetAllSpecifiedFiles(argv, suffixes))
def IsEmptyArgumentList(argv):
return not (len(argv[1:]) or FLAGS.recurse)
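# Minimal usage sketch (assumes the gflags FLAGS object has parsed argv):
#
#   import sys
#   argv = FLAGS(sys.argv)  # returns the non-flag args, e.g. file patterns
#   js_files = GetFileList(argv, 'JavaScript', ['.js'])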
| bsd-3-clause | 4,696,996,898,802,730,000 | -6,412,665,228,169,870,000 | 25.878947 | 80 | 0.684355 | false |
bettiolo/phantomjs | src/breakpad/src/tools/gyp/pylib/gyp/__init__.py | 137 | 17502 | #!/usr/bin/python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import copy
import gyp.input
import optparse
import os.path
import re
import shlex
import sys
# Default debug modes for GYP
debug = {}
# List of "official" debug modes, but you can use anything you like.
DEBUG_GENERAL = 'general'
DEBUG_VARIABLES = 'variables'
DEBUG_INCLUDES = 'includes'
def DebugOutput(mode, message):
if mode in gyp.debug.keys():
print "%s: %s" % (mode.upper(), message)
def FindBuildFiles():
extension = '.gyp'
files = os.listdir(os.getcwd())
build_files = []
for file in files:
if file[-len(extension):] == extension:
build_files.append(file)
return build_files
def Load(build_files, format, default_variables={},
includes=[], depth='.', params={}, check=False):
"""
Loads one or more specified build files.
default_variables and includes will be copied before use.
Returns the generator for the specified format and the
data returned by loading the specified build files.
"""
default_variables = copy.copy(default_variables)
# Default variables provided by this program and its modules should be
# named WITH_CAPITAL_LETTERS to provide a distinct "best practice" namespace,
# avoiding collisions with user and automatic variables.
default_variables['GENERATOR'] = format
generator_name = 'gyp.generator.' + format
# These parameters are passed in order (as opposed to by key)
# because ActivePython cannot handle key parameters to __import__.
generator = __import__(generator_name, globals(), locals(), generator_name)
default_variables.update(generator.generator_default_variables)
# Give the generator the opportunity to set additional variables based on
# the params it will receive in the output phase.
if getattr(generator, 'CalculateVariables', None):
generator.CalculateVariables(default_variables, params)
# Fetch the generator specific info that gets fed to input, we use getattr
# so we can default things and the generators only have to provide what
# they need.
generator_input_info = {
'generator_wants_absolute_build_file_paths':
getattr(generator, 'generator_wants_absolute_build_file_paths', False),
'generator_handles_variants':
getattr(generator, 'generator_handles_variants', False),
'non_configuration_keys':
getattr(generator, 'generator_additional_non_configuration_keys', []),
'path_sections':
getattr(generator, 'generator_additional_path_sections', []),
'extra_sources_for_rules':
getattr(generator, 'generator_extra_sources_for_rules', []),
'generator_supports_multiple_toolsets':
getattr(generator, 'generator_supports_multiple_toolsets', False),
}
# Process the input specific to this generator.
result = gyp.input.Load(build_files, default_variables, includes[:],
depth, generator_input_info, check)
return [generator] + result
def NameValueListToDict(name_value_list):
"""
Takes an array of strings of the form 'NAME=VALUE' and creates a dictionary
of the pairs. If a string is simply NAME, then the value in the dictionary
is set to True. If VALUE can be converted to an integer, it is.
"""
result = { }
for item in name_value_list:
tokens = item.split('=', 1)
if len(tokens) == 2:
# If we can make it an int, use that, otherwise, use the string.
try:
token_value = int(tokens[1])
except ValueError:
token_value = tokens[1]
# Set the variable to the supplied value.
result[tokens[0]] = token_value
else:
# No value supplied, treat it as a boolean and set it.
result[tokens[0]] = True
return result
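# For example (values that parse as integers are converted):
#   NameValueListToDict(['OS=linux', 'werror=1', 'component'])
#     -> {'OS': 'linux', 'werror': 1, 'component': True}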
def ShlexEnv(env_name):
flags = os.environ.get(env_name, [])
if flags:
flags = shlex.split(flags)
return flags
def FormatOpt(opt, value):
if opt.startswith('--'):
return '%s=%s' % (opt, value)
return opt + value
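# e.g. FormatOpt('--suffix', '.tmp') -> '--suffix=.tmp'
#      FormatOpt('-D', 'OS=linux')   -> '-DOS=linux'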
def RegenerateAppendFlag(flag, values, predicate, env_name, options):
"""Regenerate a list of command line flags, for an option of action='append'.
The |env_name|, if given, is checked in the environment and used to generate
an initial list of options, then the options that were specified on the
command line (given in |values|) are appended. This matches the handling of
environment variables and command line flags where command line flags override
the environment, while not requiring the environment to be set when the flags
are used again.
"""
flags = []
if options.use_environment and env_name:
for flag_value in ShlexEnv(env_name):
flags.append(FormatOpt(flag, predicate(flag_value)))
if values:
for flag_value in values:
flags.append(FormatOpt(flag, predicate(flag_value)))
return flags
def RegenerateFlags(options):
"""Given a parsed options object, and taking the environment variables into
account, returns a list of flags that should regenerate an equivalent options
object (even in the absence of the environment variables.)
Any path options will be normalized relative to depth.
The format flag is not included, as it is assumed the calling generator will
set that as appropriate.
"""
def FixPath(path):
path = gyp.common.FixIfRelativePath(path, options.depth)
if not path:
return os.path.curdir
return path
def Noop(value):
return value
# We always want to ignore the environment when regenerating, to avoid
# duplicate or changed flags in the environment at the time of regeneration.
flags = ['--ignore-environment']
for name, metadata in options._regeneration_metadata.iteritems():
opt = metadata['opt']
value = getattr(options, name)
value_predicate = metadata['type'] == 'path' and FixPath or Noop
action = metadata['action']
env_name = metadata['env_name']
if action == 'append':
flags.extend(RegenerateAppendFlag(opt, value, value_predicate,
env_name, options))
elif action in ('store', None): # None is a synonym for 'store'.
if value:
flags.append(FormatOpt(opt, value_predicate(value)))
elif options.use_environment and env_name and os.environ.get(env_name):
flags.append(FormatOpt(opt, value_predicate(os.environ.get(env_name))))
elif action in ('store_true', 'store_false'):
if ((action == 'store_true' and value) or
(action == 'store_false' and not value)):
flags.append(opt)
elif options.use_environment and env_name:
print >>sys.stderr, ('Warning: environment regeneration unimplemented '
'for %s flag %r env_name %r' % (action, opt,
env_name))
else:
print >>sys.stderr, ('Warning: regeneration unimplemented for action %r '
'flag %r' % (action, opt))
return flags
class RegeneratableOptionParser(optparse.OptionParser):
def __init__(self):
self.__regeneratable_options = {}
optparse.OptionParser.__init__(self)
def add_option(self, *args, **kw):
"""Add an option to the parser.
This accepts the same arguments as OptionParser.add_option, plus the
following:
regenerate: can be set to False to prevent this option from being included
in regeneration.
env_name: name of environment variable that additional values for this
option come from.
type: adds type='path', to tell the regenerator that the values of
this option need to be made relative to options.depth
"""
env_name = kw.pop('env_name', None)
if 'dest' in kw and kw.pop('regenerate', True):
dest = kw['dest']
# The path type is needed for regenerating, for optparse we can just treat
# it as a string.
type = kw.get('type')
if type == 'path':
kw['type'] = 'string'
self.__regeneratable_options[dest] = {
'action': kw.get('action'),
'type': type,
'env_name': env_name,
'opt': args[0],
}
optparse.OptionParser.add_option(self, *args, **kw)
def parse_args(self, *args):
values, args = optparse.OptionParser.parse_args(self, *args)
values._regeneration_metadata = self.__regeneratable_options
return values, args
def main(args):
my_name = os.path.basename(sys.argv[0])
parser = RegeneratableOptionParser()
usage = 'usage: %s [options ...] [build_file ...]'
parser.set_usage(usage.replace('%s', '%prog'))
parser.add_option('-D', dest='defines', action='append', metavar='VAR=VAL',
env_name='GYP_DEFINES',
help='sets variable VAR to value VAL')
parser.add_option('-f', '--format', dest='formats', action='append',
env_name='GYP_GENERATORS', regenerate=False,
help='output formats to generate')
parser.add_option('--msvs-version', dest='msvs_version',
regenerate=False,
help='Deprecated; use -G msvs_version=MSVS_VERSION instead')
parser.add_option('-I', '--include', dest='includes', action='append',
metavar='INCLUDE', type='path',
help='files to include in all loaded .gyp files')
parser.add_option('--depth', dest='depth', metavar='PATH', type='path',
help='set DEPTH gyp variable to a relative path to PATH')
parser.add_option('-d', '--debug', dest='debug', metavar='DEBUGMODE',
action='append', default=[], help='turn on a debugging '
'mode for debugging GYP. Supported modes are "variables" '
'and "general"')
parser.add_option('-S', '--suffix', dest='suffix', default='',
help='suffix to add to generated files')
parser.add_option('-G', dest='generator_flags', action='append', default=[],
metavar='FLAG=VAL', env_name='GYP_GENERATOR_FLAGS',
help='sets generator flag FLAG to VAL')
parser.add_option('--generator-output', dest='generator_output',
action='store', default=None, metavar='DIR', type='path',
env_name='GYP_GENERATOR_OUTPUT',
help='puts generated build files under DIR')
parser.add_option('--ignore-environment', dest='use_environment',
action='store_false', default=True, regenerate=False,
help='do not read options from environment variables')
parser.add_option('--check', dest='check', action='store_true',
help='check format of gyp files')
# We read a few things from ~/.gyp, so set up a var for that.
home_vars = ['HOME']
if sys.platform in ('cygwin', 'win32'):
home_vars.append('USERPROFILE')
home = None
for home_var in home_vars:
home = os.getenv(home_var)
if home != None:
break
home_dot_gyp = None
if home != None:
home_dot_gyp = os.path.join(home, '.gyp')
if not os.path.exists(home_dot_gyp):
home_dot_gyp = None
# TODO(thomasvl): add support for ~/.gyp/defaults
(options, build_files_arg) = parser.parse_args(args)
build_files = build_files_arg
if not options.formats:
# If no format was given on the command line, then check the env variable.
generate_formats = []
if options.use_environment:
generate_formats = os.environ.get('GYP_GENERATORS', [])
if generate_formats:
generate_formats = re.split('[\s,]', generate_formats)
if generate_formats:
options.formats = generate_formats
else:
# Nothing in the variable, default based on platform.
options.formats = [ {'darwin': 'xcode',
'win32': 'msvs',
'cygwin': 'msvs',
'freebsd7': 'make',
'freebsd8': 'make',
'linux2': 'scons',}[sys.platform] ]
if not options.generator_output and options.use_environment:
g_o = os.environ.get('GYP_GENERATOR_OUTPUT')
if g_o:
options.generator_output = g_o
for mode in options.debug:
gyp.debug[mode] = 1
# Do an extra check to avoid work when we're not debugging.
if DEBUG_GENERAL in gyp.debug.keys():
DebugOutput(DEBUG_GENERAL, 'running with these options:')
for (option, value) in options.__dict__.items():
if option[0] == '_':
continue
if isinstance(value, basestring):
DebugOutput(DEBUG_GENERAL, " %s: '%s'" % (option, value))
else:
DebugOutput(DEBUG_GENERAL, " %s: %s" % (option, str(value)))
if not build_files:
build_files = FindBuildFiles()
if not build_files:
print >>sys.stderr, (usage + '\n\n%s: error: no build_file') % \
(my_name, my_name)
return 1
# TODO(mark): Chromium-specific hack!
# For Chromium, the gyp "depth" variable should always be a relative path
# to Chromium's top-level "src" directory. If no depth variable was set
# on the command line, try to find a "src" directory by looking at the
# absolute path to each build file's directory. The first "src" component
# found will be treated as though it were the path used for --depth.
if not options.depth:
for build_file in build_files:
build_file_dir = os.path.abspath(os.path.dirname(build_file))
build_file_dir_components = build_file_dir.split(os.path.sep)
components_len = len(build_file_dir_components)
for index in xrange(components_len - 1, -1, -1):
if build_file_dir_components[index] == 'src':
options.depth = os.path.sep.join(build_file_dir_components)
break
del build_file_dir_components[index]
# If the inner loop found something, break without advancing to another
# build file.
if options.depth:
break
if not options.depth:
raise Exception, \
'Could not automatically locate src directory. This is a ' + \
'temporary Chromium feature that will be removed. Use ' + \
'--depth as a workaround.'
# -D on the command line sets variable defaults - D isn't just for define,
# it's for default. Perhaps there should be a way to force (-F?) a
# variable's value so that it can't be overridden by anything else.
cmdline_default_variables = {}
defines = []
if options.use_environment:
defines += ShlexEnv('GYP_DEFINES')
if options.defines:
defines += options.defines
cmdline_default_variables = NameValueListToDict(defines)
if DEBUG_GENERAL in gyp.debug.keys():
DebugOutput(DEBUG_GENERAL,
"cmdline_default_variables: %s" % cmdline_default_variables)
# Set up includes.
includes = []
# If ~/.gyp/include.gypi exists, it'll be forcibly included into every
# .gyp file that's loaded, before anything else is included.
if home_dot_gyp != None:
default_include = os.path.join(home_dot_gyp, 'include.gypi')
if os.path.exists(default_include):
includes.append(default_include)
# Command-line --include files come after the default include.
if options.includes:
includes.extend(options.includes)
# Generator flags should be prefixed with the target generator since they
# are global across all generator runs.
gen_flags = []
if options.use_environment:
gen_flags += ShlexEnv('GYP_GENERATOR_FLAGS')
if options.generator_flags:
gen_flags += options.generator_flags
generator_flags = NameValueListToDict(gen_flags)
if DEBUG_GENERAL in gyp.debug.keys():
DebugOutput(DEBUG_GENERAL, "generator_flags: %s" % generator_flags)
# TODO: Remove this and the option after we've gotten folks to move to the
# generator flag.
if options.msvs_version:
print >>sys.stderr, \
'DEPRECATED: Use generator flag (-G msvs_version=' + \
options.msvs_version + ') instead of --msvs-version=' + \
options.msvs_version
generator_flags['msvs_version'] = options.msvs_version
# Generate all requested formats (use a set in case we got one format request
# twice)
for format in set(options.formats):
params = {'options': options,
'build_files': build_files,
'generator_flags': generator_flags,
'cwd': os.getcwd(),
'build_files_arg': build_files_arg,
'gyp_binary': sys.argv[0],
'home_dot_gyp': home_dot_gyp}
# Start with the default variables from the command line.
[generator, flat_list, targets, data] = Load(build_files, format,
cmdline_default_variables,
includes, options.depth,
params, options.check)
# TODO(mark): Pass |data| for now because the generator needs a list of
# build files that came in. In the future, maybe it should just accept
# a list, and not the whole data dict.
# NOTE: flat_list is the flattened dependency graph specifying the order
# that targets may be built. Build systems that operate serially or that
# need to have dependencies defined before dependents reference them should
# generate targets in the order specified in flat_list.
generator.GenerateOutput(flat_list, targets, data, params)
# Done
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
| bsd-3-clause | 7,972,090,574,789,585,000 | 5,209,006,917,720,626,000 | 38.867882 | 80 | 0.644498 | false |
XperiaSTE/android_external_bluetooth_bluez | test/bluezutils.py | 90 | 1567 | import dbus
SERVICE_NAME = "org.bluez"
ADAPTER_INTERFACE = SERVICE_NAME + ".Adapter1"
DEVICE_INTERFACE = SERVICE_NAME + ".Device1"
def get_managed_objects():
bus = dbus.SystemBus()
manager = dbus.Interface(bus.get_object("org.bluez", "/"),
"org.freedesktop.DBus.ObjectManager")
return manager.GetManagedObjects()
def find_adapter(pattern=None):
return find_adapter_in_objects(get_managed_objects(), pattern)
def find_adapter_in_objects(objects, pattern=None):
bus = dbus.SystemBus()
for path, ifaces in objects.iteritems():
adapter = ifaces.get(ADAPTER_INTERFACE)
if adapter is None:
continue
if not pattern or pattern == adapter["Address"] or \
path.endswith(pattern):
obj = bus.get_object(SERVICE_NAME, path)
return dbus.Interface(obj, ADAPTER_INTERFACE)
raise Exception("Bluetooth adapter not found")
def find_device(device_address, adapter_pattern=None):
return find_device_in_objects(get_managed_objects(), device_address,
adapter_pattern)
def find_device_in_objects(objects, device_address, adapter_pattern=None):
bus = dbus.SystemBus()
path_prefix = ""
if adapter_pattern:
adapter = find_adapter_in_objects(objects, adapter_pattern)
path_prefix = adapter.object_path
for path, ifaces in objects.iteritems():
device = ifaces.get(DEVICE_INTERFACE)
if device is None:
continue
if (device["Address"] == device_address and
path.startswith(path_prefix)):
obj = bus.get_object(SERVICE_NAME, path)
return dbus.Interface(obj, DEVICE_INTERFACE)
raise Exception("Bluetooth device not found")
| gpl-2.0 | -3,134,495,113,202,466,000 | 2,908,530,286,165,053 | 32.340426 | 74 | 0.731334 | false |
TeachAtTUM/edx-platform | lms/djangoapps/courseware/access_response.py | 17 | 4973 | """
This file contains all the classes used by has_access for error handling
"""
from django.utils.translation import ugettext as _
from xmodule.course_metadata_utils import DEFAULT_START_DATE
class AccessResponse(object):
"""Class that represents a response from a has_access permission check."""
def __init__(self, has_access, error_code=None, developer_message=None, user_message=None):
"""
Creates an AccessResponse object.
Arguments:
has_access (bool): if the user is granted access or not
error_code (String): optional - default is None. Unique identifier
for the specific type of error
developer_message (String): optional - default is None. Message
to show the developer
user_message (String): optional - default is None. Message to
show the user
"""
self.has_access = has_access
self.error_code = error_code
self.developer_message = developer_message
self.user_message = user_message
if has_access:
assert error_code is None
def __nonzero__(self):
"""
Overrides bool().
Allows for truth value testing of AccessResponse objects, so callers
who do not need the specific error information can check if access
is granted.
Returns:
bool: whether or not access is granted
"""
return self.has_access
def to_json(self):
"""
Creates a serializable JSON representation of an AccessResponse object.
Returns:
dict: JSON representation
"""
return {
"has_access": self.has_access,
"error_code": self.error_code,
"developer_message": self.developer_message,
"user_message": self.user_message
}
def __repr__(self):
return "AccessResponse({!r}, {!r}, {!r}, {!r})".format(
self.has_access,
self.error_code,
self.developer_message,
self.user_message
)
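# Usage sketch (illustrative): __nonzero__ lets callers truth-test any
# response object directly, e.g.
#   response = check_access(...)   # hypothetical caller
#   if not response:
#       log.warning(response.user_message)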
class AccessError(AccessResponse):
"""
Class that holds information about the error in the case of an access
denial in has_access. Contains the error code, user and developer
messages. Subclasses represent specific errors.
"""
def __init__(self, error_code, developer_message, user_message):
"""
Creates an AccessError object.
An AccessError object represents an AccessResponse where access is
denied (has_access is False).
Arguments:
            error_code (String): unique identifier for the specific type
                of error
            developer_message (String): message to show the developer
            user_message (String): message to show the user
"""
super(AccessError, self).__init__(False, error_code, developer_message, user_message)
class StartDateError(AccessError):
"""
Access denied because the course has not started yet and the user
is not staff
"""
def __init__(self, start_date):
error_code = "course_not_started"
if start_date == DEFAULT_START_DATE:
developer_message = "Course has not started"
user_message = _("Course has not started")
else:
developer_message = "Course does not start until {}".format(start_date)
user_message = _("Course does not start until {}" # pylint: disable=translation-of-non-string
.format("{:%B %d, %Y}".format(start_date)))
super(StartDateError, self).__init__(error_code, developer_message, user_message)
class MilestoneAccessError(AccessError):
"""
Access denied because the user has unfulfilled milestones
"""
def __init__(self):
error_code = "unfulfilled_milestones"
developer_message = "User has unfulfilled milestones"
user_message = _("You have unfulfilled milestones")
super(MilestoneAccessError, self).__init__(error_code, developer_message, user_message)
class VisibilityError(AccessError):
"""
    Access denied because the user does not have the correct role to view
    this course.
"""
def __init__(self):
error_code = "not_visible_to_user"
developer_message = "Course is not visible to this user"
user_message = _("You do not have access to this course")
super(VisibilityError, self).__init__(error_code, developer_message, user_message)
class MobileAvailabilityError(AccessError):
"""
Access denied because the course is not available on mobile for the user
"""
def __init__(self):
error_code = "mobile_unavailable"
developer_message = "Course is not available on mobile for this user"
user_message = _("You do not have access to this course on a mobile device")
super(MobileAvailabilityError, self).__init__(error_code, developer_message, user_message)
| agpl-3.0 | 5,971,604,902,158,066,000 | -5,873,110,370,515,043,000 | 34.776978 | 106 | 0.627388 | false |
brint/cloud-init | cloudinit/distros/gentoo.py | 7 | 5468 | # vi: ts=4 expandtab
#
# Copyright (C) 2014 Rackspace, US Inc.
#
# Author: Nate House <nathan.house@rackspace.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3, as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from cloudinit import distros
from cloudinit import helpers
from cloudinit import log as logging
from cloudinit import util
from cloudinit.distros.parsers.hostname import HostnameConf
from cloudinit.settings import PER_INSTANCE
LOG = logging.getLogger(__name__)
class Distro(distros.Distro):
    locale_conf_fn = "/etc/locale.gen"
    network_conf_fn = "/etc/conf.d/net"
    # _read_system_hostname() below references this attribute; define it so
    # the lookup does not raise AttributeError (Gentoo's OpenRC default path).
    hostname_conf_fn = "/etc/conf.d/hostname"
    init_cmd = [''] # init scripts
def __init__(self, name, cfg, paths):
distros.Distro.__init__(self, name, cfg, paths)
# This will be used to restrict certain
# calls from repeatly happening (when they
# should only happen say once per instance...)
self._runner = helpers.Runners(paths)
self.osfamily = 'gentoo'
# Fix sshd restarts
cfg['ssh_svcname'] = '/etc/init.d/sshd'
def apply_locale(self, locale, out_fn=None):
if not out_fn:
out_fn = self.locale_conf_fn
util.subp(['locale-gen', '-G', locale], capture=False)
# "" provides trailing newline during join
lines = [
util.make_header(),
'LANG="%s"' % (locale),
"",
]
util.write_file(out_fn, "\n".join(lines))
def install_packages(self, pkglist):
self.update_package_sources()
self.package_command('', pkgs=pkglist)
def _write_network(self, settings):
util.write_file(self.network_conf_fn, settings)
return ['all']
def _bring_up_interface(self, device_name):
cmd = ['/etc/init.d/net.%s' % device_name, 'restart']
LOG.debug("Attempting to run bring up interface %s using command %s",
device_name, cmd)
try:
(_out, err) = util.subp(cmd)
if len(err):
LOG.warn("Running %s resulted in stderr output: %s", cmd, err)
return True
except util.ProcessExecutionError:
util.logexc(LOG, "Running interface command %s failed", cmd)
return False
def _bring_up_interfaces(self, device_names):
use_all = False
for d in device_names:
if d == 'all':
use_all = True
if use_all:
# Grab device names from init scripts
cmd = ['ls', '/etc/init.d/net.*']
try:
(_out, err) = util.subp(cmd)
if len(err):
LOG.warn("Running %s resulted in stderr output: %s", cmd,
err)
except util.ProcessExecutionError:
util.logexc(LOG, "Running interface command %s failed", cmd)
return False
devices = [x.split('.')[2] for x in _out.split(' ')]
return distros.Distro._bring_up_interfaces(self, devices)
else:
return distros.Distro._bring_up_interfaces(self, device_names)
def _write_hostname(self, your_hostname, out_fn):
conf = None
try:
# Try to update the previous one
# so lets see if we can read it first.
conf = self._read_hostname_conf(out_fn)
except IOError:
pass
if not conf:
conf = HostnameConf('')
conf.set_hostname(your_hostname)
util.write_file(out_fn, conf, 0o644)
def _read_system_hostname(self):
sys_hostname = self._read_hostname(self.hostname_conf_fn)
return (self.hostname_conf_fn, sys_hostname)
def _read_hostname_conf(self, filename):
conf = HostnameConf(util.load_file(filename))
conf.parse()
return conf
def _read_hostname(self, filename, default=None):
hostname = None
try:
conf = self._read_hostname_conf(filename)
hostname = conf.hostname
except IOError:
pass
if not hostname:
return default
return hostname
def set_timezone(self, tz):
distros.set_etc_timezone(tz=tz, tz_file=self._find_tz_file(tz))
def package_command(self, command, args=None, pkgs=None):
if pkgs is None:
pkgs = []
cmd = ['emerge']
# Redirect output
cmd.append("--quiet")
if args and isinstance(args, str):
cmd.append(args)
elif args and isinstance(args, list):
cmd.extend(args)
if command:
cmd.append(command)
pkglist = util.expand_package_list('%s-%s', pkgs)
cmd.extend(pkglist)
# Allow the output of this to flow outwards (ie not be captured)
util.subp(cmd, capture=False)
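    # For example (sketch), package_command('', pkgs=['app-editors/vim'])
    # runs: emerge --quiet app-editors/vim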
def update_package_sources(self):
self._runner.run("update-sources", self.package_command,
["-u", "world"], freq=PER_INSTANCE)
| gpl-3.0 | 3,847,965,385,645,191,000 | -4,404,135,249,835,356,000 | 33.175 | 78 | 0.585955 | false |
dcroc16/skunk_works | google_appengine/lib/django-1.3/django/contrib/webdesign/lorem_ipsum.py | 439 | 4872 | """
Utility functions for generating "lorem ipsum" Latin text.
"""
import random
COMMON_P = 'Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.'
WORDS = ('exercitationem', 'perferendis', 'perspiciatis', 'laborum', 'eveniet',
'sunt', 'iure', 'nam', 'nobis', 'eum', 'cum', 'officiis', 'excepturi',
'odio', 'consectetur', 'quasi', 'aut', 'quisquam', 'vel', 'eligendi',
'itaque', 'non', 'odit', 'tempore', 'quaerat', 'dignissimos',
'facilis', 'neque', 'nihil', 'expedita', 'vitae', 'vero', 'ipsum',
'nisi', 'animi', 'cumque', 'pariatur', 'velit', 'modi', 'natus',
'iusto', 'eaque', 'sequi', 'illo', 'sed', 'ex', 'et', 'voluptatibus',
'tempora', 'veritatis', 'ratione', 'assumenda', 'incidunt', 'nostrum',
'placeat', 'aliquid', 'fuga', 'provident', 'praesentium', 'rem',
'necessitatibus', 'suscipit', 'adipisci', 'quidem', 'possimus',
'voluptas', 'debitis', 'sint', 'accusantium', 'unde', 'sapiente',
'voluptate', 'qui', 'aspernatur', 'laudantium', 'soluta', 'amet',
'quo', 'aliquam', 'saepe', 'culpa', 'libero', 'ipsa', 'dicta',
'reiciendis', 'nesciunt', 'doloribus', 'autem', 'impedit', 'minima',
'maiores', 'repudiandae', 'ipsam', 'obcaecati', 'ullam', 'enim',
'totam', 'delectus', 'ducimus', 'quis', 'voluptates', 'dolores',
'molestiae', 'harum', 'dolorem', 'quia', 'voluptatem', 'molestias',
'magni', 'distinctio', 'omnis', 'illum', 'dolorum', 'voluptatum', 'ea',
'quas', 'quam', 'corporis', 'quae', 'blanditiis', 'atque', 'deserunt',
'laboriosam', 'earum', 'consequuntur', 'hic', 'cupiditate',
'quibusdam', 'accusamus', 'ut', 'rerum', 'error', 'minus', 'eius',
'ab', 'ad', 'nemo', 'fugit', 'officia', 'at', 'in', 'id', 'quos',
'reprehenderit', 'numquam', 'iste', 'fugiat', 'sit', 'inventore',
'beatae', 'repellendus', 'magnam', 'recusandae', 'quod', 'explicabo',
'doloremque', 'aperiam', 'consequatur', 'asperiores', 'commodi',
'optio', 'dolor', 'labore', 'temporibus', 'repellat', 'veniam',
'architecto', 'est', 'esse', 'mollitia', 'nulla', 'a', 'similique',
'eos', 'alias', 'dolore', 'tenetur', 'deleniti', 'porro', 'facere',
'maxime', 'corrupti')
COMMON_WORDS = ('lorem', 'ipsum', 'dolor', 'sit', 'amet', 'consectetur',
'adipisicing', 'elit', 'sed', 'do', 'eiusmod', 'tempor', 'incididunt',
'ut', 'labore', 'et', 'dolore', 'magna', 'aliqua')
def sentence():
"""
Returns a randomly generated sentence of lorem ipsum text.
The first word is capitalized, and the sentence ends in either a period or
question mark. Commas are added at random.
"""
# Determine the number of comma-separated sections and number of words in
# each section for this sentence.
sections = [u' '.join(random.sample(WORDS, random.randint(3, 12))) for i in range(random.randint(1, 5))]
s = u', '.join(sections)
# Convert to sentence case and add end punctuation.
return u'%s%s%s' % (s[0].upper(), s[1:], random.choice('?.'))
def paragraph():
"""
Returns a randomly generated paragraph of lorem ipsum text.
The paragraph consists of between 1 and 4 sentences, inclusive.
"""
return u' '.join([sentence() for i in range(random.randint(1, 4))])
def paragraphs(count, common=True):
"""
Returns a list of paragraphs as returned by paragraph().
If `common` is True, then the first paragraph will be the standard
'lorem ipsum' paragraph. Otherwise, the first paragraph will be random
Latin text. Either way, subsequent paragraphs will be random Latin text.
"""
paras = []
for i in range(count):
if common and i == 0:
paras.append(COMMON_P)
else:
paras.append(paragraph())
return paras
def words(count, common=True):
"""
Returns a string of `count` lorem ipsum words separated by a single space.
If `common` is True, then the first 19 words will be the standard
'lorem ipsum' words. Otherwise, all words will be selected randomly.
"""
if common:
word_list = list(COMMON_WORDS)
else:
word_list = []
c = len(word_list)
if count > c:
count -= c
while count > 0:
c = min(count, len(WORDS))
count -= c
word_list += random.sample(WORDS, c)
else:
word_list = word_list[:count]
return u' '.join(word_list)
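# Example: with common=True the first words are deterministic, so
#   words(5) == u'lorem ipsum dolor sit amet'
# while words(5, common=False) yields five random Latin words.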
| mit | 6,109,408,474,068,497,000 | 4,879,852,515,442,423,000 | 47.237624 | 459 | 0.609401 | false |
SA313161/ardupilot | mk/PX4/Tools/genmsg/test/test_genmsg_gentools.py | 215 | 9526 | #!/usr/bin/env python
# Software License Agreement (BSD License)
#
# Copyright (c) 2008, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import os
import sys
TEST_CTX = 'rosgraph_msgs'
def get_test_dir():
return os.path.abspath(os.path.join(os.path.dirname(__file__), 'md5tests'))
def get_test_msg_dir():
return os.path.abspath(os.path.join(os.path.dirname(__file__), 'files'))
def get_search_path():
test_dir = get_test_msg_dir()
search_path = {}
for pkg in ['std_msgs', 'rosgraph_msgs', 'test_ros', 'geometry_msgs']:
search_path[pkg] = [ os.path.join(test_dir, pkg, 'msg') ]
return search_path
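# get_search_path() yields, e.g. (paths depend on the checkout location):
#   {'std_msgs': ['<test>/files/std_msgs/msg'], 'rosgraph_msgs': [...], ...}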
def _load_md5_tests(dir_name):
test_dir = os.path.join(get_test_dir(), dir_name)
tests = {}
for f in os.listdir(test_dir):
path = os.path.join(test_dir, f)
if not f.endswith('.txt'):
continue
name = f[:-4]
while name and name[-1].isdigit():
name = name[:-1]
assert bool(name)
if name in tests:
tests[name].append(path)
else:
tests[name] = [path]
return tests
def _compute_md5(msg_context, f):
from genmsg import load_depends, compute_md5
from genmsg.msg_loader import load_msg_from_string
text = open(f, 'r').read()
short_name = os.path.basename(f)[:-len('.msg')]
full_name = "%s/%s"%(TEST_CTX, short_name)
spec = load_msg_from_string(msg_context, text, full_name)
search_path = get_search_path()
load_depends(msg_context, spec, search_path)
return compute_md5(msg_context, spec)
def _compute_md5_text(msg_context, f):
from genmsg import compute_md5_text, load_depends
from genmsg.msg_loader import load_msg_from_string
text = open(f, 'r').read()
short_name = os.path.basename(f)[:-len('.msg')]
full_name = "%s/%s"%(TEST_CTX, short_name)
spec = load_msg_from_string(msg_context, text, full_name)
search_path = get_search_path()
load_depends(msg_context, spec, search_path)
return compute_md5_text(msg_context, spec)
def test_compute_md5_text():
from genmsg import MsgContext
msg_context = MsgContext.create_default()
# this test is just verifying that the md5sum is what it was for cturtle->electric
Header_md5 = "2176decaecbce78abc3b96ef049fabed"
rg_msg_dir = os.path.join(get_test_msg_dir(), TEST_CTX, 'msg')
clock_msg = os.path.join(rg_msg_dir, 'Clock.msg')
# a bit gory, but go ahead and regression test these important messages
assert "time clock" == _compute_md5_text(msg_context, clock_msg)
log_msg = os.path.join(rg_msg_dir, 'Log.msg')
assert "byte DEBUG=1\nbyte INFO=2\nbyte WARN=4\nbyte ERROR=8\nbyte FATAL=16\n%s header\nbyte level\nstring name\nstring msg\nstring file\nstring function\nuint32 line\nstring[] topics"%Header_md5 == _compute_md5_text(msg_context, log_msg)
tests = _load_md5_tests('md5text')
# text file #1 is the reference
for k, files in tests.items():
print("running tests", k)
ref_file = [f for f in files if f.endswith('%s1.txt'%k)]
if not ref_file:
assert False, "failed to load %s"%k
ref_file = ref_file[0]
ref_text = open(ref_file, 'r').read().strip()
print("KEY", k)
files = [f for f in files if not f.endswith('%s1.txt'%k)]
for f in files[1:]:
f_text = _compute_md5_text(msg_context, f)
assert ref_text == f_text, "failed on %s\n%s\n%s: \n[%s]\nvs.\n[%s]\n"%(k, ref_file, f, ref_text, f_text)
def test_md5_equals():
from genmsg import MsgContext
msg_context = MsgContext.create_default()
search_path = get_search_path()
tests = _load_md5_tests('same')
for k, files in tests.items():
print("running tests", k)
md5sum = _compute_md5(msg_context, files[0])
for f in files[1:]:
assert md5sum == _compute_md5(msg_context, f), "failed on %s: \n[%s]\nvs.\n[%s]\n"%(k, _compute_md5_text(msg_context, files[0]), _compute_md5_text(msg_context, f))
def test_md5_not_equals():
from genmsg import MsgContext
msg_context = MsgContext.create_default()
tests = _load_md5_tests('different')
for k, files in tests.items():
print("running tests", k)
md5s = set()
md6md5sum = _compute_md5(msg_context, files[0])
for f in files:
md5s.add(_compute_md5(msg_context, f))
# each md5 should be unique
assert len(md5s) == len(files)
twist_with_covariance_stamped_full_text = """# This represents an estimate twist with reference coordinate frame and timestamp.
Header header
TwistWithCovariance twist
================================================================================
MSG: std_msgs/Header
# Standard metadata for higher-level stamped data types.
# This is generally used to communicate timestamped data
# in a particular coordinate frame.
#
# sequence ID: consecutively increasing ID
uint32 seq
#Two-integer timestamp that is expressed as:
# * stamp.secs: seconds (stamp_secs) since epoch
# * stamp.nsecs: nanoseconds since stamp_secs
# time-handling sugar is provided by the client library
time stamp
#Frame this data is associated with
# 0: no frame
# 1: global frame
string frame_id
================================================================================
MSG: geometry_msgs/TwistWithCovariance
# This expresses velocity in free space with uncertianty.
Twist twist
# Row-major representation of the 6x6 covariance matrix
# The orientation parameters use a fixed-axis representation.
# In order, the parameters are:
# (x, y, z, rotation about X axis, rotation about Y axis, rotation about Z axis)
float64[36] covariance
================================================================================
MSG: geometry_msgs/Twist
# This expresses velocity in free space broken into it's linear and angular parts.
Vector3 linear
Vector3 angular
================================================================================
MSG: geometry_msgs/Vector3
# This represents a vector in free space.
float64 x
float64 y
float64 z"""
log_full_text = """##
## Severity level constants
##
byte DEBUG=1 #debug level
byte INFO=2 #general level
byte WARN=4 #warning level
byte ERROR=8 #error level
byte FATAL=16 #fatal/critical level
##
## Fields
##
Header header
byte level
string name # name of the node
string msg # message
string file # file the message came from
string function # function the message came from
uint32 line # line the message came from
string[] topics # topic names that the node publishes
================================================================================
MSG: std_msgs/Header
# Standard metadata for higher-level stamped data types.
# This is generally used to communicate timestamped data
# in a particular coordinate frame.
#
# sequence ID: consecutively increasing ID
uint32 seq
#Two-integer timestamp that is expressed as:
# * stamp.secs: seconds (stamp_secs) since epoch
# * stamp.nsecs: nanoseconds since stamp_secs
# time-handling sugar is provided by the client library
time stamp
#Frame this data is associated with
# 0: no frame
# 1: global frame
string frame_id
"""
def test_compute_full_text():
from genmsg import MsgContext, compute_full_text, load_msg_by_type, load_depends
msg_context = MsgContext.create_default()
search_path = get_search_path()
# regression test against values used for cturtle-electric
spec = load_msg_by_type(msg_context, 'rosgraph_msgs/Log', search_path)
load_depends(msg_context, spec, search_path)
val = compute_full_text(msg_context, spec)
assert val == log_full_text, "[%s][%s]"%(val, log_full_text)
spec = load_msg_by_type(msg_context, 'geometry_msgs/TwistWithCovarianceStamped', search_path)
load_depends(msg_context, spec, search_path)
val = compute_full_text(msg_context, spec)
assert val == twist_with_covariance_stamped_full_text, "[%s][%s]"%(val, twist_with_covariance_stamped_full_text)
| gpl-3.0 | 6,340,684,687,565,678,000 | -8,220,336,553,448,107,000 | 37.104 | 242 | 0.660403 | false |
samkariu/voicex | voicex/migrations/0001_initial.py | 2 | 7576 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Post'
db.create_table('posts', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('phone', self.gf('django.db.models.fields.CharField')(max_length=20)),
('post', self.gf('django.db.models.fields.TextField')()),
('zip_code', self.gf('django.db.models.fields.CharField')(max_length=20)),
('reply_to', self.gf('django.db.models.fields.related.ForeignKey')(related_name='replies', null=True, to=orm['voicex.Post'])),
('public', self.gf('django.db.models.fields.BooleanField')(default=False)),
('timestamp', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
))
db.send_create_signal('voicex', ['Post'])
# Adding model 'Subscriber'
db.create_table('subscribers', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('phone', self.gf('django.db.models.fields.CharField')(max_length=20)),
('account', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['voicex.Account'])),
('timestamp', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
))
db.send_create_signal('voicex', ['Subscriber'])
# Adding unique constraint on 'Subscriber', fields ['phone', 'account']
db.create_unique('subscribers', ['phone', 'account_id'])
# Adding model 'Account'
db.create_table('accounts', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=20)),
('phone', self.gf('django.db.models.fields.CharField')(unique=True, max_length=20)),
('password', self.gf('django.db.models.fields.CharField')(max_length=20)),
('active', self.gf('django.db.models.fields.BooleanField')(default=True)),
))
db.send_create_signal('voicex', ['Account'])
# Adding model 'Delegate'
db.create_table('delegates', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('phone', self.gf('django.db.models.fields.CharField')(unique=True, max_length=20)),
('account', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['voicex.Account'])),
))
db.send_create_signal('voicex', ['Delegate'])
# Adding model 'Following'
db.create_table('following', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('phone', self.gf('django.db.models.fields.CharField')(max_length=20)),
('tag', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['voicex.Tag'])),
('timestamp', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
))
db.send_create_signal('voicex', ['Following'])
# Adding unique constraint on 'Following', fields ['phone', 'tag']
db.create_unique('following', ['phone', 'tag_id'])
# Adding model 'Tag'
db.create_table('tags', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=20)),
))
db.send_create_signal('voicex', ['Tag'])
def backwards(self, orm):
# Removing unique constraint on 'Following', fields ['phone', 'tag']
db.delete_unique('following', ['phone', 'tag_id'])
# Removing unique constraint on 'Subscriber', fields ['phone', 'account']
db.delete_unique('subscribers', ['phone', 'account_id'])
# Deleting model 'Post'
db.delete_table('posts')
# Deleting model 'Subscriber'
db.delete_table('subscribers')
# Deleting model 'Account'
db.delete_table('accounts')
# Deleting model 'Delegate'
db.delete_table('delegates')
# Deleting model 'Following'
db.delete_table('following')
# Deleting model 'Tag'
db.delete_table('tags')
models = {
'voicex.account': {
'Meta': {'object_name': 'Account', 'db_table': "'accounts'"},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '20'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'phone': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '20'})
},
'voicex.delegate': {
'Meta': {'object_name': 'Delegate', 'db_table': "'delegates'"},
'account': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['voicex.Account']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'phone': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '20'})
},
'voicex.following': {
'Meta': {'unique_together': "(('phone', 'tag'),)", 'object_name': 'Following', 'db_table': "'following'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['voicex.Tag']"}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'voicex.post': {
'Meta': {'object_name': 'Post', 'db_table': "'posts'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'post': ('django.db.models.fields.TextField', [], {}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'reply_to': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'replies'", 'null': 'True', 'to': "orm['voicex.Post']"}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'zip_code': ('django.db.models.fields.CharField', [], {'max_length': '20'})
},
'voicex.subscriber': {
'Meta': {'unique_together': "(('phone', 'account'),)", 'object_name': 'Subscriber', 'db_table': "'subscribers'"},
'account': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['voicex.Account']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'voicex.tag': {
'Meta': {'object_name': 'Tag', 'db_table': "'tags'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '20'})
}
}
    complete_apps = ['voicex']
| mit | -2,226,096,570,589,248,300 | -1,005,023,322,965,365,200 | 51.255172 | 150 | 0.564942 | false |
e-gob/plataforma-kioscos-autoatencion | scripts/ansible-play/.venv/lib/python2.7/site-packages/ansible/modules/cloud/univention/udm_user.py | 29 | 21233 | #!/usr/bin/python
# -*- coding: UTF-8 -*-
# Copyright (c) 2016, Adfinis SyGroup AG
# Tobias Rueetschi <tobias.ruetschi@adfinis-sygroup.ch>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: udm_user
version_added: "2.2"
author: "Tobias Rueetschi (@2-B)"
short_description: Manage posix users on a Univention Corporate Server
description:
    - "This module allows managing posix users on a Univention Corporate
       Server (UCS).
       It uses the Python API of the UCS to create a new object or edit it."
requirements:
- Python >= 2.6
options:
state:
required: false
default: "present"
choices: [ present, absent ]
description:
- Whether the user is present or not.
username:
required: true
description:
- User name
aliases: ['name']
firstname:
required: false
description:
- First name. Required if C(state=present).
lastname:
required: false
description:
- Last name. Required if C(state=present).
password:
required: false
default: None
description:
- Password. Required if C(state=present).
birthday:
required: false
default: None
description:
- Birthday
city:
required: false
default: None
description:
            - City of the user's business address.
country:
required: false
default: None
description:
            - Country of the user's business address.
department_number:
required: false
default: None
description:
            - Department number of the user's business address.
aliases: [ departmentNumber ]
description:
required: false
default: None
description:
- Description (not gecos)
display_name:
required: false
default: None
description:
- Display name (not gecos)
aliases: [ displayName ]
email:
required: false
default: ['']
description:
- A list of e-mail addresses.
employee_number:
required: false
default: None
description:
- Employee number
aliases: [ employeeNumber ]
employee_type:
required: false
default: None
description:
- Employee type
aliases: [ employeeType ]
gecos:
required: false
default: None
description:
- GECOS
groups:
required: false
default: []
description:
- "POSIX groups, the LDAP DNs of the groups will be found with the
LDAP filter for each group as $GROUP:
C((&(objectClass=posixGroup)(cn=$GROUP)))."
home_share:
required: false
default: None
description:
- "Home NFS share. Must be a LDAP DN, e.g.
C(cn=home,cn=shares,ou=school,dc=example,dc=com)."
aliases: [ homeShare ]
home_share_path:
required: false
default: None
description:
- Path to home NFS share, inside the homeShare.
aliases: [ homeSharePath ]
home_telephone_number:
required: false
default: []
description:
- List of private telephone numbers.
aliases: [ homeTelephoneNumber ]
homedrive:
required: false
default: None
description:
- Windows home drive, e.g. C("H:").
mail_alternative_address:
required: false
default: []
description:
- List of alternative e-mail addresses.
aliases: [ mailAlternativeAddress ]
mail_home_server:
required: false
default: None
description:
- FQDN of mail server
aliases: [ mailHomeServer ]
mail_primary_address:
required: false
default: None
description:
- Primary e-mail address
aliases: [ mailPrimaryAddress ]
mobile_telephone_number:
required: false
default: []
description:
- Mobile phone number
aliases: [ mobileTelephoneNumber ]
organisation:
required: false
default: None
description:
- Organisation
override_pw_history:
required: false
default: False
description:
- Override password history
aliases: [ overridePWHistory ]
override_pw_length:
required: false
default: False
description:
- Override password check
aliases: [ overridePWLength ]
pager_telephonenumber:
required: false
default: []
description:
- List of pager telephone numbers.
aliases: [ pagerTelephonenumber ]
phone:
required: false
default: []
description:
- List of telephone numbers.
postcode:
required: false
default: None
description:
            - Postal code of the user's business address.
primary_group:
required: false
default: cn=Domain Users,cn=groups,$LDAP_BASE_DN
description:
- Primary group. This must be the group LDAP DN.
aliases: [ primaryGroup ]
profilepath:
required: false
default: None
description:
- Windows profile directory
pwd_change_next_login:
required: false
default: None
choices: [ '0', '1' ]
description:
- Change password on next login.
aliases: [ pwdChangeNextLogin ]
room_number:
required: false
default: None
description:
            - Room number of the user's business address.
aliases: [ roomNumber ]
samba_privileges:
required: false
default: []
description:
- "Samba privilege, like allow printer administration, do domain
join."
aliases: [ sambaPrivileges ]
samba_user_workstations:
required: false
default: []
description:
            - Allow authentication only on these Microsoft Windows hosts.
aliases: [ sambaUserWorkstations ]
sambahome:
required: false
default: None
description:
- Windows home path, e.g. C('\\\\$FQDN\\$USERNAME').
scriptpath:
required: false
default: None
description:
- Windows logon script.
secretary:
required: false
default: []
description:
- A list of superiors as LDAP DNs.
serviceprovider:
required: false
default: ['']
description:
- Enable user for the following service providers.
shell:
required: false
default: '/bin/bash'
description:
- Login shell
street:
required: false
default: None
description:
            - Street of the user's business address.
title:
required: false
default: None
description:
- Title, e.g. C(Prof.).
unixhome:
required: false
default: '/home/$USERNAME'
description:
- Unix home directory
userexpiry:
required: false
default: Today + 1 year
description:
- Account expiry date, e.g. C(1999-12-31).
position:
required: false
default: ''
description:
- "Define the whole position of users object inside the LDAP tree,
e.g. C(cn=employee,cn=users,ou=school,dc=example,dc=com)."
update_password:
required: false
default: always
description:
- "C(always) will update passwords if they differ.
C(on_create) will only set the password for newly created users."
version_added: "2.3"
ou:
required: false
default: ''
description:
- "Organizational Unit inside the LDAP Base DN, e.g. C(school) for
LDAP OU C(ou=school,dc=example,dc=com)."
subpath:
required: false
default: 'cn=users'
description:
- "LDAP subpath inside the organizational unit, e.g.
C(cn=teachers,cn=users) for LDAP container
C(cn=teachers,cn=users,dc=example,dc=com)."
'''
EXAMPLES = '''
# Create a user on a UCS
- udm_user:
name: FooBar
password: secure_password
firstname: Foo
lastname: Bar
# Create a user with the DN
# C(uid=foo,cn=teachers,cn=users,ou=school,dc=school,dc=example,dc=com)
- udm_user:
name: foo
password: secure_password
firstname: Foo
lastname: Bar
ou: school
subpath: 'cn=teachers,cn=users'
# or define the position
- udm_user:
name: foo
password: secure_password
firstname: Foo
lastname: Bar
position: 'cn=teachers,cn=users,ou=school,dc=school,dc=example,dc=com'
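# Remove a user
- udm_user:
    name: foo
    state: absent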
'''
RETURN = '''# '''
import crypt
from datetime import date, timedelta
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.univention_umc import (
umc_module_for_add,
umc_module_for_edit,
ldap_search,
base_dn,
)
def main():
expiry = date.strftime(date.today() + timedelta(days=365), "%Y-%m-%d")
module = AnsibleModule(
argument_spec = dict(
birthday = dict(default=None,
type='str'),
city = dict(default=None,
type='str'),
country = dict(default=None,
type='str'),
department_number = dict(default=None,
type='str',
aliases=['departmentNumber']),
description = dict(default=None,
type='str'),
display_name = dict(default=None,
type='str',
aliases=['displayName']),
email = dict(default=[''],
type='list'),
employee_number = dict(default=None,
type='str',
aliases=['employeeNumber']),
employee_type = dict(default=None,
type='str',
aliases=['employeeType']),
firstname = dict(default=None,
type='str'),
gecos = dict(default=None,
type='str'),
groups = dict(default=[],
type='list'),
home_share = dict(default=None,
type='str',
aliases=['homeShare']),
home_share_path = dict(default=None,
type='str',
aliases=['homeSharePath']),
home_telephone_number = dict(default=[],
type='list',
aliases=['homeTelephoneNumber']),
homedrive = dict(default=None,
type='str'),
lastname = dict(default=None,
type='str'),
mail_alternative_address= dict(default=[],
type='list',
aliases=['mailAlternativeAddress']),
mail_home_server = dict(default=None,
type='str',
aliases=['mailHomeServer']),
mail_primary_address = dict(default=None,
type='str',
aliases=['mailPrimaryAddress']),
mobile_telephone_number = dict(default=[],
type='list',
aliases=['mobileTelephoneNumber']),
organisation = dict(default=None,
type='str'),
overridePWHistory = dict(default=False,
type='bool',
aliases=['override_pw_history']),
overridePWLength = dict(default=False,
type='bool',
aliases=['override_pw_length']),
pager_telephonenumber = dict(default=[],
type='list',
aliases=['pagerTelephonenumber']),
password = dict(default=None,
type='str',
no_log=True),
phone = dict(default=[],
type='list'),
postcode = dict(default=None,
type='str'),
primary_group = dict(default=None,
type='str',
aliases=['primaryGroup']),
profilepath = dict(default=None,
type='str'),
pwd_change_next_login = dict(default=None,
type='str',
choices=['0', '1'],
aliases=['pwdChangeNextLogin']),
room_number = dict(default=None,
type='str',
aliases=['roomNumber']),
samba_privileges = dict(default=[],
type='list',
aliases=['sambaPrivileges']),
samba_user_workstations = dict(default=[],
type='list',
aliases=['sambaUserWorkstations']),
sambahome = dict(default=None,
type='str'),
scriptpath = dict(default=None,
type='str'),
secretary = dict(default=[],
type='list'),
serviceprovider = dict(default=[''],
type='list'),
shell = dict(default='/bin/bash',
type='str'),
street = dict(default=None,
type='str'),
title = dict(default=None,
type='str'),
unixhome = dict(default=None,
type='str'),
userexpiry = dict(default=expiry,
type='str'),
username = dict(required=True,
aliases=['name'],
type='str'),
position = dict(default='',
type='str'),
update_password = dict(default='always',
choices=['always', 'on_create'],
type='str'),
ou = dict(default='',
type='str'),
subpath = dict(default='cn=users',
type='str'),
state = dict(default='present',
choices=['present', 'absent'],
type='str')
),
supports_check_mode=True,
required_if = ([
('state', 'present', ['firstname', 'lastname', 'password'])
])
)
username = module.params['username']
position = module.params['position']
ou = module.params['ou']
subpath = module.params['subpath']
state = module.params['state']
    changed = False
    diff = None
users = list(ldap_search(
'(&(objectClass=posixAccount)(uid={}))'.format(username),
attr=['uid']
))
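    # Determine the LDAP container for the user: an explicit 'position'
    # wins, otherwise it is assembled from subpath, ou and the base DN.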
if position != '':
container = position
else:
if ou != '':
ou = 'ou={},'.format(ou)
if subpath != '':
subpath = '{},'.format(subpath)
container = '{}{}{}'.format(subpath, ou, base_dn())
user_dn = 'uid={},{}'.format(username, container)
exists = bool(len(users))
if state == 'present':
try:
if not exists:
obj = umc_module_for_add('users/user', container)
else:
obj = umc_module_for_edit('users/user', user_dn)
            # The canonical option name is 'display_name'; mirror its value
            # under the UDM property name 'displayName' (defaulting to
            # "<firstname> <lastname>") so the copy loop below picks it up.
            if module.params.get('display_name') is None:
                module.params['display_name'] = '{} {}'.format(
                    module.params['firstname'],
                    module.params['lastname']
                )
            module.params['displayName'] = module.params['display_name']
if module.params['unixhome'] is None:
module.params['unixhome'] = '/home/{}'.format(
module.params['username']
)
for k in obj.keys():
if (k != 'password' and
k != 'groups' and
k != 'overridePWHistory' and
k in module.params and
module.params[k] is not None):
obj[k] = module.params[k]
# handle some special values
obj['e-mail'] = module.params['email']
password = module.params['password']
if obj['password'] is None:
obj['password'] = password
if module.params['update_password'] == 'always':
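                # The stored password looks like "{crypt}<hash>"; re-crypting
                # the supplied password with the stored hash as the salt and
                # comparing tells whether the password actually changed.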
old_password = obj['password'].split('}', 2)[1]
if crypt.crypt(password, old_password) != old_password:
obj['overridePWHistory'] = module.params['overridePWHistory']
obj['overridePWLength'] = module.params['overridePWLength']
obj['password'] = password
diff = obj.diff()
if exists:
for k in obj.keys():
if obj.hasChanged(k):
changed = True
else:
changed = True
if not module.check_mode:
if not exists:
obj.create()
elif changed:
obj.modify()
        except Exception:
module.fail_json(
msg="Creating/editing user {} in {} failed".format(
username,
container
)
)
try:
groups = module.params['groups']
if groups:
                group_filter = '(&(objectClass=posixGroup)(|(cn={})))'.format(
                    ')(cn='.join(groups)
                )
                group_dns = list(ldap_search(group_filter, attr=['dn']))
for dn in group_dns:
grp = umc_module_for_edit('groups/group', dn[0])
if user_dn not in grp['users']:
grp['users'].append(user_dn)
if not module.check_mode:
grp.modify()
changed = True
        except Exception:
module.fail_json(
msg="Adding groups to user {} failed".format(username)
)
if state == 'absent' and exists:
try:
obj = umc_module_for_edit('users/user', user_dn)
if not module.check_mode:
obj.remove()
changed = True
        except Exception:
module.fail_json(
msg="Removing user {} failed".format(username)
)
module.exit_json(
changed=changed,
username=username,
diff=diff,
container=container
)
if __name__ == '__main__':
main()
| bsd-3-clause | -920,928,163,814,296,200 | 8,279,936,363,728,243,000 | 34.625839 | 92 | 0.448453 | false |
tobyclemson/terraform-aws-vpc-auto-peering | lambdas/auto_peering/auto_peering/vpc_peering_routes.py | 1 | 4260 | from botocore.exceptions import ClientError
class VPCPeeringRoutes(object):
def __init__(self, vpc1, vpc2,
vpc_peering_relationship,
ec2_gateways, logger):
self.vpc1 = vpc1
self.vpc2 = vpc2
self.vpc_peering_relationship = vpc_peering_relationship
self.ec2_gateways = ec2_gateways
self.logger = logger
def __private_route_tables_for(self, vpc):
ec2_gateway = self.ec2_gateways.get(vpc.region)
ec2_resource = ec2_gateway.resource
return ec2_resource.route_tables.filter(
Filters=[
{'Name': 'vpc-id', 'Values': [vpc.id]},
{'Name': 'tag:Tier', 'Values': ['private']}])
def __create_routes_in(self, route_tables, destination_vpc,
vpc_peering_connection):
for route_table in route_tables:
try:
route_table.create_route(
DestinationCidrBlock=destination_vpc.cidr_block,
VpcPeeringConnectionId=vpc_peering_connection.id)
self.logger.debug(
"Route creation succeeded for '%s'. Continuing.",
route_table.id)
except ClientError as error:
self.logger.warn(
"Route creation failed for '%s'. Error was: %s",
route_table.id, error)
def __create_routes_for(self, source_vpc, destination_vpc,
vpc_peering_connection):
self.logger.debug(
"Adding routes to private subnets in: '%s' pointing at '%s:%s:%s'.",
source_vpc.id, destination_vpc.id, destination_vpc.cidr_block,
vpc_peering_connection.id)
self.__create_routes_in(
self.__private_route_tables_for(source_vpc),
destination_vpc, vpc_peering_connection)
def __delete_routes_in(self, route_tables, source_vpc, destination_vpc):
for route_table in route_tables:
try:
ec2_gateway = self.ec2_gateways.get(source_vpc.region)
ec2_resource = ec2_gateway.resource
route = ec2_resource.Route(
route_table.id, destination_vpc.cidr_block)
route.delete()
self.logger.debug(
"Route deletion succeeded for '%s'. Continuing.",
route_table.id)
except ClientError as error:
self.logger.warn(
"Route deletion failed for '%s'. Error was: %s",
route_table.id, error)
def __delete_routes_for(self, source_vpc, destination_vpc,
vpc_peering_connection):
self.logger.debug(
"Removing routes from private subnets in: '%s' pointing at "
"'%s:%s:%s'.",
source_vpc.id, destination_vpc.id, destination_vpc.cidr_block,
vpc_peering_connection.id)
self.__delete_routes_in(
self.__private_route_tables_for(source_vpc),
source_vpc,
destination_vpc)
def provision(self):
vpc_peering_connection = self.vpc_peering_relationship.fetch()
self.__create_routes_for(self.vpc1, self.vpc2, vpc_peering_connection)
self.__create_routes_for(self.vpc2, self.vpc1, vpc_peering_connection)
def destroy(self):
vpc_peering_connection = self.vpc_peering_relationship.fetch()
self.__delete_routes_for(self.vpc1, self.vpc2, vpc_peering_connection)
self.__delete_routes_for(self.vpc2, self.vpc1, vpc_peering_connection)
def perform(self, action):
getattr(self, action)()
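    # Equality and hashing treat the VPC pair as unordered (hence the
    # frozenset): routes for (vpc1, vpc2) and (vpc2, vpc1) compare equal.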
def _to_dict(self):
return {
'vpcs': frozenset([self.vpc1, self.vpc2]),
'vpc_peering_relationship': self.vpc_peering_relationship,
}
def __eq__(self, other):
if isinstance(other, self.__class__):
return self._to_dict() == other._to_dict()
return NotImplemented
def __ne__(self, other):
if isinstance(other, self.__class__):
return not self.__eq__(other)
return NotImplemented
def __hash__(self):
return hash(tuple(sorted(self._to_dict().items())))
| mit | 600,309,615,924,749,700 | 2,234,873,940,074,196,000 | 37.727273 | 80 | 0.56385 | false |
PopCap/GameIdea | Engine/Source/ThirdParty/HTML5/emsdk/Win64/python/2.7.5.3_64bit/Lib/site-packages/win32com/makegw/makegw.py | 18 | 16995 | """Utility functions for writing out gateway C++ files
This module will generate a C++/Python binding for a specific COM
interface.
At this stage, no command line interface exists. You must start Python,
import this module, change to the directory where the generated code should
be written, and run the public function.
This module is capable of generating both 'Interfaces' (ie, Python
client side support for the interface) and 'Gateways' (ie, Python
server side support for the interface). Many COM interfaces are useful
both as Client and Server. Other interfaces, however, really only make
sense to implement one side or the other. For example, it would be pointless
for Python to implement Server side for 'IRunningObjectTable', unless we were
implementing core COM for an operating system in Python (hey - now there's an idea!)
Most COM interface code is totally boiler-plate - it consists of
converting arguments, dispatching the call to Python, and processing
any result values.
This module automates the generation of such code. It has the ability to
parse a .H file generated by the MIDL tool (ie, almost all COM .h files)
and build almost totally complete C++ code.
The module understands some of the well known data types, and how to
convert them. There are only a couple of places where hand-editing is
necessary, as detailed below:
unsupported types -- If a type is not known, the generator will
pretty much ignore it, but write a comment to the generated code. You
may want to add custom support for this type. In some cases, C++ compile errors
will result. These are intentional - generating code to remove these errors would
imply a false sense of security that the generator has done the right thing.
other return policies -- By default, Python never sees the return SCODE from
a COM function. The interface usually returns None if OK, else a COM exception
if "FAILED(scode)" is TRUE. You may need to change this if:
* EXCEPINFO is passed to the COM function. This is not detected and handled
* For some reason Python should always see the result SCODE, even if it
did fail or succeed. For example, some functions return a BOOLEAN result
in the SCODE, meaning Python should always see it.
* FAILED(scode) for the interface still has valid data to return (by default,
    the code generated does not process the return values, and raises an
    exception to Python/COM).
"""
import re
import makegwparse
def make_framework_support(header_file_name, interface_name, bMakeInterface = 1, bMakeGateway = 1):
"""Generate C++ code for a Python Interface and Gateway
header_file_name -- The full path to the .H file which defines the interface.
interface_name -- The name of the interface to search for, and to generate.
bMakeInterface = 1 -- Should interface (ie, client) support be generated.
    bMakeGateway = 1 -- Should gateway (ie, server) support be generated.
    This method will write a .cpp and .h file into the current directory
    (using the name of the interface to build the file names).
"""
fin=open(header_file_name)
try:
interface = makegwparse.parse_interface_info(interface_name, fin)
finally:
fin.close()
if bMakeInterface and bMakeGateway:
desc = "Interface and Gateway"
elif bMakeInterface and not bMakeGateway:
desc = "Interface"
else:
desc = "Gateway"
if interface.name[:5]=="IEnum": # IEnum - use my really simple template-based one
import win32com.makegw.makegwenum
ifc_cpp_writer = win32com.makegw.makegwenum._write_enumifc_cpp
gw_cpp_writer = win32com.makegw.makegwenum._write_enumgw_cpp
else: # Use my harder working ones.
ifc_cpp_writer = _write_ifc_cpp
gw_cpp_writer = _write_gw_cpp
fout=open("Py%s.cpp" % interface.name, "w")
try:
fout.write(\
'''\
// This file implements the %s %s for Python.
// Generated by makegw.py
#include "shell_pch.h"
''' % (interface.name, desc))
# if bMakeGateway:
# fout.write('#include "PythonCOMServer.h"\n')
# if interface.base not in ["IUnknown", "IDispatch"]:
# fout.write('#include "Py%s.h"\n' % interface.base)
fout.write('#include "Py%s.h"\n\n// @doc - This file contains autoduck documentation\n' % interface.name)
if bMakeInterface: ifc_cpp_writer(fout, interface)
if bMakeGateway: gw_cpp_writer(fout, interface)
finally:
fout.close()
fout=open("Py%s.h" % interface.name, "w")
try:
fout.write(\
'''\
// This file declares the %s %s for Python.
// Generated by makegw.py
''' % (interface.name, desc))
if bMakeInterface: _write_ifc_h(fout, interface)
if bMakeGateway: _write_gw_h(fout, interface)
finally:
fout.close()
###########################################################################
#
# INTERNAL FUNCTIONS
#
#
def _write_ifc_h(f, interface):
f.write(\
'''\
// ---------------------------------------------------
//
// Interface Declaration
class Py%s : public Py%s
{
public:
MAKE_PYCOM_CTOR(Py%s);
static %s *GetI(PyObject *self);
static PyComTypeObject type;
// The Python methods
''' % (interface.name, interface.base, interface.name, interface.name))
for method in interface.methods:
f.write('\tstatic PyObject *%s(PyObject *self, PyObject *args);\n' % method.name)
f.write(\
'''\
protected:
Py%s(IUnknown *pdisp);
~Py%s();
};
''' % (interface.name, interface.name))
def _write_ifc_cpp(f, interface):
name = interface.name
f.write(\
'''\
// ---------------------------------------------------
//
// Interface Implementation
Py%(name)s::Py%(name)s(IUnknown *pdisp):
Py%(base)s(pdisp)
{
ob_type = &type;
}
Py%(name)s::~Py%(name)s()
{
}
/* static */ %(name)s *Py%(name)s::GetI(PyObject *self)
{
return (%(name)s *)Py%(base)s::GetI(self);
}
''' % (interface.__dict__))
ptr = re.sub('[a-z]', '', interface.name)
strdict = {'interfacename':interface.name, 'ptr': ptr}
for method in interface.methods:
strdict['method'] = method.name
f.write(\
'''\
// @pymethod |Py%(interfacename)s|%(method)s|Description of %(method)s.
PyObject *Py%(interfacename)s::%(method)s(PyObject *self, PyObject *args)
{
%(interfacename)s *p%(ptr)s = GetI(self);
if ( p%(ptr)s == NULL )
return NULL;
''' % strdict)
argsParseTuple = argsCOM = formatChars = codePost = \
codePobjects = codeCobjects = cleanup = cleanup_gil = ""
needConversion = 0
# if method.name=="Stat": import win32dbg;win32dbg.brk()
for arg in method.args:
try:
argCvt = makegwparse.make_arg_converter(arg)
if arg.HasAttribute("in"):
val = argCvt.GetFormatChar()
if val:
f.write ('\t' + argCvt.GetAutoduckString() + "\n")
formatChars = formatChars + val
argsParseTuple = argsParseTuple + ", " + argCvt.GetParseTupleArg()
codePobjects = codePobjects + argCvt.DeclareParseArgTupleInputConverter()
codePost = codePost + argCvt.GetParsePostCode()
needConversion = needConversion or argCvt.NeedUSES_CONVERSION()
cleanup = cleanup + argCvt.GetInterfaceArgCleanup()
cleanup_gil = cleanup_gil + argCvt.GetInterfaceArgCleanupGIL()
comArgName, comArgDeclString = argCvt.GetInterfaceCppObjectInfo()
if comArgDeclString: # If we should declare a variable
codeCobjects = codeCobjects + "\t%s;\n" % (comArgDeclString)
argsCOM = argsCOM + ", " + comArgName
except makegwparse.error_not_supported, why:
f.write('// *** The input argument %s of type "%s" was not processed ***\n// Please check the conversion function is appropriate and exists!\n' % (arg.name, arg.raw_type))
f.write('\t%s %s;\n\tPyObject *ob%s;\n' % (arg.type, arg.name, arg.name))
f.write('\t// @pyparm <o Py%s>|%s||Description for %s\n' % (arg.type, arg.name, arg.name))
codePost = codePost + '\tif (bPythonIsHappy && !PyObject_As%s( ob%s, &%s )) bPythonIsHappy = FALSE;\n' % (arg.type, arg.name, arg.name)
formatChars = formatChars + "O"
argsParseTuple = argsParseTuple + ", &ob%s" % (arg.name)
argsCOM = argsCOM + ", " + arg.name
cleanup = cleanup + "\tPyObject_Free%s(%s);\n" % (arg.type, arg.name)
if needConversion: f.write("\tUSES_CONVERSION;\n")
f.write(codePobjects);
f.write(codeCobjects);
f.write('\tif ( !PyArg_ParseTuple(args, "%s:%s"%s) )\n\t\treturn NULL;\n' % (formatChars, method.name, argsParseTuple))
if codePost:
f.write('\tBOOL bPythonIsHappy = TRUE;\n')
f.write(codePost);
f.write('\tif (!bPythonIsHappy) return NULL;\n')
strdict['argsCOM'] = argsCOM[1:]
strdict['cleanup'] = cleanup
strdict['cleanup_gil'] = cleanup_gil
f.write(\
''' HRESULT hr;
PY_INTERFACE_PRECALL;
hr = p%(ptr)s->%(method)s(%(argsCOM)s );
%(cleanup)s
PY_INTERFACE_POSTCALL;
%(cleanup_gil)s
if ( FAILED(hr) )
return PyCom_BuildPyException(hr, p%(ptr)s, IID_%(interfacename)s );
''' % strdict)
codePre = codePost = formatChars = codeVarsPass = codeDecl = ""
for arg in method.args:
if not arg.HasAttribute("out"):
continue
try:
argCvt = makegwparse.make_arg_converter(arg)
formatChar = argCvt.GetFormatChar()
if formatChar:
formatChars = formatChars + formatChar
codePre = codePre + argCvt.GetBuildForInterfacePreCode()
codePost = codePost + argCvt.GetBuildForInterfacePostCode()
codeVarsPass = codeVarsPass + ", " + argCvt.GetBuildValueArg()
codeDecl = codeDecl + argCvt.DeclareParseArgTupleInputConverter()
except makegwparse.error_not_supported, why:
f.write('// *** The output argument %s of type "%s" was not processed ***\n// %s\n' % (arg.name, arg.raw_type, why))
continue
if formatChars:
f.write('%s\n%s\tPyObject *pyretval = Py_BuildValue("%s"%s);\n%s\treturn pyretval;' % (codeDecl, codePre, formatChars, codeVarsPass, codePost))
else:
f.write('\tPy_INCREF(Py_None);\n\treturn Py_None;\n')
f.write('\n}\n\n')
f.write ('// @object Py%s|Description of the interface\n' % (name))
f.write('static struct PyMethodDef Py%s_methods[] =\n{\n' % name)
for method in interface.methods:
f.write('\t{ "%s", Py%s::%s, 1 }, // @pymeth %s|Description of %s\n' % (method.name, interface.name, method.name, method.name, method.name))
interfacebase = interface.base
f.write('''\
{ NULL }
};
PyComTypeObject Py%(name)s::type("Py%(name)s",
&Py%(interfacebase)s::type,
sizeof(Py%(name)s),
Py%(name)s_methods,
GET_PYCOM_CTOR(Py%(name)s));
''' % locals())
def _write_gw_h(f, interface):
if interface.name[0] == "I":
gname = 'PyG' + interface.name[1:]
else:
gname = 'PyG' + interface.name
name = interface.name
if interface.base == "IUnknown" or interface.base == "IDispatch":
base_name = "PyGatewayBase"
else:
if interface.base[0] == "I":
base_name = 'PyG' + interface.base[1:]
else:
base_name = 'PyG' + interface.base
f.write(\
'''\
// ---------------------------------------------------
//
// Gateway Declaration
class %s : public %s, public %s
{
protected:
%s(PyObject *instance) : %s(instance) { ; }
PYGATEWAY_MAKE_SUPPORT2(%s, %s, IID_%s, %s)
''' % (gname, base_name, name, gname, base_name, gname, name, name, base_name))
if interface.base != "IUnknown":
f.write("\t// %s\n\t// *** Manually add %s method decls here\n\n" % (interface.base, interface.base))
else:
f.write('\n\n')
f.write("\t// %s\n" % name)
for method in interface.methods:
f.write('\tSTDMETHOD(%s)(\n' % method.name)
if method.args:
for arg in method.args[:-1]:
f.write("\t\t%s,\n" % (arg.GetRawDeclaration()))
arg = method.args[-1]
f.write("\t\t%s);\n\n" % (arg.GetRawDeclaration()))
else:
f.write('\t\tvoid);\n\n')
f.write('};\n')
f.close()
def _write_gw_cpp(f, interface):
if interface.name[0] == "I":
gname = 'PyG' + interface.name[1:]
else:
gname = 'PyG' + interface.name
name = interface.name
if interface.base == "IUnknown" or interface.base == "IDispatch":
base_name = "PyGatewayBase"
else:
if interface.base[0] == "I":
base_name = 'PyG' + interface.base[1:]
else:
base_name = 'PyG' + interface.base
f.write('''\
// ---------------------------------------------------
//
// Gateway Implementation
''' % {'name':name, 'gname':gname, 'base_name':base_name})
for method in interface.methods:
f.write(\
'''\
STDMETHODIMP %s::%s(
''' % (gname, method.name))
if method.args:
for arg in method.args[:-1]:
inoutstr = ']['.join(arg.inout)
f.write("\t\t/* [%s] */ %s,\n" % (inoutstr, arg.GetRawDeclaration()))
arg = method.args[-1]
inoutstr = ']['.join(arg.inout)
f.write("\t\t/* [%s] */ %s)\n" % (inoutstr, arg.GetRawDeclaration()))
else:
f.write('\t\tvoid)\n')
f.write("{\n\tPY_GATEWAY_METHOD;\n")
cout = 0
codePre = codePost = codeVars = ""
argStr = ""
needConversion = 0
formatChars = ""
if method.args:
for arg in method.args:
if arg.HasAttribute("out"):
cout = cout + 1
if arg.indirectionLevel ==2 :
f.write("\tif (%s==NULL) return E_POINTER;\n" % arg.name)
if arg.HasAttribute("in"):
try:
argCvt = makegwparse.make_arg_converter(arg)
argCvt.SetGatewayMode()
formatchar = argCvt.GetFormatChar();
needConversion = needConversion or argCvt.NeedUSES_CONVERSION()
if formatchar:
formatChars = formatChars + formatchar
codeVars = codeVars + argCvt.DeclareParseArgTupleInputConverter()
argStr = argStr + ", " + argCvt.GetBuildValueArg()
codePre = codePre + argCvt.GetBuildForGatewayPreCode()
codePost = codePost + argCvt.GetBuildForGatewayPostCode()
except makegwparse.error_not_supported, why:
f.write('// *** The input argument %s of type "%s" was not processed ***\n// - Please ensure this conversion function exists, and is appropriate\n// - %s\n' % (arg.name, arg.raw_type, why))
f.write('\tPyObject *ob%s = PyObject_From%s(%s);\n' % (arg.name, arg.type, arg.name))
f.write('\tif (ob%s==NULL) return MAKE_PYCOM_GATEWAY_FAILURE_CODE("%s");\n' % (arg.name, method.name))
codePost = codePost + "\tPy_DECREF(ob%s);\n" % arg.name
formatChars = formatChars + "O"
argStr = argStr + ", ob%s" % (arg.name)
if needConversion: f.write('\tUSES_CONVERSION;\n')
f.write(codeVars)
f.write(codePre)
if cout:
f.write("\tPyObject *result;\n")
resStr = "&result"
else:
resStr = "NULL"
if formatChars:
fullArgStr = '%s, "%s"%s' % (resStr, formatChars, argStr)
else:
fullArgStr = resStr
f.write('\tHRESULT hr=InvokeViaPolicy("%s", %s);\n' % (method.name, fullArgStr))
f.write(codePost)
if cout:
f.write("\tif (FAILED(hr)) return hr;\n")
f.write("\t// Process the Python results, and convert back to the real params\n")
# process the output arguments.
formatChars = codePobjects = codePost = argsParseTuple = ""
needConversion = 0
for arg in method.args:
if not arg.HasAttribute("out"):
continue
try:
argCvt = makegwparse.make_arg_converter(arg)
argCvt.SetGatewayMode()
val = argCvt.GetFormatChar()
if val:
formatChars = formatChars + val
argsParseTuple = argsParseTuple + ", " + argCvt.GetParseTupleArg()
codePobjects = codePobjects + argCvt.DeclareParseArgTupleInputConverter()
codePost = codePost + argCvt.GetParsePostCode()
needConversion = needConversion or argCvt.NeedUSES_CONVERSION()
except makegwparse.error_not_supported, why:
f.write('// *** The output argument %s of type "%s" was not processed ***\n// %s\n' % (arg.name, arg.raw_type, why))
if formatChars: # If I have any to actually process.
if len(formatChars)==1:
parseFn = "PyArg_Parse"
else:
parseFn = "PyArg_ParseTuple"
if codePobjects: f.write(codePobjects)
f.write('\tif (!%s(result, "%s" %s))\n\t\treturn MAKE_PYCOM_GATEWAY_FAILURE_CODE("%s");\n' % (parseFn, formatChars, argsParseTuple, method.name))
if codePost:
f.write('\tBOOL bPythonIsHappy = TRUE;\n')
f.write(codePost)
f.write('\tif (!bPythonIsHappy) hr = MAKE_PYCOM_GATEWAY_FAILURE_CODE("%s");\n' % method.name)
f.write('\tPy_DECREF(result);\n');
f.write('\treturn hr;\n}\n\n')
def test():
# make_framework_support("d:\\msdev\\include\\objidl.h", "ILockBytes")
make_framework_support("d:\\msdev\\include\\objidl.h", "IStorage")
# make_framework_support("d:\\msdev\\include\\objidl.h", "IEnumSTATSTG")
| bsd-2-clause | -4,377,715,774,999,593,500 | 8,274,633,996,335,900,000 | 36.682927 | 205 | 0.628361 | false |
IAmWave/trinerdi-icpc | lib/googletest/test/gtest_xml_outfiles_test.py | 2526 | 5340 | #!/usr/bin/env python
#
# Copyright 2008, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit test for the gtest_xml_output module."""
__author__ = "keith.ray@gmail.com (Keith Ray)"
import os
from xml.dom import minidom, Node
import gtest_test_utils
import gtest_xml_test_utils
GTEST_OUTPUT_SUBDIR = "xml_outfiles"
GTEST_OUTPUT_1_TEST = "gtest_xml_outfile1_test_"
GTEST_OUTPUT_2_TEST = "gtest_xml_outfile2_test_"
EXPECTED_XML_1 = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="1" failures="0" disabled="0" errors="0" time="*" timestamp="*" name="AllTests">
<testsuite name="PropertyOne" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="TestSomeProperties" status="run" time="*" classname="PropertyOne" SetUpProp="1" TestSomeProperty="1" TearDownProp="1" />
</testsuite>
</testsuites>
"""
EXPECTED_XML_2 = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="1" failures="0" disabled="0" errors="0" time="*" timestamp="*" name="AllTests">
<testsuite name="PropertyTwo" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="TestSomeProperties" status="run" time="*" classname="PropertyTwo" SetUpProp="2" TestSomeProperty="2" TearDownProp="2" />
</testsuite>
</testsuites>
"""
class GTestXMLOutFilesTest(gtest_xml_test_utils.GTestXMLTestCase):
"""Unit test for Google Test's XML output functionality."""
def setUp(self):
# We want the trailing '/' that the last "" provides in os.path.join, for
# telling Google Test to create an output directory instead of a single file
# for xml output.
self.output_dir_ = os.path.join(gtest_test_utils.GetTempDir(),
GTEST_OUTPUT_SUBDIR, "")
self.DeleteFilesAndDir()
def tearDown(self):
self.DeleteFilesAndDir()
def DeleteFilesAndDir(self):
try:
os.remove(os.path.join(self.output_dir_, GTEST_OUTPUT_1_TEST + ".xml"))
except os.error:
pass
try:
os.remove(os.path.join(self.output_dir_, GTEST_OUTPUT_2_TEST + ".xml"))
except os.error:
pass
try:
os.rmdir(self.output_dir_)
except os.error:
pass
def testOutfile1(self):
self._TestOutFile(GTEST_OUTPUT_1_TEST, EXPECTED_XML_1)
def testOutfile2(self):
self._TestOutFile(GTEST_OUTPUT_2_TEST, EXPECTED_XML_2)
def _TestOutFile(self, test_name, expected_xml):
gtest_prog_path = gtest_test_utils.GetTestExecutablePath(test_name)
command = [gtest_prog_path, "--gtest_output=xml:%s" % self.output_dir_]
p = gtest_test_utils.Subprocess(command,
working_dir=gtest_test_utils.GetTempDir())
self.assert_(p.exited)
self.assertEquals(0, p.exit_code)
# TODO(wan@google.com): libtool causes the built test binary to be
# named lt-gtest_xml_outfiles_test_ instead of
# gtest_xml_outfiles_test_. To account for this possibillity, we
# allow both names in the following code. We should remove this
# hack when Chandler Carruth's libtool replacement tool is ready.
output_file_name1 = test_name + ".xml"
output_file1 = os.path.join(self.output_dir_, output_file_name1)
output_file_name2 = 'lt-' + output_file_name1
output_file2 = os.path.join(self.output_dir_, output_file_name2)
self.assert_(os.path.isfile(output_file1) or os.path.isfile(output_file2),
output_file1)
expected = minidom.parseString(expected_xml)
if os.path.isfile(output_file1):
actual = minidom.parse(output_file1)
else:
actual = minidom.parse(output_file2)
self.NormalizeXml(actual.documentElement)
self.AssertEquivalentNodes(expected.documentElement,
actual.documentElement)
expected.unlink()
actual.unlink()
if __name__ == "__main__":
os.environ["GTEST_STACK_TRACE_DEPTH"] = "0"
gtest_test_utils.Main()
| mit | 3,805,415,399,857,812,500 | -3,314,663,068,776,846,000 | 39.454545 | 140 | 0.699625 | false |
instinct-vfx/rez | src/rez/vendor/packaging/utils.py | 62 | 1520 | # This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import re
from .version import InvalidVersion, Version
_canonicalize_regex = re.compile(r"[-_.]+")
def canonicalize_name(name):
# This is taken from PEP 503.
return _canonicalize_regex.sub("-", name).lower()
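# For example, canonicalize_name("Foo.Bar_baz") returns "foo-bar-baz".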
def canonicalize_version(version):
"""
    This is very similar to Version.__str__, but has one subtle difference
    with the way it handles the release segment.
"""
try:
version = Version(version)
except InvalidVersion:
# Legacy versions cannot be normalized
return version
parts = []
# Epoch
if version.epoch != 0:
parts.append("{0}!".format(version.epoch))
# Release segment
# NB: This strips trailing '.0's to normalize
parts.append(re.sub(r"(\.0)+$", "", ".".join(str(x) for x in version.release)))
# Pre-release
if version.pre is not None:
parts.append("".join(str(x) for x in version.pre))
# Post-release
if version.post is not None:
parts.append(".post{0}".format(version.post))
# Development release
if version.dev is not None:
parts.append(".dev{0}".format(version.dev))
# Local version segment
if version.local is not None:
parts.append("+{0}".format(version.local))
return "".join(parts)
| lgpl-3.0 | -3,540,522,032,358,518,000 | -1,427,467,888,948,306,000 | 25.666667 | 83 | 0.649342 | false |
fzheng/codejam | lib/python2.7/site-packages/pygments/lexers/theorem.py | 22 | 19030 | # -*- coding: utf-8 -*-
"""
pygments.lexers.theorem
~~~~~~~~~~~~~~~~~~~~~~~
Lexers for theorem-proving languages.
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, default, words
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Generic
__all__ = ['CoqLexer', 'IsabelleLexer', 'LeanLexer']
class CoqLexer(RegexLexer):
"""
For the `Coq <http://coq.inria.fr/>`_ theorem prover.
.. versionadded:: 1.5
"""
name = 'Coq'
aliases = ['coq']
filenames = ['*.v']
mimetypes = ['text/x-coq']
keywords1 = (
# Vernacular commands
'Section', 'Module', 'End', 'Require', 'Import', 'Export', 'Variable',
'Variables', 'Parameter', 'Parameters', 'Axiom', 'Hypothesis',
'Hypotheses', 'Notation', 'Local', 'Tactic', 'Reserved', 'Scope',
'Open', 'Close', 'Bind', 'Delimit', 'Definition', 'Let', 'Ltac',
'Fixpoint', 'CoFixpoint', 'Morphism', 'Relation', 'Implicit',
'Arguments', 'Set', 'Unset', 'Contextual', 'Strict', 'Prenex',
'Implicits', 'Inductive', 'CoInductive', 'Record', 'Structure',
'Canonical', 'Coercion', 'Theorem', 'Lemma', 'Corollary',
'Proposition', 'Fact', 'Remark', 'Example', 'Proof', 'Goal', 'Save',
'Qed', 'Defined', 'Hint', 'Resolve', 'Rewrite', 'View', 'Search',
'Show', 'Print', 'Printing', 'All', 'Graph', 'Projections', 'inside',
'outside', 'Check', 'Global', 'Instance', 'Class', 'Existing',
'Universe', 'Polymorphic', 'Monomorphic', 'Context'
)
keywords2 = (
# Gallina
'forall', 'exists', 'exists2', 'fun', 'fix', 'cofix', 'struct',
'match', 'end', 'in', 'return', 'let', 'if', 'is', 'then', 'else',
'for', 'of', 'nosimpl', 'with', 'as',
)
keywords3 = (
# Sorts
'Type', 'Prop',
)
keywords4 = (
# Tactics
'pose', 'set', 'move', 'case', 'elim', 'apply', 'clear', 'hnf', 'intro',
'intros', 'generalize', 'rename', 'pattern', 'after', 'destruct',
'induction', 'using', 'refine', 'inversion', 'injection', 'rewrite',
'congr', 'unlock', 'compute', 'ring', 'field', 'replace', 'fold',
'unfold', 'change', 'cutrewrite', 'simpl', 'have', 'suff', 'wlog',
'suffices', 'without', 'loss', 'nat_norm', 'assert', 'cut', 'trivial',
'revert', 'bool_congr', 'nat_congr', 'symmetry', 'transitivity', 'auto',
'split', 'left', 'right', 'autorewrite', 'tauto', 'setoid_rewrite',
'intuition', 'eauto', 'eapply', 'econstructor', 'etransitivity',
'constructor', 'erewrite', 'red', 'cbv', 'lazy', 'vm_compute',
'native_compute', 'subst',
)
keywords5 = (
# Terminators
'by', 'done', 'exact', 'reflexivity', 'tauto', 'romega', 'omega',
'assumption', 'solve', 'contradiction', 'discriminate',
'congruence',
)
keywords6 = (
# Control
'do', 'last', 'first', 'try', 'idtac', 'repeat',
)
# 'as', 'assert', 'begin', 'class', 'constraint', 'do', 'done',
# 'downto', 'else', 'end', 'exception', 'external', 'false',
# 'for', 'fun', 'function', 'functor', 'if', 'in', 'include',
# 'inherit', 'initializer', 'lazy', 'let', 'match', 'method',
# 'module', 'mutable', 'new', 'object', 'of', 'open', 'private',
# 'raise', 'rec', 'sig', 'struct', 'then', 'to', 'true', 'try',
# 'type', 'val', 'virtual', 'when', 'while', 'with'
keyopts = (
'!=', '#', '&', '&&', r'\(', r'\)', r'\*', r'\+', ',', '-', r'-\.',
'->', r'\.', r'\.\.', ':', '::', ':=', ':>', ';', ';;', '<', '<-',
'<->', '=', '>', '>]', r'>\}', r'\?', r'\?\?', r'\[', r'\[<', r'\[>',
r'\[\|', ']', '_', '`', r'\{', r'\{<', r'\|', r'\|]', r'\}', '~', '=>',
r'/\\', r'\\/', r'\{\|', r'\|\}',
u'Π', u'λ',
)
operators = r'[!$%&*+\./:<=>?@^|~-]'
prefix_syms = r'[!?~]'
infix_syms = r'[=<>@^|&+\*/$%-]'
primitives = ('unit', 'nat', 'bool', 'string', 'ascii', 'list')
tokens = {
'root': [
(r'\s+', Text),
(r'false|true|\(\)|\[\]', Name.Builtin.Pseudo),
(r'\(\*', Comment, 'comment'),
(words(keywords1, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
(words(keywords2, prefix=r'\b', suffix=r'\b'), Keyword),
(words(keywords3, prefix=r'\b', suffix=r'\b'), Keyword.Type),
(words(keywords4, prefix=r'\b', suffix=r'\b'), Keyword),
(words(keywords5, prefix=r'\b', suffix=r'\b'), Keyword.Pseudo),
(words(keywords6, prefix=r'\b', suffix=r'\b'), Keyword.Reserved),
# (r'\b([A-Z][\w\']*)(\.)', Name.Namespace, 'dotted'),
(r'\b([A-Z][\w\']*)', Name),
(r'(%s)' % '|'.join(keyopts[::-1]), Operator),
(r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
(r'\b(%s)\b' % '|'.join(primitives), Keyword.Type),
(r"[^\W\d][\w']*", Name),
(r'\d[\d_]*', Number.Integer),
(r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
(r'0[oO][0-7][0-7_]*', Number.Oct),
(r'0[bB][01][01_]*', Number.Bin),
(r'-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float),
(r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'",
String.Char),
(r"'.'", String.Char),
(r"'", Keyword), # a stray quote is another syntax element
(r'"', String.Double, 'string'),
(r'[~?][a-z][\w\']*:', Name),
],
'comment': [
(r'[^(*)]+', Comment),
(r'\(\*', Comment, '#push'),
(r'\*\)', Comment, '#pop'),
(r'[(*)]', Comment),
],
'string': [
(r'[^"]+', String.Double),
(r'""', String.Double),
(r'"', String.Double, '#pop'),
],
'dotted': [
(r'\s+', Text),
(r'\.', Punctuation),
(r'[A-Z][\w\']*(?=\s*\.)', Name.Namespace),
(r'[A-Z][\w\']*', Name.Class, '#pop'),
(r'[a-z][a-z0-9_\']*', Name, '#pop'),
default('#pop')
],
}
def analyse_text(text):
if text.startswith('(*'):
return True
class IsabelleLexer(RegexLexer):
"""
For the `Isabelle <http://isabelle.in.tum.de/>`_ proof assistant.
.. versionadded:: 2.0
"""
name = 'Isabelle'
aliases = ['isabelle']
filenames = ['*.thy']
mimetypes = ['text/x-isabelle']
keyword_minor = (
'and', 'assumes', 'attach', 'avoids', 'binder', 'checking',
'class_instance', 'class_relation', 'code_module', 'congs',
'constant', 'constrains', 'datatypes', 'defines', 'file', 'fixes',
'for', 'functions', 'hints', 'identifier', 'if', 'imports', 'in',
'includes', 'infix', 'infixl', 'infixr', 'is', 'keywords', 'lazy',
'module_name', 'monos', 'morphisms', 'no_discs_sels', 'notes',
'obtains', 'open', 'output', 'overloaded', 'parametric', 'permissive',
'pervasive', 'rep_compat', 'shows', 'structure', 'type_class',
'type_constructor', 'unchecked', 'unsafe', 'where',
)
keyword_diag = (
'ML_command', 'ML_val', 'class_deps', 'code_deps', 'code_thms',
'display_drafts', 'find_consts', 'find_theorems', 'find_unused_assms',
'full_prf', 'help', 'locale_deps', 'nitpick', 'pr', 'prf',
'print_abbrevs', 'print_antiquotations', 'print_attributes',
'print_binds', 'print_bnfs', 'print_bundles',
'print_case_translations', 'print_cases', 'print_claset',
'print_classes', 'print_codeproc', 'print_codesetup',
'print_coercions', 'print_commands', 'print_context',
'print_defn_rules', 'print_dependencies', 'print_facts',
'print_induct_rules', 'print_inductives', 'print_interps',
'print_locale', 'print_locales', 'print_methods', 'print_options',
'print_orders', 'print_quot_maps', 'print_quotconsts',
'print_quotients', 'print_quotientsQ3', 'print_quotmapsQ3',
'print_rules', 'print_simpset', 'print_state', 'print_statement',
'print_syntax', 'print_theorems', 'print_theory', 'print_trans_rules',
'prop', 'pwd', 'quickcheck', 'refute', 'sledgehammer', 'smt_status',
'solve_direct', 'spark_status', 'term', 'thm', 'thm_deps', 'thy_deps',
'try', 'try0', 'typ', 'unused_thms', 'value', 'values', 'welcome',
'print_ML_antiquotations', 'print_term_bindings', 'values_prolog',
)
keyword_thy = ('theory', 'begin', 'end')
keyword_section = ('header', 'chapter')
keyword_subsection = (
'section', 'subsection', 'subsubsection', 'sect', 'subsect',
'subsubsect',
)
keyword_theory_decl = (
'ML', 'ML_file', 'abbreviation', 'adhoc_overloading', 'arities',
'atom_decl', 'attribute_setup', 'axiomatization', 'bundle',
'case_of_simps', 'class', 'classes', 'classrel', 'codatatype',
'code_abort', 'code_class', 'code_const', 'code_datatype',
'code_identifier', 'code_include', 'code_instance', 'code_modulename',
'code_monad', 'code_printing', 'code_reflect', 'code_reserved',
'code_type', 'coinductive', 'coinductive_set', 'consts', 'context',
'datatype', 'datatype_new', 'datatype_new_compat', 'declaration',
'declare', 'default_sort', 'defer_recdef', 'definition', 'defs',
'domain', 'domain_isomorphism', 'domaindef', 'equivariance',
'export_code', 'extract', 'extract_type', 'fixrec', 'fun',
'fun_cases', 'hide_class', 'hide_const', 'hide_fact', 'hide_type',
'import_const_map', 'import_file', 'import_tptp', 'import_type_map',
'inductive', 'inductive_set', 'instantiation', 'judgment', 'lemmas',
'lifting_forget', 'lifting_update', 'local_setup', 'locale',
'method_setup', 'nitpick_params', 'no_adhoc_overloading',
'no_notation', 'no_syntax', 'no_translations', 'no_type_notation',
'nominal_datatype', 'nonterminal', 'notation', 'notepad', 'oracle',
'overloading', 'parse_ast_translation', 'parse_translation',
'partial_function', 'primcorec', 'primrec', 'primrec_new',
'print_ast_translation', 'print_translation', 'quickcheck_generator',
'quickcheck_params', 'realizability', 'realizers', 'recdef', 'record',
'refute_params', 'setup', 'setup_lifting', 'simproc_setup',
'simps_of_case', 'sledgehammer_params', 'spark_end', 'spark_open',
'spark_open_siv', 'spark_open_vcg', 'spark_proof_functions',
'spark_types', 'statespace', 'syntax', 'syntax_declaration', 'text',
'text_raw', 'theorems', 'translations', 'type_notation',
'type_synonym', 'typed_print_translation', 'typedecl', 'hoarestate',
'install_C_file', 'install_C_types', 'wpc_setup', 'c_defs', 'c_types',
'memsafe', 'SML_export', 'SML_file', 'SML_import', 'approximate',
'bnf_axiomatization', 'cartouche', 'datatype_compat',
'free_constructors', 'functor', 'nominal_function',
'nominal_termination', 'permanent_interpretation',
'binds', 'defining', 'smt2_status', 'term_cartouche',
'boogie_file', 'text_cartouche',
)
keyword_theory_script = ('inductive_cases', 'inductive_simps')
keyword_theory_goal = (
'ax_specification', 'bnf', 'code_pred', 'corollary', 'cpodef',
'crunch', 'crunch_ignore',
'enriched_type', 'function', 'instance', 'interpretation', 'lemma',
'lift_definition', 'nominal_inductive', 'nominal_inductive2',
'nominal_primrec', 'pcpodef', 'primcorecursive',
'quotient_definition', 'quotient_type', 'recdef_tc', 'rep_datatype',
'schematic_corollary', 'schematic_lemma', 'schematic_theorem',
'spark_vc', 'specification', 'subclass', 'sublocale', 'termination',
'theorem', 'typedef', 'wrap_free_constructors',
)
keyword_qed = ('by', 'done', 'qed')
keyword_abandon_proof = ('sorry', 'oops')
keyword_proof_goal = ('have', 'hence', 'interpret')
keyword_proof_block = ('next', 'proof')
keyword_proof_chain = (
'finally', 'from', 'then', 'ultimately', 'with',
)
keyword_proof_decl = (
'ML_prf', 'also', 'include', 'including', 'let', 'moreover', 'note',
'txt', 'txt_raw', 'unfolding', 'using', 'write',
)
keyword_proof_asm = ('assume', 'case', 'def', 'fix', 'presume')
keyword_proof_asm_goal = ('guess', 'obtain', 'show', 'thus')
keyword_proof_script = (
'apply', 'apply_end', 'apply_trace', 'back', 'defer', 'prefer',
)
operators = (
'::', ':', '(', ')', '[', ']', '_', '=', ',', '|',
'+', '-', '!', '?',
)
proof_operators = ('{', '}', '.', '..')
tokens = {
'root': [
(r'\s+', Text),
(r'\(\*', Comment, 'comment'),
(r'\{\*', Comment, 'text'),
(words(operators), Operator),
(words(proof_operators), Operator.Word),
(words(keyword_minor, prefix=r'\b', suffix=r'\b'), Keyword.Pseudo),
(words(keyword_diag, prefix=r'\b', suffix=r'\b'), Keyword.Type),
(words(keyword_thy, prefix=r'\b', suffix=r'\b'), Keyword),
(words(keyword_theory_decl, prefix=r'\b', suffix=r'\b'), Keyword),
(words(keyword_section, prefix=r'\b', suffix=r'\b'), Generic.Heading),
(words(keyword_subsection, prefix=r'\b', suffix=r'\b'), Generic.Subheading),
(words(keyword_theory_goal, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
(words(keyword_theory_script, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
(words(keyword_abandon_proof, prefix=r'\b', suffix=r'\b'), Generic.Error),
(words(keyword_qed, prefix=r'\b', suffix=r'\b'), Keyword),
(words(keyword_proof_goal, prefix=r'\b', suffix=r'\b'), Keyword),
(words(keyword_proof_block, prefix=r'\b', suffix=r'\b'), Keyword),
(words(keyword_proof_decl, prefix=r'\b', suffix=r'\b'), Keyword),
(words(keyword_proof_chain, prefix=r'\b', suffix=r'\b'), Keyword),
(words(keyword_proof_asm, prefix=r'\b', suffix=r'\b'), Keyword),
(words(keyword_proof_asm_goal, prefix=r'\b', suffix=r'\b'), Keyword),
(words(keyword_proof_script, prefix=r'\b', suffix=r'\b'), Keyword.Pseudo),
(r'\\<\w*>', Text.Symbol),
(r"[^\W\d][.\w']*", Name),
(r"\?[^\W\d][.\w']*", Name),
(r"'[^\W\d][.\w']*", Name.Type),
(r'\d[\d_]*', Name), # display numbers as name
(r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
(r'0[oO][0-7][0-7_]*', Number.Oct),
(r'0[bB][01][01_]*', Number.Bin),
(r'"', String, 'string'),
(r'`', String.Other, 'fact'),
],
'comment': [
(r'[^(*)]+', Comment),
(r'\(\*', Comment, '#push'),
(r'\*\)', Comment, '#pop'),
(r'[(*)]', Comment),
],
'text': [
(r'[^*}]+', Comment),
(r'\*\}', Comment, '#pop'),
(r'\*', Comment),
(r'\}', Comment),
],
'string': [
(r'[^"\\]+', String),
(r'\\<\w*>', String.Symbol),
(r'\\"', String),
(r'\\', String),
(r'"', String, '#pop'),
],
'fact': [
(r'[^`\\]+', String.Other),
(r'\\<\w*>', String.Symbol),
(r'\\`', String.Other),
(r'\\', String.Other),
(r'`', String.Other, '#pop'),
],
}
class LeanLexer(RegexLexer):
"""
For the `Lean <https://github.com/leanprover/lean>`_
theorem prover.
.. versionadded:: 2.0
"""
name = 'Lean'
aliases = ['lean']
filenames = ['*.lean']
mimetypes = ['text/x-lean']
flags = re.MULTILINE | re.UNICODE
keywords1 = ('import', 'abbreviation', 'opaque_hint', 'tactic_hint', 'definition', 'renaming',
'inline', 'hiding', 'exposing', 'parameter', 'parameters', 'conjecture',
'hypothesis', 'lemma', 'corollary', 'variable', 'variables', 'print', 'theorem',
'axiom', 'inductive', 'structure', 'universe', 'alias', 'help',
'options', 'precedence', 'postfix', 'prefix', 'calc_trans', 'calc_subst', 'calc_refl',
'infix', 'infixl', 'infixr', 'notation', 'eval', 'check', 'exit', 'coercion', 'end',
'private', 'using', 'namespace', 'including', 'instance', 'section', 'context',
'protected', 'expose', 'export', 'set_option', 'add_rewrite', 'extends',
'open', 'example', 'constant', 'constants', 'print', 'opaque', 'reducible', 'irreducible'
)
keywords2 = (
'forall', 'fun', 'Pi', 'obtain', 'from', 'have', 'show', 'assume', 'take',
'let', 'if', 'else', 'then', 'by', 'in', 'with', 'begin', 'proof', 'qed', 'calc', 'match'
)
keywords3 = (
# Sorts
'Type', 'Prop',
)
operators = (
'!=', '#', '&', '&&', '*', '+', '-', '/', '@', '!', '`',
'-.', '->', '.', '..', '...', '::', ':>', ';', ';;', '<',
'<-', '=', '==', '>', '_', '`', '|', '||', '~', '=>', '<=', '>=',
'/\\', '\\/', u'∀', u'Π', u'λ', u'↔', u'∧', u'∨', u'≠', u'≤', u'≥',
u'¬', u'⁻¹', u'⬝', u'▸', u'→', u'∃', u'ℕ', u'ℤ', u'≈', u'×', u'⌞', u'⌟', u'≡',
u'⟨', u'⟩'
)
punctuation = ('(', ')', ':', '{', '}', '[', ']', u'⦃', u'⦄', ':=', ',')
tokens = {
'root': [
(r'\s+', Text),
(r'/-', Comment, 'comment'),
(r'--.*?$', Comment.Single),
(words(keywords1, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
(words(keywords2, prefix=r'\b', suffix=r'\b'), Keyword),
(words(keywords3, prefix=r'\b', suffix=r'\b'), Keyword.Type),
(words(operators), Name.Builtin.Pseudo),
(words(punctuation), Operator),
(u"[A-Za-z_\u03b1-\u03ba\u03bc-\u03fb\u1f00-\u1ffe\u2100-\u214f]"
u"[A-Za-z_'\u03b1-\u03ba\u03bc-\u03fb\u1f00-\u1ffe\u2070-\u2079"
u"\u207f-\u2089\u2090-\u209c\u2100-\u214f0-9]*", Name),
(r'\d+', Number.Integer),
(r'"', String.Double, 'string'),
(r'[~?][a-z][\w\']*:', Name.Variable)
],
'comment': [
# Multiline Comments
(r'[^/-]', Comment.Multiline),
(r'/-', Comment.Multiline, '#push'),
(r'-/', Comment.Multiline, '#pop'),
(r'[/-]', Comment.Multiline)
],
'string': [
(r'[^\\"]+', String.Double),
(r'\\[n"\\]', String.Escape),
('"', String.Double, '#pop'),
],
}
| mit | -6,031,748,875,636,298,000 | 1,314,595,498,973,403,400 | 40.803965 | 106 | 0.489963 | false |
mcus/SickRage | sickbeard/providers/bluetigers.py | 3 | 6075 | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
# Author: echel0n <sickrage.tv@gmail.com>
# URL: http://www.github.com/sickragetv/sickrage/
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
import traceback
import re
from requests.auth import AuthBase
from sickbeard.providers import generic
import requests
from sickbeard.bs4_parser import BS4Parser
import logging
from sickbeard import tvcache
class BLUETIGERSProvider(generic.TorrentProvider):
def __init__(self):
generic.TorrentProvider.__init__(self, "BLUETIGERS")
self.supportsBacklog = True
self.username = None
self.password = None
self.ratio = None
self.token = None
self.tokenLastUpdate = None
self.cache = BLUETIGERSCache(self)
self.urls = {
'base_url': 'https://www.bluetigers.ca/',
'search': 'https://www.bluetigers.ca/torrents-search.php',
'login': 'https://www.bluetigers.ca/account-login.php',
'download': 'https://www.bluetigers.ca/torrents-details.php?id=%s&hit=1',
}
self.search_params = {
"c16": 1, "c10": 1, "c130": 1, "c131": 1, "c17": 1, "c18": 1, "c19": 1
}
self.url = self.urls[b'base_url']
def _doLogin(self):
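        # Any existing session cookie means we are already authenticated.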
if any(requests.utils.dict_from_cookiejar(self.session.cookies).values()):
return True
login_params = {
'username': self.username,
'password': self.password,
'take_login': '1'
}
response = self.getURL(self.urls[b'login'], post_data=login_params, timeout=30)
if not response:
logging.warning("Unable to connect to provider")
return False
if re.search('/account-logout.php', response):
return True
else:
logging.warning("Invalid username or password. Check your settings")
return False
def _doSearch(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None):
results = []
items = {'Season': [], 'Episode': [], 'RSS': []}
# check for auth
if not self._doLogin():
return results
for mode in search_strings.keys():
logging.debug("Search Mode: %s" % mode)
for search_string in search_strings[mode]:
                if mode != 'RSS':
logging.debug("Search string: %s " % search_string)
self.search_params[b'search'] = search_string
data = self.getURL(self.urls[b'search'], params=self.search_params)
if not data:
continue
try:
with BS4Parser(data, features=["html5lib", "permissive"]) as html:
result_linkz = html.findAll('a', href=re.compile("torrents-details"))
if not result_linkz:
logging.debug("Data returned from provider do not contains any torrent")
continue
if result_linkz:
for link in result_linkz:
title = link.text
download_url = self.urls[b'base_url'] + "/" + link[b'href']
download_url = download_url.replace("torrents-details", "download")
# FIXME
size = -1
seeders = 1
leechers = 0
if not title or not download_url:
continue
# Filter unseeded torrent
# if seeders < self.minseed or leechers < self.minleech:
# if mode is not 'RSS':
# logging.debug(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers))
# continue
item = title, download_url, size, seeders, leechers
                                if mode != 'RSS':
logging.debug("Found result: %s " % title)
items[mode].append(item)
except Exception as e:
logging.error("Failed parsing provider. Traceback: %s" % traceback.format_exc())
# For each search mode sort all the items by seeders if available
items[mode].sort(key=lambda tup: tup[3], reverse=True)
results += items[mode]
return results
def seedRatio(self):
return self.ratio
class BLUETIGERSAuth(AuthBase):
"""Attaches HTTP Authentication to the given Request object."""
def __init__(self, token):
self.token = token
def __call__(self, r):
r.headers[b'Authorization'] = self.token
return r
class BLUETIGERSCache(tvcache.TVCache):
def __init__(self, provider_obj):
tvcache.TVCache.__init__(self, provider_obj)
# Only poll BLUETIGERS every 10 minutes max
self.minTime = 10
def _getRSSData(self):
search_strings = {'RSS': ['']}
return {'entries': self.provider._doSearch(search_strings)}
provider = BLUETIGERSProvider()
| gpl-3.0 | -4,434,046,333,160,475,600 | 7,280,014,852,162,290,000 | 33.322034 | 185 | 0.550123 | false |
odooindia/odoo | addons/crm_helpdesk/crm_helpdesk.py | 8 | 7478 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import openerp
from openerp.addons.crm import crm
from openerp.osv import fields, osv
from openerp import tools
from openerp.tools.translate import _
from openerp.tools import html2plaintext
class crm_helpdesk(osv.osv):
""" Helpdesk Cases """
_name = "crm.helpdesk"
_description = "Helpdesk"
_order = "id desc"
_inherit = ['mail.thread']
_columns = {
'id': fields.integer('ID', readonly=True),
'name': fields.char('Name', required=True),
'active': fields.boolean('Active', required=False),
'date_action_last': fields.datetime('Last Action', readonly=1),
'date_action_next': fields.datetime('Next Action', readonly=1),
'description': fields.text('Description'),
'create_date': fields.datetime('Creation Date' , readonly=True),
'write_date': fields.datetime('Update Date' , readonly=True),
'date_deadline': fields.date('Deadline'),
'user_id': fields.many2one('res.users', 'Responsible'),
'section_id': fields.many2one('crm.case.section', 'Sales Team', \
select=True, help='Responsible sales team. Define Responsible user and Email account for mail gateway.'),
'company_id': fields.many2one('res.company', 'Company'),
'date_closed': fields.datetime('Closed', readonly=True),
'partner_id': fields.many2one('res.partner', 'Partner'),
'email_cc': fields.text('Watchers Emails', size=252 , help="These email addresses will be added to the CC field of all inbound and outbound emails for this record before being sent. Separate multiple email addresses with a comma"),
'email_from': fields.char('Email', size=128, help="Destination email for email gateway"),
'date': fields.datetime('Date'),
'ref': fields.reference('Reference', selection=openerp.addons.base.res.res_request.referencable_models),
'ref2': fields.reference('Reference 2', selection=openerp.addons.base.res.res_request.referencable_models),
'channel_id': fields.many2one('crm.case.channel', 'Channel', help="Communication channel."),
'planned_revenue': fields.float('Planned Revenue'),
'planned_cost': fields.float('Planned Costs'),
'priority': fields.selection([('0','Low'), ('1','Normal'), ('2','High')], 'Priority'),
'probability': fields.float('Probability (%)'),
'categ_id': fields.many2one('crm.case.categ', 'Category', \
domain="['|',('section_id','=',False),('section_id','=',section_id),\
('object_id.model', '=', 'crm.helpdesk')]"),
'duration': fields.float('Duration', states={'done': [('readonly', True)]}),
'state': fields.selection(
[('draft', 'New'),
('open', 'In Progress'),
('pending', 'Pending'),
('done', 'Closed'),
('cancel', 'Cancelled')], 'Status', readonly=True, track_visibility='onchange',
help='The status is set to \'Draft\', when a case is created.\
\nIf the case is in progress the status is set to \'Open\'.\
\nWhen the case is over, the status is set to \'Done\'.\
\nIf the case needs to be reviewed then the status is set to \'Pending\'.'),
}
_defaults = {
'active': lambda *a: 1,
'user_id': lambda s, cr, uid, c: uid,
'state': lambda *a: 'draft',
'date': fields.datetime.now,
'company_id': lambda s, cr, uid, c: s.pool.get('res.company')._company_default_get(cr, uid, 'crm.helpdesk', context=c),
'priority': '1',
}
def on_change_partner_id(self, cr, uid, ids, partner_id, context=None):
values = {}
if partner_id:
partner = self.pool.get('res.partner').browse(cr, uid, partner_id, context=context)
values = {
'email_from': partner.email,
}
return {'value': values}
def write(self, cr, uid, ids, values, context=None):
""" Override to add case management: open/close dates """
if values.get('state'):
if values.get('state') in ['draft', 'open'] and not values.get('date_open'):
values['date_open'] = fields.datetime.now()
elif values.get('state') == 'done' and not values.get('date_closed'):
values['date_closed'] = fields.datetime.now()
return super(crm_helpdesk, self).write(cr, uid, ids, values, context=context)
def case_escalate(self, cr, uid, ids, context=None):
""" Escalates case to parent level """
data = {'active': True}
for case in self.browse(cr, uid, ids, context=context):
if case.section_id and case.section_id.parent_id:
parent_id = case.section_id.parent_id
data['section_id'] = parent_id.id
if parent_id.change_responsible and parent_id.user_id:
data['user_id'] = parent_id.user_id.id
else:
raise osv.except_osv(_('Error!'), _('You cannot escalate; you are already at the top level of your sales-team category.'))
self.write(cr, uid, [case.id], data, context=context)
return True
# -------------------------------------------------------
# Mail gateway
# -------------------------------------------------------
def message_new(self, cr, uid, msg, custom_values=None, context=None):
""" Overrides mail_thread message_new that is called by the mailgateway
through message_process.
This override updates the document according to the email.
"""
if custom_values is None:
custom_values = {}
desc = html2plaintext(msg.get('body')) if msg.get('body') else ''
defaults = {
'name': msg.get('subject') or _("No Subject"),
'description': desc,
'email_from': msg.get('from'),
'email_cc': msg.get('cc'),
'user_id': False,
'partner_id': msg.get('author_id', False),
}
defaults.update(custom_values)
return super(crm_helpdesk, self).message_new(cr, uid, msg, custom_values=defaults, context=context)
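# Illustrative shape of the ``msg`` dict handled by message_new above (a
# sketch, not part of the original module; the keys mirror what the method
# itself reads, and the values are made up):
#
#   msg = {
#       'subject': 'Printer is on fire',
#       'body': '<p>Please send help</p>',
#       'from': 'customer@example.com',
#       'cc': 'boss@example.com',
#       'author_id': 42,
#   }
#   helpdesk_id = self.pool['crm.helpdesk'].message_new(cr, uid, msg)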
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 595,925,543,655,905,800 | 1,801,327,165,800,275,700 | 50.930556 | 243 | 0.566194 | false |
Kalyzee/edx-platform | cms/djangoapps/contentstore/views/tests/test_credit_eligibility.py | 121 | 2954 | """
Unit tests for credit eligibility UI in Studio.
"""
import mock
from contentstore.tests.utils import CourseTestCase
from contentstore.utils import reverse_course_url
from xmodule.modulestore.tests.factories import CourseFactory
from openedx.core.djangoapps.credit.api import get_credit_requirements
from openedx.core.djangoapps.credit.models import CreditCourse
from openedx.core.djangoapps.credit.signals import on_course_publish
class CreditEligibilityTest(CourseTestCase):
"""Base class to test the course settings details view in Studio for credit
eligibility requirements.
"""
def setUp(self):
super(CreditEligibilityTest, self).setUp()
self.course = CourseFactory.create(org='edX', number='dummy', display_name='Credit Course')
self.course_details_url = reverse_course_url('settings_handler', unicode(self.course.id))
@mock.patch.dict("django.conf.settings.FEATURES", {'ENABLE_CREDIT_ELIGIBILITY': False})
def test_course_details_with_disabled_setting(self):
"""Test that user don't see credit eligibility requirements in response
if the feature flag 'ENABLE_CREDIT_ELIGIBILITY' is not enabled.
"""
response = self.client.get_html(self.course_details_url)
self.assertEqual(response.status_code, 200)
self.assertNotContains(response, "Course Credit Requirements")
self.assertNotContains(response, "Steps required to earn course credit")
@mock.patch.dict("django.conf.settings.FEATURES", {'ENABLE_CREDIT_ELIGIBILITY': True})
def test_course_details_with_enabled_setting(self):
"""Test that credit eligibility requirements are present in
response if the feature flag 'ENABLE_CREDIT_ELIGIBILITY' is enabled.
"""
# verify that the credit eligibility requirements block doesn't show if
# the course is not set as a credit course
response = self.client.get_html(self.course_details_url)
self.assertEqual(response.status_code, 200)
self.assertNotContains(response, "Course Credit Requirements")
self.assertNotContains(response, "Steps required to earn course credit")
# verify that the credit eligibility requirements block shows if the
# course is set as a credit course and has eligibility requirements
credit_course = CreditCourse(course_key=unicode(self.course.id), enabled=True)
credit_course.save()
self.assertEqual(len(get_credit_requirements(self.course.id)), 0)
# test that after publishing course, minimum grade requirement is added
on_course_publish(self.course.id)
self.assertEqual(len(get_credit_requirements(self.course.id)), 1)
response = self.client.get_html(self.course_details_url)
self.assertEqual(response.status_code, 200)
self.assertContains(response, "Course Credit Requirements")
self.assertContains(response, "Steps required to earn course credit")
| agpl-3.0 | 3,712,809,117,853,274,000 | -7,085,970,657,762,775,000 | 49.067797 | 99 | 0.727488 | false |
indictranstech/buyback-erp | erpnext/controllers/buying_controller.py | 7 | 13466 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe import _, msgprint
from frappe.utils import flt, rounded
from erpnext.setup.utils import get_company_currency
from erpnext.accounts.party import get_party_details
from erpnext.controllers.stock_controller import StockController
class BuyingController(StockController):
def __setup__(self):
if hasattr(self, "fname"):
self.table_print_templates = {
self.fname: "templates/print_formats/includes/item_grid.html",
"other_charges": "templates/print_formats/includes/taxes.html",
}
def validate(self):
super(BuyingController, self).validate()
if getattr(self, "supplier", None) and not self.supplier_name:
self.supplier_name = frappe.db.get_value("Supplier",
self.supplier, "supplier_name")
self.is_item_table_empty()
self.set_qty_as_per_stock_uom()
self.validate_stock_or_nonstock_items()
self.validate_warehouse()
def set_missing_values(self, for_validate=False):
super(BuyingController, self).set_missing_values(for_validate)
self.set_supplier_from_item_default()
self.set_price_list_currency("Buying")
# set contact and address details for supplier, if they are not mentioned
if getattr(self, "supplier", None):
self.update_if_missing(get_party_details(self.supplier, party_type="Supplier"))
self.set_missing_item_details()
if self.get("__islocal"):
self.set_taxes("other_charges", "taxes_and_charges")
def set_supplier_from_item_default(self):
if self.meta.get_field("supplier") and not self.supplier:
for d in self.get(self.fname):
supplier = frappe.db.get_value("Item", d.item_code, "default_supplier")
if supplier:
self.supplier = supplier
break
def validate_warehouse(self):
from erpnext.stock.utils import validate_warehouse_company
warehouses = list(set([d.warehouse for d in
self.get(self.fname) if getattr(d, "warehouse", None)]))
for w in warehouses:
validate_warehouse_company(w, self.company)
def validate_stock_or_nonstock_items(self):
if self.meta.get_field("other_charges") and not self.get_stock_items():
tax_for_valuation = [d.account_head for d in self.get("other_charges")
if d.category in ["Valuation", "Valuation and Total"]]
if tax_for_valuation:
frappe.throw(_("Tax Category can not be 'Valuation' or 'Valuation and Total' as all items are non-stock items"))
def set_total_in_words(self):
from frappe.utils import money_in_words
company_currency = get_company_currency(self.company)
if self.meta.get_field("in_words"):
self.in_words = money_in_words(self.grand_total, company_currency)
if self.meta.get_field("in_words_import"):
self.in_words_import = money_in_words(self.grand_total_import,
self.currency)
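# Illustrative behaviour (a sketch; the exact wording returned depends on
# the currency and locale):
#
#   money_in_words(1234.56, 'USD')
#   # -> a spelled-out string along the lines of
#   # 'USD One Thousand Two Hundred Thirty Four and Fifty Six Cents only.'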
def calculate_taxes_and_totals(self):
self.other_fname = "other_charges"
super(BuyingController, self).calculate_taxes_and_totals()
self.calculate_total_advance("Purchase Invoice", "advance_allocation_details")
def calculate_item_values(self):
for item in self.item_doclist:
self.round_floats_in(item)
if item.discount_percentage == 100.0:
item.rate = 0.0
elif not item.rate:
item.rate = flt(item.price_list_rate * (1.0 - (item.discount_percentage / 100.0)),
self.precision("rate", item))
item.amount = flt(item.rate * item.qty,
self.precision("amount", item))
item.item_tax_amount = 0.0
self._set_in_company_currency(item, "amount", "base_amount")
self._set_in_company_currency(item, "price_list_rate", "base_price_list_rate")
self._set_in_company_currency(item, "rate", "base_rate")
def calculate_net_total(self):
self.net_total = self.net_total_import = 0.0
for item in self.item_doclist:
self.net_total += item.base_amount
self.net_total_import += item.amount
self.round_floats_in(self, ["net_total", "net_total_import"])
def calculate_totals(self):
self.grand_total = flt(self.tax_doclist[-1].total if self.tax_doclist else self.net_total)
self.grand_total_import = flt(self.grand_total / self.conversion_rate) \
if self.tax_doclist else self.net_total_import
self.total_tax = flt(self.grand_total - self.net_total, self.precision("total_tax"))
self.grand_total = flt(self.grand_total, self.precision("grand_total"))
self.grand_total_import = flt(self.grand_total_import, self.precision("grand_total_import"))
if self.meta.get_field("rounded_total"):
self.rounded_total = rounded(self.grand_total)
if self.meta.get_field("rounded_total_import"):
self.rounded_total_import = rounded(self.grand_total_import)
if self.meta.get_field("other_charges_added"):
self.other_charges_added = flt(sum([flt(d.tax_amount) for d in self.tax_doclist
if d.add_deduct_tax=="Add" and d.category in ["Valuation and Total", "Total"]]),
self.precision("other_charges_added"))
if self.meta.get_field("other_charges_deducted"):
self.other_charges_deducted = flt(sum([flt(d.tax_amount) for d in self.tax_doclist
if d.add_deduct_tax=="Deduct" and d.category in ["Valuation and Total", "Total"]]),
self.precision("other_charges_deducted"))
if self.meta.get_field("other_charges_added_import"):
self.other_charges_added_import = flt(self.other_charges_added /
self.conversion_rate, self.precision("other_charges_added_import"))
if self.meta.get_field("other_charges_deducted_import"):
self.other_charges_deducted_import = flt(self.other_charges_deducted /
self.conversion_rate, self.precision("other_charges_deducted_import"))
def calculate_outstanding_amount(self):
if self.doctype == "Purchase Invoice" and self.docstatus == 0:
self.total_advance = flt(self.total_advance,
self.precision("total_advance"))
self.total_amount_to_pay = flt(self.grand_total - flt(self.write_off_amount,
self.precision("write_off_amount")), self.precision("total_amount_to_pay"))
self.outstanding_amount = flt(self.total_amount_to_pay - self.total_advance,
self.precision("outstanding_amount"))
# update valuation rate
def update_valuation_rate(self, parentfield):
"""
item_tax_amount is the total tax amount applied on that item,
stored for valuation purposes.
TODO: rename item_tax_amount to valuation_tax_amount
"""
stock_items = self.get_stock_items()
stock_items_qty, stock_items_amount = 0, 0
last_stock_item_idx = 1
for d in self.get(parentfield):
if d.item_code and d.item_code in stock_items:
stock_items_qty += flt(d.qty)
stock_items_amount += flt(d.base_amount)
last_stock_item_idx = d.idx
total_valuation_amount = sum([flt(d.tax_amount) for d in
self.get("other_charges")
if d.category in ["Valuation", "Valuation and Total"]])
valuation_amount_adjustment = total_valuation_amount
for i, item in enumerate(self.get(parentfield)):
if item.item_code and item.qty and item.item_code in stock_items:
item_proportion = flt(item.base_amount) / stock_items_amount if stock_items_amount \
else flt(item.qty) / stock_items_qty
if i == (last_stock_item_idx - 1):
item.item_tax_amount = flt(valuation_amount_adjustment,
self.precision("item_tax_amount", item))
else:
item.item_tax_amount = flt(item_proportion * total_valuation_amount,
self.precision("item_tax_amount", item))
valuation_amount_adjustment -= item.item_tax_amount
self.round_floats_in(item)
item.conversion_factor = item.conversion_factor or flt(frappe.db.get_value(
"UOM Conversion Detail", {"parent": item.item_code, "uom": item.uom},
"conversion_factor")) or 1
qty_in_stock_uom = flt(item.qty * item.conversion_factor)
rm_supp_cost = flt(item.rm_supp_cost) if self.doctype=="Purchase Receipt" else 0.0
landed_cost_voucher_amount = flt(item.landed_cost_voucher_amount) \
if self.doctype == "Purchase Receipt" else 0.0
item.valuation_rate = ((item.base_amount + item.item_tax_amount + rm_supp_cost
+ landed_cost_voucher_amount) / qty_in_stock_uom)
else:
item.valuation_rate = 0.0
def validate_for_subcontracting(self):
if not self.is_subcontracted and self.sub_contracted_items:
frappe.throw(_("Please enter 'Is Subcontracted' as Yes or No"))
if self.doctype == "Purchase Receipt" and self.is_subcontracted=="Yes" \
and not self.supplier_warehouse:
frappe.throw(_("Supplier Warehouse mandatory for sub-contracted Purchase Receipt"))
def create_raw_materials_supplied(self, raw_material_table):
if self.is_subcontracted=="Yes":
parent_items = []
rm_supplied_idx = 0
for item in self.get(self.fname):
if self.doctype == "Purchase Receipt":
item.rm_supp_cost = 0.0
if item.item_code in self.sub_contracted_items:
self.update_raw_materials_supplied(item, raw_material_table, rm_supplied_idx)
if [item.item_code, item.name] not in parent_items:
parent_items.append([item.item_code, item.name])
self.cleanup_raw_materials_supplied(parent_items, raw_material_table)
elif self.doctype == "Purchase Receipt":
for item in self.get(self.fname):
item.rm_supp_cost = 0.0
def update_raw_materials_supplied(self, item, raw_material_table, rm_supplied_idx):
bom_items = self.get_items_from_default_bom(item.item_code)
raw_materials_cost = 0
for bom_item in bom_items:
# check if exists
exists = 0
for d in self.get(raw_material_table):
if d.main_item_code == item.item_code and d.rm_item_code == bom_item.item_code \
and d.reference_name == item.name:
rm, exists = d, 1
break
if not exists:
rm = self.append(raw_material_table, {})
required_qty = flt(bom_item.qty_consumed_per_unit) * flt(item.qty) * flt(item.conversion_factor)
rm.reference_name = item.name
rm.bom_detail_no = bom_item.name
rm.main_item_code = item.item_code
rm.rm_item_code = bom_item.item_code
rm.stock_uom = bom_item.stock_uom
rm.required_qty = required_qty
rm.conversion_factor = item.conversion_factor
rm.idx = rm_supplied_idx
if self.doctype == "Purchase Receipt":
rm.consumed_qty = required_qty
rm.description = bom_item.description
if item.batch_no and not rm.batch_no:
rm.batch_no = item.batch_no
rm_supplied_idx += 1
# get raw materials rate
if self.doctype == "Purchase Receipt":
from erpnext.stock.utils import get_incoming_rate
rm.rate = get_incoming_rate({
"item_code": bom_item.item_code,
"warehouse": self.supplier_warehouse,
"posting_date": self.posting_date,
"posting_time": self.posting_time,
"qty": -1 * required_qty,
"serial_no": rm.serial_no
})
if not rm.rate:
from erpnext.stock.stock_ledger import get_valuation_rate
rm.rate = get_valuation_rate(bom_item.item_code, self.supplier_warehouse)
else:
rm.rate = bom_item.rate
rm.amount = required_qty * flt(rm.rate)
raw_materials_cost += flt(rm.amount)
if self.doctype == "Purchase Receipt":
item.rm_supp_cost = raw_materials_cost
def cleanup_raw_materials_supplied(self, parent_items, raw_material_table):
"""Remove all those child items which are no longer present in main item table"""
delete_list = []
for d in self.get(raw_material_table):
if [d.main_item_code, d.reference_name] not in parent_items:
# mark for deletion from doclist
delete_list.append(d)
# delete from doclist
if delete_list:
rm_supplied_details = self.get(raw_material_table)
self.set(raw_material_table, [])
for d in rm_supplied_details:
if d not in delete_list:
self.append(raw_material_table, d)
def get_items_from_default_bom(self, item_code):
bom_items = frappe.db.sql("""select t2.item_code,
ifnull(t2.qty, 0) / ifnull(t1.quantity, 1) as qty_consumed_per_unit,
t2.rate, t2.stock_uom, t2.name, t2.description
from `tabBOM` t1, `tabBOM Item` t2
where t2.parent = t1.name and t1.item = %s and t1.is_default = 1
and t1.docstatus = 1 and t1.is_active = 1""", item_code, as_dict=1)
if not bom_items:
msgprint(_("No default BOM exists for Item {0}").format(item_code), raise_exception=1)
return bom_items
@property
def sub_contracted_items(self):
if not hasattr(self, "_sub_contracted_items"):
self._sub_contracted_items = []
item_codes = list(set(item.item_code for item in
self.get(self.fname)))
if item_codes:
self._sub_contracted_items = [r[0] for r in frappe.db.sql("""select name
from `tabItem` where name in (%s) and is_sub_contracted_item='Yes'""" % \
(", ".join((["%s"]*len(item_codes))),), item_codes)]
return self._sub_contracted_items
@property
def purchase_items(self):
if not hasattr(self, "_purchase_items"):
self._purchase_items = []
item_codes = list(set(item.item_code for item in
self.get(self.fname)))
if item_codes:
self._purchase_items = [r[0] for r in frappe.db.sql("""select name
from `tabItem` where name in (%s) and is_purchase_item='Yes'""" % \
(", ".join((["%s"]*len(item_codes))),), item_codes)]
return self._purchase_items
def is_item_table_empty(self):
if not len(self.get(self.fname)):
frappe.throw(_("Item table can not be blank"))
def set_qty_as_per_stock_uom(self):
for d in self.get(self.fname):
if d.meta.get_field("stock_qty") and not d.stock_qty:
if not d.conversion_factor:
frappe.throw(_("Row {0}: Conversion Factor is mandatory"))
d.stock_qty = flt(d.qty) * flt(d.conversion_factor)
| agpl-3.0 | 9,192,716,296,658,684,000 | 6,878,351,907,142,174,000 | 36.719888 | 116 | 0.697535 | false |
Wattpad/dd-agent | tests/core/test_service_discovery.py | 4 | 27446 | # stdlib
import copy
import mock
import unittest
# 3p
from nose.plugins.attrib import attr
# project
from utils.service_discovery.config_stores import get_config_store
from utils.service_discovery.consul_config_store import ConsulStore
from utils.service_discovery.etcd_config_store import EtcdStore
from utils.service_discovery.abstract_config_store import AbstractConfigStore
from utils.service_discovery.sd_backend import get_sd_backend
from utils.service_discovery.sd_docker_backend import SDDockerBackend
def clear_singletons(agentConfig):
get_config_store(agentConfig)._drop()
get_sd_backend(agentConfig)._drop()
class Response(object):
"""Dummy response class for mocking purpose"""
def __init__(self, content):
self.content = content
def json(self):
return self.content
def raise_for_status(self):
pass
def _get_container_inspect(c_id):
"""Return a mocked container inspect dict from self.container_inspects."""
for co, _, _, _ in TestServiceDiscovery.container_inspects:
if co.get('Id') == c_id:
return co
return None
def _get_conf_tpls(image_name, trace_config=False, kube_annotations=None):
"""Return a mocked configuration template from self.mock_templates."""
return copy.deepcopy(TestServiceDiscovery.mock_templates.get(image_name)[0])
def _get_check_tpls(image_name, **kwargs):
if image_name in TestServiceDiscovery.mock_templates:
return [copy.deepcopy(TestServiceDiscovery.mock_templates.get(image_name)[0][0][0:3])]
elif image_name in TestServiceDiscovery.bad_mock_templates:
try:
return [copy.deepcopy(TestServiceDiscovery.bad_mock_templates.get(image_name))]
except Exception:
return None
def client_read(path, **kwargs):
"""Return a mocked string that would normally be read from a config store (etcd, consul...)."""
parts = path.split('/')
config_parts = ['check_names', 'init_configs', 'instances']
image, config_part = parts[-2], parts[-1]
if 'all' in kwargs:
return {}
else:
return TestServiceDiscovery.mock_tpls.get(image)[0][config_parts.index(config_part)]
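# Illustrative path consumed by client_read (a sketch mirroring how the
# config stores lay out their keys; the '/datadog/check_configs' prefix is
# arbitrary here):
#
#   client_read('/datadog/check_configs/image_0/instances')
#   # -> '[{"host": "%%host%%"}]'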
def issue_read(identifier):
return TestServiceDiscovery.mock_tpls.get(identifier)
@attr('unix')
class TestServiceDiscovery(unittest.TestCase):
docker_container_inspect = {
u'Id': u'69ff25598b2314d1cdb7752cc3a659fb1c1352b32546af4f1454321550e842c0',
u'Image': u'nginx',
u'Name': u'/nginx',
u'NetworkSettings': {u'IPAddress': u'172.17.0.21', u'Ports': {u'443/tcp': None, u'80/tcp': None}}
}
docker_container_inspect_with_label = {
u'Id': u'69ff25598b2314d1cdb7752cc3a659fb1c1352b32546af4f1454321550e842c0',
u'Image': u'nginx',
u'Name': u'/nginx',
u'NetworkSettings': {u'IPAddress': u'172.17.0.21', u'Ports': {u'443/tcp': None, u'80/tcp': None}},
u'Labels': {'com.datadoghq.sd.check.id': 'custom-nginx'}
}
kubernetes_container_inspect = {
u'Id': u'389dc8a4361f3d6c866e9e9a7b6972b26a31c589c4e2f097375d55656a070bc9',
u'Image': u'foo',
u'Name': u'/k8s_sentinel.38057ab9_redis-master_default_27b84e1e-a81c-11e5-8347-42010af00002_f70875a1',
u'Config': {u'ExposedPorts': {u'6379/tcp': {}}},
u'NetworkSettings': {u'IPAddress': u'', u'Ports': None}
}
malformed_container_inspect = {
u'Id': u'69ff25598b2314d1cdb7752cc3a659fb1c1352b32546af4f1454321550e842c0',
u'Image': u'foo',
u'Name': u'/nginx'
}
container_inspects = [
# (inspect_dict, expected_ip, tpl_var, expected_port, expected_ident)
(docker_container_inspect, '172.17.0.21', 'port', '443', 'nginx'),
(docker_container_inspect_with_label, '172.17.0.21', 'port', '443', 'custom-nginx'),
(kubernetes_container_inspect, None, 'port', '6379', 'foo'), # arbitrarily defined in the mocked pod_list
(malformed_container_inspect, None, 'port', KeyError, 'foo')
]
# templates with variables already extracted
mock_templates = {
# image_name: ([(check_name, init_tpl, instance_tpl, variables)], (expected_config_template))
'image_0': (
[('check_0', {}, {'host': '%%host%%'}, ['host'])],
('check_0', {}, {'host': '127.0.0.1'})),
'image_1': (
[('check_1', {}, {'port': '%%port%%'}, ['port'])],
('check_1', {}, {'port': '1337'})),
'image_2': (
[('check_2', {}, {'host': '%%host%%', 'port': '%%port%%'}, ['host', 'port'])],
('check_2', {}, {'host': '127.0.0.1', 'port': '1337'})),
}
# raw templates coming straight from the config store
mock_tpls = {
# image_name: ('[check_name]', '[init_tpl]', '[instance_tpl]', expected_python_tpl_list)
'image_0': (
('["check_0"]', '[{}]', '[{"host": "%%host%%"}]'),
[('check_0', {}, {"host": "%%host%%"})]),
'image_1': (
('["check_1"]', '[{}]', '[{"port": "%%port%%"}]'),
[('check_1', {}, {"port": "%%port%%"})]),
'image_2': (
('["check_2"]', '[{}]', '[{"host": "%%host%%", "port": "%%port%%"}]'),
[('check_2', {}, {"host": "%%host%%", "port": "%%port%%"})]),
'bad_image_0': ((['invalid template']), []),
'bad_image_1': (('invalid template'), []),
'bad_image_2': (None, []),
'nginx': ('["nginx"]', '[{}]', '[{"host": "localhost"}]'),
'nginx:latest': ('["nginx"]', '[{}]', '[{"host": "localhost", "tags": ["foo"]}]'),
'custom-nginx': ('["nginx"]', '[{}]', '[{"host": "localhost"}]'),
'repo/custom-nginx': ('["nginx"]', '[{}]', '[{"host": "localhost", "tags": ["bar"]}]'),
'repo/dir:5000/custom-nginx:latest': ('["nginx"]', '[{}]', '[{"host": "local", "tags": ["foobar"]}]')
}
bad_mock_templates = {
'bad_image_0': ('invalid template'),
'bad_image_1': [('invalid template')],
'bad_image_2': None
}
def setUp(self):
self.etcd_agentConfig = {
'service_discovery': True,
'service_discovery_backend': 'docker',
'sd_template_dir': '/datadog/check_configs',
'sd_config_backend': 'etcd',
'sd_backend_host': '127.0.0.1',
'sd_backend_port': '2380'
}
self.consul_agentConfig = {
'service_discovery': True,
'service_discovery_backend': 'docker',
'sd_template_dir': '/datadog/check_configs',
'sd_config_backend': 'consul',
'sd_backend_host': '127.0.0.1',
'sd_backend_port': '8500'
}
self.auto_conf_agentConfig = {
'service_discovery': True,
'service_discovery_backend': 'docker',
'sd_template_dir': '/datadog/check_configs',
'additional_checksd': '/etc/dd-agent/checks.d/',
}
self.agentConfigs = [self.etcd_agentConfig, self.consul_agentConfig, self.auto_conf_agentConfig]
# sd_backend tests
@mock.patch('utils.http.requests.get')
@mock.patch('utils.kubernetes.kubeutil.check_yaml')
def test_get_host_address(self, mock_check_yaml, mock_get):
kubernetes_config = {'instances': [{'kubelet_port': 1337}]}
pod_list = {
'items': [{
'status': {
'podIP': '127.0.0.1',
'containerStatuses': [
{'containerID': 'docker://389dc8a4361f3d6c866e9e9a7b6972b26a31c589c4e2f097375d55656a070bc9'}
]
}
}]
}
# (inspect, tpl_var, expected_result)
ip_address_inspects = [
({'NetworkSettings': {}}, 'host', None),
({'NetworkSettings': {'IPAddress': ''}}, 'host', None),
({'NetworkSettings': {'IPAddress': '127.0.0.1'}}, 'host', '127.0.0.1'),
({'NetworkSettings': {'IPAddress': '127.0.0.1', 'Networks': {}}}, 'host', '127.0.0.1'),
({'NetworkSettings': {
'IPAddress': '127.0.0.1',
'Networks': {'bridge': {'IPAddress': '127.0.0.1'}}}},
'host', '127.0.0.1'),
({'NetworkSettings': {
'IPAddress': '',
'Networks': {'bridge': {'IPAddress': '127.0.0.1'}}}},
'host_bridge', '127.0.0.1'),
({'NetworkSettings': {
'IPAddress': '127.0.0.1',
'Networks': {
'bridge': {'IPAddress': '172.17.0.2'},
'foo': {'IPAddress': '192.168.0.2'}}}},
'host', '172.17.0.2'),
({'NetworkSettings': {'Networks': {}}}, 'host', None),
({'NetworkSettings': {'Networks': {}}}, 'host_bridge', None),
({'NetworkSettings': {'Networks': {'bridge': {}}}}, 'host', None),
({'NetworkSettings': {'Networks': {'bridge': {}}}}, 'host_bridge', None),
({'NetworkSettings': {
'Networks': {
'bridge': {'IPAddress': '172.17.0.2'}
}}},
'host_bridge', '172.17.0.2'),
({'NetworkSettings': {
'Networks': {
'bridge': {'IPAddress': '172.17.0.2'},
'foo': {'IPAddress': '192.168.0.2'}
}}},
'host_foo', '192.168.0.2')
]
mock_check_yaml.return_value = kubernetes_config
mock_get.return_value = Response(pod_list)
for c_ins, tpl_var, expected_ip in ip_address_inspects:
with mock.patch.object(AbstractConfigStore, '__init__', return_value=None):
with mock.patch('utils.dockerutil.DockerUtil.client', return_value=None):
with mock.patch('utils.kubernetes.kubeutil.get_conf_path', return_value=None):
sd_backend = get_sd_backend(agentConfig=self.auto_conf_agentConfig)
self.assertEquals(sd_backend._get_host_address(c_ins, tpl_var), expected_ip)
clear_singletons(self.auto_conf_agentConfig)
def test_get_port(self):
with mock.patch('utils.dockerutil.DockerUtil.client', return_value=None):
for c_ins, _, var_tpl, expected_ports, _ in self.container_inspects:
sd_backend = get_sd_backend(agentConfig=self.auto_conf_agentConfig)
if isinstance(expected_ports, str):
self.assertEquals(sd_backend._get_port(c_ins, var_tpl), expected_ports)
else:
self.assertRaises(expected_ports, sd_backend._get_port, c_ins, var_tpl)
clear_singletons(self.auto_conf_agentConfig)
@mock.patch('docker.Client.inspect_container', side_effect=_get_container_inspect)
@mock.patch.object(SDDockerBackend, '_get_config_templates', side_effect=_get_conf_tpls)
def test_get_check_configs(self, mock_inspect_container, mock_get_conf_tpls):
"""Test get_check_config with mocked container inspect and config template"""
with mock.patch('utils.dockerutil.DockerUtil.client', return_value=None):
with mock.patch.object(SDDockerBackend, '_get_host_address', return_value='127.0.0.1'):
with mock.patch.object(SDDockerBackend, '_get_port', return_value='1337'):
c_id = self.docker_container_inspect.get('Id')
for image in self.mock_templates.keys():
sd_backend = get_sd_backend(agentConfig=self.auto_conf_agentConfig)
self.assertEquals(
sd_backend._get_check_configs(c_id, image)[0],
self.mock_templates[image][1])
clear_singletons(self.auto_conf_agentConfig)
@mock.patch.object(AbstractConfigStore, 'get_check_tpls', side_effect=_get_check_tpls)
def test_get_config_templates(self, mock_get_check_tpls):
"""Test _get_config_templates with mocked get_check_tpls"""
with mock.patch('utils.dockerutil.DockerUtil.client', return_value=None):
with mock.patch.object(EtcdStore, 'get_client', return_value=None):
with mock.patch.object(ConsulStore, 'get_client', return_value=None):
for agentConfig in self.agentConfigs:
sd_backend = get_sd_backend(agentConfig=agentConfig)
# normal cases
for image in self.mock_templates.keys():
template = sd_backend._get_config_templates(image)
expected_template = self.mock_templates.get(image)[0]
self.assertEquals(template, expected_template)
# error cases
for image in self.bad_mock_templates.keys():
self.assertEquals(sd_backend._get_config_templates(image), None)
clear_singletons(agentConfig)
def test_render_template(self):
"""Test _render_template"""
valid_configs = [
(({}, {'host': '%%host%%'}, {'host': 'foo'}),
({}, {'host': 'foo'})),
(({}, {'host': '%%host%%', 'port': '%%port%%'}, {'host': 'foo', 'port': '1337'}),
({}, {'host': 'foo', 'port': '1337'})),
(({'foo': '%%bar%%'}, {}, {'bar': 'w00t'}),
({'foo': 'w00t'}, {})),
(({'foo': '%%bar%%'}, {'host': '%%host%%'}, {'bar': 'w00t', 'host': 'localhost'}),
({'foo': 'w00t'}, {'host': 'localhost'}))
]
invalid_configs = [
({}, {'host': '%%host%%'}, {}), # no value to use
({}, {'host': '%%host%%'}, {'port': 42}), # the variable name doesn't match
({'foo': '%%bar%%'}, {'host': '%%host%%'}, {'host': 'foo'}) # not enough value/no matching var name
]
with mock.patch('utils.dockerutil.DockerUtil.client', return_value=None):
with mock.patch.object(EtcdStore, 'get_client', return_value=None):
with mock.patch.object(ConsulStore, 'get_client', return_value=None):
for agentConfig in self.agentConfigs:
sd_backend = get_sd_backend(agentConfig=agentConfig)
for tpl, res in valid_configs:
init, instance, variables = tpl
config = sd_backend._render_template(init, instance, variables)
self.assertEquals(config, res)
for init, instance, variables in invalid_configs:
config = sd_backend._render_template(init, instance, variables)
self.assertEquals(config, None)
clear_singletons(agentConfig)
def test_fill_tpl(self):
"""Test _fill_tpl with mocked docker client"""
valid_configs = [
# ((inspect, instance_tpl, variables, tags), (expected_instance_tpl, expected_var_values))
(({}, {'host': 'localhost'}, [], None), ({'host': 'localhost'}, {})),
(
({'NetworkSettings': {'IPAddress': ''}}, {'host': 'localhost'}, [], None),
({'host': 'localhost'}, {})
),
(
({'NetworkSettings': {'Networks': {}}}, {'host': 'localhost'}, [], None),
({'host': 'localhost'}, {})
),
(
({'NetworkSettings': {'Networks': {'bridge': {}}}}, {'host': 'localhost'}, [], None),
({'host': 'localhost'}, {})
),
(
({'NetworkSettings': {'IPAddress': '127.0.0.1'}},
{'host': '%%host%%', 'port': 1337}, ['host'], ['foo', 'bar:baz']),
({'host': '%%host%%', 'port': 1337, 'tags': ['foo', 'bar:baz']}, {'host': '127.0.0.1'}),
),
(
({'NetworkSettings': {'IPAddress': '127.0.0.1', 'Networks': {}}},
{'host': '%%host%%', 'port': 1337}, ['host'], ['foo', 'bar:baz']),
({'host': '%%host%%', 'port': 1337, 'tags': ['foo', 'bar:baz']}, {'host': '127.0.0.1'}),
),
(
({'NetworkSettings': {
'IPAddress': '127.0.0.1',
'Networks': {'bridge': {'IPAddress': '172.17.0.2'}}}
},
{'host': '%%host%%', 'port': 1337}, ['host'], ['foo', 'bar:baz']),
({'host': '%%host%%', 'port': 1337, 'tags': ['foo', 'bar:baz']}, {'host': '172.17.0.2'}),
),
(
({'NetworkSettings': {
'IPAddress': '',
'Networks': {
'bridge': {'IPAddress': '172.17.0.2'},
'foo': {'IPAddress': '192.168.0.2'}
}}
},
{'host': '%%host_bridge%%', 'port': 1337}, ['host_bridge'], ['foo', 'bar:baz']),
({'host': '%%host_bridge%%', 'port': 1337, 'tags': ['foo', 'bar:baz']},
{'host_bridge': '172.17.0.2'}),
),
(
({'NetworkSettings': {
'IPAddress': '',
'Networks': {
'bridge': {'IPAddress': '172.17.0.2'},
'foo': {'IPAddress': '192.168.0.2'}
}}
},
{'host': '%%host_foo%%', 'port': 1337}, ['host_foo'], ['foo', 'bar:baz']),
({'host': '%%host_foo%%', 'port': 1337, 'tags': ['foo', 'bar:baz']},
{'host_foo': '192.168.0.2'}),
),
(
({'NetworkSettings': {'IPAddress': '127.0.0.1', 'Ports': {'42/tcp': None, '22/tcp': None}}},
{'host': '%%host%%', 'port': '%%port_1%%', 'tags': ['env:test']},
['host', 'port_1'], ['foo', 'bar:baz']),
({'host': '%%host%%', 'port': '%%port_1%%', 'tags': ['env:test', 'foo', 'bar:baz']},
{'host': '127.0.0.1', 'port_1': '42'})
)
]
# should not fail but return something specific
edge_cases = [
# ((inspect, instance_tpl, variables, tags), (expected_instance_tpl, expected_var_values))
# specify bridge but there is also a default IPAddress (networks should be preferred)
(
({'NetworkSettings': {
'IPAddress': '127.0.0.1',
'Networks': {'bridge': {'IPAddress': '172.17.0.2'}}}},
{'host': '%%host_bridge%%', 'port': 1337}, ['host_bridge'], ['foo', 'bar:baz']),
({'host': '%%host_bridge%%', 'port': 1337, 'tags': ['foo', 'bar:baz']},
{'host_bridge': '172.17.0.2'})
),
# specify index but there is a default IPAddress (there's a specifier, even if it's wrong, walking networks should be preferred)
(
({'NetworkSettings': {
'IPAddress': '127.0.0.1',
'Networks': {'bridge': {'IPAddress': '172.17.0.2'}}}},
{'host': '%%host_0%%', 'port': 1337}, ['host_0'], ['foo', 'bar:baz']),
({'host': '%%host_0%%', 'port': 1337, 'tags': ['foo', 'bar:baz']}, {'host_0': '172.17.0.2'}),
),
# missing key for host, bridge network should be preferred
(
({'NetworkSettings': {'Networks': {
'bridge': {'IPAddress': '127.0.0.1'},
'foo': {'IPAddress': '172.17.0.2'}}}},
{'host': '%%host_bar%%', 'port': 1337}, ['host_bar'], []),
({'host': '%%host_bar%%', 'port': 1337}, {'host_bar': '127.0.0.1'}),
),
# missing index for port
(
({'NetworkSettings': {'IPAddress': '127.0.0.1', 'Ports': {'42/tcp': None, '22/tcp': None}}},
{'host': '%%host%%', 'port': '%%port_2%%', 'tags': ['env:test']},
['host', 'port_2'], ['foo', 'bar:baz']),
({'host': '%%host%%', 'port': '%%port_2%%', 'tags': ['env:test', 'foo', 'bar:baz']},
{'host': '127.0.0.1', 'port_2': '42'})
)
]
# should raise
invalid_config = [
# ((inspect, instance_tpl, variables, tags), expected_exception)
# template variable but no IPAddress available
(
({'NetworkSettings': {'Networks': {}}},
{'host': '%%host%%', 'port': 1337}, ['host'], ['foo', 'bar:baz']),
Exception,
),
# index but no IPAddress available
(
({'NetworkSettings': {'Networks': {}}},
{'host': '%%host_0%%', 'port': 1337}, ['host_0'], ['foo', 'bar:baz']),
Exception,
),
# key but no IPAddress available
(
({'NetworkSettings': {'Networks': {}}},
{'host': '%%host_foo%%', 'port': 1337}, ['host_foo'], ['foo', 'bar:baz']),
Exception,
),
# template variable but no port available
(
({'NetworkSettings': {'Networks': {}}},
{'host': 'localhost', 'port': '%%port%%'}, ['port'], []),
Exception,
),
# index but no port available
(
({'NetworkSettings': {'Networks': {}}},
{'host': 'localhost', 'port_0': '%%port%%'}, ['port_0'], []),
Exception,
),
# key but no port available
(
({'NetworkSettings': {'Networks': {}}},
{'host': 'localhost', 'port': '%%port_foo%%'}, ['port_foo'], []),
Exception,
)
]
with mock.patch('utils.dockerutil.DockerUtil.client', return_value=None):
with mock.patch.object(EtcdStore, 'get_client', return_value=None):
with mock.patch.object(ConsulStore, 'get_client', return_value=None):
for ac in self.agentConfigs:
sd_backend = get_sd_backend(agentConfig=ac)
try:
for co in valid_configs + edge_cases:
inspect, tpl, variables, tags = co[0]
instance_tpl, var_values = sd_backend._fill_tpl(inspect, tpl, variables, tags)
for key in instance_tpl.keys():
if isinstance(instance_tpl[key], list):
self.assertEquals(len(instance_tpl[key]), len(co[1][0].get(key)))
for elem in instance_tpl[key]:
self.assertTrue(elem in co[1][0].get(key))
else:
self.assertEquals(instance_tpl[key], co[1][0].get(key))
self.assertEquals(var_values, co[1][1])
for co in invalid_config:
inspect, tpl, variables, tags = co[0]
self.assertRaises(co[1], sd_backend._fill_tpl(inspect, tpl, variables, tags))
clear_singletons(ac)
except Exception:
clear_singletons(ac)
raise
# config_stores tests
def test_get_auto_config(self):
"""Test _get_auto_config"""
expected_tpl = {
'redis': ('redisdb', None, {"host": "%%host%%", "port": "%%port%%"}),
'consul': ('consul', None, {
"url": "http://%%host%%:%%port%%", "catalog_checks": True, "new_leader_checks": True
}),
'redis:v1': ('redisdb', None, {"host": "%%host%%", "port": "%%port%%"}),
'foobar': None
}
config_store = get_config_store(self.auto_conf_agentConfig)
for image in expected_tpl.keys():
config = config_store._get_auto_config(image)
self.assertEquals(config, expected_tpl.get(image))
@mock.patch.object(AbstractConfigStore, 'client_read', side_effect=client_read)
def test_get_check_tpls(self, mock_client_read):
"""Test get_check_tpls"""
valid_config = ['image_0', 'image_1', 'image_2']
invalid_config = ['bad_image_0', 'bad_image_1']
config_store = get_config_store(self.auto_conf_agentConfig)
for image in valid_config:
tpl = self.mock_tpls.get(image)[1]
self.assertEquals(tpl, config_store.get_check_tpls(image))
for image in invalid_config:
tpl = self.mock_tpls.get(image)[1]
self.assertEquals(tpl, config_store.get_check_tpls(image))
@mock.patch.object(AbstractConfigStore, 'client_read', side_effect=client_read)
def test_get_check_tpls_kube(self, mock_client_read):
"""Test get_check_tpls"""
valid_config = ['image_0', 'image_1', 'image_2']
invalid_config = ['bad_image_0']
config_store = get_config_store(self.auto_conf_agentConfig)
for image in valid_config + invalid_config:
tpl = self.mock_tpls.get(image)[1]
if tpl:
self.assertNotEquals(
tpl,
config_store.get_check_tpls('k8s-' + image, auto_conf=True))
self.assertEquals(
tpl,
config_store.get_check_tpls(
'k8s-' + image, auto_conf=True,
kube_annotations=dict(zip(
['com.datadoghq.sd/check_names',
'com.datadoghq.sd/init_configs',
'com.datadoghq.sd/instances'],
self.mock_tpls[image][0]))))
def test_get_config_id(self):
"""Test get_config_id"""
with mock.patch('utils.dockerutil.DockerUtil.client', return_value=None):
for c_ins, _, _, _, expected_ident in self.container_inspects:
sd_backend = get_sd_backend(agentConfig=self.auto_conf_agentConfig)
self.assertEqual(
sd_backend.get_config_id(c_ins.get('Image'), c_ins.get('Labels', {})),
expected_ident)
clear_singletons(self.auto_conf_agentConfig)
@mock.patch.object(AbstractConfigStore, '_issue_read', side_effect=issue_read)
def test_read_config_from_store(self, issue_read):
"""Test read_config_from_store"""
valid_idents = [('nginx', 'nginx'), ('nginx:latest', 'nginx:latest'),
('custom-nginx', 'custom-nginx'), ('custom-nginx:latest', 'custom-nginx'),
('repo/custom-nginx:latest', 'custom-nginx'),
('repo/dir:5000/custom-nginx:latest', 'repo/dir:5000/custom-nginx:latest')]
invalid_idents = ['foo']
config_store = get_config_store(self.auto_conf_agentConfig)
for ident, expected_key in valid_idents:
tpl = config_store.read_config_from_store(ident)
# source is added after reading from the store
self.assertEquals(tpl, ('template',) + self.mock_tpls.get(expected_key))
for ident in invalid_idents:
self.assertEquals(config_store.read_config_from_store(ident), [])
| bsd-3-clause | -7,502,216,058,776,267,000 | -4,584,558,327,360,826,400 | 46.898778 | 140 | 0.496612 | false |
billy-inn/scikit-learn | examples/linear_model/lasso_dense_vs_sparse_data.py | 348 | 1862 | """
==============================
Lasso on dense and sparse data
==============================
We show that linear_model.Lasso provides the same results for dense and sparse
data, and that in the case of sparse data the computation is faster.
"""
print(__doc__)
from time import time
from scipy import sparse
from scipy import linalg
from sklearn.datasets.samples_generator import make_regression
from sklearn.linear_model import Lasso
###############################################################################
# The two Lasso implementations on Dense data
print("--- Dense matrices")
X, y = make_regression(n_samples=200, n_features=5000, random_state=0)
X_sp = sparse.coo_matrix(X)
alpha = 1
sparse_lasso = Lasso(alpha=alpha, fit_intercept=False, max_iter=1000)
dense_lasso = Lasso(alpha=alpha, fit_intercept=False, max_iter=1000)
t0 = time()
sparse_lasso.fit(X_sp, y)
print("Sparse Lasso done in %fs" % (time() - t0))
t0 = time()
dense_lasso.fit(X, y)
print("Dense Lasso done in %fs" % (time() - t0))
print("Distance between coefficients : %s"
% linalg.norm(sparse_lasso.coef_ - dense_lasso.coef_))
###############################################################################
# The two Lasso implementations on Sparse data
print("--- Sparse matrices")
Xs = X.copy()
Xs[Xs < 2.5] = 0.0
Xs = sparse.coo_matrix(Xs)
Xs = Xs.tocsc()
print("Matrix density : %s %%" % (Xs.nnz / float(X.size) * 100))
alpha = 0.1
sparse_lasso = Lasso(alpha=alpha, fit_intercept=False, max_iter=10000)
dense_lasso = Lasso(alpha=alpha, fit_intercept=False, max_iter=10000)
t0 = time()
sparse_lasso.fit(Xs, y)
print("Sparse Lasso done in %fs" % (time() - t0))
t0 = time()
dense_lasso.fit(Xs.toarray(), y)
print("Dense Lasso done in %fs" % (time() - t0))
print("Distance between coefficients : %s"
% linalg.norm(sparse_lasso.coef_ - dense_lasso.coef_))
| bsd-3-clause | -8,734,045,557,271,746,000 | -3,230,442,875,596,224,500 | 27.212121 | 79 | 0.616004 | false |
stephen144/odoo | openerp/addons/base/ir/ir_ui_menu.py | 30 | 14357 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import base64
import operator
import re
import threading
import openerp
from openerp.osv import fields, osv
from openerp import api, tools
from openerp.http import request
from openerp.tools.safe_eval import safe_eval as eval
from openerp.tools.translate import _
MENU_ITEM_SEPARATOR = "/"
class ir_ui_menu(osv.osv):
_name = 'ir.ui.menu'
def __init__(self, *args, **kwargs):
super(ir_ui_menu, self).__init__(*args, **kwargs)
self.pool['ir.model.access'].register_cache_clearing_method(self._name, 'clear_caches')
@api.model
@tools.ormcache('frozenset(self.env.user.groups_id.ids)', 'debug')
def _visible_menu_ids(self, debug=False):
""" Return the ids of the menu items visible to the user. """
# retrieve all menus, and determine which ones are visible
context = {'ir.ui.menu.full_list': True}
menus = self.with_context(context).search([])
groups = self.env.user.groups_id if debug else self.env.user.groups_id - self.env.ref('base.group_no_one')
# first discard all menus with groups the user does not have
menus = menus.filtered(
lambda menu: not menu.groups_id or menu.groups_id & groups)
# take apart menus that have an action
action_menus = menus.filtered(lambda m: m.action and m.action.exists())
folder_menus = menus - action_menus
visible = self.browse()
# process action menus, check whether their action is allowed
access = self.env['ir.model.access']
model_fname = {
'ir.actions.act_window': 'res_model',
'ir.actions.report.xml': 'model',
'ir.actions.server': 'model_id',
}
for menu in action_menus:
fname = model_fname.get(menu.action._name)
if not fname or not menu.action[fname] or \
access.check(menu.action[fname], 'read', False):
# make menu visible, and its folder ancestors, too
visible += menu
menu = menu.parent_id
while menu and menu in folder_menus and menu not in visible:
visible += menu
menu = menu.parent_id
return set(visible.ids)
@api.multi
@api.returns('self')
def _filter_visible_menus(self):
""" Filter `self` to only keep the menu items that should be visible in
the menu hierarchy of the current user.
Uses a cache for speeding up the computation.
"""
visible_ids = self._visible_menu_ids(request.debug if request else False)
return self.filtered(lambda menu: menu.id in visible_ids)
def search(self, cr, uid, args, offset=0, limit=None, order=None, context=None, count=False):
if context is None:
context = {}
ids = super(ir_ui_menu, self).search(cr, uid, args, offset=0,
limit=None, order=order, context=context, count=False)
if not ids:
if count:
return 0
return []
# menu filtering is done only on main menu tree, not other menu lists
if context.get('ir.ui.menu.full_list'):
result = ids
else:
result = self._filter_visible_menus(cr, uid, ids, context=context)
if offset:
result = result[long(offset):]
if limit:
result = result[:long(limit)]
if count:
return len(result)
return result
def name_get(self, cr, uid, ids, context=None):
res = []
for id in ids:
elmt = self.browse(cr, uid, id, context=context)
res.append((id, self._get_one_full_name(elmt)))
return res
def _get_full_name(self, cr, uid, ids, name=None, args=None, context=None):
if context is None:
context = {}
res = {}
for elmt in self.browse(cr, uid, ids, context=context):
res[elmt.id] = self._get_one_full_name(elmt)
return res
def _get_one_full_name(self, elmt, level=6):
if level <= 0:
return '...'
if elmt.parent_id:
parent_path = self._get_one_full_name(elmt.parent_id, level - 1) + MENU_ITEM_SEPARATOR
else:
parent_path = ''
return parent_path + elmt.name
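# Illustrative result of _get_one_full_name (not part of the original
# module): for a menu "Quotations" under "Orders" under "Sales", the full
# path is "Sales/Orders/Quotations"; ancestors beyond the ``level`` cutoff
# are abbreviated to "...".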
def create(self, cr, uid, values, context=None):
self.clear_caches()
return super(ir_ui_menu, self).create(cr, uid, values, context=context)
def write(self, cr, uid, ids, values, context=None):
self.clear_caches()
return super(ir_ui_menu, self).write(cr, uid, ids, values, context=context)
def unlink(self, cr, uid, ids, context=None):
# Detach children and promote them to top-level, because it would be unwise to
# cascade-delete submenus blindly. We also can't use ondelete=set null because
# that is not supported when _parent_store is used (would silently corrupt it).
# TODO: ideally we should move them under a generic "Orphans" menu somewhere?
if isinstance(ids, (int, long)):
ids = [ids]
local_context = dict(context or {})
local_context['ir.ui.menu.full_list'] = True
direct_children_ids = self.search(cr, uid, [('parent_id', 'in', ids)], context=local_context)
if direct_children_ids:
self.write(cr, uid, direct_children_ids, {'parent_id': False})
result = super(ir_ui_menu, self).unlink(cr, uid, ids, context=context)
self.clear_caches()
return result
def copy(self, cr, uid, id, default=None, context=None):
res = super(ir_ui_menu, self).copy(cr, uid, id, default=default, context=context)
datas = self.read(cr, uid, [res], ['name'])[0]
rex = re.compile(r'\(([0-9]+)\)')
concat = rex.findall(datas['name'])
if concat:
next_num = int(concat[0]) + 1
datas['name'] = rex.sub('(%d)' % next_num, datas['name'])
else:
datas['name'] += '(1)'
self.write(cr, uid, [res], {'name': datas['name']})
return res
def read_image(self, path):
if not path:
return False
path_info = path.split(',')
icon_path = openerp.modules.get_module_resource(path_info[0],path_info[1])
icon_image = False
if icon_path:
try:
icon_file = tools.file_open(icon_path,'rb')
icon_image = base64.encodestring(icon_file.read())
finally:
icon_file.close()
return icon_image
def get_needaction_data(self, cr, uid, ids, context=None):
""" Return for each menu entry of ids :
- if it uses the needaction mechanism (needaction_enabled)
- the needaction counter of the related action, taking into account
the action domain
"""
if context is None:
context = {}
res = {}
menu_ids = set()
for menu in self.browse(cr, uid, ids, context=context):
menu_ids.add(menu.id)
ctx = None
if menu.action and menu.action.type in ('ir.actions.act_window', 'ir.actions.client') and menu.action.context:
try:
# use magical UnquoteEvalContext to ignore undefined client-side variables such as `active_id`
eval_ctx = tools.UnquoteEvalContext(**context)
ctx = eval(menu.action.context, locals_dict=eval_ctx, nocopy=True) or None
except Exception:
# if the eval still fails for some reason, we'll simply skip this menu
pass
menu_ref = ctx and ctx.get('needaction_menu_ref')
if menu_ref:
if not isinstance(menu_ref, list):
menu_ref = [menu_ref]
model_data_obj = self.pool.get('ir.model.data')
for menu_data in menu_ref:
try:
model, id = model_data_obj.get_object_reference(cr, uid, menu_data.split('.')[0], menu_data.split('.')[1])
if (model == 'ir.ui.menu'):
menu_ids.add(id)
except Exception:
pass
menu_ids = list(menu_ids)
for menu in self.browse(cr, uid, menu_ids, context=context):
res[menu.id] = {
'needaction_enabled': False,
'needaction_counter': False,
}
if menu.action and menu.action.type in ('ir.actions.act_window', 'ir.actions.client') and menu.action.res_model:
if menu.action.res_model in self.pool:
obj = self.pool[menu.action.res_model]
if obj._needaction:
if menu.action.type == 'ir.actions.act_window':
eval_context = self.pool['ir.actions.act_window']._get_eval_context(cr, uid, context=context)
dom = menu.action.domain and eval(menu.action.domain, eval_context) or []
else:
dom = eval(menu.action.params_store or '{}', {'uid': uid}).get('domain')
res[menu.id]['needaction_enabled'] = obj._needaction
res[menu.id]['needaction_counter'] = obj._needaction_count(cr, uid, dom, context=context)
return res
def get_user_roots(self, cr, uid, context=None):
""" Return all root menu ids visible for the user.
:return: the root menu ids
:rtype: list(int)
"""
menu_domain = [('parent_id', '=', False)]
return self.search(cr, uid, menu_domain, context=context)
@api.cr_uid_context
@tools.ormcache_context('uid', keys=('lang',))
def load_menus_root(self, cr, uid, context=None):
fields = ['name', 'sequence', 'parent_id', 'action', 'web_icon_data']
menu_root_ids = self.get_user_roots(cr, uid, context=context)
menu_roots = self.read(cr, uid, menu_root_ids, fields, context=context) if menu_root_ids else []
return {
'id': False,
'name': 'root',
'parent_id': [-1, ''],
'children': menu_roots,
'all_menu_ids': menu_root_ids,
}
@api.cr_uid_context
@tools.ormcache_context('uid', 'debug', keys=('lang',))
def load_menus(self, cr, uid, debug, context=None):
""" Loads all menu items (all applications and their sub-menus).
:return: the menu root
:rtype: dict('children': menu_nodes)
"""
fields = ['name', 'sequence', 'parent_id', 'action', 'web_icon_data']
menu_root_ids = self.get_user_roots(cr, uid, context=context)
menu_roots = self.read(cr, uid, menu_root_ids, fields, context=context) if menu_root_ids else []
menu_root = {
'id': False,
'name': 'root',
'parent_id': [-1, ''],
'children': menu_roots,
'all_menu_ids': menu_root_ids,
}
if not menu_roots:
return menu_root
# menus are loaded fully, unlike a regular tree view, because there are a
# limited number of items (752 when all 6.1 addons are installed)
menu_ids = self.search(cr, uid, [('id', 'child_of', menu_root_ids)], 0, False, False, context=context)
menu_items = self.read(cr, uid, menu_ids, fields, context=context)
# adds roots at the end of the sequence, so that they will overwrite
# equivalent menu items from full menu read when put into id:item
# mapping, resulting in children being correctly set on the roots.
menu_items.extend(menu_roots)
menu_root['all_menu_ids'] = menu_ids # includes menu_root_ids!
# make a tree using parent_id
menu_items_map = dict(
(menu_item["id"], menu_item) for menu_item in menu_items)
for menu_item in menu_items:
if menu_item['parent_id']:
parent = menu_item['parent_id'][0]
else:
parent = False
if parent in menu_items_map:
menu_items_map[parent].setdefault(
'children', []).append(menu_item)
# sort by sequence a tree using parent_id
for menu_item in menu_items:
menu_item.setdefault('children', []).sort(
key=operator.itemgetter('sequence'))
return menu_root
_columns = {
'name': fields.char('Menu', required=True, translate=True),
'sequence': fields.integer('Sequence'),
'child_id': fields.one2many('ir.ui.menu', 'parent_id', 'Child IDs'),
'parent_id': fields.many2one('ir.ui.menu', 'Parent Menu', select=True, ondelete="restrict"),
'parent_left': fields.integer('Parent Left', select=True),
'parent_right': fields.integer('Parent Right', select=True),
'groups_id': fields.many2many('res.groups', 'ir_ui_menu_group_rel',
'menu_id', 'gid', 'Groups', help="If you have groups, the visibility of this menu will be based on these groups. "\
"If this field is empty, Odoo will compute visibility based on the related object's read access."),
'complete_name': fields.function(_get_full_name, string='Full Path', type='char'),
'web_icon': fields.char('Web Icon File'),
'action': fields.reference('Action', selection=[
('ir.actions.report.xml', 'ir.actions.report.xml'),
('ir.actions.act_window', 'ir.actions.act_window'),
('ir.actions.act_url', 'ir.actions.act_url'),
('ir.actions.server', 'ir.actions.server'),
('ir.actions.client', 'ir.actions.client'),
]),
}
web_icon_data = openerp.fields.Binary('Web Icon Image',
compute="_compute_web_icon", store=True, attachment=True)
@api.depends('web_icon')
def _compute_web_icon(self):
for menu in self:
menu.web_icon_data = self.read_image(menu.web_icon)
_constraints = [
(osv.osv._check_recursion, 'Error! You cannot create a recursive menu.', ['parent_id'])
]
_defaults = {
'sequence': 10,
}
_order = "sequence,id"
_parent_store = True
| agpl-3.0 | 7,066,258,027,997,612,000 | 9,146,951,987,558,829,000 | 41.226471 | 130 | 0.568294 | false |
mweisman/QGIS | python/plugins/db_manager/db_manager.py | 5 | 13145 | # -*- coding: utf-8 -*-
"""
/***************************************************************************
Name : DB Manager
Description : Database manager plugin for QGIS
Date : May 23, 2011
copyright : (C) 2011 by Giuseppe Sucameli
email : brush.tyler@gmail.com
The content of this file is based on
- PG_Manager by Martin Dobias (GPLv2 license)
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from .info_viewer import InfoViewer
from .table_viewer import TableViewer
from .layer_preview import LayerPreview
from .db_tree import DBTree
from .db_plugins.plugin import BaseError
from .dlg_db_error import DlgDbError
class DBManager(QMainWindow):
def __init__(self, iface, parent=None):
QMainWindow.__init__(self, parent)
self.setAttribute(Qt.WA_DeleteOnClose)
self.setupUi()
self.iface = iface
# restore the window state
settings = QSettings()
self.restoreGeometry( settings.value("/DB_Manager/mainWindow/geometry", QByteArray(), type=QByteArray ) )
self.restoreState( settings.value("/DB_Manager/mainWindow/windowState", QByteArray(), type=QByteArray ) )
self.connect(self.tabs, SIGNAL("currentChanged(int)"), self.tabChanged)
self.connect(self.tree, SIGNAL("selectedItemChanged"), self.itemChanged)
self.itemChanged(None)
def closeEvent(self, e):
self.unregisterAllActions()
# save the window state
settings = QSettings()
settings.setValue( "/DB_Manager/mainWindow/windowState", self.saveState() )
settings.setValue( "/DB_Manager/mainWindow/geometry", self.saveGeometry() )
QMainWindow.closeEvent(self, e)
def refreshItem(self, item=None):
QApplication.setOverrideCursor(Qt.WaitCursor)
try:
if item is None:
item = self.tree.currentItem()
self.tree.refreshItem(item) # refresh item children in the db tree
except BaseError, e:
DlgDbError.showError(e, self)
return
finally:
QApplication.restoreOverrideCursor()
def itemChanged(self, item):
QApplication.setOverrideCursor(Qt.WaitCursor)
try:
self.reloadButtons()
self.refreshTabs()
except BaseError, e:
DlgDbError.showError(e, self)
return
finally:
QApplication.restoreOverrideCursor()
def reloadButtons(self):
db = self.tree.currentDatabase()
if not hasattr(self, '_lastDb'):
self._lastDb = db
elif db == self._lastDb:
return
# remove old actions
if self._lastDb != None:
self.unregisterAllActions()
# add actions of the selected database
self._lastDb = db
if self._lastDb != None:
self._lastDb.registerAllActions(self)
def tabChanged(self, index):
QApplication.setOverrideCursor(Qt.WaitCursor)
try:
self.refreshTabs()
except BaseError, e:
DlgDbError.showError(e, self)
return
finally:
QApplication.restoreOverrideCursor()
def refreshTabs(self):
index = self.tabs.currentIndex()
item = self.tree.currentItem()
table = self.tree.currentTable()
# enable/disable tabs
self.tabs.setTabEnabled( self.tabs.indexOf(self.table), table != None )
self.tabs.setTabEnabled( self.tabs.indexOf(self.preview), table != None and table.type in [table.VectorType, table.RasterType] and table.geomColumn != None )
# show the info tab if the current tab is disabled
if not self.tabs.isTabEnabled( index ):
self.tabs.setCurrentWidget( self.info )
current_tab = self.tabs.currentWidget()
if current_tab == self.info:
self.info.showInfo( item )
elif current_tab == self.table:
self.table.loadData( item )
elif current_tab == self.preview:
self.preview.loadPreview( item )
def refreshActionSlot(self):
self.info.setDirty()
self.table.setDirty()
self.preview.setDirty()
self.refreshItem()
def importActionSlot(self):
db = self.tree.currentDatabase()
if db is None:
QMessageBox.information(self, self.tr("Sorry"), self.tr("No database selected or you are not connected to it."))
return
outUri = db.uri()
schema = self.tree.currentSchema()
if schema:
outUri.setDataSource( schema.name, "", "", "" )
from .dlg_import_vector import DlgImportVector
dlg = DlgImportVector(None, db, outUri, self)
dlg.exec_()
def exportActionSlot(self):
table = self.tree.currentTable()
if table is None:
QMessageBox.information(self, self.tr("Sorry"), self.tr("Select the table you want export to file."))
return
inLayer = table.toMapLayer()
from .dlg_export_vector import DlgExportVector
dlg = DlgExportVector(inLayer, table.database(), self)
dlg.exec_()
inLayer.deleteLater()
def runSqlWindow(self):
db = self.tree.currentDatabase()
if db == None:
QMessageBox.information(self, self.tr("Sorry"), self.tr("No database selected or you are not connected to it."))
return
from dlg_sql_window import DlgSqlWindow
dlg = DlgSqlWindow(self.iface, db, self)
#refreshDb = lambda x: self.refreshItem( db.connection() ) # refresh the database tree
#self.connect( dlg, SIGNAL( "queryExecuted(const QString &)" ), refreshDb )
dlg.show()
dlg.exec_()
def showSystemTables(self):
self.tree.showSystemTables( self.actionShowSystemTables.isChecked() )
def registerAction(self, action, menuName, callback=None):
""" register an action to the manager's main menu """
if not hasattr(self, '_registeredDbActions'):
self._registeredDbActions = {}
if callback != None:
invoke_callback = lambda x: self.invokeCallback( callback )
if menuName == None or menuName == "":
self.addAction( action )
if not self._registeredDbActions.has_key(menuName):
self._registeredDbActions[menuName] = list()
self._registeredDbActions[menuName].append(action)
if callback != None:
QObject.connect( action, SIGNAL("triggered(bool)"), invoke_callback )
return True
# search for the menu
actionMenu = None
helpMenuAction = None
for a in self.menuBar.actions():
if not a.menu() or a.menu().title() != menuName:
continue
if a.menu() != self.menuHelp:
helpMenuAction = a
actionMenu = a
break
# not found, add a new menu before the help menu
if actionMenu == None:
menu = QMenu(menuName, self)
if helpMenuAction != None:
actionMenu = self.menuBar.insertMenu(helpMenuAction, menu)
else:
actionMenu = self.menuBar.addMenu(menu)
menu = actionMenu.menu()
menuActions = menu.actions()
# get the placeholder's position to insert before it
pos = 0
for pos in range(len(menuActions)):
if menuActions[pos].isSeparator() and menuActions[pos].objectName().endswith("_placeholder"):
menuActions[pos].setVisible(True)
break
if pos < len(menuActions):
before = menuActions[pos]
menu.insertAction( before, action )
else:
menu.addAction( action )
actionMenu.setVisible(True) # show the menu
if not self._registeredDbActions.has_key(menuName):
self._registeredDbActions[menuName] = list()
self._registeredDbActions[menuName].append(action)
if callback != None:
QObject.connect( action, SIGNAL("triggered(bool)"), invoke_callback )
return True
def invokeCallback(self, callback, params=None):
""" Call a method passing the selected item in the database tree,
the sender (usually a QAction), the plugin mainWindow and
optionally additional parameters.
This method takes care to override and restore the cursor,
but also catches exceptions and displays the error dialog.
"""
QApplication.setOverrideCursor(Qt.WaitCursor)
try:
if params is None:
callback( self.tree.currentItem(), self.sender(), self )
else:
callback( self.tree.currentItem(), self.sender(), self, *params )
except BaseError, e:
# catch database errors and display the error dialog
DlgDbError.showError(e, self)
return
finally:
QApplication.restoreOverrideCursor()
def unregisterAction(self, action, menuName):
if not hasattr(self, '_registeredDbActions'):
return
if menuName == None or menuName == "":
self.removeAction( action )
if self._registeredDbActions.has_key(menuName):
if self._registeredDbActions[menuName].count( action ) > 0:
self._registeredDbActions[menuName].remove( action )
action.deleteLater()
return True
for a in self.menuBar.actions():
if not a.menu() or a.menu().title() != menuName:
continue
menu = a.menu()
menuActions = menu.actions()
menu.removeAction( action )
if menu.isEmpty(): # hide the menu
a.setVisible(False)
if self._registeredDbActions.has_key(menuName):
if self._registeredDbActions[menuName].count( action ) > 0:
self._registeredDbActions[menuName].remove( action )
# hide the placeholder if there are no other registered actions
if len(self._registeredDbActions[menuName]) <= 0:
for i in range(len(menuActions)):
if menuActions[i].isSeparator() and menuActions[i].objectName().endswith("_placeholder"):
menuActions[i].setVisible(False)
break
action.deleteLater()
return True
return False
def unregisterAllActions(self):
if not hasattr(self, '_registeredDbActions'):
return
for menuName in self._registeredDbActions:
for action in list(self._registeredDbActions[menuName]):
self.unregisterAction( action, menuName )
del self._registeredDbActions
def setupUi(self):
self.setWindowTitle(self.tr("DB Manager"))
self.setWindowIcon(QIcon(":/db_manager/icon"))
self.resize(QSize(700,500).expandedTo(self.minimumSizeHint()))
# create central tab widget
self.tabs = QTabWidget()
self.info = InfoViewer(self)
self.tabs.addTab(self.info, self.tr("Info"))
self.table = TableViewer(self)
self.tabs.addTab(self.table, self.tr("Table"))
self.preview = LayerPreview(self)
self.tabs.addTab(self.preview, self.tr("Preview"))
self.setCentralWidget(self.tabs)
# create database tree
self.dock = QDockWidget("Tree", self)
self.dock.setObjectName("DB_Manager_DBView")
self.dock.setFeatures(QDockWidget.DockWidgetMovable)
self.tree = DBTree(self)
self.dock.setWidget(self.tree)
self.addDockWidget(Qt.LeftDockWidgetArea, self.dock)
# create status bar
self.statusBar = QStatusBar(self)
self.setStatusBar(self.statusBar)
# create menus
self.menuBar = QMenuBar(self)
self.menuDb = QMenu(self.tr("&Database"), self)
actionMenuDb = self.menuBar.addMenu(self.menuDb)
self.menuSchema = QMenu(self.tr("&Schema"), self)
actionMenuSchema = self.menuBar.addMenu(self.menuSchema)
self.menuTable = QMenu(self.tr("&Table"), self)
actionMenuTable = self.menuBar.addMenu(self.menuTable)
self.menuHelp = None # QMenu(self.tr("&Help"), self)
#actionMenuHelp = self.menuBar.addMenu(self.menuHelp)
self.setMenuBar(self.menuBar)
# create toolbar
self.toolBar = QToolBar("Default", self)
self.toolBar.setObjectName("DB_Manager_ToolBar")
self.addToolBar(self.toolBar)
# create menus' actions
# menu DATABASE
sep = self.menuDb.addSeparator(); sep.setObjectName("DB_Manager_DbMenu_placeholder"); sep.setVisible(False)
self.actionRefresh = self.menuDb.addAction( QIcon(":/db_manager/actions/refresh"), self.tr("&Refresh"), self.refreshActionSlot, QKeySequence("F5") )
self.actionSqlWindow = self.menuDb.addAction( QIcon(":/db_manager/actions/sql_window"), self.tr("&SQL window"), self.runSqlWindow, QKeySequence("F2") )
self.menuDb.addSeparator()
self.actionClose = self.menuDb.addAction( QIcon(), self.tr("&Exit"), self.close, QKeySequence("CTRL+Q") )
# menu SCHEMA
sep = self.menuSchema.addSeparator(); sep.setObjectName("DB_Manager_SchemaMenu_placeholder"); sep.setVisible(False)
actionMenuSchema.setVisible(False)
# menu TABLE
sep = self.menuTable.addSeparator(); sep.setObjectName("DB_Manager_TableMenu_placeholder"); sep.setVisible(False)
self.actionImport = self.menuTable.addAction( QIcon(":/db_manager/actions/import"), self.tr("&Import layer/file"), self.importActionSlot )
self.actionExport = self.menuTable.addAction( QIcon(":/db_manager/actions/export"), self.tr("&Export to file"), self.exportActionSlot )
self.menuTable.addSeparator()
#self.actionShowSystemTables = self.menuTable.addAction(self.tr("Show system tables/views"), self.showSystemTables)
#self.actionShowSystemTables.setCheckable(True)
#self.actionShowSystemTables.setChecked(True)
actionMenuTable.setVisible(False)
# add actions to the toolbar
self.toolBar.addAction( self.actionRefresh )
self.toolBar.addAction( self.actionSqlWindow )
self.toolBar.addAction( self.actionImport )
self.toolBar.addAction( self.actionExport )
| gpl-2.0 | 4,266,899,500,364,563,500 | 5,301,849,898,620,288,000 | 31.297297 | 159 | 0.691974 | false |
klickagent/phantomjs | src/qt/qtwebkit/Tools/Scripts/webkitpy/port/factory_unittest.py | 118 | 3965 | # Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import unittest2 as unittest
from webkitpy.tool.mocktool import MockOptions
from webkitpy.common.system.systemhost_mock import MockSystemHost
from webkitpy.port import factory
from webkitpy.port import gtk
from webkitpy.port import mac
from webkitpy.port import qt
from webkitpy.port import test
from webkitpy.port import win
class FactoryTest(unittest.TestCase):
"""Test that the factory creates the proper port object for given combination of port_name, host.platform, and options."""
# FIXME: The ports themselves should expose what options they require,
# instead of passing generic "options".
def setUp(self):
self.webkit_options = MockOptions(pixel_tests=False)
def assert_port(self, port_name=None, os_name=None, os_version=None, options=None, cls=None):
host = MockSystemHost(os_name=os_name, os_version=os_version)
port = factory.PortFactory(host).get(port_name, options=options)
self.assertIsInstance(port, cls)
def test_mac(self):
self.assert_port(port_name='mac-lion', cls=mac.MacPort)
self.assert_port(port_name='mac-lion-wk2', cls=mac.MacPort)
self.assert_port(port_name='mac', os_name='mac', os_version='lion', cls=mac.MacPort)
self.assert_port(port_name=None, os_name='mac', os_version='lion', cls=mac.MacPort)
def test_win(self):
self.assert_port(port_name='win-xp', cls=win.WinPort)
self.assert_port(port_name='win-xp-wk2', cls=win.WinPort)
self.assert_port(port_name='win', os_name='win', os_version='xp', cls=win.WinPort)
self.assert_port(port_name=None, os_name='win', os_version='xp', cls=win.WinPort)
self.assert_port(port_name=None, os_name='win', os_version='xp', options=self.webkit_options, cls=win.WinPort)
def test_gtk(self):
self.assert_port(port_name='gtk', cls=gtk.GtkPort)
def test_qt(self):
self.assert_port(port_name='qt', cls=qt.QtPort)
def test_unknown_specified(self):
self.assertRaises(NotImplementedError, factory.PortFactory(MockSystemHost()).get, port_name='unknown')
def test_unknown_default(self):
self.assertRaises(NotImplementedError, factory.PortFactory(MockSystemHost(os_name='vms')).get)
def test_get_from_builder_name(self):
self.assertEqual(factory.PortFactory(MockSystemHost()).get_from_builder_name('Apple Lion Release WK1 (Tests)').name(),
'mac-lion')
| bsd-3-clause | 7,463,027,508,938,297,000 | 7,433,573,094,319,749,000 | 47.353659 | 126 | 0.732409 | false |
firerszd/kbengine | kbe/res/scripts/common/Lib/test/test_multibytecodec.py | 72 | 9977 | #
# test_multibytecodec.py
# Unit test for multibytecodec itself
#
from test import support
from test.support import TESTFN
import unittest, io, codecs, sys, os
import _multibytecodec
ALL_CJKENCODINGS = [
# _codecs_cn
'gb2312', 'gbk', 'gb18030', 'hz',
# _codecs_hk
'big5hkscs',
# _codecs_jp
'cp932', 'shift_jis', 'euc_jp', 'euc_jisx0213', 'shift_jisx0213',
'euc_jis_2004', 'shift_jis_2004',
# _codecs_kr
'cp949', 'euc_kr', 'johab',
# _codecs_tw
'big5', 'cp950',
# _codecs_iso2022
'iso2022_jp', 'iso2022_jp_1', 'iso2022_jp_2', 'iso2022_jp_2004',
'iso2022_jp_3', 'iso2022_jp_ext', 'iso2022_kr',
]
class Test_MultibyteCodec(unittest.TestCase):
def test_nullcoding(self):
for enc in ALL_CJKENCODINGS:
self.assertEqual(b''.decode(enc), '')
self.assertEqual(str(b'', enc), '')
self.assertEqual(''.encode(enc), b'')
def test_str_decode(self):
for enc in ALL_CJKENCODINGS:
self.assertEqual('abcd'.encode(enc), b'abcd')
def test_errorcallback_longindex(self):
dec = codecs.getdecoder('euc-kr')
myreplace = lambda exc: ('', sys.maxsize+1)
codecs.register_error('test.cjktest', myreplace)
self.assertRaises(IndexError, dec,
b'apple\x92ham\x93spam', 'test.cjktest')
def test_codingspec(self):
try:
for enc in ALL_CJKENCODINGS:
code = '# coding: {}\n'.format(enc)
exec(code)
finally:
support.unlink(TESTFN)
def test_init_segfault(self):
# bug #3305: this used to segfault
self.assertRaises(AttributeError,
_multibytecodec.MultibyteStreamReader, None)
self.assertRaises(AttributeError,
_multibytecodec.MultibyteStreamWriter, None)
def test_decode_unicode(self):
# Trying to decode a unicode string should raise a TypeError
for enc in ALL_CJKENCODINGS:
self.assertRaises(TypeError, codecs.getdecoder(enc), "")
class Test_IncrementalEncoder(unittest.TestCase):
def test_stateless(self):
# cp949 encoder isn't stateful at all.
encoder = codecs.getincrementalencoder('cp949')()
self.assertEqual(encoder.encode('\ud30c\uc774\uc36c \ub9c8\uc744'),
b'\xc6\xc4\xc0\xcc\xbd\xe3 \xb8\xb6\xc0\xbb')
self.assertEqual(encoder.reset(), None)
self.assertEqual(encoder.encode('\u2606\u223c\u2606', True),
b'\xa1\xd9\xa1\xad\xa1\xd9')
self.assertEqual(encoder.reset(), None)
self.assertEqual(encoder.encode('', True), b'')
self.assertEqual(encoder.encode('', False), b'')
self.assertEqual(encoder.reset(), None)
def test_stateful(self):
# jisx0213 encoder is stateful for a few codepoints, e.g.:
# U+00E6 => A9DC
# U+00E6 U+0300 => ABC4
# U+0300 => ABDC
encoder = codecs.getincrementalencoder('jisx0213')()
self.assertEqual(encoder.encode('\u00e6\u0300'), b'\xab\xc4')
self.assertEqual(encoder.encode('\u00e6'), b'')
self.assertEqual(encoder.encode('\u0300'), b'\xab\xc4')
self.assertEqual(encoder.encode('\u00e6', True), b'\xa9\xdc')
self.assertEqual(encoder.reset(), None)
self.assertEqual(encoder.encode('\u0300'), b'\xab\xdc')
self.assertEqual(encoder.encode('\u00e6'), b'')
self.assertEqual(encoder.encode('', True), b'\xa9\xdc')
self.assertEqual(encoder.encode('', True), b'')
def test_stateful_keep_buffer(self):
encoder = codecs.getincrementalencoder('jisx0213')()
self.assertEqual(encoder.encode('\u00e6'), b'')
self.assertRaises(UnicodeEncodeError, encoder.encode, '\u0123')
self.assertEqual(encoder.encode('\u0300\u00e6'), b'\xab\xc4')
self.assertRaises(UnicodeEncodeError, encoder.encode, '\u0123')
self.assertEqual(encoder.reset(), None)
self.assertEqual(encoder.encode('\u0300'), b'\xab\xdc')
self.assertEqual(encoder.encode('\u00e6'), b'')
self.assertRaises(UnicodeEncodeError, encoder.encode, '\u0123')
self.assertEqual(encoder.encode('', True), b'\xa9\xdc')
def test_issue5640(self):
encoder = codecs.getincrementalencoder('shift-jis')('backslashreplace')
self.assertEqual(encoder.encode('\xff'), b'\\xff')
self.assertEqual(encoder.encode('\n'), b'\n')
class Test_IncrementalDecoder(unittest.TestCase):
def test_dbcs(self):
# cp949 decoder is simple with only 1 or 2 bytes sequences.
decoder = codecs.getincrementaldecoder('cp949')()
self.assertEqual(decoder.decode(b'\xc6\xc4\xc0\xcc\xbd'),
'\ud30c\uc774')
self.assertEqual(decoder.decode(b'\xe3 \xb8\xb6\xc0\xbb'),
'\uc36c \ub9c8\uc744')
self.assertEqual(decoder.decode(b''), '')
def test_dbcs_keep_buffer(self):
decoder = codecs.getincrementaldecoder('cp949')()
self.assertEqual(decoder.decode(b'\xc6\xc4\xc0'), '\ud30c')
self.assertRaises(UnicodeDecodeError, decoder.decode, b'', True)
self.assertEqual(decoder.decode(b'\xcc'), '\uc774')
self.assertEqual(decoder.decode(b'\xc6\xc4\xc0'), '\ud30c')
self.assertRaises(UnicodeDecodeError, decoder.decode,
b'\xcc\xbd', True)
self.assertEqual(decoder.decode(b'\xcc'), '\uc774')
def test_iso2022(self):
decoder = codecs.getincrementaldecoder('iso2022-jp')()
ESC = b'\x1b'
self.assertEqual(decoder.decode(ESC + b'('), '')
self.assertEqual(decoder.decode(b'B', True), '')
self.assertEqual(decoder.decode(ESC + b'$'), '')
self.assertEqual(decoder.decode(b'B@$'), '\u4e16')
self.assertEqual(decoder.decode(b'@$@'), '\u4e16')
self.assertEqual(decoder.decode(b'$', True), '\u4e16')
self.assertEqual(decoder.reset(), None)
self.assertEqual(decoder.decode(b'@$'), '@$')
self.assertEqual(decoder.decode(ESC + b'$'), '')
self.assertRaises(UnicodeDecodeError, decoder.decode, b'', True)
self.assertEqual(decoder.decode(b'B@$'), '\u4e16')
def test_decode_unicode(self):
# Trying to decode a unicode string should raise a TypeError
for enc in ALL_CJKENCODINGS:
decoder = codecs.getincrementaldecoder(enc)()
self.assertRaises(TypeError, decoder.decode, "")
class Test_StreamReader(unittest.TestCase):
def test_bug1728403(self):
try:
f = open(TESTFN, 'wb')
try:
f.write(b'\xa1')
finally:
f.close()
f = codecs.open(TESTFN, encoding='cp949')
try:
self.assertRaises(UnicodeDecodeError, f.read, 2)
finally:
f.close()
finally:
support.unlink(TESTFN)
class Test_StreamWriter(unittest.TestCase):
def test_gb18030(self):
s= io.BytesIO()
c = codecs.getwriter('gb18030')(s)
c.write('123')
self.assertEqual(s.getvalue(), b'123')
c.write('\U00012345')
self.assertEqual(s.getvalue(), b'123\x907\x959')
c.write('\uac00\u00ac')
self.assertEqual(s.getvalue(),
b'123\x907\x959\x827\xcf5\x810\x851')
def test_utf_8(self):
s= io.BytesIO()
c = codecs.getwriter('utf-8')(s)
c.write('123')
self.assertEqual(s.getvalue(), b'123')
c.write('\U00012345')
self.assertEqual(s.getvalue(), b'123\xf0\x92\x8d\x85')
c.write('\uac00\u00ac')
self.assertEqual(s.getvalue(),
b'123\xf0\x92\x8d\x85'
b'\xea\xb0\x80\xc2\xac')
def test_streamwriter_strwrite(self):
s = io.BytesIO()
wr = codecs.getwriter('gb18030')(s)
wr.write('abcd')
self.assertEqual(s.getvalue(), b'abcd')
class Test_ISO2022(unittest.TestCase):
def test_g2(self):
iso2022jp2 = b'\x1b(B:hu4:unit\x1b.A\x1bNi de famille'
uni = ':hu4:unit\xe9 de famille'
self.assertEqual(iso2022jp2.decode('iso2022-jp-2'), uni)
def test_iso2022_jp_g0(self):
self.assertNotIn(b'\x0e', '\N{SOFT HYPHEN}'.encode('iso-2022-jp-2'))
for encoding in ('iso-2022-jp-2004', 'iso-2022-jp-3'):
e = '\u3406'.encode(encoding)
self.assertFalse(any(x > 0x80 for x in e))
def test_bug1572832(self):
for x in range(0x10000, 0x110000):
# Any ISO 2022 codec will cause the segfault
chr(x).encode('iso_2022_jp', 'ignore')
class TestStateful(unittest.TestCase):
text = '\u4E16\u4E16'
encoding = 'iso-2022-jp'
expected = b'\x1b$B@$@$'
reset = b'\x1b(B'
expected_reset = expected + reset
def test_encode(self):
self.assertEqual(self.text.encode(self.encoding), self.expected_reset)
def test_incrementalencoder(self):
encoder = codecs.getincrementalencoder(self.encoding)()
output = b''.join(
encoder.encode(char)
for char in self.text)
self.assertEqual(output, self.expected)
self.assertEqual(encoder.encode('', final=True), self.reset)
self.assertEqual(encoder.encode('', final=True), b'')
def test_incrementalencoder_final(self):
encoder = codecs.getincrementalencoder(self.encoding)()
last_index = len(self.text) - 1
output = b''.join(
encoder.encode(char, index == last_index)
for index, char in enumerate(self.text))
self.assertEqual(output, self.expected_reset)
self.assertEqual(encoder.encode('', final=True), b'')
class TestHZStateful(TestStateful):
text = '\u804a\u804a'
encoding = 'hz'
expected = b'~{ADAD'
reset = b'~}'
expected_reset = expected + reset
def test_main():
support.run_unittest(__name__)
if __name__ == "__main__":
test_main()
| lgpl-3.0 | 4,671,490,101,903,074,000 | -2,292,973,168,025,507,600 | 36.935361 | 79 | 0.602486 | false |
openhatch/oh-mainline | mysite/search/migrations/0021_remove_icon_for_profile_since_we_realized_we_do_not_need_it.py | 17 | 3787 | # This file is part of OpenHatch.
# Copyright (C) 2009 OpenHatch, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from south.db import db
from django.db import models
from mysite.search.models import *
class Migration:
def forwards(self, orm):
# Deleting field 'Project.icon_for_profile'
db.delete_column('search_project', 'icon_for_profile')
def backwards(self, orm):
# Adding field 'Project.icon_for_profile'
db.add_column('search_project', 'icon_for_profile', orm['search.project:icon_for_profile'])
models = {
'search.bug': {
'bize_size_tag_name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'canonical_bug_link': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'date_reported': ('django.db.models.fields.DateTimeField', [], {}),
'description': ('django.db.models.fields.TextField', [], {}),
'good_for_newcomers': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'importance': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'last_polled': ('django.db.models.fields.DateTimeField', [], {}),
'last_touched': ('django.db.models.fields.DateTimeField', [], {}),
'looks_closed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'people_involved': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['search.Project']"}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'submitter_realname': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'submitter_username': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'search.project': {
'date_icon_was_fetched_from_ohloh': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
'icon_for_search_result': ('django.db.models.fields.files.ImageField', [], {'default': 'None', 'max_length': '100', 'null': 'True'}),
'icon_raw': ('django.db.models.fields.files.ImageField', [], {'default': 'None', 'max_length': '100', 'null': 'True'}),
'icon_smaller_for_badge': ('django.db.models.fields.files.ImageField', [], {'default': 'None', 'max_length': '100', 'null': 'True'}),
'icon_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'})
}
}
complete_apps = ['search']
| agpl-3.0 | 8,279,844,568,427,015,000 | -8,349,779,340,473,757,000 | 53.884058 | 145 | 0.597043 | false |
broxtronix/distributed | distributed/tests/test_joblib.py | 2 | 1853 | from __future__ import print_function, division, absolute_import
import pytest
from random import random
from time import sleep
from distributed.utils_test import inc, cluster, loop
backend = pytest.importorskip('distributed.joblib')
joblibs = [backend.joblib, backend.sk_joblib]
def slow_raise_value_error(condition, duration=0.05):
sleep(duration)
if condition:
raise ValueError("condition evaluated to True")
@pytest.mark.parametrize('joblib', joblibs)
def test_simple(loop, joblib):
if joblib is None:
pytest.skip()
Parallel = joblib.Parallel
delayed = joblib.delayed
with cluster() as (s, [a, b]):
with joblib.parallel_backend('dask.distributed', loop=loop,
scheduler_host=('127.0.0.1', s['port'])):
seq = Parallel()(delayed(inc)(i) for i in range(10))
assert seq == [inc(i) for i in range(10)]
with pytest.raises(ValueError):
Parallel()(delayed(slow_raise_value_error)(i == 3)
for i in range(10))
seq = Parallel()(delayed(inc)(i) for i in range(10))
assert seq == [inc(i) for i in range(10)]
ba, _ = joblib.parallel.get_active_backend()
ba.client.shutdown()
def random2():
return random()
@pytest.mark.parametrize('joblib', joblibs)
def test_dont_assume_function_purity(loop, joblib):
if joblib is None:
pytest.skip()
Parallel = joblib.Parallel
delayed = joblib.delayed
with cluster() as (s, [a, b]):
with joblib.parallel_backend('dask.distributed', loop=loop,
scheduler_host=('127.0.0.1', s['port'])):
x, y = Parallel()(delayed(random2)() for i in range(2))
assert x != y
ba, _ = joblib.parallel.get_active_backend()
ba.client.shutdown()
| bsd-3-clause | 4,387,543,228,742,184,400 | 4,833,430,122,672,293,000 | 29.377049 | 67 | 0.609282 | false |
chrish42/pylearn | pylearn2/sandbox/cuda_convnet/tests/test_common.py | 49 | 2802 | __authors__ = "Ian Goodfellow, David Warde-Farley"
__copyright__ = "Copyright 2010-2012, Universite de Montreal"
__credits__ = ["Ian Goodfellow, David Warde-Farley"]
__license__ = "3-clause BSD"
__maintainer__ = "LISA Lab"
__email__ = "pylearn-dev@googlegroups"
from pylearn2.testing.skip import skip_if_no_gpu
skip_if_no_gpu()
import numpy as np
from theano import shared
from pylearn2.sandbox.cuda_convnet.filter_acts import FilterActs
from pylearn2.sandbox.cuda_convnet.img_acts import ImageActs
from theano.sandbox.cuda import gpu_from_host
from theano import function
from theano.tensor import as_tensor_variable
def test_reject_rect():
for cls in (FilterActs, ImageActs):
# Tests that running FilterActs with a non-square
# kernel is an error
rng = np.random.RandomState([2012, 10, 9])
batch_size = 5
rows = 10
cols = 9
channels = 3
filter_rows = 4
filter_cols = filter_rows + 1
num_filters = 6
images = shared(rng.uniform(-1., 1., (channels, rows, cols,
batch_size)).astype('float32'), name='images')
filters = shared(rng.uniform(-1., 1., (channels, filter_rows,
filter_cols, num_filters)).astype('float32'), name='filters')
gpu_images = gpu_from_host(images)
gpu_filters = gpu_from_host(filters)
if cls is ImageActs:
output = cls()(gpu_images, gpu_filters,
as_tensor_variable((rows, cols)))
else:
output = cls()(gpu_images, gpu_filters)
f = function([], output)
try:
output = f()
except ValueError:
continue
assert False
def test_reject_bad_filt_number():
for cls in (FilterActs, ImageActs):
# Tests that running FilterActs with a # of filters per
# group that is not 16 is an error
rng = np.random.RandomState([2012, 10, 9])
batch_size = 5
rows = 10
cols = 9
channels = 3
filter_rows = 4
filter_cols = filter_rows
num_filters = 6
images = shared(rng.uniform(-1., 1., (channels, rows, cols,
batch_size)).astype('float32'), name='images')
filters = shared(rng.uniform(-1., 1., (channels, filter_rows,
filter_cols, num_filters)).astype('float32'), name='filters')
gpu_images = gpu_from_host(images)
gpu_filters = gpu_from_host(filters)
if cls is ImageActs:
output = cls()(gpu_images, gpu_filters,
as_tensor_variable((rows, cols)))
else:
output = cls()(gpu_images, gpu_filters)
f = function([], output)
try:
output = f()
except ValueError:
continue
assert False
| bsd-3-clause | -7,604,740,986,467,939,000 | -5,647,571,466,348,021,000 | 31.964706 | 73 | 0.586724 | false |
sdh11/gnuradio | grc/converter/block.py | 3 | 8171 | # Copyright 2016 Free Software Foundation, Inc.
# This file is part of GNU Radio
#
# GNU Radio Companion is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 2 of the License, or (at your
# option) any later version.
#
# GNU Radio Companion is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
"""
Converter for legacy block definitions in XML format
- Cheetah expressions that cannot be converted are passed to Cheetah for now
- Instead of generating a Block subclass directly, a string representation is
used and evaluated. This is slower / lamer but allows us to show the user
what a converted definition would look like
"""
from __future__ import absolute_import, division, print_function
from collections import OrderedDict, defaultdict
from itertools import chain
from ..core.io import yaml
from . import cheetah_converter, xml
current_file_format = 1
reserved_block_keys = ('import', ) # todo: add more keys
def from_xml(filename):
"""Load block description from xml file"""
element, version_info = xml.load(filename, 'block.dtd')
try:
data = convert_block_xml(element)
except NameError:
raise ValueError('Conversion failed', filename)
return data
def dump(data, stream):
out = yaml.dump(data)
replace = [
('parameters:', '\nparameters:'),
('inputs:', '\ninputs:'),
('outputs:', '\noutputs:'),
('templates:', '\ntemplates:'),
('documentation:', '\ndocumentation:'),
('file_format:', '\nfile_format:'),
]
for r in replace:
out = out.replace(*r)
prefix = '# auto-generated by grc.converter\n\n'
stream.write(prefix + out)
no_value = object()
dummy = cheetah_converter.DummyConverter()
def convert_block_xml(node):
converter = cheetah_converter.Converter(names={
param_node.findtext('key'): {
opt_node.text.split(':')[0]
for opt_node in next(param_node.iterfind('option'), param_node).iterfind('opt')
} for param_node in node.iterfind('param')
})
block_id = node.findtext('key')
if block_id in reserved_block_keys:
block_id += '_'
data = OrderedDict()
data['id'] = block_id
data['label'] = node.findtext('name') or no_value
data['category'] = node.findtext('category') or no_value
data['flags'] = [n.text for n in node.findall('flags')]
data['flags'] += ['show_id'] if block_id.startswith('variable') else []
if not data['flags']:
data['flags'] = no_value
data['parameters'] = [convert_param_xml(param_node, converter.to_python_dec)
for param_node in node.iterfind('param')] or no_value
# data['params'] = {p.pop('key'): p for p in data['params']}
data['inputs'] = [convert_port_xml(port_node, converter.to_python_dec)
for port_node in node.iterfind('sink')] or no_value
data['outputs'] = [convert_port_xml(port_node, converter.to_python_dec)
for port_node in node.iterfind('source')] or no_value
data['value'] = (
converter.to_python_dec(node.findtext('var_value')) or
('${ value }' if block_id.startswith('variable') else no_value)
)
data['asserts'] = [converter.to_python_dec(check_node.text)
for check_node in node.iterfind('check')] or no_value
data['templates'] = convert_templates(node, converter.to_mako, block_id) or no_value
docs = node.findtext('doc')
if docs:
docs = docs.strip().replace('\\\n', '')
data['documentation'] = yaml.MultiLineString(docs)
data['file_format'] = current_file_format
data = OrderedDict((key, value) for key, value in data.items() if value is not no_value)
auto_hide_params_for_item_sizes(data)
return data
def auto_hide_params_for_item_sizes(data):
item_size_templates = []
vlen_templates = []
for port in chain(*[data.get(direction, []) for direction in ['inputs', 'outputs']]):
for key in ['dtype', 'multiplicity']:
item_size_templates.append(str(port.get(key, '')))
vlen_templates.append(str(port.get('vlen', '')))
item_size_templates = ' '.join(value for value in item_size_templates if '${' in value)
vlen_templates = ' '.join(value for value in vlen_templates if '${' in value)
for param in data.get('parameters', []):
if param['id'] in item_size_templates:
param.setdefault('hide', 'part')
if param['id'] in vlen_templates:
param.setdefault('hide', "${ 'part' if vlen == 1 else 'none' }")
def convert_templates(node, convert, block_id=''):
templates = OrderedDict()
imports = '\n'.join(convert(import_node.text)
for import_node in node.iterfind('import'))
if '\n' in imports:
imports = yaml.MultiLineString(imports)
templates['imports'] = imports or no_value
templates['var_make'] = convert(node.findtext('var_make') or '') or no_value
make = convert(node.findtext('make') or '')
if make:
check_mako_template(block_id, make)
if '\n' in make:
make = yaml.MultiLineString(make)
templates['make'] = make or no_value
templates['callbacks'] = [
convert(cb_node.text) for cb_node in node.iterfind('callback')
] or no_value
return OrderedDict((key, value) for key, value in templates.items() if value is not no_value)
def convert_param_xml(node, convert):
param = OrderedDict()
param['id'] = node.findtext('key').strip()
param['label'] = node.findtext('name').strip()
param['category'] = node.findtext('tab') or no_value
param['dtype'] = convert(node.findtext('type') or '')
param['default'] = node.findtext('value') or no_value
options = yaml.ListFlowing(on.findtext('key') for on in node.iterfind('option'))
option_labels = yaml.ListFlowing(on.findtext('name') for on in node.iterfind('option'))
param['options'] = options or no_value
if not all(str(o).title() == l for o, l in zip(options, option_labels)):
param['option_labels'] = option_labels
attributes = defaultdict(yaml.ListFlowing)
for option_n in node.iterfind('option'):
for opt_n in option_n.iterfind('opt'):
key, value = opt_n.text.split(':', 2)
attributes[key].append(value)
param['option_attributes'] = dict(attributes) or no_value
param['hide'] = convert(node.findtext('hide')) or no_value
return OrderedDict((key, value) for key, value in param.items() if value is not no_value)
def convert_port_xml(node, convert):
port = OrderedDict()
label = node.findtext('name')
# default values:
port['label'] = label if label not in ('in', 'out') else no_value
dtype = convert(node.findtext('type'))
# TODO: detect dyn message ports
port['domain'] = domain = 'message' if dtype == 'message' else 'stream'
if domain == 'message':
port['id'], port['label'] = label, no_value
else:
port['dtype'] = dtype
vlen = node.findtext('vlen')
port['vlen'] = int(vlen) if vlen and vlen.isdigit() else convert(vlen) or no_value
port['multiplicity'] = convert(node.findtext('nports')) or no_value
port['optional'] = bool(node.findtext('optional')) or no_value
port['hide'] = convert(node.findtext('hide')) or no_value
return OrderedDict((key, value) for key, value in port.items() if value is not no_value)
def check_mako_template(block_id, expr):
import sys
from mako.template import Template
try:
Template(expr)
except Exception as error:
print(block_id, expr, type(error), error, '', sep='\n', file=sys.stderr)
| gpl-3.0 | -4,633,540,521,074,930,000 | -8,986,914,465,271,122,000 | 35.806306 | 97 | 0.642761 | false |
trevorlinton/skia | bench/bench_util.py | 29 | 11022 | '''
Created on May 19, 2011
@author: bungeman
'''
import re
import math
# bench representation algorithm constant names
ALGORITHM_AVERAGE = 'avg'
ALGORITHM_MEDIAN = 'med'
ALGORITHM_MINIMUM = 'min'
ALGORITHM_25TH_PERCENTILE = '25th'
# Regular expressions used throughout.
PER_SETTING_RE = '([^\s=]+)(?:=(\S+))?'
SETTINGS_RE = 'skia bench:((?:\s+' + PER_SETTING_RE + ')*)'
BENCH_RE = 'running bench (?:\[\d+ \d+\] )?\s*(\S+)'
TIME_RE = '(?:(\w*)msecs = )?\s*((?:\d+\.\d+)(?:,\s*\d+\.\d+)*)'
# non-per-tile benches have configs that don't end with ']' or '>'
CONFIG_RE = '(\S+[^\]>]):\s+((?:' + TIME_RE + '\s+)+)'
# per-tile bench lines are in the following format. Note that there are
# non-averaged bench numbers in separate lines, which we ignore now due to
# their inaccuracy.
TILE_RE = (' tile_(\S+): tile \[\d+,\d+\] out of \[\d+,\d+\] <averaged>:'
' ((?:' + TIME_RE + '\s+)+)')
# for extracting tile layout
TILE_LAYOUT_RE = ' out of \[(\d+),(\d+)\] <averaged>: '
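# Hypothetical bench output lines these patterns are written against
# (illustrative only; real output can vary between bench revisions):
#   running bench [640 480]   bitmap_4444
#   8888:  cmsecs = 11.41,11.39  gmsecs = 2.10,2.08
#    tile_565: tile [0,0] out of [4,3] <averaged>: msecs = 3.14,3.15
# The [4,3] layout above would be recorded as the tile layout '4x3'.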
PER_SETTING_RE_COMPILED = re.compile(PER_SETTING_RE)
SETTINGS_RE_COMPILED = re.compile(SETTINGS_RE)
BENCH_RE_COMPILED = re.compile(BENCH_RE)
TIME_RE_COMPILED = re.compile(TIME_RE)
CONFIG_RE_COMPILED = re.compile(CONFIG_RE)
TILE_RE_COMPILED = re.compile(TILE_RE)
TILE_LAYOUT_RE_COMPILED = re.compile(TILE_LAYOUT_RE)
class BenchDataPoint:
"""A single data point produced by bench.
(str, str, str, float, {str:str}, str, [floats])"""
def __init__(self, bench, config, time_type, time, settings,
tile_layout='', per_tile_values=[]):
self.bench = bench
self.config = config
self.time_type = time_type
self.time = time
self.settings = settings
# how tiles cover the whole picture. '5x3' means 5 columns and 3 rows.
self.tile_layout = tile_layout
# list of per_tile bench values, if applicable
self.per_tile_values = per_tile_values
def __repr__(self):
return "BenchDataPoint(%s, %s, %s, %s, %s)" % (
str(self.bench),
str(self.config),
str(self.time_type),
str(self.time),
str(self.settings),
)
class _ExtremeType(object):
"""Instances of this class compare greater or less than other objects."""
def __init__(self, cmpr, rep):
object.__init__(self)
self._cmpr = cmpr
self._rep = rep
def __cmp__(self, other):
if isinstance(other, self.__class__) and other._cmpr == self._cmpr:
return 0
return self._cmpr
def __repr__(self):
return self._rep
Max = _ExtremeType(1, "Max")
Min = _ExtremeType(-1, "Min")
class _ListAlgorithm(object):
"""Algorithm for selecting the representation value from a given list.
representation is one of the ALGORITHM_XXX representation types."""
def __init__(self, data, representation=None):
if not representation:
representation = ALGORITHM_AVERAGE # default algorithm
self._data = data
self._len = len(data)
if representation == ALGORITHM_AVERAGE:
self._rep = sum(self._data) / self._len
else:
self._data.sort()
if representation == ALGORITHM_MINIMUM:
self._rep = self._data[0]
else:
# for percentiles, we use the value below which x% of values are
# found, which allows for better detection of quantum behaviors.
if representation == ALGORITHM_MEDIAN:
x = int(round(0.5 * self._len + 0.5))
elif representation == ALGORITHM_25TH_PERCENTILE:
x = int(round(0.25 * self._len + 0.5))
else:
raise Exception("invalid representation algorithm %s!" %
representation)
self._rep = self._data[x - 1]
def compute(self):
return self._rep
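# Worked example (illustrative; relies on Python 2 round-half-away-from-zero):
# for data = [3.0, 1.0, 2.0, 4.0] (sorted: [1.0, 2.0, 3.0, 4.0]),
#   ALGORITHM_AVERAGE         -> 2.5
#   ALGORITHM_MINIMUM         -> 1.0
#   ALGORITHM_MEDIAN          -> 3.0  (x = round(0.5*4 + 0.5) = 3, data[2])
#   ALGORITHM_25TH_PERCENTILE -> 2.0  (x = round(0.25*4 + 0.5) = 2, data[1])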
def _ParseAndStoreTimes(config_re_compiled, is_per_tile, line, bench,
value_dic, layout_dic, representation=None):
"""Parses given bench time line with regex and adds data to value_dic.
config_re_compiled: precompiled regular expression for parsing the config
line.
is_per_tile: boolean indicating whether this is a per-tile bench.
If so, we add tile layout into layout_dic as well.
line: input string line to parse.
bench: name of bench for the time values.
value_dic: dictionary to store bench values. See bench_dic in parse() below.
layout_dic: dictionary to store tile layouts. See parse() for descriptions.
representation: should match one of the ALGORITHM_XXX types."""
for config in config_re_compiled.finditer(line):
current_config = config.group(1)
tile_layout = ''
if is_per_tile: # per-tile bench, add name prefix
current_config = 'tile_' + current_config
layouts = TILE_LAYOUT_RE_COMPILED.search(line)
if layouts and len(layouts.groups()) == 2:
tile_layout = '%sx%s' % layouts.groups()
times = config.group(2)
for new_time in TIME_RE_COMPILED.finditer(times):
current_time_type = new_time.group(1)
iters = [float(i) for i in
new_time.group(2).strip().split(',')]
value_dic.setdefault(bench, {}).setdefault(
current_config, {}).setdefault(current_time_type, []).append(
_ListAlgorithm(iters, representation).compute())
layout_dic.setdefault(bench, {}).setdefault(
current_config, {}).setdefault(current_time_type, tile_layout)
# TODO(bensong): switch to reading JSON output when available. This way we don't
# need the RE complexities.
def parse(settings, lines, representation=None):
"""Parses bench output into a useful data structure.
({str:str}, __iter__ -> str) -> [BenchDataPoint]
representation is one of the ALGORITHM_XXX types."""
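    # Typical invocation (illustrative; file name hypothetical):
    #   with open('bench_output.txt') as f:
    #       points = parse({'builder': 'Release'}, f, ALGORITHM_25TH_PERCENTILE)
    #   # -> list of BenchDataPoint, one per (bench, config, time_type)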
benches = []
current_bench = None
bench_dic = {} # [bench][config][time_type] -> [list of bench values]
# [bench][config][time_type] -> tile_layout
layout_dic = {}
for line in lines:
# see if this line is a settings line
settingsMatch = SETTINGS_RE_COMPILED.search(line)
if (settingsMatch):
settings = dict(settings)
for settingMatch in PER_SETTING_RE_COMPILED.finditer(settingsMatch.group(1)):
if (settingMatch.group(2)):
settings[settingMatch.group(1)] = settingMatch.group(2)
else:
settings[settingMatch.group(1)] = True
# see if this line starts a new bench
new_bench = BENCH_RE_COMPILED.search(line)
if new_bench:
current_bench = new_bench.group(1)
# add configs on this line to the bench_dic
if current_bench:
if line.startswith(' tile_') :
_ParseAndStoreTimes(TILE_RE_COMPILED, True, line, current_bench,
bench_dic, layout_dic, representation)
else:
_ParseAndStoreTimes(CONFIG_RE_COMPILED, False, line,
current_bench,
bench_dic, layout_dic, representation)
# append benches to list, use the total time as final bench value.
for bench in bench_dic:
for config in bench_dic[bench]:
for time_type in bench_dic[bench][config]:
tile_layout = ''
per_tile_values = []
if len(bench_dic[bench][config][time_type]) > 1:
# per-tile values, extract tile_layout
per_tile_values = bench_dic[bench][config][time_type]
tile_layout = layout_dic[bench][config][time_type]
benches.append(BenchDataPoint(
bench,
config,
time_type,
sum(bench_dic[bench][config][time_type]),
settings,
tile_layout,
per_tile_values))
return benches
class LinearRegression:
"""Linear regression data based on a set of data points.
([(Number,Number)])
There must be at least two points for this to make sense."""
def __init__(self, points):
n = len(points)
max_x = Min
min_x = Max
Sx = 0.0
Sy = 0.0
Sxx = 0.0
Sxy = 0.0
Syy = 0.0
for point in points:
x = point[0]
y = point[1]
max_x = max(max_x, x)
min_x = min(min_x, x)
Sx += x
Sy += y
Sxx += x*x
Sxy += x*y
Syy += y*y
denom = n*Sxx - Sx*Sx
if (denom != 0.0):
B = (n*Sxy - Sx*Sy) / denom
else:
B = 0.0
a = (1.0/n)*(Sy - B*Sx)
se2 = 0
sB2 = 0
sa2 = 0
if (n >= 3 and denom != 0.0):
se2 = (1.0/(n*(n-2)) * (n*Syy - Sy*Sy - B*B*denom))
sB2 = (n*se2) / denom
sa2 = sB2 * (1.0/n) * Sxx
self.slope = B
self.intercept = a
self.serror = math.sqrt(max(0, se2))
self.serror_slope = math.sqrt(max(0, sB2))
self.serror_intercept = math.sqrt(max(0, sa2))
self.max_x = max_x
self.min_x = min_x
def __repr__(self):
return "LinearRegression(%s, %s, %s, %s, %s)" % (
str(self.slope),
str(self.intercept),
str(self.serror),
str(self.serror_slope),
str(self.serror_intercept),
)
def find_min_slope(self):
"""Finds the minimal slope given one standard deviation."""
slope = self.slope
intercept = self.intercept
error = self.serror
regr_start = self.min_x
regr_end = self.max_x
regr_width = regr_end - regr_start
if slope < 0:
lower_left_y = slope*regr_start + intercept - error
upper_right_y = slope*regr_end + intercept + error
return min(0, (upper_right_y - lower_left_y) / regr_width)
elif slope > 0:
upper_left_y = slope*regr_start + intercept + error
lower_right_y = slope*regr_end + intercept - error
return max(0, (lower_right_y - upper_left_y) / regr_width)
return 0
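# Minimal usage sketch (illustrative data):
#   regr = LinearRegression([(1, 2.0), (2, 4.1), (3, 5.9)])
#   regr.slope, regr.intercept  # least-squares fit y = slope*x + intercept
#   regr.find_min_slope()       # flattest slope within one standard error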
def CreateRevisionLink(revision_number):
"""Returns HTML displaying the given revision number and linking to
that revision's change page at code.google.com, e.g.
http://code.google.com/p/skia/source/detail?r=2056
"""
return '<a href="http://code.google.com/p/skia/source/detail?r=%s">%s</a>'%(
revision_number, revision_number)
def main():
foo = [[0.0, 0.0], [0.0, 1.0], [0.0, 2.0], [0.0, 3.0]]
LinearRegression(foo)
if __name__ == "__main__":
main()
| bsd-3-clause | 164,446,411,244,192,000 | -8,641,101,596,720,553,000 | 35.74 | 89 | 0.551805 | false |
dumengnanbuaa/awesome-python-webapp | www/config.py | 17 | 1202 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Configuration
'''
__author__ = 'Michael Liao'
import config_default
class Dict(dict):
'''
Simple dict but support access as x.y style.
'''
def __init__(self, names=(), values=(), **kw):
super(Dict, self).__init__(**kw)
for k, v in zip(names, values):
self[k] = v
def __getattr__(self, key):
try:
return self[key]
except KeyError:
raise AttributeError(r"'Dict' object has no attribute '%s'" % key)
def __setattr__(self, key, value):
self[key] = value
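# Doctest-style sketch of the attribute access this class adds (illustrative):
#   d = Dict(('a', 'b'), (1, 2))
#   d.a       # -> 1
#   d['b']    # -> 2
#   d.absent  # -> raises AttributeError instead of KeyError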
def merge(defaults, override):
r = {}
for k, v in defaults.iteritems():
if k in override:
if isinstance(v, dict):
r[k] = merge(v, override[k])
else:
r[k] = override[k]
else:
r[k] = v
return r
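# e.g. (illustrative): overrides win per-key, other defaults are kept:
#   merge({'db': {'host': 'localhost', 'port': 3306}},
#         {'db': {'host': 'prod.example.com'}})
#   # -> {'db': {'host': 'prod.example.com', 'port': 3306}}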
def toDict(d):
D = Dict()
for k, v in d.iteritems():
D[k] = toDict(v) if isinstance(v, dict) else v
return D
configs = config_default.configs
try:
import config_override
configs = merge(configs, config_override.configs)
except ImportError:
pass
configs = toDict(configs)
| gpl-2.0 | -3,862,301,146,414,014,500 | -4,624,639,911,275,312,000 | 20.464286 | 78 | 0.53827 | false |
LuoZijun/uOffice | temp/pydocxx/tests/oxml/unitdata/text.py | 10 | 3187 | # encoding: utf-8
"""
Test data builders for text XML elements
"""
from ...unitdata import BaseBuilder
from .shared import CT_OnOffBuilder, CT_StringBuilder
class CT_BrBuilder(BaseBuilder):
__tag__ = 'w:br'
__nspfxs__ = ('w',)
__attrs__ = ('w:type', 'w:clear')
class CT_EmptyBuilder(BaseBuilder):
__nspfxs__ = ('w',)
__attrs__ = ()
def __init__(self, tag):
self.__tag__ = tag
super(CT_EmptyBuilder, self).__init__()
class CT_JcBuilder(BaseBuilder):
__tag__ = 'w:jc'
__nspfxs__ = ('w',)
__attrs__ = ('w:val',)
class CT_PBuilder(BaseBuilder):
__tag__ = 'w:p'
__nspfxs__ = ('w',)
__attrs__ = ()
class CT_PPrBuilder(BaseBuilder):
__tag__ = 'w:pPr'
__nspfxs__ = ('w',)
__attrs__ = ()
class CT_RBuilder(BaseBuilder):
__tag__ = 'w:r'
__nspfxs__ = ('w',)
__attrs__ = ()
class CT_RPrBuilder(BaseBuilder):
__tag__ = 'w:rPr'
__nspfxs__ = ('w',)
__attrs__ = ()
class CT_SectPrBuilder(BaseBuilder):
__tag__ = 'w:sectPr'
__nspfxs__ = ('w',)
__attrs__ = ()
class CT_TextBuilder(BaseBuilder):
__tag__ = 'w:t'
__nspfxs__ = ('w',)
__attrs__ = ()
def with_space(self, value):
self._set_xmlattr('xml:space', str(value))
return self
class CT_UnderlineBuilder(BaseBuilder):
__tag__ = 'w:u'
__nspfxs__ = ('w',)
__attrs__ = (
'w:val', 'w:color', 'w:themeColor', 'w:themeTint', 'w:themeShade'
)
def a_b():
return CT_OnOffBuilder('w:b')
def a_bCs():
return CT_OnOffBuilder('w:bCs')
def a_br():
return CT_BrBuilder()
def a_caps():
return CT_OnOffBuilder('w:caps')
def a_cr():
return CT_EmptyBuilder('w:cr')
def a_cs():
return CT_OnOffBuilder('w:cs')
def a_dstrike():
return CT_OnOffBuilder('w:dstrike')
def a_jc():
return CT_JcBuilder()
def a_noProof():
return CT_OnOffBuilder('w:noProof')
def a_shadow():
return CT_OnOffBuilder('w:shadow')
def a_smallCaps():
return CT_OnOffBuilder('w:smallCaps')
def a_snapToGrid():
return CT_OnOffBuilder('w:snapToGrid')
def a_specVanish():
return CT_OnOffBuilder('w:specVanish')
def a_strike():
return CT_OnOffBuilder('w:strike')
def a_tab():
return CT_EmptyBuilder('w:tab')
def a_vanish():
return CT_OnOffBuilder('w:vanish')
def a_webHidden():
return CT_OnOffBuilder('w:webHidden')
def a_p():
return CT_PBuilder()
def a_pPr():
return CT_PPrBuilder()
def a_pStyle():
return CT_StringBuilder('w:pStyle')
def a_sectPr():
return CT_SectPrBuilder()
def a_t():
return CT_TextBuilder()
def a_u():
return CT_UnderlineBuilder()
def an_emboss():
return CT_OnOffBuilder('w:emboss')
def an_i():
return CT_OnOffBuilder('w:i')
def an_iCs():
return CT_OnOffBuilder('w:iCs')
def an_imprint():
return CT_OnOffBuilder('w:imprint')
def an_oMath():
return CT_OnOffBuilder('w:oMath')
def an_outline():
return CT_OnOffBuilder('w:outline')
def an_r():
return CT_RBuilder()
def an_rPr():
return CT_RPrBuilder()
def an_rStyle():
return CT_StringBuilder('w:rStyle')
def an_rtl():
return CT_OnOffBuilder('w:rtl')
| gpl-3.0 | -1,486,115,750,656,573,000 | -7,133,921,697,137,539,000 | 14.248804 | 73 | 0.578287 | false |
temasek/android_external_chromium_org | chrome/common/extensions/docs/server2/features_utility_test.py | 26 | 2661 | #!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
from features_utility import Parse, Filtered, MergedWith
class FeaturesUtilityTest(unittest.TestCase):
def testFromJson(self):
raw_features_json = {
'doc1': {
'extension_types': ['extension', 'platform_app']
},
'doc2': {
'extension_types': ['hosted_app', 'packaged_app']
},
'doc3': {
'whitelist': 'hashhashashhashashhashashhash'
},
'doc4': [
{ 'extension_types': 'all' },
{ 'whitelist': 'hashhashashhashashhashashhash' }
],
'doc5': {
'extension_types': ['extension']
},
'doc1.sub1': {
'extension_types': ['platform_app', 'hosted_app', 'packaged_app']
}
}
expected = {
'doc1': {
'platforms': ['apps', 'extensions'],
'name': 'doc1'
},
'doc2': {
'platforms': [],
'name': 'doc2'
},
'doc4': {
'platforms': ['apps', 'extensions'],
'name': 'doc4'
},
'doc5': {
'platforms': ['extensions'],
'name': 'doc5'
},
'doc1.sub1': {
'platforms': ['apps'],
'name': 'doc1.sub1'
}
}
self.assertEqual(expected, Parse(raw_features_json))
def testFilter(self):
unfiltered = {
'doc1': { 'platforms': ['apps'] },
'doc2': { 'platforms': ['extensions'] },
'doc3': { 'platforms': ['apps', 'extensions'] },
'doc4': { 'platforms': [] }
}
apps_names = set(('doc1', 'doc3'))
extension_names = set(('doc2', 'doc3'))
self.assertEqual(
apps_names, set(Filtered(unfiltered, 'apps').keys()))
self.assertEqual(
extension_names, set(Filtered(unfiltered, 'extensions').keys()))
def testMergeFeatures(self):
features = {
'doc1': {
'platforms': ['apps']
},
'doc3': {
'name': 'doc3'
}
}
other = {
'doc1': {
'name': 'doc1',
'platforms': ['extensions']
},
'doc2': {
'name': 'doc2'
},
'doc3': {
'platforms': ['extensions', 'apps']
}
}
expected = {
'doc1': {
'name': 'doc1',
'platforms': ['extensions']
},
'doc2': {
'name': 'doc2',
'platforms': []
},
'doc3': {
'name': 'doc3',
'platforms': ['extensions', 'apps']
}
}
self.assertEqual(expected, MergedWith(features, other))
if __name__ == '__main__':
unittest.main()
| bsd-3-clause | 4,469,364,734,635,377,000 | 8,241,382,125,955,609,000 | 21.939655 | 73 | 0.491169 | false |
diascreative/opencore | opencore/views/files.py | 4 | 2160 | # Copyright (C) 2008-2009 Open Society Institute
# Thomas Moroz: tmoroz.org
# 2010-2011 Large Blue
# Fergus Doyle: fergus.doyle@largeblue.com
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License Version 2 as published
# by the Free Software Foundation. You may not use, modify or distribute
# this program under any other version of the GNU General Public License.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
from webob import Response
from opencore.models.interfaces import IImage
def download_file_view(context, request):
# To view image-ish files in-line, use thumbnail_view.
f = context.blobfile.open()
headers = [
('Content-Type', context.mimetype),
('Content-Length', str(context.size)),
]
if 'save' in request.params:
fname = context.filename
if isinstance(fname, unicode):
fname = fname.encode('utf-8')
fname = fname.replace('\n', ' ').replace('\r', ' ').replace('\t', ' ')
headers.append(
('Content-Disposition', 'attachment; filename=%s' % fname)
)
response = Response(headerlist=headers, app_iter=f)
return response
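# Illustrative request flow (file name hypothetical):
#   GET .../report.pdf        -> served inline with its stored mimetype
#   GET .../report.pdf?save=1 -> adds
#       Content-Disposition: attachment; filename=report.pdf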
def thumbnail_view(context, request):
assert IImage.providedBy(context), "Context must be an image."
filename = request.subpath[0] # <width>x<height>.jpg
size = map(int, filename[:-4].split('x'))
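    # e.g. a subpath of '120x80.jpg' yields size == [120, 80] (illustrative)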
thumb = context.thumbnail(tuple(size))
# XXX Allow browser caching by setting Last-modified and Expires
# and respecting If-Modified-Since requests with 304 responses.
data = thumb.blobfile.open().read()
return Response(body=data, content_type=thumb.mimetype)
| gpl-2.0 | -3,391,067,532,186,793,500 | 1,906,227,071,439,189,000 | 40.538462 | 78 | 0.686574 | false |
yg257/Pangea | templates/root/ec2/lib/boto-2.34.0/boto/ses/__init__.py | 131 | 2013 | # Copyright (c) 2010 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2011 Harry Marr http://hmarr.com/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from boto.ses.connection import SESConnection
from boto.regioninfo import RegionInfo, get_regions
def regions():
"""
Get all available regions for the SES service.
:rtype: list
:return: A list of :class:`boto.regioninfo.RegionInfo` instances
"""
return get_regions('ses', connection_cls=SESConnection)
def connect_to_region(region_name, **kw_params):
"""
Given a valid region name, return a
:class:`boto.ses.connection.SESConnection`.
:type: str
:param region_name: The name of the region to connect to.
:rtype: :class:`boto.ses.connection.SESConnection` or ``None``
:return: A connection to the given region, or None if an invalid region
name is given
"""
for region in regions():
if region.name == region_name:
return region.connect(**kw_params)
return None
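# --- Added usage sketch; not part of the original module. ---
# How a caller might use the helpers above; the region name and credentials
# are placeholders, and send_email() is the standard SESConnection method.
#
#     import boto.ses
#     conn = boto.ses.connect_to_region(
#         'us-east-1',
#         aws_access_key_id='<key>',
#         aws_secret_access_key='<secret>')
#     if conn is not None:
#         conn.send_email('from@example.com', 'Subject', 'Body text',
#                         ['to@example.com'])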
| apache-2.0 | 2,155,694,751,830,676,000 | -2,826,733,364,146,680,000 | 37.711538 | 75 | 0.724789 | false |
dhoffman34/django | django/template/defaulttags.py | 1 | 52686 | """Default tags used by the template system, available to all templates."""
from __future__ import unicode_literals
import os
import sys
import re
from datetime import datetime
from itertools import groupby, cycle as itertools_cycle
import warnings
from django.conf import settings
from django.template.base import (Node, NodeList, Template, Context, Library,
TemplateSyntaxError, VariableDoesNotExist, InvalidTemplateLibrary,
BLOCK_TAG_START, BLOCK_TAG_END, VARIABLE_TAG_START, VARIABLE_TAG_END,
SINGLE_BRACE_START, SINGLE_BRACE_END, COMMENT_TAG_START, COMMENT_TAG_END,
VARIABLE_ATTRIBUTE_SEPARATOR, get_library, token_kwargs, kwarg_re,
render_value_in_context)
from django.template.smartif import IfParser, Literal
from django.template.defaultfilters import date
from django.utils.deprecation import RemovedInDjango20Warning
from django.utils.encoding import force_text, smart_text
from django.utils.lorem_ipsum import words, paragraphs
from django.utils.safestring import mark_safe
from django.utils.html import format_html
from django.utils import six
from django.utils import timezone
register = Library()
class AutoEscapeControlNode(Node):
"""Implements the actions of the autoescape tag."""
def __init__(self, setting, nodelist):
self.setting, self.nodelist = setting, nodelist
def render(self, context):
old_setting = context.autoescape
context.autoescape = self.setting
output = self.nodelist.render(context)
context.autoescape = old_setting
if self.setting:
return mark_safe(output)
else:
return output
class CommentNode(Node):
def render(self, context):
return ''
class CsrfTokenNode(Node):
def render(self, context):
csrf_token = context.get('csrf_token', None)
if csrf_token:
if csrf_token == 'NOTPROVIDED':
return format_html("")
else:
return format_html("<input type='hidden' name='csrfmiddlewaretoken' value='{0}' />", csrf_token)
else:
# It's very probable that the token is missing because of
# misconfiguration, so we raise a warning
if settings.DEBUG:
warnings.warn("A {% csrf_token %} was used in a template, but the context did not provide the value. This is usually caused by not using RequestContext.")
return ''
class CycleNode(Node):
def __init__(self, cyclevars, variable_name=None, silent=False):
self.cyclevars = cyclevars
self.variable_name = variable_name
self.silent = silent
def render(self, context):
if self not in context.render_context:
# First time the node is rendered in template
context.render_context[self] = itertools_cycle(self.cyclevars)
cycle_iter = context.render_context[self]
value = next(cycle_iter).resolve(context)
if self.variable_name:
context[self.variable_name] = value
if self.silent:
return ''
return render_value_in_context(value, context)
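# Added note: because the iterator is stored in context.render_context keyed
# by the node itself, each top-level render gets a fresh cycle. A hedged
# illustration (Template and Context are imported above):
#
#     t = Template("{% for x in 'ab' %}{% cycle 'r1' 'r2' %}{% endfor %}")
#     t.render(Context())  # 'r1r2'
#     t.render(Context())  # 'r1r2' again -- the cycle does not carry over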
class DebugNode(Node):
def render(self, context):
from pprint import pformat
output = [pformat(val) for val in context]
output.append('\n\n')
output.append(pformat(sys.modules))
return ''.join(output)
class FilterNode(Node):
def __init__(self, filter_expr, nodelist):
self.filter_expr, self.nodelist = filter_expr, nodelist
def render(self, context):
output = self.nodelist.render(context)
# Apply filters.
with context.push(var=output):
return self.filter_expr.resolve(context)
class FirstOfNode(Node):
def __init__(self, variables):
self.vars = variables
def render(self, context):
for var in self.vars:
value = var.resolve(context, True)
if value:
return render_value_in_context(value, context)
return ''
class ForNode(Node):
child_nodelists = ('nodelist_loop', 'nodelist_empty')
def __init__(self, loopvars, sequence, is_reversed, nodelist_loop, nodelist_empty=None):
self.loopvars, self.sequence = loopvars, sequence
self.is_reversed = is_reversed
self.nodelist_loop = nodelist_loop
if nodelist_empty is None:
self.nodelist_empty = NodeList()
else:
self.nodelist_empty = nodelist_empty
def __repr__(self):
reversed_text = ' reversed' if self.is_reversed else ''
return "<For Node: for %s in %s, tail_len: %d%s>" % \
(', '.join(self.loopvars), self.sequence, len(self.nodelist_loop),
reversed_text)
def __iter__(self):
for node in self.nodelist_loop:
yield node
for node in self.nodelist_empty:
yield node
def render(self, context):
if 'forloop' in context:
parentloop = context['forloop']
else:
parentloop = {}
with context.push():
try:
values = self.sequence.resolve(context, True)
except VariableDoesNotExist:
values = []
if values is None:
values = []
if not hasattr(values, '__len__'):
values = list(values)
len_values = len(values)
if len_values < 1:
return self.nodelist_empty.render(context)
nodelist = []
if self.is_reversed:
values = reversed(values)
num_loopvars = len(self.loopvars)
unpack = num_loopvars > 1
# Create a forloop value in the context. We'll update counters on each
# iteration just below.
loop_dict = context['forloop'] = {'parentloop': parentloop}
for i, item in enumerate(values):
# Shortcuts for current loop iteration number.
loop_dict['counter0'] = i
loop_dict['counter'] = i + 1
# Reverse counter iteration numbers.
loop_dict['revcounter'] = len_values - i
loop_dict['revcounter0'] = len_values - i - 1
# Boolean values designating first and last times through loop.
loop_dict['first'] = (i == 0)
loop_dict['last'] = (i == len_values - 1)
pop_context = False
if unpack:
# If there are multiple loop variables, unpack the item into
# them.
# To complete this deprecation, remove from here to the
# try/except block as well as the try/except itself,
# leaving `unpacked_vars = ...` and the "else" statements.
if not isinstance(item, (list, tuple)):
len_item = 1
else:
len_item = len(item)
# Check loop variable count before unpacking
if num_loopvars != len_item:
warnings.warn(
"Need {0} values to unpack in for loop; got {1}. "
"This will raise an exception in Django 2.0."
.format(num_loopvars, len_item),
RemovedInDjango20Warning)
try:
unpacked_vars = dict(zip(self.loopvars, item))
except TypeError:
pass
else:
pop_context = True
context.update(unpacked_vars)
else:
context[self.loopvars[0]] = item
# In TEMPLATE_DEBUG mode provide source of the node which
# actually raised the exception
if settings.TEMPLATE_DEBUG:
for node in self.nodelist_loop:
try:
nodelist.append(node.render(context))
except Exception as e:
if not hasattr(e, 'django_template_source'):
e.django_template_source = node.source
raise
else:
for node in self.nodelist_loop:
nodelist.append(node.render(context))
if pop_context:
# The loop variables were pushed on to the context so pop them
# off again. This is necessary because the tag lets the length
# of loopvars differ to the length of each set of items and we
# don't want to leave any vars from the previous loop on the
# context.
context.pop()
return mark_safe(''.join(force_text(n) for n in nodelist))
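# Added note: the loop above exposes its counters through ``forloop``; a
# hedged template-side illustration of the values it maintains:
#
#     {% for item in items %}
#         {{ forloop.counter }}/{{ forloop.revcounter }}
#         {% if forloop.first %}(first){% endif %}
#     {% endfor %}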
class IfChangedNode(Node):
child_nodelists = ('nodelist_true', 'nodelist_false')
def __init__(self, nodelist_true, nodelist_false, *varlist):
self.nodelist_true, self.nodelist_false = nodelist_true, nodelist_false
self._varlist = varlist
def render(self, context):
# Init state storage
state_frame = self._get_context_stack_frame(context)
if self not in state_frame:
state_frame[self] = None
nodelist_true_output = None
try:
if self._varlist:
# Consider multiple parameters. This automatically behaves
# like an OR evaluation of the multiple variables.
compare_to = [var.resolve(context, True) for var in self._varlist]
else:
# The "{% ifchanged %}" syntax (without any variables) compares the rendered output.
compare_to = nodelist_true_output = self.nodelist_true.render(context)
except VariableDoesNotExist:
compare_to = None
if compare_to != state_frame[self]:
state_frame[self] = compare_to
return nodelist_true_output or self.nodelist_true.render(context) # render true block if not already rendered
elif self.nodelist_false:
return self.nodelist_false.render(context)
return ''
def _get_context_stack_frame(self, context):
# The Context object behaves like a stack where each template tag can create a new scope.
# Find the place where to store the state to detect changes.
if 'forloop' in context:
# Ifchanged is bound to the local for loop.
# When there is a loop-in-loop, the state is bound to the inner loop,
# so it resets when the outer loop continues.
return context['forloop']
else:
# Using ifchanged outside loops. Effectively this is a no-op because the state is associated with 'self'.
return context.render_context
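# Added note: because the state frame is the forloop dict when inside a loop,
# nested loops track changes independently, e.g.:
#
#     {% for d in days %}
#         {% ifchanged d.month %}{{ d.month }}{% endifchanged %}
#     {% endfor %}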
class IfEqualNode(Node):
child_nodelists = ('nodelist_true', 'nodelist_false')
def __init__(self, var1, var2, nodelist_true, nodelist_false, negate):
self.var1, self.var2 = var1, var2
self.nodelist_true, self.nodelist_false = nodelist_true, nodelist_false
self.negate = negate
def __repr__(self):
return "<IfEqualNode>"
def render(self, context):
val1 = self.var1.resolve(context, True)
val2 = self.var2.resolve(context, True)
if (self.negate and val1 != val2) or (not self.negate and val1 == val2):
return self.nodelist_true.render(context)
return self.nodelist_false.render(context)
class IfNode(Node):
def __init__(self, conditions_nodelists):
self.conditions_nodelists = conditions_nodelists
def __repr__(self):
return "<IfNode>"
def __iter__(self):
for _, nodelist in self.conditions_nodelists:
for node in nodelist:
yield node
@property
def nodelist(self):
return NodeList(node for _, nodelist in self.conditions_nodelists for node in nodelist)
def render(self, context):
for condition, nodelist in self.conditions_nodelists:
if condition is not None: # if / elif clause
try:
match = condition.eval(context)
except VariableDoesNotExist:
match = None
else: # else clause
match = True
if match:
return nodelist.render(context)
return ''
class LoremNode(Node):
def __init__(self, count, method, common):
self.count, self.method, self.common = count, method, common
def render(self, context):
try:
count = int(self.count.resolve(context))
except (ValueError, TypeError):
count = 1
if self.method == 'w':
return words(count, common=self.common)
else:
paras = paragraphs(count, common=self.common)
if self.method == 'p':
paras = ['<p>%s</p>' % p for p in paras]
return '\n\n'.join(paras)
class RegroupNode(Node):
def __init__(self, target, expression, var_name):
self.target, self.expression = target, expression
self.var_name = var_name
def resolve_expression(self, obj, context):
# This method is called for each object in self.target. See regroup()
# for the reason why we temporarily put the object in the context.
context[self.var_name] = obj
return self.expression.resolve(context, True)
def render(self, context):
obj_list = self.target.resolve(context, True)
if obj_list is None:
# target variable wasn't found in context; fail silently.
context[self.var_name] = []
return ''
# List of dictionaries in the format:
# {'grouper': 'key', 'list': [list of contents]}.
context[self.var_name] = [
{'grouper': key, 'list': list(val)}
for key, val in
groupby(obj_list, lambda obj: self.resolve_expression(obj, context))
]
return ''
def include_is_allowed(filepath):
filepath = os.path.abspath(filepath)
for root in settings.ALLOWED_INCLUDE_ROOTS:
if filepath.startswith(root):
return True
return False
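# Added note: os.path.abspath() normalizes the path before the prefix check,
# so with ALLOWED_INCLUDE_ROOTS = ('/var/www/includes',) a traversal such as
# '/var/www/includes/../secret' resolves to '/var/www/secret' and is refused.
# The check is a plain startswith(), so a sibling directory sharing the
# prefix (e.g. '/var/www/includes2') would also pass -- choose roots with a
# trailing separator if that matters.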
class SsiNode(Node):
def __init__(self, filepath, parsed):
self.filepath = filepath
self.parsed = parsed
def render(self, context):
filepath = self.filepath.resolve(context)
if not include_is_allowed(filepath):
if settings.DEBUG:
return "[Didn't have permission to include file]"
else:
return '' # Fail silently for invalid includes.
try:
with open(filepath, 'r') as fp:
output = fp.read()
except IOError:
output = ''
if self.parsed:
try:
t = Template(output, name=filepath)
return t.render(context)
except TemplateSyntaxError as e:
if settings.DEBUG:
return "[Included template had syntax error: %s]" % e
else:
return '' # Fail silently for invalid included templates.
return output
class LoadNode(Node):
def render(self, context):
return ''
class NowNode(Node):
def __init__(self, format_string):
self.format_string = format_string
def render(self, context):
tzinfo = timezone.get_current_timezone() if settings.USE_TZ else None
return date(datetime.now(tz=tzinfo), self.format_string)
class SpacelessNode(Node):
def __init__(self, nodelist):
self.nodelist = nodelist
def render(self, context):
from django.utils.html import strip_spaces_between_tags
return strip_spaces_between_tags(self.nodelist.render(context).strip())
class TemplateTagNode(Node):
mapping = {'openblock': BLOCK_TAG_START,
'closeblock': BLOCK_TAG_END,
'openvariable': VARIABLE_TAG_START,
'closevariable': VARIABLE_TAG_END,
'openbrace': SINGLE_BRACE_START,
'closebrace': SINGLE_BRACE_END,
'opencomment': COMMENT_TAG_START,
'closecomment': COMMENT_TAG_END,
}
def __init__(self, tagtype):
self.tagtype = tagtype
def render(self, context):
return self.mapping.get(self.tagtype, '')
class URLNode(Node):
def __init__(self, view_name, args, kwargs, asvar):
self.view_name = view_name
self.args = args
self.kwargs = kwargs
self.asvar = asvar
def render(self, context):
from django.core.urlresolvers import reverse, NoReverseMatch
args = [arg.resolve(context) for arg in self.args]
kwargs = dict((smart_text(k, 'ascii'), v.resolve(context))
for k, v in self.kwargs.items())
view_name = self.view_name.resolve(context)
# Try to look up the URL twice: once given the view name, and again
# relative to what we guess is the "main" app. If they both fail,
# re-raise the NoReverseMatch unless we're using the
# {% url ... as var %} construct in which case return nothing.
url = ''
try:
url = reverse(view_name, args=args, kwargs=kwargs, current_app=context.current_app)
except NoReverseMatch:
exc_info = sys.exc_info()
if settings.SETTINGS_MODULE:
project_name = settings.SETTINGS_MODULE.split('.')[0]
try:
url = reverse(project_name + '.' + view_name,
args=args, kwargs=kwargs,
current_app=context.current_app)
except NoReverseMatch:
if self.asvar is None:
# Re-raise the original exception, not the one with
# the path relative to the project. This makes a
# better error message.
six.reraise(*exc_info)
else:
if self.asvar is None:
raise
if self.asvar:
context[self.asvar] = url
return ''
else:
return url
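# Added note: with the ``as`` form the NoReverseMatch is swallowed and the
# variable is set to the empty string, so a template can guard on it:
#
#     {% url "may-not-exist" as target %}
#     {% if target %}<a href="{{ target }}">link</a>{% endif %}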
class VerbatimNode(Node):
def __init__(self, content):
self.content = content
def render(self, context):
return self.content
class WidthRatioNode(Node):
def __init__(self, val_expr, max_expr, max_width, asvar=None):
self.val_expr = val_expr
self.max_expr = max_expr
self.max_width = max_width
self.asvar = asvar
def render(self, context):
try:
value = self.val_expr.resolve(context)
max_value = self.max_expr.resolve(context)
max_width = int(self.max_width.resolve(context))
except VariableDoesNotExist:
return ''
except (ValueError, TypeError):
raise TemplateSyntaxError("widthratio final argument must be a number")
try:
value = float(value)
max_value = float(max_value)
ratio = (value / max_value) * max_width
result = str(int(round(ratio)))
except ZeroDivisionError:
return '0'
except (ValueError, TypeError, OverflowError):
return ''
if self.asvar:
context[self.asvar] = result
return ''
else:
return result
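# Added worked example: value=175, max_value=200, max_width=100 gives
# ratio = (175.0 / 200.0) * 100 = 87.5 and str(int(round(87.5))) == '88',
# matching the widthratio tag docstring further below.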
class WithNode(Node):
def __init__(self, var, name, nodelist, extra_context=None):
self.nodelist = nodelist
# var and name are legacy attributes, being left in case they are used
# by third-party subclasses of this Node.
self.extra_context = extra_context or {}
if name:
self.extra_context[name] = var
def __repr__(self):
return "<WithNode>"
def render(self, context):
values = dict((key, val.resolve(context)) for key, val in
six.iteritems(self.extra_context))
with context.push(**values):
return self.nodelist.render(context)
@register.tag
def autoescape(parser, token):
"""
Force autoescape behavior for this block.
"""
    # token.split_contents() isn't useful here because this tag doesn't accept variables as arguments
args = token.contents.split()
if len(args) != 2:
raise TemplateSyntaxError("'autoescape' tag requires exactly one argument.")
arg = args[1]
if arg not in ('on', 'off'):
raise TemplateSyntaxError("'autoescape' argument should be 'on' or 'off'")
nodelist = parser.parse(('endautoescape',))
parser.delete_first_token()
return AutoEscapeControlNode((arg == 'on'), nodelist)
@register.tag
def comment(parser, token):
"""
Ignores everything between ``{% comment %}`` and ``{% endcomment %}``.
"""
parser.skip_past('endcomment')
return CommentNode()
@register.tag
def cycle(parser, token):
"""
Cycles among the given strings each time this tag is encountered.
Within a loop, cycles among the given strings each time through
the loop::
{% for o in some_list %}
<tr class="{% cycle 'row1' 'row2' %}">
...
</tr>
{% endfor %}
Outside of a loop, give the values a unique name the first time you call
it, then use that name each successive time through::
<tr class="{% cycle 'row1' 'row2' 'row3' as rowcolors %}">...</tr>
<tr class="{% cycle rowcolors %}">...</tr>
<tr class="{% cycle rowcolors %}">...</tr>
You can use any number of values, separated by spaces. Commas can also
be used to separate values; if a comma is used, the cycle values are
interpreted as literal strings.
The optional flag "silent" can be used to prevent the cycle declaration
from returning any value::
{% for o in some_list %}
{% cycle 'row1' 'row2' as rowcolors silent %}
<tr class="{{ rowcolors }}">{% include "subtemplate.html " %}</tr>
{% endfor %}
"""
# Note: This returns the exact same node on each {% cycle name %} call;
# that is, the node object returned from {% cycle a b c as name %} and the
# one returned from {% cycle name %} are the exact same object. This
# shouldn't cause problems (heh), but if it does, now you know.
#
# Ugly hack warning: This stuffs the named template dict into parser so
# that names are only unique within each template (as opposed to using
# a global variable, which would make cycle names have to be unique across
    # *all* templates).
args = token.split_contents()
if len(args) < 2:
raise TemplateSyntaxError("'cycle' tag requires at least two arguments")
if ',' in args[1]:
# Backwards compatibility: {% cycle a,b %} or {% cycle a,b as foo %}
# case.
args[1:2] = ['"%s"' % arg for arg in args[1].split(",")]
if len(args) == 2:
# {% cycle foo %} case.
name = args[1]
if not hasattr(parser, '_namedCycleNodes'):
raise TemplateSyntaxError("No named cycles in template. '%s' is not defined" % name)
if name not in parser._namedCycleNodes:
raise TemplateSyntaxError("Named cycle '%s' does not exist" % name)
return parser._namedCycleNodes[name]
as_form = False
if len(args) > 4:
# {% cycle ... as foo [silent] %} case.
if args[-3] == "as":
if args[-1] != "silent":
raise TemplateSyntaxError("Only 'silent' flag is allowed after cycle's name, not '%s'." % args[-1])
as_form = True
silent = True
args = args[:-1]
elif args[-2] == "as":
as_form = True
silent = False
if as_form:
name = args[-1]
values = [parser.compile_filter(arg) for arg in args[1:-2]]
node = CycleNode(values, name, silent=silent)
if not hasattr(parser, '_namedCycleNodes'):
parser._namedCycleNodes = {}
parser._namedCycleNodes[name] = node
else:
values = [parser.compile_filter(arg) for arg in args[1:]]
node = CycleNode(values)
return node
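# Added note: {% cycle name %} returns the *same* node object that the
# ``as`` form registered in parser._namedCycleNodes, which is what keeps a
# shared iterator in step:
#
#     <tr class="{% cycle 'r1' 'r2' as rowcolors %}">...</tr>  {# r1 #}
#     <tr class="{% cycle rowcolors %}">...</tr>               {# r2 #}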
@register.tag
def csrf_token(parser, token):
return CsrfTokenNode()
@register.tag
def debug(parser, token):
"""
Outputs a whole load of debugging information, including the current
context and imported modules.
Sample usage::
<pre>
{% debug %}
</pre>
"""
return DebugNode()
@register.tag('filter')
def do_filter(parser, token):
"""
Filters the contents of the block through variable filters.
Filters can also be piped through each other, and they can have
arguments -- just like in variable syntax.
Sample usage::
{% filter force_escape|lower %}
This text will be HTML-escaped, and will appear in lowercase.
{% endfilter %}
Note that the ``escape`` and ``safe`` filters are not acceptable arguments.
Instead, use the ``autoescape`` tag to manage autoescaping for blocks of
template code.
"""
    # token.split_contents() isn't useful here because this tag doesn't accept variables as arguments
_, rest = token.contents.split(None, 1)
filter_expr = parser.compile_filter("var|%s" % (rest))
for func, unused in filter_expr.filters:
filter_name = getattr(func, '_filter_name', None)
if filter_name in ('escape', 'safe'):
raise TemplateSyntaxError('"filter %s" is not permitted. Use the "autoescape" tag instead.' % filter_name)
nodelist = parser.parse(('endfilter',))
parser.delete_first_token()
return FilterNode(filter_expr, nodelist)
@register.tag
def firstof(parser, token):
"""
Outputs the first variable passed that is not False, without escaping.
Outputs nothing if all the passed variables are False.
Sample usage::
{% firstof var1 var2 var3 %}
This is equivalent to::
{% if var1 %}
{{ var1|safe }}
{% elif var2 %}
{{ var2|safe }}
{% elif var3 %}
{{ var3|safe }}
{% endif %}
but obviously much cleaner!
You can also use a literal string as a fallback value in case all
passed variables are False::
{% firstof var1 var2 var3 "fallback value" %}
If you want to escape the output, use a filter tag::
{% filter force_escape %}
{% firstof var1 var2 var3 "fallback value" %}
{% endfilter %}
"""
bits = token.split_contents()[1:]
if len(bits) < 1:
raise TemplateSyntaxError("'firstof' statement requires at least one argument")
return FirstOfNode([parser.compile_filter(bit) for bit in bits])
@register.tag('for')
def do_for(parser, token):
"""
Loops over each item in an array.
For example, to display a list of athletes given ``athlete_list``::
<ul>
{% for athlete in athlete_list %}
<li>{{ athlete.name }}</li>
{% endfor %}
</ul>
You can loop over a list in reverse by using
``{% for obj in list reversed %}``.
You can also unpack multiple values from a two-dimensional array::
{% for key,value in dict.items %}
{{ key }}: {{ value }}
{% endfor %}
The ``for`` tag can take an optional ``{% empty %}`` clause that will
be displayed if the given array is empty or could not be found::
<ul>
{% for athlete in athlete_list %}
<li>{{ athlete.name }}</li>
{% empty %}
<li>Sorry, no athletes in this list.</li>
{% endfor %}
<ul>
The above is equivalent to -- but shorter, cleaner, and possibly faster
than -- the following::
<ul>
              {% if athlete_list %}
{% for athlete in athlete_list %}
<li>{{ athlete.name }}</li>
{% endfor %}
{% else %}
<li>Sorry, no athletes in this list.</li>
{% endif %}
</ul>
The for loop sets a number of variables available within the loop:
========================== ================================================
Variable Description
========================== ================================================
``forloop.counter`` The current iteration of the loop (1-indexed)
``forloop.counter0`` The current iteration of the loop (0-indexed)
``forloop.revcounter`` The number of iterations from the end of the
loop (1-indexed)
``forloop.revcounter0`` The number of iterations from the end of the
loop (0-indexed)
``forloop.first`` True if this is the first time through the loop
``forloop.last`` True if this is the last time through the loop
``forloop.parentloop`` For nested loops, this is the loop "above" the
current one
========================== ================================================
"""
bits = token.split_contents()
if len(bits) < 4:
raise TemplateSyntaxError("'for' statements should have at least four"
" words: %s" % token.contents)
is_reversed = bits[-1] == 'reversed'
in_index = -3 if is_reversed else -2
if bits[in_index] != 'in':
raise TemplateSyntaxError("'for' statements should use the format"
" 'for x in y': %s" % token.contents)
loopvars = re.split(r' *, *', ' '.join(bits[1:in_index]))
for var in loopvars:
if not var or ' ' in var:
raise TemplateSyntaxError("'for' tag received an invalid argument:"
" %s" % token.contents)
sequence = parser.compile_filter(bits[in_index + 1])
nodelist_loop = parser.parse(('empty', 'endfor',))
token = parser.next_token()
if token.contents == 'empty':
nodelist_empty = parser.parse(('endfor',))
parser.delete_first_token()
else:
nodelist_empty = None
return ForNode(loopvars, sequence, is_reversed, nodelist_loop, nodelist_empty)
def do_ifequal(parser, token, negate):
bits = list(token.split_contents())
if len(bits) != 3:
raise TemplateSyntaxError("%r takes two arguments" % bits[0])
end_tag = 'end' + bits[0]
nodelist_true = parser.parse(('else', end_tag))
token = parser.next_token()
if token.contents == 'else':
nodelist_false = parser.parse((end_tag,))
parser.delete_first_token()
else:
nodelist_false = NodeList()
val1 = parser.compile_filter(bits[1])
val2 = parser.compile_filter(bits[2])
return IfEqualNode(val1, val2, nodelist_true, nodelist_false, negate)
@register.tag
def ifequal(parser, token):
"""
Outputs the contents of the block if the two arguments equal each other.
Examples::
{% ifequal user.id comment.user_id %}
...
{% endifequal %}
{% ifnotequal user.id comment.user_id %}
...
{% else %}
...
{% endifnotequal %}
"""
return do_ifequal(parser, token, False)
@register.tag
def ifnotequal(parser, token):
"""
Outputs the contents of the block if the two arguments are not equal.
See ifequal.
"""
return do_ifequal(parser, token, True)
class TemplateLiteral(Literal):
def __init__(self, value, text):
self.value = value
self.text = text # for better error messages
def display(self):
return self.text
def eval(self, context):
return self.value.resolve(context, ignore_failures=True)
class TemplateIfParser(IfParser):
error_class = TemplateSyntaxError
def __init__(self, parser, *args, **kwargs):
self.template_parser = parser
super(TemplateIfParser, self).__init__(*args, **kwargs)
def create_var(self, value):
return TemplateLiteral(self.template_parser.compile_filter(value), value)
@register.tag('if')
def do_if(parser, token):
"""
The ``{% if %}`` tag evaluates a variable, and if that variable is "true"
(i.e., exists, is not empty, and is not a false boolean value), the
contents of the block are output:
::
{% if athlete_list %}
Number of athletes: {{ athlete_list|count }}
{% elif athlete_in_locker_room_list %}
Athletes should be out of the locker room soon!
{% else %}
No athletes.
{% endif %}
In the above, if ``athlete_list`` is not empty, the number of athletes will
be displayed by the ``{{ athlete_list|count }}`` variable.
    As you can see, the ``if`` tag may take one or several ``{% elif %}``
clauses, as well as an ``{% else %}`` clause that will be displayed if all
previous conditions fail. These clauses are optional.
``if`` tags may use ``or``, ``and`` or ``not`` to test a number of
variables or to negate a given variable::
{% if not athlete_list %}
There are no athletes.
{% endif %}
{% if athlete_list or coach_list %}
There are some athletes or some coaches.
{% endif %}
{% if athlete_list and coach_list %}
Both athletes and coaches are available.
{% endif %}
{% if not athlete_list or coach_list %}
There are no athletes, or there are some coaches.
{% endif %}
{% if athlete_list and not coach_list %}
There are some athletes and absolutely no coaches.
{% endif %}
Comparison operators are also available, and the use of filters is also
allowed, for example::
{% if articles|length >= 5 %}...{% endif %}
Arguments and operators _must_ have a space between them, so
``{% if 1>2 %}`` is not a valid if tag.
    All supported operators are: ``or``, ``and``, ``in``, ``not in``,
    ``==`` (or ``=``), ``!=``, ``>``, ``>=``, ``<`` and ``<=``.
Operator precedence follows Python.
"""
# {% if ... %}
bits = token.split_contents()[1:]
condition = TemplateIfParser(parser, bits).parse()
nodelist = parser.parse(('elif', 'else', 'endif'))
conditions_nodelists = [(condition, nodelist)]
token = parser.next_token()
# {% elif ... %} (repeatable)
while token.contents.startswith('elif'):
bits = token.split_contents()[1:]
condition = TemplateIfParser(parser, bits).parse()
nodelist = parser.parse(('elif', 'else', 'endif'))
conditions_nodelists.append((condition, nodelist))
token = parser.next_token()
# {% else %} (optional)
if token.contents == 'else':
nodelist = parser.parse(('endif',))
conditions_nodelists.append((None, nodelist))
token = parser.next_token()
# {% endif %}
assert token.contents == 'endif'
return IfNode(conditions_nodelists)
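# Added trace: for {% if a %}A{% elif b %}B{% else %}C{% endif %} the loop
# above builds
#
#     conditions_nodelists == [(<cond a>, [A]), (<cond b>, [B]), (None, [C])]
#
# and IfNode.render() returns the output of the first entry whose condition
# matches (None acting as the always-true else clause).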
@register.tag
def ifchanged(parser, token):
"""
Checks if a value has changed from the last iteration of a loop.
The ``{% ifchanged %}`` block tag is used within a loop. It has two
possible uses.
1. Checks its own rendered contents against its previous state and only
displays the content if it has changed. For example, this displays a
list of days, only displaying the month if it changes::
<h1>Archive for {{ year }}</h1>
{% for date in days %}
{% ifchanged %}<h3>{{ date|date:"F" }}</h3>{% endifchanged %}
<a href="{{ date|date:"M/d"|lower }}/">{{ date|date:"j" }}</a>
{% endfor %}
2. If given one or more variables, check whether any variable has changed.
For example, the following shows the date every time it changes, while
showing the hour if either the hour or the date has changed::
{% for date in days %}
{% ifchanged date.date %} {{ date.date }} {% endifchanged %}
{% ifchanged date.hour date.date %}
{{ date.hour }}
{% endifchanged %}
{% endfor %}
"""
bits = token.split_contents()
nodelist_true = parser.parse(('else', 'endifchanged'))
token = parser.next_token()
if token.contents == 'else':
nodelist_false = parser.parse(('endifchanged',))
parser.delete_first_token()
else:
nodelist_false = NodeList()
values = [parser.compile_filter(bit) for bit in bits[1:]]
return IfChangedNode(nodelist_true, nodelist_false, *values)
@register.tag
def ssi(parser, token):
"""
Outputs the contents of a given file into the page.
Like a simple "include" tag, the ``ssi`` tag includes the contents
of another file -- which must be specified using an absolute path --
in the current page::
{% ssi "/home/html/ljworld.com/includes/right_generic.html" %}
If the optional "parsed" parameter is given, the contents of the included
file are evaluated as template code, with the current context::
{% ssi "/home/html/ljworld.com/includes/right_generic.html" parsed %}
"""
bits = token.split_contents()
parsed = False
if len(bits) not in (2, 3):
raise TemplateSyntaxError("'ssi' tag takes one argument: the path to"
" the file to be included")
if len(bits) == 3:
if bits[2] == 'parsed':
parsed = True
else:
raise TemplateSyntaxError("Second (optional) argument to %s tag"
" must be 'parsed'" % bits[0])
filepath = parser.compile_filter(bits[1])
return SsiNode(filepath, parsed)
@register.tag
def load(parser, token):
"""
Loads a custom template tag set.
For example, to load the template tags in
``django/templatetags/news/photos.py``::
{% load news.photos %}
Can also be used to load an individual tag/filter from
a library::
{% load byline from news %}
"""
    # token.split_contents() isn't useful here because this tag doesn't accept variables as arguments
bits = token.contents.split()
if len(bits) >= 4 and bits[-2] == "from":
try:
taglib = bits[-1]
lib = get_library(taglib)
except InvalidTemplateLibrary as e:
raise TemplateSyntaxError("'%s' is not a valid tag library: %s" %
(taglib, e))
else:
temp_lib = Library()
for name in bits[1:-2]:
if name in lib.tags:
temp_lib.tags[name] = lib.tags[name]
# a name could be a tag *and* a filter, so check for both
if name in lib.filters:
temp_lib.filters[name] = lib.filters[name]
elif name in lib.filters:
temp_lib.filters[name] = lib.filters[name]
else:
raise TemplateSyntaxError("'%s' is not a valid tag or filter in tag library '%s'" %
(name, taglib))
parser.add_library(temp_lib)
else:
for taglib in bits[1:]:
# add the library to the parser
try:
lib = get_library(taglib)
parser.add_library(lib)
except InvalidTemplateLibrary as e:
raise TemplateSyntaxError("'%s' is not a valid tag library: %s" %
(taglib, e))
return LoadNode()
@register.tag
def lorem(parser, token):
"""
Creates random Latin text useful for providing test data in templates.
Usage format::
{% lorem [count] [method] [random] %}
``count`` is a number (or variable) containing the number of paragraphs or
words to generate (default is 1).
``method`` is either ``w`` for words, ``p`` for HTML paragraphs, ``b`` for
plain-text paragraph blocks (default is ``b``).
``random`` is the word ``random``, which if given, does not use the common
paragraph (starting "Lorem ipsum dolor sit amet, consectetuer...").
Examples:
* ``{% lorem %}`` will output the common "lorem ipsum" paragraph
* ``{% lorem 3 p %}`` will output the common "lorem ipsum" paragraph
and two random paragraphs each wrapped in HTML ``<p>`` tags
      * ``{% lorem 2 w random %}`` will output two random Latin words
"""
bits = list(token.split_contents())
tagname = bits[0]
# Random bit
common = bits[-1] != 'random'
if not common:
bits.pop()
# Method bit
if bits[-1] in ('w', 'p', 'b'):
method = bits.pop()
else:
method = 'b'
# Count bit
if len(bits) > 1:
count = bits.pop()
else:
count = '1'
count = parser.compile_filter(count)
if len(bits) != 1:
raise TemplateSyntaxError("Incorrect format for %r tag" % tagname)
return LoremNode(count, method, common)
@register.tag
def now(parser, token):
"""
Displays the date, formatted according to the given string.
Uses the same format as PHP's ``date()`` function; see http://php.net/date
for all the possible values.
Sample usage::
It is {% now "jS F Y H:i" %}
"""
bits = token.split_contents()
if len(bits) != 2:
raise TemplateSyntaxError("'now' statement takes one argument")
format_string = bits[1][1:-1]
return NowNode(format_string)
@register.tag
def regroup(parser, token):
"""
Regroups a list of alike objects by a common attribute.
This complex tag is best illustrated by use of an example: say that
``people`` is a list of ``Person`` objects that have ``first_name``,
``last_name``, and ``gender`` attributes, and you'd like to display a list
that looks like:
* Male:
* George Bush
* Bill Clinton
* Female:
* Margaret Thatcher
        * Condoleezza Rice
* Unknown:
* Pat Smith
The following snippet of template code would accomplish this dubious task::
{% regroup people by gender as grouped %}
<ul>
{% for group in grouped %}
<li>{{ group.grouper }}
<ul>
{% for item in group.list %}
<li>{{ item }}</li>
{% endfor %}
</ul>
{% endfor %}
</ul>
As you can see, ``{% regroup %}`` populates a variable with a list of
objects with ``grouper`` and ``list`` attributes. ``grouper`` contains the
item that was grouped by; ``list`` contains the list of objects that share
that ``grouper``. In this case, ``grouper`` would be ``Male``, ``Female``
and ``Unknown``, and ``list`` is the list of people with those genders.
Note that ``{% regroup %}`` does not work when the list to be grouped is not
sorted by the key you are grouping by! This means that if your list of
people was not sorted by gender, you'd need to make sure it is sorted
before using it, i.e.::
{% regroup people|dictsort:"gender" by gender as grouped %}
"""
bits = token.split_contents()
if len(bits) != 6:
raise TemplateSyntaxError("'regroup' tag takes five arguments")
target = parser.compile_filter(bits[1])
if bits[2] != 'by':
raise TemplateSyntaxError("second argument to 'regroup' tag must be 'by'")
if bits[4] != 'as':
raise TemplateSyntaxError("next-to-last argument to 'regroup' tag must"
" be 'as'")
var_name = bits[5]
# RegroupNode will take each item in 'target', put it in the context under
# 'var_name', evaluate 'var_name'.'expression' in the current context, and
# group by the resulting value. After all items are processed, it will
# save the final result in the context under 'var_name', thus clearing the
# temporary values. This hack is necessary because the template engine
# doesn't provide a context-aware equivalent of Python's getattr.
expression = parser.compile_filter(var_name +
VARIABLE_ATTRIBUTE_SEPARATOR +
bits[3])
return RegroupNode(target, expression, var_name)
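# Added note: for {% regroup people by gender as grouped %} the compiled
# expression above is "grouped.gender"; RegroupNode.resolve_expression()
# binds each object to context['grouped'] and resolves that dotted lookup,
# which is the context-aware getattr the comment above describes.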
@register.tag
def spaceless(parser, token):
"""
Removes whitespace between HTML tags, including tab and newline characters.
Example usage::
{% spaceless %}
<p>
<a href="foo/">Foo</a>
</p>
{% endspaceless %}
This example would return this HTML::
<p><a href="foo/">Foo</a></p>
Only space between *tags* is normalized -- not space between tags and text.
In this example, the space around ``Hello`` won't be stripped::
{% spaceless %}
<strong>
Hello
</strong>
{% endspaceless %}
"""
nodelist = parser.parse(('endspaceless',))
parser.delete_first_token()
return SpacelessNode(nodelist)
@register.tag
def templatetag(parser, token):
"""
Outputs one of the bits used to compose template tags.
Since the template system has no concept of "escaping", to display one of
the bits used in template tags, you must use the ``{% templatetag %}`` tag.
The argument tells which template bit to output:
================== =======
Argument Outputs
================== =======
``openblock`` ``{%``
``closeblock`` ``%}``
``openvariable`` ``{{``
``closevariable`` ``}}``
``openbrace`` ``{``
``closebrace`` ``}``
``opencomment`` ``{#``
``closecomment`` ``#}``
================== =======
"""
    # token.split_contents() isn't useful here because this tag doesn't accept variables as arguments
bits = token.contents.split()
if len(bits) != 2:
raise TemplateSyntaxError("'templatetag' statement takes one argument")
tag = bits[1]
if tag not in TemplateTagNode.mapping:
raise TemplateSyntaxError("Invalid templatetag argument: '%s'."
" Must be one of: %s" %
(tag, list(TemplateTagNode.mapping)))
return TemplateTagNode(tag)
@register.tag
def url(parser, token):
"""
Returns an absolute URL matching given view with its parameters.
This is a way to define links that aren't tied to a particular URL
configuration::
{% url "path.to.some_view" arg1 arg2 %}
or
{% url "path.to.some_view" name1=value1 name2=value2 %}
The first argument is a path to a view. It can be an absolute Python path
or just ``app_name.view_name`` without the project name if the view is
located inside the project.
Other arguments are space-separated values that will be filled in place of
positional and keyword arguments in the URL. Don't mix positional and
keyword arguments.
All arguments for the URL should be present.
For example if you have a view ``app_name.client`` taking client's id and
the corresponding line in a URLconf looks like this::
('^client/(\d+)/$', 'app_name.client')
and this app's URLconf is included into the project's URLconf under some
path::
('^clients/', include('project_name.app_name.urls'))
then in a template you can create a link for a certain client like this::
{% url "app_name.client" client.id %}
The URL will look like ``/clients/client/123/``.
The first argument can also be a named URL instead of the Python path to
the view callable. For example if the URLconf entry looks like this::
url('^client/(\d+)/$', name='client-detail-view')
then in the template you can use::
{% url "client-detail-view" client.id %}
There is even another possible value type for the first argument. It can be
the name of a template variable that will be evaluated to obtain the view
name or the URL name, e.g.::
{% with view_path="app_name.client" %}
{% url view_path client.id %}
{% endwith %}
or,
{% with url_name="client-detail-view" %}
{% url url_name client.id %}
{% endwith %}
"""
bits = token.split_contents()
if len(bits) < 2:
raise TemplateSyntaxError("'%s' takes at least one argument"
" (path to a view)" % bits[0])
viewname = parser.compile_filter(bits[1])
args = []
kwargs = {}
asvar = None
bits = bits[2:]
if len(bits) >= 2 and bits[-2] == 'as':
asvar = bits[-1]
bits = bits[:-2]
if len(bits):
for bit in bits:
match = kwarg_re.match(bit)
if not match:
raise TemplateSyntaxError("Malformed arguments to url tag")
name, value = match.groups()
if name:
kwargs[name] = parser.compile_filter(value)
else:
args.append(parser.compile_filter(value))
return URLNode(viewname, args, kwargs, asvar)
@register.tag
def verbatim(parser, token):
"""
Stops the template engine from rendering the contents of this block tag.
Usage::
{% verbatim %}
{% don't process this %}
{% endverbatim %}
You can also designate a specific closing tag block (allowing the
unrendered use of ``{% endverbatim %}``)::
{% verbatim myblock %}
...
{% endverbatim myblock %}
"""
nodelist = parser.parse(('endverbatim',))
parser.delete_first_token()
return VerbatimNode(nodelist.render(Context()))
@register.tag
def widthratio(parser, token):
"""
For creating bar charts and such, this tag calculates the ratio of a given
value to a maximum value, and then applies that ratio to a constant.
For example::
<img src="bar.png" alt="Bar"
height="10" width="{% widthratio this_value max_value max_width %}" />
If ``this_value`` is 175, ``max_value`` is 200, and ``max_width`` is 100,
the image in the above example will be 88 pixels wide
(because 175/200 = .875; .875 * 100 = 87.5 which is rounded up to 88).
In some cases you might want to capture the result of widthratio in a
variable. It can be useful for instance in a blocktrans like this::
{% widthratio this_value max_value max_width as width %}
{% blocktrans %}The width is: {{ width }}{% endblocktrans %}
"""
bits = token.split_contents()
if len(bits) == 4:
tag, this_value_expr, max_value_expr, max_width = bits
asvar = None
elif len(bits) == 6:
tag, this_value_expr, max_value_expr, max_width, as_, asvar = bits
if as_ != 'as':
raise TemplateSyntaxError("Invalid syntax in widthratio tag. Expecting 'as' keyword")
else:
raise TemplateSyntaxError("widthratio takes at least three arguments")
return WidthRatioNode(parser.compile_filter(this_value_expr),
parser.compile_filter(max_value_expr),
parser.compile_filter(max_width),
asvar=asvar)
@register.tag('with')
def do_with(parser, token):
"""
Adds one or more values to the context (inside of this block) for caching
and easy access.
For example::
{% with total=person.some_sql_method %}
{{ total }} object{{ total|pluralize }}
{% endwith %}
Multiple values can be added to the context::
{% with foo=1 bar=2 %}
...
{% endwith %}
The legacy format of ``{% with person.some_sql_method as total %}`` is
still accepted.
"""
bits = token.split_contents()
remaining_bits = bits[1:]
extra_context = token_kwargs(remaining_bits, parser, support_legacy=True)
if not extra_context:
raise TemplateSyntaxError("%r expected at least one variable "
"assignment" % bits[0])
if remaining_bits:
raise TemplateSyntaxError("%r received an invalid token: %r" %
(bits[0], remaining_bits[0]))
nodelist = parser.parse(('endwith',))
parser.delete_first_token()
return WithNode(None, None, nodelist, extra_context=extra_context)
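# --- Added closing sketch; not part of the original module. ---
# A minimal custom tag following the Node + parser.parse() pattern used by
# the tags above. The tag name 'shout' is an assumption; it is deliberately
# left unregistered so this sketch does not alter the default tag library.
class _ExampleShoutNode(Node):
    def __init__(self, nodelist):
        self.nodelist = nodelist
    def render(self, context):
        # Upper-case the rendered contents of the enclosed block.
        return self.nodelist.render(context).upper()
def _example_do_shout(parser, token):
    # In a real library this would carry @register.tag('shout').
    nodelist = parser.parse(('endshout',))
    parser.delete_first_token()
    return _ExampleShoutNode(nodelist)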
| bsd-3-clause | -8,001,503,329,952,422,000 | 5,820,791,328,130,160,000 | 33.593565 | 171 | 0.575675 | false |