code stringlengths 3 to 1.05M | repo_name stringlengths 5 to 104 | path stringlengths 4 to 251 | language stringclasses 1 value | license stringclasses 15 values | size int64 3 to 1.05M |
---|---|---|---|---|---|
import urllib
import requests
from .exceptions import DocTypeException, DocIDException
from .settings import *
def validate_doc_type(doc_type):
"""Make sure the provided doc_type is
supported
"""
try:
DOC_TYPES.index(doc_type)
except ValueError:
raise DocTypeException
def validate_doc_id(doc_id, doc_type):
"""Some of the document endpoints take the unique document id
as a number and not a string. For the ones that take a string,
    we have to add single quotes to the doc_id
"""
if doc_type not in NUMBER_DOC_TYPE:
try:
doc_id = "\'" + doc_id + "\'"
except TypeError:
raise DocIDException
return doc_id
def construct_url(doc_type, **kwargs):
"""Build a URL to query the API
"""
# Construct a dict of just the ODATA query parameters
query_params = {}
for arg in kwargs:
if arg in QUERY_PARAMS:
query_params[arg] = kwargs[arg]
# Count isn't a real query param, but better than inlinecount=allpages
    # We let the user pass count=True, then add inlinecount=allpages for them
if 'count' in kwargs and kwargs['count'] is True:
query_params['inlinecount'] = 'allpages'
f_query_params = construct_params(query_params)
url = API_SERVER + API_BASE + doc_type + f_query_params
return url
def construct_params(query_params):
"""
:query_params: a dictionary with param_name:value
"""
params = '?'
full_params = apply_default_params(query_params)
# We need to put a '$' in front of every parameter name
modified_params = {}
for k, v in full_params.items():
modified_params['$' + k] = v
params += urllib.urlencode(modified_params, True)
return params
def apply_default_params(query_params):
"""Apply the default parameters to the query_params
specified by the user
"""
for k, v in DEFAULT_PARAMS.items():
if k not in query_params.keys():
query_params[k] = v
return query_params
def invoke_api(url):
"""Make a call to the API with the provided URL
and return the results
"""
r = requests.get(url)
results = r.json()
response = process_results(results)
return response
def process_results(results):
"""Construct the request response into a
slightly more intuitive structure
"""
response = {}
try:
response['count'] = int(results['d']['__count'])
except:
response['count'] = None
if 'error' in results.keys():
response['error'] = results['error']
response['results'] = None
elif type(results['d']) is list:
response['results'] = results['d']
elif 'results' in results['d'].keys():
response['results'] = results['d']['results']
else:
response['results'] = results['d']
return response
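# Hedged shape sketch of process_results (illustrative payloads, not from a
# real endpoint): an OData-style body such as
#   {"d": {"__count": "2", "results": [{...}, {...}]}}
# becomes {"count": 2, "results": [{...}, {...}]}, while a body containing an
# "error" key yields {"count": None, "error": ..., "results": None}.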
def get_related(response):
"""Make calls to the 'deferred'/related document
types contained in the provided results object
"""
deferred_urls = get_deferred_urls(response['results'])
for entity, url in deferred_urls.items():
r = requests.get(url + '?$format=json')
related = r.json()
response['results'][entity] = related['d']
return response
def get_deferred_urls(results):
"""Returns a list of URLS for all
the deferred entities for a particular entity
:results: the result for a call to get_permit,
get_case, etc
"""
urls = {}
for doc in DOC_TYPES:
if doc in results.keys():
urls[doc] = results[doc]['__deferred']['uri']
return urls
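# Hedged usage sketch (illustrative only; 'permits' is a hypothetical doc_type
# and the host/defaults come from settings.API_SERVER, API_BASE, DOC_TYPES and
# DEFAULT_PARAMS):
#
#   validate_doc_type('permits')
#   url = construct_url('permits', top=5, count=True)
#   response = invoke_api(url)
#   response = get_related(response)  # optionally resolve deferred entities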
| AxisPhilly/py-li | li/utils.py | Python | mit | 3,627 |
import numpy as np
import scipy.optimize as op
from .nncostFunction import getCost, getGrad
def optimize(X, y, theta):
Result = op.minimize(fun = getCost, x0 = theta, args=(X, y), method='TNC', jac= getGrad, options={'maxiter': 400})
    return Result.x
| pk-ai/training | machine-learning/coursera_exercises/ex4/in_python/exercises/advOptimze.py | Python | mit | 257 |
'''
Created on Jun 16, 2015
@author: theo
'''
from django.contrib import admin
from django import forms
from django.forms import Textarea
from django.contrib.gis.db import models
from models import UserProfile, Adres, Waarnemer, Meetpunt, Organisatie, AkvoFlow, CartoDb, Waarneming, Phone
from acacia.data.models import DataPoint, ManualSeries, ProjectLocatie,\
MeetLocatie, Series
from acacia.data.events.models import Event
from django.core.exceptions import ValidationError
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from util import maak_meetpunt_grafiek, zoek_tijdreeksen
import re
from models import Alias, Logo, RegisteredUser
import util
from django.shortcuts import get_object_or_404
class UserProfileInline(admin.StackedInline):
model = UserProfile
can_delete = False
verbose_name_plural = 'profile'
class UserAdmin(UserAdmin):
inlines = (UserProfileInline, )
# Re-register UserAdmin
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
class DataPointInline(admin.TabularInline):
#class DataPointInline(nested_admin.TabularInline):
model = DataPoint
class SeriesInline(admin.StackedInline):
#class SeriesInline(nested_admin.NestedStackedInline):
model = ManualSeries
fields = ('name',)
inlines = (DataPointInline,)
verbose_name = 'Tijdreeks'
verbose_name_plural = 'Tijdreeksen'
def meetpunt_elevation_from_ahn(modeladmin, request, queryset):
from acacia.ahn.models import AHN
from django.contrib.gis.geos import Point
ahn = get_object_or_404(AHN,name='AHN3 0.5m DTM')
for mp in queryset:
x = mp.location.x
y = mp.location.y
mp.ahn = ahn.get_elevation(x,y)
mp.save()
meetpunt_elevation_from_ahn.short_description = 'Bepaal NAP hoogte adhv AHN3'
def maak_grafiek(modeladmin, request, queryset):
for m in queryset:
maak_meetpunt_grafiek(m,request.user)
maak_grafiek.short_description = "Maak grafieken voor geselecteerde meetpunten"
def update_series(modeladmin, request, queryset):
util.updateSeries(queryset, request.user)
update_series.short_description = 'Tijdreeksen actualiseren van geselecteerde meetpunten'
class WaarnemingInline(admin.TabularInline):
model = Waarneming
exclude = ('opmerking',)
extra = 0
def update_cdb_meetpunten(modeladmin, request, queryset):
util.updateSeries(queryset, request.user)
util.exportCartodb(CartoDb.objects.get(pk=1), queryset)
update_cdb_meetpunten.short_description = 'cartodb en tijdreeksen actualiseren met waarnemingen van geselecteerde meetpunten'
def update_cdb_waarnemers(modeladmin, request, queryset):
mps = []
for w in queryset:
mps.extend(w.meetpunt_set.all())
util.updateSeries(mps, request.user)
util.updateCartodb(CartoDb.objects.get(pk=1), mps)
update_cdb_waarnemers.short_description = 'cartodb en tijdreeksen actualiseren voor meetpunten van geselecteerde waarnemers'
def update_cdb_locations(modeladmin, request, queryset):
util.updateCartodbLocation(CartoDb.objects.get(pk=1), queryset, 'allemetingen')
update_cdb_locations.short_description = 'actualiseren van de locaties van geselecteerde meetpunten bij cartodb'
def export_cdb_waarnemingen(modeladmin, request, queryset):
util.exportCartodb2(CartoDb.objects.get(pk=1), queryset)
export_cdb_waarnemingen.short_description = 'geselecteerde waarnemingen exporteren naar cartodb'
def export_cdb_meetpunten(modeladmin, request, queryset):
util.exportCartodb(CartoDb.objects.get(pk=1), queryset)
export_cdb_meetpunten.short_description = 'geselecteerde meetpunten exporteren naar cartodb'
class EventInline(admin.TabularInline):
model = Event
def link_series1(modeladmin, request, queryset):
for m in queryset:
series = zoek_tijdreeksen(m.location,1)
for s in series:
if not s.mlocatie:
s.mlocatie = m
s.save()
link_series1.short_description = 'Koppel gerelateerde tijdreeksen aan geselecteerde meetpunten'
def link_series(modeladmin, request, queryset):
for m in queryset:
for cs in m.chart.series.all():
for s in [cs.series, cs.series2]:
if s and not s.mlocatie:
s.mlocatie = m
s.save()
link_series.short_description = 'Koppel gerelateerde tijdreeksen aan geselecteerde meetpunten'
@admin.register(Meetpunt)
class MeetpuntAdmin(admin.ModelAdmin):
#class MeetpuntAdmin(nested_admin.NestedAdmin):
actions = [maak_grafiek,update_series,update_cdb_locations, update_cdb_meetpunten,link_series,export_cdb_meetpunten,meetpunt_elevation_from_ahn]
list_display = ('identifier', 'projectlocatie', 'name', 'waarnemer', 'displayname', 'description', 'ahn', 'aantal_waarnemingen', 'photo')
list_filter = ('waarnemer', 'projectlocatie')
inlines = [WaarnemingInline,]
search_fields = ('name', 'waarnemer__achternaam', )
fields = ('name', 'waarnemer', 'projectlocatie', 'location', 'photo_url', 'chart_thumbnail', 'description',)
formfield_overrides = {models.PointField:{'widget': Textarea}}
class AdresForm(forms.ModelForm):
model = Adres
def clean_postcode(self):
pattern = r'\d{4}\s*[A-Za-z]{2}'
data = self.cleaned_data['postcode']
if re.search(pattern, data) is None:
raise ValidationError('Onjuiste postcode')
return data
@admin.register(Adres)
class AdresAdmin(admin.ModelAdmin):
form = AdresForm
fieldsets = (
('', {'fields': (('straat', 'huisnummer', 'toevoeging'),('postcode', 'plaats')),
'classes': ('grp-collapse grp-open',),
}
),
)
@admin.register(Alias)
class AliasAdmin(admin.ModelAdmin):
list_display = ('alias', 'waarnemer')
list_filter = ('waarnemer', )
search_fields = ('alias', 'waarnemer', )
class AliasInline(admin.TabularInline):
model = Alias
extra = 0
@admin.register(Waarnemer)
class WaarnemerAdmin(admin.ModelAdmin):
class LocatieFilter(admin.SimpleListFilter):
title = 'locatie'
parameter_name = 'locatie'
def lookups(self, request, modeladmin):
return [(p.pk, p.name) for p in ProjectLocatie.objects.all()]
def queryset(self, request, queryset):
# if self.value() is not None:
# mps = Meetpunt.objects.filter(waarnemer__in=queryset, projectlocatie=self.value)
# return queryset.filter(meetpunt_set__projectlocatie__name = self.value())
return queryset
actions = [update_cdb_waarnemers,]
list_display = ('achternaam', 'tussenvoegsel', 'voornaam', 'initialen','organisatie', 'projectlocaties', 'aantal_meetpunten', 'aantal_waarnemingen')
list_filter = ('achternaam', 'organisatie', LocatieFilter)
search_fields = ('achternaam', 'voornaam', )
ordering = ('achternaam', )
inlines = [AliasInline]
@admin.register(Organisatie)
class OrganisatieAdmin(admin.ModelAdmin):
raw_id_fields = ('adres',)
autocomplete_lookup_fields = {
'fk': ['adres',],
}
@admin.register(AkvoFlow)
class AkvoAdmin(admin.ModelAdmin):
list_display = ('name', 'instance', 'description')
list_filter = ('name', )
search_fields = ('name', 'instance', )
@admin.register(CartoDb)
class CartodbAdmin(admin.ModelAdmin):
list_display = ('name', 'url', 'description')
list_filter = ('name', )
search_fields = ('name', 'url', )
@admin.register(Waarneming)
class WaarnemingAdmin(admin.ModelAdmin):
list_display = ('naam', 'datum', 'waarnemer', 'locatie', 'device','waarde', 'eenheid', 'photo')
list_filter = ('naam', 'waarnemer', 'locatie', 'device', 'datum' )
actions = [export_cdb_waarnemingen,]
@admin.register(Logo)
class LogoAdmin(admin.ModelAdmin):
list_display = ('name','order','img')
@admin.register(RegisteredUser)
class RegisteredUserAdmin(admin.ModelAdmin):
exclude = ('website', 'status', 'organisatie')
fieldsets = (
('Persoonsgegevens', {'fields': (('voornaam', 'tussenvoegsel', 'achternaam'),('email', 'telefoon')),
'classes': ('grp-collapse grp-open',),
}
),
('Telefoon', {'fields': ('akvo_name', 'device_id',),
'classes': ('grp-collapse grp-open',),
}
),
)
@admin.register(Phone)
class PhoneAdmin(admin.ModelAdmin):
list_display = ('device_id','last_contact', 'latitude', 'longitude')
def importWaarnemingenAction(modeladmin, request, queryset):
user = request.user
try:
alias = Alias.objects.get(alias=user.get_username())
except:
alias = Alias.objects.create(alias=user.get_username(),
waarnemer = Waarnemer.objects.create(achternaam=user.last_name or user.get_username(), voornaam=request.user.first_name, email = user.email))
waarnemer = alias.waarnemer
for obj in queryset:
if isinstance(obj, MeetLocatie):
series = obj.series_set.all()
elif isinstance(obj,Datasource):
series = obj.getseries()
elif isinstance(obj,Series):
series = [obj]
for s in series:
util.importSeries(s,waarnemer)
importWaarnemingenAction.short_description = 'importeer waarnemingen van geselecteerde onderdelen'
def importMeetpuntenAction(modeladmin, request, queryset):
user = request.user
try:
alias = Alias.objects.get(alias=user.get_username())
except:
alias = Alias.objects.create(alias=user.get_username(),
waarnemer = Waarnemer.objects.create(achternaam=user.last_name or user.get_username(), voornaam=request.user.first_name, email = user.email))
waarnemer = alias.waarnemer
for obj in queryset:
if isinstance(obj,Datasource):
locs = obj.locations.all()
for loc in locs:
util.importMeetpunt(loc,waarnemer)
elif isinstance(obj,Series):
loc = obj.meetlocatie()
util.importMeetpunt(loc,waarnemer)
elif isinstance(obj,MeetLocatie):
util.importMeetpunt(obj,waarnemer)
importMeetpuntenAction.short_description = 'importeer meetpunten van geselecteerde onderdelen'
# Add custom action to datasource admin page
from acacia.data.models import Datasource, MeetLocatie
from acacia.data.admin import DatasourceAdmin, MeetLocatieAdmin
class MyDatasourceAdmin(DatasourceAdmin):
def __init__(self, model, admin_site):
super(MyDatasourceAdmin,self).__init__(model,admin_site)
self.actions.extend([importWaarnemingenAction,importMeetpuntenAction])
admin.site.unregister(Datasource)
admin.site.register(Datasource, MyDatasourceAdmin)
class MyMeetLocatieAdmin(MeetLocatieAdmin):
def __init__(self, model, admin_site):
super(MyMeetLocatieAdmin,self).__init__(model,admin_site)
self.actions.extend([importWaarnemingenAction,importMeetpuntenAction])
admin.site.unregister(MeetLocatie)
admin.site.register(MeetLocatie, MyMeetLocatieAdmin)
| acaciawater/iom | iom/admin.py | Python | apache-2.0 | 11,264 |
#!/usr/bin/env python
import argparse
import gzip
import logging
import os
import shutil
import subprocess
browser_specific_args = {
"firefox": ["--install-browser"]
}
def tests_affected(commit_range):
output = subprocess.check_output([
"python", "./wpt", "tests-affected", "--null", commit_range
], stderr=open(os.devnull, "w"))
tests = output.split("\0")
# Account for trailing null byte
if tests and not tests[-1]:
tests.pop()
return tests
def find_wptreport(args):
parser = argparse.ArgumentParser()
parser.add_argument('--log-wptreport', action='store')
return parser.parse_known_args(args)[0].log_wptreport
def gzip_file(filename, delete_original=True):
with open(filename, 'rb') as f_in:
with gzip.open('%s.gz' % filename, 'wb') as f_out:
shutil.copyfileobj(f_in, f_out)
if delete_original:
os.unlink(filename)
def main(product, commit_range, wpt_args):
"""Invoke the `wpt run` command according to the needs of the TaskCluster
continuous integration service."""
logger = logging.getLogger("tc-run")
logger.setLevel(logging.INFO)
handler = logging.StreamHandler()
handler.setFormatter(
logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
)
logger.addHandler(handler)
child = subprocess.Popen(['python', './wpt', 'manifest-download'])
child.wait()
if commit_range:
logger.info(
"Identifying tests affected in range '%s'..." % commit_range
)
tests = tests_affected(commit_range)
logger.info("Identified %s affected tests" % len(tests))
if not tests:
logger.info("Quitting because no tests were affected.")
return
else:
tests = []
logger.info("Running all tests")
wpt_args += [
"--log-tbpl-level=info",
"--log-tbpl=-",
"-y",
"--no-pause",
"--no-restart-on-unexpected",
"--install-fonts",
"--no-headless"
]
wpt_args += browser_specific_args.get(product, [])
command = ["python", "./wpt", "run"] + wpt_args + [product] + tests
logger.info("Executing command: %s" % " ".join(command))
subprocess.check_call(command)
wptreport = find_wptreport(wpt_args)
if wptreport:
gzip_file(wptreport)
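# Hedged invocation sketch (the commit range and product are illustrative;
# additional trailing arguments, if any, are forwarded verbatim to `wpt run`):
#
#   python tools/ci/taskcluster-run.py --commit-range HEAD~1..HEAD firefox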
if __name__ == "__main__":
parser = argparse.ArgumentParser(description=main.__doc__)
parser.add_argument("--commit-range", action="store",
help="""Git commit range. If specified, this will be
supplied to the `wpt tests-affected` command to
determine the list of test to execute""")
parser.add_argument("product", action="store",
help="Browser to run tests in")
parser.add_argument("wpt_args", nargs="*",
help="Arguments to forward to `wpt run` command")
main(**vars(parser.parse_args()))
| jimberlage/servo | tests/wpt/web-platform-tests/tools/ci/taskcluster-run.py | Python | mpl-2.0 | 3,009 |
invalid_syntax(
| neomake/neomake | tests/fixtures/errors.py | Python | mit | 16 |
"""
An abstract representation of NetCDF data for manipulation purposes.
The purpose of this is to allow arbitrary manipulation of NetCDF data,
decoupled from the netCDF4 file-based API.
For example::
import ncobj.nc_dataset as ncds
with netCDF4.Dataset(file_in_path) as ds_in:
in_group = ncds.read(ds_in)
out_group = ncobj.Group()
# Copy selected variables to output.
for var_name in ('measure', 'x_points', 'y_points'):
var = in_group.variables[var_name]
# Remove any bounds references.
var.attributes.pop('bounds', None)
out_group.variables.add(var)
# Save selected variables (includes necessary dimensions).
ncds.write(file_out_path, out_group)
A separate 'nc_dataset' submodule provides an interface for reading and
writing this form to and from NetCDF4.Dataset objects.
The containment of elements within other elements is two-way navigable, so a
reference to any part of a data structure potentially references the entire
object. This enables all elements to provide a "remove" method.
(For this purpose, Attributes are also full independent objects.)
Elements which may be the target of internal naming "references", such as
user-types and dimensions, can be either duplicate object references or
independent objects. Any inconsistent references are automatically reconciled
when writing the dataset to an actual file.
This enables freely moving sections of data between files, with any
referenced elements being re-created as required.
.. note::
Does not yet support extended (user) datatypes.
"""
from abc import ABCMeta, abstractmethod, abstractproperty
import numpy as np
__version__ = '0.4.x'
class NcObj(object):
"""
A generic (abstract) object representing a named element, aka a NetCDF
"component".
"""
__metaclass__ = ABCMeta
@abstractmethod
def detached_copy(self):
"""Return an independent 'unlinked' copy of this element."""
pass
@abstractmethod
def __eq__(self, other):
"""Return whether equal to another."""
pass
def __init__(self, name=None):
"""
Args:
* name (string):
The name of this element (unique within any containing element).
"""
if name is None:
name = ''
self._name = name
# The container this is in -- initially none.
self._container = None
@property
def container(self):
"""The :class:`NcobjContainer` this is in, if any."""
return self._container
def is_definition(self):
"""Return whether this element is a definition within a group."""
return self.container and self.container.is_definitions()
def definitions_group(self):
"""Return the Group in which this element is a definition, or fail."""
if not self.is_definition():
raise ValueError('element {} is not a definition: Its container '
'is {}.'.format(self, self._container))
return self.container.in_element
@property
def name(self):
"""Name of the element."""
return self._name
def rename(self, name):
"""
Rename the element.
Args:
* name (string):
the new name for this element.
.. note::
This affects the container, if it is in one, and can raise an
error if the name already exists in the container.
"""
if self.container:
self.container.rename_element(self, name)
else:
# detached object.
self._name = name
def remove(self):
"""
Remove from the parent container, if any.
"""
if self.container:
self.container.remove(self)
def __ne__(self, other):
return not (self == other)
def _prop_repr(obj, property_name):
"""Make an optional initialisation string for a property."""
result = ''
if hasattr(obj, property_name):
val = getattr(obj, property_name)
if val:
result = ', {}={!r}'.format(property_name, val)
return result
class Dimension(NcObj):
"""A NetCDF dimension object."""
def __init__(self, name, length=None, unlimited=False):
NcObj.__init__(self, name)
#: The length of the dimension.
self.length = length
#: Whether the dimension is unlimited.
self.unlimited = unlimited
def detached_copy(self):
return Dimension(name=self.name, length=self.length,
unlimited=self.unlimited)
def __str__(self):
return '<Dimension "{}" = {}>'.format(self.name, self.length)
def __repr__(self):
return 'Dimension({}, length={}{}{})'.format(
self.name, self.length,
_prop_repr(self, 'container'),
_prop_repr(self, 'unlimited'))
def __eq__(self, other):
return (isinstance(other, Dimension) and
other.name == self.name and
other.length == self.length and
other.unlimited == self.unlimited)
class Attribute(NcObj):
"""A NetCDF attribute object."""
def __init__(self, name, value):
NcObj.__init__(self, name)
#: The value of the attribute.
self.value = value
def detached_copy(self):
return Attribute(name=self.name, value=self.value)
def __eq__(self, other):
# NOTE: attributes do not have a type. Is this correct ???
return (isinstance(other, Attribute) and
other.name == self.name and other.value == self.value)
def __str__(self):
return '<Attribute "{}" = {}>'.format(self.name, self.value)
def __repr__(self):
return 'Attribute({}, value={}{})'.format(
self.name, self.value,
_prop_repr(self, 'container'))
class Variable(NcObj):
"""A NetCDF variable object."""
def __init__(self, name,
dimensions=None, dtype=None, data=None, attributes=None):
NcObj.__init__(self, name)
if dimensions is None:
dimensions = []
elif isinstance(dimensions, Dimension):
dimensions = [dimensions]
#: :class:`Dimension` s of the variable.
self.dimensions = list(dimensions)
#: :class:`Attribute` s of the variable.
self.attributes = NcAttributesContainer(attributes)
if hasattr(dtype, 'detached_copy'):
# Needed for user-types.
dtype = dtype.detached_copy()
self.dtype = dtype
#: Variable data (indexable, with shape). Typically a
#: :class:`NetCDF4.Variable`, or :class:`numpy.ndarray`.
self.data = data
def detached_copy(self):
return Variable(name=self.name, dtype=self.dtype, data=self.data,
dimensions=[dim.detached_copy()
for dim in self.dimensions],
attributes=self.attributes.detached_contents_copy())
def __eq__(self, other):
return (isinstance(other, Variable) and
self.name == other.name and
self.dtype == other.dtype and
np.all(self.data == other.data) and
self.dimensions == other.dimensions and
self.attributes == other.attributes)
def __str__(self):
repstr = '<Variable "{}":'.format(self.name)
repstr += ' dims=({})'.format(
', '.join(d.name for d in self.dimensions))
# repstr += ', data={}'.format(self.data)
if self.attributes:
repstr += ', attrs=({})'.format(
', '.join(str(a) for a in self.attributes))
return repstr + ')'
def __repr__(self):
repstr = 'Variable({}, dtype={!r}'.format(self.name, self.dtype)
if self.dimensions:
repstr += ', dimensions={!r}'.format(self.dimensions)
# repstr += ', data={}'.format(self.data)
repstr += _prop_repr(self, 'attributes')
repstr += _prop_repr(self, 'container')
return repstr + ')'
class NcobjContainer(object):
"""
A generic (abstract) container object for :class:`NcObj` objects
(aka "elements").
"""
__metaclass__ = ABCMeta
@abstractproperty
# N.B. this should really also be *static*, but apparently can't have this
# in Python 2. Ref: http://bugs.python.org/issue5867
def element_type(self):
"""The type (class) of elements this can contain."""
return None
def __init__(self, contents=None, in_element=None):
"""
Args:
* contents (iterable):
A set of elements specifying the initial contents.
* in_element (:class:`NcObj`):
The element that this container exists in (if any).
If this is a group, then the container's elements are definitions
in that group (and self.is_definitions() is True).
Note: the containers mostly emulate a dictionary. A variety of
indexing methods are provided -- __setitem__, __getitem__,
__delitem__, pop, add and remove (the last two take the element not
the name).
Use names() for the names, and iter() or list() for the contents.
Assigning to an existing name is an error, so "self[name].name == name"
is always true. A blank name is also forbidden.
len() is also supported.
TODO: probably more constraints on names for NetCDF validity ??
"""
self._in_element = in_element
self._content = {}
if contents:
self.add_allof(contents)
@property
def in_element(self):
"""The element that this container exists in, if any."""
return self._in_element
def is_definitions(self):
"""
Return whether this contains definitions in a :class:`Group`.
"""
return isinstance(self.in_element, Group)
def _check_element_type(self, element):
if not isinstance(element, self.element_type):
raise TypeError('Element named "{}" is not a {}, so cannot be '
'included in a {} container.'.format(
element.name,
self.element_type.__name__,
self.__class__.__name__))
def _check_element_name(self, name):
if not isinstance(name, basestring) or len(name) == 0:
raise ValueError('invalid element name "{}"'.format(name))
def detached_contents_copy(self):
"""
Return a copy of the container with detached copies of the elements.
"""
elements = [element.detached_copy()
for element in self._content.itervalues()]
return self.__class__(contents=elements)
def names(self):
"""Return a list of names of the contained elements."""
return self._content.keys()
def __getitem__(self, name):
"""Return the named element."""
return self._content[name]
def get(self, name, default=None):
"""Return the named element, if any, or a default value."""
return self._content.get(name, default)
def _setitem_ref_or_copy(self, name, element, detached_copy=False):
# Assign as self[name]=element, taking a copy if specified.
# NOTE: *ALL* element-adding operations must come through here.
self._check_element_type(element)
self._check_element_name(name)
if name in self.names():
raise ValueError('An element named "{}" already exists.'.format(
name))
if detached_copy:
# Make a de-referenced copy of the element to add in.
element = element.detached_copy()
else:
# Adding this actual element. Remove from any existing.
element.remove()
element._name = name
self._content[name] = element
element._container = self
def setitem_reference(self, name, element):
"""
Put an element reference in the container, as _content[name]=value.
This is a lower-level operation than
:meth:`~NcobjContainer.__setitem__`, with important
side-effects on the 'element' arg: Whereas __setitem__ treats the
assigned element simply as a value, of which it makes a detached copy,
this method inserts the actual element specified (first removing it
from any existing parent container).
"""
self._setitem_ref_or_copy(name, element, detached_copy=False)
def __setitem__(self, name, element):
"""
Place an element in the container under a given name.
Note: content is copied from the provided element. To insert an
actual existing NcObj, use :meth:`~NcobjContainer.setitem_reference`.
"""
self._setitem_ref_or_copy(name, element, detached_copy=True)
def pop(self, name, *args):
"""Remove and return the named element, or return default."""
if len(args) > 1:
# behaviour of "pop" is slightly odd : can't use 'default=None'
raise TypeError('pop expected at most 2 arguments, got {}'.format(
1 + len(args)))
if name in self._content:
# Extract and detach.
result = self._content.pop(name)
result._container = None
else:
# Return supplied default, or fail if none given.
if len(args):
result = args[0]
else:
raise KeyError(name)
return result
def __delitem__(self, name):
"""Remove the named element."""
self.pop(name)
def remove(self, element):
"""Remove the matching element."""
if element not in self._content.values():
raise KeyError(element)
return self.pop(element.name)
def add(self, element):
"""Place an element in the container under its existing name."""
self[element.name] = element
def add_allof(self, elements):
"""Add multiple elements."""
for element in elements:
self.add(element)
def remove_allof(self, elements):
"""Remove multiple elements."""
for element in elements:
self.remove(element)
def __iter__(self):
"""Iterate over contents."""
return self._content.itervalues()
def __len__(self):
"""Return length."""
return len(self._content)
def __eq__(self, other):
"""Return whether equal to another."""
return (isinstance(other, NcobjContainer) and
other.element_type == self.element_type and
self._content == other._content)
def __ne__(self, other):
return not (self == other)
def rename_element(self, element, new_name):
"""Change content name (can raise KeyError)."""
element = self.remove(element)
self.setitem_reference(new_name, element)
def __str__(self):
contents = ', '.join('{}'.format(el) for el in self)
return '<NcContainer({}): {}>'.format(
self.element_type.__name__, contents)
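# Hedged usage sketch of the dict-like container behaviour documented above
# (names and lengths are illustrative):
#
#   g = Group('example')
#   g.dimensions.add(Dimension('x', length=3))
#   assert g.dimensions['x'].length == 3
#   g.dimensions.rename_element(g.dimensions['x'], 'x_points')
#   assert 'x' not in g.dimensions.names()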
class Group(NcObj):
"""A NetCdf Group object."""
def __init__(self, name='',
dimensions=None, variables=None, attributes=None,
sub_groups=None,
parent_group=None):
NcObj.__init__(self, name)
self._parent = parent_group
#: An :class:`NcDimensionsContainer` of dimensions in this Group.
self.dimensions = NcDimensionsContainer(dimensions, in_element=self)
#: An :class:`NcVariablesContainer` of variables in this Group.
self.variables = NcVariablesContainer(variables, in_element=self)
        #: An :class:`NcAttributesContainer` of attributes in this Group.
self.attributes = NcAttributesContainer(attributes, in_element=self)
        #: An :class:`NcGroupsContainer` of subgroups of this Group.
self.groups = NcGroupsContainer(sub_groups, in_element=self)
for group in self.groups:
group._parent = self
@property
def parent_group(self):
"""Return the parent Group of this, if any."""
return self._parent
# NOTE: at present, parent links are correctly established in __init__ and
# detached_copy, but not automatically preserved by add+remove in
# NcGroupsContainer. This probably needs addressing.
def detached_copy(self):
return Group(name=self.name,
dimensions=self.dimensions,
variables=self.variables,
attributes=self.attributes,
sub_groups=self.groups,
parent_group=None)
def __eq__(self, other):
# Don't see a purpose for group equality ?
return (isinstance(other, Group) and
other.name == self.name and
other.dimensions == self.dimensions and
other.variables == self.variables and
other.attributes == self.attributes and
other.groups == self.groups)
def __str__(self, indent=None):
indent = indent or ' '
strmsg = '<Group "{}":'.format(self.name)
strmsg += '\n{}dims=({})'.format(indent, self.dimensions)
strmsg += '\n{}vars=({})'.format(indent, self.variables)
if self.attributes:
strmsg += '\n{}attrs=({})'.format(indent, self.attributes)
if self.groups:
strmsg += ''.join('\n' + group.__str__(indent + ' ')
for group in self.groups)
strmsg += '\n>'
return strmsg
class NcAttributesContainer(NcobjContainer):
"""An :class:`Attribute` container."""
@property
def element_type(self):
return Attribute
class NcDimensionsContainer(NcobjContainer):
"""A :class:`Dimension` container."""
@property
def element_type(self):
return Dimension
class NcVariablesContainer(NcobjContainer):
"""A :class:`Variable` container."""
@property
def element_type(self):
return Variable
# TODO: wrap generic contents handling to allow specifying dims by name
class NcGroupsContainer(NcobjContainer):
"""A :class:`Group` container."""
@property
def element_type(self):
return Group
def _setitem_ref_or_copy(self, name, element, detached_copy=False):
NcobjContainer._setitem_ref_or_copy(self, name, element,
detached_copy=detached_copy)
in_group = self.in_element
if isinstance(in_group, Group):
self[name]._parent = in_group
def pop(self, name, *args):
extract_ok = name in self._content
result = NcobjContainer.pop(self, name, *args)
if extract_ok:
result._parent = None
return result
| pp-mo/ncobj | lib/ncobj/__init__.py | Python | gpl-3.0 | 19,019 |
# BASE CACHE KEYS
IMAGE_PREVIEW_CACHE = "image.preview."
RENDERED_CONTENT_CACHE = "rendered.content."
| alirizakeles/tendenci | tendenci/apps/base/cache.py | Python | gpl-3.0 | 101 |
from pprint import pformat
class Model(object):
"""
Implements a generic object.
"""
def __init__(self, data, api):
self.temp_id = ""
self.data = data
self.api = api
def __setitem__(self, key, value):
self.data[key] = value
def __getitem__(self, key):
return self.data[key]
def __repr__(self):
formatted_dict = pformat(dict(self.data))
classname = self.__class__.__name__
return "%s(%s)" % (classname, formatted_dict)
def __contains__(self, value):
return value in self.data
class Collaborator(Model):
"""
Implements a collaborator.
"""
def delete(self, project_id):
"""
Deletes a collaborator from a shared project.
"""
self.api.collaborators.delete(project_id, self["email"])
class CollaboratorState(Model):
"""
Implements a collaborator state.
"""
pass
class Filter(Model):
"""
Implements a filter.
"""
def update(self, **kwargs):
"""
Updates filter.
"""
self.api.filters.update(self["id"], **kwargs)
self.data.update(kwargs)
def delete(self):
"""
Deletes filter.
"""
self.api.filters.delete(self["id"])
self.data["is_deleted"] = 1
class Item(Model):
"""
Implements an item.
"""
def update(self, **kwargs):
"""
Updates item.
"""
self.api.items.update(self["id"], **kwargs)
self.data.update(kwargs)
def delete(self):
"""
Deletes item.
"""
self.api.items.delete(self["id"])
self.data["is_deleted"] = 1
def move(self, **kwargs):
"""
Moves item to another parent, project, or section.
"""
if "parent_id" in kwargs:
self.api.items.move(self["id"], parent_id=kwargs.get("parent_id"))
self.data["parent_id"] = kwargs.get("parent_id")
elif "project_id" in kwargs:
self.api.items.move(self["id"], project_id=kwargs.get("project_id"))
self.data["project_id"] = kwargs.get("project_id")
elif "section_id" in kwargs:
self.api.items.move(self["id"], section_id=kwargs.get("section_id"))
self.data["section_id"] = kwargs.get("section_id")
else:
raise TypeError("move() takes one of parent_id, project_id, or section_id arguments")
def reorder(self, child_order):
"""
Reorder item.
"""
self.api.items.reorder([{"id": self["id"], "child_order": child_order}])
self.data["child_order"] = child_order
def close(self):
"""
Marks item as closed
"""
self.api.items.close(self["id"])
def complete(self, date_completed=None):
"""
Marks item as completed.
"""
self.api.items.complete(self["id"], date_completed=date_completed)
self.data["checked"] = 1
def uncomplete(self):
"""
Marks item as uncompleted.
"""
self.api.items.uncomplete(self["id"])
self.data["checked"] = 0
def archive(self):
"""
Marks item as archived.
"""
self.api.items.archive(self["id"])
self.data["in_history"] = 1
def unarchive(self):
"""
Marks item as unarchived.
"""
self.api.items.unarchive(self["id"])
self.data["in_history"] = 0
def update_date_complete(self, due=None):
"""
Completes a recurring task.
"""
self.api.items.update_date_complete(self["id"], due=due)
if due:
self.data["due"] = due
class Label(Model):
"""
Implements a label.
"""
def update(self, **kwargs):
"""
Updates label.
"""
self.api.labels.update(self["id"], **kwargs)
self.data.update(kwargs)
def delete(self):
"""
Deletes label.
"""
self.api.labels.delete(self["id"])
self.data["is_deleted"] = 1
class LiveNotification(Model):
"""
Implements a live notification.
"""
pass
class GenericNote(Model):
"""
Implements a note.
"""
#: has to be defined in subclasses
local_manager = None
def update(self, **kwargs):
"""
Updates note.
"""
self.local_manager.update(self["id"], **kwargs)
self.data.update(kwargs)
def delete(self):
"""
Deletes note.
"""
self.local_manager.delete(self["id"])
self.data["is_deleted"] = 1
class Note(GenericNote):
"""
Implement an item note.
"""
def __init__(self, data, api):
GenericNote.__init__(self, data, api)
self.local_manager = self.api.notes
class ProjectNote(GenericNote):
"""
Implement a project note.
"""
def __init__(self, data, api):
GenericNote.__init__(self, data, api)
self.local_manager = self.api.project_notes
class Project(Model):
"""
Implements a project.
"""
def update(self, **kwargs):
"""
Updates project.
"""
self.api.projects.update(self["id"], **kwargs)
self.data.update(kwargs)
def delete(self):
"""
Deletes project.
"""
self.api.projects.delete(self["id"])
self.data["is_deleted"] = 1
def archive(self):
"""
Marks project as archived.
"""
self.api.projects.archive(self["id"])
self.data["is_archived"] = 1
def unarchive(self):
"""
Marks project as unarchived.
"""
self.api.projects.unarchive(self["id"])
self.data["is_archived"] = 0
def move(self, parent_id):
"""
Moves project to another parent.
"""
self.api.projects.move(self["id"], parent_id)
def reorder(self, child_order):
"""
Reorder project.
"""
self.api.projects.reorder([{"id": self["id"], "child_order": child_order}])
self.data["child_order"] = child_order
def share(self, email):
"""
Shares projects with a user.
"""
self.api.projects.share(self["id"], email)
def take_ownership(self):
"""
Takes ownership of a shared project.
"""
self.api.projects.take_ownership(self["id"])
class Reminder(Model):
"""
Implements a reminder.
"""
def update(self, **kwargs):
"""
Updates reminder.
"""
self.api.reminders.update(self["id"], **kwargs)
self.data.update(kwargs)
def delete(self):
"""
Deletes reminder.
"""
self.api.reminders.delete(self["id"])
self.data["is_deleted"] = 1
class Section(Model):
"""
Implements a section.
"""
def update(self, **kwargs):
"""
Updates section.
"""
self.api.sections.update(self["id"], **kwargs)
self.data.update(kwargs)
def delete(self):
"""
Deletes section.
"""
self.api.sections.delete(self["id"])
self.data["is_deleted"] = 1
def move(self, project_id):
"""
Moves section to another project.
"""
self.api.sections.move(self["id"], project_id=project_id)
self.data["project_id"] = project_id
def reorder(self, section_order):
"""
Reorder section.
"""
self.api.sections.reorder([{"id": self["id"], "section_order": section_order}])
self.data["section_order"] = section_order
def archive(self, date_archived=None):
"""
Marks section as archived.
"""
self.api.sections.archive(self["id"], date_archived=date_archived)
self.data["is_archived"] = 1
def unarchive(self):
"""
Marks section as unarchived.
"""
self.api.sections.unarchive(self["id"])
self.data["is_archived"] = 0
| Doist/todoist-python | todoist/models.py | Python | mit | 8,035 |
import time
import logging
from collections import Counter
from utils.choice_enum import ChoiceEnum
from alarms.connectors import CdbConnector
logger = logging.getLogger(__name__)
class OperationalMode(ChoiceEnum):
""" Operational Mode of a monitor point value. """
STARTUP = 0
INITIALIZATION = 1
CLOSING = 2
SHUTTEDDOWN = 3
MAINTENANCE = 4
OPERATIONAL = 5
DEGRADED = 6
UNKNOWN = 7
MALFUNCTIONING = 8
@classmethod
def options(cls):
""" Return a list of tuples with the valid options. """
return cls.get_choices()
class Value(ChoiceEnum):
""" Value of the Alarm. """
SET_CRITICAL = 4
SET_HIGH = 3
SET_MEDIUM = 2
SET_LOW = 1
CLEARED = 0
@classmethod
def options(cls):
""" Return a list of tuples with the valid options. """
return cls.get_choices()
@classmethod
def unset_options(cls):
""" Return a list of tuples with the valid options. """
return [0]
class Validity(ChoiceEnum):
""" Possible validity states of an Alarm """
RELIABLE = 1
""" The value has been provided in time and the operator can trust what the IAS shows"""
UNRELIABLE = 0
""" The values has not been produced in time either by the IAS Core or due
to network problems or any other reason."""
@classmethod
def options(cls):
""" Returns a list of tuples with the valid options. """
return cls.get_choices()
class AlarmCountManager:
""" Class to manage the counter by view. """
counter_by_view = {}
def reset_counter_by_view(self):
""" Method to clear the counter by view """
self.counter_by_view = {}
def update_counter_by_view_if_new_alarm_in_collection(self, alarm):
""" Increase counter for a new SET UNACK alarm
Note: This method is used in the AlarmCollection
"""
if alarm.is_stored():
views = alarm.views
current_views = self.counter_by_view.keys()
for view in views:
# initialize count if no key
if view not in current_views:
self.counter_by_view[view] = 0
current_views = self.counter_by_view.keys()
# update count
if alarm.value > 0:
if alarm.ack is not True:
# unacknowledged alarm in set status
self.counter_by_view[view] += 1
def update_counter_by_view_if_alarm_is_acknowledged(self, after_ack_alarm, initial_ack_state):
""" Update counter after acknowledgment action """
alarm = after_ack_alarm
if alarm.is_stored():
views = alarm.views
current_views = self.counter_by_view.keys()
for view in views:
# initialize count if no key
if view not in current_views:
self.counter_by_view[view] = 0
current_views = self.counter_by_view.keys()
if alarm.value > 0:
# set alarm
if initial_ack_state is False:
# from unack state
if alarm.ack is True:
# to ack state
self.counter_by_view[view] -= 1
else:
# cleared alarm
if initial_ack_state is False:
# from unack state
if alarm.ack is True:
# to ack state
self.counter_by_view[view] += 0
def update_counter_by_view_if_alarm_is_unacknowledged(
self, after_ack_alarm, initial_ack_state
):
""" Update counter after unacknowledgment action """
alarm = after_ack_alarm
if alarm.is_stored():
views = alarm.views
current_views = self.counter_by_view.keys()
for view in views:
# initialize count if no key
if view not in current_views:
self.counter_by_view[view] = 0
current_views = self.counter_by_view.keys()
if alarm.value > 0:
# set alarm
if initial_ack_state is True:
# from ack state
if alarm.ack is False:
# to unack state
self.counter_by_view[view] += 1
else:
# cleared alarm
if initial_ack_state is True:
# from ack state
if alarm.ack is False:
# to unack state
self.counter_by_view[view] += 0
def update_counter_by_view_if_alarm_has_value_update(
self, alarm, initial_ack_state, transition
):
""" Update counter after value (set or cleared) update """
if alarm.is_stored():
views = alarm.views
current_views = self.counter_by_view.keys()
for view in views:
# initialize count if no key
if view not in current_views:
self.counter_by_view[view] = 0
current_views = self.counter_by_view.keys()
if transition == 'clear-set':
# set alarm
if initial_ack_state is False:
                        # from unack state
if alarm.ack is False:
# to unack state
self.counter_by_view[view] += 1
if transition == 'set-clear':
# cleared alarm
if initial_ack_state is False:
                        # from unack state
if alarm.ack is False:
# to unack state
self.counter_by_view[view] -= 1
class AlarmManager(AlarmCountManager):
""" Set of auxiliary methods for the alarm model. """
class Alarm:
""" Alarm generated by some device in the observatory. """
objects = AlarmManager()
def __init__(self, core_timestamp, core_id, running_id, value=0, mode=0,
validity=0, dependencies=[], properties={}, timestamps={},
ack=False, shelved=False, state_change_timestamp=0,
description='', url='', sound='', can_shelve=False, views=[],
stored=False, value_change_timestamp=0,
value_change_transition=[0, 0]):
""" Constructor of the class,
only executed when there a new instance is created.
Receives and validates values for the attributes of the object """
self.core_timestamp = core_timestamp
""" Core timestamp of the alarm """
self.core_id = core_id
""" Core ID of the alarm """
self.running_id = running_id
""" Running ID of the alarm """
self.value = value
""" Value of the alarm """
self.mode = mode
""" Operational mode of the alarm """
self.validity = validity
""" Validity of the alarm """
        self.dependencies = dependencies # optional
        """ Children Alarms, alarms on which this Alarm depends """
        self.properties = properties # optional
        """ Properties of the core """
        self.timestamps = timestamps # optional
""" Timestamps of the core """
self.ack = ack
""" True if the alarm is acknowledged, False if not """
self.shelved = shelved
""" True if the alarm is shelved, False if not """
self.state_change_timestamp = state_change_timestamp
""" Timestamp of the last important (notified) change in the alarm """
self.description = description
""" Description of the alarm """
self.url = url
""" URL to go for documentation of the alarm """
self.sound = sound
""" Sound associated to the alarm """
self.can_shelve = can_shelve
""" Flag that defines weteher or not the alarm can be shelved """
self.views = views # optional
"""List of views for which the alarm must be considered for counting"""
self.stored = stored
""" Flag that defines weteher or not the alarm is stored """
self.value_change_timestamp = value_change_timestamp
""" Timestamp of the last change in the alarm value """
self.value_change_transition = value_change_transition
"""
Transition of the last change in the alarm value
Stored as a list with 2 elements in order: [previous_value, new_value]
"""
def __str__(self):
""" Returns a string representation of the object """
return str(self.core_id) + '=' + str(self.value)
def to_dict(self):
""" Returns a dict with all the values of the different attributes """
return {
'value': self.value,
'mode': self.mode,
'validity': self.validity,
'core_timestamp': self.core_timestamp,
'state_change_timestamp': self.state_change_timestamp,
'core_id': self.core_id,
'running_id': self.running_id,
'timestamps': self.timestamps,
'properties': self.properties,
'dependencies': self.dependencies,
'ack': self.ack,
'shelved': self.shelved,
'description': self.description,
'url': self.url,
'sound': self.sound,
'can_shelve': self.can_shelve,
'value_change_timestamp': self.value_change_timestamp,
'value_change_transition': self.value_change_transition,
}
def update(self, alarm):
"""
Updates the alarm with attributes from another given alarm if the
timestamp of the given alarm is greater than the stored alarm.
Args:
alarm (Alarm): The new alarm object
Returns:
(string, string, boolean): A tuple with the state of the update
(not-updated, updated-equal, updated-different), the
transition of the alarm value (clear-set, set-clear or None) and
            whether or not the dependencies of the alarm have been updated
"""
initial_ack_state = self.ack # counter by view variable
if alarm.core_timestamp <= self.core_timestamp:
logger.debug(
'alarm %s was not updated (tstamp is older than the last one)',
alarm.core_id)
return ('not-updated', None, False)
# Evaluate alarm state transition between set and unset states:
if self.value == 0 and alarm.value > 0:
transition = 'clear-set'
elif self.value > 0 and alarm.value == 0:
transition = 'set-clear'
else:
transition = None
if self.mode != alarm.mode or \
(self.state_change_timestamp == 0 and alarm.validity == 1):
self.state_change_timestamp = alarm.core_timestamp
if self.value != alarm.value:
self.state_change_timestamp = alarm.core_timestamp
self.value_change_timestamp = alarm.core_timestamp
self.value_change_transition = [self.value, alarm.value]
ignored_fields = ['core_timestamp', 'id', 'timestamps']
unchanged_fields = \
['ack', 'shelved', 'description', 'url', 'sound', 'can_shelve',
'state_change_timestamp', 'views', 'stored',
'value_change_timestamp', 'value_change_transition']
notify = 'updated-equal'
if Counter(self.dependencies) == Counter(alarm.dependencies):
dependencies_changed = True
else:
dependencies_changed = False
for field in alarm.__dict__.keys():
if field in unchanged_fields:
continue
old_value = getattr(self, field)
new_value = getattr(alarm, field)
if (field not in ignored_fields) and old_value != new_value:
notify = 'updated-different'
setattr(self, field, new_value)
# start block - counter by view
self.objects.update_counter_by_view_if_alarm_has_value_update(self, initial_ack_state, transition)
# end block - counter by view
return (notify, transition, dependencies_changed)
def update_validity(self):
"""
Calculate the validity of the alarm considering the current time,
the refresh rate and a previously defined delta time
"""
if self.validity == 0:
return self
validity_threshold = CdbConnector.validity_threshold
current_timestamp = int(round(time.time() * 1000))
if current_timestamp - self.core_timestamp > validity_threshold:
self.validity = 0
return self
else:
return self
def acknowledge(self, ack=True):
"""
Acknowledges the Alarm if its value is SET
Args:
ack (optional boolean): acknowledge status to update,
True by default
Returns:
boolean: the final ack status
"""
initial_ack_state = self.ack # counter variable
self.ack = ack
self.objects.update_counter_by_view_if_alarm_is_acknowledged(self, initial_ack_state)
return self.ack
def unacknowledge(self):
"""
Unacknowledge the Alarm
Returns:
boolean: the final ack status
"""
initial_ack_state = self.ack # counter variable
self.ack = False
self.objects.update_counter_by_view_if_alarm_is_unacknowledged(self, initial_ack_state)
return self.ack
def shelve(self):
"""
Shelves the Alarm
Returns:
int: 1 if it was shelved, 0 if not, -1 if shelving is not allowed
"""
if not self.can_shelve:
return -1
if self.shelved:
return 0
self.shelved = True
return 1
def unshelve(self):
"""
Unshelves the Alarm
Returns:
boolean: True if it was unshelved, False if not
"""
if not self.shelved:
return False
self.shelved = False
return True
def is_set(self):
""" Method to check is the alarm is set """
return True if self.value > 0 else False
def is_not_set(self):
""" Method to check is the alarm is not set """
return True if self.value == 0 else False
def is_stored(self):
""" Method to check is the alarm was stored in the collection """
return self.stored
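# Hedged usage sketch of the Alarm.update contract described above (ids and
# timestamps are illustrative):
#
#   old = Alarm(core_timestamp=1000, core_id='A', running_id='A@ACS', value=0)
#   new = Alarm(core_timestamp=2000, core_id='A', running_id='A@ACS', value=2)
#   notify, transition, deps_changed = old.update(new)
#   # notify == 'updated-different', transition == 'clear-set'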
class IASValue(Alarm):
""" IASValue from some device in the observatory. """
def __init__(self, core_timestamp, core_id, running_id, value, mode=0,
validity=0, timestamps={}, state_change_timestamp=0):
""" Constructor of the class,
only executed when there a new instance is created.
Receives and validates values for the attributes of the object """
Alarm.__init__(
self, core_timestamp, core_id, running_id, mode=mode,
validity=validity, timestamps=timestamps,
state_change_timestamp=state_change_timestamp
)
self.value = self.__check_value(value)
def __check_value(self, value):
""" Validates the IASValue value """
if type(value) is not str:
raise TypeError
else:
return value
def to_dict(self):
""" Returns a dict with all the values of the different attributes """
return {
'value': self.value,
'mode': self.mode,
'validity': self.validity,
'core_timestamp': self.core_timestamp,
'state_change_timestamp': self.state_change_timestamp,
'core_id': self.core_id,
'running_id': self.running_id,
'timestamps': self.timestamps
}
def update(self, ias_value):
"""
Updates the ias_value with attributes from another given ias_value if
the timestamp of the given ias_value is greater than the stored ias value.
Args:
ias_value (dict): The new ias_value object
Returns:
string: the state of the update (not-updated, updated-equal,
updated-different)
"""
if ias_value.core_timestamp <= self.core_timestamp:
logger.debug('value %s was not updated (tstamp is older than the last one)', ias_value.core_id)
return ('not-updated', None, False)
if self.mode != ias_value.mode or self.value != ias_value.value or \
(self.state_change_timestamp == 0 and ias_value.validity == 1):
self.state_change_timestamp = ias_value.core_timestamp
ignored_fields = ['core_timestamp', 'id', 'timestamps', 'properties', 'mode', 'validity']
unchanged_fields = ['ack', 'shelved', 'description', 'url', 'state_change_timestamp']
notify = 'updated-equal'
for field in ias_value.__dict__.keys():
if field in unchanged_fields:
continue
old_value = getattr(self, field)
new_value = getattr(ias_value, field)
if (field not in ignored_fields) and old_value != new_value:
notify = 'updated-different'
setattr(self, field, new_value)
return notify
| IntegratedAlarmSystem-Group/ias-webserver | alarms/models.py | Python | lgpl-3.0 | 17,630 |
# coding=utf-8
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from stevedore import dispatch
from ironic.common import driver_factory
from ironic.common import exception
from ironic.conductor import task_manager
from ironic.drivers import base as drivers_base
from ironic.tests import base
from ironic.tests.unit.db import base as db_base
from ironic.tests.unit.objects import utils as obj_utils
class FakeEp(object):
name = 'fake'
class DriverLoadTestCase(base.TestCase):
def setUp(self):
super(DriverLoadTestCase, self).setUp()
driver_factory.DriverFactory._extension_manager = None
def _fake_init_name_err(self, *args, **kwargs):
kwargs['on_load_failure_callback'](None, FakeEp, NameError('aaa'))
def _fake_init_driver_err(self, *args, **kwargs):
kwargs['on_load_failure_callback'](None, FakeEp,
exception.DriverLoadError(
driver='aaa', reason='bbb'))
def test_driver_load_error_if_driver_enabled(self):
self.config(enabled_drivers=['fake'])
with mock.patch.object(dispatch.NameDispatchExtensionManager,
'__init__', self._fake_init_driver_err):
self.assertRaises(
exception.DriverLoadError,
driver_factory.DriverFactory._init_extension_manager)
def test_wrap_in_driver_load_error_if_driver_enabled(self):
self.config(enabled_drivers=['fake'])
with mock.patch.object(dispatch.NameDispatchExtensionManager,
'__init__', self._fake_init_name_err):
self.assertRaises(
exception.DriverLoadError,
driver_factory.DriverFactory._init_extension_manager)
@mock.patch.object(dispatch.NameDispatchExtensionManager, 'names',
autospec=True)
def test_no_driver_load_error_if_driver_disabled(self, mock_em):
self.config(enabled_drivers=[])
with mock.patch.object(dispatch.NameDispatchExtensionManager,
'__init__', self._fake_init_driver_err):
driver_factory.DriverFactory._init_extension_manager()
self.assertEqual(2, mock_em.call_count)
@mock.patch.object(driver_factory.LOG, 'warning', autospec=True)
def test_driver_duplicated_entry(self, mock_log):
self.config(enabled_drivers=['fake', 'fake'])
driver_factory.DriverFactory._init_extension_manager()
self.assertEqual(
['fake'], driver_factory.DriverFactory._extension_manager.names())
self.assertTrue(mock_log.called)
class GetDriverTestCase(base.TestCase):
def setUp(self):
super(GetDriverTestCase, self).setUp()
driver_factory.DriverFactory._extension_manager = None
self.config(enabled_drivers=['fake'])
def test_get_driver_known(self):
driver = driver_factory.get_driver('fake')
self.assertIsInstance(driver, drivers_base.BaseDriver)
def test_get_driver_unknown(self):
self.assertRaises(exception.DriverNotFound,
driver_factory.get_driver, 'unknown_driver')
class NetworkInterfaceFactoryTestCase(db_base.DbTestCase):
def setUp(self):
super(NetworkInterfaceFactoryTestCase, self).setUp()
driver_factory.DriverFactory._extension_manager = None
driver_factory.NetworkInterfaceFactory._extension_manager = None
self.config(enabled_drivers=['fake'])
def test_build_driver_for_task(self):
# flat and noop network interfaces are enabled in base test case
factory = driver_factory.NetworkInterfaceFactory
node = obj_utils.create_test_node(self.context, driver='fake',
network_interface='flat')
with task_manager.acquire(self.context, node.id) as task:
extension_mgr = factory._extension_manager
self.assertIn('flat', extension_mgr)
self.assertIn('noop', extension_mgr)
self.assertEqual(extension_mgr['flat'].obj, task.driver.network)
self.assertEqual('ironic.hardware.interfaces.network',
factory._entrypoint_name)
self.assertEqual(['flat', 'neutron', 'noop'],
sorted(factory._enabled_driver_list))
def test_build_driver_for_task_default_is_none(self):
# flat and noop network interfaces are enabled in base test case
factory = driver_factory.NetworkInterfaceFactory
self.config(dhcp_provider='none', group='dhcp')
node = obj_utils.create_test_node(self.context, driver='fake')
with task_manager.acquire(self.context, node.id) as task:
extension_mgr = factory._extension_manager
self.assertIn('flat', extension_mgr)
self.assertIn('noop', extension_mgr)
self.assertEqual(extension_mgr['noop'].obj, task.driver.network)
def test_build_driver_for_task_default_network_interface_is_set(self):
# flat and noop network interfaces are enabled in base test case
factory = driver_factory.NetworkInterfaceFactory
self.config(dhcp_provider='none', group='dhcp')
self.config(default_network_interface='flat')
node = obj_utils.create_test_node(self.context, driver='fake')
with task_manager.acquire(self.context, node.id) as task:
extension_mgr = factory._extension_manager
self.assertIn('flat', extension_mgr)
self.assertIn('noop', extension_mgr)
self.assertEqual(extension_mgr['flat'].obj, task.driver.network)
def test_build_driver_for_task_default_is_flat(self):
# flat and noop network interfaces are enabled in base test case
factory = driver_factory.NetworkInterfaceFactory
node = obj_utils.create_test_node(self.context, driver='fake')
with task_manager.acquire(self.context, node.id) as task:
extension_mgr = factory._extension_manager
self.assertIn('flat', extension_mgr)
self.assertIn('noop', extension_mgr)
self.assertEqual(extension_mgr['flat'].obj, task.driver.network)
def test_build_driver_for_task_unknown_network_interface(self):
node = obj_utils.create_test_node(self.context, driver='fake',
network_interface='meow')
self.assertRaises(exception.DriverNotFoundInEntrypoint,
task_manager.acquire, self.context, node.id)
class NewDriverFactory(driver_factory.BaseDriverFactory):
_entrypoint_name = 'woof'
class NewFactoryTestCase(db_base.DbTestCase):
def test_new_driver_factory_unknown_entrypoint(self):
factory = NewDriverFactory()
self.assertEqual('woof', factory._entrypoint_name)
self.assertEqual([], factory._enabled_driver_list)
| bacaldwell/ironic | ironic/tests/unit/common/test_driver_factory.py | Python | apache-2.0 | 7,434 |
# -*- coding: utf8 -*-
"""
eventlogging unit tests
~~~~~~~~~~~~~~~~~~~~~~~
This module contains test fixtures.
"""
from __future__ import unicode_literals
import copy
import io
import signal
import eventlogging
import sqlalchemy
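# An SCID is a (schema name, revision) pair; it is the key used for
# eventlogging's schema cache below.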
TEST_SCHEMA_SCID = ('TestSchema', 123)
_schemas = {
eventlogging.schema.CAPSULE_SCID: {
'properties': {
'clientIp': {
'type': 'string'
},
'event': {
'type': 'object',
'required': True
},
'isTruncated': {
'type': 'boolean'
},
'clientValidated': {
'type': 'boolean'
},
'wiki': {
'type': 'string',
'required': True
},
'webHost': {
'type': 'string'
},
'revision': {
'type': 'integer',
'required': True
},
'schema': {
'type': 'string',
'required': True
},
'recvFrom': {
'type': 'string',
'required': True
},
'seqId': {
'type': 'integer'
},
'timestamp': {
'type': 'number',
'required': True,
'format': 'utc-millisec'
},
'uuid': {
'type': 'string'
}
},
'additionalProperties': False
},
TEST_SCHEMA_SCID: {
'properties': {
'value': {
'type': 'string',
'required': True
},
'nested': {
'type': 'object',
'properties': {
'deeplyNested': {
'type': 'object',
'properties': {
'pi': {
'type': 'number',
}
}
}
}
}
}
}
}
_event = {
'event': {
'value': '☆ 彡',
'nested': {
'deeplyNested': {
'pi': 3.14159
}
}
},
'seqId': 12345,
'clientIp': '127.0.0.1',
'timestamp': 1358791834912,
'isTruncated': False,
'wiki': 'enwiki',
'webHost': 'en.m.wikipedia.org',
'recvFrom': 'fenari',
'clientValidated': True,
'revision': 123,
'schema': 'TestSchema',
'uuid': 'babb66f34a0a5de3be0c6513088be33e'
}
class HttpRequestAttempted(RuntimeError):
"""Raised on attempt to retrieve a schema via HTTP."""
pass
# We'll be replacing :func:`eventlogging.schemas.http_get_schema` with a
# mock object, so set aside an unpatched copy so we can clean up.
orig_http_get_schema = eventlogging.schema.http_get_schema
def mock_http_get_schema(scid):
"""Mock of :func:`eventlogging.schemas.http_get_schema`
Used to detect when :func:`eventlogging.schemas.get_schema`
delegates to HTTP retrieval.
"""
raise HttpRequestAttempted('Attempted HTTP fetch: %s' % (scid,))
class SchemaTestMixin(object):
"""A :class:`unittest.TestCase` mix-in for test cases that depend on
schema look-ups."""
def setUp(self):
"""Stub `http_get_schema` and pre-fill schema cache."""
super(SchemaTestMixin, self).setUp()
self.event = copy.deepcopy(_event)
eventlogging.schema.schema_cache = copy.deepcopy(_schemas)
eventlogging.schema.http_get_schema = mock_http_get_schema
def tearDown(self):
"""Clear schema cache and restore stubbed `http_get_schema`."""
eventlogging.schema.schema_cache.clear()
eventlogging.schema.http_get_schema = orig_http_get_schema
def assertIsValid(self, event, msg=None):
"""Assert that capsule 'event' object validates."""
return self.assertIsNone(eventlogging.validate(event), msg)
def assertIsInvalid(self, event, msg=None):
"""Assert that capsule 'event' object fails validation."""
with self.assertRaises(eventlogging.ValidationError, msg):
eventlogging.validate(event)
class DatabaseTestMixin(SchemaTestMixin):
"""A :class:`unittest.TestCase` mix-in for database testing using an
in-memory sqlite database."""
def setUp(self):
"""Configure :class:`sqlalchemy.engine.Engine` and
:class:`sqlalchemy.schema.MetaData` objects."""
super(DatabaseTestMixin, self).setUp()
self.engine = sqlalchemy.create_engine('sqlite:///:memory:', echo=True)
self.meta = sqlalchemy.MetaData(bind=self.engine)
class HttpSchemaTestMixin(object):
"""A :class:`unittest.TestCase` mix-in for stubbing HTTP responses."""
http_resp = b''
def setUp(self):
"""Replace `urlopen` with stub."""
super(HttpSchemaTestMixin, self).setUp()
self.orig_urlopen = eventlogging.schema.urlopen
eventlogging.schema.urlopen = self.urlopen_stub
eventlogging.schema.schema_cache.clear()
def tearDown(self):
"""Restore original `urlopen`."""
eventlogging.schema.urlopen = self.orig_urlopen
def urlopen_stub(self, url):
"""Test stub for `urlopen`."""
return io.BytesIO(self.http_resp)
class TimeoutTestMixin(object):
"""A :class:`unittest.TestCase` mix-in that imposes a time-limit on
tests. Tests exceeding the limit are failed."""
#: Max time (in seconds) to allow tests to run before failing.
max_time = 2
def setUp(self):
"""Set the alarm."""
super(TimeoutTestMixin, self).setUp()
signal.signal(signal.SIGALRM, self.timeOut)
signal.alarm(self.max_time)
def tearDown(self):
"""Disable the alarm."""
signal.alarm(0)
def timeOut(self, signum, frame):
"""SIGALRM handler. Fails test if triggered."""
self.fail('Timed out.')
| legoktm/wikihow-src | extensions/EventLogging/server/tests/fixtures.py | Python | gpl-2.0 | 5,994 |
from distutils.core import setup
setup(
name='mcloud_iside',
version='0.1dev',
url='https://github.com/MutakamwoyoCloud/MCloud',
description='part of the MCloud service',
packages=['mcloud_iside',],
license='GNU General Public License v3 or later (GPLv3+)',
long_description=open('README.md').read(),
)
| MutakamwoyoCloud/MCloud | inet_side/setup.py | Python | agpl-3.0 | 332 |
import nose
from nose.tools import *
from unittest import TestCase
from datetime import datetime, timedelta
from repo.date_iterator import DateIterator
class DateIteratorTests(TestCase):
def test_date_iterator_returns_self_on_iter(self):
d = DateIterator(datetime.now(), datetime.now())
eq_(d, d.__iter__())
def test_date_iterator_gives_first_date_as_start_date(self):
start = datetime(2011, 3, 3)
end = datetime(2011, 3, 4)
d = DateIterator(start, end)
first = d.next()
eq_(start, first)
def test_date_iterator_gives_next_date_30_days_by_default(self):
start = datetime(2011, 3, 3)
next = datetime(2011, 4, 2)
end = datetime(2011, 4, 3)
d = DateIterator(start, end)
first = d.next()
second = d.next()
eq_(next, second)
def test_date_iterator_gives_next_date_7_days(self):
start = datetime(2011, 3, 3)
next = datetime(2011, 3, 10)
end = datetime(2011, 3, 14)
d = DateIterator(start, end, delta=timedelta(days=7))
first = d.next()
second = d.next()
eq_(next, second)
@raises(StopIteration)
def test_date_iterator_raises_stop_exception(self):
start = datetime(2011, 3, 3)
end = datetime(2011, 4, 1)
d = DateIterator(start, end)
first = d.next()
second = d.next()
| markdrago/caboose | src/test/repo/date_iterator_tests.py | Python | mit | 1,409 |
# Copyright (c) 2015, Laurent Duchesne <l@urent.org>
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import re
import time
import string
import random
from keystoneauth1.session import Session
from keystoneclient.exceptions import AuthorizationFailure
def random_str_generator(size=6, chars=string.ascii_uppercase + string.digits):
"""Random string generator.
Written by Ignacio Vazquez-Abrams.
Source: http://stackoverflow.com/a/2257449/4871858
"""
return ''.join(random.choice(chars) for _ in range(size))
class HubiCAuthenticator:
"""Provide a HubiC authentication interface compatible with the OpenStack SDK.
This is necessary as HubiC doesn't provide OpenStack-compatible identity
services, but still provide a compatible object store (v1).
References:
- https://hubic.com/en/
- https://api.hubic.com/
:param string client_id: The HubiC client identifier.
:param string client_secret: The HubiC client secret.
:param string email: The account email address.
:param string password: The account password.
:param string redirect_uri: The registered redirect URI (optional).
"""
def __init__(self, client_id, client_secret, email, password,
redirect_uri="http://localhost/", **kwargs):
self.email = email
self.password = password
self.client_id = client_id
self.client_secret = client_secret
self.redirect_uri = redirect_uri
self.auth_token = None
self.endpoint = None
self.access_token = None
self.refresh_token = None
def get_headers(self, session, **kwargs):
"""Get the authentication header.
If the current session has not been authenticated, this will trigger a
new authentication to the HubiC OAuth service.
:param keystoneclient.Session session: The session object to use for
queries.
:raises keystoneclient.exceptions.AuthorizationFailure: if something
goes wrong.
:returns: The headers used for authenticating requests.
:rtype: dict
"""
if self.auth_token is None:
try:
self._refresh_tokens(session)
self._fetch_credentials(session)
except:
raise AuthorizationFailure()
return {
'X-Auth-Token': self.auth_token,
}
def get_endpoint(self, session, **kwargs):
"""Get the HubiC storage endpoint uri.
If the current session has not been authenticated, this will trigger a
new authentication to the HubiC OAuth service.
:param keystoneclient.Session session: The session object to use for
queries.
:raises keystoneclient.exceptions.AuthorizationFailure: if something
goes wrong.
:returns: The uri to use for object-storage v1 requests.
:rtype: string
"""
if self.endpoint is None:
try:
self._refresh_tokens(session)
self._fetch_credentials(session)
except:
raise AuthorizationFailure()
return self.endpoint
def get_connection_params(self, session, **kwargs):
"""Connection parameters used for all requests.
:returns: An empty dictionary.
:rtype: dict
"""
return {}
def invalidate(self):
"""Invalidate the current authenticator.
Once this has been called, any call to get_endpoint or get_headers will
trigger a new authentication to the HubiC OAuth service.
"""
self.endpoint = None
self.auth_token = None
def _refresh_tokens(self, session):
"""Request an access and a refresh token from the HubiC API.
Those tokens are mandatory and will be used for subsequent file
operations. They are not returned and will be stored internaly.
:param keystoneclient.Session session: The session object to use for
queries.
:raises keystoneclient.exceptions.AuthorizationFailure: if something
goes wrong.
"""
params = {
'client_id': self.client_id,
'client_secret': self.client_secret,
}
payload = {
'client_id': self.client_id,
'client_secret': self.client_secret,
}
if self.refresh_token is None:
# if we don't have a refresh token, we need an authorization token
# first
payload['grant_type'] = 'authorization_code'
payload['code'] = self._get_authorization_token(session)
payload['redirect_uri'] = self.redirect_uri
else:
# when we have a refresh token, we DON'T need an authorization
# token to request a new one
payload['grant_type'] = 'refresh_token'
payload['refresh_token'] = self.refresh_token
r = session.post("https://api.hubic.com/oauth/token",
params=params,
data=payload,
authenticated=False)
if r.status_code != 200 and self.refresh_token is not None:
# if we had a refresh token, try again without it
# (might be expired)
payload['grant_type'] = 'authorization_code'
payload['code'] = self._get_authorization_token(session)
payload['redirect_uri'] = self.redirect_uri
r = session.post("https://api.hubic.com/oauth/token",
params=params,
data=payload,
authenticated=False)
if r.status_code != 200:
raise AuthorizationFailure()
response = r.json()
if 'error' in response:
raise AuthorizationFailure()
self.access_token = response['access_token']
# refresh_token entry will not be there is we are just refreshing an
# old token.
if 'refresh_token' in response:
self.refresh_token = response['refresh_token']
def _fetch_credentials(self, session):
"""Fetch the endpoint URI and authorization token for this session.
Those two information are the basis for all future calls to the Swift
(OpenStack) API for the storage container.
:param keystoneclient.Session session: The session object to use for
queries.
:raises keystoneclient.exceptions.AuthorizationFailure: if something
goes wrong.
"""
headers = {
'Authorization': 'Bearer {0}'.format(self.access_token),
}
r = session.get("https://api.hubic.com/1.0/account/credentials",
headers=headers,
authenticated=False)
response = r.json()
# if we get an error here, the OpenStack SDK will take care to try
# again for us.
if 'error' in response:
raise AuthorizationFailure()
self.endpoint = response['endpoint']
self.auth_token = response['token']
def _get_authorization_token(self, session):
"""Load the HubiC form, submit it and return an authorization token.
This will load the HTML form to accept if the application can access
the user account and submit the form using the user's credentials.
:raises keystoneclient.exceptions.AuthorizationFailure: if something
goes wrong.
:returns: The (short lived) authorization code to use to get the
refresh token.
:rtype: string
"""
request_scope = 'account.r,credentials.r'
params = {
'client_id': self.client_id,
'redirect_uri': self.redirect_uri,
'response_type': 'code',
'scope': request_scope,
'state': random_str_generator(),
}
r = session.get("https://api.hubic.com/oauth/auth",
params=params,
authenticated=False)
if r.status_code != 200:
raise AuthorizationFailure()
oauth_match = re.search(r'name="oauth" value="([0-9]+)"', r.text)
if oauth_match is None:
raise AuthorizationFailure()
oauth_value = oauth_match.group(1)
        if oauth_value is None:
            raise AuthorizationFailure()
payload = {
'oauth': oauth_value,
'action': 'accepted',
'account': 'r',
'credentials': 'r',
'login': self.email,
'user_pwd': self.password,
}
# this is necessary because the API will return a 509 error
# (bandwidth exceeded) if we don't wait a little
time.sleep(2)
headers = {
'Referer': r.url,
'Content-Type': 'application/x-www-form-urlencoded',
}
r = session.post("https://api.hubic.com/oauth/auth",
headers=headers,
data=payload,
redirect=False,
authenticated=False)
if r.status_code != 302:
raise AuthorizationFailure()
# location looks like this, and we need the code:
# http://localhost/?code=...&scope=account.r&state=randomstring
location_info = dict(
map(lambda item: item.split('='),
r.headers['location'].split('?')[1].split('&')
)
)
assert (
'code' in location_info and
'scope' in location_info and location_info['scope'] == request_scope and
'state' in location_info and location_info['state'] == params['state']
)
return location_info['code']
if __name__ == "__main__":
configuration = {
'client_id': '',
'client_secret': '',
'email': '',
'password': '',
}
authenticator = HubiCAuthenticator(**configuration)
from openstack import connection
conn = connection.Connection(
session=Session(auth=authenticator),
authenticator=authenticator,
)
# just list the containers to see if this works
print(list(conn.object_store.containers()))
| lduchesne/python-openstacksdk-hubic | hubic/hubic.py | Python | apache-2.0 | 11,237 |
#!/usr/bin/python3
#
# The Qubes OS Project, http://www.qubes-os.org
#
# Copyright (C) 2021 Norbert Kamiński <norbert.kaminski@3mdeb.com>
#
# SPDX-License-Identifier: LGPL-2.1+
#
import sys
import subprocess
import os
from fwupd_common_vm import FwupdVmCommon
FWUPD_VM_DIR = "/home/user/.cache/fwupd"
FWUPD_VM_UPDATES_DIR = os.path.join(FWUPD_VM_DIR, "updates")
FWUPD_VM_METADATA_DIR = os.path.join(FWUPD_VM_DIR, "metadata")
FWUPD_DOWNLOAD_PREFIX = "https://fwupd.org/downloads/"
METADATA_URL = "https://fwupd.org/downloads/firmware.xml.gz"
METADATA_URL_JCAT = "https://fwupd.org/downloads/firmware.xml.gz.jcat"
class DownloadData(FwupdVmCommon):
def _decrypt_update_url(self, url):
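        """Undo the escaping applied to the update URL by the caller.
        '&', '|' and '#' arrive as --and--, --or-- and --hash--; any URL that
        needed decoding (or contains '%20') is downloaded as 'untrusted.cab'.
        """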
self.dec_url = url
if "--and--" in url:
self.dec_url = self.dec_url.replace("--and--", "&")
self.arch_name = "untrusted.cab"
if "--or--" in url:
self.dec_url = self.dec_url.replace("--or--", "|")
self.arch_name = "untrusted.cab"
if "--hash--" in url:
self.dec_url = self.dec_url.replace("--hash--", "#")
self.arch_name = "untrusted.cab"
if "%20" in url:
self.arch_name = "untrusted.cab"
def _download_metadata_file(self):
"""Download metadata file"""
if self.custom_url is None:
metadata_url = METADATA_URL
else:
metadata_url = self.custom_url
cmd_metadata = ["wget", "-P", FWUPD_VM_METADATA_DIR, metadata_url]
p = subprocess.Popen(cmd_metadata)
p.wait()
if p.returncode != 0:
raise Exception("fwudp-qubes: Downloading metadata file failed")
if not os.path.exists(self.metadata_file):
raise FileNotFoundError(
"fwudp-qubes: Downloaded metadata file does not exist"
)
def _download_metadata_jcat(self):
"""Download metadata jcat signature"""
if self.custom_url is None:
metadata_url = METADATA_URL
else:
metadata_url = self.custom_url
cmd_metadata = ["wget", "-P", FWUPD_VM_METADATA_DIR, f"{metadata_url}.jcat"]
p = subprocess.Popen(cmd_metadata)
p.wait()
if p.returncode != 0:
raise Exception("fwudp-qubes: Downloading metadata file failed")
if not os.path.exists(f"{self.metadata_file}.jcat"):
raise FileNotFoundError(
"fwudp-qubes: Downloaded metadata file does not exist"
)
def download_metadata(self, url=None):
"""Downloads default metadata and its signatures"""
if url is not None:
self.custom_url = url
custom_metadata_name = url.replace(FWUPD_DOWNLOAD_PREFIX, "")
self.metadata_file = os.path.join(
FWUPD_VM_METADATA_DIR, custom_metadata_name
)
else:
self.custom_url = None
self.metadata_file = os.path.join(FWUPD_VM_METADATA_DIR, "firmware.xml.gz")
self.validate_vm_dirs()
self._download_metadata_file()
self._download_metadata_jcat()
def download_updates(self, url, sha):
"""
        Downloads update from the given url
        Keyword arguments:
        url - url address of the update
        sha - expected SHA checksum of the downloaded archive
"""
self.validate_vm_dirs()
self.arch_name = url.replace("https://fwupd.org/downloads/", "")
self._decrypt_update_url(url)
update_path = os.path.join(FWUPD_VM_UPDATES_DIR, self.arch_name)
cmd_update = ["wget", "-O", update_path, self.dec_url]
p = subprocess.Popen(cmd_update)
p.wait()
if p.returncode != 0:
raise Exception("fwudp-qubes: Downloading update file failed")
if not os.path.exists(update_path):
raise FileNotFoundError(
"fwudp-qubes: Downloaded update file does not exist"
)
self.check_shasum(update_path, sha)
print("Update file downloaded successfully")
def main():
url = None
sha = None
dn = DownloadData()
for arg in sys.argv:
if "--url=" in arg:
url = arg.replace("--url=", "")
if "--sha=" in arg:
sha = arg.replace("--sha=", "")
if "--metadata" in sys.argv:
dn.download_metadata(url=url)
elif url and sha:
dn.download_updates(url, sha)
else:
raise Exception("Invalid command!!!")
if __name__ == "__main__":
main()
| hughsie/fwupd | contrib/qubes/src/vms/fwupd_download_updates.py | Python | lgpl-2.1 | 4,434 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# init.py file is part of slpkg.
# Copyright 2014-2017 Dimitris Zlatanidis <d.zlatanidis@gmail.com>
# All rights reserved.
# Slpkg is a user-friendly package manager for Slackware installations
# https://github.com/dslackw/slpkg
# Slpkg is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import sys
import shutil
from slpkg.repositories import Repo
from slpkg.file_size import FileSize
from slpkg.downloader import Download
from slpkg.__metadata__ import MetaData as _meta_
from slpkg.slack.mirrors import mirrors
from slpkg.slack.slack_version import slack_ver
class Initialization(object):
"""Slpkg initialization start all from here. Create local
package lists and update or upgrade these.
"""
def __init__(self, check):
self.check = check
self.meta = _meta_
self.def_repos_dict = Repo().default_repository()
self.conf_path = self.meta.conf_path
self.log_path = self.meta.log_path
self.lib_path = self.meta.lib_path
self.tmp_path = self.meta.tmp_path
self.build_path = self.meta.build_path
self._SOURCES = self.meta.SBo_SOURCES
self.slpkg_tmp_packages = self.meta.slpkg_tmp_packages
self.slpkg_tmp_patches = self.meta.slpkg_tmp_patches
if not os.path.exists(self.conf_path):
os.mkdir(self.conf_path)
if not os.path.exists(self.log_path):
os.mkdir(self.log_path)
if not os.path.exists(self.lib_path):
os.mkdir(self.lib_path)
if not os.path.exists(self.tmp_path):
os.mkdir(self.tmp_path)
if not os.path.exists(self.build_path):
os.makedirs(self.build_path)
if not os.path.exists(self._SOURCES):
os.makedirs(self._SOURCES)
if not os.path.exists(self.slpkg_tmp_packages):
os.makedirs(self.slpkg_tmp_packages)
if not os.path.exists(self.slpkg_tmp_patches):
os.makedirs(self.slpkg_tmp_patches)
def custom(self, name):
"""Creating user select repository local library
"""
repo = Repo().custom_repository()[name]
log = self.log_path + name + "/"
lib = self.lib_path + "{0}_repo/".format(name)
repo_name = log[:-1].split("/")[-1]
lib_file = "PACKAGES.TXT"
# lst_file = ""
md5_file = "CHECKSUMS.md5"
log_file = "ChangeLog.txt"
if not os.path.exists(log):
os.mkdir(log)
if not os.path.exists(lib):
os.mkdir(lib)
PACKAGES_TXT = "{0}{1}".format(repo, lib_file)
FILELIST_TXT = ""
CHECKSUMS_MD5 = "{0}{1}".format(repo, md5_file)
ChangeLog_txt = "{0}{1}".format(repo, log_file)
if self.check:
return self.checks_logs(log, ChangeLog_txt)
self.down(lib, PACKAGES_TXT, repo_name)
self.down(lib, CHECKSUMS_MD5, repo_name)
self.down(log, ChangeLog_txt, repo_name)
self.remote(log, ChangeLog_txt, lib, PACKAGES_TXT, CHECKSUMS_MD5,
FILELIST_TXT, repo_name)
def slack(self):
"""Creating slack local libraries
"""
log = self.log_path + "slack/"
lib = self.lib_path + "slack_repo/"
repo_name = log[:-1].split("/")[-1]
lib_file = "PACKAGES.TXT"
# lst_file = ""
md5_file = "CHECKSUMS.md5"
log_file = "ChangeLog.txt"
if not os.path.exists(log):
os.mkdir(log)
if not os.path.exists(lib):
os.mkdir(lib)
dirs = ["core/", "extra/", "pasture/"]
for d in dirs:
if not os.path.exists(lib + d):
os.mkdir(lib + d)
PACKAGES_TXT = mirrors(lib_file, "")
FILELIST_TXT = ""
CHECKSUMS_MD5 = mirrors(md5_file, "")
self.EXTRA = mirrors(lib_file, dirs[1])
self.EXT_CHECKSUMS = mirrors(md5_file, dirs[1])
self.PASTURE = mirrors(lib_file, dirs[2])
self.PAS_CHECKSUMS = mirrors(md5_file, dirs[2])
ChangeLog_txt = mirrors(log_file, "")
if self.check:
return self.checks_logs(log, ChangeLog_txt)
self.down(lib + dirs[0], PACKAGES_TXT, repo_name)
self.down(lib + dirs[0], CHECKSUMS_MD5, repo_name)
self.down(lib + dirs[1], self.EXTRA, repo_name)
self.down(lib + dirs[1], self.EXT_CHECKSUMS, repo_name)
if slack_ver() != "14.0": # no pasture/ folder for 14.0 version
self.down(lib + dirs[2], self.PASTURE, repo_name)
self.down(lib + dirs[2], self.PAS_CHECKSUMS, repo_name)
self.down(log, ChangeLog_txt, repo_name)
self.remote(log, ChangeLog_txt, lib, PACKAGES_TXT, CHECKSUMS_MD5,
FILELIST_TXT, repo_name)
self.merge(lib, "PACKAGES.TXT", ["core/PACKAGES.TXT",
"extra/PACKAGES.TXT",
"pasture/PACKAGES.TXT"])
self.merge(lib, "CHECKSUMS.md5", ["core/CHECKSUMS.md5",
"extra/CHECKSUMS.md5",
"pasture/CHECKSUMS.md5"])
def sbo(self):
"""Creating sbo local library
"""
repo = self.def_repos_dict["sbo"]
log = self.log_path + "sbo/"
lib = self.lib_path + "sbo_repo/"
repo_name = log[:-1].split("/")[-1]
lib_file = "SLACKBUILDS.TXT"
# lst_file = ""
# md5_file = ""
log_file = "ChangeLog.txt"
if not os.path.exists(log):
os.mkdir(log)
if not os.path.exists(lib):
os.mkdir(lib)
SLACKBUILDS_TXT = "{0}{1}/{2}".format(repo, slack_ver(), lib_file)
FILELIST_TXT = ""
CHECKSUMS_MD5 = ""
ChangeLog_txt = "{0}{1}/{2}".format(repo, slack_ver(), log_file)
if self.check:
return self.checks_logs(log, ChangeLog_txt)
self.down(lib, SLACKBUILDS_TXT, repo_name)
self.down(log, ChangeLog_txt, repo_name)
self.remote(log, ChangeLog_txt, lib, SLACKBUILDS_TXT, CHECKSUMS_MD5,
FILELIST_TXT, repo_name)
def rlw(self):
"""Creating rlw local library
"""
repo = self.def_repos_dict["rlw"]
log = self.log_path + "rlw/"
lib = self.lib_path + "rlw_repo/"
repo_name = log[:-1].split("/")[-1]
lib_file = "PACKAGES.TXT"
# lst_file = ""
md5_file = "CHECKSUMS.md5"
log_file = "ChangeLog.txt"
if not os.path.exists(log):
os.mkdir(log)
if not os.path.exists(lib):
os.mkdir(lib)
PACKAGES_TXT = "{0}{1}/{2}".format(repo, slack_ver(), lib_file)
FILELIST_TXT = ""
CHECKSUMS_MD5 = "{0}{1}/{2}".format(repo, slack_ver(), md5_file)
ChangeLog_txt = "{0}{1}/{2}".format(repo, slack_ver(), log_file)
if self.check:
return self.checks_logs(log, ChangeLog_txt)
self.down(lib, PACKAGES_TXT, repo_name)
self.down(lib, CHECKSUMS_MD5, repo_name)
self.down(log, ChangeLog_txt, repo_name)
self.remote(log, ChangeLog_txt, lib, PACKAGES_TXT, CHECKSUMS_MD5,
FILELIST_TXT, repo_name)
def alien(self):
"""Creating alien local library
"""
ar = "x86"
ver = slack_ver()
arch = self.meta.arch
repo = self.def_repos_dict["alien"]
log = self.log_path + "alien/"
lib = self.lib_path + "alien_repo/"
repo_name = log[:-1].split("/")[-1]
lib_file = "PACKAGES.TXT"
# lst_file = ""
md5_file = "CHECKSUMS.md5"
log_file = "ChangeLog.txt"
if not os.path.exists(log):
os.mkdir(log)
if not os.path.exists(lib):
os.mkdir(lib)
if arch == "x86_64":
ar = arch
if self.meta.slack_rel == "current":
ver = self.meta.slack_rel
PACKAGES_TXT = "{0}/{1}/{2}/{3}".format(repo, ver, ar, lib_file)
FILELIST_TXT = ""
CHECKSUMS_MD5 = "{0}/{1}/{2}/{3}".format(repo, ver, ar, md5_file)
ChangeLog_txt = "{0}{1}".format(repo, log_file)
if self.check:
return self.checks_logs(log, ChangeLog_txt)
self.down(lib, PACKAGES_TXT, repo_name)
self.down(lib, CHECKSUMS_MD5, repo_name)
self.down(log, ChangeLog_txt, repo_name)
self.remote(log, ChangeLog_txt, lib, PACKAGES_TXT, CHECKSUMS_MD5,
FILELIST_TXT, repo_name)
def slacky(self):
"""Creating slacky.eu local library
"""
ar = ""
arch = self.meta.arch
repo = self.def_repos_dict["slacky"]
log = self.log_path + "slacky/"
lib = self.lib_path + "slacky_repo/"
repo_name = log[:-1].split("/")[-1]
lib_file = "PACKAGES.TXT"
# lst_file = ""
md5_file = "CHECKSUMS.md5"
log_file = "ChangeLog.txt"
if not os.path.exists(log):
os.mkdir(log)
if not os.path.exists(lib):
os.mkdir(lib)
if arch == "x86_64":
ar = "64"
PACKAGES_TXT = "{0}slackware{1}-{2}/{3}".format(repo, ar, slack_ver(),
lib_file)
FILELIST_TXT = ""
CHECKSUMS_MD5 = "{0}slackware{1}-{2}/{3}".format(repo, ar, slack_ver(),
md5_file)
ChangeLog_txt = "{0}slackware{1}-{2}/{3}".format(repo, ar, slack_ver(),
log_file)
if self.check:
return self.checks_logs(log, ChangeLog_txt)
self.down(lib, PACKAGES_TXT, repo_name)
self.down(lib, CHECKSUMS_MD5, repo_name)
self.down(log, ChangeLog_txt, repo_name)
self.remote(log, ChangeLog_txt, lib, PACKAGES_TXT, CHECKSUMS_MD5,
FILELIST_TXT, repo_name)
def conrad(self):
"""Creating slackers local library
"""
repo = self.def_repos_dict["conrad"]
log = self.log_path + "conrad/"
lib = self.lib_path + "conrad_repo/"
repo_name = log[:-1].split("/")[-1]
lib_file = "PACKAGES.TXT"
# lst_file = ""
md5_file = "CHECKSUMS.md5"
log_file = "ChangeLog.txt"
if not os.path.exists(log):
os.mkdir(log)
if not os.path.exists(lib):
os.mkdir(lib)
PACKAGES_TXT = "{0}{1}".format(repo, lib_file)
FILELIST_TXT = ""
CHECKSUMS_MD5 = "{0}{1}".format(repo, md5_file)
ChangeLog_txt = "{0}{1}".format(repo, log_file)
if self.check:
return self.checks_logs(log, ChangeLog_txt)
self.down(lib, PACKAGES_TXT, repo_name)
self.down(lib, CHECKSUMS_MD5, repo_name)
self.down(log, ChangeLog_txt, repo_name)
self.remote(log, ChangeLog_txt, lib, PACKAGES_TXT, CHECKSUMS_MD5,
FILELIST_TXT, repo_name)
def slonly(self):
"""Creating slackers local library
"""
ver = slack_ver()
ar = "{0}-x86".format(ver)
arch = self.meta.arch
repo = self.def_repos_dict["slonly"]
log = self.log_path + "slonly/"
lib = self.lib_path + "slonly_repo/"
repo_name = log[:-1].split("/")[-1]
lib_file = "PACKAGES.TXT"
# lst_file = ""
md5_file = "CHECKSUMS.md5"
log_file = "ChangeLog.txt"
if not os.path.exists(log):
os.mkdir(log)
if not os.path.exists(lib):
os.mkdir(lib)
if arch == "x86_64":
ar = "{0}-x86_64".format(ver)
if self.meta.slack_rel == "current":
ar = "{0}-x86".format(self.meta.slack_rel)
if self.meta.slack_rel == "current" and arch == "x86_64":
ar = "{0}-x86_64".format(self.meta.slack_rel)
PACKAGES_TXT = "{0}{1}/{2}".format(repo, ar, lib_file)
FILELIST_TXT = ""
CHECKSUMS_MD5 = "{0}{1}/{2}".format(repo, ar, md5_file)
ChangeLog_txt = "{0}{1}/{2}".format(repo, ar, log_file)
if self.check:
return self.checks_logs(log, ChangeLog_txt)
self.down(lib, PACKAGES_TXT, repo_name)
self.down(lib, CHECKSUMS_MD5, repo_name)
self.down(log, ChangeLog_txt, repo_name)
self.remote(log, ChangeLog_txt, lib, PACKAGES_TXT, CHECKSUMS_MD5,
FILELIST_TXT, repo_name)
def ktown(self):
"""Creating alien ktown local library
"""
repo = self.def_repos_dict["ktown"]
log = self.log_path + "ktown/"
lib = self.lib_path + "ktown_repo/"
repo_name = log[:-1].split("/")[-1]
lib_file = "PACKAGES.TXT"
# lst_file = ""
md5_file = "CHECKSUMS.md5"
log_file = "ChangeLog.txt"
if not os.path.exists(log):
os.mkdir(log)
if not os.path.exists(lib):
os.mkdir(lib)
PACKAGES_TXT = "{0}{1}".format(repo, lib_file)
FILELIST_TXT = ""
CHECKSUMS_MD5 = "{0}{1}".format(repo, md5_file)
ChangeLog_txt = "{0}{1}".format(repo, log_file)
if self.check:
return self.checks_logs(log, ChangeLog_txt)
self.down(lib, PACKAGES_TXT, repo_name)
self.down(lib, CHECKSUMS_MD5, repo_name)
self.down(log, ChangeLog_txt, repo_name)
self.remote(log, ChangeLog_txt, lib, PACKAGES_TXT, CHECKSUMS_MD5,
FILELIST_TXT, repo_name)
def multi(self):
"""Creating alien multilib local library
"""
ver = slack_ver()
repo = self.def_repos_dict["multi"]
log = self.log_path + "multi/"
lib = self.lib_path + "multi_repo/"
repo_name = log[:-1].split("/")[-1]
lib_file = "PACKAGES.TXT"
# lst_file = ""
md5_file = "CHECKSUMS.md5"
log_file = "ChangeLog.txt"
if not os.path.exists(log):
os.mkdir(log)
if not os.path.exists(lib):
os.mkdir(lib)
if self.meta.slack_rel == "current":
ver = self.meta.slack_rel
PACKAGES_TXT = "{0}{1}/{2}".format(repo, ver, lib_file)
FILELIST_TXT = ""
CHECKSUMS_MD5 = "{0}{1}/{2}".format(repo, ver, md5_file)
ChangeLog_txt = "{0}{1}".format(repo, log_file)
if self.check:
return self.checks_logs(log, ChangeLog_txt)
self.down(lib, PACKAGES_TXT, repo_name)
self.down(lib, CHECKSUMS_MD5, repo_name)
self.down(log, ChangeLog_txt, repo_name)
self.remote(log, ChangeLog_txt, lib, PACKAGES_TXT, CHECKSUMS_MD5,
FILELIST_TXT, repo_name)
def slacke(self):
"""Creating Slacke local library
"""
ar = ""
arch = self.meta.arch
repo = self.def_repos_dict["slacke"]
log = self.log_path + "slacke/"
lib = self.lib_path + "slacke_repo/"
repo_name = log[:-1].split("/")[-1]
lib_file = "PACKAGES.TXT"
# lst_file = ""
md5_file = "CHECKSUMS.md5"
log_file = "ChangeLog.txt"
if not os.path.exists(log):
os.mkdir(log)
if not os.path.exists(lib):
os.mkdir(lib)
if arch == "x86_64":
ar = "64"
version = self.meta.slacke_sub_repo[1:-1]
PACKAGES_TXT = "{0}slacke{1}/slackware{2}-{3}/{4}".format(
repo, version, ar, slack_ver(), lib_file)
FILELIST_TXT = ""
CHECKSUMS_MD5 = "{0}slacke{1}/slackware{2}-{3}/{4}".format(
repo, version, ar, slack_ver(), md5_file)
ChangeLog_txt = "{0}slacke{1}/slackware{2}-{3}/{4}".format(
repo, version, ar, slack_ver(), log_file)
if self.check:
return self.checks_logs(log, ChangeLog_txt)
self.down(lib, PACKAGES_TXT, repo_name)
self.down(lib, CHECKSUMS_MD5, repo_name)
self.down(log, ChangeLog_txt, repo_name)
self.remote(log, ChangeLog_txt, lib, PACKAGES_TXT, CHECKSUMS_MD5,
FILELIST_TXT, repo_name)
def salix(self):
"""Creating SalixOS local library
"""
ar = "i486"
arch = self.meta.arch
repo = self.def_repos_dict["salix"]
log = self.log_path + "salix/"
lib = self.lib_path + "salix_repo/"
repo_name = log[:-1].split("/")[-1]
lib_file = "PACKAGES.TXT"
# lst_file = ""
md5_file = "CHECKSUMS.md5"
log_file = "ChangeLog.txt"
if not os.path.exists(log):
os.mkdir(log)
if not os.path.exists(lib):
os.mkdir(lib)
if arch == "x86_64":
ar = "x86_64"
PACKAGES_TXT = "{0}{1}/{2}/{3}".format(repo, ar, slack_ver(), lib_file)
FILELIST_TXT = ""
CHECKSUMS_MD5 = "{0}{1}/{2}/{3}".format(repo, ar, slack_ver(), md5_file)
ChangeLog_txt = "{0}{1}/{2}/{3}".format(repo, ar, slack_ver(), log_file)
if self.check:
return self.checks_logs(log, ChangeLog_txt)
self.down(lib, PACKAGES_TXT, repo_name)
self.down(lib, CHECKSUMS_MD5, repo_name)
self.down(log, ChangeLog_txt, repo_name)
self.remote(log, ChangeLog_txt, lib, PACKAGES_TXT, CHECKSUMS_MD5,
FILELIST_TXT, repo_name)
def slackl(self):
"""Creating slackel.gr local library
"""
ar = "i486"
arch = self.meta.arch
repo = self.def_repos_dict["slackl"]
log = self.log_path + "slackl/"
lib = self.lib_path + "slackl_repo/"
repo_name = log[:-1].split("/")[-1]
lib_file = "PACKAGES.TXT"
# lst_file = ""
md5_file = "CHECKSUMS.md5"
log_file = "ChangeLog.txt"
if not os.path.exists(log):
os.mkdir(log)
if not os.path.exists(lib):
os.mkdir(lib)
if arch == "x86_64":
ar = "x86_64"
PACKAGES_TXT = "{0}{1}/current/{2}".format(repo, ar, lib_file)
FILELIST_TXT = ""
CHECKSUMS_MD5 = "{0}{1}/current/{2}".format(repo, ar, md5_file)
ChangeLog_txt = "{0}{1}/current/{2}".format(repo, ar, log_file)
if self.check:
return self.checks_logs(log, ChangeLog_txt)
self.down(lib, PACKAGES_TXT, repo_name)
self.down(lib, CHECKSUMS_MD5, repo_name)
self.down(log, ChangeLog_txt, repo_name)
self.remote(log, ChangeLog_txt, lib, PACKAGES_TXT, CHECKSUMS_MD5,
FILELIST_TXT, repo_name)
def rested(self):
"""Creating alien restricted local library
"""
repo = self.def_repos_dict["rested"]
log = self.log_path + "rested/"
lib = self.lib_path + "rested_repo/"
repo_name = log[:-1].split("/")[-1]
lib_file = "PACKAGES.TXT"
# lst_file = ""
md5_file = "CHECKSUMS.md5"
log_file = "ChangeLog.txt"
if not os.path.exists(log):
os.mkdir(log)
if not os.path.exists(lib):
os.mkdir(lib)
PACKAGES_TXT = "{0}{1}".format(repo, lib_file)
FILELIST_TXT = ""
CHECKSUMS_MD5 = "{0}{1}".format(repo, md5_file)
ChangeLog_txt = "{0}{1}".format(repo, log_file)
if self.check:
return self.checks_logs(log, ChangeLog_txt)
self.down(lib, PACKAGES_TXT, repo_name)
self.down(lib, CHECKSUMS_MD5, repo_name)
self.down(log, ChangeLog_txt, repo_name)
self.remote(log, ChangeLog_txt, lib, PACKAGES_TXT, CHECKSUMS_MD5,
FILELIST_TXT, repo_name)
def msb(self):
"""Creating MATE local library
"""
ar = "x86"
ver_slack = slack_ver()
arch = self.meta.arch
repo = self.def_repos_dict["msb"]
log = self.log_path + "msb/"
lib = self.lib_path + "msb_repo/"
repo_name = log[:-1].split("/")[-1]
lib_file = "PACKAGES.TXT"
# lst_file = ""
md5_file = "CHECKSUMS.md5"
log_file = "ChangeLog.txt"
if not os.path.exists(log):
os.mkdir(log)
if not os.path.exists(lib):
os.mkdir(lib)
if arch == "x86_64":
ar = "x86_64"
version = self.meta.msb_sub_repo[1:-1]
if self.meta.slack_rel == "current":
ver_slack = self.meta.slack_rel
PACKAGES_TXT = "{0}{1}/{2}/{3}/{4}".format(
repo, ver_slack, version, ar, lib_file)
FILELIST_TXT = ""
CHECKSUMS_MD5 = "{0}{1}/{2}/{3}/{4}".format(
repo, ver_slack, version, ar, md5_file)
ChangeLog_txt = "{0}{1}".format(repo, log_file)
if self.check:
return self.checks_logs(log, ChangeLog_txt)
self.down(lib, PACKAGES_TXT, repo_name)
self.down(lib, CHECKSUMS_MD5, repo_name)
self.down(log, ChangeLog_txt, repo_name)
self.remote(log, ChangeLog_txt, lib, PACKAGES_TXT, CHECKSUMS_MD5,
FILELIST_TXT, repo_name)
def csb(self):
"""Creating Cinnamon local library
"""
ar = "x86"
ver_slack = slack_ver()
arch = self.meta.arch
repo = self.def_repos_dict["csb"]
log = self.log_path + "csb/"
lib = self.lib_path + "csb_repo/"
repo_name = log[:-1].split("/")[-1]
lib_file = "PACKAGES.TXT"
# lst_file = ""
md5_file = "CHECKSUMS.md5"
log_file = "ChangeLog.txt"
if not os.path.exists(log):
os.mkdir(log)
if not os.path.exists(lib):
os.mkdir(lib)
if arch == "x86_64":
ar = "x86_64"
if self.meta.slack_rel == "current":
ver_slack = self.meta.slack_rel
PACKAGES_TXT = "{0}{1}/{2}/{3}".format(
repo, ver_slack, ar, lib_file)
FILELIST_TXT = ""
CHECKSUMS_MD5 = "{0}{1}/{2}/{3}".format(
repo, ver_slack, ar, md5_file)
ChangeLog_txt = "{0}{1}".format(repo, log_file)
if self.check:
return self.checks_logs(log, ChangeLog_txt)
self.down(lib, PACKAGES_TXT, repo_name)
self.down(lib, CHECKSUMS_MD5, repo_name)
self.down(log, ChangeLog_txt, repo_name)
self.remote(log, ChangeLog_txt, lib, PACKAGES_TXT, CHECKSUMS_MD5,
FILELIST_TXT, repo_name)
def connos(self):
"""Creating connochaetos (slack-n-free) local library
"""
nickname = "slack-n-free"
ar = ""
arch = self.meta.arch
repo = self.def_repos_dict["connos"]
log = self.log_path + "connos/"
lib = self.lib_path + "connos_repo/"
repo_name = log[:-1].split("/")[-1]
lib_file = "PACKAGES.TXT"
# lst_file = ""
md5_file = "CHECKSUMS.md5"
log_file = "ChangeLog.txt"
if not os.path.exists(log):
os.mkdir(log)
if not os.path.exists(lib):
os.mkdir(lib)
if arch == "x86_64":
ar = "64"
PACKAGES_TXT = "{0}{1}{2}-{3}/{4}".format(repo, nickname, ar,
slack_ver(), lib_file)
FILELIST_TXT = ""
CHECKSUMS_MD5 = "{0}{1}{2}-{3}/{4}".format(repo, nickname, ar,
slack_ver(), md5_file)
ChangeLog_txt = "{0}{1}{2}-{3}/{4}".format(repo, nickname, ar,
slack_ver(), log_file)
if self.check:
return self.checks_logs(log, ChangeLog_txt)
self.down(lib, PACKAGES_TXT, repo_name)
self.down(lib, CHECKSUMS_MD5, repo_name)
self.down(log, ChangeLog_txt, repo_name)
self.remote(log, ChangeLog_txt, lib, PACKAGES_TXT, CHECKSUMS_MD5,
FILELIST_TXT, repo_name)
def mles(self):
"""Creating Microlinux local library
"""
ar = "32"
arch = self.meta.arch
repo = self.def_repos_dict["mles"]
log = self.log_path + "mles/"
lib = self.lib_path + "mles_repo/"
repo_name = log[:-1].split("/")[-1]
lib_file = "PACKAGES.TXT"
# lst_file = ""
md5_file = "CHECKSUMS.md5"
log_file = "ChangeLog.txt"
if not os.path.exists(log):
os.mkdir(log)
if not os.path.exists(lib):
os.mkdir(lib)
if arch == "x86_64":
ar = "64"
version = self.meta.mles_sub_repo[1:-1]
PACKAGES_TXT = "{0}{1}-{2}-{3}bit/{4}".format(
repo, version, slack_ver(), ar, lib_file)
FILELIST_TXT = ""
CHECKSUMS_MD5 = "{0}{1}-{2}-{3}bit/{4}".format(
repo, version, slack_ver(), ar, md5_file)
ChangeLog_txt = "{0}{1}-{2}-{3}bit/{4}".format(
repo, version, slack_ver(), ar, log_file)
if self.check:
return self.checks_logs(log, ChangeLog_txt)
self.down(lib, PACKAGES_TXT, repo_name)
self.down(lib, CHECKSUMS_MD5, repo_name)
self.down(log, ChangeLog_txt, repo_name)
self.remote(log, ChangeLog_txt, lib, PACKAGES_TXT, CHECKSUMS_MD5,
FILELIST_TXT, repo_name)
def down(self, path, link, repo):
"""Download files
"""
filename = link.split("/")[-1]
if not os.path.isfile(path + filename):
Download(path, link.split(), repo).start()
def remote(self, *args):
"""Remove and recreate files
"""
log_path = args[0]
ChangeLog_txt = args[1]
lib_path = args[2]
PACKAGES_TXT = args[3]
CHECKSUMS_MD5 = args[4]
FILELIST_TXT = args[5]
repo = args[6]
if self.checks_logs(log_path, ChangeLog_txt):
# remove old files
self.file_remove(log_path, ChangeLog_txt.split("/")[-1])
self.file_remove(lib_path, PACKAGES_TXT.split("/")[-1])
self.file_remove(lib_path, CHECKSUMS_MD5.split("/")[-1])
self.file_remove(lib_path, FILELIST_TXT.split("/")[-1])
if repo == "slack":
dirs = ["core/", "extra/", "pasture/"]
for d in dirs:
self.file_remove(lib_path + d, "PACKAGES.TXT")
self.file_remove(lib_path + d, "CHECKSUMS.md5")
self.down(lib_path + "core/", PACKAGES_TXT, repo)
self.down(lib_path + "core/", CHECKSUMS_MD5, repo)
self.down(lib_path + "extra/", self.EXTRA, repo)
self.down(lib_path + "extra/", self.EXT_CHECKSUMS, repo)
if slack_ver() != "14.0": # no pasture/ folder for 14.0 version
self.down(lib_path + "pasture/", self.PASTURE, repo)
self.down(lib_path + "pasture/", self.PAS_CHECKSUMS, repo)
# download new files
if repo != "slack":
self.down(lib_path, PACKAGES_TXT, repo)
self.down(lib_path, CHECKSUMS_MD5, repo)
self.down(lib_path, FILELIST_TXT, repo)
self.down(log_path, ChangeLog_txt, repo)
def merge(self, path, outfile, infiles):
"""Merge files
"""
with open(path + outfile, 'w') as out_f:
for i in infiles:
if os.path.isfile("{0}{1}".format(path, i)):
with open(path + i, "r") as in_f:
for line in in_f:
out_f.write(line)
def file_remove(self, path, filename):
"""Check if filename exists and remove
"""
if os.path.isfile(path + filename):
os.remove(path + filename)
def checks_logs(self, log_path, url):
"""Checks ChangeLog.txt for changes
"""
local = ""
filename = url.split("/")[-1]
server = FileSize(url).server()
if os.path.isfile(log_path + filename):
local = FileSize(log_path + filename).local()
if server != local:
return True
return False
def upgrade(self, only):
"""Remove all package lists with changelog and checksums files
and create lists again"""
repositories = self.meta.repositories
if only:
repositories = only
for repo in repositories:
changelogs = "{0}{1}{2}".format(self.log_path, repo,
"/ChangeLog.txt")
if os.path.isfile(changelogs):
os.remove(changelogs)
if os.path.isdir(self.lib_path + "{0}_repo/".format(repo)):
for f in (os.listdir(self.lib_path + "{0}_repo/".format(
repo))):
files = "{0}{1}_repo/{2}".format(self.lib_path, repo, f)
if os.path.isfile(files):
os.remove(files)
elif os.path.isdir(files):
shutil.rmtree(files)
Update().repository(only)
class Update(object):
def __init__(self):
self._init = "Initialization(False)"
self.meta = _meta_
self.done = "{0}Done{1}\n".format(self.meta.color["GREY"],
self.meta.color["ENDC"])
self.error = "{0}Error{1}\n".format(self.meta.color["RED"],
self.meta.color["ENDC"])
def repository(self, only):
"""Update repositories lists
"""
print("\nCheck and update repositories:\n")
default = self.meta.default_repositories
enabled = self.meta.repositories
if only:
enabled = only
for repo in enabled:
if check_for_local_repos(repo) is True:
continue
sys.stdout.write("{0}Check repository [{1}{2}{3}] ... "
"{4}".format(
self.meta.color["GREY"],
self.meta.color["CYAN"], repo,
self.meta.color["GREY"],
self.meta.color["ENDC"]))
sys.stdout.flush()
if repo in default:
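                # self._init holds the literal string "Initialization(False)",
                # so this exec builds and runs e.g. Initialization(False).slack()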
exec("{0}.{1}()".format(self._init, repo))
sys.stdout.write(self.done)
elif repo in enabled:
Initialization(False).custom(repo)
sys.stdout.write(self.done)
else:
sys.stdout.write(self.error)
print("") # new line at end
raise SystemExit()
def check_exists_repositories(repo):
"""Checking if repositories exists by PACKAGES.TXT file
"""
pkg_list = "PACKAGES.TXT"
if repo == "sbo":
pkg_list = "SLACKBUILDS.TXT"
if check_for_local_repos(repo) is True:
pkg_list = "PACKAGES.TXT"
return ""
if not os.path.isfile("{0}{1}{2}".format(
_meta_.lib_path, repo, "_repo/{0}".format(pkg_list))):
return repo
return ""
def check_for_local_repos(repo):
"""Check if repository is local
"""
repos_dict = Repo().default_repository()
if repo in repos_dict:
repo_url = repos_dict[repo]
if repo_url.startswith("file:///"):
return True
| websafe/slpkg | slpkg/init.py | Python | gpl-3.0 | 31,534 |
"""
Socket IO connections
"""
from datetime import timedelta
from tornado import ioloop, gen
from tornadio2 import SocketConnection, TornadioRouter, SocketServer, event, gen
class QueryConnection(SocketConnection):
def long_running(self, value, callback):
"""Long running task implementation.
Simply adds 3 second timeout and then calls provided callback method.
"""
def finish():
callback('Handled %s.' % value)
ioloop.IOLoop.instance().add_timeout(timedelta(seconds=3), finish)
@event
def query(self, num):
"""Event implementation
        Because on_event() is wrapped with ``gen.engine`` below, yield will be
        treated as an asynchronous task.
"""
response = yield gen.Task(self.long_running, num)
self.emit('response', response)
@gen.engine
def on_event(self, name, *args, **kwargs):
"""Wrapped ``on_event`` handler, which will queue events and will allow usage
of the ``yield`` in the event handlers.
If you want to use non-queued version, just wrap ``on_event`` with ``gen.engine``.
"""
return super(QueryConnection, self).on_event(name, *args, **kwargs)
| godsarmy/BAT | connections.py | Python | mit | 1,218 |
from rest_framework import viewsets
from rest_framework.response import Response
from treeherder.model.derived import (ArtifactsModel,
JobsModel)
from treeherder.model.error_summary import get_artifacts_that_need_bug_suggestions
from treeherder.model.tasks import populate_error_summary
from treeherder.webapp.api import permissions
from treeherder.webapp.api.utils import UrlQueryFilter
class ArtifactViewSet(viewsets.ViewSet):
permission_classes = (permissions.HasHawkOrLegacyOauthPermissionsOrReadOnly,)
"""
This viewset is responsible for the artifact endpoint.
"""
def retrieve(self, request, project, pk=None):
"""
retrieve a single instance of job_artifact
"""
filter = UrlQueryFilter({"id": pk})
with ArtifactsModel(project) as artifactModel:
objs = artifactModel.get_job_artifact_list(0, 1, filter.conditions)
if objs:
return Response(objs[0])
else:
return Response("job_artifact {0} not found".format(pk), 404)
def list(self, request, project):
"""
return a list of job artifacts
"""
# @todo: remove after old data expires from this change on 3/5/2015
qparams = request.query_params.copy()
name = qparams.get('name', None)
if name and name == 'text_log_summary':
qparams['name__in'] = 'text_log_summary,Structured Log'
del(qparams['name'])
# end remove block
# @todo: change ``qparams`` back to ``request.query_params``
filter = UrlQueryFilter(qparams)
offset = int(filter.pop("offset", 0))
count = min(int(filter.pop("count", 10)), 1000)
with ArtifactsModel(project) as artifacts_model:
objs = artifacts_model.get_job_artifact_list(
offset,
count,
filter.conditions
)
return Response(objs)
def create(self, request, project):
artifacts = ArtifactsModel.serialize_artifact_json_blobs(request.data)
job_guids = [x['job_guid'] for x in artifacts]
with JobsModel(project) as jobs_model, ArtifactsModel(project) as artifacts_model:
job_id_lookup = jobs_model.get_job_ids_by_guid(job_guids)
artifacts_model.load_job_artifacts(artifacts, job_id_lookup)
# If a ``text_log_summary`` and ``Bug suggestions`` artifact are
# posted here together, for the same ``job_guid``, then just load
# them. This is how it is done internally in our log parser
# so there is no delay in creation and the bug suggestions show
# as soon as the log is parsed.
#
# If a ``text_log_summary`` is posted WITHOUT an accompanying
# ``Bug suggestions`` artifact, then schedule to create it
# asynchronously so that this api does not take too long.
tls_list = get_artifacts_that_need_bug_suggestions(artifacts)
# tls_list will contain all ``text_log_summary`` artifacts that
# do NOT have an accompanying ``Bug suggestions`` artifact in this
# current list of artifacts. If it's empty, then we don't need
# to schedule anything.
if tls_list:
populate_error_summary.apply_async(
args=[project, tls_list, job_id_lookup],
routing_key='error_summary'
)
return Response({'message': 'Artifacts stored successfully'})
| gbrmachado/treeherder | treeherder/webapp/api/artifact.py | Python | mpl-2.0 | 3,604 |
"""
This module provides classes to build an OrderList for input to a plant,
including the Order instances and their Recipe instances.
"""
from xml.dom import minidom
from plant import CraneMoveTime
class Recipe(object):
"""
This class provides a Recipe for an Order. It is a list (or dictionary) of
tuples (str machineName, int timeAtMachine).
"""
def __init__(self):
"""
recipe is a list (or dictionary) that contains the tuples of time
information for the Recipe.
"""
object.__init__(self)
self.recipe = []
def indexOfMachine(self, machineName):
"""
Returns the index of the Machine with machineName in the recipe list.
"""
for i, r in enumerate(self.recipe):
if r[0] == machineName:
return i
return -1
def calcMinProcTime(self, machineName = None):
"""
This method calculates the minimum processing time of the Recipe
starting from Machine with machineName (Considers the constant plant
delays for the crane movement time between machines).
"""
if machineName == None:
index = 0
else:
index = self.indexOfMachine(machineName)
res = (len(self.recipe) - 1 - index) * CraneMoveTime
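        # one crane move per remaining machine-to-machine transfer; the loop
        # below subtracts a move again for machines with zero processing time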
while index < len(self.recipe):
res += self.recipe[index][1]
if self.recipe[index][1] == 0:
res -= CraneMoveTime
index += 1
return res
def __getitem__(self, key):
"""
Returns the time in the Recipe at Machine with name key.
"""
assert type(key) == str or type(key) == unicode
for r in self.recipe:
if r[0] == key:
return r[1]
return None
def __setitem__(self, key, value):
"""
        Adds a Recipe item (a tuple of (str machineName, int time)) to the
        Recipe list (or dictionary). If machineName is already in the list,
        the existing entry is replaced in place rather than appended again.
"""
assert type(key) == str or type(key) == unicode
assert type(value) == int
if self.__getitem__(key) == None:
self.recipe.append((key, value))
else:
for i, r in enumerate(self.recipe):
if r[0] == key:
del self.recipe[i]
self.recipe.insert(i, (key, value))
return
def toXml(self, xmlDoc):
"""
Converts the Recipe to an XML description and returns the XML tree
node. XmlDoc is the document to create the tree element from.
"""
node = xmlDoc.createElement("recipe")
itemNode = None
for i, r in enumerate(self.recipe):
itemNode = xmlDoc.createElement("machine")
itemNode.setAttribute("name", r[0])
itemNode.setAttribute("time", str(r[1]))
node.appendChild(itemNode)
return node
@staticmethod
def fromXml(element):
"""
A static method that creates a Recipe instance from an XML tree node
and returns it.
"""
recipe = Recipe()
for e in element.getElementsByTagName("machine"):
recipe[e.getAttribute("name")] = int(e.getAttribute("time"))
return recipe
class Order(object):
"""
This class provides an Order with id, deadline and recipe.
"""
def __init__(self, id = 0, deadline = 0, currentMachine = "", currentOvertime = 0):
"""
id is a unique int for the Order.
deadline is the int deadline for the Order.
recipe is the Recipe instance for the order.
"""
object.__init__(self)
assert deadline >= 0
assert id >= 0
self.id = id
self.deadline = deadline
self.recipe = None
self.currentMachine = currentMachine
self.currentOvertime = currentOvertime
def toXml(self, xmlDoc):
"""
Converts the Order to an XML description and returns the XML tree
node. XmlDoc is the document to create the tree element from.
"""
node = xmlDoc.createElement("order")
node.setAttribute("id", str(self.id))
node.setAttribute("deadline", str(self.deadline))
node.appendChild(self.recipe.toXml(xmlDoc))
return node
@staticmethod
def fromXml(element):
"""
A static method that creates an Order instance from an XML tree node
and returns it. The number of children (Recipe instances) of the node
have to be exactly one since each Order can only have a single Recipe.
"""
assert len(element.getElementsByTagName("recipe")) == 1
order = Order(
deadline = int(element.getAttribute("deadline")),
id = int(element.getAttribute("id")),
currentMachine = element.getAttribute("current_machine"),
currentOvertime = int(element.getAttribute("current_overtime"))
)
order.recipe = Recipe.fromXml(element.getElementsByTagName("recipe")[0])
return order
class OrderList(object):
"""
This class provides a list of Order instances.
"""
def __init__(self):
"""
orders is a list of Order instances.
"""
object.__init__(self)
self.orders = []
def toXmlFile(self, filename):
"""
Exports the OrderList instance to an xml file (str filename).
"""
file = open(filename, "w")
file.write(self.toXml().toprettyxml())
file.close()
def toXml(self):
"""
Creates an XML tree node element of the OrderList instance and returns
it.
"""
domImp = minidom.getDOMImplementation()
xmlDoc = domImp.createDocument(None, "order-list", None)
for o in self.orders:
xmlDoc.documentElement.appendChild(o.toXml(xmlDoc))
return xmlDoc.documentElement
@staticmethod
def fromXml(xmlDoc):
"""
A static method that creates an OrderList instance from an XML tree
node and returns it.
"""
orderList = OrderList()
for e in xmlDoc.getElementsByTagName("order"):
orderList.addOrder(Order.fromXml(e))
return orderList
@staticmethod
def fromXmlFile(filename):
"""
A static method that loads an OrderList from a file (str filename) and
returns an instance.
"""
file = open(filename, "r")
doc = minidom.parse(file)
orderList = OrderList.fromXml(doc)
file.close()
return orderList
def addOrder(self, order):
"""
Adds an Order to the OrderList. The Order instance and the Order.id
cannot be already in the list.
"""
assert order not in self.orders
for o in self.orders:
if o.id == order.id:
raise Exception("Order id already in order list")
self.orders.append(order)
| fredmorcos/attic | projects/plantmaker/archive/20100505/order.py | Python | isc | 5,912 |
#!/usr/bin/env python
import re
pair = re.compile(r'(.)\1')
def contains_two_pairs(string):
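    # findall with a capturing group returns one entry per non-overlapping
    # doubled letter, so two or more matches means at least two pairs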
return len(re.findall(pair, string)) >= 2
def contains_iol(string):
banned_letters = ['i','o','l']
for letter in banned_letters:
if letter in string:
return True
return False
def contains_abc_string(string):
ords = [ord(x) for x in string]
for i in xrange(len(ords) - 2):
if ords[i + 1] - ords[i] != 1:
continue
if ords[i + 2] - ords[i + 1] != 1:
continue
return True
return False
def valid_password(string):
    # ~ is bitwise NOT: ~False == -1 and ~True == -2 are both truthy, so the
    # original `~contains_iol(...)` never rejected i/o/l; `not` is the correct negation.
    return (not contains_iol(string) and
            contains_two_pairs(string) and
            contains_abc_string(string))
def increment_string(string):
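    # Treat the password as a base-26 number: bump the last letter and carry
    # 'z' -> 'a' leftwards, skipping over the banned letters i, o and l.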
s = list(reversed(string))
for i in xrange(len(s)):
if s[i] == 'z':
s[i] = 'a'
continue
s[i] = chr(ord(s[i]) + 1)
# If we're banned, go ahead and skip it
if s[i] in ['i','o','l']:
s[i] = chr(ord(s[i]) + 1)
# We are done incrementing now
break
return ''.join(reversed(s))
def next_password(password):
while not valid_password(password):
password = increment_string(password)
return password
with open('input', 'r') as f:
password = f.read()
if password.endswith('\n'):
password = password[:-1]
second_password = next_password(password)
print 'Next: {}'.format(second_password)
third_password = next_password(increment_string(second_password))
print 'Then: {}'.format(third_password)
| jatowler/adventofcode-2015 | 11/part2.py | Python | mit | 1,461 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
#from openerp.tools import #debug
UNIDADES = (
'',
'UN ',
'DOS ',
'TRES ',
'CUATRO ',
'CINCO ',
'SEIS ',
'SIETE ',
'OCHO ',
'NUEVE ',
'DIEZ ',
'ONCE ',
'DOCE ',
'TRECE ',
'CATORCE ',
'QUINCE ',
'DIECISEIS ',
'DIECISIETE ',
'DIECIOCHO ',
'DIECINUEVE ',
'VEINTE '
)
DECENAS = (
    'VEINTI',
'TREINTA ',
'CUARENTA ',
'CINCUENTA ',
'SESENTA ',
'SETENTA ',
'OCHENTA ',
'NOVENTA ',
'CIEN '
)
CENTENAS = (
'CIENTO ',
'DOSCIENTOS ',
'TRESCIENTOS ',
'CUATROCIENTOS ',
'QUINIENTOS ',
'SEISCIENTOS ',
'SETECIENTOS ',
'OCHOCIENTOS ',
'NOVECIENTOS '
)
def number_to_text_es(number_in,currency,join_dec=' Y ',separator=',',decimal_point='.'):
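    # Spells out an amount in Spanish, e.g. "1234.50" with currency "PESOS"
    # becomes roughly "MIL DOSCIENTOS TREINTA Y CUATRO PESOS Y 50/100".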
converted = ''
if currency == False:
currency = ''
    # type(number_in) != 'str' compared a type object to a string literal, which
    # is always True; use isinstance so strings are passed through unchanged.
    if not isinstance(number_in, basestring):
        number = str(number_in)
    else:
        number = number_in
number_str=number
#if we are using the coma as separator we need to remove them from the string
try:
number_str = number_str.replace(separator,'')
except ValueError:
        print 'The separator used for the thousands is not supported'
#debug(number_str)
try:
number_int, number_dec = number_str.split(decimal_point)
except ValueError:
number_int = number_str
number_dec = ""
number_str = number_int.zfill(9)
millones = number_str[:3]
miles = number_str[3:6]
cientos = number_str[6:]
if(millones):
if(millones == '001'):
converted += 'UN MILLON '
elif(int(millones) > 0):
converted += '%sMILLONES ' % __convertNumber(millones)
if(miles):
if(miles == '001'):
converted += 'MIL '
elif(int(miles) > 0):
converted += '%sMIL ' % __convertNumber(miles)
if(cientos):
if(cientos == '001'):
converted += 'UN '
elif(int(cientos) > 0):
converted += '%s ' % __convertNumber(cientos)
if number_dec == "":
number_dec = "00"
if (len(number_dec) < 2 ):
number_dec+='0'
has_decimal = float(number_dec) != 0 and join_dec + number_dec + "/100" or ' EXACTOS'
converted += currency + has_decimal
return converted
def __convertNumber(n):
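    # Converts one zero-padded 3-digit group (e.g. '034') into Spanish words.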
output = ''
if(n == '100'):
output = "CIEN "
elif(n[0] != '0'):
output = CENTENAS[int(n[0])-1]
k = int(n[1:])
if(k <= 20):
output += UNIDADES[k]
else:
if((k > 30) & (n[2] != '0')):
output += '%sY %s' % (DECENAS[int(n[1])-2], UNIDADES[int(n[2])])
else:
output += '%s%s' % (DECENAS[int(n[1])-2], UNIDADES[int(n[2])])
return output
| ClearCorp-dev/odoo-clearcorp | TODO-8.0/payment_receipt_report/report/amount_to_text.py | Python | agpl-3.0 | 3,788 |
from tools.load import LoadMatrix
lm=LoadMatrix()
data = lm.load_numbers('../data/fm_train_real.dat')
parameter_list = [[data,10],[data,20]]
def converter_laplacianeigenmaps_modular(data,k):
from shogun.Features import RealFeatures
from shogun.Converter import LaplacianEigenmaps
features = RealFeatures(data)
converter = LaplacianEigenmaps()
converter.set_target_dim(1)
converter.set_k(k)
converter.set_tau(2.0)
converter.apply(features)
return features
if __name__=='__main__':
print('LaplacianEigenmaps')
converter_laplacianeigenmaps_modular(*parameter_list[0])
| ratschlab/ASP | examples/undocumented/python_modular/converter_laplacianeigenmaps_modular.py | Python | gpl-2.0 | 588 |
from ztag.annotation import *
class QNXFox(Annotation):
port = None
protocol = protocols.FOX
subprotocol = protocols.FOX.DEVICE_ID
tests = {
"qnx_npm6": {
"global_metadata": {
"os": OperatingSystem.QNX,
}
}
}
def process(self, obj, meta):
os_name = obj["os_name"]
if os_name.lower().strip() == "qnx":
meta.global_metadata.os = OperatingSystem.QNX
return meta
class QNXNPMFox(Annotation):
port = None
protocol = protocols.FOX
subprotocol = protocols.FOX.DEVICE_ID
_prefixes = [
("qnx-npm2", "NPM2"),
("qnx-npm3", "NPM3"),
("qnx-npm6", "NPM6"),
]
tests = {
"qnx_npm6": {
"global_metadata": {
"os": OperatingSystem.QNX,
},
"tags": ["NPM", "NPM6"],
}
}
def process(self, obj, meta):
host_id = obj["host_id"].lower().strip()
for prefix in self._prefixes:
            if host_id.startswith(prefix[0]):
meta.global_metadata.os = OperatingSystem.QNX
meta.tags.add("NPM")
meta.tags.add(prefix[1])
return meta
class QNXJACEFox(Annotation):
port = None
protocol = protocols.FOX
subprotocol = protocols.FOX.DEVICE_ID
_prefixes = [
("qnx-j402", "JACE-402"),
("qnx-j403", "JACE-403"),
("qnx-j404", "JACE-545"),
("qnx-jvln", "JACE-7"),
]
tests = {
"qnx_jace": {
"global_metadata": {
"os": OperatingSystem.QNX,
},
"tags": ["JACE", "JACE-7"],
}
}
def process(self, obj, meta):
host_id = obj["host_id"].lower().strip()
for prefix in self._prefixes:
            if host_id.startswith(prefix[0]):
meta.global_metadata.os = OperatingSystem.QNX
meta.tags.add("JACE")
meta.tags.add(prefix[1])
return meta
| zmap/ztag | ztag/annotations/fox_qns.py | Python | apache-2.0 | 2,067 |
from django.urls import re_path
from django.utils.translation import ugettext_lazy as _
from . import views as www
urlpatterns = [
re_path(r'^$', www.Index.as_view(), name='index'),
re_path(_('^topic_collections_url$'), www.TopicCollections.as_view(),
name='topic_collections'),
re_path(_('^topic_collections_url/(?P<slug>[\w-]+)$'),
www.CollectionDetail.as_view(),
name='collection_detail'),
re_path(_('^about_url$'), www.About.as_view(), name='about'),
re_path(_('^more_about_url$'), www.MoreAbout.as_view(), name='more_about'),
re_path(_('^about_harvests_url$'),
www.AboutHarvest.as_view(), name='about_harvests'),
re_path(_('^about_terminology_url$'),
www.AboutTerminology.as_view(), name='about_terminology'),
re_path(_('^about_documents_url$'),
www.AboutDocuments.as_view(), name='about_documents'),
re_path(_('^about_graphics_url$'),
www.AboutGraphics.as_view(), name='about_graphics'),
re_path(_('^about_contact_url$'),
www.AboutContact.as_view(), name='about_contact'),
re_path(_('^about_faq_url$'), www.AboutFAQ.as_view(), name='about_faq'),
re_path(_('^categories_url$'), www.Categories.as_view(), name='categories'),
re_path(_('^categories_url/(?P<slug>[\w-]+)$'),
www.CategoryDetail.as_view(),
name='category_detail'),
re_path(_('^categories_url/(?P<category_slug>[\w-]+)/(?P<slug>[\w-]+)$'),
www.SubCategoryDetail.as_view(),
name='sub_category_detail'),
re_path(_('^change_list_view/(?P<list_type>visual|text)$'),
www.ChangeListView.as_view(),
name='change_list_view'),
re_path(_('^keyword_url/(?P<slug>[\w-]+)$'),
www.KeywordViews.as_view(),
name='keyword'),
re_path(_('^search_url$'), www.SearchRedirectView.as_view(),
name='search_redirect'),
re_path(_('^search_url/(?P<query>.*)'), www.SearchView.as_view(),
name='search'),
re_path(_('^www_source_url/(?P<slug>[\w-]+)$'),
www.SourceDetail.as_view(),
name='source_detail'),
re_path(_('^www_nominate_url$'), www.Nominate.as_view(), name='nominate'),
re_path(_('^www_nominate_success_url$'), www.NominateSuccess.as_view(),
name='nominate_success'),
re_path(_('^www_nominate_url/contract_url$'),
www.NominateContractView.as_view(),
name='nominate_contract'),
re_path(_('^www_nominate_url/cooperation_url$'),
www.NominateCooperationView.as_view(),
name='nominate_cooperation'),
re_path(_('^www_nominate_url/creative_commons_url$'),
www.NominateCreativeCommonsView.as_view(),
name='nominate_creative_commons'),
re_path(_('^www_nominate_url/source_selection_url$'),
www.NominateSourceSelectionView.as_view(),
name='nominate_source_selection'),
re_path(_('^disclaimer_url$'),
www.DisclaimerView.as_view(),
name='disclaimer'),
re_path(_('^embed_url$'),
www.EmbedView.as_view(),
name='embed'),
]
| WebArchivCZ/Seeder | Seeder/www/urls.py | Python | mit | 3,183 |
import pychemia
import tempfile
from .samples import CaTiO3
def test_xyz():
"""
Test (pychemia.io.xyz) :
"""
st1 = CaTiO3()
st1.set_periodicity(False)
file = tempfile.NamedTemporaryFile()
pychemia.io.xyz.save(st1, file.name)
st2 = pychemia.io.xyz.load(file.name)
assert st1 == st2
def test_ascii():
"""
Test (pychemia.io.ascii) :
"""
st1 = CaTiO3()
file = tempfile.NamedTemporaryFile()
pychemia.io.ascii.save(st1, file.name)
st2 = pychemia.io.ascii.load(file.name)
    assert st1 == st2
| MaterialsDiscovery/PyChemia | tests/test_io.py | Python | mit | 625 |
'''
DATE CREATED: 14-05-2014
DATE FINISHED: 21-05-2014
CREATOR: Martin Dessauer
CONTACT: martin.dessauer@me.com, @codezeb (Github)
COPYRIGHT: 2014 Martin Dessauer
LICENSE: GPLv3
'''
import sys # for sys.argv
import getch # reading input (arrow keys)
from binascii import hexlify # l158
from time import sleep # ... guess.
from random import randint,shuffle # should be obvious aswell
''' SETTINGS '''
cash = 200
minBet = 100
unit = "$"
winset = ["JACKS OR BETTER","TWO PAIRS","PAIR O' THREE","STRAIGHT","FLUSH","FULL HOUSE","PAIR O' FOUR","STRAIGHT FLUSH","ROYAL FLUSH"]
payset = [1,2,3,4,6,9,25,50,250] # multiplies with bet/minBet
betset = [1,2,5,10,50,100,250] # factor
charset = ["A","K","D","B","10","9","8","7","6","5","4","3","2"]
cardtypes = ["♠","♣","♥","♦"] # 2x black, 2x red
version = "1.0b"
# DO NOT TOUCH THIS.
bet = 0
''' PARAMETER SETUP '''
for i in range(1,len(sys.argv)):
if(sys.argv[i][:2] == "-c"):
cash = int(sys.argv[i][2:])
''' FUNCTIONS '''
def initCards(): # builds up the deck
global charset,cardtypes
cardset = []
for i in range(0,len(charset)):
for j in range(0,len(cardtypes)):
            cardset.append(charset[i] + " " + cardtypes[j])
return cardset
def gameLoop(): # is called upon start and enters an eternal loop (runs alas person got enough cash ;) )
firstRun = True
global cash,bet
print("Pyker v" + version)
sleep(1)
    if(len(winset) != len(payset)):
        print("Configuration error! (|winSet| != |paySet|)")
        exit()
    if(cash < minBet):
        print("Configuration error! (cash < minBet)")
        exit()
print("\033[2A")
while(not cash<minBet):
print("You've got \033[92m" + str(cash) + unit + "\033[0m.")
if(not firstRun):
if(choice("Continue with bets?",["Yes","No"]) == "No"):
end()
else:
firstRun = False
bet = 0
betPossibilities = []
for i in range(0,len(betset)-1):
if(cash>=betset[i]*minBet):
betPossibilities.append(str(betset[i]*minBet)+unit)
bet = int(choice("Place your bet:",betPossibilities,0)[:-1])
cash = cash - bet
cards()
# We just arrive here if cash<minBet.
choice("\n\033[31mYou ran out of money!\033[0m",["Exit"])
def winType(wT): # prints out the thing we've won.
global cash
print("You've got a \033[32m" + winset[wT-1] + "\033[0m! \033[32m+" + str(payset[wT-1]*bet) + unit + "\033[0m")
cash = cash + payset[wT-1]*bet
def analyseCards(randomCards): # analyses the cards the player got
cleanCards = [] # stores pure int values w/o card type
sameType = False
print("\r \r",end="")
for i in range(0,5):
cleanCards.append(int(randomCards[i][:2].replace(" ","").replace("B","11").replace("D","12").replace("K","13").replace("A","14")))
cleanCards.sort()
if(randomCards[0][-1] == randomCards[1][-1] == randomCards[2][-1] == randomCards[3][-1] == randomCards[4][-1]):
sameType = True
if(sum(cleanCards) == 60):
winType(9) # royal flush
elif(cleanCards[0]+4 == cleanCards[1]+3 == cleanCards[2]+2 == cleanCards[3]+1 == cleanCards[4]):
winType(8) # straight flush
else:
winType(5) # flush
elif(cleanCards[0] == cleanCards[1] == cleanCards[2] == cleanCards[3]) or (cleanCards[1] == cleanCards[2] == cleanCards[3] == cleanCards[4]):
winType(7) # pairo 4
elif(cleanCards[0] == cleanCards[1] == cleanCards[2] and cleanCards[3] == cleanCards[4]) or (cleanCards[0] == cleanCards[1] and cleanCards[2] == cleanCards[3] == cleanCards[4]):
winType(6) # full house
elif(cleanCards[0]+4 == cleanCards[1]+3 == cleanCards[2]+2 == cleanCards[3]+1 == cleanCards[4]):
winType(4) # straight
elif(cleanCards[0] == cleanCards[1] == cleanCards[2]) or (cleanCards[1] == cleanCards[2] == cleanCards[3]) or (cleanCards[2] == cleanCards[3] == cleanCards[4]):
winType(3) # pairo 3
elif(((cleanCards[0] == cleanCards[1]) and (cleanCards[2] == cleanCards[3])) or ((cleanCards[1] == cleanCards[2]) and (cleanCards[3] == cleanCards[4])) or ((cleanCards[0] == cleanCards[1]) and (cleanCards[3] == cleanCards[4]))):
winType(2) # 2pairs
elif((cleanCards[0] + cleanCards[1] >= 22) and (cleanCards[0] == cleanCards[1])) or ((cleanCards[1] + cleanCards[2] >= 22) and (cleanCards[1] == cleanCards[2])) or ((cleanCards[2] + cleanCards[3] >= 22) and (cleanCards[2] == cleanCards[3])) or ((cleanCards[3] + cleanCards[4] >= 22) and (cleanCards[3] == cleanCards[4])):
winType(1) # jack o better
else:
print("\033[31mNothing\033[0m. \033[31m-" + str(bet) + unit + "\033[0m")
def cards(): # shuffles & presents cards to user
tmpCardset = initCards()
randomCards = []
selection = 0
keep = [False,False,False,False,False]
dropout = False
for i in range(0,5):
shuffle(tmpCardset)
index = randint(0,len(tmpCardset)-1)
randomCards.append(tmpCardset[index])
print(" " + randomCards[i] + " ",end="")
tmpCardset.remove(tmpCardset[index])
print("\n")
######################
while(not dropout):
print("\033[1A",end="")
for i in range(0,5):
if(i==selection):
if(keep[i]):
print("\033[7m[KEEP]\033[0m ",end="")
else:
print("\033[7m[ ]\033[0m ",end="")
else:
if(keep[i]):
print("[KEEP] ",end="")
else:
print("[ ] ",end="")
if(5==selection):
print("\033[7m[ACCEPT]\033[0m ")
else:
print("[ACCEPT]")
key = getch.getch()
if(hexlify(bytes(key,"UTF-8")) != bytes("1b","UTF-8")):
if(selection==5):
dropout = True
else:
keep[selection] = not keep[selection]
else:
getch.getch() # \_ SIC!
key = getch.getch()# /
if(key == "D"):
if(selection>0):
selection = selection - 1
elif(key == "C"):
if(selection<5):
selection = selection + 1
print("\033[3A")
for i in range(0,5):
shuffle(tmpCardset)
if(keep[i]):
print(" " + randomCards[i] + " ",end="")
else:
index = randint(0,len(tmpCardset)-1)
randomCards[i] = tmpCardset[index]
print(" " + randomCards[i] + " ",end="")
tmpCardset.remove(tmpCardset[index])
print("")
sleep(1)
analyseCards(randomCards)
def choice(question,options,selection = 0): # selection prompt
print("") # so that \033[1A gets a fresh new line on *first* cycle
while(True):
print("\033[1A" + question,end="")
for i in range(0,len(options)):
if(i==selection):
print(" \033[7m[" + str(options[i]) + "]\033[0m ",end="")
else:
print(" " + str(options[i]) + " ",end="")
print("")
key = getch.getch()
if(key == "\n"):
return options[selection]
else:
getch.getch() # \_ SIC!
key = getch.getch()# /
if(key == "D"):
if(selection>0):
selection = selection - 1
elif(key == "C"):
if(selection<len(options)-1):
selection = selection + 1
def end():
print("Thanks for playing! :)")
exit()
''' GAME LOOP '''
gameLoop() | maride/pyker | pyker3.py | Python | gpl-3.0 | 6,844 |
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "gitzen.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| LHN/lhn-gitzen | manage.py | Python | bsd-3-clause | 249 |
""" NOTE: Probably do not actually need any of this: equivalent tests have been incorporated into get_lines in
process_methods.py
Contains methods used to "clean up" a lines_list, i.e.
remove lines that are likely not actually there """
import numpy as np
from main import *
# from process_methods import *
import math
def cleanup(lines_list, num_chambers):
    assert num_chambers*2 <= len(lines_list), "Didn't find enough lines (cleanup)"
# TODO something going wrong here: check slope-related methods
""" Return list of the actual lines by using only the (num_chambers*2) lowest scores """
cleaned_list = []
avgscore_list = []
for l1 in lines_list:
score = 0 # smaller score better
for l2 in lines_list:
score = score + pairwise_score(l1, l2)
avgscore = (score/len(lines_list))**-1
avgscore_list.append(avgscore)
keylist = np.argsort(avgscore_list)
i = 0
while i < num_chambers*2:
cleaned_list.append(lines_list[keylist[i]])
i += 1
return cleaned_list
def pairwise_score(l1, l2):
""" Might not actually need this
return how "similar" the slopes are """
theta1 = math.atan(slope(l1))
theta2 = math.atan(slope(l2))
return (theta1-theta2)**2
def slope(l):
""" Return the slope of line. If vertical, return infinity """
if l[1] == l[0]:
return float("inf")
else:
return float(l[3]-l[2])/(l[1]-l[0])
| justinjoh/get-ROI | cleanup_methods.py | Python | mit | 1,444 |
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from pyramid.response import Response
from pyramid.view import view_config
from sqlalchemy.orm.exc import NoResultFound
from warehouse.integrations import vulnerabilities
from warehouse.integrations.vulnerabilities import osv, utils
from warehouse.metrics import IMetricsService
@view_config(
require_methods=["POST"],
require_csrf=False,
renderer="json",
route_name="integrations.vulnerabilities.osv.report",
# If those headers are missing, response will be a 404
require_headers=["VULN-PUBLIC-KEY-IDENTIFIER", "VULN-PUBLIC-KEY-SIGNATURE"],
has_translations=False,
)
def report_vulnerabilities(request):
# Vulnerability sources call this API view when they have identified a
# vulnerability that affects a project release.
body = request.body
# Thanks to the predicates, we know the headers we need are defined.
key_id = request.headers.get("VULN-PUBLIC-KEY-IDENTIFIER")
signature = request.headers.get("VULN-PUBLIC-KEY-SIGNATURE")
metrics = request.find_service(IMetricsService, context=None)
verifier = osv.VulnerabilityReportVerifier(
session=request.http,
metrics=metrics,
)
if not verifier.verify(payload=body, key_id=key_id, signature=signature):
return Response(status=400)
try:
vulnerability_reports = request.json_body
except json.decoder.JSONDecodeError:
metrics.increment(
"warehouse.vulnerabilties.error.payload.json_error", tags=["origin:osv"]
)
return Response(status=400)
try:
utils.analyze_vulnerabilities(
request=request,
vulnerability_reports=vulnerability_reports,
origin="osv",
metrics=metrics,
)
except vulnerabilities.InvalidVulnerabilityReportError:
return Response(status=400)
except NoResultFound:
return Response(status=404)
# 204 No Content: we acknowledge but we won't comment on the outcome.
return Response(status=204)
| pypa/warehouse | warehouse/integrations/vulnerabilities/osv/views.py | Python | apache-2.0 | 2,559 |
import tkinter as tk
import tkinter.simpledialog as tksimpledialog
from tkinter import ttk
from itertools import accumulate
import bisect
from sol.config import GlobalConfig
C = GlobalConfig()
class ClipControl:
def __init__(self, root, backend, layer):
self.root = root
self.backend = backend
# important savedata
self.width = 330
self.layer = layer
self.cur_pos = 0.0
self.cur_clip = None
self.pad_buts = []
self.cue_but_states = []
self.loop_but_states = []
self.lp_data = None
self.pad_but_cmds = []
self.deleting_mode = False
# all tk vars
self.name_var = tk.StringVar()
self.zoom_follow_var = tk.BooleanVar()
self.sens_var, self.xtra_sens_var = tk.DoubleVar(), tk.StringVar()
self.speed_var, self.xtra_speed_var = tk.DoubleVar(), tk.StringVar()
self.loop_selected_text_var = tk.StringVar()
# these are overriden upon creation
self.qp_lp_var = None
self.loop_on_var = None
self.loop_type_var = None
self.loop_on_toggle = None
self.loop_type_switch = None
# clip parameter to update function
self.param_dispatch = {
'cue_points': self.update_cues,
'loop_points': self.update_loop,
'loop_on': self.update_loop_on,
'loop_type': self.update_loop_type,
'loop_selection': self.update_loop,
'playback_speed': self.update_speed,
'control_sens': self.update_sens
}
# direct update fun
self.direct_updates = {
'pad_activate': self.activate_pad,
'pad_deactivate': self.delet_pad,
'pads_toggle': self.toggle_pad_mode,
'pad_press': self.press_pad,
'qp_pad_press': self.press_pad_qp,
'lp_pad_press': self.press_pad_lp,
}
base_addr = '/magi/layer{}'.format(self.layer)
bfn = self.backend.fun_store
self.send_back = {
# playback
'play': bfn[base_addr + '/playback/play'],
'pause': bfn[base_addr + '/playback/pause'],
'reverse': bfn[base_addr + '/playback/reverse'],
'random': bfn[base_addr + '/playback/random'],
'clear': bfn[base_addr + '/playback/clear'],
'seek': bfn[base_addr + '/playback/seek'],
# params
'speed': bfn[base_addr + '/playback/speed'],
'sens': bfn['/magi/control{}/sens'.format(self.layer)],
# cue funs
'cue_set': bfn[base_addr + '/cue'],
'cue_clear': bfn[base_addr + '/cue/clear'],
# loop funs
'loop_set_a': bfn[base_addr + '/loop/set/a'],
'loop_set_b': bfn[base_addr + '/loop/set/b'],
'loop_set_a_cur': bfn[base_addr + '/loop/set/cur/a'],
'loop_set_b_cur': bfn[base_addr + '/loop/set/cur/b'],
'loop_on_off': bfn[base_addr + '/loop/set/on_off'],
'loop_type': bfn[base_addr + '/loop/type'],
'loop_select': bfn[base_addr + '/loop/select'],
'loop_move': bfn[base_addr + '/loop/select/move'],
'loop_clear': bfn[base_addr + '/loop/clear'],
# scratching
'scratch_start': bfn['/magi/control{}/start'.format(self.layer)],
'scratch_do': bfn['/magi/control{}/do'.format(self.layer)],
'scratch_stop': bfn['/magi/control{}/stop'.format(self.layer)],
}
self.speed_var.trace('w', self.gen_update_cmd('speed', self.speed_var))
self.sens_var.trace('w', self.gen_update_cmd('sens', self.sens_var))
# colors
self.no_pad_rows = 1
if C.NO_Q > 4:
self.no_pad_rows = C.NO_Q // 4
if C.NO_Q % 4 != 0:
self.no_pad_rows += 1
self.pad_colors = [C.themer.linerp_colors(C.CURRENT_THEME.pad_colors[i], self.no_pad_rows) for i in range(4)]
# let's setup the gooey
# it looks like
# __________________________________________________________
# |________________________________________________________|
# | | < || > X |
# | | + spd sens |
# | [progressbar] | - | | |
# | | O + + |
# | | [x] | | |
# |_______________________________________|________________|
# | | | | | [qp/lp switch] |
# | [qp/lp] | | | | [loop ctrls] |
# | ________|_________|_________|_________| |
# | | | | | [loop nxt/prv] |
# | | | | | [loop in/out] |
# | ________|_________|_________|_________|________________|
self.setup_main_tk()
# curry
def gen_update_cmd(self, key, var):
fun = self.send_back[key]
tk_var = var
def fun_tor(*args):
fun('', tk_var.get())
return fun_tor
def gen_send_cmd(self, key, default_value=1):
fun = self.send_back[key]
val = default_value
def fun_tor(*args):
# print(key, val)
fun('', val)
return fun_tor
def gen_toggle_cmd(self, key, default_values=[False, True]):
fun = self.send_back[key]
toggle_lookup = default_values[:]
def fun_tor(new_val, *args):
send_val = toggle_lookup[int(new_val)] # 0, 1 = False, True
fun('', send_val)
return fun_tor
# send funs
def activate_pad(self, i=-1):
if (self.qp_lp_var.get()): # if lp selected
fun_to_call = self.send_back['loop_select']
else:
fun_to_call = self.send_back['cue_set']
fun_to_call('', i)
def delet_pad(self, i=-1):
if (self.qp_lp_var.get()): # if lp selected
fun_to_call = self.send_back['loop_clear']
else:
fun_to_call = self.send_back['cue_clear']
fun_to_call('', i)
def press_pad(self, i=-1):
if self.deleting_mode:
self.delet_pad(i)
else:
self.activate_pad(i)
    def press_pad_qp(self, i=-1):
        if self.deleting_mode:
            self.send_back['cue_clear']('', i)
        else:
            self.send_back['cue_set']('', i)
    def press_pad_lp(self, i=-1):
        if self.deleting_mode:
            self.send_back['loop_clear']('', i)
        else:
            self.send_back['loop_select']('', i)
def toggle_pad_mode(self, i=-1):
self.deleting_mode = not self.deleting_mode
# maybe update gui somehow
self.update_loop(self.cur_clip)
# update dispatch
def update_directly(self, what, n=-1):
if what in self.direct_updates:
self.direct_updates[what](n)
def update_cur_pos(self, pos):
self.cur_pos = pos
self.progressbar.pbar_pos = pos
def update_clip(self, clip):
self.cur_clip = clip
if clip is None:
self.name_var.set("------")
self.update_clip_params(clip)
self.name_label.unbind("<Double-Button-1>")
return
self.update_name(clip.name)
self.update_clip_params(clip)
self.name_label.bind("<Double-Button-1>", self.change_name_dialog)
def update_clip_params(self, clip, param=None):
# print('updating', param)
if param in self.param_dispatch:
self.param_dispatch[param](clip)
elif param is None:
for fun in self.param_dispatch.values():
fun(clip)
def update_name(self, new_name):
new_text = new_name
text_meas = []
for c in new_text:
if c in C.FONT_WIDTHS:
text_meas.append(C.FONT_WIDTHS[c])
else:
text_meas.append(C.FONT_AVG_WIDTH)
cumm_text_meas = list(accumulate(text_meas))
if cumm_text_meas[-1] > self.width - 25:
to_i = bisect.bisect_left(cumm_text_meas, self.width - 25 - 5 * C.FONT_WIDTHS['.'])
new_text = new_text[:to_i].strip() + ".."
self.name_var.set(new_text)
def update_speed(self, clip):
if clip is None:
spd = 0.0
else:
spd = clip.params['playback_speed']
self.speed_var.set(spd)
def update_sens(self, clip):
if clip is None:
sens = 0.0
else:
sens = clip.params['control_sens']
self.sens_var.set(sens)
def update_cues(self, clip):
if clip is None:
for i in range(C.NO_Q):
but = self.pad_buts[i]
but.config(state='disabled', relief='groove', background='')
# unbind
but.unbind("<ButtonPress-1>")
but.unbind("<ButtonPress-3>")
self.progressbar.draw_cue_points()
return
cp = clip.params['cue_points']
self.cue_but_states = [cp[i] is not None for i in range(C.NO_Q)]
self.progressbar.draw_cue_points(cp, self.cue_but_states)
self.pad_reconfigure()
def update_loop(self, clip=None):
self.update_loop_on(clip)
self.update_loop_type(clip)
lp = None
selected_ind = '-'
i = -1
if clip is not None:
ls = clip.params['loop_selection']
if ls >= 0:
selected_ind = str(ls)
i = ls
lp = clip.params['loop_points']
self.loop_but_states = [(lp[i] is not None) and (None not in lp[i][:2]) for i in range(C.NO_LP)]
else:
self.loop_but_states = [False for i in range(C.NO_LP)]
if self.deleting_mode:
self.loop_selected_text_var.set('DELETE')
self.lp_selected_label.config(background=C.CURRENT_THEME.delete_bg)
else:
self.loop_selected_text_var.set('selected [{}]'.format(selected_ind))
if i >= 0:
self.lp_selected_label.config(background=self.pad_colors[i % 4][i // 4])
else:
self.lp_selected_label.config(background='')
self.lp_data = lp
self.pad_reconfigure()
def update_loop_on(self, clip=None):
if clip is None:
loop_state = False
else:
loop_state = clip.params['loop_on']
if self.loop_on_toggle is not None:
self.loop_on_toggle.update(loop_state)
x1, x2 = 0, 1
if loop_state:
ls = clip.params['loop_selection']
if (0 <= ls < C.NO_LP):
lp = clip.params['loop_points']
check = lp[ls]
if check is not None:
if check[0] is None:
check[0] = 0
if check[1] is None:
check[1] = 1
x1, x2 = check[0], check[1]
self.progressbar.draw_loop_boundaries(x1, x2)
def update_loop_type(self, clip=None):
loop_data = self.backend.loop_get(self.layer)
if loop_data is None:
loop_type = False
else:
loop_type = (loop_data[1][2] == 'b')
if self.loop_type_switch is not None:
self.loop_type_switch.update(loop_type)
def pad_reconfigure(self, *args):
if self.cur_clip is None:
return
if (self.qp_lp_var.get()): # if lp selected
from_what = self.loop_but_states
self.progressbar.draw_loop_bars(self.lp_data, from_what)
self.progressbar.exit_cue_mode_binds()
else:
from_what = self.cue_but_states
self.progressbar.draw_loop_bars()
self.progressbar.cue_mode_only_binds()
for i, yn in enumerate(from_what):
but = self.pad_buts[i]
but.config(state='active')
but.bind("<ButtonPress-1>", self.pad_but_cmds[i][0])
but.bind("<ButtonPress-3>", self.pad_but_cmds[i][1])
if yn:
r, c = i // 4, i % 4
but.config(background=self.pad_colors[c][r])
but.config(relief='raised')
else:
but.config(background='')
but.config(relief='flat')
def resize(self, new_width, minus_controls=False):
if minus_controls:
new_width -= self.bottom_right_frame.winfo_width()
self.width = new_width
pad_padding = '{0} 15 {0} 15'.format(new_width // 8 - 4)
for but in self.pad_buts:
but.config(padding=pad_padding)
self.progressbar.resize(new_width)
self.update_cues(self.cur_clip)
self.update_loop(self.cur_clip)
# tk setup
def format_name(self, wname, i=-1):
if i > -1:
wname = wname.format(i)
wname += '_l{}'.format(self.layer)
return wname
def setup_main_tk(self):
self.root_frame = ttk.Frame(self.root, padding='5 1 5 2')
self.root_frame.dnd_accept = self.dnd_accept # for dnd
self.info_frame = ttk.Frame(self.root_frame, relief='ridge', padding='2')
self.name_label = ttk.Label(self.info_frame, textvariable=self.name_var,
anchor='center', padding='0 1 0 2')
left_frame_padding = '2 0 5 0'
self.top_frame = ttk.Frame(self.root_frame)
self.progress_frame = ttk.Frame(self.top_frame, padding=left_frame_padding)
self.top_right_frame = ttk.Frame(self.top_frame)
self.bottom_frame = ttk.Frame(self.root_frame)
self.pad_but_frame = ttk.Frame(self.bottom_frame, padding=left_frame_padding)
self.bottom_right_frame = ttk.Frame(self.bottom_frame)
# pack it up
self.root_frame.pack(fill=tk.X, expand=True)
self.info_frame.pack(side=tk.TOP, fill=tk.X, expand=True)
self.name_label.pack(expand=True, pady=2, fill=tk.X)
self.top_frame.pack(side=tk.TOP)
self.progress_frame.pack(side=tk.LEFT)
self.top_right_frame.pack(side=tk.LEFT)
self.bottom_frame.pack(side=tk.TOP)
self.pad_but_frame.pack(side=tk.LEFT)
self.bottom_right_frame.pack(side=tk.LEFT)
# control areas
self.setup_control_frame_top()
self.setup_control_frame_bottom()
# progressbar
self.setup_progressbar()
# pads
self.setup_pads()
def setup_control_frame_top(self):
self.control_bottom_frame = ttk.Frame(self.top_right_frame)
control_slice_pads = '6 0 6 2'
self.control_sens_frame = ttk.Frame(self.control_bottom_frame, padding=control_slice_pads,
name=self.format_name('ctrl_sens'))
self.control_spd_frame = ttk.Frame(self.control_bottom_frame, padding=control_slice_pads,
name=self.format_name('pb_speed'))
self.control_bottom_frame.pack(side=tk.TOP, anchor='w')
self.control_sens_frame.grid(row=1, column=1, rowspan=4)
self.control_spd_frame.grid(row=1, column=2, rowspan=4)
# ctrl buts
ctrl_but_pad = '12 1 12 1'
playbut = ttk.Button(self.control_bottom_frame, text=">", width=2, padding=ctrl_but_pad, takefocus=False,
command=self.gen_send_cmd('play'), name=self.format_name('pb_play'))
pausebut = ttk.Button(self.control_bottom_frame, text="||", width=2, padding=ctrl_but_pad, takefocus=False,
command=self.gen_send_cmd('pause'), name=self.format_name('pb_pause'))
rvrsbut = ttk.Button(self.control_bottom_frame, text="<", width=2, padding=ctrl_but_pad, takefocus=False,
command=self.gen_send_cmd('reverse'), name=self.format_name('pb_reverse'))
clearbut = ttk.Button(self.control_bottom_frame, text="X", width=2, padding=ctrl_but_pad, takefocus=False,
command=self.gen_send_cmd('clear'), name=self.format_name('pb_clear'))
for i, but in enumerate([rvrsbut, pausebut, playbut, clearbut]):
but.grid(row=0, column=i, pady=2, padx=2)
# zoom buts
def update_zoom_follow(*args):
self.progressbar.auto_scroll = self.zoom_follow_var.get()
self.zoom_follow_var.trace('w', update_zoom_follow)
zoom_in_but = ttk.Button(self.control_bottom_frame, text="+", width=1, takefocus=False,
command=lambda: self.progressbar.adjust_zoom(1.25))
zoom_out_but = ttk.Button(self.control_bottom_frame, text="-", width=1, takefocus=False,
command=lambda: self.progressbar.adjust_zoom(.75))
zoom_reset_but = ttk.Button(self.control_bottom_frame, text="o", width=1, takefocus=False,
command=lambda: self.progressbar.reset_zoom())
zoom_follow_cb = ttk.Checkbutton(self.control_bottom_frame, width=0,
variable=self.zoom_follow_var, takefocus=False)
self.zoom_control_buts = [zoom_in_but, zoom_out_but, zoom_reset_but, zoom_follow_cb]
for i, zcb in enumerate(self.zoom_control_buts):
zcb.grid(row=(i + 1), column=0, sticky='w')
spd_sens_vars = [(self.sens_var, self.xtra_sens_var), (self.speed_var, self.xtra_speed_var)]
def gen_update_trace(v1, v2):
var1, var2 = v1, v2
def curry_trace(*args):
get_got = var1.get()
new_txt = '{:01.2f}'.format(get_got)
new_txt = new_txt[:4]
var2.set(new_txt)
return curry_trace
def testVal(test_inp):
if len(test_inp) > 4:
return False
elif len(test_inp) == 0:
return True
try:
float(test_inp)
return True
except:
return False
for svp in spd_sens_vars:
t_fun = gen_update_trace(*svp)
svp[0].trace('w', t_fun)
def setup_slider(frame, text, var1, var2, style):
label = ttk.Label(frame, text=text, width=4, relief='groove', borderwidth=2)
scale = ttk.Scale(frame, from_=10.0, to=0.0, variable=var1,
orient=tk.VERTICAL, length=66, style=style)
scale.bind("<MouseWheel>", lambda e: var1.set(var1.get() + (e.delta / (120 / 0.1))))
val_entry = ttk.Entry(frame, textvariable=var2, width=4,
validate="key")
val_entry['validatecommand'] = (val_entry.register(testVal), '%P')
val_entry.bind('<Return>', lambda e: var1.set(var2.get()))
val_entry.bind('<Up>', lambda e: var1.set(min(var1.get() + 0.05, 10)))
val_entry.bind('<Down>', lambda e: var1.set(max(var1.get() - 0.05, 0)))
label.grid(row=1, column=0)
scale.grid(row=2, column=0, ipady=2)
val_entry.grid(row=3, column=0)
# dont want ultra thicc handles
s = ttk.Style()
ss = 'Poop.Vertical.TScale'
s.configure(ss, sliderlength='17.5')
setup_slider(self.control_sens_frame, 'sens', *spd_sens_vars[0], ss)
setup_slider(self.control_spd_frame, 'spd', *spd_sens_vars[1], ss)
# spd buts
double_but = ttk.Button(self.control_bottom_frame, text="* 2", width=3, takefocus=False,
command=lambda: self.speed_var.set(min(10, 2 * self.speed_var.get())),
name=self.format_name('pb_spd_double'))
half_but = ttk.Button(self.control_bottom_frame, text="/ 2", width=3, takefocus=False,
command=lambda: self.speed_var.set(0.5 * self.speed_var.get()),
name=self.format_name('pb_spd_halve'))
double_but.grid(row=2, column=3)
half_but.grid(row=3, column=3)
def setup_control_frame_bottom(self):
self.qp_lp_switch = SwitchButton(self.bottom_right_frame, 'QP', 'LP', padding='2',
name=self.format_name('qp_lp_toggle'))
self.qp_lp_var = self.qp_lp_switch.bool_var
self.qp_lp_var.trace('w', self.pad_reconfigure)
self.direct_updates['qp_lp_toggle'] = self.qp_lp_switch.manual_toggle
self.qp_lp_switch.but_1.grid(row=0, column=0, sticky='we')
self.qp_lp_switch.but_2.grid(row=0, column=1, sticky='we')
self.lp_selected_label = ttk.Label(self.bottom_right_frame, textvariable=self.loop_selected_text_var,
relief='groove', padding='2 4 2 4', anchor='center', borderwidth=2,
name=self.format_name('pb_delet_toggle'))
self.lp_selected_label.grid(row=0, column=3, columnspan=2, sticky='we')
self.loop_selected_text_var.set('selected [-]')
self.loop_on_toggle = ToggleButton(self.bottom_right_frame, 'loop on', 7,
padding='20 4 20 4', name=self.format_name('pb_lp_on_off'))
self.loop_on_var = self.loop_on_toggle.bool_var
self.loop_on_toggle.send_cmd = self.gen_toggle_cmd('loop_on_off')
self.loop_on_toggle.but.grid(row=2, column=0, columnspan=2, sticky='we', pady=2)
ttk.Frame(self.bottom_right_frame, style='fakesep.TFrame', width=1).grid(row=1, column=0, columnspan=5, sticky='we', pady=4)
ttk.Frame(self.bottom_right_frame, style='fakesep.TFrame', width=1).grid(row=2, column=2, rowspan=2, sticky='ns', padx=2)
self.loop_type_switch = SwitchButton(self.bottom_right_frame, 'dflt', 'bnce',
padding='2 4 2 4', name=self.format_name('pb_lp_type'))
self.loop_type_var = self.loop_type_switch.bool_var
self.loop_type_switch.send_cmd = self.gen_toggle_cmd('loop_type', ['d', 'b'])
self.loop_type_switch.but_1.grid(row=2, column=3, sticky='we')
self.loop_type_switch.but_2.grid(row=2, column=4, sticky='we')
loop_but_pad = '10 4 10 4'
loop_in_but = ttk.Button(self.bottom_right_frame, text="in", width=3, padding=loop_but_pad, takefocus=False,
command=self.gen_send_cmd('loop_set_a_cur'), name=self.format_name('pb_lp_set_a'))
loop_out_but = ttk.Button(self.bottom_right_frame, text="out", width=3, padding=loop_but_pad, takefocus=False,
command=self.gen_send_cmd('loop_set_b_cur'), name=self.format_name('pb_lp_set_b'))
loop_prev_but = ttk.Button(self.bottom_right_frame, text="\\/", width=2, padding=loop_but_pad, takefocus=False,
command=self.gen_send_cmd('loop_move', -1), name=self.format_name('pb_lp_select_next'))
loop_next_but = ttk.Button(self.bottom_right_frame, text="/\\", width=2, padding=loop_but_pad, takefocus=False,
command=self.gen_send_cmd('loop_move', +1), name=self.format_name('pb_lp_select_prev'))
for i, lpb in enumerate([loop_in_but, loop_out_but, loop_prev_but, loop_next_but]):
c = i + (i > 1)
lpb.grid(row=3, column=c, sticky='we')
def setup_progressbar(self):
self.progressbar = ProgressBar(self.progress_frame, self, self.width, 85)
self.progressbar.send_funs['seek'] = self.send_back['seek']
self.progressbar.send_funs['cue'] = self.send_back['cue_set']
self.progressbar.send_funs['loop_set_a'] = self.send_back['loop_set_a']
self.progressbar.send_funs['loop_set_b'] = self.send_back['loop_set_b']
self.progressbar.send_funs['loop_set_a_cur'] = self.send_back['loop_set_a_cur']
self.progressbar.send_funs['loop_set_b_cur'] = self.send_back['loop_set_b_cur']
self.progressbar.send_funs['loop_select'] = self.send_back['loop_select']
def set_cue(i, pos):
self.backend.set_cue_point(self.layer, i, pos)
self.progressbar.send_funs['set_cue'] = set_cue
# scroll scratch
for k in ['scratch_start', 'scratch_do', 'scratch_stop']:
self.progressbar.send_funs[k] = self.send_back[k]
# colors
self.progressbar.colors['loop_bars'] = self.pad_colors
self.progressbar.setup_after_color_set()
def setup_pads(self):
pad_x = self.width // 8 - 4
pad_str = '{0} 15 {0} 15'.format(pad_x)
def gen_but_funs(no):
i = no
def activate(*args):
self.activate_pad(i)
def deactivate(*args):
self.delet_pad(i)
return [activate, deactivate]
for r in range(self.no_pad_rows):
for c in range(4):
i = r * 4 + c
but = ttk.Label(self.pad_but_frame, text=str(i), borderwidth=4,
padding=pad_str, relief='flat', name=self.format_name('pb_pad_{}', i))
but.grid(row=r, column=c)
but.config(state='disabled')
but.bind("<ButtonPress-1>", lambda e: print(e))
self.pad_buts.append(but)
self.cue_but_states.append(False)
self.loop_but_states.append(False)
self.pad_but_cmds.append(gen_but_funs(i))
def change_name_dialog(self, *args):
cur_clip = self.backend.clip_storage.current_clips[self.layer]
if cur_clip is None:
return
new_name = tksimpledialog.askstring("rename clip", '', initialvalue=cur_clip.name)
if new_name:
# change name
self.backend.rename_clip(cur_clip, new_name) # have to do this to update search properly etc
# tkdnd stuff
def dnd_accept(self, source, event):
return self
def dnd_enter(self, source, event):
pass
def dnd_motion(self, source, event):
pass
def dnd_leave(self, source, event):
pass
def dnd_commit(self, source, event):
if source.clip is None:
return
self.backend.select_clip(source.clip, self.layer)
def dnd_end(self, target, event):
pass
class SwitchButton:
def __init__(self, frame, text1, text2, min_width=5, padding=None, name=None):
self.bool_var = tk.BooleanVar()
self.bool_var.set(False)
self.send_cmd = None
if name is not None:
self.but_1 = ttk.Label(frame, text=text1, borderwidth=4, name=name,
width=min_width, anchor='e', style='fakebut.TLabel')
else:
self.but_1 = ttk.Label(frame, text=text1, borderwidth=4,
width=min_width, anchor='e', style='fakebut.TLabel')
self.but_1.bind('<Button-1>', lambda e: self.switch(False))
self.but_2 = ttk.Label(frame, text=text2, borderwidth=4,
width=min_width, style='fakebut.TLabel')
self.but_2.bind('<Button-1>', lambda e: self.switch(True))
if padding is not None:
self.but_1.config(padding=padding)
self.but_2.config(padding=padding)
self.switch(False)
def switch(self, new_val):
if self.send_cmd is not None:
self.send_cmd(new_val)
else:
self.update(new_val)
def manual_toggle(self, *args):
self.switch(not bool(self.bool_var.get()))
def update(self, new_val):
self.bool_var.set(new_val)
if (new_val):
# button 2 now
self.but_2.config(relief='sunken', state='disabled')
self.but_1.config(relief='raised', state='')
else:
self.but_1.config(relief='sunken', state='disabled')
self.but_2.config(relief='raised', state='')
class ToggleButton:
def __init__(self, frame, text, min_width=5, padding=None, name=None):
self.bool_var = tk.BooleanVar()
if name is not None:
self.but = ttk.Label(frame, text=text, borderwidth=4, width=min_width,
style='fakebut.TLabel', name=name)
else:
self.but = ttk.Label(frame, text=text, borderwidth=4, width=min_width, style='fakebut.TLabel')
self.but.bind('<Button-1>', self.toggle)
self.send_cmd = None
if padding is not None:
self.but.config(padding=padding)
def toggle(self, *args):
self.switch((not self.bool_var.get()))
def switch(self, new_val):
if self.send_cmd is not None:
self.send_cmd(new_val)
else:
self.update(new_val)
def update(self, new_val):
self.bool_var.set(new_val)
if (new_val):
self.but.config(relief='sunken', state='disabled')
else:
self.but.config(relief='raised', state='')
class ProgressBar:
def __init__(self, root, parent, width=300, height=33):
self.width, self.height = width, height
self._drag_data = {'x': 0, 'y': 0, 'item': None, 'label': [], 'type': None}
self.parent = parent
self.colors = {
'bg': 'black',
'bottom_bar': '#aaa',
'pbar': 'gray',
'loop_range': '#333',
'loop_bar': '#666'
}
self.pbar_pos = 0
self.zoom_factor = 1.0
self.total_width = width
self.auto_scroll = False
self.currently_scratching = False
self.scratch_job = None
self.refresh_interval = 100
# for cue points
self.qp_lines = [None] * C.NO_Q
self.qp_labels = [None] * C.NO_Q
# for loops
self.loop_bars = [None] * C.NO_LP
self.loop_bars_data = [None] * C.NO_LP
self.loop_labels = [None] * C.NO_LP
# fun dispatch
self.send_funs = {}
# tk stuff
self.root = root
self.frame = ttk.Frame(self.root)
self.canvas_frame = ttk.Frame(self.frame)
self.canvas = tk.Canvas(self.canvas_frame, width=width, height=height + 15,
bg=self.colors['bg'], scrollregion=(0, 0, width, height),
name=self.parent.format_name('ctrl_bar'))
self.hbar = ttk.Scrollbar(self.canvas_frame, orient=tk.HORIZONTAL)
self.hbar.config(command=self.canvas.xview)
self.canvas.config(xscrollcommand=self.hbar.set)
self.canvas.pack(anchor=tk.W)
self.canvas_frame.pack(anchor=tk.W, side=tk.LEFT, expand=tk.YES, fill=tk.BOTH)
self.hbar.pack(anchor=tk.W, side=tk.BOTTOM, expand=tk.YES, fill=tk.BOTH)
self.frame.pack(anchor=tk.W, side=tk.TOP, expand=tk.YES, fill=tk.BOTH)
self.setup_canvas()
self.root.after(self.refresh_interval, self.update_pbar)
# # # setup
def setup_canvas(self):
w, h = self.width, self.height
self.canvasbg = self.canvas.create_rectangle(0, 0, w, h,
fill=self.colors['bg'], tag='bg')
self.bottombg = self.canvas.create_rectangle(0, h, w, h + 15,
fill=self.colors['bottom_bar'])
def setup_after_color_set(self):
for i in range(C.NO_LP):
r, c = i // 4, i % 4
self.loop_bars[i] = self.canvas.create_rectangle(0, 0, 0, 0,
fill=self.colors['loop_bars'][c][r], tag='loop_bar')
# activefill=self.colors['?'])
self.pbar = self.canvas.create_line(0, 0, 0, self.height, fill=self.colors['pbar'], width=3)
self.outside_loop_rect_l = self.canvas.create_rectangle(0, 0, 0, 0,
fill=self.colors['loop_range'], stipple='gray50',
tag='loop_limit')
self.outside_loop_rect_r = self.canvas.create_rectangle(0, 0, 0, 0,
fill=self.colors['loop_range'], stipple='gray50',
tag='loop_limit')
self.actions_binding()
def cue_mode_only_binds(self):
self.canvas.tag_bind("loop_limit", "<B1-Motion>", self.find_mouse)
self.canvas.tag_bind("loop_limit", "<ButtonRelease-1>", self.find_mouse)
def exit_cue_mode_binds(self):
self.canvas.tag_unbind("loop_limit", "<B1-Motion>")
self.canvas.tag_unbind("loop_limit", "<ButtonRelease-1>")
def actions_binding(self):
# seeking
self.canvas.tag_bind("bg", "<B1-Motion>", self.find_mouse)
self.canvas.tag_bind("bg", "<ButtonRelease-1>", self.find_mouse)
# cue points
self.canvas.tag_bind("qp_line", "<ButtonPress-1>", self.find_nearest)
self.canvas.tag_bind("qp_label", "<ButtonPress-1>", self.find_nearest)
self.canvas.tag_bind("qp_line", "<B1-Motion>", self.find_mouse)
self.canvas.tag_bind("qp_line", "<ButtonPress-3>", self.drag_begin)
self.canvas.tag_bind("qp_line", "<ButtonRelease-3>", self.drag_end)
self.canvas.tag_bind("qp_line", "<B3-Motion>", self.drag)
# looping
self.canvas.tag_bind("loop_bar", "<ButtonPress-1>", self.find_nearest_loop)
# self.canvas.tag_bind("loop_label", "<ButtonPress-1>", self.find_nearest_loop)
self.canvas.bind("<Shift-ButtonPress-2>", self.loop_set_a_cur)
self.canvas.bind("<Control-ButtonPress-2>", self.loop_set_b_cur)
self.canvas.tag_bind("loop_limit", "<ButtonPress-3>", self.drag_begin)
self.canvas.tag_bind("loop_limit", "<ButtonRelease-3>", self.drag_end)
self.canvas.tag_bind("loop_limit", "<B3-Motion>", self.drag)
# scratching
self.canvas.bind("<MouseWheel>", self.scroll_scratch)
# # # draw helpers
# the actual bar
def move_bar(self, x):
new_x = self.total_width * x
self.canvas.coords(self.pbar, new_x, 0, new_x, self.height)
def update_pbar(self):
self.move_bar(self.pbar_pos)
# check if need to scroll
if self.auto_scroll:
csp = self.hbar.get()
if self.pbar_pos < csp[0]:
self.canvas.xview('moveto', max(0, self.pbar_pos - (csp[1] - csp[0])))
elif self.pbar_pos > csp[1]:
self.canvas.xview('moveto', self.pbar_pos)
self.root.after(self.refresh_interval, self.update_pbar)
# cue points
def draw_cue_points(self, qp_data=None, qp_on_off=None):
if qp_data is None:
for i in range(C.NO_Q):
self.remove_qp(i)
else:
for i, qp in enumerate(qp_data[:len(qp_on_off)]):
if qp_on_off[i]:
self.add_qp(qp, i)
else:
self.remove_qp(i)
def add_qp(self, x_pos, i):
x_coord = x_pos * self.total_width
if self.qp_lines[i] is None:
r, c = i // 4, i % 4
self.qp_lines[i] = self.canvas.create_line(x_coord, 0, x_coord, self.height,
activefill='white', fill='#ccc',
width=3, dash=(4, ), tags='qp_line')
labelbox = self.canvas.create_rectangle(x_coord, self.height, x_coord + 15,
self.height + 15, tags='qp_label',
fill=self.colors['loop_bars'][c][r])
labeltext = self.canvas.create_text(x_coord, self.height + 14, anchor=tk.SW,
text=" {}".format(i), fill='black',
activefill='white', justify='center',
tags='qp_label')
self.qp_labels[i] = [labelbox, labeltext]
else: # if qp already exists, move its things
self.canvas.coords(self.qp_lines[i], x_coord, 0, x_coord, self.height)
self.canvas.coords(self.qp_labels[i][0], x_coord, self.height,
x_coord + 15, self.height + 15)
self.canvas.coords(self.qp_labels[i][1], x_coord, self.height + 14)
def remove_qp(self, i):
if self.qp_lines[i] is None:
return
self.canvas.delete(self.qp_lines[i])
self.qp_lines[i] = None
if self.qp_labels[i]:
for label_item in self.qp_labels[i]:
self.canvas.delete(label_item)
self.qp_labels[i] = None
# loop points
def draw_loop_boundaries(self, x1, x2):
x1, x2 = x1 * self.total_width, x2 * self.total_width
self.canvas.coords(self.outside_loop_rect_l, 0, 0, x1, self.height)
self.canvas.coords(self.outside_loop_rect_r, x2, 0, self.total_width, self.height)
def draw_loop_bars(self, lp_data=None, lp_on_off=None):
if lp_data is None:
for i in range(C.NO_LP):
self.remove_lp(i)
return
for i, lpd in enumerate(lp_data):
if lp_on_off[i]:
self.add_lp(i, lpd)
else:
self.remove_lp(i)
top_height, nei = self.do_loop_gravity()
dy = self.height / top_height
for i in nei:
lpd = self.loop_bars_data[i]
self.canvas.coords(self.loop_bars[i],
lpd[0], dy * lpd[1],
lpd[2], dy * lpd[3])
def add_lp(self, i, lpd):
if None in lpd[:3]:
self.remove_lp(i)
return
x1, x2 = lpd[0] * self.total_width, lpd[1] * self.total_width
# lpd[2] is loop type .. maybe alternative bar config for this?
# dy = self.height / C.NO_LP
# y1 = i * dy
# y2 = y1 + dy
self.loop_bars_data[i] = [x1, 0, x2, 1]
# self.canvas.coords(self.loop_bars[i], x1, y1, x2, y2)
def remove_lp(self, i):
self.loop_bars_data[i] = None
if self.loop_bars[i] is None:
return
self.canvas.coords(self.loop_bars[i], 0, 0, 0, 0)
if self.loop_labels[i] is not None:
self.canvas.coords(self.loop_labels[i], 0, 0, 0, 0)
def do_loop_gravity(self):
lbbd = self.loop_bars_data
# non empty indices
nei = [i for i, lpd in enumerate(lbbd) if lpd is not None]
def check_intersect(i1, i2):
b0, b1 = lbbd[i1][0], lbbd[i1][2]
c0, c1 = lbbd[i2][0], lbbd[i2][2]
if c0 < b0:
return c1 > b0
else:
return c0 < b1
new_y1 = 0
for j in range(1, len(nei)):
# check any of the below loop ranges for intersect
# next bar must go on top of it
intersect_heights = [lbbd[nei[k]][3]
if check_intersect(nei[k], nei[j])
else 0
for k in range(0, j)]
new_y1 = max(intersect_heights)
lbbd[nei[j]][1] = new_y1
lbbd[nei[j]][3] = new_y1 + 1
# return the max height & nei
return (new_y1 + 1, nei)
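    # Illustrative example (loop positions written as fractions of total_width for clarity):
    # with loops spanning [0.0, 0.4], [0.3, 0.6] and [0.5, 0.9], the second overlaps the first
    # so it is stacked on row 1, and the third overlaps only the second so it lands on row 2;
    # do_loop_gravity() would then return (3, [0, 1, 2]).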
# # # event actions
def resize(self, new_width):
self.width = new_width
self.total_width = new_width * self.zoom_factor
self.canvas.config(width=self.width, scrollregion=(0, 0, self.width, self.height))
self.canvas.coords(self.canvasbg, 0, 0, self.width, self.height)
self.canvas.coords(self.bottombg, 0, self.height, self.width, self.height + 15)
def adjust_zoom(self, by_factor):
new_factor = self.zoom_factor * by_factor
new_factor = max(1.0, new_factor)
actual_scale = new_factor / self.zoom_factor
self.canvas.scale(tk.ALL, 0, 0, actual_scale, 1)
self.total_width = new_factor * self.width
bbox = (0, 0, self.total_width, self.height)
self.canvas.configure(scrollregion=bbox)
self.zoom_factor = new_factor
def reset_zoom(self):
self.adjust_zoom(1.0 / self.zoom_factor)
def find_mouse(self, event):
# for progress bar to follow mouse
new_x = self.canvas.canvasx(event.x) / self.total_width
new_x = max(0, (min(new_x, 1)))
self.pbar_pos = new_x
self.move_bar(new_x)
if 'seek' in self.send_funs:
self.send_funs['seek']('', new_x)
# loop funs
def loop_set_a_cur(self, event):
if 'loop_set_a_cur' in self.send_funs:
self.send_funs['loop_set_a_cur']('', True)
def loop_set_b_cur(self, event):
if 'loop_set_b_cur' in self.send_funs:
self.send_funs['loop_set_b_cur']('', True)
# scratching
def scroll_scratch(self, event):
if not self.currently_scratching:
self.currently_scratching = True
self.send_funs['scratch_start']('', True)
dt = event.delta / 12
self.send_funs['scratch_do']('', dt)
if self.scratch_job is not None:
self.root.after_cancel(self.scratch_job)
self.scratch_job = self.root.after(25, self.stop_scratch)
def stop_scratch(self):
self.scratch_job = None
self.currently_scratching = False
self.send_funs['scratch_stop']('', True)
# drag n drop
def drag_begin(self, event):
# record the item, its location, any associated labels and what type it is
item = self.canvas.find_closest(self.canvas.canvasx(event.x), self.canvas.canvasy(event.y), halo=5)[0]
item_tags = self.canvas.gettags(item)
        if not ('qp_line' in item_tags or 'loop_limit' in item_tags):
return
self._drag_data["item"] = item
if 'qp_line' in item_tags:
self._drag_data['label'] = self.qp_labels[self.qp_lines.index(item)]
self._drag_data['type'] = 'qp'
else:
self._drag_data['type'] = 'll'
self._drag_data['x'] = event.x
def drag_end(self, event):
if self._drag_data['item'] is None:
return
# just to be safe
if 'set_cue' in self.send_funs: # don't do anything until we've set our funs
new_x = self.canvas.canvasx(event.x)
if new_x < 0:
new_x = 0
elif new_x > self.total_width:
new_x = self.total_width - 2
send_x = new_x / self.total_width
if self._drag_data['type'] == 'qp':
i = self.qp_lines.index(self._drag_data["item"])
self.send_funs['set_cue'](i, send_x)
elif self._drag_data['type'] == 'll':
if self._drag_data['item'] == self.outside_loop_rect_l:
self.send_funs['loop_set_a']('', send_x)
else:
self.send_funs['loop_set_b']('', send_x)
# reset the drag information
self._drag_data['item'] = None
self._drag_data['label'] = []
self._drag_data['type'] = None
self._drag_data['x'] = 0
def drag(self, event):
# move the object the appropriate amount
if self._drag_data['item']:
if self._drag_data['type'] == 'qp':
# compute how much this object has moved
delta_x = event.x - self._drag_data['x']
coord = self.canvas.coords(self._drag_data['item'])
if coord[0] + delta_x < 0:
delta_x = -coord[0]
elif coord[2] + delta_x > self.total_width:
delta_x = self.total_width - coord[2]
self.canvas.move(self._drag_data['item'], delta_x, 0) # delta_y)
for label_item in self._drag_data['label']:
self.canvas.move(label_item, delta_x, 0)
else:
if self._drag_data['item'] == self.outside_loop_rect_l:
self.canvas.coords(self.outside_loop_rect_l,
0, 0, event.x, self.height)
else:
self.canvas.coords(self.outside_loop_rect_r,
event.x, 0, self.total_width, self.height)
# record the new position
self._drag_data['x'] = event.x
def find_nearest(self, event):
if 'cue' not in self.send_funs:
return
item = self.canvas.find_closest(event.x, event.y, halo=5)[0]
if 'qp_label' in self.canvas.gettags(item):
item = self.canvas.find_closest(event.x - 10, event.y - 20, halo=5)[0]
if 'qp_line' in self.canvas.gettags(item):
i = self.qp_lines.index(item)
else:
return
self.send_funs['cue']('', i)
def find_nearest_loop(self, event):
if 'loop_select' not in self.send_funs:
return
item = self.canvas.find_closest(event.x, event.y, halo=5)[0]
if item in self.loop_bars:
i = self.loop_bars.index(item)
elif item in self.loop_labels:
i = self.loop_labels.index(item)
else:
return
self.send_funs['loop_select']('', i)
if __name__ == '__main__':
rootwin = tk.Tk()
ttk.Style().theme_use('clam')
rootwin.title('test_cc')
# class FakeBackend:
# def __init__(self):
# pass
# def return_fun(self, *args, **kwds):
# pass
# def __getattr__(self, *args, **kwds):
# tor_str = 'call: {}'.format(args[0])
# if len(args) > 1:
# tor_str += ' args: [{}]'.format(','.join(args[1:]))
# if len(kwds) > 0:
# tor_str += ' kwds: {}'.format(kwds)
# print(tor_str)
# return self.return_fun
# fake_backend = FakeBackend()
class FakeGUI:
def __init__(self, backend, root):
self.root = root
self.backend = backend
self.clip_controls = [None] * C.NO_LAYERS
C.themer.setup(C.CURRENT_THEME, self.root)
def update_clip(self, layer, clip):
# print('update?')
cc = self.clip_controls[layer]
if cc is not None:
cc.update_clip(clip)
def update_clip_params(self, layer, clip, param):
# dispatch things according to param
cc = self.clip_controls[layer]
if cc is not None:
cc.update_clip_params(clip, param)
def update_cur_pos(self, layer, pos):
# pass along the current position
cc = self.clip_controls[layer]
if cc is not None:
cc.update_cur_pos(pos)
def update_search(self):
pass
def update_cols(self, what, ij=None):
pass
def update_clip_names(self):
for i in range(C.NO_LAYERS):
cc = self.clip_controls[i]
if cc is not None:
cc.update_name(self.backend.clip_storage.current_clips[i].name)
def restart(self):
for i in range(C.NO_LAYERS):
self.update_clip(i, self.backend.clip_storage.current_clips[i])
def quit(self, *args):
self.backend.stop()
self.root.destroy()
from sol import magi
test_backend = magi.Magi()
test_gui = FakeGUI(test_backend, rootwin)
test_backend.gui = test_gui
test_cc = ClipControl(rootwin, test_backend, 0)
test_gui.clip_controls[0] = test_cc
test_backend.start()
rootwin.protocol("WM_DELETE_WINDOW", test_gui.quit)
rootwin.bind("<Control-q>", test_gui.quit)
test_gui.restart()
rootwin.mainloop()
| pussinboot/sol | sol/gui/tk_gui/clip_control.py | Python | mit | 47,930 |
from sklearn.cluster import MiniBatchKMeans, KMeans
from scikits.talkbox.features import mfcc
import numpy as np
def mfcc_atomic(x, fs, nwin=256, nfft=512, nceps=10, drop=1):
return mfcc(x, nwin, nfft, fs, nceps)[0][:, drop:]
def stack_double_deltas(x):
'''Stacks x on top of the various derivatives of x'''
z = np.diff(x, axis=0)
z2 = np.diff(z, axis=0)
return np.hstack([x[:-2, ...], z[:-1, ...], z2])
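# For a (T, D) input this returns a (T - 2, 3 * D) array: the frames (truncated to align)
# stacked column-wise with their first and second differences.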
def low_energy_filter(x, q, axis=1):
'''Returns a view of x where each elements magnitude is greater than
the q-th percentile.
Within this context, I'm trying to filter out the 'quiet' segments of
the recording, hopefully getting more signal coming from speech rather than
rest'''
norms = np.linalg.norm(x, axis=axis)
p = np.percentile(norms, q)
return x[norms > p]
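# Illustrative example (assuming a (T, D) feature matrix): low_energy_filter(feats, q=10)
# keeps only the frames whose L2 norm exceeds the 10th-percentile norm, i.e. roughly the
# loudest 90% of frames.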
def norm_ordered_centroids(x, km_kwargs={}):
'''Return norm-ordered centroids'''
km = MiniBatchKMeans(**km_kwargs)
trained = km.fit(x)
centroids = trained.cluster_centers_
ind = np.argsort(np.linalg.norm(centroids, axis=1))
return centroids[ind]
def pop_ordered_centroids(x, km_kwargs={}):
'''Hack for now, hopefully it makes everything better...'''
km = KMeans(**km_kwargs)
trained = km.fit(x)
centroids = trained.cluster_centers_
popularity = np.bincount(trained.labels_)
ind = np.argsort(popularity)
return centroids[ind]
def filtered_mfcc_centroid(x, fs, filter_percentile=10,
kmeans_kwargs={'n_clusters': 30, 'max_iter': 1000,
'precompute_distances': True},
mfcc_kwargs={'nwin':256, 'nfft':512, 'nceps':14}):
'''Generates the centroids of the MFCC spectrogram of the signal x.
1. Collect MFCC coefficients based on mfcc_kwargs,
2. Calculate the deltas from MFCCs
3. Filter out vectors smaller than *filter_percentile*,
4. Normalise the MFCC coeffs (TODO: this),
5. Calculate norm-ordered centroids using KMeans'''
MFCCs = mfcc_atomic(x, fs, **mfcc_kwargs)
features = stack_double_deltas(MFCCs)
if filter_percentile != 0:
features = low_energy_filter(features, filter_percentile)
centroids = pop_ordered_centroids(features, km_kwargs=kmeans_kwargs)
return centroids
| athuras/attention | features.py | Python | gpl-2.0 | 2,232 |
#!/usr/bin/python3
import os
import json
import urllib.request
from config import *
try:
nodes_request = urllib.request.Request(nodes_json_url)
nodes_json_response = urllib.request.urlopen(nodes_request)
graph_request = urllib.request.Request(graph_json_url)
graph_json_response = urllib.request.urlopen(graph_request)
except urllib.error.URLError:
    print("URL Error occurred")
    raise SystemExit(1)
nodes_data = json.loads(nodes_json_response.read().decode())
graph_data = json.loads(graph_json_response.read().decode())
# modify nodes_data
nodes_buffer = {}
nodes_buffer['timestamp'] = nodes_data['timestamp']
nodes_buffer['version'] = nodes_data['version']
nodes_buffer['nodes'] = {}
# interesting macs:
interesting_nodes = []
# append each node
for mac in nodes_data['nodes']:
node = nodes_data['nodes'][mac]
try:
# TODO check wether the neccesity of the following if is a bugs result
if node['nodeinfo']['system'] != []:
if node['nodeinfo']['system']['site_code'] in site_codes:
interesting_nodes.append(node['nodeinfo']['network']['mac'])
nodes_buffer['nodes'][mac] = node
except KeyError:
pass
shortened_interesting = []
for each in interesting_nodes:
shortened_interesting.append(each.replace(":", ""))
# modify graph.json
graph_buffer = {}
graph_buffer['version'] = graph_data["version"]
graph_buffer['batadv'] = {}
graph_buffer['batadv']['directed'] = graph_data['batadv']['directed']
graph_buffer['batadv']['graph'] = graph_data['batadv']['graph']
graph_buffer['batadv']['nodes'] = []
graph_buffer['batadv']['links'] = []
graph_buffer['batadv']['multigraph'] = graph_data['batadv']['multigraph']
translate = []
for node in graph_data['batadv']['nodes']:
try:
if node['node_id'] in shortened_interesting:
graph_buffer['batadv']['nodes'].append(node)
translate.append(graph_data['batadv']['nodes'].index(node))
except KeyError:
# print("KeyError")
pass
for link in graph_data['batadv']['links']:
try:
if link['source'] in translate and link['target'] in translate:
link['source'] = translate.index(link['source'])
link['target'] = translate.index(link['target'])
graph_buffer['batadv']['links'].append(link)
except KeyError:
pass
# store the jsons
with open('nodes.json', 'w') as outfile:
json.dump(nodes_buffer, outfile)
with open('graph.json', 'w') as graph_file:
json.dump(graph_buffer, graph_file)
# print where the script has been executed and where the json files are
print(os.getcwd())
| freifunkh/mesh_branchfilter | mesh_branchfilter.py | Python | mit | 2,620 |
# Authors : Denis A. Engemann <denis.engemann@gmail.com>
# Alexandre Gramfort <alexandre.gramfort@telecom-paristech.fr>
#
# License : BSD 3-clause
from copy import deepcopy
import math
import numpy as np
from scipy import fftpack
# XXX explore cuda optimization at some point.
from ..io.pick import pick_types, pick_info
from ..utils import logger, verbose
from ..parallel import parallel_func, check_n_jobs
from .tfr import AverageTFR, _get_data
def _check_input_st(x_in, n_fft):
"""Aux function"""
# flatten to 2 D and memorize original shape
n_times = x_in.shape[-1]
def _is_power_of_two(n):
return not (n > 0 and ((n & (n - 1))))
if n_fft is None or (not _is_power_of_two(n_fft) and n_times > n_fft):
# Compute next power of 2
n_fft = 2 ** int(math.ceil(math.log(n_times, 2)))
elif n_fft < n_times:
raise ValueError("n_fft cannot be smaller than signal size. "
"Got %s < %s." % (n_fft, n_times))
    zero_pad = 0
if n_times < n_fft:
msg = ('The input signal is shorter ({0}) than "n_fft" ({1}). '
'Applying zero padding.').format(x_in.shape[-1], n_fft)
logger.warning(msg)
zero_pad = n_fft - n_times
pad_array = np.zeros(x_in.shape[:-1] + (zero_pad,), x_in.dtype)
x_in = np.concatenate((x_in, pad_array), axis=-1)
return x_in, n_fft, zero_pad
def _precompute_st_windows(n_samp, start_f, stop_f, sfreq, width):
"""Precompute stockwell gausian windows (in the freq domain)"""
tw = fftpack.fftfreq(n_samp, 1. / sfreq) / n_samp
tw = np.r_[tw[:1], tw[1:][::-1]]
    k = width  # 1 for the classical Stockwell transform
f_range = np.arange(start_f, stop_f, 1)
windows = np.empty((len(f_range), len(tw)), dtype=np.complex)
for i_f, f in enumerate(f_range):
if f == 0.:
window = np.ones(len(tw))
else:
window = ((f / (np.sqrt(2. * np.pi) * k)) *
np.exp(-0.5 * (1. / k ** 2.) * (f ** 2.) * tw ** 2.))
window /= window.sum() # normalisation
windows[i_f] = fftpack.fft(window)
return windows
def _st(x, start_f, windows):
"""Implementation based on Ali Moukadem Matlab code (only used in tests)"""
n_samp = x.shape[-1]
ST = np.empty(x.shape[:-1] + (len(windows), n_samp), dtype=np.complex)
# do the work
Fx = fftpack.fft(x)
XF = np.concatenate([Fx, Fx], axis=-1)
for i_f, window in enumerate(windows):
f = start_f + i_f
ST[..., i_f, :] = fftpack.ifft(XF[..., f:f + n_samp] * window)
return ST
def _st_power_itc(x, start_f, compute_itc, zero_pad, decim, W):
"""Aux function"""
n_samp = x.shape[-1]
n_out = (n_samp - zero_pad)
n_out = n_out // decim + bool(n_out % decim)
psd = np.empty((len(W), n_out))
itc = np.empty_like(psd) if compute_itc else None
X = fftpack.fft(x)
XX = np.concatenate([X, X], axis=-1)
for i_f, window in enumerate(W):
f = start_f + i_f
ST = fftpack.ifft(XX[:, f:f + n_samp] * window)
        # drop the zero-padded tail (if any) before decimating along time
        if zero_pad > 0:
            TFR = ST[:, :-zero_pad:decim]
        else:
            TFR = ST[:, ::decim]
TFR_abs = np.abs(TFR)
if compute_itc:
TFR /= TFR_abs
itc[i_f] = np.abs(np.mean(TFR, axis=0))
TFR_abs *= TFR_abs
psd[i_f] = np.mean(TFR_abs, axis=0)
return psd, itc
def _induced_power_stockwell(data, sfreq, fmin, fmax, n_fft=None, width=1.0,
decim=1, return_itc=False, n_jobs=1):
"""Computes power and intertrial coherence using Stockwell (S) transform
Parameters
----------
data : ndarray
The signal to transform. Any dimensionality supported as long
as the last dimension is time.
sfreq : float
The sampling frequency.
fmin : None, float
The minimum frequency to include. If None defaults to the minimum fft
frequency greater than zero.
fmax : None, float
The maximum frequency to include. If None defaults to the maximum fft.
n_fft : int | None
The length of the windows used for FFT. If None, it defaults to the
next power of 2 larger than the signal length.
width : float
The width of the Gaussian window. If < 1, increased temporal
resolution, if > 1, increased frequency resolution. Defaults to 1.
(classical S-Transform).
decim : int
The decimation factor on the time axis. To reduce memory usage.
return_itc : bool
Return intertrial coherence (ITC) as well as averaged power.
n_jobs : int
Number of parallel jobs to use.
Returns
-------
st_power : ndarray
The multitaper power of the Stockwell transformed data.
The last two dimensions are frequency and time.
itc : ndarray
The intertrial coherence. Only returned if return_itc is True.
freqs : ndarray
The frequencies.
References
----------
Stockwell, R. G. "Why use the S-transform." AMS Pseudo-differential
operators: Partial differential equations and time-frequency
analysis 52 (2007): 279-309.
Moukadem, A., Bouguila, Z., Abdeslam, D. O, and Dieterlen, A. Stockwell
transform optimization applied on the detection of split in heart
sounds (2014). Signal Processing Conference (EUSIPCO), 2013 Proceedings
of the 22nd European, pages 2015--2019.
Wheat, K., Cornelissen, P. L., Frost, S.J, and Peter C. Hansen (2010).
During Visual Word Recognition, Phonology Is Accessed
within 100 ms and May Be Mediated by a Speech Production
Code: Evidence from Magnetoencephalography. The Journal of
Neuroscience, 30 (15), 5229-5233.
K. A. Jones and B. Porjesz and D. Chorlian and M. Rangaswamy and C.
Kamarajan and A. Padmanabhapillai and A. Stimus and H. Begleiter
(2006). S-transform time-frequency analysis of P300 reveals deficits in
individuals diagnosed with alcoholism.
Clinical Neurophysiology 117 2128--2143
"""
n_epochs, n_channels = data.shape[:2]
n_out = data.shape[2] // decim + bool(data.shape[2] % decim)
data, n_fft_, zero_pad = _check_input_st(data, n_fft)
freqs = fftpack.fftfreq(n_fft_, 1. / sfreq)
if fmin is None:
fmin = freqs[freqs > 0][0]
if fmax is None:
fmax = freqs.max()
start_f = np.abs(freqs - fmin).argmin()
stop_f = np.abs(freqs - fmax).argmin()
freqs = freqs[start_f:stop_f]
W = _precompute_st_windows(data.shape[-1], start_f, stop_f, sfreq, width)
n_freq = stop_f - start_f
psd = np.empty((n_channels, n_freq, n_out))
itc = np.empty((n_channels, n_freq, n_out)) if return_itc else None
parallel, my_st, _ = parallel_func(_st_power_itc, n_jobs)
tfrs = parallel(my_st(data[:, c, :], start_f, return_itc, zero_pad,
decim, W)
for c in range(n_channels))
for c, (this_psd, this_itc) in enumerate(iter(tfrs)):
psd[c] = this_psd
if this_itc is not None:
itc[c] = this_itc
return psd, itc, freqs
@verbose
def tfr_stockwell(inst, fmin=None, fmax=None, n_fft=None,
width=1.0, decim=1, return_itc=False, n_jobs=1,
verbose=None):
"""Time-Frequency Representation (TFR) using Stockwell Transform
Parameters
----------
inst : Epochs | Evoked
The epochs or evoked object.
fmin : None, float
The minimum frequency to include. If None defaults to the minimum fft
frequency greater than zero.
fmax : None, float
The maximum frequency to include. If None defaults to the maximum fft.
n_fft : int | None
The length of the windows used for FFT. If None, it defaults to the
next power of 2 larger than the signal length.
width : float
The width of the Gaussian window. If < 1, increased temporal
resolution, if > 1, increased frequency resolution. Defaults to 1.
(classical S-Transform).
decim : int
The decimation factor on the time axis. To reduce memory usage.
return_itc : bool
Return intertrial coherence (ITC) as well as averaged power.
n_jobs : int
The number of jobs to run in parallel (over channels).
verbose : bool, str, int, or None
If not None, override default verbose level (see mne.verbose).
Returns
-------
power : AverageTFR
The averaged power.
itc : AverageTFR
The intertrial coherence. Only returned if return_itc is True.
See Also
--------
cwt : Compute time-frequency decomposition with user-provided wavelets
cwt_morlet, multitaper_psd
Notes
-----
.. versionadded:: 0.9.0
"""
# verbose dec is used b/c subfunctions are verbose
data = _get_data(inst, return_itc)
picks = pick_types(inst.info, meg=True, eeg=True)
info = pick_info(inst.info, picks)
data = data[:, picks, :]
n_jobs = check_n_jobs(n_jobs)
power, itc, freqs = _induced_power_stockwell(data,
sfreq=info['sfreq'],
fmin=fmin, fmax=fmax,
n_fft=n_fft,
width=width,
decim=decim,
return_itc=return_itc,
n_jobs=n_jobs)
times = inst.times[::decim].copy()
nave = len(data)
out = AverageTFR(info, power, times, freqs, nave, method='stockwell-power')
if return_itc:
out = (out, AverageTFR(deepcopy(info), itc, times.copy(),
freqs.copy(), nave, method='stockwell-itc'))
return out
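# A minimal usage sketch (assumption: `epochs` is an existing mne.Epochs object
# and the frequency band is illustrative):
#
#     power, itc = tfr_stockwell(epochs, fmin=6., fmax=30., decim=3,
#                                return_itc=True)
#     power.plot([0])
#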
| rajegannathan/grasp-lift-eeg-cat-dog-solution-updated | python-packages/mne-python-0.10/mne/time_frequency/_stockwell.py | Python | bsd-3-clause | 9,819 |
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.utils import cint, cstr, flt, fmt_money, formatdate, getdate
from frappe import msgprint, _, scrub
from erpnext.setup.utils import get_company_currency
from erpnext.controllers.accounts_controller import AccountsController
class JournalVoucher(AccountsController):
def __init__(self, arg1, arg2=None):
super(JournalVoucher, self).__init__(arg1, arg2)
self.master_type = {}
self.credit_days_for = {}
self.credit_days_global = -1
self.is_approving_authority = -1
def validate(self):
if not self.is_opening:
self.is_opening='No'
self.clearance_date = None
super(JournalVoucher, self).validate_date_with_fiscal_year()
self.validate_cheque_info()
self.validate_entries_for_advance()
self.validate_debit_and_credit()
self.validate_against_jv()
self.validate_against_sales_invoice()
self.validate_against_purchase_invoice()
self.set_against_account()
self.create_remarks()
self.set_aging_date()
self.set_print_format_fields()
self.validate_against_sales_order()
self.validate_against_purchase_order()
def on_submit(self):
#frappe.errprint("hello ggangadhar")
if self.voucher_type in ['Bank Voucher', 'Contra Voucher', 'Journal Entry']:
self.check_credit_days()
self.make_gl_entries()
self.check_credit_limit()
self.update_advance_paid()
self.super_admin()
def super_admin(self):
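		# Provisioning hook: find the Sales Invoice settled by this Journal
		# Voucher, then either create a new Site Master (new subscriptions) or
		# log in to the customer's existing *.tailorpad.com site and push the
		# purchased user count / validity taken from the invoiced Items.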
#frappe.errprint("hello ggangadhar")
res=frappe.db.sql("select b.against_invoice from `tabJournal Voucher` a ,`tabJournal Voucher Detail` b where a.name=b.parent and a.name=%s and b.against_invoice is not null",self.name)
frappe.errprint(['res',res[0][0]])
res1=frappe.db.sql("select a.customer,a.email_id,a.posting_date,a.is_new_subscription,a.domain_name,a.rounded_total_export,b.item_code from `tabSales Invoice` a, `tabSales Invoice Item` b where a.name=b.parent and a.name=%s",res[0][0],debug=1)
res2=frappe.db.sql("select country from `tabCustomer` where name=%s",res1 and res1[0][0])
frappe.errprint(res1)
if res1 and res1[0][3]==1:
frappe.errprint("creating sitemster from jv")
frappe.get_doc({
"doctype": "Site Master",
"client_name": res1 and res1[0][0],
"site_name": ((res1 and res1[0][4]).replace(' ','_')).lower()+".tailorpad.com",
"email_id__if_administrator": res1 and res1[0][1],
"country": res2 and res2[0][0]
}).insert()
else:
frappe.errprint("in super admin else updating packages")
import requests
import json
pr1 = frappe.db.sql("""select site_name,email_id__if_administrator,country from `tabSite Master` where client_name=%s""",res1[0][0])
st=pr1 and pr1[0][0] or ''
eml=pr1 and pr1[0][1] or ''
cnt=pr1 and pr1[0][2] or ''
val=usr=0
#frappe.errprint(val)
headers = {'content-type': 'application/x-www-form-urlencoded'}
sup={'usr':'administrator','pwd':'admin'}
url = 'http://'+st+'/api/method/login'
frappe.errprint(url)
response = requests.get(url, data=sup, headers=headers)
frappe.errprint(response.text)
frappe.errprint("logged in new site for update")
if st.find('.')!= -1:
db=st.split('.')[0][:16]
else:
db=st[:16]
frappe.errprint(db)
item_code = frappe.db.sql("""select item_code from `tabSales Invoice Item` where parent = %s """, res[0][0])
for ic in item_code:
qr="select no_of_users,validity from `tabItem` where name = '"+cstr(ic[0])+"'"
pro = frappe.db.sql(qr)
frappe.errprint(pro)
if (pro [0][0]== 0) and (pro[0][1]>0):
frappe.errprint("0 and >0")
vldt={}
vldt['validity']=pro[0][1]
vldt['country']=cnt
vldt['email_id_admin']=eml
url = 'http://'+st+'/api/resource/User/Administrator'
frappe.errprint(url)
frappe.errprint('data='+json.dumps(vldt))
response = requests.put(url, data='data='+json.dumps(vldt), headers=headers)
frappe.errprint("responce")
#frappe.errprint(response.text)
elif (pro [0][0]>0 ) and (pro[0][1]==0):
frappe.errprint(">0 and 0")
vldtt={}
vldtt['no_of_users']=pro[0][0]
vldtt['country']=cnt
vldtt['email_id_admin']=eml
url = 'http://'+st+'/api/resource/User/Administrator'
frappe.errprint(url)
frappe.errprint('data='+json.dumps(vldtt))
response = requests.put(url, data='data='+json.dumps(vldtt), headers=headers)
frappe.errprint("responce")
#frappe.errprint(response.text)
elif (pro [0][0]> 0) and (pro[0][1]>0):
frappe.errprint(" >0 and >0")
user_val={}
user_val['validity']=pro [0][1]
user_val['user_name']=pro [0][0]
user_val['flag']='false'
url = 'http://'+st+'/api/resource/User Validity'
frappe.errprint(url)
frappe.errprint('data='+json.dumps(user_val))
response = requests.post(url, data='data='+json.dumps(user_val), headers=headers)
frappe.errprint("responce")
#frappe.errprint(response.text)
else:
frappe.errprint("0 and 0")
def update_advance_paid(self):
advance_paid = frappe._dict()
for d in self.get("entries"):
if d.is_advance:
if d.against_sales_order:
advance_paid.setdefault("Sales Order", []).append(d.against_sales_order)
elif d.against_purchase_order:
advance_paid.setdefault("Purchase Order", []).append(d.against_purchase_order)
for voucher_type, order_list in advance_paid.items():
for voucher_no in list(set(order_list)):
frappe.get_doc(voucher_type, voucher_no).set_total_advance_paid()
def on_cancel(self):
from erpnext.accounts.utils import remove_against_link_from_jv
remove_against_link_from_jv(self.doctype, self.name, "against_jv")
self.make_gl_entries(1)
self.update_advance_paid()
def validate_cheque_info(self):
if self.voucher_type in ['Bank Voucher']:
if not self.cheque_no or not self.cheque_date:
msgprint(_("Reference No & Reference Date is required for {0}").format(self.voucher_type),
raise_exception=1)
if self.cheque_date and not self.cheque_no:
msgprint(_("Reference No is mandatory if you entered Reference Date"), raise_exception=1)
def validate_entries_for_advance(self):
for d in self.get('entries'):
if not d.is_advance and not d.against_voucher and \
not d.against_invoice and not d.against_jv:
master_type = frappe.db.get_value("Account", d.account, "master_type")
if (master_type == 'Customer' and flt(d.credit) > 0) or \
(master_type == 'Supplier' and flt(d.debit) > 0):
msgprint(_("Row {0}: Please check 'Is Advance' against Account {1} if this \
is an advance entry.").format(d.idx, d.account))
def validate_against_jv(self):
for d in self.get('entries'):
if d.against_jv:
if d.against_jv == self.name:
frappe.throw(_("You can not enter current voucher in 'Against Journal Voucher' column"))
against_entries = frappe.db.sql("""select * from `tabJournal Voucher Detail`
where account = %s and docstatus = 1 and parent = %s
and ifnull(against_jv, '') = ''""", (d.account, d.against_jv), as_dict=True)
if not against_entries:
frappe.throw(_("Journal Voucher {0} does not have account {1} or already matched")
.format(d.against_jv, d.account))
else:
dr_or_cr = "debit" if d.credit > 0 else "credit"
valid = False
for jvd in against_entries:
if flt(jvd[dr_or_cr]) > 0:
valid = True
if not valid:
frappe.throw(_("Against Journal Voucher {0} does not have any unmatched {1} entry")
.format(d.against_jv, dr_or_cr))
def validate_against_sales_invoice(self):
payment_against_voucher = self.validate_account_in_against_voucher("against_invoice", "Sales Invoice")
self.validate_against_invoice_fields("Sales Invoice", payment_against_voucher)
def validate_against_purchase_invoice(self):
payment_against_voucher = self.validate_account_in_against_voucher("against_voucher", "Purchase Invoice")
self.validate_against_invoice_fields("Purchase Invoice", payment_against_voucher)
def validate_against_sales_order(self):
payment_against_voucher = self.validate_account_in_against_voucher("against_sales_order", "Sales Order")
self.validate_against_order_fields("Sales Order", payment_against_voucher)
def validate_against_purchase_order(self):
payment_against_voucher = self.validate_account_in_against_voucher("against_purchase_order", "Purchase Order")
self.validate_against_order_fields("Purchase Order", payment_against_voucher)
def validate_account_in_against_voucher(self, against_field, doctype):
payment_against_voucher = frappe._dict()
field_dict = {'Sales Invoice': "Debit To",
'Purchase Invoice': "Credit To",
'Sales Order': "Customer",
'Purchase Order': "Supplier"
}
for d in self.get("entries"):
if d.get(against_field):
dr_or_cr = "credit" if against_field in ["against_invoice", "against_sales_order"] \
else "debit"
if against_field in ["against_invoice", "against_sales_order"] \
and flt(d.debit) > 0:
frappe.throw(_("Row {0}: Debit entry can not be linked with a {1}").format(d.idx, doctype))
if against_field in ["against_voucher", "against_purchase_order"] \
and flt(d.credit) > 0:
frappe.throw(_("Row {0}: Credit entry can not be linked with a {1}").format(d.idx, doctype))
voucher_account = frappe.db.get_value(doctype, d.get(against_field), \
scrub(field_dict.get(doctype)))
account_master_name = frappe.db.get_value("Account", d.account, "master_name")
if against_field in ["against_invoice", "against_voucher"] \
and voucher_account != d.account:
frappe.throw(_("Row {0}: Account {1} does not match with {2} {3} account") \
.format(d.idx, d.account, doctype, field_dict.get(doctype)))
if against_field in ["against_sales_order", "against_purchase_order"]:
if voucher_account != account_master_name:
frappe.throw(_("Row {0}: Account {1} does not match with {2} {3} Name") \
.format(d.idx, d.account, doctype, field_dict.get(doctype)))
elif d.is_advance == "Yes":
payment_against_voucher.setdefault(d.get(against_field), []).append(flt(d.get(dr_or_cr)))
return payment_against_voucher
def validate_against_invoice_fields(self, doctype, payment_against_voucher):
for voucher_no, payment_list in payment_against_voucher.items():
voucher_properties = frappe.db.get_value(doctype, voucher_no,
["docstatus", "outstanding_amount"])
if voucher_properties[0] != 1:
frappe.throw(_("{0} {1} is not submitted").format(doctype, voucher_no))
if flt(voucher_properties[1]) < flt(sum(payment_list)):
frappe.throw(_("Payment against {0} {1} cannot be greater \
than Outstanding Amount {2}").format(doctype, voucher_no, voucher_properties[1]))
def validate_against_order_fields(self, doctype, payment_against_voucher):
for voucher_no, payment_list in payment_against_voucher.items():
voucher_properties = frappe.db.get_value(doctype, voucher_no,
["docstatus", "per_billed", "advance_paid", "grand_total"])
if voucher_properties[0] != 1:
frappe.throw(_("{0} {1} is not submitted").format(doctype, voucher_no))
if flt(voucher_properties[1]) >= 100:
frappe.throw(_("{0} {1} is fully billed").format(doctype, voucher_no))
if flt(voucher_properties[3]) < flt(voucher_properties[2]) + flt(sum(payment_list)):
frappe.throw(_("Advance paid against {0} {1} cannot be greater \
than Grand Total {2}").format(doctype, voucher_no, voucher_properties[3]))
def set_against_account(self):
accounts_debited, accounts_credited = [], []
for d in self.get("entries"):
			if flt(d.debit) > 0: accounts_debited.append(d.account)
if flt(d.credit) > 0: accounts_credited.append(d.account)
for d in self.get("entries"):
			if flt(d.debit) > 0: d.against_account = ", ".join(list(set(accounts_credited)))
			if flt(d.credit) > 0: d.against_account = ", ".join(list(set(accounts_debited)))
def validate_debit_and_credit(self):
self.total_debit, self.total_credit, self.difference = 0, 0, 0
for d in self.get("entries"):
if d.debit and d.credit:
frappe.throw(_("You cannot credit and debit same account at the same time"))
self.total_debit = flt(self.total_debit) + flt(d.debit, self.precision("debit", "entries"))
self.total_credit = flt(self.total_credit) + flt(d.credit, self.precision("credit", "entries"))
self.difference = flt(self.total_debit, self.precision("total_debit")) - \
flt(self.total_credit, self.precision("total_credit"))
if self.difference:
frappe.throw(_("Total Debit must be equal to Total Credit. The difference is {0}")
.format(self.difference))
def create_remarks(self):
r = []
if self.cheque_no:
if self.cheque_date:
r.append(_('Reference #{0} dated {1}').format(self.cheque_no, formatdate(self.cheque_date)))
else:
msgprint(_("Please enter Reference date"), raise_exception=frappe.MandatoryError)
for d in self.get('entries'):
if d.against_invoice and d.credit:
currency = frappe.db.get_value("Sales Invoice", d.against_invoice, "currency")
r.append(_("{0} against Sales Invoice {1}").format(fmt_money(flt(d.credit), currency = currency), \
d.against_invoice))
if d.against_sales_order and d.credit:
currency = frappe.db.get_value("Sales Order", d.against_sales_order, "currency")
r.append(_("{0} against Sales Order {1}").format(fmt_money(flt(d.credit), currency = currency), \
d.against_sales_order))
if d.against_voucher and d.debit:
bill_no = frappe.db.sql("""select bill_no, bill_date, currency
from `tabPurchase Invoice` where name=%s""", d.against_voucher)
if bill_no and bill_no[0][0] and bill_no[0][0].lower().strip() \
not in ['na', 'not applicable', 'none']:
r.append(_('{0} {1} against Bill {2} dated {3}').format(bill_no[0][2],
fmt_money(flt(d.debit)), bill_no[0][0],
bill_no[0][1] and formatdate(bill_no[0][1].strftime('%Y-%m-%d'))))
if d.against_purchase_order and d.debit:
currency = frappe.db.get_value("Purchase Order", d.against_purchase_order, "currency")
r.append(_("{0} against Purchase Order {1}").format(fmt_money(flt(d.credit), currency = currency), \
d.against_purchase_order))
if self.user_remark:
r.append(_("Note: {0}").format(self.user_remark))
if r:
self.remark = ("\n").join(r) #User Remarks is not mandatory
def set_aging_date(self):
if self.is_opening != 'Yes':
self.aging_date = self.posting_date
else:
# check account type whether supplier or customer
exists = False
for d in self.get('entries'):
account_type = frappe.db.get_value("Account", d.account, "account_type")
if account_type in ["Supplier", "Customer"]:
exists = True
break
# If customer/supplier account, aging date is mandatory
if exists and not self.aging_date:
msgprint(_("Aging Date is mandatory for opening entry"), raise_exception=1)
else:
self.aging_date = self.posting_date
def set_print_format_fields(self):
for d in self.get('entries'):
result = frappe.db.get_value("Account", d.account,
["account_type", "master_type"])
if not result:
continue
account_type, master_type = result
if master_type in ['Supplier', 'Customer']:
if not self.pay_to_recd_from:
self.pay_to_recd_from = frappe.db.get_value(master_type,
' - '.join(d.account.split(' - ')[:-1]),
master_type == 'Customer' and 'customer_name' or 'supplier_name')
if account_type in ['Bank', 'Cash']:
company_currency = get_company_currency(self.company)
amt = flt(d.debit) and d.debit or d.credit
self.total_amount = fmt_money(amt, currency=company_currency)
from frappe.utils import money_in_words
self.total_amount_in_words = money_in_words(amt, company_currency)
def check_credit_days(self):
date_diff = 0
if self.cheque_date:
date_diff = (getdate(self.cheque_date)-getdate(self.posting_date)).days
if date_diff <= 0: return
# Get List of Customer Account
acc_list = filter(lambda d: frappe.db.get_value("Account", d.account,
"master_type")=='Customer', self.get('entries'))
for d in acc_list:
credit_days = self.get_credit_days_for(d.account)
# Check credit days
if credit_days > 0 and not self.get_authorized_user() and cint(date_diff) > credit_days:
msgprint(_("Maximum allowed credit is {0} days after posting date").format(credit_days),
raise_exception=1)
def get_credit_days_for(self, ac):
if not self.credit_days_for.has_key(ac):
self.credit_days_for[ac] = cint(frappe.db.get_value("Account", ac, "credit_days"))
if not self.credit_days_for[ac]:
if self.credit_days_global==-1:
self.credit_days_global = cint(frappe.db.get_value("Company",
self.company, "credit_days"))
return self.credit_days_global
else:
return self.credit_days_for[ac]
def get_authorized_user(self):
if self.is_approving_authority==-1:
self.is_approving_authority = 0
# Fetch credit controller role
approving_authority = frappe.db.get_value("Accounts Settings", None,
"credit_controller")
# Check logged-in user is authorized
if approving_authority in frappe.user.get_roles():
self.is_approving_authority = 1
return self.is_approving_authority
def make_gl_entries(self, cancel=0, adv_adj=0):
from erpnext.accounts.general_ledger import make_gl_entries
gl_map = []
for d in self.get("entries"):
if d.debit or d.credit:
gl_map.append(
self.get_gl_dict({
"account": d.account,
"against": d.against_account,
"debit": flt(d.debit, self.precision("debit", "entries")),
"credit": flt(d.credit, self.precision("credit", "entries")),
"against_voucher_type": (("Purchase Invoice" if d.against_voucher else None)
or ("Sales Invoice" if d.against_invoice else None)
or ("Journal Voucher" if d.against_jv else None)
or ("Sales Order" if d.against_sales_order else None)
or ("Purchase Order" if d.against_purchase_order else None)),
"against_voucher": d.against_voucher or d.against_invoice or d.against_jv
or d.against_sales_order or d.against_purchase_order,
"remarks": self.remark,
"cost_center": d.cost_center
})
)
if gl_map:
make_gl_entries(gl_map, cancel=cancel, adv_adj=adv_adj)
def check_credit_limit(self):
for d in self.get("entries"):
master_type, master_name = frappe.db.get_value("Account", d.account,
["master_type", "master_name"])
if master_type == "Customer" and master_name:
super(JournalVoucher, self).check_credit_limit(d.account)
def get_balance(self):
if not self.get('entries'):
msgprint(_("'Entries' cannot be empty"), raise_exception=True)
else:
flag, self.total_debit, self.total_credit = 0, 0, 0
diff = flt(self.difference, self.precision("difference"))
# If any row without amount, set the diff on that row
for d in self.get('entries'):
if not d.credit and not d.debit and diff != 0:
if diff>0:
d.credit = diff
elif diff<0:
d.debit = diff
flag = 1
# Set the diff in a new row
if flag == 0 and diff != 0:
jd = self.append('entries', {})
if diff>0:
jd.credit = abs(diff)
elif diff<0:
jd.debit = abs(diff)
self.validate_debit_and_credit()
def get_outstanding_invoices(self):
self.set('entries', [])
total = 0
for d in self.get_values():
total += flt(d.outstanding_amount, self.precision("credit", "entries"))
jd1 = self.append('entries', {})
jd1.account = d.account
if self.write_off_based_on == 'Accounts Receivable':
jd1.credit = flt(d.outstanding_amount, self.precision("credit", "entries"))
jd1.against_invoice = cstr(d.name)
elif self.write_off_based_on == 'Accounts Payable':
jd1.debit = flt(d.outstanding_amount, self.precision("debit", "entries"))
jd1.against_voucher = cstr(d.name)
jd2 = self.append('entries', {})
if self.write_off_based_on == 'Accounts Receivable':
jd2.debit = total
elif self.write_off_based_on == 'Accounts Payable':
jd2.credit = total
self.validate_debit_and_credit()
def get_values(self):
cond = " and outstanding_amount <= {0}".format(self.write_off_amount) \
if flt(self.write_off_amount) > 0 else ""
if self.write_off_based_on == 'Accounts Receivable':
return frappe.db.sql("""select name, debit_to as account, outstanding_amount
from `tabSales Invoice` where docstatus = 1 and company = %s
and outstanding_amount > 0 %s""" % ('%s', cond), self.company, as_dict=True)
elif self.write_off_based_on == 'Accounts Payable':
return frappe.db.sql("""select name, credit_to as account, outstanding_amount
from `tabPurchase Invoice` where docstatus = 1 and company = %s
and outstanding_amount > 0 %s""" % ('%s', cond), self.company, as_dict=True)
@frappe.whitelist()
def get_default_bank_cash_account(company, voucher_type):
from erpnext.accounts.utils import get_balance_on
account = frappe.db.get_value("Company", company,
voucher_type=="Bank Voucher" and "default_bank_account" or "default_cash_account")
if account:
return {
"account": account,
"balance": get_balance_on(account)
}
@frappe.whitelist()
def get_payment_entry_from_sales_invoice(sales_invoice):
from erpnext.accounts.utils import get_balance_on
si = frappe.get_doc("Sales Invoice", sales_invoice)
jv = get_payment_entry(si)
jv.remark = 'Payment received against Sales Invoice {0}. {1}'.format(si.name, si.remarks)
# credit customer
jv.get("entries")[0].account = si.debit_to
jv.get("entries")[0].balance = get_balance_on(si.debit_to)
jv.get("entries")[0].credit = si.outstanding_amount
jv.get("entries")[0].against_invoice = si.name
# debit bank
jv.get("entries")[1].debit = si.outstanding_amount
return jv.as_dict()
@frappe.whitelist()
def get_payment_entry_from_purchase_invoice(purchase_invoice):
from erpnext.accounts.utils import get_balance_on
pi = frappe.get_doc("Purchase Invoice", purchase_invoice)
jv = get_payment_entry(pi)
jv.remark = 'Payment against Purchase Invoice {0}. {1}'.format(pi.name, pi.remarks)
	# debit supplier
jv.get("entries")[0].account = pi.credit_to
jv.get("entries")[0].balance = get_balance_on(pi.credit_to)
jv.get("entries")[0].debit = pi.outstanding_amount
jv.get("entries")[0].against_voucher = pi.name
# credit bank
jv.get("entries")[1].credit = pi.outstanding_amount
return jv.as_dict()
def get_payment_entry(doc):
bank_account = get_default_bank_cash_account(doc.company, "Bank Voucher")
jv = frappe.new_doc('Journal Voucher')
jv.voucher_type = 'Bank Voucher'
jv.company = doc.company
jv.fiscal_year = doc.fiscal_year
jv.append("entries")
d2 = jv.append("entries")
if bank_account:
d2.account = bank_account["account"]
d2.balance = bank_account["balance"]
return jv
@frappe.whitelist()
def get_opening_accounts(company):
"""get all balance sheet accounts for opening entry"""
from erpnext.accounts.utils import get_balance_on
accounts = frappe.db.sql_list("""select name from tabAccount
where group_or_ledger='Ledger' and report_type='Balance Sheet' and company=%s""", company)
return [{"account": a, "balance": get_balance_on(a)} for a in accounts]
def get_against_purchase_invoice(doctype, txt, searchfield, start, page_len, filters):
return frappe.db.sql("""select name, credit_to, outstanding_amount, bill_no, bill_date
from `tabPurchase Invoice` where credit_to = %s and docstatus = 1
and outstanding_amount > 0 and %s like %s order by name desc limit %s, %s""" %
("%s", searchfield, "%s", "%s", "%s"),
(filters["account"], "%%%s%%" % txt, start, page_len))
def get_against_sales_invoice(doctype, txt, searchfield, start, page_len, filters):
return frappe.db.sql("""select name, debit_to, outstanding_amount
from `tabSales Invoice` where debit_to = %s and docstatus = 1
and outstanding_amount > 0 and `%s` like %s order by name desc limit %s, %s""" %
("%s", searchfield, "%s", "%s", "%s"),
(filters["account"], "%%%s%%" % txt, start, page_len))
def get_against_jv(doctype, txt, searchfield, start, page_len, filters):
return frappe.db.sql("""select jv.name, jv.posting_date, jv.user_remark
from `tabJournal Voucher` jv, `tabJournal Voucher Detail` jv_detail
where jv_detail.parent = jv.name and jv_detail.account = %s and jv.docstatus = 1
and jv.%s like %s order by jv.name desc limit %s, %s""" %
("%s", searchfield, "%s", "%s", "%s"),
(filters["account"], "%%%s%%" % txt, start, page_len))
@frappe.whitelist()
def get_outstanding(args):
args = eval(args)
if args.get("doctype") == "Journal Voucher" and args.get("account"):
against_jv_amount = frappe.db.sql("""
select sum(ifnull(debit, 0)) - sum(ifnull(credit, 0))
from `tabJournal Voucher Detail` where parent=%s and account=%s
and ifnull(against_invoice, '')='' and ifnull(against_voucher, '')=''
and ifnull(against_jv, '')=''""", (args['docname'], args['account']))
against_jv_amount = flt(against_jv_amount[0][0]) if against_jv_amount else 0
if against_jv_amount > 0:
return {"credit": against_jv_amount}
else:
return {"debit": -1* against_jv_amount}
elif args.get("doctype") == "Sales Invoice":
return {
"credit": flt(frappe.db.get_value("Sales Invoice", args["docname"],
"outstanding_amount"))
}
elif args.get("doctype") == "Purchase Invoice":
return {
"debit": flt(frappe.db.get_value("Purchase Invoice", args["docname"],
"outstanding_amount"))
}
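# A minimal usage sketch (assumption: run inside a Frappe site context with a
# submitted Sales Invoice named 'SINV-00001'; the name is illustrative):
#
#     jv_dict = get_payment_entry_from_sales_invoice('SINV-00001')
#     jv = frappe.get_doc(jv_dict)
#     jv.insert()
#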
| gangadhar-kadam/laganerp | erpnext/accounts/doctype/journal_voucher/journal_voucher.py | Python | agpl-3.0 | 25,574 |
#==========================================================================
#
# Copyright Insight Software Consortium
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0.txt
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#==========================================================================*/
# GeodesicActiveContourImageFilter.py
# Translated by Charl P. Botha <http://cpbotha.net/> from the cxx original.
# $Id: GeodesicActiveContourImageFilter.py,v 1.1 2006/09/06 20:58:42 glehmann Exp $
# example runs:
# ------------
# 1. Left ventricle:
# python GeodesicActiveContourImageFilter.py \
# ../Data/BrainProtonDensitySlice.png lventricle.png \
# 81 114 5 1 -0.5 3 2
#
# 2. White matter:
# python GeodesicActiveContourImageFilter.py \
# ../Data/BrainProtonDensitySlice.png wmatter.png \
# 56 92 5 1 -0.3 2 10
#
# See the ITK Software Guide, section 9.3.3 "Geodesic Active Contours
# Segmentation" as well as the CXX example for more comments.
import itk
from sys import argv, stderr
itk.auto_progress(2)
def main():
if len(argv) < 10:
errMsg = "Missing parameters\n" \
"Usage: %s\n" % (argv[0],) + \
" inputImage outputImage\n" \
" seedX seedY InitialDistance\n" \
" Sigma SigmoidAlpha SigmoidBeta\n" \
" PropagationScaling\n"
print >> stderr, errMsg
return
# We're going to build the following pipelines:
# 1. reader -> smoothing -> gradientMagnitude -> sigmoid -> FI
# 2. fastMarching -> geodesicActiveContour(FI) -> thresholder -> writer
# The output of pipeline 1 is a feature image that is used by the
# geodesicActiveContour object. Also see figure 9.18 in the ITK
# Software Guide.
    # we want to know what is happening
# itk.auto_progress(True)
InternalPixelType = itk.F
Dimension = 2
InternalImageType = itk.Image[InternalPixelType, Dimension]
OutputPixelType = itk.UC
OutputImageType = itk.Image[OutputPixelType, Dimension]
reader = itk.ImageFileReader[InternalImageType].New(FileName=argv[1])
# needed to give the size to the fastmarching filter
reader.Update()
smoothing = itk.CurvatureAnisotropicDiffusionImageFilter[InternalImageType, InternalImageType].New(reader,
TimeStep=0.125,
NumberOfIterations=5,
ConductanceParameter=9.0)
gradientMagnitude = itk.GradientMagnitudeRecursiveGaussianImageFilter[InternalImageType, InternalImageType].New(smoothing,
Sigma=float(argv[6]))
sigmoid = itk.SigmoidImageFilter[InternalImageType, InternalImageType].New(gradientMagnitude,
OutputMinimum=0.0,
OutputMaximum=1.1,
Alpha=float(argv[7]),
Beta=float(argv[8]))
seedPosition = itk.Index[2]()
seedPosition.SetElement(0, int(argv[3]))
seedPosition.SetElement(1, int(argv[4]))
node = itk.LevelSetNode[InternalPixelType, Dimension]()
node.SetValue(-float(argv[5]))
node.SetIndex(seedPosition)
seeds = itk.VectorContainer[itk.UI, itk.LevelSetNode[InternalPixelType, Dimension]].New()
seeds.Initialize()
seeds.InsertElement(0, node)
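    # Seeding: fast marching grows a distance map from this single trial point;
    # the negative initial value makes the zero level set start as a small
    # contour of radius InitialDistance around (seedX, seedY).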
fastMarching = itk.FastMarchingImageFilter[InternalImageType, InternalImageType].New(sigmoid,
TrialPoints=seeds,
SpeedConstant=1.0,
OutputSize=reader.GetOutput().GetBufferedRegion().GetSize() )
geodesicActiveContour = itk.GeodesicActiveContourLevelSetImageFilter[InternalImageType, InternalImageType, InternalPixelType].New(fastMarching,
                                FeatureImage=sigmoid.GetOutput(), # the FeatureImage must be set explicitly - ITK segfaults without it :-(
PropagationScaling=float(argv[9]),
CurvatureScaling=1.0,
AdvectionScaling=1.0,
MaximumRMSError=0.02,
NumberOfIterations=800
)
thresholder = itk.BinaryThresholdImageFilter[InternalImageType, OutputImageType].New(geodesicActiveContour,
LowerThreshold=-1000,
UpperThreshold=0,
OutsideValue=0,
InsideValue=255)
writer = itk.ImageFileWriter[OutputImageType].New(thresholder, FileName=argv[2])
def rescaleAndWrite(filter, fileName):
caster = itk.RescaleIntensityImageFilter[InternalImageType, OutputImageType].New(filter,
OutputMinimum=0,
OutputMaximum=255)
itk.write(caster, fileName)
rescaleAndWrite(smoothing, "GeodesicActiveContourImageFilterOutput1.png")
rescaleAndWrite(gradientMagnitude, "GeodesicActiveContourImageFilterOutput2.png")
rescaleAndWrite(sigmoid, "GeodesicActiveContourImageFilterOutput3.png")
rescaleAndWrite(fastMarching, "GeodesicActiveContourImageFilterOutput4.png")
writer.Update()
print
print "Max. no. iterations: %d" % (geodesicActiveContour.GetNumberOfIterations())
print "Max. RMS error: %.3f" % (geodesicActiveContour.GetMaximumRMSError())
print
print "No. elapsed iterations: %d" % (geodesicActiveContour.GetElapsedIterations())
print "RMS change: %.3f" % (geodesicActiveContour.GetRMSChange())
itk.write(fastMarching, "GeodesicActiveContourImageFilterOutput4.mha")
itk.write(sigmoid, "GeodesicActiveContourImageFilterOutput3.mha")
itk.write(gradientMagnitude, "GeodesicActiveContourImageFilterOutput2.mha")
if __name__ == "__main__":
main()
| daviddoria/itkHoughTransform | Wrapping/WrapITK/Languages/Python/Tests/GeodesicActiveContourImageFilter.py | Python | apache-2.0 | 6,169 |
# -*- coding: utf-8 -*-
#------------------------------------------------------------
import urlparse,urllib2,urllib,re
import os, sys
from core import jsontools as json
from core import scrapertools
from core import servertools
from core.item import Item
from platformcode import config, logger
from core import httptools
host = 'https://www.vporn.com'
def mainlist(item):
logger.info()
itemlist = []
itemlist.append( Item(channel=item.channel, title="Novedades" , action="lista", url=host + "/newest/month/"))
itemlist.append( Item(channel=item.channel, title="Mas Vistas" , action="lista", url=host + "/views/month/"))
itemlist.append( Item(channel=item.channel, title="Mejor Valoradas" , action="lista", url=host + "/rating/month/"))
itemlist.append( Item(channel=item.channel, title="Favoritas" , action="lista", url=host + "/favorites/month/"))
itemlist.append( Item(channel=item.channel, title="Mas Votada" , action="lista", url=host + "/votes/month/"))
itemlist.append( Item(channel=item.channel, title="Longitud" , action="lista", url=host + "/longest/month/"))
itemlist.append( Item(channel=item.channel, title="PornStar" , action="catalogo", url=host + "/pornstars/"))
itemlist.append( Item(channel=item.channel, title="Categorias" , action="categorias", url=host + "/categories/"))
itemlist.append( Item(channel=item.channel, title="Buscar", action="search"))
return itemlist
def search(item, texto):
logger.info()
texto = texto.replace(" ", "+")
item.url = host + "/search?q=%s" % texto
try:
return lista(item)
except:
import sys
for line in sys.exc_info():
logger.error("%s" % line)
return []
def catalogo(item):
logger.info()
itemlist = []
data = httptools.downloadpage(item.url).data
data = re.sub(r"\n|\r|\t| |<br>", "", data)
patron = '<div class=\'star\'>.*?'
patron += '<a href="([^"]+)".*?'
patron += '<img src="([^"]+)" alt="([^"]+)".*?'
patron += '<span> (\d+) Videos'
matches = re.compile(patron,re.DOTALL).findall(data)
for scrapedurl,scrapedthumbnail,scrapedtitle,cantidad in matches:
scrapedplot = ""
scrapedtitle = scrapedtitle + " (" + cantidad + ")"
scrapedurl = host + scrapedurl
itemlist.append( Item(channel=item.channel, action="lista", title=scrapedtitle, url=scrapedurl,
fanart=scrapedthumbnail, thumbnail=scrapedthumbnail, plot=scrapedplot) )
next_page = scrapertools.find_single_match(data,'<a class="next" href="([^"]+)">')
if next_page!="":
next_page = urlparse.urljoin(item.url,next_page)
itemlist.append(item.clone(action="catalogo", title="Next page >>", text_color="blue", url=next_page) )
return itemlist
def categorias(item):
logger.info()
itemlist = []
data = httptools.downloadpage(item.url).data
data = re.sub(r"\n|\r|\t| |<br>", "", data)
patron = '"name":"([^"]+)".*?'
patron += '"image":"([^"]+)".*?'
patron += '"url":"([^"]+)"'
matches = re.compile(patron,re.DOTALL).findall(data)
for scrapedtitle,scrapedthumbnail,scrapedurl in matches:
scrapedplot = ""
scrapedthumbnail = "https://th-us2.vporn.com" + scrapedthumbnail
scrapedthumbnail= scrapedthumbnail.replace("\/", "/")
scrapedurl = host + scrapedurl
scrapedurl = scrapedurl.replace("\/", "/")
itemlist.append( Item(channel=item.channel, action="lista", title=scrapedtitle, url=scrapedurl,
thumbnail=scrapedthumbnail, plot=scrapedplot) )
return itemlist
def lista(item):
logger.info()
itemlist = []
data = httptools.downloadpage(item.url).data
data = re.sub(r"\n|\r|\t| |<br>", "", data)
patron = '<div class="video">.*?'
patron += '<a href="([^"]+)".*?'
patron += '<span class="time">(.*?)</span>(.*?)</span>.*?'
patron += '<img src="([^"]+)" alt="([^"]+)"'
matches = re.compile(patron,re.DOTALL).findall(data)
for scrapedurl,time,calidad,scrapedthumbnail,scrapedtitle in matches:
        scrapedtitle = scrapedtitle.replace(", ", " & ").replace("(", "(").replace(")", ")")
title = "[COLOR yellow]" + time + " [/COLOR]" + scrapedtitle
if "hd-marker is-hd" in calidad:
title = "[COLOR yellow]" + time + " [/COLOR]" + "[COLOR red]" + "HD" + " [/COLOR]" + scrapedtitle
thumbnail = scrapedthumbnail
plot = ""
itemlist.append( Item(channel=item.channel, action="play" , title=title , url=scrapedurl,
fanart=thumbnail, thumbnail=thumbnail, plot=plot, contentTitle = title))
next_page = scrapertools.find_single_match(data,'<a class="next.*?title="Next Page" href="([^"]+)">')
if next_page!="":
next_page = urlparse.urljoin(item.url,next_page)
itemlist.append(item.clone(action="lista", title="Next page >>", text_color="blue", url=next_page) )
return itemlist
def play(item):
logger.info()
itemlist = []
data = httptools.downloadpage(item.url).data
patron = '<source src="([^"]+)" type="video/mp4" label="([^"]+)"'
matches = scrapertools.find_multiple_matches(data, patron)
for scrapedurl,scrapedtitle in matches:
itemlist.append(item.clone(action="play", title=scrapedtitle, url=scrapedurl))
return itemlist
| alfa-jor/addon | plugin.video.alfa/channels/vporn.py | Python | gpl-3.0 | 5,411 |
from .models import BlogPost
from os import path, listdir
import yaml
import time
from gitcms.parsedate import parsedatetime
from gitcms.pages.load import preprocess_rst_content
from gitcms.tagging.models import tag_for
def loaddir(directory, clear=False):
if clear:
BlogPost.objects.all().delete()
queue = listdir(directory)
while queue:
next = queue.pop()
if next[0] == '.': continue
if next in ('template.rst', 'template'): continue
next = path.join(directory, next)
if path.isdir(next):
queue.extend([
path.join(next,f) for f in listdir(next)
])
continue
filecontent = open(next).read()
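        # Each post file is YAML front matter, a '---' separator, then the
        # reStructuredText body, e.g. (field names other than timestamp and
        # categories are illustrative):
        #
        #   title: Hello world
        #   timestamp: 2010-01-01 12:00
        #   categories: python django
        #   ---
        #   Body of the post in reST.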
parts = filecontent.split('\n---\n', 1)
if len(parts) != 2:
raise IOError('gitcms.blog.load: expected "---" separator in file %s' % next)
fields, content = parts
fields = yaml.load(fields)
fields['content'] = preprocess_rst_content(content)
fields['timestamp'] = parsedatetime(fields['timestamp'])
fields['timestamp'] = time.strftime('%Y-%m-%d %H:%M', fields['timestamp'])
categories = fields.get('categories', '')
if 'categories' in fields: del fields['categories']
ptags = []
if categories:
for c in categories.split():
ptags.append(tag_for(c))
        # If we got this far without errors, it is safe to add our post.
        #
P = BlogPost(**fields)
P.save()
for t in ptags:
P.tags.add(t)
dependencies = ['tagging']
| luispedro/django-gitcms | gitcms/blog/load.py | Python | agpl-3.0 | 1,618 |
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
from . import test_ui
| Vauxoo/e-commerce | website_sale_require_legal/tests/__init__.py | Python | agpl-3.0 | 88 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2015 Nandaja Varma <nvarma@redhat.com>
# Copyright 2018 Red Hat, Inc.
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: gluster_peer
short_description: Attach/Detach peers to/from the cluster
description:
- Create or diminish a GlusterFS trusted storage pool. A set of nodes can be
added into an existing trusted storage pool or a new storage pool can be
formed. Or, nodes can be removed from an existing trusted storage pool.
version_added: "2.6"
author: Sachidananda Urs (@sac)
options:
state:
choices: ["present", "absent"]
default: "present"
description:
- Determines whether the nodes should be attached to the pool or
removed from the pool. If the state is present, nodes will be
attached to the pool. If state is absent, nodes will be detached
from the pool.
required: true
nodes:
description:
- List of nodes that have to be probed into the pool.
required: true
force:
type: bool
default: "false"
description:
            - Applicable only while removing nodes from the pool. gluster
              will refuse to detach a node from the pool if any one of the nodes
              is down; in such cases force can be used.
requirements:
- GlusterFS > 3.2
notes:
- This module does not support check mode.
'''
EXAMPLES = '''
- name: Create a trusted storage pool
gluster_peer:
state: present
nodes:
- 10.0.1.5
- 10.0.1.10
- name: Delete a node from the trusted storage pool
gluster_peer:
state: absent
nodes:
- 10.0.1.10
- name: Delete a node from the trusted storage pool by force
gluster_peer:
state: absent
nodes:
- 10.0.0.1
force: true
'''
RETURN = '''
'''
from ansible.module_utils.basic import AnsibleModule
from distutils.version import LooseVersion
class Peer(object):
def __init__(self, module):
self.module = module
self.state = self.module.params['state']
self.nodes = self.module.params['nodes']
self.glustercmd = self.module.get_bin_path('gluster', True)
self.lang = dict(LANG='C', LC_ALL='C', LC_MESSAGES='C')
self.action = ''
self.force = ''
def gluster_peer_ops(self):
if not self.nodes:
self.module.fail_json(msg="nodes list cannot be empty")
self.force = 'force' if self.module.params.get('force') else ''
if self.state == 'present':
self.nodes = self.get_to_be_probed_hosts(self.nodes)
self.action = 'probe'
# In case of peer probe, we do not need `force'
self.force = ''
else:
self.action = 'detach'
self.call_peer_commands()
def get_to_be_probed_hosts(self, hosts):
peercmd = [self.glustercmd, 'pool', 'list', '--mode=script']
rc, output, err = self.module.run_command(peercmd,
environ_update=self.lang)
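        # `gluster pool list` prints a header row followed by one line per peer,
        # roughly (exact layout can vary between releases):
        #   UUID                                  Hostname    State
        #   <uuid>                                10.0.1.5    Connected
        #   <uuid>                                localhost   Connected
        # Skip the header and keep the Hostname (second, tab-separated) column.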
peers_in_cluster = [line.split('\t')[1].strip() for
line in filter(None, output.split('\n')[1:])]
try:
peers_in_cluster.remove('localhost')
except ValueError:
# It is ok not to have localhost in list
pass
hosts_to_be_probed = [host for host in hosts if host not in
peers_in_cluster]
return hosts_to_be_probed
def call_peer_commands(self):
result = {}
result['msg'] = ''
result['changed'] = False
for node in self.nodes:
peercmd = [self.glustercmd, 'peer', self.action, node, '--mode=script']
if self.force:
peercmd.append(self.force)
rc, out, err = self.module.run_command(peercmd,
environ_update=self.lang)
if rc:
result['rc'] = rc
result['msg'] = err
# Fail early, do not wait for the loop to finish
self.module.fail_json(**result)
else:
if 'already in peer' in out or \
'localhost not needed' in out:
result['changed'] |= False
else:
result['changed'] = True
self.module.exit_json(**result)
def main():
module = AnsibleModule(
argument_spec=dict(
force=dict(type='bool', required=False),
nodes=dict(type='list', required=True),
state=dict(type='str', choices=['absent', 'present'],
default='present'),
),
supports_check_mode=False
)
pops = Peer(module)
required_version = "3.2"
# Verify if required GlusterFS version is installed
if is_invalid_gluster_version(module, required_version):
module.fail_json(msg="GlusterFS version > %s is required" %
required_version)
pops.gluster_peer_ops()
def is_invalid_gluster_version(module, required_version):
cmd = module.get_bin_path('gluster', True) + ' --version'
result = module.run_command(cmd)
ver_line = result[1].split('\n')[0]
version = ver_line.split(' ')[1]
# If the installed version is less than 3.2, it is an invalid version
# return True
return LooseVersion(version) < LooseVersion(required_version)
if __name__ == "__main__":
main()
| kustodian/ansible | lib/ansible/modules/storage/glusterfs/gluster_peer.py | Python | gpl-3.0 | 5,845 |
# Tweepy
# Copyright 2009-2010 Joshua Roesslein
# See LICENSE for details.
import os
import mimetypes
from tweepy.binder import bind_api
from tweepy.error import TweepError
from tweepy.parsers import ModelParser
from tweepy.utils import list_to_csv
class API(object):
"""Twitter API"""
def __init__(self, auth_handler=None,
host='api.twitter.com', search_host='search.twitter.com',
cache=None, secure=False, api_root='/1', search_root='',
retry_count=0, retry_delay=0, retry_errors=None,
parser=None):
self.auth = auth_handler
self.host = host
self.search_host = search_host
self.api_root = api_root
self.search_root = search_root
self.cache = cache
self.secure = secure
self.retry_count = retry_count
self.retry_delay = retry_delay
self.retry_errors = retry_errors
self.parser = parser or ModelParser()
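    # A minimal usage sketch (assumption: valid OAuth credentials are available;
    # OAuthHandler lives in tweepy.auth):
    #
    #     auth = OAuthHandler(consumer_key, consumer_secret)
    #     auth.set_access_token(access_token, access_token_secret)
    #     api = API(auth)
    #     api.update_status(status='Hello from tweepy')
    #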
""" statuses/public_timeline """
public_timeline = bind_api(
path = '/statuses/public_timeline.json',
payload_type = 'status', payload_list = True,
allowed_param = []
)
""" statuses/home_timeline """
home_timeline = bind_api(
path = '/statuses/home_timeline.json',
payload_type = 'status', payload_list = True,
allowed_param = ['since_id', 'max_id', 'count', 'page'],
require_auth = True
)
""" statuses/friends_timeline """
friends_timeline = bind_api(
path = '/statuses/friends_timeline.json',
payload_type = 'status', payload_list = True,
allowed_param = ['since_id', 'max_id', 'count', 'page'],
require_auth = True
)
""" statuses/user_timeline """
user_timeline = bind_api(
path = '/statuses/user_timeline.json',
payload_type = 'status', payload_list = True,
allowed_param = ['id', 'user_id', 'screen_name', 'since_id',
'max_id', 'count', 'page', 'include_rts']
)
""" statuses/mentions """
mentions = bind_api(
path = '/statuses/mentions.json',
payload_type = 'status', payload_list = True,
allowed_param = ['since_id', 'max_id', 'count', 'page'],
require_auth = True
)
"""/statuses/:id/retweeted_by.format"""
retweeted_by = bind_api(
path = '/statuses/{id}/retweeted_by.json',
payload_type = 'status', payload_list = True,
allowed_param = ['id', 'count', 'page'],
require_auth = True
)
"""/related_results/show/:id.format"""
related_results = bind_api(
path = '/related_results/show/{id}.json',
payload_type = 'relation', payload_list = True,
allowed_param = ['id'],
require_auth = False
)
"""/statuses/:id/retweeted_by/ids.format"""
retweeted_by_ids = bind_api(
path = '/statuses/{id}/retweeted_by/ids.json',
payload_type = 'ids',
allowed_param = ['id', 'count', 'page'],
require_auth = True
)
""" statuses/retweeted_by_me """
retweeted_by_me = bind_api(
path = '/statuses/retweeted_by_me.json',
payload_type = 'status', payload_list = True,
allowed_param = ['since_id', 'max_id', 'count', 'page'],
require_auth = True
)
""" statuses/retweeted_to_me """
retweeted_to_me = bind_api(
path = '/statuses/retweeted_to_me.json',
payload_type = 'status', payload_list = True,
allowed_param = ['since_id', 'max_id', 'count', 'page'],
require_auth = True
)
""" statuses/retweets_of_me """
retweets_of_me = bind_api(
path = '/statuses/retweets_of_me.json',
payload_type = 'status', payload_list = True,
allowed_param = ['since_id', 'max_id', 'count', 'page'],
require_auth = True
)
""" statuses/show """
get_status = bind_api(
path = '/statuses/show.json',
payload_type = 'status',
allowed_param = ['id']
)
""" statuses/update """
update_status = bind_api(
path = '/statuses/update.json',
method = 'POST',
payload_type = 'status',
allowed_param = ['status', 'in_reply_to_status_id', 'lat', 'long', 'source', 'place_id'],
require_auth = True
)
""" statuses/destroy """
destroy_status = bind_api(
path = '/statuses/destroy.json',
method = 'DELETE',
payload_type = 'status',
allowed_param = ['id'],
require_auth = True
)
""" statuses/retweet """
retweet = bind_api(
path = '/statuses/retweet/{id}.json',
method = 'POST',
payload_type = 'status',
allowed_param = ['id'],
require_auth = True
)
""" statuses/retweets """
retweets = bind_api(
path = '/statuses/retweets/{id}.json',
payload_type = 'status', payload_list = True,
allowed_param = ['id', 'count'],
require_auth = True
)
""" users/show """
get_user = bind_api(
path = '/users/show.json',
payload_type = 'user',
allowed_param = ['id', 'user_id', 'screen_name']
)
""" Perform bulk look up of users from user ID or screenname """
def lookup_users(self, user_ids=None, screen_names=None):
return self._lookup_users(list_to_csv(user_ids), list_to_csv(screen_names))
_lookup_users = bind_api(
path = '/users/lookup.json',
payload_type = 'user', payload_list = True,
allowed_param = ['user_id', 'screen_name'],
require_auth = True
)
""" Get the authenticated user """
def me(self):
return self.get_user(screen_name=self.auth.get_username())
""" users/search """
search_users = bind_api(
path = '/users/search.json',
payload_type = 'user', payload_list = True,
require_auth = True,
allowed_param = ['q', 'per_page', 'page']
)
""" statuses/friends """
friends = bind_api(
path = '/statuses/friends.json',
payload_type = 'user', payload_list = True,
allowed_param = ['id', 'user_id', 'screen_name', 'page', 'cursor']
)
""" statuses/followers """
followers = bind_api(
path = '/statuses/followers.json',
payload_type = 'user', payload_list = True,
allowed_param = ['id', 'user_id', 'screen_name', 'page', 'cursor']
)
""" direct_messages """
direct_messages = bind_api(
path = '/direct_messages.json',
payload_type = 'direct_message', payload_list = True,
allowed_param = ['since_id', 'max_id', 'count', 'page'],
require_auth = True
)
""" direct_messages/sent """
sent_direct_messages = bind_api(
path = '/direct_messages/sent.json',
payload_type = 'direct_message', payload_list = True,
allowed_param = ['since_id', 'max_id', 'count', 'page'],
require_auth = True
)
""" direct_messages/new """
send_direct_message = bind_api(
path = '/direct_messages/new.json',
method = 'POST',
payload_type = 'direct_message',
allowed_param = ['user', 'screen_name', 'user_id', 'text'],
require_auth = True
)
""" direct_messages/destroy """
destroy_direct_message = bind_api(
path = '/direct_messages/destroy.json',
method = 'DELETE',
payload_type = 'direct_message',
allowed_param = ['id'],
require_auth = True
)
""" friendships/create """
create_friendship = bind_api(
path = '/friendships/create.json',
method = 'POST',
payload_type = 'user',
allowed_param = ['id', 'user_id', 'screen_name', 'follow'],
require_auth = True
)
""" friendships/destroy """
destroy_friendship = bind_api(
path = '/friendships/destroy.json',
method = 'DELETE',
payload_type = 'user',
allowed_param = ['id', 'user_id', 'screen_name'],
require_auth = True
)
""" friendships/exists """
exists_friendship = bind_api(
path = '/friendships/exists.json',
payload_type = 'json',
allowed_param = ['user_a', 'user_b']
)
""" friendships/show """
show_friendship = bind_api(
path = '/friendships/show.json',
payload_type = 'friendship',
allowed_param = ['source_id', 'source_screen_name',
'target_id', 'target_screen_name']
)
""" friends/ids """
friends_ids = bind_api(
path = '/friends/ids.json',
payload_type = 'ids',
allowed_param = ['id', 'user_id', 'screen_name', 'cursor']
)
""" friendships/incoming """
friendships_incoming = bind_api(
path = '/friendships/incoming.json',
payload_type = 'ids',
allowed_param = ['cursor']
)
""" friendships/outgoing"""
friendships_outgoing = bind_api(
path = '/friendships/outgoing.json',
payload_type = 'ids',
allowed_param = ['cursor']
)
""" followers/ids """
followers_ids = bind_api(
path = '/followers/ids.json',
payload_type = 'ids',
allowed_param = ['id', 'user_id', 'screen_name', 'cursor']
)
""" account/verify_credentials """
def verify_credentials(self):
try:
return bind_api(
path = '/account/verify_credentials.json',
payload_type = 'user',
require_auth = True
)(self)
except TweepError, e:
if e.response and e.response.status == 401:
return False
raise
""" account/rate_limit_status """
rate_limit_status = bind_api(
path = '/account/rate_limit_status.json',
payload_type = 'json'
)
""" account/update_delivery_device """
set_delivery_device = bind_api(
path = '/account/update_delivery_device.json',
method = 'POST',
allowed_param = ['device'],
payload_type = 'user',
require_auth = True
)
""" account/update_profile_colors """
update_profile_colors = bind_api(
path = '/account/update_profile_colors.json',
method = 'POST',
payload_type = 'user',
allowed_param = ['profile_background_color', 'profile_text_color',
'profile_link_color', 'profile_sidebar_fill_color',
'profile_sidebar_border_color'],
require_auth = True
)
""" account/update_profile_image """
def update_profile_image(self, filename):
headers, post_data = API._pack_image(filename, 700)
return bind_api(
path = '/account/update_profile_image.json',
method = 'POST',
payload_type = 'user',
require_auth = True
)(self, post_data=post_data, headers=headers)
""" account/update_profile_background_image """
def update_profile_background_image(self, filename, *args, **kargs):
headers, post_data = API._pack_image(filename, 800)
bind_api(
path = '/account/update_profile_background_image.json',
method = 'POST',
payload_type = 'user',
allowed_param = ['tile'],
require_auth = True
)(self, post_data=post_data, headers=headers)
""" account/update_profile """
update_profile = bind_api(
path = '/account/update_profile.json',
method = 'POST',
payload_type = 'user',
allowed_param = ['name', 'url', 'location', 'description'],
require_auth = True
)
""" favorites """
favorites = bind_api(
path = '/favorites.json',
payload_type = 'status', payload_list = True,
allowed_param = ['id', 'page']
)
""" favorites/create """
create_favorite = bind_api(
path = '/favorites/create/{id}.json',
method = 'POST',
payload_type = 'status',
allowed_param = ['id'],
require_auth = True
)
""" favorites/destroy """
destroy_favorite = bind_api(
path = '/favorites/destroy/{id}.json',
method = 'DELETE',
payload_type = 'status',
allowed_param = ['id'],
require_auth = True
)
""" notifications/follow """
enable_notifications = bind_api(
path = '/notifications/follow.json',
method = 'POST',
payload_type = 'user',
allowed_param = ['id', 'user_id', 'screen_name'],
require_auth = True
)
""" notifications/leave """
disable_notifications = bind_api(
path = '/notifications/leave.json',
method = 'POST',
payload_type = 'user',
allowed_param = ['id', 'user_id', 'screen_name'],
require_auth = True
)
""" blocks/create """
create_block = bind_api(
path = '/blocks/create.json',
method = 'POST',
payload_type = 'user',
allowed_param = ['id', 'user_id', 'screen_name'],
require_auth = True
)
""" blocks/destroy """
destroy_block = bind_api(
path = '/blocks/destroy.json',
method = 'DELETE',
payload_type = 'user',
allowed_param = ['id', 'user_id', 'screen_name'],
require_auth = True
)
""" blocks/exists """
def exists_block(self, *args, **kargs):
try:
bind_api(
path = '/blocks/exists.json',
allowed_param = ['id', 'user_id', 'screen_name'],
require_auth = True
)(self, *args, **kargs)
except TweepError:
return False
return True
""" blocks/blocking """
blocks = bind_api(
path = '/blocks/blocking.json',
payload_type = 'user', payload_list = True,
allowed_param = ['page'],
require_auth = True
)
""" blocks/blocking/ids """
blocks_ids = bind_api(
path = '/blocks/blocking/ids.json',
payload_type = 'json',
require_auth = True
)
""" report_spam """
report_spam = bind_api(
path = '/report_spam.json',
method = 'POST',
payload_type = 'user',
allowed_param = ['id', 'user_id', 'screen_name'],
require_auth = True
)
""" saved_searches """
saved_searches = bind_api(
path = '/saved_searches.json',
payload_type = 'saved_search', payload_list = True,
require_auth = True
)
""" saved_searches/show """
get_saved_search = bind_api(
path = '/saved_searches/show/{id}.json',
payload_type = 'saved_search',
allowed_param = ['id'],
require_auth = True
)
""" saved_searches/create """
create_saved_search = bind_api(
path = '/saved_searches/create.json',
method = 'POST',
payload_type = 'saved_search',
allowed_param = ['query'],
require_auth = True
)
""" saved_searches/destroy """
destroy_saved_search = bind_api(
path = '/saved_searches/destroy/{id}.json',
method = 'DELETE',
payload_type = 'saved_search',
allowed_param = ['id'],
require_auth = True
)
""" help/test """
def test(self):
try:
bind_api(
path = '/help/test.json',
)(self)
except TweepError:
return False
return True
def create_list(self, *args, **kargs):
return bind_api(
path = '/%s/lists.json' % self.auth.get_username(),
method = 'POST',
payload_type = 'list',
allowed_param = ['name', 'mode', 'description'],
require_auth = True
)(self, *args, **kargs)
def destroy_list(self, slug):
return bind_api(
path = '/%s/lists/%s.json' % (self.auth.get_username(), slug),
method = 'DELETE',
payload_type = 'list',
require_auth = True
)(self)
def update_list(self, slug, *args, **kargs):
return bind_api(
path = '/%s/lists/%s.json' % (self.auth.get_username(), slug),
method = 'POST',
payload_type = 'list',
allowed_param = ['name', 'mode', 'description'],
require_auth = True
)(self, *args, **kargs)
lists = bind_api(
path = '/{user}/lists.json',
payload_type = 'list', payload_list = True,
allowed_param = ['user', 'cursor'],
require_auth = True
)
lists_memberships = bind_api(
path = '/{user}/lists/memberships.json',
payload_type = 'list', payload_list = True,
allowed_param = ['user', 'cursor'],
require_auth = True
)
lists_subscriptions = bind_api(
path = '/{user}/lists/subscriptions.json',
payload_type = 'list', payload_list = True,
allowed_param = ['user', 'cursor'],
require_auth = True
)
list_timeline = bind_api(
path = '/{owner}/lists/{slug}/statuses.json',
payload_type = 'status', payload_list = True,
allowed_param = ['owner', 'slug', 'since_id', 'max_id', 'per_page', 'page']
)
get_list = bind_api(
path = '/{owner}/lists/{slug}.json',
payload_type = 'list',
allowed_param = ['owner', 'slug']
)
def add_list_member(self, slug, *args, **kargs):
return bind_api(
path = '/%s/%s/members.json' % (self.auth.get_username(), slug),
method = 'POST',
payload_type = 'list',
allowed_param = ['id'],
require_auth = True
)(self, *args, **kargs)
def remove_list_member(self, slug, *args, **kargs):
return bind_api(
path = '/%s/%s/members.json' % (self.auth.get_username(), slug),
method = 'DELETE',
payload_type = 'list',
allowed_param = ['id'],
require_auth = True
)(self, *args, **kargs)
list_members = bind_api(
path = '/{owner}/{slug}/members.json',
payload_type = 'user', payload_list = True,
allowed_param = ['owner', 'slug', 'cursor']
)
def is_list_member(self, owner, slug, user_id):
try:
return bind_api(
path = '/%s/%s/members/%s.json' % (owner, slug, user_id),
payload_type = 'user'
)(self)
except TweepError:
return False
subscribe_list = bind_api(
path = '/{owner}/{slug}/subscribers.json',
method = 'POST',
payload_type = 'list',
allowed_param = ['owner', 'slug'],
require_auth = True
)
unsubscribe_list = bind_api(
path = '/{owner}/{slug}/subscribers.json',
method = 'DELETE',
payload_type = 'list',
allowed_param = ['owner', 'slug'],
require_auth = True
)
list_subscribers = bind_api(
path = '/{owner}/{slug}/subscribers.json',
payload_type = 'user', payload_list = True,
allowed_param = ['owner', 'slug', 'cursor']
)
def is_subscribed_list(self, owner, slug, user_id):
try:
return bind_api(
path = '/%s/%s/subscribers/%s.json' % (owner, slug, user_id),
payload_type = 'user'
)(self)
except TweepError:
return False
""" trends/available """
trends_available = bind_api(
path = '/trends/available.json',
payload_type = 'json',
allowed_param = ['lat', 'long']
)
""" trends/location """
trends_location = bind_api(
path = '/trends/{woeid}.json',
payload_type = 'json',
allowed_param = ['woeid']
)
""" search """
search = bind_api(
search_api = True,
path = '/search.json',
payload_type = 'search_result', payload_list = True,
allowed_param = ['q', 'lang', 'locale', 'rpp', 'page', 'since_id', 'geocode', 'show_user', 'max_id', 'since', 'until', 'result_type']
)
search.pagination_mode = 'page'
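    # Illustrative note (not from the original source): because pagination_mode
    # is set to 'page', search results can be paged with tweepy's Cursor helper,
    # e.g. tweepy.Cursor(api.search, q='python').items(20) -- assuming the
    # Cursor class shipped with this version of the library.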
""" trends """
trends = bind_api(
path = '/trends.json',
payload_type = 'json'
)
""" trends/current """
trends_current = bind_api(
path = '/trends/current.json',
payload_type = 'json',
allowed_param = ['exclude']
)
""" trends/daily """
trends_daily = bind_api(
path = '/trends/daily.json',
payload_type = 'json',
allowed_param = ['date', 'exclude']
)
""" trends/weekly """
trends_weekly = bind_api(
path = '/trends/weekly.json',
payload_type = 'json',
allowed_param = ['date', 'exclude']
)
""" geo/reverse_geocode """
reverse_geocode = bind_api(
path = '/geo/reverse_geocode.json',
payload_type = 'json',
allowed_param = ['lat', 'long', 'accuracy', 'granularity', 'max_results']
)
""" geo/nearby_places """
nearby_places = bind_api(
path = '/geo/nearby_places.json',
payload_type = 'json',
allowed_param = ['lat', 'long', 'ip', 'accuracy', 'granularity', 'max_results']
)
""" geo/id """
geo_id = bind_api(
path = '/geo/id/{id}.json',
payload_type = 'json',
allowed_param = ['id']
)
""" Internal use only """
@staticmethod
def _pack_image(filename, max_size):
"""Pack image from file into multipart-formdata post body"""
        # image must be smaller than max_size (given in kb)
        try:
            if os.path.getsize(filename) > (max_size * 1024):
                raise TweepError('File is too big, must be less than %skb.' % max_size)
except os.error, e:
raise TweepError('Unable to access file')
# image must be gif, jpeg, or png
file_type = mimetypes.guess_type(filename)
if file_type is None:
raise TweepError('Could not determine file type')
file_type = file_type[0]
if file_type not in ['image/gif', 'image/jpeg', 'image/png']:
raise TweepError('Invalid file type for image: %s' % file_type)
        # build the multipart-formdata body
fp = open(filename, 'rb')
BOUNDARY = 'Tw3ePy'
body = []
body.append('--' + BOUNDARY)
body.append('Content-Disposition: form-data; name="image"; filename="%s"' % filename)
body.append('Content-Type: %s' % file_type)
body.append('')
body.append(fp.read())
body.append('--' + BOUNDARY + '--')
body.append('')
fp.close()
body = '\r\n'.join(body)
# build headers
headers = {
'Content-Type': 'multipart/form-data; boundary=Tw3ePy',
'Content-Length': len(body)
}
return headers, body
| tkaitchuck/nupic | external/linux64/lib/python2.6/site-packages/tweepy/api.py | Python | gpl-3.0 | 22,718 |
#!/usr/bin/python3
def sanitize(time_string):
if '-' in time_string:
splitter = '-'
elif ':' in time_string:
splitter = ':'
else:
return(time_string)
(mins, secs) = time_string.strip().split(splitter)
return(mins + '.' + secs)
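# Illustrative examples of sanitize() (not part of the original script):
#   sanitize('2-34') -> '2.34'
#   sanitize('3:21') -> '3.21'
#   sanitize('2.59') -> '2.59'   (no '-' or ':', so returned unchanged)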
def get_coach_data(filename):
try:
with open(filename) as fn:
data = fn.readline()
return(data.strip().split(','))
except IOError as ioerr:
print('File Error:' + str(ioerr))
return(None)
sarah = get_coach_data('sarah2.txt')
(sarah_name, sarah_dob) = sarah.pop(0), sarah.pop(0)
print(sarah_name + "'s fastest times are: " +
      str(sorted(set([sanitize(t) for t in sarah]))[0:3]))
| clovemfeng/studydemo | 20140617/userlist_data.py | Python | gpl-2.0 | 657 |
"""
Acceptance tests for Studio related to the split_test module.
"""
from unittest import skip
from ..fixtures.course import CourseFixture, XBlockFixtureDesc
from ..pages.studio.component_editor import ComponentEditorView
from test_studio_container import ContainerBase
from ..pages.studio.utils import add_advanced_component
from xmodule.partitions.partitions import Group, UserPartition
from bok_choy.promise import Promise
class SplitTest(ContainerBase):
"""
Tests for creating and editing split test instances in Studio.
"""
__test__ = True
def setup_fixtures(self):
course_fix = CourseFixture(
self.course_info['org'],
self.course_info['number'],
self.course_info['run'],
self.course_info['display_name']
)
course_fix.add_advanced_settings(
{
u"advanced_modules": ["split_test"],
u"user_partitions": [
UserPartition(0, 'Configuration alpha,beta', 'first', [Group("0", 'alpha'), Group("1", 'beta')]).to_json(),
UserPartition(1, 'Configuration 0,1,2', 'second', [Group("0", 'Group 0'), Group("1", 'Group 1'), Group("2", 'Group 2')]).to_json()
]
}
)
course_fix.add_children(
XBlockFixtureDesc('chapter', 'Test Section').add_children(
XBlockFixtureDesc('sequential', 'Test Subsection').add_children(
XBlockFixtureDesc('vertical', 'Test Unit')
)
)
).install()
self.course_fix = course_fix
self.user = course_fix.user
def verify_groups(self, container, active_groups, inactive_groups, verify_missing_groups_not_present=True):
"""
Check that the groups appear and are correctly categorized as to active and inactive.
Also checks that the "add missing groups" button/link is not present unless a value of False is passed
for verify_missing_groups_not_present.
"""
def wait_for_xblocks_to_render():
# First xblock is the container for the page, subtract 1.
return (len(active_groups) + len(inactive_groups) == len(container.xblocks) - 1, len(active_groups))
Promise(wait_for_xblocks_to_render, "Number of xblocks on the page are incorrect").fulfill()
def check_xblock_names(expected_groups, actual_blocks):
self.assertEqual(len(expected_groups), len(actual_blocks))
for idx, expected in enumerate(expected_groups):
self.assertEqual('Expand or Collapse\n{}'.format(expected), actual_blocks[idx].name)
check_xblock_names(active_groups, container.active_xblocks)
check_xblock_names(inactive_groups, container.inactive_xblocks)
# Verify inactive xblocks appear after active xblocks
check_xblock_names(active_groups + inactive_groups, container.xblocks[1:])
if verify_missing_groups_not_present:
self.verify_add_missing_groups_button_not_present(container)
def verify_add_missing_groups_button_not_present(self, container):
"""
Checks that the "add missing gorups" button/link is not present.
"""
def missing_groups_button_not_present():
button_present = container.missing_groups_button_present()
return (not button_present, not button_present)
Promise(missing_groups_button_not_present, "Add missing groups button should not be showing.").fulfill()
def create_poorly_configured_split_instance(self):
"""
Creates a split test instance with a missing group and an inactive group.
Returns the container page.
"""
unit = self.go_to_unit_page(make_draft=True)
add_advanced_component(unit, 0, 'split_test')
container = self.go_to_container_page()
container.edit()
component_editor = ComponentEditorView(self.browser, container.locator)
component_editor.set_select_value_and_save('Group Configuration', 'Configuration alpha,beta')
self.course_fix.add_advanced_settings(
{
u"user_partitions": [
UserPartition(0, 'Configuration alpha,beta', 'first',
[Group("0", 'alpha'), Group("2", 'gamma')]).to_json()
]
}
)
self.course_fix._add_advanced_settings()
return self.go_to_container_page()
def test_create_and_select_group_configuration(self):
"""
Tests creating a split test instance on the unit page, and then
assigning the group configuration.
"""
unit = self.go_to_unit_page(make_draft=True)
add_advanced_component(unit, 0, 'split_test')
container = self.go_to_container_page()
container.edit()
component_editor = ComponentEditorView(self.browser, container.locator)
component_editor.set_select_value_and_save('Group Configuration', 'Configuration alpha,beta')
self.verify_groups(container, ['alpha', 'beta'], [])
# Switch to the other group configuration. Must navigate again to the container page so
# that there is only a single "editor" on the page.
container = self.go_to_container_page()
container.edit()
component_editor = ComponentEditorView(self.browser, container.locator)
component_editor.set_select_value_and_save('Group Configuration', 'Configuration 0,1,2')
self.verify_groups(container, ['Group 0', 'Group 1', 'Group 2'], ['alpha', 'beta'])
# Reload the page to make sure the groups were persisted.
container = self.go_to_container_page()
self.verify_groups(container, ['Group 0', 'Group 1', 'Group 2'], ['alpha', 'beta'])
@skip("This fails periodically where it fails to trigger the add missing groups action.Dis")
def test_missing_group(self):
"""
The case of a split test with invalid configuration (missing group).
"""
container = self.create_poorly_configured_split_instance()
container.add_missing_groups()
self.verify_groups(container, ['alpha', 'gamma'], ['beta'])
# Reload the page to make sure the groups were persisted.
container = self.go_to_container_page()
self.verify_groups(container, ['alpha', 'gamma'], ['beta'])
def test_delete_inactive_group(self):
"""
Test deleting an inactive group.
"""
container = self.create_poorly_configured_split_instance()
container.delete(0)
self.verify_groups(container, ['alpha'], [], verify_missing_groups_not_present=False)
| geekaia/edx-platform | common/test/acceptance/tests/test_studio_split_test.py | Python | agpl-3.0 | 6,703 |
"""Extracts features for the training set of the given file lists using the given feature extractor."""
import argparse
import bob.ip.facedetect
import importlib
import os, math
import bob.core
logger = bob.core.log.setup('bob.ip.facedetect')
# create feature extractor
LBP_VARIANTS = {
'ell' : {'circular' : True},
'u2' : {'uniform' : True},
'ri' : {'rotation_invariant' : True},
'mct' : {'to_average' : True, 'add_average_bit' : True},
'tran' : {'elbp_type' : 'transitional'},
'dir' : {'elbp_type' : 'direction-coded'}
}
def command_line_options(command_line_arguments):
parser = argparse.ArgumentParser(description=__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('--file-lists', '-i', nargs='+', help = "Select the training lists to extract features for.")
parser.add_argument('--feature-directory', '-d', default = "features", help = "The output directory, where features will be stores")
parser.add_argument('--parallel', '-P', type=int, help = "Use this option to run the script in parallel in the SGE grid, using the given number of parallel processes")
parser.add_argument('--patch-size', '-p', type=int, nargs=2, default=(24,20), help = "The size of the patch for the image in y and x.")
parser.add_argument('--distance', '-s', type=int, default=2, help = "The distance with which the image should be scanned.")
parser.add_argument('--scale-base', '-S', type=float, default=math.pow(2.,-1./8.), help = "The logarithmic distance between two scales (should be between 0 and 1).")
parser.add_argument('--negative-examples-every', '-N', type=int, default=4, help = "Use only every nth scale to extract negative examples.")
parser.add_argument('--lowest-scale', '-f', type=float, default=0, help = "Patches which will be lower than the given scale times the image resolution will not be taken into account; if 0. (the default) all patches will be considered.")
  parser.add_argument('--similarity-thresholds', '-t', type=float, nargs=2, default=(0.2, 0.8), help = "The bounding box overlap thresholds for which negative (< thres[0]) and positive (> thres[1]) examples are accepted.")
parser.add_argument('--no-mirror-samples', '-M', action='store_true', help = "Disable mirroring of the training samples.")
parser.add_argument('--examples-per-image-scale', '-e', type=int, nargs=2, default = [100, 100], help = "The number of positive and negative training examples for each image scale.")
parser.add_argument('--lbp-multi-block', '-m', action='store_true', help = "If given multi-block LBP features will be extracted (otherwise, it's regular LBP).")
parser.add_argument('--lbp-variant', '-l', choices=LBP_VARIANTS.keys(), nargs='+', default = [], help = "Specify, which LBP variant(s) are wanted (ell is not available for MBLPB codes).")
parser.add_argument('--lbp-overlap', '-o', action='store_true', help = "Specify the overlap of the MBLBP.")
parser.add_argument('--lbp-scale', '-L', type=int, help="If given, only a single LBP extractor with the given LBP scale will be extracted, otherwise all possible scales are generated.")
parser.add_argument('--lbp-square', '-Q', action='store_true', help="Generate only square feature extractors, and no rectangular ones.")
bob.core.log.add_command_line_option(parser)
args = parser.parse_args(command_line_arguments)
bob.core.log.set_verbosity_level(logger, args.verbose)
return args
def main(command_line_arguments = None):
args = command_line_options(command_line_arguments)
train_set = bob.ip.facedetect.train.TrainingSet(feature_directory = args.feature_directory)
# create feature extractor
res = {}
for t in args.lbp_variant:
res.update(LBP_VARIANTS[t])
if args.lbp_scale is not None:
if args.lbp_multi_block:
      feature_extractor = bob.ip.facedetect.FeatureExtractor(patch_size = args.patch_size, extractors = [bob.ip.base.LBP(8, block_size=(args.lbp_scale,args.lbp_scale), block_overlap=(args.lbp_scale-1, args.lbp_scale-1) if args.lbp_overlap else (0,0), **res)])
else:
feature_extractor = bob.ip.facedetect.FeatureExtractor(patch_size = args.patch_size, extractors = [bob.ip.base.LBP(8, radius=args.lbp_scale, **res)])
else:
if args.lbp_multi_block:
feature_extractor = bob.ip.facedetect.FeatureExtractor(patch_size = args.patch_size, template = bob.ip.base.LBP(8, block_size=(1,1), **res), overlap=args.lbp_overlap, square=args.lbp_square)
else:
feature_extractor = bob.ip.facedetect.FeatureExtractor(patch_size = args.patch_size, template = bob.ip.base.LBP(8, radius=1, **res), square=args.lbp_square)
# load training sets
for file_list in args.file_lists:
logger.info("Loading file list %s", file_list)
train_set.load(file_list)
# generate sampler
sampler = bob.ip.facedetect.detector.Sampler(patch_size=args.patch_size, scale_factor=args.scale_base, lowest_scale=args.lowest_scale, distance=args.distance)
# extract features
train_set.extract(sampler, feature_extractor, number_of_examples_per_scale = args.examples_per_image_scale, similarity_thresholds = args.similarity_thresholds, parallel = args.parallel, mirror = not args.no_mirror_samples, use_every_nth_negative_scale = args.negative_examples_every)
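# Illustrative invocation (not part of the original script; assembled from the
# argparse options defined above -- file names and the entry point name are
# placeholders, and the -vv verbosity flag comes from
# bob.core.log.add_command_line_option):
#
#   extract_training_features.py --file-lists train.lst \
#       --feature-directory features --lbp-scale 1 --lbp-multi-block \
#       --parallel 4 -vv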
| bioidiap/bob.ip.facedetect | bob/ip/facedetect/script/extract_training_features.py | Python | gpl-3.0 | 5,265 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.9 on 2016-09-04 23:50
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('courses', '0013_auto_20160903_0212'),
]
operations = [
migrations.RenameField(
model_name='section',
old_name='approachable_rating',
new_name='cached_approachable_rating',
),
migrations.RenameField(
model_name='section',
old_name='competency_rating',
new_name='cached_competency_rating',
),
migrations.RenameField(
model_name='section',
old_name='difficulty_rating',
new_name='cached_difficulty_rating',
),
migrations.RenameField(
model_name='section',
old_name='engagement_rating',
new_name='cached_engagement_rating',
),
migrations.RenameField(
model_name='section',
old_name='enthusiasm_rating',
new_name='cached_enthusiasm_rating',
),
migrations.RenameField(
model_name='section',
old_name='lecturing_rating',
new_name='cached_lecturing_rating',
),
migrations.RenameField(
model_name='section',
old_name='rating',
new_name='cached_rating',
),
migrations.RenameField(
model_name='section',
old_name='useful_rating',
new_name='cached_useful_rating',
),
]
| aspc/mainsite | aspc/courses/migrations/0014_auto_20160904_2350.py | Python | mit | 1,605 |
""" Compatibility functions for older versions of nibabel
Nibabel <= 1.3.0 do not have these attributes:
* header
* affine
* dataobj
The equivalents for these older versions of nibabel are:
* obj.get_header()
* obj.get_affine()
* obj._data
With old nibabel, getting unscaled data used `read_img_data(img,
prefer="unscaled"). Newer nibabel should prefer the `get_unscaled` method on
the image proxy object
"""
from __future__ import absolute_import
import numpy as np
import nibabel as nib
def get_dataobj(img):
""" Return data object for nibabel image
Parameters
----------
img : ``SpatialImage`` instance
Instance of nibabel ``SpatialImage`` class
Returns
-------
dataobj : object
``ArrayProxy`` or ndarray object containing data for `img`
"""
try:
return img.dataobj
except AttributeError:
return img._data
def get_header(img):
""" Return header from nibabel image
Parameters
----------
img : ``SpatialImage`` instance
Instance of nibabel ``SpatialImage`` class
Returns
-------
header : object
header object from `img`
"""
try:
return img.header
except AttributeError:
return img.get_header()
def get_affine(img):
""" Return affine from nibabel image
Parameters
----------
img : ``SpatialImage`` instance
Instance of nibabel ``SpatialImage`` class
Returns
-------
affine : object
affine object from `img`
"""
try:
return img.affine
except AttributeError:
return img.get_affine()
def get_unscaled_data(img):
""" Get the data from a nibabel image, maybe without applying scaling
Parameters
----------
img : ``SpatialImage`` instance
Instance of nibabel ``SpatialImage`` class
Returns
-------
data : ndarray
Data as loaded from image, not applying scaling if this can be avoided
"""
if hasattr(nib.AnalyzeImage.ImageArrayProxy, 'get_unscaled'):
try:
return img.dataobj.get_unscaled()
except AttributeError:
return np.array(img.dataobj)
return nib.loadsave.read_img_data(img, prefer='unscaled')
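# Illustrative usage sketch (not part of the original module).  The file name
# below is a placeholder; any image readable by nibabel will do.  The helpers
# above keep calling code independent of the installed nibabel version.
if __name__ == '__main__':
    demo_img = nib.load('example.nii')
    print(get_header(demo_img))
    print(get_affine(demo_img))
    print(get_unscaled_data(demo_img).shape)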
| alexis-roche/nipy | nipy/io/nibcompat.py | Python | bsd-3-clause | 2,229 |
"""Utils for time travel testings."""
def _t(rel=0.0):
"""Return an absolute time from the relative time given.
    The minimal allowed time on Windows is 86400 seconds, for some reason.
    Instead of doing the arithmetic in the tests themselves, this function
    should be used.
    The value `86400` is exported in `time_travel.MIN_START_TIME`, but I shan't
    use it, for it is forbidden to test the code using the code that is being
    tested.
"""
return 86400.0 + rel
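# Illustrative check (not part of the original module): _t() is the minimal
# allowed start time and _t(rel) is `rel` seconds after it.
if __name__ == '__main__':
    assert _t() == 86400.0
    assert _t(5.0) == 86405.0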
| snudler6/time-travel | src/tests/utils.py | Python | mit | 492 |
# Tests for rich comparisons
import unittest
from test import test_support
import operator
class Number:
def __init__(self, x):
self.x = x
def __lt__(self, other):
return self.x < other
def __le__(self, other):
return self.x <= other
def __eq__(self, other):
return self.x == other
def __ne__(self, other):
return self.x != other
def __gt__(self, other):
return self.x > other
def __ge__(self, other):
return self.x >= other
def __cmp__(self, other):
raise test_support.TestFailed, "Number.__cmp__() should not be called"
def __repr__(self):
return "Number(%r)" % (self.x, )
class Vector:
def __init__(self, data):
self.data = data
def __len__(self):
return len(self.data)
def __getitem__(self, i):
return self.data[i]
def __setitem__(self, i, v):
self.data[i] = v
def __hash__(self):
raise TypeError, "Vectors cannot be hashed"
def __nonzero__(self):
raise TypeError, "Vectors cannot be used in Boolean contexts"
def __cmp__(self, other):
raise test_support.TestFailed, "Vector.__cmp__() should not be called"
def __repr__(self):
return "Vector(%r)" % (self.data, )
def __lt__(self, other):
return Vector([a < b for a, b in zip(self.data, self.__cast(other))])
def __le__(self, other):
return Vector([a <= b for a, b in zip(self.data, self.__cast(other))])
def __eq__(self, other):
return Vector([a == b for a, b in zip(self.data, self.__cast(other))])
def __ne__(self, other):
return Vector([a != b for a, b in zip(self.data, self.__cast(other))])
def __gt__(self, other):
return Vector([a > b for a, b in zip(self.data, self.__cast(other))])
def __ge__(self, other):
return Vector([a >= b for a, b in zip(self.data, self.__cast(other))])
def __cast(self, other):
if isinstance(other, Vector):
other = other.data
if len(self.data) != len(other):
raise ValueError, "Cannot compare vectors of different length"
return other
opmap = {
"lt": (lambda a,b: a< b, operator.lt, operator.__lt__),
"le": (lambda a,b: a<=b, operator.le, operator.__le__),
"eq": (lambda a,b: a==b, operator.eq, operator.__eq__),
"ne": (lambda a,b: a!=b, operator.ne, operator.__ne__),
"gt": (lambda a,b: a> b, operator.gt, operator.__gt__),
"ge": (lambda a,b: a>=b, operator.ge, operator.__ge__)
}
class VectorTest(unittest.TestCase):
def checkfail(self, error, opname, *args):
for op in opmap[opname]:
self.assertRaises(error, op, *args)
def checkequal(self, opname, a, b, expres):
for op in opmap[opname]:
realres = op(a, b)
# can't use assertEqual(realres, expres) here
self.assertEqual(len(realres), len(expres))
for i in xrange(len(realres)):
# results are bool, so we can use "is" here
self.assert_(realres[i] is expres[i])
def test_mixed(self):
# check that comparisons involving Vector objects
# which return rich results (i.e. Vectors with itemwise
# comparison results) work
a = Vector(range(2))
b = Vector(range(3))
# all comparisons should fail for different length
for opname in opmap:
self.checkfail(ValueError, opname, a, b)
a = range(5)
b = 5 * [2]
# try mixed arguments (but not (a, b) as that won't return a bool vector)
args = [(a, Vector(b)), (Vector(a), b), (Vector(a), Vector(b))]
for (a, b) in args:
self.checkequal("lt", a, b, [True, True, False, False, False])
self.checkequal("le", a, b, [True, True, True, False, False])
self.checkequal("eq", a, b, [False, False, True, False, False])
self.checkequal("ne", a, b, [True, True, False, True, True ])
self.checkequal("gt", a, b, [False, False, False, True, True ])
self.checkequal("ge", a, b, [False, False, True, True, True ])
for ops in opmap.itervalues():
for op in ops:
# calls __nonzero__, which should fail
self.assertRaises(TypeError, bool, op(a, b))
class NumberTest(unittest.TestCase):
def test_basic(self):
# Check that comparisons involving Number objects
# give the same results give as comparing the
# corresponding ints
for a in xrange(3):
for b in xrange(3):
for typea in (int, Number):
for typeb in (int, Number):
if typea==typeb==int:
continue # the combination int, int is useless
ta = typea(a)
tb = typeb(b)
for ops in opmap.itervalues():
for op in ops:
realoutcome = op(a, b)
testoutcome = op(ta, tb)
self.assertEqual(realoutcome, testoutcome)
def checkvalue(self, opname, a, b, expres):
for typea in (int, Number):
for typeb in (int, Number):
ta = typea(a)
tb = typeb(b)
for op in opmap[opname]:
realres = op(ta, tb)
realres = getattr(realres, "x", realres)
self.assert_(realres is expres)
def test_values(self):
# check all operators and all comparison results
self.checkvalue("lt", 0, 0, False)
self.checkvalue("le", 0, 0, True )
self.checkvalue("eq", 0, 0, True )
self.checkvalue("ne", 0, 0, False)
self.checkvalue("gt", 0, 0, False)
self.checkvalue("ge", 0, 0, True )
self.checkvalue("lt", 0, 1, True )
self.checkvalue("le", 0, 1, True )
self.checkvalue("eq", 0, 1, False)
self.checkvalue("ne", 0, 1, True )
self.checkvalue("gt", 0, 1, False)
self.checkvalue("ge", 0, 1, False)
self.checkvalue("lt", 1, 0, False)
self.checkvalue("le", 1, 0, False)
self.checkvalue("eq", 1, 0, False)
self.checkvalue("ne", 1, 0, True )
self.checkvalue("gt", 1, 0, True )
self.checkvalue("ge", 1, 0, True )
class MiscTest(unittest.TestCase):
def test_misbehavin(self):
class Misb:
def __lt__(self, other): return 0
def __gt__(self, other): return 0
def __eq__(self, other): return 0
            def __le__(self, other): raise test_support.TestFailed, "This shouldn't happen"
            def __ge__(self, other): raise test_support.TestFailed, "This shouldn't happen"
            def __ne__(self, other): raise test_support.TestFailed, "This shouldn't happen"
def __cmp__(self, other): raise RuntimeError, "expected"
a = Misb()
b = Misb()
self.assertEqual(a<b, 0)
self.assertEqual(a==b, 0)
self.assertEqual(a>b, 0)
self.assertRaises(RuntimeError, cmp, a, b)
def test_not(self):
# Check that exceptions in __nonzero__ are properly
# propagated by the not operator
import operator
class Exc:
pass
class Bad:
def __nonzero__(self):
raise Exc
def do(bad):
not bad
for func in (do, operator.not_):
self.assertRaises(Exc, func, Bad())
def test_recursion(self):
# Check comparison for recursive objects
from UserList import UserList
a = UserList(); a.append(a)
b = UserList(); b.append(b)
self.assert_(a == b)
self.assert_(not a != b)
a.append(1)
self.assert_(a == a[0])
self.assert_(not a != a[0])
self.assert_(a != b)
self.assert_(not a == b)
b.append(0)
self.assert_(a != b)
self.assert_(not a == b)
a[1] = -1
self.assert_(a != b)
self.assert_(not a == b)
a = UserList()
b = UserList()
a.append(b)
b.append(a)
self.assert_(a == b)
self.assert_(not a != b)
b.append(17)
self.assert_(a != b)
self.assert_(not a == b)
a.append(17)
self.assert_(a == b)
self.assert_(not a != b)
def test_recursion2(self):
# This test exercises the circular structure handling code
# in PyObject_RichCompare()
class Weird(object):
def __eq__(self, other):
return self != other
def __ne__(self, other):
return self == other
def __lt__(self, other):
return self > other
def __gt__(self, other):
return self < other
self.assert_(Weird() == Weird())
self.assert_(not (Weird() != Weird()))
for op in opmap["lt"]:
self.assertRaises(ValueError, op, Weird(), Weird())
class DictTest(unittest.TestCase):
def test_dicts(self):
# Verify that __eq__ and __ne__ work for dicts even if the keys and
# values don't support anything other than __eq__ and __ne__. Complex
# numbers are a fine example of that.
import random
imag1a = {}
for i in range(50):
imag1a[random.randrange(100)*1j] = random.randrange(100)*1j
items = imag1a.items()
random.shuffle(items)
imag1b = {}
for k, v in items:
imag1b[k] = v
imag2 = imag1b.copy()
imag2[k] = v + 1.0
self.assert_(imag1a == imag1a)
self.assert_(imag1a == imag1b)
self.assert_(imag2 == imag2)
self.assert_(imag1a != imag2)
for opname in ("lt", "le", "gt", "ge"):
for op in opmap[opname]:
self.assertRaises(TypeError, op, imag1a, imag2)
class ListTest(unittest.TestCase):
def assertIs(self, a, b):
self.assert_(a is b)
def test_coverage(self):
# exercise all comparisons for lists
x = [42]
self.assertIs(x<x, False)
self.assertIs(x<=x, True)
self.assertIs(x==x, True)
self.assertIs(x!=x, False)
self.assertIs(x>x, False)
self.assertIs(x>=x, True)
y = [42, 42]
self.assertIs(x<y, True)
self.assertIs(x<=y, True)
self.assertIs(x==y, False)
self.assertIs(x!=y, True)
self.assertIs(x>y, False)
self.assertIs(x>=y, False)
def test_badentry(self):
# make sure that exceptions for item comparison are properly
# propagated in list comparisons
class Exc:
pass
class Bad:
def __eq__(self, other):
raise Exc
x = [Bad()]
y = [Bad()]
for op in opmap["eq"]:
self.assertRaises(Exc, op, x, y)
def test_goodentry(self):
# This test exercises the final call to PyObject_RichCompare()
# in Objects/listobject.c::list_richcompare()
class Good:
def __lt__(self, other):
return True
x = [Good()]
y = [Good()]
for op in opmap["lt"]:
self.assertIs(op(x, y), True)
def test_main():
test_support.run_unittest(VectorTest, NumberTest, MiscTest, DictTest, ListTest)
if __name__ == "__main__":
test_main()
| trivoldus28/pulsarch-verilog | tools/local/bas-release/bas,3.9/lib/python/lib/python2.3/test/test_richcmp.py | Python | gpl-2.0 | 11,493 |
#!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['stableinterface'],
'supported_by': 'curated'}
DOCUMENTATION = '''
---
module: iam
short_description: Manage IAM users, groups, roles and keys
description:
- Allows for the management of IAM users, user API keys, groups, roles.
version_added: "2.0"
options:
iam_type:
description:
- Type of IAM resource
required: true
default: null
choices: ["user", "group", "role"]
name:
description:
- Name of IAM resource to create or identify
required: true
new_name:
description:
- When state is update, will replace name with new_name on IAM resource
required: false
default: null
new_path:
description:
- When state is update, will replace the path with new_path on the IAM resource
required: false
default: null
state:
description:
- Whether to create, delete or update the IAM resource. Note, roles cannot be updated.
required: true
default: null
choices: [ "present", "absent", "update" ]
path:
description:
- When creating or updating, specify the desired path of the resource. If state is present,
it will replace the current path to match what is passed in when they do not match.
required: false
default: "/"
trust_policy:
description:
- The inline (JSON or YAML) trust policy document that grants an entity permission to assume the role. Mutually exclusive with C(trust_policy_filepath).
required: false
default: null
version_added: "2.2"
trust_policy_filepath:
description:
- The path to the trust policy document that grants an entity permission to assume the role. Mutually exclusive with C(trust_policy).
required: false
default: null
version_added: "2.2"
access_key_state:
description:
- When type is user, it creates, removes, deactivates or activates a user's access key(s). Note that actions apply only to keys specified.
required: false
default: null
choices: [ "create", "remove", "active", "inactive"]
key_count:
description:
- When access_key_state is create it will ensure this quantity of keys are present. Defaults to 1.
required: false
default: '1'
access_key_ids:
description:
- A list of the keys that you want impacted by the access_key_state parameter.
groups:
description:
- A list of groups the user should belong to. When update, will gracefully remove groups not listed.
required: false
default: null
password:
description:
      - When type is user and state is present, define the user's login password. Also works with update. Note that this always returns changed.
required: false
default: null
update_password:
required: false
default: always
choices: ['always', 'on_create']
description:
- C(always) will update passwords if they differ. C(on_create) will only set the password for newly created users.
notes:
  - 'Currently boto does not support the removal of Managed Policies; the module will error out if your
    user/group/role has managed policies when you try to do state=absent. They will need to be removed manually.'
author:
- "Jonathan I. Davila (@defionscode)"
- "Paul Seiffert (@seiffert)"
extends_documentation_fragment: aws
'''
EXAMPLES = '''
# Basic user creation example
tasks:
- name: Create two new IAM users with API keys
iam:
iam_type: user
name: "{{ item }}"
state: present
password: "{{ temp_pass }}"
access_key_state: create
with_items:
- jcleese
- mpython
# Advanced example, create two new groups and add the pre-existing user
# jdavila to both groups.
task:
- name: Create Two Groups, Mario and Luigi
iam:
iam_type: group
name: "{{ item }}"
state: present
with_items:
- Mario
- Luigi
register: new_groups
- name:
iam:
iam_type: user
name: jdavila
state: update
groups: "{{ item.created_group.group_name }}"
with_items: "{{ new_groups.results }}"
# Example of role with custom trust policy for Lambda service
- name: Create IAM role with custom trust relationship
iam:
iam_type: role
name: AAALambdaTestRole
state: present
trust_policy:
Version: '2012-10-17'
Statement:
- Action: sts:AssumeRole
Effect: Allow
Principal:
Service: lambda.amazonaws.com
'''
import json
import itertools
import sys
try:
import boto
import boto.iam
import boto.ec2
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
def boto_exception(err):
'''generic error message handler'''
if hasattr(err, 'error_message'):
error = err.error_message
elif hasattr(err, 'message'):
error = err.message
else:
error = '%s: %s' % (Exception, err)
return error
def _paginate(func, attr):
'''
paginates the results from func by continuously passing in
the returned marker if the results were truncated. this returns
an iterator over the items in the returned response. `attr` is
the name of the attribute to iterate over in the response.
'''
finished, marker = False, None
while not finished:
res = func(marker=marker)
for item in getattr(res, attr):
yield item
finished = res.is_truncated == 'false'
if not finished:
marker = res.marker
def list_all_groups(iam):
return [item['group_name'] for item in _paginate(iam.get_all_groups, 'groups')]
def list_all_users(iam):
return [item['user_name'] for item in _paginate(iam.get_all_users, 'users')]
def list_all_roles(iam):
return [item['role_name'] for item in _paginate(iam.list_roles, 'roles')]
def list_all_instance_profiles(iam):
return [item['instance_profile_name'] for item in _paginate(iam.list_instance_profiles, 'instance_profiles')]
def create_user(module, iam, name, pwd, path, key_state, key_count):
key_qty = 0
keys = []
try:
user_meta = iam.create_user(
name, path).create_user_response.create_user_result.user
changed = True
if pwd is not None:
pwd = iam.create_login_profile(name, pwd)
if key_state in ['create']:
if key_count:
while key_count > key_qty:
keys.append(iam.create_access_key(
user_name=name).create_access_key_response.\
create_access_key_result.\
access_key)
key_qty += 1
else:
keys = None
except boto.exception.BotoServerError as err:
module.fail_json(changed=False, msg=str(err))
else:
user_info = dict(created_user=user_meta, password=pwd, access_keys=keys)
return (user_info, changed)
def delete_user(module, iam, name):
changed = False
try:
current_keys = [ck['access_key_id'] for ck in
iam.get_all_access_keys(name).list_access_keys_result.access_key_metadata]
for key in current_keys:
iam.delete_access_key(key, name)
try:
login_profile = iam.get_login_profiles(name).get_login_profile_response
except boto.exception.BotoServerError as err:
error_msg = boto_exception(err)
if ('Cannot find Login Profile') in error_msg:
iam.delete_user(name)
else:
iam.delete_login_profile(name)
iam.delete_user(name)
except Exception as ex:
module.fail_json(changed=False, msg="delete failed %s" %ex)
if ('must detach all policies first') in error_msg:
for policy in iam.get_all_user_policies(name).list_user_policies_result.policy_names:
iam.delete_user_policy(name, policy)
try:
iam.delete_user(name)
except boto.exception.BotoServerError as err:
error_msg = boto_exception(err)
if ('must detach all policies first') in error_msg:
module.fail_json(changed=changed, msg="All inline polices have been removed. Though it appears"
"that %s has Managed Polices. This is not "
"currently supported by boto. Please detach the polices "
"through the console and try again." % name)
else:
module.fail_json(changed=changed, msg=str(error_msg))
else:
changed = True
else:
module.fail_json(changed=changed, msg=str(error_msg))
else:
changed = True
return name, changed
def update_user(module, iam, name, new_name, new_path, key_state, key_count, keys, pwd, updated):
changed = False
name_change = False
if updated and new_name:
name = new_name
try:
current_keys = [ck['access_key_id'] for ck in
iam.get_all_access_keys(name).list_access_keys_result.access_key_metadata]
status = [ck['status'] for ck in
iam.get_all_access_keys(name).list_access_keys_result.access_key_metadata]
key_qty = len(current_keys)
except boto.exception.BotoServerError as err:
error_msg = boto_exception(err)
if 'cannot be found' in error_msg and updated:
current_keys = [ck['access_key_id'] for ck in
iam.get_all_access_keys(new_name).list_access_keys_result.access_key_metadata]
status = [ck['status'] for ck in
iam.get_all_access_keys(new_name).list_access_keys_result.access_key_metadata]
name = new_name
else:
module.fail_json(changed=False, msg=str(err))
updated_key_list = {}
if new_name or new_path:
c_path = iam.get_user(name).get_user_result.user['path']
if (name != new_name) or (c_path != new_path):
changed = True
try:
if not updated:
user = iam.update_user(
name, new_user_name=new_name, new_path=new_path).update_user_response.response_metadata
else:
user = iam.update_user(
name, new_path=new_path).update_user_response.response_metadata
user['updates'] = dict(
old_username=name, new_username=new_name, old_path=c_path, new_path=new_path)
except boto.exception.BotoServerError as err:
error_msg = boto_exception(err)
module.fail_json(changed=False, msg=str(err))
else:
if not updated:
name_change = True
if pwd:
try:
iam.update_login_profile(name, pwd)
changed = True
except boto.exception.BotoServerError:
try:
iam.create_login_profile(name, pwd)
changed = True
except boto.exception.BotoServerError as err:
error_msg = boto_exception(str(err))
if 'Password does not conform to the account password policy' in error_msg:
module.fail_json(changed=False, msg="Password doesn't conform to policy")
else:
module.fail_json(msg=error_msg)
try:
current_keys = [ck['access_key_id'] for ck in
iam.get_all_access_keys(name).list_access_keys_result.access_key_metadata]
status = [ck['status'] for ck in
iam.get_all_access_keys(name).list_access_keys_result.access_key_metadata]
key_qty = len(current_keys)
except boto.exception.BotoServerError as err:
error_msg = boto_exception(err)
if 'cannot be found' in error_msg and updated:
current_keys = [ck['access_key_id'] for ck in
iam.get_all_access_keys(new_name).list_access_keys_result.access_key_metadata]
status = [ck['status'] for ck in
iam.get_all_access_keys(new_name).list_access_keys_result.access_key_metadata]
name = new_name
else:
module.fail_json(changed=False, msg=str(err))
new_keys = []
if key_state == 'create':
try:
while key_count > key_qty:
new_keys.append(iam.create_access_key(
user_name=name).create_access_key_response.create_access_key_result.access_key)
key_qty += 1
changed = True
except boto.exception.BotoServerError as err:
module.fail_json(changed=False, msg=str(err))
if keys and key_state:
for access_key in keys:
if key_state in ('active', 'inactive'):
if access_key in current_keys:
for current_key, current_key_state in zip(current_keys, status):
if key_state != current_key_state.lower():
try:
iam.update_access_key(access_key, key_state.capitalize(), user_name=name)
changed = True
except boto.exception.BotoServerError as err:
module.fail_json(changed=False, msg=str(err))
else:
module.fail_json(msg="Supplied keys not found for %s. "
"Current keys: %s. "
"Supplied key(s): %s" %
(name, current_keys, keys)
)
if key_state == 'remove':
if access_key in current_keys:
try:
iam.delete_access_key(access_key, user_name=name)
except boto.exception.BotoServerError as err:
module.fail_json(changed=False, msg=str(err))
else:
changed = True
try:
final_keys, final_key_status = \
[ck['access_key_id'] for ck in
iam.get_all_access_keys(name).
list_access_keys_result.
access_key_metadata],\
[ck['status'] for ck in
iam.get_all_access_keys(name).
list_access_keys_result.
access_key_metadata]
except boto.exception.BotoServerError as err:
module.fail_json(changed=changed, msg=str(err))
for fk, fks in zip(final_keys, final_key_status):
updated_key_list.update({fk: fks})
return name_change, updated_key_list, changed, new_keys
def set_users_groups(module, iam, name, groups, updated=None,
new_name=None):
""" Sets groups for a user, will purge groups not explicitly passed, while
retaining pre-existing groups that also are in the new list.
"""
changed = False
if updated:
name = new_name
try:
orig_users_groups = [og['group_name'] for og in iam.get_groups_for_user(
name).list_groups_for_user_result.groups]
remove_groups = [
rg for rg in frozenset(orig_users_groups).difference(groups)]
new_groups = [
ng for ng in frozenset(groups).difference(orig_users_groups)]
except boto.exception.BotoServerError as err:
module.fail_json(changed=changed, msg=str(err))
else:
if len(orig_users_groups) > 0:
for new in new_groups:
iam.add_user_to_group(new, name)
for rm in remove_groups:
iam.remove_user_from_group(rm, name)
else:
for group in groups:
try:
iam.add_user_to_group(group, name)
except boto.exception.BotoServerError as err:
error_msg = boto_exception(err)
if ('The group with name %s cannot be found.' % group) in error_msg:
module.fail_json(changed=False, msg="Group %s doesn't exist" % group)
if len(remove_groups) > 0 or len(new_groups) > 0:
changed = True
return (groups, changed)
def create_group(module=None, iam=None, name=None, path=None):
changed = False
try:
iam.create_group(
name, path).create_group_response.create_group_result.group
except boto.exception.BotoServerError as err:
module.fail_json(changed=changed, msg=str(err))
else:
changed = True
return name, changed
def delete_group(module=None, iam=None, name=None):
changed = False
try:
iam.delete_group(name)
except boto.exception.BotoServerError as err:
error_msg = boto_exception(err)
if ('must delete policies first') in error_msg:
for policy in iam.get_all_group_policies(name).list_group_policies_result.policy_names:
iam.delete_group_policy(name, policy)
try:
iam.delete_group(name)
except boto.exception.BotoServerError as err:
error_msg = boto_exception(err)
if ('must delete policies first') in error_msg:
module.fail_json(changed=changed, msg="All inline polices have been removed. Though it appears"
"that %s has Managed Polices. This is not "
"currently supported by boto. Please detach the polices "
"through the console and try again." % name)
else:
module.fail_json(changed=changed, msg=str(error_msg))
else:
changed = True
else:
module.fail_json(changed=changed, msg=str(error_msg))
else:
changed = True
return changed, name
def update_group(module=None, iam=None, name=None, new_name=None, new_path=None):
changed = False
try:
current_group_path = iam.get_group(
name).get_group_response.get_group_result.group['path']
if new_path:
if current_group_path != new_path:
iam.update_group(name, new_path=new_path)
changed = True
if new_name:
if name != new_name:
iam.update_group(name, new_group_name=new_name, new_path=new_path)
changed = True
name = new_name
except boto.exception.BotoServerError as err:
module.fail_json(changed=changed, msg=str(err))
return changed, name, new_path, current_group_path
def create_role(module, iam, name, path, role_list, prof_list, trust_policy_doc):
changed = False
iam_role_result = None
instance_profile_result = None
try:
if name not in role_list:
changed = True
iam_role_result = iam.create_role(name,
assume_role_policy_document=trust_policy_doc,
path=path).create_role_response.create_role_result.role.role_name
if name not in prof_list:
instance_profile_result = iam.create_instance_profile(name,
path=path).create_instance_profile_response.create_instance_profile_result.instance_profile
iam.add_role_to_instance_profile(name, name)
else:
instance_profile_result = iam.get_instance_profile(name).get_instance_profile_response.get_instance_profile_result.instance_profile
except boto.exception.BotoServerError as err:
module.fail_json(changed=changed, msg=str(err))
else:
updated_role_list = list_all_roles(iam)
return changed, updated_role_list, iam_role_result, instance_profile_result
def delete_role(module, iam, name, role_list, prof_list):
changed = False
iam_role_result = None
instance_profile_result = None
try:
if name in role_list:
cur_ins_prof = [rp['instance_profile_name'] for rp in
iam.list_instance_profiles_for_role(name).
list_instance_profiles_for_role_result.
instance_profiles]
for profile in cur_ins_prof:
iam.remove_role_from_instance_profile(profile, name)
try:
iam.delete_role(name)
except boto.exception.BotoServerError as err:
error_msg = boto_exception(err)
if ('must detach all policies first') in error_msg:
for policy in iam.list_role_policies(name).list_role_policies_result.policy_names:
iam.delete_role_policy(name, policy)
try:
iam_role_result = iam.delete_role(name)
except boto.exception.BotoServerError as err:
error_msg = boto_exception(err)
if ('must detach all policies first') in error_msg:
module.fail_json(changed=changed, msg="All inline polices have been removed. Though it appears"
"that %s has Managed Polices. This is not "
"currently supported by boto. Please detach the polices "
"through the console and try again." % name)
else:
module.fail_json(changed=changed, msg=str(err))
else:
changed = True
else:
changed = True
for prof in prof_list:
if name == prof:
instance_profile_result = iam.delete_instance_profile(name)
except boto.exception.BotoServerError as err:
module.fail_json(changed=changed, msg=str(err))
else:
updated_role_list = list_all_roles(iam)
return changed, updated_role_list, iam_role_result, instance_profile_result
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(dict(
iam_type=dict(
default=None, required=True, choices=['user', 'group', 'role']),
groups=dict(type='list', default=None, required=False),
state=dict(
default=None, required=True, choices=['present', 'absent', 'update']),
password=dict(default=None, required=False, no_log=True),
update_password=dict(default='always', required=False, choices=['always', 'on_create']),
access_key_state=dict(default=None, required=False, choices=[
'active', 'inactive', 'create', 'remove',
'Active', 'Inactive', 'Create', 'Remove']),
access_key_ids=dict(type='list', default=None, required=False),
key_count=dict(type='int', default=1, required=False),
name=dict(default=None, required=False),
trust_policy_filepath=dict(default=None, required=False),
trust_policy=dict(type='dict', default=None, required=False),
new_name=dict(default=None, required=False),
path=dict(default='/', required=False),
new_path=dict(default=None, required=False)
)
)
module = AnsibleModule(
argument_spec=argument_spec,
mutually_exclusive=[['trust_policy', 'trust_policy_filepath']],
)
if not HAS_BOTO:
module.fail_json(msg='This module requires boto, please install it')
state = module.params.get('state').lower()
iam_type = module.params.get('iam_type').lower()
groups = module.params.get('groups')
name = module.params.get('name')
new_name = module.params.get('new_name')
password = module.params.get('password')
update_pw = module.params.get('update_password')
path = module.params.get('path')
new_path = module.params.get('new_path')
key_count = module.params.get('key_count')
key_state = module.params.get('access_key_state')
trust_policy = module.params.get('trust_policy')
trust_policy_filepath = module.params.get('trust_policy_filepath')
key_ids = module.params.get('access_key_ids')
if key_state:
key_state = key_state.lower()
if any([n in key_state for n in ['active', 'inactive']]) and not key_ids:
module.fail_json(changed=False, msg="At least one access key has to be defined in order"
" to use 'active' or 'inactive'")
if iam_type == 'user' and module.params.get('password') is not None:
pwd = module.params.get('password')
elif iam_type != 'user' and module.params.get('password') is not None:
module.fail_json(msg="a password is being specified when the iam_type "
"is not user. Check parameters")
else:
pwd = None
if iam_type != 'user' and (module.params.get('access_key_state') is not None or
                               module.params.get('access_key_ids') is not None):
module.fail_json(msg="the IAM type must be user, when IAM access keys "
"are being modified. Check parameters")
if iam_type == 'role' and state == 'update':
module.fail_json(changed=False, msg="iam_type: role, cannot currently be updated, "
"please specify present or absent")
# check if trust_policy is present -- it can be inline JSON or a file path to a JSON file
if trust_policy_filepath:
try:
with open(trust_policy_filepath, 'r') as json_data:
trust_policy_doc = json.dumps(json.load(json_data))
except Exception as e:
module.fail_json(msg=str(e) + ': ' + trust_policy_filepath)
elif trust_policy:
try:
trust_policy_doc = json.dumps(trust_policy)
except Exception as e:
module.fail_json(msg=str(e) + ': ' + trust_policy)
else:
trust_policy_doc = None
region, ec2_url, aws_connect_kwargs = get_aws_connection_info(module)
try:
if region:
iam = connect_to_aws(boto.iam, region, **aws_connect_kwargs)
else:
iam = boto.iam.connection.IAMConnection(**aws_connect_kwargs)
except boto.exception.NoAuthHandlerFound as e:
module.fail_json(msg=str(e))
result = {}
changed = False
try:
orig_group_list = list_all_groups(iam)
orig_user_list = list_all_users(iam)
orig_role_list = list_all_roles(iam)
orig_prof_list = list_all_instance_profiles(iam)
except boto.exception.BotoServerError as err:
module.fail_json(msg=err.message)
if iam_type == 'user':
been_updated = False
user_groups = None
user_exists = any([n in [name, new_name] for n in orig_user_list])
if user_exists:
current_path = iam.get_user(name).get_user_result.user['path']
if not new_path and current_path != path:
new_path = path
path = current_path
if state == 'present' and not user_exists and not new_name:
(meta, changed) = create_user(
module, iam, name, password, path, key_state, key_count)
keys = iam.get_all_access_keys(name).list_access_keys_result.\
access_key_metadata
if groups:
(user_groups, changed) = set_users_groups(
module, iam, name, groups, been_updated, new_name)
module.exit_json(
user_meta=meta, groups=user_groups, keys=keys, changed=changed)
elif state in ['present', 'update'] and user_exists:
if update_pw == 'on_create':
password = None
if name not in orig_user_list and new_name in orig_user_list:
been_updated = True
name_change, key_list, user_changed, new_key = update_user(
module, iam, name, new_name, new_path, key_state, key_count, key_ids, password, been_updated)
if new_key:
user_meta = {'access_keys': list(new_key)}
user_meta['access_keys'].extend(
[{'access_key_id': key, 'status': value} for key, value in key_list.items() if
key not in [it['access_key_id'] for it in new_key]])
else:
user_meta = {
'access_keys': [{'access_key_id': key, 'status': value} for key, value in key_list.items()]}
if name_change and new_name:
orig_name = name
name = new_name
if isinstance(groups, list):
user_groups, groups_changed = set_users_groups(
module, iam, name, groups, been_updated, new_name)
if groups_changed == user_changed:
changed = groups_changed
else:
changed = True
else:
changed = user_changed
if new_name and new_path:
module.exit_json(changed=changed, groups=user_groups, old_user_name=orig_name,
new_user_name=new_name, old_path=path, new_path=new_path, keys=key_list,
created_keys=new_key, user_meta=user_meta)
elif new_name and not new_path and not been_updated:
module.exit_json(
changed=changed, groups=user_groups, old_user_name=orig_name, new_user_name=new_name, keys=key_list,
created_keys=new_key, user_meta=user_meta)
elif new_name and not new_path and been_updated:
module.exit_json(
changed=changed, groups=user_groups, user_name=new_name, keys=key_list, key_state=key_state,
created_keys=new_key, user_meta=user_meta)
elif not new_name and new_path:
module.exit_json(
changed=changed, groups=user_groups, user_name=name, old_path=path, new_path=new_path,
keys=key_list, created_keys=new_key, user_meta=user_meta)
else:
module.exit_json(
changed=changed, groups=user_groups, user_name=name, keys=key_list, created_keys=new_key,
user_meta=user_meta)
elif state == 'update' and not user_exists:
module.fail_json(
msg="The user %s does not exist. No update made." % name)
elif state == 'absent':
if user_exists:
try:
set_users_groups(module, iam, name, '')
name, changed = delete_user(module, iam, name)
module.exit_json(deleted_user=name, changed=changed)
except Exception as ex:
module.fail_json(changed=changed, msg=str(ex))
else:
module.exit_json(
changed=False, msg="User %s is already absent from your AWS IAM users" % name)
elif iam_type == 'group':
group_exists = name in orig_group_list
if state == 'present' and not group_exists:
new_group, changed = create_group(module=module, iam=iam, name=name, path=path)
module.exit_json(changed=changed, group_name=new_group)
elif state in ['present', 'update'] and group_exists:
changed, updated_name, updated_path, cur_path = update_group(
module=module, iam=iam, name=name, new_name=new_name,
new_path=new_path)
if new_path and new_name:
module.exit_json(changed=changed, old_group_name=name,
new_group_name=updated_name, old_path=cur_path,
new_group_path=updated_path)
if new_path and not new_name:
module.exit_json(changed=changed, group_name=name,
old_path=cur_path,
new_group_path=updated_path)
if not new_path and new_name:
module.exit_json(changed=changed, old_group_name=name,
new_group_name=updated_name, group_path=cur_path)
if not new_path and not new_name:
module.exit_json(
changed=changed, group_name=name, group_path=cur_path)
elif state == 'update' and not group_exists:
module.fail_json(
changed=changed, msg="Update Failed. Group %s doesn't seem to exist!" % name)
elif state == 'absent':
if name in orig_group_list:
removed_group, changed = delete_group(module=module, iam=iam, name=name)
module.exit_json(changed=changed, delete_group=removed_group)
else:
module.exit_json(changed=changed, msg="Group already absent")
elif iam_type == 'role':
role_list = []
if state == 'present':
changed, role_list, role_result, instance_profile_result = create_role(
module, iam, name, path, orig_role_list, orig_prof_list, trust_policy_doc)
elif state == 'absent':
changed, role_list, role_result, instance_profile_result = delete_role(
module, iam, name, orig_role_list, orig_prof_list)
elif state == 'update':
module.fail_json(
changed=False, msg='Role update not currently supported by boto.')
module.exit_json(changed=changed, roles=role_list, role_result=role_result,
instance_profile_result=instance_profile_result)
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
if __name__ == '__main__':
main()
| Tatsh-ansible/ansible | lib/ansible/modules/cloud/amazon/iam.py | Python | gpl-3.0 | 34,464 |
#!/usr/bin/env python
import sys, getopt, argparse
from kazoo.client import KazooClient
import json
def loadZookeeperOptions(opts,zk):
node = "/all_clients/"+opts['client']+"/offline/semvec"
if zk.exists(node):
data, stat = zk.get(node)
jStr = data.decode("utf-8")
print "Found zookeeper configuration:",jStr
j = json.loads(jStr)
for key in j:
opts[key] = j[key]
def activateModel(args,folder,zk):
node = "/all_clients/"+args.client+"/svtext"
    print "Activating model in zookeeper at node ",node," with data ",folder
if zk.exists(node):
zk.set(node,folder)
else:
zk.create(node,folder,makepath=True)
if __name__ == '__main__':
parser = argparse.ArgumentParser(prog='set-client-config')
parser.add_argument('-z', '--zookeeper', help='zookeeper hosts', required=True)
parser.add_argument('--clientVariable', help='client variable name', default="$CLIENT")
args = parser.parse_args()
opts = vars(args)
zk = KazooClient(hosts=args.zookeeper)
zk.start()
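    # Each non-comment stdin line is "<clients> <node> <value...>", e.g. (illustrative):
    #   clientA,clientB /all_clients/$CLIENT/some/node {"param": 1}
    # The client variable (default "$CLIENT", see --clientVariable) is replaced by each client name.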
for line in sys.stdin:
line = line.rstrip()
parts = line.split()
if not line.startswith("#"):
clients = parts[0].split(',')
node = parts[1]
value = " ".join(parts[2:])
print "--------------------------"
print parts[0],node,"->",value
for client in clients:
nodeClient = node.replace(args.clientVariable,client)
valueClient = value.replace(args.clientVariable,client)
print "----"
print nodeClient
print valueClient
if zk.exists(nodeClient):
zk.set(nodeClient,valueClient)
else:
zk.create(nodeClient,valueClient,makepath=True)
zk.stop()
| SeldonIO/seldon-server | scripts/zookeeper/set-client-config.py | Python | apache-2.0 | 1,889 |
# -*- coding: utf-8 -*-
# Copyright (c) 2015-2018, Exa Analytics Development Team
# Distributed under the terms of the Apache License 2.0
"""
Input Generator and Parser
###############################
"""
| avmarchenko/exatomic | exatomic/adf/inputs.py | Python | apache-2.0 | 205 |
# mssql/information_schema.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
# TODO: should be using the sys. catalog with SQL Server, not information
# schema
from ... import Table, MetaData, Column
from ...types import String, Unicode, UnicodeText, Integer, TypeDecorator
from ... import cast
from ... import util
from ...sql import expression
from ...ext.compiler import compiles
ischema = MetaData()
class CoerceUnicode(TypeDecorator):
impl = Unicode
def process_bind_param(self, value, dialect):
if util.py2k and isinstance(value, util.binary_type):
value = value.decode(dialect.encoding)
return value
def bind_expression(self, bindvalue):
return _cast_on_2005(bindvalue)
class _cast_on_2005(expression.ColumnElement):
def __init__(self, bindvalue):
self.bindvalue = bindvalue
@compiles(_cast_on_2005)
def _compile(element, compiler, **kw):
from . import base
if compiler.dialect.server_version_info is None or \
compiler.dialect.server_version_info < base.MS_2005_VERSION:
return compiler.process(element.bindvalue, **kw)
else:
return compiler.process(cast(element.bindvalue, Unicode), **kw)
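# Note: bind_expression() above wraps bound values in _cast_on_2005, which at compile
# time emits a CAST to Unicode on SQL Server 2005 and later and passes the plain bind
# parameter through unchanged on older servers (see _compile).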
schemata = Table("SCHEMATA", ischema,
Column("CATALOG_NAME", CoerceUnicode, key="catalog_name"),
Column("SCHEMA_NAME", CoerceUnicode, key="schema_name"),
Column("SCHEMA_OWNER", CoerceUnicode, key="schema_owner"),
schema="INFORMATION_SCHEMA")
tables = Table("TABLES", ischema,
Column("TABLE_CATALOG", CoerceUnicode, key="table_catalog"),
Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"),
Column("TABLE_NAME", CoerceUnicode, key="table_name"),
Column(
"TABLE_TYPE", String(convert_unicode=True),
key="table_type"),
schema="INFORMATION_SCHEMA")
columns = Table("COLUMNS", ischema,
Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"),
Column("TABLE_NAME", CoerceUnicode, key="table_name"),
Column("COLUMN_NAME", CoerceUnicode, key="column_name"),
Column("IS_NULLABLE", Integer, key="is_nullable"),
Column("DATA_TYPE", String, key="data_type"),
Column("ORDINAL_POSITION", Integer, key="ordinal_position"),
Column("CHARACTER_MAXIMUM_LENGTH", Integer,
key="character_maximum_length"),
Column("NUMERIC_PRECISION", Integer, key="numeric_precision"),
Column("NUMERIC_SCALE", Integer, key="numeric_scale"),
Column("COLUMN_DEFAULT", Integer, key="column_default"),
Column("COLLATION_NAME", String, key="collation_name"),
schema="INFORMATION_SCHEMA")
constraints = Table("TABLE_CONSTRAINTS", ischema,
Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"),
Column("TABLE_NAME", CoerceUnicode, key="table_name"),
Column("CONSTRAINT_NAME", CoerceUnicode,
key="constraint_name"),
Column("CONSTRAINT_TYPE", String(
convert_unicode=True), key="constraint_type"),
schema="INFORMATION_SCHEMA")
column_constraints = Table("CONSTRAINT_COLUMN_USAGE", ischema,
Column("TABLE_SCHEMA", CoerceUnicode,
key="table_schema"),
Column("TABLE_NAME", CoerceUnicode,
key="table_name"),
Column("COLUMN_NAME", CoerceUnicode,
key="column_name"),
Column("CONSTRAINT_NAME", CoerceUnicode,
key="constraint_name"),
schema="INFORMATION_SCHEMA")
key_constraints = Table("KEY_COLUMN_USAGE", ischema,
Column("TABLE_SCHEMA", CoerceUnicode,
key="table_schema"),
Column("TABLE_NAME", CoerceUnicode,
key="table_name"),
Column("COLUMN_NAME", CoerceUnicode,
key="column_name"),
Column("CONSTRAINT_NAME", CoerceUnicode,
key="constraint_name"),
Column("ORDINAL_POSITION", Integer,
key="ordinal_position"),
schema="INFORMATION_SCHEMA")
ref_constraints = Table("REFERENTIAL_CONSTRAINTS", ischema,
Column("CONSTRAINT_CATALOG", CoerceUnicode,
key="constraint_catalog"),
Column("CONSTRAINT_SCHEMA", CoerceUnicode,
key="constraint_schema"),
Column("CONSTRAINT_NAME", CoerceUnicode,
key="constraint_name"),
# TODO: is CATLOG misspelled ?
Column("UNIQUE_CONSTRAINT_CATLOG", CoerceUnicode,
key="unique_constraint_catalog"),
Column("UNIQUE_CONSTRAINT_SCHEMA", CoerceUnicode,
key="unique_constraint_schema"),
Column("UNIQUE_CONSTRAINT_NAME", CoerceUnicode,
key="unique_constraint_name"),
Column("MATCH_OPTION", String, key="match_option"),
Column("UPDATE_RULE", String, key="update_rule"),
Column("DELETE_RULE", String, key="delete_rule"),
schema="INFORMATION_SCHEMA")
views = Table("VIEWS", ischema,
Column("TABLE_CATALOG", CoerceUnicode, key="table_catalog"),
Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"),
Column("TABLE_NAME", CoerceUnicode, key="table_name"),
Column("VIEW_DEFINITION", CoerceUnicode, key="view_definition"),
Column("CHECK_OPTION", String, key="check_option"),
Column("IS_UPDATABLE", String, key="is_updatable"),
schema="INFORMATION_SCHEMA")
| fernandog/Medusa | ext/sqlalchemy/dialects/mssql/information_schema.py | Python | gpl-3.0 | 6,481 |
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2010-2011 BAAMTU SARL (<http://www.baamtu.sn>).
# contact: leadsn@baamtu.com
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name" : "OHADA - Accounting",
"version" : "1.0",
"author" : "Baamtu Senegal",
"category" : "Localization/Account Charts",
"description": """This module implements the accounting chart for OHADA area.
It allows any company or association to manage its financial accounting.
Countries that use OHADA are the following:
Benin, Burkina Faso, Cameroon, Central African Republic, Comoros, Congo,
Ivory Coast, Gabon, Guinea, Guinea Bissau,
    Equatorial Guinea, Mali, Niger, Democratic Republic of the Congo, Senegal, Chad, Togo.
""",
"website": "http://www.baamtu.com",
"depends" : ["account", "base_vat"],
"demo_xml" : [],
"init_xml":[],
"update_xml" : ["l10n_syscohada_data.xml","l10n_syscohada_wizard.xml"],
"auto_install": False,
"installable": True,
'certificate': '0074187989333',
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| ksrajkumar/openerp-6.1 | openerp/addons/l10n_syscohada/__openerp__.py | Python | agpl-3.0 | 1,893 |
# --------------------------------------------------------
# FCN
# Copyright (c) 2016 RSE at UW
# Licensed under The MIT License [see LICENSE for details]
# Written by Yu Xiang
# --------------------------------------------------------
import numpy as np
import normals.gpu_normals
from fcn.config import cfg
import cv2
# backproject pixels into 3D points
def backproject(im_depth, meta_data):
# convert depth
depth = im_depth.astype(np.float32, copy=True) / meta_data['factor_depth']
near = meta_data['near_plane']
far = meta_data['far_plane']
depth = (far + near) / (far - near) - (2 * far * near) / ((far - near) * depth)
depth = (depth + 1) / 2
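    # The two lines above apply the standard perspective depth mapping
    # z_ndc = (far + near) / (far - near) - 2 * far * near / ((far - near) * z)
    # and then rescale from NDC [-1, 1] to window depth [0, 1], matching the
    # projection matrix convention used below.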
# compute projection matrix
P = meta_data['projection_matrix']
P = np.matrix(P)
Pinv = np.linalg.pinv(P)
# construct the 2D points matrix
width = depth.shape[1]
height = depth.shape[0]
x, y = np.meshgrid(np.arange(width), np.arange(height))
ones = np.ones((height, width), dtype=np.float32)
x2d = np.stack((x, height-1-y, depth, ones), axis=2).reshape(width*height, 4)
# Map x and y from window coordinates
viewport = meta_data['viewport']
x2d[:, 0] = (x2d[:, 0] - viewport[0]) / viewport[2];
x2d[:, 1] = (x2d[:, 1] - viewport[1]) / viewport[3];
# Map to range -1 to 1
x2d[:, 0] = x2d[:, 0] * 2 - 1;
x2d[:, 1] = x2d[:, 1] * 2 - 1;
x2d[:, 2] = x2d[:, 2] * 2 - 1;
# backprojection
x3d = Pinv * x2d.transpose()
x3d[0,:] = x3d[0,:] / x3d[3,:]
x3d[1,:] = x3d[1,:] / x3d[3,:]
x3d[2,:] = x3d[2,:] / x3d[3,:]
x3d = x3d[:3,:].astype(np.float32)
norms = normals.gpu_normals.gpu_normals(x3d, width, height, cfg.GPU_ID)
# convert normals to an image
N = np.zeros((height, width, 3), dtype=np.float32)
N[y, x, 0] = norms[:, 0].reshape(height, width)
N[y, x, 1] = norms[:, 1].reshape(height, width)
N[y, x, 2] = norms[:, 2].reshape(height, width)
N = 127.5*N + 127.5
N = N.astype(np.uint8)
# show the 3D points
if 0:
# construct the 3D points
points = np.zeros((height, width, 3), dtype=np.float32)
points[y, x, 0] = x3d[0, :].reshape(height, width)
points[y, x, 1] = x3d[1, :].reshape(height, width)
points[y, x, 2] = x3d[2, :].reshape(height, width)
ns = np.zeros((height, width, 3), dtype=np.float32)
ns[y, x, 0] = norms[:, 0].reshape(height, width)
ns[y, x, 1] = norms[:, 1].reshape(height, width)
ns[y, x, 2] = norms[:, 2].reshape(height, width)
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
fig = plt.figure()
ax = fig.add_subplot(131, projection='3d')
perm = np.random.permutation(np.arange(height*width))
index = perm[:10000]
X = points[:,:,0].flatten()
Y = points[:,:,2].flatten()
Z = points[:,:,1].flatten()
# U = ns[:,:,0].flatten()
# V = ns[:,:,2].flatten()
# W = ns[:,:,1].flatten()
ax.scatter(X[index], Y[index], Z[index], c='r', marker='o')
# ax.quiver(X[index], Y[index], Z[index], U[index], V[index], W[index], length=0.1)
ax.set_xlabel('X')
ax.set_ylabel('Y')
ax.set_zlabel('Z')
ax.set_aspect('equal')
fig.add_subplot(132)
plt.imshow(im_depth)
fig.add_subplot(133)
plt.imshow(N)
plt.show()
return N
| yuxng/Deep_ISM | FCN/lib/utils/backprojection.py | Python | mit | 3,426 |
from django.db import models
from django.utils import timezone
from simple_history.models import HistoricalRecords
from edc_base.model.models.base_uuid_model import BaseUuidModel
from edc_consent.field_mixins import PersonalFieldsMixin
from edc_consent.managers import ConsentManager
from edc_consent.model_mixins import ConsentModelMixin
from edc_identifier.subject.classes import SubjectIdentifier
from edc_registration.model_mixins import RegistrationMixin
class AlreadyAllocatedError(Exception):
pass
class SubjectConsent(ConsentModelMixin, RegistrationMixin, PersonalFieldsMixin, BaseUuidModel):
"""This is a dummy consent added for schema completeness."""
objects = models.Manager()
history = HistoricalRecords()
consent = ConsentManager()
def __str__(self):
return '{}'.format(self.subject_identifier)
def natural_key(self):
return (self.subject_identifier, )
def save(self, *args, **kwargs):
if not self.id:
self.subject_identifier = SubjectIdentifier(
site_code=self.study_site).get_identifier()
self.consent_datetime = timezone.now()
super(SubjectConsent, self).save(*args, **kwargs)
class Meta(ConsentModelMixin.Meta):
app_label = 'ba_namotswe'
get_latest_by = 'consent_datetime'
ordering = ('created', )
| botswana-harvard/ba-namotswe | ba_namotswe/models/subject_consent.py | Python | gpl-3.0 | 1,362 |
"""This module contains a demo class
The Class has four methods
- __init__
- add
- subtract
- operations_completed
>>> from classes import Foo
>>> foo = Foo()
>>> foo.add(2, 4)
6
>>> foo.subtract(5,2)
3
>>> foo.operations_completed()
2
"""
class Foo(object):
"""The Foo object!"""
def __init__(self):
"""Constructor - set completed operations to 0"""
self.completed_operations = 0
def add(self, x, y):
"""Add two objects - increment completed operations"""
result = x + y
self.completed_operations += 1
return result
def subtract(self, x, y):
"""Subtract two objects - increment completed operations"""
result = x - y
self.completed_operations += 1
return result
def operations_completed(self):
"""Return the number of operations this class has completed"""
return self.completed_operations
| CLEpy/CLEpy-MotM | Sphinx/package/code/classes.py | Python | mit | 914 |
import pyttsx
engine = pyttsx.init()
engine.say("Sally sells seashells by the seashore.")
engine.say("The quick brown fox jumped over the lazy dog.")
engine.runAndWait()
| AdrienVR/NaoSimulator | dep/code/speak.py | Python | lgpl-3.0 | 170 |
#!/bin/env python
from sys import exit
import argparse
import rsa
rsaBits = 1024
pubName = 'pub_rsa.pem'
priName = 'pri_rsa.pem'
def genKey():
# generate key couple
(pubKey, priKey) = rsa.newkeys(rsaBits)
# write public key
pub = pubKey.save_pkcs1()
pubFile = open(pubName, 'w+')
pubFile.write(pub)
pubFile.close()
# write private key
pri = priKey.save_pkcs1()
priFile = open(priName, 'w+')
priFile.write(pri)
priFile.close()
return True
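# Illustrative CLI usage (the hex output will differ for every generated key pair):
#   python rsacry.py --genkey
#   python rsacry.py --encry "hello world"
#   python rsacry.py --decry <hex string printed by --encry>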
def enCryMes(message):
# get public key to encry
with open(pubName) as pubFile:
pub = pubFile.read()
pubKey = rsa.PublicKey.load_pkcs1(pub)
# encry message
enCry = rsa.encrypt(message, pubKey)
enCryStr = ''
for interMes in enCry:
enCryStr += "%2.2x" % ord(interMes)
return enCryStr
def deCryMes(message):
# get private key to decry
with open(priName) as priFile:
pri = priFile.read()
priKey = rsa.PrivateKey.load_pkcs1(pri)
# check the len of the message
lenmess = len(message)
if lenmess % 2 != 0 : # the message must be even
        exit('error: the encrypted message length must be even')
# get encryed message
enCry = ''
for i in range(lenmess/2):
enTmp = message[i * 2 : (i + 1) * 2]
enCry += chr(int(enTmp, 16))
# decry mess
deCry = rsa.decrypt(enCry, priKey)
return deCry
def main():
parser = argparse.ArgumentParser(description = 'Encry/Decry message')
parser.add_argument('--genkey', action = 'store_true',
dest = 'genKey', default = False,
help = 'generate public and private key couple')
parser.add_argument('--encry' , action = 'store', dest = 'enCry',
help = 'the message you need to encrypt')
parser.add_argument('--decry' , action = 'store', dest = 'deCry',
help = 'the message you need to decrypt')
args = parser.parse_args()
if args.genKey :
genKey()
if args.enCry:
print enCryMes(args.enCry)
if args.deCry:
print deCryMes(args.deCry)
if __name__ == '__main__':
main()
| zhaozq/rsaencrypt_self_data | rsacry.py | Python | mit | 2,154 |
#!/usr/bin/env python
"""
Common utility functions
"""
import os
import re
import sys
import gzip
import bz2
import numpy
def init_gene_DE():
"""
Initializing the gene structure for DE
"""
gene_det = [('id', 'f8'),
('chr', 'S15'),
('exons', numpy.dtype),
('gene_info', numpy.dtype),
('is_alt_spliced', 'f8'),
('name', 'S25'),
('source', 'S25'),
('start', 'f8'),
('stop', 'f8'),
('strand', 'S2'),
('transcripts', numpy.dtype)]
return gene_det
def _open_file(fname):
"""
Open the file (supports .gz .bz2) and returns the handler
"""
try:
if os.path.splitext(fname)[1] == ".gz":
FH = gzip.open(fname, 'rb')
elif os.path.splitext(fname)[1] == ".bz2":
FH = bz2.BZ2File(fname, 'rb')
else:
FH = open(fname, 'rU')
except Exception as error:
sys.exit(error)
return FH
def make_Exon_cod(strand_p, five_p_utr, cds_cod, three_p_utr):
"""
    Create exon coordinates from UTRs and the CDS region
"""
exon_pos = []
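    # Worked example (made-up coordinates, '+' strand):
    #   five_p_utr=[[1, 10]], cds_cod=[[11, 20], [30, 40]], three_p_utr=[[41, 50]]
    #   -> [[1, 20], [30, 50]]; adjacent UTR and CDS segments are merged into single exons.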
if strand_p == '+':
utr5_start, utr5_end = 0, 0
if five_p_utr != []:
utr5_start, utr5_end = five_p_utr[-1][0], five_p_utr[-1][1]
cds_5start, cds_5end = cds_cod[0][0], cds_cod[0][1]
jun_exon = []
if cds_5start-utr5_end == 0 or cds_5start-utr5_end == 1:
jun_exon = [utr5_start, cds_5end]
if len(cds_cod) == 1:
five_prime_flag = 0
if jun_exon != []:
five_p_utr = five_p_utr[:-1]
five_prime_flag = 1
for utr5 in five_p_utr:
exon_pos.append(utr5)
jun_exon = []
utr3_start, utr3_end = 0, 0
if three_p_utr != []:
utr3_start = three_p_utr[0][0]
utr3_end = three_p_utr[0][1]
if utr3_start-cds_5end == 0 or utr3_start-cds_5end == 1:
jun_exon = [cds_5start, utr3_end]
three_prime_flag = 0
if jun_exon != []:
cds_cod = cds_cod[:-1]
three_p_utr = three_p_utr[1:]
three_prime_flag = 1
if five_prime_flag == 1 and three_prime_flag == 1:
exon_pos.append([utr5_start, utr3_end])
if five_prime_flag == 1 and three_prime_flag == 0:
exon_pos.append([utr5_start, cds_5end])
cds_cod = cds_cod[:-1]
if five_prime_flag == 0 and three_prime_flag == 1:
exon_pos.append([cds_5start, utr3_end])
for cds in cds_cod:
exon_pos.append(cds)
for utr3 in three_p_utr:
exon_pos.append(utr3)
else:
if jun_exon != []:
five_p_utr = five_p_utr[:-1]
cds_cod = cds_cod[1:]
for utr5 in five_p_utr:
exon_pos.append(utr5)
exon_pos.append(jun_exon) if jun_exon != [] else ''
jun_exon = []
utr3_start, utr3_end = 0, 0
if three_p_utr != []:
utr3_start = three_p_utr[0][0]
utr3_end = three_p_utr[0][1]
cds_3start = cds_cod[-1][0]
cds_3end = cds_cod[-1][1]
if utr3_start-cds_3end == 0 or utr3_start-cds_3end == 1:
jun_exon = [cds_3start, utr3_end]
if jun_exon != []:
cds_cod = cds_cod[:-1]
three_p_utr = three_p_utr[1:]
for cds in cds_cod:
exon_pos.append(cds)
exon_pos.append(jun_exon) if jun_exon != [] else ''
for utr3 in three_p_utr:
exon_pos.append(utr3)
elif strand_p == '-':
utr3_start, utr3_end = 0, 0
if three_p_utr != []:
utr3_start = three_p_utr[-1][0]
utr3_end = three_p_utr[-1][1]
cds_3start = cds_cod[0][0]
cds_3end = cds_cod[0][1]
jun_exon = []
if cds_3start-utr3_end == 0 or cds_3start-utr3_end == 1:
jun_exon = [utr3_start, cds_3end]
if len(cds_cod) == 1:
three_prime_flag = 0
if jun_exon != []:
three_p_utr = three_p_utr[:-1]
three_prime_flag = 1
for utr3 in three_p_utr:
exon_pos.append(utr3)
jun_exon = []
(utr5_start, utr5_end) = (0, 0)
if five_p_utr != []:
utr5_start = five_p_utr[0][0]
utr5_end = five_p_utr[0][1]
if utr5_start-cds_3end == 0 or utr5_start-cds_3end == 1:
jun_exon = [cds_3start, utr5_end]
five_prime_flag = 0
if jun_exon != []:
cds_cod = cds_cod[:-1]
five_p_utr = five_p_utr[1:]
five_prime_flag = 1
if three_prime_flag == 1 and five_prime_flag == 1:
exon_pos.append([utr3_start, utr5_end])
if three_prime_flag == 1 and five_prime_flag == 0:
exon_pos.append([utr3_start, cds_3end])
cds_cod = cds_cod[:-1]
if three_prime_flag == 0 and five_prime_flag == 1:
exon_pos.append([cds_3start, utr5_end])
for cds in cds_cod:
exon_pos.append(cds)
for utr5 in five_p_utr:
exon_pos.append(utr5)
else:
if jun_exon != []:
three_p_utr = three_p_utr[:-1]
cds_cod = cds_cod[1:]
for utr3 in three_p_utr:
exon_pos.append(utr3)
if jun_exon != []:
exon_pos.append(jun_exon)
jun_exon = []
(utr5_start, utr5_end) = (0, 0)
if five_p_utr != []:
utr5_start = five_p_utr[0][0]
utr5_end = five_p_utr[0][1]
cds_5start = cds_cod[-1][0]
cds_5end = cds_cod[-1][1]
if utr5_start-cds_5end == 0 or utr5_start-cds_5end == 1:
jun_exon = [cds_5start, utr5_end]
if jun_exon != []:
cds_cod = cds_cod[:-1]
five_p_utr = five_p_utr[1:]
for cds in cds_cod:
exon_pos.append(cds)
if jun_exon != []:
exon_pos.append(jun_exon)
for utr5 in five_p_utr:
exon_pos.append(utr5)
return exon_pos
| ratschlab/oqtans_tools | rQuant/2.2/tools/helper.py | Python | mit | 6,595 |
# Copyright (c) 2015-2016 Claudiu Popa <pcmanticore@gmail.com>
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
"""Checker for anything related to the async protocol (PEP 492)."""
import astroid
from astroid import exceptions
from pylint import checkers
from pylint.checkers import utils as checker_utils
from pylint import interfaces
from pylint import utils
class AsyncChecker(checkers.BaseChecker):
__implements__ = interfaces.IAstroidChecker
name = 'async'
msgs = {
'E1700': ('Yield inside async function',
'yield-inside-async-function',
'Used when an `yield` or `yield from` statement is '
'found inside an async function.',
{'minversion': (3, 5)}),
'E1701': ("Async context manager '%s' doesn't implement __aenter__ and __aexit__.",
'not-async-context-manager',
'Used when an async context manager is used with an object '
'that does not implement the async context management protocol.',
{'minversion': (3, 5)}),
}
def open(self):
self._ignore_mixin_members = utils.get_global_option(self, 'ignore-mixin-members')
@checker_utils.check_messages('yield-inside-async-function')
def visit_asyncfunctiondef(self, node):
for child in node.nodes_of_class(astroid.Yield):
if child.scope() is node:
self.add_message('yield-inside-async-function', node=child)
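    # Illustrative snippet flagged with E1700 (yield-inside-async-function):
    #   async def produce():
    #       yield 42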
@checker_utils.check_messages('not-async-context-manager')
def visit_asyncwith(self, node):
for ctx_mgr, _ in node.items:
infered = checker_utils.safe_infer(ctx_mgr)
if infered is None or infered is astroid.YES:
continue
if isinstance(infered, astroid.Instance):
try:
infered.getattr('__aenter__')
infered.getattr('__aexit__')
except exceptions.NotFoundError:
if isinstance(infered, astroid.Instance):
# If we do not know the bases of this class,
# just skip it.
if not checker_utils.has_known_bases(infered):
continue
# Just ignore mixin classes.
if self._ignore_mixin_members:
if infered.name[-5:].lower() == 'mixin':
continue
else:
continue
self.add_message('not-async-context-manager',
node=node, args=(infered.name, ))
def register(linter):
"""required method to auto register this checker"""
linter.register_checker(AsyncChecker(linter))
| axbaretto/beam | sdks/python/.tox/lint/lib/python2.7/site-packages/pylint/checkers/async.py | Python | apache-2.0 | 2,898 |
from runscript import *
"""
Interpolation options:
keys - list of frames to interpolate in order
n - distance between keyframes
settings - interpolation settings (see bottom of script)
"""
def interpolation(keys, n=50, **kwargs):
last = 0
cache = []
#Set defaults
loop = kwargs.get('loop',True)
loops = kwargs.get('loops',True)
tmp = []
if loop:
for i in xrange(len(keys)-1):
k1 = Flame(string=keys[i].to_string())
k2 = Flame(string=keys[i+1].to_string())
equalize_flame_attributes(k1, k2)
if loops: tmp.append(k1)
tmp.append(k1)
if i==len(keys)-2:tmp.append(k2)
tmp.append(Flame(string=keys[-1].to_string()))
else:
for i in xrange(len(keys)):
k1 = Flame(string=keys[i-1].to_string())
k2 = Flame(string=keys[i].to_string())
equalize_flame_attributes(k1, k2)
if loops: tmp.append(k2)
tmp.append(k2)
keys = tmp
nk = len(keys)
if loop: nf = nk * n
else: nf = (nk-1) * n
while last < nf:
if len(cache) < last + 1:
cache.append(get_flame(keys, n, last, **kwargs))
if last%10<>9: print '.',
if last%10==9: print str(last+1) + '/' + str(nf)
if last==nf-1: print "Calculations complete"
yield cache[last]
last += 1
if last >= nf:
last = 0
#---end
def get_flame(keys, n, i, **kwargs):
#Make new, empty flame
flame = Flame()
flame.name = kwargs.get('flamename') + str(kwargs.get('offset')+i)
rotate = kwargs.get('rotate')
pivot = kwargs.get('pivot')
#Flame attrs
interp_attrs = ['scale', 'rotate', 'brightness', 'gamma']
for name, test in keys[0].iter_attributes():
cps = []
if name in interp_attrs:
for k in keys:
tmp = getattr(k,name)
if type(tmp) == list: cps.append(tuple(tmp))
else: cps.append(tmp)
val = interp(cps, n, i, **kwargs)
if type(val)==tuple: setattr(flame, name, list(val))
else: setattr(flame, name, val)
else:
pass
#end flame attrs
maxi = 0
for k in keys:
if len(k.xform)>maxi: maxi=len(k.xform)
#Xform attrs
for x in xrange(maxi):
#Add xform
flame.add_xform()
#coef interp
cpsx = []
cpsy = []
cpso = []
attrset = []
for k in keys:
if len(k.xform)<x+1:
cpsx.append((1,0))
cpsy.append((0,1))
cpso.append((0,0))
else:
a,d,b,e,c,f = k.xform[x].coefs
cpsx.append((a,d))
cpsy.append((b,e))
cpso.append((c,f))
attrset += set(attrset).union(k.xform[x].attributes)
vx = interp(cpsx, n, i, **kwargs)
vy = interp(cpsy, n, i, **kwargs)
vo = interp(cpso, n, i, **kwargs)
flame.xform[x].coefs = tuple(vx + vy + vo)
if rotate['count']<>0:
spin = drange(0,rotate['count']*360,n,i%n,**rotate)
flame.xform[x].rotate(spin)
if pivot['count']<>0:
spin = drange(0,pivot['count']*360,n,i%n,**pivot)
flame.xform[x].orbit(spin)
#attribute intep
for name in attrset:
cps = []
for k in keys:
if len(k.xform)>x and hasattr(k.xform[x], name):
cps.append(getattr(k.xform[x], name))
else:
cps.append(0)
val = interp(cps, n, i, **kwargs)
if name=='weight': val = clip(val, 0, 100)
setattr(flame.xform[x], name, val)
#end xforms
#gradient
for c, cps in enumerate(zip(*(key.gradient for key in keys))):
val = interp(cps, n, i, **kwargs)
flame.gradient[c] = val
return flame
#end get_flame
#end interpolation
"""
Erik's secret sauce added for better flava
"""
def get_pad(xform, target):
HOLES = ['spherical', 'ngon', 'julian', 'juliascope', 'polar', 'wedge_sph', 'wedge_julia']
target.add_xform()
target.xform[-1] = copy.deepcopy(xform)
t = target.xform[-1]
t.coefs = [0.0,1.0,1.0,0.0,0.0,0.0]
if len(set(t).intersection(HOLES)) > 0:
#negative ident
t.coefs = [-1.0,0.0,0.0,-1.0,0.0,0.0]
t.linear = -1.0
if 'rectangles' in t.attributes:
t.rectangles = 1.0
t.rectangles_x = 0.0
t.rectangles_y = 0.0
if 'rings2' in t.attributes:
t.rings2 = 1.0
t.rings2_val = 0.0
if 'fan2' in t.attributes:
t.fan2 = 1.0
t.fan2_x = 0.0
t.fan2_y = 0.0
if 'blob' in t.attributes:
t.blob = 1.0
t.blob_low = 1.0
t.blob_high = 1.0
t.blob_waves = 1.0
if 'perspective' in t.attributes:
t.perspective = 1.0
t.perspective_angle = 0.0
if 'curl' in t.attributes:
t.curl = 1.0
t.curl_c1 = 0.0
t.curl_c2 = 0.0
if 'super_shape' in t.attributes:
t.super_shape = 1.0
t.super_shape_n1 = 2.0
t.super_shape_n2 = 2.0
t.super_shape_n3 = 2.0
t.super_shape_rnd = 0.0
t.super_shape_holes = 0.0
if 'fan' in t.attributes:
t.fan = 1.0
if 'rings' in t.attributes:
t.rings = 1.0
t.weight = 0
#-----------------------------------------------------------------------------
def equalize_flame_attributes(flame1,flame2):
"""Make two flames have the same number of xforms and the same
attributes. Also moves the final xform (if any) to flame.xform"""
diff = len(flame1.xform) - len(flame2.xform)
if diff < 0:
for i in range(-diff):
# get_pad(flame2.xform[diff+i], flame1)
flame1.add_xform()
flame1.xform[-1].symmetry = 1
elif diff > 0:
for i in range(diff):
# get_pad(flame1.xform[diff+i], flame2)
flame2.add_xform()
flame2.xform[-1].symmetry = 1
if flame1.final or flame2.final:
# flame1.create_final()
# flame2.create_final()
for flame in flame1,flame2:
flame.create_final()
flame.xform.append(flame.final)
flame.final = None
# Size can be interpolated correctly, but it's pointless to
# produce frames that can't be turned into an animation.
flame1.size = flame2.size
for name in set(flame1.attributes).union(flame2.attributes):
if not hasattr(flame2,name):
val = getattr(flame1,name)
_type = type(val)
if _type is list or _type is tuple:
setattr(flame2,name,[0 for i in val])
elif _type is float:
setattr(flame2,name,0.0)
elif _type is str:
delattr(flame1,name)
else:
raise TypeError, "flame.%s can't be %s" %(name,_type)
elif not hasattr(flame1,name):
val = getattr(flame2,name)
_type = type(val)
if _type is list or _type is tuple:
                setattr(flame1,name,[0 for i in val])
elif _type is float:
setattr(flame1,name,0.0)
elif _type is str:
delattr(flame2,name)
else:
raise TypeError, "flame.%s can't be %s" %(name,_type)
#------------------------------------------------
if __name__ == '__main__':
#load flames
f1 = Flame(file='samples.flame',name='linear')
f2 = Flame(file='samples.flame',name='julia')
f3 = Flame(file='samples.flame',name='heart')
f4 = Flame(file='test_interpolation.flame',name='A')
f5 = Flame(file='test_interpolation.flame',name='B')
settings = {'flamename':'frame' #base name for frames
,'offset': 0 #offset for frame index value
,'curve': 'lin' #lin, par, npar, cos, sinh, tanh
,'a': 1 #curve parameter (slope)
,'t': 0.5 #spline tension (0.5 = catmull-rom)
,'smooth': True #use smoothing
,'loop': False #loop animation
,'loops': True #loop keyframes between interpolations
,'p_space': 'polar' #coordinate interpolation space: rect, polar
,'c_space': 'hls' #color interpolation space: rgb, hls
,'rotate': {'count': 1 #number of rotations (- for counter-clockwise)
,'curve': 'lin' #rotation curve
,'a': 1} #rotation curve param
,'pivot': {'count': 1 #number of pivots (- for counter-clockwise)
,'curve': 'lin' #pivot curve
,'a': 1} #pivot curve param
}
#interpolation options - check top of file.
i = interpolation([f2,f3,f4], **settings)
buff = i.next() #buffer to take advantage of threading
while True:
# SetActiveFlame(buff)
# preview()
buff = i.next()
| bobbyrward/fr0st | scripts/interp_gen.py | Python | gpl-3.0 | 9,398 |
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^process/$', views.payment_process, name='process'),
url(r'^done/$', views.payment_done, name='done'),
url(r'^canceled/$', views.payment_canceled, name='canceled'),
]
| EssaAlshammri/django-by-example | online-shop/myshop/payment/urls.py | Python | mit | 257 |
# Copyright 2010 Hakan Kjellerstrand hakank@bonetmail.com
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Organizing a day in Google CP Solver.
Simple scheduling problem.
Problem formulation from ECLiPSe:
Slides on (Finite Domain) Constraint Logic Programming, page 38f
http://eclipse-clp.org/reports/eclipse.ppt
Compare with the following models:
* MiniZinc: http://www.hakank.org/minizinc/organize_day.mzn
* Comet: http://www.hakank.org/comet/organize_day.co
* Gecode: http://hakank.org/gecode/organize_day.cpp
This model was created by Hakan Kjellerstrand (hakank@bonetmail.com)
Also see my other Google CP Solver models:
http://www.hakank.org/google_or_tools/
"""
from __future__ import print_function
import sys
from ortools.constraint_solver import pywrapcp
#
# No overlapping of tasks s1 and s2
#
def no_overlap(solver, s1, d1, s2, d2):
b1 = solver.IsLessOrEqualVar(s1 + d1, s2) # s1 + d1 <= s2
b2 = solver.IsLessOrEqualVar(s2 + d2, s1) # s2 + d2 <= s1
solver.Add(b1 + b2 >= 1)
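  # Worked example: with s1=9, d1=4, s2=13, d2=2 we have s1 + d1 <= s2, so b1 is 1
  # and the disjunction b1 + b2 >= 1 holds, i.e. the two tasks do not overlap.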
def main():
# Create the solver.
solver = pywrapcp.Solver('Organizing a day')
#
# data
#
n = 4
tasks = list(range(n))
work, mail, shop, bank = tasks
durations = [4, 1, 2, 1]
# task [i,0] must be finished before task [i,1]
before_tasks = [
[bank, shop],
[mail, work]
]
# the valid times of the day
begin = 9
end = 17
#
# declare variables
#
  begins = [solver.IntVar(begin, end, 'begins[%i]' % i) for i in tasks]
  ends = [solver.IntVar(begin, end, 'ends[%i]' % i) for i in tasks]
#
# constraints
#
for i in tasks:
solver.Add(ends[i] == begins[i] + durations[i])
for i in tasks:
for j in tasks:
if i < j:
no_overlap(solver,
begins[i], durations[i],
begins[j], durations[j])
# specific constraints
for (before, after) in before_tasks:
solver.Add(ends[before] <= begins[after])
solver.Add(begins[work] >= 11)
#
# solution and search
#
db = solver.Phase(begins + ends,
solver.INT_VAR_DEFAULT,
solver.INT_VALUE_DEFAULT)
solver.NewSearch(db)
num_solutions = 0
while solver.NextSolution():
num_solutions += 1
print('begins:', [begins[i].Value() for i in tasks])
print('ends:', [ends[i].Value() for i in tasks])
print()
print('num_solutions:', num_solutions)
print('failures:', solver.Failures())
print('branches:', solver.Branches())
print('WallTime:', solver.WallTime(), 'ms')
if __name__ == '__main__':
main()
| linsicai/or-tools | examples/python/organize_day.py | Python | apache-2.0 | 3,062 |
from invoke import task, run
#from fabric.api import local, lcd, get, env
#from fabric.operations import require, prompt
#from fabric.utils import abort
import requests
import rdflib
import getpass
import os.path
import os
import setlr
from os import listdir
from rdflib import *
import logging
CHEAR_DIR='chear.d/'
HHEAR_DIR='hhear.d/'
SETL_FILE='ontology.setl.ttl'
ontology_setl = Namespace('https://hadatac.org/setl/')
setl = Namespace('http://purl.org/twc/vocab/setl/')
prov = Namespace('http://www.w3.org/ns/prov#')
dc = Namespace('http://purl.org/dc/terms/')
pv = Namespace('http://purl.org/net/provenance/ns#')
logging_level = logging.INFO
logging.basicConfig(level=logging_level)
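# The @task functions below are run with the 'invoke' CLI, e.g. (illustrative):
#   invoke buildchear
#   invoke chear2hhear --inputfile in.xlsx --outputfile out.xlsx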
@task
def buildchear(ctx):
setl_graph = Graph()
setl_graph.parse(SETL_FILE,format="turtle")
cwd = os.getcwd()
formats = ['ttl','owl','json']
ontology_output_files = [setl_graph.resource(URIRef('file://'+cwd+'/chear.'+x)) for x in formats]
print (len(setl_graph))
for filename in os.listdir(CHEAR_DIR):
if not filename.endswith('.ttl') or filename.startswith('#'):
continue
print('Adding fragment', filename)
fragment = setl_graph.resource(BNode())
for ontology_output_file in ontology_output_files:
print(ontology_output_file.identifier, list(ontology_output_file[prov.wasGeneratedBy]))
ontology_output_file.value(prov.wasGeneratedBy).add(prov.used, fragment)
fragment.add(RDF.type, setlr.void.Dataset)
fragment_extract = setl_graph.resource(BNode())
fragment.add(prov.wasGeneratedBy, fragment_extract)
fragment_extract.add(RDF.type, setl.Extract)
fragment_extract.add(prov.used, URIRef('file://'+CHEAR_DIR+filename))
setlr._setl(setl_graph)
@task
def buildhhear(ctx):
setl_graph = Graph()
setl_graph.parse('hhear-ontology.setl.ttl',format="turtle")
cwd = os.getcwd()
formats = ['ttl','owl','json']
ontology_output_files = [setl_graph.resource(URIRef('file://'+cwd+'/hhear.'+x)) for x in formats]
print (len(setl_graph))
for filename in os.listdir(HHEAR_DIR):
if not filename.endswith('.ttl') or filename.startswith('#'):
continue
print('Adding fragment', filename)
fragment = setl_graph.resource(BNode())
for ontology_output_file in ontology_output_files:
print(ontology_output_file.identifier, list(ontology_output_file[prov.wasGeneratedBy]))
ontology_output_file.value(prov.wasGeneratedBy).add(prov.used, fragment)
fragment.add(RDF.type, setlr.void.Dataset)
fragment_extract = setl_graph.resource(BNode())
fragment.add(prov.wasGeneratedBy, fragment_extract)
fragment_extract.add(RDF.type, setl.Extract)
fragment_extract.add(prov.used, URIRef('file://'+HHEAR_DIR+filename))
setlr._setl(setl_graph)
@task
def chear2hhear(c, inputfile, outputfile):
import openpyxl
import re
import pandas as pd
mappings = {}
mappings.update(dict([(row['label_uri'], row['numeric_uri'])
for i, row in pd.read_csv('sio_mappings.csv').iterrows()]))
mappings.update(dict([(row['label_uri'], row['numeric_uri'])
for i, row in pd.read_csv('chear2hhear_mappings.csv').iterrows()]))
wb = openpyxl.load_workbook(inputfile)
for sheet in wb:
for row in sheet.rows:
for cell in row:
if isinstance(cell.value, str):
cellValues = []
for c in re.split('\\s*[,&]\\s*', cell.value):
if c in mappings:
print('Replacing',c,'with',mappings[c])
c = mappings[c]
cellValues.append(c)
cell.value = ', '.join(cellValues)
wb.save(outputfile)
| tetherless-world/chear-ontology | tasks.py | Python | apache-2.0 | 3,872 |
from django.conf.urls.defaults import patterns, include, url
from django.conf import settings
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
url(r'^$', 'starter.views.home', name='home'),
# url(r'^starter/', include('starter.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
if settings.ENV == 'dev':
urlpatterns = patterns('',
url(r'^media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.MEDIA_ROOT, 'show_indexes': True}),
url(r'', include('django.contrib.staticfiles.urls')),
) + urlpatterns
| Plexical/django-starter | starter/urls.py | Python | isc | 818 |
#!/usr/bin/python
# The usual preamble
import numpy as np
import grizzly.numpy_weld as npw
import pandas as pd
import grizzly.grizzly as gr
import time
# Get data (NYC 311 service request dataset) and start cleanup
raw_data = pd.read_csv('data/us_cities_states_counties.csv', delimiter='|')
raw_data.dropna(inplace=True)
data = gr.DataFrameWeld(raw_data)
print "Done reading input file..."
start = time.time()
# Get all city information with total population greater than 500,000
data_big_cities = data[data["Total population"] > 500000]
# Compute "crime index" proportional to
# (Total population + 2*(Total adult population) - 2000*(Number of robberies)) / 100000
data_big_cities_stats = data_big_cities[
["Total population", "Total adult population", "Number of robberies"]].values
predictions = npw.dot(data_big_cities_stats, np.array(
[1, 2, -2000], dtype=np.int64)) / 100000.0
data_big_cities["Crime index"] = predictions
# Aggregate "crime index" scores by state
data_big_cities["Crime index"][data_big_cities["Crime index"] >= 0.02] = 0.032
data_big_cities["Crime index"][data_big_cities["Crime index"] < 0.01] = 0.005
print data_big_cities["Crime index"].sum().evaluate()
end = time.time()
print "Total end-to-end time: %.2f" % (end - start)
| weld-project/weld | examples/python/grizzly/get_population_stats_simplified_grizzly.py | Python | bsd-3-clause | 1,266 |
#!/usr/bin/python3
# Copyright 2020 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Lint as: python3
"""The tool converts a textpb into a binary proto using chromium protoc binary.
After converting a feed response textpb file into a mockserver textpb file with
the proto_convertor script, an engineer runs this script to encode the
mockserver textpb file into the binary proto file used by the feed
card render test (refer to go/create-a-feed-card-render-test for more).
Make sure you have absl-py installed via 'python3 -m pip install absl-py'.
Usage example:
python3 ./mockserver_textpb_to_binary.py
--chromium_path ~/chromium/src
--output_file /tmp/binary.pb
--source_file /tmp/original.textpb
--alsologtostderr
"""
import glob
import os
import protoc_util
import subprocess
from absl import app
from absl import flags
FLAGS = flags.FLAGS
flags.DEFINE_string('chromium_path', '', 'The path of your chromium depot.')
flags.DEFINE_string('output_file', '', 'The target output binary file path.')
flags.DEFINE_string('source_file', '',
'The source proto file, in textpb format, path.')
ENCODE_NAMESPACE = 'components.feed.core.proto.wire.mockserver.MockServer'
COMPONENT_FEED_PROTO_PATH = 'components/feed/core/proto'
def main(argv):
if len(argv) > 1:
raise app.UsageError('Too many command-line arguments.')
if not FLAGS.chromium_path:
raise app.UsageError('chromium_path flag must be set.')
if not FLAGS.source_file:
raise app.UsageError('source_file flag must be set.')
if not FLAGS.output_file:
raise app.UsageError('output_file flag must be set.')
with open(FLAGS.source_file) as file:
value_text_proto = file.read()
encoded = protoc_util.encode_proto(value_text_proto, ENCODE_NAMESPACE,
FLAGS.chromium_path,
COMPONENT_FEED_PROTO_PATH)
with open(FLAGS.output_file, 'wb') as file:
file.write(encoded)
if __name__ == '__main__':
app.run(main)
| endlessm/chromium-browser | components/feed/tools/mockserver_textpb_to_binary.py | Python | bsd-3-clause | 2,151 |
import warnings
from pathlib import Path
from typing import List, Tuple, Union
import fire
from torch import nn
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer, PreTrainedModel
from transformers.utils import logging
logger = logging.get_logger(__name__)
def copy_layers(src_layers: nn.ModuleList, dest_layers: nn.ModuleList, layers_to_copy: List[int]) -> None:
layers_to_copy = nn.ModuleList([src_layers[i] for i in layers_to_copy])
assert len(dest_layers) == len(layers_to_copy), f"{len(dest_layers)} != {len(layers_to_copy)}"
dest_layers.load_state_dict(layers_to_copy.state_dict())
LAYERS_TO_COPY = {
# maps num layers in teacher -> num_layers in student -> which teacher layers to copy.
# 12: bart, 16: pegasus, 6: marian/Helsinki-NLP
12: {
1: [0], # This says that if the teacher has 12 layers and the student has 1, copy layer 0 of the teacher
2: [0, 6],
3: [0, 6, 11],
4: [0, 4, 8, 11],
6: [0, 2, 4, 7, 9, 11],
9: [0, 1, 2, 4, 5, 7, 9, 10, 11],
12: list(range(12)),
},
16: { # maps num layers in student -> which teacher layers to copy
1: [0],
2: [0, 15],
3: [0, 8, 15],
4: [0, 5, 10, 15],
6: [0, 3, 6, 9, 12, 15],
8: [0, 2, 4, 6, 8, 10, 12, 15],
9: [0, 1, 3, 5, 7, 9, 11, 13, 15],
12: [0, 1, 2, 3, 4, 5, 6, 7, 9, 11, 13, 15],
16: list(range(16)),
},
6: {1: [0], 2: [0, 5], 3: [0, 2, 5], 4: [0, 1, 3, 5], 6: list(range(6))},
}
LAYERS_TO_SUPERVISE = {
# maps num layers in student -> which teacher layers to copy.
6: {1: [5], 2: [3, 5], 3: [1, 4, 5], 4: [1, 2, 4, 5]},
12: {1: [11], 2: [5, 11], 3: [3, 7, 11], 6: [1, 3, 5, 8, 10, 11]},
16: {1: [15], 4: [4, 9, 12, 15], 8: [1, 3, 5, 7, 9, 11, 13, 15]},
}
def pick_layers_to_copy(n_student, n_teacher):
try:
val = LAYERS_TO_COPY[n_teacher][n_student]
return val
except KeyError:
if n_student != n_teacher:
warnings.warn(
f"no hardcoded layers to copy for teacher {n_teacher} -> student {n_student}, defaulting to first {n_student}"
)
return list(range(n_student))
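# e.g. pick_layers_to_copy(3, 12) -> [0, 6, 11] (from LAYERS_TO_COPY above); unknown
# teacher/student pairings fall back to the first n_student layers with a warning.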
def get_layers_to_supervise(n_student, n_teacher) -> List[int]:
"""Used or the --supervise_forward kwarg"""
if n_student > n_teacher:
raise ValueError(f"Cannot perform intermediate supervision for student {n_student} > teacher {n_teacher}")
elif n_teacher == n_student:
return list(range(n_teacher))
elif n_student == 1:
return [n_teacher - 1]
else:
return LAYERS_TO_SUPERVISE[n_teacher][n_student]
def create_student_by_copying_alternating_layers(
teacher: Union[str, PreTrainedModel],
save_path: Union[str, Path] = "student",
e: Union[int, None] = None,
d: Union[int, None] = None,
copy_first_teacher_layers=False,
e_layers_to_copy=None,
d_layers_to_copy=None,
**extra_config_kwargs
) -> Tuple[PreTrainedModel, List[int], List[int]]:
"""Make a student by copying alternating layers from a teacher, save it to save_path.
Args:
teacher: str or PreTrainedModel if str, this will call AutoModelForSeq2SeqLM.from_pretrained(teacher) before
copying layers
save_path: where to save the student, defaults to student directory.
e: how many Encoder layers should the student have, default is fully copy of teacher
d: how many Decoder layers should the student have, default is fully copy of teacher
        copy_first_teacher_layers: [bool] don't copy alternating layers, just the first e/d.
**extra_config_kwargs: extra kwargs to pass to the student, by default the teacher config is used.
Returns:
student: new, smaller model. (Also saves it to save_path)
e_layers_to_copy: list of which teacher encoder layers were used
d_layers_to_copy: list of which teacher decoder layers were used
"""
_msg = "encoder_layers and decoder_layers cannot be both None-- you would just have an identical teacher."
assert (e is not None) or (d is not None), _msg
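    # Illustrative call (checkpoint name is only an example): distill a 12-layer teacher
    # into a student with all 12 encoder layers but only 3 decoder layers:
    #   student, e_copied, d_copied = create_student_by_copying_alternating_layers(
    #       "facebook/bart-large-cnn", save_path="student_12_3", d=3)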
if isinstance(teacher, str):
AutoTokenizer.from_pretrained(teacher).save_pretrained(save_path) # purely for convenience
teacher = AutoModelForSeq2SeqLM.from_pretrained(teacher).eval()
else:
assert isinstance(teacher, PreTrainedModel), f"teacher must be a model or string got type {type(teacher)}"
init_kwargs = teacher.config.to_diff_dict()
try:
teacher_e, teacher_d = teacher.config.encoder_layers, teacher.config.decoder_layers
if e is None:
e = teacher_e
if d is None:
d = teacher_d
init_kwargs.update({"encoder_layers": e, "decoder_layers": d})
except AttributeError: # T5
if hasattr(teacher.config, "num_encoder_layers"):
teacher_e, teacher_d = teacher.config.num_encoder_layers, teacher.config.num_decoder_layers
else:
teacher_e, teacher_d = teacher.config.num_layers, teacher.config.num_decoder_layers
if e is None:
e = teacher_e
if d is None:
d = teacher_d
if hasattr(teacher.config, "num_encoder_layers"):
init_kwargs.update({"num_encoder_layers": e, "num_decoder_layers": d})
else:
init_kwargs.update({"num_layers": e, "num_decoder_layers": d})
# Kwargs to instantiate student: teacher kwargs with updated layer numbers + **extra_config_kwargs
init_kwargs.update(extra_config_kwargs)
# Copy weights
student_cfg = teacher.config_class(**init_kwargs)
student = AutoModelForSeq2SeqLM.from_config(student_cfg)
    # Start by copying the full teacher state dict; this will copy the first N teacher layers to the student.
info = student.load_state_dict(teacher.state_dict(), strict=False)
    assert info.missing_keys == [], info.missing_keys  # every student key should have a teacher key.
if copy_first_teacher_layers: # Our copying is done. We just log and save
e_layers_to_copy, d_layers_to_copy = list(range(e)), list(range(d))
logger.info(
f"Copied encoder layers {e_layers_to_copy} and decoder layers {d_layers_to_copy}. Saving them to {save_path}"
)
student.save_pretrained(save_path)
return student, e_layers_to_copy, d_layers_to_copy
# Decide which layers of the teacher to copy. Not exactly alternating -- we try to keep first and last layer.
if e_layers_to_copy is None:
e_layers_to_copy: List[int] = pick_layers_to_copy(e, teacher_e)
if d_layers_to_copy is None:
d_layers_to_copy: List[int] = pick_layers_to_copy(d, teacher_d)
try:
if hasattr(
teacher, "prophetnet"
): # For ProphetNet, student.model.encoder.layers is called student.prophetnet.encoder.layers
copy_layers(teacher.prophetnet.encoder.layers, student.prophetnet.encoder.layers, e_layers_to_copy)
copy_layers(teacher.prophetnet.decoder.layers, student.prophetnet.decoder.layers, d_layers_to_copy)
else:
copy_layers(teacher.model.encoder.layers, student.model.encoder.layers, e_layers_to_copy)
copy_layers(teacher.model.decoder.layers, student.model.decoder.layers, d_layers_to_copy)
except AttributeError: # For t5, student.model.encoder.layers is called student.encoder.block
copy_layers(teacher.encoder.block, student.encoder.block, e_layers_to_copy)
copy_layers(teacher.decoder.block, student.decoder.block, d_layers_to_copy)
logger.info(
f"Copied encoder layers {e_layers_to_copy} and decoder layers {d_layers_to_copy}. Saving them to {save_path}"
)
student.config.init_metadata = dict(
teacher_type=teacher.config.model_type,
copied_encoder_layers=e_layers_to_copy,
copied_decoder_layers=d_layers_to_copy,
)
student.save_pretrained(save_path)
# Save information about copying for easier reproducibility
return student, e_layers_to_copy, d_layers_to_copy
if __name__ == "__main__":
fire.Fire(create_student_by_copying_alternating_layers)
| huggingface/transformers | examples/research_projects/seq2seq-distillation/make_student.py | Python | apache-2.0 | 8,172 |
"""Testcase for RCU - RSP data interface using PRSG, based on TCL testcase 5.10
Note: No specific arguments
"""
################################################################################
# Constants
nof_reflets_ap = rsp.c_nof_reflets_ap
nof_beamlets = rsp.c_nof_beamlets # maximum capable by RSP gateware
#nof_beamlets = 216 # sufficient nof beamlets for 32 MHz BW
nof_beamlets_ap = rsp.c_nof_beamlets_ap # including reflets
nof_beamlets_ap = nof_reflets_ap + nof_beamlets # including reflets
# - SS output size
c_ss_reflets_size = rsp.c_pol * nof_reflets_ap
c_ss_size = rsp.c_pol * nof_beamlets_ap
c_ss_gap = rsp.c_slice_size - rsp.c_cpx * rsp.c_pol * nof_beamlets_ap
# - Datapath result buffer
c_res_word_width = 4
c_res_nof_words = rsp.c_cpx * rsp.c_pol * nof_beamlets_ap # include read reflets
c_res_nof_words_per_pol = rsp.c_cpx * nof_beamlets # skipped reflets
################################################################################
# - Verify options
rspId = tc.rspId
blpId = tc.blpId
polId = tc.polId
repeat = tc.repeat
tc.setResult('PASSED') # self checking test, so start assuming it will run PASSED
tc.appendLog(11,'')
tc.appendLog(11,'>>> Capture PRSG data for RSP-%s, BLP-%s, RCU-%s' % (rspId, blpId, polId))
tc.appendLog(11,'')
################################################################################
# - Testcase initializations
# - Set RCU in PRSG mode
rsp.rspctl(tc, '--rcuprsg')
rsp.rspctl(tc, '--rcuenable=1')
bypass = 0x8F # Bypass data path to have direct access to RCU data via SS, use resync
# version of pps to preserve X and Y order in captured data
# - Set SS mapping
# Default use incrementing SS mapping
ss_map = []
for i in range(0, rsp.c_pol * nof_beamlets_ap):
ss_map.append(i)
# Use rspctl to write the SS mapping
rsp.rspctl(tc, '--subbands=0:%d' % (nof_beamlets-1))
# Write the SS mapping, repeat twice to ensure both pages are written
#r = 1
#for i in range(0,r):
# rsp.write_ss(tc, msg, ss_map, blpId, rspId)
# tc.sleep(1000)
# rsp.write_ss(tc, msg, ss_map, blpId, rspId)
# tc.sleep(1000)
# Apparently rspctl updates the SS every pps, so overwriting it does not work.
# Disabling SS update in RSPDriver.conf may be an option. However instead adapt
# this test case to the actual SS by reordering the captured data accordingly.
# Therefore read the actual SS into ss_map.
# Read actual SS mapping, to use it to reorder the read DIAG result
# . assume both pages and all BLP use same mapping
# . strip the reflets
# . assume that the beamlets mapping uses all subbands
bi = [blpId[0]]
ri = [rspId[0]]
ss_map = rsp.read_ss(tc, msg, c_ss_size, bi, ri)
ss_map = ss_map[c_ss_reflets_size:]
tc.appendLog(21,'Active SS map (length %d):' % len(ss_map))
tc.appendLog(21,'%s' % ss_map)
rsp.write_cr_syncoff(tc, msg, blpId, rspId)
rsp.write_diag_bypass(tc, msg, bypass, blpId, rspId)
################################################################################
# Run the test
for k in range(0, repeat):
rsp.write_rsu_altsync(tc, msg, rspId) # Apply altsync to capture a new result buffer
tc.sleep(100)
for ri in rspId:
for bi in blpId:
res_buffer = rsp.read_diag_result_buffer(tc, msg, c_res_nof_words, c_res_word_width, [bi], [ri])
res ={'x':[], 'y':[]}
for x in range(0, len(res_buffer), 2):
res['x'].append(res_buffer[x] & rsp.c_rcu_dat_mask)
for y in range(1, len(res_buffer), 2):
res['y'].append(res_buffer[y] & rsp.c_rcu_dat_mask)
res['x'] = res['x'][c_ss_reflets_size:] # strip the reflets
res['y'] = res['y'][c_ss_reflets_size:]
res['x'] = rsp.reorder(res['x'], ss_map) # reorder according to SS map
res['y'] = rsp.reorder(res['y'], ss_map)
for pi in polId:
first = 1 # First result sample is used as starting seed for the expected samples
ok = 0 # 0 = OK
if len(res[pi]) == c_res_nof_words_per_pol:
for rs in res[pi]:
if first == 0:
if ok == 0:
if rs != rsp.calculate_next_sequence_value(rs_prev):
# Mismatch, so bridge the potential SS gap in the sample stream to verify whether this is the cause
rs_gap = rs_prev
for i in range(0, c_ss_gap):
rs_gap = rsp.calculate_next_sequence_value(rs_gap)
                  if rs != rsp.calculate_next_sequence_value(rs_gap):
# Mismatch again, so assume the potential SS gap was not the cause of the initial mismatch
nxt_rs_prev = rsp.calculate_next_sequence_value(rs_prev)
exp.append(nxt_rs_prev)
ok = 1 # 1 = sample mismatch
else:
# OK, so bridge the SS gap in the expected results
nxt_rs_prev = rsp.calculate_next_sequence_value(rs_gap)
exp.append(nxt_rs_prev)
else:
# OK, no SS gap to bridge
nxt_rs_prev = rsp.calculate_next_sequence_value(rs_prev)
exp.append(nxt_rs_prev)
else:
# A mismatch has aleready occured, no need to check for more mismatches
nxt_rs_prev = rsp.calculate_next_sequence_value(rs_prev)
exp.append(nxt_rs_prev)
else:
first = 0
nxt_rs_prev = res[pi][0]
exp = [nxt_rs_prev]
rs_prev = nxt_rs_prev
else:
ok = 2 # 2 = length error
# Report results
if ok == 0:
tc.appendLog(11,'>>> %d : RSP-%s, BLP-%s, RCU-%s PRSG data is OK.' % (k, ri, bi, pi))
elif ok == 1:
tc.appendLog(11,'>>> %d : RSP-%s, BLP-%s, RCU-%s PRSG data mismatch.' % (k, ri, bi, pi))
tc.appendLog(11,'- Expected data:')
tc.appendLog(11,'%s' % exp)
tc.appendLog(11,'- Captured data:')
tc.appendLog(11,'%s' % res[pi])
tc.setResult('FAILED')
else:
tc.appendLog(11,'>>> %d : RSP-%s, BLP-%s, RCU-%s PRSG data length mismatch.' % (k, ri, bi, pi))
tc.appendLog(11,'Captured length %d != expected length %d' % (len(res[pi]), c_res_nof_words_per_pol))
tc.setResult('FAILED')
# Restore defaults
bypass = 1
rsp.write_diag_bypass(tc, msg, bypass, blpId, rspId, 99)
rsp.write_cr_syncon(tc, msg, blpId, rspId)
# Disable prsg after test
rsp.rspctl(tc, '--rcuprsg=0')
| kernsuite-debian/lofar | LCU/StationTest/tc/prsg.py | Python | gpl-3.0 | 6,582 |
import os, sys
try:
import ctypeslib.h2xml as h2xml
import ctypeslib.xml2py as xml2py
import ctypeslib.codegen as codegen
except:
print ('Error: required Python ctypeslib module not installed!')
sys.exit(-1)
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
from distutils.core import Command
src_path = os.path.dirname(os.path.realpath(__file__)) + '/'
try:
obj_path = sys.argv[sys.argv.index('--build-base') + 1] + '/'
except:
obj_path = src_path
c_h_file = src_path + '../mark5access/mark5_stream.h'
c_lib_file = obj_path + '../mark5access/.libs/libmark5access.so'
c_lib_obj = 'libmark5access.so'
ctypes_xml_file = 'mark5access_api.xml'
ctypes_api_file = 'mark5access/mark5access.py'
check_for_func = 'new_mark5_format'
# Ctypeslib generator invocations based on "reference" examples at
# https://github.com/piranna/cusepy/blob/master/setup.py
# http://s3ql.googlecode.com/hg-history/release-0.20/setup.py
def build_ctypes():
# Step 1: Remove old files
try:
os.remove(ctypes_xml_file)
except:
pass
try:
os.remove(ctypes_api_file)
except:
pass
# Configure ctypeslib
codegen.ASSUME_STRINGS = False
codegen.CDLL_SET_ERRNO = False
codegen.PREFIX = ('# Code autogenerated by ctypeslib. Any changes will be lost!\n\n'
'#pylint: disable-all\n'
'#@PydevCodeAnalysisIgnore\n\n')
# Modify a copy of mark5access headers
# By design h2xml is using C++ to convert headers.
# There are issues with C++ <complex> and the Python conversion.
# We create a copy of 'mark5_stream.h' to force C-only headers
# and the C <complex.h> definition of complex numbers
c_h_file_tmp = 'mark5_stream_tmp.h'
with open(c_h_file_tmp, 'w') as fout:
with open(c_h_file, 'r') as fin:
fout.write('#undef __cplusplus\n')
for line in fin:
if not('complex' in line):
fout.write(line)
# Generate XML file from C/C++ header file
h2xml_args = ['h2xml.py', c_h_file_tmp, '-o',ctypes_xml_file, '-I',src_path]
h2xml.main(h2xml_args)
os.remove(c_h_file_tmp)
# Generate Python bindings from XML file
# Note 1: to get -r <regex> to work correctly in xml2py.py v0.5.6 a patch is necessary (see README)
    # Note 2: uses libmark5access.so from the C/C++ build tree because 'make install' may not have been done yet
print ('creating bindings %s ...' % (ctypes_api_file))
xml2py_flags = ['-o',ctypes_api_file+'.tmp']
xml2py_flags.extend(['-k','esf']) # standard flags
xml2py_flags.extend(['-s','Mark5Format']) # the enums to include in wrapper
xml2py_flags.extend(['-s','Mark5Blanker'])
xml2py_flags.extend(['-s','mark5_stream']) # structs to include in wrapper
xml2py_flags.extend(['-s','mark5_stream_generic', '-s','mark5_format_generic', '-s','mark5_format'])
xml2py_flags.extend(['-r','mark5']) # functions to include in wrapper
xml2py_flags.extend(['-l',c_lib_file])
xml2py_args = ['xml2py.py', ctypes_xml_file]
xml2py_args.extend(xml2py_flags)
xml2py.main(xml2py_args)
# Rename
try:
with open(ctypes_api_file, 'w') as fout:
with open(ctypes_api_file+'.tmp', 'r') as fin:
for line in fin:
fout.write(line.replace(c_lib_file, c_lib_obj))
os.remove(ctypes_api_file+'.tmp')
except:
pass
# Make sure the generated .py seems okay -- regexp to select mark5* functions was ok?
func_found = False
with open(ctypes_api_file, 'r') as fin:
for line in fin:
if check_for_func in line:
func_found = True
if not func_found:
        print ('Error: ctypeslib did not extract function names. Older ctypeslib may need a patch (see README).')
sys.exit(-1)
if 'build' in sys.argv:
build_ctypes()
setup(
name = 'mark5access',
packages=['mark5access'],
version = '1.5.3',
description = ('A ctypes-based Python wrapper to the mark5access C/C++ library'),
long_description=open('README').read(),
license = 'LICENSE',
# install_requires = 'ctypes>=1.1.0',
    requires = ['ctypes'],
# cmdclass={'build_ctypes': build_ctypes},
)
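# Illustrative invocation (an assumption for clarity, not taken from upstream docs):
# passing 'build' on the command line regenerates the ctypes bindings, and
# --build-base must point at the autotools build tree that contains
# ../mark5access/.libs/libmark5access.so, e.g.
#   python setup.py build --build-base /path/to/objdir
#   python setup.py install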
| demorest/mark5access | python/setup.py | Python | gpl-3.0 | 4,324 |
from __future__ import print_function
from numpy import *
from scipy.io import readsav
print('Calculating P..')
a=transpose(readsav('phi.idl.dat')['phi'])
fa=fft.fft(a,axis=2)
save('fp',fa)
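# Illustrative follow-up (an assumption, not part of this script): numpy's save()
# appends a '.npy' suffix, so the spectrum written above can be reloaded with
#   fp = load('fp.npy')   # FFT of phi taken along axis 2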
| kevinpetersavage/BOUT-dev | examples/elm-pb/Python/fftall.py | Python | gpl-3.0 | 192 |
import numpy
from models.game.bots.MinimaxBot import MinimaxBot
from models.game.Board import Board
class Heuristic1Bot(MinimaxBot):
""" Minimax bot that plays using the H1 heuristic
"""
def __init__(self, number, time_limit=10, name=None):
""" This bot plays using a simple heuristic based on the weighted sum of board positions
:param number: Board.X for player1 or Board.O for player2
:param time_limit: The maximum time that the bot has to make a move
:param name: A descriptive name for the Bot
"""
if name is None:
name = "Heuristic1 Minimax"
MinimaxBot.__init__(self, number, time_limit, name=name)
self.player_type = 'h1 minimax'
def compute_score(self, board):
"""
This heuristic scores the board by awarding:
        10 points for capturing the center small board
        8 points for capturing a corner small board
        6 points for capturing an edge small board
        3 points for capturing the center of a small board
        2 points for capturing a corner of a small board
        1 point for capturing an edge of a small board
It deducts points if the opponent has captured any of these positions.
:param board: the board state to score
:return: the score computed by this heuristic
"""
# weights used to score board
G_CENTER = 10
G_CORNER = 8
G_EDGE = 6
L_CENTER = 3
L_CORNER = 2
L_EDGE = 1
global_capture_weight = [G_CORNER, G_EDGE, G_CORNER, G_EDGE, G_CENTER, G_EDGE, G_CORNER, G_EDGE, G_CORNER]
local_capture_weight = [L_CORNER, L_EDGE, L_CORNER, L_EDGE, L_CENTER, L_EDGE, L_CORNER, L_EDGE, L_CORNER]
our_capture_vector = board.get_capture_vector(Board.X)
opponent_capture_vector = board.get_capture_vector(Board.O)
score = 0
# modify score for global board
score += numpy.dot(our_capture_vector, global_capture_weight) - numpy.dot(opponent_capture_vector, global_capture_weight)
# modify score for each local board:
for row in [0, 1, 2]:
for col in [0, 1, 2]:
local_board = board.board[row][col]
if not local_board.board_completed:
our_capture_vector = local_board.get_capture_vector(Board.X)
opponent_capture_vector = local_board.get_capture_vector(Board.O)
score += numpy.dot(our_capture_vector, local_capture_weight) - numpy.dot(opponent_capture_vector, local_capture_weight)
return score
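# Illustrative arithmetic for the heuristic above (the capture vectors are made up
# for this example and are not produced by any real game state): using the weight
# lists defined inside compute_score, if X holds the global center small board plus
# one corner cell of an uncompleted small board, and O holds nothing, then
#   numpy.dot([0, 0, 0, 0, 1, 0, 0, 0, 0], global_capture_weight)  # -> 10
#   numpy.dot([1, 0, 0, 0, 0, 0, 0, 0, 0], local_capture_weight)   # -> 2
# so compute_score would return 12 for that position.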
| zachdj/ultimate-tic-tac-toe | models/game/bots/Heuristic1Bot.py | Python | mit | 2,617 |
# Copyright (C) 2008-2011 Dejan Muhamedagic <dmuhamedagic@suse.de>
# See COPYING for license information.
import os
import sys
from tempfile import mkstemp
import subprocess
import re
import errno
import glob
import time
import datetime
import shutil
import shlex
import bz2
import fnmatch
import gc
import ipaddress
import argparse
import random
import string
from pathlib import Path
from contextlib import contextmanager, closing
from stat import S_ISBLK
from lxml import etree
from . import config
from . import userdir
from . import constants
from . import options
from . import term
from . import parallax
from distutils.version import LooseVersion
from .constants import SSH_OPTION
from . import log
logger = log.setup_logger(__name__)
logger_utils = log.LoggerUtils(logger)
class TerminateSubCommand(Exception):
"""
    Exception used to abort a subcommand on error while keeping the interactive shell running
"""
def to_ascii(input_str):
    """Convert a bytes string to an ASCII string.
    Useful to remove accents (diacritics)."""
if input_str is None:
return input_str
if isinstance(input_str, str):
return input_str
try:
return str(input_str, 'utf-8')
except UnicodeDecodeError:
if config.core.debug or options.regression_tests:
import traceback
traceback.print_exc()
return input_str.decode('utf-8', errors='ignore')
def filter_keys(key_list, args, sign="="):
"""Return list item which not be completed yet"""
return [s+sign for s in key_list if any_startswith(args, s+sign) is None]
def any_startswith(iterable, prefix):
"""Return first element in iterable which startswith prefix, or None."""
for element in iterable:
if element.startswith(prefix):
return element
return None
def rindex(iterable, value):
return len(iterable) - iterable[::-1].index(value) - 1
def memoize(function):
"Decorator to invoke a function once only for any argument"
memoized = {}
def inner(*args):
if args in memoized:
return memoized[args]
r = function(*args)
memoized[args] = r
return r
return inner
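# Usage sketch for the memoize decorator above ('lookup' and 'expensive_query' are
# hypothetical names, shown only for illustration):
#   @memoize
#   def lookup(name):
#       return expensive_query(name)
#   lookup("node1")   # computed and cached
#   lookup("node1")   # returned from the cache, body not re-run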
@contextmanager
def nogc():
gc.disable()
try:
yield
finally:
gc.enable()
getuser = userdir.getuser
gethomedir = userdir.gethomedir
@memoize
def this_node():
'returns name of this node (hostname)'
return os.uname()[1]
_cib_shadow = 'CIB_shadow'
_cib_in_use = ''
def set_cib_in_use(name):
os.putenv(_cib_shadow, name)
global _cib_in_use
_cib_in_use = name
def clear_cib_in_use():
os.unsetenv(_cib_shadow)
global _cib_in_use
_cib_in_use = ''
def get_cib_in_use():
return _cib_in_use
def get_tempdir():
return os.getenv("TMPDIR") or "/tmp"
def is_program(prog):
"""Is this program available?"""
def isexec(filename):
return os.path.isfile(filename) and os.access(filename, os.X_OK)
for p in os.getenv("PATH").split(os.pathsep):
f = os.path.join(p, prog)
if isexec(f):
return f
return None
def pacemaker_20_daemon(new, old):
"helper to discover renamed pacemaker daemons"
if is_program(new):
return new
return old
@memoize
def pacemaker_attrd():
return pacemaker_20_daemon("pacemaker-attrd", "attrd")
@memoize
def pacemaker_based():
return pacemaker_20_daemon("pacemaker-based", "cib")
@memoize
def pacemaker_controld():
return pacemaker_20_daemon("pacemaker-controld", "crmd")
@memoize
def pacemaker_execd():
return pacemaker_20_daemon("pacemaker-execd", "lrmd")
@memoize
def pacemaker_fenced():
return pacemaker_20_daemon("pacemaker-fenced", "stonithd")
@memoize
def pacemaker_remoted():
return pacemaker_20_daemon("pacemaker-remoted", "pacemaker_remoted")
@memoize
def pacemaker_schedulerd():
return pacemaker_20_daemon("pacemaker-schedulerd", "pengine")
def pacemaker_daemon(name):
if name == "attrd" or name == "pacemaker-attrd":
return pacemaker_attrd()
if name == "cib" or name == "pacemaker-based":
return pacemaker_based()
if name == "crmd" or name == "pacemaker-controld":
return pacemaker_controld()
if name == "lrmd" or name == "pacemaker-execd":
return pacemaker_execd()
if name == "stonithd" or name == "pacemaker-fenced":
return pacemaker_fenced()
    if name == "pacemaker_remoted" or name == "pacemaker-remoted":
return pacemaker_remoted()
if name == "pengine" or name == "pacemaker-schedulerd":
return pacemaker_schedulerd()
raise ValueError("Not a Pacemaker daemon name: {}".format(name))
def can_ask():
"""
Is user-interactivity possible?
Checks if connected to a TTY.
"""
return (not options.ask_no) and sys.stdin.isatty()
def ask(msg):
"""
Ask for user confirmation.
If core.force is true, always return true.
If not interactive and core.force is false, always return false.
"""
if config.core.force:
logger.info("%s [YES]", msg)
return True
if not can_ask():
return False
msg += ' '
if msg.endswith('? '):
msg = msg[:-2] + ' (y/n)? '
while True:
try:
ans = input(msg)
except EOFError:
ans = 'n'
if ans:
ans = ans[0].lower()
if ans in 'yn':
return ans == 'y'
# holds part of line before \ split
# for a multi-line input
_LINE_BUFFER = ''
def get_line_buffer():
return _LINE_BUFFER
def multi_input(prompt=''):
"""
Get input from user
Allow multiple lines using a continuation character
"""
global _LINE_BUFFER
line = []
_LINE_BUFFER = ''
while True:
try:
text = input(prompt)
except EOFError:
return None
if options.regression_tests:
logger_utils.incr_lineno()
print(".INP:", text)
sys.stdout.flush()
sys.stderr.flush()
stripped = text.strip()
if stripped.endswith('\\'):
stripped = stripped.rstrip('\\')
line.append(stripped)
_LINE_BUFFER += stripped
if prompt:
prompt = ' > '
else:
line.append(stripped)
break
return ''.join(line)
def verify_boolean(opt):
return opt.lower() in ("yes", "true", "on", "1") or \
opt.lower() in ("no", "false", "off", "0")
def is_boolean_true(opt):
if opt in (None, False):
return False
if opt is True:
return True
return opt.lower() in ("yes", "true", "on", "1")
def is_boolean_false(opt):
if opt in (None, False):
return True
if opt is True:
return False
return opt.lower() in ("no", "false", "off", "0")
def get_boolean(opt, dflt=False):
if not opt:
return dflt
return is_boolean_true(opt)
def canonical_boolean(opt):
return 'true' if is_boolean_true(opt) else 'false'
def keyword_cmp(string1, string2):
return string1.lower() == string2.lower()
class olist(list):
"""
Implements the 'in' operator
in a case-insensitive manner,
allowing "if x in olist(...)"
"""
def __init__(self, keys):
super(olist, self).__init__([k.lower() for k in keys])
def __contains__(self, key):
return super(olist, self).__contains__(key.lower())
def append(self, key):
super(olist, self).append(key.lower())
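# Behaviour sketch for olist above (the values are illustrative): membership checks
# and appends are case-insensitive, e.g.
#   l = olist(["Stop", "Start"])
#   "STOP" in l      # True
#   "restart" in l   # False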
def os_types_list(path):
l = []
for f in glob.glob(path):
if os.access(f, os.X_OK) and os.path.isfile(f):
a = f.split("/")
l.append(a[-1])
return l
def listtemplates():
l = []
templates_dir = os.path.join(config.path.sharedir, 'templates')
for f in os.listdir(templates_dir):
if os.path.isfile("%s/%s" % (templates_dir, f)):
l.append(f)
return l
def listconfigs():
l = []
for f in os.listdir(userdir.CRMCONF_DIR):
if os.path.isfile("%s/%s" % (userdir.CRMCONF_DIR, f)):
l.append(f)
return l
def add_sudo(cmd):
if config.core.user:
return "sudo -E -u %s %s" % (config.core.user, cmd)
return cmd
def add_su(cmd, user):
"""
Wrapped cmd with su -c "<cmd>" <user>
"""
if user == "root":
return cmd
return "su -c \"{}\" {}".format(cmd, user)
def chown(path, user, group):
if isinstance(user, int):
uid = user
else:
import pwd
uid = pwd.getpwnam(user).pw_uid
if isinstance(group, int):
gid = group
else:
import grp
gid = grp.getgrnam(group).gr_gid
os.chown(path, uid, gid)
def ensure_sudo_readable(f):
# make sure the tempfile is readable to crm_diff (bsc#999683)
if config.core.user:
from pwd import getpwnam
uid = getpwnam(config.core.user).pw_uid
try:
os.chown(f, uid, -1)
except os.error as err:
logger.error('Failed setting temporary file permissions: %s', err)
return False
return True
def pipe_string(cmd, s):
rc = -1 # command failed
cmd = add_sudo(cmd)
logger.debug("piping string to %s", cmd)
if options.regression_tests:
print(".EXT", cmd)
p = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE)
try:
# communicate() expects encoded bytes
if isinstance(s, str):
s = s.encode('utf-8')
p.communicate(s)
p.wait()
rc = p.returncode
except IOError as msg:
if "Broken pipe" not in str(msg):
logger.error(msg)
return rc
def filter_string(cmd, s, stderr_on=True, shell=True):
rc = -1 # command failed
outp = ''
if stderr_on is True:
stderr = None
else:
stderr = subprocess.PIPE
cmd = add_sudo(cmd)
logger.debug("pipe through %s", cmd)
if options.regression_tests:
print(".EXT", cmd)
p = subprocess.Popen(cmd,
shell=shell,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=stderr)
try:
# bytes expected here
if isinstance(s, str):
s = s.encode('utf-8')
ret = p.communicate(s)
if stderr_on == 'stdout':
outp = b"\n".join(ret)
else:
outp = ret[0]
p.wait()
rc = p.returncode
except OSError as err:
        if err.errno != errno.EPIPE:
logger.error(err.strerror)
logger.error("from: %s", cmd)
except Exception as msg:
logger.error("from: %s: %s", cmd, str(msg))
return rc, to_ascii(outp)
def str2tmp(_str, suffix=".pcmk"):
'''
Write the given string to a temporary file. Return the name
of the file.
'''
s = to_ascii(_str)
fd, tmp = mkstemp(suffix=suffix)
try:
f = os.fdopen(fd, "w")
except IOError as msg:
logger.error(msg)
return
f.write(s)
if not s.endswith('\n'):
f.write("\n")
f.close()
return tmp
@contextmanager
def create_tempfile(suffix='', dir=None):
""" Context for temporary file.
Will find a free temporary filename upon entering
and will try to delete the file on leaving, even in case of an exception.
Parameters
----------
suffix : string
optional file suffix
dir : string
optional directory to save temporary file in
(from http://stackoverflow.com/a/29491523)
"""
import tempfile
tf = tempfile.NamedTemporaryFile(delete=False, suffix=suffix, dir=dir)
tf.file.close()
try:
yield tf.name
finally:
try:
os.remove(tf.name)
except OSError as e:
if e.errno == 2:
pass
else:
raise
@contextmanager
def open_atomic(filepath, mode="r", buffering=-1, fsync=False, encoding=None):
""" Open temporary file object that atomically moves to destination upon
exiting.
Allows reading and writing to and from the same filename.
The file will not be moved to destination in case of an exception.
Parameters
----------
filepath : string
the file path to be opened
fsync : bool
whether to force write the file to disk
(from http://stackoverflow.com/a/29491523)
"""
with create_tempfile(dir=os.path.dirname(os.path.abspath(filepath))) as tmppath:
with open(tmppath, mode, buffering, encoding=encoding) as file:
try:
yield file
finally:
if fsync:
file.flush()
os.fsync(file.fileno())
os.rename(tmppath, filepath)
def str2file(s, fname, mod=0o644):
'''
Write a string to a file.
'''
try:
with open_atomic(fname, 'w', encoding='utf-8', fsync=True) as dst:
dst.write(to_ascii(s))
os.chmod(fname, mod)
except IOError as msg:
logger.error(msg)
return False
return True
def file2str(fname, noerr=True):
'''
Read a one line file into a string, strip whitespace around.
'''
try:
f = open(fname, "r")
except IOError as msg:
if not noerr:
logger.error(msg)
return None
s = f.readline()
f.close()
return s.strip()
def file2list(fname):
'''
Read a file into a list (newlines dropped).
'''
try:
return open(fname).read().split('\n')
except IOError as msg:
logger.error(msg)
return None
def safe_open_w(fname):
if fname == "-":
f = sys.stdout
else:
if not options.batch and os.access(fname, os.F_OK):
if not ask("File %s exists. Do you want to overwrite it?" % fname):
return None
try:
f = open(fname, "w")
except IOError as msg:
logger.error(msg)
return None
return f
def safe_close_w(f):
if f and f != sys.stdout:
f.close()
def is_path_sane(name):
if re.search(r"['`#*?$\[\];]", name):
logger.error("%s: bad path", name)
return False
return True
def is_filename_sane(name):
if re.search(r"['`/#*?$\[\];]", name):
logger.error("%s: bad filename", name)
return False
return True
def is_name_sane(name):
if re.search("[']", name):
logger.error("%s: bad name", name)
return False
return True
def show_dot_graph(dotfile, keep_file=False, desc="transition graph"):
cmd = "%s %s" % (config.core.dotty, dotfile)
if not keep_file:
cmd = "(%s; rm -f %s)" % (cmd, dotfile)
if options.regression_tests:
print(".EXT", cmd)
subprocess.Popen(cmd, shell=True, bufsize=0,
stdin=None, stdout=None, stderr=None, close_fds=True)
logger.info("starting %s to show %s", config.core.dotty, desc)
def ext_cmd(cmd, shell=True):
cmd = add_sudo(cmd)
if options.regression_tests:
print(".EXT", cmd)
logger.debug("invoke: %s", cmd)
return subprocess.call(cmd, shell=shell)
def ext_cmd_nosudo(cmd, shell=True):
if options.regression_tests:
print(".EXT", cmd)
return subprocess.call(cmd, shell=shell)
def rmdir_r(d):
# TODO: Make sure we're not deleting something we shouldn't!
if d and os.path.isdir(d):
shutil.rmtree(d)
def nvpairs2dict(pairs):
'''
takes a list of string of form ['a=b', 'c=d']
and returns {'a':'b', 'c':'d'}
'''
data = []
for var in pairs:
if '=' in var:
data.append(var.split('=', 1))
else:
data.append([var, None])
return dict(data)
def is_check_always():
'''
Even though the frequency may be set to always, it doesn't
make sense to do that with non-interactive sessions.
'''
return options.interactive and config.core.check_frequency == "always"
def get_check_rc():
'''
If the check mode is set to strict, then on errors we
return 2 which is the code for error. Otherwise, we
pretend that errors are warnings.
'''
return 2 if config.core.check_mode == "strict" else 1
_LOCKDIR = ".lockdir"
_PIDF = "pid"
def check_locker(lockdir):
if not os.path.isdir(os.path.join(lockdir, _LOCKDIR)):
return
s = file2str(os.path.join(lockdir, _LOCKDIR, _PIDF))
pid = convert2ints(s)
if not isinstance(pid, int):
logger.warning("history: removing malformed lock")
rmdir_r(os.path.join(lockdir, _LOCKDIR))
return
try:
os.kill(pid, 0)
except OSError as err:
        if err.errno == errno.ESRCH:
logger.info("history: removing stale lock")
rmdir_r(os.path.join(lockdir, _LOCKDIR))
else:
logger.error("%s: %s", _LOCKDIR, err.strerror)
@contextmanager
def lock(lockdir):
"""
Ensure that the lock is released properly
even in the face of an exception between
acquire and release.
"""
def acquire_lock():
check_locker(lockdir)
while True:
try:
os.makedirs(os.path.join(lockdir, _LOCKDIR))
str2file("%d" % os.getpid(), os.path.join(lockdir, _LOCKDIR, _PIDF))
return True
except OSError as err:
                if err.errno != errno.EEXIST:
logger.error("Failed to acquire lock to %s: %s", lockdir, err.strerror)
return False
time.sleep(0.1)
continue
else:
return False
has_lock = acquire_lock()
try:
yield
finally:
if has_lock:
rmdir_r(os.path.join(lockdir, _LOCKDIR))
def mkdirp(directory, mode=0o777, parents=True, exist_ok=True):
"""
Same behavior as the POSIX mkdir -p command
"""
Path(directory).mkdir(mode, parents, exist_ok)
def pipe_cmd_nosudo(cmd):
if options.regression_tests:
print(".EXT", cmd)
proc = subprocess.Popen(cmd,
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
(outp, err_outp) = proc.communicate()
proc.wait()
rc = proc.returncode
if rc != 0:
print(outp)
print(err_outp)
return rc
def run_cmd_on_remote(cmd, remote_addr, prompt_msg=None):
"""
Run a cmd on remote node
return (rc, stdout, err_msg)
"""
rc = 1
out_data = None
err_data = None
need_pw = check_ssh_passwd_need(remote_addr)
if need_pw and prompt_msg:
print(prompt_msg)
try:
result = parallax.parallax_call([remote_addr], cmd, need_pw)
rc, out_data, _ = result[0][1]
except ValueError as err:
err_match = re.search("Exited with error code ([0-9]+), Error output: (.*)", str(err))
if err_match:
rc, err_data = err_match.groups()
finally:
return int(rc), to_ascii(out_data), err_data
def get_stdout(cmd, input_s=None, stderr_on=True, shell=True, raw=False):
'''
Run a cmd, return stdout output.
Optional input string "input_s".
stderr_on controls whether to show output which comes on stderr.
'''
if stderr_on:
stderr = None
else:
stderr = subprocess.PIPE
if options.regression_tests:
print(".EXT", cmd)
proc = subprocess.Popen(cmd,
shell=shell,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=stderr)
stdout_data, stderr_data = proc.communicate(input_s)
if raw:
return proc.returncode, stdout_data
return proc.returncode, to_ascii(stdout_data).strip()
def get_stdout_stderr(cmd, input_s=None, shell=True, raw=False, no_reg=False):
'''
Run a cmd, return (rc, stdout, stderr)
'''
if options.regression_tests and not no_reg:
print(".EXT", cmd)
proc = subprocess.Popen(cmd,
shell=shell,
stdin=input_s and subprocess.PIPE or None,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout_data, stderr_data = proc.communicate(input_s)
if raw:
return proc.returncode, stdout_data, stderr_data
return proc.returncode, to_ascii(stdout_data).strip(), to_ascii(stderr_data).strip()
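# Typical call pattern for get_stdout_stderr() above (the command shown is only an
# illustration):
#   rc, out, err = get_stdout_stderr("crm_node -l")
#   if rc != 0:
#       logger.error("crm_node failed: %s", err)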
def stdout2list(cmd, stderr_on=True, shell=True):
'''
Run a cmd, fetch output, return it as a list of lines.
stderr_on controls whether to show output which comes on stderr.
'''
rc, s = get_stdout(add_sudo(cmd), stderr_on=stderr_on, shell=shell)
if not s:
return rc, []
return rc, s.split('\n')
def append_file(dest, src):
'Append src to dest'
try:
open(dest, "a").write(open(src).read())
return True
except IOError as msg:
logger.error("append %s to %s: %s", src, dest, msg)
return False
def get_dc(timeout=None):
cmd = "crmadmin -D"
if timeout:
cmd += " -t {}".format(timeout)
rc, s, _ = get_stdout_stderr(add_sudo(cmd))
if rc != 0:
return None
if not s.startswith("Designated"):
return None
return s.split()[-1]
def wait4dc(what="", show_progress=True):
'''
Wait for the DC to get into the S_IDLE state. This should be
invoked only after a CIB modification which would exercise
the PE. Parameter "what" is whatever the caller wants to be
printed if showing progress.
It is assumed that the DC is already in a different state,
usually it should be either PENGINE or TRANSITION. This
assumption may not be true, but there's a high chance that it
is since crmd should be faster to move through states than
this shell.
Further, it may also be that crmd already calculated the new
graph, did transition, and went back to the idle state. This
may in particular be the case if the transition turned out to
be empty.
Tricky. Though in practice it shouldn't be an issue.
    There's no timeout, as we expect the DC to eventually become
idle.
'''
dc = get_dc()
if not dc:
logger.warning("can't find DC")
return False
cmd = "crm_attribute -Gq -t crm_config -n crmd-transition-delay 2> /dev/null"
delay = get_stdout(add_sudo(cmd))[1]
if delay:
delaymsec = crm_msec(delay)
if delaymsec > 0:
            logger.info("The crmd-transition-delay is configured. Waiting %d msec before checking DC status.", delaymsec)
time.sleep(delaymsec // 1000)
cnt = 0
output_started = 0
init_sleep = 0.25
max_sleep = 1.00
sleep_time = init_sleep
while True:
dc = get_dc()
if not dc:
logger.warning("DC lost during wait")
return False
cmd = "crmadmin -S %s" % dc
rc, s = get_stdout(add_sudo(cmd))
if not s.startswith("Status"):
logger.warning("%s unexpected output: %s (exit code: %d)", cmd, s, rc)
return False
try:
dc_status = s.split()[-2]
except:
logger.warning("%s unexpected output: %s", cmd, s)
return False
if dc_status == "S_IDLE":
if output_started:
sys.stderr.write(" done\n")
return True
time.sleep(sleep_time)
if sleep_time < max_sleep:
sleep_time *= 2
if show_progress:
if not output_started:
output_started = 1
sys.stderr.write("waiting for %s to finish ." % what)
cnt += 1
if cnt % 5 == 0:
sys.stderr.write(".")
def run_ptest(graph_s, nograph, scores, utilization, actions, verbosity):
'''
Pipe graph_s thru ptest(8). Show graph using dotty if requested.
'''
actions_filter = "grep LogActions: | grep -vw Leave"
ptest = "2>&1 %s -x -" % config.core.ptest
if re.search("simulate", ptest) and \
not re.search("-[RS]", ptest):
ptest = "%s -S" % ptest
if verbosity:
if actions:
verbosity = 'v' * max(3, len(verbosity))
ptest = "%s -%s" % (ptest, verbosity.upper())
if scores:
ptest = "%s -s" % ptest
if utilization:
ptest = "%s -U" % ptest
if config.core.dotty and not nograph:
fd, dotfile = mkstemp()
ptest = "%s -D %s" % (ptest, dotfile)
else:
dotfile = None
# ptest prints to stderr
if actions:
ptest = "%s | %s" % (ptest, actions_filter)
if options.regression_tests:
ptest = ">/dev/null %s" % ptest
logger.debug("invoke: %s", ptest)
rc, s = get_stdout(ptest, input_s=graph_s)
if rc != 0:
logger.debug("'%s' exited with (rc=%d)", ptest, rc)
if actions and rc == 1:
logger.warning("No actions found.")
else:
logger.warning("Simulation was unsuccessful (RC=%d).", rc)
if dotfile:
if os.path.getsize(dotfile) > 0:
show_dot_graph(dotfile)
else:
logger.warning("ptest produced empty dot file")
else:
if not nograph:
logger.info("install graphviz to see a transition graph")
if s:
page_string(s)
return True
def is_id_valid(ident):
"""
Verify that the id follows the definition:
http://www.w3.org/TR/1999/REC-xml-names-19990114/#ns-qualnames
"""
if not ident:
return False
id_re = r"^[A-Za-z_][\w._-]*$"
return re.match(id_re, ident)
def check_range(a):
"""
Verify that the integer range in list a is valid.
"""
if len(a) != 2:
return False
if not isinstance(a[0], int) or not isinstance(a[1], int):
return False
return int(a[0]) <= int(a[1])
def crm_msec(t):
'''
See lib/common/utils.c:crm_get_msec().
'''
convtab = {
'ms': (1, 1),
'msec': (1, 1),
'us': (1, 1000),
'usec': (1, 1000),
'': (1000, 1),
's': (1000, 1),
'sec': (1000, 1),
'm': (60*1000, 1),
'min': (60*1000, 1),
'h': (60*60*1000, 1),
'hr': (60*60*1000, 1),
}
if not t:
return -1
r = re.match(r"\s*(\d+)\s*([a-zA-Z]+)?", t)
if not r:
return -1
if not r.group(2):
q = ''
else:
q = r.group(2).lower()
try:
mult, div = convtab[q]
except KeyError:
return -1
return (int(r.group(1))*mult) // div
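# Worked examples for crm_msec() above (values follow directly from convtab):
#   crm_msec("2min")   # -> 120000  (2 * 60 * 1000 ms)
#   crm_msec("500ms")  # -> 500
#   crm_msec("10")     # -> 10000   (bare numbers are taken as seconds)
#   crm_msec("junk")   # -> -1      (unparsable input)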
def crm_time_cmp(a, b):
return crm_msec(a) - crm_msec(b)
def shorttime(ts):
if isinstance(ts, datetime.datetime):
return ts.strftime("%X")
if ts is not None:
return time.strftime("%X", time.localtime(ts))
return time.strftime("%X", time.localtime(0))
def shortdate(ts):
if isinstance(ts, datetime.datetime):
return ts.strftime("%F")
if ts is not None:
return time.strftime("%F", time.localtime(ts))
return time.strftime("%F", time.localtime(0))
def sort_by_mtime(l):
'Sort a (small) list of files by time mod.'
l2 = [(os.stat(x).st_mtime, x) for x in l]
l2.sort()
return [x[1] for x in l2]
def file_find_by_name(root, filename):
    'Find a file within a tree matching filename'
assert root
assert filename
for root, dirnames, filenames in os.walk(root):
for filename in fnmatch.filter(filenames, filename):
return os.path.join(root, filename)
return None
def convert2ints(l):
"""
Convert a list of strings (or a string) to a list of ints.
All strings must be ints, otherwise conversion fails and None
is returned!
"""
try:
if isinstance(l, (tuple, list)):
return [int(x) for x in l]
# it's a string then
return int(l)
except ValueError:
return None
def is_int(s):
'Check if the string can be converted to an integer.'
try:
int(s)
return True
except ValueError:
return False
def is_process(s):
"""
Returns true if argument is the name of a running process.
s: process name
returns Boolean
"""
from os.path import join, basename
# find pids of running processes
pids = [pid for pid in os.listdir('/proc') if pid.isdigit()]
for pid in pids:
try:
cmdline = open(join('/proc', pid, 'cmdline'), 'rb').read()
procname = basename(to_ascii(cmdline).replace('\x00', ' ').split(' ')[0])
if procname == s:
return True
except EnvironmentError:
# a process may have died since we got the list of pids
pass
return False
def print_stacktrace():
"""
Print the stack at the site of call
"""
import traceback
import inspect
sf = inspect.currentframe().f_back.f_back
traceback.print_stack(sf)
def edit_file(fname):
'Edit a file.'
if not fname:
return
if not config.core.editor:
return
return ext_cmd_nosudo("%s %s" % (config.core.editor, fname))
def edit_file_ext(fname, template=''):
'''
Edit a file via a temporary file.
Raises IOError on any error.
'''
if not os.path.isfile(fname):
s = template
else:
s = open(fname).read()
filehash = hash(s)
tmpfile = str2tmp(s)
try:
try:
if edit_file(tmpfile) != 0:
return
s = open(tmpfile, 'r').read()
if hash(s) == filehash: # file unchanged
return
f2 = open(fname, 'w')
f2.write(s)
f2.close()
finally:
os.unlink(tmpfile)
except OSError as e:
raise IOError(e)
def need_pager(s, w, h):
from math import ceil
cnt = 0
for l in s.split('\n'):
# need to remove color codes
l = re.sub(r'\${\w+}', '', l)
cnt += int(ceil((len(l) + 0.5) / w))
if cnt >= h:
return True
return False
def term_render(s):
'Render for TERM.'
try:
return term.render(s)
except:
return s
def get_pager_cmd(*extra_opts):
'returns a commandline which calls the configured pager'
cmdline = [config.core.pager]
if os.path.basename(config.core.pager) == "less":
cmdline.append('-R')
cmdline.extend(extra_opts)
return ' '.join(cmdline)
def page_string(s):
'Page string rendered for TERM.'
if not s:
return
constants.need_reset = True
w, h = get_winsize()
if not need_pager(s, w, h):
print(term_render(s))
elif not config.core.pager or not can_ask() or options.batch:
print(term_render(s))
else:
pipe_string(get_pager_cmd(), term_render(s).encode('utf-8'))
constants.need_reset = False
def page_gen(g):
'Page lines generated by generator g'
w, h = get_winsize()
if not config.core.pager or not can_ask() or options.batch:
for line in g:
sys.stdout.write(term_render(line))
else:
pipe_string(get_pager_cmd(), term_render("".join(g)))
def page_file(filename):
'Open file in pager'
if not os.path.isfile(filename):
return
return ext_cmd_nosudo(get_pager_cmd(filename), shell=True)
def get_winsize():
try:
import curses
curses.setupterm()
w = curses.tigetnum('cols')
h = curses.tigetnum('lines')
except:
try:
w = os.environ['COLS']
h = os.environ['LINES']
except KeyError:
w = 80
h = 25
return w, h
def multicolumn(l):
'''
A ls-like representation of a list of strings.
A naive approach.
'''
min_gap = 2
w, _ = get_winsize()
max_len = 8
for s in l:
if len(s) > max_len:
max_len = len(s)
cols = w // (max_len + min_gap) # approx.
if not cols:
cols = 1
col_len = w // cols
for i in range(len(l) // cols + 1):
s = ''
for j in range(i * cols, (i + 1) * cols):
if not j < len(l):
break
if not s:
s = "%-*s" % (col_len, l[j])
elif (j + 1) % cols == 0:
s = "%s%s" % (s, l[j])
else:
s = "%s%-*s" % (s, col_len, l[j])
if s:
print(s)
def cli_replace_attr(pl, name, new_val):
for i, attr in enumerate(pl):
if attr[0] == name:
attr[1] = new_val
return
def cli_append_attr(pl, name, val):
pl.append([name, val])
def lines2cli(s):
'''
Convert a string into a list of lines. Replace continuation
characters. Strip white space, left and right. Drop empty lines.
'''
cl = []
l = s.split('\n')
cum = []
for p in l:
p = p.strip()
if p.endswith('\\'):
p = p.rstrip('\\')
cum.append(p)
else:
cum.append(p)
cl.append(''.join(cum).strip())
cum = []
if cum: # in case s ends with backslash
cl.append(''.join(cum))
return [x for x in cl if x]
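# Behaviour sketch for lines2cli() above (the input string is made up for
# illustration): continuation backslashes are joined, blank lines are dropped.
#   lines2cli("primitive p1 \\\n  ocf:heartbeat:Dummy\n\nnode n1")
#   # -> ['primitive p1 ocf:heartbeat:Dummy', 'node n1']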
def datetime_is_aware(dt):
"""
Determines if a given datetime.datetime is aware.
The logic is described in Python's docs:
http://docs.python.org/library/datetime.html#datetime.tzinfo
"""
return dt and dt.tzinfo is not None and dt.tzinfo.utcoffset(dt) is not None
def make_datetime_naive(dt):
"""
Ensures that the datetime is not time zone-aware:
The returned datetime object is a naive time in UTC.
"""
if dt and datetime_is_aware(dt):
return dt.replace(tzinfo=None) - dt.utcoffset()
return dt
def total_seconds(td):
"""
Backwards compatible implementation of timedelta.total_seconds()
"""
if hasattr(datetime.timedelta, 'total_seconds'):
return td.total_seconds()
return (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6) // 10**6
def datetime_to_timestamp(dt):
"""
Convert a datetime object into a floating-point second value
"""
try:
return total_seconds(make_datetime_naive(dt) - datetime.datetime(1970, 1, 1))
except Exception as e:
logger.error("datetime_to_timestamp error: %s", e)
return None
def timestamp_to_datetime(ts):
"""
Convert a timestamp into a naive datetime object
"""
import dateutil
import dateutil.tz
return make_datetime_naive(datetime.datetime.fromtimestamp(ts).replace(tzinfo=dateutil.tz.tzlocal()))
def parse_time(t, quiet=False):
'''
Try to make sense of the user provided time spec.
Use dateutil if available, otherwise strptime.
Return the datetime value.
Also does time zone elimination by passing the datetime
through a timestamp conversion if necessary
TODO: dateutil is very slow, avoid it if possible
'''
try:
from dateutil import parser, tz
dt = parser.parse(t)
if datetime_is_aware(dt):
ts = datetime_to_timestamp(dt)
if ts is None:
return None
dt = datetime.datetime.fromtimestamp(ts)
else:
# convert to UTC from local time
dt = dt - tz.tzlocal().utcoffset(dt)
except ValueError as msg:
if not quiet:
logger.error("parse_time %s: %s", t, msg)
return None
except ImportError as msg:
try:
tm = time.strptime(t)
dt = datetime.datetime(*tm[0:7])
except ValueError as msg:
logger.error("no dateutil, please provide times as printed by date(1)")
return None
return dt
def parse_to_timestamp(t, quiet=False):
'''
Read a string and convert it into a UNIX timestamp.
Added as an optimization of parse_time to avoid
extra conversion steps when result would be converted
into a timestamp anyway
'''
try:
from dateutil import parser, tz
dt = parser.parse(t)
if datetime_is_aware(dt):
return datetime_to_timestamp(dt)
# convert to UTC from local time
return total_seconds(dt - tz.tzlocal().utcoffset(dt) - datetime.datetime(1970, 1, 1))
except ValueError as msg:
if not quiet:
logger.error("parse_time %s: %s", t, msg)
return None
except ImportError as msg:
try:
tm = time.strptime(t)
dt = datetime.datetime(*tm[0:7])
return datetime_to_timestamp(dt)
except ValueError as msg:
logger.error("no dateutil, please provide times as printed by date(1)")
return None
def save_graphviz_file(ini_f, attr_d):
'''
Save graphviz settings to an ini file, if it does not exist.
'''
if os.path.isfile(ini_f):
logger.error("%s exists, please remove it first", ini_f)
return False
try:
        f = open(ini_f, "w")
except IOError as msg:
logger.error(msg)
return False
import configparser
p = configparser.ConfigParser()
for section, sect_d in attr_d.items():
p.add_section(section)
for n, v in sect_d.items():
p.set(section, n, v)
try:
p.write(f)
except IOError as msg:
logger.error(msg)
return False
f.close()
logger.info("graphviz attributes saved to %s", ini_f)
return True
def load_graphviz_file(ini_f):
'''
Load graphviz ini file, if it exists.
'''
if not os.path.isfile(ini_f):
return True, None
import configparser
p = configparser.ConfigParser()
try:
p.read(ini_f)
except Exception as msg:
logger.error(msg)
return False, None
_graph_d = {}
for section in p.sections():
d = {}
for n, v in p.items(section):
d[n] = v
_graph_d[section] = d
return True, _graph_d
def get_pcmk_version(dflt):
version = dflt
crmd = pacemaker_controld()
if crmd:
cmd = crmd
else:
return version
try:
rc, s, err = get_stdout_stderr("%s version" % (cmd))
if rc != 0:
logger.error("%s exited with %d [err: %s][out: %s]", cmd, rc, err, s)
else:
logger.debug("pacemaker version: [err: %s][out: %s]", err, s)
if err.startswith("CRM Version:"):
version = s.split()[0]
else:
version = s.split()[2]
logger.debug("found pacemaker version: %s", version)
except Exception as msg:
logger.warning("could not get the pacemaker version, bad installation?")
logger.warning(msg)
return version
def get_cib_property(cib_f, attr, dflt):
"""A poor man's get attribute procedure.
We don't want heavy parsing, this needs to be relatively
fast.
"""
open_t = "<cluster_property_set"
close_t = "</cluster_property_set"
attr_s = 'name="%s"' % attr
ver_patt = re.compile('value="([^"]+)"')
ver = dflt # return some version in any case
try:
f = open(cib_f, "r")
except IOError as msg:
logger.error(msg)
return ver
state = 0
for s in f:
if state == 0:
if open_t in s:
state += 1
elif state == 1:
if close_t in s:
break
if attr_s in s:
r = ver_patt.search(s)
if r:
ver = r.group(1)
break
f.close()
return ver
def get_cib_attributes(cib_f, tag, attr_l, dflt_l):
"""A poor man's get attribute procedure.
We don't want heavy parsing, this needs to be relatively
fast.
"""
open_t = "<%s " % tag
val_patt_l = [re.compile('%s="([^"]+)"' % x) for x in attr_l]
val_l = []
try:
f = open(cib_f, "rb").read()
except IOError as msg:
logger.error(msg)
return dflt_l
if os.path.splitext(cib_f)[-1] == '.bz2':
cib_bits = bz2.decompress(f)
else:
cib_bits = f
cib_s = to_ascii(cib_bits)
for s in cib_s.split('\n'):
if s.startswith(open_t):
i = 0
for patt in val_patt_l:
r = patt.search(s)
val_l.append(r and r.group(1) or dflt_l[i])
i += 1
break
return val_l
def is_larger_than_min_version(version, min_version):
return LooseVersion(version) >= LooseVersion(min_version)
def is_min_pcmk_ver(min_ver, cib_f=None):
if not constants.pcmk_version:
if cib_f:
constants.pcmk_version = get_cib_property(cib_f, "dc-version", "1.1.11")
logger.debug("found pacemaker version: %s in cib: %s", constants.pcmk_version, cib_f)
else:
constants.pcmk_version = get_pcmk_version("1.1.11")
return is_larger_than_min_version(constants.pcmk_version, min_ver)
def is_pcmk_118(cib_f=None):
return is_min_pcmk_ver("1.1.8", cib_f=cib_f)
@memoize
def cibadmin_features():
'''
# usage example:
if 'corosync-plugin' in cibadmin_features()
'''
rc, outp = get_stdout(['cibadmin', '-!'], shell=False)
if rc == 0:
m = re.match(r'Pacemaker\s(\S+)\s\(Build: ([^\)]+)\):\s(.*)', outp.strip())
if m and len(m.groups()) > 2:
return m.group(3).split()
return []
@memoize
def cibadmin_can_patch():
# cibadmin -P doesn't handle comments in <1.1.11 (unless patched)
return is_min_pcmk_ver("1.1.11")
# quote function from python module shlex.py in python 3.3
_find_unsafe = re.compile(r'[^\w@%+=:,./-]').search
def quote(s):
"""Return a shell-escaped version of the string *s*."""
if not s:
return "''"
if _find_unsafe(s) is None:
return s
# use single quotes, and put single quotes into double quotes
# the string $'b is then quoted as '$'"'"'b'
return "'" + s.replace("'", "'\"'\"'") + "'"
def doublequote(s):
"""Return a shell-escaped version of the string *s*."""
if not s:
return '""'
if _find_unsafe(s) is None:
return s
# use double quotes
return '"' + s.replace('"', "\\\"") + '"'
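# Quoting examples for quote()/doublequote() above (the inputs are illustrative):
#   quote("p-1.example")        # -> p-1.example   (only safe characters, unchanged)
#   quote("hello world")        # -> 'hello world' (wrapped in single quotes)
#   doublequote("hello world")  # -> "hello world" (wrapped in double quotes)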
def fetch_opts(args, opt_l):
'''
Get and remove option keywords from args.
They are always listed last, at the end of the line.
Return a list of options found. The caller can do
if keyw in optlist: ...
'''
re_opt = None
if opt_l[0].startswith("@"):
re_opt = re.compile("^%s$" % opt_l[0][1:])
del opt_l[0]
l = []
for i in reversed(list(range(len(args)))):
if (args[i] in opt_l) or (re_opt and re_opt.search(args[i])):
l.append(args.pop())
else:
break
return l
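# Usage sketch for fetch_opts() above (the argument list is illustrative): options
# are consumed from the tail of args and returned, the rest stays in args.
#   args = ['show', 'xml', 'changed']
#   fetch_opts(args, ['xml', 'changed'])   # -> ['changed', 'xml'], args is now ['show']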
_LIFETIME = ["reboot", "forever"]
_ISO8601_RE = re.compile("(PT?[0-9]|[0-9]+.*[:-])")
def fetch_lifetime_opt(args, iso8601=True):
'''
Get and remove a lifetime option from args. It can be one of
lifetime_options or an ISO 8601 formatted period/time. There
is apparently no good support in python for this format, so
we cheat a bit.
'''
if args:
opt = args[-1]
if opt in _LIFETIME or (iso8601 and _ISO8601_RE.match(opt)):
return args.pop()
return None
def resolve_hostnames(hostnames):
'''
Tries to resolve the given list of hostnames.
returns (ok, failed-hostname)
ok: True if all hostnames resolved
failed-hostname: First failed hostname resolution
'''
import socket
for node in hostnames:
try:
socket.gethostbyname(node)
except socket.error:
return False, node
return True, None
def list_corosync_node_names():
'''
Returns list of nodes configured
in corosync.conf
'''
try:
cfg = os.getenv('COROSYNC_MAIN_CONFIG_FILE', '/etc/corosync/corosync.conf')
lines = open(cfg).read().split('\n')
name_re = re.compile(r'\s*name:\s+(.*)')
names = []
for line in lines:
name = name_re.match(line)
if name:
names.append(name.group(1))
return names
except Exception:
return []
def list_corosync_nodes():
'''
Returns list of nodes configured
in corosync.conf
'''
try:
cfg = os.getenv('COROSYNC_MAIN_CONFIG_FILE', '/etc/corosync/corosync.conf')
lines = open(cfg).read().split('\n')
addr_re = re.compile(r'\s*ring0_addr:\s+(.*)')
nodes = []
for line in lines:
addr = addr_re.match(line)
if addr:
nodes.append(addr.group(1))
return nodes
except Exception:
return []
def print_cluster_nodes():
"""
Print the output of crm_node -l
"""
rc, out, _ = get_stdout_stderr("crm_node -l")
if rc == 0 and out:
print("{}\n".format(out))
def list_cluster_nodes():
'''
Returns a list of nodes in the cluster.
'''
from . import xmlutil
cib = None
rc, out, err = get_stdout_stderr(constants.CIB_QUERY)
# When cluster service running
if rc == 0:
cib = etree.fromstring(out)
# Static situation
else:
cib_path = os.getenv('CIB_file', constants.CIB_RAW_FILE)
if not os.path.isfile(cib_path):
return None
cib = xmlutil.file2cib_elem(cib_path)
if cib is None:
return None
node_list = []
for node in cib.xpath(constants.XML_NODE_PATH):
name = node.get('uname') or node.get('id')
if node.get('type') == 'remote':
srv = cib.xpath("//primitive[@id='%s']/instance_attributes/nvpair[@name='server']" % (name))
if srv:
continue
node_list.append(name)
return node_list
def cluster_run_cmd(cmd, node_list=[]):
"""
Run cmd in cluster nodes
"""
nodelist = node_list or list_cluster_nodes()
if not nodelist:
raise ValueError("Failed to get node list from cluster")
return parallax.parallax_call(nodelist, cmd)
def list_cluster_nodes_except_me():
"""
Get cluster node list and filter out self
"""
node_list = list_cluster_nodes()
if not node_list:
raise ValueError("Failed to get node list from cluster")
me = this_node()
if me in node_list:
node_list.remove(me)
return node_list
def service_info(name):
p = is_program('systemctl')
if p:
rc, outp = get_stdout([p, 'show',
'-p', 'UnitFileState',
'-p', 'ActiveState',
'-p', 'SubState',
name + '.service'], shell=False)
if rc == 0:
info = []
for line in outp.split('\n'):
data = line.split('=', 1)
if len(data) == 2:
info.append(data[1].strip())
return '/'.join(info)
return None
def running_on(resource):
"returns list of node names where the given resource is running"
rsc_locate = "crm_resource --resource '%s' --locate"
rc, out, err = get_stdout_stderr(rsc_locate % (resource))
if rc != 0:
return []
nodes = []
head = "resource %s is running on: " % (resource)
for line in out.split('\n'):
if line.strip().startswith(head):
w = line[len(head):].split()
if w:
nodes.append(w[0])
logger.debug("%s running on: %s", resource, nodes)
return nodes
# This RE matches nvpair values that can
# be left unquoted
_NOQUOTES_RE = re.compile(r'^[\w\.-]+$')
def noquotes(v):
return _NOQUOTES_RE.match(v) is not None
def unquote(s):
"""
Reverse shell-quoting a string, so the string '"a b c"'
becomes 'a b c'
"""
sp = shlex.split(s)
if sp:
return sp[0]
return ""
def parse_sysconfig(sysconfig_file):
"""
Reads a sysconfig file into a dict
"""
ret = {}
if os.path.isfile(sysconfig_file):
for line in open(sysconfig_file).readlines():
if line.lstrip().startswith('#'):
continue
try:
key, val = line.split("=", 1)
ret[key] = unquote(val)
except ValueError:
pass
return ret
def sysconfig_set(sysconfig_file, **values):
"""
Set the values in the sysconfig file, updating the variables
if they exist already, appending them if not.
"""
outp = ""
if os.path.isfile(sysconfig_file):
for line in open(sysconfig_file).readlines():
if line.lstrip().startswith('#'):
outp += line
else:
matched = False
try:
key, _ = line.split("=", 1)
for k, v in values.items():
if k == key:
matched = True
outp += '%s=%s\n' % (k, doublequote(v))
del values[k]
break
if not matched:
outp += line
except ValueError:
outp += line
for k, v in values.items():
outp += '%s=%s\n' % (k, doublequote(v))
str2file(outp, sysconfig_file)
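# Round-trip sketch for parse_sysconfig()/sysconfig_set() above (the path and key
# are illustrative):
#   sysconfig_set("/etc/sysconfig/sbd", SBD_WATCHDOG_TIMEOUT="15")
#   parse_sysconfig("/etc/sysconfig/sbd").get("SBD_WATCHDOG_TIMEOUT")   # -> '15'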
def remote_diff_slurp(nodes, filename):
try:
import parallax
except ImportError:
raise ValueError("Parallax is required to diff")
from . import tmpfiles
tmpdir = tmpfiles.create_dir()
opts = parallax.Options()
opts.localdir = tmpdir
dst = os.path.basename(filename)
return list(parallax.slurp(nodes, filename, dst, opts).items())
def remote_diff_this(local_path, nodes, this_node):
try:
import parallax
except ImportError:
raise ValueError("Parallax is required to diff")
by_host = remote_diff_slurp(nodes, local_path)
for host, result in by_host:
if isinstance(result, parallax.Error):
raise ValueError("Failed on %s: %s" % (host, str(result)))
_, _, _, path = result
_, s = get_stdout("diff -U 0 -d -b --label %s --label %s %s %s" %
(host, this_node, path, local_path))
page_string(s)
def remote_diff(local_path, nodes):
try:
import parallax
except ImportError:
raise ValueError("parallax is required to diff")
by_host = remote_diff_slurp(nodes, local_path)
for host, result in by_host:
if isinstance(result, parallax.Error):
raise ValueError("Failed on %s: %s" % (host, str(result)))
h1, r1 = by_host[0]
h2, r2 = by_host[1]
_, s = get_stdout("diff -U 0 -d -b --label %s --label %s %s %s" %
(h1, h2, r1[3], r2[3]))
page_string(s)
def remote_checksum(local_path, nodes, this_node):
try:
import parallax
except ImportError:
raise ValueError("Parallax is required to diff")
import hashlib
by_host = remote_diff_slurp(nodes, local_path)
for host, result in by_host:
if isinstance(result, parallax.Error):
raise ValueError(str(result))
print("%-16s SHA1 checksum of %s" % ('Host', local_path))
if this_node not in nodes:
print("%-16s: %s" % (this_node, hashlib.sha1(open(local_path).read()).hexdigest()))
for host, result in by_host:
_, _, _, path = result
print("%-16s: %s" % (host, hashlib.sha1(open(path).read()).hexdigest()))
def cluster_copy_file(local_path, nodes=None):
"""
Copies given file to all other cluster nodes.
"""
try:
import parallax
except ImportError:
raise ValueError("parallax is required to copy cluster files")
if not nodes:
nodes = list_cluster_nodes()
nodes.remove(this_node())
opts = parallax.Options()
opts.timeout = 60
opts.ssh_options += ['ControlPersist=no']
ok = True
for host, result in parallax.copy(nodes,
local_path,
local_path, opts).items():
if isinstance(result, parallax.Error):
logger.error("Failed to push %s to %s: %s", local_path, host, result)
ok = False
else:
logger.info(host)
return ok
# a set of fnmatch patterns to match attributes whose values
# should be obscured as a sequence of **** when printed
_obscured_nvpairs = []
def obscured(key, value):
if key is not None and value is not None:
for o in _obscured_nvpairs:
if fnmatch.fnmatch(key, o):
return '*' * 6
return value
@contextmanager
def obscure(obscure_list):
global _obscured_nvpairs
prev = _obscured_nvpairs
_obscured_nvpairs = obscure_list
try:
yield
finally:
_obscured_nvpairs = prev
def gen_nodeid_from_ipv6(addr):
return int(ipaddress.ip_address(addr)) % 1000000000
def _cloud_metadata_request(uri, headers={}):
try:
import urllib2 as urllib
except ImportError:
import urllib.request as urllib
req = urllib.Request(uri)
for header, value in headers.items():
req.add_header(header, value)
try:
resp = urllib.urlopen(req, timeout=5)
content = resp.read()
if type(content) != str:
return content.decode('utf-8').strip()
return content.strip()
except urllib.URLError:
return None
def detect_aws():
"""
Detect if in AWS
"""
system_version = get_stdout_or_raise_error("dmidecode -s system-version")
return re.search(r".*amazon.*", system_version) is not None
def detect_azure():
"""
Detect if in Azure
"""
    # Check both system-manufacturer and chassis-asset-tag.
    # In some Azure environments, dmidecode -s system-manufacturer
    # returns American Megatrends Inc. instead of Microsoft Corporation.
    # The more reliable check is that dmidecode -s chassis-asset-tag returns
    # 7783-7084-3265-9085-8269-3286-77, i.e. the ASCII codes of "MSFT AZURE VM".
system_manufacturer = get_stdout_or_raise_error("dmidecode -s system-manufacturer")
chassis_asset_tag = get_stdout_or_raise_error("dmidecode -s chassis-asset-tag")
if "microsoft corporation" in system_manufacturer.lower() or \
            ''.join([chr(int(n)) for n in re.findall(r"\d\d", chassis_asset_tag)]) == "MSFT AZURE VM":
# To detect azure we also need to make an API request
result = _cloud_metadata_request(
"http://169.254.169.254/metadata/instance/network/interface/0/ipv4/ipAddress/0/privateIpAddress?api-version=2017-08-01&format=text",
headers={"Metadata": "true"})
if result:
return True
return False
def detect_gcp():
"""
Detect if in GCP
"""
bios_vendor = get_stdout_or_raise_error("dmidecode -s bios-vendor")
if "Google" in bios_vendor:
# To detect GCP we also need to make an API request
result = _cloud_metadata_request(
"http://metadata.google.internal/computeMetadata/v1/instance/network-interfaces/0/ip",
headers={"Metadata-Flavor": "Google"})
if result:
return True
return False
@memoize
def detect_cloud():
"""
Tries to determine which (if any) cloud environment
the cluster is running on.
This is mainly done using dmidecode.
If the host cannot be determined, this function
returns None. Otherwise, it returns a string
identifying the platform.
These are the currently known platforms:
* amazon-web-services
* microsoft-azure
* google-cloud-platform
"""
if not is_program("dmidecode"):
return None
aws = detect_aws()
if aws:
return constants.CLOUD_AWS
azure = detect_azure()
if azure:
return constants.CLOUD_AZURE
gcp = detect_gcp()
if gcp:
return constants.CLOUD_GCP
return None
def debug_timestamp():
return datetime.datetime.now().strftime('%Y/%m/%d %H:%M:%S')
def get_member_iplist():
    rc, out, err = get_stdout_stderr("corosync-cmapctl -b runtime.totem.pg.mrp.srp.members")
if rc != 0:
logger.debug(err)
return None
ip_list = []
for line in out.split('\n'):
match = re.search(r'ip\((.*?)\)', line)
if match:
ip_list.append(match.group(1))
return ip_list
def get_iplist_corosync_using():
"""
Get ip list used by corosync
"""
rc, out, err = get_stdout_stderr("corosync-cfgtool -s")
if rc != 0:
raise ValueError(err)
return re.findall(r'id\s*=\s*(.*)', out)
def check_ssh_passwd_need(host, user="root"):
"""
    Check whether access to host needs a password
"""
ssh_options = "-o StrictHostKeyChecking=no -o EscapeChar=none -o ConnectTimeout=15"
ssh_cmd = "ssh {} -T -o Batchmode=yes {} true".format(ssh_options, host)
ssh_cmd = add_su(ssh_cmd, user)
rc, _, _ = get_stdout_stderr(ssh_cmd)
return rc != 0
def check_port_open(ip, port):
import socket
family = socket.AF_INET6 if IP.is_ipv6(ip) else socket.AF_INET
with closing(socket.socket(family, socket.SOCK_STREAM)) as sock:
if sock.connect_ex((ip, port)) == 0:
return True
else:
return False
def valid_port(port):
return int(port) >= 1024 and int(port) <= 65535
def is_qdevice_configured():
from . import corosync
return corosync.get_value("quorum.device.model") == "net"
def is_qdevice_tls_on():
from . import corosync
return corosync.get_value("quorum.device.net.tls") == "on"
def get_nodeinfo_from_cmaptool():
nodeid_ip_dict = {}
rc, out = get_stdout("corosync-cmapctl -b runtime.totem.pg.mrp.srp.members")
if rc != 0:
return nodeid_ip_dict
for line in out.split('\n'):
match = re.search(r'members\.(.*)\.ip', line)
if match:
node_id = match.group(1)
iplist = re.findall(r'[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}', line)
nodeid_ip_dict[node_id] = iplist
return nodeid_ip_dict
def get_iplist_from_name(name):
"""
Given node host name, return this host's ip list in corosync cmap
"""
ip_list = []
nodeid = get_nodeid_from_name(name)
if not nodeid:
return ip_list
nodeinfo = {}
nodeinfo = get_nodeinfo_from_cmaptool()
if not nodeinfo:
return ip_list
return nodeinfo[nodeid]
def valid_nodeid(nodeid):
from . import bootstrap
if not service_is_active('corosync.service'):
return False
for _id, _ in get_nodeinfo_from_cmaptool().items():
if _id == nodeid:
return True
return False
def get_nodeid_from_name(name):
rc, out = get_stdout('crm_node -l')
if rc != 0:
return None
res = re.search(r'^([0-9]+) {} '.format(name), out, re.M)
if res:
return res.group(1)
else:
return None
def check_space_option_value(options):
if not isinstance(options, argparse.Namespace):
raise ValueError("Expected type of \"options\" is \"argparse.Namespace\", not \"{}\"".format(type(options)))
for opt in vars(options):
value = getattr(options, opt)
if isinstance(value, str) and len(value.strip()) == 0:
raise ValueError("Space value not allowed for dest \"{}\"".format(opt))
def interface_choice():
_, out = get_stdout("ip a")
# should consider interface format like "ethx@xxx"
interface_list = re.findall(r'(?:[0-9]+:) (.*?)(?=: |@.*?: )', out)
return [nic for nic in interface_list if nic != "lo"]
class IP(object):
"""
Class to get some properties of IP address
"""
def __init__(self, addr):
"""
Init function
"""
self.addr = addr
@property
def ip_address(self):
"""
Create ipaddress instance
"""
return ipaddress.ip_address(self.addr)
@property
def version(self):
"""
Get IP address version
"""
return self.ip_address.version
@classmethod
def is_mcast(cls, addr):
"""
Check whether the address is multicast address
"""
cls_inst = cls(addr)
return cls_inst.ip_address.is_multicast
@classmethod
def is_ipv6(cls, addr):
"""
Check whether the address is IPV6 address
"""
return cls(addr).version == 6
@classmethod
def is_valid_ip(cls, addr):
"""
Check whether the address is valid IP address
"""
cls_inst = cls(addr)
try:
cls_inst.ip_address
except ValueError:
return False
else:
return True
@property
def is_loopback(self):
"""
Check whether the address is loopback address
"""
return self.ip_address.is_loopback
@property
def is_link_local(self):
"""
Check whether the address is link-local address
"""
return self.ip_address.is_link_local
class Interface(IP):
"""
Class to get information from one interface
"""
def __init__(self, ip_with_mask):
"""
Init function
"""
self.ip, self.mask = ip_with_mask.split('/')
super(__class__, self).__init__(self.ip)
@property
def ip_with_mask(self):
"""
Get ip with netmask
"""
return '{}/{}'.format(self.ip, self.mask)
@property
def ip_interface(self):
"""
Create ip_interface instance
"""
return ipaddress.ip_interface(self.ip_with_mask)
@property
def network(self):
"""
Get network address
"""
return str(self.ip_interface.network.network_address)
def ip_in_network(self, addr):
"""
Check whether the addr in the network
"""
return IP(addr).ip_address in self.ip_interface.network
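# Usage sketch for the Interface helper above (the addresses are examples only):
#   iface = Interface("192.168.122.241/24")
#   iface.network                          # -> '192.168.122.0'
#   iface.ip_in_network("192.168.122.9")   # -> True
#   iface.ip_in_network("10.0.0.9")        # -> False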
class InterfacesInfo(object):
"""
Class to collect interfaces information on local node
"""
def __init__(self, ipv6=False, second_heartbeat=False, custom_nic_list=[]):
"""
Init function
On init process,
"ipv6" is provided by -I option
"second_heartbeat" is provided by -M option
"custom_nic_list" is provided by -i option
"""
self.ip_version = 6 if ipv6 else 4
self.second_heartbeat = second_heartbeat
self._default_nic_list = custom_nic_list
self._nic_info_dict = {}
def get_interfaces_info(self):
"""
Try to get interfaces info dictionary via "ip" command
IMPORTANT: This is the method that populates the data, should always be called after initialize
"""
cmd = "ip -{} -o addr show".format(self.ip_version)
rc, out, err = get_stdout_stderr(cmd)
if rc != 0:
raise ValueError(err)
        # the format of each line will look like:
# 2: enp1s0 inet 192.168.122.241/24 brd 192.168.122.255 scope global enp1s0\ valid_lft forever preferred_lft forever
for line in out.splitlines():
_, nic, _, ip_with_mask, *_ = line.split()
# maybe from tun interface
if not '/' in ip_with_mask:
continue
interface_inst = Interface(ip_with_mask)
if interface_inst.is_loopback:
continue
            # one nic might be configured with multiple IP addresses
if nic not in self._nic_info_dict:
self._nic_info_dict[nic] = []
self._nic_info_dict[nic].append(interface_inst)
if not self._nic_info_dict:
raise ValueError("No address configured")
if self.second_heartbeat and len(self._nic_info_dict) == 1:
raise ValueError("Cannot configure second heartbeat, since only one address is available")
@property
def nic_list(self):
"""
Get interfaces name list
"""
return list(self._nic_info_dict.keys())
@property
def interface_list(self):
"""
Get instance list of class Interface
"""
_interface_list = []
for interface in self._nic_info_dict.values():
_interface_list.extend(interface)
return _interface_list
@property
def ip_list(self):
"""
Get IP address list
"""
return [interface.ip for interface in self.interface_list]
@classmethod
def get_local_ip_list(cls, is_ipv6):
"""
Get IP address list
"""
cls_inst = cls(is_ipv6)
cls_inst.get_interfaces_info()
return cls_inst.ip_list
@classmethod
def ip_in_local(cls, addr):
"""
Check whether given address was in one of local address
"""
cls_inst = cls(IP.is_ipv6(addr))
cls_inst.get_interfaces_info()
return addr in cls_inst.ip_list
@property
def network_list(self):
"""
Get network list
"""
return list(set([interface.network for interface in self.interface_list]))
def _nic_first_ip(self, nic):
"""
Get the first IP of specific nic
"""
return self._nic_info_dict[nic][0].ip
def get_default_nic_list_from_route(self):
"""
Get default nic list from route
"""
if self._default_nic_list:
return self._default_nic_list
#TODO what if user only has ipv6 route?
cmd = "ip -o route show"
rc, out, err = get_stdout_stderr(cmd)
if rc != 0:
raise ValueError(err)
res = re.search(r'^default via .* dev (.*?) ', out)
if res:
self._default_nic_list = [res.group(1)]
else:
if not self.nic_list:
self.get_interfaces_info()
logger.warning("No default route configured. Using the first found nic")
self._default_nic_list = [self.nic_list[0]]
return self._default_nic_list
def get_default_ip_list(self):
"""
Get default IP list will be used by corosync
"""
if not self._default_nic_list:
self.get_default_nic_list_from_route()
if not self.nic_list:
self.get_interfaces_info()
_ip_list = []
for nic in self._default_nic_list:
            # in case the given interface does not exist
if nic not in self.nic_list:
raise ValueError("Failed to detect IP address for {}".format(nic))
_ip_list.append(self._nic_first_ip(nic))
        # in case -M was specified but only one interface was given via -i
if self.second_heartbeat and len(self._default_nic_list) == 1:
for nic in self.nic_list:
if nic not in self._default_nic_list:
_ip_list.append(self._nic_first_ip(nic))
break
return _ip_list
@classmethod
def ip_in_network(cls, addr):
"""
Check whether given address was in one of local networks
"""
cls_inst = cls(IP.is_ipv6(addr))
cls_inst.get_interfaces_info()
for interface_inst in cls_inst.interface_list:
if interface_inst.ip_in_network(addr):
return True
return False
def check_file_content_included(source_file, target_file):
"""
Check whether target_file includes contents of source_file
"""
if not os.path.exists(source_file):
raise ValueError("File {} not exist".format(source_file))
if not os.path.exists(target_file):
return False
with open(target_file, 'r') as target_fd:
target_data = target_fd.read()
with open(source_file, 'r') as source_fd:
source_data = source_fd.read()
return source_data in target_data
class ServiceManager(object):
"""
Class to manage systemctl services
"""
ACTION_MAP = {
"enable": "enable",
"disable": "disable",
"start": "start",
"stop": "stop",
"is_enabled": "is-enabled",
"is_active": "is-active",
"is_available": "list-unit-files"
}
def __init__(self, service_name, remote_addr=None, node_list=[]):
"""
Init function
When node_list set, execute action between nodes in parallel
"""
self.service_name = service_name
self.remote_addr = remote_addr
self.node_list = node_list
def _do_action(self, action_type):
"""
Actual do actions to manage service
"""
if action_type not in self.ACTION_MAP.values():
            raise ValueError("action_type should be one of {}".format('/'.join(list(self.ACTION_MAP.values()))))
cmd = "systemctl {} {}".format(action_type, self.service_name)
if self.node_list:
cluster_run_cmd(cmd, self.node_list)
return True, None
elif self.remote_addr and self.remote_addr != this_node():
prompt_msg = "Run \"{}\" on {}".format(cmd, self.remote_addr)
rc, output, err = run_cmd_on_remote(cmd, self.remote_addr, prompt_msg)
else:
rc, output, err = get_stdout_stderr(cmd)
if rc != 0 and err:
raise ValueError("Run \"{}\" error: {}".format(cmd, err))
return rc == 0, output
@property
def is_available(self):
return self.service_name in self._do_action(self.ACTION_MAP["is_available"])[1]
@property
def is_enabled(self):
return self._do_action(self.ACTION_MAP["is_enabled"])[0]
@property
def is_active(self):
return self._do_action(self.ACTION_MAP["is_active"])[0]
def start(self):
self._do_action(self.ACTION_MAP["start"])
def stop(self):
self._do_action(self.ACTION_MAP["stop"])
def enable(self):
self._do_action(self.ACTION_MAP["enable"])
def disable(self):
self._do_action(self.ACTION_MAP["disable"])
@classmethod
def service_is_available(cls, name, remote_addr=None):
"""
Check whether service is available
"""
inst = cls(name, remote_addr)
return inst.is_available
@classmethod
def service_is_enabled(cls, name, remote_addr=None):
"""
Check whether service is enabled
"""
inst = cls(name, remote_addr)
return inst.is_enabled
@classmethod
def service_is_active(cls, name, remote_addr=None):
"""
Check whether service is active
"""
inst = cls(name, remote_addr)
return inst.is_active
@classmethod
def start_service(cls, name, enable=False, remote_addr=None, node_list=[]):
"""
Start service
"""
inst = cls(name, remote_addr, node_list)
if enable:
inst.enable()
inst.start()
@classmethod
def stop_service(cls, name, disable=False, remote_addr=None, node_list=[]):
"""
Stop service
"""
inst = cls(name, remote_addr, node_list)
if disable:
inst.disable()
inst.stop()
@classmethod
def enable_service(cls, name, remote_addr=None, node_list=[]):
"""
Enable service
"""
inst = cls(name, remote_addr, node_list)
if inst.is_available and not inst.is_enabled:
inst.enable()
@classmethod
def disable_service(cls, name, remote_addr=None, node_list=[]):
"""
Disable service
"""
inst = cls(name, remote_addr, node_list)
if inst.is_available and inst.is_enabled:
inst.disable()
service_is_available = ServiceManager.service_is_available
service_is_enabled = ServiceManager.service_is_enabled
service_is_active = ServiceManager.service_is_active
start_service = ServiceManager.start_service
stop_service = ServiceManager.stop_service
enable_service = ServiceManager.enable_service
disable_service = ServiceManager.disable_service
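# Usage sketch added for illustration (not part of the original module): the
# aliases above expose ServiceManager's classmethods at module level, so a
# caller can manage a unit without instantiating the class. The service name
# is only an example.
def _example_service_manager_usage():
    if service_is_available("corosync.service"):
        if not service_is_active("corosync.service"):
            start_service("corosync.service", enable=True)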
def package_is_installed(pkg, remote_addr=None):
"""
Check if package is installed
"""
cmd = "rpm -q --quiet {}".format(pkg)
if remote_addr:
# check on remote
prompt_msg = "Check whether {} is installed on {}".format(pkg, remote_addr)
rc, _, _ = run_cmd_on_remote(cmd, remote_addr, prompt_msg)
else:
# check on local
rc, _ = get_stdout(cmd)
return rc == 0
def ping_node(node):
"""
Check if the remote node is reachable
"""
rc, _, err = get_stdout_stderr("ping -c 1 {}".format(node))
if rc != 0:
raise ValueError("host \"{}\" is unreachable: {}".format(node, err))
def calculate_quorate_status(expected_votes, actual_votes):
"""
    Given expected votes and actual votes, calculate whether the cluster is quorate
"""
return int(actual_votes)/int(expected_votes) > 0.5
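# Worked example added as a sketch (not in the original module): with 3
# expected votes, 2 actual votes give 2/3 > 0.5 (quorate) while 1 actual vote
# gives 1/3 (not quorate).
def _example_quorate_calculation():
    assert calculate_quorate_status(3, 2)
    assert not calculate_quorate_status(3, 1)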
def get_stdout_or_raise_error(cmd, remote=None, success_val_list=[0], no_raise=False):
"""
Common function to get stdout from cmd or raise exception
"""
if remote:
cmd = "ssh {} root@{} \"{}\"".format(SSH_OPTION, remote, cmd)
rc, out, err = get_stdout_stderr(cmd, no_reg=True)
if rc not in success_val_list and not no_raise:
raise ValueError("Failed to run \"{}\": {}".format(cmd, err))
return out
def get_quorum_votes_dict(remote=None):
"""
    Return a dictionary containing the expected and total votes
"""
out = get_stdout_or_raise_error("corosync-quorumtool -s", remote=remote, success_val_list=[0, 2])
    return dict(re.findall(r"(Expected|Total) votes:\s+(\d+)", out))
def check_all_nodes_reachable():
"""
Check if all cluster nodes are reachable
"""
out = get_stdout_or_raise_error("crm_node -l")
    for node in re.findall(r"\d+ (.*) \w+", out):
ping_node(node)
def re_split_string(reg, string):
"""
    Split a string by a regex, filter out empty items
"""
return [x for x in re.split(reg, string) if x]
def is_block_device(dev):
"""
Check if dev is a block device
"""
try:
rc = S_ISBLK(os.stat(dev).st_mode)
except OSError:
return False
return rc
def has_stonith_running():
"""
Check if any stonith device registered
"""
from . import sbd
out = get_stdout_or_raise_error("stonith_admin -L")
has_stonith_device = re.search("[1-9]+ fence device[s]* found", out) is not None
using_diskless_sbd = sbd.SBDManager.is_using_diskless_sbd()
return has_stonith_device or using_diskless_sbd
def parse_append_action_argument(input_list, parse_re="[; ]"):
"""
Parse append action argument into a list, like:
-s "/dev/sdb1;/dev/sdb2"
-s /dev/sdb1 -s /dev/sbd2
Both return ["/dev/sdb1", "/dev/sdb2"]
"""
result_list = []
for item in input_list:
result_list += re_split_string(parse_re, item)
return result_list
def has_disk_mounted(dev):
"""
Check if device already mounted
"""
out = get_stdout_or_raise_error("mount")
return re.search("\n{} on ".format(dev), out) is not None
def has_mount_point_used(directory):
"""
Check if mount directory already mounted
"""
out = get_stdout_or_raise_error("mount")
return re.search(" on {}".format(directory), out) is not None
def all_exist_id():
"""
Get current exist id list
"""
from .cibconfig import cib_factory
cib_factory.refresh()
return cib_factory.id_list()
def randomword(length=6):
"""
Generate random word
"""
letters = string.ascii_lowercase
return ''.join(random.choice(letters) for i in range(length))
def gen_unused_id(exist_id_list, prefix="", length=6):
"""
Generate unused id
"""
unused_id = prefix or randomword(length)
while unused_id in exist_id_list:
unused_id = re.sub("$", "-{}".format(randomword(length)), unused_id)
return unused_id
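# Example behaviour, added as a sketch (not in the original module): the
# preferred id is kept when free, otherwise a random suffix is appended until
# the id is unused.
def _example_gen_unused_id():
    assert gen_unused_id([], prefix="vg1") == "vg1"
    assert gen_unused_id(["vg1"], prefix="vg1").startswith("vg1-")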
def get_all_vg_name():
"""
Get all available VGs
"""
out = get_stdout_or_raise_error("vgdisplay")
    return re.findall(r"VG Name\s+(.*)", out)
def get_pe_number(vg_id):
"""
Get pe number
"""
output = get_stdout_or_raise_error("vgdisplay {}".format(vg_id))
    res = re.search(r"Total PE\s+(\d+)", output)
if not res:
raise ValueError("Cannot find PE on VG({})".format(vg_id))
return int(res.group(1))
def has_dev_partitioned(dev, peer=None):
"""
Check if device has partitions
"""
return len(get_dev_info(dev, "NAME", peer=peer).splitlines()) > 1
def get_dev_uuid(dev, peer=None):
"""
Get UUID of device on local or peer node
"""
out = get_dev_info(dev, "UUID", peer=peer).splitlines()
return out[0] if out else get_dev_uuid_2(dev, peer)
def get_dev_uuid_2(dev, peer=None):
"""
Get UUID of device using blkid
"""
out = get_stdout_or_raise_error("blkid {}".format(dev), remote=peer)
    res = re.search(r'UUID="(.*?)"', out)
return res.group(1) if res else None
def get_dev_fs_type(dev, peer=None):
"""
Get filesystem type of device
"""
return get_dev_info(dev, "FSTYPE", peer=peer)
def get_dev_info(dev, *_type, peer=None):
"""
Get device info using lsblk
"""
cmd = "lsblk -fno {} {}".format(','.join(_type), dev)
return get_stdout_or_raise_error(cmd, remote=peer)
def is_dev_used_for_lvm(dev, peer=None):
"""
Check if device is LV
"""
return "lvm" in get_dev_info(dev, "TYPE", peer=peer)
def is_dev_a_plain_raw_disk_or_partition(dev, peer=None):
"""
Check if device is a raw disk or partition
"""
out = get_dev_info(dev, "TYPE", peer=peer)
return re.search("(disk|part)", out) is not None
def compare_uuid_with_peer_dev(dev_list, peer):
"""
Check if device UUID is the same with peer's device
"""
for dev in dev_list:
local_uuid = get_dev_uuid(dev)
if not local_uuid:
raise ValueError("Cannot find UUID for {} on local".format(dev))
peer_uuid = get_dev_uuid(dev, peer)
if not peer_uuid:
raise ValueError("Cannot find UUID for {} on {}".format(dev, peer))
if local_uuid != peer_uuid:
raise ValueError("UUID of {} not same with peer {}".format(dev, peer))
def append_res_to_group(group_id, res_id):
"""
Append resource to exist group
"""
cmd = "crm configure modgroup {} add {}".format(group_id, res_id)
get_stdout_or_raise_error(cmd)
def get_qdevice_sync_timeout():
"""
Get qdevice sync_timeout
"""
out = get_stdout_or_raise_error("crm corosync status qdevice")
    res = re.search(r"Sync HB interval:\s+(\d+)ms", out)
if not res:
raise ValueError("Cannot find qdevice sync timeout")
return int(int(res.group(1))/1000)
def detect_virt():
"""
Detect if running in virt environment
"""
rc, _, _ = get_stdout_stderr("systemd-detect-virt")
return rc == 0
def fatal(error_msg):
"""
Raise exception to jump over this module,
handled by Context.run in ui_context.py
"""
raise ValueError(error_msg)
def is_standby(node):
"""
Check if the node is already standby
"""
out = get_stdout_or_raise_error("crm_mon -1")
return re.search(r'Node\s+{}:\s+standby'.format(node), out) is not None
def get_dlm_option_dict():
"""
Get dlm config option dictionary
"""
out = get_stdout_or_raise_error("dlm_tool dump_config")
    return dict(re.findall(r"(\w+)=(\w+)", out))
def set_dlm_option(**kargs):
"""
Set dlm option
"""
dlm_option_dict = get_dlm_option_dict()
for option, value in kargs.items():
if option not in dlm_option_dict:
raise ValueError('"{}" is not dlm config option'.format(option))
if dlm_option_dict[option] != value:
get_stdout_or_raise_error('dlm_tool set_config "{}={}"'.format(option, value))
def is_dlm_running():
"""
Check if dlm ra controld is running
"""
from . import xmlutil
return xmlutil.CrmMonXmlParser.is_resource_started(constants.DLM_CONTROLD_RA)
def is_dlm_configured():
"""
    Check if DLM is configured
"""
from . import xmlutil
return xmlutil.CrmMonXmlParser.is_resource_configured(constants.DLM_CONTROLD_RA)
def is_quorate():
"""
    Check if the cluster is quorate
"""
out = get_stdout_or_raise_error("corosync-quorumtool -s", success_val_list=[0, 2])
res = re.search(r'Quorate:\s+(.*)', out)
if res:
return res.group(1) == "Yes"
else:
raise ValueError("Failed to get quorate status from corosync-quorumtool")
def is_2node_cluster_without_qdevice(removing=False):
"""
Check if current cluster has two nodes without qdevice
"""
current_num = len(list_cluster_nodes())
remove_num = 1 if removing else 0
qdevice_num = 1 if is_qdevice_configured() else 0
return (current_num - remove_num + qdevice_num) == 2
def get_pcmk_delay_max(two_node_without_qdevice=False):
"""
Get value of pcmk_delay_max
"""
if service_is_active("pacemaker.service") and two_node_without_qdevice:
return constants.PCMK_DELAY_MAX
return 0
def get_property(name):
"""
Get cluster properties
"""
cmd = "crm configure get_property " + name
rc, stdout, _ = get_stdout_stderr(cmd)
return stdout if rc == 0 else None
def set_property(**kwargs):
"""
Set cluster properties
"""
set_str = ""
for key, value in kwargs.items():
set_str += "{}={} ".format(key, value)
cmd = "crm configure property " + set_str.strip().replace('_', '-')
get_stdout_or_raise_error(cmd)
def check_no_quorum_policy_with_dlm():
"""
    Warn when no-quorum-policy is not set to "freeze" while DLM is configured
"""
if not is_dlm_configured():
return
res = get_property("no-quorum-policy")
if not res or res != "freeze":
logger.warning("The DLM cluster best practice suggests to set the cluster property \"no-quorum-policy=freeze\"")
def set_property_conditionally(property_name, value_from_calculation):
"""
    Set the cluster property if the calculated value is larger than the current cib value
"""
_value = get_property(property_name)
value_from_cib = int(_value.strip('s')) if _value else 0
if value_from_cib < value_from_calculation:
cmd = "crm configure property {}={}".format(property_name, value_from_calculation)
get_stdout_or_raise_error(cmd)
def get_systemd_timeout_start_in_sec(time_res):
"""
    Get the TimeoutStartUSec value in seconds
    The original format looks like: 1min 30s
"""
    res_seconds = re.search(r"(\d+)s", time_res)
start_timeout = int(res_seconds.group(1)) if res_seconds else 0
    res_min = re.search(r"(\d+)min", time_res)
start_timeout += 60 * int(res_min.group(1)) if res_min else 0
return start_timeout
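# Worked example added as a sketch (not in the original module): for the
# systemd value "1min 30s" the seconds part contributes 30 and the minutes
# part contributes 60, so 90 is returned.
def _example_timeout_start_parsing():
    assert get_systemd_timeout_start_in_sec("1min 30s") == 90
    assert get_systemd_timeout_start_in_sec("45s") == 45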
def is_ocf_1_1_cib_schema_detected():
"""
    Only turn on the ocf_1_1 feature if the cib schema version is pacemaker-3.7 or above
"""
from .cibconfig import cib_factory
return is_larger_than_min_version(cib_factory.get_schema(), constants.SCHEMA_MIN_VER_SUPPORT_OCF_1_1)
def handle_role_for_ocf_1_1(value, name='role'):
"""
* Convert role from Promoted/Unpromoted to Master/Slave if schema doesn't support OCF 1.1
* Convert role from Master/Slave to Promoted/Unpromoted if ocf1.1 cib schema detected and OCF_1_1_SUPPORT is yes
"""
role_names = ["role", "target-role"]
downgrade_dict = {"Promoted": "Master", "Unpromoted": "Slave"}
upgrade_dict = {v: k for k, v in downgrade_dict.items()}
if name not in role_names:
return value
if value in downgrade_dict and not is_ocf_1_1_cib_schema_detected():
logger.warning('Convert "%s" to "%s" since the current schema version is old and not upgraded yet. Please consider "%s"', value, downgrade_dict[value], constants.CIB_UPGRADE)
return downgrade_dict[value]
if value in upgrade_dict and is_ocf_1_1_cib_schema_detected() and config.core.OCF_1_1_SUPPORT:
logger.info('Convert deprecated "%s" to "%s"', value, upgrade_dict[value])
return upgrade_dict[value]
return value
def diff_and_patch(orig_cib_str, current_cib_str):
"""
Use crm_diff to generate patch, then apply
"""
# In cibconfig.py, _patch_cib method doesn't include status section
    # so this generic helper is provided here to cover the common cases
cmd = "crm_diff -u -O '{}' -N '{}'".format(orig_cib_str, current_cib_str)
rc, cib_diff, err = get_stdout_stderr(cmd)
if rc == 0: # no difference
return True
if err:
logger.error("Failed to run crm_diff: %s", err)
return False
logger.debug("Diff: %s", cib_diff)
rc = pipe_string("cibadmin -p -P --force", cib_diff)
if rc != 0:
logger.error("Failed to patch")
return False
return True
# vim:ts=4:sw=4:et:
| ClusterLabs/crmsh | crmsh/utils.py | Python | gpl-2.0 | 85,016 |
"""
A fake DB-API 2 driver.
"""
# DB names used to trigger certain behaviours.
INVALID_DB = 'invalid-db'
INVALID_CURSOR = 'invalid-cursor'
HAPPY_OUT = 'happy-out'
apilevel = '2.0'
threadsafety = 2
paramstyle = 'qmark'
def connect(database):
return Connection(database)
class Connection(object):
"""
A fake connection.
"""
def __init__(self, database):
super(Connection, self).__init__()
self.database = database
self.session = []
self.cursors = set()
self.executed = 0
if database == INVALID_DB:
self.valid = False
raise OperationalError()
self.valid = True
def close(self):
if not self.valid:
raise ProgrammingError("Cannot close a closed connection.")
self.valid = False
for cursor in self.cursors:
cursor.close()
self.session.append('close')
if self.database == INVALID_DB:
raise OperationalError()
def commit(self):
self.session.append('commit')
def rollback(self):
self.session.append('rollback')
def cursor(self):
self.session.append('cursor')
if not self.valid:
raise InterfaceError()
return Cursor(self)
class Cursor(object):
"""
A fake cursor.
"""
def __init__(self, connection):
self.connection = connection
self.result = None
if connection.database == INVALID_CURSOR:
self.valid = False
raise OperationalError("You've tripped INVALID_CURSOR!")
connection.cursors.add(self)
self.valid = True
self.rowcount = -1
def close(self):
self.connection.session.append('cursor-close')
if not self.valid:
raise InterfaceError("Cursor is closed")
self.connection.cursors.remove(self)
self.valid = False
def execute(self, stmt, args=()):
if not self.valid or not self.connection.valid:
raise InterfaceError()
stmt = stmt.lstrip().lower()
# It's the ping!
if stmt == 'select 1':
return self
        stmt_type = stmt.split(' ', 1)[0]
        if stmt_type in ('select', 'update', 'insert', 'delete'):
            self.result = None if args == () else args
self.connection.session.append(stmt_type)
self.connection.executed += 1
else:
self.result = None
raise ProgrammingError()
def callproc(self, procname, args=()):
if not self.valid or not self.connection.valid:
raise InterfaceError()
self.result = None if len(args) == 0 else args
self.connection.session.append('proc:' + procname)
self.connection.executed += 1
def fetchone(self):
if not self.valid:
raise InterfaceError("Cursor is closed")
result = self.result
self.result = None
return result
def fetchall(self):
return ()
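def example_session():
    """
    A usage sketch added for illustration (not part of the original test
    double): HAPPY_OUT selects the "everything works" behaviour, and the
    statement and arguments below are made up.
    """
    conn = connect(HAPPY_OUT)
    cursor = conn.cursor()
    cursor.execute("select * from some_table", (42,))
    row = cursor.fetchone()  # -> (42,)
    cursor.close()
    conn.commit()
    conn.close()
    return row, conn.session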
class Warning(Exception):
pass
class Error(Exception):
pass
class InterfaceError(Error):
pass
class DatabaseError(Error):
pass
class DataError(DatabaseError):
pass
class OperationalError(DatabaseError):
pass
class IntegrityError(DatabaseError):
pass
class InternalError(DatabaseError):
pass
class ProgrammingError(DatabaseError):
pass
class NotSupportedError(DatabaseError):
pass
| kgaughan/dbkit | tests/fakedb.py | Python | mit | 3,435 |
#!/usr/bin/env python
from __future__ import division
import json
class TableReader(object):
def __init__(self, table_str):
self.table_str = table_str
@property
def table_id(self):
if not hasattr(self, '_table_id'):
self._table_id = self._parse_field('id')
return self._table_id
@property
def version(self):
if not hasattr(self, '_version'):
self._version = self._parse_field('format')
return self._version
@property
def url(self):
if not hasattr(self, '_url'):
self._url = self._parse_field('format_url')
return self._url
@property
def table_type(self):
if not hasattr(self, '_table_type'):
self._table_type = self._parse_field('type')
return self._table_type
@property
def generated_by(self):
if not hasattr(self, '_generated_by'):
self._generated_by = self._parse_field('generated_by')
return self._generated_by
@property
def date(self):
if not hasattr(self, '_date'):
self._date = self._parse_field('date')
return self._date
@property
def matrix_type(self):
if not hasattr(self, '_matrix_type'):
self._matrix_type = self._parse_field('matrix_type')
return self._matrix_type
@property
def matrix_element_type(self):
if not hasattr(self, '_matrix_element_type'):
self._matrix_element_type = self._parse_field('matrix_element_type')
return self._matrix_element_type
@property
def comment(self):
if not hasattr(self, '_comment'):
self._comment = self._parse_field('comment', required=False)
return self._comment
@property
def shape(self):
if not hasattr(self, '_shape'):
self._shape = self._parse_shape()
return self._shape
@property
def observation_ids(self):
if not hasattr(self, '_observation_ids'):
self._observation_ids = self._parse_ids('rows')
return self._observation_ids
@property
def sample_ids(self):
if not hasattr(self, '_sample_ids'):
self._sample_ids = self._parse_ids('columns')
return self._sample_ids
@property
def observation_metadata(self):
if not hasattr(self, '_observation_metadata'):
self._observation_metadata = self._parse_metadata('rows')
return self._observation_metadata
@property
def sample_metadata(self):
if not hasattr(self, '_sample_metadata'):
self._sample_metadata = self._parse_metadata('columns')
return self._sample_metadata
def data(self):
search_str = '"data": [['
start_idx = self.table_str.index(search_str) + len(search_str)
while True:
end_idx = self.table_str.index(']', start_idx)
data_strs = self.table_str[start_idx:end_idx].split(',')
assert len(data_strs) == 3
yield int(data_strs[0]), int(data_strs[1]), float(data_strs[2])
if self.table_str[end_idx + 1] == ',':
start_idx = end_idx + 3
else:
break
def _parse_field(self, field, required=True):
search_str = '"%s": "' % field
try:
start_idx = self.table_str.index(search_str) + len(search_str)
except ValueError:
if required:
raise ValueError("Missing required field '%s'." % field)
else:
return None
else:
end_idx = self.table_str.index('",', start_idx)
return self.table_str[start_idx:end_idx]
def _parse_shape(self):
search_str = '"shape": ['
start_idx = self.table_str.index(search_str) + len(search_str)
end_idx = self.table_str.index('],', start_idx)
dim_strs = self.table_str[start_idx:end_idx].split(', ')
assert len(dim_strs) == 2
return tuple(map(int, dim_strs))
def _parse_ids(self, axis):
search_str = '"%s": [{' % axis
start_idx = self.table_str.index(search_str) + len(search_str) - 2
end_idx = self.table_str.index('}]', start_idx) + 2
md_str = self.table_str[start_idx:end_idx]
ids = []
for e in json.loads(md_str):
ids.append(str(e['id']))
return ids
def _parse_metadata(self, axis):
search_str = '"%s": [{' % axis
start_idx = self.table_str.index(search_str) + len(search_str) - 2
end_idx = self.table_str.index('}]', start_idx) + 2
md_str = self.table_str[start_idx:end_idx]
md = []
for e in json.loads(md_str):
e_md = e['metadata']
if e_md is None:
return None
else:
                md.append(str(';'.join(e_md['taxonomy'])))
return md
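def example_table_usage():
    """
    A usage sketch added for illustration (not part of the original module),
    built around a minimal hand-written BIOM-like string; note the parser
    expects no space between the inner lists of the "data" section.
    """
    table_str = (
        '{"id": "Example Table", "shape": [2, 3], '
        '"rows": [{"id": "OTU1", "metadata": null}, '
        '{"id": "OTU2", "metadata": null}], '
        '"columns": [{"id": "S1", "metadata": null}, '
        '{"id": "S2", "metadata": null}, '
        '{"id": "S3", "metadata": null}], '
        '"data": [[0, 0, 5.0],[1, 2, 8.0]]}'
    )
    reader = TableReader(table_str)
    assert reader.shape == (2, 3)
    assert reader.observation_ids == ['OTU1', 'OTU2']
    assert reader.sample_ids == ['S1', 'S2', 'S3']
    return list(reader.data())  # -> [(0, 0, 5.0), (1, 2, 8.0)]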
| jairideout/protobiom | protobiom/convert.py | Python | bsd-3-clause | 4,907 |
from pylab import *
from numpy import *
##############################################################################################
print("Program Running...")
###Program Constants
mass = 1
K = 1
omegaD = 1
omegaD_0 = 1 #when this value is not zero, omegaD will be the multiple of omega0 by this amount - only valid if showOldPlot==1
vDamp = 0.00 #Dampening coeff
showOldPlot=1 #Value of 1 will simulate and display the oscillator without driving it
seconds = 15
totalTime = seconds*2*pi #Total time to simulate
N = 1000*seconds # Number of simulation steps (more = more accuracy)
#basic status operations
calcForce = lambda x: (-1)*K*x
calcAccel = lambda F: F/mass
calcDeltaV = lambda aNow,delT: aNow*delT
calcDeltaX = lambda vNow,aNow,delT: vNow*delT + 0.5*aNow*delT**2
calcZ = lambda T: sin(omegaD*T)
calcDamp = lambda vNow: vNow*vDamp
##### Initial conditions #####
initX = 0
initV = 1
vNow = initV
xNow = initX
vzNow = initV
xzNow = initX
timeNow = 0
##### Time handling #####
delT = totalTime/N #Time for each time step
##### Arrays to store time points and associated x values #####
tSteps = []
xSteps = []
vSteps = []
zSteps = []
xzSteps = []
vzSteps = []
period = 0
minmax = 0 #lowest value in the graph, we need this to display the label out of the way but close enough
##### main "for" loop - do N steps if simulation #####
for step in range(1,N*showOldPlot):
aNow = calcAccel(calcForce(xNow)-calcDamp(vNow)) # calculate acceleration at point x
#xNow is relative to Z position, once the system starts moving.
xNow = xNow + calcDeltaX(vNow,aNow,delT) # update x (location)
vNow = vNow + calcDeltaV(aNow,delT) # update velocity
timeNow = timeNow+delT # update time
tSteps.append(timeNow)
xSteps.append(xNow) # store current location for plotting
vSteps.append(vNow)
if(len(tSteps)>1):
#list not empty
if(abs(xSteps[0]-xSteps[step-1])<(10**(-3))):
if(period==0):
#The current value repeats and a period hasn't been set
period = -1
elif(period==-1):
#The third repetition is the value - one off, not universal solution
period = timeNow
#if the multiple value is enabled for omegaD set it now
if(omegaD_0!=0 and showOldPlot==1 and period>0):
omegaD = omegaD_0*((2*pi)/period)
timeNow=0
###second for loop
###just for the sake of clarity - this one evaluates the actual system with a moving Z value
for step in range(1,N):
#xNow is relative to Z position, once the system starts moving.
zNow = calcZ(timeNow)
azNow = calcAccel(calcForce(xzNow-zNow)-calcDamp(vzNow)) #Calculate Acceleration with Damping
xzNow = xzNow+calcDeltaX(vzNow,azNow,delT) #calculate the actual position of X
vzNow = vzNow + calcDeltaV(azNow,delT)
timeNow = timeNow+delT # update time
if(showOldPlot!=1):
tSteps.append(timeNow) # store current time for plotting here since the first loop is not active
zSteps.append(zNow)
xzSteps.append(xzNow)
vzSteps.append(vzNow)
minmax = xzNow if xzNow<minmax else minmax
minmax = vzNow if vzNow<minmax else minmax
minmax = zNow if zNow<minmax else minmax
##### What is done here is to numerically calculate period and frequency ######
if(showOldPlot==1):
    print("Period: " + str(period))
    print("Frequency: " + str(1/period))
#plot(tSteps,xSteps)
#plot(tSteps,vSteps)
#show()
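# Analytic cross-check added as a sketch (not in the original script): the
# undriven, undamped oscillator has omega0 = sqrt(K/mass), so the numerically
# detected period above should approach 2*pi*sqrt(mass/K).
if showOldPlot == 1 and period > 0:
    analytic_period = 2*pi*sqrt(mass/K)
    print("Analytic period: " + str(analytic_period))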
plot(tSteps,zSteps,label="Z(t)")
plot(tSteps,xzSteps,label="X(t)")
plot(tSteps,vzSteps,label="V(t)")
xlabel('Time/s')
text(1,minmax*1.1,'Damping Coefficient: '+str(vDamp)+', m: '+str(mass)+', k: '+str(K)+', '+r'$\omega_d$: '+str(omegaD),fontsize=14)
legend()
show() | hrishioa/ISci | sh-w-damp2.py | Python | gpl-2.0 | 3,962 |
#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
import sys
BUILDBOT_DIR = os.path.join(os.path.dirname(__file__), '..')
sys.path.append(BUILDBOT_DIR)
import bb_run_bot
def RunBotProcesses(bot_process_map):
code = 0
for bot, proc in bot_process_map:
_, err = proc.communicate()
code |= proc.returncode
if proc.returncode != 0:
print 'Error running the bot script with id="%s"' % bot, err
return code
def main():
procs = [
(bot, subprocess.Popen(
[os.path.join(BUILDBOT_DIR, 'bb_run_bot.py'), '--bot-id', bot,
'--testing'], stdout=subprocess.PIPE, stderr=subprocess.PIPE))
for bot in bb_run_bot.GetBotStepMap()]
return RunBotProcesses(procs)
if __name__ == '__main__':
sys.exit(main())
| Teamxrtc/webrtc-streaming-node | third_party/webrtc/src/chromium/src/build/android/buildbot/tests/bb_run_bot_test.py | Python | mit | 922 |
# -*- coding: utf-8 -*-
# Copyright (c) 2014-2017 Andrea Baldan
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
""" Utility functions module """
import os
import re
import sys
import uuid
import time
import binascii
import random
import subprocess
import struct
import fcntl
try:
xrange
except NameError:
xrange = range
IS_PY2 = sys.version_info < (3, 0)
if IS_PY2:
import urllib2
else:
import urllib.request
try:
import dpkt
except ImportError:
print("[!] Missing module dpkt")
try:
import ConfigParser
except ImportError:
print("[!] Missing module ConfigParser")
from socket import socket, inet_aton, inet_ntoa, AF_INET, SOCK_DGRAM
from scapy.all import ARP, Ether, srp
# from configparser import ConfigParser
CONF = ConfigParser.ConfigParser()
CONF.read(r'creak/config')
# console colors
W = '\033[0m' # white (normal)
R = '\033[31m' # red
G = '\033[32m' # green
def print_counter(counter):
""" print counter in place """
sys.stdout.write("[+] Packets [ %d ]\r" % counter)
sys.stdout.flush()
def print_in_line(string):
""" print without carriage return """
sys.stdout.write(string)
sys.stdout.flush()
def string_to_binary(string):
""" convert string to binary format """
return binascii.unhexlify(string)
def binary_to_string(binary):
""" convert binary to string """
return binascii.hexlify(binary)
def set_ip_forward(fwd):
""" set ip_forward to fwd (0 or 1) """
CONF.read(r'./config')
if fwd != 1 and fwd != 0:
raise ValueError('[.] Value not valid for ip_forward, must be either 0 or 1')
else:
with open(CONF.get('services', 'IP_FORWARD'), 'w') as ip_f:
ip_f.write(str(fwd) + '\n')
def get_default_gateway_linux():
"""Read the default gateway directly from /proc."""
with open("/proc/net/route") as f_h:
for line in f_h:
fields = line.strip().split()
if fields[1] != '00000000' or not int(fields[3], 16) & 2:
continue
return inet_ntoa(struct.pack("<L", int(fields[2], 16)))
def get_mac_by_dev(dev):
""" try to retrieve MAC address associated with device """
try:
sock_fd = socket(AF_INET, SOCK_DGRAM)
info = fcntl.ioctl(sock_fd.fileno(), 0x8927, struct.pack('256s', bytes(dev[:15], 'utf-8')))
return ''.join(['%02x:' % ord(char) for char in info[18:24]])[:-1]
except (IOError, TypeError):
sock_fd.close()
mac_addr = hex(uuid.getnode()).replace('0x', '')
return ':'.join(mac_addr[i : i + 2] for i in range(0, 11, 2))
def get_mac_by_ip(ip_addr):
""" try to retrieve MAC address associated with ip """
try:
subprocess.Popen(["ping", "-c 1", ip_addr], stdout=subprocess.PIPE)
time.sleep(0.5)
with open("/proc/net/arp") as f_h:
for line in f_h:
fields = line.strip().split()
addr = [x for x in fields if re.match(r'^(\w+:){5}\w+$', x)]
if addr:
return addr[0]
except OSError:
pass
try:
subprocess.Popen(["ping", "-c 1", ip_addr], stdout=subprocess.PIPE)
time.sleep(0.5) # just to be sure of the ping response time
pid = subprocess.Popen(["arp", "-n", ip_addr], stdout=subprocess.PIPE)
arp_output = pid.communicate()[0]
except OSError:
pass
try:
mac = re.search(r"(([a-f\d]{1,2}\:){5}[a-f\d]{1,2})", arp_output).groups()[0]
except (IndexError, UnboundLocalError):
exit()
return parse_mac(mac)
def get_mac_by_ip_s(ip_address, delay):
"""try to retrieve MAC address associated with ip using Scapy library """
responses, _ = srp(Ether(dst="ff:ff:ff:ff:ff:ff")/ARP(pdst=ip_address),
timeout=delay, retry=10)
# return the MAC address from a response
for _, response in responses:
return response[Ether].src
return None
def parse_mac(address):
""" remove colon inside mac addres, if there's any """
return address.replace(':', '')
def mac_to_hex(mac):
""" convert string mac octets into base 16 int """
return [int(x, 16) for x in mac.split(':')]
def fake_mac_address(prefix, mode=None):
""" generate a fake MAC address """
if mode == 1:
prefix = [0x00, 0x16, 0x3e]
prefix += [(random.randint(0x00, 0x7f)) for _ in xrange(3)]
else:
prefix += [(random.randint(0x00, 0xff)) for _ in xrange(6 - len(prefix))]
return ':'.join('%02x' % x for x in prefix)
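# Usage sketch added for illustration (not part of the original module): mode
# 1 yields a Xen-style address with the 00:16:3e prefix, while passing the
# first octets of a known vendor keeps that prefix and randomises the rest.
def _example_fake_mac():
    xen_like = fake_mac_address([], 1) # e.g. 00:16:3e:4a:0b:7f
    vendor_like = fake_mac_address(mac_to_hex('00:0c:29:00:00:00')[:3])
    return xen_like, vendor_like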
def change_mac(dev, new_mac):
""" try to change the MAC address associated to the device """
if os.path.exists("/usr/bin/ip") or os.path.exists("/bin/ip"):
        # turn off device
        subprocess.check_call(["ip", "link", "set", "%s" % dev, "down"])
        # set mac
        subprocess.check_call(["ip", "link", "set", "%s" % dev, "address", "%s" % new_mac])
        # turn on device
        subprocess.check_call(["ip", "link", "set", "%s" % dev, "up"])
else:
# turn off device
subprocess.check_call(["ifconfig", "%s" % dev, "down"])
# set mac
subprocess.check_call(["ifconfig", "%s" % dev, "hw", "ether", "%s" % new_mac])
# turn on device
subprocess.check_call(["ifconfig", "%s" % dev, "up"])
# restart network
if CONF.get('restart', 'NETWORK_RESTART').startswith('systemctl'):
subprocess.check_call([CONF.get('restart', 'NETWORK_RESTART')])
else:
subprocess.check_call([CONF.get('restart', 'NETWORK_RESTART'), "restart"])
def eth_ntoa(buf):
""" convert a MAC address from binary packed bytes to string format """
mac_addr = ''
for intval in struct.unpack('BBBBBB', buf):
if intval > 15:
replacestr = '0x'
else:
replacestr = 'x'
mac_addr = ''.join([mac_addr, hex(intval).replace(replacestr, '')])
return mac_addr
def eth_aton(buf):
""" convert a MAC address from string to binary packed bytes format """
addr = ''
for i in xrange(0, len(buf), 2):
addr = ''.join([addr, struct.pack('B', int(buf[i: i + 2], 16))],)
return addr
def build_arp_packet(source_mac, src=None, dst=None):
""" forge arp packets used to poison and reset target connection """
arp = dpkt.arp.ARP()
packet = dpkt.ethernet.Ethernet()
if not src or not dst:
return False
arp.sha = string_to_binary(source_mac)
arp.spa = inet_aton(dst)
arp.tha = '\x00' * 6
arp.tpa = inet_aton(src)
arp.op = dpkt.arp.ARP_OP_REPLY
packet.src = string_to_binary(source_mac)
packet.dst = '\xff' * 6 # broadcast address
packet.data = arp
packet.type = dpkt.ethernet.ETH_TYPE_ARP
return packet
def get_manufacturer(manufacturer):
"""
get a list of MAC octets based on manufacturer fetching data from
http://anonsvn.wireshark.org/wireshark/trunk/manuf
"""
output, m_list = [], None
url_lib = None
if IS_PY2:
url_lib = urllib2
else:
url_lib = urllib.request
if not os.path.exists("./manufacturers"):
os.makedirs("./manufacturers")
if not os.path.isfile("./manufacturers/list.txt"):
        print("[+] No local cache data found for " + G + manufacturer + W
              + ", fetching from web..")
try:
urls = url_lib.urlopen(CONF.get('services', 'MANUFACTURER_URL'))
m_list = open("./manufacturers/list.txt", "w+")
for line in urls:
try:
mac = line.split()[0]
man = line.split()[1]
if re.search(manufacturer.lower(),
man.lower()) and len(mac) < 17 and len(mac) > 1:
# python2.x ignore byte string b''
if IS_PY2:
output.append(mac)
else:
output.append(mac.decode('utf-8'))
except IndexError:
pass
except:
            print("[!] Error occurred while trying to fetch data for manufacturer-based MAC address")
else:
macs = []
print("[+] Fetching data from local cache..")
conf = ConfigParser.ConfigParser()
conf.read("./manufacturers/list.txt")
try:
macs = conf.get(manufacturer.lower(), 'MAC').split(',')
if len(macs) > 0:
print("[+] Found mac octets from local cache for " + G + manufacturer + W
+ " device")
return macs
except:
urls = url_lib.urlopen(CONF.get('services', 'MANUFACTURER_URL'))
m_list = open("./manufacturers/list.txt", "a+")
for line in urls:
try:
mac = line.split()[0]
man = line.split()[1]
if re.search(manufacturer.lower(),
man.lower()) and len(mac) < 17 and len(mac) > 1:
# python2.x ignore byte string b''
if IS_PY2:
output.append(mac)
else:
output.append(mac.decode('utf-8'))
except IndexError:
pass
m_list.write("[" + manufacturer.lower() + "]\nMAC = ")
m_list.write(",".join(output))
m_list.write("\n")
m_list.close()
return output
def is_ipv4(ipstring):
""" check if the given string is an ipv4"""
match = re.match(r"^(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})$", ipstring)
return bool(match) and all(map(lambda n: 0 <= int(n) <= 255, match.groups()))
| codepr/creak | creak/utils.py | Python | gpl-3.0 | 10,336 |
# Copyright (c) 2015 SUSE Linux GmbH. All rights reserved.
#
# This file is part of kiwi.
#
# kiwi is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# kiwi is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with kiwi. If not, see <http://www.gnu.org/licenses/>
#
"""
usage: kiwi-ng system create -h | --help
kiwi-ng system create --root=<directory> --target-dir=<directory>
[--signing-key=<key-file>...]
kiwi-ng system create help
commands:
create
create a system image from the specified root directory
the root directory is the result of a system prepare
command
create help
show manual page for create command
options:
--root=<directory>
the path to the root directory, usually the result of
a former system prepare call
--target-dir=<directory>
the target directory to store the system image file(s)
--signing-key=<key-file>
includes the key-file as a trusted key for package manager validations
"""
import os
import logging
# project
from kiwi.tasks.base import CliTask
from kiwi.help import Help
from kiwi.builder import ImageBuilder
from kiwi.system.setup import SystemSetup
from kiwi.privileges import Privileges
from kiwi.path import Path
log = logging.getLogger('kiwi')
class SystemCreateTask(CliTask):
"""
Implements creation of system images
Attributes
* :attr:`manual`
Instance of Help
"""
def process(self):
"""
Create a system image from the specified root directory
the root directory is the result of a system prepare
command
"""
self.manual = Help()
if self._help():
return
Privileges.check_for_root_permissions()
abs_target_dir_path = os.path.abspath(
self.command_args['--target-dir']
)
abs_root_path = os.path.abspath(self.command_args['--root'])
self.load_xml_description(
abs_root_path
)
self.run_checks(
{'check_target_directory_not_in_shared_cache': [abs_root_path]}
)
log.info('Creating system image')
if not os.path.exists(abs_target_dir_path):
Path.create(abs_target_dir_path)
setup = SystemSetup(
xml_state=self.xml_state,
root_dir=abs_root_path
)
setup.call_image_script()
image_builder = ImageBuilder.new(
self.xml_state,
abs_target_dir_path,
abs_root_path,
custom_args={
'signing_keys': self.command_args['--signing-key'],
'xz_options': self.runtime_config.get_xz_options()
}
)
result = image_builder.create()
result.print_results()
result.dump(
os.sep.join([abs_target_dir_path, 'kiwi.result'])
)
def _help(self):
if self.command_args['help']:
self.manual.show('kiwi::system::create')
else:
return False
return self.manual
| SUSE/kiwi | kiwi/tasks/system_create.py | Python | gpl-3.0 | 3,502 |
# -*- coding: utf-8 -*-
__copyright__ = """
Copyright (C) 2008, Karl Hasselström <kha@treskal.com>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License version 2 as
published by the Free Software Foundation.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, see http://www.gnu.org/licenses/.
"""
from stgit.argparse import opt
from stgit.commands import common
from stgit.lib import log, transaction
help = 'Undo the last undo operation'
kind = 'stack'
usage = ['']
description = """
If the last command was an undo, reset the patch stack to the state it
had before the undo. Consecutive invocations of "stg redo" will undo
the effects of consecutive invocations of "stg undo".
It is an error to run "stg redo" if the last command was not an
undo."""
args = []
options = [
opt('-n', '--number', type = 'int', metavar = 'N', default = 1,
short = 'Undo the last N undos'),
opt('--hard', action = 'store_true',
short = 'Discard changes in your index/worktree')]
directory = common.DirectoryHasRepositoryLib()
def func(parser, options, args):
stack = directory.repository.current_stack
if options.number < 1:
raise common.CmdException('Bad number of undos to redo')
state = log.undo_state(stack, -options.number)
trans = transaction.StackTransaction(stack, 'redo %d' % options.number,
discard_changes = options.hard,
allow_bad_head = True)
try:
log.reset_stack(trans, stack.repository.default_iw, state)
except transaction.TransactionHalted:
pass
return trans.run(stack.repository.default_iw, allow_bad_head = True)
| vincele/stgit | stgit/commands/redo.py | Python | gpl-2.0 | 2,033 |
def init():
    """Initialise the module-level globals shared across giwyn."""
    global ARGS
global CONFIG_FILE_CONTENT
global CONFIG_FILE_NAME
global CONFIG_FILE_PATH
global GIT_OBJECTS
ARGS = []
CONFIG_FILE_CONTENT = []
CONFIG_FILE_NAME = ".giwyn"
CONFIG_FILE_PATH = ""
GIT_OBJECTS = []
| k0pernicus/giwyn | giwyn/lib/settings/settings.py | Python | gpl-3.0 | 261 |
# This file is part of Bika LIMS
#
# Copyright 2011-2016 by it's authors.
# Some rights reserved. See LICENSE.txt, AUTHORS.txt.
from AccessControl import ClassSecurityInfo
from Products.ATExtensions.Extensions.utils import makeDisplayList
from Products.ATExtensions.ateapi import RecordField, RecordsField
from Products.Archetypes.Registry import registerField
from Products.Archetypes.public import *
from Products.CMFCore.utils import getToolByName
from Products.validation import validation
from Products.validation.validators.RegexValidator import RegexValidator
import sys
from bika.lims import bikaMessageFactory as _
from bika.lims.utils import t
class CoordinateField(RecordField):
""" Stores angle in deg, min, sec, bearing """
security = ClassSecurityInfo()
_properties = RecordField._properties.copy()
_properties.update({
'type' : 'angle',
'subfields' : ('degrees', 'minutes', 'seconds', 'bearing'),
## 'required_subfields' : ('degrees', 'minutes', 'seconds', 'bearing'),
'subfield_labels':{'degrees':_('Degrees'),
'minutes':_('Minutes'),
'seconds':_('Seconds'),
'bearing':_('Bearing')},
'subfield_sizes': {'degrees':3,
'minutes':2,
'seconds':2,
'bearing':1},
'subfield_validators' : {'degrees':'coordinatevalidator',
'minutes':'coordinatevalidator',
'seconds':'coordinatevalidator',
'bearing':'coordinatevalidator',},
})
registerField(CoordinateField,
title = "Coordinate",
description = "Used for storing coordinates",
)
| rockfruit/bika.lims | bika/lims/browser/fields/coordinatefield.py | Python | agpl-3.0 | 1,812 |
import logging
from abc import ABCMeta, abstractmethod
from Tribler.community.market.core.message import TraderId
from Tribler.community.market.core.transaction import TransactionNumber, TransactionId, Transaction
class TransactionRepository(object):
"""A repository interface for transactions in the transaction manager"""
__metaclass__ = ABCMeta
def __init__(self):
"""
Do not use this class directly
Make a subclass of this class with a specific implementation for a storage backend
"""
super(TransactionRepository, self).__init__()
self._logger = logging.getLogger(self.__class__.__name__)
@abstractmethod
def find_all(self):
return
@abstractmethod
def find_by_id(self, transaction_id):
return
@abstractmethod
def add(self, transaction):
return
@abstractmethod
def update(self, transaction):
return
@abstractmethod
def delete_by_id(self, transaction_id):
return
@abstractmethod
def next_identity(self):
return
class MemoryTransactionRepository(TransactionRepository):
"""A repository for transactions in the transaction manager stored in memory"""
def __init__(self, mid):
"""
:param mid: Hex encoded version of the member id of this node
:type mid: str
"""
super(MemoryTransactionRepository, self).__init__()
self._logger.info("Memory transaction repository used")
self._mid = mid
self._next_id = 0 # Counter to keep track of the number of messages created by this repository
self._transactions = {}
def find_all(self):
"""
:rtype: [Transaction]
"""
return self._transactions.values()
def find_by_id(self, transaction_id):
"""
:param transaction_id: The transaction id to look for
:type transaction_id: TransactionId
:return: The transaction or null if it cannot be found
:rtype: Transaction
"""
assert isinstance(transaction_id, TransactionId), type(transaction_id)
self._logger.debug(
"Transaction with the id: " + str(transaction_id) + " was searched for in the transaction repository")
return self._transactions.get(transaction_id)
def add(self, transaction):
"""
:type transaction: Transaction
"""
assert isinstance(transaction, Transaction), type(transaction)
self._logger.debug(
"Transaction with the id: " + str(transaction.transaction_id) + " was added to the transaction repository")
self._transactions[transaction.transaction_id] = transaction
def update(self, transaction):
"""
:type transaction: Transaction
"""
assert isinstance(transaction, Transaction), type(transaction)
self._logger.debug("Transaction with the id: " + str(
transaction.transaction_id) + " was updated to the transaction repository")
self._transactions[transaction.transaction_id] = transaction
def delete_by_id(self, transaction_id):
"""
:type transaction_id: TransactionId
"""
assert isinstance(transaction_id, TransactionId), type(transaction_id)
self._logger.debug(
"Transaction with the id: " + str(transaction_id) + " was deleted from the transaction repository")
del self._transactions[transaction_id]
def next_identity(self):
"""
:rtype: TransactionId
"""
self._next_id += 1
return TransactionId(TraderId(self._mid), TransactionNumber(self._next_id))
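# Usage sketch added for illustration (not part of the original module): the
# member id below is a made-up 40-character hex placeholder.
def _example_memory_repository_usage():
    repository = MemoryTransactionRepository("deadbeef" * 5)
    transaction_id = repository.next_identity()
    # nothing has been stored under the freshly generated id yet
    assert repository.find_by_id(transaction_id) is None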
class DatabaseTransactionRepository(TransactionRepository):
"""A repository for transactions in the transaction manager stored in a database"""
def __init__(self, mid, persistence):
"""
:param mid: Hex encoded version of the member id of this node
:type mid: str
"""
super(DatabaseTransactionRepository, self).__init__()
self._logger.info("Database transaction repository used")
self._mid = mid
self.persistence = persistence
def find_all(self):
"""
:rtype: [Transaction]
"""
return self.persistence.get_all_transactions()
def find_by_id(self, transaction_id):
"""
:param transaction_id: The transaction id to look for
:type transaction_id: TransactionId
:return: The transaction or null if it cannot be found
:rtype: Transaction
"""
assert isinstance(transaction_id, TransactionId), type(transaction_id)
self._logger.debug("Transaction with the id: %s was searched for in the transaction repository",
str(transaction_id))
return self.persistence.get_transaction(transaction_id)
def add(self, transaction):
"""
:param transaction: The transaction to add to the database
:type transaction: Transaction
"""
self.persistence.add_transaction(transaction)
def update(self, transaction):
"""
:param transaction: The transaction to update
:type transaction: Transaction
"""
self.delete_by_id(transaction.transaction_id)
self.add(transaction)
def delete_by_id(self, transaction_id):
"""
:param transaction_id: The id of the transaction to remove
"""
self.persistence.delete_transaction(transaction_id)
def next_identity(self):
"""
:rtype: TransactionId
"""
return TransactionId(TraderId(self._mid), TransactionNumber(self.persistence.get_next_transaction_number()))
| vandenheuvel/tribler | Tribler/community/market/core/transaction_repository.py | Python | lgpl-3.0 | 5,726 |
#!/usr/bin/python
#
# This source file is part of appleseed.
# Visit http://appleseedhq.net/ for additional information and resources.
#
# This software is released under the MIT license.
#
# Copyright (c) 2010-2013 Francois Beaune, Jupiter Jazz Limited
# Copyright (c) 2014-2015 Francois Beaune, The appleseedhq Organization
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
from __future__ import print_function
import argparse
import datetime
import os
import subprocess
import sys
#--------------------------------------------------------------------------------------------------
# Constants.
#--------------------------------------------------------------------------------------------------
DEFAULT_TOOL_FILENAME = "appleseed.cli.exe" if os.name == "nt" else "appleseed.cli"
#--------------------------------------------------------------------------------------------------
# Utility functions.
#--------------------------------------------------------------------------------------------------
def safe_mkdir(dir):
if not os.path.exists(dir):
os.mkdir(dir)
def walk(directory, recursive):
if recursive:
for dirpath, dirnames, filenames in os.walk(directory):
yield dirpath, dirnames, filenames
else:
        yield next(os.walk(directory))
def should_skip(path):
return path.startswith("skip - ")
def format_duration(duration):
total_seconds = duration.total_seconds()
hours = int(total_seconds / 3600)
minutes = int((total_seconds % 3600) / 60)
seconds = total_seconds % 60
return "{0:02}:{1:02}:{2:09.6f}".format(hours, minutes, seconds)
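def example_format_duration():
    # Illustrative sketch (not part of the original script): three hours, two
    # minutes and one and a half seconds format as "03:02:01.500000".
    return format_duration(datetime.timedelta(hours=3, minutes=2, seconds=1.5))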
#--------------------------------------------------------------------------------------------------
# Render a given project file.
#--------------------------------------------------------------------------------------------------
def render_project_file(args, project_directory, project_filename):
project_filepath = os.path.join(project_directory, project_filename)
output_directory = os.path.join(project_directory, 'renders')
safe_mkdir(output_directory)
output_filename = os.path.splitext(project_filename)[0] + '.' + args.output_format
output_filepath = os.path.join(output_directory, output_filename)
log_filename = os.path.splitext(project_filename)[0] + '.txt'
log_filepath = os.path.join(output_directory, log_filename)
with open(log_filepath, "w", 0) as log_file:
print("rendering: {0}: ".format(project_filepath), end='')
command = '"{0}" -o "{1}" "{2}"'.format(args.tool_path, output_filepath, project_filepath)
if args.args:
command += ' {0}'.format(" ".join(args.args))
log_file.write("Command line:\n {0}\n\n".format(command))
start_time = datetime.datetime.now()
result = subprocess.call(command, stderr=log_file, shell=True)
end_time = datetime.datetime.now()
if result == 0:
print("{0} [ok]".format(format_duration(end_time - start_time)))
else:
print("[failed]")
#--------------------------------------------------------------------------------------------------
# Render all project files in a given directory (possibly recursively).
# Returns the number of rendered project files.
#--------------------------------------------------------------------------------------------------
def render_project_files(args):
rendered_file_count = 0
for dirpath, dirnames, filenames in walk(args.directory, args.recursive):
if should_skip(os.path.basename(dirpath)):
print("skipping: {0}...".format(dirpath))
continue
for filename in filenames:
if os.path.splitext(filename)[1] == '.appleseed':
if should_skip(filename):
print("skipping: {0}...".format(os.path.join(dirpath, filename)))
continue
render_project_file(args, dirpath, filename)
rendered_file_count += 1
return rendered_file_count
#--------------------------------------------------------------------------------------------------
# Entry point.
#--------------------------------------------------------------------------------------------------
def main():
parser = argparse.ArgumentParser(description="render multiple project files.")
parser.add_argument("-t", "--tool-path", metavar="tool-path",
help="set the path to the appleseed.cli tool")
parser.add_argument("-f", "--format", dest="output_format", metavar="FORMAT", default="exr",
help="set output format (e.g. png, exr)")
parser.add_argument("-r", "--recursive", action='store_true', dest="recursive",
help="scan the specified directory and all its subdirectories")
parser.add_argument("-p", "--parameter", dest="args", metavar="ARG", nargs="*",
help="forward additional arguments to appleseed")
parser.add_argument("directory", help="directory to scan")
args = parser.parse_args()
# If no tool path is provided, search for the tool in the same directory as this script.
if args.tool_path is None:
script_directory = os.path.dirname(os.path.realpath(__file__))
args.tool_path = os.path.join(script_directory, DEFAULT_TOOL_FILENAME)
print("setting tool path to {0}.".format(args.tool_path))
start_time = datetime.datetime.now()
rendered_file_count = render_project_files(args)
end_time = datetime.datetime.now()
print("rendered {0} project file(s) in {1}." \
.format(rendered_file_count, format_duration(end_time - start_time)))
if __name__ == '__main__':
main()
| Vertexwahn/appleseed | scripts/rendermany.py | Python | mit | 6,731 |
"""
Fetch whole SNMP table
++++++++++++++++++++++
Send a series of SNMP GETNEXT requests using the following options:
* with SNMPv1, community 'public'
* over IPv4/UDP
* to an Agent at demo.snmplabs.com:161
* for some columns of the IF-MIB::ifEntry table
* stop when response OIDs leave the scopes of initial OIDs
Functionally similar to:
| $ snmpwalk -v1 -c public demo.snmplabs.com IF-MIB::ifDescr IF-MIB::ifType IF-MIB::ifMtu IF-MIB::ifSpeed IF-MIB::ifPhysAddress IF-MIB::ifType
"""#
from pysnmp.hlapi import *
iterator = nextCmd(
SnmpEngine(),
CommunityData('public', mpModel=0),
UdpTransportTarget(('demo.snmplabs.com', 161)),
ContextData(),
ObjectType(ObjectIdentity('IF-MIB', 'ifDescr')),
ObjectType(ObjectIdentity('IF-MIB', 'ifType')),
ObjectType(ObjectIdentity('IF-MIB', 'ifMtu')),
ObjectType(ObjectIdentity('IF-MIB', 'ifSpeed')),
ObjectType(ObjectIdentity('IF-MIB', 'ifPhysAddress')),
ObjectType(ObjectIdentity('IF-MIB', 'ifType')),
lexicographicMode=False
)
for errorIndication, errorStatus, errorIndex, varBinds in iterator:
if errorIndication:
print(errorIndication)
break
elif errorStatus:
print('%s at %s' % (errorStatus.prettyPrint(),
errorIndex and varBinds[int(errorIndex)-1][0] or '?'))
break
else:
for varBind in varBinds:
print(' = '.join([x.prettyPrint() for x in varBind]))
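# Each loop iteration corresponds to one conceptual table row; the inner loop
# prints every variable binding as an "OID = value" pair, producing output
# along the lines of (illustrative, actual values depend on the agent):
#   IF-MIB::ifDescr.1 = eth0
#   IF-MIB::ifMtu.1 = 1500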
| etingof/pysnmp | examples/hlapi/v3arch/asyncore/sync/manager/cmdgen/pull-whole-snmp-table.py | Python | bsd-2-clause | 1,443 |
import functools
import os
import psutil
import pytest
import threading
import time
from twisted.internet import threads, reactor
#
# pytest customizations
#
# mark benchmark tests using their group names (thanks ionelmc! :)
def pytest_collection_modifyitems(items, config):
for item in items:
bench = item.get_marker("benchmark")
if bench and bench.kwargs.get('group'):
group = bench.kwargs['group']
marker = getattr(pytest.mark, 'benchmark_' + group)
item.add_marker(marker)
subdir = config.getoption('subdir')
if subdir == 'benchmarks':
# we have to manually setup the events server in order to be able to
# signal events. This is usually done by the enclosing application
# using soledad client (i.e. bitmask client).
from leap.common.events import server
server.ensure_server()
#
# benchmark fixtures
#
@pytest.fixture()
def txbenchmark(monitored_benchmark):
def blockOnThread(*args, **kwargs):
return threads.deferToThread(
monitored_benchmark, threads.blockingCallFromThread,
reactor, *args, **kwargs)
return blockOnThread
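# A benchmark test would consume the fixture above roughly as follows
# (hypothetical test code; 'client.sync' stands in for whatever
# Deferred-returning callable is being measured):
#
#   def test_sync(txbenchmark, client):
#       yield txbenchmark(client.sync)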
@pytest.fixture()
def txbenchmark_with_setup(monitored_benchmark_with_setup):
def blockOnThreadWithSetup(setup, f, *args, **kwargs):
def blocking_runner(*args, **kwargs):
return threads.blockingCallFromThread(reactor, f, *args, **kwargs)
def blocking_setup():
args = threads.blockingCallFromThread(reactor, setup)
try:
return tuple(arg for arg in args), {}
except TypeError:
return ((args,), {}) if args else None
def bench():
return monitored_benchmark_with_setup(
blocking_runner, setup=blocking_setup,
rounds=4, warmup_rounds=1, iterations=1,
args=args, kwargs=kwargs)
return threads.deferToThread(bench)
return blockOnThreadWithSetup
#
# resource monitoring
#
def _mean(l):
return float(sum(l)) / len(l)
def _std(l):
if len(l) <= 1:
return 0
mean = _mean(l)
squares = [(x - mean) ** 2 for x in l]
return (sum(squares) / (len(l) - 1)) ** 0.5
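# Quick sanity check of the helpers above (illustrative): _std is the sample
# standard deviation (n - 1 denominator), so:
#   >>> _mean([1, 2, 3])
#   2.0
#   >>> _std([1, 2, 3])
#   1.0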
class ResourceWatcher(threading.Thread):
sampling_interval = 0.1
def __init__(self, watch_memory):
threading.Thread.__init__(self)
self.process = psutil.Process(os.getpid())
self.running = False
# monitored resources
self.cpu_percent = None
self.watch_memory = watch_memory
self.memory_samples = []
self.memory_percent = None
def run(self):
self.running = True
self.process.cpu_percent()
# decide how long to sleep based on need to sample memory
        sleep = self.sampling_interval if self.watch_memory else 1
while self.running:
if self.watch_memory:
sample = self.process.memory_percent(memtype='rss')
self.memory_samples.append(sample)
time.sleep(sleep)
def stop(self):
self.running = False
self.join()
# save cpu usage info
self.cpu_percent = self.process.cpu_percent()
# save memory usage info
if self.watch_memory:
stats = {
'max': max(self.memory_samples),
'min': min(self.memory_samples),
'mean': _mean(self.memory_samples),
'std': _std(self.memory_samples),
}
memory_percent = {
'sampling_interval': self.sampling_interval,
'samples': self.memory_samples,
'stats': stats,
}
self.memory_percent = memory_percent
def _monitored_benchmark(benchmark_fixture, benchmark_function, request,
*args, **kwargs):
# setup resource monitoring
watch_memory = _watch_memory(request)
watcher = ResourceWatcher(watch_memory)
watcher.start()
# run benchmarking function
benchmark_function(*args, **kwargs)
# store results
watcher.stop()
benchmark_fixture.extra_info.update({
'cpu_percent': watcher.cpu_percent
})
if watch_memory:
benchmark_fixture.extra_info.update({
'memory_percent': watcher.memory_percent,
})
# add docstring info
if request.scope == 'function':
fun = request.function
doc = fun.__doc__ or ''
benchmark_fixture.extra_info.update({'doc': doc.strip()})
def _watch_memory(request):
return request.config.getoption('--watch-memory')
@pytest.fixture
def monitored_benchmark(benchmark, request):
return functools.partial(
_monitored_benchmark, benchmark, benchmark, request)
@pytest.fixture
def monitored_benchmark_with_setup(benchmark, request, *args, **kwargs):
return functools.partial(
_monitored_benchmark, benchmark, benchmark.pedantic, request,
*args, **kwargs)
| leapcode/soledad | tests/benchmarks/conftest.py | Python | gpl-3.0 | 5,004 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-01-23 20:31
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import django.db.models.manager
import django.utils.timezone
import mptt.fields
class Migration(migrations.Migration):
initial = True
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
('users', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Comment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('author_name', models.CharField(max_length=12)),
('timestamp', models.DateTimeField(default=django.utils.timezone.now)),
('ups', models.IntegerField(default=0)),
('downs', models.IntegerField(default=0)),
('score', models.IntegerField(default=0)),
('raw_comment', models.TextField(blank=True)),
('html_comment', models.TextField(blank=True)),
('lft', models.PositiveIntegerField(db_index=True, editable=False)),
('rght', models.PositiveIntegerField(db_index=True, editable=False)),
('tree_id', models.PositiveIntegerField(db_index=True, editable=False)),
('level', models.PositiveIntegerField(db_index=True, editable=False)),
('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='users.RedditUser')),
('parent', mptt.fields.TreeForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='children', to='reddit.Comment')),
],
options={
'abstract': False,
},
managers=[
('_default_manager', django.db.models.manager.Manager()),
],
),
migrations.CreateModel(
name='Submission',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('author_name', models.CharField(max_length=12)),
('title', models.CharField(max_length=250)),
('url', models.URLField(blank=True, null=True)),
('text', models.TextField(blank=True, max_length=5000)),
('text_html', models.TextField(blank=True)),
('ups', models.IntegerField(default=0)),
('downs', models.IntegerField(default=0)),
('score', models.IntegerField(default=0)),
('timestamp', models.DateTimeField(default=django.utils.timezone.now)),
('comment_count', models.IntegerField(default=0)),
('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='users.RedditUser')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='vote',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('vote_object_id', models.PositiveIntegerField()),
('value', models.IntegerField(default=0)),
('submission', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='reddit.Submission')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='users.RedditUser')),
('vote_object_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType')),
],
),
migrations.AddField(
model_name='comment',
name='submission',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='reddit.Submission'),
),
]
| Nikola-K/django_reddit | reddit/migrations/0001_initial.py | Python | apache-2.0 | 3,960 |
# Copyright (C) 2016 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import sys
import time
import unittest
from fabric.api import local
import nose
from lib.noseplugin import OptionParser, parser_option
from lib import base
from lib.base import BGP_FSM_ESTABLISHED
from lib.gobgp import GoBGPContainer
class GoBGPTestBase(unittest.TestCase):
wait_per_retry = 5
retry_limit = 15
@classmethod
def setUpClass(cls):
gobgp_ctn_image_name = parser_option.gobgp_image
base.TEST_PREFIX = parser_option.test_prefix
g1 = GoBGPContainer(name='g1', asn=65000, router_id='192.168.0.1',
ctn_image_name=gobgp_ctn_image_name,
log_level=parser_option.gobgp_log_level)
g2 = GoBGPContainer(name='g2', asn=65001, router_id='192.168.0.2',
ctn_image_name=gobgp_ctn_image_name,
log_level=parser_option.gobgp_log_level)
g3 = GoBGPContainer(name='g3', asn=65002, router_id='192.168.0.3',
ctn_image_name=gobgp_ctn_image_name,
log_level=parser_option.gobgp_log_level)
g4 = GoBGPContainer(name='g4', asn=65003, router_id='192.168.0.4',
ctn_image_name=gobgp_ctn_image_name,
log_level=parser_option.gobgp_log_level)
ctns = [g1, g2, g3, g4]
# advertise a route from route-server-clients
cls.clients = {}
for cli in [g2, g3, g4]:
cls.clients[cli.name] = cli
initial_wait_time = max(ctn.run() for ctn in ctns)
time.sleep(initial_wait_time)
for cli in cls.clients.itervalues():
g1.add_peer(cli, is_rs_client=True, passwd='passwd', passive=True, prefix_limit=10)
cli.add_peer(g1, passwd='passwd')
cls.gobgp = g1
# test each neighbor state is turned establish
def test_01_neighbor_established(self):
for cli in self.clients.itervalues():
self.gobgp.wait_for(expected_state=BGP_FSM_ESTABLISHED, peer=cli)
def test_02_softresetin_test1(self):
g1 = self.gobgp
g2 = self.clients['g2']
g3 = self.clients['g3']
p1 = {'ip-prefix': '10.0.10.0/24'}
p2 = {'ip-prefix': '10.0.20.0/24'}
ps0 = {'prefix-set-name': 'ps0', 'prefix-list': [p1, p2]}
g1.set_prefix_set(ps0)
st0 = {'conditions': {'match-prefix-set': {'prefix-set': 'ps0'}},
'actions': {'route-disposition': 'accept-route'}}
pol0 = {'name': 'pol0', 'statements': [st0]}
_filename = g1.add_policy(pol0, g3, 'import', 'reject')
g3.add_route('10.0.10.0/24')
g3.add_route('10.0.20.0/24')
time.sleep(1)
        num = g2.get_neighbor(g1)['state']['messages']['received'].get('update', 0)
ps0 = {'prefix-set-name': 'ps0', 'prefix-list': [p1]}
g1.set_prefix_set(ps0)
g1.create_config()
# this will cause g1 to do softresetin for all neighbors (policy is changed)
g1.reload_config()
time.sleep(1)
num2 = g2.get_neighbor(g1)['state']['messages']['received'].get('update', 0)
self.assertEqual((num + 1), num2)
g3.softreset(g1, type='out')
time.sleep(1)
num3 = g2.get_neighbor(g1)['state']['messages']['received'].get('update', 0)
self.assertEqual(num2, num3)
def test_03_softresetin_test2(self):
g1 = self.gobgp
g2 = self.clients['g2']
g2.add_route('10.0.10.0/24')
time.sleep(1)
num = g2.get_neighbor(g1)['state']['messages']['received'].get('update', 0)
time.sleep(3)
g1.local('gobgp n all softresetin')
time.sleep(3)
num1 = g2.get_neighbor(g1)['state']['messages']['received'].get('update', 0)
self.assertEqual(num, num1)
if __name__ == '__main__':
output = local("which docker 2>&1 > /dev/null ; echo $?", capture=True)
    if int(output) != 0:
print "docker not found"
sys.exit(1)
nose.main(argv=sys.argv, addplugins=[OptionParser()],
defaultTest=sys.argv[0])
| tamihiro/gobgp | test/scenario_test/route_server_softreset_test.py | Python | apache-2.0 | 4,831 |
# -*- coding: utf-8 -*-
#
# Picard, the next-generation MusicBrainz tagger
# Copyright (C) 2006 Lukáš Lalinský
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
from operator import itemgetter
from PyQt5 import QtCore
from picard import (PICARD_APP_NAME, PICARD_ORG_NAME, PICARD_VERSION,
version_to_string, version_from_string)
from picard.util import LockableObject
from picard import log
class ConfigUpgradeError(Exception):
pass
class ConfigSection(LockableObject):
"""Configuration section."""
def __init__(self, config, name):
super().__init__()
self.__qt_config = config
self.__config = {}
self.__name = name
self.__load_keys()
def __qt_keys(self):
prefix = self.__name + '/'
return filter(lambda key: key.startswith(prefix),
self.__qt_config.allKeys())
def __load_keys(self):
for key in self.__qt_keys():
try:
self.__config[key] = self.__qt_config.value(key)
except TypeError:
# Related to PICARD-1255, Unable to load the object into
# Python at all. Something weird with the way it is read and converted
# via the Qt C++ API. Simply ignore the key and it will be reset to
# default whenever the user opens Picard options
log.error('Unable to load config value: %s', key)
def __getitem__(self, name):
opt = Option.get(self.__name, name)
if opt is None:
return None
return self.value(name, opt, opt.default)
def __setitem__(self, name, value):
key = self.__name + '/' + name
self.lock_for_write()
try:
self.__config[key] = value
self.__qt_config.setValue(key, value)
finally:
self.unlock()
def __contains__(self, name):
key = self.__name + '/' + name
return key in self.__config
def remove(self, name):
key = self.__name + '/' + name
self.lock_for_write()
try:
if key in self.__config:
self.__config.pop(key)
self.__qt_config.remove(key)
finally:
self.unlock()
def raw_value(self, name):
"""Return an option value without any type conversion."""
value = self.__config[self.__name + '/' + name]
return value
def value(self, name, option_type, default=None):
"""Return an option value converted to the given Option type."""
key = self.__name + '/' + name
self.lock_for_read()
try:
if key in self.__config:
return option_type.convert(self.raw_value(name))
return default
except Exception:
return default
finally:
self.unlock()
class Config(QtCore.QSettings):
"""Configuration."""
def __init__(self):
pass
def __initialize(self):
"""Common initializer method for :meth:`from_app` and
:meth:`from_file`."""
# If there are no settings, copy existing settings from old format
# (registry on windows systems)
if not self.allKeys():
oldFormat = QtCore.QSettings(PICARD_ORG_NAME, PICARD_APP_NAME)
for k in oldFormat.allKeys():
self.setValue(k, oldFormat.value(k))
self.sync()
self.application = ConfigSection(self, "application")
self.setting = ConfigSection(self, "setting")
self.persist = ConfigSection(self, "persist")
self.profile = ConfigSection(self, "profile/default")
self.current_preset = "default"
TextOption("application", "version", '0.0.0dev0')
self._version = version_from_string(self.application["version"])
self._upgrade_hooks = dict()
@classmethod
def from_app(cls, parent):
"""Build a Config object using the default configuration file
location."""
this = cls()
QtCore.QSettings.__init__(this, QtCore.QSettings.IniFormat,
QtCore.QSettings.UserScope, PICARD_ORG_NAME,
PICARD_APP_NAME, parent)
this.__initialize()
return this
@classmethod
def from_file(cls, parent, filename):
"""Build a Config object using a user-provided configuration file
path."""
this = cls()
QtCore.QSettings.__init__(this, filename, QtCore.QSettings.IniFormat,
parent)
this.__initialize()
return this
def switchProfile(self, profilename):
"""Sets the current profile."""
key = "profile/%s" % (profilename,)
if self.contains(key):
self.profile.name = key
else:
raise KeyError("Unknown profile '%s'" % (profilename,))
def register_upgrade_hook(self, func, *args):
"""Register a function to upgrade from one config version to another"""
to_version = version_from_string(func.__name__)
assert to_version <= PICARD_VERSION, "%r > %r !!!" % (to_version, PICARD_VERSION)
self._upgrade_hooks[to_version] = {
'func': func,
'args': args,
'done': False
}
def run_upgrade_hooks(self, outputfunc=None):
"""Executes registered functions to upgrade config version to the latest"""
if not self._upgrade_hooks:
return
if self._version >= PICARD_VERSION:
if self._version > PICARD_VERSION:
print("Warning: config file %s was created by a more recent "
"version of Picard (current is %s)" % (
version_to_string(self._version),
version_to_string(PICARD_VERSION)
))
return
for version in sorted(self._upgrade_hooks):
hook = self._upgrade_hooks[version]
if self._version < version:
try:
if outputfunc and hook['func'].__doc__:
outputfunc("Config upgrade %s -> %s: %s" % (
version_to_string(self._version),
version_to_string(version),
hook['func'].__doc__.strip()))
hook['func'](*hook['args'])
except:
import traceback
raise ConfigUpgradeError(
"Error during config upgrade from version %s to %s "
"using %s():\n%s" % (
version_to_string(self._version),
version_to_string(version),
hook['func'].__name__,
traceback.format_exc()
))
else:
hook['done'] = True
self._version = version
self._write_version()
else:
# hook is not applicable, mark as done
hook['done'] = True
if all(map(itemgetter("done"), self._upgrade_hooks.values())):
# all hooks were executed, ensure config is marked with latest version
self._version = PICARD_VERSION
self._write_version()
def _write_version(self):
self.application["version"] = version_to_string(self._version)
self.sync()
class Option(QtCore.QObject):
"""Generic option."""
registry = {}
def __init__(self, section, name, default):
super().__init__()
self.section = section
self.name = name
self.default = default
if not hasattr(self, "convert"):
self.convert = type(default)
self.registry[(self.section, self.name)] = self
@classmethod
def get(cls, section, name):
return cls.registry.get((section, name))
class TextOption(Option):
convert = str
class BoolOption(Option):
@staticmethod
def convert(value):
# The QSettings IniFormat saves boolean values as the strings "true"
# and "false". Thus, explicit boolean and string comparisons are used
# to determine the value. NOTE: In PyQt >= 4.8.3, QSettings.value has
# an optional "type" parameter that avoids this. But we still support
# PyQt >= 4.5, so that is not used.
return value is True or value == "true"
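    # Illustrative behaviour of the conversion above:
    #   convert(True) -> True, convert("true") -> True,
    #   convert("false") -> False, convert(0) -> False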
class IntOption(Option):
convert = int
class FloatOption(Option):
convert = float
class ListOption(Option):
convert = list
class IntListOption(Option):
@staticmethod
def convert(values):
return list(map(int, values))
config = None
setting = None
persist = None
def _setup(app, filename=None):
global config, setting, persist
if filename is None:
config = Config.from_app(app)
else:
config = Config.from_file(app, filename)
setting = config.setting
persist = config.persist
| samj1912/picard | picard/config.py | Python | gpl-2.0 | 9,722 |
# -*- coding: utf-8 -*-
from kivy.app import App
from kivy.uix.floatlayout import FloatLayout
from kivy.uix.relativelayout import RelativeLayout
from kivy.uix.gridlayout import GridLayout
from kivy.uix.scatterlayout import ScatterLayout
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.label import Label
from kivy.graphics import Color, Rectangle
from kivy.lang import Builder
from kivy.uix.button import Button
from kivy.properties import ObjectProperty, NumericProperty, ListProperty
from kivy.clock import Clock
from kivy.factory import Factory
import time
import datetime
Builder.load_string("""
<ColoredGridLayout@GridLayout>:
size_hint: None, 1
size: self.height, self.height
bcolor: 1, 1, 1, 1
#pos_hint: {'center': (.5, .5)}
canvas.before:
Color:
rgba: self.bcolor
Rectangle:
pos: self.pos
size: self.size
""")
class ColoredGridLayout(GridLayout):
bcolor = ListProperty([1,1,1,1])
class VerboseClock(ColoredGridLayout):
test_mode = False
test_mode_hour = NumericProperty(11)
test_mode_minute = NumericProperty(53)
color_inactive = ListProperty([ [0.3, 0.3, 0.3, 1.0], [0.0, 0.3, 0.3, 1.0] ])
color_highlighted = ListProperty([ [0.0, 1.0, 1.0, 1.0], [1.0, 0.0, 0.0, 1.0] ])
color_background = ListProperty([ [0.0, 0.0, 0.0, 1.0], [0.0, 0.1, 0.3, 1.0] ])
active_theme = NumericProperty(0)
update_event = None
has_focus = False
def on_touch_down( self, touch ):
if (touch.pos[0] > self.pos[0] + self.size[0]) or ( touch.pos[0] < self.pos[0]):
pass
else:
#print( 'INSIDE')
self.active_theme = ( self.active_theme + 1 ) % min( len(self.color_highlighted), len(self.color_inactive) )
def highlight_label(self, label):
label.color = self.color_highlighted[self.active_theme]
label.bold = True
def highlight(self, text):
str = ''
for buchstabe in self.woerter[text]:
str += buchstabe.text
self.highlight_label(buchstabe)
#print( str)
def update(self, arg):
time_str = time.strftime("%d %b %y %H:%M:%S", time.localtime())
print( 'VerboseClock.update() %s' % time_str)
if self.test_mode == True:
hour = self.test_mode_hour
minute = self.test_mode_minute
else:
hour = time.localtime()[3]
minute = time.localtime()[4]
#print( '%i:%02i' % (hour, minute))
# reset all
for wort in self.w_order:
for buchstabe in self.woerter[wort]:
buchstabe.color = self.color_inactive[self.active_theme]
buchstabe.bold = False
self.highlight('ES')
self.highlight('IST')
if minute > 3 and minute < 8:
self.highlight('FUNF')
self.highlight('NACH')
elif minute >= 8 and minute < 12:
self.highlight('ZEHNM')
self.highlight('NACH')
elif minute >= 12 and minute < 18:
self.highlight('VIERTEL')
self.highlight('NACH')
elif minute >= 18 and minute < 22:
self.highlight('ZWANZIG')
self.highlight('NACH')
elif minute >= 22 and minute < 28:
self.highlight('FUNF')
self.highlight('VOR')
self.highlight('HALB')
hour += 1
elif minute >= 28 and minute < 32:
self.highlight('HALB')
hour += 1
elif minute >= 32 and minute < 38:
self.highlight('FUNF')
self.highlight('NACH')
self.highlight('HALB')
hour += 1
elif minute >= 38 and minute < 42:
self.highlight('ZWANZIG')
self.highlight('VOR')
hour += 1
elif minute >= 42 and minute < 48:
self.highlight('VIERTEL')
self.highlight('VOR')
hour += 1
elif minute >= 48 and minute < 52:
self.highlight('ZEHNM')
self.highlight('VOR')
hour += 1
elif minute >= 52 and minute < 58:
self.highlight('FUNF')
self.highlight('VOR')
hour += 1
elif minute >= 58:
hour += 1
hour = hour % 12
if ( hour == 5 ):
self.highlight_label(self.woerter['ELF'][2])
self.highlight('UNF')
elif ( hour == 9 ):
self.highlight_label(self.woerter['ZEHN'][3])
self.highlight('EUN')
else:
self.highlight(self.hours[hour])
self.highlight('UHR')
#print( 'self.has_focus = %s' % self.has_focus)
#print( 'self.update_event = %s' % self.update_event)
if self.update_event is not None:
self.update_event.cancel()
if ( self.has_focus is True ):
self.update_event = Clock.schedule_once(self.update, 2)
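    # Worked example of the branch logic above (illustrative): with the default
    # test_mode_hour=11 and test_mode_minute=53, the "52 <= minute < 58" branch
    # highlights FUNF and VOR and increments the hour, so the clock lights up
    # "ES IST FÜNF VOR ZWÖLF" (plus UHR, which this implementation always
    # highlights).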
def __init__(self, **kwargs):
super(VerboseClock, self).__init__(**kwargs)
self.cols = 11
self.row_force_default=True
self.row_default_height=40
self.row_default_width=40
layout = self
self.hours = ['ZWOLF', 'EINS', 'ZWEI', 'DREI', 'VIER', 'FUENF', 'SECHS', 'SIEBEN', 'ACHT', 'EUN', 'ZEHN', 'ELF']
self.w_order = ['ES', 'F1', 'IST', 'F2', 'FUNF', 'ZEHNM', 'ZWANZIG', 'DREIV', 'VIERTEL', 'VOR', 'F3', 'NACH', 'HALB', 'F4', 'ELF', 'UNF', 'EINS', 'F6', 'ZWEI', 'DREI', 'F7', 'VIER', 'SECHS', 'F8', 'ACHT', 'SIEBEN', 'ZWOLF', 'ZEHN', 'EUN', 'F9', 'UHR']
self.woerter = { 'ES': [Label(text='E'), Label(text='S')],
'F1': [Label(text='K')],
'IST': [Label(text='I'), Label(text='S'), Label(text='T')],
'F2': [Label(text='A')],
'FUNF': [Label(text='F'), Label(text='Ü'), Label(text='N'), Label(text='F')],
'ZEHNM': [Label(text='Z'), Label(text='E'), Label(text='H'), Label(text='N')],
'ZWANZIG': [Label(text='Z'), Label(text='W'), Label(text='A'), Label(text='N'), Label(text='Z'), Label(text='I'), Label(text='G')],
'DREIV': [Label(text='D'), Label(text='R'), Label(text='E'), Label(text='I')],
'VIERTEL': [Label(text='V'), Label(text='I'), Label(text='E'), Label(text='R'), Label(text='T'), Label(text='E'), Label(text='L')],
'VOR': [Label(text='V'), Label(text='O'), Label(text='R')],
'F3': [Label(text='F'), Label(text='U'), Label(text='N'), Label(text='K')],
'NACH': [Label(text='N'), Label(text='A'), Label(text='C'), Label(text='H')],
'HALB': [Label(text='H'), Label(text='A'), Label(text='L'), Label(text='B')],
'F4': [Label(text='A')],
'ELF': [Label(text='E'), Label(text='L'), Label(text='F')],
'UNF': [Label(text='Ü'), Label(text='N'), Label(text='F')],
'EINS': [Label(text='E'), Label(text='I'), Label(text='N'), Label(text='S')],
'F6': [Label(text='X'), Label(text='A'), Label(text='M')],
'ZWEI': [Label(text='Z'), Label(text='W'), Label(text='E'), Label(text='I')],
'DREI': [Label(text='D'), Label(text='R'), Label(text='E'), Label(text='I')],
'F7': [Label(text='P'), Label(text='M'), Label(text='J')],
'VIER': [Label(text='V'), Label(text='I'), Label(text='E'), Label(text='R')],
'SECHS': [Label(text='S'), Label(text='E'), Label(text='C'), Label(text='H'), Label(text='S')],
'F8': [Label(text='N'), Label(text='L')],
'ACHT': [Label(text='A'), Label(text='C'), Label(text='H'), Label(text='T')],
'SIEBEN': [Label(text='S'), Label(text='I'), Label(text='E'), Label(text='B'), Label(text='E'), Label(text='N')],
'ZWOLF': [Label(text='Z'), Label(text='W'), Label(text='Ö'), Label(text='L'), Label(text='F')],
'ZEHN': [Label(text='Z'), Label(text='E'), Label(text='H'), Label(text='N')],
'EUN': [Label(text='E'), Label(text='U'), Label(text='N')],
'F9': [Label(text='K')],
'UHR': [Label(text='U'), Label(text='H'), Label(text='R')]
}
for wort in self.w_order:
#print( wort)
for buchstabe in self.woerter[wort]:
#print( ' %s' % buchstabe.text)
buchstabe.size_x = 20
buchstabe.size_y = 20
buchstabe.font_size = '40sp'
buchstabe.color = self.color_inactive[self.active_theme]
layout.add_widget(buchstabe)
if ( self.has_focus is True ):
self.update_event = Clock.schedule_once(self.update, 2)
#Clock.schedule_interval(self.update, 1)
self.bind(size=self._update_rect, pos=self._update_rect, active_theme=self._update_rect)
def _update_rect(self, instance, value):
print( '_update_rect')
self.bcolor = self.color_background[self.active_theme]
self.update(0)
def hourPlus(self):
self.test_mode_hour = ( self.test_mode_hour + 1 ) % 12
self.update(0)
def hourMinus(self):
self.test_mode_hour = ( self.test_mode_hour - 1 ) % 12
self.update(0)
def minutePlus(self):
self.test_mode_minute = ( self.test_mode_minute + 1 ) % 60
self.update(0)
def minuteMinus(self):
self.test_mode_minute = ( self.test_mode_minute - 1 ) % 60
self.update(0)
def on_get_focus(self):
print( 'VerboseClock.on_get_focus() self %s' % self)
self.update(0)
#self.update_event = Clock.schedule_interval(homectrlTabbedPanel.doorCamItem.subwidget.update, 2)
#if self.update_event is not None:
# self.update_event = Clock.schedule_interval(self.update, 2)
self.update_event = Clock.schedule_once(self.update, 2)
self.has_focus = True
def on_release_focus(self):
print( 'VerboseClock.on_release_focus() self %s' % self)
self.has_focus = False
if self.update_event is not None:
print( 'self.update_event.cancel()')
self.update_event.cancel()
self.update_event = None
class MainApp(App):
def hourPlus(self, arg):
self.vClock.hourPlus()
self.hour_label.text = str(self.vClock.test_mode_hour)
def hourMinus(self, arg):
self.vClock.hourMinus()
self.hour_label.text = str(self.vClock.test_mode_hour)
def minutePlus(self, arg):
self.vClock.minutePlus()
self.min_label.text = "%02i" % self.vClock.test_mode_minute
def minuteMinus(self, arg):
self.vClock.minuteMinus()
self.min_label.text = "%02i" % self.vClock.test_mode_minute
def build(self):
self.title = 'Verbose Clock'
self.vClock = VerboseClock(center=self.parent.center)
l = BoxLayout(orientation='vertical')
l.add_widget(self.vClock)
self.vClock.on_get_focus()
if ( self.vClock.test_mode == True ):
buttons = BoxLayout(orientation='horizontal', size_hint=(0.3, 0.3))
self.hour_plus_button = Button(text='+', on_press=self.hourPlus )
self.hour_minus_button = Button(text='-', on_press=self.hourMinus )
self.hour_label = Label(text=str(self.vClock.test_mode_hour))
hour_layout = BoxLayout(orientation='vertical')
hour_layout.add_widget(self.hour_plus_button)
hour_layout.add_widget(self.hour_label)
hour_layout.add_widget(self.hour_minus_button)
self.min_plus_button = Button(text='+', on_press=self.minutePlus )
self.min_minus_button = Button(text='-', on_press=self.minuteMinus )
self.min_label = Label(text=str(self.vClock.test_mode_minute))
min_layout = BoxLayout(orientation='vertical')
min_layout.add_widget(self.min_plus_button)
min_layout.add_widget(self.min_label)
min_layout.add_widget(self.min_minus_button)
buttons.add_widget(hour_layout)
buttons.add_widget(min_layout)
l.add_widget(buttons)
return l
if __name__ == "__main__":
app = MainApp()
app.run() | ThomasHangstoerfer/pyHomeCtrl | verboseclock.py | Python | apache-2.0 | 12,867 |
__copyright__ = 'Copyright(c) Gordon Elliott 2017'
"""
"""
from a_tuin.db import RelationMap, TableMap, PagedQuery, InstanceQuery
from glod.model.statement_item import StatementItem, StatementItemDesignatedBalance
from glod.model.statement_item_collection import StatementItemCollection
from glod.model.references import statement_item__account
from glod.db.db_column_type_map import DB_COLUMN_TYPE_MAP
from glod.db.constants import SCHEMA_NAME
TableMap(StatementItem, SCHEMA_NAME, 'statement_item', DB_COLUMN_TYPE_MAP, RelationMap(
statement_item__account,
'account._id',
backref='statement_items',
lazy='joined'
))
class StatementItemInstanceQuery(InstanceQuery):
def __init__(self, session):
super().__init__(StatementItem, StatementItemCollection, session)
class StatementItemQuery(PagedQuery):
def __init__(self, session):
super().__init__(StatementItem, StatementItemCollection, session)
| gordon-elliott/glod | src/glod/db/statement_item.py | Python | mit | 944 |
from __future__ import absolute_import
from .hedwig import start_consumer
| ofpiyush/hedwig-py | tests/djangotest/djangotest/__init__.py | Python | mit | 74 |
#!/usr/bin/env python3
# Copyright 2018 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Disk utility functions for all VM scripts."""
import logging
from .common import AptGetInstall
try:
import guestfs
except ImportError:
AptGetInstall(['python3-guestfs'])
import guestfs
_STATUS_PREFIX = 'TranslateStatus: '
def log_key_value(key, value):
"""
Prints the key and value using the format defined by
Daisy's serial output inspector.
The format is defined in `daisy/step_wait_for_instances_signal.go`
"""
print(_STATUS_PREFIX + "<serial-output key:'%s' value:'%s'>" % (key, value))
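# For example (derived from the format string above), log_key_value('status', 'done')
# prints:
#   TranslateStatus: <serial-output key:'status' value:'done'>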
def MountDisk(disk) -> guestfs.GuestFS:
# All new Python code should pass python_return_dict=True
# to the constructor. It indicates that your program wants
# to receive Python dicts for methods in the API that return
# hashtables.
g = guestfs.GuestFS(python_return_dict=True)
# Set the product name as cloud-init checks it to confirm this is a VM in GCE
g.config('-smbios',
'type=1,product=Google Compute Engine,manufacturer=Google')
g.set_memsize(4096)
# Enable network
g.set_network(True)
# Attach the disk image to libguestfs.
g.add_drive_opts(disk)
# Run the libguestfs back-end.
g.launch()
# Ask libguestfs to inspect for operating systems.
roots = g.inspect_os()
if len(roots) == 0:
raise Exception('inspect_vm: no operating systems found')
# Sort keys by length, shortest first, so that we end up
# mounting the filesystems in the correct order.
mps = g.inspect_get_mountpoints(roots[0])
g.gcp_image_distro = g.inspect_get_distro(roots[0])
g.gcp_image_major = str(g.inspect_get_major_version(roots[0]))
g.gcp_image_minor = str(g.inspect_get_minor_version(roots[0]))
for device in sorted(list(mps.keys()), key=len):
try:
g.mount(mps[device], device)
except RuntimeError as msg:
logging.warn('%s (ignored)' % msg)
return g
def UnmountDisk(g):
try:
g.umount_all()
except Exception as e:
logging.debug(str(e))
logging.warn('Unmount failed. Continuing anyway.')
| adjackura/compute-image-tools | daisy_workflows/linux_common/utils/diskutils.py | Python | apache-2.0 | 2,614 |
import dsz
import os
import re
from task import *
class Audit(Task, ):
def __init__(self, file):
Task.__init__(self, file, 'Audit')
def CreateCommandLine(self):
return ['audit -status']
TaskingOptions['_auditTasking'] = Audit | DarthMaulware/EquationGroupLeaks | Leak #5 - Lost In Translation/windows/Resources/Tasking/PyScripts/Lib/tasking/audit.py | Python | unlicense | 253 |
import cv2
# Load the Haar-like feature cascade classifiers for faces and eyes
face_cascade = cv2.CascadeClassifier('data/haarcascades/haarcascade_frontalface_default.xml')
eye_cascade = cv2.CascadeClassifier('data/haarcascades/haarcascade_eye.xml')
# Load the input image
img = cv2.imread('face.jpg')
# Convert the image to grayscale
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
# Detect faces
faces = face_cascade.detectMultiScale(gray)
for (x,y,w,h) in faces:
    # Draw a rectangle around each detected face
cv2.rectangle(img,(x,y),(x+w,y+h),(255,0,0),2)
    # Face region of interest (grayscale)
roi_gray = gray[y:y+h, x:x+w]
    # Face region of interest (color)
roi_color = img[y:y+h, x:x+w]
    # Detect eyes within the face region
eyes = eye_cascade.detectMultiScale(roi_gray)
for (ex,ey,ew,eh) in eyes:
        # Draw a rectangle around each detected eye
cv2.rectangle(roi_color,(ex,ey),(ex+ew,ey+eh),(0,255,0),2)
# Show the result
cv2.imshow('img',img)
# Wait for any key press, then close
cv2.waitKey(0)
cv2.destroyAllWindows()
| yukihirai0505/tutorial-program | programming/python/opencv/sample/main.py | Python | mit | 1,042 |
class Allergies:
_allergies = [
"eggs",
"peanuts",
"shellfish",
"strawberries",
"tomatoes",
"chocolate",
"pollen",
"cats"
]
def __init__(self, score):
self.score = score
def is_allergic_to(self, allergy):
return self.score & 1 << self._allergies.index(allergy)
@property
def list(self):
return [allergy for allergy in self._allergies
if self.is_allergic_to(allergy)]
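# Illustrative usage (not part of the original exercise file): the score is a
# bitmask in which bit i corresponds to _allergies[i], so a score of 5 (0b101)
# covers eggs and shellfish:
#   >>> Allergies(5).list
#   ['eggs', 'shellfish']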
| ZacharyRSmith/xpython | allergies/example.py | Python | mit | 499 |
# Copyright 2017 The TensorFlow Authors All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Policy neural network.
Implements network which takes in input and produces actions
and log probabilities given a sampling distribution parameterization.
"""
import tensorflow as tf
import numpy as np
class Policy(object):
def __init__(self, env_spec, internal_dim,
fixed_std=True, recurrent=True,
input_prev_actions=True):
self.env_spec = env_spec
self.internal_dim = internal_dim
self.rnn_state_dim = self.internal_dim
self.fixed_std = fixed_std
self.recurrent = recurrent
self.input_prev_actions = input_prev_actions
self.matrix_init = tf.truncated_normal_initializer(stddev=0.01)
self.vector_init = tf.constant_initializer(0.0)
@property
def input_dim(self):
return (self.env_spec.total_obs_dim +
self.env_spec.total_sampled_act_dim * self.input_prev_actions)
@property
def output_dim(self):
return self.env_spec.total_sampling_act_dim
def get_cell(self):
"""Get RNN cell."""
self.cell_input_dim = self.internal_dim // 2
cell = tf.contrib.rnn.LSTMCell(self.cell_input_dim,
state_is_tuple=False,
reuse=tf.get_variable_scope().reuse)
cell = tf.contrib.rnn.OutputProjectionWrapper(
cell, self.output_dim,
reuse=tf.get_variable_scope().reuse)
return cell
def core(self, obs, prev_internal_state, prev_actions):
"""Core neural network taking in inputs and outputting sampling
distribution parameters."""
batch_size = tf.shape(obs[0])[0]
if not self.recurrent:
prev_internal_state = tf.zeros([batch_size, self.rnn_state_dim])
cell = self.get_cell()
b = tf.get_variable('input_bias', [self.cell_input_dim],
initializer=self.vector_init)
cell_input = tf.nn.bias_add(tf.zeros([batch_size, self.cell_input_dim]), b)
for i, (obs_dim, obs_type) in enumerate(self.env_spec.obs_dims_and_types):
w = tf.get_variable('w_state%d' % i, [obs_dim, self.cell_input_dim],
initializer=self.matrix_init)
if self.env_spec.is_discrete(obs_type):
cell_input += tf.matmul(tf.one_hot(obs[i], obs_dim), w)
elif self.env_spec.is_box(obs_type):
cell_input += tf.matmul(obs[i], w)
else:
assert False
if self.input_prev_actions:
if self.env_spec.combine_actions: # TODO(ofir): clean this up
prev_action = prev_actions[0]
for i, action_dim in enumerate(self.env_spec.orig_act_dims):
act = tf.mod(prev_action, action_dim)
w = tf.get_variable('w_prev_action%d' % i, [action_dim, self.cell_input_dim],
initializer=self.matrix_init)
cell_input += tf.matmul(tf.one_hot(act, action_dim), w)
prev_action = tf.to_int32(prev_action / action_dim)
else:
for i, (act_dim, act_type) in enumerate(self.env_spec.act_dims_and_types):
w = tf.get_variable('w_prev_action%d' % i, [act_dim, self.cell_input_dim],
initializer=self.matrix_init)
if self.env_spec.is_discrete(act_type):
cell_input += tf.matmul(tf.one_hot(prev_actions[i], act_dim), w)
elif self.env_spec.is_box(act_type):
cell_input += tf.matmul(prev_actions[i], w)
else:
assert False
output, next_state = cell(cell_input, prev_internal_state)
return output, next_state
def sample_action(self, logits, sampling_dim,
act_dim, act_type, greedy=False):
"""Sample an action from a distribution."""
if self.env_spec.is_discrete(act_type):
if greedy:
act = tf.argmax(logits, 1)
else:
act = tf.reshape(tf.multinomial(logits, 1), [-1])
elif self.env_spec.is_box(act_type):
means = logits[:, :sampling_dim / 2]
std = logits[:, sampling_dim / 2:]
if greedy:
act = means
else:
batch_size = tf.shape(logits)[0]
act = means + std * tf.random_normal([batch_size, act_dim])
else:
assert False
return act
def entropy(self, logits,
sampling_dim, act_dim, act_type):
"""Calculate entropy of distribution."""
if self.env_spec.is_discrete(act_type):
entropy = tf.reduce_sum(
-tf.nn.softmax(logits) * tf.nn.log_softmax(logits), -1)
elif self.env_spec.is_box(act_type):
means = logits[:, :sampling_dim / 2]
std = logits[:, sampling_dim / 2:]
entropy = tf.reduce_sum(
0.5 * (1 + tf.log(2 * np.pi * tf.square(std))), -1)
else:
assert False
return entropy
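  # The continuous (box) branch above is the closed-form differential entropy
  # of a diagonal Gaussian, 0.5 * log(2 * pi * e * sigma^2) per dimension,
  # written equivalently as 0.5 * (1 + log(2 * pi * sigma^2)) and summed over
  # action dimensions.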
def self_kl(self, logits,
sampling_dim, act_dim, act_type):
"""Calculate KL of distribution with itself.
Used layer only for the gradients.
"""
if self.env_spec.is_discrete(act_type):
probs = tf.nn.softmax(logits)
log_probs = tf.nn.log_softmax(logits)
self_kl = tf.reduce_sum(
tf.stop_gradient(probs) *
(tf.stop_gradient(log_probs) - log_probs), -1)
elif self.env_spec.is_box(act_type):
means = logits[:, :sampling_dim / 2]
std = logits[:, sampling_dim / 2:]
my_means = tf.stop_gradient(means)
my_std = tf.stop_gradient(std)
self_kl = tf.reduce_sum(
tf.log(std / my_std) +
(tf.square(my_std) + tf.square(my_means - means)) /
(2.0 * tf.square(std)) - 0.5,
-1)
else:
assert False
return self_kl
def log_prob_action(self, action, logits,
sampling_dim, act_dim, act_type):
"""Calculate log-prob of action sampled from distribution."""
if self.env_spec.is_discrete(act_type):
act_log_prob = tf.reduce_sum(
tf.one_hot(action, act_dim) * tf.nn.log_softmax(logits), -1)
elif self.env_spec.is_box(act_type):
means = logits[:, :sampling_dim / 2]
std = logits[:, sampling_dim / 2:]
act_log_prob = (- 0.5 * tf.log(2 * np.pi * tf.square(std))
- 0.5 * tf.square(action - means) / tf.square(std))
act_log_prob = tf.reduce_sum(act_log_prob, -1)
else:
assert False
return act_log_prob
def sample_actions(self, output, actions=None, greedy=False):
"""Sample all actions given output of core network."""
sampled_actions = []
logits = []
log_probs = []
entropy = []
self_kl = []
start_idx = 0
for i, (act_dim, act_type) in enumerate(self.env_spec.act_dims_and_types):
sampling_dim = self.env_spec.sampling_dim(act_dim, act_type)
if self.fixed_std and self.env_spec.is_box(act_type):
act_logits = output[:, start_idx:start_idx + act_dim]
log_std = tf.get_variable('std%d' % i, [1, sampling_dim // 2])
# fix standard deviations to variable
act_logits = tf.concat(
[act_logits,
1e-6 + tf.exp(log_std) + 0 * act_logits], 1)
else:
act_logits = output[:, start_idx:start_idx + sampling_dim]
if actions is None:
act = self.sample_action(act_logits, sampling_dim,
act_dim, act_type,
greedy=greedy)
else:
act = actions[i]
ent = self.entropy(act_logits, sampling_dim, act_dim, act_type)
kl = self.self_kl(act_logits, sampling_dim, act_dim, act_type)
act_log_prob = self.log_prob_action(
act, act_logits,
sampling_dim, act_dim, act_type)
sampled_actions.append(act)
logits.append(act_logits)
log_probs.append(act_log_prob)
entropy.append(ent)
self_kl.append(kl)
start_idx += sampling_dim
assert start_idx == self.env_spec.total_sampling_act_dim
return sampled_actions, logits, log_probs, entropy, self_kl
def get_kl(self, my_logits, other_logits):
"""Calculate KL between one policy output and another."""
kl = []
for i, (act_dim, act_type) in enumerate(self.env_spec.act_dims_and_types):
sampling_dim = self.env_spec.sampling_dim(act_dim, act_type)
single_my_logits = my_logits[i]
single_other_logits = other_logits[i]
if self.env_spec.is_discrete(act_type):
my_probs = tf.nn.softmax(single_my_logits)
my_log_probs = tf.nn.log_softmax(single_my_logits)
other_log_probs = tf.nn.log_softmax(single_other_logits)
my_kl = tf.reduce_sum(my_probs * (my_log_probs - other_log_probs), -1)
elif self.env_spec.is_box(act_type):
my_means = single_my_logits[:, :sampling_dim / 2]
my_std = single_my_logits[:, sampling_dim / 2:]
other_means = single_other_logits[:, :sampling_dim / 2]
other_std = single_other_logits[:, sampling_dim / 2:]
my_kl = tf.reduce_sum(
tf.log(other_std / my_std) +
(tf.square(my_std) + tf.square(my_means - other_means)) /
(2.0 * tf.square(other_std)) - 0.5,
-1)
else:
assert False
kl.append(my_kl)
return kl
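  # The box branch above is the standard closed-form KL divergence between two
  # diagonal Gaussians,
  #   KL(N(m1, s1^2) || N(m2, s2^2)) =
  #     log(s2 / s1) + (s1^2 + (m1 - m2)^2) / (2 * s2^2) - 1/2,
  # summed over action dimensions.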
def single_step(self, prev, cur, greedy=False):
"""Single RNN step. Equivalently, single-time-step sampled actions."""
prev_internal_state, prev_actions, _, _, _, _ = prev
obs, actions = cur # state observed and action taken at this time step
# feed into RNN cell
output, next_state = self.core(
obs, prev_internal_state, prev_actions)
# sample actions with values and log-probs
(actions, logits, log_probs,
entropy, self_kl) = self.sample_actions(
output, actions=actions, greedy=greedy)
return (next_state, tuple(actions), tuple(logits), tuple(log_probs),
tuple(entropy), tuple(self_kl))
def sample_step(self, obs, prev_internal_state, prev_actions, greedy=False):
"""Sample single step from policy."""
(next_state, sampled_actions, logits, log_probs,
entropies, self_kls) = self.single_step(
(prev_internal_state, prev_actions, None, None, None, None),
(obs, None), greedy=greedy)
return next_state, sampled_actions
def multi_step(self, all_obs, initial_state, all_actions):
"""Calculate log-probs and other calculations on batch of episodes."""
batch_size = tf.shape(initial_state)[0]
time_length = tf.shape(all_obs[0])[0]
initial_actions = [act[0] for act in all_actions]
all_actions = [tf.concat([act[1:], act[0:1]], 0)
for act in all_actions] # "final" action is dummy
(internal_states, _, logits, log_probs,
entropies, self_kls) = tf.scan(
self.single_step,
(all_obs, all_actions),
initializer=self.get_initializer(
batch_size, initial_state, initial_actions))
# remove "final" computations
log_probs = [log_prob[:-1] for log_prob in log_probs]
entropies = [entropy[:-1] for entropy in entropies]
self_kls = [self_kl[:-1] for self_kl in self_kls]
return internal_states, logits, log_probs, entropies, self_kls
def get_initializer(self, batch_size, initial_state, initial_actions):
"""Get initializer for RNN."""
logits_init = []
log_probs_init = []
for act_dim, act_type in self.env_spec.act_dims_and_types:
sampling_dim = self.env_spec.sampling_dim(act_dim, act_type)
logits_init.append(tf.zeros([batch_size, sampling_dim]))
log_probs_init.append(tf.zeros([batch_size]))
entropy_init = [tf.zeros([batch_size]) for _ in self.env_spec.act_dims]
self_kl_init = [tf.zeros([batch_size]) for _ in self.env_spec.act_dims]
return (initial_state,
tuple(initial_actions),
tuple(logits_init), tuple(log_probs_init),
tuple(entropy_init),
tuple(self_kl_init))
def calculate_kl(self, my_logits, other_logits):
"""Calculate KL between one policy and another on batch of episodes."""
batch_size = tf.shape(my_logits[0])[1]
time_length = tf.shape(my_logits[0])[0]
reshaped_my_logits = [
tf.reshape(my_logit, [batch_size * time_length, -1])
for my_logit in my_logits]
reshaped_other_logits = [
tf.reshape(other_logit, [batch_size * time_length, -1])
for other_logit in other_logits]
kl = self.get_kl(reshaped_my_logits, reshaped_other_logits)
kl = [tf.reshape(kkl, [time_length, batch_size])
for kkl in kl]
return kl
class MLPPolicy(Policy):
"""Non-recurrent policy."""
def get_cell(self):
self.cell_input_dim = self.internal_dim
def mlp(cell_input, prev_internal_state):
w1 = tf.get_variable('w1', [self.cell_input_dim, self.internal_dim])
b1 = tf.get_variable('b1', [self.internal_dim])
w2 = tf.get_variable('w2', [self.internal_dim, self.internal_dim])
b2 = tf.get_variable('b2', [self.internal_dim])
w3 = tf.get_variable('w3', [self.internal_dim, self.internal_dim])
b3 = tf.get_variable('b3', [self.internal_dim])
proj = tf.get_variable(
'proj', [self.internal_dim, self.output_dim])
hidden = cell_input
hidden = tf.tanh(tf.nn.bias_add(tf.matmul(hidden, w1), b1))
hidden = tf.tanh(tf.nn.bias_add(tf.matmul(hidden, w2), b2))
output = tf.matmul(hidden, proj)
return output, hidden
return mlp
def single_step(self, obs, actions, prev_actions, greedy=False):
"""Single step."""
batch_size = tf.shape(obs[0])[0]
prev_internal_state = tf.zeros([batch_size, self.internal_dim])
output, next_state = self.core(
obs, prev_internal_state, prev_actions)
# sample actions with values and log-probs
(actions, logits, log_probs,
entropy, self_kl) = self.sample_actions(
output, actions=actions, greedy=greedy)
return (next_state, tuple(actions), tuple(logits), tuple(log_probs),
tuple(entropy), tuple(self_kl))
def sample_step(self, obs, prev_internal_state, prev_actions, greedy=False):
"""Sample single step from policy."""
(next_state, sampled_actions, logits, log_probs,
entropies, self_kls) = self.single_step(obs, None, prev_actions,
greedy=greedy)
return next_state, sampled_actions
def multi_step(self, all_obs, initial_state, all_actions):
"""Calculate log-probs and other calculations on batch of episodes."""
batch_size = tf.shape(initial_state)[0]
time_length = tf.shape(all_obs[0])[0]
# first reshape inputs as a single batch
reshaped_obs = []
for obs, (obs_dim, obs_type) in zip(all_obs, self.env_spec.obs_dims_and_types):
if self.env_spec.is_discrete(obs_type):
reshaped_obs.append(tf.reshape(obs, [time_length * batch_size]))
elif self.env_spec.is_box(obs_type):
reshaped_obs.append(tf.reshape(obs, [time_length * batch_size, obs_dim]))
reshaped_act = []
reshaped_prev_act = []
for i, (act_dim, act_type) in enumerate(self.env_spec.act_dims_and_types):
act = tf.concat([all_actions[i][1:], all_actions[i][0:1]], 0)
prev_act = all_actions[i]
if self.env_spec.is_discrete(act_type):
reshaped_act.append(tf.reshape(act, [time_length * batch_size]))
reshaped_prev_act.append(
tf.reshape(prev_act, [time_length * batch_size]))
elif self.env_spec.is_box(act_type):
reshaped_act.append(
tf.reshape(act, [time_length * batch_size, act_dim]))
reshaped_prev_act.append(
tf.reshape(prev_act, [time_length * batch_size, act_dim]))
# now inputs go into single step as one large batch
(internal_states, _, logits, log_probs,
entropies, self_kls) = self.single_step(
reshaped_obs, reshaped_act, reshaped_prev_act)
# reshape the outputs back to original time-major format
internal_states = tf.reshape(internal_states, [time_length, batch_size, -1])
logits = [tf.reshape(logit, [time_length, batch_size, -1])
for logit in logits]
log_probs = [tf.reshape(log_prob, [time_length, batch_size])[:-1]
for log_prob in log_probs]
entropies = [tf.reshape(ent, [time_length, batch_size])[:-1]
for ent in entropies]
self_kls = [tf.reshape(self_kl, [time_length, batch_size])[:-1]
for self_kl in self_kls]
return internal_states, logits, log_probs, entropies, self_kls
| unnikrishnankgs/va | venv/lib/python3.5/site-packages/tensorflow/models/pcl_rl/policy.py | Python | bsd-2-clause | 16,946 |
#
# Copyright 2013 Quantopian, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import functools
import logbook
import math
import numpy as np
import numpy.linalg as la
from six import iteritems
import pandas as pd
from . import risk
from . risk import (
check_entry,
)
from empyrical import (
alpha_beta_aligned,
annual_volatility,
cum_returns,
downside_risk,
information_ratio,
max_drawdown,
sharpe_ratio,
sortino_ratio
)
from zipline.utils.serialization_utils import (
VERSION_LABEL
)
log = logbook.Logger('Risk Period')
choose_treasury = functools.partial(risk.choose_treasury,
risk.select_treasury_duration)
class RiskMetricsPeriod(object):
def __init__(self, start_date, end_date, returns, env,
benchmark_returns=None, algorithm_leverages=None):
self.env = env
treasury_curves = env.treasury_curves
if treasury_curves.index[-1] >= start_date:
mask = ((treasury_curves.index >= start_date) &
(treasury_curves.index <= end_date))
self.treasury_curves = treasury_curves[mask]
else:
# our test is beyond the treasury curve history
# so we'll use the last available treasury curve
self.treasury_curves = treasury_curves[-1:]
self.start_date = start_date
self.end_date = end_date
if benchmark_returns is None:
br = env.benchmark_returns
benchmark_returns = br[(br.index >= returns.index[0]) &
(br.index <= returns.index[-1])]
self.algorithm_returns = self.mask_returns_to_period(returns,
env)
self.benchmark_returns = self.mask_returns_to_period(benchmark_returns,
env)
self.algorithm_leverages = algorithm_leverages
self.calculate_metrics()
def calculate_metrics(self):
#print('-'*100)
#print(self.benchmark_returns.head())
#print(self.algorithm_returns.head())
self.benchmark_period_returns = \
cum_returns(self.benchmark_returns).iloc[-1]
self.algorithm_period_returns = \
cum_returns(self.algorithm_returns).iloc[-1]
# fix the case when the indices don't match
if not self.algorithm_returns.index.equals(self.benchmark_returns.index):
joined = self.algorithm_returns.align(self.benchmark_returns, join='outer')
self.algorithm_returns = joined[0].fillna(method='ffill')
self.benchmark_returns = joined[1].fillna(method='ffill')
if not self.algorithm_returns.index.equals(self.benchmark_returns.index):
message = "Mismatch between benchmark_returns ({bm_count}) and \
algorithm_returns ({algo_count}) in range {start} : {end}"
message = message.format(
bm_count=len(self.benchmark_returns),
algo_count=len(self.algorithm_returns),
start=self.start_date,
end=self.end_date
)
# save for debugging
import pickle
pickle.dump((self.algorithm_returns, self.benchmark_returns), open('/tmp/zp-returns.pkl', 'wb'))
raise Exception(message)
self.num_trading_days = len(self.benchmark_returns)
## begin empyrical metrics
self.mean_algorithm_returns = (
self.algorithm_returns.cumsum() /
np.arange(1, self.num_trading_days + 1, dtype=np.float64)
)
self.benchmark_volatility = annual_volatility(self.benchmark_returns)
self.algorithm_volatility = annual_volatility(self.algorithm_returns)
self.treasury_period_return = choose_treasury(
self.treasury_curves,
self.start_date,
self.end_date,
self.env,
)
self.sharpe = sharpe_ratio(
self.algorithm_returns,
)
# The consumer currently expects a 0.0 value for sharpe in period,
# this differs from cumulative which was np.nan.
# When factoring out the sharpe_ratio, the different return types
# were collapsed into `np.nan`.
# TODO: Either fix consumer to accept `np.nan` or make the
# `sharpe_ratio` return type configurable.
# In the meantime, convert nan values to 0.0
if pd.isnull(self.sharpe):
self.sharpe = 0.0
self.downside_risk = downside_risk(
self.algorithm_returns.values
)
self.sortino = sortino_ratio(
self.algorithm_returns.values,
_downside_risk=self.downside_risk,
)
self.information = information_ratio(
self.algorithm_returns.values,
self.benchmark_returns.values,
)
self.alpha, self.beta = alpha_beta_aligned(
self.algorithm_returns.values,
self.benchmark_returns.values,
)
self.excess_return = self.algorithm_period_returns - \
self.treasury_period_return
self.max_drawdown = max_drawdown(self.algorithm_returns.values)
self.max_leverage = self.calculate_max_leverage()
def to_dict(self):
"""
Creates a dictionary representing the state of the risk report.
        Returns a dict mapping each risk metric name (as assembled in ``rval``
        below) to its value, with entries failing ``check_entry`` set to None.
"""
period_label = self.end_date.strftime("%Y-%m")
rval = {
'trading_days': self.num_trading_days,
'benchmark_volatility': self.benchmark_volatility,
'algo_volatility': self.algorithm_volatility,
'treasury_period_return': self.treasury_period_return,
'algorithm_period_return': self.algorithm_period_returns,
'benchmark_period_return': self.benchmark_period_returns,
'sharpe': self.sharpe,
'sortino': self.sortino,
'information': self.information,
'beta': self.beta,
'alpha': self.alpha,
'excess_return': self.excess_return,
'max_drawdown': self.max_drawdown,
'max_leverage': self.max_leverage,
'period_label': period_label
}
return {k: None if check_entry(k, v) else v
for k, v in iteritems(rval)}
def __repr__(self):
statements = []
metrics = [
"algorithm_period_returns",
"benchmark_period_returns",
"excess_return",
"num_trading_days",
"benchmark_volatility",
"algorithm_volatility",
"sharpe",
"sortino",
"information",
# "algorithm_covariance",
# "benchmark_variance",
"beta",
"alpha",
"max_drawdown",
"max_leverage",
"algorithm_returns",
"benchmark_returns",
# "condition_number",
# "eigen_values"
]
for metric in metrics:
value = getattr(self, metric)
statements.append("{m}:{v}".format(m=metric, v=value))
return '\n'.join(statements)
def mask_returns_to_period(self, daily_returns, env):
if isinstance(daily_returns, list):
returns = pd.Series([x.returns for x in daily_returns],
index=[x.date for x in daily_returns])
else: # otherwise we're receiving an index already
returns = daily_returns
trade_days = env.trading_days
trade_day_mask = returns.index.normalize().isin(trade_days)
mask = ((returns.index >= self.start_date) &
(returns.index <= self.end_date) & trade_day_mask)
returns = returns[mask]
return returns
def calculate_max_leverage(self):
if self.algorithm_leverages is None:
return 0.0
else:
return max(self.algorithm_leverages)
def __getstate__(self):
state_dict = {k: v for k, v in iteritems(self.__dict__)
if not k.startswith('_')}
STATE_VERSION = 3
state_dict[VERSION_LABEL] = STATE_VERSION
return state_dict
def __setstate__(self, state):
OLDEST_SUPPORTED_STATE = 3
version = state.pop(VERSION_LABEL)
if version < OLDEST_SUPPORTED_STATE:
raise BaseException("RiskMetricsPeriod saved state \
is too old.")
self.__dict__.update(state)
| jimgoo/zipline-fork | zipline/finance/risk/period.py | Python | apache-2.0 | 9,089 |
'''
Helper functions for writing and opening ntuples.
Copyright (c) 2010 Juan Palacios juan.palacios.puyana@gmail.com
Subject to the Lesser GNU Public License - see < http://www.gnu.org/licenses/lgpl.html>
'''
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
__author__ = 'Juan Palacios <juan.palacios@nikhef.nl>'
__version__ = '1.3'
__all__ = ('gzip_save', 'save')
def gzip_save(obj, filename = "file.ntp") :
import gzip
import cPickle
file = gzip.open(filename, 'wb')
cPickle.dump(obj, file, 2)
file.close()
def save(obj, filename = "file.ntp") :
import pickle
file = open(filename, 'w')
pickle.dump(obj, file)
file.close()
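# Round-trip sketch (illustrative): gzip_save(ntuple, 'events.ntp') writes a
# gzip-compressed pickle (protocol 2) that can be read back with
# cPickle.load(gzip.open('events.ntp', 'rb')); save() does the same with a
# plain, uncompressed pickle.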
| juanchopanza/pyhistuples | pyhistuples/pyntuple/write.py | Python | lgpl-3.0 | 1,283 |
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Top-level presubmit script for testing/trigger_scripts.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into depot_tools.
"""
USE_PYTHON3 = True
def CommonChecks(input_api, output_api):
return input_api.canned_checks.RunUnitTestsInDirectory(
input_api,
output_api,
'.',
files_to_check=['.*test.py'],
run_on_python2=not USE_PYTHON3,
run_on_python3=USE_PYTHON3,
skip_shebang_check=True)
def CheckChangeOnUpload(input_api, output_api):
return CommonChecks(input_api, output_api)
def CheckChangeOnCommit(input_api, output_api):
return CommonChecks(input_api, output_api)
| scheib/chromium | testing/trigger_scripts/PRESUBMIT.py | Python | bsd-3-clause | 883 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('taxbrain', '0082_auto_20150314_2206'),
]
operations = [
migrations.RenameField(
model_name='taxsaveinputs',
old_name='eitc_credit_rate_0',
new_name='_EITC_rt_0',
),
migrations.RenameField(
model_name='taxsaveinputs',
old_name='eitc_credit_rate_1',
new_name='_EITC_rt_1',
),
migrations.RenameField(
model_name='taxsaveinputs',
old_name='eitc_credit_rate_2',
new_name='_EITC_rt_2',
),
migrations.RemoveField(
model_name='taxsaveinputs',
name='eitc_credit_rate_3',
),
]
| talumbau/webapp-public | webapp/apps/taxbrain/migrations/0083_auto_20150314_2207.py | Python | mit | 855 |