| code (string, 3-1.05M chars) | repo_name (string, 5-104 chars) | path (string, 4-251 chars) | language (1 class) | license (15 classes) | size (int64, 3-1.05M) |
---|---|---|---|---|---|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2018-10-26 01:41
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('accounts', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='PasswordReset',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('key', models.CharField(max_length=100, unique=True, verbose_name='Chave')),
('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Criado em')),
('confirmed', models.BooleanField(default=False, verbose_name='Confirmado ?')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='Usuário')),
],
options={
'verbose_name': 'Nova Senha',
'verbose_name_plural': 'Novas Senhas',
'ordering': ['-created_at'],
},
),
]
| ricardogtx/estudoDjango | simplemooc/accounts/migrations/0002_passwordreset.py | Python | gpl-3.0 | 1,198 |
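# Illustrative sketch (not part of the dataset row above): the model that the
# CreateModel operation encodes, reconstructed field-by-field from the
# migration; the Portuguese verbose_name strings are kept verbatim.
from django.conf import settings
from django.db import models
class PasswordReset(models.Model):
    key = models.CharField('Chave', max_length=100, unique=True)
    created_at = models.DateTimeField('Criado em', auto_now_add=True)
    confirmed = models.BooleanField('Confirmado ?', default=False)
    user = models.ForeignKey(settings.AUTH_USER_MODEL,
                             on_delete=models.CASCADE,
                             verbose_name='Usuário')
    class Meta:
        verbose_name = 'Nova Senha'
        verbose_name_plural = 'Novas Senhas'
        ordering = ['-created_at']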
# Copyright 2016 Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron_lib.callbacks import events
from neutron_lib.callbacks import registry
def notify_init_event(agent_type, agent):
"""Notify init event for the specified agent."""
registry.publish(agent_type, events.AFTER_INIT, agent)
def register(callback, agent_type):
"""Subscribe callback to init event for the specified agent.
:param callback: a callback that can process the agent init event.
:param agent_type: an agent type as defined in neutron_lib.constants.
"""
registry.subscribe(callback, agent_type, events.AFTER_INIT)
| noironetworks/neutron | neutron/plugins/ml2/drivers/agent/capabilities.py | Python | apache-2.0 | 1,161 |
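# Illustrative usage sketch (not part of the dataset row above). The agent
# type string and FakeAgent class are made up, and the callback signature
# (resource, event, trigger, payload) is an assumption about how
# neutron_lib's registry dispatches subscribed callbacks.
class FakeAgent(object):
    def __init__(self):
        self.capabilities = []
def on_agent_init(resource, event, trigger, payload=None):
    # 'trigger' is the agent instance passed to notify_init_event()
    trigger.capabilities.append('l2pop')
agent = FakeAgent()
register(on_agent_init, 'fake-agent-type')
notify_init_event('fake-agent-type', agent)
assert agent.capabilities == ['l2pop']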
# -*- coding: utf-8 -*-
# This file is part of wger Workout Manager.
#
# wger Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# wger Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
import logging
from django.contrib.auth.mixins import PermissionRequiredMixin, LoginRequiredMixin
from django.shortcuts import get_object_or_404
from django.core.urlresolvers import reverse
from django.forms import ModelForm, ModelChoiceField
from django.utils.translation import ugettext_lazy
from wger.utils.language import load_language
from django.views.generic import (
DeleteView,
CreateView,
UpdateView
)
from wger.nutrition.models import (
Ingredient,
IngredientWeightUnit,
WeightUnit
)
from wger.utils.generic_views import (
WgerFormMixin,
WgerDeleteMixin
)
logger = logging.getLogger(__name__)
# ************************
# Weight units to ingredient functions
# ************************
class WeightUnitIngredientCreateView(WgerFormMixin,
LoginRequiredMixin,
PermissionRequiredMixin,
CreateView):
'''
Generic view to add a new weight unit to ingredient entry
'''
model = IngredientWeightUnit
title = ugettext_lazy('Add a new weight unit')
permission_required = 'nutrition.add_ingredientweightunit'
# Send some additional data to the template
def get_context_data(self, **kwargs):
context = super(WeightUnitIngredientCreateView, self).get_context_data(**kwargs)
context['form_action'] = reverse('nutrition:unit_ingredient:add',
kwargs={'ingredient_pk': self.kwargs['ingredient_pk']})
return context
def get_success_url(self):
return reverse('nutrition:ingredient:view', kwargs={'id': self.kwargs['ingredient_pk']})
def form_valid(self, form):
ingredient = get_object_or_404(Ingredient, pk=self.kwargs['ingredient_pk'])
form.instance.ingredient = ingredient
return super(WeightUnitIngredientCreateView, self).form_valid(form)
def get_form_class(self):
'''
The form can only show units in the user's language
'''
class IngredientWeightUnitForm(ModelForm):
unit = ModelChoiceField(queryset=WeightUnit.objects.filter(language=load_language()))
class Meta:
model = IngredientWeightUnit
fields = ['unit', 'gram', 'amount']
return IngredientWeightUnitForm
class WeightUnitIngredientUpdateView(WgerFormMixin,
LoginRequiredMixin,
PermissionRequiredMixin,
UpdateView):
'''
Generic view to update a weight unit to ingredient entry
'''
model = IngredientWeightUnit
title = ugettext_lazy('Edit a weight unit to ingredient connection')
form_action_urlname = 'nutrition:unit_ingredient:edit'
permission_required = 'nutrition.add_ingredientweightunit'
def get_success_url(self):
return reverse('nutrition:ingredient:view', kwargs={'id': self.object.ingredient.id})
def get_form_class(self):
'''
The form can only show units in the user's language
'''
class IngredientWeightUnitForm(ModelForm):
unit = ModelChoiceField(queryset=WeightUnit.objects.filter(language=load_language()))
class Meta:
model = IngredientWeightUnit
fields = ['unit', 'gram', 'amount']
return IngredientWeightUnitForm
class WeightUnitIngredientDeleteView(WgerDeleteMixin,
LoginRequiredMixin,
PermissionRequiredMixin,
DeleteView):
'''
Generic view to delete a weight unit to ingredient entry
'''
model = IngredientWeightUnit
fields = ('unit', 'gram', 'amount')
title = ugettext_lazy('Delete?')
form_action_urlname = 'nutrition:unit_ingredient:delete'
permission_required = 'nutrition.add_ingredientweightunit'
def get_success_url(self):
return reverse('nutrition:ingredient:view', kwargs={'id': self.object.ingredient.id})
| kjagoo/wger_stark | wger/nutrition/views/unit_ingredient.py | Python | agpl-3.0 | 4,765 |
#!/usr/bin/env python
# Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from distutils.command.install import INSTALL_SCHEMES
from os.path import dirname, join, abspath
from setuptools import setup
from setuptools.command.install import install
for scheme in INSTALL_SCHEMES.values():
scheme['data'] = scheme['purelib']
setup_args = {
'cmdclass': {'install': install},
'name': 'selenium',
'version': "3.9.0",
'license': 'Apache 2.0',
'description': 'Python bindings for Selenium',
'long_description': open(join(abspath(dirname(__file__)), "README.rst")).read(),
'url': 'https://github.com/SeleniumHQ/selenium/',
'classifiers': ['Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Operating System :: MacOS :: MacOS X',
'Topic :: Software Development :: Testing',
'Topic :: Software Development :: Libraries',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6'],
'package_dir': {
'selenium': 'selenium',
'selenium.common': 'selenium/common',
'selenium.webdriver': 'selenium/webdriver',
},
'packages': ['selenium',
'selenium.common',
'selenium.webdriver',
'selenium.webdriver.android',
'selenium.webdriver.chrome',
'selenium.webdriver.common',
'selenium.webdriver.common.html5',
'selenium.webdriver.firefox',
'selenium.webdriver.ie',
'selenium.webdriver.edge',
'selenium.webdriver.opera',
'selenium.webdriver.phantomjs',
'selenium.webdriver.remote',
'selenium.webdriver.support', ],
'package_data': {
'selenium.webdriver.firefox': ['*.xpi', 'webdriver_prefs.json'],
'selenium.webdriver.remote': ['getAttribute.js', 'isDisplayed.js'],
},
'data_files': [('selenium/webdriver/firefox/x86', ['selenium/webdriver/firefox/x86/x_ignore_nofocus.so']),
('selenium/webdriver/firefox/amd64', ['selenium/webdriver/firefox/amd64/x_ignore_nofocus.so']),
('selenium/webdriver/remote', ['selenium/webdriver/remote/getAttribute.js']),
('selenium/webdriver/remote', ['selenium/webdriver/remote/isDisplayed.js'])],
'include_package_data': True,
'zip_safe': False
}
setup(**setup_args)
| GorK-ChO/selenium | py/setup.py | Python | apache-2.0 | 3,684 |
"""Tests for nexus reading manipulation"""
import os
import re
import unittest
from nexus import NexusReader
from nexus.reader import GenericHandler, DataHandler, TreeHandler
EXAMPLE_DIR = os.path.join(os.path.dirname(__file__), '../examples')
class Test_Manipulation_Data(unittest.TestCase):
"""Test the manipulation of data in the NexusReader"""
def setUp(self):
self.nex = NexusReader(os.path.join(EXAMPLE_DIR, 'example.nex'))
def test_add_taxa(self):
assert self.nex.data.ntaxa == 4
self.nex.data.add_taxon('Elvis', ['1', '2'])
assert self.nex.data.ntaxa == 5
assert self.nex.data.matrix['Elvis'] == ['1', '2']
assert 'Elvis' in self.nex.data.taxa
assert 'Elvis' in self.nex.data.matrix
expected_patterns = [
'^begin data;$',
r'^\s+dimensions ntax=5 nchar=2;$',
r'^\s+format datatype=standard symbols="01" gap=-;$',
'^matrix$',
r'^Simon\s+01$',
r'^Louise\s+11$',
r'^Betty\s+10$',
r'^Harry\s+00$',
r'^Elvis\s+12$',
r'^\s+;$',
'^end;$',
]
written = self.nex.write()
for expected in expected_patterns:
assert re.search(expected, written, re.MULTILINE), 'Expected "%s"' % expected
def test_delete_taxa(self):
assert self.nex.data.ntaxa == 4
self.nex.data.del_taxon('Simon')
assert self.nex.data.ntaxa == 3
assert 'Simon' not in self.nex.data.taxa
assert 'Simon' not in self.nex.data.matrix
expected_patterns = [
'^begin data;$',
r'^\s+dimensions ntax=3 nchar=2;$',
r'^\s+format datatype=standard symbols="01" gap=-;$',
'^matrix$',
r'^Louise\s+11$',
r'^Betty\s+10$',
r'^Harry\s+00$',
r'^\s+;$',
'^end;$',
]
written = self.nex.write()
for expected in expected_patterns:
assert re.search(expected, written, re.MULTILINE), 'Expected "%s"' % expected
# should NOT be here
assert re.search(r'^Simon\s+01$', written, re.MULTILINE) is None, \
'Expected taxon "Simon" to be deleted'
def test_add_character(self):
pass
def test_delete_character(self):
pass
def test_edit_charlabels(self):
pass
# TreeHandler
# self.translators = {}
# self.attributes = []
# self.taxa = []
# self.trees = []
# ntrees | zhangjiajie/PTP | nexus/test/test_reader_manipulation.py | Python | gpl-3.0 | 2,555 |
#/*
# This file is part of ddprint - a 3D printer firmware.
#
# Copyright 2015 erwin.rieger@ibrieger.de
#
# ddprint is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ddprint is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ddprint. If not, see <http://www.gnu.org/licenses/>.
#*/
import math, os, types
import ddprintutil as util
from ddprintconstants import dimNames, X_AXIS, Y_AXIS, Z_AXIS, A_AXIS, B_AXIS
class ProfileException(Exception):
def __init__(self, msg):
self.msg = msg
def __str__(self):
return "Profile error: " + self.msg
####################################################################################################
#
# Json profile base class
#
####################################################################################################
class ProfileBase(object):
def __init__(self, name, specificName=""):
self.name = name
self.specificName = specificName
f = self.openJson(name)
self.values = util.jsonLoad(f)
# Let specific profile overwrite values from a generic profile
if specificName:
f = self.openJson(specificName)
specificValues = util.jsonLoad(f)
for k in list(specificValues.keys()):
self.values[k] = specificValues[k]
def getBaseName(self):
return os.path.basename(self.name)
def hasValue(self, valueName):
return valueName in self.values
def getValue(self, valueName):
try:
return self.values[valueName]
except KeyError:
raise ProfileException("Profile '%s' does has no key: '%s'!" % (self.name, valueName))
def openJson(self, name):
#
# Directory search order:
# * working directory
# * directories listed in $DDPRINTPROFILES path
#
searchpath = []
try:
dirlist = ["."] + os.environ["DDPRINTPROFILES"].split(":")
except KeyError:
dirlist = ["."]
for d in dirlist:
for p in ["mat-profiles", "nozzle-profiles", "machine-profiles"]:
searchpath.append(os.path.join(d, p))
for searchdir in searchpath:
for extension in ["", ".json"]:
try:
f = open(os.path.join(searchdir, name+extension))
return f
except IOError:
pass
raise Exception("Profile %s not found." % name)
def override(self, key, value):
self.values[key] = value
def logValues(self, heading, logger):
logger.logPrintLog("\n%s: %s\n" % (heading, self.name))
if self.specificName:
logger.logPrintLog(" Specific profile: %s\n" % self.specificName)
for key in list(self.values.keys()):
logger.logPrintLog(" %s: %s\n" % (key, str(self.values[key])))
####################################################################################################
#
# Printer profile
#
####################################################################################################
class PrinterProfile(ProfileBase):
def __init__(self, name):
super(PrinterProfile, self).__init__(name)
def getStepsPerMMI(self, axisNr):
return int(self.getValue("axes")[dimNames[axisNr]]["steps_per_mm"])
def getHomeDir(self, axisNr):
return int(self.getValue("axes")[dimNames[axisNr]]["home_dir"])
def getHomeFeedrate(self, axisNr):
return int(self.getValue("axes")[dimNames[axisNr]]["home_feedrate"])
def getStepsPerMMVectorI(self):
return [self.getStepsPerMMI(d) for d in range(5)]
def getMaxFeedrateI(self, axisNr):
return self.getValue("axes")[dimNames[axisNr]]["max_feedrate"]
def getMaxFeedrateVectorI(self):
return [self.getMaxFeedrateI(d) for d in range(5)]
def getRetractFeedrate(self):
return self.getValue("RetractFeedrate")
def getRetractLength(self):
return float(self.getValue("RetractLength"))
def getMaxAxisAccelerationI(self):
return self.getValue("MaxAxisAcceleration")
def getHwVersionI(self):
return self.getValue("hwVersion")
def getFilSensorCalibration(self):
return self.getValue("filSensorCalibration")
def getBedlevelOffset(self):
if self.hasValue("add_homeing_z"):
ofs = float(self.getValue("add_homeing_z"))
if ofs < 0:
print("Warning: negative add_homeing_z is deprecated (%f)" % ofs)
return abs(ofs)
return ofs
return 0.0
def getFeederWheelDiamI(self):
return float(self.getValue("feederWheelDiam"))
def getFeederWheelCircumI(self):
return self.getFeederWheelDiamI() * math.pi
def getFilSensorCountsPerMM(self):
return self.getValue("filSensorCountsPerMM")
def getFilSensorIntervalI(self):
return float(self.getValue("filSensorInterval"))
def getSettings(self, pidSet):
pidSetHeating = pidSet[:2]
pidSetCooling = pidSet[2:]
print("getSettings(): pidset to use: %s, heating: %s, cooling: %s" % (pidSet, pidSetHeating, pidSetCooling))
return {
"filSensorCalibration": self.getFilSensorCalibration(),
"Kp": self.getPidValue(pidSetHeating, "Kp"),
"Ki": self.getPidValue(pidSetHeating, "Ki"),
"Kd": self.getPidValue(pidSetHeating, "Kd"),
"KpC": self.getPidValue(pidSetCooling, "Kp"),
"KiC": self.getPidValue(pidSetCooling, "Ki"),
"KdC": self.getPidValue(pidSetCooling, "Kd"),
"Tu": self.getTuI(),
"stepsPerMMX": self.getStepsPerMMI(X_AXIS),
"stepsPerMMY": self.getStepsPerMMI(Y_AXIS),
"stepsPerMMZ": self.getStepsPerMMI(Z_AXIS),
"stepsPerMMA": self.getStepsPerMMI(A_AXIS),
# "stepsPerMMB": self.getStepsPerMMI(B_AXIS),
"buildVolX": int(self.getPlatformLengthI(X_AXIS) * self.getStepsPerMMI(X_AXIS)),
"buildVolY": int(self.getPlatformLengthI(Y_AXIS) * self.getStepsPerMMI(Y_AXIS)),
"buildVolZ": int(self.getPlatformLengthI(Z_AXIS) * self.getStepsPerMMI(Z_AXIS)),
"xHomeDir": int(self.getHomeDir(X_AXIS)),
"yHomeDir": int(self.getHomeDir(Y_AXIS)),
"zHomeDir": int(self.getHomeDir(Z_AXIS)),
}
def getTuI(self):
return float(self.getValue("Tu"))
def getTgI(self):
return float(self.getValue("Tg"))
def getPidValue(self, pidSet, key):
return float(self.getValue(pidSet)[key])
def getNLongIntervalI(self, feedrate, howlong):
# dt = self.getFilSensorIntervalI()
# Time for one revolution
tRound = self.getFeederWheelCircumI() / feedrate
# nAvg = int(round(tRound / dt))
# nAvg = max(nAvg, 2)
return howlong / tRound
def getWeakPowerBedTemp(self):
if self.hasValue("weakPowerBedTemp"):
return int(self.getValue("weakPowerBedTemp"))
return 0
def getBedSurface(self):
if self.hasValue("bedSurface"):
return self.getValue("bedSurface")
return None
def getPlatformLengthI(self, axisNr):
return self.getValue("axes")[dimNames[axisNr]]["platform_length"]
def getBedLevelMode(self):
return self.getValue("bedLevelMode")
def getJerk(self, dim):
return self.getValue("axes")[dim]["jerk"]
def getMaxStepperFreq(self):
return float(self.getValue("maxStepperFreq"))
def getBowdenLength(self):
return self.getValue("bowdenLength")
def getXo(self):
return int(self.getValue("Xo"))
# True if Z-endstop is at Z zero position
def homingToZero(self):
return self.getHomeDir(Z_AXIS) <= 0
def getBautRateLimit(self):
if self.hasValue("baudRateLimit"):
return int(self.getValue("baudRateLimit"))
else:
return 1000000
####################################################################################################
#
# Material profile
#
####################################################################################################
class MatProfile(ProfileBase):
def __init__(self, name, smatName, printerName, hwVersion, nozzleDiam):
if smatName:
smatName = os.path.join(printerName, smatName)
super(MatProfile, self).__init__(name, smatName)
# Check hardware version
assert(self.getValue("version") == hwVersion)
self.matArea = (math.pi * pow(float(self.values["material_diameter"]), 2)) / 4.0
self.nozzleDiam = nozzleDiam
def override(self, key, value):
assert(key != "material_diameter")
ProfileBase.override(self, key, value)
def getValuesI(self):
return self.values
def getHotendBaseTemp(self):
return int(self.getValue("hotendBaseTemp"))
def getHotendGoodTemp(self):
return int(self.getValue("hotendGoodTemp"))
def getHotendMaxTemp(self):
return int(self.getValuesI()["hotendMaxTemp"])
def getBedTemp(self):
return int(self.getValue("bedTemp"))
def getBedTempReduced(self):
return int(self.getValue("bedTempReduced"))
def getKeepBedtemp(self):
if self.hasValue("keepBedtemp"):
return int(self.getValue("keepBedtemp"))
return 0
def getMatArea(self):
return self.matArea
def getKAdvI(self):
return float(self.getValue("kAdvance"))
def getFlowrateData(self):
return self.getValue("properties_%d" % (self.nozzleDiam*100))
def getKpwm(self):
flowrateData = self.getFlowrateData()
return flowrateData["Kpwm"]
def getKtemp(self):
flowrateData = self.getFlowrateData()
return flowrateData["Ktemp"]
def getP0pwm(self):
flowrateData = self.getFlowrateData()
return flowrateData["P0pwm"]
def getP0pwmPrint(self):
flowrateData = self.getFlowrateData()
return flowrateData["P0pwmPrint"]
def getFR0pwm(self):
flowrateData = self.getFlowrateData()
return flowrateData["FR0pwm"]
def getFR0pwmPrint(self):
flowrateData = self.getFlowrateData()
return flowrateData["FR0pwmPrint"]
def getP0temp(self):
flowrateData = self.getFlowrateData()
return flowrateData["P0temp"]
def getP0tempPrint(self):
flowrateData = self.getFlowrateData()
return flowrateData["P0tempPrint"]
def getSlippageI(self):
flowrateData = self.getFlowrateData()
return flowrateData["slippage"]
def getFrSLE(self):
return (
util.SLE(x1=self.getP0temp(), y1=self.getFR0pwm(), m=self.getKtemp()),
util.SLE(x1=self.getP0pwm(), y1=self.getFR0pwm(), m=self.getKpwm())
)
def getFrSLEPrint(self):
return (
util.SLE(x1=self.getP0tempPrint(), y1=self.getFR0pwmPrint(), m=self.getKtemp()),
util.SLE(x1=self.getP0pwmPrint(), y1=self.getFR0pwmPrint(), m=self.getKpwm())
)
####################################################################################################
#
# Nozzle profile
#
####################################################################################################
class NozzleProfile(ProfileBase):
def __init__(self, name):
super(NozzleProfile, self).__init__(name)
def getSizeI(self):
return float(self.getValue("size"))
def getMaxExtrusionRateI(self):
return float(self.getValue("maxExtrusionRate"))
def getAreaI(self):
return (math.pi/4) * pow(self.getSizeI(), 2)
if __name__ == "__main__":
printerProfile = PrinterProfile("UM2.json")
| ErwinRieger/ddprint | host/ddprofile.py | Python | gpl-2.0 | 12,336 |
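# Illustrative usage sketch (not part of the dataset row above). openJson()
# resolves <name>[.json] under the mat-profiles, nozzle-profiles and
# machine-profiles subdirectories of the working directory and of each entry
# in $DDPRINTPROFILES; the directory and the "pla" material profile name
# below are assumptions.
import os
os.environ["DDPRINTPROFILES"] = "/home/user/ddprint-profiles"  # assumed checkout
printer = PrinterProfile("UM2.json")  # found via machine-profiles/UM2.json
mat = MatProfile("pla", None, "UM2", printer.getHwVersionI(), nozzleDiam=0.4)
print(printer.getStepsPerMMVectorI(), mat.getMatArea())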
# Copyright Iris contributors
#
# This file is part of Iris and is released under the LGPL license.
# See COPYING and COPYING.LESSER in the root of the repository for full
# licensing details.
"""
Tests elements of the cartography module.
"""
# import iris tests first so that some things can be initialised before importing anything else
import iris.tests as tests # isort:skip
import numpy as np
import iris
import iris.analysis.cartography
class Test_get_xy_grids(tests.IrisTest):
# Testing for iris.analysis.cartography.get_xy_grids().
def test_1d(self):
cube = iris.cube.Cube(np.arange(12).reshape(3, 4))
cube.add_dim_coord(iris.coords.DimCoord(np.arange(3), "latitude"), 0)
cube.add_dim_coord(iris.coords.DimCoord(np.arange(4), "longitude"), 1)
x, y = iris.analysis.cartography.get_xy_grids(cube)
self.assertRepr((x, y), ("cartography", "get_xy_grids", "1d.txt"))
def test_2d(self):
cube = iris.cube.Cube(np.arange(12).reshape(3, 4))
cube.add_aux_coord(
iris.coords.AuxCoord(np.arange(12).reshape(3, 4), "latitude"),
(0, 1),
)
cube.add_aux_coord(
iris.coords.AuxCoord(
np.arange(100, 112).reshape(3, 4), "longitude"
),
(0, 1),
)
x, y = iris.analysis.cartography.get_xy_grids(cube)
self.assertRepr((x, y), ("cartography", "get_xy_grids", "2d.txt"))
def test_3d(self):
cube = iris.cube.Cube(np.arange(60).reshape(5, 3, 4))
cube.add_aux_coord(
iris.coords.AuxCoord(np.arange(60).reshape(5, 3, 4), "latitude"),
(0, 1, 2),
)
cube.add_aux_coord(
iris.coords.AuxCoord(
np.arange(100, 160).reshape(5, 3, 4), "longitude"
),
(0, 1, 2),
)
self.assertRaises(
ValueError, iris.analysis.cartography.get_xy_grids, cube
)
if __name__ == "__main__":
tests.main()
| SciTools/iris | lib/iris/tests/test_cartography.py | Python | lgpl-3.0 | 1,999 |
"""HTTP views to interact with the device registry."""
import voluptuous as vol
from homeassistant.components import websocket_api
from homeassistant.components.websocket_api.decorators import (
async_response,
require_admin,
)
from homeassistant.core import callback
from homeassistant.helpers.device_registry import async_get_registry
WS_TYPE_LIST = "config/device_registry/list"
SCHEMA_WS_LIST = websocket_api.BASE_COMMAND_MESSAGE_SCHEMA.extend(
{vol.Required("type"): WS_TYPE_LIST}
)
WS_TYPE_UPDATE = "config/device_registry/update"
SCHEMA_WS_UPDATE = websocket_api.BASE_COMMAND_MESSAGE_SCHEMA.extend(
{
vol.Required("type"): WS_TYPE_UPDATE,
vol.Required("device_id"): str,
vol.Optional("area_id"): vol.Any(str, None),
vol.Optional("name_by_user"): vol.Any(str, None),
}
)
async def async_setup(hass):
"""Enable the Device Registry views."""
hass.components.websocket_api.async_register_command(
WS_TYPE_LIST, websocket_list_devices, SCHEMA_WS_LIST
)
hass.components.websocket_api.async_register_command(
WS_TYPE_UPDATE, websocket_update_device, SCHEMA_WS_UPDATE
)
return True
@async_response
async def websocket_list_devices(hass, connection, msg):
"""Handle list devices command."""
registry = await async_get_registry(hass)
connection.send_message(
websocket_api.result_message(
msg["id"], [_entry_dict(entry) for entry in registry.devices.values()]
)
)
@require_admin
@async_response
async def websocket_update_device(hass, connection, msg):
"""Handle update area websocket command."""
registry = await async_get_registry(hass)
msg.pop("type")
msg_id = msg.pop("id")
entry = registry.async_update_device(**msg)
connection.send_message(websocket_api.result_message(msg_id, _entry_dict(entry)))
@callback
def _entry_dict(entry):
"""Convert entry to API format."""
return {
"config_entries": list(entry.config_entries),
"connections": list(entry.connections),
"manufacturer": entry.manufacturer,
"model": entry.model,
"name": entry.name,
"sw_version": entry.sw_version,
"entry_type": entry.entry_type,
"id": entry.id,
"identifiers": list(entry.identifiers),
"via_device_id": entry.via_device_id,
"area_id": entry.area_id,
"name_by_user": entry.name_by_user,
}
| sdague/home-assistant | homeassistant/components/config/device_registry.py | Python | apache-2.0 | 2,447 |
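# Illustrative websocket payloads (not part of the dataset row above), as a
# frontend would send them per SCHEMA_WS_LIST / SCHEMA_WS_UPDATE; the message
# ids and the device_id value are made up.
#   {"id": 1, "type": "config/device_registry/list"}
#   {"id": 2, "type": "config/device_registry/update",
#    "device_id": "abc123", "name_by_user": "Living room lamp"}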
# Copyright (c) 2016, the Cap authors.
#
# This file is subject to the Modified BSD License and may not be distributed
# without copyright and license information. Please refer to the file LICENSE
# for the text and further information on this license.
from pycap import PropertyTree, EnergyStorageDevice, TimeEvolution
from mpi4py import MPI
import unittest
comm = MPI.COMM_WORLD
filename = 'series_rc.info'
ptree = PropertyTree()
ptree.parse_info(filename)
device = EnergyStorageDevice(ptree, comm)
class capTimeEvolutionTestCase(unittest.TestCase):
def test_evolve_constant_voltage(self):
ptree = PropertyTree()
ptree.put_string('mode', 'constant_voltage')
ptree.put_double('voltage', 2.1)
evolve_one_time_step = TimeEvolution.factory(ptree)
evolve_one_time_step(device, 0.1)
self.assertEqual(device.get_voltage(), 2.1)
def test_evolve_constant_current(self):
ptree = PropertyTree()
ptree.put_string('mode', 'constant_current')
ptree.put_double('current', 100e-3)
evolve_one_time_step = TimeEvolution.factory(ptree)
evolve_one_time_step(device, 0.1)
self.assertEqual(device.get_current(), 100e-3)
def test_evolve_constant_power(self):
ptree = PropertyTree()
ptree.put_string('mode', 'constant_power')
ptree.put_double('power', 0.3)
evolve_one_time_step = TimeEvolution.factory(ptree)
evolve_one_time_step(device, 0.1)
self.assertAlmostEqual(device.get_current() *
device.get_voltage(), 0.3)
def test_evolve_constant_load(self):
ptree = PropertyTree()
ptree.put_string('mode', 'constant_load')
ptree.put_double('load', 120)
evolve_one_time_step = TimeEvolution.factory(ptree)
evolve_one_time_step(device, 0.1)
self.assertAlmostEqual(device.get_voltage() /
device.get_current(), -120)
def test_hold(self):
ptree = PropertyTree()
ptree.put_string('mode', 'hold')
evolve_one_time_step = TimeEvolution.factory(ptree)
device.evolve_one_time_step_constant_voltage(0.1, 1.4)
evolve_one_time_step(device, 0.1)
self.assertEqual(device.get_voltage(), 1.4)
def test_rest(self):
ptree = PropertyTree()
ptree.put_string('mode', 'rest')
evolve_one_time_step = TimeEvolution.factory(ptree)
evolve_one_time_step(device, 0.1)
self.assertEqual(device.get_current(), 0.0)
def test_invalid_time_evolution(self):
ptree = PropertyTree()
ptree.put_string('mode', 'unexpected')
self.assertRaises(RuntimeError, TimeEvolution.factory, ptree)
def test_constructor(self):
self.assertRaises(TypeError, TimeEvolution)
self.assertRaises(RuntimeError, TimeEvolution, PropertyTree())
if __name__ == '__main__':
unittest.main()
| dalg24/Cap | python/test/test_time_evolution.py | Python | bsd-3-clause | 2,928 |
from django.conf.urls.defaults import *
urlpatterns = patterns('autoadmin.views',
(r'^$','index'),
(r'server_fun_categ/$','server_fun_categ'),
(r'server_app_categ/$','server_app_categ'),
(r'server_list/$','server_list'),
(r'module_list/$','module_list'),
(r'module_info/$','module_info'),
(r'module_run/$','module_run'),
(r'module_add/$','module_add'),
(r'module_add_post/$','module_add_post'),
)
| zhengjue/mytornado | omserver/OMserverweb/autoadmin/urls.py | Python | gpl-3.0 | 434 |
__all__ = ['create_subprocess_exec', 'create_subprocess_shell']
import collections
import subprocess
from . import events
from . import futures
from . import protocols
from . import streams
from . import tasks
from .coroutines import coroutine
from .log import logger
PIPE = subprocess.PIPE
STDOUT = subprocess.STDOUT
DEVNULL = subprocess.DEVNULL
class SubprocessStreamProtocol(streams.FlowControlMixin,
protocols.SubprocessProtocol):
"""Like StreamReaderProtocol, but for a subprocess."""
def __init__(self, limit, loop):
super().__init__(loop=loop)
self._limit = limit
self.stdin = self.stdout = self.stderr = None
self.waiter = futures.Future(loop=loop)
self._waiters = collections.deque()
self._transport = None
def __repr__(self):
info = [self.__class__.__name__]
if self.stdin is not None:
info.append('stdin=%r' % self.stdin)
if self.stdout is not None:
info.append('stdout=%r' % self.stdout)
if self.stderr is not None:
info.append('stderr=%r' % self.stderr)
return '<%s>' % ' '.join(info)
def connection_made(self, transport):
self._transport = transport
if transport.get_pipe_transport(1):
self.stdout = streams.StreamReader(limit=self._limit,
loop=self._loop)
if transport.get_pipe_transport(2):
self.stderr = streams.StreamReader(limit=self._limit,
loop=self._loop)
stdin = transport.get_pipe_transport(0)
if stdin is not None:
self.stdin = streams.StreamWriter(stdin,
protocol=self,
reader=None,
loop=self._loop)
self.waiter.set_result(None)
def pipe_data_received(self, fd, data):
if fd == 1:
reader = self.stdout
elif fd == 2:
reader = self.stderr
else:
reader = None
if reader is not None:
reader.feed_data(data)
def pipe_connection_lost(self, fd, exc):
if fd == 0:
pipe = self.stdin
if pipe is not None:
pipe.close()
self.connection_lost(exc)
return
if fd == 1:
reader = self.stdout
elif fd == 2:
reader = self.stderr
else:
reader = None
if reader is not None:
if exc is None:
reader.feed_eof()
else:
reader.set_exception(exc)
def process_exited(self):
# wake up futures waiting for wait()
returncode = self._transport.get_returncode()
while self._waiters:
waiter = self._waiters.popleft()
waiter.set_result(returncode)
class Process:
def __init__(self, transport, protocol, loop):
self._transport = transport
self._protocol = protocol
self._loop = loop
self.stdin = protocol.stdin
self.stdout = protocol.stdout
self.stderr = protocol.stderr
self.pid = transport.get_pid()
def __repr__(self):
return '<%s %s>' % (self.__class__.__name__, self.pid)
@property
def returncode(self):
return self._transport.get_returncode()
@coroutine
def wait(self):
"""Wait until the process exit and return the process return code."""
returncode = self._transport.get_returncode()
if returncode is not None:
return returncode
waiter = futures.Future(loop=self._loop)
self._protocol._waiters.append(waiter)
yield from waiter
return waiter.result()
def _check_alive(self):
if self._transport.get_returncode() is not None:
raise ProcessLookupError()
def send_signal(self, signal):
self._check_alive()
self._transport.send_signal(signal)
def terminate(self):
self._check_alive()
self._transport.terminate()
def kill(self):
self._check_alive()
self._transport.kill()
@coroutine
def _feed_stdin(self, input):
debug = self._loop.get_debug()
self.stdin.write(input)
if debug:
logger.debug('%r communicate: feed stdin (%s bytes)',
self, len(input))
try:
yield from self.stdin.drain()
except (BrokenPipeError, ConnectionResetError) as exc:
# communicate() ignores BrokenPipeError and ConnectionResetError
if debug:
logger.debug('%r communicate: stdin got %r', self, exc)
if debug:
logger.debug('%r communicate: close stdin', self)
self.stdin.close()
@coroutine
def _noop(self):
return None
@coroutine
def _read_stream(self, fd):
transport = self._transport.get_pipe_transport(fd)
if fd == 2:
stream = self.stderr
else:
assert fd == 1
stream = self.stdout
if self._loop.get_debug():
name = 'stdout' if fd == 1 else 'stderr'
logger.debug('%r communicate: read %s', self, name)
output = yield from stream.read()
if self._loop.get_debug():
name = 'stdout' if fd == 1 else 'stderr'
logger.debug('%r communicate: close %s', self, name)
transport.close()
return output
@coroutine
def communicate(self, input=None):
if input:
stdin = self._feed_stdin(input)
else:
stdin = self._noop()
if self.stdout is not None:
stdout = self._read_stream(1)
else:
stdout = self._noop()
if self.stderr is not None:
stderr = self._read_stream(2)
else:
stderr = self._noop()
stdin, stdout, stderr = yield from tasks.gather(stdin, stdout, stderr,
loop=self._loop)
yield from self.wait()
return (stdout, stderr)
@coroutine
def create_subprocess_shell(cmd, stdin=None, stdout=None, stderr=None,
loop=None, limit=streams._DEFAULT_LIMIT, **kwds):
if loop is None:
loop = events.get_event_loop()
protocol_factory = lambda: SubprocessStreamProtocol(limit=limit,
loop=loop)
transport, protocol = yield from loop.subprocess_shell(
protocol_factory,
cmd, stdin=stdin, stdout=stdout,
stderr=stderr, **kwds)
yield from protocol.waiter
return Process(transport, protocol, loop)
@coroutine
def create_subprocess_exec(program, *args, stdin=None, stdout=None,
stderr=None, loop=None,
limit=streams._DEFAULT_LIMIT, **kwds):
if loop is None:
loop = events.get_event_loop()
protocol_factory = lambda: SubprocessStreamProtocol(limit=limit,
loop=loop)
transport, protocol = yield from loop.subprocess_exec(
protocol_factory,
program, *args,
stdin=stdin, stdout=stdout,
stderr=stderr, **kwds)
yield from protocol.waiter
return Process(transport, protocol, loop)
| ruibarreira/linuxtrail | usr/lib/python3.4/asyncio/subprocess.py | Python | gpl-3.0 | 7,702 |
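# Illustrative usage sketch (not part of the dataset row above): running a
# command and collecting stdout through the high-level API defined above, in
# the generator-based coroutine style this module targets (Python 3.4).
import asyncio
@asyncio.coroutine
def run():
    proc = yield from asyncio.create_subprocess_exec(
        'echo', 'hello', stdout=asyncio.subprocess.PIPE)
    stdout, _ = yield from proc.communicate()
    return proc.returncode, stdout
loop = asyncio.get_event_loop()
code, out = loop.run_until_complete(run())
print(code, out)  # 0 b'hello\n'
loop.close()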
from nose.tools import *
import stacktrain.storage_tasks as storage_tasks
import os
path = os.getcwd()
storage = storage_tasks.Storage(path, 'test.qcow2')
def test_create_disk():
assert storage.create_disk()
def test_list_disk():
assert (len(storage.list_disk()) > 0)
def test_destroy_disk():
assert storage.destroy_disk()
| sayalilunkad/libvirtPOC | stacktrain/tests/test_storage_tasks.py | Python | apache-2.0 | 343 |
from django.contrib.auth.models import BaseUserManager
class LifeUserManager(BaseUserManager):
def create_user(self, email, password=None):
if not email:
raise ValueError('Users must have an email address')
user = self.model(
email=self.normalize_email(email),
)
user.set_password(password)
user.save(using=self._db)
return user
def create_superuser(self, email, password):
user = self.create_user(
email,
password=password,
)
user.is_admin = True
user.save(using=self._db)
return user
| BoraDowon/Life3.0 | life3/dashboard/managers.py | Python | mit | 635 |
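# Illustrative sketch (not part of the dataset row above): a minimal custom
# user model wiring this manager in. The model name is an assumption; the
# email and is_admin fields mirror what the manager reads and sets.
from django.contrib.auth.models import AbstractBaseUser
from django.db import models
class LifeUser(AbstractBaseUser):
    email = models.EmailField(unique=True)
    is_admin = models.BooleanField(default=False)
    objects = LifeUserManager()
    USERNAME_FIELD = 'email'
    REQUIRED_FIELDS = []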
import jieba
import sys
if __name__ == '__main__':
jieba.set_dictionary('jieba/extra_dict/dict.txt.big')
for l in sys.stdin:
words = jieba.cut(l.strip())
sys.stdout.write((u' '.join(words) + u'\n').encode('utf8'))
| shaform/experiments | word2vec_tw/cut.py | Python | mit | 239 |
import logging
from zeroless import (Server, log)
# Setup console logging
consoleHandler = logging.StreamHandler()
log.setLevel(logging.DEBUG)
log.addHandler(consoleHandler)
# Binds the reply server to port 12345
# And assigns a callable and an iterable
# To both transmit and wait for incoming messages
reply, listen_for_request = Server(port=12345).reply()
for msg in listen_for_request:
print(msg)
reply(msg)
| x8lucas8x/python-zeroless | examples/reqRepServer.py | Python | lgpl-2.1 | 424 |
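# Illustrative client-side counterpart (not part of the dataset row above).
# Assumes python-zeroless exposes a Client with connect_local() and a
# request() pair mirroring the Server.reply() shown above; treat these names
# as assumptions.
from zeroless import Client
client = Client()
client.connect_local(port=12345)
request, listen_for_reply = client.request()
request(b'ping')
for msg in listen_for_reply:
    print(msg)  # the reply server above echoes b'ping' back
    break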
# Global Forest Watch API
# Copyright (C) 2013 World Resource Institute
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import json
import webapp2
from google.appengine.ext import ndb
from gfw.middlewares.cors import CORSRequestHandler
from gfw.common import gfw_url
from appengine_config import runtime_config
from gfw.v2.migrations.migration import Migration
from gfw.models.subscription import Subscription
class MigrationsApi(CORSRequestHandler):
def dispatch(self):
options_request = (self.request.method == "OPTIONS")
self.user = self.request.user if hasattr(self.request, 'user') else None
if not options_request and self.user is None:
params = self.app.router.match(self.request)[-1]
self.redirect(gfw_url('my_gfw/subscriptions', {
'migration_id': params['migration_id']}))
else:
webapp2.RequestHandler.dispatch(self)
def migrate(self, migration_id):
migration = ndb.Key(urlsafe=migration_id).get()
if migration and migration.key.kind() == Migration.kind:
migration.update_subscriptions(self.user)
self.redirect(gfw_url('my_gfw/subscriptions', {
'migration_successful': 'true'}))
else:
self.write_error(404, 'Not found')
| wri/gfw-api | gfw/v2/migrations/handlers.py | Python | gpl-2.0 | 1,961 |
#!/usr/bin/env python3
'''ioriodb CLI client to interact with the api from the command line'''
from __future__ import print_function
import time
import json
import argparse
import iorio
def get_arg_parser():
'''build the cli arg parser'''
parser = argparse.ArgumentParser(description='Iorio DB CLI')
parser.add_argument('--verbose', '-v', action='count')
parser.add_argument('-u', '--username', default='admin',
help='username used for authentication')
parser.add_argument('-p', '--password', default='secret',
help='password used for authentication')
parser.add_argument('-t', '--token', default=None,
help='token from an already authenticated user')
parser.add_argument('-H', '--host', default='localhost',
help='host where ioriodb is running')
parser.add_argument('-P', '--port', default=8080, type=int,
help='port where ioriodb is running')
parser.add_argument('-c', '--count', default=1, type=int,
help='how many times to do the action')
parser.add_argument('--human', action='store_true', default=False)
subparsers = parser.add_subparsers()
p_post = subparsers.add_parser('post', help='add an event to a stream')
p_patch = subparsers.add_parser('patch',
help='patch last event from a stream')
p_list_buckets = subparsers.add_parser('list-buckets', help='list buckets')
p_list_streams = subparsers.add_parser('list-streams', help='list streams')
p_get = subparsers.add_parser('get', help='get content from a stream')
p_listen = subparsers.add_parser('listen',
help='listen to new content from streams')
p_stats = subparsers.add_parser('stats', help='get server stats')
p_stats.set_defaults(action='stats')
#p_admin = subparsers.add_parser('admin', help='admin tasks')
p_post.set_defaults(action='post')
p_post.add_argument('bucket', help='bucket name')
p_post.add_argument('stream', help='stream name')
p_post.add_argument('-c', '--content-type', default='application/json',
help='content-type for the request')
p_post.add_argument('data', help='literal JSON data or if starts with @ ' +
'path to a file with JSON data')
p_patch.set_defaults(action='patch')
p_patch.add_argument('bucket', help='bucket name')
p_patch.add_argument('stream', help='stream name')
p_patch.add_argument('-c', '--content-type',
default='application/json-patch+json',
help='content-type for the request')
p_patch.add_argument('data', help='literal JSON data or if starts with @ ' +
'path to a file with JSON data')
p_get.set_defaults(action='get')
p_get.add_argument('bucket', help='bucket name')
p_get.add_argument('stream', help='stream name')
p_get.add_argument('-l', '--limit', default=10, type=int,
help='amount of items to retrieve')
p_get.add_argument('-f', '--from', default=None, type=int, dest='fromsn',
help='sequence number to start from')
p_list_buckets.set_defaults(action='list-buckets')
p_list_streams.set_defaults(action='list-streams')
p_list_streams.add_argument('bucket', help='bucket name')
p_listen.set_defaults(action='listen')
p_listen.add_argument('subscriptions', nargs='+',
help="subscription descriptiors (bucket:stream or bucket:stream:from)")
return parser
def parse_args():
'''parse arguments and return them'''
parser = get_arg_parser()
args = parser.parse_args()
return args
def parse_data_from_raw(data_raw):
'''parse data from a literal, or if it starts with @, parse content from the file'''
if data_raw.startswith('@'):
return json.load(open(data_raw[1:]))
else:
return json.loads(data_raw)
def do_when_authenticated(args, fun, conn=None):
'''if auth works run fun'''
if conn is None:
conn = iorio.Connection(args.host, args.port)
auth_t1 = time.time()
auth_ok, auth_resp = conn.authenticate(args.username, args.password)
auth_t2 = time.time()
if args.verbose and args.verbose > 1:
print("Auth request time", (auth_t2 - auth_t1) * 1000, "ms")
if auth_ok:
req_t1 = time.time()
response = fun(conn)
req_t2 = time.time()
if args.verbose and args.verbose > 1:
print("Request time", (req_t2 - req_t1) * 1000, "ms")
print(response)
else:
print("Auth Failed")
print(auth_resp)
def post_or_patch(args, name):
'''avoid duplication'''
bucket = args.bucket
stream = args.stream
content_type = args.content_type
data_raw = args.data
data = parse_data_from_raw(data_raw)
def fun(conn):
'''fun that does the work'''
function = getattr(conn, name)
for _ in range(args.count):
result = function(bucket, stream, data, content_type)
return result
do_when_authenticated(args, fun)
def handle_post_event(args):
'''post a new event'''
post_or_patch(args, 'send')
def handle_patch_event(args):
'''patch a new event'''
post_or_patch(args, 'send_patch')
def handle_get_events(args):
'''get events'''
bucket = args.bucket
stream = args.stream
limit = args.limit
fromsn = args.fromsn
def fun(conn):
'''fun that does the work'''
return conn.query(bucket, stream, fromsn, limit)
do_when_authenticated(args, fun)
def handle_list_streams(args):
'''get events'''
bucket = args.bucket
def fun(conn):
'''fun that does the work'''
return conn.list_streams(bucket)
do_when_authenticated(args, fun)
def handle_list_buckets(args):
'''get events'''
def fun(conn):
'''fun that does the work'''
return conn.list_buckets()
do_when_authenticated(args, fun)
def diff_keys(dict1, dict2, keys):
'''calculate the per-key difference between dict2 and dict1'''
result = {}
for key in keys:
val1 = dict1.get(key)
val2 = dict2.get(key)
if isinstance(val1, int) and isinstance(val2, int):
result[key] = val2 - val1
return result
def handle_stats(args):
'''get events'''
def fun(conn):
'''fun that does the work'''
response = conn.stats()
stats = response.body
node_stats = stats['node']
abs1 = node_stats['abs1']
abs2 = node_stats['abs2']
keys = ['error_logger_queue_len', 'memory_atoms', 'memory_bin',
'memory_ets', 'memory_procs', 'memory_total', 'process_count',
'run_queue']
abs_diff = diff_keys(abs1, abs2, keys)
stats['abs_diff'] = abs_diff
return response
do_when_authenticated(args, fun)
def parse_subscription(sub):
'''parse a subscription in notation bucket:stream[:from]'''
parts = sub.split(':')
parts_count = len(parts)
if parts_count == 2:
return True, parts + [None]
elif parts_count == 3:
try:
seqnum = int(parts[2])
return True, [parts[0], parts[1], seqnum]
except ValueError:
return (False, "expected subscription to have format " +
"bucket:stream:from where from is a number, got %s" % sub)
else:
return (False, "expected subscription to have format " +
"bucket:stream[:from], got %s" % sub)
def handle_listen(args):
'''listen to events in subscriptions'''
raw_subs = args.subscriptions
subs = iorio.Subscriptions()
for sub in raw_subs:
ok, result = parse_subscription(sub)
if not ok:
print(result)
return
bucket, stream, count = result
subs.add(bucket, stream, count)
def fun(conn):
'''fun that does the work'''
while True:
current_subs = subs.to_list()
print('listening', ' '.join(current_subs))
response = conn.listen(current_subs)
print(response)
print()
if response.status == 200:
subs.update_seqnums(response.body)
do_when_authenticated(args, fun)
HANDLERS = {
'post': handle_post_event,
'patch': handle_patch_event,
'get': handle_get_events,
'listen': handle_listen,
'list-buckets': handle_list_buckets,
'list-streams': handle_list_streams,
'stats': handle_stats
}
def main():
'''cli entry point'''
args = parse_args()
handler = HANDLERS[args.action]
handler(args)
if __name__ == '__main__':
main()
| javierdallamore/ioriodb | tools/ioriocli.py | Python | mpl-2.0 | 8,730 |
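# Illustrative invocations (not part of the dataset row above); bucket,
# stream, and payload values are made up, and the credentials shown are the
# parser defaults:
#   python ioriocli.py post mybucket mystream '{"price": 42}'
#   python ioriocli.py get mybucket mystream -l 5 -f 0
#   python ioriocli.py listen mybucket:mystream:0 other:events
#   python ioriocli.py -u admin -p secret stats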
"""
:class:`DominatorTree` computes the dominance relation over
control flow graphs.
See http://www.cs.rice.edu/~keith/EMBED/dom.pdf.
"""
class GenericDominatorTree:
def __init__(self):
self._assign_names()
self._compute()
def _traverse_in_postorder(self):
raise NotImplementedError
def _prev_block_names(self, block):
raise NotImplementedError
def _assign_names(self):
postorder = self._traverse_in_postorder()
self._start_name = len(postorder) - 1
self._block_of_name = postorder
self._name_of_block = {}
for block_name, block in enumerate(postorder):
self._name_of_block[block] = block_name
def _intersect(self, block_name_1, block_name_2):
finger_1, finger_2 = block_name_1, block_name_2
while finger_1 != finger_2:
while finger_1 < finger_2:
finger_1 = self._doms[finger_1]
while finger_2 < finger_1:
finger_2 = self._doms[finger_2]
return finger_1
def _compute(self):
self._doms = {}
# Start block dominates itself.
self._doms[self._start_name] = self._start_name
# We don't yet know what blocks dominate all other blocks.
for block_name in range(self._start_name):
self._doms[block_name] = None
changed = True
while changed:
changed = False
# For all blocks except start block, in reverse postorder...
for block_name in reversed(range(self._start_name)):
# Select a new immediate dominator from the blocks we have
# already processed, and remember all others.
# We've already processed at least one previous block because
# of the graph traverse order.
new_idom, prev_block_names = None, []
for prev_block_name in self._prev_block_names(block_name):
if new_idom is None and self._doms[prev_block_name] is not None:
new_idom = prev_block_name
else:
prev_block_names.append(prev_block_name)
# Find a common previous block
for prev_block_name in prev_block_names:
if self._doms[prev_block_name] is not None:
new_idom = self._intersect(prev_block_name, new_idom)
if self._doms[block_name] != new_idom:
self._doms[block_name] = new_idom
changed = True
def immediate_dominator(self, block):
return self._block_of_name[self._doms[self._name_of_block[block]]]
def dominators(self, block):
# Blocks that are statically unreachable from entry are considered
# dominated by every other block.
if block not in self._name_of_block:
yield from self._block_of_name
return
block_name = self._name_of_block[block]
yield self._block_of_name[block_name]
while block_name != self._doms[block_name]:
block_name = self._doms[block_name]
yield self._block_of_name[block_name]
class DominatorTree(GenericDominatorTree):
def __init__(self, function):
self.function = function
super().__init__()
def _traverse_in_postorder(self):
postorder = []
visited = set()
def visit(block):
visited.add(block)
for next_block in block.successors():
if next_block not in visited:
visit(next_block)
postorder.append(block)
visit(self.function.entry())
return postorder
def _prev_block_names(self, block_name):
for block in self._block_of_name[block_name].predecessors():
# Only return predecessors that are statically reachable from entry.
if block in self._name_of_block:
yield self._name_of_block[block]
class PostDominatorTree(GenericDominatorTree):
def __init__(self, function):
self.function = function
super().__init__()
def _traverse_in_postorder(self):
postorder = []
visited = set()
def visit(block):
visited.add(block)
for next_block in block.predecessors():
if next_block not in visited:
visit(next_block)
postorder.append(block)
for block in self.function.basic_blocks:
if not any(block.successors()):
visit(block)
postorder.append(None) # virtual exit block
return postorder
def _prev_block_names(self, block_name):
succ_blocks = self._block_of_name[block_name].successors()
if len(succ_blocks) > 0:
for block in succ_blocks:
yield self._name_of_block[block]
else:
yield self._start_name
| JQIamo/artiq | artiq/compiler/analyses/domination.py | Python | lgpl-3.0 | 4,947 |
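# Illustrative sketch (not part of the dataset row above): exercising
# DominatorTree on a diamond CFG. Block and Function are hypothetical
# stand-ins providing exactly the interface the analysis uses (entry(),
# basic_blocks, successors(), predecessors()).
class Block:
    def __init__(self, name):
        self.name, self.succs, self.preds = name, [], []
    def successors(self):
        return self.succs
    def predecessors(self):
        return self.preds
class Function:
    def __init__(self, entry, blocks):
        self._entry, self.basic_blocks = entry, blocks
    def entry(self):
        return self._entry
# Diamond: a -> b, a -> c, b -> d, c -> d. Neither b nor c dominates d,
# so d's immediate dominator is a.
a, b, c, d = (Block(n) for n in "abcd")
for src, dst in [(a, b), (a, c), (b, d), (c, d)]:
    src.succs.append(dst)
    dst.preds.append(src)
tree = DominatorTree(Function(a, [a, b, c, d]))
assert tree.immediate_dominator(d) is a
assert list(tree.dominators(d)) == [d, a]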
from __future__ import division
import numpy as np
import tensorflow as tf
from cost_functions.huber_loss import huber_loss
from data_providers.data_provider_32_price_history_autoencoder import PriceHistoryAutoEncDataProvider
from interfaces.neural_net_model_interface import NeuralNetModelInterface
from mylibs.batch_norm import BatchNormer, batchNormWrapper, fully_connected_layer_with_batch_norm_and_l2, \
fully_connected_layer_with_batch_norm
from mylibs.jupyter_notebook_helper import DynStats, getRunTime
from tensorflow.contrib import rnn
from collections import OrderedDict
from mylibs.py_helper import merge_dicts
from mylibs.tf_helper import generate_weights_var, fully_connected_layer
from os import system
from fastdtw import fastdtw
from matplotlib import pyplot as plt
from plotter.price_hist import renderRandomMultipleTargetsVsPredictions
class PriceHistoryAutoencoder(NeuralNetModelInterface):
"""
NECESSARY FOR MULTIPLE SEQS:
- Make it with dynamic inputs
IDEAS FOR IMPROVEMENT:
0) introduce extra layers
1) Add the mobile attributes per instance
2) MAKE OUTPUT BE DEPENDED ON PREVIOUS OUTPUT
3) use EOS
4) Add dropout
*) Make also input be depende on previous input ??
"""
DATE_FEATURE_LEN = 6
INPUT_FEATURE_LEN = DATE_FEATURE_LEN + 1
TS_INPUT_IND = 0 # if feature len is multi
TARGET_FEATURE_LEN = 1
ADAM_DEFAULT_LEARNING_RATE = 1e-3
SEED = 16011984
DEFAULT_KEEP_PROB = 1.
DEFAULT_LAMDA2 = 0.
DEFAULT_ARR_LAMDA2 = [DEFAULT_LAMDA2] * 3
BATCH_NORM_ENABLED_BY_DEFAULT = True
class DECODER_FIRST_INPUT(object):
PREVIOUS_INPUT = "PREVIOUS_INPUT"
ZEROS = "ZEROS"
def __init__(self, rng, dtype, config):
super(PriceHistoryAutoencoder, self).__init__()
self.rng = rng
self.dtype = dtype
self.config = config
self.train_data = None
self.valid_data = None
self.init = None
self.error = None
self.inputs = None
self.predictions = None
self.train_step = None
self.is_training = None
self.decoder_extra_inputs = None
self.keep_prob_rnn_out = None
self.keep_prob_readout = None
self.twod = None
@staticmethod
def DEFAULT_ACTIVATION_RNN():
return tf.nn.tanh # tf.nn.elu
def run(self, npz_path, epochs, batch_size, enc_num_units, dec_num_units, ts_len,
#decoder_first_input=DECODER_FIRST_INPUT.ZEROS,
learning_rate=ADAM_DEFAULT_LEARNING_RATE,
preds_gather_enabled=True,
):
graph = self.getGraph(batch_size=batch_size, verbose=False, enc_num_units=enc_num_units,
dec_num_units=dec_num_units, ts_len=ts_len,
learning_rate=learning_rate)
# input_keep_prob=input_keep_prob, hidden_keep_prob=hidden_keep_prob,
train_data = PriceHistoryAutoEncDataProvider(npz_path=npz_path, batch_size=batch_size, rng=self.rng,
which_set='train', ts_max_len=ts_len)
# during cross validation we execute our experiment multiple times and we get a score at the end
# so this means that we need to retrain the model one final time in order to output the predictions
# from this training procedure
preds_dp = PriceHistoryAutoEncDataProvider(npz_path=npz_path, batch_size=batch_size, rng=self.rng,
shuffle_order=False,
which_set='test',
ts_max_len=ts_len,
) if preds_gather_enabled else None
self.__print_hyperparams(learning_rate=learning_rate, epochs=epochs, enc_num_units=enc_num_units,
dec_num_units=dec_num_units)
return self.train_validate(train_data=train_data, valid_data=None, graph=graph, epochs=epochs,
preds_gather_enabled=preds_gather_enabled, preds_dp=preds_dp,
batch_size=batch_size)
def train_validate(self, train_data, valid_data, **kwargs):
graph = kwargs['graph']
epochs = kwargs['epochs']
batch_size = kwargs['batch_size']
verbose = kwargs['verbose'] if 'verbose' in kwargs.keys() else True
preds_dp = kwargs['preds_dp'] if 'preds_dp' in kwargs.keys() else None
preds_gather_enabled = kwargs['preds_gather_enabled'] if 'preds_gather_enabled' in kwargs.keys() else True
test_error = None
preds_dict = None
with tf.Session(graph=graph, config=self.config) as sess:
sess.run(self.init) # sess.run(tf.initialize_all_variables())
dynStats = DynStats(validation=valid_data is not None)
for epoch in range(epochs):
train_error, runTime = getRunTime(
lambda:
self.trainEpoch(
sess=sess,
data_provider=train_data,
extraFeedDict={
self.is_training: True,
}
)
)
if np.isnan(train_error):
raise Exception('do something with your learning rate because it is extremely high')
if valid_data is None:
if verbose:
# print 'EndEpoch%02d(%.3f secs):err(train)=%.4f,acc(train)=%.2f,err(valid)=%.2f,acc(valid)=%.2f, ' % \
# (epoch + 1, runTime, train_error, train_accuracy, valid_error, valid_accuracy)
print 'End Epoch %02d (%.3f secs): err(train) = %.6f' % (
epoch + 1, runTime, train_error)
dynStats.gatherStats(train_error=train_error)
else:
# if (epoch + 1) % 1 == 0:
valid_error = self.validateEpoch(
sess=sess,
data_provider=valid_data,
extraFeedDict={self.is_training: False},
)
if np.isnan(valid_error):
raise Exception('do something with your learning rate because it is extremely high')
if verbose:
print 'End Epoch %02d (%.3f secs): err(train) = %.6f, err(valid)=%.6f' % (
epoch + 1, runTime, train_error, valid_error)
dynStats.gatherStats(train_error=train_error, valid_error=valid_error)
preds_dict, test_error = self.getPredictions(batch_size=batch_size, data_provider=preds_dp,
sess=sess) if preds_gather_enabled else (None, None)
if verbose:
if preds_gather_enabled:
print "total test error: {}".format(test_error)
print
if preds_gather_enabled:
return dynStats, preds_dict, preds_dp.get_targets_dict()
else:
return dynStats
def train_predict(self, npz_path,
num_units, epochs, batch_size, ts_len, rnn_hidden_dim,
plotting=False,
decoder_first_input=DECODER_FIRST_INPUT.ZEROS,
lamda2=DEFAULT_ARR_LAMDA2,
keep_prob_rnn_out=DEFAULT_KEEP_PROB,
keep_prob_readout=DEFAULT_KEEP_PROB,
learning_rate=ADAM_DEFAULT_LEARNING_RATE,
verbose=True):
"""WE NEED TO FIX THIS, BEFORE USING. IT HAS NEVER BEEN TESTED. IT IS COPY PASTE FROM ANOTHER MODEL"""
graph = self.getGraph(batch_size=batch_size, verbose=False, enc_num_units=num_units,
dec_num_units=num_units, ts_len=ts_len,
learning_rate=learning_rate)
# input_keep_prob=input_keep_prob, hidden_keep_prob=hidden_keep_prob,
train_data = PriceHistoryAutoEncDataProvider(npz_path=npz_path, batch_size=batch_size, rng=self.rng,
which_set='train')
# during cross validation we execute our experiment multiple times and we get a score at the end
# so this means that we need to retrain the model one final time in order to output the predictions
# from this training procedure
preds_dp = PriceHistoryAutoEncDataProvider(npz_path=npz_path, batch_size=batch_size, rng=self.rng,
shuffle_order=False,
which_set='test',
)
self.__print_hyperparams(learning_rate=learning_rate, epochs=epochs, keep_prob_rnn_out=keep_prob_rnn_out,
keep_prob_readout=keep_prob_readout, lamda2=lamda2, enc_num_units=num_units,
dec_num_units=num_units)
test_error = None
preds_dict = None
with tf.Session(graph=graph, config=self.config) as sess:
sess.run(self.init) # sess.run(tf.initialize_all_variables())
dyn_stats = DynStats()
for epoch in range(epochs):
train_error, runTime = getRunTime(
lambda:
self.trainEpoch(
sess=sess,
data_provider=train_data,
extraFeedDict={
self.is_training: True,
# self.keep_prob_rnn_out: keep_prob_rnn_out,
# self.keep_prob_readout: keep_prob_readout,
}
)
)
if np.isnan(train_error):
raise Exception('do something with your learning rate because it is extremely high')
if verbose:
# print 'EndEpoch%02d(%.3f secs):err(train)=%.4f,acc(train)=%.2f,err(valid)=%.2f,acc(valid)=%.2f, ' % \
# (epoch + 1, runTime, train_error, train_accuracy, valid_error, valid_accuracy)
print 'End Epoch %02d (%.3f secs): err(train) = %.6f' % (
epoch + 1, runTime, train_error)
dyn_stats.gatherStats(train_error=train_error)
if verbose:
preds_dict, test_error = self.getPredictions(batch_size=batch_size, data_provider=preds_dp,
sess=sess)
                targets = preds_dp.targets  # recall that inputs and targets are identical for an autoencoder
dtw_scores = [fastdtw(targets[ind], preds_dict[ind])[0] for ind in range(len(targets))]
print "cur dtw score: {}".format(np.mean(dtw_scores))
if plotting:
renderRandomMultipleTargetsVsPredictions(targets=targets, inputs=preds_dp.inputs,
preds=preds_dict.values())
plt.show()
if verbose:
print "total test error: {}".format(test_error)
print
return dyn_stats, preds_dict, preds_dp.get_targets_dict()
def getGraph(self,
batch_size,
enc_num_units,
dec_num_units,
ts_len,
learning_rate=ADAM_DEFAULT_LEARNING_RATE, # default of Adam is 1e-3
verbose=True):
# momentum = 0.5
# tf.reset_default_graph() #kind of redundant statement
graph = tf.Graph() # create new graph
with graph.as_default():
with tf.name_scope('parameters'):
self.is_training = tf.placeholder(tf.bool, name="is_training")
with tf.name_scope('data'):
inputs = tf.placeholder(dtype=self.dtype,
shape=(batch_size, ts_len, self.INPUT_FEATURE_LEN), name="inputs")
targets = inputs[:, :, self.TS_INPUT_IND]
decoder_extra_inputs = tf.placeholder(dtype=self.dtype,
shape=(batch_size, ts_len, self.DATE_FEATURE_LEN),
name="decoder_extra_inputs")
self.decoder_extra_inputs = decoder_extra_inputs
if verbose:
print "targets"
print targets
print
with tf.name_scope('inputs'):
# unpack matrix into 1 dim array
inputs_series = tf.unstack(inputs, axis=1)
if verbose:
print len(inputs_series)
print inputs_series[0] # shape: (batch_size, 1+6)
print
with tf.name_scope('encoder_rnn_layer'):
# don't really care for encoder outputs, but only for its final state
# the encoder consumes all the input to get a sense of the trend of price history
_, encoder_final_state = rnn.static_rnn(
cell=tf.contrib.rnn.GRUCell(num_units=enc_num_units, activation=self.DEFAULT_ACTIVATION_RNN()),
# cell=tf.contrib.rnn.GRUCell(num_units=enc_num_units),
inputs=inputs_series,
initial_state=None,
dtype=self.dtype
)
if verbose:
print encoder_final_state
print
with tf.name_scope('encoder_state_out_process'):
# don't really care for encoder outputs, but only for its final state
# the encoder consumes all the input to get a sense of the trend of price history
# fully_connected_layer_with_batch_norm_and_l2(fcId='encoder_state_out_process',
# inputs=encoder_final_state,
# input_dim=enc_num_units, output_dim=2,
# is_training=self.is_training, lamda2=0)
ww_enc_out = generate_weights_var(ww_id='encoder_state_out_process', input_dim=enc_num_units,
output_dim=2,
dtype=self.dtype)
nonlinearity = tf.nn.elu
avoidDeadNeurons = 0.1 if nonlinearity == tf.nn.relu else 0. # prevent zero when relu
bb_enc_out = tf.Variable(avoidDeadNeurons * tf.ones([2]),
name='biases_{}'.format('encoder_state_out_process'))
# out_affine = tf.matmul(inputs, weights) + biases
affine_enc_out = tf.add(tf.matmul(encoder_final_state, ww_enc_out), bb_enc_out)
self.twod = affine_enc_out
batchNorm = batchNormWrapper('encoder_state_out_process', affine_enc_out, self.is_training)
nonlinear_enc_out = nonlinearity(batchNorm)
if verbose:
print nonlinear_enc_out
print
with tf.name_scope('decoder_state_in_process'):
dec_init_state = fully_connected_layer_with_batch_norm(fcId='decoder_state_in_process',
inputs=nonlinear_enc_out,
input_dim=2, output_dim=dec_num_units,
is_training=self.is_training,
nonlinearity=tf.nn.elu)
if verbose:
print dec_init_state
print
with tf.name_scope('dec_extra_ins'):
# unpack matrix
dec_extra_inputs_series = tf.unstack(decoder_extra_inputs, axis=1)
if verbose:
print len(dec_extra_inputs_series)
print dec_extra_inputs_series[0] # shape: (batch_size, 6) #only date info for the time being
print
with tf.variable_scope('decoder_rnn_layer'):
decoder_outputs, decoder_final_state = rnn.static_rnn(
# cell=tf.contrib.rnn.GRUCell(num_units=dec_num_units, activation=self.DEFAULT_ACTIVATION_RNN),
cell=tf.contrib.rnn.GRUCell(num_units=dec_num_units, activation=self.DEFAULT_ACTIVATION_RNN()),
inputs=dec_extra_inputs_series,
initial_state=dec_init_state,
dtype=self.dtype
)
if verbose:
print "decoder_outputs len: {}".format(len(decoder_outputs))
print decoder_outputs[0]
print
with tf.name_scope('decoder_outs'):
stacked_dec_outs = tf.stack(decoder_outputs, axis=1)
flattened_dec_outs = tf.reshape(stacked_dec_outs, shape=(-1, dec_num_units))
if verbose:
print stacked_dec_outs
print flattened_dec_outs
print
with tf.name_scope('readout_affine'):
processed_dec_outs = fully_connected_layer(inputs=flattened_dec_outs,
input_dim=dec_num_units,
output_dim=self.TARGET_FEATURE_LEN,
nonlinearity=tf.identity)
outputs = tf.reshape(processed_dec_outs, shape=(batch_size, ts_len))
if verbose:
print processed_dec_outs
print outputs
print
with tf.name_scope('error'):
losses = huber_loss(y_true=targets, y_pred=outputs) # both have shape: (batch_size, target_len)
if verbose:
print losses
print
loss = tf.reduce_mean(losses)
error = loss
if verbose:
print loss
print error
print
with tf.name_scope('training_step'):
train_step = tf.train.AdamOptimizer(learning_rate=learning_rate).minimize(loss)
init = tf.global_variables_initializer()
self.init = init
self.inputs = inputs
self.error = error
self.train_step = train_step
self.predictions = outputs
return graph
def getPredictions(self, sess, data_provider, batch_size, extraFeedDict=None):
if extraFeedDict is None:
extraFeedDict = {}
        assert data_provider.data_len % batch_size == 0  # the data provider cannot serve a final partial batch
total_error = 0.
instances_order = data_provider.current_order
target_len = data_provider.targets.shape[1]
all_predictions = np.zeros(shape=(data_provider.data_len, target_len))
for inst_ind, (input_batch, dec_extra_ins) in enumerate(data_provider):
cur_error, cur_preds = sess.run(
[self.error, self.predictions],
feed_dict=merge_dicts({self.inputs: input_batch,
self.decoder_extra_inputs: dec_extra_ins,
self.is_training: False,
}, extraFeedDict))
assert np.all(instances_order == data_provider.current_order)
all_predictions[inst_ind * batch_size: (inst_ind + 1) * batch_size, :] = cur_preds
total_error += cur_error
total_error /= data_provider.num_batches
if np.any(all_predictions == 0):
print "all predictions are expected to be something else than absolute zero".upper()
system('play --no-show-progress --null --channels 1 synth {} sine {}'.format(0.5, 800))
# assert np.all(all_predictions != 0), "all predictions are expected to be something else than absolute zero"
preds_dict = OrderedDict(zip(instances_order, all_predictions))
return preds_dict, total_error
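    # Note: merge_dicts is imported from a project helper module not shown
    # here. A minimal equivalent (an assumption, not the project's actual
    # code) would be:
    #
    #   def merge_dicts(*dicts):
    #       merged = {}
    #       for d in dicts:
    #           merged.update(d)
    #       return merged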
def validateEpoch(self, sess, data_provider, extraFeedDict=None):
if extraFeedDict is None:
extraFeedDict = {}
total_error = 0.
num_batches = data_provider.num_batches
for step, (input_batch, dec_extra_ins) in enumerate(data_provider):
feed_dic = merge_dicts({self.inputs: input_batch,
self.decoder_extra_inputs: dec_extra_ins,
}, extraFeedDict)
batch_error = sess.run(self.error, feed_dict=feed_dic)
total_error += batch_error
total_error /= num_batches
return total_error
def trainEpoch(self, sess, data_provider, extraFeedDict=None):
if extraFeedDict is None:
extraFeedDict = {}
train_error = 0.
num_batches = data_provider.num_batches
for step, (input_batch, dec_extra_ins) in enumerate(data_provider):
feed_dic = merge_dicts({self.inputs: input_batch,
self.decoder_extra_inputs: dec_extra_ins,
}, extraFeedDict)
_, batch_error = sess.run([self.train_step, self.error], feed_dict=feed_dic)
train_error += batch_error
train_error /= num_batches
return train_error
@staticmethod
def __print_hyperparams(**kwargs):
for key in kwargs:
print "{}: {}".format(key, kwargs[key])
| pligor/predicting-future-product-prices | 04_time_series_prediction/models/model_32_price_history_autoencoder.py | Python | agpl-3.0 | 22,093 |
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^', include('blog.urls')),
# Examples:
# url(r'^$', 'myblog.views.home', name='home'),
# url(r'^myblog/', include('myblog.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
url(r'^comments/', include('django.contrib.comments.urls')),
)
| elover/python-django-blog | myblog/myblog/urls.py | Python | mit | 847 |
import glob
import importlib
import settings
import os
import sys
def import_all(name, object_name):
    """Import every module in the package directory `name` and collect the
    module-level attribute `object_name` from each one (skipping __init__)."""
modules = glob.glob(os.path.join(settings.BASE_DIR, name) + "/*.py")
all_objects = []
for module in modules:
module_name = os.path.basename(module)[:-3]
if module_name == "__init__":
continue
importlib.import_module("{}.{}".format(name, module_name))
all_objects.append(getattr(sys.modules["{}.{}".format(name, module_name)], object_name))
return all_objects
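# Example usage (a sketch; assumes a hypothetical "plugins" package whose
# modules each define a module-level object named COMMAND):
#
#   all_commands = import_all("plugins", "COMMAND")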
| ovkulkarni/hangoutsbot | utils/imports.py | Python | mit | 522 |
# -*- coding: utf-8 -*-
import time
def log(func):
def wrapper(*args, **kw):
print('call %s():' % func.__name__)
return func(*args, **kw)
return wrapper
def exe_time(func):
def wrapper(*args, **args2):
t0 = time.time()
print("@%s, {%s} start" % (time.strftime("%X", time.localtime()), func.__name__))
back = func(*args, **args2)
print("@%s, {%s} end" % (time.strftime("%X", time.localtime()), func.__name__))
print("@%.3fs taken for {%s}" % (time.time() - t0, func.__name__))
return back
return wrapper
if __name__ == '__main__':
@exe_time
def sleeptime():
time.sleep(3)
sleeptime()
| jtr109/Alpha2kindle | utils/timer.py | Python | mit | 692 |
# -*- coding: utf-8 -*-
# Copyright (C) 2012-2013 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Reflection module.
.. versionadded:: 1.1
"""
import inspect
import logging
import operator
import types
try:
_TYPE_TYPE = types.TypeType
except AttributeError:
_TYPE_TYPE = type
# See: https://docs.python.org/2/library/__builtin__.html#module-__builtin__
# and see https://docs.python.org/2/reference/executionmodel.html (and likely
# others)...
_BUILTIN_MODULES = ('builtins', '__builtin__', '__builtins__', 'exceptions')
LOG = logging.getLogger(__name__)
Parameter = inspect.Parameter
Signature = inspect.Signature
get_signature = inspect.signature
def get_members(obj, exclude_hidden=True):
"""Yields the members of an object, filtering by hidden/not hidden.
.. versionadded:: 2.3
"""
for (name, value) in inspect.getmembers(obj):
if name.startswith("_") and exclude_hidden:
continue
yield (name, value)
def get_member_names(obj, exclude_hidden=True):
"""Get all the member names for a object."""
return [name for (name, _obj) in
get_members(obj, exclude_hidden=exclude_hidden)]
def get_class_name(obj, fully_qualified=True, truncate_builtins=True):
"""Get class name for object.
If object is a type, returns name of the type. If object is a bound
method or a class method, returns its ``self`` object's class name.
If object is an instance of class, returns instance's class name.
Else, name of the type of the object is returned. If fully_qualified
is True, returns fully qualified name of the type. For builtin types,
    just the name is returned. A TypeError is raised if the class name cannot
    be determined.
"""
if inspect.isfunction(obj):
raise TypeError("Can't get class name.")
if inspect.ismethod(obj):
obj = get_method_self(obj)
if not isinstance(obj, type):
obj = type(obj)
if truncate_builtins:
try:
built_in = obj.__module__ in _BUILTIN_MODULES
except AttributeError: # nosec
pass
else:
if built_in:
return obj.__name__
if fully_qualified and hasattr(obj, '__module__'):
return '%s.%s' % (obj.__module__, obj.__name__)
else:
return obj.__name__
def get_all_class_names(obj, up_to=object,
fully_qualified=True, truncate_builtins=True):
"""Get class names of object parent classes.
Iterate over all class names object is instance or subclass of,
in order of method resolution (mro). If up_to parameter is provided,
    only names of classes that are subclasses of that class are returned.
"""
if not isinstance(obj, type):
obj = type(obj)
for cls in obj.mro():
if issubclass(cls, up_to):
yield get_class_name(cls,
fully_qualified=fully_qualified,
truncate_builtins=truncate_builtins)
def get_callable_name(function):
"""Generate a name from callable.
Tries to do the best to guess fully qualified callable name.
"""
method_self = get_method_self(function)
if method_self is not None:
# This is a bound method.
if isinstance(method_self, type):
# This is a bound class method.
im_class = method_self
else:
im_class = type(method_self)
try:
parts = (im_class.__module__, function.__qualname__)
except AttributeError:
parts = (im_class.__module__, im_class.__name__, function.__name__)
elif inspect.ismethod(function) or inspect.isfunction(function):
        # This could be a function, a static method, an unbound method...
try:
parts = (function.__module__, function.__qualname__)
except AttributeError:
if hasattr(function, 'im_class'):
                # This is an unbound method, which exists only in Python 2.x
im_class = function.im_class
parts = (im_class.__module__,
im_class.__name__, function.__name__)
else:
parts = (function.__module__, function.__name__)
else:
im_class = type(function)
if im_class is _TYPE_TYPE:
im_class = function
try:
parts = (im_class.__module__, im_class.__qualname__)
except AttributeError:
parts = (im_class.__module__, im_class.__name__)
return '.'.join(parts)
def get_method_self(method):
"""Gets the ``self`` object attached to this method (or none)."""
if not inspect.ismethod(method):
return None
try:
return operator.attrgetter("__self__")(method)
except AttributeError:
return None
def is_same_callback(callback1, callback2, strict=True):
"""Returns if the two callbacks are the same.
    The 'strict' arg has no meaning from Python 3.8 onwards; there the
    equality of two bound methods is decided solely by comparing their
    '__self__' objects.
"""
if callback1 is callback2:
# This happens when plain methods are given (or static/non-bound
# methods).
return True
if callback1 == callback2:
# NOTE(gmann): python3.8 onward, comparison of bound methods is
# changed. It no longer decide the bound method's equality based
# on their bounded objects equality instead it checks the identity
# of their '__self__'. So python3.8 onward, two different bound
# methods are no longer equal even __eq__ method return True.
# Or in other term, 'strict' arg has no meaning from python 3.8
# onwards above if condition never satisfy if both callback are
# bounded to two different objects.
# For backward compatibility for python <3.8, we can keep the 'strict'
# arg and the below code of comparing 'self' and once minimum
# supported python version is 3.8 we can remove both because python
# 3.8 onward == operator itself checks identity of 'self'.
# Ref bug: https://bugs.launchpad.net/oslo.utils/+bug/1841072
if not strict:
LOG.warning('"strict" arg is deprecated because it no '
'longer work for python 3.8 onwards')
return True
# Until python 3.7, two bound methods are equal if functions
# themselves are equal and objects they are applied to are equal.
# This means that a bound method could be the same bound method on
# another object if the objects have __eq__ methods that return true
# (when in fact it is a different bound method). Python u so crazy!
try:
self1 = operator.attrgetter("__self__")(callback1)
self2 = operator.attrgetter("__self__")(callback2)
return self1 is self2
except AttributeError: # nosec
pass
return False
def is_bound_method(method):
"""Returns if the given method is bound to an object."""
return get_method_self(method) is not None
def is_subclass(obj, cls):
"""Returns if the object is class and it is subclass of a given class."""
return inspect.isclass(obj) and issubclass(obj, cls)
def get_callable_args(function, required_only=False):
"""Get names of callable arguments.
Special arguments (like ``*args`` and ``**kwargs``) are not included into
output.
If required_only is True, optional arguments (with default values)
are not included into output.
"""
sig = get_signature(function)
function_args = list(sig.parameters.keys())
for param_name, p in sig.parameters.items():
if (p.kind in (Parameter.VAR_POSITIONAL, Parameter.VAR_KEYWORD) or
(required_only and p.default is not Parameter.empty)):
function_args.remove(param_name)
return function_args
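# Illustrative example of get_callable_args (an assumption for clarity, not
# taken from the library's docs):
#
#   def f(a, b=1, *args, **kwargs):
#       pass
#
#   get_callable_args(f)                      # -> ['a', 'b']
#   get_callable_args(f, required_only=True)  # -> ['a']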
def accepts_kwargs(function):
"""Returns ``True`` if function accepts kwargs otherwise ``False``."""
sig = get_signature(function)
return any(
p.kind == Parameter.VAR_KEYWORD for p in sig.parameters.values()
)
| openstack/oslo.utils | oslo_utils/reflection.py | Python | apache-2.0 | 8,707 |
from .common import *
ENVIRONMENT = 'development'
DEBUG = True
TEMPLATE_DEBUG = True
INSTALLED_APPS += (
'debug_toolbar',
)
MIDDLEWARE_CLASSES += (
'debug_toolbar.middleware.DebugToolbarMiddleware',
)
from fnmatch import fnmatch
class glob_list(list):
def __contains__(self, key):
for elt in self:
if fnmatch(key, elt): return True
return False
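# Illustrative behaviour of the glob matching (not executed here):
#   '10.0.3.7' in glob_list(['127.0.0.1', '10.0.*.*'])     # -> True
#   '192.168.1.1' in glob_list(['127.0.0.1', '10.0.*.*'])  # -> False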
INTERNAL_IPS = glob_list(['127.0.0.1', '10.0.*.*'])
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
DEBUG_TOOLBAR_PATCH_SETTINGS = False | gunnery/gunnery | gunnery/gunnery/settings/development.py | Python | apache-2.0 | 549 |
# Use Netmiko to enter into configuration mode on pynet-rtr2.
# Also use Netmiko to verify your state (i.e. that you are currently in configuration mode).
from getpass import getpass
import time
from netmiko import ConnectHandler
password = getpass()
pynet_rtr2 = {'device_type': 'cisco_ios', 'ip': '50.76.53.27', 'username': 'pyclass', 'password': password, 'port': 8022}
ssh_connection = ConnectHandler(**pynet_rtr2)
time.sleep(2)
ssh_connection.config_mode()
output = ssh_connection.find_prompt()
print "The current state of the prompt is %s" % output
| linkdebian/pynet_course | class4/exercise5.py | Python | apache-2.0 | 563 |
# (C) British Crown Copyright 2010 - 2016, Met Office
#
# This file is part of Iris.
#
# Iris is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Iris is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Iris. If not, see <http://www.gnu.org/licenses/>.
"""
Interpolation and re-gridding routines.
The core definitions of the now deprecated 'iris.analysis.interpolate', with
added deprecation wrappers.
These contents are exposed as 'iris.analysis.interpolate', which is
automatically available when 'iris.analysis' is imported.
This is provided *only* because removing the automatic import broke some user
code -- even though reliance on automatic imports is accepted bad practice.
The "real" module 'iris.analysis.interpolate' can also be explicitly
imported, and provides exactly the same definitions.
The only difference is that the explicit import *itself* emits a deprecation
warning.
"""
from __future__ import (absolute_import, division, print_function)
from six.moves import (filter, input, map, range, zip) # noqa
import six
import collections
from functools import wraps
import numpy as np
import scipy
import scipy.spatial
from scipy.interpolate.interpolate import interp1d
from iris._deprecation import (warn_deprecated as iris_warn_deprecated,
ClassWrapperSameDocstring)
from iris.analysis import Linear
import iris.cube
import iris.coord_systems
import iris.coords
import iris.exceptions
from . import _interpolate_private as oldinterp
_INTERPOLATE_DEPRECATION_WARNING = \
"The module 'iris.analysis.interpolate' is deprecated."
# Define a common callpoint for deprecation warnings.
def _warn_deprecated(msg=None):
if msg is None:
msg = _INTERPOLATE_DEPRECATION_WARNING
iris_warn_deprecated(msg)
def nearest_neighbour_indices(cube, sample_points):
msg = (_INTERPOLATE_DEPRECATION_WARNING + '\n' +
'Please replace usage of '
'iris.analysis.interpolate.nearest_neighbour_indices() '
'with iris.coords.Coord.nearest_neighbour_index()).')
_warn_deprecated(msg)
return oldinterp.nearest_neighbour_indices(cube, sample_points)
nearest_neighbour_indices.__doc__ = oldinterp.nearest_neighbour_indices.__doc__
def extract_nearest_neighbour(cube, sample_points):
msg = (_INTERPOLATE_DEPRECATION_WARNING + '\n' +
'Please replace usage of '
'iris.analysis.interpolate.extract_nearest_neighbour() with '
'iris.cube.Cube.interpolate(..., scheme=iris.analysis.Nearest()).')
_warn_deprecated(msg)
return oldinterp.extract_nearest_neighbour(cube, sample_points)
extract_nearest_neighbour.__doc__ = oldinterp.extract_nearest_neighbour.__doc__
def nearest_neighbour_data_value(cube, sample_points):
msg = (_INTERPOLATE_DEPRECATION_WARNING + '\n' +
'Please replace usage of '
'iris.analysis.interpolate.nearest_neighbour_data_value() with '
'iris.cube.Cube.interpolate(..., scheme=iris.analysis.Nearest()).')
_warn_deprecated(msg)
return oldinterp.nearest_neighbour_data_value(cube, sample_points)
nearest_neighbour_data_value.__doc__ = \
oldinterp.nearest_neighbour_data_value.__doc__
def regrid(source_cube, grid_cube, mode='bilinear', **kwargs):
msg = (_INTERPOLATE_DEPRECATION_WARNING + '\n' +
'Please replace usage of iris.analysis.interpolate.regrid() '
'with iris.cube.Cube.regrid().')
_warn_deprecated(msg)
return oldinterp.regrid(source_cube, grid_cube, mode=mode, **kwargs)
regrid.__doc__ = oldinterp.regrid.__doc__
def regrid_to_max_resolution(cubes, **kwargs):
msg = (_INTERPOLATE_DEPRECATION_WARNING + '\n' +
'Please replace usage of '
'iris.analysis.interpolate.regrid_to_max_resolution() '
'with iris.cube.Cube.regrid().')
_warn_deprecated(msg)
return oldinterp.regrid_to_max_resolution(cubes, **kwargs)
regrid_to_max_resolution.__doc__ = oldinterp.regrid_to_max_resolution.__doc__
def linear(cube, sample_points, extrapolation_mode='linear'):
msg = (_INTERPOLATE_DEPRECATION_WARNING + '\n' +
'Please replace usage of iris.analysis.interpolate.linear() with '
'iris.cube.Cube.interpolate(..., scheme=iris.analysis.Linear()).')
_warn_deprecated(msg)
return oldinterp.linear(cube, sample_points,
extrapolation_mode=extrapolation_mode)
linear.__doc__ = oldinterp.linear.__doc__
class Linear1dExtrapolator(six.with_metaclass(ClassWrapperSameDocstring,
oldinterp.Linear1dExtrapolator)):
@wraps(oldinterp.Linear1dExtrapolator.__init__)
def __init__(self, interpolator):
_warn_deprecated()
super(Linear1dExtrapolator, self).__init__(interpolator)
| jswanljung/iris | lib/iris/analysis/_interpolate_backdoor.py | Python | lgpl-3.0 | 5,248 |
##===-- statuswin.py -----------------------------------------*- Python -*-===##
##
# Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
# See https://llvm.org/LICENSE.txt for license information.
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
##
##===----------------------------------------------------------------------===##
import lldb
import lldbutil
import cui
import curses
class StatusWin(cui.TextWin):
def __init__(self, x, y, w, h):
super(StatusWin, self).__init__(x, y, w)
self.keys = [ # ('F1', 'Help', curses.KEY_F1),
('F3', 'Cycle-focus', curses.KEY_F3),
('F10', 'Quit', curses.KEY_F10)]
def draw(self):
self.win.addstr(0, 0, '')
for key in self.keys:
self.win.addstr('{0}'.format(key[0]), curses.A_REVERSE)
self.win.addstr(' {0} '.format(key[1]), curses.A_NORMAL)
super(StatusWin, self).draw()
def handleEvent(self, event):
if isinstance(event, int):
pass
elif isinstance(event, lldb.SBEvent):
if lldb.SBProcess.EventIsProcessEvent(event):
state = lldb.SBProcess.GetStateFromEvent(event)
status = lldbutil.state_type_to_str(state)
self.win.erase()
x = self.win.getmaxyx()[1] - len(status) - 1
self.win.addstr(0, x, status)
return
| apple/swift-lldb | utils/lui/statuswin.py | Python | apache-2.0 | 1,419 |
#!/usr/bin/env python
#
# Copyright (C) 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Commands for interacting with Google TaskQueue."""
__version__ = '0.0.1'
import os
import sys
import urlparse
from apiclient.discovery import build
from apiclient.errors import HttpError
from apiclient.anyjson import simplejson as json
import httplib2
from oauth2client.file import Storage
from oauth2client.client import OAuth2WebServerFlow
from oauth2client.tools import run
from google.apputils import app
from google.apputils import appcommands
import gflags as flags
FLAGS = flags.FLAGS
flags.DEFINE_string(
'service_version',
'v1beta1',
'Google taskqueue api version.')
flags.DEFINE_string(
'api_host',
'https://www.googleapis.com/',
'API host name')
flags.DEFINE_string(
'project_name',
'default',
'The name of the Taskqueue API project.')
flags.DEFINE_bool(
'use_developer_key',
False,
'User wants to use the developer key while accessing taskqueue apis')
flags.DEFINE_string(
'developer_key_file',
'~/.taskqueue.apikey',
'Developer key provisioned from api console')
flags.DEFINE_bool(
'dump_request',
False,
'Prints the outgoing HTTP request along with headers and body.')
flags.DEFINE_string(
'credentials_file',
'taskqueue.dat',
'File where you want to store the auth credentails for later user')
# Set up a Flow object to be used if we need to authenticate. This
# sample uses OAuth 2.0, and we set up the OAuth2WebServerFlow with
# the information it needs to authenticate. Note that it is called
# the Web Server Flow, but it can also handle the flow for native
# applications <http://code.google.com/apis/accounts/docs/OAuth2.html#IA>
# The client_id client_secret are copied from the Identity tab on
# the Google APIs Console <http://code.google.com/apis/console>
FLOW = OAuth2WebServerFlow(
client_id='157776985798.apps.googleusercontent.com',
client_secret='tlpVCmaS6yLjxnnPu0ARIhNw',
scope='https://www.googleapis.com/auth/taskqueue',
user_agent='taskqueue-cmdline-sample/1.0')
class GoogleTaskQueueCommandBase(appcommands.Cmd):
"""Base class for all the Google TaskQueue client commands."""
DEFAULT_PROJECT_PATH = 'projects/default'
def __init__(self, name, flag_values):
super(GoogleTaskQueueCommandBase, self).__init__(name, flag_values)
def _dump_request_wrapper(self, http):
"""Dumps the outgoing HTTP request if requested.
Args:
http: An instance of httplib2.Http or something that acts like it.
Returns:
httplib2.Http like object.
"""
request_orig = http.request
def new_request(uri, method='GET', body=None, headers=None,
redirections=httplib2.DEFAULT_MAX_REDIRECTS,
connection_type=None):
"""Overrides the http.request method to add some utilities."""
if (FLAGS.api_host + "discovery/" not in uri and
FLAGS.use_developer_key):
developer_key_path = os.path.expanduser(
FLAGS.developer_key_file)
if not os.path.isfile(developer_key_path):
                    print 'Please generate a developer key from the Google APIs ' \
                        'Console and store it in %s' % (FLAGS.developer_key_file)
sys.exit()
developer_key_file = open(developer_key_path, 'r')
try:
developer_key = developer_key_file.read().strip()
except IOError, io_error:
print 'Error loading developer key from file %s' % (
FLAGS.developer_key_file)
print 'Error details: %s' % str(io_error)
sys.exit()
finally:
developer_key_file.close()
s = urlparse.urlparse(uri)
query = 'key=' + developer_key
if s.query:
query = s.query + '&key=' + developer_key
d = urlparse.ParseResult(s.scheme,
s.netloc,
s.path,
s.params,
query,
s.fragment)
uri = urlparse.urlunparse(d)
if FLAGS.dump_request:
print '--request-start--'
print '%s %s' % (method, uri)
if headers:
for (h, v) in headers.iteritems():
print '%s: %s' % (h, v)
print ''
if body:
print json.dumps(json.loads(body), sort_keys=True, indent=2)
print '--request-end--'
return request_orig(uri,
method,
body,
headers,
redirections,
connection_type)
http.request = new_request
return http
def Run(self, argv):
"""Run the command, printing the result.
Args:
argv: The non-flag arguments to the command.
"""
if not FLAGS.project_name:
raise app.UsageError('You must specify a project name'
' using the "--project_name" flag.')
discovery_uri = (
FLAGS.api_host + 'discovery/v1/apis/{api}/{apiVersion}/rest')
try:
# If the Credentials don't exist or are invalid run through the
# native client flow. The Storage object will ensure that if
# successful the good Credentials will get written back to a file.
# Setting FLAGS.auth_local_webserver to false since we can run our
# tool on Virtual Machines and we do not want to run the webserver
# on VMs.
FLAGS.auth_local_webserver = False
storage = Storage(FLAGS.credentials_file)
credentials = storage.get()
if credentials is None or credentials.invalid == True:
credentials = run(FLOW, storage)
http = credentials.authorize(self._dump_request_wrapper(
httplib2.Http()))
api = build('taskqueue',
FLAGS.service_version,
http=http,
discoveryServiceUrl=discovery_uri)
result = self.run_with_api_and_flags_and_args(api, FLAGS, argv)
self.print_result(result)
except HttpError, http_error:
print 'Error Processing request: %s' % str(http_error)
def run_with_api_and_flags_and_args(self, api, flag_values, unused_argv):
"""Run the command given the API, flags, and args.
The default implementation of this method discards the args and
calls into run_with_api_and_flags.
Args:
api: The handle to the Google TaskQueue API.
flag_values: The parsed command flags.
unused_argv: The non-flag arguments to the command.
Returns:
The result of running the command
"""
return self.run_with_api_and_flags(api, flag_values)
def print_result(self, result):
"""Pretty-print the result of the command.
The default behavior is to dump a formatted JSON encoding
of the result.
Args:
result: The JSON-serializable result to print.
"""
# We could have used the pprint module, but it produces
# noisy output due to all of our keys and values being
# unicode strings rather than simply ascii.
print json.dumps(result, sort_keys=True, indent=2)
class GoogleTaskQueueCommand(GoogleTaskQueueCommandBase):
"""Base command for working with the taskqueues collection."""
def __init__(self, name, flag_values):
super(GoogleTaskQueueCommand, self).__init__(name, flag_values)
flags.DEFINE_string('taskqueue_name',
'myqueue',
'TaskQueue name',
flag_values=flag_values)
def run_with_api_and_flags(self, api, flag_values):
"""Run the command, returning the result.
Args:
api: The handle to the Google TaskQueue API.
flag_values: The parsed command flags.
Returns:
The result of running the command.
"""
taskqueue_request = self.build_request(api.taskqueues(), flag_values)
return taskqueue_request.execute()
class GoogleTaskCommand(GoogleTaskQueueCommandBase):
"""Base command for working with the tasks collection."""
def __init__(self, name, flag_values, need_task_flag=True):
super(GoogleTaskCommand, self).__init__(name, flag_values)
# Common flags that are shared by all the Task commands.
flags.DEFINE_string('taskqueue_name',
'myqueue',
'TaskQueue name',
flag_values=flag_values)
# Not all task commands need the task_name flag.
if need_task_flag:
flags.DEFINE_string('task_name',
None,
'Task name',
flag_values=flag_values)
def run_with_api_and_flags(self, api, flag_values):
"""Run the command, returning the result.
Args:
api: The handle to the Google TaskQueue API.
flag_values: The parsed command flags.
Returns:
The result of running the command.
"""
task_request = self.build_request(api.tasks(), flag_values)
return task_request.execute()
| MapofLife/MOL | earthengine/google-api-python-client/samples/gtaskqueue_sample/gtaskqueue/taskqueue_cmd_base.py | Python | bsd-3-clause | 10,637 |
import xbmc
import xbmcaddon
import xbmcgui
import xbmcplugin
import os
import os.path
import urlparse
addon = xbmcaddon.Addon()
addonname = addon.getAddonInfo('name')
script_file = os.path.realpath(__file__)
directory = os.path.dirname(script_file)
# check whether an action was passed on the command line
args = urlparse.parse_qs(sys.argv[2][1:])
command = args['com'][0] if 'com' in args else 'EXEC_ADDON'
# log_debug('command = "{0}"'.format(command))
if command == 'DOWNLOAD_THEMES':
xbmcgui.Dialog().ok("EmulationStation", "Downloading EmulationStation themes, please do not power off your device.")
os.system('kodi-send --action="xbmc.ActivateWindow(busydialog)"')
os.system("echo 'EmulationStation [ADDON] :: Downloading EmulationStation themes.' $(date) >> /storage/.kodi/temp/emulationstation.log")
os.system("mkdir -p /storage/.kodi/userdata/addon_data/game.emulationstation/themes")
os.system("echo 'EmulationStation [ADDON] :: Installing Carbon no-meta...' >> /storage/.kodi/temp/emulationstation.log")
os.system("wget --no-check-certificate -O /storage/.kodi/userdata/addon_data/game.emulationstation/carbon-nometa.zip https://github.com/RetroPie/es-theme-carbon-nometa/archive/master.zip")
os.system("unzip -o //storage/.kodi/userdata/addon_data/game.emulationstation/carbon-nometa.zip -d /storage/.kodi/userdata/addon_data/game.emulationstation/themes/")
os.system("rm //storage/.kodi/userdata/addon_data/game.emulationstation/carbon-nometa.zip")
os.system("echo 'EmulationStation [ADDON] :: Installing Carbon...' >> /storage/.kodi/temp/emulationstation.log")
os.system("wget --no-check-certificate -O /storage/.kodi/userdata/addon_data/game.emulationstation/carbon.zip https://github.com/RetroPie/es-theme-carbon/archive/master.zip")
os.system("unzip -o /storage/.kodi/userdata/addon_data/game.emulationstation/carbon.zip -d /storage/.kodi/userdata/addon_data/game.emulationstation/themes/")
os.system("rm /storage/.kodi/userdata/addon_data/game.emulationstation/carbon.zip")
os.system("echo 'EmulationStation [ADDON] :: Installing Pixel...' >> /storage/.kodi/temp/emulationstation.log")
os.system("wget --no-check-certificate -O /storage/.kodi/userdata/addon_data/game.emulationstation/pixel.zip https://github.com/RetroPie/es-theme-pixel/archive/master.zip")
os.system("unzip -o /storage/.kodi/userdata/addon_data/game.emulationstation/pixel.zip -d /storage/.kodi/userdata/addon_data/game.emulationstation/themes/")
os.system("rm /storage/.kodi/userdata/addon_data/game.emulationstation/pixel.zip")
os.system("echo 'EmulationStation [ADDON] :: Installing Turtle-pi...' >> /storage/.kodi/temp/emulationstation.log")
os.system("wget --no-check-certificate -O /storage/.kodi/userdata/addon_data/game.emulationstation/turtle.zip https://github.com/RetroPie/es-theme-turtle-pi/archive/master.zip")
os.system("unzip -o /storage/.kodi/userdata/addon_data/game.emulationstation/turtle.zip -d /storage/.kodi/userdata/addon_data/game.emulationstation/themes/")
os.system("rm /storage/.kodi/userdata/addon_data/game.emulationstation/turtle.zip")
os.system("echo 'EmulationStation [ADDON] :: Installing Canela...' >> /storage/.kodi/temp/emulationstation.log")
os.system("wget --no-check-certificate -O /storage/.kodi/userdata/addon_data/game.emulationstation/canela.zip https://github.com/RetroPie/es-theme-simplified-static-canela/archive/master.zip")
os.system("unzip -o /storage/.kodi/userdata/addon_data/game.emulationstation/canela.zip -d /storage/.kodi/userdata/addon_data/game.emulationstation/themes/")
os.system("rm /storage/.kodi/userdata/addon_data/game.emulationstation/canela.zip")
os.system("echo 'EmulationStation [ADDON] :: Installing clean look...' >> /storage/.kodi/temp/emulationstation.log")
os.system("wget --no-check-certificate -O /storage/.kodi/userdata/addon_data/game.emulationstation/clean.zip https://github.com/RetroPie/es-theme-clean-look/archive/master.zip")
os.system("unzip -o /storage/.kodi/userdata/addon_data/game.emulationstation/clean.zip -d /storage/.kodi/userdata/addon_data/game.emulationstation/themes/")
os.system("rm /storage/.kodi/userdata/addon_data/game.emulationstation/clean.zip")
os.system("echo 'EmulationStation [ADDON] :: Installing Tronkyfran...' >> /storage/.kodi/temp/emulationstation.log")
os.system("wget --no-check-certificate -O /storage/.kodi/userdata/addon_data/game.emulationstation/tronkyfran.zip https://github.com/HerbFargus/es-theme-tronkyfran/archive/master.zip")
os.system("unzip -o /storage/.kodi/userdata/addon_data/game.emulationstation/tronkyfran.zip -d /storage/.kodi/userdata/addon_data/game.emulationstation/themes/")
os.system("rm /storage/.kodi/userdata/addon_data/game.emulationstation/tronkyfran.zip")
os.system("echo 'EmulationStation [ADDON] :: EmulationStation Themes Installed.' >> /storage/.kodi/temp/emulationstation.log")
os.system('kodi-send --action="xbmc.Dialog.Close(busydialog)"')
xbmcgui.Dialog().ok("EmulationStation", "EmulationStation themes downloaded. Select your favourite after launching EmulationStation.")
elif command == 'UPDATE_SYSTEMS':
os.system("echo 'EmulationStation [ADDON] :: Updating es_systems.cfg file.' $(date) >> /storage/.kodi/temp/emulationstation.log")
import xml.etree.ElementTree as ET
# Writes a XML text tag line, indented 2 spaces by default.
# Both tag_name and tag_text must be Unicode strings.
# Returns an Unicode string.
#
def XML_text(tag_name, tag_text, num_spaces = 2):
if tag_text:
tag_text = text_escape_XML(tag_text)
line = '{0}<{1}>{2}</{3}>\n'.format(' ' * num_spaces, tag_name, tag_text, tag_name)
else:
# >> Empty tag
line = '{0}<{1} />\n'.format(' ' * num_spaces, tag_name)
return line
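    # Illustrative (hypothetical values): XML_text(u'name', u'nes') returns
    # u'  <name>nes</name>\n', and XML_text(u'path', u'') returns the empty
    # tag u'  <path />\n'.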
# Some XML encoding of special characters:
# {'\n': ' ', '\r': ' ', '\t':'	'}
#
# See http://stackoverflow.com/questions/1091945/what-characters-do-i-need-to-escape-in-xml-documents
# See https://wiki.python.org/moin/EscapingXml
# See https://github.com/python/cpython/blob/master/Lib/xml/sax/saxutils.py
# See http://stackoverflow.com/questions/2265966/xml-carriage-return-encoding
#
def text_escape_XML(data_str):
# Ampersand MUST BE replaced FIRST
data_str = data_str.replace('&', '&')
data_str = data_str.replace('>', '>')
data_str = data_str.replace('<', '<')
data_str = data_str.replace("'", ''')
data_str = data_str.replace('"', '"')
# --- Unprintable characters ---
data_str = data_str.replace('\n', ' ')
data_str = data_str.replace('\r', ' ')
data_str = data_str.replace('\t', '	')
return data_str
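    # Illustrative: text_escape_XML(u'R&D <v1>') -> u'R&amp;D &lt;v1&gt;'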
# archivo = directory.join('es_systems.xml')
os.system("rm /storage/.kodi/userdata/addon_data/game.emulationstation/es_systems.cfg")
archivo = "/storage/.kodi/userdata/addon_data/game.emulationstation/es_systems.cfg"
    ruta_roms = xbmcaddon.Addon(id='game.emulationstation').getSetting('romspath')
app_file= '/storage/.kodi/addons/game.retroarch/addon.start'
emus=[
["amiga","Commodore Amiga",".adf"],
["atari2600","Atari 2600",".a26 .bin .rom .zip .gz .A26 .BIN .ROM .ZIP .GZ"],
["atarilynx","Atari Lynx",".lnx .zip .LNX .ZIP"],
["gamegear","Game Gear",".gg .bin .sms .zip .GG .BIN .SMS .ZIP"],
["gba","Game Boy Advance",".gba .zip .GBA .ZIP"],
["gb","Game Boy/Game Boy Color",".gb .zip .GB .ZIP .gbc .GBC"],
["mame","MAME",".zip .ZIP"],
["fba","FBA",".zip .ZIP"],
["neogeo","NEOGEO",".zip .ZIP"],
["msx","MSX/MSX2",".rom .mx1 .mx2 .col .dsk .zip .ROM .MX1 .MX2 .COL .DSK .ZIP"],
["nes","Nintendo NES",".nes .zip .NES .ZIP .fds .FDS"],
["mastersystem","Sega Master System",".sms .bin .zip .SMS .BIN .ZIP"],
["snes","Super Nintendo",".bin .smc .sfc .fig .swc .mgd .zip .BIN .SMC .SFC .FIG .SWC .MGD .ZIP"],
["megadrive","MegaDrive/MegaCD",".smd .bin .gen .md .sg .zip .SMD .BIN .GEN .MD .SG .ZIP"],
["pcengine","PC Engine/PC Engine CD",".pce .cue .zip .PCE .CUE .ZIP"],
["psx","PlayStation",".bin .cue .cbn .img .iso .m3u .mdf .pbp .toc .z .znx .BIN .CUE .CBN .IMG .ISO .M3U .MDF .PBP .TOC .Z .ZNX .zip .ZIP"],
["n64","Nintendo 64",".z64 .n64 .v64 .Z64 .N64 .V64"],
["psp","PSP",".iso .pbp .cso .ISO .PBP .CSO"],
["zxspectrum","ZX Spectrum","sna .szx .z80 .tap .tzx .gz .udi .mgt .img .trd .scl .dsk .zip SNA .SZX .Z80 .TAP .TZX .GZ .UDI .MGT .IMG .TRD .SCL .DSK .ZIP"],
["videopac","Philips Videopac",".bin .zip .BIN .ZIP"],
["ports","PC Games",".sh .SH"],
["scummvm","ScummVM",".sh .SH"],
["saturn","Sega Saturn",".bin .cue .iso"],
["wonderswan","Wonderswan",".ws .wsc .zip .ZIP"],
["virtualboy","Virtual Boy",".zip .ZIP .vb .VB"],
["gw","Game and Watch",".zip .ZIP .mgw .MGW"],
["sega32x","Sega 32x",".32x .32X .smd .SMD .bin .BIN .zip .ZIP"],
["segasg1000","Sega SG1000",".sg .SG .zip .ZIP"],
["segacd","Sega CD",".cue .CUE .iso .ISO"],
["supergrafx","SuperGrafx",".pce .PCE .cue .CUE .sgx .SGX .zip .ZIP .ccd .CCD"],
["atari7800","Atari 7800",".a78 .A78 .bin .BIN .zip .ZIP"],
["ngp","Neo-Geo Pocket/Neo Geo Pocket Color",".zip .ZIP .ngc .NGC .ngp .NGP"],
["vectrex","Vectrex",".zip .ZIP .vec .VEC .bin .BIN"],
["lutro","Lutro",".zip .ZIP .lua .LUA"],
["atarist","Atari ST",".st .ST .stx .STX .zip .ZIP"],
["amstradcpc","Amstrad CPC",".dsk .DSK .zip .ZIP"],
["zx81","ZX81",".tzx .TZX .p .P .zip .ZIP"],
["dreamcast","Dreamcast",".gdi .GDI .cdi .CDI"],
["nds","Nintendo DS",".nds .zip .NDS .ZIP"]
]
str_list = []
str_list.append('<?xml version="1.0"?>\n')
str_list.append('<systemList>\n')
# # --- Write launchers ---
for emuID in emus:
# # # Data which is not string must be converted to string
# emu = emus[emuID]
str_list.append('<system>\n')
str_list.append(XML_text('name', emuID[0]))
str_list.append(XML_text('fullname', emuID[1]))
str_list.append(XML_text('path', ruta_roms+emuID[0]))
str_list.append(XML_text('extension', emuID[2]))
str_list.append(XML_text('command', app_file+' '+xbmcaddon.Addon(id='game.emulationstation').getSetting(emuID[0])+' %ROM% ES'))
str_list.append(XML_text('platform', emuID[0]))
str_list.append('</system>\n')
# End of file
str_list.append('</systemList>\n')
full_string = ''.join(str_list).encode('utf-8')
file_obj = open(archivo, 'w')
file_obj.write(full_string)
file_obj.close()
os.system("echo 'EmulationStation [ADDON] :: es_systems.cfg updated.' >> /storage/.kodi/temp/emulationstation.log")
xbmcgui.Dialog().ok("EmulationStation", "EmulationStation Systems config file (es_systems.cfg) updated.");
else:
xbmc.executebuiltin('ActivateWindow(busydialog)')
os.system("echo 'EmulationStation [ADDON] ::' $(date) > /storage/.kodi/temp/emulationstation.log")
os.system("echo '============================================================' >> /storage/.kodi/temp/emulationstation.log")
# project=str(os.popen('$(head -c 3 /etc/release)').read())
    # first, check whether this is the first launch; if so, perform the installation:
if os.path.isdir(directory+"/emulationstation") == True:
os.system("echo 'EmulationStation [ADDON] :: Firts Run!' >> /storage/.kodi/temp/emulationstation.log")
os.system("mkdir -p /storage/.kodi/userdata/addon_data/game.emulationstation")
os.system("if [ ! -f /storage/.kodi/userdata/addon_data/game.emulationstation/es_systems.cfg ] ; then cp /storage/.kodi/addons/game.emulationstation/emulationstation/es_systems.cfg /storage/.kodi/userdata/addon_data/game.emulationstation/es_systems.cfg ; fi && rm -rf /storage/.kodi/addons/game.emulationstation/emulationstation/es_systems.cfg")
os.system("cp -r /storage/.kodi/addons/game.emulationstation/emulationstation/* /storage/.kodi/userdata/addon_data/game.emulationstation")
# os.system("rm -rf /storage/.kodi/addons/game.emulationstation/emulationstation")
os.system("if [ -L /storage/.config/emulationstation ] ; then rm /storage/.config/emulationstation ; fi && ln -s /storage/.kodi/userdata/addon_data/game.emulationstation /storage/.config/emulationstation")
os.system("rm -rf /storage/.kodi/addons/game.emulationstation/emulationstation")
# os.system("touch /storage/.kodi/addons/game.emulationstation/installed && echo 'EmulationStation [ADDON] :: Firts Run! - ' $(date) >> /storage/.kodi/addons/game.emulationstation/installed")
os.system("chmod a+x /storage/.kodi/addons/game.emulationstation/game.emulationstation-RPi")
os.system("chmod a+x /storage/.kodi/addons/game.emulationstation/game.emulationstation-Gen")
os.system("chmod a+x /storage/.kodi/addons/game.emulationstation/addon.sh && chmod a+x /storage/.kodi/addons/game.retroarch/addon.start && chmod a+x /storage/.kodi/addons/game.retroarch/game.retroarch-RPi && chmod a+x /storage/.kodi/addons/game.retroarch/game.retroarch-Gen")
# xbmcgui.Dialog().ok(addonname, "Done. Copy your roms, reboot and enjoy!")
        # rename the libs folder for this device
os.system("PROJECT=$(head -c 3 /etc/release) && if [[ $PROJECT == 'Gen' ]] ; then mv -n /storage/.kodi/addons/game.emulationstation/lib-Gen /storage/.kodi/addons/game.emulationstation/lib ; else mv -n /storage/.kodi/addons/game.emulationstation/lib-RPi /storage/.kodi/addons/game.emulationstation/lib ; fi && echo 'EmulationStation [ADDON] :: '$PROJECT >> /storage/.kodi/temp/emulationstation.log")
# if project in "Pi":
# os.system("mv -r /storage/.kodi/addons/game.emulationstation/lib-RPi /storage/.kodi/addons/game.emulationstation/lib")
# else:
# os.system("mv -r /storage/.kodi/addons/game.emulationstation/lib-Gen /storage/.kodi/addons/game.emulationstation/lib")
        # Finally, launch EmulationStation
# else:
askConfirmation = xbmcplugin.getSetting(int(sys.argv[1]),'ask')
if askConfirmation == "true":
resultado = xbmcgui.Dialog().yesno("EmulationStation", "Exit Kodi and run EmulationStation?");
if resultado:
xbmc.executebuiltin('ShowPicture("/storage/.kodi/addons/game.emulationstation/fanart.jpg")')
os.system("echo 'EmulationStation [ADDON] :: Kodi is ready.' >> /storage/.kodi/temp/emulationstation.log")
os.system("sh "+directory+"/addon.sh emulationstation")
else:
xbmc.executebuiltin('Dialog.Close(busydialog)')
os.system("echo 'EmulationStation [ADDON] :: Abort launch.' >> /storage/.kodi/temp/emulationstation.log")
else:
xbmc.executebuiltin('ShowPicture("/storage/.kodi/addons/game.emulationstation/fanart.jpg")')
os.system("echo 'EmulationStation [ADDON] :: Kodi is ready.' >> /storage/.kodi/temp/emulationstation.log")
os.system("sh "+directory+"/addon.sh emulationstation")
| bite-your-idols/Gamestarter-Pi | repository.gamestarter/game.emulationstation/addon.py | Python | gpl-2.0 | 14,760 |
#!/usr/bin/env python
'''Simple viewer for DDS texture files.
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id$'
from ctypes import *
import getopt
import sys
import textwrap
from SDL import *
from pyglet.gl.VERSION_1_1 import *
import pyglet.dds
import pyglet.event
import pyglet.image
import pyglet.sprite
import pyglet.window
from OpenGL.GLU import *
def usage():
print textwrap.dedent('''
Usage: ddsview.py [--header] texture1.dds texture2.dds ...
--header Dump the header of each file instead of displaying.
Within the program, press:
left/right keys Flip between loaded textures
up/down keys Increase/decrease mipmap level for a texture
space Toggle flat or sphere view
Click and drag with mouse to reposition texture with wrapping.
''')
texture_index = 0
textures = []
mipmap_level = 0
last_pos = None
texture_offset = [0, 0]
view = 'flat'
sphere_angle = 0
def keydown(character, symbol, modifiers):
global mipmap_level, texture_index
if symbol == SDLK_DOWN:
mipmap_level = max(0, mipmap_level - 1)
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_BASE_LEVEL, mipmap_level)
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAX_LEVEL, mipmap_level)
elif symbol == SDLK_UP:
mipmap_level = mipmap_level + 1
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_BASE_LEVEL, mipmap_level)
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAX_LEVEL, mipmap_level)
elif symbol == SDLK_LEFT:
texture_index = max(0, texture_index - 1)
elif symbol == SDLK_RIGHT:
texture_index = min(len(textures) - 1, texture_index + 1)
elif symbol == SDLK_SPACE:
toggle_view()
return True
def mousemotion(x, y):
global last_pos
state, x, y = SDL_GetMouseState()
if state & SDL_BUTTON(1):
texture_offset[0] += x - last_pos[0]
texture_offset[1] += y - last_pos[1]
update_texture_matrix()
last_pos = x, y
def update_texture_matrix():
glMatrixMode(GL_TEXTURE)
glLoadIdentity()
glTranslatef(-texture_offset[0] / float(textures[texture_index].size[0]),
-texture_offset[1] / float(textures[texture_index].size[1]),
0)
glMatrixMode(GL_MODELVIEW)
def toggle_view():
global view
if view != 'flat':
pyglet.event.pop()
pyglet.window.set_2d()
view = 'flat'
else:
pyglet.event.push()
pyglet.event.on_mousemotion(sphere_mousemotion)
pyglet.window.set_3d()
glEnable(GL_LIGHT0)
glLightfv(GL_LIGHT0, GL_POSITION, (c_float * 4)(0.5, 0.5, 1, 0))
view = 'sphere'
def sphere_mousemotion(x, y):
# TODO: virtual trackball
return True
def draw_sphere():
global sphere_angle
glPushMatrix()
glTranslatef(0., 0., -4)
glRotatef(sphere_angle, 0, 1, 0)
glRotatef(90, 1, 0, 0)
sphere_angle += 0.01
glPushAttrib(GL_ENABLE_BIT)
glEnable(GL_DEPTH_TEST)
glEnable(GL_LIGHTING)
glEnable(GL_TEXTURE_2D)
glBindTexture(GL_TEXTURE_2D, textures[texture_index].id)
glTexEnvi(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE)
sphere = gluNewQuadric()
gluQuadricTexture(sphere, True)
gluSphere(sphere, 1.0, 100, 100)
gluDeleteQuadric(sphere)
glPopAttrib()
glPopMatrix()
def main(args):
header = False
options, args = getopt.getopt(args[1:], 'h', ['help', 'header'])
for option, value in options:
if option in ('-h', '--help'):
usage()
sys.exit()
elif option == '--header':
header = True
if len(args) < 1:
usage()
sys.exit()
if header:
for arg in args:
print pyglet.dds.DDSURFACEDESC2(open(arg,
'r').read(pyglet.dds.DDSURFACEDESC2.get_size()))
else:
pyglet.window.set_window(resizable=True)
global textures, texture_index
textures = [pyglet.dds.load_dds(arg) for arg in args]
texture_index = 0
pyglet.window.resize(*textures[0].size)
pyglet.event.push()
pyglet.event.on_keydown(keydown)
pyglet.event.on_mousemotion(mousemotion)
global last_pos
state, x, y = SDL_GetMouseState()
last_pos = x, y
glClearColor(0, 0, 0, 0)
while not pyglet.event.is_quit():
pyglet.event.pump()
pyglet.window.clear()
if view == 'flat':
textures[texture_index].draw()
elif view == 'sphere':
draw_sphere()
pyglet.window.flip()
if __name__ == '__main__':
main(sys.argv)
| shaileshgoogler/pyglet | tools/ddsview.py | Python | bsd-3-clause | 4,680 |
'''
Created by auto_sdk on 2015.09.16
'''
from top.api.base import RestApi
class WlbWmsStockOutOrderNotifyRequest(RestApi):
def __init__(self,domain='gw.api.taobao.com',port=80):
RestApi.__init__(self,domain, port)
self.car_no = None
self.carriers_name = None
self.extend_fields = None
self.order_code = None
self.order_create_time = None
self.order_item_list = None
self.order_type = None
self.outbound_type_desc = None
self.pick_call = None
self.pick_id = None
self.pick_name = None
self.prev_order_code = None
self.receiver_info = None
self.remark = None
self.send_time = None
self.sender_info = None
self.store_code = None
self.transport_mode = None
def getapiname(self):
return 'taobao.wlb.wms.stock.out.order.notify'
| colaftc/webtool | top/api/rest/WlbWmsStockOutOrderNotifyRequest.py | Python | mit | 794 |
'''
Author Alumet 2015
https://github.com/Alumet/Codingame
'''
n = int(input()) # Number of elements which make up the association table.
q = int(input()) # Number Q of file names to be analyzed.
Link_table = {None : 'UNKNOWN'}
# Fill the dict mapping file extensions to MIME types
for i in range(n):
ext, mt = input().split()
Link_table[ext.lower()]=mt
for i in range(q):
fname=(input().lower().split("."))
if len(fname) > 1:
answer=fname[-1]
else:
answer=None
print(Link_table.get(answer, "UNKNOWN"))
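# Illustrative session (assuming the table maps "html" to "text/html"):
#   "Index.HTML" prints "text/html"; "archive" (no extension) prints "UNKNOWN".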
| Alumet/Codingame | Easy/MIME_Type.py | Python | mit | 533 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import luigi
import luigi.contrib.external_program
import sqlite3
import pickle
import ast
import os
import numpy as np
import pandas as pd
import uncertainties as u
from contrib import afterpulse, darknoise, crosstalk
def is_none(param):
"""
Checks if param is None or "None".
"""
if param is None:
return True
if type(param) == str:
try:
if ast.literal_eval(param) is None:
return True
except:
pass
return False
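# Illustrative behaviour:
#   is_none(None)   -> True
#   is_none("None") -> True
#   is_none("42")   -> False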
def basename(path):
"""
Returns the base name of the path.
"""
return os.path.splitext(os.path.basename(path))[0]
class RunMetaTask():
id = luigi.IntParameter(-1)
name = luigi.Parameter()
model = luigi.Parameter()
bias_voltage = luigi.FloatParameter(default=np.nan)
path_spec = luigi.Parameter(default=None)
silence = luigi.IntParameter(400)
def path(self):
model = self.model
        # If the model was given as a properties file path, use its base name.
if os.path.exists(model):
model = basename(model)
p = "./results/%s/%s" % (self.name, model)
if not is_none(self.path_spec):
if self.path_spec.startswith("/"):
p += self.path_spec
else:
p += "-" + self.path_spec
return p
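# Illustrative (hypothetical values): with name='test',
# model='./resources/hamamatsu.properties' (an existing file) and the default
# path_spec, path() yields './results/test/hamamatsu'.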
class Run(luigi.contrib.external_program.ExternalProgramTask, RunMetaTask):
nice_level = luigi.IntParameter(5)
def program_args(self):
# Create macro file.
mac = self.output()[0]
if not mac.exists():
with mac.open("w") as o:
# Particle source parameters
print >> o, "/ps/nParticles", 1
print >> o, "/g4sipm/noise/preThermal", 3
print >> o, "/g4sipm/noise/postThermal", 10
print >> o, "/g4sipm/noise/crosstalkNeighbours", 4
print >> o, "/g4sipm/noise/ifNoSignal", True
print >> o, "/g4sipm/digitize/hits", True
print >> o, "/g4sipm/digitize/trace", False
# Persistency parameters
print >> o, "/g4sipm/persist/tMin", 0, "ns"
print >> o, "/g4sipm/persist/tMax", 10e6, "ns"
print >> o, "/g4sipm/persist/hits", False
print >> o, "/g4sipm/persist/digis", True
# Tell the simulation to run
print >> o, "/run/beamOn 1"
# Execute simulation.
output = self.output()[1]
args = [ # "ulimit", "-s", "32768", "&&",
"nice", "-n", self.nice_level,
"./sample", "--mac", mac.fn, "--output", output.fn, "--model", self.model]
if not np.isnan(self.bias_voltage):
args.extend(["--bias-voltage", self.bias_voltage])
return args
def output(self):
return [luigi.LocalTarget("%s/g4sipm-%d.mac" % (self.path(), self.id)), luigi.LocalTarget("%s/g4sipm-%d.sqlite" % (self.path(), self.id)) ]
class ExportRunSettings(luigi.Task, RunMetaTask):
def requires(self):
return self.clone(Run, index=0)
def run(self):
tables = ['particleSourceMessenger', 'g4sipmUiMessenger', 'sipmModel', 'sipmVoltageTraceModel']
if self.input()[1].exists():
input = self.input()[1].fn
# Open database.
con = sqlite3.connect(input)
con.row_factory = sqlite3.Row
cur = con.cursor()
# Select first entry of all tables.
settings = dict()
for table in tables:
row = cur.execute("SELECT * FROM `%s`;" % table).fetchone()
# Iterate all columns.
entry = dict()
for key, value in zip(row.keys(), row):
entry[key] = value
# Add to settings dictionary
settings[table] = entry
# Pickle settings
with self.output().open("w") as o:
pickle.dump(settings, o)
def output(self):
return luigi.LocalTarget("%s/g4sipm.pkl" % (self.path()))
class Histogram(luigi.Task, RunMetaTask):
def requires(self):
return self.clone(Run)
def run(self):
con = sqlite3.connect(self.input()[1].fn)
cur = con.cursor()
try:
# Get persisted digis.
digis = np.array(cur.execute("SELECT time, weight FROM `g4sipmDigis-0`;").fetchall())
time = digis[:, 0]
weight = digis[:, 1]
# Histogram.
bins = np.arange(np.floor(0), np.ceil(time.max() - 5000) + 1)
hist, bin_edges = np.histogram(time, bins=bins, weights=weight)
# Create dataframe.
df = pd.DataFrame({"time": bin_edges[:-1], "pe": hist})
# Drop entries with zero.
df = df[df.pe != 0]
# Save
df.to_pickle(self.output().fn)
except Exception as e:
print ("Failure in", self.input()[1].fn)
raise e
def output(self):
return luigi.LocalTarget("%s/g4sipm-%d-hist.pkl" % (self.path(), self.id))
class DeltaTDistribution(luigi.Task, RunMetaTask):
def requires(self):
return self.clone(Histogram)
def run(self):
df = pd.read_pickle(self.input().fn)
time = df.time.values
pe = df.pe.values
i = 1
time_diff, next_pe = [], []
while i < len(time) - 1:
# Select a thermal noise pulse
# If the pulse has 1 p.e. ...
if pe[i] > 0.5 and pe[i] < 1.5:
diff_prev = time[i] - time[i - 1]
diff_next = time[i + 1] - time[i]
# ... and the time difference to the previous pulse is large enough ...
if diff_prev > self.silence and diff_next > 0.0:
# ... the pulse is thermally induced.
# We measure the time and pulse height to the next pulse
time_diff.append(diff_next)
next_pe.append(pe[i + 1])
					# Skip this pulse in the next iteration
i += 1
i += 1
df = pd.DataFrame({"time_diff": time_diff, "pe": next_pe})
df.to_pickle(self.output().fn)
def output(self):
return luigi.LocalTarget("%s/g4sipm-%d-delta-t.pkl" % (self.path(), self.id))
class PeDistribution(luigi.Task, RunMetaTask):
def requires(self):
return self.clone(Histogram)
def run(self):
df = pd.read_pickle(self.input().fn)
time = df.time.values
pe = df.pe.values
i = 1
curr_pe = []
while i < len(time):
# Select a thermal noise induced pulse
diff_prev = time[i] - time[i - 1]
if diff_prev > self.silence and pe[i] > 0.0:
# ... the pulse is thermally induced.
# We measure the time and pulse height to the next pulse
curr_pe.append(pe[i])
				# Skip this pulse in the next iteration
i += 1
i += 1
df = pd.DataFrame({"pe": curr_pe})
df.to_pickle(self.output().fn)
def output(self):
return luigi.LocalTarget("%s/g4sipm-%d-pe.pkl" % (self.path(), self.id))
class DarknoiseDeltaTSimulation(luigi.Task, RunMetaTask):
n_repititions = luigi.IntParameter(1000)
def requires(self):
return [self.clone(DeltaTDistribution, id=i) for i in xrange(self.n_repititions)]
def run(self):
frames = []
for input in self.input():
frames.append(pd.read_pickle(input.fn))
df = pd.concat(frames, ignore_index=True)
df.to_pickle(self.output().fn)
def output(self):
return luigi.LocalTarget("%s/g4sipm-delta-t.pkl" % (self.path()))
class DarknoisePeSimulation(luigi.Task, RunMetaTask):
n_repititions = luigi.IntParameter(1000)
def requires(self):
return [self.clone(PeDistribution, id=i) for i in xrange(self.n_repititions)]
def run(self):
frames = []
for input in self.input():
frames.append(pd.read_pickle(input.fn))
df = pd.concat(frames, ignore_index=True)
df.to_pickle(self.output().fn)
def output(self):
return luigi.LocalTarget("%s/g4sipm-pe.pkl" % (self.path()))
class DeltaTFit(luigi.Task, RunMetaTask):
bin_width = luigi.IntParameter(10)
t_max = luigi.IntParameter(5000)
def requires(self):
return [self.clone(DarknoiseDeltaTSimulation), self.clone(ExportRunSettings)]
def run(self):
df = pd.read_pickle(self.input()[0].fn)
g4sipm = pickle.load(self.input()[1].open("r"))
# Create histogram
from ROOT import TH1D, TFile
nbins = self.t_max / self.bin_width
f = TFile(self.output()[0].fn, "RECREATE")
h = TH1D("hist", ";time/ns;entries", nbins , 0, self.t_max)
h.Sumw2()
for ti in df.time_diff:
h.Fill(ti)
# Normalize
h.Scale(1.0 / h.Integral("width"))
# Perform fit.
amp, amp_err, p_ap_s, p_ap_s_err, p_ap_l, p_ap_l_err, tau_s, tau_s_err, tau_l, tau_l_err, tau_th, tau_th_err, chi2, ndof = afterpulse.fit(h, xlow=50)
#
h.Write()
f.Close()
# Create dataframe with results and save.
df = pd.DataFrame({"amp_delta_t": [u.ufloat(amp, amp_err)],
"p_ap_f": [u.ufloat(p_ap_s, p_ap_s_err)],
"p_ap_s": [u.ufloat(p_ap_l, p_ap_l_err)],
"tau_ap_f": [u.ufloat(tau_s, tau_s_err)],
"tau_ap_s": [u.ufloat(tau_l, tau_l_err)],
"tau_th": [u.ufloat(tau_th, tau_th_err)],
"chi2_delta_t": [chi2],
"ndof_delta_t": [ndof],
})
df.to_pickle(self.output()[1].fn)
def output(self):
return [luigi.LocalTarget("%s/g4sipm-delta-t-fit.root" % (self.path())), luigi.LocalTarget("%s/g4sipm-delta-t-fit.pkl" % (self.path()))]
class PeFit(luigi.Task, RunMetaTask):
bin_width = luigi.FloatParameter(0.1)
pe_max = luigi.IntParameter(12)
def requires(self):
return [self.clone(DarknoisePeSimulation), self.clone(ExportRunSettings)]
def run(self):
df = pd.read_pickle(self.input()[0].fn)
g4sipm = pickle.load(self.input()[1].open("r"))
ncell = g4sipm["sipmModel"]["numberOfCells"]
neighbours = g4sipm["g4sipmUiMessenger"]["noiseCrosstalkNeighbours"]
# Create histogram
from ROOT import TH1D, TFile, TF1, ROOT
nbins = int(self.pe_max / self.bin_width)
# Create and fill histogram.
f = TFile(self.output()[0].fn, "RECREATE")
h = TH1D("hist", "hist", nbins , 0 + self.bin_width / 2.0, self.pe_max + self.bin_width / 2.0)
h.Sumw2()
for p in df.pe:
h.Fill(p)
# Perform fit
p_ct_fit, p_ct_fit_err, amp, amp_err, g, g_err, sigma_pe, sigma_pe_err, p_ct, p_ct_err, mu_0, mu_0_err, sigma_el, sigma_el_err, nu, nu_err, chi2, ndof = darknoise.fit(h, ncell, neighbours)
#
h.Write()
f.Close()
# Create dataframe with results and save.
df = pd.DataFrame({"p_ct_fit": [u.ufloat(p_ct_fit, p_ct_fit_err)],
"amp_pe": [u.ufloat(amp, amp_err)],
"g": [u.ufloat(g, g_err)],
"sigma_pe": [u.ufloat(sigma_pe, sigma_pe_err)],
"p_ct": [u.ufloat(p_ct, p_ct_err)],
"mu_0": [u.ufloat(mu_0, mu_0_err)],
"sigma_el": [u.ufloat(sigma_el, sigma_el_err)],
"nu": [u.ufloat(nu, nu_err)],
"chi2_pe": [chi2],
"ndof_pe": [ndof],
})
df.to_pickle(self.output()[1].fn)
def output(self):
return [luigi.LocalTarget("%s/g4sipm-pe-fit.root" % (self.path())), luigi.LocalTarget("%s/g4sipm-pe-fit.pkl" % (self.path()))]
class DarknoiseSimulation(luigi.Task, RunMetaTask):
name = luigi.Parameter("darknoise-simulation")
def requires(self):
yield self.clone(DeltaTFit)
yield self.clone(PeFit)
def run(self):
frames = []
for input in self.input():
frames.append(pd.read_pickle(input[1].fn))
df = pd.concat(frames, axis=1, join="inner")
df.to_pickle(self.output().fn)
def output(self):
return luigi.LocalTarget("%s/darknoise-simulation.pkl" % (self.path()))
class AllOvervoltagesForHamamatsuS1036233050C(luigi.Task, RunMetaTask):
name = luigi.Parameter("darknoise-simulation")
model = luigi.Parameter("./resources/hamamatsu-s10362-33-050c-niggemann.properties")
def requires(self):
v_break = 70.0
v_ov = [0.71, 0.81, 0.91, 1.01, 1.11, 1.21, 1.31, 1.41]
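		# Bias sweep: one full darknoise simulation chain per overvoltage point,
		# with bias_voltage = v_break + v_ov.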
return [DarknoiseSimulation(name=self.name, model=self.model, path_spec=str(v), bias_voltage=(v_break + v)) for v in v_ov]
def run(self):
frames = []
for input in self.input():
frames.append(pd.read_pickle(input.fn))
df = pd.concat(frames, ignore_index=True)
df = pd.concat([df, pd.DataFrame({"v_ov": [0.71, 0.81, 0.91, 1.01, 1.11, 1.21, 1.31, 1.41]})], axis=1, join="inner")
self.output()[0].makedirs()
df.to_pickle(self.output()[0].fn)
# Get expectation from model files.
v_ov, f_th, p_ct, p_ap_s, p_ap_f, tau_ap_s, tau_ap_f = [], [], [], [], [], [], []
for input in self.input():
g4sipm = pickle.load(open(os.path.join(os.path.dirname(input.fn), "g4sipm.pkl")))
v_ov.append(g4sipm["sipmModel"]["overVoltage"] * 1e6) # volt
f_th.append(g4sipm["sipmModel"]["thermalNoiseRate"])
p_ct.append(g4sipm["sipmModel"]["crossTalkProbability"])
p_ap_s.append(g4sipm["sipmModel"]["apProbLong"])
p_ap_f.append(g4sipm["sipmModel"]["apProbShort"])
tau_ap_s.append(g4sipm["sipmModel"]["apTauLong"])
tau_ap_f.append(g4sipm["sipmModel"]["apTauShort"])
df = pd.DataFrame({"v_ov":v_ov, "f_th":f_th, "p_ct":p_ct, "p_ap_s":p_ap_s, "p_ap_f":p_ap_f, "tau_ap_s":tau_ap_s, "tau_ap_f":tau_ap_f})
df.to_pickle(self.output()[1].fn)
def output(self):
return [luigi.LocalTarget("%s/darknoise-simulation.pkl" % (self.path())), luigi.LocalTarget("%s/darknoise-simulation-expected.pkl" % (self.path()))]
if __name__ == "__main__":
luigi.run()
| ntim/g4sipm | sample/run/luigi/darknoise_simulation_new.py | Python | gpl-3.0 | 14,785 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class SiteSealRequest(Model):
"""Site seal request.
:param light_theme: If <code>true</code> use the light color theme for
site seal; otherwise, use the default color theme.
:type light_theme: bool
:param locale: Locale of site seal.
:type locale: str
"""
_attribute_map = {
'light_theme': {'key': 'lightTheme', 'type': 'bool'},
'locale': {'key': 'locale', 'type': 'str'},
}
def __init__(self, light_theme=None, locale=None):
super(SiteSealRequest, self).__init__()
self.light_theme = light_theme
self.locale = locale
| lmazuel/azure-sdk-for-python | azure-mgmt-web/azure/mgmt/web/models/site_seal_request.py | Python | mit | 1,117 |
"""
git-flow - Manage version branches and tags
Usage:
git-flow status
[--root=DIR] [--config=FILE] [-B|--batch] [-v|--verbose] [-p|--pretty]
[(-a|--all) | <object>]
git-flow (bump-major|bump-minor)
[--root=DIR] [--config=FILE] [-B|--batch] [-v|--verbose] [-p|--pretty]
[-d|--dry-run] [-y|--assume-yes] [<object>]
git-flow (bump-patch|bump-prerelease-type|bump-prerelease|bump-to-release)
[--root=DIR] [--config=FILE] [-B|--batch] [-v|--verbose] [-p|--pretty]
[-d|--dry-run] [-y|--assume-yes] [<object>]
git-flow bump-to
[--root=DIR] [--config=FILE] [-B|--batch] [-v|--verbose] [-p|--pretty]
[-d|--dry-run] [-y|--assume-yes] <version> [<object>]
git-flow discontinue
[--root=DIR] [--config=FILE] [-B|--batch] [-v|--verbose] [-p|--pretty]
[-d|--dry-run] [-y|--assume-yes] [--reintegrate|--no-reintegrate] [<object>]
git-flow start
[--root=DIR] [--config=FILE] [-B|--batch] [-v|--verbose] [-p|--pretty]
[-d|--dry-run] [-y|--assume-yes] (<supertype> <type> <name>|<work-branch>) [<base-object>]
git-flow finish
[--root=DIR] [--config=FILE] [-B|--batch] [-v|--verbose] [-p|--pretty]
[-d|--dry-run] [-y|--assume-yes] [(<supertype> <type> <name>|<work-branch>) [<base-object>]]
git-flow log
[--root=DIR] [--config=FILE] [-B|--batch] [-v|--verbose] [-p|--pretty]
[<object>] [-- <git-arg>...]
git-flow (assemble|test|integration-test)
[--root=DIR] [--config=FILE] [-B|--batch] [-v|--verbose] [-p|--pretty]
[-d|--dry-run] [--inplace| [<object>]]
git-flow drop-cache
[-B|--batch] [-v|--verbose] [-p|--pretty]
[-d|--dry-run]
git-flow convert-config <input-file> <output-file>
git-flow (-h|--help)
git-flow --version
git-flow --hook=<hook-name> [<hook-args>...]
Options:
-h --help Shows this screen.
--version Shows version information.
Selection Options:
-a --all Select all branches
Workspace Options:
--root=DIR The working copy root.
[default: .]
--config=FILE The configuration file relative to the working copy root.
Defaults to default file name in the following order: .gitflow.yml, gitflow.json
Execution Mode Options:
-B --batch Disables interaction and output coloring.
-y --assume-yes Automatically answer yes for all questions.
-d --dry-run Prints actions without executing them.
Output Options:
-v --verbose Enables detailed output.
-p --pretty Enables formatted and colored output.
Hook Options:
--hook=<hook-name> Sets the hook type. For use in Git hooks only.
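Example (illustrative invocations):
    git-flow status --all
    git-flow bump-minor --assume-yes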
"""
import os
import sys
import docopt
import gitflow.procedures.begin
import gitflow.procedures.build
import gitflow.procedures.create_version
import gitflow.procedures.discontinue_version
import gitflow.procedures.end
import gitflow.procedures.log
import gitflow.procedures.status
from gitflow import cli, repotools, _, hooks, filesystem
from gitflow import const
from gitflow.common import GitFlowException, Result
from gitflow.context import Context
# project_env = os.path.normpath(os.path.join(os.path.dirname(__file__), '..'))
# print('project_env: ' + project_env)
# sys.path.insert(0, project_env)
from gitflow.procedures.scheme import scheme_procedures
from gitflow.properties import PropertyIO
ENABLE_PROFILER = False
# ========== commands
# mapped by cmd_<name>
def cmd_bump_major(context):
return gitflow.procedures.create_version.call(context, scheme_procedures.version_bump_major)
def cmd_bump_minor(context):
return gitflow.procedures.create_version.call(context, scheme_procedures.version_bump_minor)
def cmd_bump_patch(context):
return gitflow.procedures.create_version.call(context, scheme_procedures.version_bump_patch)
def cmd_bump_prerelease_type(context):
return gitflow.procedures.create_version.call(context, scheme_procedures.version_bump_qualifier)
def cmd_bump_prerelease(context):
return gitflow.procedures.create_version.call(context, scheme_procedures.version_bump_prerelease)
def cmd_bump_to_release(context):
return gitflow.procedures.create_version.call(context, scheme_procedures.version_bump_to_release)
def cmd_bump_to(context):
return gitflow.procedures.create_version.call(context, scheme_procedures.VersionSet(context.args['<version>']))
def cmd_discontinue(context):
return gitflow.procedures.discontinue_version.call(context)
def cmd_start(context):
return gitflow.procedures.begin.call(context)
def cmd_finish(context):
return gitflow.procedures.end.call(context)
def cmd_log(context):
return gitflow.procedures.log.call(context)
def cmd_status(context):
return gitflow.procedures.status.call(context)
def cmd_build(context):
return gitflow.procedures.build.call(context)
def cmd_drop_cache(context):
result = Result()
cache_root = filesystem.get_cache_root_dir()
cli.print("dropping cache root: " + repr(cache_root))
if not context.dry_run:
filesystem.delete_all_cache_dirs()
return result
def cmd_convert_config(context):
result = Result()
with open(context.args['<input-file>'], mode='r', encoding='utf-8') as in_file:
if in_file is None:
result.fail(os.EX_USAGE,
_("Failed to open input file"),
None)
return result
input = PropertyIO.get_instance_by_filename(in_file.name)
with open(context.args['<output-file>'], mode='w', encoding='utf-8') as out_file:
if out_file is None:
result.fail(os.EX_USAGE,
_("Failed to open output file"),
None)
return result
output = PropertyIO.get_instance_by_filename(out_file.name)
config = input.from_stream(in_file)
output.to_stream(out_file, config)
return result
# ========== hooks
# mapped by hook_<name>
def hook_pre_commit(context):
return hooks.pre_commit(context)
def hook_pre_push(context):
return hooks.pre_push(context)
# ========== entry point
def main(argv: list = sys.argv) -> int:
if ENABLE_PROFILER:
import cProfile
profiler = cProfile.Profile()
profiler.enable()
else:
profiler = None
result = Result()
args = docopt.docopt(argv=argv[1:], doc=__doc__, version=const.VERSION, help=True, options_first=False)
try:
context = Context.create(args, result)
except GitFlowException as e:
context = None
pass # errors are in result
if context is not None:
try:
if context.verbose >= const.DEBUG_VERBOSITY:
cli.print("GitFlow version: " + const.VERSION)
cli.print("Python version:" + sys.version.replace('\n', ' '))
cli.print("cwd: " + os.getcwd())
if args['--hook'] is not None:
if context.verbose >= const.TRACE_VERBOSITY:
cli.print('hook=' + args['--hook'])
hook_func = cli.get_cmd([
hook_pre_commit,
hook_pre_push,
], args['--hook'], 'hook_')
try:
hook_result = hook_func(context)
except GitFlowException as e:
hook_result = e.result
result.errors.extend(hook_result.errors)
else:
commands = {
'status': cmd_status,
'bump-major': cmd_bump_major,
'bump-minor': cmd_bump_minor,
'bump-patch': cmd_bump_patch,
'bump-prerelease-type': cmd_bump_prerelease_type,
'bump-prerelease': cmd_bump_prerelease,
'bump-to-release': cmd_bump_to_release,
'bump-to': cmd_bump_to,
'discontinue': cmd_discontinue,
'start': cmd_start,
'finish': cmd_finish,
'log': cmd_log,
'assemble': cmd_build,
'test': cmd_build,
'integration-test': cmd_build,
'drop-cache': cmd_drop_cache,
'convert-config': cmd_convert_config,
}
command_funcs = list()
for command_name, command_func in commands.items():
if args[command_name] is True:
command_funcs.append(command_func)
if not len(command_funcs):
cli.fail(os.EX_SOFTWARE, "unimplemented command")
if context.verbose >= const.TRACE_VERBOSITY:
cli.print("commands: " + repr(command_funcs))
start_branch = repotools.git_get_current_branch(context.repo) if context.repo is not None else None
for command_func in command_funcs:
try:
command_result = command_func(context)
except GitFlowException as e:
command_result = e.result
result.errors.extend(command_result.errors)
if result.has_errors():
break
current_branch = repotools.git_get_current_branch(context.repo) if context.repo is not None else None
if current_branch is not None and current_branch != start_branch:
cli.print(_("You are now on {branch}.")
.format(branch=repr(current_branch.short_name) if current_branch is not None else '-'))
finally:
context.cleanup()
exit_code = os.EX_OK
if len(result.errors):
sys.stderr.flush()
sys.stdout.flush()
for error in result.errors:
if error.exit_code != os.EX_OK and exit_code != os.EX_SOFTWARE:
exit_code = error.exit_code
cli.eprint('\n'.join(filter(None, [error.message, error.reason])))
# print dry run status, if possible
if context is not None:
if exit_code == os.EX_OK:
if context.dry_run:
cli.print('')
cli.print("dry run succeeded")
else:
pass
else:
if context.dry_run:
cli.print('')
cli.eprint("dry run failed")
else:
pass
if profiler is not None:
profiler.disable()
# pr.dump_stats('profile.pstat')
profiler.print_stats(sort="calls")
return exit_code
if __name__ == "__main__":
__exit_code = main(sys.argv)
sys.exit(__exit_code)
| abacusresearch/gitflow | gitflow/__main__.py | Python | mit | 10,798 |
from ionotomo import *
import numpy as np
import pylab as plt
def test_turbulent_realisation(plot=True):
xvec = np.linspace(-100,100,100)
zvec = np.linspace(0,1000,1000)
M = np.zeros([100,100,1000])
TCI = TriCubic(xvec,xvec,zvec,M)
print("Matern 1/2 kernel")
cov_obj = Covariance(tci=TCI)
sigma = 1.
corr = 30.
nu = 1./2.
print("Testing spectral density")
B = cov_obj.realization()
print("Fluctuations measured {}".format((np.percentile(B.flatten(),95) + np.percentile(-B.flatten(),95))))
#xy slice
x = TCI.xvec
y = TCI.yvec
z = TCI.zvec
X,Y,Z = np.meshgrid(x,y,z,indexing='ij')
dx = x[1] - x[0]
dy = y[1] - y[0]
dz = z[1] - z[0]
if plot and True:
f = plt.figure(figsize=(8,4))
vmin = np.min(B)
vmax = np.max(B)
ax = f.add_subplot(1,3,1)
ax.imshow(B[49,:,:],extent=(z[0],z[-1],y[0],y[-1]),vmin=vmin,vmax=vmax)
ax = f.add_subplot(1,3,2)
plt.imshow(B[:,49,:],extent=(z[0],z[-1],x[0],x[-1]),vmin=vmin,vmax=vmax)
ax = f.add_subplot(1,3,3)
im = plt.imshow(B[:,:,499],extent=(y[0],y[-1],x[0],x[-1]),vmin=vmin,vmax=vmax)
plt.colorbar(im)
plt.show()
print("testing contraction C^{-1}.phi")
phi = np.zeros_like(TCI.M)
#phi = np.cos(R*4)*np.exp(-R)
phi = X**2 + Y**2 + Z**4
phihat = cov_obj.contract(phi)
assert not np.any(np.isnan(phihat))
#Analytic for exp covariance is 1/(8*np.pi*sigma**2) * (1/L**3 * phi - 2/L * Lap phi + L * Lap Lap phi)
    # For phi = X**2 + Y**2 + Z**4: Lap phi = 2 + 2 + 12*Z**2 and Lap Lap phi = 24.
    phih = 1. / (8 * np.pi * sigma ** 2) * (1. / corr ** 3 * phi - 2. / corr * (2 + 2 + 12 * Z ** 2) + corr * 24)
if plot:
f = plt.figure(figsize=(12,12))
ax = f.add_subplot(3,3,1)
ax.set_title("phi")
im = ax.imshow(phi[50,:,:],extent=(z[0],z[-1],y[0],y[-1]))
plt.colorbar(im)
ax = f.add_subplot(3,3,2)
ax.set_title("FFT based")
im = plt.imshow(phihat[50,:,:],extent=(z[0],z[-1],y[0],y[-1]))
plt.colorbar(im)
ax = f.add_subplot(3,3,3)
ax.set_title("Analytic")
im = plt.imshow(phih[50,:,:],extent=(z[0],z[-1],y[0],y[-1]))
plt.colorbar(im)
ax = f.add_subplot(3,3,4)
im = ax.imshow(phi[:,20,:],extent=(z[0],z[-1],x[0],x[-1]))
plt.colorbar(im)
ax = f.add_subplot(3,3,5)
im = plt.imshow(phihat[:,20,:],extent=(z[0],z[-1],x[0],x[-1]))
plt.colorbar(im)
ax = f.add_subplot(3,3,6)
im = plt.imshow(phih[:,20,:],extent=(z[0],z[-1],x[0],x[-1]))
plt.colorbar(im)
ax = f.add_subplot(3,3,7)
im = ax.imshow(phi[:,:,70],extent=(y[0],y[-1],x[0],x[-1]))
plt.colorbar(im)
ax = f.add_subplot(3,3,8)
im = plt.imshow(phihat[:,:,70],extent=(y[0],y[-1],x[0],x[-1]))
plt.colorbar(im)
ax = f.add_subplot(3,3,9)
im = plt.imshow(phih[:,:,70],extent=(y[0],y[-1],x[0],x[-1]))
plt.colorbar(im)
plt.tight_layout()
plt.show()
return
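    # NOTE: the return above makes the remainder of this function unreachable;
    # it implements an alternative stencil-based estimate of C^{-1}.phi.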
phih = phi.copy()/corr**3
from scipy import ndimage
stencil = np.zeros([3,3,3])
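    # Build a 3x3x3 finite-difference Laplacian stencil: -6 at the centre and
    # +1 on the six face neighbours (s counts how many of i, j, k are zero).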
for i in range(-1,2):
for j in range(-1,2):
for k in range(-1,2):
s = 0
if i == 0:
s += 1
if j == 0:
s += 1
if k == 0:
s += 1
if s == 3:
stencil[i,j,k] = -2*3.
if s == 3 - 1:
stencil[i,j,k] = 1.
stencil /= (dx*dy*dz)**(2./3.)
lap = ndimage.convolve(phi,stencil,mode='wrap')
phih -= 2/corr*lap
laplap = ndimage.convolve(lap,stencil,mode='wrap')
phih += corr*laplap
phih /= 8*np.pi*sigma**2
if plot:
f = plt.figure(figsize=(12,12))
ax = f.add_subplot(3,3,1)
ax.set_title("phi")
im = ax.imshow(phi[50,:,:],extent=(z[0],z[-1],y[0],y[-1]))
plt.colorbar(im)
ax = f.add_subplot(3,3,2)
ax.set_title("FFT based")
im = plt.imshow(phihat[50,:,:],extent=(z[0],z[-1],x[0],x[-1]))
plt.colorbar(im)
ax = f.add_subplot(3,3,3)
ax.set_title("Analytic")
im = plt.imshow(phih[50,:,:],extent=(y[0],y[-1],x[0],x[-1]))
plt.colorbar(im)
ax = f.add_subplot(3,3,4)
im = ax.imshow(phi[:,20,:],extent=(z[0],z[-1],y[0],y[-1]))
plt.colorbar(im)
ax = f.add_subplot(3,3,5)
im = plt.imshow(phihat[:,20,:],extent=(z[0],z[-1],x[0],x[-1]))
plt.colorbar(im)
ax = f.add_subplot(3,3,6)
im = plt.imshow(phih[:,20,:],extent=(y[0],y[-1],x[0],x[-1]))
plt.colorbar(im)
ax = f.add_subplot(3,3,7)
im = ax.imshow(phi[:,:,70],extent=(z[0],z[-1],y[0],y[-1]))
plt.colorbar(im)
ax = f.add_subplot(3,3,8)
im = plt.imshow(phihat[:,:,70],extent=(z[0],z[-1],x[0],x[-1]))
plt.colorbar(im)
ax = f.add_subplot(3,3,9)
im = plt.imshow(phih[:,:,70],extent=(y[0],y[-1],x[0],x[-1]))
plt.colorbar(im)
plt.show()
| Joshuaalbert/IonoTomo | src/ionotomo/tests/test_turbulent_realisation.py | Python | apache-2.0 | 5,260 |
from itertools import combinations
def stringy(stringtoget, strings):
    stringtoget = sorted(stringtoget)
    strlen = len(stringtoget)
    strlens = len(strings)
    # Try every subset of the candidate strings (including the full set); a
    # subset works if its concatenation is an anagram of the target string.
    for i in xrange(strlens + 1):
        for perm in combinations(strings, i):
            perm = "".join(perm)
            if len(perm) == strlen and stringtoget == sorted(perm):
                return "YES"
    return "NO"
if __name__ == "__main__":
for tc in xrange(int(raw_input())):
N = int(raw_input())
strings = []
for i in xrange(N):
strings.append(raw_input())
stringtoget = raw_input()
print stringy(stringtoget, strings)
| jamtot/HackerEarth | Problems/The String Monster/monster.py | Python | mit | 690 |
"""This example samples from a simple bivariate normal distribution."""
import jass.mcmc as mcmc
import jass.samplers as samplers
import numpy as np
import scipy.stats as stats
import triangle
import matplotlib.pyplot as pl
# Define the log-likelihood function to be a bivariate normal
normal_rv = stats.multivariate_normal(cov=np.identity(2))
# Initialise the chain at the mean
initial = [0.0, 0.0]
sampler = samplers.ComponentWiseSlice()
samples = mcmc.run(sampler, normal_rv.logpdf, initial, 5000)
# Plot the samples
triangle.corner(samples)
pl.show()
| ebnn/jass | examples/normal.py | Python | mit | 563 |
#!/usr/bin/env python
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from multiprocessing import cpu_count
from contextlib import contextmanager
from threading import Thread
from itertools import chain
from Queue import Queue
import shutil
import string
import time
import sys
import os
import re
BASE_DIR = os.path.split(os.path.abspath(__file__))[0]
ZCL_COMPILER = os.path.join(BASE_DIR, os.path.join('bin', 'zcl-compiler.py'))
def execCommand(cmd):
import subprocess
try:
output = subprocess.check_output(cmd.split(' '), stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError as e:
return e.returncode, e.output
return 0, output
# =============================================================================
# Job Executor
# =============================================================================
class JobExecutor:
__shared_state = {}
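    # Borg pattern: every instance shares __shared_state, although __init__
    # still rebuilds the queue and worker threads on each construction.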
class Worker:
def __init__(self, jobq):
self.jobq = jobq
self.reset()
def reset(self):
self.job_results = []
self.job_failures = []
def run(self):
while True:
job = self.jobq.get()
try:
result = job()
self.job_results.append(result)
except Exception as e:
self.job_failures.append(e)
finally:
self.jobq.task_done()
def __init__(self):
self.__dict__ = self.__shared_state
self._jobq = Queue()
self._workers = [self.Worker(self._jobq) for _ in xrange(cpu_count())]
for worker in self._workers:
t = Thread(target=worker.run)
t.setDaemon(True)
t.start()
def add_job(self, func, *args, **kwargs):
self._jobq.put(lambda: func(*args, **kwargs))
def wait_jobs(self, raise_failure=True):
self._jobq.join()
if not self._jobq.empty():
raise Exception("NOT EMPTY")
job_results = []
job_failures = []
for worker in self._workers:
job_results.extend(worker.job_results)
job_failures.extend(worker.job_failures)
worker.reset()
if raise_failure and len(job_failures) > 0:
for e in job_failures:
print e
raise Exception('%d Job Failures' % len(job_failures))
return sorted(job_results)
# =============================================================================
# Output
# =============================================================================
NO_OUTPUT = False
def msg_write(*args):
if not NO_OUTPUT:
data = ' '.join([str(x) for x in args]) if len(args) > 0 else ''
sys.stdout.write('%s\n' % data)
@contextmanager
def bench(format):
st = time.time()
try:
yield
finally:
et = time.time()
msg_write(format, (et - st))
# =============================================================================
# I/O
# =============================================================================
def writeFile(filename, content):
fd = open(filename, 'w')
try:
fd.write(content)
finally:
fd.close()
def _removeDirectory(path):
if os.path.exists(path):
for root, dirs, files in os.walk(path, topdown=False):
for name in files:
os.remove(os.path.join(root, name))
for name in dirs:
os.rmdir(os.path.join(root, name))
os.removedirs(path)
def removeDirectoryContents(path):
for root, dirs, files in os.walk(path, topdown=False):
for name in dirs:
_removeDirectory(os.path.join(root, name))
def filesWalk(regex, dir_src, func):
for root, dirs, files in os.walk(dir_src, topdown=False):
for name in files:
if regex.match(name):
func(os.path.join(root, name))
def cFilesWalk(dir_src, func):
pattern = re.compile('[A-Za-z0-9_-]+\.c$')
return filesWalk(pattern, dir_src, func)
# =============================================================================
# Build System
# =============================================================================
def findCompiler():
paths = os.getenv('PATH', '/usr/bin').split(':')
compilers = ['clang', 'gcc', 'distcc']
for compiler in compilers:
for path in paths:
compiler_path = os.path.join(path, compiler)
if os.path.exists(compiler_path):
return compiler_path
raise Exception('Compiler Not Found!')
def ldLibraryPathUpdate(ldlibs):
env_name = 'LD_LIBRARY_PATH'
env = os.environ.get(env_name, '')
env_libs = ':'.join([lib for lib in ldlibs if lib not in env])
if env_libs:
env = '%s:%s' % (env, env_libs) if env else env_libs
os.environ[env_name] = env
def compileZclStructs(src_dir, out_dir, dump_error=True):
def _compile(source):
cmd = '%s %s %s' % (ZCL_COMPILER, out_dir, source)
msg_write(' [CG]', source)
exit_code, output = execCommand(cmd)
if exit_code != 0:
if dump_error:
msg_write(' * Failed with Status %d\n * %s\n%s' % (exit_code, cmd, output))
raise RuntimeError("Linking Failure!")
pattern = re.compile('[A-Za-z0-9_-]+\.rpc$')
filesWalk(pattern, src_dir, _compile)
msg_write()
def runTool(tool, verbose=True):
exit_code, output = execCommand(tool)
tool_output = []
if verbose:
tool_output.append(output)
if exit_code != 0:
tool_output.append(' [FAIL] %s exit code %d' % (tool, exit_code))
else:
tool_output.append(' [ OK ] %s' % tool)
msg_write('\n'.join(tool_output))
def runTools(name, tools, verbose=True):
if not tools:
return
msg_write('Run %s:' % name)
msg_write('-' * 60)
job_exec = JobExecutor()
for tool in tools:
job_exec.add_job(runTool, tool, verbose)
job_exec.wait_jobs()
msg_write()
def runTests(name, tests, verbose=True):
if tests:
tests = [t for t in tests if os.path.basename(t).startswith('test-')]
runTools(name, tests, verbose)
class BuildOptions(object):
def __init__(self):
self.cc = findCompiler()
self.ldlibs = set()
self.cflags = set()
self.defines = set()
self.includes = set()
self.pedantic = False
def setCompiler(self, cc):
self.cc = cc
def addCFlags(self, cflags):
self.cflags |= set(cflags)
def addDefines(self, defines):
self.defines |= set(defines)
def addIncludePaths(self, includes):
self.includes |= set(includes)
def addLdLibs(self, ldlibs):
self.ldlibs |= set(ldlibs)
def setPedantic(self, pedantic):
self.pedantic = pedantic
def clone(self):
opts = BuildOptions()
opts.setCompiler(self.cc)
opts.addCFlags(self.cflags)
opts.addDefines(self.defines)
opts.addIncludePaths(self.includes)
opts.addLdLibs(self.ldlibs)
opts.setPedantic(self.pedantic)
return opts
class Build(object):
DEFAULT_BUILD_DIR = 'build'
HEADER_TITLE = 'Building'
def __init__(self, name, build_dir=None, options=[]):
def _setDefaultFunc(value, default_f):
return value if value else default_f()
def _setDefaultValue(value, default_v):
return value if value else default_v
self.name = name
self._options = _setDefaultFunc(options, BuildOptions)
self._makeBuildDirs(_setDefaultValue(build_dir, self.DEFAULT_BUILD_DIR))
self._print_header()
def build(self, *args, **kwargs):
self.cleanup()
os.makedirs(self._dir_obj)
os.makedirs(self._dir_lib)
os.makedirs(self._dir_inc)
return self._build(*args, **kwargs)
def _print_header(self):
if self.HEADER_TITLE:
msg_write(self.HEADER_TITLE, self.name)
msg_write('-' * 60)
def _build(self):
raise NotImplementedError
def cleanup(self, full=True):
if full:
_removeDirectory(self._dir_out)
else:
_removeDirectory(self._dir_obj)
def _makeBuildDirs(self, build_dir):
self._file_buildnr = os.path.join(build_dir, '.buildnr-%s' % self.name)
self._dir_out = os.path.join(build_dir, self.name)
self._dir_obj = os.path.join(self._dir_out, 'objs')
self._dir_lib = os.path.join(self._dir_out, 'libs')
self._dir_inc = os.path.join(self._dir_out, 'include')
def updateLdPath(self):
ldLibraryPathUpdate([self._dir_lib])
def compileFile(self, filename, dump_error=True):
obj_name, obj_path = self._objFilePath(filename)
cmd = '%s -c %s %s %s %s %s -o %s' % \
(self._options.cc, \
string.join(self._options.cflags, ' '), \
string.join(self._options.defines, ' '), \
string.join(self._options.includes, ' '), \
'-I%s' % self._dir_inc, \
filename, \
obj_path)
msg_write(' [CC]', filename)
exit_code, output = execCommand(cmd)
if exit_code != 0:
if dump_error:
msg_write(' * Failed with Status %d\n * %s\n%s' % (exit_code, cmd, output))
raise RuntimeError("Compilation Failure! %s" % filename)
if self._options.pedantic and len(output) > 0:
msg_write(output)
def compileDirectories(self, dirs_src):
job_exec = JobExecutor()
compileFunc = lambda f: job_exec.add_job(self.compileFile, f)
for src in dirs_src:
cFilesWalk(src, compileFunc)
results = job_exec.wait_jobs()
return len(results)
def linkFile(self, filename, dump_error=True):
_, obj_path = self._objFilePath(filename)
app_name, app_path = self._appFilePathFromObj(obj_path)
cmd = '%s -o %s %s %s' % \
(self._options.cc, app_path, obj_path, \
string.join(self._options.ldlibs, ' '))
msg_write(' [LD]', app_name)
exit_code, output = execCommand(cmd)
if exit_code != 0:
if dump_error:
msg_write(' * Failed with Status %d\n * %s\n%s' % (exit_code, cmd, output))
raise RuntimeError("Linking Failure!")
return app_path
def _objFilePath(self, filename):
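        # Flatten the source path into a unique object file name,
        # e.g. src/zcl/foo.c becomes src_zcl_foo.o in the objs directory.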
if filename.endswith('.o'):
return os.path.basename(filename), filename
objname = os.path.normpath(filename).replace('/', '_')
objname = objname[:objname.rindex('.')] + '.o'
objpath = os.path.join(self._dir_obj, objname)
return objname, objpath
def _appFilePath(self, filename):
obj_path = self._objFilePath(filename)
return self._appFilePathFromObj(obj_path)
def _appFilePathFromObj(self, obj_path):
app_name = obj_path[obj_path.rfind('_') + 1:-2]
app_path = os.path.join(self._dir_out, app_name)
return app_name, app_path
def buildNumber(self, major, minor, inc=1):
mx = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789'
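        # Build id layout: major digit, minor encoded as a character of mx,
        # build counter as 4 hex digits (e.g. version 0.5, build 1 -> '0F0001');
        # the counter is persisted in a per-target dot-file.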
try:
build = int(file(self._file_buildnr, 'r').read()) + inc
except:
build = 0
if inc > 0:
file(self._file_buildnr, 'w').write('%d' % build)
return '%d%s%04X' % (major, mx[minor], build)
@staticmethod
def platformIsMac():
return os.uname()[0] == 'Darwin'
@staticmethod
def platformIsLinux():
return os.uname()[0] == 'Linux'
class BuildApp(Build):
def __init__(self, name, src_dirs, **kwargs):
super(BuildApp, self).__init__(name, **kwargs)
self.src_dirs = src_dirs
def _build(self):
if not self.compileDirectories(self.src_dirs):
return
obj_list = os.listdir(self._dir_obj)
obj_list = [os.path.join(self._dir_obj, f) for f in obj_list]
app_path = os.path.join(self._dir_out, self.name)
cmd = '%s -o %s %s %s' % \
(self._options.cc, app_path, \
string.join(obj_list, ' '), \
string.join(self._options.ldlibs, ' '))
msg_write(' [LD]', self.name)
exit_code, output = execCommand(cmd)
if exit_code != 0:
msg_write(' * Failed with Status %d\n * %s\n%s' % (exit_code, cmd, output))
sys.exit(1)
msg_write()
class BuildMiniTools(Build):
def __init__(self, name, src_dirs, **kwargs):
super(BuildMiniTools, self).__init__(name, **kwargs)
self.src_dirs = src_dirs
def _build(self):
if not self.compileDirectories(self.src_dirs):
return []
job_exec = JobExecutor()
for obj_name in os.listdir(self._dir_obj):
job_exec.add_job(self.linkFile, os.path.join(self._dir_obj, obj_name))
tools = job_exec.wait_jobs()
msg_write()
return sorted(tools)
class BuildConfig(Build):
HEADER_TITLE = 'Running Build.Config'
def __init__(self, name, src_dirs, **kwargs):
super(BuildConfig, self).__init__(name, **kwargs)
self.src_dirs = src_dirs
def _build(self, config_file, config_head, debug=False, dump_error=False):
job_exec = JobExecutor()
test_func = lambda f: job_exec.add_job(self._testApp, f, dump_error)
for src_dir in self.src_dirs:
cFilesWalk(src_dir, test_func)
config = job_exec.wait_jobs(raise_failure=False)
self._writeConfigFile(config_file, config_head, config, debug)
def _testApp(self, filename, dump_error):
try:
self.compileFile(filename, dump_error=dump_error)
self.linkFile(filename, dump_error=dump_error)
except Exception:
msg_write(' [!!]', filename)
raise Exception('Config Test %s failed' % filename)
obj_name, obj_path = self._objFilePath(filename)
app_name, app_path = self._appFilePathFromObj(obj_path)
ldLibraryPathUpdate([self._dir_lib])
exit_code, output = execCommand(app_path)
if exit_code != 0:
msg_write(' [!!]', filename)
raise Exception('Config Test %s failed' % app_name)
return app_name
def _writeConfigFile(self, config_file, config_head, config, debug):
msg_write(' [WR] Write config', config_file)
fd = open(config_file, 'w')
fd.write('/* File autogenerated, do not edit */\n')
fd.write('#ifndef _%s_BUILD_CONFIG_H_\n' % config_head)
fd.write('#define _%s_BUILD_CONFIG_H_\n' % config_head)
fd.write('\n')
fd.write('/* C++ needs to know that types and declarations are C, not C++. */\n')
fd.write('#ifdef __cplusplus\n')
fd.write(' #define __%s_BEGIN_DECLS__ extern "C" {\n' % config_head)
fd.write(' #define __%s_END_DECLS__ }\n' % config_head)
fd.write('#else\n')
fd.write(' #define __%s_BEGIN_DECLS__\n' % config_head)
fd.write(' #define __%s_END_DECLS__\n' % config_head)
fd.write('#endif\n')
fd.write('\n')
if debug:
fd.write('/* Debugging Mode on! Print as much as you can! */\n')
fd.write('#define __Z_DEBUG__ 1\n')
fd.write('\n')
if len(config) > 0:
fd.write("/* You've support for this things... */\n")
for define in config:
fd.write('#define %s_%s\n' % (config_head, define.upper().replace('-', '_')))
fd.write('\n')
fd.write('#endif /* !_%s_BUILD_CONFIG_H_ */\n' % config_head)
fd.flush()
fd.close()
msg_write()
class BuildLibrary(Build):
SKIP_HEADER_ENDS = ('_p.h', 'private.h')
HEADER_TITLE = None
def __init__(self, name, version, src_dirs, copy_dirs=None, **kwargs):
super(BuildLibrary, self).__init__(name, **kwargs)
self.version = version
self.src_dirs = src_dirs
self.copy_dirs = copy_dirs or []
def moveBuild(self, dst):
_removeDirectory(dst)
os.makedirs(dst)
os.rename(self._dir_inc, os.path.join(dst, 'include'))
os.rename(self._dir_lib, os.path.join(dst, 'libs'))
def _build(self):
v_maj, v_min, v_rev = (int(x) for x in self.version.split('.'))
build_nr = self.buildNumber(v_maj, v_min)
msg_write('Copy %s %s (%s) Library Headers' % (self.name, self.version, build_nr))
msg_write('-' * 60)
self.copyHeaders()
msg_write('Building %s %s (%s) Library' % (self.name, self.version, build_nr))
msg_write('-' * 60)
if not self.compileDirectories(self.src_dirs):
return
obj_list = os.listdir(self._dir_obj)
obj_list = [os.path.join(self._dir_obj, f) for f in obj_list]
if not os.path.exists(self._dir_lib):
os.makedirs(self._dir_lib)
libversion_maj = self.version[:self.version.index('.')]
lib_ext = 'dylib' if self.platformIsMac() else 'so'
lib_name = 'lib%s.%s' % (self.name, lib_ext)
lib_name_maj = 'lib%s.%s.%s' % (self.name, lib_ext, libversion_maj)
lib_name_full = 'lib%s.%s.%s' % (self.name, lib_ext, self.version)
lib_path = os.path.join(self._dir_lib, lib_name_full)
if self.platformIsMac():
cmd = '%s -dynamiclib -current_version %s -o %s %s %s' % \
(self._options.cc, self.version, lib_path, \
string.join(obj_list, ' '), \
string.join(self._options.ldlibs, ' '))
elif self.platformIsLinux():
cmd = '%s -shared -Wl,-soname,%s -o %s %s %s' % \
(self._options.cc, lib_name_maj, lib_path, \
string.join(obj_list, ' '), \
string.join(self._options.ldlibs, ' '))
else:
raise RuntimeError("Unsupported Platform %s" % ' '.join(os.uname()))
msg_write()
msg_write(' [LD]', lib_name_full)
exit_code, output = execCommand(cmd)
if exit_code != 0:
msg_write(' * Failed with Status %d\n * %s\n%s' % (exit_code, cmd, output))
sys.exit(1)
cwd = os.getcwd()
os.chdir(self._dir_lib)
for name in (lib_name, lib_name_maj):
msg_write(' [LN]', name)
execCommand('ln -s %s %s' % (lib_name_full, name))
os.chdir(cwd)
msg_write()
def copyHeaders(self):
self.copyHeadersFromTo(None, self.src_dirs)
for hname, hdirs in self.copy_dirs:
self.copyHeadersFromTo(hname, hdirs)
msg_write()
def copyHeadersFromTo(self, name, src_dirs):
dir_dst = os.path.join(self._dir_inc, self.name)
if name is not None:
dir_dst = os.path.join(dir_dst, name)
_removeDirectory(dir_dst)
os.makedirs(dir_dst)
for dir_src in src_dirs:
for root, dirs, files in os.walk(dir_src, topdown=False):
for name in files:
if not name.endswith('.h'):
continue
for s in self.SKIP_HEADER_ENDS:
if name.endswith(s):
break
else:
src_path = os.path.join(root, name)
dst_path = os.path.join(dir_dst, name)
shutil.copyfile(src_path, dst_path)
msg_write(' [CP]', dst_path)
# =============================================================================
# Project
# =============================================================================
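# Helpers producing linker/include flags that point at a sibling target's
# build output, e.g. _ldlib('zcl') yields '-Lbuild/zcl/libs -lzcl'.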
_ldlib = lambda name: '-L%s/%s/libs -l%s' % (Build.DEFAULT_BUILD_DIR, name, name)
_inclib = lambda name: '-I%s/%s/include' % (Build.DEFAULT_BUILD_DIR, name)
class Project(object):
BUILD_DIR = 'build'
NAME = None
def __init__(self, options):
self.options = options
DEFAULT_CFLAGS = ['-Wall', '-Wmissing-field-initializers', '-msse4.2'] #, '-mcx16']
DEFAULT_RELEASE_CFLAGS = ['-O3']
DEFAULT_DEBUG_CFLAGS = ['-g']
DEFAULT_DEFINES = ['-D_GNU_SOURCE', '-D__USE_FILE_OFFSET64']
DEFAULT_LDLIBS = ['-lpthread', '-lm']
# Default Build Options
default_opts = BuildOptions()
self.default_opts = default_opts
default_opts.addDefines(DEFAULT_DEFINES)
default_opts.addLdLibs(DEFAULT_LDLIBS)
default_opts.addCFlags(DEFAULT_CFLAGS)
default_opts.setPedantic(options.pedantic)
if options.compiler is not None:
default_opts.setCompiler(options.compiler)
if options.release:
default_opts.addCFlags(DEFAULT_RELEASE_CFLAGS)
else:
default_opts.addCFlags(DEFAULT_DEBUG_CFLAGS)
if options.pedantic:
default_opts.addCFlags(['-pedantic', '-Wignored-qualifiers', '-Wsign-compare',
'-Wtype-limits', '-Wuninitialized', '-Winline', '-Wpacked', '-Wcast-align',
'-Wconversion', '-Wuseless-cast', '-Wsign-conversion'])
else:
default_opts.addCFlags(['-Werror'])
# Default Library Build Options
default_lib_opts = default_opts.clone()
self.default_lib_opts = default_lib_opts
default_lib_opts.addCFlags(['-fPIC', '-fno-strict-aliasing'])
def build_config(self):
print "No build-config step for '%s'" % self.NAME
def build_auto_generated(self):
print "No build auto-generated step for '%s'" % self.NAME
def setup_library(self):
return None
@classmethod
def get_includes(self):
return [_inclib(self.NAME)]
@classmethod
def get_ldlibs(self):
return [_ldlib(self.NAME)]
def build_tools(self):
print "No tools step for '%s'" % self.NAME
def build_tests(self):
print "No tests step for '%s'" % self.NAME
return None
# =============================================================================
# Project Targets
# =============================================================================
class Zcl(Project):
VERSION = '0.5.0'
NAME = 'zcl'
def build_config(self):
build_opts = self.default_opts.clone()
build_opts.addCFlags(['-Werror'])
build = BuildConfig('zcl-config', ['build.config'], options=build_opts)
build.build('src/zcl/config.h', 'Z',
debug=not self.options.release and not self.options.no_assert,
dump_error=self.options.verbose)
build.cleanup()
def setup_library(self):
return BuildLibrary(self.NAME, self.VERSION,
['src/zcl'],
options=self.default_lib_opts)
def build_tests(self):
build_opts = self.default_opts.clone()
build_opts.addLdLibs(self.get_ldlibs())
build_opts.addIncludePaths(self.get_includes())
build = BuildMiniTools('zcl-test', ['tests/zcl'], options=build_opts)
return build.build()
class RaleighSL(Project):
VERSION = '0.5.0'
NAME = 'raleighsl'
def setup_library(self):
build_opts = self.default_lib_opts.clone()
build_opts.addLdLibs(Zcl.get_ldlibs())
build_opts.addIncludePaths(Zcl.get_includes())
copy_dirs = [
('devices', ['src/raleighsl/devices']),
#('key', ['src/raleighsl/plugins/key']),
('objects', ['src/raleighsl/objects']),
#('oid', ['src/raleighsl/plugins/oid']),
('semantics', ['src/raleighsl/semantics']),
#('space', ['src/raleighsl/plugins/space']),
#('format', ['src/raleighsl/plugins/format']),
]
return BuildLibrary(self.NAME, self.VERSION,
['src/raleighsl/core'] + list(chain(*[x for _, x in copy_dirs])),
copy_dirs=copy_dirs, options=build_opts)
@classmethod
def get_includes(self):
return [_inclib(self.NAME)] + Zcl.get_includes()
@classmethod
def get_ldlibs(self):
return [_ldlib(self.NAME)] + Zcl.get_ldlibs()
def build_tools(self):
build_opts = self.default_opts.clone()
build_opts.addLdLibs(self.get_ldlibs())
build_opts.addIncludePaths(self.get_includes())
build = BuildMiniTools('raleighsl-tools', ['src/raleighsl/tools'], options=build_opts)
tools = build.build()
def build_tests(self):
build_opts = self.default_opts.clone()
build_opts.addLdLibs(self.get_ldlibs())
build_opts.addIncludePaths(self.get_includes())
build = BuildMiniTools('raleighsl-test', ['tests/raleighsl'], options=build_opts)
return build.build()
class RaleighServer(Project):
VERSION = '0.5.0'
NAME = 'raleigh-server'
def build_auto_generated(self):
compileZclStructs('src/raleigh-server/rpc',
'src/raleigh-server/rpc/generated',
dump_error=self.options.verbose)
def build_tools(self):
build_opts = self.default_opts.clone()
build_opts.addLdLibs(RaleighSL.get_ldlibs())
build_opts.addIncludePaths(RaleighSL.get_includes())
build = BuildApp(self.NAME, ['src/raleigh-server/'], options=build_opts)
if not self.options.xcode:
build.build()
class RaleighClient(Project):
VERSION = '0.5.0'
NAME = 'raleigh-client'
def setup_library(self):
build_opts = self.default_lib_opts.clone()
build_opts.addLdLibs(RaleighSL.get_ldlibs())
build_opts.addIncludePaths(Zcl.get_includes())
return BuildLibrary(self.NAME, self.VERSION,
['src/raleigh-client/raleigh-c'],
options=build_opts)
@classmethod
def get_includes(self):
return [_inclib(self.NAME)] + Zcl.get_includes()
@classmethod
def get_ldlibs(self):
return [_ldlib(self.NAME)] + Zcl.get_ldlibs()
def build_tests(self):
build_opts = self.default_opts.clone()
build_opts.addLdLibs(self.get_ldlibs())
build_opts.addIncludePaths(self.get_includes())
build = BuildMiniTools('%s-test' % self.NAME, ['tests/raleigh-client'], options=build_opts)
return build.build()
def main(options):
for project in (Zcl, RaleighSL, RaleighServer, RaleighClient):
print '=' * 79
print ' Building Target: %s' % project.NAME
print '=' * 79
target = project(options)
target.build_config()
target.build_auto_generated()
library = target.setup_library()
if library is not None:
library.copyHeaders()
if not options.xcode:
library.build()
library.updateLdPath()
if not options.xcode:
target.build_tools()
tests = target.build_tests()
runTests('%s Test' % target.NAME, tests, verbose=options.verbose)
def _parse_cmdline():
try:
from argparse import ArgumentParser
except ImportError:
from optparse import OptionParser
class ArgumentParser(OptionParser):
def add_argument(self, *args, **kwargs):
return self.add_option(*args, **kwargs)
def parse_args(self):
options, args = OptionParser.parse_args(self)
return options
parser = ArgumentParser()
parser.add_argument(dest='clean', nargs='?',
help='Clean the build directory and exit')
parser.add_argument('-c', '--compiler', dest='compiler', action='store',
help='Compiler to use')
parser.add_argument('-x', '--xcode', dest='xcode', action='store_true', default=False,
help="Use XCode to build everything (copy headers only)")
parser.add_argument('-r', '--release', dest='release', action='store_true', default=False,
help="Use release flags during compilation")
parser.add_argument('--no-assert', dest='no_assert', action='store_true', default=False,
help="Disable the asserts even in debug mode")
parser.add_argument('-p', '--pedantic', dest='pedantic', action='store_true', default=False,
help="Issue all the warnings demanded by strict ISO C")
parser.add_argument('-v', '--verbose', dest='verbose', action='store_true', default=False,
help="Show traceback infomation if something fails")
parser.add_argument('--no-output', dest='no_output', action='store_true', default=False,
help='Do not print messages')
return parser.parse_args()
if __name__ == '__main__':
options = _parse_cmdline()
NO_OUTPUT = options.no_output
if options.clean:
removeDirectoryContents(Build.DEFAULT_BUILD_DIR)
sys.exit(0)
try:
with bench('[T] Build Time'):
main(options)
except Exception as e:
print e
sys.exit(1)
sys.exit(0) | matteobertozzi/RaleighSL | build.py | Python | apache-2.0 | 27,503 |
# swift_build_support/products/llvm.py --------------------------*- python -*-
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2017 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ----------------------------------------------------------------------------
from . import cmark
from . import product
from ..cmake import CMakeOptions
class LLVM(product.Product):
def __init__(self, args, toolchain, source_dir, build_dir):
product.Product.__init__(self, args, toolchain, source_dir,
build_dir)
# Add the cmake option for enabling or disabling assertions.
self.cmake_options.define(
'LLVM_ENABLE_ASSERTIONS:BOOL', args.llvm_assertions)
# Add the cmake option for LLVM_TARGETS_TO_BUILD.
self.cmake_options.define(
'LLVM_TARGETS_TO_BUILD', args.llvm_targets_to_build)
# Add the cmake options for vendors
self.cmake_options.extend(self._compiler_vendor_flags)
# Add the cmake options for compiler version information.
self.cmake_options.extend(self._version_flags)
@classmethod
def is_build_script_impl_product(cls):
"""is_build_script_impl_product -> bool
Whether this product is produced by build-script-impl.
"""
return True
@classmethod
def is_before_build_script_impl_product(cls):
"""is_before_build_script_impl_product -> bool
Whether this product is build before any build-script-impl products.
"""
return False
@property
def _compiler_vendor_flags(self):
if self.args.compiler_vendor == "none":
return []
if self.args.compiler_vendor != "apple":
raise RuntimeError("Unknown compiler vendor?!")
return [
('CLANG_VENDOR', 'Apple'),
('CLANG_VENDOR_UTI', 'com.apple.compilers.llvm.clang'),
# This is safe since we always provide a default.
('PACKAGE_VERSION', str(self.args.clang_user_visible_version))
]
@property
def _version_flags(self):
result = CMakeOptions()
if self.args.clang_compiler_version is not None:
result.define(
'CLANG_REPOSITORY_STRING',
"clang-{}".format(self.args.clang_compiler_version))
return result
@classmethod
def get_dependencies(cls):
return [cmark.CMark]
| JGiola/swift | utils/swift_build_support/swift_build_support/products/llvm.py | Python | apache-2.0 | 2,666 |
####################################################################################################
#
# Musica - A Music Theory Package for Python
# Copyright (C) 2017 Fabrice Salvaire
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
####################################################################################################
####################################################################################################
import logging
# import math
import os
# import numpy as np
from .Spectrum import Spectrum
####################################################################################################
_module_logger = logging.getLogger(__name__)
####################################################################################################
class AudioFormatMetadata:
##############################################
def __init__(self,
number_of_channels, # int > 0
sampling_frequency, # e.g. 44.1kHz 48kHz 96kHz
bits_per_sample, # e.g. 8 16 24-bit
):
self._number_of_channels = number_of_channels
self._sampling_frequency = sampling_frequency
self._bits_per_sample = bits_per_sample
##############################################
@property
def number_of_channels(self):
return self._number_of_channels
@property
def sampling_frequency(self):
return self._sampling_frequency
@property
def time_resolution(self):
return 1 / self._sampling_frequency
@property
def bits_per_sample(self):
return self._bits_per_sample
@property
def float_scale(self):
# N-bit signed integer range from -2**(N-1) to 2**(N-1) -1
return 2**(self._bits_per_sample -1)
##############################################
def sample_to_time(self, i):
return i / self._sampling_frequency
def time_to_sample(self, t):
return int(t * self._sampling_frequency)
####################################################################################################
class AudioFormatMetaclass(type):
__extensions__ = {}
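    # Registry mapping file extension -> AudioFormat subclass; populated by
    # the metaclass as each subclass declares its own __extensions__ list.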
_logger = _module_logger.getChild('AudioFormatMetaclass')
##############################################
def __new__(cls, class_name, base_classes, attributes):
return super().__new__(cls, class_name, base_classes, attributes)
##############################################
def __init__(cls, class_name, base_classes, attributes):
type.__init__(cls, class_name, base_classes, attributes)
if cls.__extensions__ is not None:
for extension in cls.__extensions__:
AudioFormatMetaclass._logger.info('Register {} for {}'.format(cls, extension))
AudioFormatMetaclass.__extensions__[extension] = cls
##############################################
@classmethod
def get(cls, extension):
if extension.startswith('.'):
extension = extension[1:]
return cls.__extensions__[extension]
####################################################################################################
class AudioFormat(metaclass=AudioFormatMetaclass):
__extensions__ = None
_logger = _module_logger.getChild('AudioFormat')
##############################################
@classmethod
def open(cls, path):
basename, ext = os.path.splitext(path)
audio_format_cls = AudioFormatMetaclass.get(ext)
return audio_format_cls(path)
##############################################
def __init__(self, metadata, channels):
self._metadata = metadata
self._channels = channels
##############################################
@property
def metadata(self):
return self._metadata
def channel(self, i, as_float=False):
data = self._channels[i]
if as_float:
return data / self._metadata.float_scale
else:
return data
##############################################
def spectrum(self, channel, **kwargs):
sampling_frequency = self._metadata.sampling_frequency
window = kwargs.get('window', 'hann')
data = self.channel(channel, as_float=True)
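        # The slice to analyse may be given as times ('start'/'stop', seconds),
        # sample indices, a sample count, or a target frequency resolution.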
if 'start' in kwargs:
start = self._metadata.time_to_sample(kwargs['start'])
else:
start = kwargs.get('start_sample', 0)
if 'number_of_samples' in kwargs:
stop = start + kwargs['number_of_samples']
elif 'stop_sample' in kwargs:
stop = kwargs['stop_sample'] + 1
elif 'stop' in kwargs:
stop = self._metadata.time_to_sample(kwargs['stop']) + 1
elif 'frequency_resolution' in kwargs:
            number_of_samples = Spectrum.sample_for_resolution(sampling_frequency,
                                                               kwargs['frequency_resolution'],
                                                               kwargs.get('power_of_two', True))
            # Derive the stop sample from the requested frequency resolution.
            stop = start + number_of_samples
else:
stop = data.size
if stop > data.size:
raise ValueError("stop is too large")
data = data[start:stop]
self._logger.info("spectrum from {} to {}".format(start, stop))
return Spectrum(sampling_frequency, data, window)
| FabriceSalvaire/Musica | Musica/Audio/AudioFormat.py | Python | gpl-3.0 | 5,905 |
import demistomock as demisto
from CommonServerPython import *
from CommonServerUserPython import *
start_time = demisto.args()['start_time'].replace('"', '')
end_time = demisto.args()['end_time'].replace('"', '')
try:
# Strip microseconds and convert to datetime object
start_time_obj = datetime.strptime(start_time.split(".")[0], "%Y-%m-%dT%H:%M:%S")
end_time_obj = datetime.strptime(end_time.split(".")[0], "%Y-%m-%dT%H:%M:%S")
# Calculate the difference in minutes
time_diff = end_time_obj - start_time_obj
mins = round((time_diff.total_seconds() / 60), 2)
hr = 'Calculated Time Difference: {} minutes.'.format(str(mins))
context = {
"Time.Difference": mins,
"Time.Start": start_time,
"Time.End": end_time
}
return_outputs(hr, context, mins)
except Exception as ex:
return_error('Error occurred while parsing output from command. Exception info:\n' + str(ex))
| VirusTotal/content | Packs/CommonScripts/Scripts/CalculateTimeDifference/CalculateTimeDifference.py | Python | mit | 938 |
#! /usr/bin/env python3
# Convert GNU texinfo files into HTML, one file per node.
# Based on Texinfo 2.14.
# Usage: texi2html [-d] [-d] [-c] inputfile outputdirectory
# The input file must be a complete texinfo file, e.g. emacs.texi.
# This creates many files (one per info node) in the output directory,
# overwriting existing files of the same name. All files created have
# ".html" as their extension.
# XXX To do:
# - handle @comment*** correctly
# - handle @xref {some words} correctly
# - handle @ftable correctly (items aren't indexed?)
# - handle @itemx properly
# - handle @exdent properly
# - add links directly to the proper line from indices
# - check against the definitive list of @-cmds; we still miss (among others):
# - @defindex (hard)
# - @c(omment) in the middle of a line (rarely used)
# - @this* (not really needed, only used in headers anyway)
# - @today{} (ever used outside title page?)
# More consistent handling of chapters/sections/etc.
# Lots of documentation
# Many more options:
# -top designate top node
# -links customize which types of links are included
# -split split at chapters or sections instead of nodes
# -name Allow different types of filename handling. Non unix systems
# will have problems with long node names
# ...
# Support the most recent texinfo version and take a good look at HTML 3.0
# More debugging output (customizable) and more flexible error handling
# How about icons ?
# rpyron 2002-05-07
# Robert Pyron <rpyron@alum.mit.edu>
# 1. BUGFIX: In function makefile(), strip blanks from the nodename.
# This is necessary to match the behavior of parser.makeref() and
# parser.do_node().
# 2. BUGFIX fixed KeyError in end_ifset (well, I may have just made
# it go away, rather than fix it)
# 3. BUGFIX allow @menu and menu items inside @ifset or @ifclear
# 4. Support added for:
# @uref URL reference
# @image image file reference (see note below)
# @multitable output an HTML table
# @vtable
# 5. Partial support for accents, to match MAKEINFO output
# 6. I added a new command-line option, '-H basename', to specify
# HTML Help output. This will cause three files to be created
# in the current directory:
# `basename`.hhp HTML Help Workshop project file
# `basename`.hhc Contents file for the project
# `basename`.hhk Index file for the project
# When fed into HTML Help Workshop, the resulting file will be
# named `basename`.chm.
# 7. A new class, HTMLHelp, to accomplish item 6.
# 8. Various calls to HTMLHelp functions.
# A NOTE ON IMAGES: Just as 'outputdirectory' must exist before
# running this program, all referenced images must already exist
# in outputdirectory.
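#
# Example invocations (added for illustration; the file and directory names
# below are hypothetical):
#
#   python texi2html.py emacs.texi ./emacs-html
#   python texi2html.py -d -p emacs.texi ./emacs-html
#   python texi2html.py -H emacs emacs.texi ./emacs-html (also emit HTML Help)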
import os
import sys
import string
import re
MAGIC = '\\input texinfo'
cmprog = re.compile('^@([a-z]+)([ \t]|$)') # Command (line-oriented)
blprog = re.compile('^[ \t]*$') # Blank line
kwprog = re.compile('@[a-z]+') # Keyword (embedded, usually
# with {} args)
spprog = re.compile('[\n@{}&<>]') # Special characters in
# running text
#
# menu item (Yuck!)
miprog = re.compile(r'^\* ([^:]*):(:|[ \t]*([^\t,\n.]+)([^ \t\n]*))[ \t\n]*')
# group 1: the menu label
# group 2: either the ':' of the '::' shorthand, or the node reference
# group 3: the node name when it is spelled out separately
# group 4: trailing punctuation after the node name
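# Hedged demo (added; not in the original): what miprog captures for the two
# menu-item forms. Defined for illustration and never called.
def _demo_miprog():
    mo = miprog.match('* Overview: Intro. The first node.\n')
    assert mo.group(1) == 'Overview' and mo.group(3) == 'Intro'
    mo = miprog.match('* Intro::\n')        # '::' means label == node name
    assert mo.group(1) == 'Intro' and mo.group(2) == ':'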
class HTMLNode:
"""Some of the parser's functionality is separated into this class.
A Node accumulates its contents, takes care of links to other Nodes
and saves itself when it is finished and all links are resolved.
"""
DOCTYPE = '<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML 2.0//EN">'
type = 0
cont = ''
epilogue = '</BODY></HTML>\n'
def __init__(self, dir, name, topname, title, next, prev, up):
self.dirname = dir
self.name = name
if topname:
self.topname = topname
else:
self.topname = name
self.title = title
self.next = next
self.prev = prev
self.up = up
self.lines = []
def write(self, *lines):
for line in lines:
self.lines.append(line)
def flush(self):
fp = open(self.dirname + '/' + makefile(self.name), 'w')
fp.write(self.prologue)
fp.write(self.text)
fp.write(self.epilogue)
fp.close()
def link(self, label, nodename, rel=None, rev=None):
if nodename:
if nodename.lower() == '(dir)':
addr = '../dir.html'
title = ''
else:
addr = makefile(nodename)
title = ' TITLE="%s"' % nodename
self.write(label, ': <A HREF="', addr, '"', \
rel and (' REL=' + rel) or "", \
rev and (' REV=' + rev) or "", \
title, '>', nodename, '</A> \n')
def finalize(self):
length = len(self.lines)
self.text = ''.join(self.lines)
self.lines = []
self.open_links()
self.output_links()
self.close_links()
links = ''.join(self.lines)
self.lines = []
self.prologue = (
self.DOCTYPE +
'\n<HTML><HEAD>\n'
' <!-- Converted with texi2html and Python -->\n'
' <TITLE>' + self.title + '</TITLE>\n'
' <LINK REL=Next HREF="'
+ makefile(self.next) + '" TITLE="' + self.next + '">\n'
' <LINK REL=Previous HREF="'
+ makefile(self.prev) + '" TITLE="' + self.prev + '">\n'
' <LINK REL=Up HREF="'
+ makefile(self.up) + '" TITLE="' + self.up + '">\n'
'</HEAD><BODY>\n' +
links)
if length > 20:
self.epilogue = '<P>\n%s</BODY></HTML>\n' % links
def open_links(self):
self.write('<HR>\n')
def close_links(self):
self.write('<HR>\n')
def output_links(self):
if self.cont != self.next:
self.link(' Cont', self.cont)
self.link(' Next', self.next, rel='Next')
self.link(' Prev', self.prev, rel='Previous')
self.link(' Up', self.up, rel='Up')
if self.name != self.topname:
self.link(' Top', self.topname)
class HTML3Node(HTMLNode):
DOCTYPE = '<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML Level 3//EN//3.0">'
def open_links(self):
self.write('<DIV CLASS=Navigation>\n <HR>\n')
def close_links(self):
self.write(' <HR>\n</DIV>\n')
class TexinfoParser:
COPYRIGHT_SYMBOL = "©"
FN_ID_PATTERN = "(%(id)s)"
FN_SOURCE_PATTERN = '<A NAME=footnoteref%(id)s' \
' HREF="#footnotetext%(id)s">' \
+ FN_ID_PATTERN + '</A>'
FN_TARGET_PATTERN = '<A NAME=footnotetext%(id)s' \
' HREF="#footnoteref%(id)s">' \
+ FN_ID_PATTERN + '</A>\n%(text)s<P>\n'
FN_HEADER = '\n<P>\n<HR NOSHADE SIZE=1 WIDTH=200>\n' \
'<STRONG><EM>Footnotes</EM></STRONG>\n<P>'
Node = HTMLNode
# Initialize an instance
def __init__(self):
self.unknown = {} # statistics about unknown @-commands
self.filenames = {} # Check for identical filenames
self.debugging = 0 # larger values produce more output
self.print_headers = 0 # always print headers?
self.nodefp = None # open file we're writing to
self.nodelineno = 0 # Linenumber relative to node
self.links = None # Links from current node
self.savetext = None # If not None, save text head instead
        self.savestack = []     # Stack of texts saved while saving recursively
self.htmlhelp = None # html help data
self.dirname = 'tmp' # directory where files are created
self.includedir = '.' # directory to search @include files
self.nodename = '' # name of current node
self.topname = '' # name of top node (first node seen)
self.title = '' # title of this whole Texinfo tree
self.resetindex() # Reset all indices
self.contents = [] # Reset table of contents
self.numbering = [] # Reset section numbering counters
self.nofill = 0 # Normal operation: fill paragraphs
self.values={'html': 1} # Names that should be parsed in ifset
self.stackinfo={} # Keep track of state in the stack
# XXX The following should be reset per node?!
self.footnotes = [] # Reset list of footnotes
self.itemarg = None # Reset command used by @item
self.itemnumber = None # Reset number for @item in @enumerate
self.itemindex = None # Reset item index name
self.node = None
self.nodestack = []
self.cont = 0
self.includedepth = 0
# Set htmlhelp helper class
def sethtmlhelp(self, htmlhelp):
self.htmlhelp = htmlhelp
# Set (output) directory name
def setdirname(self, dirname):
self.dirname = dirname
# Set include directory name
def setincludedir(self, includedir):
self.includedir = includedir
# Parse the contents of an entire file
def parse(self, fp):
line = fp.readline()
lineno = 1
while line and (line[0] == '%' or blprog.match(line)):
line = fp.readline()
lineno = lineno + 1
if line[:len(MAGIC)] != MAGIC:
raise SyntaxError('file does not begin with %r' % (MAGIC,))
self.parserest(fp, lineno)
# Parse the contents of a file, not expecting a MAGIC header
def parserest(self, fp, initial_lineno):
lineno = initial_lineno
self.done = 0
self.skip = 0
self.stack = []
accu = []
while not self.done:
line = fp.readline()
self.nodelineno = self.nodelineno + 1
if not line:
if accu:
if not self.skip: self.process(accu)
accu = []
if initial_lineno > 0:
print('*** EOF before @bye')
break
lineno = lineno + 1
mo = cmprog.match(line)
if mo:
a, b = mo.span(1)
cmd = line[a:b]
if cmd in ('noindent', 'refill'):
accu.append(line)
else:
if accu:
if not self.skip:
self.process(accu)
accu = []
self.command(line, mo)
elif blprog.match(line) and \
'format' not in self.stack and \
'example' not in self.stack:
if accu:
if not self.skip:
self.process(accu)
if self.nofill:
self.write('\n')
else:
self.write('<P>\n')
accu = []
else:
# Append the line including trailing \n!
accu.append(line)
#
if self.skip:
print('*** Still skipping at the end')
if self.stack:
print('*** Stack not empty at the end')
print('***', self.stack)
if self.includedepth == 0:
while self.nodestack:
self.nodestack[-1].finalize()
self.nodestack[-1].flush()
del self.nodestack[-1]
# Start saving text in a buffer instead of writing it to a file
def startsaving(self):
if self.savetext is not None:
self.savestack.append(self.savetext)
# print '*** Recursively saving text, expect trouble'
self.savetext = ''
# Return the text saved so far and start writing to file again
def collectsavings(self):
savetext = self.savetext
if len(self.savestack) > 0:
self.savetext = self.savestack[-1]
del self.savestack[-1]
else:
self.savetext = None
return savetext or ''
# Write text to file, or save it in a buffer, or ignore it
def write(self, *args):
try:
text = ''.join(args)
except:
print(args)
raise TypeError
if self.savetext is not None:
self.savetext = self.savetext + text
elif self.nodefp:
self.nodefp.write(text)
elif self.node:
self.node.write(text)
# Complete the current node -- write footnotes and close file
def endnode(self):
if self.savetext is not None:
print('*** Still saving text at end of node')
dummy = self.collectsavings()
if self.footnotes:
self.writefootnotes()
if self.nodefp:
if self.nodelineno > 20:
self.write('<HR>\n')
[name, next, prev, up] = self.nodelinks[:4]
self.link('Next', next)
self.link('Prev', prev)
self.link('Up', up)
if self.nodename != self.topname:
self.link('Top', self.topname)
self.write('<HR>\n')
self.write('</BODY>\n')
self.nodefp.close()
self.nodefp = None
elif self.node:
if not self.cont and \
(not self.node.type or \
(self.node.next and self.node.prev and self.node.up)):
self.node.finalize()
self.node.flush()
else:
self.nodestack.append(self.node)
self.node = None
self.nodename = ''
# Process a list of lines, expanding embedded @-commands
# This mostly distinguishes between menus and normal text
def process(self, accu):
if self.debugging > 1:
print('!'*self.debugging, 'process:', self.skip, self.stack, end=' ')
if accu: print(accu[0][:30], end=' ')
if accu[0][30:] or accu[1:]: print('...', end=' ')
print()
if self.inmenu():
# XXX should be done differently
for line in accu:
mo = miprog.match(line)
if not mo:
line = line.strip() + '\n'
self.expand(line)
continue
bgn, end = mo.span(0)
a, b = mo.span(1)
c, d = mo.span(2)
e, f = mo.span(3)
g, h = mo.span(4)
label = line[a:b]
nodename = line[c:d]
if nodename[0] == ':': nodename = label
else: nodename = line[e:f]
punct = line[g:h]
self.write(' <LI><A HREF="',
makefile(nodename),
'">', nodename,
'</A>', punct, '\n')
self.htmlhelp.menuitem(nodename)
self.expand(line[end:])
else:
text = ''.join(accu)
self.expand(text)
# find 'menu' (we might be inside 'ifset' or 'ifclear')
def inmenu(self):
#if 'menu' in self.stack:
# print 'inmenu :', self.skip, self.stack, self.stackinfo
stack = self.stack
while stack and stack[-1] in ('ifset','ifclear'):
try:
if self.stackinfo[len(stack)]:
return 0
except KeyError:
pass
stack = stack[:-1]
return (stack and stack[-1] == 'menu')
# Write a string, expanding embedded @-commands
def expand(self, text):
stack = []
i = 0
n = len(text)
while i < n:
start = i
mo = spprog.search(text, i)
if mo:
i = mo.start()
else:
self.write(text[start:])
break
self.write(text[start:i])
c = text[i]
i = i+1
if c == '\n':
self.write('\n')
continue
if c == '<':
self.write('<')
continue
if c == '>':
self.write('>')
continue
if c == '&':
self.write('&')
continue
if c == '{':
stack.append('')
continue
if c == '}':
if not stack:
print('*** Unmatched }')
self.write('}')
continue
cmd = stack[-1]
del stack[-1]
try:
method = getattr(self, 'close_' + cmd)
except AttributeError:
self.unknown_close(cmd)
continue
method()
continue
if c != '@':
# Cannot happen unless spprog is changed
raise RuntimeError('unexpected funny %r' % c)
start = i
while i < n and text[i] in string.ascii_letters: i = i+1
if i == start:
# @ plus non-letter: literal next character
i = i+1
c = text[start:i]
if c == ':':
# `@:' means no extra space after
# preceding `.', `?', `!' or `:'
pass
else:
# `@.' means a sentence-ending period;
# `@@', `@{', `@}' quote `@', `{', `}'
self.write(c)
continue
cmd = text[start:i]
if i < n and text[i] == '{':
i = i+1
stack.append(cmd)
try:
method = getattr(self, 'open_' + cmd)
except AttributeError:
self.unknown_open(cmd)
continue
method()
continue
try:
method = getattr(self, 'handle_' + cmd)
except AttributeError:
self.unknown_handle(cmd)
continue
method()
if stack:
print('*** Stack not empty at para:', stack)
# --- Handle unknown embedded @-commands ---
def unknown_open(self, cmd):
print('*** No open func for @' + cmd + '{...}')
cmd = cmd + '{'
self.write('@', cmd)
if cmd not in self.unknown:
self.unknown[cmd] = 1
else:
self.unknown[cmd] = self.unknown[cmd] + 1
def unknown_close(self, cmd):
print('*** No close func for @' + cmd + '{...}')
cmd = '}' + cmd
self.write('}')
if cmd not in self.unknown:
self.unknown[cmd] = 1
else:
self.unknown[cmd] = self.unknown[cmd] + 1
def unknown_handle(self, cmd):
print('*** No handler for @' + cmd)
self.write('@', cmd)
if cmd not in self.unknown:
self.unknown[cmd] = 1
else:
self.unknown[cmd] = self.unknown[cmd] + 1
# XXX The following sections should be ordered as the texinfo docs
# --- Embedded @-commands without {} argument list --
def handle_noindent(self): pass
def handle_refill(self): pass
# --- Include file handling ---
def do_include(self, args):
file = args
file = os.path.join(self.includedir, file)
try:
fp = open(file, 'r')
except IOError as msg:
print('*** Can\'t open include file', repr(file))
return
print('!'*self.debugging, '--> file', repr(file))
save_done = self.done
save_skip = self.skip
save_stack = self.stack
self.includedepth = self.includedepth + 1
self.parserest(fp, 0)
self.includedepth = self.includedepth - 1
fp.close()
self.done = save_done
self.skip = save_skip
self.stack = save_stack
print('!'*self.debugging, '<-- file', repr(file))
# --- Special Insertions ---
def open_dmn(self): pass
def close_dmn(self): pass
def open_dots(self): self.write('...')
def close_dots(self): pass
def open_bullet(self): pass
def close_bullet(self): pass
def open_TeX(self): self.write('TeX')
def close_TeX(self): pass
def handle_copyright(self): self.write(self.COPYRIGHT_SYMBOL)
def open_copyright(self): self.write(self.COPYRIGHT_SYMBOL)
def close_copyright(self): pass
def open_minus(self): self.write('-')
def close_minus(self): pass
# --- Accents ---
# rpyron 2002-05-07
# I would like to do at least as well as makeinfo when
# it is producing HTML output:
#
# input output
# @"o @"o umlaut accent
# @'o 'o acute accent
# @,{c} @,{c} cedilla accent
# @=o @=o macron/overbar accent
# @^o @^o circumflex accent
# @`o `o grave accent
# @~o @~o tilde accent
# @dotaccent{o} @dotaccent{o} overdot accent
# @H{o} @H{o} long Hungarian umlaut
# @ringaccent{o} @ringaccent{o} ring accent
# @tieaccent{oo} @tieaccent{oo} tie-after accent
# @u{o} @u{o} breve accent
# @ubaraccent{o} @ubaraccent{o} underbar accent
# @udotaccent{o} @udotaccent{o} underdot accent
# @v{o} @v{o} hacek or check accent
# @exclamdown{} ¡ upside-down !
# @questiondown{} ¿ upside-down ?
# @aa{},@AA{} å,Å a,A with circle
# @ae{},@AE{} æ,Æ ae,AE ligatures
# @dotless{i} @dotless{i} dotless i
# @dotless{j} @dotless{j} dotless j
# @l{},@L{} l/,L/ suppressed-L,l
# @o{},@O{} ø,Ø O,o with slash
# @oe{},@OE{} oe,OE oe,OE ligatures
# @ss{} ß es-zet or sharp S
#
# The following character codes and approximations have been
# copied from makeinfo's HTML output.
def open_exclamdown(self): self.write('¡') # upside-down !
def close_exclamdown(self): pass
def open_questiondown(self): self.write('¿') # upside-down ?
def close_questiondown(self): pass
def open_aa(self): self.write('å') # a with circle
def close_aa(self): pass
def open_AA(self): self.write('Å') # A with circle
def close_AA(self): pass
def open_ae(self): self.write('æ') # ae ligatures
def close_ae(self): pass
def open_AE(self): self.write('Æ') # AE ligatures
def close_AE(self): pass
def open_o(self): self.write('ø') # o with slash
def close_o(self): pass
def open_O(self): self.write('Ø') # O with slash
def close_O(self): pass
def open_ss(self): self.write('ß') # es-zet or sharp S
def close_ss(self): pass
def open_oe(self): self.write('oe') # oe ligatures
def close_oe(self): pass
def open_OE(self): self.write('OE') # OE ligatures
def close_OE(self): pass
def open_l(self): self.write('l/') # suppressed-l
def close_l(self): pass
def open_L(self): self.write('L/') # suppressed-L
def close_L(self): pass
# --- Special Glyphs for Examples ---
def open_result(self): self.write('=>')
def close_result(self): pass
def open_expansion(self): self.write('==>')
def close_expansion(self): pass
def open_print(self): self.write('-|')
def close_print(self): pass
def open_error(self): self.write('error-->')
def close_error(self): pass
def open_equiv(self): self.write('==')
def close_equiv(self): pass
def open_point(self): self.write('-!-')
def close_point(self): pass
# --- Cross References ---
def open_pxref(self):
self.write('see ')
self.startsaving()
def close_pxref(self):
self.makeref()
def open_xref(self):
self.write('See ')
self.startsaving()
def close_xref(self):
self.makeref()
def open_ref(self):
self.startsaving()
def close_ref(self):
self.makeref()
def open_inforef(self):
self.write('See info file ')
self.startsaving()
def close_inforef(self):
text = self.collectsavings()
args = [s.strip() for s in text.split(',')]
while len(args) < 3: args.append('')
node = args[0]
file = args[2]
self.write('`', file, '\', node `', node, '\'')
def makeref(self):
text = self.collectsavings()
args = [s.strip() for s in text.split(',')]
while len(args) < 5: args.append('')
nodename = label = args[0]
if args[2]: label = args[2]
file = args[3]
title = args[4]
href = makefile(nodename)
if file:
href = '../' + file + '/' + href
self.write('<A HREF="', href, '">', label, '</A>')
# rpyron 2002-05-07 uref support
def open_uref(self):
self.startsaving()
def close_uref(self):
text = self.collectsavings()
args = [s.strip() for s in text.split(',')]
while len(args) < 2: args.append('')
href = args[0]
label = args[1]
if not label: label = href
self.write('<A HREF="', href, '">', label, '</A>')
# rpyron 2002-05-07 image support
# GNU makeinfo producing HTML output tries `filename.png'; if
# that does not exist, it tries `filename.jpg'. If that does
# not exist either, it complains. GNU makeinfo does not handle
# GIF files; however, I include GIF support here because
# MySQL documentation uses GIF files.
def open_image(self):
self.startsaving()
def close_image(self):
self.makeimage()
def makeimage(self):
text = self.collectsavings()
args = [s.strip() for s in text.split(',')]
while len(args) < 5: args.append('')
filename = args[0]
width = args[1]
height = args[2]
alt = args[3]
ext = args[4]
# The HTML output will have a reference to the image
# that is relative to the HTML output directory,
# which is what 'filename' gives us. However, we need
# to find it relative to our own current directory,
# so we construct 'imagename'.
imagelocation = self.dirname + '/' + filename
if os.path.exists(imagelocation+'.png'):
filename += '.png'
elif os.path.exists(imagelocation+'.jpg'):
filename += '.jpg'
elif os.path.exists(imagelocation+'.gif'): # MySQL uses GIF files
filename += '.gif'
else:
print("*** Cannot find image " + imagelocation)
#TODO: what is 'ext'?
self.write('<IMG SRC="', filename, '"', \
width and (' WIDTH="' + width + '"') or "", \
height and (' HEIGHT="' + height + '"') or "", \
alt and (' ALT="' + alt + '"') or "", \
'/>' )
self.htmlhelp.addimage(imagelocation)
# --- Marking Words and Phrases ---
# --- Other @xxx{...} commands ---
def open_(self): pass # Used by {text enclosed in braces}
def close_(self): pass
open_asis = open_
close_asis = close_
def open_cite(self): self.write('<CITE>')
def close_cite(self): self.write('</CITE>')
def open_code(self): self.write('<CODE>')
def close_code(self): self.write('</CODE>')
def open_t(self): self.write('<TT>')
def close_t(self): self.write('</TT>')
def open_dfn(self): self.write('<DFN>')
def close_dfn(self): self.write('</DFN>')
def open_emph(self): self.write('<EM>')
def close_emph(self): self.write('</EM>')
def open_i(self): self.write('<I>')
def close_i(self): self.write('</I>')
def open_footnote(self):
# if self.savetext is not None:
# print '*** Recursive footnote -- expect weirdness'
id = len(self.footnotes) + 1
self.write(self.FN_SOURCE_PATTERN % {'id': repr(id)})
self.startsaving()
def close_footnote(self):
id = len(self.footnotes) + 1
self.footnotes.append((id, self.collectsavings()))
def writefootnotes(self):
self.write(self.FN_HEADER)
for id, text in self.footnotes:
self.write(self.FN_TARGET_PATTERN
% {'id': repr(id), 'text': text})
self.footnotes = []
def open_file(self): self.write('<CODE>')
def close_file(self): self.write('</CODE>')
def open_kbd(self): self.write('<KBD>')
def close_kbd(self): self.write('</KBD>')
def open_key(self): self.write('<KEY>')
def close_key(self): self.write('</KEY>')
def open_r(self): self.write('<R>')
def close_r(self): self.write('</R>')
def open_samp(self): self.write('`<SAMP>')
def close_samp(self): self.write('</SAMP>\'')
def open_sc(self): self.write('<SMALLCAPS>')
def close_sc(self): self.write('</SMALLCAPS>')
def open_strong(self): self.write('<STRONG>')
def close_strong(self): self.write('</STRONG>')
def open_b(self): self.write('<B>')
def close_b(self): self.write('</B>')
def open_var(self): self.write('<VAR>')
def close_var(self): self.write('</VAR>')
def open_w(self): self.write('<NOBREAK>')
def close_w(self): self.write('</NOBREAK>')
def open_url(self): self.startsaving()
def close_url(self):
text = self.collectsavings()
self.write('<A HREF="', text, '">', text, '</A>')
def open_email(self): self.startsaving()
def close_email(self):
text = self.collectsavings()
self.write('<A HREF="mailto:', text, '">', text, '</A>')
open_titlefont = open_
close_titlefont = close_
def open_small(self): pass
def close_small(self): pass
def command(self, line, mo):
a, b = mo.span(1)
cmd = line[a:b]
args = line[b:].strip()
if self.debugging > 1:
print('!'*self.debugging, 'command:', self.skip, self.stack, \
'@' + cmd, args)
try:
func = getattr(self, 'do_' + cmd)
except AttributeError:
try:
func = getattr(self, 'bgn_' + cmd)
except AttributeError:
# don't complain if we are skipping anyway
if not self.skip:
self.unknown_cmd(cmd, args)
return
self.stack.append(cmd)
func(args)
return
if not self.skip or cmd == 'end':
func(args)
def unknown_cmd(self, cmd, args):
print('*** unknown', '@' + cmd, args)
if cmd not in self.unknown:
self.unknown[cmd] = 1
else:
self.unknown[cmd] = self.unknown[cmd] + 1
def do_end(self, args):
words = args.split()
if not words:
print('*** @end w/o args')
else:
cmd = words[0]
if not self.stack or self.stack[-1] != cmd:
print('*** @end', cmd, 'unexpected')
else:
del self.stack[-1]
try:
func = getattr(self, 'end_' + cmd)
except AttributeError:
self.unknown_end(cmd)
return
func()
def unknown_end(self, cmd):
cmd = 'end ' + cmd
print('*** unknown', '@' + cmd)
if cmd not in self.unknown:
self.unknown[cmd] = 1
else:
self.unknown[cmd] = self.unknown[cmd] + 1
# --- Comments ---
def do_comment(self, args): pass
do_c = do_comment
# --- Conditional processing ---
def bgn_ifinfo(self, args): pass
def end_ifinfo(self): pass
def bgn_iftex(self, args): self.skip = self.skip + 1
def end_iftex(self): self.skip = self.skip - 1
def bgn_ignore(self, args): self.skip = self.skip + 1
def end_ignore(self): self.skip = self.skip - 1
def bgn_tex(self, args): self.skip = self.skip + 1
def end_tex(self): self.skip = self.skip - 1
def do_set(self, args):
fields = args.split(' ')
key = fields[0]
if len(fields) == 1:
value = 1
else:
value = ' '.join(fields[1:])
self.values[key] = value
def do_clear(self, args):
self.values[args] = None
def bgn_ifset(self, args):
if args not in self.values or self.values[args] is None:
self.skip = self.skip + 1
self.stackinfo[len(self.stack)] = 1
else:
self.stackinfo[len(self.stack)] = 0
def end_ifset(self):
try:
if self.stackinfo[len(self.stack) + 1]:
self.skip = self.skip - 1
del self.stackinfo[len(self.stack) + 1]
except KeyError:
print('*** end_ifset: KeyError :', len(self.stack) + 1)
def bgn_ifclear(self, args):
if args in self.values and self.values[args] is not None:
self.skip = self.skip + 1
self.stackinfo[len(self.stack)] = 1
else:
self.stackinfo[len(self.stack)] = 0
def end_ifclear(self):
try:
if self.stackinfo[len(self.stack) + 1]:
self.skip = self.skip - 1
del self.stackinfo[len(self.stack) + 1]
except KeyError:
print('*** end_ifclear: KeyError :', len(self.stack) + 1)
def open_value(self):
self.startsaving()
def close_value(self):
key = self.collectsavings()
if key in self.values:
self.write(self.values[key])
else:
print('*** Undefined value: ', key)
# --- Beginning a file ---
do_finalout = do_comment
do_setchapternewpage = do_comment
do_setfilename = do_comment
def do_settitle(self, args):
self.startsaving()
self.expand(args)
self.title = self.collectsavings()
def do_parskip(self, args): pass
# --- Ending a file ---
def do_bye(self, args):
self.endnode()
self.done = 1
# --- Title page ---
def bgn_titlepage(self, args): self.skip = self.skip + 1
def end_titlepage(self): self.skip = self.skip - 1
def do_shorttitlepage(self, args): pass
def do_center(self, args):
# Actually not used outside title page...
self.write('<H1>')
self.expand(args)
self.write('</H1>\n')
do_title = do_center
do_subtitle = do_center
do_author = do_center
do_vskip = do_comment
do_vfill = do_comment
do_smallbook = do_comment
do_paragraphindent = do_comment
do_headings = do_comment
do_footnotestyle = do_comment
do_evenheading = do_comment
do_evenfooting = do_comment
do_oddheading = do_comment
do_oddfooting = do_comment
do_everyheading = do_comment
do_everyfooting = do_comment
# --- Nodes ---
def do_node(self, args):
self.endnode()
self.nodelineno = 0
parts = [s.strip() for s in args.split(',')]
while len(parts) < 4: parts.append('')
self.nodelinks = parts
[name, next, prev, up] = parts[:4]
file = self.dirname + '/' + makefile(name)
if file in self.filenames:
print('*** Filename already in use: ', file)
else:
if self.debugging: print('!'*self.debugging, '--- writing', file)
self.filenames[file] = 1
# self.nodefp = open(file, 'w')
self.nodename = name
if self.cont and self.nodestack:
self.nodestack[-1].cont = self.nodename
if not self.topname: self.topname = name
title = name
if self.title: title = title + ' -- ' + self.title
self.node = self.Node(self.dirname, self.nodename, self.topname,
title, next, prev, up)
self.htmlhelp.addnode(self.nodename,next,prev,up,file)
def link(self, label, nodename):
if nodename:
if nodename.lower() == '(dir)':
addr = '../dir.html'
else:
addr = makefile(nodename)
self.write(label, ': <A HREF="', addr, '" TYPE="',
label, '">', nodename, '</A> \n')
# --- Sectioning commands ---
def popstack(self, type):
if (self.node):
self.node.type = type
while self.nodestack:
if self.nodestack[-1].type > type:
self.nodestack[-1].finalize()
self.nodestack[-1].flush()
del self.nodestack[-1]
elif self.nodestack[-1].type == type:
if not self.nodestack[-1].next:
self.nodestack[-1].next = self.node.name
if not self.node.prev:
self.node.prev = self.nodestack[-1].name
self.nodestack[-1].finalize()
self.nodestack[-1].flush()
del self.nodestack[-1]
else:
if type > 1 and not self.node.up:
self.node.up = self.nodestack[-1].name
break
def do_chapter(self, args):
self.heading('H1', args, 0)
self.popstack(1)
def do_unnumbered(self, args):
self.heading('H1', args, -1)
self.popstack(1)
def do_appendix(self, args):
self.heading('H1', args, -1)
self.popstack(1)
def do_top(self, args):
self.heading('H1', args, -1)
def do_chapheading(self, args):
self.heading('H1', args, -1)
def do_majorheading(self, args):
self.heading('H1', args, -1)
def do_section(self, args):
self.heading('H1', args, 1)
self.popstack(2)
def do_unnumberedsec(self, args):
self.heading('H1', args, -1)
self.popstack(2)
def do_appendixsec(self, args):
self.heading('H1', args, -1)
self.popstack(2)
do_appendixsection = do_appendixsec
def do_heading(self, args):
self.heading('H1', args, -1)
def do_subsection(self, args):
self.heading('H2', args, 2)
self.popstack(3)
def do_unnumberedsubsec(self, args):
self.heading('H2', args, -1)
self.popstack(3)
def do_appendixsubsec(self, args):
self.heading('H2', args, -1)
self.popstack(3)
def do_subheading(self, args):
self.heading('H2', args, -1)
def do_subsubsection(self, args):
self.heading('H3', args, 3)
self.popstack(4)
def do_unnumberedsubsubsec(self, args):
self.heading('H3', args, -1)
self.popstack(4)
def do_appendixsubsubsec(self, args):
self.heading('H3', args, -1)
self.popstack(4)
def do_subsubheading(self, args):
self.heading('H3', args, -1)
def heading(self, type, args, level):
if level >= 0:
while len(self.numbering) <= level:
self.numbering.append(0)
del self.numbering[level+1:]
self.numbering[level] = self.numbering[level] + 1
x = ''
for i in self.numbering:
x = x + repr(i) + '.'
args = x + ' ' + args
self.contents.append((level, args, self.nodename))
self.write('<', type, '>')
self.expand(args)
self.write('</', type, '>\n')
if self.debugging or self.print_headers:
print('---', args)
def do_contents(self, args):
# pass
self.listcontents('Table of Contents', 999)
def do_shortcontents(self, args):
pass
# self.listcontents('Short Contents', 0)
do_summarycontents = do_shortcontents
def listcontents(self, title, maxlevel):
self.write('<H1>', title, '</H1>\n<UL COMPACT PLAIN>\n')
prevlevels = [0]
for level, title, node in self.contents:
if level > maxlevel:
continue
if level > prevlevels[-1]:
# can only advance one level at a time
self.write(' '*prevlevels[-1], '<UL PLAIN>\n')
prevlevels.append(level)
elif level < prevlevels[-1]:
# might drop back multiple levels
while level < prevlevels[-1]:
del prevlevels[-1]
self.write(' '*prevlevels[-1],
'</UL>\n')
self.write(' '*level, '<LI> <A HREF="',
makefile(node), '">')
self.expand(title)
self.write('</A>\n')
self.write('</UL>\n' * len(prevlevels))
# --- Page lay-out ---
# These commands are only meaningful in printed text
def do_page(self, args): pass
def do_need(self, args): pass
def bgn_group(self, args): pass
def end_group(self): pass
# --- Line lay-out ---
def do_sp(self, args):
if self.nofill:
self.write('\n')
else:
self.write('<P>\n')
def do_hline(self, args):
self.write('<HR>')
# --- Function and variable definitions ---
def bgn_deffn(self, args):
self.write('<DL>')
self.do_deffnx(args)
def end_deffn(self):
self.write('</DL>\n')
def do_deffnx(self, args):
self.write('<DT>')
words = splitwords(args, 2)
[category, name], rest = words[:2], words[2:]
self.expand('@b{%s}' % name)
for word in rest: self.expand(' ' + makevar(word))
#self.expand(' -- ' + category)
self.write('\n<DD>')
self.index('fn', name)
def bgn_defun(self, args): self.bgn_deffn('Function ' + args)
end_defun = end_deffn
def do_defunx(self, args): self.do_deffnx('Function ' + args)
def bgn_defmac(self, args): self.bgn_deffn('Macro ' + args)
end_defmac = end_deffn
def do_defmacx(self, args): self.do_deffnx('Macro ' + args)
def bgn_defspec(self, args): self.bgn_deffn('{Special Form} ' + args)
end_defspec = end_deffn
def do_defspecx(self, args): self.do_deffnx('{Special Form} ' + args)
def bgn_defvr(self, args):
self.write('<DL>')
self.do_defvrx(args)
end_defvr = end_deffn
def do_defvrx(self, args):
self.write('<DT>')
words = splitwords(args, 2)
[category, name], rest = words[:2], words[2:]
self.expand('@code{%s}' % name)
# If there are too many arguments, show them
for word in rest: self.expand(' ' + word)
#self.expand(' -- ' + category)
self.write('\n<DD>')
self.index('vr', name)
def bgn_defvar(self, args): self.bgn_defvr('Variable ' + args)
end_defvar = end_defvr
def do_defvarx(self, args): self.do_defvrx('Variable ' + args)
def bgn_defopt(self, args): self.bgn_defvr('{User Option} ' + args)
end_defopt = end_defvr
def do_defoptx(self, args): self.do_defvrx('{User Option} ' + args)
# --- Ditto for typed languages ---
def bgn_deftypefn(self, args):
self.write('<DL>')
self.do_deftypefnx(args)
end_deftypefn = end_deffn
def do_deftypefnx(self, args):
self.write('<DT>')
words = splitwords(args, 3)
[category, datatype, name], rest = words[:3], words[3:]
self.expand('@code{%s} @b{%s}' % (datatype, name))
for word in rest: self.expand(' ' + makevar(word))
#self.expand(' -- ' + category)
self.write('\n<DD>')
self.index('fn', name)
def bgn_deftypefun(self, args): self.bgn_deftypefn('Function ' + args)
end_deftypefun = end_deftypefn
def do_deftypefunx(self, args): self.do_deftypefnx('Function ' + args)
def bgn_deftypevr(self, args):
self.write('<DL>')
self.do_deftypevrx(args)
end_deftypevr = end_deftypefn
def do_deftypevrx(self, args):
self.write('<DT>')
words = splitwords(args, 3)
[category, datatype, name], rest = words[:3], words[3:]
self.expand('@code{%s} @b{%s}' % (datatype, name))
# If there are too many arguments, show them
for word in rest: self.expand(' ' + word)
#self.expand(' -- ' + category)
self.write('\n<DD>')
self.index('fn', name)
def bgn_deftypevar(self, args):
self.bgn_deftypevr('Variable ' + args)
end_deftypevar = end_deftypevr
def do_deftypevarx(self, args):
self.do_deftypevrx('Variable ' + args)
# --- Ditto for object-oriented languages ---
def bgn_defcv(self, args):
self.write('<DL>')
self.do_defcvx(args)
end_defcv = end_deftypevr
def do_defcvx(self, args):
self.write('<DT>')
words = splitwords(args, 3)
[category, classname, name], rest = words[:3], words[3:]
self.expand('@b{%s}' % name)
# If there are too many arguments, show them
for word in rest: self.expand(' ' + word)
#self.expand(' -- %s of @code{%s}' % (category, classname))
self.write('\n<DD>')
self.index('vr', '%s @r{on %s}' % (name, classname))
def bgn_defivar(self, args):
self.bgn_defcv('{Instance Variable} ' + args)
end_defivar = end_defcv
def do_defivarx(self, args):
self.do_defcvx('{Instance Variable} ' + args)
def bgn_defop(self, args):
self.write('<DL>')
self.do_defopx(args)
end_defop = end_defcv
def do_defopx(self, args):
self.write('<DT>')
words = splitwords(args, 3)
[category, classname, name], rest = words[:3], words[3:]
self.expand('@b{%s}' % name)
for word in rest: self.expand(' ' + makevar(word))
#self.expand(' -- %s of @code{%s}' % (category, classname))
self.write('\n<DD>')
self.index('fn', '%s @r{on %s}' % (name, classname))
def bgn_defmethod(self, args):
self.bgn_defop('Method ' + args)
end_defmethod = end_defop
def do_defmethodx(self, args):
self.do_defopx('Method ' + args)
# --- Ditto for data types ---
def bgn_deftp(self, args):
self.write('<DL>')
self.do_deftpx(args)
end_deftp = end_defcv
def do_deftpx(self, args):
self.write('<DT>')
words = splitwords(args, 2)
[category, name], rest = words[:2], words[2:]
self.expand('@b{%s}' % name)
for word in rest: self.expand(' ' + word)
#self.expand(' -- ' + category)
self.write('\n<DD>')
self.index('tp', name)
# --- Making Lists and Tables
def bgn_enumerate(self, args):
if not args:
self.write('<OL>\n')
self.stackinfo[len(self.stack)] = '</OL>\n'
else:
self.itemnumber = args
self.write('<UL>\n')
self.stackinfo[len(self.stack)] = '</UL>\n'
def end_enumerate(self):
self.itemnumber = None
self.write(self.stackinfo[len(self.stack) + 1])
del self.stackinfo[len(self.stack) + 1]
def bgn_itemize(self, args):
self.itemarg = args
self.write('<UL>\n')
def end_itemize(self):
self.itemarg = None
self.write('</UL>\n')
def bgn_table(self, args):
self.itemarg = args
self.write('<DL>\n')
def end_table(self):
self.itemarg = None
self.write('</DL>\n')
def bgn_ftable(self, args):
self.itemindex = 'fn'
self.bgn_table(args)
def end_ftable(self):
self.itemindex = None
self.end_table()
def bgn_vtable(self, args):
self.itemindex = 'vr'
self.bgn_table(args)
def end_vtable(self):
self.itemindex = None
self.end_table()
def do_item(self, args):
if self.itemindex: self.index(self.itemindex, args)
if self.itemarg:
if self.itemarg[0] == '@' and self.itemarg[1] and \
self.itemarg[1] in string.ascii_letters:
args = self.itemarg + '{' + args + '}'
else:
# some other character, e.g. '-'
args = self.itemarg + ' ' + args
if self.itemnumber is not None:
args = self.itemnumber + '. ' + args
self.itemnumber = increment(self.itemnumber)
if self.stack and self.stack[-1] == 'table':
self.write('<DT>')
self.expand(args)
self.write('\n<DD>')
elif self.stack and self.stack[-1] == 'multitable':
self.write('<TR><TD>')
self.expand(args)
self.write('</TD>\n</TR>\n')
else:
self.write('<LI>')
self.expand(args)
self.write(' ')
do_itemx = do_item # XXX Should suppress leading blank line
# rpyron 2002-05-07 multitable support
def bgn_multitable(self, args):
self.itemarg = None # should be handled by columnfractions
self.write('<TABLE BORDER="">\n')
def end_multitable(self):
self.itemarg = None
self.write('</TABLE>\n<BR>\n')
def handle_columnfractions(self):
# It would be better to handle this, but for now it's in the way...
self.itemarg = None
def handle_tab(self):
self.write('</TD>\n <TD>')
# --- Enumerations, displays, quotations ---
# XXX Most of these should increase the indentation somehow
def bgn_quotation(self, args): self.write('<BLOCKQUOTE>')
def end_quotation(self): self.write('</BLOCKQUOTE>\n')
def bgn_example(self, args):
self.nofill = self.nofill + 1
self.write('<PRE>')
def end_example(self):
self.write('</PRE>\n')
self.nofill = self.nofill - 1
bgn_lisp = bgn_example # Synonym when contents are executable lisp code
end_lisp = end_example
bgn_smallexample = bgn_example # XXX Should use smaller font
end_smallexample = end_example
bgn_smalllisp = bgn_lisp # Ditto
end_smalllisp = end_lisp
bgn_display = bgn_example
end_display = end_example
bgn_format = bgn_display
end_format = end_display
def do_exdent(self, args): self.expand(args + '\n')
# XXX Should really mess with indentation
def bgn_flushleft(self, args):
self.nofill = self.nofill + 1
self.write('<PRE>\n')
def end_flushleft(self):
self.write('</PRE>\n')
self.nofill = self.nofill - 1
def bgn_flushright(self, args):
self.nofill = self.nofill + 1
self.write('<ADDRESS COMPACT>\n')
def end_flushright(self):
self.write('</ADDRESS>\n')
self.nofill = self.nofill - 1
def bgn_menu(self, args):
self.write('<DIR>\n')
self.write(' <STRONG><EM>Menu</EM></STRONG><P>\n')
self.htmlhelp.beginmenu()
def end_menu(self):
self.write('</DIR>\n')
self.htmlhelp.endmenu()
def bgn_cartouche(self, args): pass
def end_cartouche(self): pass
# --- Indices ---
def resetindex(self):
self.noncodeindices = ['cp']
self.indextitle = {}
self.indextitle['cp'] = 'Concept'
self.indextitle['fn'] = 'Function'
self.indextitle['ky'] = 'Keyword'
self.indextitle['pg'] = 'Program'
self.indextitle['tp'] = 'Type'
self.indextitle['vr'] = 'Variable'
#
self.whichindex = {}
for name in self.indextitle:
self.whichindex[name] = []
def user_index(self, name, args):
if name in self.whichindex:
self.index(name, args)
else:
print('*** No index named', repr(name))
def do_cindex(self, args): self.index('cp', args)
def do_findex(self, args): self.index('fn', args)
def do_kindex(self, args): self.index('ky', args)
def do_pindex(self, args): self.index('pg', args)
def do_tindex(self, args): self.index('tp', args)
def do_vindex(self, args): self.index('vr', args)
def index(self, name, args):
self.whichindex[name].append((args, self.nodename))
self.htmlhelp.index(args, self.nodename)
def do_synindex(self, args):
words = args.split()
if len(words) != 2:
print('*** bad @synindex', args)
return
[old, new] = words
if old not in self.whichindex or \
new not in self.whichindex:
print('*** bad key(s) in @synindex', args)
return
if old != new and \
self.whichindex[old] is not self.whichindex[new]:
inew = self.whichindex[new]
inew[len(inew):] = self.whichindex[old]
self.whichindex[old] = inew
do_syncodeindex = do_synindex # XXX Should use code font
def do_printindex(self, args):
words = args.split()
for name in words:
if name in self.whichindex:
self.prindex(name)
else:
print('*** No index named', repr(name))
def prindex(self, name):
iscodeindex = (name not in self.noncodeindices)
index = self.whichindex[name]
if not index: return
if self.debugging:
print('!'*self.debugging, '--- Generating', \
self.indextitle[name], 'index')
# The node already provides a title
index1 = []
junkprog = re.compile('^(@[a-z]+)?{')
for key, node in index:
sortkey = key.lower()
# Remove leading `@cmd{' from sort key
# -- don't bother about the matching `}'
oldsortkey = sortkey
while 1:
mo = junkprog.match(sortkey)
if not mo:
break
i = mo.end()
sortkey = sortkey[i:]
index1.append((sortkey, key, node))
del index[:]
index1.sort()
self.write('<DL COMPACT>\n')
prevkey = prevnode = None
for sortkey, key, node in index1:
if (key, node) == (prevkey, prevnode):
continue
if self.debugging > 1: print('!'*self.debugging, key, ':', node)
self.write('<DT>')
if iscodeindex: key = '@code{' + key + '}'
if key != prevkey:
self.expand(key)
self.write('\n<DD><A HREF="%s">%s</A>\n' % (makefile(node), node))
prevkey, prevnode = key, node
self.write('</DL>\n')
# --- Final error reports ---
def report(self):
if self.unknown:
print('--- Unrecognized commands ---')
cmds = sorted(self.unknown.keys())
for cmd in cmds:
print(cmd.ljust(20), self.unknown[cmd])
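# Hedged minimal driver (added; not in the original, which is exercised via
# test() at the bottom of this file). Shows the smallest call sequence that
# converts one Texinfo file; the paths are illustrative. Never called.
def _demo_convert(texi_path='doc.texi', outdir='tmp'):
    parser = TexinfoParser()
    parser.setdirname(outdir)
    parser.setincludedir(os.path.dirname(texi_path))
    parser.sethtmlhelp(HTMLHelp('', outdir))  # empty helpbase: no HTML Help files
    fp = open(texi_path, 'r')
    parser.parse(fp)
    fp.close()
    parser.report()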
class TexinfoParserHTML3(TexinfoParser):
COPYRIGHT_SYMBOL = "©"
FN_ID_PATTERN = "[%(id)s]"
FN_SOURCE_PATTERN = '<A ID=footnoteref%(id)s ' \
'HREF="#footnotetext%(id)s">' + FN_ID_PATTERN + '</A>'
FN_TARGET_PATTERN = '<FN ID=footnotetext%(id)s>\n' \
'<P><A HREF="#footnoteref%(id)s">' + FN_ID_PATTERN \
+ '</A>\n%(text)s</P></FN>\n'
FN_HEADER = '<DIV CLASS=footnotes>\n <HR NOSHADE WIDTH=200>\n' \
' <STRONG><EM>Footnotes</EM></STRONG>\n <P>\n'
Node = HTML3Node
def bgn_quotation(self, args): self.write('<BQ>')
def end_quotation(self): self.write('</BQ>\n')
def bgn_example(self, args):
# this use of <CODE> would not be legal in HTML 2.0,
# but is in more recent DTDs.
self.nofill = self.nofill + 1
self.write('<PRE CLASS=example><CODE>')
def end_example(self):
self.write("</CODE></PRE>\n")
self.nofill = self.nofill - 1
def bgn_flushleft(self, args):
self.nofill = self.nofill + 1
self.write('<PRE CLASS=flushleft>\n')
def bgn_flushright(self, args):
self.nofill = self.nofill + 1
self.write('<DIV ALIGN=right CLASS=flushright><ADDRESS COMPACT>\n')
def end_flushright(self):
self.write('</ADDRESS></DIV>\n')
self.nofill = self.nofill - 1
def bgn_menu(self, args):
self.write('<UL PLAIN CLASS=menu>\n')
self.write(' <LH>Menu</LH>\n')
def end_menu(self):
self.write('</UL>\n')
# rpyron 2002-05-07
class HTMLHelp:
"""
This class encapsulates support for HTML Help. Node names,
file names, menu items, index items, and image file names are
accumulated until a call to finalize(). At that time, three
output files are created in the current directory:
`helpbase`.hhp is a HTML Help Workshop project file.
It contains various information, some of
which I do not understand; I just copied
the default project info from a fresh
installation.
`helpbase`.hhc is the Contents file for the project.
`helpbase`.hhk is the Index file for the project.
When these files are used as input to HTML Help Workshop,
the resulting file will be named:
`helpbase`.chm
If none of the defaults in `helpbase`.hhp are changed,
the .CHM file will have Contents, Index, Search, and
Favorites tabs.
"""
codeprog = re.compile('@code{(.*?)}')
def __init__(self,helpbase,dirname):
self.helpbase = helpbase
self.dirname = dirname
self.projectfile = None
self.contentfile = None
self.indexfile = None
self.nodelist = []
self.nodenames = {} # nodename : index
self.nodeindex = {}
self.filenames = {} # filename : filename
self.indexlist = [] # (args,nodename) == (key,location)
self.current = ''
self.menudict = {}
self.dumped = {}
def addnode(self,name,next,prev,up,filename):
node = (name,next,prev,up,filename)
# add this file to dict
# retrieve list with self.filenames.values()
self.filenames[filename] = filename
# add this node to nodelist
self.nodeindex[name] = len(self.nodelist)
self.nodelist.append(node)
# set 'current' for menu items
self.current = name
self.menudict[self.current] = []
def menuitem(self,nodename):
menu = self.menudict[self.current]
menu.append(nodename)
def addimage(self,imagename):
self.filenames[imagename] = imagename
def index(self, args, nodename):
self.indexlist.append((args,nodename))
def beginmenu(self):
pass
def endmenu(self):
pass
def finalize(self):
if not self.helpbase:
return
# generate interesting filenames
resultfile = self.helpbase + '.chm'
projectfile = self.helpbase + '.hhp'
contentfile = self.helpbase + '.hhc'
indexfile = self.helpbase + '.hhk'
# generate a reasonable title
title = self.helpbase
# get the default topic file
(topname,topnext,topprev,topup,topfile) = self.nodelist[0]
defaulttopic = topfile
# PROJECT FILE
try:
fp = open(projectfile,'w')
print('[OPTIONS]', file=fp)
print('Auto Index=Yes', file=fp)
print('Binary TOC=No', file=fp)
print('Binary Index=Yes', file=fp)
print('Compatibility=1.1', file=fp)
print('Compiled file=' + resultfile + '', file=fp)
print('Contents file=' + contentfile + '', file=fp)
print('Default topic=' + defaulttopic + '', file=fp)
print('Error log file=ErrorLog.log', file=fp)
print('Index file=' + indexfile + '', file=fp)
print('Title=' + title + '', file=fp)
print('Display compile progress=Yes', file=fp)
print('Full-text search=Yes', file=fp)
print('Default window=main', file=fp)
print('', file=fp)
print('[WINDOWS]', file=fp)
print('main=,"' + contentfile + '","' + indexfile
+ '","","",,,,,0x23520,222,0x1046,[10,10,780,560],'
'0xB0000,,,,,,0', file=fp)
print('', file=fp)
print('[FILES]', file=fp)
print('', file=fp)
self.dumpfiles(fp)
fp.close()
except IOError as msg:
print(projectfile, ':', msg)
sys.exit(1)
# CONTENT FILE
try:
fp = open(contentfile,'w')
print('<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN">', file=fp)
print('<!-- This file defines the table of contents -->', file=fp)
print('<HTML>', file=fp)
print('<HEAD>', file=fp)
            print('<meta name="GENERATOR" '
                  'content="Microsoft® HTML Help Workshop 4.1">', file=fp)
print('<!-- Sitemap 1.0 -->', file=fp)
print('</HEAD>', file=fp)
print('<BODY>', file=fp)
print(' <OBJECT type="text/site properties">', file=fp)
print(' <param name="Window Styles" value="0x800025">', file=fp)
print(' <param name="comment" value="title:">', file=fp)
print(' <param name="comment" value="base:">', file=fp)
print(' </OBJECT>', file=fp)
self.dumpnodes(fp)
print('</BODY>', file=fp)
print('</HTML>', file=fp)
fp.close()
except IOError as msg:
print(contentfile, ':', msg)
sys.exit(1)
# INDEX FILE
try:
fp = open(indexfile ,'w')
print('<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN">', file=fp)
print('<!-- This file defines the index -->', file=fp)
print('<HTML>', file=fp)
print('<HEAD>', file=fp)
            print('<meta name="GENERATOR" '
                  'content="Microsoft® HTML Help Workshop 4.1">', file=fp)
print('<!-- Sitemap 1.0 -->', file=fp)
print('</HEAD>', file=fp)
print('<BODY>', file=fp)
print('<OBJECT type="text/site properties">', file=fp)
print('</OBJECT>', file=fp)
self.dumpindex(fp)
print('</BODY>', file=fp)
print('</HTML>', file=fp)
fp.close()
except IOError as msg:
print(indexfile , ':', msg)
sys.exit(1)
def dumpfiles(self, outfile=sys.stdout):
filelist = sorted(self.filenames.values())
for filename in filelist:
print(filename, file=outfile)
def dumpnodes(self, outfile=sys.stdout):
self.dumped = {}
if self.nodelist:
nodename, dummy, dummy, dummy, dummy = self.nodelist[0]
self.topnode = nodename
print('<UL>', file=outfile)
for node in self.nodelist:
self.dumpnode(node,0,outfile)
print('</UL>', file=outfile)
def dumpnode(self, node, indent=0, outfile=sys.stdout):
if node:
# Retrieve info for this node
(nodename,next,prev,up,filename) = node
self.current = nodename
# Have we been dumped already?
if nodename in self.dumped:
return
self.dumped[nodename] = 1
# Print info for this node
print(' '*indent, end=' ', file=outfile)
print('<LI><OBJECT type="text/sitemap">', end=' ', file=outfile)
print('<param name="Name" value="' + nodename +'">', end=' ', file=outfile)
print('<param name="Local" value="'+ filename +'">', end=' ', file=outfile)
print('</OBJECT>', file=outfile)
# Does this node have menu items?
try:
menu = self.menudict[nodename]
self.dumpmenu(menu,indent+2,outfile)
except KeyError:
pass
def dumpmenu(self, menu, indent=0, outfile=sys.stdout):
if menu:
currentnode = self.current
if currentnode != self.topnode: # XXX this is a hack
print(' '*indent + '<UL>', file=outfile)
indent += 2
for item in menu:
menunode = self.getnode(item)
self.dumpnode(menunode,indent,outfile)
if currentnode != self.topnode: # XXX this is a hack
print(' '*indent + '</UL>', file=outfile)
indent -= 2
def getnode(self, nodename):
try:
index = self.nodeindex[nodename]
return self.nodelist[index]
except KeyError:
return None
except IndexError:
return None
# (args,nodename) == (key,location)
def dumpindex(self, outfile=sys.stdout):
print('<UL>', file=outfile)
for (key,location) in self.indexlist:
key = self.codeexpand(key)
location = makefile(location)
location = self.dirname + '/' + location
print('<LI><OBJECT type="text/sitemap">', end=' ', file=outfile)
print('<param name="Name" value="' + key + '">', end=' ', file=outfile)
print('<param name="Local" value="' + location + '">', end=' ', file=outfile)
print('</OBJECT>', file=outfile)
print('</UL>', file=outfile)
def codeexpand(self, line):
co = self.codeprog.match(line)
if not co:
return line
bgn, end = co.span(0)
a, b = co.span(1)
line = line[:bgn] + line[a:b] + line[end:]
return line
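# Hedged usage sketch (added; not in the original): the order in which the
# parser drives this class. Never called; 'manual' and the node data below
# are illustrative.
def _demo_htmlhelp():
    hh = HTMLHelp('manual', 'out')                     # helpbase, HTML directory
    hh.addnode('Top', 'Intro', '', '(dir)', 'Top.html')
    hh.menuitem('Intro')                               # menu item under 'Top'
    hh.index('frobnicate', 'Top')                      # index key -> node
    hh.finalize()                                      # writes manual.hhp/.hhc/.hhk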
# Put @var{} around alphabetic substrings
def makevar(str):
return '@var{'+str+'}'
# Split a string in "words" according to findwordend
def splitwords(str, minlength):
words = []
i = 0
n = len(str)
while i < n:
while i < n and str[i] in ' \t\n': i = i+1
if i >= n: break
start = i
i = findwordend(str, i, n)
words.append(str[start:i])
while len(words) < minlength: words.append('')
return words
# Find the end of a "word", matching braces and interpreting @@ @{ @}
fwprog = re.compile('[@{} ]')
def findwordend(str, i, n):
level = 0
while i < n:
mo = fwprog.search(str, i)
if not mo:
break
i = mo.start()
c = str[i]; i = i+1
if c == '@': i = i+1 # Next character is not special
elif c == '{': level = level+1
elif c == '}': level = level-1
elif c == ' ' and level <= 0: return i-1
return n
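# Hedged demo (added; not in the original): braces keep a "word" together,
# and short results are padded with empty strings. Never called.
def _demo_splitwords():
    assert splitwords('@code{foo bar} baz', 2) == ['@code{foo bar}', 'baz']
    assert splitwords('only', 3) == ['only', '', '']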
# Convert a node name into a file name
def makefile(nodename):
nodename = nodename.strip()
return fixfunnychars(nodename) + '.html'
# Characters that are perfectly safe in filenames and hyperlinks
goodchars = string.ascii_letters + string.digits + '!@-=+.'
# Replace characters that aren't perfectly safe by dashes
# Underscores are bad since Cern HTTPD treats them as delimiters for
# encoding times, so you get mismatches if you compress your files:
# a.html.gz will map to a_b.html.gz
def fixfunnychars(addr):
i = 0
while i < len(addr):
c = addr[i]
if c not in goodchars:
c = '-'
addr = addr[:i] + c + addr[i+1:]
i = i + len(c)
return addr
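# Hedged demo (added; not in the original): unsafe characters, including
# underscores, become dashes and '.html' is appended. Never called.
def _demo_makefile():
    assert makefile('Top') == 'Top.html'
    assert makefile(' GNU Emacs ') == 'GNU-Emacs.html'
    assert makefile('a_b') == 'a-b.html'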
# Increment a string used as an enumeration
def increment(s):
if not s:
return '1'
for sequence in string.digits, string.ascii_lowercase, string.ascii_uppercase:
lastc = s[-1]
if lastc in sequence:
i = sequence.index(lastc) + 1
if i >= len(sequence):
if len(s) == 1:
s = sequence[0]*2
if s == '00':
s = '10'
else:
s = increment(s[:-1]) + sequence[0]
else:
s = s[:-1] + sequence[i]
return s
return s # Don't increment
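# Hedged demo (added; not in the original): how enumeration strings advance
# and wrap within each sequence. Never called.
def _demo_increment():
    assert increment('') == '1'
    assert increment('1') == '2'
    assert increment('9') == '10'
    assert increment('z') == 'aa'
    assert increment('az') == 'ba'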
def test():
import sys
debugging = 0
print_headers = 0
cont = 0
html3 = 0
    htmlhelp = ''
    helpbase = ''   # stays empty unless -H is given; finalize() is then a no-op
    while sys.argv[1] == '-d':
debugging = debugging + 1
del sys.argv[1]
if sys.argv[1] == '-p':
print_headers = 1
del sys.argv[1]
if sys.argv[1] == '-c':
cont = 1
del sys.argv[1]
if sys.argv[1] == '-3':
html3 = 1
del sys.argv[1]
if sys.argv[1] == '-H':
helpbase = sys.argv[2]
del sys.argv[1:3]
if len(sys.argv) != 3:
        print('usage: texi2html [-d [-d]] [-p] [-c] [-3] [-H helpbase]', \
'inputfile outputdirectory')
sys.exit(2)
if html3:
parser = TexinfoParserHTML3()
else:
parser = TexinfoParser()
parser.cont = cont
parser.debugging = debugging
parser.print_headers = print_headers
file = sys.argv[1]
dirname = sys.argv[2]
parser.setdirname(dirname)
parser.setincludedir(os.path.dirname(file))
htmlhelp = HTMLHelp(helpbase, dirname)
parser.sethtmlhelp(htmlhelp)
try:
fp = open(file, 'r')
except IOError as msg:
print(file, ':', msg)
sys.exit(1)
parser.parse(fp)
fp.close()
parser.report()
htmlhelp.finalize()
if __name__ == "__main__":
test()
| technologiescollege/Blockly-rduino-communication | scripts_XP/Tools/Scripts/texi2html.py | Python | gpl-3.0 | 70,169 |
# original work: https://github.com/graphite-project/whisper/issues/22
# whisper-fill: unlike whisper-merge, don't overwrite data that's
# already present in the target file, but instead, only add the missing
# data (e.g. where the gaps in the target file are). Because no values
# are overwritten, no data or precision gets lost. Also, unlike
# whisper-merge, try to take the highest-precision archive to provide
# the data, instead of the one with the largest retention.
# Using this script, reconciliation between two replica instances can be
# performed by whisper-fill-ing the data of the other replica with the
# data that exists locally, without introducing the quite remarkable
# gaps that whisper-merge leaves behind (filling a higher precision
# archive with data from a lower precision one)
# Work performed by author while working at Booking.com.
from whisper import info, fetch, update_many
try:
    # Some whisper releases re-export the stdlib operator module; fall back
    # to the hand-rolled itemgetter() below when that import is missing.
    from whisper import operator
    HAS_OPERATOR = True
except ImportError:
    HAS_OPERATOR = False
import itertools
import time
def itemgetter(*items):
if HAS_OPERATOR:
return operator.itemgetter(*items)
else:
if len(items) == 1:
item = items[0]
def g(obj):
return obj[item]
else:
def g(obj):
return tuple(obj[item] for item in items)
return g
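# Hedged demo (added; not part of the original module): the fallback mirrors
# operator.itemgetter for both the single- and multi-key cases. Never called.
def _demo_itemgetter():
    assert itemgetter('retention')({'retention': 60}) == 60
    assert itemgetter(0, 1)(('a', 'b', 'c')) == ('a', 'b')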
def fill(src, dst, tstart, tstop):
# fetch range start-stop from src, taking values from the highest
# precision archive, thus optionally requiring multiple fetch + merges
srcHeader = info(src)
srcArchives = srcHeader['archives']
srcArchives.sort(key=itemgetter('retention'))
    # find the oldest point in time still retained by the source file
srcTime = int(time.time()) - srcHeader['maxRetention']
if tstart < srcTime and tstop < srcTime:
return
# we want to retain as much precision as we can, hence we do backwards
# walk in time
# skip forward at max 'step' points at a time
for archive in srcArchives:
# skip over archives that don't have any data points
rtime = time.time() - archive['retention']
if tstop <= rtime:
continue
untilTime = tstop
fromTime = rtime if rtime > tstart else tstart
(timeInfo, values) = fetch(src, fromTime, untilTime)
(start, end, archive_step) = timeInfo
        # (itertools.ifilter/izip/xrange were Python 2-only; the builtins
        # below behave the same here)
        pointsToWrite = list(filter(
            lambda points: points[1] is not None,
            zip(range(start, end, archive_step), values)))
# order points by timestamp, newest first
pointsToWrite.sort(key=lambda p: p[0], reverse=True)
update_many(dst, pointsToWrite)
tstop = fromTime
# can stop when there's nothing to fetch any more
if tstart == tstop:
return
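# Hedged demo (added; not part of the original module): fill() operates on an
# explicit [tstart, tstop] window; fill_archives() below derives that window
# from the archive retentions. Paths and times are illustrative. Never called.
def _example_fill_window():
    now = int(time.time())
    fill('/data/src.wsp', '/data/dst.wsp', now - 3600, now)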
def fill_archives(src, dst, startFrom, endAt=0):
"""
Fills gaps in dst using data from src.
src is the path as a string
dst is the path as a string
startFrom is the latest timestamp (archives are read backward)
endAt is the earliest timestamp (archives are read backward).
if absent, we take the earliest timestamp in the archive
"""
header = info(dst)
archives = header['archives']
archives = sorted(archives, key=lambda t: t['retention'])
for archive in archives:
fromTime = max(endAt, time.time() - archive['retention'])
if fromTime >= startFrom:
continue
(timeInfo, values) = fetch(dst, fromTime, untilTime=startFrom)
(start, end, step) = timeInfo
gapstart = None
for v in values:
if not v and not gapstart:
gapstart = start
elif v and gapstart:
# ignore single units lost
if (start - gapstart) > archive['secondsPerPoint']:
fill(src, dst, gapstart - step, start)
gapstart = None
elif gapstart and start == end - step:
fill(src, dst, gapstart - step, start)
start += step
# The next archive only needs to be filled up to the latest point
# in time we updated.
startFrom = fromTime
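# Hedged usage sketch (added; not part of the original module): back-fill
# every gap in dst from src, walking from "now" as far back as the archives
# reach. The .wsp paths are illustrative. Never called.
def _example_fill_archives():
    fill_archives('/data/src.wsp', '/data/dst.wsp', startFrom=time.time())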
| unbrice/carbonate | carbonate/fill.py | Python | mit | 4,207 |
# -*- coding: utf-8 -*-
# vim: autoindent shiftwidth=4 expandtab textwidth=120 tabstop=4 softtabstop=4
###############################################################################
# OpenLP - Open Source Lyrics Projection #
# --------------------------------------------------------------------------- #
# Copyright (c) 2008-2013 Raoul Snyman #
# Portions copyright (c) 2008-2013 Tim Bentley, Gerald Britton, Jonathan #
# Corwin, Samuel Findlay, Michael Gorven, Scott Guerrieri, Matthias Hub, #
# Meinert Jordan, Armin Köhler, Erik Lundin, Edwin Lunando, Brian T. Meyer. #
# Joshua Miller, Stevan Pettit, Andreas Preikschat, Mattias Põldaru, #
# Christian Richter, Philip Ridout, Simon Scudder, Jeffrey Smith, #
# Maikel Stuivenberg, Martin Thompson, Jon Tibble, Dave Warnock, #
# Frode Woldsund, Martin Zibricky, Patrick Zimmermann #
# --------------------------------------------------------------------------- #
# This program is free software; you can redistribute it and/or modify it #
# under the terms of the GNU General Public License as published by the Free #
# Software Foundation; version 2 of the License. #
# #
# This program is distributed in the hope that it will be useful, but WITHOUT #
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or #
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for #
# more details. #
# #
# You should have received a copy of the GNU General Public License along #
# with this program; if not, write to the Free Software Foundation, Inc., 59 #
# Temple Place, Suite 330, Boston, MA 02111-1307 USA #
###############################################################################
"""
The :mod:`openlp.plugins.songs.lib.ui` module provides standard UI components
for the songs plugin.
"""
from openlp.core.lib import translate
class SongStrings(object):
"""
Provide standard strings for use throughout the songs plugin.
"""
# These strings should need a good reason to be retranslated elsewhere.
Author = translate('OpenLP.Ui', 'Author', 'Singular')
Authors = translate('OpenLP.Ui', 'Authors', 'Plural')
AuthorUnknown = u'Author Unknown' # Used to populate the database.
CopyrightSymbol = translate('OpenLP.Ui', '\xa9', 'Copyright symbol.')
SongBook = translate('OpenLP.Ui', 'Song Book', 'Singular')
SongBooks = translate('OpenLP.Ui', 'Song Books', 'Plural')
    SongIncomplete = translate('OpenLP.Ui', 'Title and/or verses not found')
SongMaintenance = translate('OpenLP.Ui', 'Song Maintenance')
Topic = translate('OpenLP.Ui', 'Topic', 'Singular')
Topics = translate('OpenLP.Ui', 'Topics', 'Plural')
XMLSyntaxError = translate('OpenLP.Ui', 'XML syntax error')
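# Illustrative usage elsewhere in the plugin (hypothetical label widget):
#   some_label.setText(SongStrings.SongMaintenance)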
| marmyshev/transitions | openlp/plugins/songs/lib/ui.py | Python | gpl-2.0 | 3,142 |
"""This module contains a series of unit tests which
validate lib/mac.py"""
import unittest
import uuid
import hashlib
import json
import mock
from yar.util import mac
class HexifyTestCase(unittest.TestCase):
def test_bytes_is_none(self):
self.assertIsNone(mac._hexify(None))
class DehexifyTestCase(unittest.TestCase):
def test_bytes_encoded_as_hex_string_is_none(self):
self.assertIsNone(mac._dehexify(None))
def test_bytes_encoded_as_hex_string_not_decodable(self):
self.assertIsNone(mac._dehexify("dave was here"))
class MACKeyTestCase(unittest.TestCase):
def test_generate_returns_non_none_MACKey(self):
mac_key = mac.MACKey.generate()
self.assertIsNotNone(mac_key)
self.assertEqual(mac_key.__class__, mac.MACKey)
def test_created_with_explicit_good_value(self):
value = "0"*43
mac_key = mac.MACKey(value)
self.assertIsNotNone(mac_key)
self.assertEqual(mac_key, value)
def test_created_with_explicit_invalid_characters(self):
with self.assertRaises(ValueError):
value = ")"*43
mac_key = mac.MACKey(value)
def test_created_with_zero_length_value(self):
with self.assertRaises(ValueError):
value = ""
mac_key = mac.MACKey(value)
def test_created_with_explicit_fifty_three_character_string(self):
with self.assertRaises(ValueError):
value = "1"*53
mac_key = mac.MACKey(value)
def test_created_with_explicit_none_value(self):
with self.assertRaises(ValueError):
value = None
mac_key = mac.MACKey(value)
class MACKeyIdentifierTestCase(unittest.TestCase):
def test_generate_returns_non_none_MACKeyIdentifier(self):
mac_key_identifier = mac.MACKeyIdentifier.generate()
self.assertIsNotNone(mac_key_identifier)
self.assertEqual(mac_key_identifier.__class__, mac.MACKeyIdentifier)
self.assertEqual(32, len(mac_key_identifier))
def test_created_with_explicit_content(self):
content = 'dave was here'
mac_key_identifier = mac.MACKeyIdentifier(content)
self.assertIsNotNone(mac_key_identifier)
self.assertEqual(mac_key_identifier, content)
class NonceTestCase(unittest.TestCase):
def test_generate_returns_non_none_Nonces(self):
nonce = mac.Nonce.generate()
self.assertIsNotNone(nonce)
self.assertEqual(nonce.__class__, mac.Nonce)
self.assertEqual(16, len(nonce))
def test_created_with_explicit_content(self):
content = 'dave was here'
nonce = mac.Nonce(content)
self.assertIsNotNone(nonce)
self.assertEqual(nonce, content)
class TimestampTestCase(unittest.TestCase):
def test_gen_returns_non_none_ts_which_represents_int(self):
ts = mac.Timestamp.generate()
self.assertIsNotNone(ts)
self.assertEqual(ts.__class__, mac.Timestamp)
self.assertTrue(0 < len(ts))
self.assertEqual(int, int(ts).__class__)
def test_created_with_explicit_content(self):
content = '45'
ts = mac.Timestamp(content)
self.assertIsNotNone(ts)
self.assertEqual(ts, content)
def test_conversion_to_int(self):
value = 45
ts = mac.Timestamp(value)
self.assertIsNotNone(ts)
self.assertEqual(ts.__class__, mac.Timestamp)
self.assertEqual(int(ts), value)
def test_create_with_non_int(self):
with self.assertRaises(ValueError):
value = "dave"
ts = mac.Timestamp(value)
class ExtTestCase(unittest.TestCase):
def test_content_type_and_body_none_is_zero_length_ext(self):
content_type = None
body = None
ext = mac.Ext.generate(content_type, body)
self.assertIsNotNone(ext)
self.assertEqual(ext, "")
    def test_content_type_not_none_and_body_none_is_hash_of_content_type(self):
content_type = "dave was here"
body = None
ext = mac.Ext.generate(content_type, body)
self.assertIsNotNone(ext)
hash = hashlib.sha1(content_type)
self.assertEqual(ext, hash.hexdigest())
    def test_content_type_none_and_body_not_none_is_hash_of_body(self):
content_type = None
body = "dave was here"
ext = mac.Ext.generate(content_type, body)
self.assertIsNotNone(ext)
hash = hashlib.sha1(body)
self.assertEqual(ext, hash.hexdigest())
def test_content_type_and_body_not_none_is_sha1_of_both(self):
content_type = "hello world!"
body = "dave was here"
ext = mac.Ext.generate(content_type, body)
self.assertIsNotNone(ext)
hash = hashlib.sha1(content_type + body)
self.assertEqual(ext, hash.hexdigest())
def test_content_type_zero_length_and_body_none(self):
content_type = ""
body = None
ext = mac.Ext.generate(content_type, body)
self.assertIsNotNone(ext)
hash = hashlib.sha1(content_type)
self.assertEqual(ext, hash.hexdigest())
def test_content_type_none_and_body_zero_length(self):
content_type = None
body = ""
ext = mac.Ext.generate(content_type, body)
self.assertIsNotNone(ext)
hash = hashlib.sha1(body)
self.assertEqual(ext, hash.hexdigest())
def test_created_with_explicit_content(self):
content = "abc"
ext = mac.Ext(content)
self.assertIsNotNone(ext)
self.assertEqual(ext, content)
class AuthHeaderValueTestCase(unittest.TestCase):
def _uuid(self):
return str(uuid.uuid4()).replace("-", "")
def _create_ahv_str(self, mac_key_identifier, ts, nonce, ext, my_mac):
fmt = 'MAC id="%s", ts="%s", nonce="%s", ext="%s", mac="%s"'
return fmt % (mac_key_identifier, ts, nonce, ext, my_mac)
def test_ctr_correct_property_assignment(self):
mac_key_identifier = self._uuid()
ts = self._uuid()
nonce = self._uuid()
ext = self._uuid()
my_mac = self._uuid()
ah = mac.AuthHeaderValue(mac_key_identifier, ts, nonce, ext, my_mac)
self.assertEqual(ah.mac_key_identifier, mac_key_identifier)
self.assertEqual(ah.ts, ts)
self.assertEqual(ah.nonce, nonce)
self.assertEqual(ah.ext, ext)
self.assertEqual(ah.mac, my_mac)
def test_parse_generated_value_for_get(self):
ts = mac.Timestamp.generate()
nonce = mac.Nonce.generate()
http_method = "GET"
uri = "/whatever"
host = "127.0.0.1"
port = 8080
content_type = None
body = None
ext = mac.Ext.generate(content_type, body)
normalized_request_string = mac.NormalizedRequestString.generate(
ts,
nonce,
http_method,
uri,
host,
port,
ext)
mac_key = mac.MACKey.generate()
mac_algorithm = mac.MAC.algorithm
my_mac = mac.MAC.generate(
mac_key,
mac_algorithm,
normalized_request_string)
mac_key_identifier = mac.MACKeyIdentifier.generate()
ahv = mac.AuthHeaderValue(
mac_key_identifier,
ts,
nonce,
ext,
my_mac)
pahv = mac.AuthHeaderValue.parse(str(ahv))
self.assertIsNotNone(pahv)
self.assertEqual(pahv.mac_key_identifier, ahv.mac_key_identifier)
self.assertEqual(pahv.ts, ahv.ts)
self.assertEqual(pahv.nonce, ahv.nonce)
self.assertEqual(pahv.ext, ahv.ext)
self.assertEqual(pahv.mac, ahv.mac)
def test_parse_generated_value_for_post(self):
ts = mac.Timestamp.generate()
nonce = mac.Nonce.generate()
http_method = "POST"
uri = "/whatever"
host = "127.0.0.1"
port = 8080
content_type = "application/json;charset=utf-8"
body = json.dumps({"dave": "was", "there": "you", "are": 42})
ext = mac.Ext.generate(content_type, body)
normalized_request_string = mac.NormalizedRequestString.generate(
ts,
nonce,
http_method,
uri,
host,
port,
ext)
mac_key = mac.MACKey.generate()
mac_algorithm = mac.MAC.algorithm
my_mac = mac.MAC.generate(
mac_key,
mac_algorithm,
normalized_request_string)
mac_key_identifier = mac.MACKeyIdentifier.generate()
ahv = mac.AuthHeaderValue(
mac_key_identifier,
ts,
nonce,
ext,
my_mac)
pahv = mac.AuthHeaderValue.parse(str(ahv))
self.assertIsNotNone(pahv)
self.assertEqual(pahv.mac_key_identifier, ahv.mac_key_identifier)
self.assertEqual(pahv.ts, ahv.ts)
self.assertEqual(pahv.nonce, ahv.nonce)
self.assertEqual(pahv.ext, ahv.ext)
self.assertEqual(pahv.mac, ahv.mac)
def test_parse_with_empty_mac_key_identifier(self):
mac_key_identifier = ""
ts = self._uuid()
nonce = self._uuid()
ext = self._uuid()
my_mac = self._uuid()
ahv_str = self._create_ahv_str(
mac_key_identifier,
ts,
nonce,
ext,
my_mac)
self.assertIsNone(mac.AuthHeaderValue.parse(ahv_str))
def test_parse_with_empty_timestamp(self):
mac_key_identifier = self._uuid()
ts = ""
nonce = self._uuid()
ext = self._uuid()
my_mac = self._uuid()
ahv_str = self._create_ahv_str(
mac_key_identifier,
ts,
nonce,
ext,
my_mac)
self.assertIsNone(mac.AuthHeaderValue.parse(ahv_str))
def test_parse_with_empty_nonce(self):
mac_key_identifier = self._uuid()
ts = self._uuid()
nonce = ""
ext = self._uuid()
my_mac = self._uuid()
ahv_str = self._create_ahv_str(
mac_key_identifier,
ts,
nonce,
ext,
my_mac)
self.assertIsNone(mac.AuthHeaderValue.parse(ahv_str))
def test_parse_with_empty_mac(self):
mac_key_identifier = self._uuid()
ts = self._uuid()
nonce = self._uuid()
ext = self._uuid()
my_mac = ""
ahv_str = self._create_ahv_str(
mac_key_identifier,
ts,
nonce,
ext,
my_mac)
self.assertIsNone(mac.AuthHeaderValue.parse(ahv_str))
def test_parse_none(self):
self.assertIsNone(mac.AuthHeaderValue.parse(None))
def test_parse_zero_length_string(self):
self.assertIsNone(mac.AuthHeaderValue.parse(""))
def test_parse_random_string(self):
self.assertIsNone(mac.AuthHeaderValue.parse(self._uuid()))
class MACTestCase(unittest.TestCase):
def _core_test_logic(self,
http_method,
body,
content_type):
ts = mac.Timestamp.generate()
nonce = mac.Nonce.generate()
uri = "/whatever"
host = "127.0.0.1"
port = 8080
ext = mac.Ext.generate(content_type, body)
normalized_request_string = mac.NormalizedRequestString.generate(
ts,
nonce,
http_method,
uri,
host,
port,
ext)
mac_key = mac.MACKey.generate()
self.assertIsNotNone(mac_key)
my_mac = mac.MAC.generate(
mac_key,
mac.MAC.algorithm,
normalized_request_string)
self.assertIsNotNone(my_mac)
verify_rv = my_mac.verify(
mac_key,
mac.MAC.algorithm,
normalized_request_string)
self.assertTrue(verify_rv)
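        # Regenerate the normalized request string with a different port to
        # confirm that verification fails when any request detail changes.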
normalized_request_string = mac.NormalizedRequestString.generate(
ts,
nonce,
http_method,
uri,
host,
port + 1, # <<< note this change
ext)
verify_rv = my_mac.verify(
mac_key,
mac.MAC.algorithm,
normalized_request_string)
self.assertFalse(verify_rv)
def test_it(self):
content_type = "application/json;charset=utf-8"
body = json.dumps({"dave": "was", "there": "you", "are": 42})
self._core_test_logic("POST", body, content_type)
self._core_test_logic("GET", None, None)
self._core_test_logic("PUT", body, content_type)
self._core_test_logic("DELETE", None, None)
class TestRequestsAuth(unittest.TestCase):
"""These unit tests verify the behavior of
yar.util.mac.RequestsAuth"""
def test_all_good_http_get_with_port(self):
"""Verify the behavior of yar.util.mac.RequestsAuth
for HTTP GETs where the URL contains a port."""
mac_key_identifier = mac.MACKeyIdentifier.generate()
mac_key = mac.MACKey.generate()
mac_algorithm = mac.MAC.algorithm
auth = mac.RequestsAuth(
mac_key_identifier,
mac_key,
mac_algorithm)
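        # In production code this auth object would be handed to the requests
        # library (e.g. requests.get(url, auth=auth)); here it is invoked
        # directly on a mock request instead.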
mock_request = mock.Mock()
mock_request.headers = {}
mock_request.body = None
mock_request.method = "GET"
mock_request.url = "http://localhost:8000"
rv = auth(mock_request)
self.assertIsNotNone(rv)
self.assertIs(rv, mock_request)
self.assertTrue("Authorization" in mock_request.headers)
ahv = mac.AuthHeaderValue.parse(mock_request.headers["Authorization"])
self.assertIsNotNone(ahv)
self.assertEqual(ahv.mac_key_identifier, mac_key_identifier)
self.assertEqual(ahv.ext, "")
def test_all_good_http_get_without_port(self):
"""Verify the behavior of yar.util.mac.RequestsAuth
for HTTP GETs where the URL contains no port."""
mac_key_identifier = mac.MACKeyIdentifier.generate()
mac_key = mac.MACKey.generate()
mac_algorithm = mac.MAC.algorithm
auth = mac.RequestsAuth(
mac_key_identifier,
mac_key,
mac_algorithm)
mock_request = mock.Mock()
mock_request.headers = {}
mock_request.body = None
mock_request.method = "GET"
mock_request.url = "http://localhost"
rv = auth(mock_request)
self.assertIsNotNone(rv)
self.assertIs(rv, mock_request)
self.assertTrue("Authorization" in mock_request.headers)
ahv = mac.AuthHeaderValue.parse(mock_request.headers["Authorization"])
self.assertIsNotNone(ahv)
self.assertEqual(ahv.mac_key_identifier, mac_key_identifier)
self.assertEqual(ahv.ext, "")
def test_all_good_http_post(self):
"""Verify the behavior of yar.util.mac.RequestsAuth
for HTTP POSTs."""
mac_key_identifier = mac.MACKeyIdentifier.generate()
mac_key = mac.MACKey.generate()
mac_algorithm = mac.MAC.algorithm
auth = mac.RequestsAuth(
mac_key_identifier,
mac_key,
mac_algorithm)
mock_request = mock.Mock()
mock_request.headers = {
"content-type": "application/json",
}
body = {
1: 2,
3: 4,
}
mock_request.body = json.dumps(body)
mock_request.method = "POST"
mock_request.url = "http://localhost:8000"
rv = auth(mock_request)
self.assertIsNotNone(rv)
self.assertIs(rv, mock_request)
self.assertTrue("Authorization" in mock_request.headers)
ahv = mac.AuthHeaderValue.parse(mock_request.headers["Authorization"])
self.assertIsNotNone(ahv)
self.assertEqual(ahv.mac_key_identifier, mac_key_identifier)
self.assertNotEqual(ahv.ext, "")
| simonsdave/yar | yar/util/tests/mac_unit_tests.py | Python | mit | 15,971 |
from StringIO import StringIO
import subprocess
from pyethapp.app import app
from click.testing import CliRunner
from ethereum.block import BlockHeader
import rlp
import pytest
@pytest.mark.xfail  # cannot work without a mock-up chain
def test_export():
# requires a chain with at least 5 blocks
assert subprocess.call('pyethapp export', shell=True) != 0
assert subprocess.call('pyethapp export --from -1 -', shell=True) != 0
assert subprocess.call('pyethapp export --to -3 -', shell=True) != 0
assert subprocess.call('pyethapp export --from 4 --to 2 -', shell=True) != 0
result = subprocess.Popen('pyethapp export --from 2 --to 4 -', shell=True,
stdout=subprocess.PIPE)
result.wait()
assert result.returncode == 0
s = result.stdout.read()
headers = []
end = 0
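    # The exported stream is a concatenation of RLP-encoded blocks; consume
    # it item by item and deserialize each block's first element, its header.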
while end < len(s):
item, end = rlp.codec.consume_item(s, end)
headers.append(BlockHeader.deserialize(item[0]))
assert [header.number for header in headers] == [2, 3, 4]
| gsalgado/pyethapp | pyethapp/tests/test_export.py | Python | mit | 1,032 |
#!/usr/bin/python -tt
# An incredibly simple agent. All we do is find the closest enemy tank, drive
# towards it, and shoot. Note that if friendly fire is allowed, you will very
# often kill your own tanks with this code.
#################################################################
# NOTE TO STUDENTS
# This is a starting point for you. You will need to greatly
# modify this code if you want to do anything useful. But this
# should help you to know how to interact with BZRC in order to
# get the information you need.
#
# After starting the bzrflag server, this is one way to start
# this code:
# python agent0.py [hostname] [port]
#
# Often this translates to something like the following (with the
# port name being printed out by the bzrflag server):
# python agent0.py localhost 49857
#################################################################
import sys
import math
import time
from bzrc import BZRC, Command
class Agent(object):
"""Class handles all command and control logic for a teams tanks."""
def __init__(self, bzrc):
self.bzrc = bzrc
self.constants = self.bzrc.get_constants()
self.commands = []
def tick(self, time_diff):
"""Some time has passed; decide what to do next."""
mytanks, othertanks, flags, shots, obstacles = self.bzrc.get_lots_o_stuff()
self.mytanks = mytanks
self.othertanks = othertanks
self.flags = flags
self.shots = shots
self.enemies = [tank for tank in othertanks if tank.color !=
self.constants['team']]
self.commands = []
for tank in mytanks:
self.attack_enemies(tank)
results = self.bzrc.do_commands(self.commands)
def attack_enemies(self, tank):
"""Find the closest enemy and chase it, shooting as you go."""
best_enemy = None
best_dist = 2 * float(self.constants['worldsize'])
for enemy in self.enemies:
if enemy.status != 'alive':
continue
dist = math.sqrt((enemy.x - tank.x)**2 + (enemy.y - tank.y)**2)
if dist < best_dist:
best_dist = dist
best_enemy = enemy
if best_enemy is None:
command = Command(tank.index, 0, 0, False)
self.commands.append(command)
else:
self.move_to_position(tank, best_enemy.x, best_enemy.y)
def move_to_position(self, tank, target_x, target_y):
"""Set command to move to given coordinates."""
target_angle = math.atan2(target_y - tank.y,
target_x - tank.x)
relative_angle = self.normalize_angle(target_angle - tank.angle)
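        # Simple proportional controller: drive at full speed and turn with
        # an angular velocity proportional to the heading error (the gain of
        # 2 is an arbitrary tuning constant).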
command = Command(tank.index, 1, 2 * relative_angle, True)
self.commands.append(command)
def normalize_angle(self, angle):
"""Make any angle be between +/- pi."""
        angle -= 2 * math.pi * int(angle / (2 * math.pi))
if angle <= -math.pi:
angle += 2 * math.pi
elif angle > math.pi:
angle -= 2 * math.pi
return angle
def main():
# Process CLI arguments.
try:
execname, host, port = sys.argv
except ValueError:
execname = sys.argv[0]
print >>sys.stderr, '%s: incorrect number of arguments' % execname
print >>sys.stderr, 'usage: %s hostname port' % sys.argv[0]
sys.exit(-1)
# Connect.
#bzrc = BZRC(host, int(port), debug=True)
bzrc = BZRC(host, int(port))
agent = Agent(bzrc)
prev_time = time.time()
# Run the agent
try:
while True:
time_diff = time.time() - prev_time
agent.tick(time_diff)
except KeyboardInterrupt:
print "Exiting due to keyboard interrupt."
bzrc.close()
if __name__ == '__main__':
main()
# vim: et sw=4 sts=4
| bweaver2/bzrFlag | bzagents/agent0.py | Python | gpl-3.0 | 3,861 |
# Copyright (c) 2012 Citrix Systems, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for the aggregates admin api."""
from webob import exc
from nova.api.openstack.compute.contrib import aggregates
from nova import context
from nova import exception
from nova import test
from nova.tests.api.openstack import fakes
from nova.tests import matchers
AGGREGATE_LIST = [
{"name": "aggregate1", "id": "1", "availability_zone": "nova1"},
{"name": "aggregate2", "id": "2", "availability_zone": "nova1"},
{"name": "aggregate3", "id": "3", "availability_zone": "nova2"},
{"name": "aggregate1", "id": "4", "availability_zone": "nova1"}]
AGGREGATE = {"name": "aggregate1",
"id": "1",
"availability_zone": "nova1",
"metadata": {"foo": "bar"},
"hosts": ["host1, host2"]}
class FakeRequest(object):
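    # Minimal stand-in for a WSGI request carrying an admin context.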
environ = {"nova.context": context.get_admin_context()}
class AggregateTestCase(test.NoDBTestCase):
"""Test Case for aggregates admin api."""
def setUp(self):
super(AggregateTestCase, self).setUp()
self.controller = aggregates.AggregateController()
self.req = FakeRequest()
self.user_req = fakes.HTTPRequest.blank('/v2/os-aggregates')
self.context = self.req.environ['nova.context']
def test_index(self):
def stub_list_aggregates(context):
if context is None:
raise Exception()
return AGGREGATE_LIST
self.stubs.Set(self.controller.api, 'get_aggregate_list',
stub_list_aggregates)
result = self.controller.index(self.req)
self.assertEqual(AGGREGATE_LIST, result["aggregates"])
def test_index_no_admin(self):
self.assertRaises(exception.PolicyNotAuthorized,
self.controller.index,
self.user_req)
def test_create(self):
def stub_create_aggregate(context, name, availability_zone):
self.assertEqual(context, self.context, "context")
self.assertEqual("test", name, "name")
self.assertEqual("nova1", availability_zone, "availability_zone")
return AGGREGATE
self.stubs.Set(self.controller.api, "create_aggregate",
stub_create_aggregate)
result = self.controller.create(self.req, {"aggregate":
{"name": "test",
"availability_zone": "nova1"}})
self.assertEqual(AGGREGATE, result["aggregate"])
def test_create_no_admin(self):
self.assertRaises(exception.PolicyNotAuthorized,
self.controller.create, self.user_req,
{"aggregate":
{"name": "test",
"availability_zone": "nova1"}})
def test_create_with_duplicate_aggregate_name(self):
def stub_create_aggregate(context, name, availability_zone):
raise exception.AggregateNameExists(aggregate_name=name)
self.stubs.Set(self.controller.api, "create_aggregate",
stub_create_aggregate)
self.assertRaises(exc.HTTPConflict, self.controller.create,
self.req, {"aggregate":
{"name": "test",
"availability_zone": "nova1"}})
def test_create_with_incorrect_availability_zone(self):
def stub_create_aggregate(context, name, availability_zone):
raise exception.InvalidAggregateAction(action='create_aggregate',
aggregate_id="'N/A'",
reason='invalid zone')
self.stubs.Set(self.controller.api, "create_aggregate",
stub_create_aggregate)
self.assertRaises(exception.InvalidAggregateAction,
self.controller.create,
self.req, {"aggregate":
{"name": "test",
"availability_zone": "nova_bad"}})
def test_create_with_no_aggregate(self):
self.assertRaises(exc.HTTPBadRequest, self.controller.create,
self.req, {"foo":
{"name": "test",
"availability_zone": "nova1"}})
def test_create_with_no_name(self):
self.assertRaises(exc.HTTPBadRequest, self.controller.create,
self.req, {"aggregate":
{"foo": "test",
"availability_zone": "nova1"}})
def test_create_with_no_availability_zone(self):
def stub_create_aggregate(context, name, availability_zone):
self.assertEqual(context, self.context, "context")
self.assertEqual("test", name, "name")
self.assertIsNone(availability_zone, "availability_zone")
return AGGREGATE
self.stubs.Set(self.controller.api, "create_aggregate",
stub_create_aggregate)
result = self.controller.create(self.req,
{"aggregate": {"name": "test"}})
self.assertEqual(AGGREGATE, result["aggregate"])
def test_create_with_null_name(self):
self.assertRaises(exc.HTTPBadRequest, self.controller.create,
self.req, {"aggregate":
{"name": "",
"availability_zone": "nova1"}})
def test_create_with_name_too_long(self):
self.assertRaises(exc.HTTPBadRequest, self.controller.create,
self.req, {"aggregate":
{"name": "x" * 256,
"availability_zone": "nova1"}})
def test_create_with_extra_invalid_arg(self):
self.assertRaises(exc.HTTPBadRequest, self.controller.create,
self.req, dict(name="test",
availability_zone="nova1",
foo='bar'))
def test_show(self):
def stub_get_aggregate(context, id):
self.assertEqual(context, self.context, "context")
self.assertEqual("1", id, "id")
return AGGREGATE
self.stubs.Set(self.controller.api, 'get_aggregate',
stub_get_aggregate)
aggregate = self.controller.show(self.req, "1")
self.assertEqual(AGGREGATE, aggregate["aggregate"])
def test_show_no_admin(self):
self.assertRaises(exception.PolicyNotAuthorized,
self.controller.show,
self.user_req, "1")
def test_show_with_invalid_id(self):
def stub_get_aggregate(context, id):
raise exception.AggregateNotFound(aggregate_id=2)
self.stubs.Set(self.controller.api, 'get_aggregate',
stub_get_aggregate)
self.assertRaises(exc.HTTPNotFound,
self.controller.show, self.req, "2")
def test_update(self):
body = {"aggregate": {"name": "new_name",
"availability_zone": "nova1"}}
def stub_update_aggregate(context, aggregate, values):
self.assertEqual(context, self.context, "context")
self.assertEqual("1", aggregate, "aggregate")
self.assertEqual(body["aggregate"], values, "values")
return AGGREGATE
self.stubs.Set(self.controller.api, "update_aggregate",
stub_update_aggregate)
result = self.controller.update(self.req, "1", body=body)
self.assertEqual(AGGREGATE, result["aggregate"])
def test_update_no_admin(self):
self.assertRaises(exception.PolicyNotAuthorized,
self.controller.update,
self.user_req, "1", body={})
def test_update_with_only_name(self):
body = {"aggregate": {"name": "new_name"}}
def stub_update_aggregate(context, aggregate, values):
return AGGREGATE
self.stubs.Set(self.controller.api, "update_aggregate",
stub_update_aggregate)
result = self.controller.update(self.req, "1", body=body)
self.assertEqual(AGGREGATE, result["aggregate"])
def test_update_with_only_availability_zone(self):
body = {"aggregate": {"availability_zone": "nova1"}}
def stub_update_aggregate(context, aggregate, values):
return AGGREGATE
self.stubs.Set(self.controller.api, "update_aggregate",
stub_update_aggregate)
result = self.controller.update(self.req, "1", body=body)
self.assertEqual(AGGREGATE, result["aggregate"])
def test_update_with_no_updates(self):
test_metadata = {"aggregate": {}}
self.assertRaises(exc.HTTPBadRequest, self.controller.update,
self.req, "2", body=test_metadata)
def test_update_with_no_update_key(self):
test_metadata = {"asdf": {}}
self.assertRaises(exc.HTTPBadRequest, self.controller.update,
self.req, "2", body=test_metadata)
def test_update_with_wrong_updates(self):
test_metadata = {"aggregate": {"status": "disable",
"foo": "bar"}}
self.assertRaises(exc.HTTPBadRequest, self.controller.update,
self.req, "2", body=test_metadata)
def test_update_with_null_name(self):
test_metadata = {"aggregate": {"name": ""}}
self.assertRaises(exc.HTTPBadRequest, self.controller.update,
self.req, "2", body=test_metadata)
def test_update_with_name_too_long(self):
test_metadata = {"aggregate": {"name": "x" * 256}}
self.assertRaises(exc.HTTPBadRequest, self.controller.update,
self.req, "2", body=test_metadata)
def test_update_with_bad_aggregate(self):
test_metadata = {"aggregate": {"name": "test_name"}}
def stub_update_aggregate(context, aggregate, metadata):
raise exception.AggregateNotFound(aggregate_id=2)
self.stubs.Set(self.controller.api, "update_aggregate",
stub_update_aggregate)
self.assertRaises(exc.HTTPNotFound, self.controller.update,
self.req, "2", body=test_metadata)
def test_invalid_action(self):
body = {"append_host": {"host": "host1"}}
self.assertRaises(exc.HTTPBadRequest,
self.controller.action, self.req, "1", body=body)
def test_add_host(self):
def stub_add_host_to_aggregate(context, aggregate, host):
self.assertEqual(context, self.context, "context")
self.assertEqual("1", aggregate, "aggregate")
self.assertEqual("host1", host, "host")
return AGGREGATE
self.stubs.Set(self.controller.api, "add_host_to_aggregate",
stub_add_host_to_aggregate)
aggregate = self.controller.action(self.req, "1",
body={"add_host": {"host":
"host1"}})
self.assertEqual(aggregate["aggregate"], AGGREGATE)
def test_add_host_no_admin(self):
self.assertRaises(exception.PolicyNotAuthorized,
self.controller.action,
self.user_req, "1",
body={"add_host": {"host": "host1"}})
def test_add_host_with_already_added_host(self):
def stub_add_host_to_aggregate(context, aggregate, host):
raise exception.AggregateHostExists(aggregate_id=aggregate,
host=host)
self.stubs.Set(self.controller.api, "add_host_to_aggregate",
stub_add_host_to_aggregate)
self.assertRaises(exc.HTTPConflict, self.controller.action,
self.req, "1",
body={"add_host": {"host": "host1"}})
def test_add_host_with_bad_aggregate(self):
def stub_add_host_to_aggregate(context, aggregate, host):
raise exception.AggregateNotFound(aggregate_id=aggregate)
self.stubs.Set(self.controller.api, "add_host_to_aggregate",
stub_add_host_to_aggregate)
self.assertRaises(exc.HTTPNotFound, self.controller.action,
self.req, "bogus_aggregate",
body={"add_host": {"host": "host1"}})
def test_add_host_with_bad_host(self):
def stub_add_host_to_aggregate(context, aggregate, host):
raise exception.ComputeHostNotFound(host=host)
self.stubs.Set(self.controller.api, "add_host_to_aggregate",
stub_add_host_to_aggregate)
self.assertRaises(exc.HTTPNotFound, self.controller.action,
self.req, "1",
body={"add_host": {"host": "bogus_host"}})
def test_add_host_with_missing_host(self):
self.assertRaises(exc.HTTPBadRequest, self.controller.action,
self.req, "1", body={"add_host": {"asdf": "asdf"}})
def test_add_host_raises_key_error(self):
def stub_add_host_to_aggregate(context, aggregate, host):
raise KeyError
self.stubs.Set(self.controller.api, "add_host_to_aggregate",
stub_add_host_to_aggregate)
        # NOTE(mtreinish) The check for a KeyError here is to ensure that
# if add_host_to_aggregate() raises a KeyError it propagates. At
# one point the api code would mask the error as a HTTPBadRequest.
# This test is to ensure that this doesn't occur again.
self.assertRaises(KeyError, self.controller.action, self.req, "1",
body={"add_host": {"host": "host1"}})
def test_remove_host(self):
def stub_remove_host_from_aggregate(context, aggregate, host):
self.assertEqual(context, self.context, "context")
self.assertEqual("1", aggregate, "aggregate")
self.assertEqual("host1", host, "host")
stub_remove_host_from_aggregate.called = True
return {}
self.stubs.Set(self.controller.api,
"remove_host_from_aggregate",
stub_remove_host_from_aggregate)
self.controller.action(self.req, "1",
body={"remove_host": {"host": "host1"}})
self.assertTrue(stub_remove_host_from_aggregate.called)
def test_remove_host_no_admin(self):
self.assertRaises(exception.PolicyNotAuthorized,
self.controller.action,
self.user_req, "1",
body={"remove_host": {"host": "host1"}})
def test_remove_host_with_bad_aggregate(self):
def stub_remove_host_from_aggregate(context, aggregate, host):
raise exception.AggregateNotFound(aggregate_id=aggregate)
self.stubs.Set(self.controller.api,
"remove_host_from_aggregate",
stub_remove_host_from_aggregate)
self.assertRaises(exc.HTTPNotFound, self.controller.action,
self.req, "bogus_aggregate",
body={"remove_host": {"host": "host1"}})
def test_remove_host_with_host_not_in_aggregate(self):
def stub_remove_host_from_aggregate(context, aggregate, host):
raise exception.AggregateHostNotFound(aggregate_id=aggregate,
host=host)
self.stubs.Set(self.controller.api,
"remove_host_from_aggregate",
stub_remove_host_from_aggregate)
self.assertRaises(exc.HTTPNotFound, self.controller.action,
self.req, "1",
body={"remove_host": {"host": "host1"}})
def test_remove_host_with_bad_host(self):
def stub_remove_host_from_aggregate(context, aggregate, host):
raise exception.ComputeHostNotFound(host=host)
self.stubs.Set(self.controller.api,
"remove_host_from_aggregate",
stub_remove_host_from_aggregate)
self.assertRaises(exc.HTTPNotFound, self.controller.action,
self.req, "1", body={"remove_host": {"host": "bogushost"}})
def test_remove_host_with_missing_host(self):
self.assertRaises(exc.HTTPBadRequest, self.controller.action,
self.req, "1", body={"asdf": "asdf"})
def test_remove_host_with_extra_param(self):
self.assertRaises(exc.HTTPBadRequest, self.controller.action,
self.req, "1", body={"remove_host": {"asdf": "asdf",
"host": "asdf"}})
def test_set_metadata(self):
body = {"set_metadata": {"metadata": {"foo": "bar"}}}
def stub_update_aggregate(context, aggregate, values):
self.assertEqual(context, self.context, "context")
self.assertEqual("1", aggregate, "aggregate")
self.assertThat(body["set_metadata"]['metadata'],
matchers.DictMatches(values))
return AGGREGATE
self.stubs.Set(self.controller.api,
"update_aggregate_metadata",
stub_update_aggregate)
result = self.controller.action(self.req, "1", body=body)
self.assertEqual(AGGREGATE, result["aggregate"])
def test_set_metadata_no_admin(self):
self.assertRaises(exception.PolicyNotAuthorized,
self.controller._set_metadata,
self.user_req, "1",
body={"set_metadata": {"metadata":
{"foo": "bar"}}})
def test_set_metadata_with_bad_aggregate(self):
body = {"set_metadata": {"metadata": {"foo": "bar"}}}
def stub_update_aggregate(context, aggregate, metadata):
raise exception.AggregateNotFound(aggregate_id=aggregate)
self.stubs.Set(self.controller.api,
"update_aggregate_metadata",
stub_update_aggregate)
self.assertRaises(exc.HTTPNotFound, self.controller.action,
self.req, "bad_aggregate", body=body)
def test_set_metadata_with_missing_metadata(self):
body = {"asdf": {"foo": "bar"}}
self.assertRaises(exc.HTTPBadRequest, self.controller.action,
self.req, "1", body=body)
def test_set_metadata_with_extra_params(self):
body = {"metadata": {"foo": "bar"}, "asdf": {"foo": "bar"}}
self.assertRaises(exc.HTTPBadRequest, self.controller.action,
self.req, "1", body=body)
def test_delete_aggregate(self):
def stub_delete_aggregate(context, aggregate):
self.assertEqual(context, self.context, "context")
self.assertEqual("1", aggregate, "aggregate")
stub_delete_aggregate.called = True
self.stubs.Set(self.controller.api, "delete_aggregate",
stub_delete_aggregate)
self.controller.delete(self.req, "1")
self.assertTrue(stub_delete_aggregate.called)
def test_delete_aggregate_no_admin(self):
self.assertRaises(exception.PolicyNotAuthorized,
self.controller.delete,
self.user_req, "1")
def test_delete_aggregate_with_bad_aggregate(self):
def stub_delete_aggregate(context, aggregate):
raise exception.AggregateNotFound(aggregate_id=aggregate)
self.stubs.Set(self.controller.api, "delete_aggregate",
stub_delete_aggregate)
self.assertRaises(exc.HTTPNotFound, self.controller.delete,
self.req, "bogus_aggregate")
| ewindisch/nova | nova/tests/api/openstack/compute/contrib/test_aggregates.py | Python | apache-2.0 | 20,787 |
import logging
from test.parser.pattern.test_matching.base import PatternMatcherBaseClass
class PatternMatcherBasicTests(PatternMatcherBaseClass):
def test_single_word_match(self):
self.add_pattern_to_graph(pattern="A", topic="X", that="Y", template="1")
self.dump_graph()
context = self.match_sentence("A", topic="X", that="Y")
self.assertIsNotNone(context)
self.assertIsNotNone(context.template_node())
self.assertEqual("1", context.template_node().template.word)
def test_single_word_no_match(self):
self.add_pattern_to_graph(pattern="A", topic="X", that="Y", template="1")
self.dump_graph()
context = self.match_sentence("B", topic="X", that="Y")
self.assertIsNone(context)
| Thielak/program-y | src/test/parser/pattern/test_matching/test_basics.py | Python | mit | 781 |
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import os
from telemetry import decorators
from telemetry.core import exceptions
from telemetry.core import forwarders
from telemetry.core import util
from telemetry.core.backends.chrome import chrome_browser_backend
from telemetry.core.forwarders import cros_forwarder
class CrOSBrowserBackend(chrome_browser_backend.ChromeBrowserBackend):
# Some developers' workflow includes running the Chrome process from
# /usr/local/... instead of the default location. We have to check for both
# paths in order to support this workflow.
CHROME_PATHS = ['/opt/google/chrome/chrome ',
'/usr/local/opt/google/chrome/chrome ']
def __init__(self, browser_type, browser_options, cri, is_guest,
extensions_to_load):
super(CrOSBrowserBackend, self).__init__(
is_content_shell=False, supports_extensions=not is_guest,
browser_options=browser_options,
output_profile_path=None, extensions_to_load=extensions_to_load)
from telemetry.core.backends.chrome import chrome_browser_options
assert isinstance(browser_options,
chrome_browser_options.CrosBrowserOptions)
# Initialize fields so that an explosion during init doesn't break in Close.
self._browser_type = browser_type
self._cri = cri
self._is_guest = is_guest
self._forwarder = None
self.wpr_port_pairs = forwarders.PortPairs(
http=forwarders.PortPair(self.wpr_port_pairs.http.local_port,
self._cri.GetRemotePort()),
https=forwarders.PortPair(self.wpr_port_pairs.https.local_port,
self._cri.GetRemotePort()),
dns=None)
self._remote_debugging_port = self._cri.GetRemotePort()
self._port = self._remote_debugging_port
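    # When the device is remote, Start() later replaces self._port with a
    # local port forwarded to the remote debugging port.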
self._SetBranchNumber(self._GetChromeVersion())
self._login_ext_dir = None
if not self._use_oobe_login_for_testing:
self._login_ext_dir = os.path.join(os.path.dirname(__file__),
'chromeos_login_ext')
# Push a dummy login extension to the device.
# This extension automatically logs in test user specified by
# self.browser_options.username.
# Note that we also perform this copy locally to ensure that
# the owner of the extensions is set to chronos.
logging.info('Copying dummy login extension to the device')
cri.PushFile(self._login_ext_dir, '/tmp/')
self._login_ext_dir = '/tmp/chromeos_login_ext'
cri.Chown(self._login_ext_dir)
# Copy extensions to temp directories on the device.
# Note that we also perform this copy locally to ensure that
# the owner of the extensions is set to chronos.
for e in extensions_to_load:
output = cri.RunCmdOnDevice(['mktemp', '-d', '/tmp/extension_XXXXX'])
extension_dir = output[0].rstrip()
cri.PushFile(e.path, extension_dir)
cri.Chown(extension_dir)
e.local_path = os.path.join(extension_dir, os.path.basename(e.path))
# Ensure the UI is running and logged out.
self._RestartUI()
util.WaitFor(self.IsBrowserRunning, 20)
# Delete test user's cryptohome vault (user data directory).
if not self.browser_options.dont_override_profile:
self._cri.RunCmdOnDevice(['cryptohome', '--action=remove', '--force',
'--user=%s' % self.browser_options.username])
if self.browser_options.profile_dir:
cri.RmRF(self.profile_directory)
cri.PushFile(self.browser_options.profile_dir + '/Default',
self.profile_directory)
cri.Chown(self.profile_directory)
def GetBrowserStartupArgs(self):
args = super(CrOSBrowserBackend, self).GetBrowserStartupArgs()
args.extend([
'--enable-smooth-scrolling',
'--enable-threaded-compositing',
'--enable-per-tile-painting',
'--force-compositing-mode',
# Disables the start page, as well as other external apps that can
# steal focus or make measurements inconsistent.
'--disable-default-apps',
# Skip user image selection screen, and post login screens.
'--oobe-skip-postlogin',
# Allow devtools to connect to chrome.
'--remote-debugging-port=%i' % self._remote_debugging_port,
# Open a maximized window.
'--start-maximized',
# TODO(achuith): Re-enable this flag again before multi-profiles
# will become enabled by default to have telemetry mileage on it.
# '--multi-profiles',
# Debug logging for login flake (crbug.com/263527).
'--vmodule=*/browser/automation/*=2,*/chromeos/net/*=2,'
'*/chromeos/login/*=2,*/extensions/*=2,'
'*/device_policy_decoder_chromeos.cc=2'])
if self._is_guest:
args.extend([
# Jump to the login screen, skipping network selection, eula, etc.
'--login-screen=login',
# Skip hwid check, for VMs and pre-MP lab devices.
'--skip-hwid-check'
])
elif not self._use_oobe_login_for_testing:
# This extension bypasses gaia and logs us in.
logging.info('Using --auth-ext-path=%s to login', self._login_ext_dir)
args.append('--auth-ext-path=%s' % self._login_ext_dir)
return args
def _GetSessionManagerPid(self, procs):
"""Returns the pid of the session_manager process, given the list of
processes."""
for pid, process, _, _ in procs:
if process.startswith('/sbin/session_manager '):
return pid
return None
def _GetChromeProcess(self):
"""Locates the the main chrome browser process.
Chrome on cros is usually in /opt/google/chrome, but could be in
/usr/local/ for developer workflows - debug chrome is too large to fit on
rootfs.
Chrome spawns multiple processes for renderers. pids wrap around after they
are exhausted so looking for the smallest pid is not always correct. We
locate the session_manager's pid, and look for the chrome process that's an
immediate child. This is the main browser process.
"""
procs = self._cri.ListProcesses()
session_manager_pid = self._GetSessionManagerPid(procs)
if not session_manager_pid:
return None
# Find the chrome process that is the child of the session_manager.
for pid, process, ppid, _ in procs:
if ppid != session_manager_pid:
continue
for path in self.CHROME_PATHS:
if process.startswith(path):
return {'pid': pid, 'path': path, 'args': process}
return None
def _GetChromeVersion(self):
result = util.WaitFor(self._GetChromeProcess, timeout=30)
assert result and result['path']
(version, _) = self._cri.RunCmdOnDevice([result['path'], '--version'])
assert version
return version
@property
def pid(self):
result = self._GetChromeProcess()
if result and 'pid' in result:
return result['pid']
return None
@property
def browser_directory(self):
result = self._GetChromeProcess()
if result and 'path' in result:
return os.path.dirname(result['path'])
return None
@property
def profile_directory(self):
return '/home/chronos/Default'
@property
def hwid(self):
return self._cri.RunCmdOnDevice(['/usr/bin/crossystem', 'hwid'])[0]
@property
def _use_oobe_login_for_testing(self):
"""Oobe.LoginForTesting was introduced after branch 1599."""
return self.chrome_branch_number > 1599
def GetRemotePort(self, port):
if self._cri.local:
return port
return self._cri.GetRemotePort()
def __del__(self):
self.Close()
def Start(self):
# Escape all commas in the startup arguments we pass to Chrome
# because dbus-send delimits array elements by commas
startup_args = [a.replace(',', '\\,') for a in self.GetBrowserStartupArgs()]
# Restart Chrome with the login extension and remote debugging.
logging.info('Restarting Chrome with flags and login')
args = ['dbus-send', '--system', '--type=method_call',
'--dest=org.chromium.SessionManager',
'/org/chromium/SessionManager',
'org.chromium.SessionManagerInterface.EnableChromeTesting',
'boolean:true',
'array:string:"%s"' % ','.join(startup_args)]
self._cri.RunCmdOnDevice(args)
if not self._cri.local:
self._port = util.GetUnreservedAvailableLocalPort()
self._forwarder = self.forwarder_factory.Create(
forwarders.PortPairs(
http=forwarders.PortPair(self._port, self._remote_debugging_port),
https=None,
dns=None), forwarding_flag='L')
try:
self._WaitForBrowserToComeUp(wait_for_extensions=False)
self._PostBrowserStartupInitialization()
except:
import traceback
traceback.print_exc()
self.Close()
raise
# chrome_branch_number is set in _PostBrowserStartupInitialization.
# Without --skip-hwid-check (introduced in crrev.com/203397), devices/VMs
# will be stuck on the bad hwid screen.
if self.chrome_branch_number <= 1500 and not self.hwid:
raise exceptions.LoginException(
'Hardware id not set on device/VM. --skip-hwid-check not supported '
'with chrome branches 1500 or earlier.')
util.WaitFor(lambda: self.oobe_exists, 10)
if self.browser_options.auto_login:
if self._is_guest:
pid = self.pid
self._NavigateGuestLogin()
# Guest browsing shuts down the current browser and launches an
# incognito browser in a separate process, which we need to wait for.
util.WaitFor(lambda: pid != self.pid, 10)
self._WaitForBrowserToComeUp()
else:
self._NavigateLogin()
logging.info('Browser is up!')
def Close(self):
super(CrOSBrowserBackend, self).Close()
self._RestartUI() # Logs out.
if self._forwarder:
self._forwarder.Close()
self._forwarder = None
if self._login_ext_dir:
self._cri.RmRF(self._login_ext_dir)
self._login_ext_dir = None
if self._cri:
for e in self._extensions_to_load:
self._cri.RmRF(os.path.dirname(e.local_path))
self._cri = None
@property
@decorators.Cache
def forwarder_factory(self):
return cros_forwarder.CrOsForwarderFactory(self._cri)
def IsBrowserRunning(self):
return bool(self.pid)
def GetStandardOutput(self):
return 'Cannot get standard output on CrOS'
def GetStackTrace(self):
return 'Cannot get stack trace on CrOS'
def _RestartUI(self):
if self._cri:
logging.info('(Re)starting the ui (logs the user out)')
if self._cri.IsServiceRunning('ui'):
self._cri.RunCmdOnDevice(['restart', 'ui'])
else:
self._cri.RunCmdOnDevice(['start', 'ui'])
@property
def oobe(self):
return self.misc_web_contents_backend.GetOobe()
@property
def oobe_exists(self):
return self.misc_web_contents_backend.oobe_exists
def _SigninUIState(self):
"""Returns the signin ui state of the oobe. HIDDEN: 0, GAIA_SIGNIN: 1,
ACCOUNT_PICKER: 2, WRONG_HWID_WARNING: 3, MANAGED_USER_CREATION_FLOW: 4.
These values are in
chrome/browser/resources/chromeos/login/display_manager.js
"""
return self.oobe.EvaluateJavaScript('''
loginHeader = document.getElementById('login-header-bar')
if (loginHeader) {
loginHeader.signinUIState_;
}
''')
def _HandleUserImageSelectionScreen(self):
"""If we're stuck on the user image selection screen, we click the ok
button.
"""
oobe = self.oobe
if oobe:
try:
oobe.EvaluateJavaScript("""
var ok = document.getElementById("ok-button");
if (ok) {
ok.click();
}
""")
except (exceptions.TabCrashException):
pass
def _IsLoggedIn(self):
"""Returns True if we're logged in (cryptohome has mounted), and the oobe
has been dismissed."""
if self.chrome_branch_number <= 1547:
self._HandleUserImageSelectionScreen()
return (self._cri.IsCryptohomeMounted(self.browser_options.username) and
not self.oobe_exists)
def _StartupWindow(self):
"""Closes the startup window, which is an extension on official builds,
and a webpage on chromiumos"""
startup_window_ext_id = 'honijodknafkokifofgiaalefdiedpko'
return (self.extension_backend[startup_window_ext_id]
if startup_window_ext_id in self.extension_backend
else self.tab_list_backend.Get(0, None))
def _WaitForSigninScreen(self):
"""Waits for oobe to be on the signin or account picker screen."""
def OnAccountPickerScreen():
signin_state = self._SigninUIState()
# GAIA_SIGNIN or ACCOUNT_PICKER screens.
return signin_state == 1 or signin_state == 2
try:
util.WaitFor(OnAccountPickerScreen, 60)
except util.TimeoutException:
self._cri.TakeScreenShot('guest-screen')
raise exceptions.LoginException('Timed out waiting for signin screen, '
'signin state %d' % self._SigninUIState())
def _ClickBrowseAsGuest(self):
"""Click the Browse As Guest button on the account picker screen. This will
restart the browser, and we could have a tab crash or a browser crash."""
try:
self.oobe.EvaluateJavaScript("""
var guest = document.getElementById("guest-user-button");
if (guest) {
guest.click();
}
""")
except (exceptions.TabCrashException,
exceptions.BrowserConnectionGoneException):
pass
def _WaitForGuestFsMounted(self):
"""Waits for the guest user to be mounted as guestfs"""
guest_path = self._cri.CryptohomePath('$guest')
util.WaitFor(lambda: (self._cri.FilesystemMountedAt(guest_path) ==
'guestfs'), 20)
def _NavigateGuestLogin(self):
"""Navigates through oobe login screen as guest"""
if not self.oobe_exists:
raise exceptions.LoginException('Oobe missing')
self._WaitForSigninScreen()
self._ClickBrowseAsGuest()
self._WaitForGuestFsMounted()
def _NavigateLogin(self):
"""Navigates through oobe login screen"""
if self._use_oobe_login_for_testing:
logging.info('Invoking Oobe.loginForTesting')
if not self.oobe_exists:
raise exceptions.LoginException('Oobe missing')
oobe = self.oobe
util.WaitFor(lambda: oobe.EvaluateJavaScript(
'typeof Oobe !== \'undefined\''), 10)
if oobe.EvaluateJavaScript(
'typeof Oobe.loginForTesting == \'undefined\''):
raise exceptions.LoginException('Oobe.loginForTesting js api missing')
oobe.ExecuteJavaScript(
'Oobe.loginForTesting(\'%s\', \'%s\');'
% (self.browser_options.username, self.browser_options.password))
try:
util.WaitFor(self._IsLoggedIn, 60)
except util.TimeoutException:
self._cri.TakeScreenShot('login-screen')
raise exceptions.LoginException('Timed out going through login screen')
# Wait for extensions to load.
try:
self._WaitForBrowserToComeUp()
except util.TimeoutException:
logging.error('Chrome args: %s' % self._GetChromeProcess()['args'])
self._cri.TakeScreenShot('extension-timeout')
raise
if self.chrome_branch_number < 1500:
# Wait for the startup window, then close it. Startup window doesn't exist
# post-M27. crrev.com/197900
util.WaitFor(self._StartupWindow, 20).Close()
else:
# Workaround for crbug.com/329271, crbug.com/334726.
retries = 3
while True:
try:
# Open a new window/tab.
if len(self.tab_list_backend):
tab = self.tab_list_backend[-1]
else:
tab = self.tab_list_backend.New(timeout=30)
tab.Navigate('about:blank', timeout=10)
break
except (exceptions.TabCrashException, util.TimeoutException,
IndexError):
retries -= 1
logging.warn('TabCrashException/TimeoutException in '
'new tab creation/navigation, '
'remaining retries %d' % retries)
if not retries:
raise
| anirudhSK/chromium | tools/telemetry/telemetry/core/backends/chrome/cros_browser_backend.py | Python | bsd-3-clause | 16,477 |
#
# An example that presents CAPTCHA tests in a web environment
# and gives the user a chance to solve them.
#
# This example is for use with Apache using mod_python and its
# Publisher handler. For example, if your apache configuration
# included something like:
#
# AddHandler python-program .py
# PythonHandler mod_python.publisher
#
# You could place this script anywhere in your web space to see
# the demo.
#
# --Micah <micah@navi.cx>
#
from Captcha.Visual import Tests
import Captcha
from mod_python import apache
def _getFactory(req):
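    # One persistent CAPTCHA store per Apache interpreter, kept under /tmp so
    # that state survives across requests.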
return Captcha.PersistentFactory("/tmp/pycaptcha_%s" % req.interpreter)
def test(req, name=Tests.__all__[0]):
"""Show a newly generated CAPTCHA of the given class.
Default is the first class name given in Tests.__all__
"""
test = _getFactory(req).new(getattr(Tests, name))
# Make a list of tests other than the one we're using
others = []
for t in Tests.__all__:
if t != name:
others.append('<li><a href="?name=%s">%s</a></li>' % (t,t))
others = "\n".join(others)
return """<html>
<head>
<title>PyCAPTCHA Example</title>
</head>
<body>
<h1>PyCAPTCHA Example (for mod_python)</h1>
<p>
<b>%s</b>:
%s
</p>
<p><img src="image?id=%s"/></p>
<p>
<form action="solution" method="get">
Enter the word shown:
<input type="text" name="word"/>
<input type="hidden" name="id" value="%s"/>
</form>
</p>
<p>
Or try...
<ul>
%s
</ul>
</p>
</body>
</html>
""" % (test.__class__.__name__, test.__doc__, test.id, test.id, others)
def image(req, id):
"""Generate an image for the CAPTCHA with the given ID string"""
test = _getFactory(req).get(id)
if not test:
raise apache.SERVER_RETURN, apache.HTTP_NOT_FOUND
req.content_type = "image/jpeg"
test.render().save(req, "JPEG")
return apache.OK
def solution(req, id, word):
"""Grade a CAPTCHA given a solution word"""
test = _getFactory(req).get(id)
if not test:
raise apache.SERVER_RETURN, apache.HTTP_NOT_FOUND
if not test.valid:
# Invalid tests will always return False, to prevent
# random trial-and-error attacks. This could be confusing to a user...
result = "Test invalidated, try another test"
elif test.testSolutions([word]):
result = "Correct"
else:
result = "Incorrect"
return """<html>
<head>
<title>PyCAPTCHA Example</title>
</head>
<body>
<h1>PyCAPTCHA Example</h1>
<h2>%s</h2>
<p><img src="image?id=%s"/></p>
<p><b>%s</b></p>
<p>You guessed: %s</p>
<p>Possible solutions: %s</p>
<p><a href="test">Try again</a></p>
</body>
</html>
""" % (test.__class__.__name__, test.id, result, word, ", ".join(test.solutions))
### The End ###
| lerouxb/seymour | thirdparty/pycaptcha/modpython_example.py | Python | mit | 2,728 |
import itertools
import os
import logging
from gettext import gettext as _
from pulp.plugins.util.misc import mkdir
from pulp.plugins.util.publish_step import PluginStep, AtomicDirectoryPublishStep
from pulp.server.exceptions import PulpCodedException
from pulp.server.controllers.repository import get_unit_model_querysets
from pulp_ostree.common import constants, errors
from pulp_ostree.plugins import lib
from pulp_ostree.plugins.distributors import configuration
from pulp_ostree.plugins.db.model import Branch
_LOG = logging.getLogger(__name__)
class WebPublisher(PluginStep):
"""
Web publisher class that is responsible for the actual publishing
of a repository via a web server
"""
def __init__(self, repo, conduit, config, working_dir=None, **kwargs):
"""
:param repo: The repository being published.
:type repo: pulp.plugins.model.Repository
:param conduit: Conduit providing access to relative Pulp functionality
:type conduit: pulp.plugins.conduits.repo_publish.RepoPublishConduit
:param config: Pulp configuration for the distributor
:type config: pulp.plugins.config.PluginCallConfiguration
:param working_dir: The temp directory this step should use for processing.
:type working_dir: str
"""
super(WebPublisher, self).__init__(
step_type=constants.PUBLISH_STEP_WEB_PUBLISHER,
repo=repo,
conduit=conduit,
config=config,
working_dir=working_dir,
plugin_type=constants.WEB_DISTRIBUTOR_TYPE_ID,
**kwargs)
self.publish_dir = os.path.join(self.get_working_dir(), repo.id)
atomic_publish = AtomicDirectoryPublishStep(
self.get_working_dir(),
[(repo.id, configuration.get_web_publish_dir(repo.repo_obj, config))],
configuration.get_master_publish_dir(repo.repo_obj, config),
step_type=constants.PUBLISH_STEP_OVER_HTTP)
atomic_publish.description = _('Making files available via web.')
main = MainStep(config=config)
self.add_child(main)
self.add_child(atomic_publish)
mkdir(self.publish_dir)
class MainStep(PluginStep):
def __init__(self, **kwargs):
super(MainStep, self).__init__(constants.PUBLISH_STEP_MAIN, **kwargs)
self.context = None
self.redirect_context = None
self.description = _('Publish Trees')
@property
def depth(self):
depth = self.parent.config.get(
constants.IMPORTER_CONFIG_KEY_DEPTH, constants.DEFAULT_DEPTH)
return int(depth)
def process_main(self, item=None):
"""
Publish the repository.
Create an empty repository. Then, for each unit,
perform a (local) pull which links objects in this repository to
objects in the *backing* repository at the storage path. This starts
with the branch HEAD commit and then includes all referenced objects.
:raise PulpCodedException: on error.
"""
path = self.parent.publish_dir
repository = lib.Repository(path)
repository.create()
for unit in self._get_units():
try:
repository.pull_local(unit.storage_path, [unit.commit], self.depth)
MainStep._add_ref(path, unit.branch, unit.commit)
except lib.LibError as le:
pe = PulpCodedException(errors.OST0006, reason=str(le))
raise pe
summary = lib.Summary(repository)
summary.generate()
def _get_units(self):
"""
Get the collection of units to be published.
The collection contains only the newest unit for each branch.
:return: An iterable of units to publish.
:rtype: iterable
"""
units_by_branch = {}
units = itertools.chain(*get_unit_model_querysets(self.get_repo().id, Branch))
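        # Sorting by creation time while keying the dict on branch leaves
        # only the newest unit for each branch.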
for unit in sorted(units, key=lambda u: u.created):
units_by_branch[unit.branch] = unit
return units_by_branch.values()
@staticmethod
def _add_ref(path, branch, commit):
"""
Write a branch (ref) file into the published repository.
:param path: The absolute path to the repository.
:type path: str
:param branch: The branch relative path.
:type branch: str
:param commit: The commit hash.
:type commit: str
"""
path = os.path.join(path, 'refs', 'heads', os.path.dirname(branch))
mkdir(path)
path = os.path.join(path, os.path.basename(branch))
with open(path, 'w+') as fp:
fp.write(commit)
| pcreech/pulp_ostree | plugins/pulp_ostree/plugins/distributors/steps.py | Python | gpl-2.0 | 4,693 |
# coding: utf-8
from django.contrib import admin
from .models import Note, NoteBook, NoteRevision, User
admin.site.register(User)
admin.site.register(NoteBook)
admin.site.register(Note)
admin.site.register(NoteRevision)
| skitoo/aligot | aligot/admin.py | Python | mit | 223 |
from electrum_dgb.i18n import _
fullname = 'Virtual Keyboard'
description = '%s\n%s' % (_("Add an optional virtual keyboard to the password dialog."), _("Warning: do not use this if it makes you pick a weaker password."))
available_for = ['qt']
| protonn/Electrum-Cash | plugins/virtualkeyboard/__init__.py | Python | mit | 246 |
# coding=utf-8
"""
InaSAFE Disaster risk assessment tool by AusAid - ** Generic Impact
Function on Population for Classified Hazard.**
Contact : ole.moller.nielsen@gmail.com
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
.. todo:: Check raster is single band
"""
import numpy
import itertools
from safe.impact_functions.bases.classified_rh_continuous_re import \
ClassifiedRHContinuousRE
from safe.impact_functions.core import (
population_rounding,
has_no_data)
from safe.storage.raster import Raster
from safe.common.utilities import (
format_int,
humanize_class,
create_classes,
create_label,
get_thousand_separator)
from safe.utilities.i18n import tr
from safe.common.tables import Table, TableRow
from safe.impact_functions.generic.\
classified_raster_population.metadata_definitions import \
ClassifiedRasterHazardPopulationMetadata
from safe.impact_functions.impact_function_manager\
import ImpactFunctionManager
from safe.gui.tools.minimum_needs.needs_profile import add_needs_parameters
from safe.common.exceptions import (
FunctionParametersError, ZeroImpactException)
from safe.impact_reports.population_exposure_report_mixin import \
PopulationExposureReportMixin
__author__ = 'lucernae'
__date__ = '24/03/15'
__revision__ = '$Format:%H$'
__copyright__ = ('Copyright 2014, Australia Indonesia Facility for '
'Disaster Reduction')
class ClassifiedRasterHazardPopulationFunction(
ClassifiedRHContinuousRE,
PopulationExposureReportMixin):
# noinspection PyUnresolvedReferences
"""Plugin for impact of population as derived by classified hazard."""
_metadata = ClassifiedRasterHazardPopulationMetadata()
def __init__(self):
super(ClassifiedRasterHazardPopulationFunction, self).__init__()
self.impact_function_manager = ImpactFunctionManager()
# AG: Use the proper minimum needs, update the parameters
self.parameters = add_needs_parameters(self.parameters)
self.no_data_warning = False
def notes(self):
"""Return the notes section of the report.
:return: The notes that should be attached to this impact report.
:rtype: list
"""
notes = [
{'content': tr('Notes'), 'header': True},
{
'content': tr('Total population: %s') % format_int(
population_rounding(self.total_population))
},
{
'content': tr(
'<sup>1</sup>People need evacuation if they are in a '
'hazard zone.')
},
{
'content': tr(
'Map shows the numbers of people in high, medium, '
'and low hazard class areas.')
},
{
'content': tr(
'The layers contained `no data`. This missing data was '
'carried through to the impact layer.'),
'condition': self.no_data_warning
},
{
'content': tr(
'`No data` values in the impact layer were treated as 0 '
'when counting the affected or total population.'),
'condition': self.no_data_warning
},
{
'content': tr(
'All values are rounded up to the nearest integer in '
'order to avoid representing human lives as fractions.'),
},
{
'content': tr(
'Population rounding is applied to all population '
'values, which may cause discrepancies when adding '
'values.'
)
}
]
return notes
def run(self):
"""Plugin for impact of population as derived by classified hazard.
Counts number of people exposed to each class of the hazard
Return
Map of population exposed to high class
Table with number of people in each class
"""
self.validate()
self.prepare()
# The 3 classes
        # TODO (3.2): shouldn't these be defined in keywords rather? TS
categorical_hazards = self.parameters['Categorical hazards'].value
low_class = categorical_hazards[0].value
medium_class = categorical_hazards[1].value
high_class = categorical_hazards[2].value
# The classes must be different to each other
unique_classes_flag = all(
x != y for x, y in list(
itertools.combinations(
[low_class, medium_class, high_class], 2)))
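        # e.g. list(itertools.combinations([1, 2, 3], 2)) yields
        # [(1, 2), (1, 3), (2, 3)]; the flag is True only when every such
        # pair of class values differs.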
if not unique_classes_flag:
raise FunctionParametersError(
                'There is a hazard class that has the same value as another '
                'class. Please check the parameters.')
# Extract data as numeric arrays
hazard_data = self.hazard.layer.get_data(nan=True) # Class
if has_no_data(hazard_data):
self.no_data_warning = True
# Calculate impact as population exposed to each class
population = self.exposure.layer.get_data(scaling=True)
# Get all population data that falls in each hazard class
high_hazard_population = numpy.where(
hazard_data == high_class, population, 0)
medium_hazard_population = numpy.where(
hazard_data == medium_class, population, 0)
low_hazard_population = numpy.where(
hazard_data == low_class, population, 0)
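        # numpy.where picks per-cell values, e.g.
        # numpy.where(numpy.array([1, 2]) == 2, numpy.array([5, 7]), 0)
        # evaluates to array([0, 7]) -- population is kept only where the
        # hazard matches the class, zero elsewhere.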
affected_population = (
high_hazard_population + medium_hazard_population +
low_hazard_population)
# Carry the no data values forward to the impact layer.
affected_population = numpy.where(
numpy.isnan(population),
numpy.nan,
affected_population)
affected_population = numpy.where(
numpy.isnan(hazard_data),
numpy.nan,
affected_population)
# Count totals
self.total_population = int(numpy.nansum(population))
self.affected_population[
tr('Population in High hazard class areas')] = int(
numpy.nansum(high_hazard_population))
self.affected_population[
tr('Population in Medium hazard class areas')] = int(
numpy.nansum(medium_hazard_population))
self.affected_population[
tr('Population in Low hazard class areas')] = int(
numpy.nansum(low_hazard_population))
self.unaffected_population = (
self.total_population - self.total_affected_population)
# check for zero impact
if self.total_affected_population == 0:
table_body = [
self.question,
TableRow(
[tr('People affected'), '%s' % format_int(0)],
header=True)]
message = Table(table_body).toNewlineFreeString()
raise ZeroImpactException(message)
self.minimum_needs = [
parameter.serialize() for parameter in
self.parameters['minimum needs']
]
total_needs = self.total_needs
impact_table = impact_summary = self.generate_html_report()
# Create style
colours = [
'#FFFFFF', '#38A800', '#79C900', '#CEED00',
'#FFCC00', '#FF6600', '#FF0000', '#7A0000']
classes = create_classes(affected_population.flat[:], len(colours))
interval_classes = humanize_class(classes)
style_classes = []
for i in xrange(len(colours)):
style_class = dict()
if i == 1:
label = create_label(
interval_classes[i],
tr('Low Population [%i people/cell]' % classes[i]))
elif i == 4:
label = create_label(
interval_classes[i],
tr('Medium Population [%i people/cell]' % classes[i]))
elif i == 7:
label = create_label(
interval_classes[i],
tr('High Population [%i people/cell]' % classes[i]))
else:
label = create_label(interval_classes[i])
style_class['label'] = label
style_class['quantity'] = classes[i]
if i == 0:
transparency = 100
else:
transparency = 0
style_class['transparency'] = transparency
style_class['colour'] = colours[i]
style_classes.append(style_class)
style_info = dict(
target_field=None,
style_classes=style_classes,
style_type='rasterStyle')
# For printing map purpose
map_title = tr('Population affected by each class')
legend_notes = tr(
'Thousand separator is represented by %s' %
get_thousand_separator())
legend_units = tr('(people per cell)')
legend_title = tr('Number of People')
# Create raster object and return
raster_layer = Raster(
data=affected_population,
projection=self.exposure.layer.get_projection(),
geotransform=self.exposure.layer.get_geotransform(),
name=tr('Population which %s') % (
self.impact_function_manager
.get_function_title(self).lower()),
keywords={
'impact_summary': impact_summary,
'impact_table': impact_table,
'map_title': map_title,
'legend_notes': legend_notes,
'legend_units': legend_units,
'legend_title': legend_title,
'total_needs': total_needs},
style_info=style_info)
self._impact = raster_layer
return raster_layer
| kant/inasafe | safe/impact_functions/generic/classified_raster_population/impact_function.py | Python | gpl-3.0 | 10,176 |
from nekrobox.docdecs import params
from six.moves import range
@params(one=(int, "First symbol"),
two=(int, "Next symbol"),
symbols=(int, "Number of symbols to choose from"),
returns=(int, "Shortest distance"))
def distance(one, two, symbols=36):
"""Get the shortest distance between two symbols."""
lowest, highest = (one, two) if one < two else (two, one)
straight = one - two if one > two else two - one
end = symbols - highest
loop = end + lowest
return straight if straight < loop else loop
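# For example, with the default 36 symbols: distance(1, 35) == 2, because
# wrapping through 0 (35 -> 0 -> 1) beats the straight path of 34 steps.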
@params(address=(list, "Gate address as generated"),
symbols=(int, "Number of symbols"),
returns=(int, "Shortest distance"))
def address_distance(address, symbols=36):
"""Get the shortest distance to dial this address."""
dist = 0
for pos in range(len(address[:-1])):
dist += distance(address[pos], address[pos+1], symbols)
return dist
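# e.g. address_distance([0, 18, 1]) == distance(0, 18) + distance(18, 1)
# == 18 + 17 == 35.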
| Nekroze/quickdial | quickdial/pathing.py | Python | mit | 936 |
"""
WSGI config for mysite project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mysite.settings")
from django.core.wsgi import get_wsgi_application
from dj_static import Cling
#application = get_wsgi_application()
application = Cling(get_wsgi_application())
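# Cling (from dj-static) wraps the WSGI app so static files are served by the
# application process itself -- useful on hosts with no separate web server.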
| juanc27/myfavteam | mysite/wsgi.py | Python | mit | 460 |
import json
import os
import re
import sys
import django
from django.conf import settings
from django.contrib.auth.models import AnonymousUser
from django.contrib.sitemaps.views import x_robots_tag
from django.core.exceptions import PermissionDenied, ViewDoesNotExist
from django.core.paginator import EmptyPage, PageNotAnInteger
from django.db.transaction import non_atomic_requests
from django.http import Http404, HttpResponse, HttpResponseNotFound, JsonResponse
from django.template.response import TemplateResponse
from django.utils.cache import patch_cache_control
from django.views.decorators.cache import never_cache
from django_statsd.clients import statsd
from rest_framework.exceptions import NotFound
from rest_framework.response import Response
from rest_framework.views import APIView
from olympia import amo
from olympia.amo.utils import HttpResponseXSendFile, use_fake_fxa
from olympia.api.exceptions import base_500_data
from olympia.api.serializers import SiteStatusSerializer
from olympia.users.models import UserProfile
from . import monitors
from .sitemap import get_sitemap_path, get_sitemaps, render_index_xml
@never_cache
@non_atomic_requests
def heartbeat(request):
# For each check, a boolean pass/fail status to show in the template
status_summary = {}
checks = [
'memcache',
'libraries',
'elastic',
'path',
'rabbitmq',
'signer',
'database',
]
for check in checks:
with statsd.timer('monitor.%s' % check):
status, _ = getattr(monitors, check)()
# state is a string. If it is empty, that means everything is fine.
status_summary[check] = {'state': not status, 'status': status}
# If anything broke, send HTTP 500.
status_code = 200 if all(a['state'] for a in status_summary.values()) else 500
return JsonResponse(status_summary, status=status_code)
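# Illustrative healthy payload (shape only, not an exact fixture):
#   {"memcache": {"state": true, "status": ""}, ..., "database": {...}}
# A check that reports a non-empty status string flips its "state" to false
# and the view answers with HTTP 500 instead of 200.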
@never_cache
@non_atomic_requests
def client_info(request):
if getattr(settings, 'ENV', None) != 'dev':
raise PermissionDenied
keys = (
'HTTP_USER_AGENT',
'HTTP_X_COUNTRY_CODE',
'HTTP_X_FORWARDED_FOR',
'REMOTE_ADDR',
)
data = {key: request.META.get(key) for key in keys}
return JsonResponse(data)
@non_atomic_requests
def robots(request):
"""Generate a robots.txt"""
_service = request.META['SERVER_NAME'] == settings.SERVICES_DOMAIN
if _service or not settings.ENGAGE_ROBOTS:
response = HttpResponse('User-agent: *\nDisallow: /', content_type='text/plain')
else:
ctx = {
'apps': amo.APP_USAGE,
'mozilla_user_id': settings.TASK_USER_ID,
'mozilla_user_username': 'mozilla',
}
response = TemplateResponse(
request, 'amo/robots.html', context=ctx, content_type='text/plain'
)
return response
@non_atomic_requests
def contribute(request):
path = os.path.join(settings.ROOT, 'contribute.json')
return HttpResponse(open(path, 'rb'), content_type='application/json')
@non_atomic_requests
def handler403(request, exception=None, **kwargs):
return TemplateResponse(request, 'amo/403.html', status=403)
@non_atomic_requests
def handler404(request, exception=None, **kwargs):
if getattr(request, 'is_api', False):
# It's a v3+ api request (/api/vX/ or /api/auth/)
return JsonResponse({'detail': str(NotFound.default_detail)}, status=404)
elif re.match(r'^/api/\d\.\d/', getattr(request, 'path_info', '')):
# It's a legacy API request in the form of /api/X.Y/. We use path_info,
# which is set in LocaleAndAppURLMiddleware, because there might be a
# locale and app prefix we don't care about in the URL.
response = HttpResponseNotFound()
patch_cache_control(response, max_age=60 * 60 * 48)
return response
return TemplateResponse(request, 'amo/404.html', status=404)
@non_atomic_requests
def handler500(request, **kwargs):
# To avoid database queries, the handler500() cannot evaluate the user - so
# we need to avoid making log calls (our custom adapter would fetch the
# user from the current thread) and set request.user to anonymous to avoid
# its usage in context processors.
request.user = AnonymousUser()
if getattr(request, 'is_api', False):
# API exceptions happening in DRF code would be handled with by our
# custom_exception_handler function in olympia.api.exceptions, but in
# the rare case where the exception is caused by a middleware or django
# itself, it might not, so we need to handle it here.
return HttpResponse(
json.dumps(base_500_data()), content_type='application/json', status=500
)
return TemplateResponse(request, 'amo/500.html', status=500)
@non_atomic_requests
def csrf_failure(request, reason=''):
from django.middleware.csrf import REASON_NO_REFERER, REASON_NO_CSRF_COOKIE
ctx = {
'reason': reason,
'no_referer': reason == REASON_NO_REFERER,
'no_cookie': reason == REASON_NO_CSRF_COOKIE,
}
return TemplateResponse(request, 'amo/403.html', context=ctx, status=403)
@non_atomic_requests
def version(request):
path = os.path.join(settings.ROOT, 'version.json')
with open(path) as f:
contents = json.loads(f.read())
py_info = sys.version_info
contents['python'] = '{major}.{minor}'.format(
major=py_info.major, minor=py_info.minor
)
contents['django'] = '{major}.{minor}'.format(
major=django.VERSION[0], minor=django.VERSION[1]
)
path = os.path.join(settings.ROOT, 'package.json')
with open(path) as f:
data = json.loads(f.read())
contents['addons-linter'] = data['dependencies']['addons-linter']
res = HttpResponse(json.dumps(contents), content_type='application/json')
res.headers['Access-Control-Allow-Origin'] = '*'
return res
def _frontend_view(*args, **kwargs):
"""View has migrated to addons-frontend but we still have the url so we
can reverse() to it in addons-server code.
    If you ever hit this url, something went wrong!"""
raise ViewDoesNotExist()
@non_atomic_requests
def frontend_view(*args, **kwargs):
"""Wrap _frontend_view so we can mock it in tests."""
return _frontend_view(*args, **kwargs)
# Special attribute that our <ModelBase>.get_absolute_url() looks for to
# determine whether it's a frontend view (that requires a different host prefix
# on admin instances) or not.
frontend_view.is_frontend_view = True
def fake_fxa_authorization(request):
"""Fake authentication page to bypass FxA in local development envs."""
if not use_fake_fxa():
raise Http404()
interesting_accounts = UserProfile.objects.exclude(groups=None).exclude(
deleted=True
)[:25]
return TemplateResponse(
request,
'amo/fake_fxa_authorization.html',
context={'interesting_accounts': interesting_accounts},
)
class SiteStatusView(APIView):
authentication_classes = []
permission_classes = []
@classmethod
def as_view(cls, **initkwargs):
return non_atomic_requests(super().as_view(**initkwargs))
def get(self, request, format=None):
return Response(SiteStatusSerializer(object()).data)
class InvalidSection(Exception):
pass
@non_atomic_requests
@x_robots_tag
def sitemap(request):
section = request.GET.get('section') # no section means the index page
app = request.GET.get('app_name')
page = request.GET.get('p', 1)
if 'debug' in request.GET and settings.SITEMAP_DEBUG_AVAILABLE:
try:
sitemaps = get_sitemaps()
if not section:
if page != 1:
raise EmptyPage
content = render_index_xml(sitemaps)
else:
sitemap_object = sitemaps.get((section, amo.APPS.get(app)))
if not sitemap_object:
raise InvalidSection
content = sitemap_object.render(app, page)
except EmptyPage:
raise Http404('Page %s empty' % page)
except PageNotAnInteger:
raise Http404('No page "%s"' % page)
except InvalidSection:
raise Http404('No sitemap available for section: %r' % section)
response = HttpResponse(content, content_type='application/xml')
else:
path = get_sitemap_path(section, app, page)
response = HttpResponseXSendFile(request, path, content_type='application/xml')
patch_cache_control(response, max_age=60 * 60)
return response
| wagnerand/addons-server | src/olympia/amo/views.py | Python | bsd-3-clause | 8,643 |
import datetime
def even_fib(le = None):
a = 1
b = 1
while True:
c = a + b
if le and c > le:
return
yield c
a = b + c
b = c + a
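# Why stepping two extra terms works: every third Fibonacci number is even.
# With c even, a = b + c and b = c + a are the next two (odd) terms, so the
# following c = a + b is the next even term -- yielding 2, 8, 34, ...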
def main(le):
return sum(even_fib(le))
try:
para = int(input())
except (ValueError, EOFError):
para = int(4e6)
beg = datetime.datetime.now()
ans = main(para)
end = datetime.datetime.now()
print("answer:", ans)
print("time:", end - beg)
| nowsword/ProjectEuler | p002.py | Python | gpl-3.0 | 422 |
"""
Some utilities related to i386 analysis. Loaders and analysis
modules may use these as needed...
"""
import binascii
sigs = [
("558bec", "ffffff"), # push ebp; mov ebp,esp; Intel/Microsoft
("568bf1", "ffffff"), # push esi; mov esi,ecx (c++)
("5589e5", "ffffff"), # push ebp; mov ebp,esp; GCC
("8bff558bec", "ffffffffff"), # mov edi,edi; push ebp; mov epb, esp
# Ok... here's where things get cool...
# This is push <imm8>, push <imm32>, call <somewhere> # ms seh setup entry
("6a006800000000e8", "ff00ff00000000ff")
]
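# In each (sig, mask) pair the mask marks which bits must match: 0xff bytes
# are compared exactly while 0x00 bytes act as wildcards -- e.g. the pushed
# immediates in the SEH-setup signature above are masked out.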
def addEntrySigs(vw):
for sigstr, maskstr in sigs:
bytez = binascii.unhexlify(sigstr)
masks = binascii.unhexlify(maskstr)
vw.addFunctionSignatureBytes(bytez, masks)
| bat-serjo/vivisect | vivisect/analysis/i386/__init__.py | Python | apache-2.0 | 752 |
import json
import logging
from tornado import web, gen
from lib.blinx.core.router.routestar import AddRoute
from lib.blinx.core.handlers import BaseHandler
from utils.email import validate_email
app_log = logging.getLogger("tornado.application")
@AddRoute(r'/recovery')
class RecoveryHandler(BaseHandler):
@web.asynchronous
@gen.coroutine
def get(self):
self.render('recovery.html', next_uri=self.get_argument('next', u'/'),
error=None,
notify=None,
identifier='user or user@domain.com')
@web.asynchronous
@gen.coroutine
def post(self):
identifier = self.get_argument('identifier', None)
if identifier:
identifier = identifier.lower()
else:
self.render('recovery.html',
next_uri=self.get_argument('next', u'/'),
identifier=None,
notify=None,
error="User/Email not provided!")
            self.finish()
            return
# if email addy provided
if validate_email(identifier):
app_log.info('Recovering user by email: %s', identifier)
# recover via email addy
            recovery_obj = yield self.api.recover_user(email=identifier)
# else username provided
else:
            app_log.info('Recovering user by username: %s', identifier)
            # recover via username
            recovery_obj = yield self.api.recover_user(username=identifier)
# grab response from API
recovery_obj = json.loads(recovery_obj.body)
# if error found in response, show them!
if 'error' in recovery_obj:
self.render('recovery.html',
next_uri=self.get_argument('next', u'/'),
identifier=identifier,
notify=None,
error="Error: {0}".format(recovery_obj['error']))
elif 'email' in recovery_obj:
self.render('recovery.html',
next_uri=self.get_argument('next', u'/'),
identifier=recovery_obj['email'],
notify="Successfully sent recovery message to {0}".format(recovery_obj['email']),
error=None)
else:
self.render('recovery.html',
next_uri=self.get_argument('next', u'/'),
identifier=identifier,
notify=None,
error="Unexpected error")
@AddRoute(r'/recovery/token/([\w\-]+)')
class RecoveryTokenHandler(BaseHandler):
"""
    Validates a password recovery token and renders the password reset form.
"""
@web.asynchronous
@gen.coroutine
def get(self, token):
# grab response from API
found_token_obj = yield self.api.recover_token_validate(token)
found_token_obj = json.loads(found_token_obj.body)
# if error returned in token validation
if 'error' in found_token_obj:
self.render('reset_password.html',
next_uri=self.get_argument('next', u'/'),
username="not found",
token=token,
notify=None,
error=found_token_obj['error'])
# else token is valid
else:
# if for some reason a username wasn't provided in api response
if 'username' not in found_token_obj:
self.render('reset_password.html',
next_uri=self.get_argument('next', u'/'),
username='not found',
token=token,
notify="None",
error='username not found!')
# else render page for user to reset password
else:
self.render('reset_password.html',
next_uri=self.get_argument('next', u'/'),
username=found_token_obj['username'],
token=token,
notify="Reset token valid!",
error=None)
@web.asynchronous
@gen.coroutine
def post(self, token):
username = self.get_argument(name='username', default=None)
app_log.info('Resetting user password: {user}'.format(user=username))
blinx_password = self.get_argument(name='password', default=None)
# re-validate token for good measure
found_token_obj = yield self.api.recover_token_validate(token)
found_token_obj = json.loads(found_token_obj.body)
# if error returned in token validation
if 'error' in found_token_obj:
self.render('reset_password.html',
next_uri=self.get_argument('next', u'/'),
username=found_token_obj.get('username', None),
token=token,
notify=None,
error="Reset token is not valid!")
else:
# remove token
remove_token = yield self.api.recover_token_delete(token)
# update user
update_user = yield self.api.update_user(username=username,
password=blinx_password)
app_log.debug(update_user)
self.redirect('/')
| blinxin/blinx | blinx/routes/recovery.py | Python | gpl-2.0 | 5,542 |
#!/usr/bin/env python
import os
import sqlite3
import sys
from xml.etree import ElementTree
def generate_columns(table, columns):
ret = []
types = {
"Bool": "bool",
"Int": "int",
"Long": "long",
"ULong": "unsigned long",
"String": "text",
"LocString": "text",
"AssetPath": "text",
}
for name, type in columns:
cn = name.lower()
if type == "LocString":
cn += "_enus"
t = "%s %s" % (cn, types[type])
if cn == "id":
t += " primary key"
ret.append(t)
return ",\n".join(ret)
def get_field(table, record, column, type):
xpath = './Field[@column="%s"]' % (column)
if type == "LocString":
xpath += "/enUS"
data = record.find(xpath)
if data is None:
return None
data = data.text
if type == "Bool":
return True if data == "True" else False
elif type in ("Int", "Long", "ULong"):
if data is None:
return None
return int(data)
return data
def main():
if len(sys.argv) < 3:
sys.stderr.write("USAGE: %s [datadir] [dbfile]\n" % (sys.argv[0]))
exit(1)
datadir = sys.argv[1]
dbfile = sys.argv[2]
connection = sqlite3.connect(dbfile)
files = [
"ACHIEVE.xml",
"ADVENTURE.xml",
"ADVENTURE_DATA.xml",
"ADVENTURE_MISSION.xml",
"BANNER.xml",
"BOARD.xml",
"BOOSTER.xml",
"CARD_BACK.xml",
"CARD.xml",
"FIXED_REWARD.xml",
"FIXED_REWARD_ACTION.xml",
"FIXED_REWARD_MAP.xml",
"HERO.xml",
"SCENARIO.xml",
"SEASON.xml",
"WING.xml",
]
for path in files:
tablename = os.path.splitext(path)[0].lower()
with open(os.path.join(datadir, "DBF", path), "r") as f:
xml = ElementTree.parse(f)
cols = [(e.attrib["name"], e.attrib["type"]) for e in xml.findall("Column")]
_columns = generate_columns(tablename, cols)
create_tbl = "CREATE TABLE IF NOT EXISTS dbf_%s (%s)" % (tablename, _columns)
connection.execute(create_tbl)
values = []
for record in xml.findall("Record"):
fields = [get_field(tablename, record, column, type) for column, type in cols]
values.append(fields)
values_ph = ", ".join("?" for c in cols)
insert_into = "INSERT INTO dbf_%s VALUES (%s)" % (tablename, values_ph)
print(insert_into)
connection.executemany(insert_into, values)
# Add card names
connection.execute("ALTER TABLE dbf_card ADD COLUMN name_enus text")
# Add card class
connection.execute("ALTER TABLE dbf_card ADD COLUMN class_id int")
cur = connection.cursor()
cur.execute("SELECT id, note_mini_guid FROM dbf_card")
rows = cur.fetchall()
with open(os.path.join(datadir, "CardDefs.xml"), "r") as f:
xml = ElementTree.parse(f)
for pk, id in rows:
xpath = 'Entity[@CardID="%s"]' % (id)
e = xml.find(xpath)
if e is None:
print("WARNING: Could not find card %r in hs-data." % (id))
continue
name = e.find('Tag[@enumID="185"]/enUS').text
connection.execute("UPDATE dbf_card SET name_enus = ? WHERE id = ?", (name, pk))
card_class_elem = e.find('Tag[@enumID="199"]')
card_class = 0
if card_class_elem is not None:
card_class = int(card_class_elem.attrib["value"])
connection.execute("UPDATE dbf_card SET class_id = ? WHERE id = ?", (card_class, pk))
connection.commit()
connection.close()
if __name__ == "__main__":
main()
| oftc-ftw/stove | scripts/dbf_to_sqlite.py | Python | agpl-3.0 | 3,185 |
# Copyright (C) 2013 Statoil ASA, Norway.
#
# The file 'unrecognized_enum.py' is part of ERT - Ensemble based Reservoir Tool.
#
# ERT is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ERT is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE.
#
# See the GNU General Public License at <http://www.gnu.org/licenses/gpl.html>
# for more details.
from ert.cwrap import BaseCEnum
from ert.config import CONFIG_LIB
class UnrecognizedEnum(BaseCEnum):
pass
UnrecognizedEnum.populateEnum(CONFIG_LIB, "config_schema_item_unrecognized_enum_iget")
UnrecognizedEnum.registerEnum(CONFIG_LIB, "config_unrecognized_enum")
| iLoop2/ResInsight | ThirdParty/Ert/devel/python/python/ert/config/unrecognized_enum.py | Python | gpl-3.0 | 929 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import uuid
from oslo.config import cfg
from nova.api.openstack import compute
from nova.api.openstack.compute import plugins
from nova.api.openstack.compute.plugins.v3 import servers
from nova.compute import api as compute_api
from nova.compute import flavors
from nova import db
import nova.db.api
from nova.network import manager
from nova.openstack.common import jsonutils
from nova.openstack.common import rpc
from nova import test
from nova.tests.api.openstack import fakes
from nova.tests import fake_instance
from nova.tests.image import fake
CONF = cfg.CONF
FAKE_UUID = fakes.FAKE_UUID
def fake_gen_uuid():
return FAKE_UUID
def return_security_group(context, instance_id, security_group_id):
pass
class SchedulerHintsTestCase(test.TestCase):
def setUp(self):
super(SchedulerHintsTestCase, self).setUp()
self.fake_instance = fakes.stub_instance(1, uuid=FAKE_UUID)
self.app = compute.APIRouterV3(init_only=('servers',
'os-scheduler-hints'))
def test_create_server_without_hints(self):
def fake_create(*args, **kwargs):
self.assertEqual(kwargs['scheduler_hints'], {})
return ([self.fake_instance], '')
self.stubs.Set(nova.compute.api.API, 'create', fake_create)
req = fakes.HTTPRequestV3.blank('/servers')
req.method = 'POST'
req.content_type = 'application/json'
body = {'server': {
'name': 'server_test',
'image_ref': 'cedef40a-ed67-4d10-800e-17455edce175',
'flavor_ref': '1',
}}
req.body = jsonutils.dumps(body)
res = req.get_response(self.app)
self.assertEqual(202, res.status_int)
def test_create_server_with_hints(self):
def fake_create(*args, **kwargs):
self.assertEqual(kwargs['scheduler_hints'], {'a': 'b'})
return ([self.fake_instance], '')
self.stubs.Set(nova.compute.api.API, 'create', fake_create)
req = fakes.HTTPRequestV3.blank('/servers')
req.method = 'POST'
req.content_type = 'application/json'
body = {
'server': {
'name': 'server_test',
'image_ref': 'cedef40a-ed67-4d10-800e-17455edce175',
'flavor_ref': '1',
},
'os-scheduler-hints:scheduler_hints': {'a': 'b'},
}
req.body = jsonutils.dumps(body)
res = req.get_response(self.app)
self.assertEqual(202, res.status_int)
def test_create_server_bad_hints(self):
req = fakes.HTTPRequestV3.blank('/servers')
req.method = 'POST'
req.content_type = 'application/json'
body = {
'server': {
'name': 'server_test',
'image_ref': 'cedef40a-ed67-4d10-800e-17455edce175',
'flavor_ref': '1',
},
'os-scheduler-hints:scheduler_hints': 'here',
}
req.body = jsonutils.dumps(body)
res = req.get_response(self.app)
self.assertEqual(400, res.status_int)
class ServersControllerCreateTest(test.TestCase):
def setUp(self):
"""Shared implementation for tests below that create instance."""
super(ServersControllerCreateTest, self).setUp()
self.flags(verbose=True,
enable_instance_password=True)
self.instance_cache_num = 0
self.instance_cache_by_id = {}
self.instance_cache_by_uuid = {}
ext_info = plugins.LoadedExtensionInfo()
self.controller = servers.ServersController(extension_info=ext_info)
CONF.set_override('extensions_blacklist', 'os-scheduler-hints',
'osapi_v3')
self.no_scheduler_hints_controller = servers.ServersController(
extension_info=ext_info)
def instance_create(context, inst):
inst_type = flavors.get_flavor_by_flavor_id(3)
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
def_image_ref = 'http://localhost/images/%s' % image_uuid
self.instance_cache_num += 1
instance = fake_instance.fake_db_instance(**{
'id': self.instance_cache_num,
'display_name': inst['display_name'] or 'test',
'uuid': FAKE_UUID,
'instance_type': dict(inst_type),
'access_ip_v4': '1.2.3.4',
'access_ip_v6': 'fead::1234',
'image_ref': inst.get('image_ref', def_image_ref),
'user_id': 'fake',
'project_id': 'fake',
'reservation_id': inst['reservation_id'],
"created_at": datetime.datetime(2010, 10, 10, 12, 0, 0),
"updated_at": datetime.datetime(2010, 11, 11, 11, 0, 0),
"config_drive": None,
"progress": 0,
"fixed_ips": [],
"task_state": "",
"vm_state": "",
"root_device_name": inst.get('root_device_name', 'vda'),
})
self.instance_cache_by_id[instance['id']] = instance
self.instance_cache_by_uuid[instance['uuid']] = instance
return instance
def instance_get(context, instance_id):
"""Stub for compute/api create() pulling in instance after
scheduling
"""
return self.instance_cache_by_id[instance_id]
def instance_update(context, uuid, values):
instance = self.instance_cache_by_uuid[uuid]
instance.update(values)
return instance
def server_update(context, instance_uuid, params):
inst = self.instance_cache_by_uuid[instance_uuid]
inst.update(params)
return (inst, inst)
def fake_method(*args, **kwargs):
pass
def project_get_networks(context, user_id):
return dict(id='1', host='localhost')
def queue_get_for(context, *args):
return 'network_topic'
fakes.stub_out_rate_limiting(self.stubs)
fakes.stub_out_key_pair_funcs(self.stubs)
fake.stub_out_image_service(self.stubs)
fakes.stub_out_nw_api(self.stubs)
self.stubs.Set(uuid, 'uuid4', fake_gen_uuid)
self.stubs.Set(db, 'instance_add_security_group',
return_security_group)
self.stubs.Set(db, 'project_get_networks',
project_get_networks)
self.stubs.Set(db, 'instance_create', instance_create)
self.stubs.Set(db, 'instance_system_metadata_update',
fake_method)
self.stubs.Set(db, 'instance_get', instance_get)
self.stubs.Set(db, 'instance_update', instance_update)
self.stubs.Set(rpc, 'cast', fake_method)
self.stubs.Set(db, 'instance_update_and_get_original',
server_update)
self.stubs.Set(rpc, 'queue_get_for', queue_get_for)
self.stubs.Set(manager.VlanManager, 'allocate_fixed_ip',
fake_method)
def _test_create_extra(self, params, no_image=False,
override_controller=None):
image_uuid = 'c905cedb-7281-47e4-8a62-f26bc5fc4c77'
server = dict(name='server_test', image_ref=image_uuid, flavor_ref=2)
if no_image:
server.pop('image_ref', None)
server.update(params)
body = dict(server=server)
req = fakes.HTTPRequestV3.blank('/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
if override_controller:
server = override_controller.create(req, body).obj['server']
else:
server = self.controller.create(req, body).obj['server']
def test_create_instance_with_scheduler_hints_disabled(self):
hints = {'a': 'b'}
params = {'scheduler_hints': hints}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertNotIn('scheduler_hints', kwargs)
# self.assertEqual(kwargs['scheduler_hints'], {})
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params,
override_controller=self.no_scheduler_hints_controller)
def test_create_instance_with_scheduler_hints_enabled(self):
hints = {'a': 'b'}
params = {'scheduler_hints': hints}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['scheduler_hints'], hints)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
class TestServerCreateRequestXMLDeserializer(test.TestCase):
def setUp(self):
super(TestServerCreateRequestXMLDeserializer, self).setUp()
ext_info = plugins.LoadedExtensionInfo()
controller = servers.ServersController(extension_info=ext_info)
self.deserializer = servers.CreateDeserializer(controller)
def test_request_with_scheduler_hints_and_alternate_namespace_prefix(self):
serial_request = """
<ns2:server xmlns:ns2="http://docs.openstack.org/compute/api/v3"
name="new-server-test"
image_ref="1"
flavor_ref="2">
<ns2:metadata><ns2:meta key="hello">world</ns2:meta></ns2:metadata>
<os:scheduler_hints
xmlns:os="http://docs.openstack.org/compute/ext/scheduler-hints/api/v3">
<hypervisor>xen</hypervisor>
<near>eb999657-dd6b-464e-8713-95c532ac3b18</near>
</os:scheduler_hints>
</ns2:server>
"""
request = self.deserializer.deserialize(serial_request)
expected = {
"server": {
'os-scheduler-hints:scheduler_hints': {
'hypervisor': ['xen'],
'near': ['eb999657-dd6b-464e-8713-95c532ac3b18']
},
"name": "new-server-test",
"image_ref": "1",
"flavor_ref": "2",
"metadata": {
"hello": "world"
}
}
}
self.assertEquals(request['body'], expected)
def test_request_with_scheduler_hints(self):
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v3"
xmlns:os-scheduler-hints=
"http://docs.openstack.org/compute/ext/scheduler-hints/api/v3"
name="new-server-test" image_ref="1" flavor_ref="1">
<os-scheduler-hints:scheduler_hints>
<different_host>
7329b667-50c7-46a6-b913-cb2a09dfeee0
</different_host>
<different_host>
f31efb24-34d2-43e1-8b44-316052956a39
</different_host>
</os-scheduler-hints:scheduler_hints>
</server>"""
request = self.deserializer.deserialize(serial_request)
expected = {"server": {
"name": "new-server-test",
"image_ref": "1",
"flavor_ref": "1",
"os-scheduler-hints:scheduler_hints": {
"different_host": [
"7329b667-50c7-46a6-b913-cb2a09dfeee0",
"f31efb24-34d2-43e1-8b44-316052956a39",
]
}
}}
self.assertEquals(request['body'], expected)
| ntt-sic/nova | nova/tests/api/openstack/compute/plugins/v3/test_scheduler_hints.py | Python | apache-2.0 | 12,196 |
from os.path import dirname, join
import subprocess
base = dirname(__file__)
mabot_path = join(base, '..', 'src', 'mabot', 'run.py')
test_path = join(base, 'tests')
subprocess.call(['python', mabot_path, test_path])
import re  # don't kill me, just for preprocessing
with open("regexcif-src.py") as x,open("regexcif.py","w") as w:
y=re.sub("( *)##restart(\n\1pass)?","""
\\1##restart
\\1if len(stack)==0: #no backtrack points
\\1 start+=1
\\1 if start>len(inp):
\\1 return []
\\1 if debug:print("next start")
\\1 inindex,tkindex=start,0
\\1else:
\\1 if debug:print("stack popped")
\\1 inindex,tkindex=stack.pop()
""".strip("\n"),"\n".join(x.read().split("\n")[1:]))
#print(y) #print the code to stdout
#w.write(y) #output the code to regexcif.py
exec(y) #run the code
| CatsAreFluffy/regexcif.py | src/regexcif-launch.py | Python | mit | 590 |
"""setuptools.command.bdist_egg
Build .egg distributions"""
# This module should be kept compatible with Python 2.3
from distutils.errors import DistutilsSetupError
from distutils.dir_util import remove_tree, mkpath
from distutils import log
from types import CodeType
import sys
import os
import marshal
import textwrap
from pkg_resources import get_build_platform, Distribution, ensure_directory
from pkg_resources import EntryPoint
from setuptools.compat import basestring
from setuptools.extension import Library
from setuptools import Command
try:
# Python 2.7 or >=3.2
from sysconfig import get_path, get_python_version
def _get_purelib():
return get_path("purelib")
except ImportError:
from distutils.sysconfig import get_python_lib, get_python_version
def _get_purelib():
return get_python_lib(False)
def strip_module(filename):
if '.' in filename:
filename = os.path.splitext(filename)[0]
if filename.endswith('module'):
filename = filename[:-6]
return filename
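# e.g. strip_module('foomodule.so') -> 'foo'; strip_module('bar.pyd') -> 'bar'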
def write_stub(resource, pyfile):
_stub_template = textwrap.dedent("""
def __bootstrap__():
global __bootstrap__, __loader__, __file__
import sys, pkg_resources, imp
__file__ = pkg_resources.resource_filename(__name__, %r)
__loader__ = None; del __bootstrap__, __loader__
imp.load_dynamic(__name__,__file__)
__bootstrap__()
""").lstrip()
with open(pyfile, 'w') as f:
f.write(_stub_template % resource)
class bdist_egg(Command):
description = "create an \"egg\" distribution"
user_options = [
('bdist-dir=', 'b',
"temporary directory for creating the distribution"),
('plat-name=', 'p', "platform name to embed in generated filenames "
"(default: %s)" % get_build_platform()),
('exclude-source-files', None,
"remove all .py files from the generated egg"),
('keep-temp', 'k',
"keep the pseudo-installation tree around after " +
"creating the distribution archive"),
('dist-dir=', 'd',
"directory to put final built distributions in"),
('skip-build', None,
"skip rebuilding everything (for testing/debugging)"),
]
boolean_options = [
'keep-temp', 'skip-build', 'exclude-source-files'
]
def initialize_options(self):
self.bdist_dir = None
self.plat_name = None
self.keep_temp = 0
self.dist_dir = None
self.skip_build = 0
self.egg_output = None
self.exclude_source_files = None
def finalize_options(self):
ei_cmd = self.ei_cmd = self.get_finalized_command("egg_info")
self.egg_info = ei_cmd.egg_info
if self.bdist_dir is None:
bdist_base = self.get_finalized_command('bdist').bdist_base
self.bdist_dir = os.path.join(bdist_base, 'egg')
if self.plat_name is None:
self.plat_name = get_build_platform()
self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))
if self.egg_output is None:
# Compute filename of the output egg
basename = Distribution(
None, None, ei_cmd.egg_name, ei_cmd.egg_version,
get_python_version(),
self.distribution.has_ext_modules() and self.plat_name
).egg_name()
self.egg_output = os.path.join(self.dist_dir, basename + '.egg')
def do_install_data(self):
# Hack for packages that install data to install's --install-lib
self.get_finalized_command('install').install_lib = self.bdist_dir
site_packages = os.path.normcase(os.path.realpath(_get_purelib()))
old, self.distribution.data_files = self.distribution.data_files, []
for item in old:
if isinstance(item, tuple) and len(item) == 2:
if os.path.isabs(item[0]):
realpath = os.path.realpath(item[0])
normalized = os.path.normcase(realpath)
if normalized == site_packages or normalized.startswith(
site_packages + os.sep
):
item = realpath[len(site_packages) + 1:], item[1]
# XXX else: raise ???
self.distribution.data_files.append(item)
try:
log.info("installing package data to %s" % self.bdist_dir)
self.call_command('install_data', force=0, root=None)
finally:
self.distribution.data_files = old
def get_outputs(self):
return [self.egg_output]
def call_command(self, cmdname, **kw):
"""Invoke reinitialized command `cmdname` with keyword args"""
for dirname in INSTALL_DIRECTORY_ATTRS:
kw.setdefault(dirname, self.bdist_dir)
kw.setdefault('skip_build', self.skip_build)
kw.setdefault('dry_run', self.dry_run)
cmd = self.reinitialize_command(cmdname, **kw)
self.run_command(cmdname)
return cmd
def run(self):
# Generate metadata first
self.run_command("egg_info")
# We run install_lib before install_data, because some data hacks
# pull their data path from the install_lib command.
log.info("installing library code to %s" % self.bdist_dir)
instcmd = self.get_finalized_command('install')
old_root = instcmd.root
instcmd.root = None
if self.distribution.has_c_libraries() and not self.skip_build:
self.run_command('build_clib')
cmd = self.call_command('install_lib', warn_dir=0)
instcmd.root = old_root
all_outputs, ext_outputs = self.get_ext_outputs()
self.stubs = []
to_compile = []
for (p, ext_name) in enumerate(ext_outputs):
filename, ext = os.path.splitext(ext_name)
pyfile = os.path.join(self.bdist_dir, strip_module(filename) +
'.py')
self.stubs.append(pyfile)
log.info("creating stub loader for %s" % ext_name)
if not self.dry_run:
write_stub(os.path.basename(ext_name), pyfile)
to_compile.append(pyfile)
ext_outputs[p] = ext_name.replace(os.sep, '/')
if to_compile:
cmd.byte_compile(to_compile)
if self.distribution.data_files:
self.do_install_data()
# Make the EGG-INFO directory
archive_root = self.bdist_dir
egg_info = os.path.join(archive_root, 'EGG-INFO')
self.mkpath(egg_info)
if self.distribution.scripts:
script_dir = os.path.join(egg_info, 'scripts')
log.info("installing scripts to %s" % script_dir)
self.call_command('install_scripts', install_dir=script_dir,
no_ep=1)
self.copy_metadata_to(egg_info)
native_libs = os.path.join(egg_info, "native_libs.txt")
if all_outputs:
log.info("writing %s" % native_libs)
if not self.dry_run:
ensure_directory(native_libs)
libs_file = open(native_libs, 'wt')
libs_file.write('\n'.join(all_outputs))
libs_file.write('\n')
libs_file.close()
elif os.path.isfile(native_libs):
log.info("removing %s" % native_libs)
if not self.dry_run:
os.unlink(native_libs)
write_safety_flag(
os.path.join(archive_root, 'EGG-INFO'), self.zip_safe()
)
if os.path.exists(os.path.join(self.egg_info, 'depends.txt')):
log.warn(
"WARNING: 'depends.txt' will not be used by setuptools 0.6!\n"
"Use the install_requires/extras_require setup() args instead."
)
if self.exclude_source_files:
self.zap_pyfiles()
# Make the archive
make_zipfile(self.egg_output, archive_root, verbose=self.verbose,
dry_run=self.dry_run, mode=self.gen_header())
if not self.keep_temp:
remove_tree(self.bdist_dir, dry_run=self.dry_run)
# Add to 'Distribution.dist_files' so that the "upload" command works
getattr(self.distribution, 'dist_files', []).append(
('bdist_egg', get_python_version(), self.egg_output))
def zap_pyfiles(self):
log.info("Removing .py files from temporary directory")
for base, dirs, files in walk_egg(self.bdist_dir):
for name in files:
if name.endswith('.py'):
path = os.path.join(base, name)
log.debug("Deleting %s", path)
os.unlink(path)
def zip_safe(self):
safe = getattr(self.distribution, 'zip_safe', None)
if safe is not None:
return safe
log.warn("zip_safe flag not set; analyzing archive contents...")
return analyze_egg(self.bdist_dir, self.stubs)
def gen_header(self):
epm = EntryPoint.parse_map(self.distribution.entry_points or '')
ep = epm.get('setuptools.installation', {}).get('eggsecutable')
if ep is None:
return 'w' # not an eggsecutable, do it the usual way.
if not ep.attrs or ep.extras:
raise DistutilsSetupError(
"eggsecutable entry point (%r) cannot have 'extras' "
"or refer to a module" % (ep,)
)
pyver = sys.version[:3]
pkg = ep.module_name
full = '.'.join(ep.attrs)
base = ep.attrs[0]
basename = os.path.basename(self.egg_output)
header = (
"#!/bin/sh\n"
'if [ `basename $0` = "%(basename)s" ]\n'
'then exec python%(pyver)s -c "'
"import sys, os; sys.path.insert(0, os.path.abspath('$0')); "
"from %(pkg)s import %(base)s; sys.exit(%(full)s())"
'" "$@"\n'
'else\n'
' echo $0 is not the correct name for this egg file.\n'
' echo Please rename it back to %(basename)s and try again.\n'
' exec false\n'
'fi\n'
) % locals()
if not self.dry_run:
mkpath(os.path.dirname(self.egg_output), dry_run=self.dry_run)
f = open(self.egg_output, 'w')
f.write(header)
f.close()
return 'a'
def copy_metadata_to(self, target_dir):
"Copy metadata (egg info) to the target_dir"
# normalize the path (so that a forward-slash in egg_info will
# match using startswith below)
norm_egg_info = os.path.normpath(self.egg_info)
prefix = os.path.join(norm_egg_info, '')
for path in self.ei_cmd.filelist.files:
if path.startswith(prefix):
target = os.path.join(target_dir, path[len(prefix):])
ensure_directory(target)
self.copy_file(path, target)
def get_ext_outputs(self):
"""Get a list of relative paths to C extensions in the output distro"""
all_outputs = []
ext_outputs = []
paths = {self.bdist_dir: ''}
for base, dirs, files in os.walk(self.bdist_dir):
for filename in files:
if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS:
all_outputs.append(paths[base] + filename)
for filename in dirs:
paths[os.path.join(base, filename)] = (paths[base] +
filename + '/')
if self.distribution.has_ext_modules():
build_cmd = self.get_finalized_command('build_ext')
for ext in build_cmd.extensions:
if isinstance(ext, Library):
continue
fullname = build_cmd.get_ext_fullname(ext.name)
filename = build_cmd.get_ext_filename(fullname)
if not os.path.basename(filename).startswith('dl-'):
if os.path.exists(os.path.join(self.bdist_dir, filename)):
ext_outputs.append(filename)
return all_outputs, ext_outputs
NATIVE_EXTENSIONS = dict.fromkeys('.dll .so .dylib .pyd'.split())
def walk_egg(egg_dir):
"""Walk an unpacked egg's contents, skipping the metadata directory"""
walker = os.walk(egg_dir)
base, dirs, files = next(walker)
if 'EGG-INFO' in dirs:
dirs.remove('EGG-INFO')
yield base, dirs, files
for bdf in walker:
yield bdf
def analyze_egg(egg_dir, stubs):
# check for existing flag in EGG-INFO
for flag, fn in safety_flags.items():
if os.path.exists(os.path.join(egg_dir, 'EGG-INFO', fn)):
return flag
if not can_scan():
return False
safe = True
for base, dirs, files in walk_egg(egg_dir):
for name in files:
if name.endswith('.py') or name.endswith('.pyw'):
continue
elif name.endswith('.pyc') or name.endswith('.pyo'):
# always scan, even if we already know we're not safe
safe = scan_module(egg_dir, base, name, stubs) and safe
return safe
def write_safety_flag(egg_dir, safe):
# Write or remove zip safety flag file(s)
for flag, fn in safety_flags.items():
fn = os.path.join(egg_dir, fn)
if os.path.exists(fn):
if safe is None or bool(safe) != flag:
os.unlink(fn)
elif safe is not None and bool(safe) == flag:
f = open(fn, 'wt')
f.write('\n')
f.close()
safety_flags = {
True: 'zip-safe',
False: 'not-zip-safe',
}
def scan_module(egg_dir, base, name, stubs):
"""Check whether module possibly uses unsafe-for-zipfile stuff"""
filename = os.path.join(base, name)
if filename[:-1] in stubs:
return True # Extension module
pkg = base[len(egg_dir) + 1:].replace(os.sep, '.')
module = pkg + (pkg and '.' or '') + os.path.splitext(name)[0]
if sys.version_info < (3, 3):
skip = 8 # skip magic & date
else:
skip = 12 # skip magic & date & file size
f = open(filename, 'rb')
f.read(skip)
code = marshal.load(f)
f.close()
safe = True
symbols = dict.fromkeys(iter_symbols(code))
for bad in ['__file__', '__path__']:
if bad in symbols:
log.warn("%s: module references %s", module, bad)
safe = False
if 'inspect' in symbols:
for bad in [
            'getsource', 'getabsfile', 'getsourcefile', 'getfile',
'getsourcelines', 'findsource', 'getcomments', 'getframeinfo',
'getinnerframes', 'getouterframes', 'stack', 'trace'
]:
if bad in symbols:
log.warn("%s: module MAY be using inspect.%s", module, bad)
safe = False
if '__name__' in symbols and '__main__' in symbols and '.' not in module:
if sys.version[:3] == "2.4": # -m works w/zipfiles in 2.5
log.warn("%s: top-level module may be 'python -m' script", module)
safe = False
return safe
def iter_symbols(code):
"""Yield names and strings used by `code` and its nested code objects"""
for name in code.co_names:
yield name
for const in code.co_consts:
if isinstance(const, basestring):
yield const
elif isinstance(const, CodeType):
for name in iter_symbols(const):
yield name
def can_scan():
if not sys.platform.startswith('java') and sys.platform != 'cli':
# CPython, PyPy, etc.
return True
log.warn("Unable to analyze compiled code on this platform.")
log.warn("Please ask the author to include a 'zip_safe'"
" setting (either True or False) in the package's setup.py")
# Attribute names of options for commands that might need to be convinced to
# install to the egg build directory
INSTALL_DIRECTORY_ATTRS = [
'install_lib', 'install_dir', 'install_data', 'install_base'
]
def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=None,
mode='w'):
"""Create a zip file from all the files under 'base_dir'. The output
zip file will be named 'base_dir' + ".zip". Uses either the "zipfile"
Python module (if available) or the InfoZIP "zip" utility (if installed
and found on the default search path). If neither tool is available,
raises DistutilsExecError. Returns the name of the output zip file.
"""
import zipfile
mkpath(os.path.dirname(zip_filename), dry_run=dry_run)
log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir)
def visit(z, dirname, names):
for name in names:
path = os.path.normpath(os.path.join(dirname, name))
if os.path.isfile(path):
p = path[len(base_dir) + 1:]
if not dry_run:
z.write(path, p)
log.debug("adding '%s'" % p)
if compress is None:
# avoid 2.3 zipimport bug when 64 bits
compress = (sys.version >= "2.4")
compression = [zipfile.ZIP_STORED, zipfile.ZIP_DEFLATED][bool(compress)]
if not dry_run:
z = zipfile.ZipFile(zip_filename, mode, compression=compression)
for dirname, dirs, files in os.walk(base_dir):
visit(z, dirname, files)
z.close()
else:
for dirname, dirs, files in os.walk(base_dir):
visit(None, dirname, files)
return zip_filename
| d3banjan/polyamide | webdev/lib/python2.7/site-packages/setuptools/command/bdist_egg.py | Python | bsd-2-clause | 17,606 |
# -*- coding: utf-8 -*-
# Copyright 2017, 2021 ProjectQ-Framework (www.projectq.ch)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for projectq.ops._command."""
import math
import sys
from copy import deepcopy
import pytest
from projectq import MainEngine
from projectq.cengines import DummyEngine
from projectq.meta import ComputeTag, canonical_ctrl_state
from projectq.ops import BasicGate, CtrlAll, NotMergeable, Rx, _command
from projectq.types import Qubit, Qureg, WeakQubitRef
@pytest.fixture
def main_engine():
return MainEngine(backend=DummyEngine(), engine_list=[DummyEngine()])
def test_command_init(main_engine):
qureg0 = Qureg([Qubit(main_engine, 0)])
qureg1 = Qureg([Qubit(main_engine, 1)])
qureg2 = Qureg([Qubit(main_engine, 2)])
# qureg3 = Qureg([Qubit(main_engine, 3)])
# qureg4 = Qureg([Qubit(main_engine, 4)])
gate = BasicGate()
cmd = _command.Command(main_engine, gate, (qureg0, qureg1, qureg2))
assert cmd.gate == gate
assert cmd.tags == []
expected_tuple = (qureg0, qureg1, qureg2)
for cmd_qureg, expected_qureg in zip(cmd.qubits, expected_tuple):
assert cmd_qureg[0].id == expected_qureg[0].id
# Testing that Qubits are now WeakQubitRef objects
assert type(cmd_qureg[0]) == WeakQubitRef
assert cmd._engine == main_engine
# Test that quregs are ordered if gate has interchangeable qubits:
symmetric_gate = BasicGate()
symmetric_gate.interchangeable_qubit_indices = [[0, 1]]
symmetric_cmd = _command.Command(main_engine, symmetric_gate, (qureg2, qureg1, qureg0))
assert cmd.gate == gate
assert cmd.tags == []
expected_ordered_tuple = (qureg1, qureg2, qureg0)
for cmd_qureg, expected_qureg in zip(symmetric_cmd.qubits, expected_ordered_tuple):
assert cmd_qureg[0].id == expected_qureg[0].id
assert symmetric_cmd._engine == main_engine
def test_command_deepcopy(main_engine):
qureg0 = Qureg([Qubit(main_engine, 0)])
qureg1 = Qureg([Qubit(main_engine, 1)])
gate = BasicGate()
cmd = _command.Command(main_engine, gate, (qureg0,))
cmd.add_control_qubits(qureg1)
cmd.tags.append("MyTestTag")
copied_cmd = deepcopy(cmd)
# Test that deepcopy gives same cmd
assert copied_cmd.gate == gate
assert copied_cmd.tags == ["MyTestTag"]
assert len(copied_cmd.qubits) == 1
assert copied_cmd.qubits[0][0].id == qureg0[0].id
assert len(copied_cmd.control_qubits) == 1
assert copied_cmd.control_qubits[0].id == qureg1[0].id
# Engine should not be deepcopied but a reference:
assert id(copied_cmd.engine) == id(main_engine)
# Test that deepcopy is actually a deepcopy
cmd.tags = ["ChangedTag"]
assert copied_cmd.tags == ["MyTestTag"]
    cmd.control_qubits[0].id = 10
assert copied_cmd.control_qubits[0].id == qureg1[0].id
cmd.gate = "ChangedGate"
assert copied_cmd.gate == gate
def test_command_get_inverse(main_engine):
qubit = main_engine.allocate_qubit()
ctrl_qubit = main_engine.allocate_qubit()
cmd = _command.Command(main_engine, Rx(0.5), (qubit,))
cmd.add_control_qubits(ctrl_qubit)
cmd.tags = [ComputeTag()]
inverse_cmd = cmd.get_inverse()
assert inverse_cmd.gate == Rx(-0.5 + 4 * math.pi)
assert len(cmd.qubits) == len(inverse_cmd.qubits)
assert cmd.qubits[0][0].id == inverse_cmd.qubits[0][0].id
assert id(cmd.qubits[0][0]) != id(inverse_cmd.qubits[0][0])
assert len(cmd.control_qubits) == len(inverse_cmd.control_qubits)
assert cmd.control_qubits[0].id == inverse_cmd.control_qubits[0].id
assert id(cmd.control_qubits[0]) != id(inverse_cmd.control_qubits[0])
assert cmd.tags == inverse_cmd.tags
assert id(cmd.tags[0]) != id(inverse_cmd.tags[0])
assert id(cmd.engine) == id(inverse_cmd.engine)
def test_command_get_merged(main_engine):
qubit = main_engine.allocate_qubit()
ctrl_qubit = main_engine.allocate_qubit()
cmd = _command.Command(main_engine, Rx(0.5), (qubit,))
cmd.tags = ["TestTag"]
cmd.add_control_qubits(ctrl_qubit)
# Merge two commands
cmd2 = _command.Command(main_engine, Rx(0.5), (qubit,))
cmd2.add_control_qubits(ctrl_qubit)
cmd2.tags = ["TestTag"]
merged_cmd = cmd.get_merged(cmd2)
expected_cmd = _command.Command(main_engine, Rx(1.0), (qubit,))
expected_cmd.add_control_qubits(ctrl_qubit)
expected_cmd.tags = ["TestTag"]
assert merged_cmd == expected_cmd
# Don't merge commands as different control qubits
cmd3 = _command.Command(main_engine, Rx(0.5), (qubit,))
cmd3.tags = ["TestTag"]
with pytest.raises(NotMergeable):
cmd.get_merged(cmd3)
# Don't merge commands as different tags
cmd4 = _command.Command(main_engine, Rx(0.5), (qubit,))
cmd4.add_control_qubits(ctrl_qubit)
with pytest.raises(NotMergeable):
cmd.get_merged(cmd4)
def test_command_is_identity(main_engine):
qubit = main_engine.allocate_qubit()
qubit2 = main_engine.allocate_qubit()
cmd = _command.Command(main_engine, Rx(0.0), (qubit,))
cmd2 = _command.Command(main_engine, Rx(0.5), (qubit2,))
inverse_cmd = cmd.get_inverse()
inverse_cmd2 = cmd2.get_inverse()
assert inverse_cmd.gate.is_identity()
assert cmd.gate.is_identity()
assert not inverse_cmd2.gate.is_identity()
assert not cmd2.gate.is_identity()
def test_command_order_qubits(main_engine):
qubit0 = Qureg([Qubit(main_engine, 0)])
qubit1 = Qureg([Qubit(main_engine, 1)])
qubit2 = Qureg([Qubit(main_engine, 2)])
qubit3 = Qureg([Qubit(main_engine, 3)])
qubit4 = Qureg([Qubit(main_engine, 4)])
qubit5 = Qureg([Qubit(main_engine, 5)])
gate = BasicGate()
gate.interchangeable_qubit_indices = [[0, 4, 5], [1, 2]]
input_tuple = (qubit4, qubit5, qubit3, qubit2, qubit1, qubit0)
expected_tuple = (qubit0, qubit3, qubit5, qubit2, qubit1, qubit4)
cmd = _command.Command(main_engine, gate, input_tuple)
for ordered_qubit, expected_qubit in zip(cmd.qubits, expected_tuple):
assert ordered_qubit[0].id == expected_qubit[0].id
def test_command_interchangeable_qubit_indices(main_engine):
gate = BasicGate()
gate.interchangeable_qubit_indices = [[0, 4, 5], [1, 2]]
qubit0 = Qureg([Qubit(main_engine, 0)])
qubit1 = Qureg([Qubit(main_engine, 1)])
qubit2 = Qureg([Qubit(main_engine, 2)])
qubit3 = Qureg([Qubit(main_engine, 3)])
qubit4 = Qureg([Qubit(main_engine, 4)])
qubit5 = Qureg([Qubit(main_engine, 5)])
input_tuple = (qubit4, qubit5, qubit3, qubit2, qubit1, qubit0)
cmd = _command.Command(main_engine, gate, input_tuple)
assert (
cmd.interchangeable_qubit_indices
== [
[0, 4, 5],
[1, 2],
]
or cmd.interchangeable_qubit_indices == [[1, 2], [0, 4, 5]]
)
@pytest.mark.parametrize(
'state',
[0, 1, '0', '1', CtrlAll.One, CtrlAll.Zero],
ids=['int(0)', 'int(1)', 'str(0)', 'str(1)', 'CtrlAll.One', 'CtrlAll.Zero'],
)
def test_commmand_add_control_qubits_one(main_engine, state):
qubit0 = Qureg([Qubit(main_engine, 0)])
qubit1 = Qureg([Qubit(main_engine, 1)])
cmd = _command.Command(main_engine, Rx(0.5), (qubit0,))
cmd.add_control_qubits(qubit1, state=state)
assert cmd.control_qubits[0].id == 1
assert cmd.control_state == canonical_ctrl_state(state, 1)
with pytest.raises(ValueError):
cmd.add_control_qubits(qubit0[0])
@pytest.mark.parametrize(
'state',
[0, 1, 2, 3, '00', '01', '10', '11', CtrlAll.One, CtrlAll.Zero],
ids=[
'int(0)',
'int(1)',
'int(2)',
'int(3)',
'str(00)',
'str(01)',
'str(10)',
'str(1)',
'CtrlAll.One',
'CtrlAll.Zero',
],
)
def test_commmand_add_control_qubits_two(main_engine, state):
qubit0 = Qureg([Qubit(main_engine, 0)])
qubit1 = Qureg([Qubit(main_engine, 1)])
qubit2 = Qureg([Qubit(main_engine, 2)])
qubit3 = Qureg([Qubit(main_engine, 3)])
cmd = _command.Command(main_engine, Rx(0.5), (qubit0,), qubit1)
cmd.add_control_qubits(qubit2 + qubit3, state)
assert cmd.control_qubits[0].id == 1
assert cmd.control_state == '1' + canonical_ctrl_state(state, 2)
def test_command_all_qubits(main_engine):
qubit0 = Qureg([Qubit(main_engine, 0)])
qubit1 = Qureg([Qubit(main_engine, 1)])
cmd = _command.Command(main_engine, Rx(0.5), (qubit0,))
cmd.add_control_qubits(qubit1)
all_qubits = cmd.all_qubits
assert all_qubits[0][0].id == 1
assert all_qubits[1][0].id == 0
def test_command_engine(main_engine):
qubit0 = Qureg([Qubit("fake_engine", 0)])
qubit1 = Qureg([Qubit("fake_engine", 1)])
cmd = _command.Command("fake_engine", Rx(0.5), (qubit0,))
cmd.add_control_qubits(qubit1)
assert cmd.engine == "fake_engine"
cmd.engine = main_engine
assert id(cmd.engine) == id(main_engine)
assert id(cmd.control_qubits[0].engine) == id(main_engine)
assert id(cmd.qubits[0][0].engine) == id(main_engine)
# Avoid raising exception upon Qubit destructions
qubit0[0].id = -1
qubit1[0].id = -1
def test_command_comparison(main_engine):
qubit = Qureg([Qubit(main_engine, 0)])
ctrl_qubit = Qureg([Qubit(main_engine, 1)])
cmd1 = _command.Command(main_engine, Rx(0.5), (qubit,))
cmd1.tags = ["TestTag"]
cmd1.add_control_qubits(ctrl_qubit)
# Test equality
cmd2 = _command.Command(main_engine, Rx(0.5), (qubit,))
cmd2.tags = ["TestTag"]
cmd2.add_control_qubits(ctrl_qubit)
assert cmd2 == cmd1
# Test not equal because of tags
cmd3 = _command.Command(main_engine, Rx(0.5), (qubit,))
cmd3.tags = ["TestTag", "AdditionalTag"]
cmd3.add_control_qubits(ctrl_qubit)
assert not cmd3 == cmd1
# Test not equal because of control qubit
cmd4 = _command.Command(main_engine, Rx(0.5), (qubit,))
cmd4.tags = ["TestTag"]
assert not cmd4 == cmd1
# Test not equal because of qubit
qubit2 = Qureg([Qubit(main_engine, 2)])
cmd5 = _command.Command(main_engine, Rx(0.5), (qubit2,))
cmd5.tags = ["TestTag"]
cmd5.add_control_qubits(ctrl_qubit)
assert cmd5 != cmd1
# Test not equal because of engine
cmd6 = _command.Command("FakeEngine", Rx(0.5), (qubit,))
cmd6.tags = ["TestTag"]
cmd6.add_control_qubits(ctrl_qubit)
assert cmd6 != cmd1
def test_command_str(main_engine):
qubit = Qureg([Qubit(main_engine, 0)])
ctrl_qubit = Qureg([Qubit(main_engine, 1)])
cmd = _command.Command(main_engine, Rx(0.5 * math.pi), (qubit,))
cmd.tags = ["TestTag"]
cmd.add_control_qubits(ctrl_qubit)
cmd2 = _command.Command(main_engine, Rx(0.5 * math.pi), (qubit,))
if sys.version_info.major == 3:
assert cmd.to_string(symbols=False) == "CRx(1.570796326795) | ( Qureg[1], Qureg[0] )"
assert str(cmd2) == "Rx(1.570796326795) | Qureg[0]"
else:
assert cmd.to_string(symbols=False) == "CRx(1.5707963268) | ( Qureg[1], Qureg[0] )"
assert str(cmd2) == "Rx(1.5707963268) | Qureg[0]"
def test_command_to_string(main_engine):
qubit = Qureg([Qubit(main_engine, 0)])
ctrl_qubit = Qureg([Qubit(main_engine, 1)])
cmd = _command.Command(main_engine, Rx(0.5 * math.pi), (qubit,))
cmd.tags = ["TestTag"]
cmd.add_control_qubits(ctrl_qubit)
cmd2 = _command.Command(main_engine, Rx(0.5 * math.pi), (qubit,))
assert cmd.to_string(symbols=True) == u"CRx(0.5π) | ( Qureg[1], Qureg[0] )"
assert cmd2.to_string(symbols=True) == u"Rx(0.5π) | Qureg[0]"
if sys.version_info.major == 3:
assert cmd.to_string(symbols=False) == "CRx(1.570796326795) | ( Qureg[1], Qureg[0] )"
assert cmd2.to_string(symbols=False) == "Rx(1.570796326795) | Qureg[0]"
else:
assert cmd.to_string(symbols=False) == "CRx(1.5707963268) | ( Qureg[1], Qureg[0] )"
assert cmd2.to_string(symbols=False) == "Rx(1.5707963268) | Qureg[0]"
| ProjectQ-Framework/ProjectQ | projectq/ops/_command_test.py | Python | apache-2.0 | 12,405 |
'''
Windows file chooser
--------------------
'''
from plyer_lach.facades import FileChooser
from win32com.shell import shell, shellcon
import os
import win32gui
import win32con
import pywintypes
class Win32FileChooser(object):
'''A native implementation of NativeFileChooser using the
Win32 API on Windows.
Not Implemented features (all dialogs):
* preview
* icon
Not implemented features (in directory selection only - it's limited
by Windows itself):
* preview
* window-icon
* multiple
* show_hidden
* filters
* path
'''
path = None
multiple = False
filters = []
preview = False
title = None
icon = None
show_hidden = False
def __init__(self, **kwargs):
# Simulate Kivy's behavior
for i in kwargs:
setattr(self, i, kwargs[i])
def run(self):
try:
if mode != "dir":
args = {}
if self.path:
args["InitialDir"] = os.path.dirname(self.path)
                    path = os.path.splitext(os.path.basename(self.path))
                    args["File"] = path[0]
                    args["DefExt"] = path[1] and path[1][1:]  # extension without the dot
args["Title"] = self.title if self.title else "Pick a file..."
args["CustomFilter"] = 'Other file types\x00*.*\x00'
args["FilterIndex"] = 1
filters = ""
for f in self.filters:
if type(f) == str:
filters += (f + "\x00") * 2
else:
filters += f[0] + "\x00" + ";".join(f[1:]) + "\x00"
args["Filter"] = filters
flags = (win32con.OFN_EXTENSIONDIFFERENT |
win32con.OFN_OVERWRITEPROMPT)
if self.multiple:
                    flags |= win32con.OFN_ALLOWMULTIPLE | win32con.OFN_EXPLORER
if self.show_hidden:
flags |= win32con.OFN_FORCESHOWHIDDEN
args["Flags"] = flags
if self.mode == "open":
self.fname, _, _ = win32gui.GetOpenFileNameW(**args)
elif self.mode == "save":
self.fname, _, _ = win32gui.GetSaveFileNameW(**args)
if self.fname:
if self.multiple:
seq = str(self.fname).split("\x00")
dir_n, base_n = seq[0], seq[1:]
self.selection = [os.path.join(dir_n, i)
for i in base_n]
else:
self.selection = str(self.fname).split("\x00")
else:
# From http://goo.gl/UDqCqo
pidl, display_name, image_list = shell.SHBrowseForFolder(
win32gui.GetDesktopWindow(),
None,
self.title if self.title else "Pick a folder...",
0, None, None
)
self.selection = [str(shell.SHGetPathFromIDList(pidl))]
return self.selection
except (RuntimeError, pywintypes.error):
return None
class WinFileChooser(FileChooser):
    '''FileChooser implementation for Windows, using win32all.
'''
def _file_selection_dialog(self, **kwargs):
return Win32FileChooser(**kwargs).run()
def instance():
return WinFileChooser()
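# Usage sketch (hypothetical values; needs Windows with pywin32 installed):
# chooser = instance()
# paths = chooser._file_selection_dialog(mode="open",
#                                        title="Pick a file...")
# `paths` is a list of the selected paths, or None if the dialog failed
# or was cancelled.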
| locksmith47/turing-sim-kivy | src/plyer_lach/platforms/win/filechooser.py | Python | mit | 3,452 |
"""
Copyright 2008 Free Software Foundation, Inc.
This file is part of GNU Radio
GNU Radio Companion is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
GNU Radio Companion is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
"""
from lxml import etree
from . import odict
xml_failures = {}
class XMLSyntaxError(Exception):
def __init__(self, error_log):
self._error_log = error_log
xml_failures[error_log.last_error.filename] = error_log
def __str__(self):
return '\n'.join(map(str, self._error_log.filter_from_errors()))
def validate_dtd(xml_file, dtd_file=None):
"""
Validate an xml file against its dtd.
Args:
xml_file: the xml file
dtd_file: the optional dtd file
@throws Exception validation fails
"""
#perform parsing, use dtd validation if dtd file is not specified
try:
parser = etree.XMLParser(dtd_validation=not dtd_file)
xml = etree.parse(xml_file, parser=parser)
except etree.LxmlError:
pass
if parser.error_log:
raise XMLSyntaxError(parser.error_log)
# perform dtd validation if the dtd file is specified
if not dtd_file:
return
try:
dtd = etree.DTD(dtd_file)
if not dtd.validate(xml.getroot()):
raise XMLSyntaxError(dtd.error_log)
except etree.LxmlError:
raise XMLSyntaxError(dtd.error_log)
def from_file(xml_file):
"""
Create nested data from an xml file using the from xml helper.
Args:
xml_file: the xml file path
Returns:
the nested data
"""
xml = etree.parse(xml_file).getroot()
return _from_file(xml)
def _from_file(xml):
"""
    Recursively parse the xml tree into nested data format.
Args:
xml: the xml tree
Returns:
the nested data
"""
tag = xml.tag
if not len(xml):
return odict({tag: xml.text or ''}) #store empty tags (text is None) as empty string
nested_data = odict()
for elem in xml:
key, value = _from_file(elem).items()[0]
if nested_data.has_key(key): nested_data[key].append(value)
else: nested_data[key] = [value]
#delistify if the length of values is 1
for key, values in nested_data.iteritems():
if len(values) == 1: nested_data[key] = values[0]
return odict({tag: nested_data})
def to_file(nested_data, xml_file):
"""
Write an xml file and use the to xml helper method to load it.
Args:
nested_data: the nested data
xml_file: the xml file path
"""
xml = _to_file(nested_data)[0]
open(xml_file, 'w').write(etree.tostring(xml, xml_declaration=True, pretty_print=True))
def _to_file(nested_data):
"""
    Recursively parse the nested data into xml tree format.
Args:
nested_data: the nested data
Returns:
the xml tree filled with child nodes
"""
nodes = list()
for key, values in nested_data.iteritems():
#listify the values if not a list
if not isinstance(values, (list, set, tuple)):
values = [values]
for value in values:
node = etree.Element(key)
if isinstance(value, (str, unicode)): node.text = value
else: node.extend(_to_file(value))
nodes.append(node)
return nodes
if __name__ == '__main__':
"""Use the main method to test parse xml's functions."""
pass
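    # Minimal round-trip sketch (hypothetical file path; assumes the odict
    # imported above can be called with a plain dict, as done in _from_file).
    import os
    import tempfile
    demo = odict({'block': odict({'name': 'test'})})
    demo_file = os.path.join(tempfile.gettempdir(), 'parse_xml_demo.xml')
    to_file(demo, demo_file)
    print(from_file(demo_file))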
| ambikeshwar1991/gnuradio-3.7.4 | grc/base/ParseXML.py | Python | gpl-3.0 | 3,968 |
# -*- coding: utf-8 -*-
# Copyright(C) 2014 Vincent A
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
from weboob.tools.backend import BaseBackend
from weboob.capabilities.paste import BasePaste
from weboob.tools.capabilities.paste import BasePasteBackend
from weboob.tools.capabilities.paste import image_mime
import re
from .browser import UnseeBrowser
__all__ = ['UnseeBackend']
class UnPaste(BasePaste):
@classmethod
def id2url(cls, id):
return 'https://unsee.cc/%s' % id
class UnseeBackend(BaseBackend, BasePasteBackend):
NAME = 'unsee'
DESCRIPTION = u'unsee.cc expiring image hosting'
MAINTAINER = u'Vincent A'
EMAIL = 'dev@indigo.re'
LICENSE = 'AGPLv3+'
VERSION = '0.i'
BROWSER = UnseeBrowser
EXPIRATIONS = {3600: 'hour', 86400: 'day', 86400 * 7: 'week'}
def can_post(self, contents, title=None, public=None, max_age=None):
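        # Contents are expected to be base64-encoded image data: anything
        # containing characters outside the base64 alphabet (plus
        # whitespace) is rejected outright.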
if re.search(r'[^a-zA-Z0-9=+/\s]', contents):
return 0
elif max_age is not None and not self.get_closest_expiration(max_age):
return 0
else:
mime = image_mime(contents, ('gif', 'jpeg', 'png'))
return 20 * int(mime is not None)
def get_paste(self, id):
paste = UnPaste(id)
paste.contents = self.browser.get_image(id).encode('base64')
return paste
def new_paste(self, *a, **kw):
return UnPaste(*a, **kw)
def post_paste(self, paste, max_age=None):
if max_age is None:
max_code = 'week'
else:
max_code = self.EXPIRATIONS[self.get_closest_expiration(max_age)]
d = self.browser.post_image(paste.title, paste.contents.decode('base64'), max_code)
paste.id = d['id']
return paste
| yannrouillard/weboob | modules/unsee/backend.py | Python | agpl-3.0 | 2,379 |
def formData(each):
    # Normalise a single time string like '2-34' or '2:34' to '2.34'.
    if '-' in each:
        sep = '-'
    elif ':' in each:
        sep = ':'
    else:
        return each
    (m, s) = each.split(sep)
    return m + '.' + s
try:
    with open('sara.txt', 'r') as sara:
        sara_f = sara.readline()
        # the line holds comma-separated times, so split before sanitising
        sara_s = [formData(t.strip()) for t in sara_f.split(',')]
        print(sara_s)
except IOError as ior:
    print('cannot open file: ' + str(ior))
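# Example (hypothetical sara.txt line): "2-34,3:21,2.34"
# -> ['2.34', '3.21', '2.34']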
| mayaobei/funnyTest | pyhton/HeadFirstPython/Chapter/cocah.py | Python | gpl-3.0 | 426 |
from gamtools import segregation, cosegregation
import io
from numpy.testing import assert_array_equal, assert_array_almost_equal
import pytest
import numpy as np
try:
from unittest.mock import patch
except ImportError:
from mock import patch
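# The fixtures below are GAM segregation tables: each row is a genomic
# window, each lettered column a sample (nuclear profile), and a 1 marks
# the window as detected in that sample (our reading of the fixtures).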
fixture_window1_only = io.StringIO(
u"""chrom start stop A B C D E F G H I J
chr1 0 50000 0 0 0 1 1 1 1 1 1 1
chr1 50000 100000 0 0 0 0 0 0 0 0 0 0
""")
data_window1_only = segregation.open_segregation(fixture_window1_only)
fixture_window2_only = io.StringIO(
u"""chrom start stop A B C D E F G H I J
chr1 0 50000 0 0 0 0 0 0 0 0 0 0
chr1 50000 100000 0 1 1 0 0 0 1 1 1 1
""")
data_window2_only = segregation.open_segregation(fixture_window2_only)
fixture_region_a = io.StringIO(
u"""chrom start stop A B C D E F G H I J K
chr1 0 50000 1 1 1 1 1 1 0 0 0 0 0
chr1 50000 100000 0 0 0 1 1 1 1 1 0 0 0
chr1 100000 150000 0 0 0 0 0 1 1 0 1 1 0
""")
data_region_a = segregation.open_segregation(fixture_region_a)
fixture_region_b = io.StringIO(
u"""chrom start stop A B C D E F G H I J K
chr2 0 50000 0 0 0 0 1 1 1 0 0 0 0
chr2 50000 100000 0 0 0 0 0 1 1 0 1 0 0
chr2 100000 150000 0 0 0 0 0 0 0 1 1 0 1
""")
data_region_b = segregation.open_segregation(fixture_region_b)
fixture_region_c = io.StringIO(
u"""chrom start stop A B C D E F G H I J K
chr3 0 50000 0 0 0 0 1 1 1 0 0 0 0
chr3 50000 100000 0 0 0 0 0 1 1 0 1 0 0
""")
data_region_c = segregation.open_segregation(fixture_region_c)
fixture_invalid_data = io.StringIO(
u"""chrom start stop A B C D E F G H I J K
chr3 0 50000 0 0 0 0 1 2 1 0 0 0 0
chr3 50000 100000 0 0 0 0 0 1 1 0 1 0 0
""")
data_invalid_data = segregation.open_segregation(fixture_invalid_data)
#########################################
#
# segregation.cosegregation_frequency tests
#
#########################################
def test_cosegregation_one_region():
segregation_freqs = cosegregation.get_cosegregation_from_regions(data_region_a)
assert_array_equal(segregation_freqs, np.array([[ 6., 3., 1.],
[ 3., 5., 2.],
[ 1., 2., 4.]]))
def test_cosegregation_two_regions():
segregation_freqs = cosegregation.get_cosegregation_from_regions(data_region_a,
data_region_b)
assert_array_equal(segregation_freqs, np.array([[ 2., 1., 0.],
[ 3., 2., 1.],
[ 2., 3., 1.]]))
def test_cosegregation_three_regions():
segregation_freqs = cosegregation.get_cosegregation_from_regions(data_region_c,
data_region_c,
data_region_c)
assert_array_equal(segregation_freqs, np.array([[[ 3., 2. ],
[ 2., 2. ]],
[[ 2., 2. ],
[ 2., 3. ]]]))
def test_cosegregation_missing_windows():
segregation_freqs = cosegregation.get_cosegregation_from_regions(data_window1_only,
data_window2_only)
assert_array_equal(segregation_freqs, np.array([[ 0., 4.],
[ 0., 0.]]))
def test_cosegregation_invalid_data():
with pytest.raises(cosegregation.InvalidDataError):
cosegregation.get_cosegregation_from_regions(data_invalid_data)
def test_cosegregation_min():
cosegregation_res = cosegregation.get_cosegregation_from_regions(data_region_i, data_region_k)
assert_array_equal(cosegregation_res, np.array([[ 0.0 ]]))
#########################################
#
# segregation.linkage tests
#
#########################################
fixture_region_d = io.StringIO(
u"""chrom start stop A B C D E F G H I J K L M N O P Q R S T
chr3 0 50000 1 1 1 1 1 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0
""")
data_region_d = segregation.open_segregation(fixture_region_d)
fixture_region_e = io.StringIO(
u"""chrom start stop A B C D E F G H I J K L M N O P Q R S T
chr3 0 50000 0 1 1 1 1 1 0 0 0 0 0 1 1 1 1 1 0 0 0 0
""")
data_region_e = segregation.open_segregation(fixture_region_e)
fixture_region_f = io.StringIO(
u"""chrom start stop A B C D E F G H I J K L M N O P Q R S T
chr3 0 50000 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0
""")
data_region_f = segregation.open_segregation(fixture_region_f)
fixture_region_g = io.StringIO(
u"""chrom start stop A B C D E F G H I J K L M N O P Q R S T
chr3 0 50000 1 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 1 1 1 1
""")
data_region_g = segregation.open_segregation(fixture_region_g)
fixture_region_h = io.StringIO(
u"""chrom start stop A B C D E F G H I J K L M N O P Q R S T
chr3 0 50000 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
""")
data_region_h = segregation.open_segregation(fixture_region_h)
fixture_region_i = io.StringIO(
u"""chrom start stop A B C D E F G H I J K L M N O P Q R S T
chr3 0 50000 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1
""")
data_region_i = segregation.open_segregation(fixture_region_i)
fixture_region_j = io.StringIO(
u"""chrom start stop A B C D E F G H I J K L M N O P Q R S T
chr3 0 50000 1 1 1 1 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0
""")
data_region_j = segregation.open_segregation(fixture_region_j)
fixture_region_k = io.StringIO(
u"""chrom start stop A B C D E F G H I J K L M N O P Q R S T
chr3 0 50000 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0
""")
data_region_k = segregation.open_segregation(fixture_region_k)
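# Linkage here follows D = p_xy - p_x * p_y. For regions d and e above:
# p_d = p_e = 10/20 = 0.5 and p_de = 8/20 = 0.4, giving
# D = 0.4 - 0.25 = 0.15, which is what test_linkage_positive asserts.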
def test_linkage_positive():
linkage_res = cosegregation.get_linkage_from_regions(data_region_d, data_region_e)
assert_array_almost_equal(linkage_res, np.array([[ 0.15 ]]))
def test_linkage_zero():
linkage_res = cosegregation.get_linkage_from_regions(data_region_d, data_region_f)
assert_array_almost_equal(linkage_res, np.array([[ 0.0 ]]))
def test_linkage_negative():
linkage_res = cosegregation.get_linkage_from_regions(data_region_d, data_region_g)
assert_array_almost_equal(linkage_res, np.array([[ -0.15 ]]))
def test_linkage_min():
linkage_res = cosegregation.get_linkage_from_regions(data_region_i, data_region_k)
assert_array_almost_equal(linkage_res, np.array([[ -0.25 ]]))
# Make sure we don't fail the test because of the warning message
@patch('gamtools.cosegregation.warnings.warn')
def test_3d_linkage_positive(mock_warnings):
linkage_res = cosegregation.get_linkage_from_regions(data_region_d,
data_region_f,
data_region_i)
assert_array_almost_equal(linkage_res, np.array([[[ 0.125 ]]]))
# Make sure we don't fail the test because of the warning message
@patch('gamtools.cosegregation.warnings.warn')
def test_3d_linkage_zero(mock_warnings):
linkage_res = cosegregation.get_linkage_from_regions(data_region_d,
data_region_e,
data_region_f)
assert_array_almost_equal(linkage_res, np.array([[[ 0.0 ]]]))
# Make sure we don't fail the test because of the warning message
@patch('gamtools.cosegregation.warnings.warn')
def test_3d_linkage_negative(mock_warnings):
linkage_res = cosegregation.get_linkage_from_regions(data_region_f,
data_region_j,
data_region_k)
assert_array_almost_equal(linkage_res, np.array([[[ -0.1 ]]]))
@patch('gamtools.cosegregation.warnings.warn')
def test_3d_linkage_warning(mock_warnings):
linkage_res = cosegregation.get_linkage_from_regions(data_region_f,
data_region_j,
data_region_k)
assert mock_warnings.called
def test_linkage_not_detected():
linkage_res = cosegregation.get_linkage_from_regions(data_region_d, data_region_h)
assert_array_almost_equal(linkage_res, np.array([[ np.nan ]]))
def test_linkage_multiple_windows():
linkage_res = cosegregation.get_linkage_from_regions(data_region_c)
assert_array_almost_equal(linkage_res, np.array([[ 0.198347, 0.107438 ],
[ 0.107438, 0.198347 ]]))
def test_linkage_invalid_data():
with pytest.raises(cosegregation.InvalidDataError):
cosegregation.get_linkage_from_regions(data_invalid_data)
#########################################
#
# segregation.dprime tests
#
#########################################
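# D' normalises D by its theoretical extreme: D' = D / D_max, where for
# D > 0, D_max = min(p_x * (1 - p_y), p_y * (1 - p_x)). For regions d and
# e: 0.15 / 0.25 = 0.6, matching test_dprime_positive (our derivation,
# not taken from the gamtools documentation).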
def test_dprime_positive():
dprime_res = cosegregation.get_dprime_from_regions(data_region_d, data_region_e)
assert_array_almost_equal(dprime_res, np.array([[ 0.6 ]]))
def test_dprime_zero():
dprime_res = cosegregation.get_dprime_from_regions(data_region_d, data_region_f)
assert_array_almost_equal(dprime_res, np.array([[ 0.0 ]]))
def test_dprime_negative():
dprime_res = cosegregation.get_dprime_from_regions(data_region_d, data_region_g)
assert_array_almost_equal(dprime_res, np.array([[ -0.6 ]]))
def test_dprime_max():
dprime_res = cosegregation.get_dprime_from_regions(data_region_d, data_region_d)
assert_array_almost_equal(dprime_res, np.array([[ 1.0 ]]))
def test_dprime_min():
dprime_res = cosegregation.get_dprime_from_regions(data_region_i, data_region_k)
assert_array_almost_equal(dprime_res, np.array([[ -1.0 ]]))
def test_dprime_not_detected():
dprime_res = cosegregation.get_dprime_from_regions(data_region_d, data_region_h)
assert_array_almost_equal(dprime_res, np.array([[ np.nan ]]))
def test_dprime_multiple_windows():
dprime_res = cosegregation.get_dprime_from_regions(data_region_c)
assert_array_almost_equal(dprime_res, np.array([[ 1.0, 0.541667 ],
[ 0.541667, 1.0 ]]))
def test_dprime_invalid_data():
with pytest.raises(cosegregation.InvalidDataError):
cosegregation.get_dprime_from_regions(data_invalid_data)
| pombo-lab/gamtools | lib/gamtools/tests/test_cosegregation.py | Python | apache-2.0 | 10,531 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import logging
from scripttest import TestFileEnvironment
from migrate.tests.fixture.pathed import *
log = logging.getLogger(__name__)
class Shell(Pathed):
"""Base class for command line tests"""
def setUp(self):
super(Shell, self).setUp()
migrate_path = os.path.dirname(sys.executable)
# PATH to migrate development script folder
log.debug('PATH for ScriptTest: %s', migrate_path)
self.env = TestFileEnvironment(
base_path=os.path.join(self.temp_usable_dir, 'env'),
)
def run_version(self, repos_path):
result = self.env.run('migrate version %s' % repos_path)
return int(result.stdout.strip())
def run_db_version(self, url, repos_path):
result = self.env.run('migrate db_version %s %s' % (url, repos_path))
return int(result.stdout.strip())
| odubno/microblog | venv/lib/python2.7/site-packages/migrate/tests/fixture/shell.py | Python | bsd-3-clause | 926 |
# -*- coding: utf-8 -*-
'''
Sample application for the Python library for controlling the Teufel Raumfeld system
@author: Patrick Maier
@contact: mail@maierp.de
Webpage: https://github.com/maierp/pyraumfeld
Based on python-raumfeld by Thomas Feldmann:
https://github.com/tfeldmann/python-raumfeld
'''
import raumfeld
from time import gmtime, strftime
import logging
def dataHasBeenUpdated():
print("########## " + strftime("%Y-%m-%d %H:%M:%S", gmtime()) + " ##########")
print("Zones:")
for zone in raumfeld.getZones():
print("UDN: " + zone.UDN + " Name: " + zone.Name + " Location: " + zone.Location)
for room in zone.getRooms():
print("\tUDN: " + room.UDN + " Name: " + room.Name)
for renderer in room.getRenderers():
print("\t\tUDN: " + renderer.UDN + " Name: " + renderer.Name + " Location: " + renderer.Location + " Volume: " + unicode(renderer.volume))
print("Unassigned Rooms:")
for room in raumfeld.getUnassignedRooms():
print("Name: " + room.Name + " UDN: " + room.UDN)
for renderer in room.getRenderers():
print("\tUDN: " + renderer.UDN + " Name: " + renderer.Name + " Location: " + renderer.Location)
print("########## This gets updated when the config changes. To QUIT press any key... ##########")
#kueche = raumfeld.getRoomsByName(u'Küche')[0]
#kuecheZone = raumfeld.getZoneWithRoomName(u'Wohnzimmer')[0]
#status = kuecheZone.mute
#print("Mute: {0}".format(status))
#kueche.mute = not status
#kuecheZone.play()
#print("Volume: {0}".format(kuecheZone.volume))
#raumfeld.connectRoomToZone(kueche.UDN)
raumfeld.setLogging(logging.WARN)
raumfeld.registerChangeCallback(dataHasBeenUpdated)
raumfeld.init() # or with the host IP: raumfeld.init("192.168.0.10")
print("Host URL: " +raumfeld.hostBaseURL)
# To QUIT press any key...
raw_input()
| maierp/PyRaumfeld | PyRaumfeldSample.py | Python | mit | 1,924 |
from datetime import date, datetime
from decimal import Decimal
from functools import partial
from collections import namedtuple
from testfixtures.shouldraise import ShouldAssert
from testfixtures.tests.sample1 import SampleClassA, SampleClassB, Slotted
from testfixtures.mock import Mock, call
from re import compile
from testfixtures import (
Comparison as C,
Replacer,
ShouldRaise,
compare,
generator,
singleton,
)
from testfixtures.compat import (
class_type_name, exception_module, PY3, xrange,
BytesLiteral, UnicodeLiteral,
PY2, PY_37_PLUS, ABC
)
from testfixtures.comparison import compare_sequence, compare_object
from unittest import TestCase
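# hexsub replaces memory addresses such as 0x7f2c... in reprs with '...',
# so expected failure messages can be compared deterministically.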
hexaddr = compile('0x[0-9A-Fa-f]+')
def hexsub(raw):
return hexaddr.sub('...', raw)
call_list_repr = repr(Mock().mock_calls.__class__)
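# Sentinel: lets check_raises tell "argument not supplied" apart from None.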
marker = object()
_compare = compare
class Lazy:
def __init__(self, message):
self.message = message
def __str__(self):
return self.message
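# Assert that compare() raises an AssertionError whose message equals
# `message` exactly or matches the compiled `regex`.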
def check_raises(x=marker, y=marker, message=None, regex=None,
compare=compare, **kw):
args = []
for value in x, y:
if value is not marker:
args.append(value)
for value in 'x', 'y':
explicit = 'explicit_{}'.format(value)
if explicit in kw:
kw[value] = kw[explicit]
del kw[explicit]
try:
compare(*args, **kw)
except Exception as e:
if not isinstance(e, AssertionError): # pragma: no cover
raise
actual = hexsub(e.args[0])
if message is not None:
# handy for debugging, but can't be relied on for tests!
_compare(actual, expected=message, show_whitespace=True)
assert actual == message
else:
if not regex.match(actual): # pragma: no cover
raise AssertionError(
'%r did not match %r' % (actual, regex.pattern)
)
else:
raise AssertionError('No exception raised!')
class CompareHelper(object):
def check_raises(self, *args, **kw):
check_raises(*args, **kw)
class TestCompare(CompareHelper, TestCase):
def test_object_same(self):
o = object()
compare(o, o)
def test_object_diff(self):
self.check_raises(
object(), object(),
'<object object at ...> != <object object at ...>'
)
def test_different_types(self):
self.check_raises('x', 1, "'x' != 1")
def test_number_same(self):
compare(1, 1)
def test_number_different(self):
self.check_raises(1, 2, '1 != 2')
def test_decimal_different(self):
self.check_raises(Decimal(1), Decimal(2),
"Decimal('1') != Decimal('2')")
def test_different_with_labels(self):
self.check_raises(1, 2, '1 (expected) != 2 (actual)',
x_label='expected', y_label='actual')
def test_string_same(self):
compare('x', 'x')
def test_unicode_string_different(self):
if PY2:
expected = "u'a' != 'b'"
else:
expected = "'a' != b'b'"
self.check_raises(
UnicodeLiteral('a'), BytesLiteral('b'),
expected
)
def test_bytes_different(self):
if PY2:
expected = (
"\n"
"'12345678901'\n"
'!=\n'
"'12345678902'"
)
else:
expected = (
"\n"
"b'12345678901'\n"
'!=\n'
"b'12345678902'"
)
self.check_raises(
BytesLiteral('12345678901'),
BytesLiteral('12345678902'),
expected
)
def test_bytes_same_strict(self):
compare(actual=b'', expected=b'', strict=True)
if PY3:
def test_moar_bytes_different(self):
self.check_raises(
actual=b'{"byte_pound":"b\'\\\\xa3\'"}',
expected=b'{"byte_pound":"b\\\'\\xa3\'"}',
message = (
"\n"
"b'{\"byte_pound\":\"b\\\\\\'\\\\xa3\\\'\"}' (expected)\n"
'!=\n'
"b'{\"byte_pound\":\"b\\\'\\\\\\\\xa3\\\'\"}' (actual)"
)
)
def test_string_diff_short(self):
self.check_raises(
'\n'+('x'*9), '\n'+('y'*9),
"'\\nxxxxxxxxx' != '\\nyyyyyyyyy'"
)
def test_string_diff_long(self):
self.check_raises(
'x'*11, 'y'*11,
"\n'xxxxxxxxxxx'\n!=\n'yyyyyyyyyyy'"
)
def test_string_diff_long_newlines(self):
self.check_raises(
'x'*5+'\n'+'y'*5, 'x'*5+'\n'+'z'*5,
"\n--- first\n+++ second\n@@ -1,2 +1,2 @@\n xxxxx\n-yyyyy\n+zzzzz"
)
def test_string_diff_short_labels(self):
self.check_raises(
'\n'+('x'*9), '\n'+('y'*9),
"'\\nxxxxxxxxx' (expected) != '\\nyyyyyyyyy' (actual)",
x_label='expected',
y_label='actual'
)
def test_string_diff_long_labels(self):
self.check_raises(
'x'*11, 'y'*11,
"\n'xxxxxxxxxxx' (expected)\n!=\n'yyyyyyyyyyy' (actual)",
x_label='expected',
y_label='actual'
)
def test_string_diff_long_newlines_labels(self):
self.check_raises(
'x'*5+'\n'+'y'*5, 'x'*5+'\n'+'z'*5,
"\n--- expected\n+++ actual\n"
"@@ -1,2 +1,2 @@\n xxxxx\n-yyyyy\n+zzzzz",
x_label='expected',
y_label='actual'
)
def test_exception_same_object(self):
e = ValueError('some message')
compare(e, e)
def test_exception_same_c_wrapper(self):
e1 = ValueError('some message')
e2 = ValueError('some message')
compare(C(e1), e2)
def test_exception_different_object(self):
e1 = ValueError('some message')
e2 = ValueError('some message')
compare(e1, e2)
def test_exception_different_object_c_wrapper(self):
e1 = ValueError('some message')
e2 = ValueError('some message')
compare(C(e1), e2)
def test_exception_diff(self):
e1 = ValueError('some message')
e2 = ValueError('some other message')
if PY_37_PLUS:
self.check_raises(
e1, e2,
"ValueError('some message') != ValueError('some other message')"
)
else:
self.check_raises(
e1, e2,
"ValueError('some message',) != ValueError('some other message',)"
)
def test_exception_diff_c_wrapper(self):
e1 = ValueError('some message')
e2 = ValueError('some other message')
self.check_raises(
C(e1), e2,
("\n"
"<C:{module}.ValueError(failed)>\n"
"attributes differ:\n"
"'args': ('some message',) (Comparison) "
"!= ('some other message',) (actual)\n"
"</C:{module}.ValueError>"
" != ValueError('some other message'{message})"
).format(module=exception_module,
message='' if PY_37_PLUS else ','))
def test_sequence_long(self):
self.check_raises(
['quite a long string 1', 'quite a long string 2',
'quite a long string 3', 'quite a long string 4',
'quite a long string 5', 'quite a long string 6',
'quite a long string 7', 'quite a long string 8'],
['quite a long string 1', 'quite a long string 2',
'quite a long string 3', 'quite a long string 4',
'quite a long string 9', 'quite a long string 10',
'quite a long string 11', 'quite a long string 12'],
"sequence not as expected:\n\n"
"same:\n"
"['quite a long string 1',\n"
" 'quite a long string 2',\n"
" 'quite a long string 3',\n"
" 'quite a long string 4']\n\n"
"first:\n"
"['quite a long string 5',\n"
" 'quite a long string 6',\n"
" 'quite a long string 7',\n"
" 'quite a long string 8']\n\n"
"second:\n"
"['quite a long string 9',\n"
" 'quite a long string 10',\n"
" 'quite a long string 11',\n"
" 'quite a long string 12']\n"
"\n"
"While comparing [4]: \n"
"'quite a long string 5'\n"
"!=\n"
"'quite a long string 9'"
)
def test_sequence_different_labels_supplied(self):
self.check_raises(
[1, 2, 3], [1, 2, 4],
"sequence not as expected:\n\n"
"same:\n"
"[1, 2]\n\n"
"expected:\n"
"[3]\n\n"
"actual:\n"
"[4]",
x_label='expected',
y_label='actual',
)
def test_list_same(self):
compare([1, 2, 3], [1, 2, 3])
def test_list_different(self):
self.check_raises(
[1, 2, 3], [1, 2, 4],
"sequence not as expected:\n\n"
"same:\n"
"[1, 2]\n\n"
"first:\n"
"[3]\n\n"
"second:\n"
"[4]"
)
def test_list_different_float(self):
self.check_raises(
[1, 2, 3.0], [1, 2, 4.0],
"sequence not as expected:\n\n"
"same:\n"
"[1, 2]\n\n"
"first:\n"
"[3.0]\n\n"
"second:\n"
"[4.0]"
)
def test_list_different_decimal(self):
self.check_raises(
[1, 2, Decimal(3)], [1, 2, Decimal(4)],
"sequence not as expected:\n\n"
"same:\n"
"[1, 2]\n\n"
"first:\n"
"[Decimal('3')]\n\n"
"second:\n"
"[Decimal('4')]"
)
def test_list_totally_different(self):
self.check_raises(
[1], [2],
"sequence not as expected:\n\n"
"same:\n"
"[]\n\n"
"first:\n"
"[1]\n\n"
"second:\n"
"[2]"
)
def test_list_first_shorter(self):
self.check_raises(
[1, 2], [1, 2, 3],
"sequence not as expected:\n\n"
"same:\n[1, 2]\n\n"
"first:\n[]\n\n"
"second:\n[3]"
)
def test_list_second_shorter(self):
self.check_raises(
[1, 2, 3], [1, 2],
"sequence not as expected:\n\n"
"same:\n[1, 2]\n\n"
"first:\n[3]\n\n"
"second:\n[]"
)
def test_dict_same(self):
compare(dict(x=1), dict(x=1))
def test_dict_first_missing_keys(self):
self.check_raises(
dict(), dict(z=3),
"dict not as expected:\n"
"\n"
"in second but not first:\n"
"'z': 3"
)
def test_dict_second_missing_keys(self):
self.check_raises(
dict(z=3), dict(),
"dict not as expected:\n"
"\n"
"in first but not second:\n"
"'z': 3"
)
def test_dict_values_different(self):
self.check_raises(
dict(x=1), dict(x=2),
"dict not as expected:\n"
"\n"
"values differ:\n"
"'x': 1 != 2"
)
def test_dict_identical_non_matching_ints(self):
self.check_raises(
dict(x=1, y=1), dict(x=2, y=2),
"dict not as expected:\n"
"\n"
"values differ:\n"
"'x': 1 != 2\n"
"'y': 1 != 2"
)
def test_dict_identical_non_matching_floats(self):
self.check_raises(
dict(x=1.0, y=1.0), dict(x=2.0, y=2.0),
"dict not as expected:\n"
"\n"
"values differ:\n"
"'x': 1.0 != 2.0\n"
"'y': 1.0 != 2.0"
)
def test_dict_labels_specified(self):
self.check_raises(
dict(x=1, y=2), dict(x=2, z=3),
"dict not as expected:\n"
"\n"
"in expected but not actual:\n"
"'y': 2\n"
"\n"
"in actual but not expected:\n"
"'z': 3\n"
"\n"
"values differ:\n"
"'x': 1 (expected) != 2 (actual)",
x_label='expected',
y_label='actual'
)
def test_dict_tuple_keys_same_value(self):
compare({(1, 2): None}, {(1, 2): None})
def test_dict_tuple_keys_different_value(self):
self.check_raises(
{(1, 2): 3},
{(1, 2): 42},
"dict not as expected:\n"
"\n"
"values differ:\n"
"(1, 2): 3 != 42"
)
def test_dict_full_diff(self):
self.check_raises(
dict(x=1, y=2, a=4), dict(x=1, z=3, a=5),
"dict not as expected:\n"
"\n"
'same:\n'
"['x']\n"
"\n"
"in first but not second:\n"
"'y': 2\n"
'\n'
"in second but not first:\n"
"'z': 3\n"
'\n'
"values differ:\n"
"'a': 4 != 5"
)
def test_dict_consistent_ordering(self):
self.check_raises(
dict(xa=1, xb=2, ya=1, yb=2, aa=3, ab=4),
dict(xa=1, xb=2, za=3, zb=4, aa=5, ab=5),
"dict not as expected:\n"
"\n"
'same:\n'
"['xa', 'xb']\n"
"\n"
"in first but not second:\n"
"'ya': 1\n"
"'yb': 2\n"
'\n'
"in second but not first:\n"
"'za': 3\n"
"'zb': 4\n"
'\n'
"values differ:\n"
"'aa': 3 != 5\n"
"'ab': 4 != 5"
)
def test_dict_consistent_ordering_types_same(self):
if PY3:
same = "[6, None]\n"
else:
same = "[None, 6]\n"
self.check_raises(
{None: 1, 6: 2, 1: 3},
{None: 1, 6: 2, 1: 4},
"dict not as expected:\n"
"\n"+
'same:\n'+
same+
"\n"
"values differ:\n"
"1: 3 != 4"
)
def test_dict_consistent_ordering_types_x_not_y(self):
self.check_raises(
{None: 1, 3: 2},
{},
"dict not as expected:\n"
"\n"
"in first but not second:\n"
"3: 2\n"
"None: 1"
)
def test_dict_consistent_ordering_types_y_not_x(self):
self.check_raises(
{},
{None: 1, 3: 2},
"dict not as expected:\n"
"\n"
"in second but not first:\n"
"3: 2\n"
"None: 1"
)
def test_dict_consistent_ordering_types_value(self):
self.check_raises(
{None: 1, 6: 2},
{None: 3, 6: 4},
"dict not as expected:\n"
"\n"
"values differ:\n"
"6: 2 != 4\n"
"None: 1 != 3"
)
def test_set_same(self):
compare(set([1]), set([1]))
def test_set_first_missing_keys(self):
self.check_raises(
set(), set([3]),
"set not as expected:\n"
"\n"
"in second but not first:\n"
"[3]\n"
'\n'
)
def test_set_second_missing_keys(self):
self.check_raises(
set([3]), set(),
"set not as expected:\n"
"\n"
"in first but not second:\n"
"[3]\n"
'\n'
)
def test_set_full_diff(self):
self.check_raises(
set([1, 2, 4]), set([1, 3, 5]),
"set not as expected:\n"
"\n"
"in first but not second:\n"
"[2, 4]\n"
'\n'
"in second but not first:\n"
"[3, 5]\n"
'\n'
)
def test_set_type_ordering(self):
self.check_raises(
{None, 1}, {'', 2},
"set not as expected:\n"
"\n"
"in first but not second:\n"
"[1, None]\n"
'\n'
"in second but not first:\n"
"['', 2]\n"
'\n'
)
def test_set_labels(self):
self.check_raises(
set([1, 2, 4]), set([1, 3, 5]),
"set not as expected:\n"
"\n"
"in expected but not actual:\n"
"[2, 4]\n"
'\n'
"in actual but not expected:\n"
"[3, 5]\n"
'\n',
x_label='expected',
y_label='actual',
)
def test_tuple_same(self):
compare((1, 2, 3), (1, 2, 3))
def test_tuple_different(self):
self.check_raises(
(1, 2, 3), (1, 2, 4),
"sequence not as expected:\n\n"
"same:\n(1, 2)\n\n"
"first:\n(3,)\n\n"
"second:\n(4,)"
)
def test_tuple_totally_different(self):
self.check_raises(
(1, ), (2, ),
"sequence not as expected:\n\n"
"same:\n()\n\n"
"first:\n(1,)\n\n"
"second:\n(2,)"
)
def test_tuple_first_shorter(self):
self.check_raises(
(1, 2), (1, 2, 3),
"sequence not as expected:\n\n"
"same:\n(1, 2)\n\n"
"first:\n()\n\n"
"second:\n(3,)"
)
def test_tuple_second_shorter(self):
self.check_raises(
(1, 2, 3), (1, 2),
"sequence not as expected:\n\n"
"same:\n(1, 2)\n\n"
"first:\n(3,)\n\n"
"second:\n()"
)
def test_generator_same(self):
compare(generator(1, 2, 3), generator(1, 2, 3))
def test_generator_different(self):
self.check_raises(
generator(1, 2, 3), generator(1, 2, 4),
"sequence not as expected:\n\n"
"same:\n(1, 2)\n\n"
"first:\n(3,)\n\n"
"second:\n(4,)"
)
def test_generator_totally_different(self):
self.check_raises(
generator(1, ), generator(2, ),
"sequence not as expected:\n\n"
"same:\n()\n\n"
"first:\n(1,)\n\n"
"second:\n(2,)"
)
def test_generator_first_shorter(self):
self.check_raises(
generator(1, 2), generator(1, 2, 3),
"sequence not as expected:\n\n"
"same:\n(1, 2)\n\n"
"first:\n()\n\n"
"second:\n(3,)"
)
    def test_generator_second_shorter(self):
self.check_raises(
generator(1, 2, 3), generator(1, 2),
"sequence not as expected:\n\n"
"same:\n(1, 2)\n\n"
"first:\n(3,)\n\n"
"second:\n()"
)
def test_nested_generator_different(self):
self.check_raises(
generator(1, 2, generator(3), 4),
generator(1, 2, generator(3), 5),
"sequence not as expected:\n"
"\n"
"same:\n"
"(1, 2, <generator object generator at ...>)\n"
"\n"
"first:\n"
"(4,)\n"
"\n"
"second:\n"
"(5,)"
)
def test_nested_generator_tuple_left(self):
compare(
generator(1, 2, (3, ), 4),
generator(1, 2, generator(3), 4),
)
def test_nested_generator_tuple_right(self):
compare(
generator(1, 2, generator(3), 4),
generator(1, 2, (3, ), 4),
)
def test_sequence_and_generator(self):
compare((1, 2, 3), generator(1, 2, 3))
def test_sequence_and_generator_strict(self):
expected = compile(
"\(1, 2, 3\) \(<(class|type) 'tuple'>\) \(expected\) != "
"<generator object (generator )?at... "
"\(<(class|type) 'generator'>\) \(actual\)"
)
self.check_raises(
(1, 2, 3), generator(1, 2, 3),
regex=expected,
strict=True,
x_label='expected',
y_label='actual',
)
def test_generator_and_sequence(self):
compare(generator(1, 2, 3), (1, 2, 3))
def test_iterable_with_iterable_same(self):
compare(xrange(1, 4), xrange(1, 4))
def test_iterable_with_iterable_different(self):
self.check_raises(
xrange(1, 4), xrange(1, 3),
"sequence not as expected:\n"
"\n"
"same:\n"
"(1, 2)\n"
"\n"
"first:\n"
"(3,)\n"
"\n"
"second:\n"
"()"
)
def test_iterable_and_generator(self):
compare(xrange(1, 4), generator(1, 2, 3))
def test_iterable_and_generator_strict(self):
expected = compile(
"x?range\(1, 4\) \(<(class|type) 'x?range'>\) != "
"<generator object (generator )?at... "
"\(<(class|type) 'generator'>\)"
)
self.check_raises(
xrange(1, 4), generator(1, 2, 3),
regex=expected,
strict=True,
)
def test_generator_and_iterable(self):
compare(generator(1, 2, 3), xrange(1, 4))
def test_tuple_and_list(self):
compare((1, 2, 3), [1, 2, 3])
def test_tuple_and_list_strict(self):
if PY2:
expected = ("(1, 2, 3) (<type 'tuple'>) != "
"[1, 2, 3] (<type 'list'>)")
else:
expected = ("(1, 2, 3) (<class 'tuple'>) != "
"[1, 2, 3] (<class 'list'>)")
self.check_raises(
(1, 2, 3), [1, 2, 3],
expected,
strict=True
)
def test_float_subclass_strict(self):
class TestFloat(float):
pass
compare(TestFloat(0.75), TestFloat(0.75), strict=True)
def test_old_style_classes_same(self):
class X:
pass
compare(X, X)
def test_old_style_classes_different(self):
if PY3:
expected = (
"<class 'testfixtures.tests.test_compare.TestCompare."
"test_old_style_classes_different.<locals>.X'>"
" != "
"<class 'testfixtures.tests.test_compare.TestCompare."
"test_old_style_classes_different.<locals>.Y'>"
)
else:
expected = (
"<class testfixtures.tests.test_compare.X at ...>"
" != "
"<class testfixtures.tests.test_compare.Y at ...>"
)
class X:
pass
class Y:
pass
self.check_raises(X, Y, expected)
def test_new_style_classes_same(self):
class X(object):
pass
compare(X, X)
def test_new_style_classes_different(self):
if PY3:
expected = (
"<class 'testfixtures.tests.test_compare.TestCompare."
"test_new_style_classes_different.<locals>.X'>"
" != "
"<class 'testfixtures.tests.test_compare.TestCompare."
"test_new_style_classes_different.<locals>.Y'>"
)
else:
expected = (
"<class 'testfixtures.tests.test_compare.X'>"
" != "
"<class 'testfixtures.tests.test_compare.Y'>"
)
class X(object):
pass
class Y(object):
pass
self.check_raises(X, Y, expected)
def test_show_whitespace(self):
# does nothing! ;-)
self.check_raises(
' x \n\r', ' x \n \t',
"' x \\n\\r' != ' x \\n \\t'",
show_whitespace=True
)
def test_show_whitespace_long(self):
self.check_raises(
"\t \n '", '\r \n ',
'\n--- first\n'
'+++ second\n'
'@@ -1,2 +1,2 @@\n'
'-\'\\t \\n\'\n'
'-" \'"\n'
'+\'\\r \\n\'\n'
'+\' \'',
show_whitespace=True
)
def test_show_whitespace_equal(self):
compare('x', 'x', show_whitespace=True)
def test_show_whitespace_not_used_because_of_other_difference(self):
self.check_raises(
(1, 'a'),
(2, 'b'),
"sequence not as expected:\n"
"\n"
"same:\n"
"()\n"
"\n"
"first:\n"
"(1, 'a')\n"
"\n"
"second:\n"
"(2, 'b')",
show_whitespace=False
)
def test_include_trailing_whitespace(self):
self.check_raises(
' x \n', ' x \n',
"' x \\n' != ' x \\n'"
)
def test_ignore_trailing_whitespace(self):
compare(' x \t\n', ' x\t \n', trailing_whitespace=False)
def test_ignore_trailing_whitespace_non_string(self):
self.check_raises(
1, '',
"1 != ''",
trailing_whitespace=False
)
def test_ignore_trailing_whitespace_but_respect_leading_whitespace(self):
# NB: careful: this strips off the last newline too
# DON'T use if you care about that!
self.check_raises(
'a\n b\n c\n',
'a\nb\nc\n',
"'a\\n b\\n c' != 'a\\nb\\nc'",
trailing_whitespace=False
)
def test_include_blank_lines(self):
self.check_raises(
'\n \n', '\n ',
"'\\n \\n' != '\\n '"
)
def test_ignore_blank_lines(self):
compare("""
a
\t
b
""",
' a\nb', blanklines=False)
def test_ignore_blank_lines_non_string(self):
self.check_raises(
1, '',
"1 != ''",
blanklines=False
)
def test_supply_comparer(self):
def compare_dict(x, y, context):
self.assertEqual(x, {1: 1})
self.assertEqual(y, {2: 2})
self.assertEqual(context.get_option('foo'), 'bar')
return 'not equal'
with ShouldAssert('not equal'):
compare({1: 1}, {2: 2},
foo='bar',
comparers={dict: compare_dict})
def test_register_more_specific(self):
class_ = namedtuple('Test', 'x')
with ShouldAssert('compare class_'):
compare(class_(1), class_(2),
comparers={
tuple: Mock(return_value='compare tuple'),
class_: Mock(return_value='compare class_')
})
def test_extra_comparers_leave_existing(self):
class MyObject(object):
def __init__(self, name):
self.name = name
def __repr__(self):
return 'MyObject instance'
def compare_my_object(x, y, context):
return '%s != %s' % (x.name, y.name)
with Replacer() as r:
r.replace('testfixtures.comparison._registry', {
list: compare_sequence,
})
self.check_raises(
[1, MyObject('foo')], [1, MyObject('bar')],
"sequence not as expected:\n"
"\n"
"same:\n"
"[1]\n"
"\n"
"first:\n"
"[MyObject instance]\n"
"\n"
"second:\n"
"[MyObject instance]\n"
"\n"
"While comparing [1]: foo != bar",
comparers={MyObject: compare_my_object}
)
def test_list_subclass(self):
class MyList(list): pass
a_list = MyList([1])
b_list = MyList([2])
self.check_raises(
a_list, b_list,
"sequence not as expected:\n\n"
"same:\n[]\n\n"
"first:\n[1]\n\n"
"second:\n[2]"
)
def test_strict_okay(self):
m = object()
compare(m, m, strict=True)
def test_strict_comparer_supplied(self):
compare_obj = Mock()
compare_obj.return_value = 'not equal'
self.check_raises(
object(), object(),
"not equal",
strict=True,
comparers={object: compare_obj},
)
def test_strict_default_comparer(self):
class MyList(list):
pass
# default comparer used!
self.check_raises(
MyList((1, 2, 3)), MyList((1, 2, 4)),
"sequence not as expected:\n"
"\n"
"same:\n"
"[1, 2]\n"
"\n"
"first:\n"
"[3]\n"
"\n"
"second:\n"
"[4]",
strict=True,
)
def test_list_subclass_strict(self):
m = Mock()
m.aCall()
self.check_raises(
[call.aCall()], m.method_calls,
("[call.aCall()] (<{0} 'list'>) != [call.aCall()] "
"({1})").format(class_type_name, call_list_repr),
strict=True,
)
def test_list_subclass_long_strict(self):
m = Mock()
m.call('X'*20)
self.check_raises(
[call.call('Y'*20)], m.method_calls,
("[call.call('YYYYYYYYYYYYYYYYYY... "
"(<{0} 'list'>) != "
"[call.call('XXXXXXXXXXXXXXXXXX... "
"({1})").format(class_type_name, call_list_repr),
strict=True,
)
def test_prefix(self):
self.check_raises(1, 2, 'wrong number of orders: 1 != 2',
prefix='wrong number of orders')
def test_prefix_multiline(self):
self.check_raises(
'x'*5+'\n'+'y'*5, 'x'*5+'\n'+'z'*5,
"file content: \n--- first\n+++ second\n"
"@@ -1,2 +1,2 @@\n xxxxx\n-yyyyy\n+zzzzz",
prefix='file content'
)
def test_prefix_callable(self):
with ShouldAssert('foo: 1 != 2'):
compare(1, 2, prefix=lambda: 'foo')
def test_prefix_stringable(self):
with ShouldAssert('foo: 1 != 2'):
compare(1, 2, prefix=Lazy('foo'))
def test_prefix_lazy(self):
compare(2, 2, prefix=Mock(side_effect=Exception('boom!')))
def test_suffix(self):
self.check_raises(
1, 2,
'1 != 2\n'
'additional context',
suffix='additional context',
)
def test_suffix_callable(self):
with ShouldAssert('1 != 2\n3'):
compare(1, 2, suffix=lambda: 3)
def test_suffix_stringable(self):
with ShouldAssert('1 != 2\nfoo'):
compare(1, 2, suffix=Lazy('foo'))
def test_suffix_lazy(self):
compare(2, 2, suffix=Mock(side_effect=Exception('boom!')))
def test_labels_multiline(self):
self.check_raises(
'x'*5+'\n'+'y'*5, 'x'*5+'\n'+'z'*5,
"\n--- expected\n+++ actual\n"
"@@ -1,2 +1,2 @@\n xxxxx\n-yyyyy\n+zzzzz",
x_label='expected',
y_label='actual'
)
def test_generator_with_non_generator(self):
self.check_raises(
generator(1, 2, 3), None,
'<generator object generator at ...> != None',
)
def test_generator_with_buggy_generator(self):
def bad_gen():
yield 1
# raising a TypeError here is important :-/
raise TypeError('foo')
with ShouldRaise(TypeError('foo')):
compare(generator(1, 2, 3), bad_gen())
def test_nested_dict_tuple_values_different(self):
self.check_raises(
dict(x=(1, 2, 3)), dict(x=(1, 2, 4)),
"dict not as expected:\n"
"\n"
"values differ:\n"
"'x': (1, 2, 3) != (1, 2, 4)\n"
'\n'
"While comparing ['x']: sequence not as expected:\n"
"\n"
"same:\n"
"(1, 2)\n"
"\n"
"first:\n"
"(3,)\n"
"\n"
"second:\n"
"(4,)"
)
def test_nested_dict_different(self):
self.check_raises(
dict(x=dict(y=1)), dict(x=dict(y=2)),
"dict not as expected:\n"
"\n"
"values differ:\n"
"'x': {'y': 1} != {'y': 2}\n"
'\n'
"While comparing ['x']: dict not as expected:\n"
"\n"
"values differ:\n"
"'y': 1 != 2"
)
def test_nested_dict_empty_but_same(self):
compare(dict(x=dict()), dict(x=dict()), ignore_eq=True)
def test_nested_dict_empty_with_keys(self):
compare(dict(x=dict(x=1)), dict(x=dict(x=1)), ignore_eq=True)
def test_tuple_list_different(self):
self.check_raises(
(1, [2, 3, 5]), (1, [2, 4, 5]),
"sequence not as expected:\n"
"\n"
"same:\n"
"(1,)\n"
"\n"
"first:\n"
"([2, 3, 5],)\n"
"\n"
"second:\n"
"([2, 4, 5],)\n"
"\n"
"While comparing [1]: sequence not as expected:\n"
"\n"
"same:\n"
"[2]\n"
"\n"
"first:\n"
"[3, 5]\n"
"\n"
"second:\n"
"[4, 5]"
)
def test_tuple_long_strings_different(self):
self.check_raises(
(1, 2, "foo\nbar\nbaz\n", 4),
(1, 2, "foo\nbob\nbaz\n", 4),
"sequence not as expected:\n"
"\n"
"same:\n"
"(1, 2)\n"
"\n"
"first:\n"
"('foo\\nbar\\nbaz\\n', 4)\n"
"\n"
"second:\n"
"('foo\\nbob\\nbaz\\n', 4)\n"
"\n"
"While comparing [2]: \n"
"--- first\n"
"+++ second\n"
"@@ -1,4 +1,4 @@\n"
# check that show_whitespace bubbles down
" 'foo\\n'\n"
"-'bar\\n'\n"
"+'bob\\n'\n"
" 'baz\\n'\n"
" ''",
show_whitespace=True
)
def test_dict_multiple_differences(self):
self.check_raises(
dict(x=(1, 2, 3), y=(4, 5, 6, )),
dict(x=(1, 2, 4), y=(4, 5, 7, )),
"dict not as expected:\n"
"\n"
"values differ:\n"
"'x': (1, 2, 3) != (1, 2, 4)\n"
"'y': (4, 5, 6) != (4, 5, 7)\n"
"\n"
"While comparing ['x']: sequence not as expected:\n"
"\n"
"same:\n"
"(1, 2)\n"
"\n"
"first:\n"
"(3,)\n"
"\n"
"second:\n"
"(4,)\n"
"\n"
"While comparing ['y']: sequence not as expected:\n"
"\n"
"same:\n"
"(4, 5)\n"
"\n"
"first:\n"
"(6,)\n"
"\n"
"second:\n"
"(7,)"
)
def test_deep_breadcrumbs(self):
obj1 = singleton('obj1')
obj2 = singleton('obj2')
gen1 = generator(obj1, obj2)
gen2 = generator(obj1, )
# dict -> list -> tuple -> generator
self.check_raises(
dict(x=[1, ('a', 'b', gen1), 3], y=[3, 4]),
dict(x=[1, ('a', 'b', gen2), 3], y=[3, 4]), (
"dict not as expected:\n"
"\n"
"same:\n"
"['y']\n"
"\n"
"values differ:\n"
"'x': [1, ('a', 'b', {gen1}), 3] != [1, ('a', 'b', {gen2}), 3]"
"\n\n"
"While comparing ['x']: sequence not as expected:\n"
"\n"
"same:\n"
"[1]\n"
"\n"
"first:\n"
"[('a', 'b', {gen1}), 3]\n"
"\n"
"second:\n"
"[('a', 'b', {gen2}), 3]\n"
"\n"
"While comparing ['x'][1]: sequence not as expected:\n"
"\n"
"same:\n"
"('a', 'b')\n"
"\n"
"first:\n"
"({gen1},)\n"
"\n"
"second:\n"
"({gen2},)\n"
"\n"
"While comparing ['x'][1][2]: sequence not as expected:\n"
"\n"
"same:\n"
"(<obj1>,)\n"
"\n"
"first:\n"
"(<obj2>,)\n"
"\n"
"second:\n"
"()"
).format(gen1=hexsub(repr(gen1)),
gen2=hexsub(repr(gen2)))
)
def test_nested_labels(self):
obj1 = singleton('obj1')
obj2 = singleton('obj2')
gen1 = generator(obj1, obj2)
gen2 = generator(obj1, )
# dict -> list -> tuple -> generator
self.check_raises(
dict(x=[1, ('a', 'b', gen1), 3], y=[3, 4]),
dict(x=[1, ('a', 'b', gen2), 3], y=[3, 4]), (
"dict not as expected:\n"
"\n"
"same:\n"
"['y']\n"
"\n"
"values differ:\n"
"'x': [1, ('a', 'b', {gen1}), 3] (expected) != "
"[1, ('a', 'b', {gen2}), 3] (actual)\n"
"\n"
"While comparing ['x']: sequence not as expected:\n"
"\n"
"same:\n"
"[1]\n"
"\n"
"expected:\n"
"[('a', 'b', {gen1}), 3]\n"
"\n"
"actual:\n"
"[('a', 'b', {gen2}), 3]\n"
"\n"
"While comparing ['x'][1]: sequence not as expected:\n"
"\n"
"same:\n"
"('a', 'b')\n"
"\n"
"expected:\n"
"({gen1},)\n"
"\n"
"actual:\n"
"({gen2},)\n"
"\n"
"While comparing ['x'][1][2]: sequence not as expected:\n"
"\n"
"same:\n"
"(<obj1>,)\n"
"\n"
"expected:\n"
"(<obj2>,)\n"
"\n"
"actual:\n"
"()"
).format(gen1=hexsub(repr(gen1)),
gen2=hexsub(repr(gen2))),
x_label='expected',
y_label='actual',
)
def test_nested_strict_only_type_difference(self):
MyTuple = namedtuple('MyTuple', 'x y z')
type_repr = repr(MyTuple)
tuple_repr = repr(tuple)
self.check_raises(
[MyTuple(1, 2, 3)],
[(1, 2, 3)],
("sequence not as expected:\n"
"\n"
"same:\n"
"[]\n"
"\n"
"first:\n"
"[MyTuple(x=1, y=2, z=3)]\n"
"\n"
"second:\n"
"[(1, 2, 3)]\n"
"\n"
"While comparing [0]: MyTuple(x=1, y=2, z=3) "
"(%s) "
"!= (1, 2, 3) "
"(%s)") % (type_repr, tuple_repr),
strict=True
)
def test_strict_nested_different(self):
if PY2:
expected = "[1, 2] (<type 'list'>) != (1, 3) (<type 'tuple'>)"
else:
expected = "[1, 2] (<class 'list'>) != (1, 3) (<class 'tuple'>)"
self.check_raises(
(1, 2, [1, 2]), (1, 2, (1, 3)),
"sequence not as expected:\n"
"\n"
"same:\n"
"(1, 2)\n"
"\n"
"first:\n"
"([1, 2],)\n"
"\n"
"second:\n"
"((1, 3),)"
"\n\n"
"While comparing [2]: " + expected,
strict=True,
)
def test_namedtuple_equal(self):
class_ = namedtuple('Foo', 'x')
compare(class_(1), class_(1))
def test_namedtuple_same_type(self):
class_ = namedtuple('Foo', 'x y')
self.check_raises(
class_(1, 2), class_(1, 3),
"Foo not as expected:\n\n"
"same:\n"
"['x']\n\n"
"values differ:\n"
"'y': 2 != 3"
)
def test_namedtuple_different_type(self):
class_a = namedtuple('Foo', 'x y')
class_b = namedtuple('Bar', 'x y z')
self.check_raises(
class_a(1, 2), class_b(1, 2, 3),
"Foo(x=1, y=2) (<class 'testfixtures.tests.test_compare.Foo'>) != "
"Bar(x=1, y=2, z=3) "
"(<class 'testfixtures.tests.test_compare.Bar'>)"
)
def test_dict_with_list(self):
self.check_raises(
{1: 'one', 2: 'two'}, [1, 2],
"{1: 'one', 2: 'two'} != [1, 2]"
)
def test_explicit_expected(self):
self.check_raises('x', expected='y',
message="'y' (expected) != 'x' (actual)")
def test_explicit_actual(self):
self.check_raises('x', actual='y',
message="'x' (expected) != 'y' (actual)")
def test_explicit_both(self):
self.check_raises(expected='x', actual='y',
message="'x' (expected) != 'y' (actual)")
def test_implicit_and_labels(self):
self.check_raises('x', 'y',
x_label='x_label', y_label='y_label',
message="'x' (x_label) != 'y' (y_label)")
def test_explicit_and_labels(self):
self.check_raises(explicit_x='x', explicit_y='y',
x_label='x_label', y_label='y_label',
message="'x' (x_label) != 'y' (y_label)")
def test_invalid_two_args_expected(self):
with ShouldRaise(TypeError(
"Exactly two objects needed, you supplied: ['z', 'x', 'y']"
)):
compare('x', 'y', expected='z')
def test_invalid_two_args_actual(self):
with ShouldRaise(TypeError(
"Exactly two objects needed, you supplied: ['x', 'y', 'z']"
)):
compare('x', 'y', actual='z')
def test_invalid_zero_args(self):
with ShouldRaise(TypeError(
'Exactly two objects needed, you supplied:'
)):
compare()
def test_invalid_one_args(self):
with ShouldRaise(TypeError(
"Exactly two objects needed, you supplied: ['x']"
)):
compare('x')
def test_invalid_three_args(self):
with ShouldRaise(TypeError(
"Exactly two objects needed, you supplied: ['x', 'y', 'z']"
)):
compare('x', 'y', 'z')
def test_invalid_because_of_typo(self):
with ShouldRaise(TypeError(
"Exactly two objects needed, you supplied: ['x'] {'expceted': 'z'}"
)):
compare('x', expceted='z')
def test_dont_raise(self):
self.assertEqual(compare('x', 'y', raises=False), "'x' != 'y'")
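    # OrmObj mimics ORM objects whose __eq__ always returns True; this is
    # the scenario that compare(..., ignore_eq=True) is designed to handle.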
class OrmObj(object):
def __init__(self, a):
self.a = a
def __eq__(self, other):
return True
def __repr__(self):
return 'OrmObj: '+str(self.a)
def test_django_orm_is_horrible(self):
self.assertTrue(self.OrmObj(1) == self.OrmObj(2))
def query_set():
yield self.OrmObj(1)
yield self.OrmObj(2)
self.check_raises(
message=(
"sequence not as expected:\n"
"\n"
"same:\n"
"(OrmObj: 1,)\n"
"\n"
"expected:\n"
"(OrmObj: 3,)\n"
"\n"
"actual:\n"
"(OrmObj: 2,)\n"
'\n'
'While comparing [1]: OrmObj not as expected:\n'
'\n'
'attributes differ:\n'
"'a': 3 (expected) != 2 (actual)"
),
expected=[self.OrmObj(1), self.OrmObj(3)],
actual=query_set(),
ignore_eq=True
)
def test_django_orm_is_horrible_part_2(self):
t_compare = partial(compare, ignore_eq=True)
t_compare(self.OrmObj(1), self.OrmObj(1))
t_compare(self.OrmObj('some longish string'),
self.OrmObj('some longish string'))
t_compare(self.OrmObj(date(2016, 1, 1)),
self.OrmObj(date(2016, 1, 1)))
def test_django_orm_is_horrible_part_3(self):
compare(
expected=self.OrmObj(1),
actual=self.OrmObj(1),
ignore_eq=True
)
def test_django_orm_is_horrible_part_4(self):
self.check_raises(
message='[1] (expected) != 2 (actual)',
expected=[1],
actual=2,
ignore_eq=True
)
def test_nested_django_orm_in_object(self):
class MyObject(object):
def __init__(self, orm):
self.orm = orm
self.check_raises(
message="MyObject not as expected:\n"
"\n"
"attributes differ:\n"
"'orm': OrmObj: 1 (expected) != OrmObj: 2 (actual)\n"
"\n"
"While comparing .orm: OrmObj not as expected:\n"
"\n"
"attributes differ:\n"
"'a': 1 (expected) != 2 (actual)",
expected=MyObject(self.OrmObj(1)),
actual=MyObject(self.OrmObj(2)),
ignore_eq=True)
def test_mock_call_same(self):
m = Mock()
m.foo(1, 2, x=3)
compare(m.mock_calls, m.mock_calls)
def test_mock_call_same_strict(self):
m = Mock()
m.foo(1, 2, x=3)
compare(m.mock_calls, m.mock_calls, strict=True)
def test_calls_different(self):
m1 = Mock()
m2 = Mock()
m1.foo(1, 2, x=3, y=4)
m2.bar(1, 3, x=7, y=4)
self.check_raises(
m1.mock_calls,
m2.mock_calls,
"sequence not as expected:\n"
"\n"
"same:\n"
"[]\n"
"\n"
"first:\n"
"[call.foo(1, 2, x=3, y=4)]\n"
"\n"
"second:\n"
"[call.bar(1, 3, x=7, y=4)]"
"\n\n"
'While comparing [0]: \n'
"'call.foo(1, 2, x=3, y=4)'\n"
'!=\n'
"'call.bar(1, 3, x=7, y=4)'"
)
def test_call_args_different(self):
m = Mock()
m.foo(1)
self.check_raises(
m.foo.call_args,
call(2),
"'call(1)' != 'call(2)'"
)
def test_calls_args_different_but_same_repr(self):
class Annoying(object):
def __init__(self, x):
self.x = x
def __repr__(self):
return '<annoying>'
m1 = Mock()
m2 = Mock()
m1.foo(Annoying(1))
m2.foo(Annoying(3))
self.check_raises(
m1.mock_calls,
m2.mock_calls,
'sequence not as expected:\n'
'\n'
'same:\n'
'[]\n'
'\n'
'first:\n'
'[call.foo(<annoying>)]\n'
'\n'
'second:\n'
'[call.foo(<annoying>)]\n'
'\n'
'While comparing [0]: mock.call not as expected:\n'
'\n'
'While comparing [0] args: sequence not as expected:\n'
'\n'
'same:\n'
'()\n'
'\n'
'first:\n'
'(<annoying>,)\n'
'\n'
'second:\n'
'(<annoying>,)\n'
'\n'
'While comparing [0] args[0]: Annoying not as expected:\n'
'\n'
'attributes differ:\n'
"'x': 1 != 3"
)
def test_calls_nested_equal_sub_attributes(self):
class Annoying(object):
def __init__(self, x):
self.x = x
def __repr__(self):
return '<annoying>'
m1 = Mock()
m2 = Mock()
m1.foo(x=[Annoying(1)])
m2.foo(x=[Annoying(1)])
compare(m1.mock_calls, m2.mock_calls)
def test_compare_arbitrary_nested_diff(self):
class OurClass:
def __init__(self, *args):
self.args = args
def __repr__(self):
return '<OurClass obj>'
self.check_raises(
OurClass(OurClass(1)),
OurClass(OurClass(2)),
"OurClass not as expected:\n"
"\n"
'attributes differ:\n'
"'args': (<OurClass obj>,) != (<OurClass obj>,)\n"
'\n'
'While comparing .args: sequence not as expected:\n'
'\n'
'same:\n'
'()\n'
'\n'
'first:\n'
'(<OurClass obj>,)\n'
'\n'
'second:\n'
'(<OurClass obj>,)\n'
'\n'
'While comparing .args[0]: OurClass not as expected:\n'
'\n'
'attributes differ:\n'
"'args': (1,) != (2,)\n"
'\n'
'While comparing .args[0].args: sequence not as expected:\n'
'\n'
'same:\n'
'()\n'
'\n'
'first:\n'
'(1,)\n'
'\n'
'second:\n'
'(2,)'
)
def test_compare_slotted_same(self):
compare(Slotted(1, 2), Slotted(1, 2))
def test_compare_slotted_diff(self):
self.check_raises(
Slotted(1, 2),
Slotted(1, 3),
"Slotted not as expected:\n"
"\n"
"attributes same:\n"
"['x']\n"
"\n"
'attributes differ:\n'
"'y': 2 != 3"
)
def test_empty_sets(self):
compare(set(), set())
def test_empty_sets_strict(self):
compare(set(), set(), strict=True)
def test_datetime_not_equal(self):
self.check_raises(
datetime(2001, 1, 1),
datetime(2001, 1, 2),
"datetime.datetime(2001, 1, 1, 0, 0) != "
"datetime.datetime(2001, 1, 2, 0, 0)"
)
def test_inherited_slots(self):
class Parent(object):
__slots__ = ('a',)
class Child(Parent):
__slots__ = ('b',)
def __init__(self, a, b):
self.a, self.b = a, b
self.check_raises(
Child(1, 'x'),
Child(2, 'x'),
'Child not as expected:\n'
'\n'
'attributes same:\n'
"['b']\n"
'\n'
'attributes differ:\n'
"'a': 1 != 2"
)
def test_empty_child_slots(self):
class Parent(object):
__slots__ = ('a',)
def __init__(self, a):
self.a = a
class Child(Parent):
__slots__ = ()
compare(Child(1), Child(1))
def test_slots_and_attrs(self):
class Parent(object):
__slots__ = ('a',)
class Child(Parent):
def __init__(self, a, b):
self.a = a
self.b = b
self.check_raises(Child(1, 2), Child(1, 3), message=(
'Child not as expected:\n'
'\n'
'attributes same:\n'
"['a']\n"
'\n'
'attributes differ:\n'
"'b': 2 != 3"
))
def test_partial_callable_different(self):
def foo(x): pass
def bar(y): pass
self.check_raises(
partial(foo),
partial(bar),
(
'partial not as expected:\n'
'\n'
'attributes same:\n'
"['args', 'keywords']\n"
'\n'
'attributes differ:\n'
"'func': {foo} != {bar}\n"
'\n'
'While comparing .func: {foo} != {bar}'
).format(foo=hexsub(repr(foo)), bar=hexsub(repr(bar))))
def test_partial_args_different(self):
def foo(x): pass
self.check_raises(
partial(foo, 1),
partial(foo, 2),
'partial not as expected:\n'
'\n'
'attributes same:\n'
"['func', 'keywords']\n"
'\n'
'attributes differ:\n'
"'args': (1,) != (2,)\n"
'\n'
'While comparing .args: sequence not as expected:\n'
'\n'
'same:\n'
'()\n'
'\n'
'first:\n'
'(1,)\n'
'\n'
'second:\n'
'(2,)'
)
def test_partial_kw_different(self):
def foo(x): pass
self.check_raises(
partial(foo, x=1, y=3),
partial(foo, x=2, z=4),
'partial not as expected:\n'
'\n'
'attributes same:\n'
"['args', 'func']\n"
'\n'
'attributes differ:\n'
"'keywords': {'x': 1, 'y': 3} != {'x': 2, 'z': 4}\n"
'\n'
'While comparing .keywords: dict not as expected:\n'
'\n'
'in first but not second:\n'
"'y': 3\n"
'\n'
'in second but not first:\n'
"'z': 4\n"
'\n'
'values differ:\n'
"'x': 1 != 2"
)
def test_partial_equal(self):
def foo(x): pass
compare(partial(foo, 1, x=2), partial(foo, 1, x=2))
def test_repr_and_attributes_equal(self):
class Wut(object):
def __repr__(self):
return 'Wut'
def __eq__(self, other):
return False
self.check_raises(
Wut(),
Wut(),
"Both x and y appear as 'Wut', but are not equal!"
)
self.check_raises(
expected=Wut(),
actual=Wut(),
message="Both expected and actual appear as 'Wut', but are not equal!"
)
def test_string_with_slotted(self):
class Slotted(object):
__slots__ = ['foo']
def __init__(self, foo):
self.foo = foo
def __repr__(self):
return repr(self.foo)
self.check_raises(
'foo',
Slotted('foo'),
"'foo' (%s) != 'foo' (%s)" % (repr(str), repr(Slotted))
)
def test_not_recursive(self):
self.check_raises(
{1: 'foo', 2: 'foo'},
{1: 'bar', 2: 'bar'},
"dict not as expected:\n"
"\n"
"values differ:\n"
"1: 'foo' != 'bar'\n"
"2: 'foo' != 'bar'\n"
"\n"
"While comparing [1]: 'foo' != 'bar'"
"\n\n"
"While comparing [2]: 'foo' != 'bar'"
)
class TestIgnore(CompareHelper):
class Parent(object):
def __init__(self, id, other):
self.id = id
self.other = other
def __repr__(self):
return '<{}:{}>'.format(type(self).__name__, self.id)
class Child(Parent): pass
def test_ignore_attributes(self):
compare(self.Parent(1, 3), self.Parent(2, 3), ignore_attributes={'id'})
def test_ignore_attributes_different_types(self):
self.check_raises(
self.Parent(1, 3),
self.Child(2, 3),
'<Parent:1> != <Child:2>',
ignore_attributes={'id'}
)
def test_ignore_attributes_per_type(self):
ignore = {self.Parent: {'id'}}
compare(self.Parent(1, 3), self.Parent(2, 3), ignore_attributes=ignore)
self.check_raises(
self.Child(1, 3),
self.Child(2, 3),
'Child not as expected:\n'
'\n'
'attributes same:\n'
"['other']\n"
'\n'
'attributes differ:\n'
"'id': 1 != 2",
ignore_attributes=ignore
)
class TestCompareObject(object):
class Thing(object):
def __init__(self, **kw):
for k, v in kw.items():
setattr(self, k, v)
def test_ignore(self):
def compare_thing(x, y, context):
return compare_object(x, y, context, ignore_attributes=['y'])
compare(self.Thing(x=1, y=2), self.Thing(x=1, y=3),
comparers={self.Thing: compare_thing})
def test_ignore_dict_context_list_param(self):
def compare_thing(x, y, context):
return compare_object(x, y, context, ignore_attributes=['y'])
compare(self.Thing(x=1, y=2, z=3), self.Thing(x=1, y=4, z=5),
comparers={self.Thing: compare_thing},
ignore_attributes={self.Thing: ['z']})
def test_ignore_list_context_list_param(self):
def compare_thing(x, y, context):
return compare_object(x, y, context, ignore_attributes=['y'])
compare(self.Thing(x=1, y=2, z=3), self.Thing(x=1, y=4, z=5),
comparers={self.Thing: compare_thing},
ignore_attributes=['z'])
class BaseClass(ABC):
pass
class MyDerivedClass(BaseClass):
def __init__(self, thing):
self.thing = thing
class ConcreteBaseClass(object): pass
class ConcreteDerivedClass(ConcreteBaseClass):
def __init__(self, thing):
self.thing = thing
class TestBaseClasses(CompareHelper):
def test_abc_equal(self):
thing1 = MyDerivedClass(1)
thing2 = MyDerivedClass(1)
compare(thing1, thing2)
def test_abc_unequal(self):
thing1 = MyDerivedClass(1)
thing2 = MyDerivedClass(2)
self.check_raises(thing1, thing2, message=(
"MyDerivedClass not as expected:\n\n"
"attributes differ:\n"
"'thing': 1 != 2"
))
def test_concrete_equal(self):
thing1 = ConcreteDerivedClass(1)
thing2 = ConcreteDerivedClass(1)
compare(thing1, thing2)
def test_concrete_unequal(self):
thing1 = ConcreteDerivedClass(1)
thing2 = ConcreteDerivedClass(2)
self.check_raises(thing1, thing2, message=(
"ConcreteDerivedClass not as expected:\n\n"
"attributes differ:\n"
"'thing': 1 != 2"
))
| Simplistix/testfixtures | testfixtures/tests/test_compare.py | Python | mit | 59,062 |
from octopy import *
def InitDuplication(host1, host2, timeout = Minutes(10)):
def dup_module(host1, host2):
return {
"const" : {
"type" : "octotron",
"host" : host1,
"dup_host" : host2
},
"sensor" : {
"working" : Boolean(timeout)
},
"trigger" : {
"not_working" : Match("working", False)
},
"react" : {
"notify1" : Reaction()
.On("not_working")
.Begin(Danger("tag", "OCTOTRON")
.Msg("loc", "{host}")
.Msg("descr", "octotron on {host} failed one check")
.Msg("msg", "don't panic - might be just a false alarm"))
.End(RDanger("tag", "OCTOTRON")
.Msg("loc", "{host}")
.Msg("descr", "octotron on {host} is ok")
.Msg("msg", "that was close..")),
"notify2" : Reaction()
.On("not_working", 2, 0)
.Begin(Danger("tag", "OCTOTRON")
.Msg("loc", "{host}")
.Msg("descr", "octotron on {host} failed TWO checks, emergency protocol activated")
.Msg("msg", "self-destruct sequence initiated. please panic")
.Exec("on_octotron_fail", "{host}", "{dup_host}")),
}
}
GenID(CreateObject(dup_module(host1, host2)))
GenID(CreateObject(dup_module(host2, host1)), n=1)
| srcc-msu/octotron | octopy_lib/util_duplication.py | Python | mit | 1,193 |
import json
from django.core.urlresolvers import reverse
from test_base import MainTestCase
from main.views import api
from odk_viewer.models.parsed_instance import ParsedInstance, \
_encode_for_mongo, _decode_from_mongo
import base64
def dict_for_mongo_without_userform_id(parsed_instance):
d = parsed_instance.to_dict_for_mongo()
# remove _userform_id since its not returned by the API
d.pop(ParsedInstance.USERFORM_ID)
return d
class TestFormAPI(MainTestCase):
def setUp(self):
MainTestCase.setUp(self)
self._create_user_and_login()
self._publish_transportation_form_and_submit_instance()
self.api_url = reverse(api, kwargs={
'username': self.user.username,
'id_string': self.xform.id_string
})
def test_api(self):
# query string
response = self.client.get(self.api_url, {})
self.assertEqual(response.status_code, 200)
d = dict_for_mongo_without_userform_id(
self.xform.surveys.all()[0].parsed_instance)
find_d = json.loads(response.content)[0]
self.assertEqual(sorted(find_d, key=find_d.get), sorted(d, key=d.get))
def test_api_with_query(self):
# query string
query = '{"transport/available_transportation_types_to_referral_facility":"none"}'
data = {'query': query}
response = self.client.get(self.api_url, data)
self.assertEqual(response.status_code, 200)
d = dict_for_mongo_without_userform_id(self.xform.surveys.all()[0].parsed_instance)
find_d = json.loads(response.content)[0]
self.assertEqual(sorted(find_d, key=find_d.get), sorted(d, key=d.get))
def test_api_query_no_records(self):
# query string
query = '{"available_transporation_types_to_referral_facility": "bicycle"}'
data = {'query': query}
response = self.client.get(self.api_url, data)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, '[]')
def test_handle_bad_json(self):
response = self.client.get(self.api_url, {'query': 'bad'})
self.assertEqual(response.status_code, 400)
self.assertEqual(True, 'JSON' in response.content)
def test_api_jsonp(self):
# query string
callback = 'jsonpCallback'
response = self.client.get(self.api_url, {'callback': callback})
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content.startswith(callback + '('), True)
self.assertEqual(response.content.endswith(')'), True)
        start = len(callback) + 1
        end = len(response.content) - 1
        content = response.content[start:end]
d = dict_for_mongo_without_userform_id(self.xform.surveys.all()[0].parsed_instance)
find_d = json.loads(content)[0]
self.assertEqual(sorted(find_d, key=find_d.get), sorted(d, key=d.get))
def test_api_with_query_start_limit(self):
# query string
query = '{"transport/available_transportation_types_to_referral_facility":"none"}'
data = {'query': query, 'start': 0, 'limit': 10}
response = self.client.get(self.api_url, data)
self.assertEqual(response.status_code, 200)
d = dict_for_mongo_without_userform_id(self.xform.surveys.all()[0].parsed_instance)
find_d = json.loads(response.content)[0]
self.assertEqual(sorted(find_d, key=find_d.get), sorted(d, key=d.get))
def test_api_with_query_invalid_start_limit(self):
# query string
query = '{"transport/available_transportation_types_to_referral_facility":"none"}'
data = {'query': query, 'start': -100, 'limit': -100}
response = self.client.get(self.api_url, data)
self.assertEqual(response.status_code, 400)
def test_api_count(self):
# query string
query = '{"transport/available_transportation_types_to_referral_facility":"none"}'
data = {'query': query, 'count': 1}
response = self.client.get(self.api_url, data)
self.assertEqual(response.status_code, 200)
find_d = json.loads(response.content)[0]
        self.assertIn('count', find_d)
self.assertEqual(find_d.get('count'), 1)
def test_api_column_select(self):
# query string
query = '{"transport/available_transportation_types_to_referral_facility":"none"}'
columns = '["transport/available_transportation_types_to_referral_facility"]'
data = {'query': query, 'fields': columns}
response = self.client.get(self.api_url, data)
self.assertEqual(response.status_code, 200)
find_d = json.loads(response.content)[0]
        self.assertIn(
            'transport/available_transportation_types_to_referral_facility', find_d)
        self.assertNotIn('_attachments', find_d)
def test_api_decode_from_mongo(self):
field = "$section1.group01.question1"
encoded = _encode_for_mongo(field)
self.assertEqual(encoded, ("%(dollar)ssection1%(dot)sgroup01%(dot)squestion1" % \
{"dollar": base64.b64encode("$"), \
"dot": base64.b64encode(".")}))
decoded = _decode_from_mongo(encoded)
self.assertEqual(field, decoded)
def test_api_with_or_query(self):
"""Test that an or query is interpreted correctly since we use an
internal or query to filter out deleted records"""
for i in range(1, 3):
self._submit_transport_instance(i)
        # record 0: does NOT have the 'transport/loop_over_transport_types_frequency/ambulance/frequency_to_referral_facility' field
        # record 1: 'transport/loop_over_transport_types_frequency/ambulance/frequency_to_referral_facility': 'daily'
        # record 2: 'transport/loop_over_transport_types_frequency/ambulance/frequency_to_referral_facility': 'weekly'
params = {
'query':
'{"$or": [{"transport/loop_over_transport_types_frequency/ambulance/frequency_to_referral_facility": "weekly"}, '
'{"transport/loop_over_transport_types_frequency/ambulance/frequency_to_referral_facility": "daily"}]}'}
response = self.client.get(self.api_url, params)
self.assertEqual(response.status_code, 200)
data = json.loads(response.content)
self.assertEqual(len(data), 2)
# check that blank params give us all our records i.e. 3
params = {}
response = self.client.get(self.api_url, params)
self.assertEqual(response.status_code, 200)
data = json.loads(response.content)
self.assertEqual(len(data), 3)
def test_api_cors_options(self):
response = self.anon.options(self.api_url)
allowed_headers = ['Accept', 'Origin', 'X-Requested-With',
'Authorization']
provided_headers = [h.strip() for h in
response['Access-Control-Allow-Headers'].split(',')]
self.assertListEqual(allowed_headers, provided_headers)
self.assertEqual(response['Access-Control-Allow-Methods'], 'GET')
self.assertEqual(response['Access-Control-Allow-Origin'], '*')
| makinacorpus/formhub | main/tests/test_form_api.py | Python | bsd-2-clause | 7,229 |
import io
import zipfile
import csv
import sys
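# Usage sketch (illustrative; "BDSP" is just an example ACS column name):
#
#   python getCSVByColumn.py BDSP
#
# Expects ss13husa.csv.zip and ss13husb.csv.zip in the working directory and
# writes output-BDSP.csv with the STATE, PUMA and requested columns.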
w = csv.writer(open("output-{0}.csv".format(sys.argv[1]), "w"))
w.writerow(["STATE", "PUMA", sys.argv[1]])
# number of rows for ss13husa, ss13husb
row_counts = [756065,720248]
pumaC = 0
for fNr in range(2):
if (fNr == 0):
alpha = 'a'
else:
alpha = 'b'
zf = zipfile.ZipFile('ss13hus{0}.csv.zip'.format(alpha))
f = io.TextIOWrapper(zf.open("ss13hus{0}.csv".format(alpha), "rU"))
csvf = csv.reader(f)
    header = next(csvf)
pumaColNr = header.index('PUMA')
stColNr = header.index('ST')
colNr = header.index(sys.argv[1])
for i in range(row_counts[fNr]):
        row = next(csvf)
        puma = row[pumaColNr]
        state = row[stColNr]
        col = row[colNr]
# ignore N/A entries
if (col == ''):
continue
if (int(puma) == 100 and int(state) == 35):
pumaC += 1
        col = int(col)
        w.writerow([state, puma, col])
print("PumaC: ",pumaC)
| openmachinesblog/visualization-census-2013 | getCSVByColumn.py | Python | mit | 1,038 |
# coding=utf-8
# Author: Gonçalo M. (aka duramato/supergonkas) <supergonkas@gmail.com>
#
# This file is part of Medusa.
#
# Medusa is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Medusa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Medusa. If not, see <http://www.gnu.org/licenses/>.
"""Provider code for Bitsnoop."""
from __future__ import unicode_literals
import traceback
from requests.compat import urljoin
from ..torrent_provider import TorrentProvider
from .... import app, logger, tv_cache
from ....bs4_parser import BS4Parser
from ....helper.common import convert_size, try_int
class BitSnoopProvider(TorrentProvider):
"""BitSnoop Torrent provider."""
def __init__(self):
"""Initialize the class."""
        super(BitSnoopProvider, self).__init__('BitSnoop')
# Credentials
self.public = True
# URLs
self.url = 'https://bitsnoop.com'
self.urls = {
'base': self.url,
'rss': urljoin(self.url, '/new_video.html?fmt=rss'),
'search': urljoin(self.url, '/search/video/'),
}
# Proper Strings
self.proper_strings = ['PROPER', 'REPACK']
# Miscellaneous Options
# Torrent Stats
self.minseed = None
self.minleech = None
# Cache
self.cache = tv_cache.TVCache(self, search_params={'RSS': ['rss']})
def search(self, search_strings, age=0, ep_obj=None):
"""
Search a provider and parse the results.
:param search_strings: A dict with mode (key) and the search value (value)
:param age: Not used
:param ep_obj: Not used
:returns: A list of search results (structure)
"""
results = []
for mode in search_strings:
logger.log('Search mode: {0}'.format(mode), logger.DEBUG)
for search_string in search_strings[mode]:
if mode != 'RSS':
logger.log('Search string: {search}'.format
(search=search_string), logger.DEBUG)
                if mode == 'RSS':
                    search_url = self.urls['rss']
                else:
                    search_url = self.urls['search'] + search_string + '/s/d/1/?fmt=rss'
response = self.get_url(search_url, returns='response')
if not response or not response.text:
logger.log('No data returned from provider', logger.DEBUG)
continue
                elif not response.text.startswith('<?xml'):
logger.log('Expected xml but got something else, is your mirror failing?', logger.INFO)
continue
results += self.parse(response.text, mode)
return results
def parse(self, data, mode):
"""
Parse search results for items.
:param data: The raw response from a search
:param mode: The current mode used to search, e.g. RSS
:return: A list of items found
"""
items = []
with BS4Parser(data, 'html5lib') as html:
torrent_rows = html('item')
for row in torrent_rows:
try:
if not row.category.text.endswith(('TV', 'Anime')):
continue
title = row.title.text
# Use the torcache link bitsnoop provides,
# unless it is not torcache or we are not using blackhole
# because we want to use magnets if connecting direct to client
# so that proxies work.
download_url = row.enclosure['url']
if app.TORRENT_METHOD != 'blackhole' or 'torcache' not in download_url:
download_url = row.find('magneturi').next.replace('CDATA', '').strip('[]') + \
self._custom_trackers
if not all([title, download_url]):
continue
seeders = try_int(row.find('numseeders').text)
leechers = try_int(row.find('numleechers').text)
# Filter unseeded torrent
if seeders < min(self.minseed, 1):
if mode != 'RSS':
logger.log("Discarding torrent because it doesn't meet the "
"minimum seeders: {0}. Seeders: {1}".format
(title, seeders), logger.DEBUG)
continue
torrent_size = row.find('size').text
size = convert_size(torrent_size) or -1
item = {
'title': title,
'link': download_url,
'size': size,
'seeders': seeders,
'leechers': leechers,
'pubdate': None,
}
if mode != 'RSS':
logger.log('Found result: {0} with {1} seeders and {2} leechers'.format
(title, seeders, leechers), logger.DEBUG)
items.append(item)
except (AttributeError, TypeError, KeyError, ValueError, IndexError):
logger.log('Failed parsing provider. Traceback: {0!r}'.format
(traceback.format_exc()), logger.ERROR)
return items
provider = BitSnoopProvider()
| FireBladeNooT/Medusa_1_6 | medusa/providers/torrent/xml/bitsnoop.py | Python | gpl-3.0 | 5,911 |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for rnn module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import itertools
import numpy as np
import tensorflow as tf
class StackBidirectionalRNNTest(tf.test.TestCase):
def setUp(self):
self._seed = 23489
np.random.seed(self._seed)
def _createStackBidirectionalRNN(self,
use_gpu,
use_shape,
use_sequence_length,
initial_states_fw=None,
initial_states_bw=None,
scope=None):
self.layers = [2, 3]
input_size = 5
batch_size = 2
max_length = 8
initializer = tf.random_uniform_initializer(-0.01, 0.01, seed=self._seed)
sequence_length = tf.placeholder(tf.int64) if use_sequence_length else None
self.cells_fw = [tf.contrib.rnn.LSTMCell(
num_units, input_size, initializer=initializer, state_is_tuple=False)
for num_units in self.layers]
self.cells_bw = [tf.contrib.rnn.LSTMCell(
num_units, input_size, initializer=initializer, state_is_tuple=False)
for num_units in self.layers]
inputs = max_length * [
tf.placeholder(
tf.float32,
shape=(batch_size, input_size) if use_shape else (None, input_size))
]
outputs, state_fw, state_bw = tf.contrib.rnn.stack_bidirectional_rnn(
self.cells_fw,
self.cells_bw,
inputs,
initial_states_fw,
initial_states_bw,
dtype=tf.float32,
sequence_length=sequence_length,
scope=scope)
self.assertEqual(len(outputs), len(inputs))
for out in outputs:
      self.assertEqual(
out.get_shape().as_list(),
[batch_size if use_shape else None, 2 * self.layers[-1]])
input_value = np.random.randn(batch_size, input_size)
outputs = tf.stack(outputs)
return input_value, inputs, outputs, state_fw, state_bw, sequence_length
def _testStackBidirectionalRNN(self, use_gpu, use_shape):
with self.test_session(use_gpu=use_gpu, graph=tf.Graph()) as sess:
input_value, inputs, outputs, state_fw, state_bw, sequence_length = (
self._createStackBidirectionalRNN(use_gpu, use_shape, True))
tf.global_variables_initializer().run()
# Run with pre-specified sequence lengths of 2, 3.
out, s_fw, s_bw = sess.run([outputs, state_fw, state_bw],
feed_dict={inputs[0]: input_value,
sequence_length: [2, 3]})
# Since the forward and backward LSTM cells were initialized with the
# same parameters, the forward and backward states of the first layer
# must be the same.
      # For the next layers, since the input is a concat of forward and backward
      # outputs of the previous layers, the symmetry is broken and the following
      # states and outputs differ.
# We cannot access the intermediate values between layers but we can
# check that the forward and backward states of the first layer match.
self.assertAllClose(s_fw[0], s_bw[0])
# If outputs are not concat between layers the output of the forward
# and backward would be the same but symmetric.
# Check that it is not the case.
# Due to depth concatenation (as num_units=3 for both RNNs):
# - forward output: out[][][depth] for 0 <= depth < 3
      # - backward output: out[][][depth] for 3 <= depth < 6
# First sequence in batch is length=2
# Check that the time=0 forward output is not equal to time=1 backward.
self.assertNotEqual(out[0][0][0], out[1][0][3])
self.assertNotEqual(out[0][0][1], out[1][0][4])
self.assertNotEqual(out[0][0][2], out[1][0][5])
# Check that the time=1 forward output is not equal to time=0 backward.
self.assertNotEqual(out[1][0][0], out[0][0][3])
self.assertNotEqual(out[1][0][1], out[0][0][4])
self.assertNotEqual(out[1][0][2], out[0][0][5])
# Second sequence in batch is length=3
# Check that the time=0 forward output is not equal to time=2 backward.
self.assertNotEqual(out[0][1][0], out[2][1][3])
self.assertNotEqual(out[0][1][1], out[2][1][4])
self.assertNotEqual(out[0][1][2], out[2][1][5])
# Check that the time=1 forward output is not equal to time=1 backward.
self.assertNotEqual(out[1][1][0], out[1][1][3])
self.assertNotEqual(out[1][1][1], out[1][1][4])
self.assertNotEqual(out[1][1][2], out[1][1][5])
# Check that the time=2 forward output is not equal to time=0 backward.
self.assertNotEqual(out[2][1][0], out[0][1][3])
self.assertNotEqual(out[2][1][1], out[0][1][4])
self.assertNotEqual(out[2][1][2], out[0][1][5])
def _testStackBidirectionalRNNStates(self, use_gpu):
# Check that the states are correctly initialized.
# - Create a net and iterate for 3 states. Keep the state (state_3).
# - Reset states, and iterate for 5 steps. Last state is state_5.
# - Reset the sets to state_3 and iterate for 2 more steps,
# last state will be state_5'.
# - Check that the state_5 and state_5' (forward and backward) are the
# same for the first layer (it does not apply for the second layer since
# it has forward-backward dependencies).
with self.test_session(use_gpu=use_gpu, graph=tf.Graph()) as sess:
batch_size = 2
# Create states placeholders.
initial_states_fw = [tf.placeholder(tf.float32, shape=(batch_size, layer*2))
for layer in self.layers]
initial_states_bw = [tf.placeholder(tf.float32, shape=(batch_size, layer*2))
for layer in self.layers]
# Create the net
input_value, inputs, outputs, state_fw, state_bw, sequence_length = (
self._createStackBidirectionalRNN(use_gpu, True, True,
initial_states_fw, initial_states_bw))
tf.global_variables_initializer().run()
# Run 3 steps.
feed_dict = {inputs[0]: input_value, sequence_length: [3, 2]}
# Initialize to empty state.
for i, layer in enumerate(self.layers):
feed_dict[initial_states_fw[i]] = np.zeros((batch_size, layer*2),
dtype=np.float32)
feed_dict[initial_states_bw[i]] = np.zeros((batch_size, layer*2),
dtype=np.float32)
_, st_3_fw, st_3_bw = sess.run([outputs, state_fw, state_bw],
feed_dict=feed_dict)
# Reset the net and run 5 steps.
feed_dict = {inputs[0]: input_value, sequence_length: [5, 3]}
for i, layer in enumerate(self.layers):
feed_dict[initial_states_fw[i]] = np.zeros((batch_size, layer*2),
dtype=np.float32)
feed_dict[initial_states_bw[i]] = np.zeros((batch_size, layer*2),
dtype=np.float32)
_, st_5_fw, st_5_bw = sess.run([outputs, state_fw, state_bw],
feed_dict=feed_dict)
# Reset the net to state_3 and run 2 more steps.
feed_dict = {inputs[0]: input_value, sequence_length: [2, 1]}
for i, _ in enumerate(self.layers):
feed_dict[initial_states_fw[i]] = st_3_fw[i]
feed_dict[initial_states_bw[i]] = st_3_bw[i]
out_5p, st_5p_fw, st_5p_bw = sess.run([outputs, state_fw, state_bw],
feed_dict=feed_dict)
      # Check that the first-layer states after 3+2 steps match those after 5 steps.
self.assertAllEqual(st_5_fw[0], st_5p_fw[0])
self.assertAllEqual(st_5_bw[0], st_5p_bw[0])
def testStackBidirectionalRNN(self):
self._testStackBidirectionalRNN(use_gpu=False, use_shape=False)
self._testStackBidirectionalRNN(use_gpu=True, use_shape=False)
self._testStackBidirectionalRNN(use_gpu=False, use_shape=True)
self._testStackBidirectionalRNN(use_gpu=True, use_shape=True)
self._testStackBidirectionalRNNStates(use_gpu=False)
self._testStackBidirectionalRNNStates(use_gpu=True)
def _createStackBidirectionalDynamicRNN(self,
use_gpu,
use_shape,
use_state_tuple,
initial_states_fw=None,
initial_states_bw=None,
scope=None):
self.layers = [2, 3]
input_size = 5
batch_size = 2
max_length = 8
initializer = tf.random_uniform_initializer(-0.01, 0.01, seed=self._seed)
sequence_length = tf.placeholder(tf.int64)
self.cells_fw = [tf.contrib.rnn.LSTMCell(
num_units, input_size, initializer=initializer, state_is_tuple=False)
for num_units in self.layers]
self.cells_bw = [tf.contrib.rnn.LSTMCell(
num_units, input_size, initializer=initializer, state_is_tuple=False)
for num_units in self.layers]
inputs = max_length * [
tf.placeholder(
tf.float32,
shape=(batch_size, input_size) if use_shape else (None, input_size))
]
inputs_c = tf.stack(inputs)
inputs_c = tf.transpose(inputs_c, [1, 0, 2])
outputs, st_fw, st_bw = tf.contrib.rnn.stack_bidirectional_dynamic_rnn(
self.cells_fw,
self.cells_bw,
inputs_c,
initial_states_fw=initial_states_fw,
initial_states_bw=initial_states_bw,
dtype=tf.float32,
sequence_length=sequence_length,
scope=scope)
    # Outputs has shape (batch_size, max_length, 2 * layers[-1]).
output_shape = [None, max_length, 2 * self.layers[-1]]
if use_shape:
output_shape[0] = batch_size
self.assertAllEqual(outputs.get_shape().as_list(), output_shape)
input_value = np.random.randn(batch_size, input_size)
return input_value, inputs, outputs, st_fw, st_bw, sequence_length
def _testStackBidirectionalDynamicRNN(self, use_gpu, use_shape,
use_state_tuple):
with self.test_session(use_gpu=use_gpu, graph=tf.Graph()) as sess:
input_value, inputs, outputs, state_fw, state_bw, sequence_length = (
self._createStackBidirectionalDynamicRNN(use_gpu, use_shape,
use_state_tuple))
tf.global_variables_initializer().run()
# Run with pre-specified sequence length of 2, 3
out, s_fw, s_bw = sess.run([outputs, state_fw, state_bw],
feed_dict={inputs[0]: input_value,
sequence_length: [2, 3]})
# Since the forward and backward LSTM cells were initialized with the
      # same parameters, the forward and backward states of the first layer
      # have to be the same.
      # For the next layers, since the input is a concat of forward and backward
      # outputs of the previous layers, the symmetry is broken and the following
      # states and outputs differ.
# We cannot access the intermediate values between layers but we can
# check that the forward and backward states of the first layer match.
self.assertAllClose(s_fw[0], s_bw[0])
out = np.swapaxes(out, 0, 1)
# If outputs are not concat between layers the output of the forward
# and backward would be the same but symmetric.
      # Check that this is not the case.
# Due to depth concatenation (as num_units=3 for both RNNs):
# - forward output: out[][][depth] for 0 <= depth < 3
      # - backward output: out[][][depth] for 3 <= depth < 6
# First sequence in batch is length=2
# Check that the time=0 forward output is not equal to time=1 backward.
self.assertNotEqual(out[0][0][0], out[1][0][3])
self.assertNotEqual(out[0][0][1], out[1][0][4])
self.assertNotEqual(out[0][0][2], out[1][0][5])
# Check that the time=1 forward output is not equal to time=0 backward.
self.assertNotEqual(out[1][0][0], out[0][0][3])
self.assertNotEqual(out[1][0][1], out[0][0][4])
self.assertNotEqual(out[1][0][2], out[0][0][5])
# Second sequence in batch is length=3
# Check that the time=0 forward output is not equal to time=2 backward.
self.assertNotEqual(out[0][1][0], out[2][1][3])
self.assertNotEqual(out[0][1][1], out[2][1][4])
self.assertNotEqual(out[0][1][2], out[2][1][5])
# Check that the time=1 forward output is not equal to time=1 backward.
self.assertNotEqual(out[1][1][0], out[1][1][3])
self.assertNotEqual(out[1][1][1], out[1][1][4])
self.assertNotEqual(out[1][1][2], out[1][1][5])
# Check that the time=2 forward output is not equal to time=0 backward.
self.assertNotEqual(out[2][1][0], out[0][1][3])
self.assertNotEqual(out[2][1][1], out[0][1][4])
self.assertNotEqual(out[2][1][2], out[0][1][5])
def _testStackBidirectionalDynamicRNNStates(self, use_gpu):
# Check that the states are correctly initialized.
# - Create a net and iterate for 3 states. Keep the state (state_3).
# - Reset states, and iterate for 5 steps. Last state is state_5.
# - Reset the sets to state_3 and iterate for 2 more steps,
# last state will be state_5'.
# - Check that the state_5 and state_5' (forward and backward) are the
# same for the first layer (it does not apply for the second layer since
# it has forward-backward dependencies).
with self.test_session(use_gpu=use_gpu, graph=tf.Graph()) as sess:
      batch_size = 2
# Create states placeholders.
initial_states_fw = [tf.placeholder(tf.float32, shape=(batch_size, layer*2))
for layer in self.layers]
initial_states_bw = [tf.placeholder(tf.float32, shape=(batch_size, layer*2))
for layer in self.layers]
# Create the net
input_value, inputs, outputs, state_fw, state_bw, sequence_length = (
self._createStackBidirectionalDynamicRNN(
use_gpu,
use_shape=True,
use_state_tuple=False,
initial_states_fw=initial_states_fw,
initial_states_bw=initial_states_bw))
tf.global_variables_initializer().run()
# Run 3 steps.
feed_dict = {inputs[0]: input_value, sequence_length: [3, 2]}
# Initialize to empty state.
for i, layer in enumerate(self.layers):
feed_dict[initial_states_fw[i]] = np.zeros((batch_size, layer*2),
dtype=np.float32)
feed_dict[initial_states_bw[i]] = np.zeros((batch_size, layer*2),
dtype=np.float32)
_, st_3_fw, st_3_bw = sess.run([outputs, state_fw, state_bw],
feed_dict=feed_dict)
# Reset the net and run 5 steps.
feed_dict = {inputs[0]: input_value, sequence_length: [5, 3]}
for i, layer in enumerate(self.layers):
feed_dict[initial_states_fw[i]] = np.zeros((batch_size, layer*2),
dtype=np.float32)
feed_dict[initial_states_bw[i]] = np.zeros((batch_size, layer*2),
dtype=np.float32)
_, st_5_fw, st_5_bw = sess.run([outputs, state_fw, state_bw],
feed_dict=feed_dict)
# Reset the net to state_3 and run 2 more steps.
feed_dict = {inputs[0]: input_value, sequence_length: [2, 1]}
for i, _ in enumerate(self.layers):
feed_dict[initial_states_fw[i]] = st_3_fw[i]
feed_dict[initial_states_bw[i]] = st_3_bw[i]
out_5p, st_5p_fw, st_5p_bw = sess.run([outputs, state_fw, state_bw],
feed_dict=feed_dict)
      # Check that the first-layer states after 3+2 steps match those after 5 steps.
self.assertAllEqual(st_5_fw[0], st_5p_fw[0])
self.assertAllEqual(st_5_bw[0], st_5p_bw[0])
def testBidirectionalRNN(self):
# Generate 2^3 option values
# from [True, True, True] to [False, False, False]
options = itertools.product([True, False], repeat=3)
for option in options:
self._testStackBidirectionalDynamicRNN(
use_gpu=option[0], use_shape=option[1], use_state_tuple=option[2])
# Check States.
self._testStackBidirectionalDynamicRNNStates(
use_gpu=False)
self._testStackBidirectionalDynamicRNNStates(
use_gpu=True)
def _testScope(self, factory, prefix="prefix", use_outer_scope=True):
    # REMARKS: factory(scope) is a function accepting a scope
    # as an argument; the scope can be None, a string,
    # or a VariableScope instance.
with self.test_session(use_gpu=True, graph=tf.Graph()):
if use_outer_scope:
with tf.variable_scope(prefix) as scope:
factory(scope)
else:
factory(prefix)
      # Check that all the variable names start with the proper scope.
tf.global_variables_initializer()
all_vars = tf.global_variables()
prefix = prefix or "stack_bidirectional_rnn"
scope_vars = [v for v in all_vars if v.name.startswith(prefix + "/")]
tf.logging.info("StackRNN with scope: %s (%s)"
% (prefix, "scope" if use_outer_scope else "str"))
for v in scope_vars:
tf.logging.info(v.name)
self.assertEqual(len(scope_vars), len(all_vars))
def testStackBidirectionalRNNScope(self):
def factory(scope):
return self._createStackBidirectionalRNN(
use_gpu=True, use_shape=True,
use_sequence_length=True, scope=scope)
self._testScope(factory, use_outer_scope=True)
self._testScope(factory, use_outer_scope=False)
self._testScope(factory, prefix=None, use_outer_scope=False)
def testBidirectionalDynamicRNNScope(self):
def factory(scope):
return self._createStackBidirectionalDynamicRNN(
use_gpu=True, use_shape=True, use_state_tuple=True, scope=scope)
self._testScope(factory, use_outer_scope=True)
self._testScope(factory, use_outer_scope=False)
self._testScope(factory, prefix=None, use_outer_scope=False)
if __name__ == "__main__":
tf.test.main()
| AndreasMadsen/tensorflow | tensorflow/contrib/rnn/python/kernel_tests/rnn_test.py | Python | apache-2.0 | 18,801 |
# -*- coding: utf-8 -*-
from __future__ import print_function
import logging
import exhibitionist.settings as settings
def getLogger(name, level=settings.DEFAULT_LOG_LEVEL):
logger = logging.getLogger(name.replace("exhibitionist.", ""))
    # guard against attaching duplicate handlers when getLogger() is
    # called repeatedly with the same name
    if not logger.handlers:
        sh = logging.FileHandler(settings.LOG_FILE)
        sh.setFormatter(settings.LOG_FORMAT)
        logger.addHandler(sh)
    logger.setLevel(level)
return logger | kentfrazier/Exhibitionist | exhibitionist/log.py | Python | bsd-3-clause | 406 |
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2015-2018 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Admin model views for records."""
import json
from flask import flash
from flask_admin.contrib.sqla import ModelView
from flask_babelex import gettext as _
from invenio_admin.filters import FilterConverter
from invenio_db import db
from markupsafe import Markup
from sqlalchemy.exc import SQLAlchemyError
from .api import Record
from .models import RecordMetadata
class RecordMetadataModelView(ModelView):
"""Records admin model view."""
filter_converter = FilterConverter()
can_create = False
can_edit = False
can_delete = True
can_view_details = True
column_list = ('id', 'version_id', 'updated', 'created',)
column_details_list = ('id', 'version_id', 'updated', 'created', 'json')
column_labels = dict(
id=_('UUID'),
version_id=_('Revision'),
json=_('JSON'),
)
column_formatters = dict(
version_id=lambda v, c, m, p: m.version_id-1,
json=lambda v, c, m, p: Markup("<pre>{0}</pre>".format(
json.dumps(m.json, indent=2, sort_keys=True)))
)
column_filters = ('created', 'updated', )
column_default_sort = ('updated', True)
page_size = 25
def delete_model(self, model):
"""Delete a record."""
try:
if model.json is None:
return True
record = Record(model.json, model=model)
record.delete()
db.session.commit()
except SQLAlchemyError as e:
if not self.handle_view_exception(e):
flash(_('Failed to delete record. %(error)s', error=str(e)),
category='error')
db.session.rollback()
return False
return True
record_adminview = dict(
modelview=RecordMetadataModelView,
model=RecordMetadata,
category=_('Records'))
| tiborsimko/invenio-records | invenio_records/admin.py | Python | mit | 2,051 |
#!/usr/bin/env python3
# Copyright (c) 2013 Jakub Filipowicz <jakubf@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import sys
from m400lib import *
from m400_utils import *
NIETAK = ["Nie", "Tak"]
# ------------------------------------------------------------------------
class WordConf:
# --------------------------------------------------------------------
def __init__(self, word, disableable = False):
self.word = word
self.disableable = disableable
self.keys = []
self.conf = {}
self.pos = 0
# --------------------------------------------------------------------
def __iter__(self):
self.pos = 0
return self
# --------------------------------------------------------------------
def add(self, key, name, bit_start, bit_end, options = []):
bitlen = bit_end - bit_start + 1
if key in self.conf:
raise KeyError("Key '%s' alredy defined" % key)
if bit_end < bit_start:
raise ValueError("Starting bit > ending bit")
if len(options) != 0 and len(options) != 2**(bitlen):
raise ValueError("Option list size != field capacity")
mask = bitmask(bitlen)
shift = 15 - bit_end
value = (self.word >> shift) & mask
self.keys.append(key)
self.conf[key] = [name, value, mask, shift, options]
# --------------------------------------------------------------------
def set(self, key, value):
if value > self.conf[key][2]:
raise ValueError("Value %i won't fit in '%s' bit length" % (value, key))
self.conf[key][1] = value
# --------------------------------------------------------------------
def get_word(self):
word = 0
for k in self.keys:
word |= (self.conf[k][1] & self.conf[k][2]) << self.conf[k][3]
return word
# --------------------------------------------------------------------
def is_disabled(self):
        if self.disableable and self.word == 65535:
return True
return False
# --------------------------------------------------------------------
def get(self, key):
if self.is_disabled():
raise ValueError("Device disabled")
entry = self.conf[key]
name = entry[0]
value = entry[1]
try:
value_name = entry[4][entry[1]]
        except IndexError:
value_name = "%s" % value
return key, name, value, value_name
# --------------------------------------------------------------------
def get_val(self, key):
return self.conf[key][1]
# --------------------------------------------------------------------
def get_by_id(self, key_id):
return self.get(self.keys[key_id])
# --------------------------------------------------------------------
def __next__(self):
if (self.pos > len(self.conf)-1):
raise StopIteration
self.pos += 1
return self.get_by_id(self.pos-1)
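# Illustrative sketch (not part of the original tool; the field names and the
# example word below are made up): WordConf slices a 16-bit configuration word
# into named bit fields, which can be read back or modified and re-packed.
#
#   wc = WordConf(0b1000000000000011)
#   wc.add("flag", "Example flag", 0, 0, NIETAK)   # leftmost bit
#   wc.add("count", "Example counter", 8, 15)      # low byte
#   wc.get("flag")    # -> ("flag", "Example flag", 1, "Tak")
#   wc.set("count", 7)
#   assert wc.get_word() == 0b1000000000000111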
# ------------------------------------------------------------------------
class M400Conf:
# --------------------------------------------------------------------
def __init__(self, image, offset):
self.image = image
self.offset = offset
self.data = wload(self.image, self.offset, 64)
self.magic_w0 = 0b1111000000000000 # UJ
self.magic_w2 = 0b0000000000000000 # 0
self.magic_w3 = 0b0001011000000000 # 0x1600
if self.data[0] != self.magic_w0 or self.data[2] != self.magic_w2 or self.data[3] != self.magic_w3:
raise ValueError("Image '%s' doesn't contain system at offset %i" % (self.image, self.offset))
self.config = {}
self.config["sys1"] = self.parse_sys1(self.data[0o4])
self.config["multix"] = self.parse_multix(self.data[0o5])
self.config["sys2"] = self.parse_sys2(self.data[0o6])
self.config["sys3"] = self.parse_sys3(self.data[0o7])
for i in range(0, 8):
self.config["mem%i"%i] = self.parse_mem(self.data[0o10+i])
for i in range(0, 16):
self.config["disk%i"%i] = self.parse_disk(self.data[0o20+i])
for i in range(0, 4):
self.config["tape%i"%i] = self.parse_tapes(self.data[0o40+i])
self.config["io1"] = self.parse_io1(self.data[0o44])
self.config["io2"] = self.parse_io2(self.data[0o45])
self.config["io3"] = self.parse_io3(self.data[0o46])
self.config["io4"] = self.parse_io4(self.data[0o47])
self.config["io5"] = self.parse_io5(self.data[0o50])
self.config["io6"] = self.parse_io6(self.data[0o51])
self.config["io7"] = self.parse_io7(self.data[0o52])
self.config["io8"] = self.parse_io8(self.data[0o53])
self.config["rtc"] = self.parse_rtc(self.data[0o56])
self.config["mon"] = self.parse_mon(self.data[0o57])
self.config["oprq"] = self.parse_oprq(self.data[0o60])
for i in range(0, 15):
self.config["char%i"%i] = self.parse_char(self.data[0o61+i])
# --------------------------------------------------------------------
def sections(self):
return self.config.keys()
# --------------------------------------------------------------------
def parse_sys1(self, word):
sys1 = WordConf(word)
sys1.add("exlsem", "Ekstrakody semaforowe", 0, 0, NIETAK)
sys1.add("dw3", "Obsługa DW3", 1, 1, NIETAK)
sys1.add("lod", "Ekstrakody LOD i UNL", 2, 2, NIETAK)
sys1.add("timer", "Szybki zegar", 3, 3, NIETAK)
sys1.add("noswap", "Bez trybu z wymianami", 4, 4, NIETAK)
sys1.add("4kbuf", "Używaj byforów 4k", 5, 5, NIETAK)
sys1.add("res", "Programy jako rezydujące", 6, 6, NIETAK)
sys1.add("automx", "Automatyczna obsługa MULTIX-a", 7, 7, NIETAK)
sys1.add("sysbuf", "Liczba buf. systemowych", 8, 15)
return sys1
# --------------------------------------------------------------------
def parse_multix(self, word):
mulx = WordConf(word)
mulx.add("err", "Obsługa błędów", 0, 0, NIETAK)
mulx.add("nopar", "Bez parzystości", 1, 1, NIETAK)
mulx.add("par", "Z nieparzystością", 2, 2, NIETAK)
mulx.add("8bit", "8 bitów", 3, 3, NIETAK)
mulx.add("xonxoff", "XON/XOFF", 4, 4, NIETAK)
mulx.add("bscan", "Obsługuj BS i CAN", 5, 5, NIETAK)
mulx.add("upper", "Litery małe na duże", 6, 6, NIETAK)
mulx.add("dumper", "Dołącz dumper", 7, 7, NIETAK)
mulx.add("strvec", "Liczba tablic strumieni", 8, 15)
return mulx
# --------------------------------------------------------------------
def parse_sys2(self, word):
sys2 = WordConf(word)
sys2.add("autoram", "Zbiory robocze w RAM", 0, 0, NIETAK)
sys2.add("uservec", "Liczba tablic skorowidzów", 8, 15)
return sys2
# --------------------------------------------------------------------
def parse_sys3(self, word):
sys3 = WordConf(word)
sys3.add("sysram", "Bloki dla OS", 0, 3)
sys3.add("buflen", "Długość buforów końcówek", 8, 15)
return sys3
# --------------------------------------------------------------------
def parse_mem(self, word):
mem = WordConf(word)
mem.add("silicon", "Typ pamięci", 0, 0, ["Ferrytowa", "Półprzewodnikowa"])
mem.add("mega", "Rozmiar modułów", 1, 1, ["32k", "MEGA 64k"])
mem.add("blocks", "Liczba bloków", 2, 7)
mem.add("blstart", "Blok poczatkowy", 8, 11)
mem.add("mdstart", "Moduł poczatkowy", 12, 15)
return mem
# --------------------------------------------------------------------
def parse_disk(self, word):
disk = WordConf(word, disableable = True)
disk.add("foreign", "Talerz", 0, 0, ["Własny", "Obcy"])
disk.add("dtype", "Typ", 1, 2, ['MERA 9425 w kanale pamięciowym', 'Winchester', 'Floppy', '9425 lub EC 5061 w PLIX-ie'])
disk_type = disk.get_val('dtype')
# MERA 9425 w kanale pamięciowym
if disk_type == 0:
disk.add("unit", "Jednostka St.", 7, 9)
disk.add("chan", "Kanał", 10, 13)
disk.add("fixed", "Talerz", 15, 15, ["Wymienny", "Stały"])
# Floppy
elif disk_type == 2:
disk.add("inch", "Rozmiar flopa", 3, 3, ['8"', '5.25"'])
inch = disk.get_val('inch')
if inch == 0:
disk.add("number", "Numer jednostki", 8, 12)
disk.add("door", "Drzwiczki stacji", 13, 15)
else:
disk.add("density", "Gęstość", 10, 12, ["SD", "DD", "HD", "--", "--", "--", "--", "--"])
disk.add("number", "Numer stacji", 13, 15)
# Winchester
elif disk_type == 1:
disk.add("quant", "Kwant startowy", 4, 9)
disk.add("type", "Typ Winchestera", 10, 12)
disk.add("number", "Numer Winchestera", 14, 15)
# MERA 9425 lub EC 5061 w PLIX-ie
elif disk_type == 3:
disk.add("type", "Typ", 6, 7, ['EC-5061', 'MERA 9425 (talerz wymienny)', 'MERA 9425 (talerz stały)', 'MERA 9425 (cały dysk)'])
disk.add("plix", "Numer pakiety PLIX-a", 8, 12)
disk.add("number", "Numer urządzenia", 13, 15)
return disk
# --------------------------------------------------------------------
def parse_tapes(self, word):
tape = WordConf(word, disableable = True)
tape.add("unit", "Numer jednostki ster.", 8, 10)
tape.add("chan", "Numer kanał", 11, 14)
return tape
# --------------------------------------------------------------------
def parse_io1(self, word):
wc = WordConf(word, disableable = True)
wc.add("camac1", "Adres CAMAC 1", 11, 14)
return wc
# --------------------------------------------------------------------
def parse_io2(self, word):
wc = WordConf(word, disableable = True)
wc.add("camac2", "Adres CAMAC 2", 11, 14)
return wc
# --------------------------------------------------------------------
def parse_io3(self, word):
wc = WordConf(word, disableable = True)
wc.add("camac3", "Adres CAMAC 3/PI", 11, 14)
return wc
# --------------------------------------------------------------------
def parse_io4(self, word):
wc = WordConf(word, disableable = True)
wc.add("camac4", "Adres CAMAC 3/IEC", 11, 14)
return wc
# --------------------------------------------------------------------
def parse_io5(self, word):
wc = WordConf(word)
wc.add("winch", "Linia sterownika Winchester", 2, 7)
wc.add("plix", "Kanał PLIX", 11, 14)
return wc
# --------------------------------------------------------------------
def parse_io6(self, word):
wc = WordConf(word)
wc.add("winch", "Linia sterownika Floppy", 2, 7)
wc.add("plix", "Kanał MULTIX", 11, 14)
return wc
# --------------------------------------------------------------------
def parse_io7(self, word):
wc = WordConf(word, disableable = True)
wc.add("char1", "Kanał znakowy 1", 11, 14)
return wc
# --------------------------------------------------------------------
def parse_io8(self, word):
wc = WordConf(word, disableable = True)
wc.add("char2", "Kanał znakowy 2", 11, 14)
return wc
# --------------------------------------------------------------------
def parse_rtc(self, word):
wc = WordConf(word, disableable = True)
wc.add("unit", "Urządzenie", 8, 10)
wc.add("chan", "Kanał", 11, 14)
return wc
# --------------------------------------------------------------------
def parse_mon(self, word):
wc = WordConf(word)
wc.add("mon", "Numer systemowy monitorów", 0, 15)
return wc
# --------------------------------------------------------------------
def parse_oprq(self, word):
wc = WordConf(word)
wc.add("oprq", "Numer systemowy końcówki dla OPRQ", 0, 15)
return wc
# --------------------------------------------------------------------
def parse_char(self, word, disableable = True):
wc = WordConf(word, disableable = True)
# MULTIX only
if word >> 8 != 0:
wc.add("dir", "Kierunek", 0, 2, ["--", "--", "Wejście", "--", "Wyjście", "", "Half-Duplex", "Full-Duplex"])
wc.add("used", "Linia użyta", 3, 3, NIETAK)
wc.add("type", "Typ linii", 4, 7, ["szeregowa", "równoległa", "synchroniczna"]+["--"]*13)
wc.add("proto", "Protokół", 8, 10, ["czytnik taśmy", "drukarka, perforator", "monitor"]+["--"]*5)
wc.add("count", "Liczba urządzeń", 11, 15)
return wc
# ------------------------------------------------------------------------
# ---- MAIN --------------------------------------------------------------
# ------------------------------------------------------------------------
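# Example interactive session (illustrative; image name and values are made
# up, output abridged):
#
#   $ python3 m4konf.py system.img 0
#   KONF> print sys1
#   [sys1]
#     exlsem  = 0   # Ekstrakody semaforowe = Nie
#     ...
#   KONF> set sys1 sysbuf 24
#   KONF> quit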
if len(sys.argv) == 3:
image = sys.argv[1]
offset = int(sys.argv[2])
elif len(sys.argv) == 2:
image = sys.argv[1]
offset = 0
else:
print("Usage: m4konf.py <image> [offset]")
sys.exit(1)
try:
m4c = M400Conf(image, offset)
except Exception as e:
print( "Cannot load system configuration: %s" % str(e))
sys.exit(1)
while True:
try:
command = input("KONF> ").split()
cmd = command[0]
args = command[1:]
except EOFError:
break
except KeyboardInterrupt:
break
except:
continue
if cmd == "quit" or cmd == "exit":
break
elif cmd == "sections":
print(sorted(m4c.sections()))
elif cmd == "print":
try:
section = args[0]
except:
print(" Use: print <section>|all")
continue
if section == 'all':
            sections = sorted(m4c.config)
else:
sections = [ section ]
for s in sections:
print("[%s]" % s)
try:
for name, desc, val, dval in m4c.config[s]:
print(" %-7s = %-3i # %s = %s" % (name, val, desc, dval))
except ValueError:
print(" Disabled")
continue
except:
print(" No section: %s" % s)
elif cmd == "set":
try:
section = args[0]
key = args[1]
value = int(args[2])
except:
print(" Use: set <section> <key> <value>")
continue
try:
m4c.config[section].set(key, value)
except Exception as e:
print(" Cannot set %s/%s = %i. Error: %s" % (section, key, value, str(e)))
elif cmd == "write":
print(" Not implemented")
else:
print(" Unknown command")
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
| jakubfi/em400 | tools/m4konf.py | Python | gpl-2.0 | 15,829 |
# pylint: disable=too-many-arguments
import random
import string
from dataclasses import dataclass, fields, replace
from functools import singledispatch
from eth_utils import to_checksum_address
from raiden.constants import EMPTY_MERKLE_ROOT, UINT64_MAX, UINT256_MAX
from raiden.messages import Lock, LockedTransfer, RefundTransfer
from raiden.transfer import balance_proof, channel, token_network
from raiden.transfer.identifiers import CanonicalIdentifier
from raiden.transfer.mediated_transfer import mediator
from raiden.transfer.mediated_transfer.state import (
HashTimeLockState,
LockedTransferSignedState,
LockedTransferUnsignedState,
MediationPairState,
TransferDescriptionWithSecretState,
lockedtransfersigned_from_message,
)
from raiden.transfer.mediated_transfer.state_change import ActionInitMediator
from raiden.transfer.merkle_tree import compute_layers, merkleroot
from raiden.transfer.state import (
NODE_NETWORK_REACHABLE,
BalanceProofSignedState,
BalanceProofUnsignedState,
MerkleTreeState,
NettingChannelEndState,
NettingChannelState,
RouteState,
TokenNetworkState,
TransactionExecutionStatus,
message_identifier_from_prng,
)
from raiden.transfer.state_change import ContractReceiveChannelNew, ContractReceiveRouteNew
from raiden.transfer.utils import hash_balance_data
from raiden.utils import privatekey_to_address, random_secret, sha3
from raiden.utils.signer import LocalSigner, Signer
from raiden.utils.typing import (
AdditionalHash,
Address,
AddressHex,
Any,
Balance,
BlockExpiration,
BlockHash,
BlockNumber,
BlockTimeout,
ChainID,
ChannelID,
ChannelMap,
ClassVar,
Dict,
FeeAmount,
InitiatorAddress,
Keccak256,
List,
Locksroot,
MerkleTreeLeaves,
MessageID,
NamedTuple,
NodeNetworkStateMap,
Nonce,
Optional,
PaymentID,
PaymentNetworkID,
Secret,
SecretHash,
Signature,
TargetAddress,
TokenAddress,
TokenAmount,
TokenNetworkAddress,
TokenNetworkID,
TransactionHash,
Tuple,
Type,
TypeVar,
)
EMPTY = "empty"
GENERATE = "generate"
K = TypeVar("K")
V = TypeVar("V")
def _partial_dict(full_dict: Dict[K, V], *args) -> Dict[K, V]:
return {key: full_dict[key] for key in args}
class Properties:
""" Base class for all properties classes. """
DEFAULTS: ClassVar["Properties"] = None
TARGET_TYPE: ClassVar[Type] = None
@property
def kwargs(self):
return {key: value for key, value in self.__dict__.items() if value is not EMPTY}
def extract(self, subset_type: Type) -> "Properties":
field_names = [field.name for field in fields(subset_type)]
return subset_type(**_partial_dict(self.__dict__, *field_names))
def partial_dict(self, *args) -> Dict[str, Any]:
return _partial_dict(self.__dict__, *args)
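# Illustrative sketch (not part of the original module; ``props`` stands for
# any Properties instance): ``extract`` projects a properties object onto a
# smaller properties type that shares a subset of its field names, while
# ``partial_dict`` returns a plain dict of just the named fields, e.g.
#
#   props.partial_dict("chain_identifier", "channel_identifier")
#   # -> {"chain_identifier": ..., "channel_identifier": ...}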
def if_empty(value, default):
return value if value is not EMPTY else default
def _replace_properties(properties, defaults):
replacements = {
k: create_properties(v, defaults.__dict__[k]) if isinstance(v, Properties) else v
for k, v in properties.kwargs.items()
}
return replace(defaults, **replacements)
def create_properties(properties: Properties, defaults: Properties = None) -> Properties:
full_defaults = type(properties).DEFAULTS
if defaults is not None:
full_defaults = _replace_properties(defaults, full_defaults)
return _replace_properties(properties, full_defaults)
def make_uint256() -> int:
return random.randint(0, UINT256_MAX)
def make_channel_identifier() -> ChannelID:
return ChannelID(make_uint256())
def make_uint64() -> int:
return random.randint(0, UINT64_MAX)
def make_balance() -> Balance:
return Balance(random.randint(0, UINT256_MAX))
def make_block_number() -> BlockNumber:
return BlockNumber(random.randint(0, UINT256_MAX))
def make_chain_id() -> ChainID:
return ChainID(random.randint(0, UINT64_MAX))
def make_message_identifier() -> MessageID:
return MessageID(random.randint(0, UINT64_MAX))
def make_20bytes() -> bytes:
return bytes("".join(random.choice(string.printable) for _ in range(20)), encoding="utf-8")
def make_address() -> Address:
return Address(make_20bytes())
def make_checksum_address() -> AddressHex:
return to_checksum_address(make_address())
def make_additional_hash() -> AdditionalHash:
return AdditionalHash(make_32bytes())
def make_32bytes() -> bytes:
return bytes("".join(random.choice(string.printable) for _ in range(32)), encoding="utf-8")
def make_transaction_hash() -> TransactionHash:
return TransactionHash(make_32bytes())
def make_locksroot() -> Locksroot:
return Locksroot(make_32bytes())
def make_block_hash() -> BlockHash:
return BlockHash(make_32bytes())
def make_privatekey_bin() -> bytes:
return make_32bytes()
def make_payment_network_identifier() -> PaymentNetworkID:
return PaymentNetworkID(make_address())
def make_keccak_hash() -> Keccak256:
return Keccak256(make_32bytes())
def make_secret(i: int = EMPTY) -> Secret:
if i is not EMPTY:
return format(i, ">032").encode()
else:
return make_32bytes()
def make_privkey_address(privatekey: bytes = EMPTY,) -> Tuple[bytes, Address]:
privatekey = if_empty(privatekey, make_privatekey_bin())
address = privatekey_to_address(privatekey)
return privatekey, address
def make_signer() -> Signer:
privatekey = make_privatekey_bin()
return LocalSigner(privatekey)
def make_route_from_channel(channel_state: NettingChannelState = EMPTY) -> RouteState:
channel_state = if_empty(channel_state, create(NettingChannelStateProperties()))
return RouteState(channel_state.partner_state.address, channel_state.identifier)
def make_route_to_channel(channel_state: NettingChannelState = EMPTY) -> RouteState:
channel_state = if_empty(channel_state, create(NettingChannelStateProperties()))
return RouteState(channel_state.our_state.address, channel_state.identifier)
# CONSTANTS
# In this module constants are at the bottom because we need some of the
# factories.
# Prefixing with UNIT_ to distinguish them from the default globals.
UNIT_SETTLE_TIMEOUT = 50
UNIT_REVEAL_TIMEOUT = 5
UNIT_TRANSFER_AMOUNT = 10
UNIT_TRANSFER_FEE = 5
UNIT_SECRET = b"secretsecretsecretsecretsecretse"
UNIT_SECRETHASH = sha3(UNIT_SECRET)
UNIT_REGISTRY_IDENTIFIER = b"registryregistryregi"
UNIT_TOKEN_ADDRESS = b"tokentokentokentoken"
UNIT_TOKEN_NETWORK_ADDRESS = b"networknetworknetwor"
UNIT_CHANNEL_ID = 1338
UNIT_CHAIN_ID = 337
UNIT_CANONICAL_ID = CanonicalIdentifier(
chain_identifier=UNIT_CHAIN_ID,
token_network_address=UNIT_TOKEN_NETWORK_ADDRESS,
channel_identifier=UNIT_CHANNEL_ID,
)
UNIT_PAYMENT_NETWORK_IDENTIFIER = b"paymentnetworkidentifier"
UNIT_TRANSFER_IDENTIFIER = 37
UNIT_TRANSFER_INITIATOR = b"initiatorinitiatorin"
UNIT_TRANSFER_TARGET = b"targettargettargetta"
UNIT_TRANSFER_PKEY_BIN = sha3(b"transfer pkey")
UNIT_TRANSFER_PKEY = UNIT_TRANSFER_PKEY_BIN
UNIT_TRANSFER_SENDER = privatekey_to_address(sha3(b"transfer pkey"))
HOP1_KEY = b"11111111111111111111111111111111"
HOP2_KEY = b"22222222222222222222222222222222"
HOP3_KEY = b"33333333333333333333333333333333"
HOP4_KEY = b"44444444444444444444444444444444"
HOP5_KEY = b"55555555555555555555555555555555"
HOP1 = privatekey_to_address(HOP1_KEY)
HOP2 = privatekey_to_address(HOP2_KEY)
ADDR = b"addraddraddraddraddr"
def make_merkletree_leaves(width: int) -> List[Keccak256]:
return [make_secret() for _ in range(width)]
def make_merkletree(leaves: List[SecretHash]) -> MerkleTreeState:
return MerkleTreeState(compute_layers(leaves))
@singledispatch
def create(properties: Any, defaults: Optional[Properties] = None) -> Any:
"""Create objects from their associated property class.
    E.g. a NettingChannelState from NettingChannelStateProperties. For any field in
properties set to EMPTY a default will be used. The default values can be changed
by giving another object of the same property type as the defaults argument.
"""
if isinstance(properties, Properties):
return properties.TARGET_TYPE(**_properties_to_kwargs(properties, defaults))
return properties
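# Illustrative usage (arbitrary override, not from the test suite): create a
# NettingChannelState from its property class; fields left as EMPTY fall back
# to the property class DEFAULTS.
#   channel_state = create(
#       NettingChannelStateProperties(settle_timeout=UNIT_SETTLE_TIMEOUT * 2)
#   )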
def _properties_to_kwargs(properties: Properties, defaults: Properties) -> Dict:
properties = create_properties(properties, defaults or properties.DEFAULTS)
return {key: create(value) for key, value in properties.__dict__.items()}
@dataclass(frozen=True)
class CanonicalIdentifierProperties(Properties):
chain_identifier: ChainID = EMPTY
token_network_address: TokenNetworkAddress = EMPTY
channel_identifier: ChannelID = EMPTY
TARGET_TYPE = CanonicalIdentifier
CanonicalIdentifierProperties.DEFAULTS = CanonicalIdentifierProperties(
chain_identifier=UNIT_CHAIN_ID,
token_network_address=UNIT_TOKEN_NETWORK_ADDRESS,
channel_identifier=GENERATE,
)
@create.register(CanonicalIdentifierProperties)
def _(properties, defaults=None):
kwargs = _properties_to_kwargs(properties, defaults)
if kwargs["channel_identifier"] == GENERATE:
kwargs["channel_identifier"] = make_channel_identifier()
return CanonicalIdentifier(**kwargs)
def make_canonical_identifier(
chain_identifier=EMPTY, token_network_address=EMPTY, channel_identifier=EMPTY
) -> CanonicalIdentifier:
""" Alias of the CanonicalIdentifier create function """
return create(
CanonicalIdentifierProperties(
chain_identifier=chain_identifier,
token_network_address=token_network_address,
channel_identifier=channel_identifier or make_channel_identifier(),
)
)
@dataclass(frozen=True)
class TransactionExecutionStatusProperties(Properties):
started_block_number: BlockNumber = EMPTY
finished_block_number: BlockNumber = EMPTY
result: str = EMPTY
TARGET_TYPE = TransactionExecutionStatus
TransactionExecutionStatusProperties.DEFAULTS = TransactionExecutionStatusProperties(
started_block_number=None,
finished_block_number=None,
result=TransactionExecutionStatus.SUCCESS,
)
@dataclass(frozen=True)
class NettingChannelEndStateProperties(Properties):
address: Address = EMPTY
privatekey: bytes = EMPTY
balance: TokenAmount = EMPTY
merkletree_leaves: MerkleTreeLeaves = EMPTY
merkletree_width: int = EMPTY
TARGET_TYPE = NettingChannelEndState
NettingChannelEndStateProperties.DEFAULTS = NettingChannelEndStateProperties(
address=None, privatekey=None, balance=100, merkletree_leaves=None, merkletree_width=0
)
@create.register(NettingChannelEndStateProperties) # noqa: F811
def _(properties, defaults=None) -> NettingChannelEndState:
args = _properties_to_kwargs(properties, defaults or NettingChannelEndStateProperties.DEFAULTS)
state = NettingChannelEndState(args["address"] or make_address(), args["balance"])
merkletree_leaves = (
args["merkletree_leaves"] or make_merkletree_leaves(args["merkletree_width"]) or None
)
if merkletree_leaves:
state.merkletree = MerkleTreeState(compute_layers(merkletree_leaves))
return state
@dataclass(frozen=True)
class NettingChannelStateProperties(Properties):
canonical_identifier: CanonicalIdentifier = EMPTY
token_address: TokenAddress = EMPTY
payment_network_identifier: PaymentNetworkID = EMPTY
reveal_timeout: BlockTimeout = EMPTY
settle_timeout: BlockTimeout = EMPTY
mediation_fee: FeeAmount = EMPTY
our_state: NettingChannelEndStateProperties = EMPTY
partner_state: NettingChannelEndStateProperties = EMPTY
open_transaction: TransactionExecutionStatusProperties = EMPTY
close_transaction: TransactionExecutionStatusProperties = EMPTY
settle_transaction: TransactionExecutionStatusProperties = EMPTY
TARGET_TYPE = NettingChannelState
NettingChannelStateProperties.DEFAULTS = NettingChannelStateProperties(
canonical_identifier=CanonicalIdentifierProperties.DEFAULTS,
token_address=UNIT_TOKEN_ADDRESS,
payment_network_identifier=UNIT_PAYMENT_NETWORK_IDENTIFIER,
reveal_timeout=UNIT_REVEAL_TIMEOUT,
settle_timeout=UNIT_SETTLE_TIMEOUT,
mediation_fee=0,
our_state=NettingChannelEndStateProperties.DEFAULTS,
partner_state=NettingChannelEndStateProperties.DEFAULTS,
open_transaction=TransactionExecutionStatusProperties.DEFAULTS,
close_transaction=None,
settle_transaction=None,
)
@dataclass(frozen=True)
class TransferDescriptionProperties(Properties):
payment_network_identifier: PaymentNetworkID = EMPTY
payment_identifier: PaymentID = EMPTY
amount: TokenAmount = EMPTY
token_network_identifier: TokenNetworkID = EMPTY
initiator: InitiatorAddress = EMPTY
target: TargetAddress = EMPTY
secret: Secret = EMPTY
allocated_fee: FeeAmount = EMPTY
TARGET_TYPE = TransferDescriptionWithSecretState
TransferDescriptionProperties.DEFAULTS = TransferDescriptionProperties(
payment_network_identifier=UNIT_PAYMENT_NETWORK_IDENTIFIER,
payment_identifier=UNIT_TRANSFER_IDENTIFIER,
amount=UNIT_TRANSFER_AMOUNT,
token_network_identifier=UNIT_TOKEN_NETWORK_ADDRESS,
initiator=UNIT_TRANSFER_INITIATOR,
target=UNIT_TRANSFER_TARGET,
secret=GENERATE,
allocated_fee=0,
)
@create.register(TransferDescriptionProperties)
def _(properties, defaults=None) -> TransferDescriptionWithSecretState:
properties: TransferDescriptionProperties = create_properties(properties, defaults)
params = {key: value for key, value in properties.__dict__.items()}
if params["secret"] == GENERATE:
params["secret"] = random_secret()
return TransferDescriptionWithSecretState(**params)
UNIT_TRANSFER_DESCRIPTION = create(TransferDescriptionProperties(secret=UNIT_SECRET))
@dataclass(frozen=True)
class BalanceProofProperties(Properties):
nonce: Nonce = EMPTY
transferred_amount: TokenAmount = EMPTY
locked_amount: TokenAmount = EMPTY
locksroot: Locksroot = EMPTY
canonical_identifier: CanonicalIdentifier = EMPTY
TARGET_TYPE = BalanceProofUnsignedState
BalanceProofProperties.DEFAULTS = BalanceProofProperties(
nonce=1,
transferred_amount=UNIT_TRANSFER_AMOUNT,
locked_amount=0,
locksroot=EMPTY_MERKLE_ROOT,
canonical_identifier=UNIT_CANONICAL_ID,
)
@dataclass(frozen=True)
class BalanceProofSignedStateProperties(BalanceProofProperties):
message_hash: AdditionalHash = EMPTY
signature: Signature = GENERATE
sender: Address = EMPTY
pkey: bytes = EMPTY
TARGET_TYPE = BalanceProofSignedState
BalanceProofSignedStateProperties.DEFAULTS = BalanceProofSignedStateProperties(
**BalanceProofProperties.DEFAULTS.__dict__,
message_hash=UNIT_SECRETHASH,
sender=UNIT_TRANSFER_SENDER,
pkey=UNIT_TRANSFER_PKEY,
)
def make_signed_balance_proof_from_unsigned(
unsigned: BalanceProofUnsignedState, signer: Signer
) -> BalanceProofSignedState:
balance_hash = hash_balance_data(
transferred_amount=unsigned.transferred_amount,
locked_amount=unsigned.locked_amount,
locksroot=unsigned.locksroot,
)
additional_hash = make_additional_hash()
data_to_sign = balance_proof.pack_balance_proof(
balance_hash=balance_hash,
additional_hash=additional_hash,
canonical_identifier=unsigned.canonical_identifier,
nonce=unsigned.nonce,
)
signature = signer.sign(data=data_to_sign)
sender = signer.address
return BalanceProofSignedState(
nonce=unsigned.nonce,
transferred_amount=unsigned.transferred_amount,
locked_amount=unsigned.locked_amount,
locksroot=unsigned.locksroot,
message_hash=additional_hash,
signature=signature,
sender=sender,
canonical_identifier=unsigned.canonical_identifier,
)
@create.register(BalanceProofSignedStateProperties) # noqa: F811
def _(properties: BalanceProofSignedStateProperties, defaults=None) -> BalanceProofSignedState:
defaults = defaults or BalanceProofSignedStateProperties.DEFAULTS
params = create_properties(properties, defaults).__dict__
signer = LocalSigner(params.pop("pkey"))
if params["signature"] is GENERATE:
keys = ("transferred_amount", "locked_amount", "locksroot")
balance_hash = hash_balance_data(**_partial_dict(params, *keys))
data_to_sign = balance_proof.pack_balance_proof(
balance_hash=balance_hash,
additional_hash=params["message_hash"],
canonical_identifier=params["canonical_identifier"],
nonce=params.get("nonce"),
)
params["signature"] = signer.sign(data=data_to_sign)
return BalanceProofSignedState(**params)
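# Illustrative usage (defaults supply sender and pkey, signature is generated):
#   signed_proof = create(BalanceProofSignedStateProperties(nonce=2))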
@dataclass(frozen=True)
class LockedTransferUnsignedStateProperties(BalanceProofProperties):
amount: TokenAmount = EMPTY
expiration: BlockExpiration = EMPTY
initiator: InitiatorAddress = EMPTY
target: TargetAddress = EMPTY
payment_identifier: PaymentID = EMPTY
token: TokenAddress = EMPTY
secret: Secret = EMPTY
TARGET_TYPE = LockedTransferUnsignedState
LockedTransferUnsignedStateProperties.DEFAULTS = LockedTransferUnsignedStateProperties(
**create_properties(
BalanceProofProperties(locked_amount=UNIT_TRANSFER_AMOUNT, transferred_amount=0)
).__dict__,
amount=UNIT_TRANSFER_AMOUNT,
expiration=UNIT_REVEAL_TIMEOUT,
initiator=UNIT_TRANSFER_INITIATOR,
target=UNIT_TRANSFER_TARGET,
payment_identifier=1,
token=UNIT_TOKEN_ADDRESS,
secret=UNIT_SECRET,
)
@create.register(LockedTransferUnsignedStateProperties) # noqa: F811
def _(properties, defaults=None) -> LockedTransferUnsignedState:
transfer: LockedTransferUnsignedStateProperties = create_properties(properties, defaults)
lock = HashTimeLockState(
amount=transfer.amount, expiration=transfer.expiration, secrethash=sha3(transfer.secret)
)
if transfer.locksroot == EMPTY_MERKLE_ROOT:
transfer = replace(transfer, locksroot=lock.lockhash)
return LockedTransferUnsignedState(
balance_proof=create(transfer.extract(BalanceProofProperties)),
lock=lock,
**transfer.partial_dict("initiator", "target", "payment_identifier", "token"),
)
@dataclass(frozen=True)
class LockedTransferSignedStateProperties(LockedTransferUnsignedStateProperties):
sender: Address = EMPTY
recipient: Address = EMPTY
pkey: bytes = EMPTY
message_identifier: MessageID = EMPTY
TARGET_TYPE = LockedTransferSignedState
LockedTransferSignedStateProperties.DEFAULTS = LockedTransferSignedStateProperties(
**LockedTransferUnsignedStateProperties.DEFAULTS.__dict__,
sender=UNIT_TRANSFER_SENDER,
recipient=UNIT_TRANSFER_TARGET,
pkey=UNIT_TRANSFER_PKEY,
message_identifier=1,
)
@create.register(LockedTransferSignedStateProperties) # noqa: F811
def _(properties, defaults=None) -> LockedTransferSignedState:
transfer: LockedTransferSignedStateProperties = create_properties(properties, defaults)
params = {key: value for key, value in transfer.__dict__.items()}
lock = Lock(
amount=transfer.amount, expiration=transfer.expiration, secrethash=sha3(transfer.secret)
)
pkey = params.pop("pkey")
signer = LocalSigner(pkey)
sender = params.pop("sender")
canonical_identifier = params.pop("canonical_identifier")
params["chain_id"] = int(canonical_identifier.chain_identifier)
params["channel_identifier"] = int(canonical_identifier.channel_identifier)
params["token_network_address"] = canonical_identifier.token_network_address
if params["locksroot"] == EMPTY_MERKLE_ROOT:
params["locksroot"] = lock.lockhash
locked_transfer = LockedTransfer(lock=lock, **params)
locked_transfer.sign(signer)
assert locked_transfer.sender == sender
return lockedtransfersigned_from_message(locked_transfer)
@dataclass(frozen=True)
class LockedTransferProperties(LockedTransferSignedStateProperties):
fee: FeeAmount = EMPTY
TARGET_TYPE = LockedTransfer
LockedTransferProperties.DEFAULTS = LockedTransferProperties(
**replace(LockedTransferSignedStateProperties.DEFAULTS, locksroot=GENERATE).__dict__, fee=0
)
def prepare_locked_transfer(properties, defaults):
properties: LockedTransferProperties = create_properties(properties, defaults)
params = {key: value for key, value in properties.__dict__.items()}
canonical_identifier = params.pop("canonical_identifier")
params["chain_id"] = canonical_identifier.chain_identifier
params["token_network_address"] = canonical_identifier.token_network_address
params["channel_identifier"] = canonical_identifier.channel_identifier
secrethash = sha3(params.pop("secret"))
params["lock"] = Lock(
amount=properties.amount, expiration=properties.expiration, secrethash=secrethash
)
if params["locksroot"] == GENERATE:
params["locksroot"] = sha3(params["lock"].as_bytes)
return params, LocalSigner(params.pop("pkey"))
@create.register(LockedTransferProperties)
def _(properties, defaults=None) -> LockedTransfer:
params, signer = prepare_locked_transfer(properties, defaults)
transfer = LockedTransfer(**params)
transfer.sign(signer)
assert params["sender"] == transfer.sender
return transfer
@dataclass(frozen=True)
class RefundTransferProperties(LockedTransferProperties):
TARGET_TYPE = RefundTransfer
RefundTransferProperties.DEFAULTS = RefundTransferProperties(
**LockedTransferProperties.DEFAULTS.__dict__
)
@create.register(RefundTransferProperties)
def _(properties, defaults=None) -> RefundTransfer:
params, signer = prepare_locked_transfer(properties, defaults)
transfer = RefundTransfer(**params)
transfer.sign(signer)
assert params["sender"] == transfer.sender
return transfer
SIGNED_TRANSFER_FOR_CHANNEL_DEFAULTS = create_properties(
LockedTransferSignedStateProperties(expiration=UNIT_SETTLE_TIMEOUT - UNIT_REVEAL_TIMEOUT)
)
def make_signed_transfer_for(
channel_state: NettingChannelState = EMPTY,
properties: LockedTransferSignedStateProperties = None,
defaults: LockedTransferSignedStateProperties = None,
compute_locksroot: bool = False,
allow_invalid: bool = False,
only_transfer: bool = True,
) -> LockedTransferSignedState:
properties: LockedTransferSignedStateProperties = create_properties(
properties or LockedTransferSignedStateProperties(),
defaults or SIGNED_TRANSFER_FOR_CHANNEL_DEFAULTS,
)
channel_state = if_empty(channel_state, create(NettingChannelStateProperties()))
if not allow_invalid:
ok = channel_state.reveal_timeout < properties.expiration < channel_state.settle_timeout
assert ok, "Expiration must be between reveal_timeout and settle_timeout."
assert privatekey_to_address(properties.pkey) == properties.sender
if properties.sender == channel_state.our_state.address:
recipient = channel_state.partner_state.address
elif properties.sender == channel_state.partner_state.address:
recipient = channel_state.our_state.address
else:
assert False, "Given sender does not participate in given channel."
if compute_locksroot:
lock = Lock(
amount=properties.amount,
expiration=properties.expiration,
secrethash=sha3(properties.secret),
)
locksroot = merkleroot(
channel.compute_merkletree_with(
merkletree=channel_state.partner_state.merkletree, lockhash=sha3(lock.as_bytes)
)
)
else:
locksroot = properties.locksroot
if only_transfer:
transfer_properties = LockedTransferUnsignedStateProperties(
locksroot=locksroot,
canonical_identifier=channel_state.canonical_identifier,
locked_amount=properties.amount,
)
else:
transfer_properties = LockedTransferUnsignedStateProperties(
locksroot=locksroot, canonical_identifier=channel_state.canonical_identifier
)
transfer = create(
LockedTransferSignedStateProperties(recipient=recipient, **transfer_properties.__dict__),
defaults=properties,
)
if not allow_invalid:
is_valid, msg, _ = channel.is_valid_lockedtransfer(
transfer_state=transfer,
channel_state=channel_state,
sender_state=channel_state.partner_state,
receiver_state=channel_state.our_state,
)
assert is_valid, msg
return transfer
def pkeys_from_channel_state(
properties: NettingChannelStateProperties,
defaults: NettingChannelStateProperties = NettingChannelStateProperties.DEFAULTS,
) -> Tuple[Optional[bytes], Optional[bytes]]:
our_key = None
if properties.our_state is not EMPTY:
our_key = properties.our_state.privatekey
elif defaults is not None:
our_key = defaults.our_state.privatekey
partner_key = None
if properties.partner_state is not EMPTY:
partner_key = properties.partner_state.privatekey
elif defaults is not None:
partner_key = defaults.partner_state.privatekey
return our_key, partner_key
class ChannelSet:
"""Manage a list of channels. The channels can be accessed by subscript."""
HOP3_KEY, HOP3 = make_privkey_address()
HOP4_KEY, HOP4 = make_privkey_address()
HOP5_KEY, HOP5 = make_privkey_address()
PKEYS = (HOP1_KEY, HOP2_KEY, HOP3_KEY, HOP4_KEY, HOP5_KEY)
ADDRESSES = (HOP1, HOP2, HOP3, HOP4, HOP5)
def __init__(
self,
channels: List[NettingChannelState],
our_privatekeys: List[bytes],
partner_privatekeys: List[bytes],
):
self.channels = channels
self.our_privatekeys = our_privatekeys
self.partner_privatekeys = partner_privatekeys
@property
def channel_map(self) -> ChannelMap:
return {channel.identifier: channel for channel in self.channels}
@property
def nodeaddresses_to_networkstates(self) -> NodeNetworkStateMap:
return {channel.partner_state.address: NODE_NETWORK_REACHABLE for channel in self.channels}
def our_address(self, index: int) -> Address:
return self.channels[index].our_state.address
def partner_address(self, index: int) -> Address:
return self.channels[index].partner_state.address
def get_route(self, channel_index: int) -> RouteState:
return make_route_from_channel(self.channels[channel_index])
def get_routes(self, *args) -> List[RouteState]:
return [self.get_route(index) for index in (args or range(len(self.channels)))]
def __getitem__(self, item: int) -> NettingChannelState:
return self.channels[item]
def make_channel_set(
properties: List[NettingChannelStateProperties] = None,
defaults: NettingChannelStateProperties = NettingChannelStateProperties.DEFAULTS,
number_of_channels: int = None,
) -> ChannelSet:
if number_of_channels is None:
number_of_channels = len(properties)
channels = list()
our_pkeys = [None] * number_of_channels
partner_pkeys = [None] * number_of_channels
if properties is None:
properties = list()
while len(properties) < number_of_channels:
properties.append(NettingChannelStateProperties())
for i in range(number_of_channels):
our_pkeys[i], partner_pkeys[i] = pkeys_from_channel_state(properties[i], defaults)
channels.append(create(properties[i], defaults))
return ChannelSet(channels, our_pkeys, partner_pkeys)
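# Illustrative usage (default properties, arbitrary count): two channels and a
# route for the first one.
#   channels = make_channel_set(number_of_channels=2)
#   route = channels.get_route(0)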
def make_channel_set_from_amounts(amounts: List[TokenAmount]) -> ChannelSet:
properties = [
NettingChannelStateProperties(our_state=NettingChannelEndStateProperties(balance=amount))
for amount in amounts
]
return make_channel_set(properties)
def mediator_make_channel_pair(
defaults: NettingChannelStateProperties = None, amount: TokenAmount = UNIT_TRANSFER_AMOUNT
) -> ChannelSet:
properties_list = [
NettingChannelStateProperties(
canonical_identifier=make_canonical_identifier(channel_identifier=1),
partner_state=NettingChannelEndStateProperties(
address=UNIT_TRANSFER_SENDER, balance=amount
),
),
NettingChannelStateProperties(
canonical_identifier=make_canonical_identifier(channel_identifier=2),
our_state=NettingChannelEndStateProperties(balance=amount),
partner_state=NettingChannelEndStateProperties(address=UNIT_TRANSFER_TARGET),
),
]
return make_channel_set(properties_list, defaults)
def mediator_make_init_action(
channels: ChannelSet, transfer: LockedTransferSignedState
) -> ActionInitMediator:
return ActionInitMediator(channels.get_routes(1), channels.get_route(0), transfer)
class MediatorTransfersPair(NamedTuple):
channels: ChannelSet
transfers_pair: List[MediationPairState]
amount: int
block_number: BlockNumber
block_hash: BlockHash
@property
def channel_map(self) -> ChannelMap:
return self.channels.channel_map
def make_transfers_pair(
number_of_channels: int, amount: int = UNIT_TRANSFER_AMOUNT, block_number: int = 5
) -> MediatorTransfersPair:
deposit = 5 * amount
defaults = create_properties(
NettingChannelStateProperties(
our_state=NettingChannelEndStateProperties(balance=deposit),
partner_state=NettingChannelEndStateProperties(balance=deposit),
open_transaction=TransactionExecutionStatusProperties(finished_block_number=10),
)
)
properties_list = [
NettingChannelStateProperties(
canonical_identifier=make_canonical_identifier(channel_identifier=i),
our_state=NettingChannelEndStateProperties(
address=ChannelSet.ADDRESSES[0], privatekey=ChannelSet.PKEYS[0]
),
partner_state=NettingChannelEndStateProperties(
address=ChannelSet.ADDRESSES[i + 1], privatekey=ChannelSet.PKEYS[i + 1]
),
)
for i in range(number_of_channels)
]
channels = make_channel_set(properties_list, defaults)
lock_expiration = block_number + UNIT_REVEAL_TIMEOUT * 2
pseudo_random_generator = random.Random()
transfers_pairs = list()
for payer_index in range(number_of_channels - 1):
payee_index = payer_index + 1
receiver_channel = channels[payer_index]
received_transfer = create(
LockedTransferSignedStateProperties(
amount=amount,
expiration=lock_expiration,
payment_identifier=UNIT_TRANSFER_IDENTIFIER,
canonical_identifier=receiver_channel.canonical_identifier,
sender=channels.partner_address(payer_index),
pkey=channels.partner_privatekeys[payer_index],
)
)
is_valid, _, msg = channel.handle_receive_lockedtransfer(
receiver_channel, received_transfer
)
assert is_valid, msg
message_identifier = message_identifier_from_prng(pseudo_random_generator)
lockedtransfer_event = channel.send_lockedtransfer(
channel_state=channels[payee_index],
initiator=UNIT_TRANSFER_INITIATOR,
target=UNIT_TRANSFER_TARGET,
amount=amount,
message_identifier=message_identifier,
payment_identifier=UNIT_TRANSFER_IDENTIFIER,
expiration=lock_expiration,
secrethash=UNIT_SECRETHASH,
)
assert lockedtransfer_event
lock_timeout = lock_expiration - block_number
assert mediator.is_channel_usable(
candidate_channel_state=channels[payee_index],
transfer_amount=amount,
lock_timeout=lock_timeout,
)
sent_transfer = lockedtransfer_event.transfer
pair = MediationPairState(received_transfer, lockedtransfer_event.recipient, sent_transfer)
transfers_pairs.append(pair)
return MediatorTransfersPair(
channels=channels,
transfers_pair=transfers_pairs,
amount=amount,
block_number=block_number,
block_hash=make_block_hash(),
)
def make_node_availability_map(nodes):
return {node: NODE_NETWORK_REACHABLE for node in nodes}
@dataclass(frozen=True)
class RouteProperties(Properties):
address1: Address
address2: Address
capacity1to2: TokenAmount
capacity2to1: TokenAmount = 0
def route_properties_to_channel(route: RouteProperties) -> NettingChannelState:
channel = create(
NettingChannelStateProperties(
canonical_identifier=make_canonical_identifier(),
our_state=NettingChannelEndStateProperties(
address=route.address1, balance=route.capacity1to2
),
partner_state=NettingChannelEndStateProperties(
address=route.address2, balance=route.capacity2to1
),
)
)
return channel # type: ignore
def create_network(
token_network_state: TokenNetworkState,
our_address: Address,
routes: List[RouteProperties],
block_number: BlockNumber,
block_hash: BlockHash = None,
) -> Tuple[Any, List[NettingChannelState]]:
"""Creates a network from route properties.
If the address in the route is our_address, create a channel also.
    Returns the new state and the list of created channels.
"""
block_hash = block_hash or make_block_hash()
state = token_network_state
channels = list()
for count, route in enumerate(routes, 1):
if route.address1 == our_address:
channel = route_properties_to_channel(route)
state_change = ContractReceiveChannelNew(
transaction_hash=make_transaction_hash(),
channel_state=channel,
block_number=block_number,
block_hash=block_hash,
)
channels.append(channel)
else:
state_change = ContractReceiveRouteNew(
transaction_hash=make_transaction_hash(),
canonical_identifier=make_canonical_identifier(),
participant1=route.address1,
participant2=route.address2,
block_number=block_number,
block_hash=block_hash,
)
iteration = token_network.state_transition(
token_network_state=state,
state_change=state_change,
block_number=block_number,
block_hash=block_hash,
)
state = iteration.new_state
assert len(state.network_graph.channel_identifier_to_participants) == count
assert len(state.network_graph.network.edges()) == count
return state, channels
| hackaugusto/raiden | raiden/tests/utils/factories.py | Python | mit | 34,939 |
# -*- coding: utf-8 -*-
__author__ = 'Eric Larson'
__email__ = 'eric@ionrock.org'
__version__ = '0.1.6'
import cgitb
import smtplib
import traceback
from cStringIO import StringIO
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
from contextlib import contextmanager
class ErrorEmail(object):
def __init__(self, config, **kw):
self.config = config
if isinstance(self.config['TO'], basestring):
self.config['TO'] = [self.config['TO']]
self.extra_info = kw
def __enter__(self):
return self
@contextmanager
    def mail_server(self):
        server = smtplib.SMTP(self.config['SERVER'],
                              self.config.get('PORT', 25))
        try:
            yield server
        finally:
            # Always close the connection, even if the caller raised.
            server.quit()
def send_email(self, message):
to = self.config['TO']
frm = self.config['FROM']
with self.mail_server() as server:
server.sendmail(frm, to, message)
def get_plain_traceback(self, exc_info):
fh = StringIO()
traceback.print_tb(exc_info[2], fh)
return MIMEText(fh.getvalue(), 'plain')
def get_html_traceback(self, exc_info):
return MIMEText(cgitb.html(exc_info), 'html')
def get_subject(self, exc_info):
tmpl = self.config.get('SUBJECT', 'ErrorEmail: {message}')
message = traceback.format_exception(*exc_info).pop().strip()
return tmpl.format(message=message, **self.extra_info)
def create_message_from_traceback(self, exc_info):
msg = MIMEMultipart('alternative')
msg['To'] = ', '.join(self.config['TO'])
msg['From'] = self.config['FROM']
# TODO: Make this configurable
msg['Subject'] = self.get_subject(exc_info)
msg.attach(self.get_plain_traceback(exc_info))
msg.attach(self.get_html_traceback(exc_info))
return msg.as_string()
    def __exit__(self, exc_type, exc_value, exc_tb):
        # On a clean exit all three arguments are None; only mail the
        # traceback when the block actually raised.
        if exc_type is not None:
            msg = self.create_message_from_traceback((exc_type, exc_value, exc_tb))
            self.send_email(msg)
            # Returning False lets the exception propagate.
            return False
        return True
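# Illustrative usage (hypothetical config values and job function):
#   config = {'SERVER': 'localhost', 'TO': 'ops@example.com',
#             'FROM': 'noreply@example.com'}
#   with ErrorEmail(config, hostname='worker-1'):
#       run_job()  # any uncaught exception is emailed, then re-raised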
| ionrock/erroremail | erroremail/__init__.py | Python | bsd-3-clause | 2,084 |
# This file was generated by 'versioneer.py' (0.7+) from
# revision-control system data, or from the parent directory name of an
# unpacked source archive. Distribution tarballs contain a pre-generated copy
# of this file.
version_version = '0.2.13'
version_full = '01a5d50179af4adf28195ce6a926c735eede6b06'
def get_versions(default={}, verbose=False):
return {'version': version_version, 'full': version_full}
| proxysh/Safejumper-for-Desktop | buildmac/Resources/env/lib/python2.7/site-packages/obfsproxy/_version.py | Python | gpl-2.0 | 418 |
"""
Functions file for login app
consists of common functions used by both api.py and views.py file
"""
from django.contrib.auth.models import User
from django.contrib.auth import authenticate
from django.contrib.auth import login as django_login
from django.core.mail import send_mail, EmailMultiAlternatives
from django.shortcuts import render
from django.template import Context
from django.utils import timezone
from django.views.generic import View
from rest_framework import status
from rest_framework.authtoken.models import Token
from rest_framework.decorators import api_view
from rest_framework.permissions import AllowAny
from rest_framework.response import Response
from rest_framework.views import APIView
from login.models import EmailVerificationCode, PasswordResetCode, UserProfile
import login.serializers as Serializers
from UCP.constants import result, message
from UCP.functions import send_parallel_mail
from UCP.settings import EMAIL_HOST_USER, BASE_URL, SITE_TITLE
def get_user_details(request):
"""
returns a dict with the details of the logged in user
"""
user = UserProfile.objects.get(user=request.user)
serializer = Serializers.UserProfileFullSerializer(user)
return serializer.data
def get_user_profile(pk):
"""
returns a dict with the details of a user with a given id
"""
user = UserProfile.objects.get(id=pk)
serializer = Serializers.UserProfileFullSerializer(user)
return serializer.data
def update_profile(request):
"""
updates user profile details
"""
user = UserProfile.objects.get(user=request.user)
print request.POST
if "first_name" in request.POST:
user.user.first_name = request.POST["first_name"].capitalize()
if "last_name" in request.POST:
user.user.last_name = request.POST["last_name"].capitalize()
if "profile_picture" in request.FILES:
user.profile_image = request.FILES["profile_picture"]
if "age" in request.POST:
user.age = request.POST["age"]
if "gender" in request.POST:
user.gender = request.POST["gender"]
if "theme" in request.POST:
user.theme = request.POST["theme"]
user.user.save()
user.save()
response = {}
response["message"] = "Your details were updated"
return response
def get_response_text(response):
"""
Combines message and errors returned by a function to create an HTML to be displayed in a modal
"""
messageHTML = ""
if "message" in response:
messageHTML += "<h4>" + response["message"] + "</h4>"
if "error" in response:
for key in response["error"]:
for error in response["error"][key]:
messageHTML += "<h4>" + error + "</h4>"
return messageHTML
def send_verification_email(user):
"""
Creates a EmailVerificationCode Object and send a verification mail to the user
"""
emailVerificationCode = EmailVerificationCode.objects.create(user=user)
verification_link = "http://"+BASE_URL + "/user/verify_email/?email=" + user.email+"&code="+ emailVerificationCode.verification_code+"&format=json"
emailMessage ='<div style="background-color:#4285f4;padding:20px;border-radius:10px;"><h2 style="text-align:center">Welcome to '+ SITE_TITLE +' Campus Portal</h2>'
emailMessage += '<p style="color:white">Hey '+user.first_name+' !</p>'
emailMessage += '<p>Thank you for signing up on our portal</p>'
emailMessage += '<p>Please click <a href="'+ verification_link +'">here</a> to verify your email address</p>'
emailMessage += '<p>If the above link does not work, copy paste the following in your browser address bar </p>'
emailMessage += '<p>'+verification_link+'</p>'
emailMessage += '</div>'
emailSubject = "Verification Email"
to = [user.email]
senderEmail = EMAIL_HOST_USER
    msg = EmailMultiAlternatives(emailSubject, emailMessage, senderEmail, to)
    msg.attach_alternative(emailMessage, "text/html")
    msg.send()
def send_password_reset_email(user):
"""
Creates a PasswordResetCode Object and mails it the code to the user
"""
passwordResetCode = PasswordResetCode.objects.create(user=user)
emailSubject = "Reset your password"
emailMessage = "Use the code " + passwordResetCode.reset_code + " to reset your password"
to = [user.email]
senderEmail = EMAIL_HOST_USER
print emailMessage
send_parallel_mail(emailSubject, emailMessage, to)
def login(request):
"""
Logs in the user
"""
serializer = Serializers.LoginRequestSerializer(data = request.POST)
response = {}
if serializer.is_valid():
username = request.POST['email']
password = request.POST['password']
user = authenticate(username=username, password=password)
if user:
if user.is_active:
#create a authentication key for the user
django_login(request, user)
data = {}
if Token.objects.filter(user=user).exists():
token = Token.objects.get(user=user)
else:
token = Token.objects.create(user=user)
data["access_token"] = token.key
response["result"] = result.RESULT_SUCCESS
response["data"] = data
response["message"] = message.MESSAGE_LOGIN_SUCCESSFUL
else:
response["result"] = result.RESULT_FAILURE
response["message"] = message.MESSAGE_ACCOUNT_INACTIVE
else:
response["result"] = result.RESULT_FAILURE
response["message"] = message.MESSAGE_INVALID_LOGIN_DETAILS
else:
response["result"] = result.RESULT_FAILURE
response["error"] = serializer.errors
return response
def register(request):
"""
Register a new user
"""
response = {}
serializer = Serializers.UserSerializer(data=request.POST)
if serializer.is_valid():
user = serializer.save()
userProfileSerializer = Serializers.UserProfileSerializer(data=request.POST)
if userProfileSerializer.is_valid():
userProfileSerializer.save(user = user)
response["result"] = result.RESULT_SUCCESS
response["message"]= message.MESSAGE_REGISTRATION_SUCCESSFUL
response["error"] = []
#send a verification email
send_verification_email(user)
else:
response["result"] = result.RESULT_FAILURE
response["message"] = message.MESSAGE_REGISTRATION_FAILED
response["error"] = userProfileSerializer.errors
else:
response["result"] = result.RESULT_FAILURE
response["message"] = message.MESSAGE_REGISTRATION_FAILED
response["error"] = serializer.errors
return response
def forgot_password(request):
response = {}
serializer = Serializers.PasswordForgotRequestSerializer(data = request.GET)
if serializer.is_valid():
email = request.GET['email']
if User.objects.filter(email = email).exists():
user = User.objects.get(email=email)
send_password_reset_email(user)
response["result"] = result.RESULT_SUCCESS
response["message"] = message.MESSAGE_PASSWORD_RESET_CODE_SENT
else:
response["result"] = result.RESULT_FAILURE
response["message"] = message.MESSAGE_EMAIL_NOT_REGISTERED
    else:
        # invalid email provided
        response["result"] = result.RESULT_FAILURE
        response["error"] = serializer.errors
return response
def reset_password(request):
response = {}
serializer = Serializers.PasswordResetRequestSerializer(data = request.POST)
if serializer.is_valid():
reset_code = request.POST['reset_code']
password = request.POST['password']
if PasswordResetCode.objects.filter(reset_code = reset_code).exists():
code = PasswordResetCode.objects.get(reset_code = reset_code)
user = code.user
user.set_password(password)
user.save()
            # delete the password reset code so it can't be used again
code.delete()
response["result"] = result.RESULT_SUCCESS
response["message"] = "Your password has been reset"
else:
response["result"] = result.RESULT_FAILURE
response["message"] = "The password code is not valid"
else:
response["result"] = result.RESULT_FAILURE
response["error"] = serializer.errors
return response
def verify_email(request):
response = {}
serializer = Serializers.VerifyEmailRequestSerializer(data = request.GET)
if serializer.is_valid():
verification_code = request.GET['code']
if EmailVerificationCode.objects.filter(verification_code = verification_code).exists():
#verify the user
code = EmailVerificationCode.objects.get(verification_code = verification_code)
user = code.user
user.is_active = True
user.save()
            # delete the verification code so it can't be used again
code.delete()
response["result"] = result.RESULT_SUCCESS
response["message"] = message.MESSAGE_EMAIL_VERIFICATION_SUCCESSFUL
        else:
            # invalid or expired verification code
            response["result"] = result.RESULT_FAILURE
            response["message"] = message.MESSAGE_VERIFICATION_CODE_EXPIRED
    else:
response["result"] = result.RESULT_FAILURE
response["error"] = serializer.errors
return response
| BuildmLearn/University-Campus-Portal-UCP | UCP/login/functions.py | Python | bsd-3-clause | 10,003 |
# Copyright (C) 2010-2014 CEA/DEN, EDF R&D
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
#
#This case corresponds to: /visu/animation/H1 case
#%Create animation for Vectors for 'vitesse' field of the given MED file and dump picture files in PNG format %
import sys
import os
from paravistest import *
from presentations import *
from pvsimple import *
import pvserver as paravis
#import file
myParavis = paravis.myParavis
# Directory for saving snapshots
picturedir = get_picture_dir("Animation/H1")
theFileName = datadir + "TimeStamps.med"
print " --------------------------------- "
print "file ", theFileName
print " --------------------------------- "
OpenDataFile(theFileName)
aProxy = GetActiveSource()
if aProxy is None:
raise RuntimeError, "Error: can't import file."
else: print "OK"
print "Creating a Viewer.........................",
aView = GetRenderView()
reset_view(aView)
Render(aView)
if aView is None : print "Error"
else : print "OK"
# Vectors creation
prs = VectorsOnField(aProxy, EntityType.NODE, 'vitesse', 1)
prs.Visibility=1
aView.ResetCamera()
print "Creating an Animation.....................",
my_format = "png"
print "Current format to save snapshots: ",my_format
# Add path separator to the end of the picture path if necessary
if not picturedir.endswith(os.sep):
picturedir += os.sep
# Select only the current field:
aProxy.AllArrays = []
aProxy.UpdatePipeline()
aProxy.AllArrays = ['TS0/dom/ComSup0/vitesse@@][@@P1']
aProxy.UpdatePipeline()
# Animation creation and saving into set of files into picturedir
scene = AnimateReader(aProxy,aView,picturedir+"H1_dom."+my_format)
nb_frames = len(scene.TimeKeeper.TimestepValues)
pics = os.listdir(picturedir)
if len(pics) != nb_frames:
print "FAILED!!! Number of made pictures is equal to ", len(pics), " instead of ", nb_frames
for pic in pics:
os.remove(picturedir+pic)
# Prepare animation playback
scene.PlayMode = 1 # set RealTime mode for animation playback
# set the duration
scene.Duration = 30 # corresponds to setting the animation speed in VISU
scene.GoToFirst()
print "Animation.................................",
scene.Play()
scene.GoToFirst()
| FedoraScientific/salome-paravis | test/VisuPrs/Animation/H1.py | Python | lgpl-2.1 | 2,941 |
"""
Task sequencer
"""
import sys
import logging
from teuthology import run_tasks
log = logging.getLogger(__name__)
def task(ctx, config):
"""
Sequentialize a group of tasks into one executable block
example:
- sequential:
- tasktest:
- tasktest:
You can also reference the job from elsewhere:
foo:
tasktest:
tasks:
- sequential:
- tasktest:
- foo
- tasktest:
That is, if the entry is not a dict, we will look it up in the top-level
config.
Sequential tasks and Parallel tasks can be nested.
:param ctx: Context
:param config: Configuration
"""
stack = []
try:
for entry in config:
if not isinstance(entry, dict):
entry = ctx.config.get(entry, {})
((taskname, confg),) = entry.iteritems()
log.info('In sequential, running task %s...' % taskname)
mgr = run_tasks.run_one_task(taskname, ctx=ctx, config=confg)
if hasattr(mgr, '__enter__'):
mgr.__enter__()
stack.append(mgr)
finally:
try:
exc_info = sys.exc_info()
while stack:
mgr = stack.pop()
mgr.__exit__(*exc_info)
finally:
del exc_info
| ktdreyer/teuthology | teuthology/task/sequential.py | Python | mit | 1,305 |
'''
=====================
Folder "View" Classes
=====================
These classes wrap Directories and perform automatic actions
to Histograms retrieved from them. The different views can be composited and
layered.
Summary of views:
- ScaleView: scale histogram normalization
- NormalizeView: normalize histograms
- SumView: sum histograms from different folders together
- StyleView: apply a style to histograms
- StackView: build THStacks using histograms from different folders
- TitleView: change the title of histograms
- FunctorView: apply a arbitrary transformation function to the histograms
- MultiFunctorView: apply a arbitrary transformation function to a collection
of histograms
- SubdirectoryView: A view of a subdirectory, which maintains the same view as
the base.
Example use case
================
One has a ROOT file with the following content::
zjets/mutau_mass
zz/mutau_mass
wz/mutau_mass
data_2010/mutau_mass
data_2011/mutau_mass
and wants to do the following:
1. Merge the two data taking periods together
2. Scale the Z, WZ, and ZZ simulated results to the appropriate int. lumi.
3. Combine WZ and ZZ into a single diboson sample
4. Apply different colors to the MC samples
5. Make a Stack of the expected yields from different simulated processes
This example can be tested by running::
python -m rootpy.plotting.views
>>> # Mock up the example test case
>>> import rootpy.io as io
>>> # We have to keep these, to make sure PyROOT doesn't garbage collect them
>>> keep = []
>>> zjets_dir = io.Directory('zjets', 'Zjets directory')
>>> zz_dir = io.Directory('zz', 'ZZ directory')
>>> wz_dir = io.Directory('wz', 'WZ directory')
>>> data2010_dir = io.Directory('data2010', 'data2010 directory')
>>> data2011_dir = io.Directory('data2011', 'data2011 directory')
>>> # Make the Zjets case
>>> _ = zjets_dir.cd()
>>> zjets_hist = ROOT.TH1F("mutau_mass", "Mu-Tau mass", 100, 0, 100)
>>> zjets_hist.FillRandom('gaus', 5000)
>>> keep.append(zjets_hist)
>>> # Make the ZZ case
>>> _ = zz_dir.cd()
>>> zz_hist = ROOT.TH1F("mutau_mass", "Mu-Tau mass", 100, 0, 100)
>>> zz_hist.FillRandom('gaus', 5000)
>>> keep.append(zz_hist)
>>> # Make the WZ case
>>> _ = wz_dir.cd()
>>> wz_hist = ROOT.TH1F("mutau_mass", "Mu-Tau mass", 100, 0, 100)
>>> wz_hist.FillRandom('gaus', 5000)
>>> keep.append(wz_hist)
>>> # Make the 2010 data case
>>> _ = data2010_dir.cd()
>>> data2010_hist = ROOT.TH1F("mutau_mass", "Mu-Tau mass", 100, 0, 100)
>>> data2010_hist.FillRandom('gaus', 30)
>>> keep.append(data2010_hist)
>>> # Make the 2011 data case
>>> _ = data2011_dir.cd()
>>> data2011_hist = ROOT.TH1F("mutau_mass", "Mu-Tau mass", 100, 0, 100)
>>> data2011_hist.FillRandom('gaus', 51)
>>> keep.append(data2011_hist)
SumView
-------
We can merge the two data periods into a single case using a SumView.
>>> data = SumView(data2010_dir, data2011_dir)
>>> data_hist = data.Get("mutau_mass")
>>> data_hist.Integral()
81.0
>>> data_hist.Integral() == data2010_hist.Integral() + data2011_hist.Integral()
True
ScaleView
---------
The simulated results (Z & diboson) can be scaled to the expected integrated
luminosity using ScaleViews.
>>> zjets = ScaleView(zjets_dir, 0.01)
>>> zjets_hist = zjets.Get("mutau_mass")
>>> abs(zjets_hist.Integral() - 50.0) < 1e-5
True
>>> # Scale the diboson contribution
>>> zz = ScaleView(zz_dir, 0.001)
>>> wz = ScaleView(wz_dir, 0.003)
Combining views
---------------
The dibosons individually are tiny, let's put them together using a SumView.
Note that this operation nests two ScaleViews into a SumView.
>>> dibosons = SumView(zz, wz)
>>> # We expect 5000*0.001 + 5000*0.003 = 20 events
>>> dibosons_hist = dibosons.Get("mutau_mass")
>>> abs(dibosons_hist.Integral() - 20) < 1e-4
True
StyleView
---------
A style view automatically applies a style to retrieved Plottable objects.
The style is specified using the same arguments as the Plottable.decorate.
Let's make the Z background red and the diboson background blue.
>>> zjets = StyleView(zjets, fillcolor=ROOT.EColor.kRed)
>>> dibosons = StyleView(dibosons, fillcolor=ROOT.EColor.kBlue)
>>> zjets_hist = zjets.Get("mutau_mass")
>>> zjets_hist.GetFillColor() == ROOT.EColor.kRed
True
>>> dibosons_hist = dibosons.Get("mutau_mass")
>>> dibosons_hist.GetFillColor() == ROOT.EColor.kBlue
True
StackView
---------
The StackView combines multiple items into a HistStack. In our example
we stack the SM backgrounds to compare to the data.
>>> sm_bkg = StackView(zjets, dibosons)
>>> sm_bkg_stack = sm_bkg.Get("mutau_mass")
>>> '%0.0f' % sm_bkg_stack.Integral()
'70'
Looks like we have an excess of 11 events - must be the Higgs.
Other Examples
==============
NormalizeView
-------------
The normalization view renormalizes histograms to a given value (default 1.0).
Here is an example of using the NormalizeView to compare the Z and diboson
shapes.
>>> z_shape = NormalizeView(zjets)
>>> z_shape_hist = z_shape.Get("mutau_mass")
>>> abs(1 - z_shape_hist.Integral()) < 1e-5
True
>>> # Let's compare the shapes using a HistStack, using the "nostack" option.
>>> diboson_shape = NormalizeView(dibosons)
>>> shape_comparison = StackView(z_shape, diboson_shape)
>>> # To draw the comparison:
>>> # shape_comparison.Get("mutau_mass").Draw('nostack')
FunctorView
-----------
FunctorView allows you to apply an arbitrary transformation to the object.
Here we show how you can change the axis range for all histograms in a
directory.
>>> rebin = lambda x: x.Rebin(2)
>>> zjets_rebinned = FunctorView(zjets, rebin)
>>> zjets.Get("mutau_mass").GetNbinsX()
100
>>> zjets_rebinned.Get("mutau_mass").GetNbinsX()
50
The functor doesn't have to return a histogram.
>>> mean_getter = lambda x: x.GetMean()
>>> mean = zjets.Get("mutau_mass").GetMean()
>>> zjets_mean = FunctorView(zjets, mean_getter)
>>> zjets_mean.Get("mutau_mass") == mean
True
MultiFunctorView
----------------
MultiFunctorView is similar except that it operates on a group of histograms.
The functor should take one argument, a *generator* of the sub-objects.
Here's an example to get the integral of the biggest histogram in a set:
>>> biggest_histo = lambda objects: max(y.Integral() for y in objects)
>>> biggest = MultiFunctorView(biggest_histo, zjets, dibosons)
>>> biggest.Get("mutau_mass") == zjets.Get("mutau_mass").Integral()
True
SubdirectoryView
----------------
If you'd like to "cd" into a lower subdirectory, while still maintaining
the same view, use a SubdirectoryView.
>>> basedir = io.Directory('base', 'base directory')
>>> _ = basedir.cd()
>>> subdir1 = io.Directory('subdir1', 'subdir directory in 1')
>>> _ = subdir1.cd()
>>> hist = ROOT.TH1F("mutau_mass", "Mu-Tau mass", 100, 0, 100)
>>> hist.FillRandom('gaus', 2000)
>>> keep.append(hist)
>>> _ = basedir.cd()
>>> subdir2 = io.Directory('subdir2', 'subdir directory 2')
>>> _ = subdir2.cd()
>>> hist = ROOT.TH1F("mutau_mass", "Mu-Tau mass", 100, 0, 100)
>>> hist.FillRandom('gaus', 5000)
>>> keep.append(hist)
The directory structure is now::
base/subdir1/hist
base/subdir2/hist
Subdirectory views work on top of other views.
>>> baseview = ScaleView(basedir, 0.1)
>>> subdir1view = SubdirectoryView(baseview, 'subdir1')
>>> subdir2view = SubdirectoryView(baseview, 'subdir2')
>>> histo1 = subdir1view.Get('mutau_mass')
>>> histo2 = subdir2view.Get('mutau_mass')
>>> exp_histo1 = baseview.Get("subdir1/mutau_mass")
>>> exp_histo2 = baseview.Get("subdir2/mutau_mass")
>>> def equivalent(h1, h2):
... return (abs(h1.GetMean() - h2.GetMean()) < 1e-4 and
... abs(h1.GetRMS() - h2.GetRMS()) < 1e-4 and
... abs(h1.Integral() - h2.Integral()) < 1e-4)
>>> equivalent(exp_histo1, histo1)
True
>>> equivalent(exp_histo2, histo2)
True
>>> equivalent(histo1, histo2)
False
'''
from __future__ import absolute_import
import os
import ROOT
from .base import Plottable
from .hist import HistStack
from ..io import Directory, DoesNotExist
__all__ = [
'ScaleView',
'NormalizeView',
'StyleView',
'TitleView',
'SumView',
'StackView',
'FunctorView',
'MultiFunctorView',
'PathModifierView',
'SubdirectoryView',
]
class _FolderView(object):
'''
Abstract view of an individual folder
Provides one interface: Get(path) which returns a modified version
of whatever exists at path. Subclasses should define::
apply_view(self, obj)
which should return the modified [object] as necessary.
The subclass can get access to the queried path via the self.getting
variable.
'''
def __init__(self, directory):
''' Initialize with the directory to be wrapped '''
self.dir = directory
def path(self):
''' Get the path of the wrapped folder '''
if isinstance(self.dir, Directory):
return self.dir._path
elif isinstance(self.dir, ROOT.TDirectory):
return self.dir.GetPath()
elif isinstance(self.dir, _FolderView):
return self.dir.path()
else:
return str(self.dir)
def __str__(self):
return "{0}('{1}')".format(self.__class__.__name__, self.path())
def Get(self, path):
''' Get the (modified) object from path '''
self.getting = path
try:
obj = self.dir.Get(path)
return self.apply_view(obj)
except DoesNotExist as dne:
raise DoesNotExist(
str(dne) + "[{0}]".format(self.__class__.__name__))
class _MultiFolderView(object):
'''
Abstract view of a collection of folders
Applies some type of "merge" operation to the result of the get from each
folder. Subclasses should define::
merge_views(self, objects)
which takes a *generator* of objects returns a merged object.
The subclass can get access to the queried path via the self.getting
variable.
'''
def __init__(self, *directories):
self.dirs = directories
def __str__(self):
return "{0}({1})".format(
self.__class__.__name__,
','.join(str(x) for x in self.dirs))
def Get(self, path):
''' Merge the objects at path in all subdirectories '''
return self.merge_views(x.Get(path) for x in self.dirs)
class ScaleView(_FolderView):
''' View of a folder which applies a scaling factor to histograms. '''
def __init__(self, directory, scale_factor):
super(ScaleView, self).__init__(directory)
self.factor = scale_factor
def apply_view(self, obj):
if not hasattr(obj, 'Scale'):
            raise ValueError(
                "`ScaleView` can't determine how to handle "
                "an object of type `{0}`; "
                "it has no `Scale` method".format(type(obj)))
clone = obj.Clone()
clone.Scale(self.factor)
return clone
class NormalizeView(ScaleView):
''' Normalize histograms to a constant value '''
def __init__(self, directory, normalization=1.0):
# Initialize the scale view with a dummy scale factor.
# The scale factor is changed dynamically for each histogram.
super(NormalizeView, self).__init__(directory, None)
self.norm = normalization
def apply_view(self, obj):
current_norm = obj.Integral()
# Update the scale factor (in the base)
if current_norm > 0:
self.factor = self.norm / current_norm
else:
self.factor = 0
return super(NormalizeView, self).apply_view(obj)
class StyleView(_FolderView):
'''
View of a folder which applies a style to Plottable objects.
The kwargs are passed to Plottable.decorate
'''
def __init__(self, directory, **kwargs):
super(StyleView, self).__init__(directory)
self.kwargs = kwargs
def apply_view(self, obj):
if not isinstance(obj, Plottable):
            raise TypeError(
                "`StyleView` can't determine how to handle "
                "an object of type `{0}`; it is not a subclass of "
                "`Plottable`".format(type(obj)))
clone = obj.Clone()
clone.decorate(**self.kwargs)
return clone
class TitleView(_FolderView):
''' Override the title of gotten histograms '''
def __init__(self, directory, title):
self.title = title
super(TitleView, self).__init__(directory)
def apply_view(self, obj):
clone = obj.Clone()
clone.SetTitle(self.title)
return clone
class SumView(_MultiFolderView):
''' Add a collection of histograms together '''
def __init__(self, *directories):
super(SumView, self).__init__(*directories)
def merge_views(self, objects):
output = None
for obj in objects:
if output is None:
output = obj.Clone()
else:
output += obj
return output
class StackView(_MultiFolderView):
'''
Build a HistStack from the input histograms
The default draw option that histograms will use is "hist".
One can override this for all histograms by passing a string.
Individual behavior can be controlled by passing a list of draw options,
corresponding to the input directories. In this case the option for
all histograms must be specified.
The name and title of the HistStack is taken from the first histogram in
the list.
Normally the histograms will be added to the stack in the order
of the constructor. Optionally, one can add them in order of ascending
integral by passing the kwarg sorted=True.
'''
def __init__(self, *directories, **kwargs):
super(StackView, self).__init__(*directories)
        self.sort = kwargs.get('sorted', False)
def merge_views(self, objects):
output = None
if self.sort:
objects = sorted(objects, key=lambda x: x.Integral())
for obj in objects:
if output is None:
output = HistStack(name=obj.GetName(),
title=obj.GetTitle())
output.Add(obj)
return output
class FunctorView(_FolderView):
'''
Apply an arbitrary function to the output histogram.
The histogram is always cloned before it is passed to the function.
'''
def __init__(self, directory, function):
self.f = function
super(FunctorView, self).__init__(directory)
def apply_view(self, obj):
clone = obj.Clone()
return self.f(clone)
class MultiFunctorView(_MultiFolderView):
'''
Apply an arbitrary function to the output histograms.
The function must take one argument, a generator of objects.
'''
def __init__(self, f, *directories):
self.f = f
super(MultiFunctorView, self).__init__(*directories)
def merge_views(self, objects):
return self.f(objects)
class PathModifierView(_FolderView):
'''
Does some magic to the path
User should supply a functor which transforms the path argument
passed to Get(...)
'''
def __init__(self, dir, path_modifier):
self.path_modifier = path_modifier
super(PathModifierView, self).__init__(dir)
def Get(self, path):
newpath = self.path_modifier(path)
return super(PathModifierView, self).Get(newpath)
def apply_view(self, obj):
''' Do nothing '''
return obj
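# Illustrative usage (hypothetical prefix): redirect every query into a
# systematic-variation folder without changing the calling code.
#   shifted = PathModifierView(basedir, lambda p: 'syst_up/' + p)
#   shifted.Get('mutau_mass')  # actually retrieves 'syst_up/mutau_mass'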
class SubdirectoryView(PathModifierView):
'''
Add some base directories to the path of Get()
<subdir> is the directory you want to 'cd' too.
'''
def __init__(self, dir, subdirpath):
functor = lambda path: os.path.join(subdirpath, path)
super(SubdirectoryView, self).__init__(dir, functor)
if __name__ == "__main__":
import doctest
doctest.testmod()
| ndawe/rootpy | rootpy/plotting/views.py | Python | bsd-3-clause | 15,818 |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Operations to emit summaries."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import collections
import functools
import os
import re
import threading
import six
from tensorflow.core.framework import graph_pb2
from tensorflow.core.framework import summary_pb2
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.eager import context
from tensorflow.python.eager import profiler as _profiler
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import smart_cond
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import gen_summary_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import summary_op_util
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training import training_util
from tensorflow.python.util import deprecation
from tensorflow.python.util import tf_contextlib
from tensorflow.python.util.tf_export import tf_export
# Name for graph collection of summary writer init ops, which is only exposed
# as a legacy API for tf.contrib.summary in TF 1.x.
_SUMMARY_WRITER_INIT_COLLECTION_NAME = "_SUMMARY_WRITER_V2"
class _SummaryState(threading.local):
def __init__(self):
super(_SummaryState, self).__init__()
self.is_recording = None
# TODO(slebedev): why a separate flag for DS and is it on by default?
self.is_recording_distribution_strategy = True
self.writer = None
self.step = None
_summary_state = _SummaryState()
def _should_record_summaries_internal(default_state):
"""Returns boolean Tensor if summaries should/shouldn't be recorded.
  The summary condition is the logical "and" of the conditions below:
  first, a summary writer must be set; given that constraint is met, both
  ctx.summary_recording and ctx.summary_recording_distribution_strategy
  must hold. The former is usually set by the user, and the latter is
  controlled by DistributionStrategy (tf.distribute.ReplicaContext).
Args:
default_state: can be True or False. The default summary behavior when
summary writer is set and the user does not specify
ctx.summary_recording and ctx.summary_recording_distribution_strategy
is True.
"""
if _summary_state.writer is None:
return constant_op.constant(False)
if not callable(_summary_state.is_recording):
static_cond = tensor_util.constant_value(_summary_state.is_recording)
if static_cond is not None and not static_cond:
return constant_op.constant(False)
resolve = lambda x: x() if callable(x) else x
cond_distributed = resolve(_summary_state.is_recording_distribution_strategy)
cond = resolve(_summary_state.is_recording)
if cond is None:
cond = default_state
return math_ops.logical_and(cond_distributed, cond)
def _should_record_summaries_v2():
"""Returns boolean Tensor which is true if summaries should be recorded.
If no recording status has been set, this defaults to True, unlike the public
should_record_summaries().
"""
return _should_record_summaries_internal(default_state=True)
@tf_export("summary.should_record_summaries", v1=[])
def should_record_summaries():
"""Returns boolean Tensor which is true if summaries should be recorded."""
return _should_record_summaries_internal(default_state=False)
@tf_export("summary.record_if", v1=[])
@tf_contextlib.contextmanager
def record_if(condition):
"""Sets summary recording on or off per the provided boolean value.
  The provided value can be a python boolean, a scalar boolean Tensor, or
  a callable providing such a value; if a callable is passed it will be
invoked on-demand to determine whether summary writing will occur. Note that
when calling record_if() in an eager mode context, if you intend to provide a
varying condition like `step % 100 == 0`, you must wrap this in a
callable to avoid immediate eager evaluation of the condition. In particular,
using a callable is the only way to have your condition evaluated as part of
the traced body of an @tf.function that is invoked from within the
`record_if()` context.
Args:
condition: can be True, False, a bool Tensor, or a callable providing such.
Yields:
Returns a context manager that sets this value on enter and restores the
previous value on exit.
"""
old = _summary_state.is_recording
try:
_summary_state.is_recording = condition
yield
finally:
_summary_state.is_recording = old
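# Editorial sketch (not part of the original module): minimal use of
# `record_if` with a callable condition so it is re-evaluated per step, as
# the docstring above recommends for tf.function bodies. Names are
# illustrative; `step_var` is assumed to be an int64 tf.Variable and a
# default writer is assumed to have been set via `as_default()`.
def _example_record_if_usage(step_var, loss_tensor):
  with record_if(lambda: math_ops.equal(step_var % 100, 0)):
    # Only every 100th step actually records the summary.
    write("loss", loss_tensor, step=step_var)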
# TODO(apassos) consider how to handle local step here.
def record_summaries_every_n_global_steps(n, global_step=None):
"""Sets the should_record_summaries Tensor to true if global_step % n == 0."""
if global_step is None:
global_step = training_util.get_or_create_global_step()
with ops.device("cpu:0"):
should = lambda: math_ops.equal(global_step % n, 0)
if not context.executing_eagerly():
should = should()
return record_if(should)
def always_record_summaries():
"""Sets the should_record_summaries Tensor to always true."""
return record_if(True)
def never_record_summaries():
"""Sets the should_record_summaries Tensor to always false."""
return record_if(False)
@tf_export("summary.experimental.get_step", v1=[])
def get_step():
"""Returns the default summary step for the current thread.
Returns:
The step set by `tf.summary.experimental.set_step()` if one has been set,
otherwise None.
"""
return _summary_state.step
@tf_export("summary.experimental.set_step", v1=[])
def set_step(step):
"""Sets the default summary step for the current thread.
For convenience, this function sets a default value for the `step` parameter
used in summary-writing functions elsewhere in the API so that it need not
be explicitly passed in every such invocation. The value can be a constant
or a variable, and can be retrieved via `tf.summary.experimental.get_step()`.
Note: when using this with @tf.functions, the step value will be captured at
the time the function is traced, so changes to the step outside the function
will not be reflected inside the function unless using a `tf.Variable` step.
Args:
step: An `int64`-castable default step value, or None to unset.
"""
_summary_state.step = step
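# Editorial sketch (not part of the original module): using a tf.Variable as
# the default step so increments made between calls stay visible inside
# traced tf.functions, per the note above. `step_variable` is an assumed
# int64 tf.Variable created by the caller.
def _example_set_step_usage(step_variable, loss_tensor):
  set_step(step_variable)
  write("loss", loss_tensor)  # picks up the default step set above
  step_variable.assign_add(1)  # reflected inside tf.functions as well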
@tf_export("summary.SummaryWriter", v1=[])
@six.add_metaclass(abc.ABCMeta)
class SummaryWriter(object):
"""Interface representing a stateful summary writer object."""
@abc.abstractmethod
def set_as_default(self, step=None):
"""Enables this summary writer for the current thread.
For convenience, if `step` is not None, this function also sets a default
value for the `step` parameter used in summary-writing functions elsewhere
in the API so that it need not be explicitly passed in every such
invocation. The value can be a constant or a variable.
Note: when setting `step` in a @tf.function, the step value will be
captured at the time the function is traced, so changes to the step outside
the function will not be reflected inside the function unless using
a `tf.Variable` step.
Args:
step: An `int64`-castable default step value, or `None`. When not `None`,
the current step is modified to the given value. When `None`, the
current step is not modified.
"""
raise NotImplementedError()
@abc.abstractmethod
@tf_contextlib.contextmanager
def as_default(self, step=None):
"""Returns a context manager that enables summary writing.
For convenience, if `step` is not None, this function also sets a default
value for the `step` parameter used in summary-writing functions elsewhere
in the API so that it need not be explicitly passed in every such
invocation. The value can be a constant or a variable.
Note: when setting `step` in a @tf.function, the step value will be
captured at the time the function is traced, so changes to the step outside
the function will not be reflected inside the function unless using
a `tf.Variable` step.
For example, `step` can be used as:
```python
with writer_a.as_default(step=10):
tf.summary.scalar(tag, value) # Logged to writer_a with step 10
with writer_b.as_default(step=20):
tf.summary.scalar(tag, value) # Logged to writer_b with step 20
tf.summary.scalar(tag, value) # Logged to writer_a with step 10
```
Args:
step: An `int64`-castable default step value, or `None`. When not `None`,
the current step is captured, replaced by a given one, and the original
one is restored when the context manager exits. When `None`, the current
step is not modified (and not restored when the context manager exits).
"""
raise NotImplementedError()
def init(self):
"""Initializes the summary writer."""
raise NotImplementedError()
def flush(self):
"""Flushes any buffered data."""
raise NotImplementedError()
def close(self):
"""Flushes and closes the summary writer."""
raise NotImplementedError()
class ResourceSummaryWriter(SummaryWriter):
"""Implementation of SummaryWriter using a SummaryWriterInterface resource."""
def __init__(self,
shared_name,
init_op_fn,
name=None,
v2=False,
metadata=None):
self._resource = gen_summary_ops.summary_writer(
shared_name=shared_name, name=name)
# TODO(nickfelt): cache other constructed ops in graph mode
self._init_op_fn = init_op_fn
self._init_op = init_op_fn(self._resource)
self._v2 = v2
self._metadata = {} if metadata is None else metadata
self._closed = False
if context.executing_eagerly():
self._resource_deleter = resource_variable_ops.EagerResourceDeleter(
handle=self._resource, handle_device="cpu:0")
else:
ops.add_to_collection(_SUMMARY_WRITER_INIT_COLLECTION_NAME, self._init_op)
def set_as_default(self, step=None):
"""Enables this summary writer for the current thread.
For convenience, if `step` is not None, this function also sets a default
value for the `step` parameter used in summary-writing functions elsewhere
in the API so that it need not be explicitly passed in every such
invocation. The value can be a constant or a variable.
Note: when setting `step` in a @tf.function, the step value will be
captured at the time the function is traced, so changes to the step outside
the function will not be reflected inside the function unless using
a `tf.Variable` step.
Args:
step: An `int64`-castable default step value, or `None`. When not `None`,
the current step is modified to the given value. When `None`, the
current step is not modified.
"""
if self._v2 and context.executing_eagerly() and self._closed:
raise RuntimeError("SummaryWriter is already closed")
_summary_state.writer = self
if step is not None:
_summary_state.step = step
@tf_contextlib.contextmanager
def as_default(self, step=None):
"""Returns a context manager that enables summary writing.
For convenience, if `step` is not None, this function also sets a default
value for the `step` parameter used in summary-writing functions elsewhere
in the API so that it need not be explicitly passed in every such
invocation. The value can be a constant or a variable.
Note: when setting `step` in a @tf.function, the step value will be
captured at the time the function is traced, so changes to the step outside
the function will not be reflected inside the function unless using
a `tf.Variable` step.
For example, `step` can be used as:
```python
with writer_a.as_default(step=10):
tf.summary.scalar(tag, value) # Logged to writer_a with step 10
with writer_b.as_default(step=20):
tf.summary.scalar(tag, value) # Logged to writer_b with step 20
tf.summary.scalar(tag, value) # Logged to writer_a with step 10
```
Args:
step: An `int64`-castable default step value, or `None`. When not `None`,
the current step is captured, replaced by a given one, and the original
one is restored when the context manager exits. When `None`, the current
step is not modified (and not restored when the context manager exits).
"""
if self._v2 and context.executing_eagerly() and self._closed:
raise RuntimeError("SummaryWriter is already closed")
old = _summary_state.writer
if step is not None:
old_step = _summary_state.step
try:
_summary_state.writer = self
if step is not None:
_summary_state.step = step
yield self
# Flushes the summary writer in eager mode or in graph functions, but
# not in legacy graph mode (you're on your own there).
self.flush()
finally:
_summary_state.writer = old
if step is not None:
_summary_state.step = old_step
def init(self):
"""Initializes the summary writer."""
if self._v2:
if context.executing_eagerly() and self._closed:
raise RuntimeError("SummaryWriter is already closed")
return self._init_op
# Legacy behavior allows re-initializing the resource.
return self._init_op_fn(self._resource)
def flush(self):
"""Flushes any buffered data."""
if self._v2 and context.executing_eagerly() and self._closed:
return
return _flush_fn(writer=self)
def close(self):
"""Flushes and closes the summary writer."""
if self._v2 and context.executing_eagerly() and self._closed:
return
try:
with ops.control_dependencies([self.flush()]):
with ops.device("cpu:0"):
return gen_summary_ops.close_summary_writer(self._resource)
finally:
if self._v2 and context.executing_eagerly():
self._closed = True
class NoopSummaryWriter(SummaryWriter):
"""A summary writer that does nothing, for create_noop_writer()."""
def set_as_default(self, step=None):
pass
@tf_contextlib.contextmanager
def as_default(self, step=None):
yield
def init(self):
pass
def flush(self):
pass
def close(self):
pass
@tf_export(v1=["summary.initialize"])
def initialize(
graph=None, # pylint: disable=redefined-outer-name
session=None):
"""Initializes summary writing for graph execution mode.
This operation is a no-op when executing eagerly.
This helper method provides a higher-level alternative to using
`tf.contrib.summary.summary_writer_initializer_op` and
`tf.contrib.summary.graph`.
Most users will also want to call `tf.compat.v1.train.create_global_step`
which can happen before or after this function is called.
Args:
graph: A `tf.Graph` or `tf.compat.v1.GraphDef` to output to the writer.
This function will not write the default graph by default. When
writing to an event log file, the associated step will be zero.
session: So this method can call `tf.Session.run`. This defaults
to `tf.compat.v1.get_default_session`.
Raises:
RuntimeError: If the current thread has no default
`tf.contrib.summary.SummaryWriter`.
ValueError: If session wasn't passed and no default session.
"""
if context.executing_eagerly():
return
if _summary_state.writer is None:
raise RuntimeError("No default tf.contrib.summary.SummaryWriter found")
if session is None:
session = ops.get_default_session()
if session is None:
raise ValueError("session must be passed if no default session exists")
session.run(summary_writer_initializer_op())
if graph is not None:
data = _serialize_graph(graph)
x = array_ops.placeholder(dtypes.string)
session.run(graph_v1(x, 0), feed_dict={x: data})
@tf_export("summary.create_file_writer", v1=[])
def create_file_writer_v2(logdir,
max_queue=None,
flush_millis=None,
filename_suffix=None,
name=None):
"""Creates a summary file writer for the given log directory.
Args:
logdir: a string specifying the directory in which to write an event file.
max_queue: the largest number of summaries to keep in a queue; will
flush once the queue gets bigger than this. Defaults to 10.
flush_millis: the largest interval between flushes. Defaults to 120,000.
filename_suffix: optional suffix for the event file name. Defaults to `.v2`.
name: a name for the op that creates the writer.
Returns:
A SummaryWriter object.
"""
if logdir is None:
raise ValueError("logdir cannot be None")
inside_function = ops.inside_function()
with ops.name_scope(name, "create_file_writer") as scope, ops.device("cpu:0"):
# Run init inside an init_scope() to hoist it out of tf.functions.
with ops.init_scope():
if context.executing_eagerly():
_check_create_file_writer_args(
inside_function,
logdir=logdir,
max_queue=max_queue,
flush_millis=flush_millis,
filename_suffix=filename_suffix)
logdir = ops.convert_to_tensor(logdir, dtype=dtypes.string)
if max_queue is None:
max_queue = constant_op.constant(10)
if flush_millis is None:
flush_millis = constant_op.constant(2 * 60 * 1000)
if filename_suffix is None:
filename_suffix = constant_op.constant(".v2")
# Prepend the PID and a process-local UID to the filename suffix to avoid
# filename collisions within the machine (the filename already contains
# the hostname to avoid cross-machine collisions).
unique_prefix = constant_op.constant(".%s.%s" % (os.getpid(), ops.uid()))
filename_suffix = unique_prefix + filename_suffix
# Use a unique shared_name to prevent resource sharing.
if context.executing_eagerly():
shared_name = context.shared_name()
else:
shared_name = ops.name_from_scope_name(scope) # pylint: disable=protected-access
return ResourceSummaryWriter(
shared_name=shared_name,
init_op_fn=functools.partial(
gen_summary_ops.create_summary_file_writer,
logdir=logdir,
max_queue=max_queue,
flush_millis=flush_millis,
filename_suffix=filename_suffix),
name=name,
v2=True,
metadata={"logdir": logdir})
def create_file_writer(logdir,
max_queue=None,
flush_millis=None,
filename_suffix=None,
name=None):
"""Creates a summary file writer in the current context under the given name.
Args:
logdir: a string, or None. If a string, creates a summary file writer
which writes to the directory named by the string. If None, returns
a mock object which acts like a summary writer but does nothing,
useful to use as a context manager.
max_queue: the largest number of summaries to keep in a queue; will
flush once the queue gets bigger than this. Defaults to 10.
flush_millis: the largest interval between flushes. Defaults to 120,000.
filename_suffix: optional suffix for the event file name. Defaults to `.v2`.
name: Shared name for this SummaryWriter resource stored to default
Graph. Defaults to the provided logdir prefixed with `logdir:`. Note: if a
summary writer resource with this shared name already exists, the returned
SummaryWriter wraps that resource and the other arguments have no effect.
Returns:
Either a summary writer or an empty object which can be used as a
summary writer.
"""
if logdir is None:
return NoopSummaryWriter()
logdir = str(logdir)
with ops.device("cpu:0"):
if max_queue is None:
max_queue = constant_op.constant(10)
if flush_millis is None:
flush_millis = constant_op.constant(2 * 60 * 1000)
if filename_suffix is None:
filename_suffix = constant_op.constant(".v2")
if name is None:
name = "logdir:" + logdir
return ResourceSummaryWriter(
shared_name=name,
init_op_fn=functools.partial(
gen_summary_ops.create_summary_file_writer,
logdir=logdir,
max_queue=max_queue,
flush_millis=flush_millis,
filename_suffix=filename_suffix))
@tf_export("summary.create_noop_writer", v1=[])
def create_noop_writer():
"""Returns a summary writer that does nothing.
This is useful as a placeholder in code that expects a context manager.
"""
return NoopSummaryWriter()
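# Editorial sketch (not part of the original module): the no-op writer as a
# placeholder so summary calls can stay unconditional; if no real writer is
# ever set as default, `write()` simply returns False.
def _example_noop_writer_usage(logdir, loss_tensor, step_value):
  writer = create_file_writer_v2(logdir) if logdir else create_noop_writer()
  with writer.as_default():
    write("loss", loss_tensor, step=step_value)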
def _cleanse_string(name, pattern, value):
if isinstance(value, six.string_types) and pattern.search(value) is None:
raise ValueError("%s (%s) must match %s" % (name, value, pattern.pattern))
return ops.convert_to_tensor(value, dtypes.string)
def _nothing():
"""Convenient else branch for when summaries do not record."""
return constant_op.constant(False)
@tf_export(v1=["summary.all_v2_summary_ops"])
def all_v2_summary_ops():
"""Returns all V2-style summary ops defined in the current default graph.
This includes ops from TF 2.0 tf.summary and TF 1.x tf.contrib.summary (except
for `tf.contrib.summary.graph` and `tf.contrib.summary.import_event`), but
does *not* include TF 1.x tf.summary ops.
Returns:
List of summary ops, or None if called under eager execution.
"""
if context.executing_eagerly():
return None
return ops.get_collection(ops.GraphKeys._SUMMARY_COLLECTION) # pylint: disable=protected-access
def summary_writer_initializer_op():
"""Graph-mode only. Returns the list of ops to create all summary writers.
Returns:
The initializer ops.
Raises:
RuntimeError: If in Eager mode.
"""
if context.executing_eagerly():
raise RuntimeError(
"tf.contrib.summary.summary_writer_initializer_op is only "
"supported in graph mode.")
return ops.get_collection(_SUMMARY_WRITER_INIT_COLLECTION_NAME)
_INVALID_SCOPE_CHARACTERS = re.compile(r"[^-_/.A-Za-z0-9]")
@tf_export("summary.experimental.summary_scope", v1=[])
@tf_contextlib.contextmanager
def summary_scope(name, default_name="summary", values=None):
"""Experimental context manager for use when defining a custom summary op.
This behaves similarly to `tf.name_scope`, except that it returns a generated
summary tag in addition to the scope name. The tag is structurally similar to
the scope name - derived from the user-provided name, prefixed with enclosing
name scopes if any - but we relax the constraint that it be uniquified, as
well as the character set limitation (so the user-provided name can contain
characters not legal for scope names; in the scope name these are removed).
This makes the summary tag more predictable and consistent for the user.
For example, to define a new summary op called `my_op`:
```python
def my_op(name, my_value, step):
with tf.summary.summary_scope(name, "MyOp", [my_value]) as (tag, scope):
my_value = tf.convert_to_tensor(my_value)
return tf.summary.write(tag, my_value, step=step)
```
Args:
name: string name for the summary.
default_name: Optional; if provided, used as default name of the summary.
values: Optional; passed as `values` parameter to name_scope.
Yields:
A tuple `(tag, scope)` as described above.
"""
name = name or default_name
current_scope = ops.get_name_scope()
tag = current_scope + "/" + name if current_scope else name
# Strip illegal characters from the scope name, and if that leaves nothing,
# use None instead so we pick up the default name.
name = _INVALID_SCOPE_CHARACTERS.sub("", name) or None
with ops.name_scope(name, default_name, values, skip_on_eager=False) as scope:
yield tag, scope
@tf_export("summary.write", v1=[])
def write(tag, tensor, step=None, metadata=None, name=None):
"""Writes a generic summary to the default SummaryWriter if one exists.
This exists primarily to support the definition of type-specific summary ops
like scalar() and image(), and is not intended for direct use unless defining
a new type-specific summary op.
Args:
tag: string tag used to identify the summary (e.g. in TensorBoard), usually
generated with `tf.summary.summary_scope`
tensor: the Tensor holding the summary data to write or a callable that
returns this Tensor. If a callable is passed, it will only be called when
a default SummaryWriter exists and the recording condition specified by
`record_if()` is met.
step: Explicit `int64`-castable monotonic step value for this summary. If
omitted, this defaults to `tf.summary.experimental.get_step()`, which must
not be None.
metadata: Optional SummaryMetadata, as a proto or serialized bytes
name: Optional string name for this op.
Returns:
    True on success, or False if no summary was written because no default
summary writer was available.
Raises:
ValueError: if a default writer exists, but no step was provided and
`tf.summary.experimental.get_step()` is None.
"""
with ops.name_scope(name, "write_summary") as scope:
if _summary_state.writer is None:
return constant_op.constant(False)
if step is None:
step = get_step()
if metadata is None:
serialized_metadata = b""
elif hasattr(metadata, "SerializeToString"):
serialized_metadata = metadata.SerializeToString()
else:
serialized_metadata = metadata
def record():
"""Record the actual summary and return True."""
if step is None:
raise ValueError("No step set via 'step' argument or "
"tf.summary.experimental.set_step()")
# Note the identity to move the tensor to the CPU.
with ops.device("cpu:0"):
summary_tensor = tensor() if callable(tensor) else array_ops.identity(
tensor)
write_summary_op = gen_summary_ops.write_summary(
_summary_state.writer._resource, # pylint: disable=protected-access
step,
summary_tensor,
tag,
serialized_metadata,
name=scope)
with ops.control_dependencies([write_summary_op]):
return constant_op.constant(True)
op = smart_cond.smart_cond(
_should_record_summaries_v2(), record, _nothing, name="summary_cond")
if not context.executing_eagerly():
ops.add_to_collection(ops.GraphKeys._SUMMARY_COLLECTION, op) # pylint: disable=protected-access
return op
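# Editorial sketch (not part of the original module): calling `write`
# directly with an explicit step and default metadata; the type-specific
# ops such as scalar() funnel into this same path.
def _example_write_usage(writer, value_tensor):
  with writer.as_default():
    return write("my_metric", value_tensor, step=0)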
@tf_export("summary.experimental.write_raw_pb", v1=[])
def write_raw_pb(tensor, step=None, name=None):
"""Writes a summary using raw `tf.compat.v1.Summary` protocol buffers.
Experimental: this exists to support the usage of V1-style manual summary
writing (via the construction of a `tf.compat.v1.Summary` protocol buffer)
with the V2 summary writing API.
Args:
tensor: the string Tensor holding one or more serialized `Summary` protobufs
step: Explicit `int64`-castable monotonic step value for this summary. If
omitted, this defaults to `tf.summary.experimental.get_step()`, which must
not be None.
name: Optional string name for this op.
Returns:
    True on success, or False if no summary was written because no default
summary writer was available.
Raises:
ValueError: if a default writer exists, but no step was provided and
`tf.summary.experimental.get_step()` is None.
"""
with ops.name_scope(name, "write_raw_pb") as scope:
if _summary_state.writer is None:
return constant_op.constant(False)
if step is None:
step = get_step()
if step is None:
raise ValueError("No step set via 'step' argument or "
"tf.summary.experimental.set_step()")
def record():
"""Record the actual summary and return True."""
# Note the identity to move the tensor to the CPU.
with ops.device("cpu:0"):
raw_summary_op = gen_summary_ops.write_raw_proto_summary(
_summary_state.writer._resource, # pylint: disable=protected-access
step,
array_ops.identity(tensor),
name=scope)
with ops.control_dependencies([raw_summary_op]):
return constant_op.constant(True)
with ops.device("cpu:0"):
op = smart_cond.smart_cond(
_should_record_summaries_v2(), record, _nothing, name="summary_cond")
if not context.executing_eagerly():
ops.add_to_collection(ops.GraphKeys._SUMMARY_COLLECTION, op) # pylint: disable=protected-access
return op
def summary_writer_function(name, tensor, function, family=None):
"""Helper function to write summaries.
Args:
name: name of the summary
tensor: main tensor to form the summary
function: function taking a tag and a scope which writes the summary
family: optional, the summary's family
Returns:
The result of writing the summary.
"""
name_scope = ops.get_name_scope()
if name_scope:
# Add a slash to allow reentering the name scope.
name_scope += "/"
def record():
with ops.name_scope(name_scope), summary_op_util.summary_scope(
name, family, values=[tensor]) as (tag, scope):
with ops.control_dependencies([function(tag, scope)]):
return constant_op.constant(True)
if _summary_state.writer is None:
return control_flow_ops.no_op()
with ops.device("cpu:0"):
op = smart_cond.smart_cond(
should_record_summaries(), record, _nothing, name="")
if not context.executing_eagerly():
ops.add_to_collection(ops.GraphKeys._SUMMARY_COLLECTION, op) # pylint: disable=protected-access
return op
def generic(name, tensor, metadata=None, family=None, step=None):
"""Writes a tensor summary if possible."""
def function(tag, scope):
if metadata is None:
serialized_metadata = constant_op.constant("")
elif hasattr(metadata, "SerializeToString"):
serialized_metadata = constant_op.constant(metadata.SerializeToString())
else:
serialized_metadata = metadata
# Note the identity to move the tensor to the CPU.
return gen_summary_ops.write_summary(
_summary_state.writer._resource, # pylint: disable=protected-access
_choose_step(step),
array_ops.identity(tensor),
tag,
serialized_metadata,
name=scope)
return summary_writer_function(name, tensor, function, family=family)
def scalar(name, tensor, family=None, step=None):
"""Writes a scalar summary if possible.
Unlike `tf.contrib.summary.generic` this op may change the dtype
depending on the writer, for both practical and efficiency concerns.
Args:
name: An arbitrary name for this summary.
tensor: A `tf.Tensor` Must be one of the following types:
`float32`, `float64`, `int32`, `int64`, `uint8`, `int16`,
`int8`, `uint16`, `half`, `uint32`, `uint64`.
family: Optional, the summary's family.
step: The `int64` monotonic step variable, which defaults
to `tf.compat.v1.train.get_global_step`.
Returns:
The created `tf.Operation` or a `tf.no_op` if summary writing has
not been enabled for this context.
"""
def function(tag, scope):
# Note the identity to move the tensor to the CPU.
return gen_summary_ops.write_scalar_summary(
_summary_state.writer._resource, # pylint: disable=protected-access
_choose_step(step),
tag,
array_ops.identity(tensor),
name=scope)
return summary_writer_function(name, tensor, function, family=family)
def histogram(name, tensor, family=None, step=None):
"""Writes a histogram summary if possible."""
def function(tag, scope):
# Note the identity to move the tensor to the CPU.
return gen_summary_ops.write_histogram_summary(
_summary_state.writer._resource, # pylint: disable=protected-access
_choose_step(step),
tag,
array_ops.identity(tensor),
name=scope)
return summary_writer_function(name, tensor, function, family=family)
def image(name, tensor, bad_color=None, max_images=3, family=None, step=None):
"""Writes an image summary if possible."""
def function(tag, scope):
bad_color_ = (constant_op.constant([255, 0, 0, 255], dtype=dtypes.uint8)
if bad_color is None else bad_color)
# Note the identity to move the tensor to the CPU.
return gen_summary_ops.write_image_summary(
_summary_state.writer._resource, # pylint: disable=protected-access
_choose_step(step),
tag,
array_ops.identity(tensor),
bad_color_,
max_images,
name=scope)
return summary_writer_function(name, tensor, function, family=family)
def audio(name, tensor, sample_rate, max_outputs, family=None, step=None):
"""Writes an audio summary if possible."""
def function(tag, scope):
# Note the identity to move the tensor to the CPU.
return gen_summary_ops.write_audio_summary(
_summary_state.writer._resource, # pylint: disable=protected-access
_choose_step(step),
tag,
array_ops.identity(tensor),
sample_rate=sample_rate,
max_outputs=max_outputs,
name=scope)
return summary_writer_function(name, tensor, function, family=family)
def graph_v1(param, step=None, name=None):
"""Writes a TensorFlow graph to the summary interface.
The graph summary is, strictly speaking, not a summary. Conditions
like `tf.summary.should_record_summaries` do not apply. Only
a single graph can be associated with a particular run. If multiple
graphs are written, then only the last one will be considered by
TensorBoard.
  When not using eager execution mode, the user should consider passing
  the `graph` parameter to `tf.compat.v1.summary.initialize` instead of
  calling this function directly; otherwise special care needs to be taken
  when using this function to record the graph.
Args:
param: A `tf.Tensor` containing a serialized graph proto. When
eager execution is enabled, this function will automatically
coerce `tf.Graph`, `tf.compat.v1.GraphDef`, and string types.
step: The global step variable. This doesn't have useful semantics
for graph summaries, but is used anyway, due to the structure of
event log files. This defaults to the global step.
name: A name for the operation (optional).
Returns:
The created `tf.Operation` or a `tf.no_op` if summary writing has
not been enabled for this context.
Raises:
TypeError: If `param` isn't already a `tf.Tensor` in graph mode.
"""
if not context.executing_eagerly() and not isinstance(param, ops.Tensor):
raise TypeError("graph() needs a tf.Tensor (e.g. tf.placeholder) in graph "
"mode, but was: %s" % type(param))
writer = _summary_state.writer
if writer is None:
return control_flow_ops.no_op()
with ops.device("cpu:0"):
if isinstance(param, (ops.Graph, graph_pb2.GraphDef)):
tensor = ops.convert_to_tensor(_serialize_graph(param), dtypes.string)
else:
tensor = array_ops.identity(param)
return gen_summary_ops.write_graph_summary(
writer._resource, _choose_step(step), tensor, name=name) # pylint: disable=protected-access
@tf_export("summary.graph", v1=[])
def graph(graph_data):
"""Writes a TensorFlow graph summary.
  Writes an instance of `tf.Graph` or `tf.compat.v1.GraphDef` as a summary;
  this is only supported in eager mode. Prefer the trace APIs
  (`tf.summary.trace_on`, `tf.summary.trace_off`, and `tf.summary.trace_export`)
  when using `tf.function`, since they can automatically collect and record
  graphs from executions.
Usage Example:
```py
writer = tf.summary.create_file_writer("/tmp/mylogs")
@tf.function
def f():
x = constant_op.constant(2)
y = constant_op.constant(3)
return x**y
with writer.as_default():
tf.summary.graph(f.get_concrete_function().graph)
# Another example: in a very rare use case, when you are dealing with a TF v1
# graph.
graph = tf.Graph()
with graph.as_default():
c = tf.constant(30.0)
with writer.as_default():
tf.summary.graph(graph)
```
Args:
graph_data: The TensorFlow graph to write, as a `tf.Graph` or a
`tf.compat.v1.GraphDef`.
Returns:
True on success, or False if no summary was written because no default
summary writer was available.
Raises:
    ValueError: if the `graph` summary API is invoked in graph mode.
"""
if not context.executing_eagerly():
raise ValueError("graph() cannot be invoked inside a graph context.")
writer = _summary_state.writer
if writer is None:
return constant_op.constant(False)
with ops.device("cpu:0"):
if not _should_record_summaries_v2():
return constant_op.constant(False)
if isinstance(graph_data, (ops.Graph, graph_pb2.GraphDef)):
tensor = ops.convert_to_tensor(
_serialize_graph(graph_data), dtypes.string)
else:
raise ValueError("'graph_data' is not tf.Graph or tf.compat.v1.GraphDef")
gen_summary_ops.write_graph_summary(
writer._resource, # pylint: disable=protected-access
# Graph does not have step. Set to 0.
0,
tensor,
)
return constant_op.constant(True)
def import_event(tensor, name=None):
"""Writes a `tf.compat.v1.Event` binary proto.
This can be used to import existing event logs into a new summary writer sink.
Please note that this is lower level than the other summary functions and
will ignore the `tf.summary.should_record_summaries` setting.
Args:
tensor: A `tf.Tensor` of type `string` containing a serialized
`tf.compat.v1.Event` proto.
name: A name for the operation (optional).
Returns:
The created `tf.Operation`.
"""
return gen_summary_ops.import_event(
_summary_state.writer._resource, tensor, name=name) # pylint: disable=protected-access
@tf_export("summary.flush", v1=[])
def flush(writer=None, name=None):
"""Forces summary writer to send any buffered data to storage.
This operation blocks until that finishes.
Args:
    writer: The `tf.summary.SummaryWriter` resource to flush.
      The thread default will be used if this parameter is None.
      If no such writer exists, a `tf.no_op` is returned.
name: A name for the operation (optional).
Returns:
The created `tf.Operation`.
"""
if writer is None:
writer = _summary_state.writer
if writer is None:
return control_flow_ops.no_op()
if isinstance(writer, ResourceSummaryWriter):
resource = writer._resource # pylint: disable=protected-access
else:
# Assume we were passed a raw resource tensor.
resource = writer
with ops.device("cpu:0"):
return gen_summary_ops.flush_summary_writer(resource, name=name)
_flush_fn = flush # for within SummaryWriter.flush()
def eval_dir(model_dir, name=None):
"""Construct a logdir for an eval summary writer."""
return os.path.join(model_dir, "eval" if not name else "eval_" + name)
@deprecation.deprecated(date=None,
instructions="Renamed to create_file_writer().")
def create_summary_file_writer(*args, **kwargs):
"""Please use `tf.contrib.summary.create_file_writer`."""
logging.warning("Deprecation Warning: create_summary_file_writer was renamed "
"to create_file_writer")
return create_file_writer(*args, **kwargs)
def _serialize_graph(arbitrary_graph):
if isinstance(arbitrary_graph, ops.Graph):
return arbitrary_graph.as_graph_def(add_shapes=True).SerializeToString()
else:
return arbitrary_graph.SerializeToString()
def _choose_step(step):
if step is None:
return training_util.get_or_create_global_step()
if not isinstance(step, ops.Tensor):
return ops.convert_to_tensor(step, dtypes.int64)
return step
def _check_create_file_writer_args(inside_function, **kwargs):
"""Helper to check the validity of arguments to a create_file_writer() call.
Args:
inside_function: whether the create_file_writer() call is in a tf.function
**kwargs: the arguments to check, as kwargs to give them names.
Raises:
ValueError: if the arguments are graph tensors.
"""
for arg_name, arg in kwargs.items():
if not isinstance(arg, ops.EagerTensor) and tensor_util.is_tf_type(arg):
if inside_function:
raise ValueError(
"Invalid graph Tensor argument \"%s=%s\" to create_file_writer() "
"inside an @tf.function. The create call will be lifted into the "
"outer eager execution context, so it cannot consume graph tensors "
"defined inside the function body." % (arg_name, arg))
else:
raise ValueError(
"Invalid graph Tensor argument \"%s=%s\" to eagerly executed "
"create_file_writer()." % (arg_name, arg))
def run_metadata(name, data, step=None):
"""Writes entire RunMetadata summary.
A RunMetadata can contain DeviceStats, partition graphs, and function graphs.
Please refer to the proto for definition of each field.
Args:
name: A name for this summary. The summary tag used for TensorBoard will be
this name prefixed by any active name scopes.
data: A RunMetadata proto to write.
step: Explicit `int64`-castable monotonic step value for this summary. If
omitted, this defaults to `tf.summary.experimental.get_step()`, which must
not be None.
Returns:
    True on success, or False if no summary was written because no default
summary writer was available.
Raises:
ValueError: if a default writer exists, but no step was provided and
`tf.summary.experimental.get_step()` is None.
"""
summary_metadata = summary_pb2.SummaryMetadata()
# Hard coding a plugin name. Please refer to go/tb-plugin-name-hardcode for
# the rationale.
summary_metadata.plugin_data.plugin_name = "graph_run_metadata"
# version number = 1
summary_metadata.plugin_data.content = b"1"
with summary_scope(name,
"graph_run_metadata_summary",
[data, step]) as (tag, _):
with ops.device("cpu:0"):
tensor = constant_op.constant(data.SerializeToString(),
dtype=dtypes.string)
return write(
tag=tag,
tensor=tensor,
step=step,
metadata=summary_metadata)
def run_metadata_graphs(name, data, step=None):
"""Writes graphs from a RunMetadata summary.
Args:
name: A name for this summary. The summary tag used for TensorBoard will be
this name prefixed by any active name scopes.
data: A RunMetadata proto to write.
step: Explicit `int64`-castable monotonic step value for this summary. If
omitted, this defaults to `tf.summary.experimental.get_step()`, which must
not be None.
Returns:
    True on success, or False if no summary was written because no default
summary writer was available.
Raises:
ValueError: if a default writer exists, but no step was provided and
`tf.summary.experimental.get_step()` is None.
"""
summary_metadata = summary_pb2.SummaryMetadata()
# Hard coding a plugin name. Please refer to go/tb-plugin-name-hardcode for
# the rationale.
summary_metadata.plugin_data.plugin_name = "graph_run_metadata_graph"
# version number = 1
summary_metadata.plugin_data.content = b"1"
data = config_pb2.RunMetadata(
function_graphs=data.function_graphs,
partition_graphs=data.partition_graphs)
with summary_scope(name,
"graph_run_metadata_graph_summary",
[data, step]) as (tag, _):
with ops.device("cpu:0"):
tensor = constant_op.constant(data.SerializeToString(),
dtype=dtypes.string)
return write(
tag=tag,
tensor=tensor,
step=step,
metadata=summary_metadata)
_TraceContext = collections.namedtuple("TraceContext", ("graph", "profiler"))
_current_trace_context_lock = threading.Lock()
_current_trace_context = None
@tf_export("summary.trace_on", v1=[])
def trace_on(graph=True, profiler=False): # pylint: disable=redefined-outer-name
"""Starts a trace to record computation graphs and profiling information.
Must be invoked in eager mode.
  When enabled, the TensorFlow runtime will collect information that can later
  be
exported and consumed by TensorBoard. The trace is activated across the entire
TensorFlow runtime and affects all threads of execution.
To stop the trace and export the collected information, use
`tf.summary.trace_export`. To stop the trace without exporting, use
`tf.summary.trace_off`.
Args:
graph: If True, enables collection of executed graphs. It includes ones from
tf.function invocation and ones from the legacy graph mode. The default
is True.
profiler: If True, enables the advanced profiler. Enabling profiler
implicitly enables the graph collection. The profiler may incur a high
memory overhead. The default is False.
"""
if ops.inside_function():
logging.warn("Cannot enable trace inside a tf.function.")
return
if not context.executing_eagerly():
logging.warn("Must enable trace in eager mode.")
return
global _current_trace_context
with _current_trace_context_lock:
if _current_trace_context:
logging.warn("Trace already enabled")
return
if graph and not profiler:
context.context().enable_graph_collection()
if profiler:
context.context().enable_run_metadata()
_profiler.start()
_current_trace_context = _TraceContext(graph=graph, profiler=profiler)
@tf_export("summary.trace_export", v1=[])
def trace_export(name, step=None, profiler_outdir=None):
"""Stops and exports the active trace as a Summary and/or profile file.
Stops the trace and exports all metadata collected during the trace to the
default SummaryWriter, if one has been set.
Args:
name: A name for the summary to be written.
step: Explicit `int64`-castable monotonic step value for this summary. If
omitted, this defaults to `tf.summary.experimental.get_step()`, which must
not be None.
profiler_outdir: Output directory for profiler. This is only used when the
profiler was enabled when the trace was started. In that case, if there is
a logdir-based default SummaryWriter, this defaults to the same directory,
but otherwise the argument must be passed.
Raises:
ValueError: if a default writer exists, but no step was provided and
`tf.summary.experimental.get_step()` is None.
"""
global _current_trace_context
if ops.inside_function():
logging.warn("Cannot export trace inside a tf.function.")
return
if not context.executing_eagerly():
logging.warn("Can only export trace while executing eagerly.")
return
with _current_trace_context_lock:
if _current_trace_context is None:
raise ValueError("Must enable trace before export.")
graph, profiler = _current_trace_context # pylint: disable=redefined-outer-name
    if (profiler_outdir is None
        and isinstance(_summary_state.writer, ResourceSummaryWriter)):
logdir = _summary_state.writer._metadata.get("logdir") # pylint: disable=protected-access
if logdir is not None:
profiler_outdir = logdir
if profiler and profiler_outdir is None:
raise ValueError("Must set profiler_outdir or "
"enable summary writer with logdir.")
run_meta = context.context().export_run_metadata()
if graph and not profiler:
run_metadata_graphs(name, run_meta, step)
else:
run_metadata(name, run_meta, step)
if profiler:
_profiler.save(profiler_outdir, _profiler.stop())
trace_off()
@tf_export("summary.trace_off", v1=[])
def trace_off():
"""Stops the current trace and discards any collected information."""
global _current_trace_context
with _current_trace_context_lock:
if _current_trace_context is None:
return # tracing already off
graph, profiler = _current_trace_context # pylint: disable=redefined-outer-name, unpacking-non-sequence
_current_trace_context = None
if graph:
# Disabling run_metadata disables graph collection as well.
context.context().disable_run_metadata()
if profiler:
try:
_profiler.stop()
except _profiler.ProfilerNotRunningError:
pass
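# Editorial sketch (not part of the original module) of the trace workflow
# described in trace_on()/trace_export(): enable tracing, run the traced
# computation, then export to the default writer. `traced_fn` stands in for
# a @tf.function invocation the caller wants captured.
def _example_trace_workflow(traced_fn, writer, step_value):
  trace_on(graph=True, profiler=False)
  traced_fn()
  with writer.as_default():
    trace_export(name="my_trace", step=step_value)  # also stops the trace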
| petewarden/tensorflow | tensorflow/python/ops/summary_ops_v2.py | Python | apache-2.0 | 49,451 |
from django.test import TestCase
from django.core.exceptions import ValidationError
from oscar.core.compat import get_user_model
from oscar.apps.catalogue.reviews import models
from oscar.test.factories import create_product
from oscar.test.factories import UserFactory
User = get_user_model()
class TestAnAnonymousReview(TestCase):
def setUp(self):
self.product = create_product()
self.data = {
'product': self.product,
'title': 'This product is lovely',
'body': 'I really like this cheese',
'score': 0,
'name': 'JR Hartley',
'email': 'hartley@example.com'
}
def review(self, **kwargs):
if kwargs:
data = self.data.copy()
data.update(kwargs)
else:
data = self.data
return models.ProductReview(**data)
def test_can_be_created(self):
review = self.review()
review.full_clean()
def test_requires_a_title(self):
review = self.review(title="")
self.assertRaises(ValidationError, review.full_clean)
def test_requires_a_body(self):
review = self.review(body="")
self.assertRaises(ValidationError, review.full_clean)
def test_requires_a_name(self):
review = self.review(name="")
self.assertRaises(ValidationError, review.full_clean)
def test_requires_an_email_address(self):
review = self.review(email="")
self.assertRaises(ValidationError, review.full_clean)
def test_requires_non_whitespace_title(self):
review = self.review(title=" ")
self.assertRaises(ValidationError, review.full_clean)
def test_starts_with_no_votes(self):
review = self.review()
review.save()
self.assertFalse(review.has_votes)
self.assertEqual(0, review.num_up_votes)
self.assertEqual(0, review.num_down_votes)
def test_has_reviewer_name_property(self):
review = self.review(name="Dave")
self.assertEqual("Dave", review.reviewer_name)
def test_review_moderate_setting_false(self):
with self.settings(OSCAR_MODERATE_REVIEWS=False):
review = self.review()
self.assertEqual(1, review.status)
def test_review_moderate_setting_true(self):
with self.settings(OSCAR_MODERATE_REVIEWS=True):
review = self.review()
self.assertEqual(0, review.status)
class TestAUserReview(TestCase):
def setUp(self):
self.product = create_product()
self.user = UserFactory(first_name="Tom", last_name="Thumb")
self.data = {
'product': self.product,
'title': 'This product is lovely',
'body': 'I really like this cheese',
'score': 0,
'user': self.user
}
def review(self, **kwargs):
if kwargs:
data = self.data.copy()
data.update(kwargs)
else:
data = self.data
return models.ProductReview(**data)
def test_can_be_created(self):
review = self.review()
review.full_clean()
def test_requires_a_title(self):
review = self.review(title="")
self.assertRaises(ValidationError, review.full_clean)
def test_requires_a_body(self):
review = self.review(body="")
self.assertRaises(ValidationError, review.full_clean)
def test_has_reviewer_name_property(self):
review = self.review()
self.assertEqual("Tom Thumb", review.reviewer_name)
def test_num_approved_reviews(self):
review = self.review()
review.save()
self.assertEqual(self.product.num_approved_reviews, 1)
self.assertEqual(self.product.reviews.approved().first(), review)
def test_review_moderate_setting_false(self):
with self.settings(OSCAR_MODERATE_REVIEWS=False):
review = self.review()
self.assertEqual(1, review.status)
def test_review_moderate_setting_true(self):
with self.settings(OSCAR_MODERATE_REVIEWS=True):
review = self.review()
self.assertEqual(0, review.status)
class TestVotingOnAReview(TestCase):
def setUp(self):
self.product = create_product()
self.user = UserFactory()
self.voter = UserFactory()
self.review = self.product.reviews.create(
title='This is nice',
score=3,
body="This is the body",
user=self.user)
def test_updates_totals_for_upvote(self):
self.review.vote_up(self.voter)
self.assertTrue(self.review.has_votes)
self.assertEqual(1, self.review.total_votes)
self.assertEqual(1, self.review.delta_votes)
def test_updates_totals_for_downvote(self):
self.review.vote_down(self.voter)
self.assertTrue(self.review.has_votes)
self.assertEqual(1, self.review.total_votes)
self.assertEqual(-1, self.review.delta_votes)
def test_is_permitted_for_normal_user(self):
is_allowed, reason = self.review.can_user_vote(self.voter)
self.assertTrue(is_allowed, reason)
def test_is_not_permitted_for_reviewer(self):
is_allowed, reason = self.review.can_user_vote(self.user)
self.assertFalse(is_allowed, reason)
def test_is_not_permitted_for_previous_voter(self):
self.review.vote_up(self.voter)
is_allowed, reason = self.review.can_user_vote(self.voter)
self.assertFalse(is_allowed, reason)
| vicky2135/lucious | tests/integration/catalogue/reviews/test_models.py | Python | bsd-3-clause | 5,519 |
"""
Contains application CRUD view definitions.
"""
from django.core.exceptions import ImproperlyConfigured
from django.core.paginator import EmptyPage
from django.core.paginator import PageNotAnInteger
from django.core.urlresolvers import reverse
from django.shortcuts import redirect
from django.utils.decorators import method_decorator
from django.utils.encoding import force_text
from django.utils.translation import ugettext_lazy as _
from django.views import generic
from django.views.decorators.vary import vary_on_headers
from wagtail.wagtailadmin.forms import SearchForm
from wagtail.wagtailadmin import messages
from wagtail.wagtailsearch.backends import get_search_backends
class IndexView(generic.ListView):
"""
CRUD index view for specified model class.
"""
paginate_by = 20
page_kwarg = 'p'
search_form_class = SearchForm
template_dir = None
def __init__(self, *args, **kwargs):
"""
Initializes the view instance.
"""
#noinspection PyArgumentList
super(IndexView, self).__init__(*args, **kwargs)
if not self.template_dir:
raise ImproperlyConfigured(
'You must set the template_dir attribute.'
)
@method_decorator(vary_on_headers('X-Requested-With'))
def dispatch(self, request, *args, **kwargs):
"""
Dispatches the request.
:param request: the request instance.
:rtype: django.http.HttpResponse.
"""
return super(IndexView, self).dispatch(request, *args, **kwargs)
def get_context_data(self, **kwargs):
"""
Returns context dictionary for view.
:rtype: dict.
"""
#noinspection PyUnresolvedReferences
query_str = self.request.GET.get('q', None)
queryset = kwargs.pop('object_list', self.object_list)
context_object_name = self.get_context_object_name(queryset)
# Build the context dictionary.
context = {
'ordering': self.get_ordering(),
'query_string': query_str,
'is_searching': bool(query_str),
}
# Add extra variables to context for non-AJAX requests.
#noinspection PyUnresolvedReferences
if not self.request.is_ajax() or kwargs.get('force_search', False):
context.update({
'search_form': self.get_search_form(),
'popular_tags': self.model.popular_tags()
})
if context_object_name is not None:
context[context_object_name] = queryset
# Update context with any additional keyword arguments.
context.update(kwargs)
return super(IndexView, self).get_context_data(**context)
def get_ordering(self):
"""
Returns ordering value for list.
:rtype: str.
"""
#noinspection PyUnresolvedReferences
ordering = self.request.GET.get('ordering', None)
if ordering not in ['title', '-created_at']:
ordering = '-created_at'
return ordering
def get_queryset(self):
"""
Returns queryset instance.
:rtype: django.db.models.query.QuerySet.
"""
queryset = super(IndexView, self).get_queryset()
search_form = self.get_search_form()
if search_form.is_valid():
query_str = search_form.cleaned_data.get('q', '').strip()
queryset = self.model.search(query_str)
return queryset
def get_search_form(self):
"""
Returns search form instance.
:rtype: django.forms.ModelForm.
"""
#noinspection PyUnresolvedReferences
if 'q' in self.request.GET:
#noinspection PyUnresolvedReferences
return self.search_form_class(self.request.GET)
else:
return self.search_form_class(placeholder=_(u'Search'))
def get_template_names(self):
"""
Returns a list of template names for the view.
:rtype: list.
"""
#noinspection PyUnresolvedReferences
if self.request.is_ajax():
template_name = '/results.html'
else:
template_name = '/index.html'
return ['{0}{1}'.format(self.template_dir, template_name)]
def paginate_queryset(self, queryset, page_size):
"""
Returns tuple containing paginator instance, page instance,
object list, and whether there are other pages.
:param queryset: the queryset instance to paginate.
:param page_size: the number of instances per page.
:rtype: tuple.
"""
paginator = self.get_paginator(
queryset,
page_size,
            orphans=self.get_paginate_orphans(),
            allow_empty_first_page=self.get_allow_empty()
)
page_kwarg = self.page_kwarg
#noinspection PyUnresolvedReferences
page_num = self.kwargs.get(page_kwarg) or self.request.GET.get(page_kwarg) or 1
# Default to a valid page.
try:
page = paginator.page(page_num)
except PageNotAnInteger:
page = paginator.page(1)
except EmptyPage:
page = paginator.page(paginator.num_pages)
        return paginator, page, page.object_list, page.has_other_pages()
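# Editorial sketch (not part of the original module): building a concrete
# IndexView subclass; the model class is passed in so the example stays
# self-contained, and the template directory name is hypothetical.
def _example_index_view(model_class):
    class ModelIndexView(IndexView):
        model = model_class
        template_dir = 'example'  # expects example/index.html, example/results.html
    return ModelIndexView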
class BaseEditView(generic.edit.ModelFormMixin, generic.edit.ProcessFormView):
"""
Base CRUD edit view.
"""
url_namespace = None
template_dir = None
def __init__(self, *args, **kwargs):
"""
Initializes the view instance.
"""
super(BaseEditView, self).__init__(*args, **kwargs)
if not self.url_namespace:
raise ImproperlyConfigured(
                'You must set the url_namespace attribute.'
)
if not self.template_dir:
raise ImproperlyConfigured(
'You must set the template_dir attribute.'
)
@method_decorator(vary_on_headers('X-Requested-With'))
def dispatch(self, request, *args, **kwargs):
"""
Dispatches the request.
:param request: the request instance.
:rtype: django.http.HttpResponse.
"""
return super(BaseEditView, self).dispatch(request, *args, **kwargs)
def form_invalid(self, form):
"""
Processes an invalid form submittal.
:param form: the form instance.
:rtype: django.http.HttpResponse.
"""
meta = getattr(self.model, '_meta')
#noinspection PyUnresolvedReferences
messages.error(
self.request,
_(u'The {0} could not be saved due to errors.').format(
meta.verbose_name.lower()
)
)
return super(BaseEditView, self).form_invalid(form)
def form_valid(self, form):
"""
Processes a valid form submittal.
:param form: the form instance.
:rtype: django.http.HttpResponse.
"""
#noinspection PyAttributeOutsideInit
self.object = form.save()
meta = getattr(self.object, '_meta')
# Index the object.
for backend in get_search_backends():
            backend.add(self.object)
#noinspection PyUnresolvedReferences
messages.success(
self.request,
_(u'{0} "{1}" saved.').format(
meta.verbose_name,
str(self.object)
),
buttons=[messages.button(
reverse(
'{0}:edit'.format(self.url_namespace),
args=(self.object.id,)
),
_(u'Edit')
)]
)
return redirect(self.get_success_url())
def get_success_url(self):
"""
Returns redirect URL for valid form submittal.
:rtype: str.
"""
if self.success_url:
url = force_text(self.success_url)
else:
url = reverse('{0}:index'.format(self.url_namespace))
return url
class CreateView(BaseEditView, generic.CreateView):
"""
CRUD create view for specified model class.
"""
def get_template_names(self):
"""
Returns a list of template names for the view.
:rtype: list.
"""
return ['{0}/add.html'.format(self.template_dir)]
class UpdateView(BaseEditView, generic.UpdateView):
"""
CRUD edit view for specified model class.
"""
def get_template_names(self):
"""
Returns a list of template names for the view.
:rtype: list.
"""
return ['{0}/edit.html'.format(self.template_dir)]
class DeleteView(generic.DeleteView):
"""
CRUD delete view for specified model class.
"""
url_namespace = None
template_dir = None
def delete(self, request, *args, **kwargs):
"""
Processes deletion of the specified instance.
:param request: the request instance.
:rtype: django.http.HttpResponse.
"""
#noinspection PyAttributeOutsideInit
self.object = self.get_object()
success_url = self.get_success_url()
meta = getattr(self.object, '_meta')
self.object.delete()
messages.success(
request,
_(u'{0} "{1}" deleted.').format(
meta.verbose_name.lower(),
str(self.object)
)
)
return redirect(success_url)
def get_success_url(self):
"""
Returns redirect URL for valid form submittal.
:rtype: str.
"""
return reverse('{0}:index'.format(self.url_namespace))
def get_template_names(self):
"""
Returns a list of template names for the view.
:rtype: list.
"""
return ['{0}/confirm_delete.html'.format(self.template_dir)]
| thenewguy/wagtailplus | wagtailplus/utils/views/crud.py | Python | bsd-2-clause | 10,014 |
T = [(1, 2), (3, 4), (5, 6)]
for a, b in T:
    print(a, "and", b)
for i in range(100):
    print(i, end=' ')
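# Added illustration: enumerate() yields the index alongside each tuple,
# combining both loop forms shown above.
for i, (a, b) in enumerate(T):
    print(i, ":", a, "and", b)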
| felipeatr/tresa | 01_exemplos_revisao/11_laco_for.py | Python | gpl-3.0 | 100 |
#-------------------------------------------------------------------------------
# This file is part of PyMad.
#
# Copyright (c) 2011, CERN. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#-------------------------------------------------------------------------------
import unittest
from base_test import TestCpymad
class TestLHC_LSA(TestCpymad, unittest.TestCase):
name = 'lhc'
if __name__ == '__main__':
unittest.main()
| pymad/cpymad | test/test_lhc.py | Python | apache-2.0 | 952 |
"""
chatrelater.analyzer_cli
~~~~~~~~~~~~~~~~~~~~~~~~
Command line interface for analyzer.
:Copyright: 2007-2021 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from argparse import ArgumentParser
from pathlib import Path
from .analyzer import analyze
from .serialization import serialize_data_to_file, serialize_data_to_stdout
def parse_args():
"""Setup and apply the command line parser."""
parser = ArgumentParser()
parser.add_argument(
'-d',
'--directed',
action='store_true',
dest='directed',
help='preserve directed relations instead of unifying them',
)
parser.add_argument(
'-n',
'--no-unrelated-nicknames',
action='store_true',
dest='no_unrelated_nicknames',
        help='exclude unrelated nicknames to avoid drawing unconnected nodes',
)
parser.add_argument(
'-o',
'--output-filename',
dest='output_filename',
help='save the output to this file (default: write to STDOUT)',
)
parser.add_argument(
'-v',
'--verbose',
action='store_true',
dest='verbose',
help='display the resulting relations',
)
parser.add_argument('filenames', metavar='FILENAME', nargs='+')
return parser.parse_args()
def main() -> None:
args = parse_args()
filenames = [Path(fn) for fn in args.filenames]
if args.output_filename is not None:
output_filename = Path(args.output_filename)
else:
output_filename = None
# Analyze data.
nicknames, relations = analyze(
filenames,
directed=args.directed,
no_unrelated_nicknames=args.no_unrelated_nicknames,
)
# Show details.
if args.verbose:
connection_template = '%3dx %s <-> %s'
if args.directed:
connection_template = connection_template.replace('<', '')
print()
for rel in sorted(relations, key=lambda x: str.lower(x[0])):
print(connection_template % (rel[2], rel[0], rel[1]))
print()
print(
            f'Found {len(nicknames):d} nicknames in {len(relations):d} relations.'
)
# Store result.
data = {
'nicknames': list(nicknames),
'relations': relations,
'directed': args.directed,
}
if output_filename is not None:
serialize_data_to_file(data, output_filename)
else:
serialize_data_to_stdout(data)
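# Editorial sketch: allow running this module directly; the packaging is
# assumed to also expose main() as a console-script entry point, e.g.:
#   $ python -m chatrelater.analyzer_cli -v -o relations.json channel.log
if __name__ == '__main__':
    main()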
| homeworkprod/chatrelater | src/chatrelater/analyzer_cli.py | Python | mit | 2,473 |
#!/usr/bin/env python3
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2015-2017 lamarpavel
# Copyright 2015-2017 Alexey Nabrodov (Averrin)
# Copyright 2015-2017 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Fetch list of popular user-agents.
The script is based on a gist posted by github.com/averrin, the output of this
script is formatted to be pasted into configtypes.py.
"""
import requests
from lxml import html # pylint: disable=import-error
def fetch():
"""Fetch list of popular user-agents.
Return:
List of relevant strings.
"""
url = 'https://techblog.willshouse.com/2012/01/03/most-common-user-agents/'
page = requests.get(url)
page = html.fromstring(page.text)
path = '//*[@id="post-2229"]/div[2]/table/tbody'
    return page.xpath(path)[0]


def filter_list(complete_list, browsers):
"""Filter the received list based on a look up table.
The LUT should be a dictionary of the format {browser: versions}, where
'browser' is the name of the browser (eg. "Firefox") as string and
'versions' is a set of different versions of this browser that should be
included when found (eg. {"Linux", "MacOSX"}). This function returns a
dictionary with the same keys as the LUT, but storing lists of tuples
(user_agent, browser_description) as values.
"""
# pylint: disable=too-many-nested-blocks
table = {}
for entry in complete_list:
# Tuple of (user_agent, browser_description)
candidate = (entry[1].text_content(), entry[2].text_content())
for name in browsers:
found = False
if name.lower() in candidate[1].lower():
for version in browsers[name]:
if version.lower() in candidate[1].lower():
if table.get(name) is None:
table[name] = []
table[name].append(candidate)
browsers[name].remove(version)
found = True
break
if found:
break
return table
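
# Editorial example, not in the original script, of how filter_list() is
# driven. The rows come from fetch(); the LUT maps a browser name to the
# platform variants to keep. Note that the LUT's version sets are mutated
# in place, so each (browser, version) pair is matched at most once:
#
#     lut = {"Firefox": {"Win", "Linux"}, "Chrome": {"MacOSX"}}
#     table = filter_list(fetch(), lut)
#     # table is now e.g. {"Firefox": [(user_agent, description), ...], ...}
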
def add_diversity(table):
"""Insert a few additional entries for diversity into the dict.
(as returned by filter_list())
"""
table["Obscure"] = [
('Mozilla/5.0 (compatible; Googlebot/2.1; '
         '+http://www.google.com/bot.html)',
"Google Bot"),
('Wget/1.16.1 (linux-gnu)',
"wget 1.16.1"),
('curl/7.40.0',
"curl 7.40.0"),
('Mozilla/5.0 (Linux; U; Android 7.1.2) AppleWebKit/534.30 '
'(KHTML, like Gecko) Version/4.0 Mobile Safari/534.30',
"Mobile Generic Android")
]
    return table


def main():
"""Generate user agent code."""
fetched = fetch()
lut = {
"Firefox": {"Win", "MacOSX", "Linux", "Android"},
"Chrome": {"Win", "MacOSX", "Linux"},
"Safari": {"MacOSX", "iOS"}
}
filtered = filter_list(fetched, lut)
filtered = add_diversity(filtered)
tab = " "
print(tab + "def complete(self):")
print((2 * tab) + "\"\"\"Complete a list of common user agents.\"\"\"")
print((2 * tab) + "out = [")
for browser in ["Firefox", "Safari", "Chrome", "Obscure"]:
for it in filtered[browser]:
print("{}(\'{}\',\n{} \"{}\"),".format(3 * tab, it[0],
3 * tab, it[1]))
print("")
print("""\
('Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like '
'Gecko',
"IE 11.0 for Desktop Win7 64-bit")""")
print("{}]\n{}return out\n".format(2 * tab, 2 * tab))
if __name__ == '__main__':
main()
| lahwaacz/qutebrowser | scripts/dev/ua_fetch.py | Python | gpl-3.0 | 4,386 |
import models


class BTM(object):

    def __init__(self, totalAmountBills,
                 currentAmountBills, currentAmountBitcoin):
        self.priceModel = models.PriceModel(totalAmountBills,
                                            currentAmountBills,
                                            currentAmountBitcoin)

    def buy_bills(self, amountBills):
        # Bills leave the machine: book the amount as negative, then apply
        # the price model's quote to the bitcoin balance.
        nAmountBills = amountBills.copy_negate()
        amountBitcoin = self.priceModel.calculate(nAmountBills)
        self.priceModel.change_amount_bills(nAmountBills)
        self.priceModel.change_amount_bitcoin(amountBitcoin)

    def sell_bills(self, amountBills):
        # Bills enter the machine: book the amount as positive and adjust
        # the bitcoin balance by the quoted amount.
        amountBitcoin = self.priceModel.calculate(amountBills)
        self.priceModel.change_amount_bills(amountBills)
        self.priceModel.change_amount_bitcoin(amountBitcoin)
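
# Editorial usage sketch, not part of the original file. PriceModel's
# interface is assumed from the calls above, and Decimal inputs are implied
# by the use of copy_negate():
#
#     from decimal import Decimal
#
#     btm = BTM(Decimal('10000'), Decimal('2500'), Decimal('3.5'))
#     btm.buy_bills(Decimal('20'))   # bill inventory shrinks by 20
#     btm.sell_bills(Decimal('20'))  # bill inventory grows by 20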
| cjduncana/Unbanked-Bitcoin-ATM | btm/btm.py | Python | gpl-2.0 | 822 |