repo_name
stringlengths 6
100
| path
stringlengths 4
294
| copies
stringlengths 1
5
| size
stringlengths 4
6
| content
stringlengths 606
896k
| license
stringclasses 15
values | var_hash
int64 -9,223,186,179,200,150,000
9,223,291,175B
| doc_hash
int64 -9,223,304,365,658,930,000
9,223,309,051B
| line_mean
float64 3.5
99.8
| line_max
int64 13
999
| alpha_frac
float64 0.25
0.97
| autogenerated
bool 1
class |
---|---|---|---|---|---|---|---|---|---|---|---|
fzimmermann89/pyload | module/common/APIExerciser.py | 41 | 4349 | # -*- coding: utf-8 -*-
import string
from threading import Thread
from random import choice, random, sample, randint
from time import time, sleep
from math import floor
import gc
from traceback import print_exc, format_exc
from module.remote.thriftbackend.ThriftClient import ThriftClient, Destination
def createURLs():
    """Build a batch of random link names; roughly 1 in 6 is given an
    empty prefix and is therefore expected to fail downstream."""
    result = []
    for _ in range(randint(20, 100)):
        # empty prefix -> this link will fail
        prefix = "" if randint(0, 5) == 5 else "DEBUG_API"
        result.append(prefix + "".join(sample(string.ascii_letters, randint(10, 20))))
    return result
AVOID = (0,3,8)  # NOTE(review): defined but not referenced in this module -- possibly dead
idPool = 0       # next exerciser id; bumped by every APIExerciser.__init__
sumCalled = 0    # total api calls across all exercisers (updated without locking)
def startApiExerciser(core, n):
    """Spawn *n* APIExerciser daemon threads running against *core*."""
    for i in range(n):
        APIExerciser(core).start()
class APIExerciser(Thread):
    """Daemon thread that hammers the pyLoad API with random calls forever.

    Errors are appended to ``error.log``; throughput statistics are logged
    every 1000 calls (shared counter is deliberately not thread safe).
    """

    def __init__(self, core, thrift=False, user=None, pw=None):
        # core: pyLoad core instance; thrift: use a remote ThriftClient
        # instead of the in-process API.
        global idPool

        Thread.__init__(self)
        self.setDaemon(True)
        self.core = core
        self.count = 0  #number of methods
        self.time = time()

        if thrift:
            self.api = ThriftClient(user=user, password=pw)
        else:
            self.api = core.api

        self.id = idPool
        idPool += 1

        #self.start()

    def run(self):
        """Loop forever: one random API call per iteration, logging failures."""
        self.core.log.info("API Excerciser started %d" % self.id)

        out = open("error.log", "ab")
        #core errors are not logged of course
        out.write("\n" + "Starting\n")
        out.flush()

        while True:
            try:
                self.testAPI()
            except Exception:
                self.core.log.error("Excerciser %d throw an execption" % self.id)
                print_exc()
                out.write(format_exc() + 2 * "\n")
                out.flush()

            if not self.count % 100:
                self.core.log.info("Exerciser %d tested %d api calls" % (self.id, self.count))
            if not self.count % 1000:
                out.flush()

            if not sumCalled % 1000: #not thread safe
                self.core.log.info("Exercisers tested %d api calls" % sumCalled)
                persec = sumCalled / (time() - self.time)
                self.core.log.info("Approx. %.2f calls per second." % persec)
                self.core.log.info("Approx. %.2f ms per call." % (1000 / persec))
                self.core.log.info("Collected garbage: %d" % gc.collect())

            #sleep(random() / 500)

    def testAPI(self):
        """Pick one API method at random and invoke it (local override wins)."""
        global sumCalled

        m = ["statusDownloads", "statusServer", "addPackage", "getPackageData", "getFileData", "deleteFiles",
             "deletePackages", "getQueue", "getCollector", "getQueueData", "getCollectorData", "isCaptchaWaiting",
             "getCaptchaTask", "stopAllDownloads", "getAllInfo", "getServices" , "getAccounts", "getAllUserData"]

        method = choice(m)
        #print "Testing:", method

        # Methods defined on this class wrap the raw API call with random
        # argument generation; everything else is called directly on the api.
        if hasattr(self, method):
            res = getattr(self, method)()
        else:
            res = getattr(self.api, method)()

        self.count += 1
        sumCalled += 1

        #print res

    def addPackage(self):
        """Add a package with a random name and randomly generated urls."""
        name = "".join(sample(string.ascii_letters, 10))
        urls = createURLs()

        self.api.addPackage(name, urls, choice([Destination.Queue, Destination.Collector]))

    def deleteFiles(self):
        """Delete a random subset of links from a random queue package."""
        info = self.api.getQueueData()
        if not info: return

        pack = choice(info)
        fids = pack.links

        if len(fids):
            # NOTE(review): len(fids) / 2 is a float under Python 3; sample()
            # requires an int -- confirm intended interpreter version.
            fids = [f.fid for f in sample(fids, randint(1, max(len(fids) / 2, 1)))]
            self.api.deleteFiles(fids)

    def deletePackages(self):
        """Delete a random subset of packages from queue or collector."""
        info = choice([self.api.getQueue(), self.api.getCollector()])
        if not info: return

        pids = [p.pid for p in info]
        if len(pids):
            pids = sample(pids, randint(1, max(floor(len(pids) / 2.5), 1)))
            self.api.deletePackages(pids)

    def getFileData(self):
        """Fetch data of a random link from a random queue package."""
        info = self.api.getQueueData()
        if info:
            p = choice(info)
            if p.links:
                self.api.getFileData(choice(p.links).fid)

    def getPackageData(self):
        """Fetch data of a random queue package."""
        info = self.api.getQueue()
        if info:
            self.api.getPackageData(choice(info).pid)

    def getAccounts(self):
        self.api.getAccounts(False)

    def getCaptchaTask(self):
        self.api.getCaptchaTask(False)
TheTimmy/spack | var/spack/repos/builtin/packages/shortstack/package.py | 3 | 1943 | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Shortstack(Package):
    """ShortStack is a tool developed to process and analyze smallRNA-seq data
    with respect to a reference genome, and output a comprehensive and
    informative annotation of all discovered small RNA genes."""

    homepage = "http://sites.psu.edu/axtell/software/shortstack/"
    url = "https://github.com/MikeAxtell/ShortStack/archive/v3.8.3.tar.gz"

    version('3.8.3', '3f21f494f799039f3fa88ea343f2d20d')

    # Perl is needed at runtime as well; the rest are runtime tool deps.
    depends_on('perl', type=('build', 'run'))
    depends_on('samtools')
    depends_on('viennarna')
    depends_on('bowtie')

    def install(self, spec, prefix):
        # ShortStack ships as a single Perl script; just copy it into bin/.
        mkdirp(prefix.bin)
        install('ShortStack', prefix.bin)
| lgpl-2.1 | -6,615,827,537,778,193,000 | -988,868,549,976,880,500 | 42.177778 | 79 | 0.675244 | false |
jezdez/django-hosts | tests/test_defaults.py | 3 | 3037 | from django.core.exceptions import ImproperlyConfigured
from django_hosts.defaults import host, patterns
from django_hosts.resolvers import get_host_patterns
from .base import HostsTestCase
class PatternsTests(HostsTestCase):
    """Tests for the patterns() helper of django_hosts."""

    def test_pattern(self):
        # A single host() entry yields a one-element pattern list.
        host_patterns = patterns('',
            host(r'api', 'api.urls', name='api'),
        )
        self.assertEqual(len(host_patterns), 1)
        self.assertTrue(isinstance(host_patterns[0], host))
        self.assertEqual(repr(host_patterns[0]),
                         "<host api: regex='api' urlconf='api.urls' "
                         "scheme='//' port=''>")

    def test_pattern_as_tuple(self):
        # Plain tuples are coerced into host instances.
        host_patterns = patterns('',
            (r'api', 'api.urls', 'api'),
        )
        self.assertEqual(len(host_patterns), 1)
        self.assertTrue(isinstance(host_patterns[0], host))

    def test_pattern_with_duplicate(self):
        # Registering the same host twice must raise.
        api_host = host(r'api', 'api.urls', name='api')
        self.assertRaises(ImproperlyConfigured,
                          patterns, '', api_host, api_host)

    def test_pattern_with_prefix(self):
        # The patterns() prefix is prepended to each urlconf path.
        host_patterns = patterns('mysite',
            host(r'api', 'api.urls', name='api'),
        )
        self.assertEqual(len(host_patterns), 1)
        self.assertTrue(isinstance(host_patterns[0], host))
        self.assertEqual(host_patterns[0].urlconf, 'mysite.api.urls')
class HostTests(HostsTestCase):
    """Tests for the host() entry class of django_hosts."""

    def test_host(self):
        api_host = host(r'api', 'api.urls', name='api')
        self.assertTrue(isinstance(api_host, host))

    def test_host_prefix(self):
        api_host = host(r'api', 'api.urls', name='api', prefix='spam.eggs')
        self.assertEqual(api_host.urlconf, 'spam.eggs.api.urls')

    def test_host_string_callback(self):
        # A dotted-path callback string is resolved lazily to the callable.
        api_host = host(r'api', 'api.urls', name='api',
                        callback='django_hosts.resolvers.get_host_patterns')
        self.assertEqual(api_host.callback, get_host_patterns)

    def test_host_callable_callback(self):
        api_host = host(r'api', 'api.urls', name='api',
                        callback=get_host_patterns)
        self.assertEqual(api_host.callback, get_host_patterns)

    def test_host_nonexistent_callback(self):
        # Missing module, missing attribute and broken module must each
        # surface as ImproperlyConfigured when the callback is accessed.
        api_host = host(r'api', 'api.urls', name='api',
                        callback='whatever.non_existent')
        self.assertRaisesMessage(ImproperlyConfigured,
            "Could not import 'whatever'. Error was: No module named",
            lambda: api_host.callback)

        api_host = host(r'api', 'api.urls', name='api',
                        callback='django_hosts.non_existent')
        self.assertRaisesMessage(ImproperlyConfigured,
            "Could not import 'django_hosts.non_existent'. "
            "Callable does not exist in module",
            lambda: api_host.callback)

        api_host = host(r'api', 'api.urls', name='api',
                        callback='tests.broken_module.yeah_yeah')
        self.assertRaises(ImproperlyConfigured, lambda: api_host.callback)
| bsd-3-clause | -7,082,895,015,006,689,000 | 8,006,269,357,654,077,000 | 38.441558 | 76 | 0.603556 | false |
gonicus/gosa | backend/src/gosa/backend/plugins/samba/logonhours.py | 1 | 2755 | # This file is part of the GOsa framework.
#
# http://gosa-project.org
#
# Copyright:
# (C) 2016 GONICUS GmbH, Germany, http://www.gonicus.de
#
# See the LICENSE file in the project's top-level directory for details.
import time
from gosa.backend.objects.types import AttributeType
class SambaLogonHoursAttribute(AttributeType):
    """
    This is a special object-attribute-type for sambaLogonHours.

    This call can convert sambaLogonHours to a UnicodeString and vice versa.
    It is used in the samba-object definition file.
    """
    __alias__ = "SambaLogonHours"

    def values_match(self, value1, value2):
        # Compare by string representation rather than identity/equality.
        return str(value1) == str(value2)

    def is_valid_value(self, value):
        # Valid: a single 168-character string (7 days x 24 hours) of '0'/'1'.
        # NOTE(review): an empty *value* list falls through and returns None
        # (falsy) rather than an explicit boolean -- confirm intended.
        if len(value):
            try:
                # Check if each week day contains 24 values.
                if type(value[0]) is not str or len(value[0]) != 168 or len(set(value[0]) - set('01')):
                    return False
                return True
            except:
                return False

    def _convert_to_unicodestring(self, value):
        """
        This method is a converter used when values gets read from or written to the backend.

        Converts the 'SambaLogonHours' object-type into a 'UnicodeString'-object.
        """
        if len(value):
            # Combine the binary strings
            lstr = value[0]

            # New reverse every 8 bit part, and toggle high- and low-tuple (4Bits)
            new = ""
            for i in range(0, 21):
                n = lstr[i * 8:((i + 1) * 8)]
                # NOTE(review): n[0:4] + n[4:] reconstructs n unchanged (no-op);
                # the mirrored no-op in _convert_from_unicodestring keeps the
                # round-trip consistent -- do not "fix" one side alone.
                n = n[0:4] + n[4:]
                n = n[::-1]
                n = str(hex(int(n, 2)))[2::].rjust(2, '0')
                new += n
            value = [new.upper()]
        return value

    def _convert_from_string(self, value):
        return self._convert_from_unicodestring(value)

    def _convert_from_unicodestring(self, value):
        """
        This method is a converter used when values gets read from or written to the backend.

        Converts a 'UnicodeString' attribute into the 'SambaLogonHours' object-type.
        """
        if len(value):
            # Convert each hex-pair into binary values.
            # Then reverse the binary result and switch high and low pairs.
            value = value[0]
            lstr = ""
            for i in range(0, 42, 2):
                n = (bin(int(value[i:i + 2], 16))[2::]).rjust(8, '0')
                n = n[::-1]
                # NOTE(review): see the matching no-op in _convert_to_unicodestring.
                lstr += n[0:4] + n[4:]

            # Shift lster by timezone offset
            shift_by = int((168 + (time.timezone/3600)) % 168)
            lstr = lstr[shift_by:] + lstr[:shift_by]

            # Parse result into more readable value
            value = [lstr]
        return value
| lgpl-2.1 | 5,215,574,314,056,275,000 | 782,765,529,593,932,700 | 29.611111 | 103 | 0.549546 | false |
JavaRabbit/CS496_capstone | appengine/standard/Capstone_inPython/reportlab/graphics/testshapes.py | 3 | 17300 | #!/bin/env python
#Copyright ReportLab Europe Ltd. 2000-2017
#see license.txt for license details
#history http://www.reportlab.co.uk/cgi-bin/viewcvs.cgi/public/reportlab/trunk/reportlab/graphics/testshapes.py
# testshapes.py - draws shapes onto a PDF canvas.
__version__ = ''' $Id $ '''
__doc__='''Execute this script to see some test drawings.
This contains a number of routines to generate test drawings
for reportlab/graphics. For now many of them are contrived,
but we will expand them to try and trip up any parser.
Feel free to add more.
'''
import os, sys, base64
from reportlab.lib import colors
from reportlab.lib.units import cm
from reportlab.lib.utils import asNative
from reportlab.pdfgen.canvas import Canvas
from reportlab.pdfbase.pdfmetrics import stringWidth
from reportlab.platypus import Flowable
from reportlab.graphics.shapes import *
from reportlab.graphics.renderPDF import _PDFRenderer
import unittest
_FONTS = ['Times-Roman','Vera','Times-BoldItalic',]
def _setup():
    """Register the Vera TTF family with pdfmetrics and, on win32, any of a
    list of common Windows fonts found on disk; return the usable font names."""
    from reportlab.pdfbase import pdfmetrics, ttfonts
    pdfmetrics.registerFont(ttfonts.TTFont("Vera", "Vera.ttf"))
    pdfmetrics.registerFont(ttfonts.TTFont("VeraBd", "VeraBd.ttf"))
    pdfmetrics.registerFont(ttfonts.TTFont("VeraIt", "VeraIt.ttf"))
    pdfmetrics.registerFont(ttfonts.TTFont("VeraBI", "VeraBI.ttf"))

    F = ['Times-Roman','Courier','Helvetica','Vera', 'VeraBd', 'VeraIt', 'VeraBI']
    if sys.platform=='win32':
        for name, ttf in [
            ('Adventurer Light SF','Advlit.ttf'),('ArialMS','ARIAL.TTF'),
            ('Arial Unicode MS', 'ARIALUNI.TTF'),
            ('Book Antiqua','BKANT.TTF'),
            ('Century Gothic','GOTHIC.TTF'),
            ('Comic Sans MS', 'COMIC.TTF'),
            ('Elementary Heavy SF Bold','Vwagh.ttf'),
            ('Firenze SF','flot.ttf'),
            ('Garamond','GARA.TTF'),
            ('Jagger','Rols.ttf'),
            ('Monotype Corsiva','MTCORSVA.TTF'),
            ('Seabird SF','seag.ttf'),
            ('Tahoma','TAHOMA.TTF'),
            ('VerdanaMS','VERDANA.TTF'),
            ]:
            # Probe both legacy and current Windows font directories.
            for D in (r'c:\WINNT',r'c:\Windows'):
                fn = os.path.join(D,'Fonts',ttf)
                if os.path.isfile(fn):
                    try:
                        f = ttfonts.TTFont(name, fn)
                        pdfmetrics.registerFont(f)
                        F.append(name)
                    except:
                        pass  # unreadable/unsupported font file -- skip it
    return F
def resetFonts():
    """Re-run _setup() and append any newly registered faces to _FONTS."""
    for f in _setup():
        if f not in _FONTS:
            _FONTS.append(f)
from reportlab.rl_config import register_reset
# Re-register fonts whenever reportlab's configuration is reset, and once now.
register_reset(resetFonts)
resetFonts()
#########################################################
#
# Collections of shape drawings.
#
#########################################################
def getFailedDrawing(funcName):
    """Generate a drawing in case something goes really wrong.

    This will create a drawing to be displayed whenever some
    other drawing could not be executed, because the generating
    function does something terribly wrong! The box contains
    an attention triangle, plus some error message.
    """
    D = Drawing(400, 200)

    # Yellow warning triangle with a thick red outline.
    points = [200,170, 140,80, 260,80]
    D.add(Polygon(points,
                  strokeWidth=0.5*cm,
                  strokeColor=colors.red,
                  fillColor=colors.yellow))

    s = String(200, 40,
               "Error in generating function '%s'!" % funcName,
               textAnchor='middle')
    D.add(s)

    return D
# These are the real drawings to be eye-balled.
def getDrawing01():
    """Hello World, on a rectangular background.

    The rectangle's fillColor is yellow.
    The string's fillColor is red.
    """
    D = Drawing(400, 200)
    D.add(Rect(50, 50, 300, 100, fillColor=colors.yellow))
    D.add(String(180,100, 'Hello World', fillColor=colors.red))
    # Second line exercises non-ASCII glyph handling (UTF-8 byte string).
    D.add(String(180,86, b'Special characters \xc2\xa2\xc2\xa9\xc2\xae\xc2\xa3\xce\xb1\xce\xb2', fillColor=colors.red))
    return D
def getDrawing02():
    """Various Line shapes.

    The lines are blue and their strokeWidth is 5 mm.
    One line has a strokeDashArray set to [5, 10, 15].
    """
    D = Drawing(400, 200)
    D.add(Line(50,50, 300,100,
               strokeColor=colors.blue,
               strokeWidth=0.5*cm,
               ))
    D.add(Line(50,100, 300,50,
               strokeColor=colors.blue,
               strokeWidth=0.5*cm,
               strokeDashArray=[5, 10, 15],
               ))

    #x = 1/0 # Comment this to see the actual drawing!

    return D
def getDrawing03():
    """Text strings in various sizes and different fonts.

    Font size increases from 12 to 36 and from bottom left
    to upper right corner. The first ones should be in
    Times-Roman. Finally, a solitary Courier string at
    the top right corner.
    """
    D = Drawing(400, 200)
    # Position tracks size so larger strings sit further up and right.
    for size in range(12, 36, 4):
        D.add(String(10+size*2,
                     10+size*2,
                     'Hello World',
                     fontName=_FONTS[0],
                     fontSize=size))

    D.add(String(150, 150,
                 'Hello World',
                 fontName=_FONTS[1],
                 fontSize=36))
    return D
def getDrawing04():
    """Text strings in various colours.

    Colours are blue, yellow and red from bottom left
    to upper right.
    """
    drawing = Drawing(400, 200)
    # Each string steps 30 points diagonally up and to the right.
    for idx, colour in enumerate((colors.blue, colors.yellow, colors.red)):
        offset = 50 + idx * 30
        drawing.add(String(offset, offset, 'Hello World', fillColor=colour))
    return drawing
def getDrawing05():
    """Text strings with various anchors (alignments).

    Text alignment conforms to the anchors in the left column.
    """
    D = Drawing(400, 200)

    # Vertical reference line so the anchor behaviour is visible.
    lineX = 250
    D.add(Line(lineX,10, lineX,190, strokeColor=colors.gray))

    y = 130
    for anchor in ('start', 'middle', 'end'):
        D.add(String(lineX, y, 'Hello World', textAnchor=anchor))
        D.add(String(50, y, anchor + ':'))
        y = y - 30

    return D
def getDrawing06():
    """This demonstrates all the basic shapes at once.

    There are no groups or references.
    Each solid shape should have a green fill.
    """
    green = colors.green
    D = Drawing(400, 200) #, fillColor=green)
    D.add(Line(10,10, 390,190))
    D.add(Circle(100,100,20, fillColor=green))
    D.add(Circle(200,100,40, fillColor=green))
    D.add(Circle(300,100,30, fillColor=green))
    D.add(Wedge(330,100,40, -10,40, fillColor=green))
    D.add(PolyLine([120,10, 130,20, 140,10, 150,20, 160,10,
                    170,20, 180,10, 190,20, 200,10], fillColor=green))
    D.add(Polygon([300,20, 350,20, 390,80, 300,75, 330,40], fillColor=green))
    D.add(Ellipse(50,150, 40, 20, fillColor=green))
    D.add(Rect(120,150, 60,30,
               strokeWidth=10,
               strokeColor=colors.yellow,
               fillColor=green))  #square corners
    D.add(Rect(220, 150, 60, 30, 10, 10, fillColor=green))  #round corners
    D.add(String(10,50, 'Basic Shapes', fillColor=colors.black, fontName='Helvetica'))
    return D
def getDrawing07():
    """This tests the ability to translate and rotate groups. The first set of axes should be
    near the bottom left of the drawing. The second should be rotated counterclockwise
    by 15 degrees. The third should be rotated by 30 degrees."""
    D = Drawing(400, 200)

    # One reusable axes group; each placement wraps it in its own transform.
    Axis = Group(
        Line(0,0,100,0),  #x axis
        Line(0,0,0,50),   # y axis
        Line(0,10,10,10), #ticks on y axis
        Line(0,20,10,20),
        Line(0,30,10,30),
        Line(0,40,10,40),
        Line(10,0,10,10), #ticks on x axis
        Line(20,0,20,10),
        Line(30,0,30,10),
        Line(40,0,40,10),
        Line(50,0,50,10),
        Line(60,0,60,10),
        Line(70,0,70,10),
        Line(80,0,80,10),
        Line(90,0,90,10),
        String(20, 35, 'Axes', fill=colors.black)
        )

    firstAxisGroup = Group(Axis)
    firstAxisGroup.translate(10,10)
    D.add(firstAxisGroup)

    secondAxisGroup = Group(Axis)
    secondAxisGroup.translate(150,10)
    secondAxisGroup.rotate(15)
    D.add(secondAxisGroup)

    # Transform may also be composed up-front with mmult().
    thirdAxisGroup = Group(Axis, transform=mmult(translate(300,10), rotate(30)))
    D.add(thirdAxisGroup)

    return D
def getDrawing08():
    """This tests the ability to scale coordinates. The bottom left set of axes should be
    near the bottom left of the drawing. The bottom right should be stretched vertically
    by a factor of 2. The top left one should be stretched horizontally by a factor of 2.
    The top right should have the vertical axiss leaning over to the right by 30 degrees."""
    D = Drawing(400, 200)

    # One reusable axes group; each quadrant applies a different transform.
    Axis = Group(
        Line(0,0,100,0),  #x axis
        Line(0,0,0,50),   # y axis
        Line(0,10,10,10), #ticks on y axis
        Line(0,20,10,20),
        Line(0,30,10,30),
        Line(0,40,10,40),
        Line(10,0,10,10), #ticks on x axis
        Line(20,0,20,10),
        Line(30,0,30,10),
        Line(40,0,40,10),
        Line(50,0,50,10),
        Line(60,0,60,10),
        Line(70,0,70,10),
        Line(80,0,80,10),
        Line(90,0,90,10),
        String(20, 35, 'Axes', fill=colors.black)
        )

    firstAxisGroup = Group(Axis)
    firstAxisGroup.translate(10,10)
    D.add(firstAxisGroup)

    secondAxisGroup = Group(Axis)
    secondAxisGroup.translate(150,10)
    secondAxisGroup.scale(1,2)
    D.add(secondAxisGroup)

    thirdAxisGroup = Group(Axis)
    thirdAxisGroup.translate(10,125)
    thirdAxisGroup.scale(2,1)
    D.add(thirdAxisGroup)

    fourthAxisGroup = Group(Axis)
    fourthAxisGroup.translate(250,125)
    fourthAxisGroup.skew(30,0)
    D.add(fourthAxisGroup)

    return D
def getDrawing09():
    """This tests rotated strings

    Some renderers will have a separate mechanism for font drawing. This test
    just makes sure strings get transformed the same way as regular graphics."""
    D = Drawing(400, 200)

    fontName = _FONTS[0]
    fontSize = 12
    text = "I should be totally horizontal and enclosed in a box"
    textWidth = stringWidth(text, fontName, fontSize)

    # Box is sized from the measured string width so it fits exactly.
    g1 = Group(
        String(20, 20, text, fontName=fontName, fontSize = fontSize),
        Rect(18, 18, textWidth + 4, fontSize + 4, fillColor=None)
        )
    D.add(g1)

    text = "I should slope up by 15 degrees, so my right end is higher than my left"
    textWidth = stringWidth(text, fontName, fontSize)
    g2 = Group(
        String(20, 20, text, fontName=fontName, fontSize = fontSize),
        Rect(18, 18, textWidth + 4, fontSize + 4, fillColor=None)
        )
    g2.translate(0, 50)
    g2.rotate(15)
    D.add(g2)

    return D
def getDrawing10():
    """This tests nested groups with multiple levels of coordinate transformation.
    Each box should be staggered up and to the right, moving by 25 points each time."""
    D = Drawing(400, 200)
    fontName = _FONTS[0]
    fontSize = 12

    g1 = Group(
        Rect(0, 0, 100, 20, fillColor=colors.yellow),
        String(5, 5, 'Text in the box', fontName=fontName, fontSize = fontSize)
        )
    D.add(g1)

    # Each group nests the previous one, so the translations accumulate.
    g2 = Group(g1, transform = translate(25,25))
    D.add(g2)

    g3 = Group(g2, transform = translate(25,25))
    D.add(g3)

    g4 = Group(g3, transform = translate(25,25))
    D.add(g4)

    return D
from reportlab.graphics.widgets.signsandsymbols import SmileyFace
def getDrawing11():
    '''test of anchoring'''
    def makeSmiley(x, y, size, color):
        "Make a smiley data item representation."
        d = size
        s = SmileyFace()
        s.fillColor = color
        s.x = x-d
        s.y = y-d
        s.size = d*2
        return s

    D = Drawing(400, 200) #, fillColor=colors.purple)

    # Identity transform: smiley centred on the green crosshair.
    g = Group(transform=(1,0,0,1,0,0))
    g.add(makeSmiley(100,100,10,colors.red))
    g.add(Line(90,100,110,100,strokeColor=colors.green))
    g.add(Line(100,90,100,110,strokeColor=colors.green))
    D.add(g)

    # Scaled+translated copy: checks the anchor point moves with the group.
    g = Group(transform=(2,0,0,2,100,-100))
    g.add(makeSmiley(100,100,10,colors.blue))
    g.add(Line(90,100,110,100,strokeColor=colors.green))
    g.add(Line(100,90,100,110,strokeColor=colors.green))
    D.add(g)

    # NOTE(review): this group is created but never added to D -- appears dead.
    g = Group(transform=(2,0,0,2,0,0))

    return D
def getDrawing12():
    """Text strings in a non-standard font.

    All that is required is to place the .afm and .pfb files
    on the font patch given in rl_config.py,
    for example in reportlab/lib/fonts/.
    """
    faceName = "DarkGardenMK"
    D = Drawing(400, 200)
    for size in range(12, 36, 4):
        D.add(String(10+size*2,
                     10+size*2,
                     'Hello World',
                     fontName=faceName,
                     fontSize=size))
    return D
def getDrawing13():
    'Test Various TTF Fonts'
    def drawit(F,w=400,h=200,fontSize=12,slack=2,gap=5):
        # Render one boxed sample line per font; return the extents needed.
        D = Drawing(w,h)
        th = 2*gap + fontSize*1.2
        gh = gap + .2*fontSize
        y = h
        maxx = 0
        for fontName in F:
            y -= th
            text = fontName+asNative(b': I should be totally horizontal and enclosed in a box and end in alphabetagamma \xc2\xa2\xc2\xa9\xc2\xae\xc2\xa3\xca\xa5\xd0\x96\xd6\x83\xd7\x90\xd9\x82\xe0\xa6\x95\xce\xb1\xce\xb2\xce\xb3')
            textWidth = stringWidth(text, fontName, fontSize)
            maxx = max(maxx,textWidth+20)
            D.add(
                Group(Rect(8, y-gh, textWidth + 4, th, strokeColor=colors.red, strokeWidth=.5, fillColor=colors.lightgrey),
                      String(10, y, text, fontName=fontName, fontSize = fontSize)))
            y -= 5
        return maxx, h-y+gap, D
    # First pass measures; if it overflows 400x200, redraw at the needed size.
    maxx, maxy, D = drawit(_FONTS)
    if maxx>400 or maxy>200: _,_,D = drawit(_FONTS,maxx,maxy)
    return D
def smallArrow():
    '''Create a small PIL image (a tiny GIF arrow) from embedded base64 data.'''
    from reportlab.graphics.renderPM import _getImage
    from reportlab.lib.utils import getBytesIO
    # base64.decodestring was deprecated in Python 3.1 and removed in 3.9;
    # prefer decodebytes when available, fall back for older interpreters.
    _decode = getattr(base64, 'decodebytes', None) or base64.decodestring
    b = _decode(b'''R0lGODdhCgAHAIMAAP/////29v/d3f+ysv9/f/9VVf9MTP8iIv8ICP8AAAAAAAAAAAAAAAAAAAAA
AAAAACwAAAAACgAHAAAIMwABCBxIsKABAQASFli4MAECAgEAJJhIceKBAQkyasx4YECBjx8TICAQ
AIDJkwYEAFgZEAA7''')
    return _getImage().open(getBytesIO(b))
def getDrawing14():
    '''test shapes.Image'''
    from reportlab.graphics.shapes import Image
    D = Drawing(400, 200)

    # Natural size at the bottom-left corner.
    im0 = smallArrow()
    D.add(Image(x=0,y=0,width=None,height=None,path=im0))

    # Explicitly sized copy pinned to the top-right corner.
    im1 = smallArrow()
    D.add(Image(x=400-20,y=200-14,width=20,height=14,path=im1))
    return D
def getAllFunctionDrawingNames(doTTF=1):
    """Return the sorted names of all getDrawingNN factories in this module.

    When *doTTF* is falsy, the TTF-dependent getDrawing13 is excluded.
    """
    return [name for name in sorted(globals())
            if name[0:10] == 'getDrawing' and (doTTF or name != 'getDrawing13')]
def _evalFuncDrawing(name, D, l=None, g=None):
    """Call drawing factory *name* and append (drawing, docstring, label) to *D*.

    Any failure in the factory is replaced by the standard error drawing so
    one broken test does not abort the whole run.
    """
    try:
        d = eval(name + '()', g or globals(), l or locals())
    except:
        d = getFailedDrawing(name)
    # Label drops the 'get' prefix (e.g. 'Drawing01').
    D.append((d, eval(name + '.__doc__'), name[3:]))
def getAllTestDrawings(doTTF=1):
    """Return a list of (drawing, docstring, name) for every getDrawingNN."""
    D = []
    for f in getAllFunctionDrawingNames(doTTF=doTTF):
        _evalFuncDrawing(f,D)
    return D
def writePDF(drawings):
    "Create and save a PDF file containing some drawings."

    # Output path mirrors the script name with a .pdf extension.
    pdfPath = os.path.splitext(sys.argv[0])[0] + '.pdf'
    c = Canvas(pdfPath)
    c.setFont(_FONTS[0], 32)
    c.drawString(80, 750, 'ReportLab Graphics-Shapes Test')

    # Print drawings in a loop, with their doc strings.
    c.setFont(_FONTS[0], 12)
    y = 740
    i = 1
    for (drawing, docstring, funcname) in drawings:
        if y < 300:  # Allows 5-6 lines of text.
            c.showPage()
            y = 740
        # Draw a title.
        y = y - 30
        c.setFont(_FONTS[2],12)
        c.drawString(80, y, '%s (#%d)' % (funcname, i))
        c.setFont(_FONTS[0],12)
        y = y - 14
        textObj = c.beginText(80, y)
        textObj.textLines(docstring)
        c.drawText(textObj)
        y = textObj.getY()
        y = y - drawing.height
        drawing.drawOn(c, 80, y)
        i = i + 1

    c.save()
    print('wrote %s ' % pdfPath)
class ShapesTestCase(unittest.TestCase):
    "Test generating all kinds of shapes."

    def setUp(self):
        "Prepare some things before the tests start."
        self.funcNames = getAllFunctionDrawingNames()
        self.drawings = []

    def tearDown(self):
        "Do what has to be done after the tests are over."
        # All collected drawings are rendered into one PDF at the end.
        writePDF(self.drawings)

    # This should always succeed. If each drawing would be
    # wrapped in a dedicated test method like this one, it
    # would be possible to have a count for wrong tests
    # as well... Something like this is left for later...
    def testAllDrawings(self):
        "Make a list of drawings."
        for f in self.funcNames:
            if f[0:10] == 'getDrawing':
                # Make an instance and get its doc string.
                # If that fails, use a default error drawing.
                _evalFuncDrawing(f,self.drawings)
def makeSuite():
    "Make a test suite for unit testing."
    suite = unittest.TestSuite()
    suite.addTest(ShapesTestCase('testAllDrawings'))
    return suite
if __name__ == "__main__":
unittest.TextTestRunner().run(makeSuite())
| apache-2.0 | -6,062,097,869,955,016,000 | 6,103,225,721,745,618,000 | 29.086957 | 230 | 0.601965 | false |
acrsilva/animated-zZz-machine | bundle_final_app/libs/pyqtgraph-develop/examples/customGraphicsItem.py | 28 | 2235 | """
Demonstrate creation of a custom graphic (a candlestick plot)
"""
import initExample ## Add path to library (just for examples; you do not need this)
import pyqtgraph as pg
from pyqtgraph import QtCore, QtGui
## Create a subclass of GraphicsObject.
## The only required methods are paint() and boundingRect()
## (see QGraphicsItem documentation)
class CandlestickItem(pg.GraphicsObject):
    """Custom pyqtgraph item drawing a candlestick chart.

    *data* must be a sequence of (time, open, close, min, max) tuples.
    """
    def __init__(self, data):
        pg.GraphicsObject.__init__(self)
        self.data = data  ## data must have fields: time, open, close, min, max
        self.generatePicture()

    def generatePicture(self):
        ## pre-computing a QPicture object allows paint() to run much more quickly,
        ## rather than re-drawing the shapes every time.
        self.picture = QtGui.QPicture()
        p = QtGui.QPainter(self.picture)
        p.setPen(pg.mkPen('w'))
        # Candle body width derives from the spacing of the first two samples.
        w = (self.data[1][0] - self.data[0][0]) / 3.
        for (t, open, close, min, max) in self.data:
            p.drawLine(QtCore.QPointF(t, min), QtCore.QPointF(t, max))
            # Red body for a falling candle, green for a rising one.
            if open > close:
                p.setBrush(pg.mkBrush('r'))
            else:
                p.setBrush(pg.mkBrush('g'))
            p.drawRect(QtCore.QRectF(t-w, open, w*2, close-open))
        p.end()

    def paint(self, p, *args):
        p.drawPicture(0, 0, self.picture)

    def boundingRect(self):
        ## boundingRect _must_ indicate the entire area that will be drawn on
        ## or else we will get artifacts and possibly crashing.
        ## (in this case, QPicture does all the work of computing the bouning rect for us)
        return QtCore.QRectF(self.picture.boundingRect())
# Demo data and plot setup for the custom item above.
data = [  ## fields are (time, open, close, min, max).
    (1., 10, 13, 5, 15),
    (2., 13, 17, 9, 20),
    (3., 17, 14, 11, 23),
    (4., 14, 15, 5, 19),
    (5., 15, 9, 8, 22),
    (6., 9, 15, 8, 16),
]
item = CandlestickItem(data)
plt = pg.plot()
plt.addItem(item)
plt.setWindowTitle('pyqtgraph example: customGraphicsItem')

## Start Qt event loop unless running in interactive mode or using pyside.
if __name__ == '__main__':
    import sys
    if (sys.flags.interactive != 1) or not hasattr(QtCore, 'PYQT_VERSION'):
        QtGui.QApplication.instance().exec_()
mice-software/maus | tests/integration/test_simulation/test_beam_maker/binomial_beam_config.py | 1 | 4151 | # This file is part of MAUS: http://micewww.pp.rl.ac.uk:8080/projects/maus
#
# MAUS is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# MAUS is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with MAUS. If not, see <http://www.gnu.org/licenses/>.
"""
Configuration to generate a beam distribution with binomial distribution in
the spill and various distributions for difference particle types
"""
#pylint: disable = C0103, R0801
import os
# Paths are resolved relative to the MAUS installation root.
mrd = os.environ["MAUS_ROOT_DIR"]
simulation_geometry_filename = os.path.join(
    mrd, "tests", "integration", "test_simulation", "test_beam_maker",
    "BeamTest.dat"
)
output_root_file_name = os.path.join(mrd, "tmp", "test_beammaker_output.root")
input_root_file_name = output_root_file_name # for conversion
spill_generator_number_of_spills = 1000
verbose_level = 1
beam = {
    "particle_generator":"binomial", # routine for generating empty primaries
    "binomial_n":20, # number of coin tosses
    "binomial_p":0.1, # probability of making a particle on each toss
    "random_seed":5, # random seed for beam generation; controls also how the MC
                     # seeds are generated
    "definitions":[
    ##### MUONS #######
    {
        "reference":{
            "position":{"x":0.0, "y":0.0, "z":3.0},
            "momentum":{"x":0.0, "y":0.0, "z":1.0},
            "spin":{"x":0.0, "y":0.0, "z":1.0},
            "particle_id":-13,
            "energy":226.0,
            "time":2.e6,
            "random_seed":0
        }, # reference particle
        "random_seed_algorithm":"incrementing_random", # algorithm for seeding MC
        "weight":90., # probability of generating a particle
        "transverse":{
            "transverse_mode":"penn",
            "emittance_4d":6.,
            "beta_4d":333.,
            "alpha_4d":1.,
            "normalised_angular_momentum":2.,
            "bz":4.e-3
        },
        "longitudinal":{
            "longitudinal_mode":"sawtooth_time",
            "momentum_variable":"p",
            "sigma_p":25.,
            "t_start":-1.e6,
            "t_end":+1.e6},
        "coupling":{"coupling_mode":"none"}
    },
    ##### PIONS #####
    { # as above...
        "reference":{
            "position":{"x":0.0, "y":-0.0, "z":0.0},
            "momentum":{"x":0.0, "y":0.0, "z":1.0},
            "spin":{"x":0.0, "y":0.0, "z":1.0},
            "particle_id":211, "energy":285.0, "time":0.0, "random_seed":10
        },
        "random_seed_algorithm":"incrementing_random",
        "weight":2.,
        "transverse":{"transverse_mode":"constant_solenoid", "emittance_4d":6.,
                      "normalised_angular_momentum":0.1, "bz":4.e-3},
        "longitudinal":{"longitudinal_mode":"uniform_time",
                        "momentum_variable":"p",
                        "sigma_p":25.,
                        "t_start":-1.e6,
                        "t_end":+1.e6},
        "coupling":{"coupling_mode":"none"}
    },
    ##### ELECTRONS #####
    { # as above...
        "reference":{
            "position":{"x":0.0, "y":-0.0, "z":0.0},
            "momentum":{"x":0.0, "y":0.0, "z":1.0},
            "spin":{"x":0.0, "y":0.0, "z":1.0},
            "particle_id":-11, "energy":200.0, "time":0.0, "random_seed":10
        },
        "random_seed_algorithm":"incrementing_random",
        "weight":8.,
        "transverse":{"transverse_mode":"constant_solenoid", "emittance_4d":6.,
                      "normalised_angular_momentum":0.1, "bz":4.e-3},
        "longitudinal":{"longitudinal_mode":"uniform_time",
                        "momentum_variable":"p",
                        "sigma_p":25.,
                        # NOTE(review): t_start (-2e6) differs from the muon and
                        # pion definitions (-1e6) -- confirm this is intentional.
                        "t_start":-2.e6,
                        "t_end":+1.e6},
        "coupling":{"coupling_mode":"none"}
    }]
}
| gpl-3.0 | -8,757,678,298,232,301,000 | -8,574,706,954,395,087,000 | 37.082569 | 80 | 0.544688 | false |
berkmancenter/mediacloud | apps/common/tests/python/mediawords/util/test_extract_article_html_from_page_html.py | 1 | 3359 | import multiprocessing
from typing import Union
from unittest import TestCase
from mediawords.test.hash_server import HashServer
from mediawords.util.config.common import CommonConfig
from mediawords.util.extract_article_from_page import extract_article_html_from_page_html
from mediawords.util.network import random_unused_port
from mediawords.util.parse_json import encode_json
def test_extract_article_html_from_page_html():
"""Basic test."""
content = """
<html>
<head>
<title>I'm a test</title>
</head>
<body>
<p>Hi test, I'm dad!</p>
</body>
</html>
"""
response = extract_article_html_from_page_html(content=content)
assert response
assert 'extracted_html' in response
assert 'extractor_version' in response
assert "I'm a test" in response['extracted_html']
assert "Hi test, I'm dad!" in response['extracted_html']
assert 'readabilityBody' in response['extracted_html'] # <body id="readabilityBody">
assert "readability-lxml" in response['extractor_version']
class TestExtractConnectionErrors(TestCase):
"""Extract the page but fail the first response."""
__slots__ = [
'is_first_response',
]
expected_extracted_text = "Extraction worked the second time!"
def __extract_but_initially_fail(self, _: HashServer.Request) -> Union[str, bytes]:
"""Page callback that fails initially but then changes its mind."""
with self.is_first_response.get_lock():
if self.is_first_response.value == 1:
self.is_first_response.value = 0
# Closest to a connection error that we can get
raise Exception("Whoops!")
else:
response = ""
response += "HTTP/1.0 200 OK\r\n"
response += "Content-Type: application/json; charset=UTF-8\r\n"
response += "\r\n"
response += encode_json({
'extracted_html': self.expected_extracted_text,
'extractor_version': 'readability-lxml',
})
return response
def test_extract_article_html_from_page_html_connection_errors(self):
"""Try extracting with connection errors."""
# Use multiprocessing.Value() because request might be handled in a fork
self.is_first_response = multiprocessing.Value('i', 1)
pages = {
'/extract': {
'callback': self.__extract_but_initially_fail,
}
}
port = random_unused_port()
hs = HashServer(port=port, pages=pages)
hs.start()
class MockExtractorCommonConfig(CommonConfig):
"""Mock configuration which points to our unstable extractor."""
def extractor_api_url(self) -> str:
return f'http://localhost:{port}/extract'
extractor_response = extract_article_html_from_page_html(content='whatever', config=MockExtractorCommonConfig())
hs.stop()
assert extractor_response
assert 'extracted_html' in extractor_response
assert 'extractor_version' in extractor_response
assert extractor_response['extracted_html'] == self.expected_extracted_text
assert not self.is_first_response.value, "Make sure the initial extractor call failed."
| agpl-3.0 | 5,282,224,033,038,811,000 | 6,265,477,400,802,798,000 | 32.257426 | 120 | 0.627865 | false |
charmander/livestreamer | examples/gst-player.py | 22 | 3897 | #!/usr/bin/env python
from __future__ import print_function
import sys
import gi
from gi.repository import GObject as gobject, Gst as gst
from livestreamer import Livestreamer, StreamError, PluginError, NoPluginError
def exit(msg):
print(msg, file=sys.stderr)
sys.exit()
class LivestreamerPlayer(object):
def __init__(self):
self.fd = None
self.mainloop = gobject.MainLoop()
# This creates a playbin pipeline and using the appsrc source
# we can feed it our stream data
self.pipeline = gst.ElementFactory.make("playbin", None)
self.pipeline.set_property("uri", "appsrc://")
# When the playbin creates the appsrc source it will call
# this callback and allow us to configure it
self.pipeline.connect("source-setup", self.on_source_setup)
# Creates a bus and set callbacks to receive errors
self.bus = self.pipeline.get_bus()
self.bus.add_signal_watch()
self.bus.connect("message::eos", self.on_eos)
self.bus.connect("message::error", self.on_error)
def exit(self, msg):
self.stop()
exit(msg)
def stop(self):
# Stop playback and exit mainloop
self.pipeline.set_state(gst.State.NULL)
self.mainloop.quit()
# Close the stream
if self.fd:
self.fd.close()
def play(self, stream):
# Attempt to open the stream
try:
self.fd = stream.open()
except StreamError as err:
self.exit("Failed to open stream: {0}".format(err))
# Start playback
self.pipeline.set_state(gst.State.PLAYING)
self.mainloop.run()
def on_source_setup(self, element, source):
# When this callback is called the appsrc expects
# us to feed it more data
source.connect("need-data", self.on_source_need_data)
def on_source_need_data(self, source, length):
# Attempt to read data from the stream
try:
data = self.fd.read(length)
except IOError as err:
self.exit("Failed to read data from stream: {0}".format(err))
# If data is empty it's the end of stream
if not data:
source.emit("end-of-stream")
return
# Convert the Python bytes into a GStreamer Buffer
# and then push it to the appsrc
buf = gst.Buffer.new_wrapped(data)
source.emit("push-buffer", buf)
def on_eos(self, bus, msg):
# Stop playback on end of stream
self.stop()
def on_error(self, bus, msg):
# Print error message and exit on error
error = msg.parse_error()[1]
self.exit(error)
def main():
if len(sys.argv) < 3:
exit("Usage: {0} <url> <quality>".format(sys.argv[0]))
# Initialize and check GStreamer version
gi.require_version("Gst", "1.0")
gobject.threads_init()
gst.init(None)
# Collect arguments
url = sys.argv[1]
quality = sys.argv[2]
# Create the Livestreamer session
livestreamer = Livestreamer()
# Enable logging
livestreamer.set_loglevel("info")
livestreamer.set_logoutput(sys.stdout)
# Attempt to fetch streams
try:
streams = livestreamer.streams(url)
except NoPluginError:
exit("Livestreamer is unable to handle the URL '{0}'".format(url))
except PluginError as err:
exit("Plugin error: {0}".format(err))
if not streams:
exit("No streams found on URL '{0}'".format(url))
# Look for specified stream
if quality not in streams:
exit("Unable to find '{0}' stream on URL '{1}'".format(quality, url))
# We found the stream
stream = streams[quality]
# Create the player and start playback
player = LivestreamerPlayer()
# Blocks until playback is done
player.play(stream)
if __name__ == "__main__":
main()
| bsd-2-clause | -8,167,548,398,808,285,000 | -599,981,244,908,722,300 | 27.035971 | 78 | 0.615858 | false |
amagnus/pulsegig | app/models.py | 1 | 1894 | from django.db import models
from django.contrib.auth.models import User
class Guy(models.Model):
user = models.OneToOneField(User, primary_key=True)
cell = models.CharField(max_length=15)
metroarea_name = models.CharField(max_length=30, default=None, null=True)
metroareaID = models.IntegerField(default=None, null=True)
created = models.DateTimeField(auto_now_add=True)
def __unicode__(self):
return self.user
class Band(models.Model):
name = models.CharField(max_length=100)
genre = models.CharField(max_length=100)
skID = models.IntegerField()
created = models.DateTimeField(auto_now_add=True)
def __unicode__(self):
return self.name
class SimilarBand(models.Model):
band_input = models.ForeignKey(Band, related_name='band_input')
band_suggest = models.ForeignKey(Band, related_name='band_suggest')
disabled = models.BooleanField(default=False)
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
def __unicode__(self):
return self.band_input.name
class Alert(models.Model):
user = models.ForeignKey(User)
band = models.ForeignKey(Band)
disabled = models.BooleanField(default=False)
created = models.DateTimeField(auto_now_add=True)
def __unicode__(self):
return self.band.name
class AlertLog(models.Model):
user = models.ForeignKey(User)
band = models.ForeignKey(Band)
eventskID = models.IntegerField(default=None)
showDate = models.DateField()
showURL = models.CharField(max_length=255)
is_similar = models.BooleanField(default=False)
send_on = models.DateTimeField()
has_sent = models.BooleanField(default=False)
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
def __unicode__(self):
return self.band.name
| mit | 6,405,249,109,980,653,000 | -6,319,411,232,827,964,000 | 30.566667 | 77 | 0.705913 | false |
liangz0707/scikit-learn | benchmarks/bench_sparsify.py | 323 | 3372 | """
Benchmark SGD prediction time with dense/sparse coefficients.
Invoke with
-----------
$ kernprof.py -l sparsity_benchmark.py
$ python -m line_profiler sparsity_benchmark.py.lprof
Typical output
--------------
input data sparsity: 0.050000
true coef sparsity: 0.000100
test data sparsity: 0.027400
model sparsity: 0.000024
r^2 on test data (dense model) : 0.233651
r^2 on test data (sparse model) : 0.233651
Wrote profile results to sparsity_benchmark.py.lprof
Timer unit: 1e-06 s
File: sparsity_benchmark.py
Function: benchmark_dense_predict at line 51
Total time: 0.532979 s
Line # Hits Time Per Hit % Time Line Contents
==============================================================
51 @profile
52 def benchmark_dense_predict():
53 301 640 2.1 0.1 for _ in range(300):
54 300 532339 1774.5 99.9 clf.predict(X_test)
File: sparsity_benchmark.py
Function: benchmark_sparse_predict at line 56
Total time: 0.39274 s
Line # Hits Time Per Hit % Time Line Contents
==============================================================
56 @profile
57 def benchmark_sparse_predict():
58 1 10854 10854.0 2.8 X_test_sparse = csr_matrix(X_test)
59 301 477 1.6 0.1 for _ in range(300):
60 300 381409 1271.4 97.1 clf.predict(X_test_sparse)
"""
from scipy.sparse.csr import csr_matrix
import numpy as np
from sklearn.linear_model.stochastic_gradient import SGDRegressor
from sklearn.metrics import r2_score
np.random.seed(42)
def sparsity_ratio(X):
return np.count_nonzero(X) / float(n_samples * n_features)
n_samples, n_features = 5000, 300
X = np.random.randn(n_samples, n_features)
inds = np.arange(n_samples)
np.random.shuffle(inds)
X[inds[int(n_features / 1.2):]] = 0 # sparsify input
print("input data sparsity: %f" % sparsity_ratio(X))
coef = 3 * np.random.randn(n_features)
inds = np.arange(n_features)
np.random.shuffle(inds)
coef[inds[n_features/2:]] = 0 # sparsify coef
print("true coef sparsity: %f" % sparsity_ratio(coef))
y = np.dot(X, coef)
# add noise
y += 0.01 * np.random.normal((n_samples,))
# Split data in train set and test set
n_samples = X.shape[0]
X_train, y_train = X[:n_samples / 2], y[:n_samples / 2]
X_test, y_test = X[n_samples / 2:], y[n_samples / 2:]
print("test data sparsity: %f" % sparsity_ratio(X_test))
###############################################################################
clf = SGDRegressor(penalty='l1', alpha=.2, fit_intercept=True, n_iter=2000)
clf.fit(X_train, y_train)
print("model sparsity: %f" % sparsity_ratio(clf.coef_))
def benchmark_dense_predict():
for _ in range(300):
clf.predict(X_test)
def benchmark_sparse_predict():
X_test_sparse = csr_matrix(X_test)
for _ in range(300):
clf.predict(X_test_sparse)
def score(y_test, y_pred, case):
r2 = r2_score(y_test, y_pred)
print("r^2 on test data (%s) : %f" % (case, r2))
score(y_test, clf.predict(X_test), 'dense model')
benchmark_dense_predict()
clf.sparsify()
score(y_test, clf.predict(X_test), 'sparse model')
benchmark_sparse_predict()
| bsd-3-clause | -8,525,252,220,760,899,000 | 8,302,876,655,451,944,000 | 31.423077 | 87 | 0.580368 | false |
trel/irods-qgis | test/qgis_interface.py | 112 | 6395 | # coding=utf-8
"""QGIS plugin implementation.
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
.. note:: This source code was copied from the 'postgis viewer' application
with original authors:
Copyright (c) 2010 by Ivan Mincik, ivan.mincik@gista.sk
Copyright (c) 2011 German Carrillo, geotux_tuxman@linuxmail.org
Copyright (c) 2014 Tim Sutton, tim@linfiniti.com
"""
__author__ = 'tim@linfiniti.com'
__revision__ = '$Format:%H$'
__date__ = '10/01/2011'
__copyright__ = (
'Copyright (c) 2010 by Ivan Mincik, ivan.mincik@gista.sk and '
'Copyright (c) 2011 German Carrillo, geotux_tuxman@linuxmail.org'
'Copyright (c) 2014 Tim Sutton, tim@linfiniti.com'
)
import logging
from PyQt4.QtCore import QObject, pyqtSlot, pyqtSignal
from qgis.core import QgsMapLayerRegistry
from qgis.gui import QgsMapCanvasLayer
LOGGER = logging.getLogger('QGIS')
#noinspection PyMethodMayBeStatic,PyPep8Naming
class QgisInterface(QObject):
"""Class to expose QGIS objects and functions to plugins.
This class is here for enabling us to run unit tests only,
so most methods are simply stubs.
"""
currentLayerChanged = pyqtSignal(QgsMapCanvasLayer)
def __init__(self, canvas):
"""Constructor
:param canvas:
"""
QObject.__init__(self)
self.canvas = canvas
# Set up slots so we can mimic the behaviour of QGIS when layers
# are added.
LOGGER.debug('Initialising canvas...')
# noinspection PyArgumentList
QgsMapLayerRegistry.instance().layersAdded.connect(self.addLayers)
# noinspection PyArgumentList
QgsMapLayerRegistry.instance().layerWasAdded.connect(self.addLayer)
# noinspection PyArgumentList
QgsMapLayerRegistry.instance().removeAll.connect(self.removeAllLayers)
# For processing module
self.destCrs = None
@pyqtSlot('QStringList')
def addLayers(self, layers):
"""Handle layers being added to the registry so they show up in canvas.
:param layers: list<QgsMapLayer> list of map layers that were added
.. note:: The QgsInterface api does not include this method,
it is added here as a helper to facilitate testing.
"""
#LOGGER.debug('addLayers called on qgis_interface')
#LOGGER.debug('Number of layers being added: %s' % len(layers))
#LOGGER.debug('Layer Count Before: %s' % len(self.canvas.layers()))
current_layers = self.canvas.layers()
final_layers = []
for layer in current_layers:
final_layers.append(QgsMapCanvasLayer(layer))
for layer in layers:
final_layers.append(QgsMapCanvasLayer(layer))
self.canvas.setLayerSet(final_layers)
#LOGGER.debug('Layer Count After: %s' % len(self.canvas.layers()))
@pyqtSlot('QgsMapLayer')
def addLayer(self, layer):
"""Handle a layer being added to the registry so it shows up in canvas.
:param layer: list<QgsMapLayer> list of map layers that were added
.. note: The QgsInterface api does not include this method, it is added
here as a helper to facilitate testing.
.. note: The addLayer method was deprecated in QGIS 1.8 so you should
not need this method much.
"""
pass
@pyqtSlot()
def removeAllLayers(self):
"""Remove layers from the canvas before they get deleted."""
self.canvas.setLayerSet([])
def newProject(self):
"""Create new project."""
# noinspection PyArgumentList
QgsMapLayerRegistry.instance().removeAllMapLayers()
# ---------------- API Mock for QgsInterface follows -------------------
def zoomFull(self):
"""Zoom to the map full extent."""
pass
def zoomToPrevious(self):
"""Zoom to previous view extent."""
pass
def zoomToNext(self):
"""Zoom to next view extent."""
pass
def zoomToActiveLayer(self):
"""Zoom to extent of active layer."""
pass
def addVectorLayer(self, path, base_name, provider_key):
"""Add a vector layer.
:param path: Path to layer.
:type path: str
:param base_name: Base name for layer.
:type base_name: str
:param provider_key: Provider key e.g. 'ogr'
:type provider_key: str
"""
pass
def addRasterLayer(self, path, base_name):
"""Add a raster layer given a raster layer file name
:param path: Path to layer.
:type path: str
:param base_name: Base name for layer.
:type base_name: str
"""
pass
def activeLayer(self):
"""Get pointer to the active layer (layer selected in the legend)."""
# noinspection PyArgumentList
layers = QgsMapLayerRegistry.instance().mapLayers()
for item in layers:
return layers[item]
def addToolBarIcon(self, action):
"""Add an icon to the plugins toolbar.
:param action: Action to add to the toolbar.
:type action: QAction
"""
pass
def removeToolBarIcon(self, action):
"""Remove an action (icon) from the plugin toolbar.
:param action: Action to add to the toolbar.
:type action: QAction
"""
pass
def addToolBar(self, name):
"""Add toolbar with specified name.
:param name: Name for the toolbar.
:type name: str
"""
pass
def mapCanvas(self):
"""Return a pointer to the map canvas."""
return self.canvas
def mainWindow(self):
"""Return a pointer to the main window.
In case of QGIS it returns an instance of QgisApp.
"""
pass
def addDockWidget(self, area, dock_widget):
"""Add a dock widget to the main window.
:param area: Where in the ui the dock should be placed.
:type area:
:param dock_widget: A dock widget to add to the UI.
:type dock_widget: QDockWidget
"""
pass
def legendInterface(self):
"""Get the legend."""
return self.canvas
| gpl-2.0 | 8,202,573,206,662,638,000 | 7,399,875,860,946,081,000 | 30.195122 | 79 | 0.626583 | false |
JackKelly/neuralnilm_prototype | scripts/e307.py | 2 | 6092 | from __future__ import print_function, division
import matplotlib
import logging
from sys import stdout
matplotlib.use('Agg') # Must be before importing matplotlib.pyplot or pylab!
from neuralnilm import (Net, RealApplianceSource,
BLSTMLayer, DimshuffleLayer,
BidirectionalRecurrentLayer)
from neuralnilm.source import standardise, discretize, fdiff, power_and_fdiff
from neuralnilm.experiment import run_experiment, init_experiment
from neuralnilm.net import TrainingError
from neuralnilm.layers import MixtureDensityLayer
from neuralnilm.objectives import scaled_cost, mdn_nll, scaled_cost_ignore_inactive, ignore_inactive
from neuralnilm.plot import MDNPlotter
from lasagne.nonlinearities import sigmoid, rectify, tanh
from lasagne.objectives import mse
from lasagne.init import Uniform, Normal
from lasagne.layers import (LSTMLayer, DenseLayer, Conv1DLayer,
ReshapeLayer, FeaturePoolLayer, RecurrentLayer)
from lasagne.updates import nesterov_momentum, momentum
from functools import partial
import os
import __main__
from copy import deepcopy
from math import sqrt
import numpy as np
import theano.tensor as T
NAME = os.path.splitext(os.path.split(__main__.__file__)[1])[0]
PATH = "/homes/dk3810/workspace/python/neuralnilm/figures"
SAVE_PLOT_INTERVAL = 250
GRADIENT_STEPS = 100
SEQ_LENGTH = 512
source_dict = dict(
filename='/data/dk3810/ukdale.h5',
appliances=[
['fridge freezer', 'fridge', 'freezer'],
'hair straighteners',
'television'
# 'dish washer',
# ['washer dryer', 'washing machine']
],
max_appliance_powers=[300, 500, 200, 2500, 2400],
on_power_thresholds=[5] * 5,
max_input_power=5900,
min_on_durations=[60, 60, 60, 1800, 1800],
min_off_durations=[12, 12, 12, 1800, 600],
window=("2013-06-01", "2014-07-01"),
seq_length=SEQ_LENGTH,
output_one_appliance=False,
boolean_targets=False,
train_buildings=[1],
validation_buildings=[1],
skip_probability=0.0,
n_seq_per_batch=16,
subsample_target=4,
include_diff=False,
clip_appliance_power=True,
target_is_prediction=False,
independently_center_inputs = True,
standardise_input=True,
standardise_targets=True,
input_padding=0,
lag=0,
reshape_target_to_2D=False,
input_stats={'mean': np.array([ 0.05526326], dtype=np.float32),
'std': np.array([ 0.12636775], dtype=np.float32)},
target_stats={
'mean': np.array([ 0.04066789, 0.01881946,
0.24639061, 0.17608672, 0.10273963],
dtype=np.float32),
'std': np.array([ 0.11449792, 0.07338708,
0.26608968, 0.33463112, 0.21250485],
dtype=np.float32)}
)
N = 50
net_dict = dict(
save_plot_interval=SAVE_PLOT_INTERVAL,
# loss_function=partial(ignore_inactive, loss_func=mdn_nll, seq_length=SEQ_LENGTH),
# loss_function=lambda x, t: mdn_nll(x, t).mean(),
loss_function=lambda x, t: mse(x, t).mean(),
# loss_function=partial(scaled_cost, loss_func=mse),
updates_func=momentum,
learning_rate=1e-02,
learning_rate_changes_by_iteration={
500: 5e-03
# 4000: 1e-03,
# 6000: 5e-06,
# 7000: 1e-06
# 2000: 5e-06
# 3000: 1e-05
# 7000: 5e-06,
# 10000: 1e-06,
# 15000: 5e-07,
# 50000: 1e-07
},
do_save_activations=True
)
def callback(net, epoch):
net.source.reshape_target_to_2D = True
net.plotter = MDNPlotter(net)
net.generate_validation_data_and_set_shapes()
net.loss_function = lambda x, t: mdn_nll(x, t).mean()
net.learning_rate.set_value(1e-05)
def exp_a(name):
# 3 appliances
global source
source_dict_copy = deepcopy(source_dict)
source_dict_copy['reshape_target_to_2D'] = False
source = RealApplianceSource(**source_dict_copy)
source.reshape_target_to_2D = False
net_dict_copy = deepcopy(net_dict)
net_dict_copy.update(dict(
experiment_name=name,
source=source
))
N = 50
net_dict_copy['layers_config'] = [
{
'type': BidirectionalRecurrentLayer,
'num_units': N,
'gradient_steps': GRADIENT_STEPS,
'W_in_to_hid': Normal(std=1.),
'nonlinearity': tanh
},
{
'type': FeaturePoolLayer,
'ds': 4, # number of feature maps to be pooled together
'axis': 1, # pool over the time axis
'pool_function': T.max
},
{
'type': BidirectionalRecurrentLayer,
'num_units': N,
'gradient_steps': GRADIENT_STEPS,
'W_in_to_hid': Normal(std=1/sqrt(N)),
'nonlinearity': tanh
},
{
'type': DenseLayer,
'W': Normal(std=1/sqrt(N)),
'num_units': source.n_outputs,
'nonlinearity': None
}
]
net_dict_copy['layer_changes'] = {
1001: {
'remove_from': -2,
'callback': callback,
'new_layers': [
{
'type': MixtureDensityLayer,
'num_units': source.n_outputs,
'num_components': 2
}
]
}
}
net = Net(**net_dict_copy)
return net
def main():
# EXPERIMENTS = list('abcdefghijklmnopqrstuvwxyz')
EXPERIMENTS = list('a')
for experiment in EXPERIMENTS:
full_exp_name = NAME + experiment
func_call = init_experiment(PATH, experiment, full_exp_name)
logger = logging.getLogger(full_exp_name)
try:
net = eval(func_call)
run_experiment(net, epochs=None)
except KeyboardInterrupt:
logger.info("KeyboardInterrupt")
break
except Exception as exception:
logger.exception("Exception")
raise
finally:
logging.shutdown()
if __name__ == "__main__":
main()
| mit | 3,604,426,321,033,129,000 | -6,942,789,583,517,640,000 | 30.729167 | 100 | 0.595043 | false |
karstenw/FMPLayoutExporter | fmpa10.py | 1 | 5014 | version = 1.1
path = u'/Applications/FileMaker/FileMaker Pro 11 Advanced/FileMaker Pro Advanced.app'
classes = \
[('application', 'capp'),
('window', 'cwin'),
('document', 'docu'),
('database', 'cDB '),
('table', 'cTBL'),
('FileMaker_script', 'cSCP'),
('layout', 'ctbl'),
('field', 'ccol'),
('record', 'crow'),
('cell', 'ccel'),
('repetition', 'cREP'),
('request', 'cRQT'),
('menu_item', 'cmen'),
('menu', 'cmnu')]
enums = \
[('table', 'TABL'),
('view', 'VIEW'),
('read_only', 'nmod'),
('formulas_protected', 'fpro'),
('read_write', 'modf'),
('no_access', '\x00\x00\x00\x00'),
('read', '\x00\x00\x00\x01'),
('write', '\x00\x00\x00\x02'),
('update', '\x00\x00\x00\x04'),
('create', '\x00\x00\x00\x08'),
('delete', '\x00\x00\x00\x10'),
('read_write', '\x00\x00\x00\x03'),
('read_update', '\x00\x00\x00\x05'),
('read_create', '\x00\x00\x00\t'),
('read_delete', '\x00\x00\x00\x11'),
('write_update', '\x00\x00\x00\x06'),
('write_create', '\x00\x00\x00\n'),
('write_delete', '\x00\x00\x00\x12'),
('update_create', '\x00\x00\x00\x0c'),
('update_delete', '\x00\x00\x00\x14'),
('write_delete', '\x00\x00\x00\x18'),
('read_write_update', '\x00\x00\x00\x07'),
('read_write_create', '\x00\x00\x00\x0b'),
('read_write_delete', '\x00\x00\x00\x13'),
('write_update_create', '\x00\x00\x00\x0e'),
('write_update_delete', '\x00\x00\x00\x16'),
('update_create_delete', '\x00\x00\x00\x1c'),
('read_create_delete', '\x00\x00\x00\x19'),
('read_update_delete', '\x00\x00\x00\x15'),
('write_create_delete', '\x00\x00\x00\x1a'),
('read_update_create', '\x00\x00\x00\r'),
('no_delete', '\x00\x00\x00\x0f'),
('no_create', '\x00\x00\x00\x17'),
('no_update', '\x00\x00\x00\x1b'),
('no_read', '\x00\x00\x00\x1e'),
('no_write', '\x00\x00\x00\x1d'),
('full', '\x00\x00\x00\x1f'),
('ascending', '\x00\x00\x00\x00'),
('descending', '\x00\x00\x00\x01'),
('custom', '\x00\x00\x00\x04'),
('sum', 'TOTL'),
('count', 'CONT'),
('mean', 'MEAN'),
('standard_deviation', 'STDV'),
('average', 'AVRG'),
('minimum', 'MIN '),
('maximum', 'MAX '),
('unlocked', 'NOLK'),
('shared_lock', 'SHLK'),
('exclusive_lock', 'EXLK'),
('false', 'fals'),
('sharing_hidden', 'mltH'),
('true', 'true'),
('single', 'rSgl'),
('repeated', 'rFxd'),
('guest', 'pGST'),
('before_', 'befo'),
('after_', 'afte'),
('beginning_', 'bgng'),
('end_', 'end '),
('replace', 'rplc'),
('index', 'indx'),
('named', 'name'),
('ID_', 'ID ')]
properties = \
[('best_type', 'pbst'),
('class_', 'pcls'),
('default_type', 'deft'),
('frontmost', 'pisf'),
('name', 'pnam'),
('version', 'vers'),
('bounds', 'pbnd'),
('visible', 'pvis'),
('index', 'pidx'),
('floating', 'isfl'),
('zoomable', 'iszm'),
('zoomed', 'pzum'),
('modal', 'pmod'),
('resizable', 'prsz'),
('has_close_box', 'hclb'),
('has_title_bar', 'ptit'),
('current_layout', 'pCLY'),
('current_record', 'pCRW'),
('current_table', 'pCTB'),
('current_cell', 'pCCL'),
('modified', 'imod'),
('multiuser', 'pMUr'),
('lock', 'pLCK'),
('access', 'pACS'),
('ID_', 'ID '),
('protection', 'ppro'),
('kind', 'pKND'),
('choices', 'pCHS'),
('formula', 'pfor'),
('nulls_OK', 'pNLS'),
('repeats', 'pRPT'),
('repeat_size', 'pRPS'),
('unique_value', 'pUNQ'),
('globalValue', 'pGLL'),
('cellValue', 'vlue'),
('omitted', 'pOMT'),
('enabled', 'enbl'),
('item_number', 'itmn'),
('checked', 'pCHK')]
elements = \
[('applications', 'capp'),
('windows', 'cwin'),
('documents', 'docu'),
('databases', 'cDB '),
('tables', 'cTBL'),
('FileMaker_scripts', 'cSCP'),
('layouts', 'ctbl'),
('fields', 'ccol'),
('records', 'crow'),
('cells', 'ccel'),
('repetitions', 'cREP'),
('requests', 'cRQT'),
('menu_items', 'cmen'),
('menus', 'cmnu')]
commands = \
[('getURL', 'GURLGURL', [('for_accounts', 'pACT')]),
('exists', 'coredoex', []),
('show', 'miscmvis', []),
('close', 'coreclos', []),
('redo', 'miscredo', []),
('find', 'FMPRFIND', []),
('quit', 'aevtquit', []),
('cut', 'misccut ', []),
('get_remote_URL', 'FMPROPRM', []),
('open', 'aevtodoc', [('with_passwords', 'pPAS'), ('for_Accounts', 'pACT')]),
('create',
'corecrel',
[('new', 'kocl'),
('at', 'insh'),
('with_data', 'data'),
('with_properties', 'prdt')]),
('get_data', 'coregetd', [('as_', 'rtyp')]),
('event_info', 'coregtei', [('in_', 'wrcd')]),
('print_',
'aevtpdoc',
[('with_password', 'pPAS'),
('for_Accounts', 'pACT'),
('from_page', 'StPg'),
('to_page', 'NdPg'),
('with_copies', 'Cpis')]),
('duplicate', 'coreclon', [('to', 'insh')]),
('save', 'coresave', []),
('data_size', 'coredsiz', [('as_', 'rtyp')]),
('go_to', 'FMPRGOTO', []),
('sort', 'DATASORT', [('by', 'SRTE'), ('in_order', 'SRTT')]),
('undo', 'miscundo', []),
('set_data', 'coresetd', [('to', 'data')]),
('copy', 'misccopy', []),
('paste', 'miscpast', []),
('count', 'corecnte', [('class_', 'kocl')]),
('do_script', 'miscdosc', []),
('class_info', 'coreqobj', [('in_', 'wrcd')]),
('do_menu', 'miscmenu', [('menu_named', 'menn')]),
('delete', 'coredelo', [])]
| bsd-2-clause | 2,939,948,550,610,489,300 | 5,223,131,759,827,694,000 | 26.549451 | 86 | 0.52513 | false |
PyBossa/pybossa | pybossa/auth/token.py | 1 | 1271 | # -*- coding: utf8 -*-
# This file is part of PYBOSSA.
#
# Copyright (C) 2015 Scifabric LTD.
#
# PYBOSSA is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PYBOSSA is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PYBOSSA. If not, see <http://www.gnu.org/licenses/>.
class TokenAuth(object):
_specific_actions = []
@property
def specific_actions(self):
return self._specific_actions
def can(self, user, action, _, token=None):
action = ''.join(['_', action])
return getattr(self, action)(user, token)
def _create(self, user, token=None):
return False
def _read(self, user, token=None):
return not user.is_anonymous()
def _update(self, user, token):
return False
def _delete(self, user, token):
return False
| agpl-3.0 | -1,125,649,564,209,960,100 | -5,724,740,678,082,995,000 | 30 | 77 | 0.683714 | false |
tpsatish95/Python-Workshop | Python Environment Setup/Alternate/1. Python/1. Installer/Python-3.4.0(Linux)/Lib/contextlib.py | 83 | 11648 | """Utilities for with-statement contexts. See PEP 343."""
import sys
from collections import deque
from functools import wraps
__all__ = ["contextmanager", "closing", "ContextDecorator", "ExitStack",
"redirect_stdout", "suppress"]
class ContextDecorator(object):
    """Mixin that lets a context manager double as a function decorator.

    A subclass providing ``__enter__``/``__exit__`` can be applied with
    ``@MyCM(...)``; each call of the decorated function then runs inside
    ``with self._recreate_cm():``.
    """

    def _recreate_cm(self):
        """Return the context manager instance to use for a single call.

        The default implementation simply reuses ``self``.  One-shot
        context managers such as _GeneratorContextManager override this
        so every invocation of the decorated function gets a fresh
        manager via implicit recreation.

        This is a private hook just for _GeneratorContextManager.
        See issue #11647 for details.
        """
        return self

    def __call__(self, func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            with self._recreate_cm():
                return func(*args, **kwargs)
        return wrapper
class _GeneratorContextManager(ContextDecorator):
    """Helper for @contextmanager decorator."""
    def __init__(self, func, *args, **kwds):
        # Create the underlying generator now; it is only advanced to its
        # first (and only) yield in __enter__().
        self.gen = func(*args, **kwds)
        # Keep the factory and its arguments so _recreate_cm() can build a
        # fresh one-shot manager for each decorated-function call.
        self.func, self.args, self.kwds = func, args, kwds
        # Issue 19330: ensure context manager instances have good docstrings
        doc = getattr(func, "__doc__", None)
        if doc is None:
            doc = type(self).__doc__
        self.__doc__ = doc
        # Unfortunately, this still doesn't provide good help output when
        # inspecting the created context manager instances, since pydoc
        # currently bypasses the instance docstring and shows the docstring
        # for the class instead.
        # See http://bugs.python.org/issue19404 for more details.
    def _recreate_cm(self):
        # _GCM instances are one-shot context managers, so the
        # CM must be recreated each time a decorated function is
        # called
        return self.__class__(self.func, *self.args, **self.kwds)
    def __enter__(self):
        """Run the generator up to its yield and return the yielded value."""
        try:
            return next(self.gen)
        except StopIteration:
            # The generator returned without yielding even once.
            raise RuntimeError("generator didn't yield") from None
    def __exit__(self, type, value, traceback):
        """Finish the generator, resuming it or throwing the exception in."""
        if type is None:
            # Success path: resume after the yield and require the
            # generator to stop (it must yield exactly once).
            try:
                next(self.gen)
            except StopIteration:
                return
            else:
                raise RuntimeError("generator didn't stop")
        else:
            if value is None:
                # Need to force instantiation so we can reliably
                # tell if we get the same exception back
                value = type()
            try:
                self.gen.throw(type, value, traceback)
                raise RuntimeError("generator didn't stop after throw()")
            except StopIteration as exc:
                # Suppress the exception *unless* it's the same exception that
                # was passed to throw().  This prevents a StopIteration
                # raised inside the "with" statement from being suppressed
                return exc is not value
            except:
                # only re-raise if it's *not* the exception that was
                # passed to throw(), because __exit__() must not raise
                # an exception unless __exit__() itself failed.  But throw()
                # has to raise the exception to signal propagation, so this
                # fixes the impedance mismatch between the throw() protocol
                # and the __exit__() protocol.
                #
                if sys.exc_info()[1] is not value:
                    raise
def contextmanager(func):
    """Turn a generator function into a context-manager factory.

    The decorated generator must yield exactly once; everything before
    the yield runs on ``__enter__`` and everything after it (typically
    in a ``finally`` clause) runs on ``__exit__``::

        @contextmanager
        def some_generator(<arguments>):
            <setup>
            try:
                yield <value>
            finally:
                <cleanup>

    With that definition::

        with some_generator(<arguments>) as <variable>:
            <body>

    behaves like::

        <setup>
        try:
            <variable> = <value>
            <body>
        finally:
            <cleanup>
    """
    @wraps(func)
    def make_manager(*args, **kwargs):
        return _GeneratorContextManager(func, *args, **kwargs)
    return make_manager
class closing(object):
    """Guarantee ``thing.close()`` runs when the with-block finishes.

    ``with closing(<module>.open(<arguments>)) as f: <block>`` is
    equivalent to::

        f = <module>.open(<arguments>)
        try:
            <block>
        finally:
            f.close()
    """
    def __init__(self, thing):
        # The object whose close() we must call on exit.
        self.thing = thing
    def __enter__(self):
        return self.thing
    def __exit__(self, *exc_info):
        self.thing.close()
class redirect_stdout:
    """Temporarily rebind ``sys.stdout`` to another file-like object.

    Re-entrant: nested uses keep the previous targets on a stack and
    restore them in the right order.

        # How to send help() to stderr
        with redirect_stdout(sys.stderr):
            help(dir)

        # How to write help() to a file
        with open('help.txt', 'w') as f:
            with redirect_stdout(f):
                help(pow)
    """

    def __init__(self, new_target):
        self._new_target = new_target
        # Stack of replaced streams; using a list keeps the CM re-entrant.
        self._old_targets = []

    def __enter__(self):
        self._old_targets.append(sys.stdout)
        sys.stdout = self._new_target
        return self._new_target

    def __exit__(self, *exc_info):
        sys.stdout = self._old_targets.pop()
class suppress:
"""Context manager to suppress specified exceptions
After the exception is suppressed, execution proceeds with the next
statement following the with statement.
with suppress(FileNotFoundError):
os.remove(somefile)
# Execution still resumes here if the file was already removed
"""
def __init__(self, *exceptions):
self._exceptions = exceptions
def __enter__(self):
pass
def __exit__(self, exctype, excinst, exctb):
# Unlike isinstance and issubclass, CPython exception handling
# currently only looks at the concrete type hierarchy (ignoring
# the instance and subclass checking hooks). While Guido considers
# that a bug rather than a feature, it's a fairly hard one to fix
# due to various internal implementation details. suppress provides
# the simpler issubclass based semantics, rather than trying to
# exactly reproduce the limitations of the CPython interpreter.
#
# See http://bugs.python.org/issue12029 for more details
return exctype is not None and issubclass(exctype, self._exceptions)
# Inspired by discussions on http://bugs.python.org/issue13585
class ExitStack(object):
"""Context manager for dynamic management of a stack of exit callbacks
For example:
with ExitStack() as stack:
files = [stack.enter_context(open(fname)) for fname in filenames]
# All opened files will automatically be closed at the end of
# the with statement, even if attempts to open files later
# in the list raise an exception
"""
def __init__(self):
self._exit_callbacks = deque()
def pop_all(self):
"""Preserve the context stack by transferring it to a new instance"""
new_stack = type(self)()
new_stack._exit_callbacks = self._exit_callbacks
self._exit_callbacks = deque()
return new_stack
def _push_cm_exit(self, cm, cm_exit):
"""Helper to correctly register callbacks to __exit__ methods"""
def _exit_wrapper(*exc_details):
return cm_exit(cm, *exc_details)
_exit_wrapper.__self__ = cm
self.push(_exit_wrapper)
def push(self, exit):
"""Registers a callback with the standard __exit__ method signature
Can suppress exceptions the same way __exit__ methods can.
Also accepts any object with an __exit__ method (registering a call
to the method instead of the object itself)
"""
# We use an unbound method rather than a bound method to follow
# the standard lookup behaviour for special methods
_cb_type = type(exit)
try:
exit_method = _cb_type.__exit__
except AttributeError:
# Not a context manager, so assume its a callable
self._exit_callbacks.append(exit)
else:
self._push_cm_exit(exit, exit_method)
return exit # Allow use as a decorator
def callback(self, callback, *args, **kwds):
"""Registers an arbitrary callback and arguments.
Cannot suppress exceptions.
"""
def _exit_wrapper(exc_type, exc, tb):
callback(*args, **kwds)
# We changed the signature, so using @wraps is not appropriate, but
# setting __wrapped__ may still help with introspection
_exit_wrapper.__wrapped__ = callback
self.push(_exit_wrapper)
return callback # Allow use as a decorator
def enter_context(self, cm):
"""Enters the supplied context manager
If successful, also pushes its __exit__ method as a callback and
returns the result of the __enter__ method.
"""
# We look up the special methods on the type to match the with statement
_cm_type = type(cm)
_exit = _cm_type.__exit__
result = _cm_type.__enter__(cm)
self._push_cm_exit(cm, _exit)
return result
def close(self):
"""Immediately unwind the context stack"""
self.__exit__(None, None, None)
def __enter__(self):
return self
def __exit__(self, *exc_details):
received_exc = exc_details[0] is not None
# We manipulate the exception state so it behaves as though
# we were actually nesting multiple with statements
frame_exc = sys.exc_info()[1]
def _fix_exception_context(new_exc, old_exc):
# Context may not be correct, so find the end of the chain
while 1:
exc_context = new_exc.__context__
if exc_context is old_exc:
# Context is already set correctly (see issue 20317)
return
if exc_context is None or exc_context is frame_exc:
break
new_exc = exc_context
# Change the end of the chain to point to the exception
# we expect it to reference
new_exc.__context__ = old_exc
# Callbacks are invoked in LIFO order to match the behaviour of
# nested context managers
suppressed_exc = False
pending_raise = False
while self._exit_callbacks:
cb = self._exit_callbacks.pop()
try:
if cb(*exc_details):
suppressed_exc = True
pending_raise = False
exc_details = (None, None, None)
except:
new_exc_details = sys.exc_info()
# simulate the stack of exceptions by setting the context
_fix_exception_context(new_exc_details[1], exc_details[1])
pending_raise = True
exc_details = new_exc_details
if pending_raise:
try:
# bare "raise exc_details[1]" replaces our carefully
# set-up context
fixed_ctx = exc_details[1].__context__
raise exc_details[1]
except BaseException:
exc_details[1].__context__ = fixed_ctx
raise
return received_exc and suppressed_exc
| apache-2.0 | -9,122,639,970,055,808,000 | 2,887,237,679,268,046,000 | 33.258824 | 80 | 0.579584 | false |
cwayne18/ActivityTracker | py/gpxpy/parser.py | 3 | 6507 | # -*- coding: utf-8 -*-
# Copyright 2011 Tomo Krajina
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import pdb
import re as mod_re
import logging as mod_logging
import datetime as mod_datetime
import xml.dom.minidom as mod_minidom
try:
import lxml.etree as mod_etree
except:
mod_etree = None
pass # LXML not available
from . import gpx as mod_gpx
from . import utils as mod_utils
from . import gpxfield as mod_gpxfield
class XMLParser:
"""
Used when lxml is not available. Uses standard minidom.
"""
def __init__(self, xml):
self.xml = xml
self.dom = mod_minidom.parseString(xml)
def get_first_child(self, node=None, name=None):
# TODO: Remove find_first_node from utils!
if not node:
node = self.dom
children = node.childNodes
if not children:
return None
if not name:
return children[0]
for tmp_node in children:
if tmp_node.nodeName == name:
return tmp_node
return None
def get_node_name(self, node):
if not node:
return None
return node.nodeName
def get_children(self, node=None):
if not node:
node = self.dom
return list(filter(lambda node : node.nodeType == node.ELEMENT_NODE, node.childNodes))
def get_node_data(self, node):
if node is None:
return None
child_nodes = node.childNodes
if not child_nodes or len(child_nodes) == 0:
return None
return child_nodes[0].nodeValue
def get_node_attribute(self, node, attribute):
if (not hasattr(node, 'attributes')) or (not node.attributes):
return None
if attribute in node.attributes.keys():
return node.attributes[attribute].nodeValue
return None
class LXMLParser:
"""
Used when lxml is available.
"""
def __init__(self, xml):
if not mod_etree:
raise Exception('Cannot use LXMLParser without lxml installed')
if mod_utils.PYTHON_VERSION[0] == '3':
# In python 3 all strings are unicode and for some reason lxml
# don't like unicode strings with XMLs declared as UTF-8:
self.xml = xml.encode('utf-8')
else:
self.xml = xml
self.dom = mod_etree.XML(self.xml)
# get the namespace
self.ns = self.dom.nsmap.get(None)
def get_first_child(self, node=None, name=None):
if node is None:
if name:
if self.get_node_name(self.dom) == name:
return self.dom
return self.dom
children = node.getchildren()
if not children:
return None
if name:
for node in children:
if self.get_node_name(node) == name:
return node
return None
return children[0]
def get_node_name(self, node):
if '}' in node.tag:
return node.tag.split('}')[1]
return node.tag
def get_children(self, node=None):
if node is None:
node = self.dom
return node.getchildren()
def get_node_data(self, node):
if node is None:
return None
return node.text
def get_node_attribute(self, node, attribute):
if node is None:
return None
return node.attrib.get(attribute)
class GPXParser:
def __init__(self, xml_or_file=None, parser=None):
"""
Parser may be lxml of minidom. If you set to None then lxml will be used if installed
otherwise minidom.
"""
self.init(xml_or_file)
self.gpx = mod_gpx.GPX()
self.xml_parser_type = parser
self.xml_parser = None
def init(self, xml_or_file):
text = xml_or_file.read() if hasattr(xml_or_file, 'read') else xml_or_file
self.xml = mod_utils.make_str(text)
self.gpx = mod_gpx.GPX()
def parse(self):
"""
Parses the XML file and returns a GPX object.
It will throw GPXXMLSyntaxException if the XML file is invalid or
GPXException if the XML file is valid but something is wrong with the
GPX data.
"""
try:
if self.xml_parser_type is None:
if mod_etree:
self.xml_parser = LXMLParser(self.xml)
else:
self.xml_parser = XMLParser(self.xml)
elif self.xml_parser_type == 'lxml':
self.xml_parser = LXMLParser(self.xml)
elif self.xml_parser_type == 'minidom':
self.xml_parser = XMLParser(self.xml)
else:
raise mod_gpx.GPXException('Invalid parser type: %s' % self.xml_parser_type)
self.__parse_dom()
return self.gpx
except Exception as e:
# The exception here can be a lxml or minidom exception.
mod_logging.debug('Error in:\n%s\n-----------\n' % self.xml)
mod_logging.exception(e)
# The library should work in the same way regardless of the
# underlying XML parser that's why the exception thrown
# here is GPXXMLSyntaxException (instead of simply throwing the
# original minidom or lxml exception e).
#
# But, if the user need the original exception (lxml or minidom)
# it is available with GPXXMLSyntaxException.original_exception:
raise mod_gpx.GPXXMLSyntaxException('Error parsing XML: %s' % str(e), e)
def __parse_dom(self):
node = self.xml_parser.get_first_child(name='gpx')
if node is None:
raise mod_gpx.GPXException('Document must have a `gpx` root node.')
version = self.xml_parser.get_node_attribute(node, 'version')
mod_gpxfield.gpx_fields_from_xml(self.gpx, self.xml_parser, node, version)
| gpl-3.0 | 5,281,702,243,446,102,000 | 3,946,978,423,624,950,000 | 28.986175 | 94 | 0.590902 | false |
ekumenlabs/terminus | terminus/generators/rndf_id_mapper.py | 1 | 2695 | """
Copyright (C) 2017 Open Source Robotics Foundation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from city_visitor import CityVisitor
from models.polyline_geometry import PolylineGeometry
class RNDFIdMapper(CityVisitor):
"""Simple city visitor that generates the RNDF ids for segments,
lanes and waypoints. Ids and objects are stored in two dictionaries,
so we can later perform lookups in either way"""
# Note: For the time being we treat streets and trunks in the same way,
# hence generating a single lane for any of them. This will change in the
# future, when we properly support multi-lanes trunks.
def run(self):
self.segment_id = 0
self.waypoint_id = 0
self.lane_id = 0
self.object_to_id_level_1 = {}
self.object_to_id_level_2 = {}
self.id_to_object = {}
super(RNDFIdMapper, self).run()
def id_for(self, object):
try:
return self.object_to_id_level_1[id(object)]
except KeyError:
return self.object_to_id_level_2[object]
def object_for(self, id):
return self.id_to_object[id]
def map_road(self, road):
self.segment_id = self.segment_id + 1
self.lane_id = 0
self._register(str(self.segment_id), road)
def start_street(self, street):
self.map_road(street)
def start_trunk(self, trunk):
self.map_road(trunk)
def start_lane(self, lane):
self.lane_id = self.lane_id + 1
rndf_lane_id = str(self.segment_id) + '.' + str(self.lane_id)
self._register(rndf_lane_id, lane)
self.waypoint_id = 0
for waypoint in lane.waypoints_for(PolylineGeometry):
self.waypoint_id = self.waypoint_id + 1
rndf_waypoint_id = rndf_lane_id + '.' + str(self.waypoint_id)
self._register(rndf_waypoint_id, waypoint)
def _register(self, rndf_id, object):
"""We do some caching by id, to avoid computing hashes if they are
expensive, but keep the hash-based dict as a fallback"""
self.object_to_id_level_1[id(object)] = rndf_id
self.object_to_id_level_2[object] = rndf_id
self.id_to_object[rndf_id] = object
| apache-2.0 | 4,116,744,476,842,900,000 | -7,295,405,211,093,735,000 | 35.418919 | 77 | 0.661224 | false |
Sveder/letsencrypt | letshelp-letsencrypt/docs/conf.py | 17 | 10359 | # -*- coding: utf-8 -*-
#
# letshelp-letsencrypt documentation build configuration file, created by
# sphinx-quickstart on Sun Oct 18 13:40:19 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
here = os.path.abspath(os.path.dirname(__file__))
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath(os.path.join(here, '..')))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.viewcode',
]
autodoc_member_order = 'bysource'
autodoc_default_flags = ['show-inheritance', 'private-members']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'letshelp-letsencrypt'
copyright = u'2014-2015, Let\'s Encrypt Project'
author = u'Let\'s Encrypt Project'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0'
# The full version, including alpha/beta/rc tags.
release = '0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = 'en'
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
default_role = 'py:obj'
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
# http://docs.readthedocs.org/en/latest/theme.html#how-do-i-use-this-locally-and-on-read-the-docs
# on_rtd is whether we are on readthedocs.org
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# otherwise, readthedocs.org uses their theme by default, so no need to specify it
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'letshelp-letsencryptdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'letshelp-letsencrypt.tex', u'letshelp-letsencrypt Documentation',
u'Let\'s Encrypt Project', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'letshelp-letsencrypt', u'letshelp-letsencrypt Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'letshelp-letsencrypt', u'letshelp-letsencrypt Documentation',
author, 'letshelp-letsencrypt', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
intersphinx_mapping = {
'python': ('https://docs.python.org/', None),
'acme': ('https://acme-python.readthedocs.org/en/latest/', None),
'letsencrypt': ('https://letsencrypt.readthedocs.org/en/latest/', None),
}
| apache-2.0 | -7,676,467,535,778,940,000 | -3,944,274,914,930,547,000 | 32.308682 | 97 | 0.705377 | false |
wkia/kodi-addon-repo | plugin.audio.openlast/default.py | 1 | 6672 | # -*- coding: utf-8 -*-
import os
import sys
import urllib
import urlparse
import xbmcaddon
import xbmcgui
import xbmcplugin
if sys.version_info < (2, 7):
import simplejson as json
else:
import json
from logging import log
from util import build_url
__addon__ = xbmcaddon.Addon()
#__addonid__ = __addon__.getAddonInfo('id')
#__settings__ = xbmcaddon.Addon(id='xbmc-vk.svoka.com')
#__language__ = __settings__.getLocalizedString
#LANGUAGE = __addon__.getLocalizedString
ADDONVERSION = __addon__.getAddonInfo('version')
CWD = __addon__.getAddonInfo('path').decode("utf-8")
log('start -----------------------------------------------------')
log('script version %s started' % ADDONVERSION)
#xbmc.log(str(sys.argv))
addonUrl = sys.argv[0]
addon_handle = int(sys.argv[1])
args = urlparse.parse_qs(sys.argv[2][1:])
#my_addon = xbmcaddon.Addon()
# lastfmUser = my_addon.getSetting('lastfm_username')
xbmcplugin.setContent(addon_handle, 'audio')
lastfmApi = 'http://ws.audioscrobbler.com/2.0/'
lastfmApiKey = '47608ece2138b2edae9538f83f703457' # TODO use Openlast key
lastfmAddon = None
lastfmUser = ''
try:
lastfmAddon = xbmcaddon.Addon('service.scrobbler.lastfm')
lastfmUser = lastfmAddon.getSetting('lastfmuser')
except RuntimeError:
pass
#xbmc.log(str(args))
action = args.get('action', None)
folder = args.get('folder', None)
#xbmc.log('openlast: folder=' + str(folder)) #, xbmc.LOGDEBUG)
#xbmc.log('openlast: action=' + str(action)) #, xbmc.LOGDEBUG)
if folder is None:
url = build_url(addonUrl, {'folder': 'similarArtist'})
li = xbmcgui.ListItem('Similar artist radio', iconImage='DefaultFolder.png')
xbmcplugin.addDirectoryItem(handle=addon_handle, url=url, listitem=li, isFolder=True)
if '' != lastfmUser:
url = build_url(addonUrl, {'folder': 'lastfm', 'username': lastfmUser})
# xbmc.log(url)
li = xbmcgui.ListItem('Personal radio for Last.fm user: ' + lastfmUser, iconImage='DefaultFolder.png')
xbmcplugin.addDirectoryItem(handle=addon_handle, url=url, listitem=li, isFolder=True)
url = build_url(addonUrl, {'folder': 'lastfm'})
li = xbmcgui.ListItem('Personal radio for Last.fm user...', iconImage='DefaultFolder.png')
xbmcplugin.addDirectoryItem(handle=addon_handle, url=url, listitem=li, isFolder=True)
xbmcplugin.endOfDirectory(addon_handle)
elif folder[0] == 'lastfm':
username = ''
if None != args.get('username'):
username = args.get('username')[0]
playcount = 0
if None != args.get('playcount'):
playcount = int(args.get('playcount')[0])
if username == '':
user_keyboard = xbmc.Keyboard()
user_keyboard.setHeading('Last.FM user name') # __language__(30001))
user_keyboard.setHiddenInput(False)
user_keyboard.setDefault(lastfmUser)
user_keyboard.doModal()
if user_keyboard.isConfirmed():
username = user_keyboard.getText()
else:
raise Exception("Login input was cancelled.")
if action is None:
url = build_url(lastfmApi, {'method': 'user.getInfo', 'user': username,
'format': 'json', 'api_key': lastfmApiKey})
reply = urllib.urlopen(url)
resp = json.load(reply)
if "error" in resp:
raise Exception("Error! DATA: " + str(resp))
else:
# xbmc.log(str(resp))
pass
playcount = int(resp['user']['playcount'])
img = resp['user']['image'][2]['#text']
if '' == img:
img = 'DefaultAudio.png'
url = build_url(addonUrl, {'folder': folder[0], 'action': 'lovedTracks', 'username': username})
li = xbmcgui.ListItem('Listen to loved tracks', iconImage=img)
xbmcplugin.addDirectoryItem(handle=addon_handle, url=url, listitem=li, isFolder=False)
url = build_url(addonUrl, {'folder': folder[0], 'action': 'topTracks', 'username': username, 'playcount': playcount})
li = xbmcgui.ListItem('Listen to track library', iconImage=img)
xbmcplugin.addDirectoryItem(handle=addon_handle, url=url, listitem=li, isFolder=False)
url = build_url(addonUrl, {'folder': folder[0], 'action': 'topArtists', 'username': username, 'playcount': playcount})
li = xbmcgui.ListItem('Listen to artist library', iconImage=img)
xbmcplugin.addDirectoryItem(handle=addon_handle, url=url, listitem=li, isFolder=False)
url = build_url(addonUrl, {'folder': folder[0], 'action': 'syncLibrary', 'username': username, 'playcount': playcount})
li = xbmcgui.ListItem('[EXPERIMENTAL] Syncronize library to folder', iconImage=img)
xbmcplugin.addDirectoryItem(handle=addon_handle, url=url, listitem=li, isFolder=False)
xbmcplugin.endOfDirectory(addon_handle)
elif action[0] == 'lovedTracks':
script = os.path.join(CWD, "run_app.py")
log('running script %s...' % script)
xbmc.executebuiltin('XBMC.RunScript(%s, %s, %s)' % (script, action[0], username))
elif action[0] == 'topTracks':
script = os.path.join(CWD, "run_app.py")
log('running script %s...' % script)
xbmc.executebuiltin('XBMC.RunScript(%s, %s, %s, %s)' % (script, action[0], username, playcount))
elif action[0] == 'topArtists':
script = os.path.join(CWD, "run_app.py")
log('running script %s...' % script)
xbmc.executebuiltin('XBMC.RunScript(%s, %s, %s, %s)' % (script, action[0], username, playcount))
elif action[0] == 'syncLibrary':
script = os.path.join(CWD, "run_app.py")
log('running script %s...' % script)
xbmc.executebuiltin('XBMC.RunScript(%s, %s, %s)' % (script, action[0], username))
elif folder[0] == 'similarArtist':
if action is None:
url = build_url(lastfmApi, {'method': 'chart.getTopArtists',
'format': 'json', 'api_key': lastfmApiKey})
reply = urllib.urlopen(url)
resp = json.load(reply)
if "error" in resp:
raise Exception("Error! DATA: " + str(resp))
else:
#log(str(resp))
pass
for a in resp['artists']['artist']:
url = build_url(addonUrl, {'folder': folder[0], 'action': a['name'].encode('utf-8')})
li = xbmcgui.ListItem(a['name'])
li.setArt({'icon': a['image'][2]['#text'], 'thumb': a['image'][2]['#text'], 'fanart': a['image'][4]['#text']})
xbmcplugin.addDirectoryItem(handle=addon_handle, url=url, listitem=li, isFolder=False)
pass
xbmcplugin.endOfDirectory(addon_handle)
log('end -----------------------------------------------------')
| gpl-2.0 | -5,934,199,267,723,458,000 | 6,255,379,584,336,286,000 | 37.566474 | 127 | 0.618855 | false |
jonathan-beard/edx-platform | cms/djangoapps/contentstore/views/export_git.py | 146 | 1723 | """
This views handles exporting the course xml to a git repository if
the giturl attribute is set.
"""
import logging
from django.contrib.auth.decorators import login_required
from django.core.exceptions import PermissionDenied
from django.views.decorators.csrf import ensure_csrf_cookie
from django.utils.translation import ugettext as _
from student.auth import has_course_author_access
import contentstore.git_export_utils as git_export_utils
from edxmako.shortcuts import render_to_response
from xmodule.modulestore.django import modulestore
from opaque_keys.edx.keys import CourseKey
log = logging.getLogger(__name__)
@ensure_csrf_cookie
@login_required
def export_git(request, course_key_string):
"""
This method serves up the 'Export to Git' page
"""
course_key = CourseKey.from_string(course_key_string)
if not has_course_author_access(request.user, course_key):
raise PermissionDenied()
course_module = modulestore().get_course(course_key)
failed = False
log.debug('export_git course_module=%s', course_module)
msg = ""
if 'action' in request.GET and course_module.giturl:
if request.GET['action'] == 'push':
try:
git_export_utils.export_to_git(
course_module.id,
course_module.giturl,
request.user,
)
msg = _('Course successfully exported to git repository')
except git_export_utils.GitExportError as ex:
failed = True
msg = unicode(ex)
return render_to_response('export_git.html', {
'context_course': course_module,
'msg': msg,
'failed': failed,
})
| agpl-3.0 | -7,636,087,755,991,685,000 | 2,931,846,731,444,684,300 | 30.327273 | 73 | 0.660476 | false |
JamesRaynor67/mptcp_with_machine_learning | src/config-store/bindings/modulegen__gcc_LP64.py | 4 | 68756 | from pybindgen import Module, FileCodeSink, param, retval, cppclass, typehandlers
import pybindgen.settings
import warnings
class ErrorHandler(pybindgen.settings.ErrorHandler):
    """Permissive pybindgen error handler for generated bindings.

    pybindgen calls handle_error() whenever generating a single wrapper
    raises.  Returning True marks the failure as handled, so code
    generation keeps going and merely skips the offending wrapper
    instead of aborting the whole module build.
    """
    def handle_error(self, wrapper, exception, traceback_):
        # Surface the failure as a warning rather than an error.
        message = "exception %r in wrapper %s" % (exception, wrapper)
        warnings.warn(message)
        return True
# Install the permissive handler globally: pybindgen will warn about (and
# skip) any wrapper it fails to generate instead of raising.
pybindgen.settings.error_handler = ErrorHandler()
import sys
def module_init():
    """Create the root pybindgen Module for the ns-3 config-store bindings.

    Returns:
        Module: the top-level 'ns.config_store' module; all wrapped C++
        entities are declared inside the ::ns3 namespace.
    """
    return Module('ns.config_store', cpp_namespace='::ns3')
def register_types(module):
    """Register every C++ type exposed by the config-store bindings.

    Adds class and enum wrappers to ``module``; types that actually live
    in the core module are marked with ``import_from_module='ns.core'``.
    Order matters: a parent class must already be registered before it is
    looked up via ``root_module[...]``.  Finally recurses into the nested
    ``FatalImpl`` and ``Hash`` namespaces.
    """
    root_module = module.get_root()
    ## callback.h (module 'core'): ns3::CallbackBase [class]
    module.add_class('CallbackBase', import_from_module='ns.core')
    ## file-config.h (module 'config-store'): ns3::FileConfig [class]
    module.add_class('FileConfig', allow_subclassing=True)
    ## hash.h (module 'core'): ns3::Hasher [class]
    module.add_class('Hasher', import_from_module='ns.core')
    ## file-config.h (module 'config-store'): ns3::NoneFileConfig [class]
    module.add_class('NoneFileConfig', parent=root_module['ns3::FileConfig'])
    ## object-base.h (module 'core'): ns3::ObjectBase [class]
    module.add_class('ObjectBase', allow_subclassing=True, import_from_module='ns.core')
    ## type-id.h (module 'core'): ns3::TypeId [class]
    module.add_class('TypeId', import_from_module='ns.core')
    ## type-id.h (module 'core'): ns3::TypeId::AttributeFlag [enumeration]
    module.add_enum('AttributeFlag', ['ATTR_GET', 'ATTR_SET', 'ATTR_CONSTRUCT', 'ATTR_SGC'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core')
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation [struct]
    module.add_class('AttributeInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation [struct]
    module.add_class('TraceSourceInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
    ## empty.h (module 'core'): ns3::empty [class]
    module.add_class('empty', import_from_module='ns.core')
    ## config-store.h (module 'config-store'): ns3::ConfigStore [class]
    module.add_class('ConfigStore', parent=root_module['ns3::ObjectBase'])
    ## config-store.h (module 'config-store'): ns3::ConfigStore::Mode [enumeration]
    module.add_enum('Mode', ['LOAD', 'SAVE', 'NONE'], outer_class=root_module['ns3::ConfigStore'])
    ## config-store.h (module 'config-store'): ns3::ConfigStore::FileFormat [enumeration]
    module.add_enum('FileFormat', ['XML', 'RAW_TEXT'], outer_class=root_module['ns3::ConfigStore'])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeChecker', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeChecker>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeValue', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeValue>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::CallbackImplBase', 'ns3::empty', 'ns3::DefaultDeleter<ns3::CallbackImplBase>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Hash::Implementation', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Hash::Implementation>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::TraceSourceAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::TraceSourceAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor [class]
    module.add_class('TraceSourceAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
    ## attribute.h (module 'core'): ns3::AttributeAccessor [class]
    module.add_class('AttributeAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
    ## attribute.h (module 'core'): ns3::AttributeChecker [class]
    module.add_class('AttributeChecker', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
    ## attribute.h (module 'core'): ns3::AttributeValue [class]
    module.add_class('AttributeValue', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
    ## callback.h (module 'core'): ns3::CallbackChecker [class]
    module.add_class('CallbackChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
    ## callback.h (module 'core'): ns3::CallbackImplBase [class]
    module.add_class('CallbackImplBase', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
    ## callback.h (module 'core'): ns3::CallbackValue [class]
    module.add_class('CallbackValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    ## attribute.h (module 'core'): ns3::EmptyAttributeValue [class]
    module.add_class('EmptyAttributeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    ## type-id.h (module 'core'): ns3::TypeIdChecker [class]
    module.add_class('TypeIdChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
    ## type-id.h (module 'core'): ns3::TypeIdValue [class]
    module.add_class('TypeIdValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    ## Register a nested module for the namespace FatalImpl
    nested_module = module.add_cpp_namespace('FatalImpl')
    register_types_ns3_FatalImpl(nested_module)
    ## Register a nested module for the namespace Hash
    nested_module = module.add_cpp_namespace('Hash')
    register_types_ns3_Hash(nested_module)
def register_types_ns3_FatalImpl(module):
    """Register types for the nested ns3::FatalImpl namespace.

    No types are currently exported from this namespace; the generator
    template still fetches the root module, so that call is kept.
    """
    _ = module.get_root()
def register_types_ns3_Hash(module):
    """Register types for the nested ns3::Hash namespace.

    Adds the ``Hash::Implementation`` base class, declares type aliases
    for the raw 32/64-bit hash function pointer typedefs, and recurses
    into the nested ``Hash::Function`` namespace.
    """
    root_module = module.get_root()
    ## hash-function.h (module 'core'): ns3::Hash::Implementation [class]
    module.add_class('Implementation', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >'])
    typehandlers.add_type_alias(u'uint32_t ( * ) ( char const *, size_t ) *', u'ns3::Hash::Hash32Function_ptr')
    typehandlers.add_type_alias(u'uint32_t ( * ) ( char const *, size_t ) **', u'ns3::Hash::Hash32Function_ptr*')
    typehandlers.add_type_alias(u'uint32_t ( * ) ( char const *, size_t ) *&', u'ns3::Hash::Hash32Function_ptr&')
    typehandlers.add_type_alias(u'uint64_t ( * ) ( char const *, size_t ) *', u'ns3::Hash::Hash64Function_ptr')
    typehandlers.add_type_alias(u'uint64_t ( * ) ( char const *, size_t ) **', u'ns3::Hash::Hash64Function_ptr*')
    typehandlers.add_type_alias(u'uint64_t ( * ) ( char const *, size_t ) *&', u'ns3::Hash::Hash64Function_ptr&')
    ## Register a nested module for the namespace Function
    nested_module = module.add_cpp_namespace('Function')
    register_types_ns3_Hash_Function(nested_module)
def register_types_ns3_Hash_Function(module):
    """Register the concrete hash implementations in ns3::Hash::Function.

    Each class derives from the previously registered
    ``ns3::Hash::Implementation`` base.
    """
    root_module = module.get_root()
    ## hash-fnv.h (module 'core'): ns3::Hash::Function::Fnv1a [class]
    module.add_class('Fnv1a', import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation'])
    ## hash-function.h (module 'core'): ns3::Hash::Function::Hash32 [class]
    module.add_class('Hash32', import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation'])
    ## hash-function.h (module 'core'): ns3::Hash::Function::Hash64 [class]
    module.add_class('Hash64', import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation'])
    ## hash-murmur3.h (module 'core'): ns3::Hash::Function::Murmur3 [class]
    module.add_class('Murmur3', import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation'])
def register_methods(root_module):
    """Attach constructors/methods to every class registered in register_types.

    Dispatches to one ``register_Ns3*_methods`` helper per wrapped class;
    each helper receives the root module and the class wrapper it fills in.
    """
    register_Ns3CallbackBase_methods(root_module, root_module['ns3::CallbackBase'])
    register_Ns3FileConfig_methods(root_module, root_module['ns3::FileConfig'])
    register_Ns3Hasher_methods(root_module, root_module['ns3::Hasher'])
    register_Ns3NoneFileConfig_methods(root_module, root_module['ns3::NoneFileConfig'])
    register_Ns3ObjectBase_methods(root_module, root_module['ns3::ObjectBase'])
    register_Ns3TypeId_methods(root_module, root_module['ns3::TypeId'])
    register_Ns3TypeIdAttributeInformation_methods(root_module, root_module['ns3::TypeId::AttributeInformation'])
    register_Ns3TypeIdTraceSourceInformation_methods(root_module, root_module['ns3::TypeId::TraceSourceInformation'])
    register_Ns3Empty_methods(root_module, root_module['ns3::empty'])
    register_Ns3ConfigStore_methods(root_module, root_module['ns3::ConfigStore'])
    register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
    register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
    register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
    register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
    register_Ns3SimpleRefCount__Ns3HashImplementation_Ns3Empty_Ns3DefaultDeleter__lt__ns3HashImplementation__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >'])
    register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
    register_Ns3TraceSourceAccessor_methods(root_module, root_module['ns3::TraceSourceAccessor'])
    register_Ns3AttributeAccessor_methods(root_module, root_module['ns3::AttributeAccessor'])
    register_Ns3AttributeChecker_methods(root_module, root_module['ns3::AttributeChecker'])
    register_Ns3AttributeValue_methods(root_module, root_module['ns3::AttributeValue'])
    register_Ns3CallbackChecker_methods(root_module, root_module['ns3::CallbackChecker'])
    register_Ns3CallbackImplBase_methods(root_module, root_module['ns3::CallbackImplBase'])
    register_Ns3CallbackValue_methods(root_module, root_module['ns3::CallbackValue'])
    register_Ns3EmptyAttributeValue_methods(root_module, root_module['ns3::EmptyAttributeValue'])
    register_Ns3TypeIdChecker_methods(root_module, root_module['ns3::TypeIdChecker'])
    register_Ns3TypeIdValue_methods(root_module, root_module['ns3::TypeIdValue'])
    register_Ns3HashImplementation_methods(root_module, root_module['ns3::Hash::Implementation'])
    register_Ns3HashFunctionFnv1a_methods(root_module, root_module['ns3::Hash::Function::Fnv1a'])
    register_Ns3HashFunctionHash32_methods(root_module, root_module['ns3::Hash::Function::Hash32'])
    register_Ns3HashFunctionHash64_methods(root_module, root_module['ns3::Hash::Function::Hash64'])
    register_Ns3HashFunctionMurmur3_methods(root_module, root_module['ns3::Hash::Function::Murmur3'])
    return
def register_Ns3CallbackBase_methods(root_module, cls):
    """Register constructors and methods of ns3::CallbackBase on *cls*."""
    ## callback.h (module 'core'): ns3::CallbackBase::CallbackBase(ns3::CallbackBase const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::CallbackBase const &', 'arg0')])
    ## callback.h (module 'core'): ns3::CallbackBase::CallbackBase() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::Ptr<ns3::CallbackImplBase> ns3::CallbackBase::GetImpl() const [member function]
    cls.add_method('GetImpl',
                   'ns3::Ptr< ns3::CallbackImplBase >',
                   [],
                   is_const=True)
    ## callback.h (module 'core'): ns3::CallbackBase::CallbackBase(ns3::Ptr<ns3::CallbackImplBase> impl) [constructor]
    # Protected in C++, hence the explicit visibility marker.
    cls.add_constructor([param('ns3::Ptr< ns3::CallbackImplBase >', 'impl')],
                        visibility='protected')
    return
def register_Ns3FileConfig_methods(root_module, cls):
    """Register constructors and methods of ns3::FileConfig on *cls*.

    All four member functions are pure virtual in C++, so each is marked
    ``is_pure_virtual=True`` to let Python subclasses override them.
    """
    ## file-config.h (module 'config-store'): ns3::FileConfig::FileConfig() [constructor]
    cls.add_constructor([])
    ## file-config.h (module 'config-store'): ns3::FileConfig::FileConfig(ns3::FileConfig const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::FileConfig const &', 'arg0')])
    ## file-config.h (module 'config-store'): void ns3::FileConfig::Attributes() [member function]
    cls.add_method('Attributes',
                   'void',
                   [],
                   is_pure_virtual=True, is_virtual=True)
    ## file-config.h (module 'config-store'): void ns3::FileConfig::Default() [member function]
    cls.add_method('Default',
                   'void',
                   [],
                   is_pure_virtual=True, is_virtual=True)
    ## file-config.h (module 'config-store'): void ns3::FileConfig::Global() [member function]
    cls.add_method('Global',
                   'void',
                   [],
                   is_pure_virtual=True, is_virtual=True)
    ## file-config.h (module 'config-store'): void ns3::FileConfig::SetFilename(std::string filename) [member function]
    cls.add_method('SetFilename',
                   'void',
                   [param('std::string', 'filename')],
                   is_pure_virtual=True, is_virtual=True)
    return
def register_Ns3Hasher_methods(root_module, cls):
    """Register constructors and methods of ns3::Hasher on *cls*.

    ``GetHash32``/``GetHash64`` each have two C++ overloads (raw buffer
    and std::string), registered as separate ``add_method`` calls.
    """
    ## hash.h (module 'core'): ns3::Hasher::Hasher(ns3::Hasher const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Hasher const &', 'arg0')])
    ## hash.h (module 'core'): ns3::Hasher::Hasher() [constructor]
    cls.add_constructor([])
    ## hash.h (module 'core'): ns3::Hasher::Hasher(ns3::Ptr<ns3::Hash::Implementation> hp) [constructor]
    cls.add_constructor([param('ns3::Ptr< ns3::Hash::Implementation >', 'hp')])
    ## hash.h (module 'core'): uint32_t ns3::Hasher::GetHash32(char const * buffer, size_t const size) [member function]
    cls.add_method('GetHash32',
                   'uint32_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')])
    ## hash.h (module 'core'): uint32_t ns3::Hasher::GetHash32(std::string const s) [member function]
    cls.add_method('GetHash32',
                   'uint32_t',
                   [param('std::string const', 's')])
    ## hash.h (module 'core'): uint64_t ns3::Hasher::GetHash64(char const * buffer, size_t const size) [member function]
    cls.add_method('GetHash64',
                   'uint64_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')])
    ## hash.h (module 'core'): uint64_t ns3::Hasher::GetHash64(std::string const s) [member function]
    cls.add_method('GetHash64',
                   'uint64_t',
                   [param('std::string const', 's')])
    ## hash.h (module 'core'): ns3::Hasher & ns3::Hasher::clear() [member function]
    cls.add_method('clear',
                   'ns3::Hasher &',
                   [])
    return
def register_Ns3NoneFileConfig_methods(root_module, cls):
    """Register constructors and methods of ns3::NoneFileConfig on *cls*.

    Overrides the pure-virtual FileConfig interface, so the methods are
    ``is_virtual=True`` but no longer pure.
    """
    ## file-config.h (module 'config-store'): ns3::NoneFileConfig::NoneFileConfig(ns3::NoneFileConfig const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::NoneFileConfig const &', 'arg0')])
    ## file-config.h (module 'config-store'): ns3::NoneFileConfig::NoneFileConfig() [constructor]
    cls.add_constructor([])
    ## file-config.h (module 'config-store'): void ns3::NoneFileConfig::Attributes() [member function]
    cls.add_method('Attributes',
                   'void',
                   [],
                   is_virtual=True)
    ## file-config.h (module 'config-store'): void ns3::NoneFileConfig::Default() [member function]
    cls.add_method('Default',
                   'void',
                   [],
                   is_virtual=True)
    ## file-config.h (module 'config-store'): void ns3::NoneFileConfig::Global() [member function]
    cls.add_method('Global',
                   'void',
                   [],
                   is_virtual=True)
    ## file-config.h (module 'config-store'): void ns3::NoneFileConfig::SetFilename(std::string filename) [member function]
    cls.add_method('SetFilename',
                   'void',
                   [param('std::string', 'filename')],
                   is_virtual=True)
    return
def register_Ns3ObjectBase_methods(root_module, cls):
    """Register constructors and methods of ns3::ObjectBase on *cls*.

    Covers the attribute get/set API, the trace connect/disconnect API,
    and the protected construction hooks (``ConstructSelf`` /
    ``NotifyConstructionCompleted``).
    """
    ## object-base.h (module 'core'): ns3::ObjectBase::ObjectBase() [constructor]
    cls.add_constructor([])
    ## object-base.h (module 'core'): ns3::ObjectBase::ObjectBase(ns3::ObjectBase const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::ObjectBase const &', 'arg0')])
    ## object-base.h (module 'core'): void ns3::ObjectBase::GetAttribute(std::string name, ns3::AttributeValue & value) const [member function]
    cls.add_method('GetAttribute',
                   'void',
                   [param('std::string', 'name'), param('ns3::AttributeValue &', 'value')],
                   is_const=True)
    ## object-base.h (module 'core'): bool ns3::ObjectBase::GetAttributeFailSafe(std::string name, ns3::AttributeValue & value) const [member function]
    cls.add_method('GetAttributeFailSafe',
                   'bool',
                   [param('std::string', 'name'), param('ns3::AttributeValue &', 'value')],
                   is_const=True)
    ## object-base.h (module 'core'): ns3::TypeId ns3::ObjectBase::GetInstanceTypeId() const [member function]
    cls.add_method('GetInstanceTypeId',
                   'ns3::TypeId',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## object-base.h (module 'core'): static ns3::TypeId ns3::ObjectBase::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## object-base.h (module 'core'): void ns3::ObjectBase::SetAttribute(std::string name, ns3::AttributeValue const & value) [member function]
    cls.add_method('SetAttribute',
                   'void',
                   [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
    ## object-base.h (module 'core'): bool ns3::ObjectBase::SetAttributeFailSafe(std::string name, ns3::AttributeValue const & value) [member function]
    cls.add_method('SetAttributeFailSafe',
                   'bool',
                   [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
    ## object-base.h (module 'core'): bool ns3::ObjectBase::TraceConnect(std::string name, std::string context, ns3::CallbackBase const & cb) [member function]
    cls.add_method('TraceConnect',
                   'bool',
                   [param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')])
    ## object-base.h (module 'core'): bool ns3::ObjectBase::TraceConnectWithoutContext(std::string name, ns3::CallbackBase const & cb) [member function]
    cls.add_method('TraceConnectWithoutContext',
                   'bool',
                   [param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
    ## object-base.h (module 'core'): bool ns3::ObjectBase::TraceDisconnect(std::string name, std::string context, ns3::CallbackBase const & cb) [member function]
    cls.add_method('TraceDisconnect',
                   'bool',
                   [param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')])
    ## object-base.h (module 'core'): bool ns3::ObjectBase::TraceDisconnectWithoutContext(std::string name, ns3::CallbackBase const & cb) [member function]
    cls.add_method('TraceDisconnectWithoutContext',
                   'bool',
                   [param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
    ## object-base.h (module 'core'): void ns3::ObjectBase::ConstructSelf(ns3::AttributeConstructionList const & attributes) [member function]
    cls.add_method('ConstructSelf',
                   'void',
                   [param('ns3::AttributeConstructionList const &', 'attributes')],
                   visibility='protected')
    ## object-base.h (module 'core'): void ns3::ObjectBase::NotifyConstructionCompleted() [member function]
    cls.add_method('NotifyConstructionCompleted',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    return
def register_Ns3TypeId_methods(root_module, cls):
    """Register operators, constructors and methods of ns3::TypeId on *cls*.

    Adds the comparison/output-stream operators first, then constructors,
    then the full TypeId API (attribute/trace-source registration, lookups,
    and static registry accessors).  One ``AddTraceSource`` overload is
    marked ``deprecated=True`` to mirror the C++ header.
    """
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    ## type-id.h (module 'core'): ns3::TypeId::TypeId(char const * name) [constructor]
    cls.add_constructor([param('char const *', 'name')])
    ## type-id.h (module 'core'): ns3::TypeId::TypeId() [constructor]
    cls.add_constructor([])
    ## type-id.h (module 'core'): ns3::TypeId::TypeId(ns3::TypeId const & o) [copy constructor]
    cls.add_constructor([param('ns3::TypeId const &', 'o')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddAttribute(std::string name, std::string help, ns3::AttributeValue const & initialValue, ns3::Ptr<ns3::AttributeAccessor const> accessor, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('AddAttribute',
                   'ns3::TypeId',
                   [param('std::string', 'name'), param('std::string', 'help'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddAttribute(std::string name, std::string help, uint32_t flags, ns3::AttributeValue const & initialValue, ns3::Ptr<ns3::AttributeAccessor const> accessor, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('AddAttribute',
                   'ns3::TypeId',
                   [param('std::string', 'name'), param('std::string', 'help'), param('uint32_t', 'flags'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddTraceSource(std::string name, std::string help, ns3::Ptr<ns3::TraceSourceAccessor const> accessor) [member function]
    cls.add_method('AddTraceSource',
                   'ns3::TypeId',
                   [param('std::string', 'name'), param('std::string', 'help'), param('ns3::Ptr< ns3::TraceSourceAccessor const >', 'accessor')],
                   deprecated=True)
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddTraceSource(std::string name, std::string help, ns3::Ptr<ns3::TraceSourceAccessor const> accessor, std::string callback) [member function]
    cls.add_method('AddTraceSource',
                   'ns3::TypeId',
                   [param('std::string', 'name'), param('std::string', 'help'), param('ns3::Ptr< ns3::TraceSourceAccessor const >', 'accessor'), param('std::string', 'callback')])
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation ns3::TypeId::GetAttribute(uint32_t i) const [member function]
    cls.add_method('GetAttribute',
                   'ns3::TypeId::AttributeInformation',
                   [param('uint32_t', 'i')],
                   is_const=True)
    ## type-id.h (module 'core'): std::string ns3::TypeId::GetAttributeFullName(uint32_t i) const [member function]
    cls.add_method('GetAttributeFullName',
                   'std::string',
                   [param('uint32_t', 'i')],
                   is_const=True)
    ## type-id.h (module 'core'): uint32_t ns3::TypeId::GetAttributeN() const [member function]
    cls.add_method('GetAttributeN',
                   'uint32_t',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): ns3::Callback<ns3::ObjectBase*,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> ns3::TypeId::GetConstructor() const [member function]
    cls.add_method('GetConstructor',
                   'ns3::Callback< ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): std::string ns3::TypeId::GetGroupName() const [member function]
    cls.add_method('GetGroupName',
                   'std::string',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): uint32_t ns3::TypeId::GetHash() const [member function]
    cls.add_method('GetHash',
                   'uint32_t',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): std::string ns3::TypeId::GetName() const [member function]
    cls.add_method('GetName',
                   'std::string',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::GetParent() const [member function]
    cls.add_method('GetParent',
                   'ns3::TypeId',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::GetRegistered(uint32_t i) [member function]
    cls.add_method('GetRegistered',
                   'ns3::TypeId',
                   [param('uint32_t', 'i')],
                   is_static=True)
    ## type-id.h (module 'core'): static uint32_t ns3::TypeId::GetRegisteredN() [member function]
    cls.add_method('GetRegisteredN',
                   'uint32_t',
                   [],
                   is_static=True)
    ## type-id.h (module 'core'): std::size_t ns3::TypeId::GetSize() const [member function]
    cls.add_method('GetSize',
                   'std::size_t',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation ns3::TypeId::GetTraceSource(uint32_t i) const [member function]
    cls.add_method('GetTraceSource',
                   'ns3::TypeId::TraceSourceInformation',
                   [param('uint32_t', 'i')],
                   is_const=True)
    ## type-id.h (module 'core'): uint32_t ns3::TypeId::GetTraceSourceN() const [member function]
    cls.add_method('GetTraceSourceN',
                   'uint32_t',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): uint16_t ns3::TypeId::GetUid() const [member function]
    cls.add_method('GetUid',
                   'uint16_t',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): bool ns3::TypeId::HasConstructor() const [member function]
    cls.add_method('HasConstructor',
                   'bool',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): bool ns3::TypeId::HasParent() const [member function]
    cls.add_method('HasParent',
                   'bool',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::HideFromDocumentation() [member function]
    cls.add_method('HideFromDocumentation',
                   'ns3::TypeId',
                   [])
    ## type-id.h (module 'core'): bool ns3::TypeId::IsChildOf(ns3::TypeId other) const [member function]
    cls.add_method('IsChildOf',
                   'bool',
                   [param('ns3::TypeId', 'other')],
                   is_const=True)
    ## type-id.h (module 'core'): bool ns3::TypeId::LookupAttributeByName(std::string name, ns3::TypeId::AttributeInformation * info) const [member function]
    cls.add_method('LookupAttributeByName',
                   'bool',
                   [param('std::string', 'name'), param('ns3::TypeId::AttributeInformation *', 'info', transfer_ownership=False)],
                   is_const=True)
    ## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::LookupByHash(uint32_t hash) [member function]
    cls.add_method('LookupByHash',
                   'ns3::TypeId',
                   [param('uint32_t', 'hash')],
                   is_static=True)
    ## type-id.h (module 'core'): static bool ns3::TypeId::LookupByHashFailSafe(uint32_t hash, ns3::TypeId * tid) [member function]
    cls.add_method('LookupByHashFailSafe',
                   'bool',
                   [param('uint32_t', 'hash'), param('ns3::TypeId *', 'tid')],
                   is_static=True)
    ## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::LookupByName(std::string name) [member function]
    cls.add_method('LookupByName',
                   'ns3::TypeId',
                   [param('std::string', 'name')],
                   is_static=True)
    ## type-id.h (module 'core'): ns3::Ptr<ns3::TraceSourceAccessor const> ns3::TypeId::LookupTraceSourceByName(std::string name) const [member function]
    cls.add_method('LookupTraceSourceByName',
                   'ns3::Ptr< ns3::TraceSourceAccessor const >',
                   [param('std::string', 'name')],
                   is_const=True)
    ## type-id.h (module 'core'): bool ns3::TypeId::MustHideFromDocumentation() const [member function]
    cls.add_method('MustHideFromDocumentation',
                   'bool',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): bool ns3::TypeId::SetAttributeInitialValue(uint32_t i, ns3::Ptr<ns3::AttributeValue const> initialValue) [member function]
    cls.add_method('SetAttributeInitialValue',
                   'bool',
                   [param('uint32_t', 'i'), param('ns3::Ptr< ns3::AttributeValue const >', 'initialValue')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetGroupName(std::string groupName) [member function]
    cls.add_method('SetGroupName',
                   'ns3::TypeId',
                   [param('std::string', 'groupName')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetParent(ns3::TypeId tid) [member function]
    cls.add_method('SetParent',
                   'ns3::TypeId',
                   [param('ns3::TypeId', 'tid')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetSize(std::size_t size) [member function]
    cls.add_method('SetSize',
                   'ns3::TypeId',
                   [param('std::size_t', 'size')])
    ## type-id.h (module 'core'): void ns3::TypeId::SetUid(uint16_t tid) [member function]
    cls.add_method('SetUid',
                   'void',
                   [param('uint16_t', 'tid')])
    return
def register_Ns3TypeIdAttributeInformation_methods(root_module, cls):
    """Register ns3::TypeId::AttributeInformation on *cls*.

    The struct exposes plain data members, wrapped as instance attributes.
    """
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::AttributeInformation() [constructor]
    cls.add_constructor([])
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::AttributeInformation(ns3::TypeId::AttributeInformation const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::TypeId::AttributeInformation const &', 'arg0')])
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::accessor [variable]
    cls.add_instance_attribute('accessor', 'ns3::Ptr< ns3::AttributeAccessor const >', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::checker [variable]
    cls.add_instance_attribute('checker', 'ns3::Ptr< ns3::AttributeChecker const >', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::flags [variable]
    cls.add_instance_attribute('flags', 'uint32_t', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::help [variable]
    cls.add_instance_attribute('help', 'std::string', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::initialValue [variable]
    cls.add_instance_attribute('initialValue', 'ns3::Ptr< ns3::AttributeValue const >', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::name [variable]
    cls.add_instance_attribute('name', 'std::string', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::originalInitialValue [variable]
    cls.add_instance_attribute('originalInitialValue', 'ns3::Ptr< ns3::AttributeValue const >', is_const=False)
    return
def register_Ns3TypeIdTraceSourceInformation_methods(root_module, cls):
    """Register ns3::TypeId::TraceSourceInformation on *cls*.

    The struct exposes plain data members, wrapped as instance attributes.
    """
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::TraceSourceInformation() [constructor]
    cls.add_constructor([])
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::TraceSourceInformation(ns3::TypeId::TraceSourceInformation const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::TypeId::TraceSourceInformation const &', 'arg0')])
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::accessor [variable]
    cls.add_instance_attribute('accessor', 'ns3::Ptr< ns3::TraceSourceAccessor const >', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::callback [variable]
    cls.add_instance_attribute('callback', 'std::string', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::help [variable]
    cls.add_instance_attribute('help', 'std::string', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::name [variable]
    cls.add_instance_attribute('name', 'std::string', is_const=False)
    return
def register_Ns3Empty_methods(root_module, cls):
    """Register bindings for ns3::empty (empty.h, module 'core')."""
    # The class exposes nothing beyond its default and copy constructors.
    for ctor_args in ([], [param('ns3::empty const &', 'arg0')]):
        cls.add_constructor(ctor_args)
    return
def register_Ns3ConfigStore_methods(root_module, cls):
    """Register constructor and method bindings for ns3::ConfigStore
    (config-store.h, module 'config-store') on the binding class `cls`.
    """
    ## config-store.h (module 'config-store'): ns3::ConfigStore::ConfigStore(ns3::ConfigStore const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::ConfigStore const &', 'arg0')])
    ## config-store.h (module 'config-store'): ns3::ConfigStore::ConfigStore() [constructor]
    cls.add_constructor([])
    ## config-store.h (module 'config-store'): void ns3::ConfigStore::ConfigureAttributes() [member function]
    cls.add_method('ConfigureAttributes',
                   'void',
                   [])
    ## config-store.h (module 'config-store'): void ns3::ConfigStore::ConfigureDefaults() [member function]
    cls.add_method('ConfigureDefaults',
                   'void',
                   [])
    ## config-store.h (module 'config-store'): ns3::TypeId ns3::ConfigStore::GetInstanceTypeId() const [member function]
    cls.add_method('GetInstanceTypeId',
                   'ns3::TypeId',
                   [],
                   is_const=True, is_virtual=True)
    ## config-store.h (module 'config-store'): static ns3::TypeId ns3::ConfigStore::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## config-store.h (module 'config-store'): void ns3::ConfigStore::SetFileFormat(ns3::ConfigStore::FileFormat format) [member function]
    cls.add_method('SetFileFormat',
                   'void',
                   [param('ns3::ConfigStore::FileFormat', 'format')])
    ## config-store.h (module 'config-store'): void ns3::ConfigStore::SetFilename(std::string filename) [member function]
    cls.add_method('SetFilename',
                   'void',
                   [param('std::string', 'filename')])
    ## config-store.h (module 'config-store'): void ns3::ConfigStore::SetMode(ns3::ConfigStore::Mode mode) [member function]
    cls.add_method('SetMode',
                   'void',
                   [param('ns3::ConfigStore::Mode', 'mode')])
    return
def register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, cls):
    """Register bindings for the SimpleRefCount<ns3::AttributeAccessor, ...> base
    (simple-ref-count.h, module 'core'): both constructors and static Cleanup().
    """
    cls.add_constructor([])
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter< ns3::AttributeAccessor > > const &', 'o')])
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return
def register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, cls):
    """Register bindings for the SimpleRefCount<ns3::AttributeChecker, ...> base
    (simple-ref-count.h, module 'core'): both constructors and static Cleanup().
    """
    cls.add_constructor([])
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter< ns3::AttributeChecker > > const &', 'o')])
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return
def register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, cls):
    """Register bindings for the SimpleRefCount<ns3::AttributeValue, ...> base
    (simple-ref-count.h, module 'core'): both constructors and static Cleanup().
    """
    cls.add_constructor([])
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter< ns3::AttributeValue > > const &', 'o')])
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return
def register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, cls):
    """Register bindings for the SimpleRefCount<ns3::CallbackImplBase, ...> base
    (simple-ref-count.h, module 'core'): both constructors and static Cleanup().
    """
    cls.add_constructor([])
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter< ns3::CallbackImplBase > > const &', 'o')])
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return
def register_Ns3SimpleRefCount__Ns3HashImplementation_Ns3Empty_Ns3DefaultDeleter__lt__ns3HashImplementation__gt___methods(root_module, cls):
    """Register bindings for the SimpleRefCount<ns3::Hash::Implementation, ...> base
    (simple-ref-count.h, module 'core'): both constructors and static Cleanup().
    """
    cls.add_constructor([])
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter< ns3::Hash::Implementation > > const &', 'o')])
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return
def register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, cls):
    """Register bindings for the SimpleRefCount<ns3::TraceSourceAccessor, ...> base
    (simple-ref-count.h, module 'core'): both constructors and static Cleanup().
    """
    cls.add_constructor([])
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter< ns3::TraceSourceAccessor > > const &', 'o')])
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return
def register_Ns3TraceSourceAccessor_methods(root_module, cls):
    """Register bindings for ns3::TraceSourceAccessor (trace-source-accessor.h,
    module 'core'). All four (dis)connect methods are registered as pure
    virtual const member functions.
    """
    ## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor::TraceSourceAccessor(ns3::TraceSourceAccessor const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::TraceSourceAccessor const &', 'arg0')])
    ## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor::TraceSourceAccessor() [constructor]
    cls.add_constructor([])
    ## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::Connect(ns3::ObjectBase * obj, std::string context, ns3::CallbackBase const & cb) const [member function]
    cls.add_method('Connect',
                   'bool',
                   [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::ConnectWithoutContext(ns3::ObjectBase * obj, ns3::CallbackBase const & cb) const [member function]
    cls.add_method('ConnectWithoutContext',
                   'bool',
                   [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::Disconnect(ns3::ObjectBase * obj, std::string context, ns3::CallbackBase const & cb) const [member function]
    cls.add_method('Disconnect',
                   'bool',
                   [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::DisconnectWithoutContext(ns3::ObjectBase * obj, ns3::CallbackBase const & cb) const [member function]
    cls.add_method('DisconnectWithoutContext',
                   'bool',
                   [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3AttributeAccessor_methods(root_module, cls):
    """Register bindings for the abstract ns3::AttributeAccessor (attribute.h,
    module 'core'); every member function here is pure virtual and const.
    """
    ## attribute.h (module 'core'): ns3::AttributeAccessor::AttributeAccessor(ns3::AttributeAccessor const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::AttributeAccessor const &', 'arg0')])
    ## attribute.h (module 'core'): ns3::AttributeAccessor::AttributeAccessor() [constructor]
    cls.add_constructor([])
    ## attribute.h (module 'core'): bool ns3::AttributeAccessor::Get(ns3::ObjectBase const * object, ns3::AttributeValue & attribute) const [member function]
    cls.add_method('Get',
                   'bool',
                   [param('ns3::ObjectBase const *', 'object'), param('ns3::AttributeValue &', 'attribute')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeAccessor::HasGetter() const [member function]
    cls.add_method('HasGetter',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeAccessor::HasSetter() const [member function]
    cls.add_method('HasSetter',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeAccessor::Set(ns3::ObjectBase * object, ns3::AttributeValue const & value) const [member function]
    cls.add_method('Set',
                   'bool',
                   [param('ns3::ObjectBase *', 'object', transfer_ownership=False), param('ns3::AttributeValue const &', 'value')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3AttributeChecker_methods(root_module, cls):
    """Register bindings for the abstract ns3::AttributeChecker (attribute.h,
    module 'core'). Most members are pure virtual; CreateValidValue is the
    only concrete (non-virtual, const) method registered here.
    """
    ## attribute.h (module 'core'): ns3::AttributeChecker::AttributeChecker(ns3::AttributeChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::AttributeChecker const &', 'arg0')])
    ## attribute.h (module 'core'): ns3::AttributeChecker::AttributeChecker() [constructor]
    cls.add_constructor([])
    ## attribute.h (module 'core'): bool ns3::AttributeChecker::Check(ns3::AttributeValue const & value) const [member function]
    cls.add_method('Check',
                   'bool',
                   [param('ns3::AttributeValue const &', 'value')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeChecker::Copy(ns3::AttributeValue const & source, ns3::AttributeValue & destination) const [member function]
    cls.add_method('Copy',
                   'bool',
                   [param('ns3::AttributeValue const &', 'source'), param('ns3::AttributeValue &', 'destination')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeChecker::Create() const [member function]
    cls.add_method('Create',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeChecker::CreateValidValue(ns3::AttributeValue const & value) const [member function]
    cls.add_method('CreateValidValue',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [param('ns3::AttributeValue const &', 'value')],
                   is_const=True)
    ## attribute.h (module 'core'): std::string ns3::AttributeChecker::GetUnderlyingTypeInformation() const [member function]
    cls.add_method('GetUnderlyingTypeInformation',
                   'std::string',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): std::string ns3::AttributeChecker::GetValueTypeName() const [member function]
    cls.add_method('GetValueTypeName',
                   'std::string',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeChecker::HasUnderlyingTypeInformation() const [member function]
    cls.add_method('HasUnderlyingTypeInformation',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3AttributeValue_methods(root_module, cls):
    """Register bindings for the abstract ns3::AttributeValue (attribute.h,
    module 'core'): constructors plus the pure virtual Copy /
    DeserializeFromString / SerializeToString interface.
    """
    ## attribute.h (module 'core'): ns3::AttributeValue::AttributeValue(ns3::AttributeValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::AttributeValue const &', 'arg0')])
    ## attribute.h (module 'core'): ns3::AttributeValue::AttributeValue() [constructor]
    cls.add_constructor([])
    ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_pure_virtual=True, is_virtual=True)
    ## attribute.h (module 'core'): std::string ns3::AttributeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3CallbackChecker_methods(root_module, cls):
    """Register bindings for ns3::CallbackChecker (callback.h, module 'core')."""
    # Only the default and copy constructors are exposed.
    for ctor_args in ([], [param('ns3::CallbackChecker const &', 'arg0')]):
        cls.add_constructor(ctor_args)
    return
def register_Ns3CallbackImplBase_methods(root_module, cls):
    """Register bindings for ns3::CallbackImplBase (callback.h, module 'core').

    GetTypeid and IsEqual are pure virtual; Demangle is a static helper
    registered with protected visibility.
    """
    ## callback.h (module 'core'): ns3::CallbackImplBase::CallbackImplBase() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::CallbackImplBase::CallbackImplBase(ns3::CallbackImplBase const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::CallbackImplBase const &', 'arg0')])
    ## callback.h (module 'core'): std::string ns3::CallbackImplBase::GetTypeid() const [member function]
    cls.add_method('GetTypeid',
                   'std::string',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## callback.h (module 'core'): bool ns3::CallbackImplBase::IsEqual(ns3::Ptr<ns3::CallbackImplBase const> other) const [member function]
    cls.add_method('IsEqual',
                   'bool',
                   [param('ns3::Ptr< ns3::CallbackImplBase const >', 'other')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## callback.h (module 'core'): static std::string ns3::CallbackImplBase::Demangle(std::string const & mangled) [member function]
    cls.add_method('Demangle',
                   'std::string',
                   [param('std::string const &', 'mangled')],
                   is_static=True, visibility='protected')
    return
def register_Ns3CallbackValue_methods(root_module, cls):
    """Register bindings for ns3::CallbackValue (callback.h, module 'core'):
    three constructors, the virtual AttributeValue interface overrides, and
    the Set(CallbackBase) mutator.
    """
    ## callback.h (module 'core'): ns3::CallbackValue::CallbackValue(ns3::CallbackValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::CallbackValue const &', 'arg0')])
    ## callback.h (module 'core'): ns3::CallbackValue::CallbackValue() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::CallbackValue::CallbackValue(ns3::CallbackBase const & base) [constructor]
    cls.add_constructor([param('ns3::CallbackBase const &', 'base')])
    ## callback.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::CallbackValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## callback.h (module 'core'): bool ns3::CallbackValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## callback.h (module 'core'): std::string ns3::CallbackValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## callback.h (module 'core'): void ns3::CallbackValue::Set(ns3::CallbackBase base) [member function]
    cls.add_method('Set',
                   'void',
                   [param('ns3::CallbackBase', 'base')])
    return
def register_Ns3EmptyAttributeValue_methods(root_module, cls):
    """Register bindings for ns3::EmptyAttributeValue (attribute.h, module
    'core'). Its virtual overrides are registered with private visibility.
    """
    ## attribute.h (module 'core'): ns3::EmptyAttributeValue::EmptyAttributeValue(ns3::EmptyAttributeValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::EmptyAttributeValue const &', 'arg0')])
    ## attribute.h (module 'core'): ns3::EmptyAttributeValue::EmptyAttributeValue() [constructor]
    cls.add_constructor([])
    ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::EmptyAttributeValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, visibility='private', is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::EmptyAttributeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   visibility='private', is_virtual=True)
    ## attribute.h (module 'core'): std::string ns3::EmptyAttributeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, visibility='private', is_virtual=True)
    return
def register_Ns3TypeIdChecker_methods(root_module, cls):
    """Register bindings for ns3::TypeIdChecker (type-id.h, module 'core')."""
    # Only the default and copy constructors are exposed.
    for ctor_args in ([], [param('ns3::TypeIdChecker const &', 'arg0')]):
        cls.add_constructor(ctor_args)
    return
def register_Ns3TypeIdValue_methods(root_module, cls):
    """Register bindings for ns3::TypeIdValue (type-id.h, module 'core'):
    three constructors, the virtual AttributeValue interface overrides, and
    the Get/Set accessors for the wrapped ns3::TypeId.
    """
    ## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue() [constructor]
    cls.add_constructor([])
    ## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue(ns3::TypeIdValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::TypeIdValue const &', 'arg0')])
    ## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue(ns3::TypeId const & value) [constructor]
    cls.add_constructor([param('ns3::TypeId const &', 'value')])
    ## type-id.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::TypeIdValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## type-id.h (module 'core'): bool ns3::TypeIdValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeIdValue::Get() const [member function]
    cls.add_method('Get',
                   'ns3::TypeId',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): std::string ns3::TypeIdValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## type-id.h (module 'core'): void ns3::TypeIdValue::Set(ns3::TypeId const & value) [member function]
    cls.add_method('Set',
                   'void',
                   [param('ns3::TypeId const &', 'value')])
    return
def register_Ns3HashImplementation_methods(root_module, cls):
    """Register bindings for the abstract ns3::Hash::Implementation
    (hash-function.h, module 'core').
    """
    # Copy constructor first, then the default constructor (same order as
    # the original generated registration).
    cls.add_constructor([param('ns3::Hash::Implementation const &', 'arg0')])
    cls.add_constructor([])
    # GetHash32() and clear() are pure virtual; GetHash64() is virtual but
    # not pure.
    cls.add_method('GetHash32', 'uint32_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('GetHash64', 'uint64_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')],
                   is_virtual=True)
    cls.add_method('clear', 'void', [], is_pure_virtual=True, is_virtual=True)
    return
def register_Ns3HashFunctionFnv1a_methods(root_module, cls):
    """Register bindings for ns3::Hash::Function::Fnv1a (hash-fnv.h, module 'core')."""
    # Copy constructor, then default constructor.
    cls.add_constructor([param('ns3::Hash::Function::Fnv1a const &', 'arg0')])
    cls.add_constructor([])

    def add_virtual_method(name, return_type, arg_list):
        # All overrides on this concrete hasher are plain (non-pure) virtuals.
        cls.add_method(name, return_type, arg_list, is_virtual=True)

    add_virtual_method('GetHash32', 'uint32_t',
                       [param('char const *', 'buffer'), param('size_t const', 'size')])
    add_virtual_method('GetHash64', 'uint64_t',
                       [param('char const *', 'buffer'), param('size_t const', 'size')])
    add_virtual_method('clear', 'void', [])
    return
def register_Ns3HashFunctionHash32_methods(root_module, cls):
    """Register bindings for ns3::Hash::Function::Hash32 (hash-function.h, module 'core')."""
    # Copy constructor, then the constructor taking a Hash32Function_ptr.
    cls.add_constructor([param('ns3::Hash::Function::Hash32 const &', 'arg0')])
    cls.add_constructor([param('ns3::Hash::Hash32Function_ptr', 'hp')])
    # Virtual overrides; this adapter registers only the 32-bit hash and clear().
    cls.add_method('GetHash32', 'uint32_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')],
                   is_virtual=True)
    cls.add_method('clear', 'void', [], is_virtual=True)
    return
def register_Ns3HashFunctionHash64_methods(root_module, cls):
    """Register bindings for ns3::Hash::Function::Hash64 (hash-function.h, module 'core')."""
    # Copy constructor, then the constructor taking a Hash64Function_ptr.
    cls.add_constructor([param('ns3::Hash::Function::Hash64 const &', 'arg0')])
    cls.add_constructor([param('ns3::Hash::Hash64Function_ptr', 'hp')])

    def add_virtual_method(name, return_type, arg_list):
        # All overrides on this adapter are plain (non-pure) virtuals.
        cls.add_method(name, return_type, arg_list, is_virtual=True)

    add_virtual_method('GetHash32', 'uint32_t',
                       [param('char const *', 'buffer'), param('size_t const', 'size')])
    add_virtual_method('GetHash64', 'uint64_t',
                       [param('char const *', 'buffer'), param('size_t const', 'size')])
    add_virtual_method('clear', 'void', [])
    return
def register_Ns3HashFunctionMurmur3_methods(root_module, cls):
    """Register bindings for ns3::Hash::Function::Murmur3 (hash-murmur3.h, module 'core')."""
    # Copy constructor, then default constructor.
    cls.add_constructor([param('ns3::Hash::Function::Murmur3 const &', 'arg0')])
    cls.add_constructor([])

    def add_virtual_method(name, return_type, arg_list):
        # All overrides on this concrete hasher are plain (non-pure) virtuals.
        cls.add_method(name, return_type, arg_list, is_virtual=True)

    add_virtual_method('GetHash32', 'uint32_t',
                       [param('char const *', 'buffer'), param('size_t const', 'size')])
    add_virtual_method('GetHash64', 'uint64_t',
                       [param('char const *', 'buffer'), param('size_t const', 'size')])
    add_virtual_method('clear', 'void', [])
    return
def register_functions(root_module):
    """Dispatch free-function registration to each ns3 sub-namespace module."""
    register_functions_ns3_FatalImpl(root_module.get_submodule('FatalImpl'), root_module)
    register_functions_ns3_Hash(root_module.get_submodule('Hash'), root_module)
    return
def register_functions_ns3_FatalImpl(module, root_module):
    """No free functions to register for the ns3::FatalImpl namespace."""
    pass
def register_functions_ns3_Hash(module, root_module):
    """Register free functions for ns3::Hash, then recurse into its
    Function sub-namespace."""
    function_submodule = module.get_submodule('Function')
    register_functions_ns3_Hash_Function(function_submodule, root_module)
    return
def register_functions_ns3_Hash_Function(module, root_module):
    """No free functions to register for the ns3::Hash::Function namespace."""
    pass
def main():
    """Build the binding module and emit its generated source to stdout."""
    # Keep the original statement order: the code sink is created before the
    # module is initialised and populated.
    sink = FileCodeSink(sys.stdout)
    module = module_init()
    register_types(module)
    register_methods(module)
    register_functions(module)
    module.generate(sink)
# Script entry point: run the binding generator when executed directly.
if __name__ == '__main__':
    main()
| gpl-2.0 | 5,075,708,393,386,163,000 | 7,815,988,716,931,713,000 | 67.075248 | 383 | 0.640002 | false |
aacole/ursula-monitoring | sensu/plugins/metrics-nova-state.py | 2 | 1353 | #!/usr/bin/env python
# #RED
from argparse import ArgumentParser
import socket
import time
import os
import shade
DEFAULT_SCHEME = '{}.nova.states'.format(socket.gethostname())
def output_metric(name, value):
print '{}\t{}\t{}'.format(name, value, int(time.time()))
def main():
parser = ArgumentParser()
parser.add_argument('-S', '--service-type', default='compute')
parser.add_argument('-s', '--scheme', default=DEFAULT_SCHEME)
args = parser.parse_args()
cloud = shade.openstack_cloud()
servers = cloud.nova_client.servers.list(search_opts={ 'all_tenants': True })
# http://docs.openstack.org/api/openstack-compute/2/content/List_Servers-d1e2078.html
states = {
'ACTIVE': 0,
'BUILD': 0,
'DELETED': 0,
'ERROR': 0,
'HARD_REBOOT': 0,
'PASSWORD': 0,
'REBOOT': 0,
'REBUILD': 0,
'RESCUE': 0,
'RESIZE': 0,
'REVERT_RESIZE': 0,
'SHUTOFF': 0,
'SUSPENDED': 0,
'UNKNOWN': 0,
'VERIFY_RESIZE': 0,
}
for server in servers:
if server.status not in states:
states[server.status] = 0
states[server.status] += 1
for state, count in states.iteritems():
output_metric('{}.{}'.format(args.scheme, state.lower()), count)
if __name__ == '__main__':
main()
| apache-2.0 | -9,206,028,593,672,648,000 | 7,545,952,217,977,032,000 | 23.6 | 89 | 0.575758 | false |
FireWRT/OpenWrt-Firefly-Libraries | staging_dir/target-mipsel_1004kc+dsp_uClibc-0.9.33.2/usr/lib/python3.4/encodings/cp437.py | 272 | 34564 | """ Python Character Mapping Codec cp437 generated from 'VENDORS/MICSFT/PC/CP437.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_map)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_map)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
    """Return the CodecInfo record that registers this codec as 'cp437'."""
    codec = Codec()
    return codecs.CodecInfo(
        name='cp437',
        encode=codec.encode,
        decode=codec.decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Map
### Decoding Table
# Generated from 'VENDORS/MICSFT/PC/CP437.TXT'.  The string index is the
# CP437 byte value; the entry is the Unicode character it decodes to.
decoding_table = (
    '\x00'      # 0x0000 -> NULL
    '\x01'      # 0x0001 -> START OF HEADING
    '\x02'      # 0x0002 -> START OF TEXT
    '\x03'      # 0x0003 -> END OF TEXT
    '\x04'      # 0x0004 -> END OF TRANSMISSION
    '\x05'      # 0x0005 -> ENQUIRY
    '\x06'      # 0x0006 -> ACKNOWLEDGE
    '\x07'      # 0x0007 -> BELL
    '\x08'      # 0x0008 -> BACKSPACE
    '\t'        # 0x0009 -> HORIZONTAL TABULATION
    '\n'        # 0x000a -> LINE FEED
    '\x0b'      # 0x000b -> VERTICAL TABULATION
    '\x0c'      # 0x000c -> FORM FEED
    '\r'        # 0x000d -> CARRIAGE RETURN
    '\x0e'      # 0x000e -> SHIFT OUT
    '\x0f'      # 0x000f -> SHIFT IN
    '\x10'      # 0x0010 -> DATA LINK ESCAPE
    '\x11'      # 0x0011 -> DEVICE CONTROL ONE
    '\x12'      # 0x0012 -> DEVICE CONTROL TWO
    '\x13'      # 0x0013 -> DEVICE CONTROL THREE
    '\x14'      # 0x0014 -> DEVICE CONTROL FOUR
    '\x15'      # 0x0015 -> NEGATIVE ACKNOWLEDGE
    '\x16'      # 0x0016 -> SYNCHRONOUS IDLE
    '\x17'      # 0x0017 -> END OF TRANSMISSION BLOCK
    '\x18'      # 0x0018 -> CANCEL
    '\x19'      # 0x0019 -> END OF MEDIUM
    '\x1a'      # 0x001a -> SUBSTITUTE
    '\x1b'      # 0x001b -> ESCAPE
    '\x1c'      # 0x001c -> FILE SEPARATOR
    '\x1d'      # 0x001d -> GROUP SEPARATOR
    '\x1e'      # 0x001e -> RECORD SEPARATOR
    '\x1f'      # 0x001f -> UNIT SEPARATOR
    ' '         # 0x0020 -> SPACE
    '!'         # 0x0021 -> EXCLAMATION MARK
    '"'         # 0x0022 -> QUOTATION MARK
    '#'         # 0x0023 -> NUMBER SIGN
    '$'         # 0x0024 -> DOLLAR SIGN
    '%'         # 0x0025 -> PERCENT SIGN
    '&'         # 0x0026 -> AMPERSAND
    "'"         # 0x0027 -> APOSTROPHE
    '('         # 0x0028 -> LEFT PARENTHESIS
    ')'         # 0x0029 -> RIGHT PARENTHESIS
    '*'         # 0x002a -> ASTERISK
    '+'         # 0x002b -> PLUS SIGN
    ','         # 0x002c -> COMMA
    '-'         # 0x002d -> HYPHEN-MINUS
    '.'         # 0x002e -> FULL STOP
    '/'         # 0x002f -> SOLIDUS
    '0'         # 0x0030 -> DIGIT ZERO
    '1'         # 0x0031 -> DIGIT ONE
    '2'         # 0x0032 -> DIGIT TWO
    '3'         # 0x0033 -> DIGIT THREE
    '4'         # 0x0034 -> DIGIT FOUR
    '5'         # 0x0035 -> DIGIT FIVE
    '6'         # 0x0036 -> DIGIT SIX
    '7'         # 0x0037 -> DIGIT SEVEN
    '8'         # 0x0038 -> DIGIT EIGHT
    '9'         # 0x0039 -> DIGIT NINE
    ':'         # 0x003a -> COLON
    ';'         # 0x003b -> SEMICOLON
    '<'         # 0x003c -> LESS-THAN SIGN
    '='         # 0x003d -> EQUALS SIGN
    '>'         # 0x003e -> GREATER-THAN SIGN
    '?'         # 0x003f -> QUESTION MARK
    '@'         # 0x0040 -> COMMERCIAL AT
    'A'         # 0x0041 -> LATIN CAPITAL LETTER A
    'B'         # 0x0042 -> LATIN CAPITAL LETTER B
    'C'         # 0x0043 -> LATIN CAPITAL LETTER C
    'D'         # 0x0044 -> LATIN CAPITAL LETTER D
    'E'         # 0x0045 -> LATIN CAPITAL LETTER E
    'F'         # 0x0046 -> LATIN CAPITAL LETTER F
    'G'         # 0x0047 -> LATIN CAPITAL LETTER G
    'H'         # 0x0048 -> LATIN CAPITAL LETTER H
    'I'         # 0x0049 -> LATIN CAPITAL LETTER I
    'J'         # 0x004a -> LATIN CAPITAL LETTER J
    'K'         # 0x004b -> LATIN CAPITAL LETTER K
    'L'         # 0x004c -> LATIN CAPITAL LETTER L
    'M'         # 0x004d -> LATIN CAPITAL LETTER M
    'N'         # 0x004e -> LATIN CAPITAL LETTER N
    'O'         # 0x004f -> LATIN CAPITAL LETTER O
    'P'         # 0x0050 -> LATIN CAPITAL LETTER P
    'Q'         # 0x0051 -> LATIN CAPITAL LETTER Q
    'R'         # 0x0052 -> LATIN CAPITAL LETTER R
    'S'         # 0x0053 -> LATIN CAPITAL LETTER S
    'T'         # 0x0054 -> LATIN CAPITAL LETTER T
    'U'         # 0x0055 -> LATIN CAPITAL LETTER U
    'V'         # 0x0056 -> LATIN CAPITAL LETTER V
    'W'         # 0x0057 -> LATIN CAPITAL LETTER W
    'X'         # 0x0058 -> LATIN CAPITAL LETTER X
    'Y'         # 0x0059 -> LATIN CAPITAL LETTER Y
    'Z'         # 0x005a -> LATIN CAPITAL LETTER Z
    '['         # 0x005b -> LEFT SQUARE BRACKET
    '\\'        # 0x005c -> REVERSE SOLIDUS
    ']'         # 0x005d -> RIGHT SQUARE BRACKET
    '^'         # 0x005e -> CIRCUMFLEX ACCENT
    '_'         # 0x005f -> LOW LINE
    '`'         # 0x0060 -> GRAVE ACCENT
    'a'         # 0x0061 -> LATIN SMALL LETTER A
    'b'         # 0x0062 -> LATIN SMALL LETTER B
    'c'         # 0x0063 -> LATIN SMALL LETTER C
    'd'         # 0x0064 -> LATIN SMALL LETTER D
    'e'         # 0x0065 -> LATIN SMALL LETTER E
    'f'         # 0x0066 -> LATIN SMALL LETTER F
    'g'         # 0x0067 -> LATIN SMALL LETTER G
    'h'         # 0x0068 -> LATIN SMALL LETTER H
    'i'         # 0x0069 -> LATIN SMALL LETTER I
    'j'         # 0x006a -> LATIN SMALL LETTER J
    'k'         # 0x006b -> LATIN SMALL LETTER K
    'l'         # 0x006c -> LATIN SMALL LETTER L
    'm'         # 0x006d -> LATIN SMALL LETTER M
    'n'         # 0x006e -> LATIN SMALL LETTER N
    'o'         # 0x006f -> LATIN SMALL LETTER O
    'p'         # 0x0070 -> LATIN SMALL LETTER P
    'q'         # 0x0071 -> LATIN SMALL LETTER Q
    'r'         # 0x0072 -> LATIN SMALL LETTER R
    's'         # 0x0073 -> LATIN SMALL LETTER S
    't'         # 0x0074 -> LATIN SMALL LETTER T
    'u'         # 0x0075 -> LATIN SMALL LETTER U
    'v'         # 0x0076 -> LATIN SMALL LETTER V
    'w'         # 0x0077 -> LATIN SMALL LETTER W
    'x'         # 0x0078 -> LATIN SMALL LETTER X
    'y'         # 0x0079 -> LATIN SMALL LETTER Y
    'z'         # 0x007a -> LATIN SMALL LETTER Z
    '{'         # 0x007b -> LEFT CURLY BRACKET
    '|'         # 0x007c -> VERTICAL LINE
    '}'         # 0x007d -> RIGHT CURLY BRACKET
    '~'         # 0x007e -> TILDE
    '\x7f'      # 0x007f -> DELETE
    '\xc7'      # 0x0080 -> LATIN CAPITAL LETTER C WITH CEDILLA
    '\xfc'      # 0x0081 -> LATIN SMALL LETTER U WITH DIAERESIS
    '\xe9'      # 0x0082 -> LATIN SMALL LETTER E WITH ACUTE
    '\xe2'      # 0x0083 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
    '\xe4'      # 0x0084 -> LATIN SMALL LETTER A WITH DIAERESIS
    '\xe0'      # 0x0085 -> LATIN SMALL LETTER A WITH GRAVE
    '\xe5'      # 0x0086 -> LATIN SMALL LETTER A WITH RING ABOVE
    '\xe7'      # 0x0087 -> LATIN SMALL LETTER C WITH CEDILLA
    '\xea'      # 0x0088 -> LATIN SMALL LETTER E WITH CIRCUMFLEX
    '\xeb'      # 0x0089 -> LATIN SMALL LETTER E WITH DIAERESIS
    '\xe8'      # 0x008a -> LATIN SMALL LETTER E WITH GRAVE
    '\xef'      # 0x008b -> LATIN SMALL LETTER I WITH DIAERESIS
    '\xee'      # 0x008c -> LATIN SMALL LETTER I WITH CIRCUMFLEX
    '\xec'      # 0x008d -> LATIN SMALL LETTER I WITH GRAVE
    '\xc4'      # 0x008e -> LATIN CAPITAL LETTER A WITH DIAERESIS
    '\xc5'      # 0x008f -> LATIN CAPITAL LETTER A WITH RING ABOVE
    '\xc9'      # 0x0090 -> LATIN CAPITAL LETTER E WITH ACUTE
    '\xe6'      # 0x0091 -> LATIN SMALL LIGATURE AE
    '\xc6'      # 0x0092 -> LATIN CAPITAL LIGATURE AE
    '\xf4'      # 0x0093 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
    '\xf6'      # 0x0094 -> LATIN SMALL LETTER O WITH DIAERESIS
    '\xf2'      # 0x0095 -> LATIN SMALL LETTER O WITH GRAVE
    '\xfb'      # 0x0096 -> LATIN SMALL LETTER U WITH CIRCUMFLEX
    '\xf9'      # 0x0097 -> LATIN SMALL LETTER U WITH GRAVE
    '\xff'      # 0x0098 -> LATIN SMALL LETTER Y WITH DIAERESIS
    '\xd6'      # 0x0099 -> LATIN CAPITAL LETTER O WITH DIAERESIS
    '\xdc'      # 0x009a -> LATIN CAPITAL LETTER U WITH DIAERESIS
    '\xa2'      # 0x009b -> CENT SIGN
    '\xa3'      # 0x009c -> POUND SIGN
    '\xa5'      # 0x009d -> YEN SIGN
    '\u20a7'    # 0x009e -> PESETA SIGN
    '\u0192'    # 0x009f -> LATIN SMALL LETTER F WITH HOOK
    '\xe1'      # 0x00a0 -> LATIN SMALL LETTER A WITH ACUTE
    '\xed'      # 0x00a1 -> LATIN SMALL LETTER I WITH ACUTE
    '\xf3'      # 0x00a2 -> LATIN SMALL LETTER O WITH ACUTE
    '\xfa'      # 0x00a3 -> LATIN SMALL LETTER U WITH ACUTE
    '\xf1'      # 0x00a4 -> LATIN SMALL LETTER N WITH TILDE
    '\xd1'      # 0x00a5 -> LATIN CAPITAL LETTER N WITH TILDE
    '\xaa'      # 0x00a6 -> FEMININE ORDINAL INDICATOR
    '\xba'      # 0x00a7 -> MASCULINE ORDINAL INDICATOR
    '\xbf'      # 0x00a8 -> INVERTED QUESTION MARK
    '\u2310'    # 0x00a9 -> REVERSED NOT SIGN
    '\xac'      # 0x00aa -> NOT SIGN
    '\xbd'      # 0x00ab -> VULGAR FRACTION ONE HALF
    '\xbc'      # 0x00ac -> VULGAR FRACTION ONE QUARTER
    '\xa1'      # 0x00ad -> INVERTED EXCLAMATION MARK
    '\xab'      # 0x00ae -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
    '\xbb'      # 0x00af -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
    '\u2591'    # 0x00b0 -> LIGHT SHADE
    '\u2592'    # 0x00b1 -> MEDIUM SHADE
    '\u2593'    # 0x00b2 -> DARK SHADE
    '\u2502'    # 0x00b3 -> BOX DRAWINGS LIGHT VERTICAL
    '\u2524'    # 0x00b4 -> BOX DRAWINGS LIGHT VERTICAL AND LEFT
    '\u2561'    # 0x00b5 -> BOX DRAWINGS VERTICAL SINGLE AND LEFT DOUBLE
    '\u2562'    # 0x00b6 -> BOX DRAWINGS VERTICAL DOUBLE AND LEFT SINGLE
    '\u2556'    # 0x00b7 -> BOX DRAWINGS DOWN DOUBLE AND LEFT SINGLE
    '\u2555'    # 0x00b8 -> BOX DRAWINGS DOWN SINGLE AND LEFT DOUBLE
    '\u2563'    # 0x00b9 -> BOX DRAWINGS DOUBLE VERTICAL AND LEFT
    '\u2551'    # 0x00ba -> BOX DRAWINGS DOUBLE VERTICAL
    '\u2557'    # 0x00bb -> BOX DRAWINGS DOUBLE DOWN AND LEFT
    '\u255d'    # 0x00bc -> BOX DRAWINGS DOUBLE UP AND LEFT
    '\u255c'    # 0x00bd -> BOX DRAWINGS UP DOUBLE AND LEFT SINGLE
    '\u255b'    # 0x00be -> BOX DRAWINGS UP SINGLE AND LEFT DOUBLE
    '\u2510'    # 0x00bf -> BOX DRAWINGS LIGHT DOWN AND LEFT
    '\u2514'    # 0x00c0 -> BOX DRAWINGS LIGHT UP AND RIGHT
    '\u2534'    # 0x00c1 -> BOX DRAWINGS LIGHT UP AND HORIZONTAL
    '\u252c'    # 0x00c2 -> BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
    '\u251c'    # 0x00c3 -> BOX DRAWINGS LIGHT VERTICAL AND RIGHT
    '\u2500'    # 0x00c4 -> BOX DRAWINGS LIGHT HORIZONTAL
    '\u253c'    # 0x00c5 -> BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
    '\u255e'    # 0x00c6 -> BOX DRAWINGS VERTICAL SINGLE AND RIGHT DOUBLE
    '\u255f'    # 0x00c7 -> BOX DRAWINGS VERTICAL DOUBLE AND RIGHT SINGLE
    '\u255a'    # 0x00c8 -> BOX DRAWINGS DOUBLE UP AND RIGHT
    '\u2554'    # 0x00c9 -> BOX DRAWINGS DOUBLE DOWN AND RIGHT
    '\u2569'    # 0x00ca -> BOX DRAWINGS DOUBLE UP AND HORIZONTAL
    '\u2566'    # 0x00cb -> BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
    '\u2560'    # 0x00cc -> BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
    '\u2550'    # 0x00cd -> BOX DRAWINGS DOUBLE HORIZONTAL
    '\u256c'    # 0x00ce -> BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
    '\u2567'    # 0x00cf -> BOX DRAWINGS UP SINGLE AND HORIZONTAL DOUBLE
    '\u2568'    # 0x00d0 -> BOX DRAWINGS UP DOUBLE AND HORIZONTAL SINGLE
    '\u2564'    # 0x00d1 -> BOX DRAWINGS DOWN SINGLE AND HORIZONTAL DOUBLE
    '\u2565'    # 0x00d2 -> BOX DRAWINGS DOWN DOUBLE AND HORIZONTAL SINGLE
    '\u2559'    # 0x00d3 -> BOX DRAWINGS UP DOUBLE AND RIGHT SINGLE
    '\u2558'    # 0x00d4 -> BOX DRAWINGS UP SINGLE AND RIGHT DOUBLE
    '\u2552'    # 0x00d5 -> BOX DRAWINGS DOWN SINGLE AND RIGHT DOUBLE
    '\u2553'    # 0x00d6 -> BOX DRAWINGS DOWN DOUBLE AND RIGHT SINGLE
    '\u256b'    # 0x00d7 -> BOX DRAWINGS VERTICAL DOUBLE AND HORIZONTAL SINGLE
    '\u256a'    # 0x00d8 -> BOX DRAWINGS VERTICAL SINGLE AND HORIZONTAL DOUBLE
    '\u2518'    # 0x00d9 -> BOX DRAWINGS LIGHT UP AND LEFT
    '\u250c'    # 0x00da -> BOX DRAWINGS LIGHT DOWN AND RIGHT
    '\u2588'    # 0x00db -> FULL BLOCK
    '\u2584'    # 0x00dc -> LOWER HALF BLOCK
    '\u258c'    # 0x00dd -> LEFT HALF BLOCK
    '\u2590'    # 0x00de -> RIGHT HALF BLOCK
    '\u2580'    # 0x00df -> UPPER HALF BLOCK
    '\u03b1'    # 0x00e0 -> GREEK SMALL LETTER ALPHA
    '\xdf'      # 0x00e1 -> LATIN SMALL LETTER SHARP S
    '\u0393'    # 0x00e2 -> GREEK CAPITAL LETTER GAMMA
    '\u03c0'    # 0x00e3 -> GREEK SMALL LETTER PI
    '\u03a3'    # 0x00e4 -> GREEK CAPITAL LETTER SIGMA
    '\u03c3'    # 0x00e5 -> GREEK SMALL LETTER SIGMA
    '\xb5'      # 0x00e6 -> MICRO SIGN
    '\u03c4'    # 0x00e7 -> GREEK SMALL LETTER TAU
    '\u03a6'    # 0x00e8 -> GREEK CAPITAL LETTER PHI
    '\u0398'    # 0x00e9 -> GREEK CAPITAL LETTER THETA
    '\u03a9'    # 0x00ea -> GREEK CAPITAL LETTER OMEGA
    '\u03b4'    # 0x00eb -> GREEK SMALL LETTER DELTA
    '\u221e'    # 0x00ec -> INFINITY
    '\u03c6'    # 0x00ed -> GREEK SMALL LETTER PHI
    '\u03b5'    # 0x00ee -> GREEK SMALL LETTER EPSILON
    '\u2229'    # 0x00ef -> INTERSECTION
    '\u2261'    # 0x00f0 -> IDENTICAL TO
    '\xb1'      # 0x00f1 -> PLUS-MINUS SIGN
    '\u2265'    # 0x00f2 -> GREATER-THAN OR EQUAL TO
    '\u2264'    # 0x00f3 -> LESS-THAN OR EQUAL TO
    '\u2320'    # 0x00f4 -> TOP HALF INTEGRAL
    '\u2321'    # 0x00f5 -> BOTTOM HALF INTEGRAL
    '\xf7'      # 0x00f6 -> DIVISION SIGN
    '\u2248'    # 0x00f7 -> ALMOST EQUAL TO
    '\xb0'      # 0x00f8 -> DEGREE SIGN
    '\u2219'    # 0x00f9 -> BULLET OPERATOR
    '\xb7'      # 0x00fa -> MIDDLE DOT
    '\u221a'    # 0x00fb -> SQUARE ROOT
    '\u207f'    # 0x00fc -> SUPERSCRIPT LATIN SMALL LETTER N
    '\xb2'      # 0x00fd -> SUPERSCRIPT TWO
    '\u25a0'    # 0x00fe -> BLACK SQUARE
    '\xa0'      # 0x00ff -> NO-BREAK SPACE
)

### Decoding Map
# Derived from the table above instead of repeating all 256 entries as a
# second literal: the two structures must stay in sync, and deriving one
# from the other makes that automatic.  CP437 defines every byte (there are
# no UNDEFINED positions), so a plain comprehension reproduces the original
# identity-dict-plus-update exactly.
decoding_map = {byte: ord(char) for byte, char in enumerate(decoding_table)}
### Encoding Map
# CP437 maps every byte to a distinct Unicode character, so the encoding
# map is exactly the inverse of the decoding table.  Inverting the table
# replaces the previously hand-listed 256-entry reverse literal with a
# single source of truth; the resulting dict is entry-for-entry identical.
encoding_map = {ord(char): byte for byte, char in enumerate(decoding_table)}
| gpl-2.0 | -6,881,814,804,876,013,000 | -7,041,395,123,544,518,000 | 48.518625 | 103 | 0.604386 | false |
ARudiuk/mne-python | examples/inverse/plot_label_from_stc.py | 31 | 3963 | """
=================================================
Generate a functional label from source estimates
=================================================
Threshold source estimates and produce a functional label. The label
is typically the region of interest that contains high values.
Here we compare the average time course in the anatomical label obtained
by FreeSurfer segmentation and the average time course from the
functional label. As expected the time course in the functional
label yields higher values.
"""
# Author: Luke Bloy <luke.bloy@gmail.com>
# Alex Gramfort <alexandre.gramfort@telecom-paristech.fr>
# License: BSD (3-clause)
import numpy as np
import matplotlib.pyplot as plt
import mne
from mne.minimum_norm import read_inverse_operator, apply_inverse
from mne.datasets import sample
print(__doc__)

data_path = sample.data_path()
subjects_dir = data_path + '/subjects'
fname_inv = data_path + '/MEG/sample/sample_audvis-meg-oct-6-meg-inv.fif'
fname_evoked = data_path + '/MEG/sample/sample_audvis-ave.fif'
# NOTE: the original assigned subjects_dir twice with the same value; the
# redundant second assignment has been removed.
subject = 'sample'

# Inverse-solution parameters.
snr = 3.0
lambda2 = 1.0 / snr ** 2
method = "dSPM"  # use dSPM method (could also be MNE or sLORETA)

# Compute a label/ROI based on the peak power between 80 and 120 ms.
# The label bankssts-lh is used for the comparison.
aparc_label_name = 'bankssts-lh'
tmin, tmax = 0.080, 0.120

# Load the evoked data and the precomputed inverse operator.
evoked = mne.read_evokeds(fname_evoked, condition=0, baseline=(None, 0))
inverse_operator = read_inverse_operator(fname_inv)
src = inverse_operator['src']  # get the source space

# Compute the inverse solution on the surface, keeping the normal component.
stc = apply_inverse(evoked, inverse_operator, lambda2, method,
                    pick_ori='normal')

# Make an STC in the time interval of interest and take the mean.
stc_mean = stc.copy().crop(tmin, tmax).mean()

# Use stc_mean to generate a functional label: region growing is halted at
# 60% of the peak value within the anatomical label / ROI specified by
# aparc_label_name.
label = mne.read_labels_from_annot(subject, parc='aparc',
                                   subjects_dir=subjects_dir,
                                   regexp=aparc_label_name)[0]
stc_mean_label = stc_mean.in_label(label)
data = np.abs(stc_mean_label.data)
stc_mean_label.data[data < 0.6 * np.max(data)] = 0.

func_labels, _ = mne.stc_to_label(stc_mean_label, src=src, smooth=True,
                                  subjects_dir=subjects_dir, connected=True)

# Take the first label: func_labels are ordered by maximum values in stc.
func_label = func_labels[0]

# Load the anatomical ROI for comparison.
anat_label = mne.read_labels_from_annot(subject, parc='aparc',
                                        subjects_dir=subjects_dir,
                                        regexp=aparc_label_name)[0]

# Extract the time course for each label (PCA with sign-flip).
stc_anat_label = stc.in_label(anat_label)
pca_anat = stc.extract_label_time_course(anat_label, src, mode='pca_flip')[0]

stc_func_label = stc.in_label(func_label)
pca_func = stc.extract_label_time_course(func_label, src, mode='pca_flip')[0]

# Flip the PCA so that the max power between tmin and tmax is positive.
pca_anat *= np.sign(pca_anat[np.argmax(np.abs(pca_anat))])
pca_func *= np.sign(pca_func[np.argmax(np.abs(pca_anat))])

###############################################################################
# Plot the time courses.
plt.figure()
plt.plot(1e3 * stc_anat_label.times, pca_anat, 'k',
         label='Anatomical %s' % aparc_label_name)
plt.plot(1e3 * stc_func_label.times, pca_func, 'b',
         label='Functional %s' % aparc_label_name)
plt.legend()
plt.show()

###############################################################################
# Plot the brain in 3D with PySurfer if available.
brain = stc_mean.plot(hemi='lh', subjects_dir=subjects_dir)
brain.show_view('lateral')

# Show both labels on the brain.
brain.add_label(anat_label, borders=True, color='k')
brain.add_label(func_label, borders=True, color='b')
mozman/ezdxf | tests/test_06_math/test_630b_bezier4p_functions.py | 1 | 4662 | # Copyright (c) 2010-2020 Manfred Moitzi
# License: MIT License
import pytest
import random
from ezdxf.math import (
cubic_bezier_interpolation, Vec3, Bezier3P, quadratic_to_cubic_bezier,
Bezier4P, have_bezier_curves_g1_continuity, bezier_to_bspline,
)
def test_vertex_interpolation():
points = [(0, 0), (3, 1), (5, 3), (0, 8)]
result = list(cubic_bezier_interpolation(points))
assert len(result) == 3
c1, c2, c3 = result
p = c1.control_points
assert p[0].isclose((0, 0))
assert p[1].isclose((0.9333333333333331, 0.3111111111111111))
assert p[2].isclose((1.8666666666666663, 0.6222222222222222))
assert p[3].isclose((3, 1))
p = c2.control_points
assert p[0].isclose((3, 1))
assert p[1].isclose((4.133333333333334, 1.3777777777777778))
assert p[2].isclose((5.466666666666667, 1.822222222222222))
assert p[3].isclose((5, 3))
p = c3.control_points
assert p[0].isclose((5, 3))
assert p[1].isclose((4.533333333333333, 4.177777777777778))
assert p[2].isclose((2.2666666666666666, 6.088888888888889))
assert p[3].isclose((0, 8))
def test_quadratic_to_cubic_bezier():
r = random.Random(0)
def random_vec() -> Vec3:
return Vec3(r.uniform(-10, 10), r.uniform(-10, 10), r.uniform(-10, 10))
for i in range(1000):
quadratic = Bezier3P((random_vec(), random_vec(), random_vec()))
quadratic_approx = list(quadratic.approximate(10))
cubic = quadratic_to_cubic_bezier(quadratic)
cubic_approx = list(cubic.approximate(10))
assert len(quadratic_approx) == len(cubic_approx)
for p1, p2 in zip(quadratic_approx, cubic_approx):
assert p1.isclose(p2)
# G1 continuity: normalized end-tangent == normalized start-tangent of next curve
B1 = Bezier4P([(0, 0), (1, 1), (2, 1), (3, 0)])
# B1/B2 has G1 continuity:
B2 = Bezier4P([(3, 0), (4, -1), (5, -1), (6, 0)])
# B1/B3 has no G1 continuity:
B3 = Bezier4P([(3, 0), (4, 1), (5, 1), (6, 0)])
# B1/B4 G1 continuity off tolerance:
B4 = Bezier4P([(3, 0), (4, -1.03), (5, -1.0), (6, 0)])
# B1/B5 has a gap between B1 end and B5 start:
B5 = Bezier4P([(4, 0), (5, -1), (6, -1), (7, 0)])
def test_g1_continuity_for_bezier_curves():
assert have_bezier_curves_g1_continuity(B1, B2) is True
assert have_bezier_curves_g1_continuity(B1, B3) is False
assert have_bezier_curves_g1_continuity(B1, B4, g1_tol=1e-4) is False, \
"should be outside of tolerance "
assert have_bezier_curves_g1_continuity(B1, B5) is False, \
"end- and start point should match"
D1 = Bezier4P([(0, 0), (1, 1), (3, 0), (3, 0)])
D2 = Bezier4P([(3, 0), (3, 0), (5, -1), (6, 0)])
def test_g1_continuity_for_degenerated_bezier_curves():
assert have_bezier_curves_g1_continuity(D1, B2) is False
assert have_bezier_curves_g1_continuity(B1, D2) is False
assert have_bezier_curves_g1_continuity(D1, D2) is False
@pytest.mark.parametrize('curve', [D1, D2])
def test_flatten_degenerated_bezier_curves(curve):
# Degenerated Bezier curves behave like regular curves!
assert len(list(curve.flattening(0.1))) > 4
@pytest.mark.parametrize("b1,b2", [
(B1, B2), # G1 continuity, the common case
(B1, B3), # without G1 continuity is also a regular B-spline
(B1, B5), # regular B-spline, but first control point of B5 is lost
], ids=["G1", "without G1", "gap"])
def test_bezier_curves_to_bspline(b1, b2):
bspline = bezier_to_bspline([b1, b2])
# Remove duplicate control point between two adjacent curves:
expected = list(b1.control_points) + list(b2.control_points)[1:]
assert bspline.degree == 3, "should be a cubic B-spline"
assert bspline.control_points == tuple(expected)
def test_quality_of_bezier_to_bspline_conversion_1():
# This test shows the close relationship between cubic Bézier- and
# cubic B-spline curves.
points0 = B1.approximate(10)
points1 = bezier_to_bspline([B1]).approximate(10)
for p0, p1 in zip(points0, points1):
assert p0.isclose(p1) is True, "conversion should be perfect"
def test_quality_of_bezier_to_bspline_conversion_2():
# This test shows the close relationship between cubic Bézier- and
# cubic B-spline curves.
# Remove duplicate point between the two curves:
points0 = list(B1.approximate(10)) + list(B2.approximate(10))[1:]
points1 = bezier_to_bspline([B1, B2]).approximate(20)
for p0, p1 in zip(points0, points1):
assert p0.isclose(p1) is True, "conversion should be perfect"
def test_bezier_curves_to_bspline_error():
with pytest.raises(ValueError):
bezier_to_bspline([]) # one or more curves expected
| mit | -4,769,638,682,788,265,000 | -8,585,203,539,743,206,000 | 35.40625 | 81 | 0.65794 | false |
Phrozyn/MozDef | mq/plugins/ttl_auditd.py | 2 | 3716 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2014 Mozilla Corporation
class message(object):
def __init__(self):
'''
register our criteria for being passed a message
'''
# this plugin inspects messages for whitelist stuff that
# should be stored with a TTL so we keep it for a little while
# and delete rather than waiting for the index purge
self.registration = ['auditd', 'command']
self.priority = 1
def onMessage(self, message, metadata):
# ganglia monitor daemon -> 3d
if ('details' in message and
'parentprocess' in message['details'] and
message['details']['parentprocess'] == 'gmond' and
'duser' in message['details'] and
message['details']['duser'] == 'nobody' and
'command' in message['details'] and
message['details']['command'] == '/bin/sh -c netstat -t -a -n'):
message['_ttl'] = '3d'
# rabbitmq -> 3d
if (
('details' in message and
'parentprocess' in message['details'] and
message['details']['parentprocess'] == 'beam.smp' and
'duser' in message['details'] and
message['details']['duser'] == 'rabbitmq' and
'command' in message['details']
) and
(
message['details']['command'] == '/usr/lib64/erlang/erts-5.8.5/bin/epmd -daemon' or
message['details']['command'].startswith('inet_gethost 4') or
message['details']['command'].startswith('sh -c exec inet_gethost 4') or
message['details']['command'].startswith('/bin/sh -s unix:cmd') or
message['details']['command'].startswith('sh -c exec /bin/sh -s unix:cmd'))):
message['_ttl'] = '3d'
# sshd -> 3d
if ('details' in message and
'parentprocess' in message['details'] and
message['details']['parentprocess'] == 'sshd' and
'duser' in message['details'] and
message['details']['duser'] == 'root' and
'command' in message['details'] and
message['details']['command'] == '/usr/sbin/sshd -R'):
message['_ttl'] = '3d'
# chkconfig -> 3d
if (
('details' in message and
'parentprocess' in message['details'] and
message['details']['parentprocess'] == 'chkconfig' and
'suser' in message['details'] and
message['details']['suser'] == 'root' and
'command' in message['details']
) and
(
message['details']['command'].startswith('/sbin/runlevel') or
message['details']['command'].startswith('sh -c /sbin/runlevel'))):
message['_ttl'] = '3d'
# nagios -> 3d
if (
('details' in message and
'duser' in message['details'] and
message['details']['duser'] == 'nagios' and
'suser' in message['details'] and
message['details']['suser'] == 'root' and
'command' in message['details']
) and
(
message['details']['command'].startswith('/usr/lib64/nagios/plugins') or
message['details']['command'].startswith('sh -c /usr/lib64/nagios/plugins'))):
message['_ttl'] = '3d'
return (message, metadata)
| mpl-2.0 | -4,930,305,647,792,890,000 | 6,248,553,151,952,708,000 | 42.717647 | 99 | 0.519645 | false |
splunk/splunk-demo-yelp-search-command | bin/requests/packages/charade/constants.py | 231 | 1374 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
_debug = 0
eDetecting = 0
eFoundIt = 1
eNotMe = 2
eStart = 0
eError = 1
eItsMe = 2
SHORTCUT_THRESHOLD = 0.95
| apache-2.0 | 7,047,144,671,640,507,000 | -4,144,815,878,560,382,000 | 33.230769 | 69 | 0.679039 | false |
AndroidOpenDevelopment/android_external_chromium_org | tools/perf/benchmarks/scheduler.py | 8 | 1116 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry import test
from measurements import smoothness
import page_sets
@test.Disabled('linux') # crbug.com/368767
class SchedulerToughSchedulingCases(test.Test):
"""Measures rendering statistics while interacting with pages that have
challenging scheduling properties.
https://docs.google.com/a/chromium.org/document/d/
17yhE5Po9By0sCdM1yZT3LiUECaUr_94rQt9j-4tOQIM/view"""
test = smoothness.Smoothness
page_set = page_sets.ToughSchedulingCasesPageSet
# Pepper plugin is not supported on android.
@test.Disabled('android', 'win') # crbug.com/384733
class SchedulerToughPepperCases(test.Test):
"""Measures rendering statistics while interacting with pages that have
pepper plugins"""
test = smoothness.Smoothness
page_set = page_sets.ToughPepperCasesPageSet
def CustomizeBrowserOptions(self, options):
# This is needed for testing pepper plugin.
options.AppendExtraBrowserArgs('--enable-pepper-testing')
| bsd-3-clause | 8,804,373,668,597,710,000 | -630,547,313,816,517,800 | 35 | 73 | 0.781362 | false |
BlueLens/bl-magi | tensorflow/object_detection/protos/faster_rcnn_box_coder_pb2.py | 4 | 3445 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: object_detection/protos/faster_rcnn_box_coder.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='object_detection/protos/faster_rcnn_box_coder.proto',
package='object_detection.protos',
syntax='proto2',
serialized_pb=_b('\n3object_detection/protos/faster_rcnn_box_coder.proto\x12\x17object_detection.protos\"o\n\x12\x46\x61sterRcnnBoxCoder\x12\x13\n\x07y_scale\x18\x01 \x01(\x02:\x02\x31\x30\x12\x13\n\x07x_scale\x18\x02 \x01(\x02:\x02\x31\x30\x12\x17\n\x0cheight_scale\x18\x03 \x01(\x02:\x01\x35\x12\x16\n\x0bwidth_scale\x18\x04 \x01(\x02:\x01\x35')
)
_FASTERRCNNBOXCODER = _descriptor.Descriptor(
name='FasterRcnnBoxCoder',
full_name='object_detection.protos.FasterRcnnBoxCoder',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='y_scale', full_name='object_detection.protos.FasterRcnnBoxCoder.y_scale', index=0,
number=1, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(10),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='x_scale', full_name='object_detection.protos.FasterRcnnBoxCoder.x_scale', index=1,
number=2, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(10),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='height_scale', full_name='object_detection.protos.FasterRcnnBoxCoder.height_scale', index=2,
number=3, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(5),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='width_scale', full_name='object_detection.protos.FasterRcnnBoxCoder.width_scale', index=3,
number=4, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(5),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=80,
serialized_end=191,
)
DESCRIPTOR.message_types_by_name['FasterRcnnBoxCoder'] = _FASTERRCNNBOXCODER
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
FasterRcnnBoxCoder = _reflection.GeneratedProtocolMessageType('FasterRcnnBoxCoder', (_message.Message,), dict(
DESCRIPTOR = _FASTERRCNNBOXCODER,
__module__ = 'object_detection.protos.faster_rcnn_box_coder_pb2'
# @@protoc_insertion_point(class_scope:object_detection.protos.FasterRcnnBoxCoder)
))
_sym_db.RegisterMessage(FasterRcnnBoxCoder)
# @@protoc_insertion_point(module_scope)
| apache-2.0 | 8,747,953,400,754,468,000 | 7,716,959,468,981,780,000 | 37.277778 | 349 | 0.729173 | false |
fossilet/ansible | lib/ansible/plugins/lookup/credstash.py | 131 | 1651 | # (c) 2015, Ensighten <infra@ensighten.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.errors import AnsibleError
from ansible.plugins.lookup import LookupBase
CREDSTASH_INSTALLED = False
try:
import credstash
CREDSTASH_INSTALLED = True
except ImportError:
CREDSTASH_INSTALLED = False
class LookupModule(LookupBase):
def run(self, terms, variables, **kwargs):
if not CREDSTASH_INSTALLED:
raise AnsibleError('The credstash lookup plugin requires credstash to be installed.')
ret = []
for term in terms:
try:
val = credstash.getSecret(term, **kwargs)
except credstash.ItemNotFound:
raise AnsibleError('Key {0} not found'.format(term))
except Exception as e:
raise AnsibleError('Encountered exception while fetching {0}: {1}'.format(term, e.message))
ret.append(val)
return ret
| gpl-3.0 | -2,713,418,468,108,382,000 | 3,720,183,144,781,344,000 | 33.395833 | 107 | 0.69473 | false |
fdouetteau/PyBabe | pybabe/format_csv.py | 1 | 3107 |
from base import BabeBase, StreamHeader, StreamFooter
import csv
from charset import UTF8Recoder, UTF8RecoderWithCleanup, PrefixReader, UnicodeCSVWriter
import codecs
import logging
log = logging.getLogger("csv")
def linepull(stream, dialect, kwargs):
it = iter(stream)
fields = kwargs.get('fields', None)
if not fields:
fields = [it.next().rstrip('\r\n')]
metainfo = StreamHeader(**dict(kwargs, fields=fields))
yield metainfo
for row in it:
yield metainfo.t._make([row.rstrip('\r\n')])
yield StreamFooter()
def build_value(x, null_value):
if x == null_value:
return None
else:
return unicode(x, "utf-8")
def csvpull(stream, dialect, kwargs):
reader = csv.reader(stream, dialect)
fields = kwargs.get('fields', None)
null_value = kwargs.get('null_value', "")
ignore_malformed = kwargs.get('ignore_bad_lines', False)
if not fields:
fields = reader.next()
metainfo = StreamHeader(**dict(kwargs, fields=fields))
yield metainfo
for row in reader:
try:
yield metainfo.t._make([build_value(x, null_value) for x in row])
except Exception, e:
if ignore_malformed:
log.warn("Malformed line: %s, %s" % (row, e))
else:
raise e
yield StreamFooter()
def pull(format, stream, kwargs):
if kwargs.get('utf8_cleanup', False):
stream = UTF8RecoderWithCleanup(stream, kwargs.get('encoding', 'utf-8'))
elif codecs.getreader(kwargs.get('encoding', 'utf-8')) != codecs.getreader('utf-8'):
stream = UTF8Recoder(stream, kwargs.get('encoding', None))
else:
pass
delimiter = kwargs.get('delimiter', None)
sniff_read = stream.next()
stream = PrefixReader(sniff_read, stream, linefilter=kwargs.get("linefilter", None))
dialect = csv.Sniffer().sniff(sniff_read)
if sniff_read.endswith('\r\n'):
dialect.lineterminator = '\r\n'
else:
dialect.lineterminator = '\n'
if dialect.delimiter.isalpha() and not delimiter:
# http://bugs.python.org/issue2078
for row in linepull(stream, dialect, kwargs):
yield row
return
if delimiter:
dialect.delimiter = delimiter
for row in csvpull(stream, dialect, kwargs):
yield row
class default_dialect(csv.Dialect):
lineterminator = '\n'
delimiter = ','
doublequote = False
escapechar = '\\'
quoting = csv.QUOTE_MINIMAL
quotechar = '"'
def push(format, metainfo, instream, outfile, encoding, delimiter=None, **kwargs):
if not encoding:
encoding = "utf8"
dialect = kwargs.get('dialect', default_dialect)
if delimiter:
dialect.delimiter = delimiter
writer = UnicodeCSVWriter(outfile, dialect=dialect, encoding=encoding)
writer.writerow(metainfo.fields)
for k in instream:
if isinstance(k, StreamFooter):
break
else:
writer.writerow(k)
BabeBase.addPullPlugin('csv', ['csv', 'tsv', 'txt'], pull)
BabeBase.addPushPlugin('csv', ['csv', 'tsv', 'txt'], push)
| bsd-3-clause | 6,234,318,753,516,338,000 | 1,861,185,385,074,994,000 | 30.07 | 88 | 0.631799 | false |
raychorn/knowu | django/djangononrelsample2/django/contrib/sessions/backends/signed_cookies.py | 288 | 2798 | from django.conf import settings
from django.core import signing
from django.contrib.sessions.backends.base import SessionBase
class SessionStore(SessionBase):
def load(self):
"""
We load the data from the key itself instead of fetching from
some external data store. Opposite of _get_session_key(),
raises BadSignature if signature fails.
"""
try:
return signing.loads(self.session_key,
serializer=self.serializer,
# This doesn't handle non-default expiry dates, see #19201
max_age=settings.SESSION_COOKIE_AGE,
salt='django.contrib.sessions.backends.signed_cookies')
except (signing.BadSignature, ValueError):
self.create()
return {}
def create(self):
"""
To create a new key, we simply make sure that the modified flag is set
so that the cookie is set on the client for the current request.
"""
self.modified = True
def save(self, must_create=False):
"""
To save, we get the session key as a securely signed string and then
set the modified flag so that the cookie is set on the client for the
current request.
"""
self._session_key = self._get_session_key()
self.modified = True
def exists(self, session_key=None):
"""
This method makes sense when you're talking to a shared resource, but
it doesn't matter when you're storing the information in the client's
cookie.
"""
return False
def delete(self, session_key=None):
"""
To delete, we clear the session key and the underlying data structure
and set the modified flag so that the cookie is set on the client for
the current request.
"""
self._session_key = ''
self._session_cache = {}
self.modified = True
def cycle_key(self):
"""
Keeps the same data but with a new key. To do this, we just have to
call ``save()`` and it will automatically save a cookie with a new key
at the end of the request.
"""
self.save()
def _get_session_key(self):
"""
Most session backends don't need to override this method, but we do,
because instead of generating a random string, we want to actually
generate a secure url-safe Base64-encoded string of data as our
session key.
"""
session_cache = getattr(self, '_session_cache', {})
return signing.dumps(session_cache, compress=True,
salt='django.contrib.sessions.backends.signed_cookies',
serializer=self.serializer)
@classmethod
def clear_expired(cls):
pass
| lgpl-3.0 | 3,740,639,557,257,659,400 | 2,681,324,312,760,446,000 | 33.54321 | 78 | 0.611866 | false |
filippog/pysnmp | examples/hlapi/asyncore/sync/agent/ntforg/v3-trap.py | 1 | 1601 | """
SNMPv3 TRAP: auth SHA, privacy: AES128
++++++++++++++++++++++++++++++++++++++
Send SNMP notification using the following options:
* SNMPv3
* with authoritative snmpEngineId = 0x8000000001020304
(USM must be configured at the Receiver accordingly)
* with user 'usr-sha-aes128', auth: SHA, priv: AES128
* over IPv4/UDP
* send TRAP notification
* with TRAP ID 'authenticationFailure' specified as a MIB symbol
* do not include any additional managed object information
SNMPv3 TRAPs requires pre-sharing the Notification Originator's
value of SnmpEngineId with Notification Receiver. To facilitate that
we will use static (e.g. not autogenerated) version of snmpEngineId.
Functionally similar to:
| $ snmptrap -v3 -e 8000000001020304 -l authPriv -u usr-sha-aes -A authkey1 -X privkey1 -a SHA -x AES demo.snmplabs.com 12345 1.3.6.1.4.1.20408.4.1.1.2 1.3.6.1.2.1.1.1.0 s "my system"
"""#
from pysnmp.hlapi import *
errorIndication, errorStatus, errorIndex, varBinds = next(
sendNotification(SnmpEngine(OctetString(hexValue='8000000001020304')),
UsmUserData('usr-sha-aes128', 'authkey1', 'privkey1',
authProtocol=usmHMACSHAAuthProtocol,
privProtocol=usmAesCfb128Protocol),
UdpTransportTarget(('demo.snmplabs.com', 162)),
ContextData(),
'trap',
NotificationType(
ObjectIdentity('SNMPv2-MIB', 'authenticationFailure')
)
)
)
if errorIndication:
print(errorIndication)
| bsd-3-clause | -4,234,899,939,020,452,000 | 447,321,421,237,195,300 | 38.04878 | 183 | 0.647096 | false |
sander76/home-assistant | homeassistant/components/trace/__init__.py | 1 | 3528 | """Support for script and automation tracing and debugging."""
from __future__ import annotations
import datetime as dt
from itertools import count
from typing import Any, Deque
from homeassistant.core import Context
from homeassistant.helpers.trace import (
TraceElement,
trace_id_get,
trace_id_set,
trace_set_child_id,
)
import homeassistant.util.dt as dt_util
from . import websocket_api
from .const import DATA_TRACE, STORED_TRACES
from .utils import LimitedSizeDict
DOMAIN = "trace"
async def async_setup(hass, config):
"""Initialize the trace integration."""
hass.data[DATA_TRACE] = {}
websocket_api.async_setup(hass)
return True
def async_store_trace(hass, trace):
"""Store a trace if its item_id is valid."""
key = trace.key
if key[1]:
traces = hass.data[DATA_TRACE]
if key not in traces:
traces[key] = LimitedSizeDict(size_limit=STORED_TRACES)
traces[key][trace.run_id] = trace
class ActionTrace:
"""Base container for an script or automation trace."""
_run_ids = count(0)
def __init__(
self,
key: tuple[str, str],
config: dict[str, Any],
context: Context,
):
"""Container for script trace."""
self._trace: dict[str, Deque[TraceElement]] | None = None
self._config: dict[str, Any] = config
self.context: Context = context
self._error: Exception | None = None
self._state: str = "running"
self.run_id: str = str(next(self._run_ids))
self._timestamp_finish: dt.datetime | None = None
self._timestamp_start: dt.datetime = dt_util.utcnow()
self.key: tuple[str, str] = key
if trace_id_get():
trace_set_child_id(self.key, self.run_id)
trace_id_set((key, self.run_id))
def set_trace(self, trace: dict[str, Deque[TraceElement]]) -> None:
"""Set trace."""
self._trace = trace
def set_error(self, ex: Exception) -> None:
"""Set error."""
self._error = ex
def finished(self) -> None:
"""Set finish time."""
self._timestamp_finish = dt_util.utcnow()
self._state = "stopped"
def as_dict(self) -> dict[str, Any]:
"""Return dictionary version of this ActionTrace."""
result = self.as_short_dict()
traces = {}
if self._trace:
for key, trace_list in self._trace.items():
traces[key] = [item.as_dict() for item in trace_list]
result.update(
{
"trace": traces,
"config": self._config,
"context": self.context,
}
)
if self._error is not None:
result["error"] = str(self._error)
return result
def as_short_dict(self) -> dict[str, Any]:
"""Return a brief dictionary version of this ActionTrace."""
last_step = None
if self._trace:
last_step = list(self._trace)[-1]
result = {
"last_step": last_step,
"run_id": self.run_id,
"state": self._state,
"timestamp": {
"start": self._timestamp_start,
"finish": self._timestamp_finish,
},
"domain": self.key[0],
"item_id": self.key[1],
}
if self._error is not None:
result["error"] = str(self._error)
if last_step is not None:
result["last_step"] = last_step
return result
| apache-2.0 | 6,626,841,221,182,977,000 | 2,588,214,834,841,678,300 | 27.451613 | 71 | 0.562642 | false |
IronLanguages/ironpython2 | Tests/test_decimal.py | 3 | 1134 | # Licensed to the .NET Foundation under one or more agreements.
# The .NET Foundation licenses this file to you under the Apache 2.0 License.
# See the LICENSE file in the project root for more information.
import unittest
from decimal import *
from iptest import run_test, skipUnlessIronPython
@skipUnlessIronPython()
class DecimalTest(unittest.TestCase):
def test_explicit_from_System_Decimal(self):
import System
#int
self.assertEqual(str(Decimal(System.Decimal.Parse('45'))), '45')
#float
self.assertEqual(str(Decimal(System.Decimal.Parse('45.34'))), '45.34')
def test_formatting(self):
import System
d = System.Decimal.Parse('1.4274243253253245432543254545')
self.assertEqual('{}'.format(d), '1.4274243253253245432543254545')
self.assertEqual('{:,.2f}'.format(d), '1.43')
self.assertEqual('{:e}'.format(d), '1.427424325325e+00')
d = System.Decimal.Parse('4000000000.40000000')
self.assertEqual('{}'.format(d), '4000000000.40000000')
self.assertEqual('{:e}'.format(d), '4.000000000400e+09')
run_test(__name__)
| apache-2.0 | 5,914,428,122,713,562,000 | -8,705,714,945,386,833,000 | 34.4375 | 78 | 0.675485 | false |
elzaggo/pydoop | test/avro/test_io.py | 1 | 4807 | # BEGIN_COPYRIGHT
#
# Copyright 2009-2018 CRS4.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# END_COPYRIGHT
import os
import unittest
import itertools as it
import avro.datafile as avdf
from avro.io import DatumReader, DatumWriter
from pydoop.mapreduce.pipes import InputSplit
from pydoop.avrolib import (
SeekableDataFileReader, AvroReader, AvroWriter, parse
)
from pydoop.test_utils import WDTestCase
from pydoop.utils.py3compat import czip, cmap
import pydoop.hdfs as hdfs
from common import avro_user_record
THIS_DIR = os.path.dirname(os.path.abspath(__file__))
class TestAvroIO(WDTestCase):
def setUp(self):
super(TestAvroIO, self).setUp()
with open(os.path.join(THIS_DIR, "user.avsc")) as f:
self.schema = parse(f.read())
def write_avro_file(self, rec_creator, n_samples, sync_interval):
avdf.SYNC_INTERVAL = sync_interval
self.assertEqual(avdf.SYNC_INTERVAL, sync_interval)
fo = self._mkf('data.avro', mode='wb')
with avdf.DataFileWriter(fo, DatumWriter(), self.schema) as writer:
for i in range(n_samples):
writer.append(rec_creator(i))
return fo.name
def test_seekable(self):
fn = self.write_avro_file(avro_user_record, 500, 1024)
with open(fn, 'rb') as f:
sreader = SeekableDataFileReader(f, DatumReader())
res = [t for t in czip(cmap(
lambda _: f.tell(), it.repeat(1)
), sreader)]
sreader.align_after(res[-1][0])
with self.assertRaises(StopIteration):
r = next(sreader)
sreader.align_after(0)
r = next(sreader)
self.assertEqual(r, res[0][1])
def offset_iterator():
s = -1
for o, r in res:
sreader.align_after(o)
t = f.tell()
if t == s:
continue
s = t
x = next(sreader)
yield (t, x)
i = 0
for xo, x in offset_iterator():
sreader.align_after(xo)
for o, r in res[i:]:
if o >= xo:
self.assertEqual(x, r)
break
i += 1
def test_avro_reader(self):
N = 500
fn = self.write_avro_file(avro_user_record, N, 1024)
url = hdfs.path.abspath(fn, local=True)
class FunkyCtx(object):
def __init__(self, isplit):
self.input_split = isplit
def get_areader(offset, length):
isplit = InputSplit(InputSplit.to_string(url, offset, length))
ctx = FunkyCtx(isplit)
return AvroReader(ctx)
areader = get_areader(0, 14)
file_length = areader.reader.file_length
with self.assertRaises(StopIteration):
next(areader)
areader = get_areader(0, file_length)
with SeekableDataFileReader(open(fn, 'rb'), DatumReader()) as sreader:
for (o, a), s in czip(areader, sreader):
self.assertEqual(a, s)
mid_len = int(file_length / 2)
lows = [x for x in get_areader(0, mid_len)]
highs = [x for x in get_areader(mid_len, file_length)]
self.assertEqual(N, len(lows) + len(highs))
def test_avro_writer(self):
class FunkyCtx(object):
def __init__(self_, job_conf):
self_.job_conf = job_conf
class AWriter(AvroWriter):
schema = self.schema
def emit(self_, key, value):
self_.writer.append(key)
ctx = FunkyCtx({
'mapreduce.task.partition': 1,
'mapreduce.task.output.dir': hdfs.path.abspath(self.wd, local=True)
})
awriter = AWriter(ctx)
N = 10
for i in range(N):
awriter.emit(avro_user_record(i), '')
awriter.close()
def suite():
suite_ = unittest.TestSuite()
suite_.addTest(TestAvroIO('test_seekable'))
suite_.addTest(TestAvroIO('test_avro_reader'))
suite_.addTest(TestAvroIO('test_avro_writer'))
return suite_
if __name__ == '__main__':
_RUNNER = unittest.TextTestRunner(verbosity=2)
_RUNNER.run((suite()))
| apache-2.0 | -4,157,781,359,322,517,500 | 8,610,244,872,777,444,000 | 30.418301 | 79 | 0.575619 | false |
SU-ECE-17-7/hotspotter | hsviz/draw_func2.py | 1 | 54605 | ''' Lots of functions for drawing and plotting visiony things '''
# TODO: New naming scheme
# viz_<func_name>      clears the current axes and figure (cla, clf) and adds annotations.
# interact_<func_name> clears everything and starts user interactions.
# show_<func_name>     always clears the current axes (cla) but not the figure; may add annotations.
# plot_<func_name>     does not clear the axes or figure; more useful for graphs.
# draw_<func_name>     same as plot for now; more useful for images.
from __future__ import division, print_function
from hscom import __common__
(print, print_, print_on, print_off, rrr, profile,
printDBG) = __common__.init(__name__, '[df2]', DEBUG=False, initmpl=True)
# Python
from itertools import izip
from os.path import splitext, split, join, normpath, exists
import colorsys
import itertools
import pylab
import sys
import textwrap
import time
import warnings
# Matplotlib / Qt
import matplotlib
import matplotlib as mpl # NOQA
from matplotlib.collections import PatchCollection, LineCollection
from matplotlib.font_manager import FontProperties
from matplotlib.patches import Rectangle, Circle, FancyArrow
from matplotlib.transforms import Affine2D
from matplotlib.backends import backend_qt4
import matplotlib.pyplot as plt
# Qt
from PyQt4 import QtCore, QtGui
from PyQt4.QtCore import Qt
# Scientific
import numpy as np
import scipy.stats
import cv2
# HotSpotter
from hscom import helpers
from hscom import tools
from hscom.Printable import DynStruct
#================
# GLOBALS
#================
# Scratch slot for the most recent matplotlib mouse event
# (presumably set by interaction handlers elsewhere in this module -- confirm).
TMP_mevent = None
# Holds references to created Qt4 windows so they are not garbage collected.
QT4_WINS = []
# Handle to the active plotting widget, if any; None until one is registered.
plotWidget = None
# GENERAL FONTS
# Base font point sizes shared by the font definitions below.
SMALLER = 8
SMALL = 10
MED = 12
LARGE = 14
#fpargs = dict(family=None, style=None, variant=None, stretch=None, fname=None)
# FONTS is an attribute bag (DynStruct) of matplotlib FontProperties objects.
FONTS = DynStruct()
FONTS.small = FontProperties(weight='light', size=SMALL)
FONTS.smaller = FontProperties(weight='light', size=SMALLER)
FONTS.med = FontProperties(weight='light', size=MED)
FONTS.large = FontProperties(weight='light', size=LARGE)
FONTS.medbold = FontProperties(weight='bold', size=MED)
FONTS.largebold = FontProperties(weight='bold', size=LARGE)
# SPECIFIC FONTS
# Role-specific aliases into the general fonts above.
# NOTE: these are shared FontProperties objects, not copies -- mutating one
# (e.g. FONTS.legend) also changes its alias target (FONTS.small).
FONTS.legend = FONTS.small
FONTS.figtitle = FONTS.med
FONTS.axtitle = FONTS.med
FONTS.subtitle = FONTS.med
FONTS.xlabel = FONTS.smaller
FONTS.ylabel = FONTS.small
FONTS.relative = FONTS.smaller
# COLORS
ORANGE = np.array((255, 127, 0, 255)) / 255.0
RED = np.array((255, 0, 0, 255)) / 255.0
GREEN = np.array(( 0, 255, 0, 255)) / 255.0
BLUE = np.array(( 0, 0, 255, 255)) / 255.0
YELLOW = np.array((255, 255, 0, 255)) / 255.0
BLACK = np.array(( 0, 0, 0, 255)) / 255.0
WHITE = np.array((255, 255, 255, 255)) / 255.0
GRAY = np.array((127, 127, 127, 255)) / 255.0
DEEP_PINK = np.array((255, 20, 147, 255)) / 255.0
PINK = np.array((255, 100, 100, 255)) / 255.0
FALSE_RED = np.array((255, 51, 0, 255)) / 255.0
TRUE_GREEN = np.array(( 0, 255, 0, 255)) / 255.0
DARK_ORANGE = np.array((127, 63, 0, 255)) / 255.0
DARK_YELLOW = np.array((127, 127, 0, 255)) / 255.0
PURPLE = np.array((102, 0, 153, 255)) / 255.0
UNKNOWN_PURP = PURPLE
# FIGURE GEOMETRY
DPI = 80
#DPI = 160
#FIGSIZE = (24) # default windows fullscreen
FIGSIZE_MED = (12, 6)
FIGSIZE_SQUARE = (12, 12)
FIGSIZE_BIGGER = (24, 12)
FIGSIZE_HUGE = (32, 16)
FIGSIZE = FIGSIZE_MED
# Quality drawings
#FIGSIZE = FIGSIZE_SQUARE
#DPI = 120
tile_within = (-1, 30, 969, 1041)
if helpers.get_computer_name() == 'Ooo':
TILE_WITHIN = (-1912, 30, -969, 1071)
# DEFAULTS. (TODO: Can these be cleaned up?)
DISTINCT_COLORS = True # and False
DARKEN = None
ELL_LINEWIDTH = 1.5
if DISTINCT_COLORS:
ELL_ALPHA = .6
LINE_ALPHA = .35
else:
ELL_ALPHA = .4
LINE_ALPHA = .4
LINE_ALPHA_OVERRIDE = helpers.get_arg('--line-alpha-override', type_=float, default=None)
ELL_ALPHA_OVERRIDE = helpers.get_arg('--ell-alpha-override', type_=float, default=None)
#LINE_ALPHA_OVERRIDE = None
#ELL_ALPHA_OVERRIDE = None
ELL_COLOR = BLUE
LINE_COLOR = RED
LINE_WIDTH = 1.4
SHOW_LINES = True # True
SHOW_ELLS = True
POINT_SIZE = 2
base_fnum = 9001


def next_fnum():
    """Return a fresh, unused figure number (monotonically increasing)."""
    global base_fnum
    base_fnum = base_fnum + 1
    return base_fnum
def my_prefs():
    """Apply the author's preferred drawing settings: red match lines
    over blue feature ellipses."""
    global LINE_COLOR, ELL_COLOR, ELL_LINEWIDTH, ELL_ALPHA
    LINE_COLOR = (1, 0, 0)
    ELL_COLOR = (0, 0, 1)
    ELL_LINEWIDTH = 2
    ELL_ALPHA = .5
def execstr_global():
    """Return a list of 'global <name>' statements, one per name in this
    module's global namespace (intended for use with exec to pull the
    module globals into a local scope).

    Bugfix: the original concatenated 'global' + key without a space,
    yielding invalid statements like 'globalfoo'.
    """
    execstr = ['global ' + key for key in globals().keys()]
    return execstr
def register_matplotlib_widget(plotWidget_):
    """Register a PyQt4 matplotlib widget; gcf()/gca() will then target
    its figure instead of pyplot's current figure."""
    global plotWidget
    plotWidget = plotWidget_
def unregister_qt4_win(win):
    """Remove a window from the global Qt4 window registry.

    Pass the string 'all' to clear every registered window.

    Bugfix: the original only handled the 'all' case; unregistering a
    single window silently did nothing.
    """
    global QT4_WINS
    if win == 'all':
        QT4_WINS = []
    elif win in QT4_WINS:
        QT4_WINS.remove(win)
def register_qt4_win(win):
    """Add a Qt4 window to the global registry used by all_figures_tile
    and the keypress forwarding machinery."""
    global QT4_WINS
    QT4_WINS.append(win)
def OooScreen2():
    """Return all_figures_tile() keyword arguments sized for the
    author's secondary monitor (placed to the left of the primary)."""
    margin = 30 * 4
    num_rows, num_cols = 1, 1
    w = (1912 - margin) / num_rows
    h = (1080 - margin) / num_cols
    return {'num_rc': (1, 1), 'wh': (w, h), 'xy_off': (-1920, 30),
            'wh_off': (0, 10), 'row_first': True, 'no_tile': False}
def deterministic_shuffle(list_):
    """Shuffle list_ in place, seeded by its length, so that inputs of
    the same length always receive the same permutation.  The RNG is
    reseeded with a random value afterwards so callers are not left on
    a fixed seed.

    Bugfix: the original computed the throwaway reseed value via
    np.uint(0 - 2), relying on negative-to-unsigned wraparound which
    raises OverflowError on modern NumPy.
    """
    # Any nonnegative 32-bit value is a legal seed
    randS = int(np.random.rand() * (2 ** 31 - 1))
    np.random.seed(len(list_))
    np.random.shuffle(list_)
    np.random.seed(randS)
def distinct_colors(N, brightness=.878):
    """Return N visually distinct RGB tuples with components in [0, 1].

    Hues are spaced evenly around the HSV color wheel (saturation and
    value both set to `brightness`), then shuffled deterministically
    (seed depends only on N) so neighboring indices differ in hue.
    """
    # http://blog.jianhuashao.com/2011/09/generate-n-distinct-colors.html
    sat = brightness
    val = brightness
    HSV_tuples = [(x * 1.0 / N, sat, val) for x in xrange(N)]
    # NOTE: map returns a list under Python 2, which the in-place
    # shuffle below depends on
    RGB_tuples = map(lambda x: colorsys.hsv_to_rgb(*x), HSV_tuples)
    deterministic_shuffle(RGB_tuples)
    return RGB_tuples
def add_alpha(colors):
    """Append a full-opacity alpha channel (1) to every color."""
    return [[channel for channel in color] + [1] for color in colors]
def _axis_xy_width_height(ax, xaug=0, yaug=0, waug=0, haug=0):
'gets geometry of a subplot'
autoAxis = ax.axis()
xy = (autoAxis[0] + xaug, autoAxis[2] + yaug)
width = (autoAxis[1] - autoAxis[0]) + waug
height = (autoAxis[3] - autoAxis[2]) + haug
return xy, width, height
def draw_border(ax, color=GREEN, lw=2, offset=None):
    'draws rectangle border around a subplot'
    # Overshoot the axis limits slightly so the border is fully visible
    xy, width, height = _axis_xy_width_height(ax, -.7, -.2, 1, .4)
    if offset is not None:
        xoff, yoff = offset
        xy = [xoff, yoff]
        # Negate height so the rectangle grows downward from the offset
        height = - height - yoff
        width = width - xoff
    rect = matplotlib.patches.Rectangle(xy, width, height, lw=lw)
    rect = ax.add_patch(rect)
    rect.set_clip_on(False)  # permit drawing outside the axes region
    rect.set_fill(False)
    rect.set_edgecolor(color)
def draw_roi(roi, label=None, bbox_color=(1, 0, 0),
             lbl_bgcolor=(0, 0, 0), lbl_txtcolor=(1, 1, 1), theta=0, ax=None):
    """Draw a (possibly rotated) region-of-interest rectangle with an
    orientation arrow and an optional centered text label.

    roi -- (x, y, w, h) in data coordinates; theta -- rotation (radians).
    """
    if ax is None:
        ax = gca()
    (rx, ry, rw, rh) = roi
    #cos_ = np.cos(theta)
    #sin_ = np.sin(theta)
    #rot_t = Affine2D([( cos_, -sin_, 0),
    #( sin_, cos_, 0),
    #( 0, 0, 1)])
    #scale_t = Affine2D([( rw, 0, 0),
    #( 0, rh, 0),
    #( 0, 0, 1)])
    #trans_t = Affine2D([( 1, 0, rx + rw / 2),
    #( 0, 1, ry + rh / 2),
    #( 0, 0, 1)])
    #t_end = scale_t + rot_t + trans_t + t_start
    # Transformations are specified in backwards order.
    trans_roi = Affine2D()
    trans_roi.scale(rw, rh)
    trans_roi.rotate(theta)
    trans_roi.translate(rx + rw / 2, ry + rh / 2)
    t_end = trans_roi + ax.transData
    # A unit square and unit arrow are mapped onto the ROI by t_end
    bbox = matplotlib.patches.Rectangle((-.5, -.5), 1, 1, lw=2, transform=t_end)
    arw_x, arw_y, arw_dx, arw_dy = (-0.5, -0.5, 1.0, 0.0)
    arrowargs = dict(head_width=.1, transform=t_end, length_includes_head=True)
    arrow = FancyArrow(arw_x, arw_y, arw_dx, arw_dy, **arrowargs)
    bbox.set_fill(False)
    #bbox.set_transform(trans)
    bbox.set_edgecolor(bbox_color)
    arrow.set_edgecolor(bbox_color)
    arrow.set_facecolor(bbox_color)
    ax.add_patch(bbox)
    ax.add_patch(arrow)
    #ax.add_patch(arrow2)
    if label is not None:
        # Label is anchored at the ROI origin, not its center
        ax_absolute_text(rx, ry, label, ax=ax,
                         horizontalalignment='center',
                         verticalalignment='center',
                         color=lbl_txtcolor,
                         backgroundcolor=lbl_bgcolor)
# ---- GENERAL FIGURE COMMANDS ----
def sanatize_img_fname(fname):
    """Clean a figure filename: replace filesystem-unsafe characters,
    lowercase the extension, and append '.png' when the extension is
    not a recognized image type."""
    fname_clean = fname
    search_replace_list = [(' ', '_'), ('\n', '--'), ('\\', ''), ('/', '')]
    for old, new in search_replace_list:
        fname_clean = fname_clean.replace(old, new)
    fname_noext, ext = splitext(fname_clean)
    fname_clean = fname_noext + ext.lower()
    # Check for correct extensions
    # NOTE(review): assumes helpers.IMG_EXTENSIONS entries include the
    # leading dot (splitext yields e.g. '.png') -- confirm; otherwise
    # every filename silently gains an extra '.png' suffix.
    if not ext.lower() in helpers.IMG_EXTENSIONS:
        fname_clean += '.png'
    return fname_clean
def sanatize_img_fpath(fpath):
    """Sanitize the filename component of fpath and normalize the path."""
    dpath, fname = split(fpath)
    cleaned = join(dpath, sanatize_img_fname(fname))
    return normpath(cleaned)
def set_geometry(fnum, x, y, w, h):
    """Move/resize the Qt window backing figure number fnum."""
    window = get_fig(fnum).canvas.manager.window
    window.setGeometry(x, y, w, h)
def get_geometry(fnum):
    """Return (x, y, w, h) of the Qt window backing figure number fnum."""
    window = get_fig(fnum).canvas.manager.window
    (left, top, right, bottom) = window.geometry().getCoords()
    return (left, top, right - left, bottom - top)
def get_screen_info():
from PyQt4 import Qt, QtGui # NOQA
desktop = QtGui.QDesktopWidget()
mask = desktop.mask() # NOQA
layout_direction = desktop.layoutDirection() # NOQA
screen_number = desktop.screenNumber() # NOQA
normal_geometry = desktop.normalGeometry() # NOQA
num_screens = desktop.screenCount() # NOQA
avail_rect = desktop.availableGeometry() # NOQA
screen_rect = desktop.screenGeometry() # NOQA
QtGui.QDesktopWidget().availableGeometry().center() # NOQA
normal_geometry = desktop.normalGeometry() # NOQA
def get_all_figures():
    """Return every open matplotlib figure that this module has not
    closed, sorted by figure number."""
    managers = matplotlib._pylab_helpers.Gcf.get_all_fig_managers()
    candidates = [manager.canvas.figure for manager in managers]
    # Figures closed via close_figure() carry a truthy df2_closed flag
    all_figures = [fig for fig in candidates
                   if not fig.__dict__.get('df2_closed', False)]
    return sorted(all_figures, key=lambda fig: fig.number)
def get_all_qt4_wins():
    """Return the list of Qt4 windows registered via register_qt4_win."""
    return QT4_WINS
def all_figures_show():
if plotWidget is not None:
plotWidget.figure.show()
plotWidget.figure.canvas.draw()
for fig in iter(get_all_figures()):
time.sleep(.1)
fig.show()
fig.canvas.draw()
def all_figures_tight_layout():
for fig in iter(get_all_figures()):
fig.tight_layout()
#adjust_subplots()
time.sleep(.1)
def get_monitor_geom(monitor_num=0):
from PyQt4 import QtGui # NOQA
desktop = QtGui.QDesktopWidget()
rect = desktop.availableGeometry()
geom = (rect.x(), rect.y(), rect.width(), rect.height())
return geom
def golden_wh(x):
    'returns a width / height with a golden aspect ratio'
    dims = (x * .618, x * .312)
    return map(lambda v: int(round(v)), dims)
def all_figures_tile(num_rc=(3, 4), wh=1000, xy_off=(0, 0), wh_off=(0, 10),
row_first=True, no_tile=False, override1=False):
'Lays out all figures in a grid. if wh is a scalar, a golden ratio is used'
# RCOS TODO:
# I want this function to layout all the figures and qt windows within the
# bounds of a rectangle. (taken from the get_monitor_geom, or specified by
# the user i.e. left half of monitor 0). It should lay them out
# rectangularly and choose figure sizes such that all of them will fit.
if no_tile:
return
if not np.iterable(wh):
wh = golden_wh(wh)
all_figures = get_all_figures()
all_qt4wins = get_all_qt4_wins()
if override1:
if len(all_figures) == 1:
fig = all_figures[0]
win = fig.canvas.manager.window
win.setGeometry(0, 0, 900, 900)
update()
return
#nFigs = len(all_figures) + len(all_qt4_wins)
num_rows, num_cols = num_rc
w, h = wh
x_off, y_off = xy_off
w_off, h_off = wh_off
x_pad, y_pad = (0, 0)
printDBG('[df2] Tile all figures: ')
printDBG('[df2] wh = %r' % ((w, h),))
printDBG('[df2] xy_offsets = %r' % ((x_off, y_off),))
printDBG('[df2] wh_offsets = %r' % ((w_off, h_off),))
printDBG('[df2] xy_pads = %r' % ((x_pad, y_pad),))
if sys.platform == 'win32':
h_off += 0
w_off += 40
x_off += 40
y_off += 40
x_pad += 0
y_pad += 100
def position_window(i, win):
isqt4_mpl = isinstance(win, backend_qt4.MainWindow)
isqt4_back = isinstance(win, QtGui.QMainWindow)
if not isqt4_mpl and not isqt4_back:
raise NotImplementedError('%r-th Backend %r is not a Qt Window' % (i, win))
if row_first:
y = (i % num_rows) * (h + h_off) + 40
x = (int(i / num_rows)) * (w + w_off) + x_pad
else:
x = (i % num_cols) * (w + w_off) + 40
y = (int(i / num_cols)) * (h + h_off) + y_pad
x += x_off
y += y_off
win.setGeometry(x, y, w, h)
ioff = 0
for i, win in enumerate(all_qt4wins):
position_window(i, win)
ioff += 1
for i, fig in enumerate(all_figures):
win = fig.canvas.manager.window
position_window(i + ioff, win)
def all_figures_bring_to_front():
all_figures = get_all_figures()
for fig in iter(all_figures):
bring_to_front(fig)
def close_all_figures():
all_figures = get_all_figures()
for fig in iter(all_figures):
close_figure(fig)
def close_figure(fig):
    """Clear fig, flag it closed (so get_all_figures skips it), and
    close its Qt window."""
    fig.clf()
    fig.df2_closed = True
    fig.canvas.manager.window.close()
def bring_to_front(fig):
    """Raise the figure's Qt window above other windows and focus it."""
    #what is difference between show and show normal?
    qtwin = fig.canvas.manager.window
    qtwin.raise_()
    qtwin.activateWindow()
    # Toggle the always-on-top hint to force the window forward, then
    # clear the flags so it does not remain pinned on top
    qtwin.setWindowFlags(Qt.WindowStaysOnTopHint)
    qtwin.setWindowFlags(Qt.WindowFlags(0))
    qtwin.show()
def show():
all_figures_show()
all_figures_bring_to_front()
plt.show()
def reset():
close_all_figures()
def draw():
all_figures_show()
def update():
draw()
all_figures_bring_to_front()
def present(*args, **kwargs):
'execing present should cause IPython magic'
print('[df2] Presenting figures...')
with warnings.catch_warnings():
warnings.simplefilter("ignore")
all_figures_tile(*args, **kwargs)
all_figures_show()
all_figures_bring_to_front()
# Return an exec string
execstr = helpers.ipython_execstr()
execstr += textwrap.dedent('''
if not embedded:
print('[df2] Presenting in normal shell.')
print('[df2] ... plt.show()')
plt.show()
''')
return execstr
def save_figure(fnum=None, fpath=None, usetitle=False, overwrite=True):
    """Save figure fnum (default: current figure) to disk as an image.

    fpath may be a full path or a directory (with usetitle=True the
    sanitized window title becomes the filename).  A DPI/figsize suffix
    is appended and the name is sanitized before writing.
    """
    #import warnings
    #warnings.simplefilter("error")
    # Find the figure
    if fnum is None:
        fig = gcf()
    else:
        fig = plt.figure(fnum, figsize=FIGSIZE, dpi=DPI)
    # Enforce inches and DPI
    fig.set_size_inches(FIGSIZE[0], FIGSIZE[1])
    fnum = fig.number
    if fpath is None:
        # Find the title
        fpath = sanatize_img_fname(fig.canvas.get_window_title())
    if usetitle:
        title = sanatize_img_fname(fig.canvas.get_window_title())
        fpath = join(fpath, title)
    # Add in DPI information
    fpath_noext, ext = splitext(fpath)
    size_suffix = '_DPI=%r_FIGSIZE=%d,%d' % (DPI, FIGSIZE[0], FIGSIZE[1])
    fpath = fpath_noext + size_suffix + ext
    # Sanatize the filename
    fpath_clean = sanatize_img_fpath(fpath)
    #fname_clean = split(fpath_clean)[1]
    print('[df2] save_figure() %r' % (fpath_clean,))
    #adjust_subplots()
    with warnings.catch_warnings():
        warnings.filterwarnings('ignore', category=DeprecationWarning)
        if not exists(fpath_clean) or overwrite:
            fig.savefig(fpath_clean, dpi=DPI)
def set_ticks(xticks, yticks):
ax = gca()
ax.set_xticks(xticks)
ax.set_yticks(yticks)
def set_xticks(tick_set):
ax = gca()
ax.set_xticks(tick_set)
def set_yticks(tick_set):
ax = gca()
ax.set_yticks(tick_set)
def set_xlabel(lbl, ax=None):
if ax is None:
ax = gca()
ax.set_xlabel(lbl, fontproperties=FONTS.xlabel)
def set_title(title, ax=None):
if ax is None:
ax = gca()
ax.set_title(title, fontproperties=FONTS.axtitle)
def set_ylabel(lbl, ax=None):
    """Set the y-axis label.

    Consistency fix: accepts an optional ax like the sibling helpers
    set_xlabel and set_title (defaults to the current axes), keeping
    backward compatibility with existing single-argument callers.
    """
    if ax is None:
        ax = gca()
    ax.set_ylabel(lbl, fontproperties=FONTS.xlabel)
def plot(*args, **kwargs):
return plt.plot(*args, **kwargs)
def plot2(x_data, y_data, marker='o', title_pref='', x_label='x', y_label='y', *args,
**kwargs):
do_plot = True
ax = gca()
if len(x_data) != len(y_data):
warnstr = '[df2] ! Warning: len(x_data) != len(y_data). Cannot plot2'
warnings.warn(warnstr)
draw_text(warnstr)
do_plot = False
if len(x_data) == 0:
warnstr = '[df2] ! Warning: len(x_data) == 0. Cannot plot2'
warnings.warn(warnstr)
draw_text(warnstr)
do_plot = False
if do_plot:
ax.plot(x_data, y_data, marker, *args, **kwargs)
min_ = min(x_data.min(), y_data.min())
max_ = max(x_data.max(), y_data.max())
# Equal aspect ratio
ax.set_xlim(min_, max_)
ax.set_ylim(min_, max_)
ax.set_aspect('equal')
ax.set_xlabel(x_label, fontproperties=FONTS.xlabel)
ax.set_ylabel(y_label, fontproperties=FONTS.xlabel)
ax.set_title(title_pref + ' ' + x_label + ' vs ' + y_label,
fontproperties=FONTS.axtitle)
def adjust_subplots_xlabels():
adjust_subplots(left=.03, right=.97, bottom=.2, top=.9, hspace=.15)
def adjust_subplots_xylabels():
adjust_subplots(left=.03, right=1, bottom=.1, top=.9, hspace=.15)
def adjust_subplots_safe(left=.1, right=.9, bottom=.1, top=.9, wspace=.3, hspace=.5):
adjust_subplots(left, bottom, right, top, wspace, hspace)
def adjust_subplots(left=0.02, bottom=0.02,
right=0.98, top=0.90,
wspace=0.1, hspace=0.15):
'''
left = 0.125 # the left side of the subplots of the figure
right = 0.9 # the right side of the subplots of the figure
bottom = 0.1 # the bottom of the subplots of the figure
top = 0.9 # the top of the subplots of the figure
wspace = 0.2 # the amount of width reserved for blank space between subplots
hspace = 0.2
'''
#print('[df2] adjust_subplots(%r)' % locals())
plt.subplots_adjust(left, bottom, right, top, wspace, hspace)
#=======================
# TEXT FUNCTIONS
# TODO: I have too many of these. Need to consolidate
#=======================
def upperleft_text(txt):
txtargs = dict(horizontalalignment='left',
verticalalignment='top',
#fontsize='smaller',
#fontweight='ultralight',
backgroundcolor=(0, 0, 0, .5),
color=ORANGE)
ax_relative_text(.02, .02, txt, **txtargs)
def upperright_text(txt, offset=None):
txtargs = dict(horizontalalignment='right',
verticalalignment='top',
#fontsize='smaller',
#fontweight='ultralight',
backgroundcolor=(0, 0, 0, .5),
color=ORANGE,
offset=offset)
ax_relative_text(.98, .02, txt, **txtargs)
def lowerright_text(txt):
txtargs = dict(horizontalalignment='right',
verticalalignment='top',
#fontsize='smaller',
#fontweight='ultralight',
backgroundcolor=(0, 0, 0, .5),
color=ORANGE)
ax_relative_text(.98, .92, txt, **txtargs)
def absolute_lbl(x_, y_, txt, roffset=(-.02, -.02), **kwargs):
txtargs = dict(horizontalalignment='right',
verticalalignment='top',
backgroundcolor=(0, 0, 0, .5),
color=ORANGE,
**kwargs)
ax_absolute_text(x_, y_, txt, roffset=roffset, **txtargs)
def ax_relative_text(x, y, txt, ax=None, offset=None, **kwargs):
if ax is None:
ax = gca()
xy, width, height = _axis_xy_width_height(ax)
x_, y_ = ((xy[0]) + x * width, (xy[1] + height) - y * height)
if offset is not None:
xoff, yoff = offset
x_ += xoff
y_ += yoff
ax_absolute_text(x_, y_, txt, ax=ax, **kwargs)
def ax_absolute_text(x_, y_, txt, ax=None, roffset=None, **kwargs):
    """Place text at data coordinates (x_, y_), optionally nudged by a
    relative offset expressed as a fraction of the axis width/height."""
    if ax is None:
        ax = gca()
    # NOTE(review): this *overwrites* any caller-supplied fontproperties
    # with FONTS.relative -- looks like it was meant to be
    # "if 'fontproperties' not in kwargs"; confirm intent before changing.
    if 'fontproperties' in kwargs:
        kwargs['fontproperties'] = FONTS.relative
    if roffset is not None:
        xroff, yroff = roffset
        xy, width, height = _axis_xy_width_height(ax)
        x_ += xroff * width
        y_ += yroff * height
    ax.text(x_, y_, txt, **kwargs)
def fig_relative_text(x, y, txt, **kwargs):
kwargs['horizontalalignment'] = 'center'
kwargs['verticalalignment'] = 'center'
fig = gcf()
#xy, width, height = _axis_xy_width_height(ax)
#x_, y_ = ((xy[0]+width)+x*width, (xy[1]+height)-y*height)
fig.text(x, y, txt, **kwargs)
def draw_text(text_str, rgb_textFG=(0, 0, 0), rgb_textBG=(1, 1, 1)):
ax = gca()
xy, width, height = _axis_xy_width_height(ax)
text_x = xy[0] + (width / 2)
text_y = xy[1] + (height / 2)
ax.text(text_x, text_y, text_str,
horizontalalignment='center',
verticalalignment='center',
color=rgb_textFG,
backgroundcolor=rgb_textBG)
def set_figtitle(figtitle, subtitle='', forcefignum=True, incanvas=True):
    """Set the figure suptitle and the Qt window title.

    With incanvas=False only the window title is set and the in-canvas
    suptitle is cleared.  The window title is always prefixed with the
    figure number.
    """
    if figtitle is None:
        figtitle = ''
    fig = gcf()
    if incanvas:
        if subtitle != '':
            subtitle = '\n' + subtitle
        fig.suptitle(figtitle + subtitle, fontsize=14, fontweight='bold')
        #fig.suptitle(figtitle, x=.5, y=.98, fontproperties=FONTS.figtitle)
        #fig_relative_text(.5, .96, subtitle, fontproperties=FONTS.subtitle)
    else:
        fig.suptitle('')
    # NOTE(review): forcefignum is accepted but never used -- confirm intent.
    window_figtitle = ('fig(%d) ' % fig.number) + figtitle
    fig.canvas.set_window_title(window_figtitle)
def convert_keypress_event_mpl_to_qt4(mevent):
global TMP_mevent
TMP_mevent = mevent
# Grab the key from the mpl.KeyPressEvent
key = mevent.key
print('[df2] convert event mpl -> qt4')
print('[df2] key=%r' % key)
# dicts modified from backend_qt4.py
mpl2qtkey = {'control': Qt.Key_Control, 'shift': Qt.Key_Shift,
'alt': Qt.Key_Alt, 'super': Qt.Key_Meta,
'enter': Qt.Key_Return, 'left': Qt.Key_Left, 'up': Qt.Key_Up,
'right': Qt.Key_Right, 'down': Qt.Key_Down,
'escape': Qt.Key_Escape, 'f1': Qt.Key_F1, 'f2': Qt.Key_F2,
'f3': Qt.Key_F3, 'f4': Qt.Key_F4, 'f5': Qt.Key_F5,
'f6': Qt.Key_F6, 'f7': Qt.Key_F7, 'f8': Qt.Key_F8,
'f9': Qt.Key_F9, 'f10': Qt.Key_F10, 'f11': Qt.Key_F11,
'f12': Qt.Key_F12, 'home': Qt.Key_Home, 'end': Qt.Key_End,
'pageup': Qt.Key_PageUp, 'pagedown': Qt.Key_PageDown}
# Reverse the control and super (aka cmd/apple) keys on OSX
if sys.platform == 'darwin':
mpl2qtkey.update({'super': Qt.Key_Control, 'control': Qt.Key_Meta, })
# Try to reconstruct QtGui.KeyEvent
type_ = QtCore.QEvent.Type(QtCore.QEvent.KeyPress) # The type should always be KeyPress
text = ''
# Try to extract the original modifiers
modifiers = QtCore.Qt.NoModifier # initialize to no modifiers
if key.find(u'ctrl+') >= 0:
modifiers = modifiers | QtCore.Qt.ControlModifier
key = key.replace(u'ctrl+', u'')
print('[df2] has ctrl modifier')
text += 'Ctrl+'
if key.find(u'alt+') >= 0:
modifiers = modifiers | QtCore.Qt.AltModifier
key = key.replace(u'alt+', u'')
print('[df2] has alt modifier')
text += 'Alt+'
if key.find(u'super+') >= 0:
modifiers = modifiers | QtCore.Qt.MetaModifier
key = key.replace(u'super+', u'')
print('[df2] has super modifier')
text += 'Super+'
if key.isupper():
modifiers = modifiers | QtCore.Qt.ShiftModifier
print('[df2] has shift modifier')
text += 'Shift+'
# Try to extract the original key
try:
if key in mpl2qtkey:
key_ = mpl2qtkey[key]
else:
key_ = ord(key.upper()) # Qt works with uppercase keys
text += key.upper()
except Exception as ex:
print('[df2] ERROR key=%r' % key)
print('[df2] ERROR %r' % ex)
raise
autorep = False # default false
count = 1 # default 1
text = QtCore.QString(text) # The text is somewhat arbitrary
# Create the QEvent
print('----------------')
print('[df2] Create event')
print('[df2] type_ = %r' % type_)
print('[df2] text = %r' % text)
print('[df2] modifiers = %r' % modifiers)
print('[df2] autorep = %r' % autorep)
print('[df2] count = %r ' % count)
print('----------------')
qevent = QtGui.QKeyEvent(type_, key_, modifiers, text, autorep, count)
return qevent
def test_build_qkeyevent():
import draw_func2 as df2
qtwin = df2.QT4_WINS[0]
# This reconstructs an test mplevent
canvas = df2.figure(1).canvas
mevent = matplotlib.backend_bases.KeyEvent('key_press_event', canvas, u'ctrl+p', x=672, y=230.0)
qevent = df2.convert_keypress_event_mpl_to_qt4(mevent)
app = qtwin.backend.app
app.sendEvent(qtwin.ui, mevent)
#type_ = QtCore.QEvent.Type(QtCore.QEvent.KeyPress) # The type should always be KeyPress
#text = QtCore.QString('A') # The text is somewhat arbitrary
#modifiers = QtCore.Qt.NoModifier # initialize to no modifiers
#modifiers = modifiers | QtCore.Qt.ControlModifier
#modifiers = modifiers | QtCore.Qt.AltModifier
#key_ = ord('A') # Qt works with uppercase keys
#autorep = False # default false
#count = 1 # default 1
#qevent = QtGui.QKeyEvent(type_, key_, modifiers, text, autorep, count)
return qevent
# This actually doesn't matter
def on_key_press_event(event):
'redirects keypress events to main window'
global QT4_WINS
print('[df2] %r' % event)
print('[df2] %r' % str(event.__dict__))
for qtwin in QT4_WINS:
qevent = convert_keypress_event_mpl_to_qt4(event)
app = qtwin.backend.app
print('[df2] attempting to send qevent to qtwin')
app.sendEvent(qtwin, qevent)
# TODO: FINISH ME
#PyQt4.QtGui.QKeyEvent
#qtwin.keyPressEvent(event)
#fig.canvas.manager.window.keyPressEvent()
def customize_figure(fig, docla):
if not 'user_stat_list' in fig.__dict__.keys() or docla:
fig.user_stat_list = []
fig.user_notes = []
# We dont need to catch keypress events because you just need to set it as
# an application level shortcut
# Catch key press events
#key_event_cbid = fig.__dict__.get('key_event_cbid', None)
#if key_event_cbid is not None:
#fig.canvas.mpl_disconnect(key_event_cbid)
#fig.key_event_cbid = fig.canvas.mpl_connect('key_press_event', on_key_press_event)
fig.df2_closed = False
def gcf():
    """Like plt.gcf(), but prefer the registered Qt plot widget's figure
    when one has been registered."""
    if plotWidget is None:
        return plt.gcf()
    return plotWidget.figure
def gca():
    """Like plt.gca(), but prefer the first axes of the registered Qt
    plot widget's figure when one has been registered."""
    if plotWidget is None:
        return plt.gca()
    return plotWidget.figure.get_axes()[0]
def cla():
return plt.cla()
def clf():
return plt.clf()
def get_fig(fnum=None):
    """Return the figure numbered fnum, creating it if needed.

    Prefers the registered Qt plot widget's figure when one exists.
    With fnum None, falls back to the current figure (creating one if
    pyplot has none).
    """
    printDBG('[df2] get_fig(fnum=%r)' % fnum)
    fig_kwargs = dict(figsize=FIGSIZE, dpi=DPI)
    if plotWidget is not None:
        return gcf()
    if fnum is None:
        try:
            fig = gcf()
        except Exception as ex:
            printDBG('[df2] get_fig(): ex=%r' % ex)
            fig = plt.figure(**fig_kwargs)
        fnum = fig.number
    else:
        try:
            fig = plt.figure(fnum, **fig_kwargs)
        except Exception as ex:
            print(repr(ex))
            warnings.warn(repr(ex))
            fig = gcf()
    return fig
def get_ax(fnum=None, pnum=None):
figure(fnum=fnum, pnum=pnum)
ax = gca()
return ax
def figure(fnum=None, docla=False, title=None, pnum=(1, 1, 1), figtitle=None,
           doclf=False, **kwargs):
    '''
    Ensure a figure and subplot exist and make them current.

    fnum = fignum = figure number
    pnum = plotnum = plot tuple (nRows, nCols, plotIndex), or a 3-digit
           int like 221 that is unpacked into tuple form
    docla -- clear the target axes;  doclf -- clear the whole figure
    title / figtitle set the axes title / figure (window) title
    '''
    #matplotlib.pyplot.xkcd()
    fig = get_fig(fnum)
    axes_list = fig.get_axes()
    # Ensure my customized settings
    customize_figure(fig, docla)
    # Convert pnum to tuple format (e.g. 221 -> (2, 2, 1))
    if tools.is_int(pnum):
        nr = pnum // 100
        nc = pnum // 10 - (nr * 10)
        px = pnum - (nr * 100) - (nc * 10)
        pnum = (nr, nc, px)
    if doclf:  # a bit hacky. Need to rectify docla and doclf
        fig.clf()
    # Get the subplot
    if docla or len(axes_list) == 0:
        printDBG('[df2] *** NEW FIGURE %r.%r ***' % (fnum, pnum))
        if not pnum is None:
            #ax = plt.subplot(*pnum)
            ax = fig.add_subplot(*pnum)
            ax.cla()
        else:
            ax = gca()
    else:
        printDBG('[df2] *** OLD FIGURE %r.%r ***' % (fnum, pnum))
        if not pnum is None:
            ax = plt.subplot(*pnum)  # fig.add_subplot fails here
            #ax = fig.add_subplot(*pnum)
        else:
            ax = gca()
            #ax = axes_list[0]
    # Set the title
    if not title is None:
        ax = gca()
        ax.set_title(title, fontproperties=FONTS.axtitle)
    # Add title to figure
    # A lone subplot donates its title to the figure when none was given
    if figtitle is None and pnum == (1, 1, 1):
        figtitle = title
    if not figtitle is None:
        set_figtitle(figtitle, incanvas=False)
    return fig
def plot_pdf(data, draw_support=True, scale_to=None, label=None, color=0,
nYTicks=3):
fig = gcf()
ax = gca()
data = np.array(data)
if len(data) == 0:
warnstr = '[df2] ! Warning: len(data) = 0. Cannot visualize pdf'
warnings.warn(warnstr)
draw_text(warnstr)
return
bw_factor = .05
if isinstance(color, (int, float)):
colorx = color
line_color = plt.get_cmap('gist_rainbow')(colorx)
else:
line_color = color
# Estimate a pdf
data_pdf = estimate_pdf(data, bw_factor)
# Get probability of seen data
prob_x = data_pdf(data)
# Get probability of unseen data data
x_data = np.linspace(0, data.max(), 500)
y_data = data_pdf(x_data)
# Scale if requested
if not scale_to is None:
scale_factor = scale_to / y_data.max()
y_data *= scale_factor
prob_x *= scale_factor
#Plot the actual datas on near the bottom perterbed in Y
if draw_support:
pdfrange = prob_x.max() - prob_x.min()
perb = (np.random.randn(len(data))) * pdfrange / 30.
preb_y_data = np.abs([pdfrange / 50. for _ in data] + perb)
ax.plot(data, preb_y_data, 'o', color=line_color, figure=fig, alpha=.1)
# Plot the pdf (unseen data)
ax.plot(x_data, y_data, color=line_color, label=label)
if nYTicks is not None:
yticks = np.linspace(min(y_data), max(y_data), nYTicks)
ax.set_yticks(yticks)
def estimate_pdf(data, bw_factor):
    """Fit a Gaussian KDE to 1d data with bw_factor as the bandwidth
    method; returns the (callable) scipy gaussian_kde object."""
    try:
        kde = scipy.stats.gaussian_kde(data, bw_factor)
    except Exception as ex:
        print('[df2] ! Exception while estimating kernel density')
        print('[df2] data=%r' % (data,))
        print('[df2] ex=%r' % (ex,))
        raise
    # Recorded for bookkeeping (bandwidth was already applied above)
    kde.covariance_factor = bw_factor
    return kde
def show_histogram(data, bins=None, **kwargs):
print('[df2] show_histogram()')
dmin = int(np.floor(data.min()))
dmax = int(np.ceil(data.max()))
if bins is None:
bins = dmax - dmin
fig = figure(**kwargs)
ax = gca()
ax.hist(data, bins=bins, range=(dmin, dmax))
#help(np.bincount)
fig.show()
def show_signature(sig, **kwargs):
fig = figure(**kwargs)
plt.plot(sig)
fig.show()
def plot_stems(x_data=None, y_data=None):
    """Draw a stem plot of y_data vs x_data, sorted by descending y.

    If only y_data is given, x_data defaults to 0..len(y_data)-1.

    Cleanup: removed a stray `pass`, and the argsort is now computed
    once instead of twice.
    """
    if y_data is not None and x_data is None:
        x_data = np.arange(len(y_data))
    if len(x_data) != len(y_data):
        print('[df2] WARNING plot_stems(): len(x_data)!=len(y_data)')
    if len(x_data) == 0:
        print('[df2] WARNING plot_stems(): len(x_data)=len(y_data)=0')
    x_data_ = np.array(x_data)
    y_data_ = np.array(y_data)
    # Sort both arrays by descending y value
    order = y_data_.argsort()[::-1]
    x_data_sort = x_data_[order]
    y_data_sort = y_data_[order]
    markerline, stemlines, baseline = pylab.stem(x_data_sort, y_data_sort, linefmt='-')
    pylab.setp(markerline, 'markerfacecolor', 'b')
    pylab.setp(baseline, 'linewidth', 0)
    ax = gca()
    ax.set_xlim(min(x_data) - 1, max(x_data) + 1)
    ax.set_ylim(min(y_data) - 1, max(max(y_data), max(x_data)) + 1)
def plot_sift_signature(sift, title='', fnum=None, pnum=None):
figure(fnum=fnum, pnum=pnum)
ax = gca()
plot_bars(sift, 16)
ax.set_xlim(0, 128)
ax.set_ylim(0, 256)
space_xticks(9, 16)
space_yticks(5, 64)
ax.set_title(title)
dark_background(ax)
return ax
def dark_background(ax=None, doubleit=False):
if ax is None:
ax = gca()
xy, width, height = _axis_xy_width_height(ax)
if doubleit:
halfw = (doubleit) * (width / 2)
halfh = (doubleit) * (height / 2)
xy = (xy[0] - halfw, xy[1] - halfh)
width *= (doubleit + 1)
height *= (doubleit + 1)
rect = matplotlib.patches.Rectangle(xy, width, height, lw=0, zorder=0)
rect.set_clip_on(True)
rect.set_fill(True)
rect.set_color(BLACK * .9)
rect = ax.add_patch(rect)
def space_xticks(nTicks=9, spacing=16, ax=None):
if ax is None:
ax = gca()
ax.set_xticks(np.arange(nTicks) * spacing)
small_xticks(ax)
def space_yticks(nTicks=9, spacing=32, ax=None):
if ax is None:
ax = gca()
ax.set_yticks(np.arange(nTicks) * spacing)
small_yticks(ax)
def small_xticks(ax=None):
    """Shrink the x-axis tick label font to 8pt.

    Bugfix: the original never resolved the ax=None default and raised
    AttributeError when called without an axes.
    """
    if ax is None:
        ax = gca()
    for tick in ax.xaxis.get_major_ticks():
        tick.label.set_fontsize(8)
def small_yticks(ax=None):
    """Shrink the y-axis tick label font to 8pt.

    Bugfix: the original never resolved the ax=None default and raised
    AttributeError when called without an axes.
    """
    if ax is None:
        ax = gca()
    for tick in ax.yaxis.get_major_ticks():
        tick.label.set_fontsize(8)
def plot_bars(y_data, nColorSplits=1):
width = 1
nDims = len(y_data)
nGroup = nDims // nColorSplits
ori_colors = distinct_colors(nColorSplits)
x_data = np.arange(nDims)
ax = gca()
for ix in xrange(nColorSplits):
xs = np.arange(nGroup) + (nGroup * ix)
color = ori_colors[ix]
x_dat = x_data[xs]
y_dat = y_data[xs]
ax.bar(x_dat, y_dat, width, color=color, edgecolor=np.array(color) * .8)
def phantom_legend_label(label, color, loc='upper right'):
'adds a legend label without displaying an actor'
pass
#phantom_actor = plt.Circle((0, 0), 1, fc=color, prop=FONTS.legend, loc=loc)
#plt.legend(phant_actor, label, framealpha=.2)
#plt.legend(*zip(*legend_tups), framealpha=.2)
#legend_tups = []
#legend_tups.append((phantom_actor, label))
def legend(loc='upper right'):
ax = gca()
ax.legend(prop=FONTS.legend, loc=loc)
def plot_histpdf(data, label=None, draw_support=False, nbins=10):
freq, _ = plot_hist(data, nbins=nbins)
plot_pdf(data, draw_support=draw_support, scale_to=freq.max(), label=label)
def plot_hist(data, bins=None, nbins=10, weights=None):
    """Plot a histogram of data on the current axes.

    Returns (freq, bin_edges) from matplotlib's hist.

    Bugfixes: the original raised NameError whenever `bins` was supplied
    (dmin/dmax were only computed in the bins-is-None branch), and the
    computed `bins` value was never passed to hist (nbins was used
    unconditionally).
    """
    if isinstance(data, list):
        data = np.array(data)
    # The histogram range always spans the data
    dmin = data.min()
    dmax = data.max()
    if bins is None:
        bins = nbins
    ax = gca()
    freq, bins_, patches = ax.hist(data, bins=bins, weights=weights,
                                   range=(dmin, dmax))
    return freq, bins_
def variation_trunctate(data):
ax = gca()
data = np.array(data)
if len(data) == 0:
warnstr = '[df2] ! Warning: len(data) = 0. Cannot variation_truncate'
warnings.warn(warnstr)
return
trunc_max = data.mean() + data.std() * 2
trunc_min = np.floor(data.min())
ax.set_xlim(trunc_min, trunc_max)
#trunc_xticks = np.linspace(0, int(trunc_max),11)
#trunc_xticks = trunc_xticks[trunc_xticks >= trunc_min]
#trunc_xticks = np.append([int(trunc_min)], trunc_xticks)
#no_zero_yticks = ax.get_yticks()[ax.get_yticks() > 0]
#ax.set_xticks(trunc_xticks)
#ax.set_yticks(no_zero_yticks)
#_----------------- HELPERS ^^^ ---------
# ---- IMAGE CREATION FUNCTIONS ----
@tools.debug_exception
def draw_sift(desc, kp=None):
    # TODO: There might be a divide by zero warning in here.
    '''Overlay a 128-d SIFT descriptor glyph on the current axes.

    desc -- the descriptor: 4x4 spatial bins x 8 orientation bins = 128
            magnitudes, expected in [0, 255]; example of the expected form:
        desc = np.random.rand(128)
        desc = desc / np.sqrt((desc**2).sum())
        desc = np.round(desc * 255)
    kp -- optional (x, y, a, c, d) keypoint; its lower-triangular affine
          shape maps the unit-square glyph onto the keypoint's elliptical
          patch. Defaults to the identity at the origin.
    '''
    # This is draw, because it is an overlay
    ax = gca()
    tau = 2 * np.pi
    # Layout constants (axes units) for placing the 4x4 grid of cells
    DSCALE = .25
    XYSCALE = .5
    XYSHIFT = -.75
    ORI_SHIFT = 0  # -tau #1/8 * tau
    # SIFT CONSTANTS
    NORIENTS = 8
    NX = 4
    NY = 4
    NBINS = NX * NY
    def cirlce_rad2xy(radians, mag):
        # polar -> cartesian (NOTE: function-name typo is pre-existing)
        return np.cos(radians) * mag, np.sin(radians) * mag
    discrete_ori = (np.arange(0, NORIENTS) * (tau / NORIENTS) + ORI_SHIFT)
    # Build list of plot positions
    # Build an "arm" for each sift measurement
    arm_mag = desc / 255.0
    arm_ori = np.tile(discrete_ori, (NBINS, 1)).flatten()
    # The offset x,y's for each sift measurment
    arm_dxy = np.array(zip(*cirlce_rad2xy(arm_ori, arm_mag)))
    yxt_gen = itertools.product(xrange(NY), xrange(NX), xrange(NORIENTS))
    yx_gen = itertools.product(xrange(NY), xrange(NX))
    # Transform the drawing of the SIFT descriptor to its elliptical patch
    axTrans = ax.transData
    kpTrans = None
    if kp is None:
        kp = [0, 0, 1, 0, 1]
    kp = np.array(kp)
    kpT = kp.T
    x, y, a, c, d = kpT[:, 0]
    kpTrans = Affine2D([( a, 0, x),
                        ( c, d, y),
                        ( 0, 0, 1)])
    axTrans = ax.transData
    # Draw 8 directional arms in each of the 4x4 grid cells
    arrow_patches = []
    arrow_patches2 = []
    for y, x, t in yxt_gen:
        index = y * NX * NORIENTS + x * NORIENTS + t
        (dx, dy) = arm_dxy[index]
        arw_x = x * XYSCALE + XYSHIFT
        arw_y = y * XYSCALE + XYSHIFT
        arw_dy = dy * DSCALE * 1.5  # scale for viz Hack
        arw_dx = dx * DSCALE * 1.5
        #posA = (arw_x, arw_y)
        #posB = (arw_x+arw_dx, arw_y+arw_dy)
        _args = [arw_x, arw_y, arw_dx, arw_dy]
        _kwargs = dict(head_width=.0001, transform=kpTrans, length_includes_head=False)
        # Two identical arrows: one drawn red on top of a thicker black border
        arrow_patches += [FancyArrow(*_args, **_kwargs)]
        arrow_patches2 += [FancyArrow(*_args, **_kwargs)]
    # Draw circles around each of the 4x4 grid cells
    circle_patches = []
    for y, x in yx_gen:
        circ_xy = (x * XYSCALE + XYSHIFT, y * XYSCALE + XYSHIFT)
        circ_radius = DSCALE
        circle_patches += [Circle(circ_xy, circ_radius, transform=kpTrans)]
    # Efficiently draw many patches with PatchCollections
    circ_collection = PatchCollection(circle_patches)
    circ_collection.set_facecolor('none')
    circ_collection.set_transform(axTrans)
    circ_collection.set_edgecolor(BLACK)
    circ_collection.set_alpha(.5)
    # Body of arrows
    arw_collection = PatchCollection(arrow_patches)
    arw_collection.set_transform(axTrans)
    arw_collection.set_linewidth(.5)
    arw_collection.set_color(RED)
    arw_collection.set_alpha(1)
    # Border of arrows
    arw_collection2 = matplotlib.collections.PatchCollection(arrow_patches2)
    arw_collection2.set_transform(axTrans)
    arw_collection2.set_linewidth(1)
    arw_collection2.set_color(BLACK)
    arw_collection2.set_alpha(1)
    # Add artists to axes (border collection first so red arms stay on top)
    ax.add_collection(circ_collection)
    ax.add_collection(arw_collection2)
    ax.add_collection(arw_collection)
def feat_scores_to_color(fs, cmap_='hot'):
    """Map a 1d array of feature scores onto colormap colors.

    Scores are linearly rescaled into [.1, 1.0] before the colormap lookup
    so even the minimum score remains visible. When every score is equal,
    the colormap midpoint is used for all of them.
    """
    assert len(fs.shape) == 1, 'score must be 1d'
    cmap = plt.get_cmap(cmap_)
    min_score = fs.min()
    score_range = fs.max() - min_score
    if score_range == 0:
        # Degenerate case: all scores identical -> one uniform color
        return [cmap(.5) for _ in xrange(len(fs))]
    def rescale(score):
        return .1 + .9 * (float(score) - min_score) / score_range
    return [cmap(rescale(score)) for score in fs]
def colorbar(scalars, colors):
    '''Adds a color bar next to the axes.

    scalars -- sequence of score values, one per color
    colors  -- matching sequence of RGBA colors
    The pairs are sorted by scalar so the bar runs low-to-high.
    '''
    orientation = ['vertical', 'horizontal'][0]
    TICK_FONTSIZE = 8
    # Put colors and scalars in correct order
    # NOTE(review): ties in `scalars` make sorted() compare the color tuples
    # as a tiebreaker; assumes colors are orderable tuples -- confirm.
    sorted_scalars = sorted(scalars)
    sorted_colors = [x for (y, x) in sorted(zip(scalars, colors))]
    # Make a listed colormap and mappable object
    listed_cmap = mpl.colors.ListedColormap(sorted_colors)
    sm = plt.cm.ScalarMappable(cmap=listed_cmap)
    sm.set_array(sorted_scalars)
    # Use mapable object to create the colorbar
    cb = plt.colorbar(sm, orientation=orientation)
    # Add the colorbar to the correct label
    axis = cb.ax.xaxis if orientation == 'horizontal' else cb.ax.yaxis
    position = 'bottom' if orientation == 'horizontal' else 'right'
    axis.set_ticks_position(position)
    axis.set_ticks([0, .5, 1])
    cb.ax.tick_params(labelsize=TICK_FONTSIZE)
def draw_lines2(kpts1, kpts2, fm=None, fs=None, kpts2_offset=(0, 0),
                color_list=None, **kwargs):
    '''Draw correspondence lines between two keypoint sets on the current axes.

    kpts1, kpts2   -- (N, 5) keypoint arrays; columns 0 and 1 are x, y
    fm             -- (M, 2) index pairs into kpts1/kpts2; None means the
                      arrays are in direct (identity) correspondence
    fs             -- optional per-match scores used to color the lines
    kpts2_offset   -- (w, h) shift applied to kpts2 (stacked-chip display)
    color_list     -- explicit per-line colors (ignored unless DISTINCT_COLORS)
    '''
    if not DISTINCT_COLORS:
        color_list = None
    # input data
    if not SHOW_LINES:
        return
    if fm is None:  # assume kpts are in direct correspondence
        assert kpts1.shape == kpts2.shape
        # BUGFIX: the identity match list was never constructed, so the
        # len(fm)/fm[:, 0] accesses below raised TypeError on fm=None.
        # Build it the same way draw_fmatch does.
        fm = np.tile(np.arange(0, len(kpts1)), (2, 1)).T
    if len(fm) == 0:
        return
    ax = gca()
    woff, hoff = kpts2_offset
    # Draw line collection
    kpts1_m = kpts1[fm[:, 0]].T
    kpts2_m = kpts2[fm[:, 1]].T
    xxyy_iter = iter(zip(kpts1_m[0],
                         kpts2_m[0] + woff,
                         kpts1_m[1],
                         kpts2_m[1] + hoff))
    if color_list is None:
        if fs is None:  # Draw with solid color
            color_list = [LINE_COLOR for fx in xrange(len(fm))]
        else:  # Draw with colors proportional to score difference
            color_list = feat_scores_to_color(fs)
    segments = [((x1, y1), (x2, y2)) for (x1, x2, y1, y2) in xxyy_iter]
    linewidth = [LINE_WIDTH for fx in xrange(len(fm))]
    line_alpha = LINE_ALPHA
    if LINE_ALPHA_OVERRIDE is not None:
        line_alpha = LINE_ALPHA_OVERRIDE
    line_group = LineCollection(segments, linewidth, color_list, alpha=line_alpha)
    #plt.colorbar(line_group, ax=ax)
    ax.add_collection(line_group)
    #figure(100)
    #plt.hexbin(x,y, cmap=plt.cm.YlOrRd_r)
def draw_kpts(kpts, *args, **kwargs):
    '''Backwards-compatible alias: forwards everything to draw_kpts2.'''
    draw_kpts2(kpts, *args, **kwargs)
def draw_kpts2(kpts, offset=(0, 0), ell=SHOW_ELLS, pts=False, pts_color=ORANGE,
               pts_size=POINT_SIZE, ell_alpha=ELL_ALPHA,
               ell_linewidth=ELL_LINEWIDTH, ell_color=ELL_COLOR,
               color_list=None, rect=None, arrow=False, **kwargs):
    '''Overlay keypoints on the current axes as ellipses and/or points.

    kpts   -- (N, 5) array of (x, y, a, c, d) keypoints; (a, 0, c, d) form the
              lower-triangular affine mapping the unit circle to the ellipse
    offset -- (x, y) shift applied to all keypoints (stacked-chip display)
    ell/pts/rect/arrow -- which glyph kinds to draw
    color_list -- per-keypoint colors (only honored when DISTINCT_COLORS)
    '''
    if not DISTINCT_COLORS:
        color_list = None
    printDBG('drawkpts2: Drawing Keypoints! ell=%r pts=%r' % (ell, pts))
    # get matplotlib info
    ax = gca()
    pltTrans = ax.transData
    ell_actors = []
    # data
    kpts = np.array(kpts)
    kptsT = kpts.T
    x = kptsT[0, :] + offset[0]
    y = kptsT[1, :] + offset[1]
    printDBG('[df2] draw_kpts()----------')
    printDBG('[df2] draw_kpts() ell=%r pts=%r' % (ell, pts))
    printDBG('[df2] draw_kpts() drawing kpts.shape=%r' % (kpts.shape,))
    if rect is None:
        rect = ell
    # NOTE(review): this unconditionally disables rectangle drawing, making
    # the two preceding lines and the `if pts` guard below dead code; looks
    # like leftover debugging -- confirm intent before removing.
    rect = False
    if pts is True:
        rect = False
    if ell or rect:
        printDBG('[df2] draw_kpts() drawing ell kptsT.shape=%r' % (kptsT.shape,))
        # We have the transformation from unit circle to ellipse here. (inv(A))
        a = kptsT[2]
        b = np.zeros(len(a))
        c = kptsT[3]
        d = kptsT[4]
        kpts_iter = izip(x, y, a, b, c, d)
        aff_list = [Affine2D([( a_, b_, x_),
                              ( c_, d_, y_),
                              ( 0, 0, 1)])
                    for (x_, y_, a_, b_, c_, d_) in kpts_iter]
        patch_list = []
        ell_actors = [Circle( (0, 0), 1, transform=aff) for aff in aff_list]
        if ell:
            patch_list += ell_actors
        if rect:
            rect_actors = [Rectangle( (-1, -1), 2, 2, transform=aff) for aff in aff_list]
            patch_list += rect_actors
        if arrow:
            # Draw up- and right-pointing unit arrows to show orientation/shear
            _kwargs = dict(head_width=.01, length_includes_head=False)
            arrow_actors1 = [FancyArrow(0, 0, 0, 1, transform=aff, **_kwargs) for aff in aff_list]
            arrow_actors2 = [FancyArrow(0, 0, 1, 0, transform=aff, **_kwargs) for aff in aff_list]
            patch_list += arrow_actors1
            patch_list += arrow_actors2
        ellipse_collection = matplotlib.collections.PatchCollection(patch_list)
        ellipse_collection.set_facecolor('none')
        ellipse_collection.set_transform(pltTrans)
        if ELL_ALPHA_OVERRIDE is not None:
            ell_alpha = ELL_ALPHA_OVERRIDE
        ellipse_collection.set_alpha(ell_alpha)
        ellipse_collection.set_linewidth(ell_linewidth)
        if not color_list is None:
            ell_color = color_list
        if ell_color == 'distinct':
            ell_color = distinct_colors(len(kpts))
        ellipse_collection.set_edgecolor(ell_color)
        ax.add_collection(ellipse_collection)
    if pts:
        printDBG('[df2] draw_kpts() drawing pts x.shape=%r y.shape=%r' % (x.shape, y.shape))
        if color_list is None:
            color_list = [pts_color for _ in xrange(len(x))]
        ax.autoscale(enable=False)
        ax.scatter(x, y, c=color_list, s=2 * pts_size, marker='o', edgecolor='none')
        #ax.autoscale(enable=False)
        #ax.plot(x, y, linestyle='None', marker='o', markerfacecolor=pts_color, markersize=pts_size, markeredgewidth=0)
# ---- CHIP DISPLAY COMMANDS ----
def imshow(img, fnum=None, title=None, figtitle=None, pnum=None,
           interpolation='nearest', **kwargs):
    '''Display an image on a (sub)figure and strip the axis ticks.

    img  -- BGR uint8/float image (3 channels) or 2d grayscale array
    fnum/pnum/title/figtitle -- forwarded to the module figure() helper
    interpolation -- other interpolations = nearest, bicubic, bilinear
    Returns (fig, ax).
    '''
    #printDBG('[df2] ----- IMSHOW ------ ')
    #printDBG('[***df2.imshow] fnum=%r pnum=%r title=%r *** ' % (fnum, pnum, title))
    #printDBG('[***df2.imshow] img.shape = %r ' % (img.shape,))
    #printDBG('[***df2.imshow] img.stats = %r ' % (helpers.printable_mystats(img),))
    fig = figure(fnum=fnum, pnum=pnum, title=title, figtitle=figtitle, **kwargs)
    ax = gca()
    if not DARKEN is None:
        # Scale pixel intensities down globally, preserving the input dtype
        imgdtype = img.dtype
        img = np.array(img, dtype=float) * DARKEN
        img = np.array(img, dtype=imgdtype)
    plt_imshow_kwargs = {
        'interpolation': interpolation,
        #'cmap': plt.get_cmap('gray'),
        'vmin': 0,
        'vmax': 255,
    }
    try:
        if len(img.shape) == 3 and img.shape[2] == 3:
            # img is in a color format
            imgBGR = img
            if imgBGR.dtype == np.float64:
                # assumes float images in [0, 1] are normalized -- TODO confirm
                if imgBGR.max() <= 1:
                    imgBGR = np.array(imgBGR, dtype=np.float32)
                else:
                    imgBGR = np.array(imgBGR, dtype=np.uint8)
            imgRGB = cv2.cvtColor(imgBGR, cv2.COLOR_BGR2RGB)
            ax.imshow(imgRGB, **plt_imshow_kwargs)
        elif len(img.shape) == 2:
            # img is in grayscale
            imgGRAY = img
            ax.imshow(imgGRAY, cmap=plt.get_cmap('gray'), **plt_imshow_kwargs)
        else:
            raise Exception('unknown image format')
    except TypeError as te:
        print('[df2] imshow ERROR %r' % te)
        raise
    except Exception as ex:
        # Dump diagnostics before re-raising so failures are debuggable
        print('[df2] img.dtype = %r' % (img.dtype,))
        print('[df2] type(img) = %r' % (type(img),))
        print('[df2] img.shape = %r' % (img.shape,))
        print('[df2] imshow ERROR %r' % ex)
        raise
    #plt.set_cmap('gray')
    ax.set_xticks([])
    ax.set_yticks([])
    #ax.set_autoscale(False)
    #try:
        #if pnum == 111:
            #fig.tight_layout()
    #except Exception as ex:
        #print('[df2] !! Exception durring fig.tight_layout: '+repr(ex))
        #raise
    return fig, ax
def get_num_channels(img):
    '''Return the number of color channels (1 or 3) of an image array.

    Grayscale images may be 2d, or 3d with a trailing axis of length 1.
    Raises Exception for any other shape.
    '''
    shape = img.shape
    if len(shape) == 2:
        return 1
    if len(shape) == 3:
        if shape[2] == 3:
            return 3
        if shape[2] == 1:
            return 1
    raise Exception('Cannot determine number of channels')
def stack_images(img1, img2, vert=None):
    '''Concatenate two images onto one canvas, vertically or horizontally.

    img1, img2 -- arrays with matching channel counts (1 or 3)
    vert       -- True stacks img2 below img1, False to the right;
                  None picks the orientation with the squarer result
    Returns (imgB, woff, hoff) where (woff, hoff) is img2's top-left offset.
    '''
    nChannels = get_num_channels(img1)
    nChannels2 = get_num_channels(img2)
    assert nChannels == nChannels2
    (h1, w1) = img1.shape[0: 2]  # get chip dimensions
    (h2, w2) = img2.shape[0: 2]
    woff, hoff = 0, 0
    vert_wh = max(w1, w2), h1 + h2
    horiz_wh = w1 + w2, max(h1, h2)
    if vert is None:
        # Display the orientation with the better (closer to 1) aspect ratio.
        # BUGFIX: use true division -- under Python 2 the previous integer
        # division floored the ratios, which could pick the wrong orientation.
        vert_ar = float(max(vert_wh)) / min(vert_wh)
        horiz_ar = float(max(horiz_wh)) / min(horiz_wh)
        vert = vert_ar < horiz_ar
    if vert:
        wB, hB = vert_wh
        hoff = h1
    else:
        wB, hB = horiz_wh
        woff = w1
    # concatentate images onto a zero-filled canvas large enough for both
    if nChannels == 3:
        imgB = np.zeros((hB, wB, 3), np.uint8)
        imgB[0:h1, 0:w1, :] = img1
        imgB[hoff:(hoff + h2), woff:(woff + w2), :] = img2
    elif nChannels == 1:
        imgB = np.zeros((hB, wB), np.uint8)
        imgB[0:h1, 0:w1] = img1
        imgB[hoff:(hoff + h2), woff:(woff + w2)] = img2
    return imgB, woff, hoff
def show_chipmatch2(rchip1, rchip2, kpts1, kpts2, fm=None, fs=None, title=None,
                    vert=None, fnum=None, pnum=None, **kwargs):
    '''Draws two chips and the feature matches between them. feature matches
    kpts1 and kpts2 use the (x,y,a,c,d)

    Returns (ax, xywh1, xywh2) where xywh1/xywh2 locate each chip inside the
    stacked display so callers can add further overlays.
    '''
    printDBG('[df2] draw_matches2() fnum=%r, pnum=%r' % (fnum, pnum))
    # get matching keypoints + offset
    (h1, w1) = rchip1.shape[0:2]  # get chip (h, w) dimensions
    (h2, w2) = rchip2.shape[0:2]
    # Stack the compared chips
    match_img, woff, hoff = stack_images(rchip1, rchip2, vert)
    xywh1 = (0, 0, w1, h1)
    xywh2 = (woff, hoff, w2, h2)
    # Show the stacked chips
    fig, ax = imshow(match_img, title=title, fnum=fnum, pnum=pnum)
    # Overlay feature match annotations
    draw_fmatch(xywh1, xywh2, kpts1, kpts2, fm, fs, **kwargs)
    return ax, xywh1, xywh2
# draw feature match
def draw_fmatch(xywh1, xywh2, kpts1, kpts2, fm, fs=None, lbl1=None, lbl2=None,
                fnum=None, pnum=None, rect=False, colorbar_=True, **kwargs):
    '''Draws the matching features. This is draw because it is an overlay
    xywh1 - location of rchip1 in the axes
    xywh2 - location or rchip2 in the axes
    fm    - (M, 2) match index pairs; None means direct correspondence
    fs    - optional per-match scores used for coloring and the colorbar
    kwargs toggles: draw_pts, draw_ell, draw_lines, all_kpts, show_nMatches
    '''
    if fm is None:
        assert kpts1.shape == kpts2.shape, 'shapes different or fm not none'
        # Build the identity correspondence explicitly
        fm = np.tile(np.arange(0, len(kpts1)), (2, 1)).T
    pts = kwargs.get('draw_pts', False)
    ell = kwargs.get('draw_ell', True)
    lines = kwargs.get('draw_lines', True)
    ell_alpha = kwargs.get('ell_alpha', .4)
    nMatch = len(fm)
    #printDBG('[df2.draw_fnmatch] nMatch=%r' % nMatch)
    x1, y1, w1, h1 = xywh1
    x2, y2, w2, h2 = xywh2
    offset2 = (x2, y2)
    # Custom user label for chips 1 and 2
    if lbl1 is not None:
        absolute_lbl(x1 + w1, y1, lbl1)
    if lbl2 is not None:
        absolute_lbl(x2 + w2, y2, lbl2)
    # Plot the number of matches
    if kwargs.get('show_nMatches', False):
        upperleft_text('#match=%d' % nMatch)
    # Draw all keypoints in both chips as points
    if kwargs.get('all_kpts', False):
        all_args = dict(ell=False, pts=pts, pts_color=GREEN, pts_size=2,
                        ell_alpha=ell_alpha, rect=rect)
        all_args.update(kwargs)
        draw_kpts2(kpts1, **all_args)
        draw_kpts2(kpts2, offset=offset2, **all_args)
    # Draw Lines and Ellipses and Points oh my
    if nMatch > 0:
        colors = [kwargs['colors']] * nMatch if 'colors' in kwargs else distinct_colors(nMatch)
        if fs is not None:
            colors = feat_scores_to_color(fs, 'hot')
        acols = add_alpha(colors)
        # Helper functions
        def _drawkpts(**_kwargs):
            _kwargs.update(kwargs)
            fxs1 = fm[:, 0]
            fxs2 = fm[:, 1]
            draw_kpts2(kpts1[fxs1], rect=rect, **_kwargs)
            draw_kpts2(kpts2[fxs2], offset=offset2, rect=rect, **_kwargs)
        def _drawlines(**_kwargs):
            _kwargs.update(kwargs)
            draw_lines2(kpts1, kpts2, fm, fs, kpts2_offset=offset2, **_kwargs)
        # User helpers
        if ell:
            _drawkpts(pts=False, ell=True, color_list=colors)
        if pts:
            # Black backing dots under the colored dots for contrast
            _drawkpts(pts_size=8, pts=True, ell=False, pts_color=BLACK)
            _drawkpts(pts_size=6, pts=True, ell=False, color_list=acols)
        if lines:
            _drawlines(color_list=colors)
    else:
        # No matches: mark the second chip with a big X
        draw_boxedX(xywh2)
    # NOTE(review): 'colors' in vars() guards against nMatch == 0, where the
    # local `colors` was never bound -- fragile but intentional.
    if fs is not None and colorbar_ and 'colors' in vars() and colors is not None:
        colorbar(fs, colors)
    #legend()
    return None
def draw_boxedX(xywh, color=RED, lw=2, alpha=.5, theta=0):
    '''Draws a big (red by default) X across the xywh = (x, y, w, h) box.'''
    ax = gca()
    x1, y1, w, h = xywh
    x2, y2 = x1 + w, y1 + h
    # The two diagonals of the box
    segments = [((x1, y1), (x2, y2)),
                ((x1, y2), (x2, y1))]
    trans = Affine2D()
    trans.rotate(theta)
    trans = trans + ax.transData
    width_list = [lw] * len(segments)
    color_list = [color] * len(segments)
    # NOTE(review): the rotation is passed as transOffset, which applies to
    # collection offsets (none are set here), not the segments themselves;
    # transform= may have been intended -- confirm before changing.
    line_group = LineCollection(segments, width_list, color_list, alpha=alpha,
                                transOffset=trans)
    ax.add_collection(line_group)
def disconnect_callback(fig, callback_type, **kwargs):
    '''Disconnect a previously registered mpl event callback from a figure.

    Clears the stored callback id on the figure (keys '<type>_cbid' /
    '<type>_func', see connect_callback) and returns the old (cbid, cbfn)
    pair so it can be re-registered later; both are None when nothing
    was connected. Any axes passed via kwargs get their _hs_viewtype reset.
    '''
    #print('[df2] disconnect %r callback' % callback_type)
    for ax in kwargs.get('axes', []):
        ax._hs_viewtype = ''
    key_cbid = callback_type + '_cbid'
    key_cbfn = callback_type + '_func'
    old_cbid = fig.__dict__.get(key_cbid, None)
    old_cbfn = fig.__dict__.get(key_cbfn, None)
    if old_cbid is None:
        # nothing registered; do not hand back a stale function either
        old_cbfn = None
    else:
        fig.canvas.mpl_disconnect(old_cbid)
    fig.__dict__[key_cbid] = None
    return old_cbid, old_cbfn
def connect_callback(fig, callback_type, callback_fn):
    '''Register an mpl event callback on a figure and remember it.

    The connection id and the function are stashed in the figure's __dict__
    under '<type>_cbid' and '<type>_func' so disconnect_callback can undo
    the registration later. No-op when callback_fn is None.
    '''
    #print('[df2] register %r callback' % callback_type)
    if callback_fn is None:
        return
    cbid = fig.canvas.mpl_connect(callback_type, callback_fn)
    fig.__dict__[callback_type + '_cbid'] = cbid
    fig.__dict__[callback_type + '_func'] = callback_fn
| apache-2.0 | -692,918,566,996,145,400 | -7,302,982,867,466,568,000 | 31.697605 | 119 | 0.588353 | false |
crosswalk-project/chromium-crosswalk-efl | tools/telemetry/telemetry/core/heap/chrome_js_heap_snapshot_parser_unittest.py | 44 | 2529 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import unittest
from telemetry.core.heap import chrome_js_heap_snapshot_parser
class ChromeJsHeapSnapshotParserUnittest(unittest.TestCase):
  """Tests for ChromeJsHeapSnapshotParser using a hand-built snapshot dict."""
  def _HeapSnapshotData(self, node_types, edge_types, node_fields, edge_fields,
                        node_list, edge_list, strings):
    """Helper for creating heap snapshot data."""
    return {'snapshot': {'meta': {'node_types': [node_types],
                                  'edge_types': [edge_types],
                                  'node_fields': node_fields,
                                  'edge_fields': edge_fields}},
            'nodes': node_list,
            'edges': edge_list,
            'strings': strings}
  def testParseSimpleSnapshot(self):
    # Create a snapshot containing 2 nodes and an edge between them.
    node_types = ['object']
    edge_types = ['property']
    node_fields = ['type', 'name', 'id', 'edge_count']
    edge_fields = ['type', 'name_or_index', 'to_node']
    # Two nodes: node1 (1 outgoing edge) and node2 (no outgoing edges);
    # values index into node_types / strings per node_fields.
    node_list = [0, 0, 0, 1,
                 0, 1, 1, 0]
    # One edge named 'edge1' (strings[2]) pointing at the second node
    # (to_node = 4 is an offset into node_list).
    edge_list = [0, 2, 4]
    strings = ['node1', 'node2', 'edge1']
    heap = self._HeapSnapshotData(node_types, edge_types, node_fields,
                                  edge_fields, node_list, edge_list, strings)
    objects = list(chrome_js_heap_snapshot_parser.ChromeJsHeapSnapshotParser(
        json.dumps(heap)).GetAllLiveHeapObjects())
    self.assertEqual(2, len(objects))
    # GetAllLiveHeapObjects() does not guarantee ordering, so detect which
    # parsed object is the edge source before asserting.
    if objects[0].edges_from:
      from_ix = 0
      to_ix = 1
    else:
      from_ix = 1
      to_ix = 0
    self.assertEqual('node1', objects[from_ix].class_name)
    self.assertEqual('node2', objects[to_ix].class_name)
    self.assertEqual(1, len(objects[from_ix].edges_from))
    self.assertEqual(0, len(objects[from_ix].edges_to))
    self.assertEqual(0, len(objects[to_ix].edges_from))
    self.assertEqual(1, len(objects[to_ix].edges_to))
    self.assertEqual('node1',
                     objects[from_ix].edges_from[0].from_object.class_name)
    self.assertEqual('node2',
                     objects[from_ix].edges_from[0].to_object.class_name)
    self.assertEqual('edge1', objects[from_ix].edges_from[0].name_string)
    self.assertEqual('node1', objects[to_ix].edges_to[0].from_object.class_name)
    self.assertEqual('node2', objects[to_ix].edges_to[0].to_object.class_name)
    self.assertEqual('edge1', objects[to_ix].edges_to[0].name_string)
| bsd-3-clause | 8,289,587,493,729,144,000 | -1,248,409,917,029,650,400 | 43.368421 | 80 | 0.61724 | false |
rjschwei/azure-sdk-for-python | azure-batch/azure/batch/models/job_list_preparation_and_release_task_status_options.py | 3 | 2209 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class JobListPreparationAndReleaseTaskStatusOptions(Model):
    """Additional parameters for the
    Job_list_preparation_and_release_task_status operation.

    :param filter: An OData $filter clause.
    :type filter: str
    :param select: An OData $select clause.
    :type select: str
    :param max_results: The maximum number of items to return in the response.
     A maximum of 1000 tasks can be returned. Default value: 1000 .
    :type max_results: int
    :param timeout: The maximum time that the server can spend processing the
     request, in seconds. The default is 30 seconds. Default value: 30 .
    :type timeout: int
    :param client_request_id: The caller-generated request identity, in the
     form of a GUID with no decoration such as curly braces, e.g.
     9C4D50EE-2D56-4CD3-8152-34347DC9F2B0.
    :type client_request_id: str
    :param return_client_request_id: Whether the server should return the
     client-request-id in the response. Default value: False .
    :type return_client_request_id: bool
    :param ocp_date: The time the request was issued. Client libraries
     typically set this to the current system clock time; set it explicitly
     if you are calling the REST API directly.
    :type ocp_date: datetime
    """

    def __init__(self, filter=None, select=None, max_results=1000, timeout=30, client_request_id=None, return_client_request_id=False, ocp_date=None):
        # Record every option on the instance under its parameter name.
        _options = (
            ('filter', filter),
            ('select', select),
            ('max_results', max_results),
            ('timeout', timeout),
            ('client_request_id', client_request_id),
            ('return_client_request_id', return_client_request_id),
            ('ocp_date', ocp_date),
        )
        for _name, _value in _options:
            setattr(self, _name, _value)
| mit | 5,653,453,916,959,763,000 | 1,532,673,028,223,376,400 | 44.081633 | 150 | 0.668628 | false |
foreverfaint/scrapy | scrapy/tests/test_utils_reqser.py | 30 | 2683 | import unittest
from scrapy.http import Request
from scrapy.spider import Spider
from scrapy.utils.reqser import request_to_dict, request_from_dict
class RequestSerializationTest(unittest.TestCase):
    """Round-trip tests for request_to_dict / request_from_dict."""
    def setUp(self):
        self.spider = TestSpider()
    def test_basic(self):
        r = Request("http://www.example.com")
        self._assert_serializes_ok(r)
    def test_all_attributes(self):
        # Exercise every serializable Request attribute at once
        r = Request("http://www.example.com",
            callback='parse_item',
            errback='handle_error',
            method="POST",
            body="some body",
            headers={'content-encoding': 'text/html; charset=latin-1'},
            cookies={'currency': 'usd'},
            encoding='latin-1',
            priority=20,
            meta={'a': 'b'})
        self._assert_serializes_ok(r)
    def test_latin1_body(self):
        # Raw latin-1 byte in the body must survive serialization
        r = Request("http://www.example.com", body="\xa3")
        self._assert_serializes_ok(r)
    def test_utf8_body(self):
        # UTF-8 encoded body must survive serialization
        r = Request("http://www.example.com", body="\xc2\xa3")
        self._assert_serializes_ok(r)
    def _assert_serializes_ok(self, request, spider=None):
        # Serialize, deserialize, then compare attribute-by-attribute
        d = request_to_dict(request, spider=spider)
        request2 = request_from_dict(d, spider=spider)
        self._assert_same_request(request, request2)
    def _assert_same_request(self, r1, r2):
        self.assertEqual(r1.url, r2.url)
        self.assertEqual(r1.callback, r2.callback)
        self.assertEqual(r1.errback, r2.errback)
        self.assertEqual(r1.method, r2.method)
        self.assertEqual(r1.body, r2.body)
        self.assertEqual(r1.headers, r2.headers)
        self.assertEqual(r1.cookies, r2.cookies)
        self.assertEqual(r1.meta, r2.meta)
        self.assertEqual(r1._encoding, r2._encoding)
        self.assertEqual(r1.priority, r2.priority)
        self.assertEqual(r1.dont_filter, r2.dont_filter)
    def test_callback_serialization(self):
        # Bound spider methods serialize when the owning spider is supplied
        r = Request("http://www.example.com", callback=self.spider.parse_item, \
            errback=self.spider.handle_error)
        self._assert_serializes_ok(r, spider=self.spider)
    def test_unserializable_callback1(self):
        # Lambdas can never be serialized, with or without a spider
        r = Request("http://www.example.com", callback=lambda x: x)
        self.assertRaises(ValueError, request_to_dict, r)
        self.assertRaises(ValueError, request_to_dict, r, spider=self.spider)
    def test_unserializable_callback2(self):
        # A bound method without its spider cannot be resolved back
        r = Request("http://www.example.com", callback=self.spider.parse_item)
        self.assertRaises(ValueError, request_to_dict, r)
class TestSpider(Spider):
    """Minimal spider providing named callbacks for the serialization tests."""
    name = 'test'
    def parse_item(self, response):
        pass
    def handle_error(self, failure):
        pass
| bsd-3-clause | -2,514,575,299,390,609,000 | -2,438,861,221,831,105,500 | 34.773333 | 80 | 0.633619 | false |
silas/rock | rock/text.py | 1 | 1235 | from __future__ import unicode_literals
def _(text):
return text.strip('\n')
# One-line usage summary printed for `rock` invocation errors.
USAGE = _("""
Usage: rock [--help] [--env=ENV] [--path=PATH] [--runtime=RUNTIME] command
""")
# Full option/command reference printed by `rock --help`.
HELP = _("""
--help show help message
--verbose show script while running
--dry-run show script without running
--version show version
project:
--env=ENV set env
--path=PATH set path
--runtime=RUNTIME set runtime
commands:
build run build
test run tests
run run in environment
clean clean project files
other commands:
config show project configuration
env show evaluable environment variables
init generates project skeleton
runtime show installed runtimes
""")
# Usage/help for the `rock config` subcommand.
CONFIG_USAGE = _("""
Usage: rock config [--format=FORMAT]
""")
CONFIG_HELP = _("""
--help show help message
--format set output format (json, yaml)
""")
# Usage/help for the `rock env` subcommand.
ENV_USAGE = _("""
Usage: rock env
""")
ENV_HELP = _("""
--help show help message
""")
# Usage/help for the `rock runtime` subcommand.
RUNTIME_USAGE = _("""
Usage: rock runtime
""")
RUNTIME_HELP = _("""
--help show help message
""")
| mit | -1,261,390,607,055,214,600 | 5,819,744,315,217,400,000 | 20.293103 | 74 | 0.545749 | false |
xianian/qt-creator | tests/system/suite_general/tst_session_handling/test.py | 3 | 8199 | #############################################################################
##
## Copyright (C) 2015 The Qt Company Ltd.
## Contact: http://www.qt.io/licensing
##
## This file is part of Qt Creator.
##
## Commercial License Usage
## Licensees holding valid commercial Qt licenses may use this file in
## accordance with the commercial license agreement provided with the
## Software or, alternatively, in accordance with the terms contained in
## a written agreement between you and The Qt Company. For licensing terms and
## conditions see http://www.qt.io/terms-conditions. For further information
## use the contact form at http://www.qt.io/contact-us.
##
## GNU Lesser General Public License Usage
## Alternatively, this file may be used under the terms of the GNU Lesser
## General Public License version 2.1 or version 3 as published by the Free
## Software Foundation and appearing in the file LICENSE.LGPLv21 and
## LICENSE.LGPLv3 included in the packaging of this file. Please review the
## following information to ensure the GNU Lesser General Public License
## requirements will be met: https://www.gnu.org/licenses/lgpl.html and
## http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
##
## In addition, as a special exception, The Qt Company gives you certain additional
## rights. These rights are described in The Qt Company LGPL Exception
## version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
##
#############################################################################
source("../../shared/qtcreator.py")
def main():
    """Squish test entry point: create a session, open projects, and verify
    that switching sessions restores the open projects and documents."""
    projects = prepareTestExamples()
    if not projects:
        return
    sessionName = "SampleSession"
    startApplication("qtcreator" + SettingsPath)
    if not startedWithoutPluginError():
        return
    createAndSwitchToSession(sessionName)
    mainWindow = waitForObject(":Qt Creator_Core::Internal::MainWindow")
    test.verify(waitFor("sessionName in str(mainWindow.windowTitle)", 2000),
                "Verifying window title contains created session name.")
    if canTestEmbeddedQtQuick():
        checkWelcomePage(sessionName, True)
    # Populate the session with the example projects
    for project in projects:
        openQmakeProject(project, Targets.DESKTOP_480_DEFAULT)
        progressBarWait(20000)
    checkNavigator(68, "Verifying whether all projects have been opened.")
    openDocument("propertyanimation.QML.qml.color-animation\\.qml")
    openDocument("declarative-music-browser.Headers.utility\\.h")
    checkOpenDocuments(2, "Verifying whether 2 files are open.")
    originalText = str(waitForObject(":Qt Creator_CppEditor::Internal::CPPEditorWidget").plainText)
    # Switching to the default session must close everything
    switchSession("default")
    test.verify(waitFor("'Qt Creator' == str(mainWindow.windowTitle)", 2000),
                "Verifying window title is set to default.")
    if canTestEmbeddedQtQuick():
        checkWelcomePage(sessionName, False)
        switchViewTo(ViewConstants.EDIT)
    checkNavigator(1, "Verifying that no more project is opened.")
    checkOpenDocuments(0, "Verifying whether all files have been closed.")
    # Switching back must restore projects and open editors
    switchSession(sessionName)
    test.verify(waitFor("sessionName in str(mainWindow.windowTitle)", 2000),
                "Verifying window title contains created session name.")
    checkNavigator(68, "Verifying whether all projects have been re-opened.")
    checkOpenDocuments(2, "Verifying whether 2 files have been re-opened.")
    if test.verify("utility.h" in str(mainWindow.windowTitle),
                   "Verifying whether utility.h has been opened."):
        current = str(waitForObject(":Qt Creator_CppEditor::Internal::CPPEditorWidget").plainText)
        test.verify(originalText == current, "Verifying that same file has been opened.")
    checkForSessionFile(sessionName, projects)
    invokeMenuItem("File", "Exit")
def prepareTestExamples():
    """Copy the two SDK example projects into temp dirs and return the list
    of copied .pro paths, or None if a source project is missing."""
    examples = [os.path.join(sdkPath, "Examples", "4.7", "declarative", "animation", "basics",
                             "property-animation", "propertyanimation.pro"),
                os.path.join(sdkPath, "Examples", "QtMobility", "declarative-music-browser",
                             "declarative-music-browser.pro")
                ]
    projects = []
    # Bail out early if any required example is not installed
    for sourceExample in examples:
        if not neededFilePresent(sourceExample):
            return None
    # copy example projects to temp directory
    for sourceExample in examples:
        templateDir = prepareTemplate(os.path.dirname(sourceExample))
        projects.append(os.path.join(templateDir, os.path.basename(sourceExample)))
    return projects
def switchSession(toSession):
    """Switch to an existing session via the Session Manager dialog."""
    test.log("Switching to session '%s'" % toSession)
    invokeMenuItem("File", "Session Manager...")
    clickItem(waitForObject("{name='sessionList' type='QListWidget' visible='1' "
                            "window=':Session Manager_ProjectExplorer::Internal::SessionDialog'}"),
              toSession, 5, 5, 0, Qt.LeftButton)
    clickButton(waitForObject("{name='btSwitch' text='Switch to' type='QPushButton' visible='1' "
                              "window=':Session Manager_ProjectExplorer::Internal::SessionDialog'}"))
def createAndSwitchToSession(toSession):
    """Create a new session via the Session Manager and switch to it."""
    sessionInputDialog = ("{type='ProjectExplorer::Internal::SessionNameInputDialog' unnamed='1' "
                          "visible='1' windowTitle='New Session Name'}")
    test.log("Switching to session '%s' after creating it." % toSession)
    invokeMenuItem("File", "Session Manager...")
    clickButton(waitForObject("{name='btCreateNew' text='New' type='QPushButton' visible='1' "
                              "window=':Session Manager_ProjectExplorer::Internal::SessionDialog'}"))
    lineEdit = waitForObject("{type='QLineEdit' unnamed='1' visible='1' window=%s}"
                             % sessionInputDialog)
    replaceEditorContent(lineEdit, toSession)
    clickButton(waitForObject("{text='Switch To' type='QPushButton' unnamed='1' visible='1' "
                              "window=%s}" % sessionInputDialog))
def checkWelcomePage(sessionName, isCurrent=False):
    """Verify the Welcome page lists both the default and the given session,
    marking whichever one isCurrent says is active."""
    # Object hierarchy differs between Qt 5.4 and later builds
    if isQt54Build:
        welcomePage = ":WelcomePageStyledBar.WelcomePage_QQuickView"
    else:
        welcomePage = ":Qt Creator.WelcomePage_QQuickWidget"
    switchViewTo(ViewConstants.WELCOME)
    mouseClick(waitForObject("{container='%s' text='Projects' type='Button' "
                             "unnamed='1' visible='true'}" % welcomePage))
    waitForObject("{container='%s' id='sessionsTitle' text='Sessions' type='Text' "
                  "unnamed='1' visible='true'}" % welcomePage)
    if isCurrent:
        sessions = ["default", "%s (current session)" % sessionName]
    else:
        sessions = ["default (current session)", sessionName]
    for sessionName in sessions:
        test.verify(object.exists("{container='%s' enabled='true' type='LinkedText' unnamed='1' "
                                  "visible='true' text='%s'}" % (welcomePage, sessionName)),
                    "Verifying session '%s' exists." % sessionName)
def checkNavigator(expectedRows, message):
    """Compare the total row count of the project navigator tree to expectedRows."""
    navigatorModel = waitForObject(":Qt Creator_Utils::NavigationTreeView").model()
    test.compare(expectedRows, len(__iterateChildren__(navigatorModel, QModelIndex())), message)
def checkOpenDocuments(expectedRows, message):
    """Compare the number of entries in the Open Documents pane to expectedRows."""
    selectFromCombo(":Qt Creator_Core::Internal::NavComboBox", "Open Documents")
    openDocsWidget = waitForObject(":OpenDocuments_Widget")
    test.compare(openDocsWidget.model().rowCount(), expectedRows, message)
def checkForSessionFile(sessionName, proFiles):
    """Verify the session's .qws file exists and references every .pro file."""
    global tmpSettingsDir
    sessionFile = os.path.join(tmpSettingsDir, "QtProject", "qtcreator", "%s.qws" % sessionName)
    if test.verify(os.path.exists(sessionFile),
                   "Verifying whether session file '%s' has been created." % sessionFile):
        content = readFile(sessionFile)
        for proFile in proFiles:
            # Session files store paths with forward slashes on Windows
            if platform.system() in ('Microsoft', 'Windows'):
                proFile = proFile.replace('\\', '/')
            test.verify(proFile in content, "Verifying whether expected .pro file (%s) is listed "
                        "inside session file." % proFile)
def init():
    """Squish per-test setup: start from a clean QML debug folder."""
    removeQmlDebugFolderIfExists()
| lgpl-2.1 | -5,945,833,392,185,512,000 | -8,671,621,522,967,356,000 | 52.24026 | 101 | 0.66752 | false |
setsid/yacron | yacron/time.py | 1 | 5052 | """
This file is part of yacron.
Copyright (C) 2016 Vadim Kuznetsov <vimusov@gmail.com>
yacron is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
yacron is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with yacron. If not, see <http://www.gnu.org/licenses/>.
"""
class CronTime(object):
    """
    Parse and store scheduled time.
    """

    def __init__(self, minutes, hours, weekdays):
        """
        Parse and store the minutes, hours and weekdays values.

        :param minutes: Minutes (str)
        :param hours: Hours (str)
        :param weekdays: Weekdays (str)

        :raise ValueError if any of the values is invalid
        """
        self._minutes = self._parse_value(0, minutes, 59)
        self._hours = self._parse_value(0, hours, 23)
        # slashes are unacceptable in weekdays value
        self._weekdays = self._parse_value(1, weekdays, 7, slash_acceptable=False)

    @property
    def minutes(self):
        # List of minute values (ints in [0..59]) the schedule matches.
        return self._minutes

    @property
    def hours(self):
        # List of hour values (ints in [0..23]) the schedule matches.
        return self._hours

    @property
    def weekdays(self):
        # List of ISO weekday values (ints in [1..7]) the schedule matches.
        return self._weekdays

    def _check_value_range(self, min_value, value, max_value):
        """
        Check is value in range.

        :param min_value: Minimal valid value
        :param value: Value
        :param max_value: Maximum valid value

        :return True if the value is in range
        :raise ValueError if the value is out of range
        """
        if not (min_value <= value <= max_value):
            raise ValueError("invalid value '{0:d}', must be in [{1:d}..{2:d}]".format(value, min_value, max_value))
        return True

    def _check_special_chars(self, value):
        """
        Check special characters in the value:
        1) value can not contains more than one '*' or '/' or '-' characters;
        2) special characters can not be mixed (there can be the only one except ',');

        :param value: Value.
        :raise ValueError if any invalid sequence of special characters found in the value.
        """
        all_count = value.count('*')
        slash_count = value.count('/')
        comma_count = value.count(',')
        hyphen_count = value.count('-')
        is_invalid = any((
            all_count > 1,
            slash_count > 1,
            hyphen_count > 1,
            all_count and (slash_count or comma_count or hyphen_count),
            slash_count and (all_count or comma_count or hyphen_count),
            comma_count and (all_count or slash_count or hyphen_count),
            hyphen_count and (all_count or slash_count or comma_count),
        ))
        if is_invalid:
            raise ValueError("invalid format in value '{0:s}'".format(value))

    def _parse_value(self, min_value, value, max_value, slash_acceptable=True):
        """
        Parse and check a value ('*', '/N', 'a-b', 'a,b,...' or a single int).

        :param min_value: Minimal valid value
        :param value: Value
        :param max_value: Maximum valid value
        :param slash_acceptable: Slash is valid in the value

        :return: List of values.
        :raise ValueError if parsing failed
        """
        self._check_special_chars(value)
        if value == '*':
            return list(range(min_value, max_value + 1))
        if value.startswith('/'):
            if not slash_acceptable:
                raise ValueError("value '{0:s}' can not contains slash".format(value))
            divisor = int(value[1:])
            # BUGFIX: when min_value == 0, a '/0' divisor used to pass the
            # range check below and crash with ZeroDivisionError in 'n %
            # divisor'; reject non-positive divisors with the documented
            # ValueError instead.
            if divisor < 1:
                raise ValueError("invalid divisor '{0:d}', must be positive".format(divisor))
            self._check_value_range(min_value, divisor, max_value)
            return [n for n in range(min_value, max_value + 1) if n % divisor == 0]
        if '-' in value:
            start_value, stop_value = map(int, value.split('-'))
            self._check_value_range(min_value, start_value, max_value)
            self._check_value_range(min_value, stop_value, max_value)
            if start_value >= stop_value:
                raise ValueError("start value can not be greater or equal to stop value")
            return list(range(start_value, stop_value + 1))
        if ',' in value:
            return [n for n in map(int, value.split(',')) if self._check_value_range(min_value, n, max_value)]
        return [int(value)]

    def check_time(self, cur_time):
        """
        Compare parsed time and current time.

        :param cur_time: Current time (datetime).
        :return: True if current time matches with parser time and False otherwise
        """
        return all((
            cur_time.minute in self._minutes,
            cur_time.hour in self._hours,
            cur_time.isoweekday() in self._weekdays,
        ))
| gpl-3.0 | -2,654,086,008,276,996,600 | 2,507,152,671,185,148,400 | 35.345324 | 116 | 0.598773 | false |
diydrones/visual-followme | src/drone_script.py | 4 | 1146 | import cv2
import time
import os.path
import os
from file_utils import Logger
from polyphemus import process_stream
mustarm = False
def get_vehicle():
api = local_connect() # @UndefinedVariable
v = api.get_vehicles()[0]
return v
def wait_for_arm(v):
print "Waiting for arming"
while not v.armed:
time.sleep(0.001)
print "ARMED"
def open_camera():
# yuck - opencv has no way to count # of cameras, so do this hack of looking for /dev/video*
numCameras = len(filter(lambda s: s.startswith("video"), os.listdir("/dev")))
c = cv2.VideoCapture()
# We start our search with higher numbered (likely external) cameras
for cnum in range(0, numCameras):
c.open(numCameras - cnum - 1)
if c.isOpened():
return c
raise Exception('No cameras found')
print "DroneScript - Visual-Follow Running"
v = get_vehicle()
while True:
if mustarm:
wait_for_arm(v)
video_in = open_camera()
homedir = os.path.expanduser("~")
logger = Logger(path= homedir + "/Videos/")
process_stream(video_in, logger, vehicle=v, require_arming=mustarm)
| gpl-3.0 | 1,017,225,917,181,705,500 | 6,400,484,405,408,681,000 | 23.382979 | 96 | 0.647469 | false |
daymer/xWIKI_Karma | Migration_to_xWiki/migration_sample.py | 1 | 1532 | from PythonConfluenceAPI import ConfluenceAPI
import Configuration
import CustomModules.SQL_Connector
from Configuration import MySQLConfig, MediaWIKIConfig
from Migration_to_xWiki.Users_association import Users
from CustomModules import Mechanics
from CustomModules.Mechanics import XWikiClient, MysqlConnector, MigrationAssistant
target_pool = 'Migration pool'
parent = 'Migration pool'
MySQLconfig_INSTANCE = MySQLConfig()
MysqlConnector_INSTANCE = MysqlConnector(MySQLconfig_INSTANCE)
SQLConfig = Configuration.SQLConfig()
xWikiConfig = Configuration.XWikiConfig(target_pool)
xWikiClient = XWikiClient(xWikiConfig.api_root, xWikiConfig.auth_user, xWikiConfig.auth_pass)
ConfluenceConfig_instance = Configuration.ConfluenceConfig()
confluenceAPI_instance = ConfluenceAPI(username=ConfluenceConfig_instance.USER, password=ConfluenceConfig_instance.PASS, uri_base=ConfluenceConfig_instance.ULR)
MediaWIKIConfig = MediaWIKIConfig()
Migrator = MigrationAssistant(ConfluenceConfig=ConfluenceConfig_instance, MediaWIKIConfig=MediaWIKIConfig, xWikiConfig=xWikiConfig)
UserList = Users()
SQLConnector_instance = CustomModules.SQL_Connector.SQLConnector(SQLConfig)
title = 'Hyper-V Basics'
platform = 'Confluence'
result = Mechanics.migrate_page(title, platform, target_pool, parent, MySQLconfig_INSTANCE,
MysqlConnector_INSTANCE, SQLConfig, SQLConnector_instance, ConfluenceConfig_instance,
MediaWIKIConfig, xWikiConfig, xWikiClient, Migrator, UserList)
print(result)
| apache-2.0 | 9,187,097,446,572,046,000 | 2,900,198,176,845,318,000 | 48.419355 | 160 | 0.81201 | false |
moondrop-entertainment/django-nonrel-drawp | tests/regressiontests/comment_tests/tests/app_api_tests.py | 55 | 2592 | from django.conf import settings
from django.contrib import comments
from django.contrib.comments.models import Comment
from django.contrib.comments.forms import CommentForm
from regressiontests.comment_tests.tests import CommentTestCase
class CommentAppAPITests(CommentTestCase):
"""Tests for the "comment app" API"""
def testGetCommentApp(self):
self.assertEqual(comments.get_comment_app(), comments)
def testGetForm(self):
self.assertEqual(comments.get_form(), CommentForm)
def testGetFormTarget(self):
self.assertEqual(comments.get_form_target(), "/post/")
def testGetFlagURL(self):
c = Comment(id=12345)
self.assertEqual(comments.get_flag_url(c), "/flag/12345/")
def getGetDeleteURL(self):
c = Comment(id=12345)
self.assertEqual(comments.get_delete_url(c), "/delete/12345/")
def getGetApproveURL(self):
c = Comment(id=12345)
self.assertEqual(comments.get_approve_url(c), "/approve/12345/")
class CustomCommentTest(CommentTestCase):
urls = 'regressiontests.comment_tests.urls'
def setUp(self):
self.old_comments_app = getattr(settings, 'COMMENTS_APP', None)
settings.COMMENTS_APP = 'regressiontests.comment_tests.custom_comments'
settings.INSTALLED_APPS = list(settings.INSTALLED_APPS) + [settings.COMMENTS_APP,]
def tearDown(self):
del settings.INSTALLED_APPS[-1]
settings.COMMENTS_APP = self.old_comments_app
if settings.COMMENTS_APP is None:
del settings._wrapped.COMMENTS_APP
def testGetCommentApp(self):
from regressiontests.comment_tests import custom_comments
self.assertEqual(comments.get_comment_app(), custom_comments)
def testGetModel(self):
from regressiontests.comment_tests.custom_comments.models import CustomComment
self.assertEqual(comments.get_model(), CustomComment)
def testGetForm(self):
from regressiontests.comment_tests.custom_comments.forms import CustomCommentForm
self.assertEqual(comments.get_form(), CustomCommentForm)
def testGetFormTarget(self):
self.assertEqual(comments.get_form_target(), "/post/")
def testGetFlagURL(self):
c = Comment(id=12345)
self.assertEqual(comments.get_flag_url(c), "/flag/12345/")
def getGetDeleteURL(self):
c = Comment(id=12345)
self.assertEqual(comments.get_delete_url(c), "/delete/12345/")
def getGetApproveURL(self):
c = Comment(id=12345)
self.assertEqual(comments.get_approve_url(c), "/approve/12345/")
| bsd-3-clause | 2,796,358,614,272,438,300 | -7,074,608,015,925,369,000 | 35.507042 | 90 | 0.698302 | false |
UCSC-iGEM-2016/taris_controller | taris_controller/taris_sensor.py | 1 | 9944 | #!/usr/bin/python
from __future__ import print_function
import io # used to create file streams
import fcntl # used to access I2C parameters like addresses
import sys
import time # used for sleep delay and timestamps
class Taris_Sensor():
''' This object holds all required interface data for the Atlas Scientific \
EZO pH and RTD sensors. Built off of the base library, with new functions \
added for calibration and additional testing. '''
def __init__(self, address, bus):
# open two file streams, one for reading and one for writing
# the specific I2C channel is selected with bus
# it is usually 1, except for older revisions where it's 0
# wb and rb indicate binary read and write
self.file_read = io.open("/dev/i2c-"+str(bus), "rb", buffering=0)
self.file_write = io.open("/dev/i2c-"+str(bus), "wb", buffering=0)
# initializes I2C to either a user specified or default address
self.set_i2c_address(address)
self.cal_timeout = 1.6 # timeout for calibrations
self.read_timeout = 1.0 # timeout for reads
self.short_timeout = 0.3 # timeout for regular commands
# Set if testing board
self.DEBUG = True
def set_i2c_address(self, addr):
'''Set the I2C communications to the slave specified by the address. \
The commands for I2C dev using the ioctl functions are specified in \
the i2c-dev.h file from i2c-tools'''
I2C_SLAVE = 0x703
fcntl.ioctl(self.file_read, I2C_SLAVE, addr)
fcntl.ioctl(self.file_write, I2C_SLAVE, addr)
def write(self, cmd):
'''Writes a command to the sensor.'''
# appends the null character and sends the string over I2C
cmd += "\00"
self.file_write.write(cmd)
def read(self, num_of_bytes=31,startbit=1):
'''Reads data from the sensor and parses the incoming response.'''
# reads a specified number of bytes from I2C, then parses and displays the result
res = self.file_read.read(num_of_bytes) # read from the board
response = filter(lambda x: x != '\x00', res) # remove the null characters to get the response
if ord(response[0]) == 1: # if the response isn't an error
# change MSB to 0 for all received characters except the first and get a list of characters
char_list = map(lambda x: chr(ord(x) & ~0x80), list(response[startbit:]))
# NOTE: having to change the MSB to 0 is a glitch in the raspberry pi, and you shouldn't have to do this!
return ''.join(char_list) # convert the char list to a string and returns it
else:
return "Error " + str(ord(response[0]))
def query(self, string, start=1):
'''For commands that require a write, a wait, and a response. For instance, \
calibration requires writing an initial CAL command, waiting 300ms, \
then checking for a pass/fail indicator message.'''
# write a command to the board, wait the correct timeout, and read the response
self.write(string)
# the read and calibration commands require a longer timeout
if string.upper().startswith("R"):
time.sleep(self.read_timeout)
elif string.upper().startswith("CAL"):
time.sleep(self.cal_timeout)
else:
time.sleep(self.short_timeout)
return self.read(startbit=start)
def verify(self):
'''Verifies that the sensor is connected, also returns firmware version.'''
device_ID = self.query("I")
if device_ID.startswith("?I"):
print("Connected sensor: " + str(device_ID)[3:])
else:
raw_input("EZO not connected: " + device_ID)
def close(self):
'''Closes the sensor's filestream, not usually required.'''
self.file_read.close()
self.file_write.close()
def getData(self):
'''Gets data from sensor reading as a float.'''
data = self.query("R")
return float(data)
def cal_wait(self, cal_time):
'''UI for waiting for pH sensor to stabilize during calibration'''
x=1
if self.DEBUG == True:
cal_time = 4
while x<cal_time:
if x==1:
sys.stdout.write("Please wait for sensor to stabilize:")
else:
sys.stdout.write(".")
sys.stdout.flush()
time.sleep(1)
x+=1
print('\n')
def pH_calibrateSensor(self):
'''Performs pH sensor calibration using included buffers.'''
# Clear previous calibration data
print("Starting pH sensor calibration...")
q = self.query("Cal,clear", 0)
if str(ord(q)) != '1':
print("Calibration failed with response " + str(q))
time.sleep(2)
return False
# Midpoint calibration. This will also reset previous data.
raw_input("Please rinse probe. Press [Enter] when pH 7 buffer is loaded.")
self.cal_wait(60)
mid_pH = "7.00"
q = self.query("CAL,MID," + mid_pH, 0)
if str(ord(q)) != '1':
print("Calibration failed with response " + str(q))
time.sleep(2)
return False
# Lowpoint calibration
raw_input("Please rinse probe. Press [Enter] when pH 4 buffer is loaded.")
self.cal_wait(60)
low_pH = "4.00"
q = self.query("CAL,LOW," + low_pH, 0)
if str(ord(q)) != '1':
print("Calibration failed with response " + str(q))
time.sleep(2)
return False
# Highpoint calibration
raw_input("Please rinse probe. Press [Enter] when pH 10 buffer is loaded.")
self.cal_wait(60)
high_pH = "10.00"
q = self.query("CAL,HIGH," + high_pH, 0)
if str(ord(q)) != '1':
print("Calibration failed with response " + str(q))
time.sleep(2)
return False
q = str(self.query("Cal,?"))
# Check that 3-point calibration is complete, otherwise return ERROR
if q != "?CAL,3":
print("Three point calibration incomplete!" + str(q))
cal_response = raw_input("Enter 'R' to retry or Enter to exit.")
if cal_response == "R" or cal_response == "r":
self.pH_calibrateSensor()
else:
return False
print("Three point pH calibration complete!")
time.sleep(1)
return True
def temp_calibrateSensor(self):
'''Calibrates the temperature sensor. Requires an external thermometer.'''
print("Clearing previous temperature calibration.")
q = str(ord(self.query("Cal,clear\0x0d", 0)))
if q == "1":
cal_temp = raw_input("Enter room temperature\n>>")
self.cal_wait(5)
q = str(ord(self.query("Cal,"+str(cal_temp) + "\0x0d", 0)))
if q == "1":
q = str(self.query("Cal,?"))
if q == "?CAL,1":
print("One point temperature calibration complete!")
return True
elif q == "?CAL,0":
print("One point temperature calibration incomplete!")
cal_response = raw_input("Enter R to retry or Enter to exit.")
if cal_response == "R" or cal_response == "r":
self.temp_calibrateSensor()
else:
return False
else:
print("Error setting new calibration temperature: " + str(q))
time.sleep(1)
return False
else:
print("Could not set new calibration temperature: " + str(q))
time.sleep(1)
return False
else:
print("Could not clear RTD sensor: " + str(q))
time.sleep(1)
return False
return False
def pH_compensateTemp(self,temp):
'''Compensates the pH sensor for temperature, is used in conjunction with \
a reading from the RTD sensor.'''
comp_status = self.query("T," + str(temp),0)
if str(ord(comp_status)) != '1':
print("Temperature compensation failed!: ")
time.sleep(2)
return False
else:
comp_status = str(self.query("T,?"))
print("Temperature compensation set for: " + comp_status[3:] + u'\xb0' + "C")
time.sleep(2)
return False
def lockProtocol(self,command):
'''Not currently working. Normally used for locking some of the \
internal parameters (e.g. baud rate for UART mode).'''
read_bytes = 9
print("1.\tDisconnect power to device and any signal wires.\n\
2.\tShort PRB to TX.\n\
3.\tTurn device on and wait for LED to change to blue.\n\
4.\tRemove short from PRB to TX, then restart device.\n\
5.\tConnect data lines to Raspberry Pi I2C pins.")
raw_input("Press Enter when this is complete.")
raw_input("Press Enter to prevent further changes to device configuration.")
command_message = "PLOCK," + str(command)
self.sensorQ(command_message)
time.sleep(0.3)
lock_status = self.sensorRead(read_bytes)
if lock_status == "?PLOCK,1":
print("Sensor settings locked.")
return_code = 1
elif lock_status == "?PLOCK,0":
print("Sensor settings unlocked.")
return_code = 0
else:
print("False locking sensor settings.")
return False
return return_code
| gpl-3.0 | -4,150,405,067,504,310,300 | -657,463,432,249,507,600 | 38.776 | 117 | 0.559433 | false |
sixty-north/segpy | test/test_header.py | 2 | 5354 | import inspect
import pickle
from copy import copy
from pytest import raises
from hypothesis import given, assume
from hypothesis.strategies import integers
import segpy
from segpy.header import field, Header, are_equal
from segpy.field_types import Int32, NNInt32
from segpy.datatypes import LIMITS, SegYType
from test.predicates import check_balanced
class ExampleHeader(Header):
field_a = field(
Int32, offset=1, default=0, documentation=
"""Field A. This is field A.""")
field_b = field(
NNInt32, offset=5, default=42, documentation=
"Field B. This is field B.")
field_c = field(
Int32, offset=9, default=-1, documentation=
"Field C. This is field C.")
class TestHeader:
def test_initialize_with_defaults(self):
h = ExampleHeader()
assert h.field_a == 0
assert h.field_b == 42
assert h.field_c == -1
def test_initialize_with_positional_arguments(self):
h = ExampleHeader(14, 22, 8)
assert h.field_a == 14
assert h.field_b == 22
assert h.field_c == 8
def test_initialize_with_keyword_arguments(self):
h = ExampleHeader(field_a=14, field_b=22, field_c=8)
assert h.field_a == 14
assert h.field_b == 22
assert h.field_c == 8
def test_initialize_with_positional_and_keyword_arguments(self):
h = ExampleHeader(14, 22, field_c=8)
assert h.field_a == 14
assert h.field_b == 22
assert h.field_c == 8
def test_out_of_range_field_values_raises_value_error(self):
with raises(ValueError):
ExampleHeader(14, -1, field_c=8)
def test_illegal_keyword_argument_raises_type_error(self):
with raises(TypeError):
ExampleHeader(14, 1, field_x=8)
def test_ordered_field_names(self):
assert ExampleHeader.ordered_field_names() == ('field_a', 'field_b', 'field_c')
@given(a=integers(*LIMITS[SegYType.INT32]),
b=integers(*LIMITS[SegYType.NNINT32]),
c=integers(*LIMITS[SegYType.INT32]))
def test_copy(self, a, b, c):
h1 = ExampleHeader(a, b, c)
h2 = copy(h1)
assert h1 is not h2
assert h1.field_a == h2.field_a
assert h1.field_a == h2.field_a
assert h1.field_a == h2.field_a
@given(a=integers(*LIMITS[SegYType.INT32]),
b=integers(*LIMITS[SegYType.NNINT32]),
c=integers(*LIMITS[SegYType.INT32]))
def test_repr(self, a, b, c):
r = repr(ExampleHeader(a, b, c))
assert str(a) in r
assert str(b) in r
assert str(c) in r
assert 'field_a' in r
assert 'field_b' in r
assert 'field_c' in r
assert 'ExampleHeader' in r
assert check_balanced(r)
@given(a=integers(*LIMITS[SegYType.INT32]),
b=integers(*LIMITS[SegYType.NNINT32]),
c=integers(*LIMITS[SegYType.INT32]))
def test_equality(self, a, b, c):
lhs = ExampleHeader(a, b, c)
rhs = ExampleHeader(a, b, c)
assert are_equal(lhs, rhs)
@given(a=integers(*LIMITS[SegYType.INT32]),
b=integers(*LIMITS[SegYType.NNINT32]),
c=integers(*LIMITS[SegYType.INT32]))
def test_inequality(self, a, b, c):
assume(a != 0)
lhs = ExampleHeader(-a, b, c)
rhs = ExampleHeader(a, b, c)
assert not are_equal(lhs, rhs)
def test_inequality_different_type(self):
h = ExampleHeader(1, 2, 3)
assert not are_equal(h, 42)
def test_read_illegal_attribute_raises_attribute_error(self):
h = ExampleHeader(1, 2, 3)
with raises(AttributeError):
_ = h.field_x
@given(a=integers(*LIMITS[SegYType.INT32]),
b=integers(*LIMITS[SegYType.NNINT32]),
c=integers(*LIMITS[SegYType.INT32]))
def test_pickle_roundtrip(self, a, b, c):
h1 = ExampleHeader(a, b, c)
s = pickle.dumps(h1)
h2 = pickle.loads(s)
assert are_equal(h1, h2)
@given(a=integers(*LIMITS[SegYType.INT32]),
b=integers(*LIMITS[SegYType.NNINT32]),
c=integers(*LIMITS[SegYType.INT32]))
def test_pickle_versioning_mismatch_raises_type_error(self, a, b, c):
h1 = ExampleHeader(a, b, c)
s = pickle.dumps(h1)
s = s.replace(segpy.__version__.encode('ascii'), b'xxxxx')
with raises(TypeError):
pickle.loads(s)
def test_delete_field_raises_attribute_error(self):
h1 = ExampleHeader(1, 2, 3)
with raises(AttributeError):
del h1.field_a
class TestNamedField:
def test_name(self):
assert ExampleHeader.field_a.name == 'field_a'
def test_value_type(self):
assert ExampleHeader.field_a.value_type == Int32
def test_offset(self):
assert ExampleHeader.field_a.offset == 1
def test_default(self):
assert ExampleHeader.field_a.default == 0
def test_doc(self):
assert inspect.getdoc(ExampleHeader.field_a) == "Field A. This is field A."
def test_repr(self):
r = repr(ExampleHeader.field_a)
assert 'FieldAField' in r
assert 'name' in r
assert 'value_type' in r
assert 'default' in r
assert 'field_a' in r
assert 'Int32' in r
assert '1' in r
assert '0' in r
assert check_balanced(r) | agpl-3.0 | 8,491,242,996,517,354,000 | 5,719,684,336,307,790,000 | 29.953757 | 87 | 0.602167 | false |
xy515258/Xia | make_new_application.py | 6 | 4027 | #!/usr/bin/env python
# This script is for creating a new herd animal. Just run this script
# from the "stork" directory supplying a new animal name and it should
# create a complete application template built with support for both
# MOOSE and ELK. Enjoy!
import os, sys, string, re, subprocess
from optparse import OptionParser
from shutil import copytree, ignore_patterns
# DO NOT MODIFY
# This value should be set to true if this stork is within the svn herd repository
global_in_herd = False
global_ignores = ['.svn', '.git']
global_app_name = ''
global_rename_suffix = 'app'
def renameFiles(app_path):
rename_pattern = re.compile(r'(stork)(.*)', re.I)
suffix_pattern = re.compile(r'(.*)\.' + global_rename_suffix + '$')
for dirpath, dirnames, filenames in os.walk(app_path):
# Don't traverse into ignored directories
for ignore in global_ignores:
if ignore in dirnames:
dirnames.remove(ignore)
for file in filenames:
match = rename_pattern.match(file)
# Replace 'stork' in the contents
replaceNameInContents(dirpath + '/' + file)
# See if the file needs to be renamed and rename
if match != None:
replace_string = replacementFunction(match)
os.rename(dirpath + '/' + file, dirpath + '/' + replace_string + match.group(2))
# update the file
file = replace_string + match.group(2)
# If there are files with .app suffixes drop the suffix
match = suffix_pattern.search(file)
if match != None:
os.rename(dirpath + '/' + file, dirpath + '/' + match.group(1))
def replaceNameInContents(filename):
f = open(filename)
text = f.read()
f.close()
# Replace all instances of the word stork with the right case
pattern = re.compile(r'(stork)', re.I)
text = pattern.sub(replacementFunction, text)
# Retrieve original file attribute to be applied later
mode = os.stat(filename).st_mode
# Now write the file back out
f = open(filename + '~tmp', 'w')
f.write(text)
f.close()
os.chmod(filename + '~tmp', mode)
os.rename(filename + '~tmp', filename)
def replacementFunction(match):
# There are 3 "case" cases
# Case 1: all lower case
if match.group(1) == 'stork':
return global_app_name
# Case 2: all upper case
if match.group(1) == 'STORK':
return string.upper(global_app_name)
# Case 3: First letter is capitalized
if match.group(1) == 'Stork':
name = global_app_name.replace("_", " ")
name = name.title()
name = name.replace(" ", "")
return name
print match.group(0) + "\nBad Case Detected!"
sys.exit(1)
if __name__ == '__main__':
parser = OptionParser()
(global_options, args) = parser.parse_args()
# Get the animal name
if global_in_herd:
if len(args) != 1:
print 'Usage: ./make_new_application.py <animal name>'
sys.exit()
global_app_name = string.lower(args[0])
else:
if len(args) != 0:
print 'Usage: ./make_new_application.py'
sys.exit()
global_app_name = os.path.basename(os.path.dirname(os.path.realpath(__file__)))
# Make the new application
if global_in_herd:
copytree('.', '../' + global_app_name, ignore=ignore_patterns('.svn', '.git', '*.module', 'make_new*', 'LICENSE'))
renameFiles('../' + global_app_name)
print 'Your application should be ready!\nAdd the directory ../' + global_app_name + ' to your checkout and commit.'
else:
# We are in a git clone
renameFiles('.')
try:
os.remove('Makefile.module')
os.remove('run_tests.module')
os.remove(os.path.join('src', 'base', 'StorkApp.C.module'))
os.remove('make_new_application.py')
os.remove('make_new_module.py')
except:
pass
# Add the newly created untracked files and delete the removed ones
subprocess.check_output("git rm -f *.py Makefile.* run_tests.*", shell=True)
subprocess.call("git add --all *", shell=True)
print 'Your application should be ready!\nCommit this directory to your local repository and push.'
| lgpl-2.1 | 5,328,880,684,123,721,000 | 1,316,055,169,650,139,400 | 31.475806 | 120 | 0.651105 | false |
smerritt/swift | test/unit/common/middleware/test_versioned_writes.py | 3 | 63681 | # Copyright (c) 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import functools
import json
import os
import time
import mock
import unittest
from swift.common import swob, utils
from swift.common.middleware import versioned_writes, copy
from swift.common.swob import Request
from test.unit.common.middleware.helpers import FakeSwift
class FakeCache(object):
    """Minimal stand-in for a memcache client.

    Holds a single canned container-info dict and hands it back for
    every ``get`` call, defaulting the ``status`` entry to 200 when
    the caller did not provide one.
    """

    def __init__(self, val):
        # Mirror a successful cache entry unless an explicit status
        # was supplied by the test.
        val.setdefault('status', 200)
        self.val = val

    def get(self, *args):
        # Every lookup yields the same canned value, regardless of key.
        return self.val
def local_tz(func):
    '''
    Decorator to change the timezone when running a test.

    This uses the Eastern Time Zone definition from the time module's docs.
    Note that the timezone affects things like time.time() and time.mktime().
    '''
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        # Remember whatever TZ was in effect so it can be restored even
        # if the wrapped test raises.
        saved_tz = os.environ.get('TZ', '')
        try:
            os.environ['TZ'] = 'EST+05EDT,M4.1.0,M10.5.0'
            time.tzset()
            return func(*args, **kwargs)
        finally:
            os.environ['TZ'] = saved_tz
            time.tzset()
    return wrapper
class VersionedWritesBaseTestCase(unittest.TestCase):
    """Shared fixture for versioned-writes middleware tests.

    Wires the middleware under test around an in-memory FakeSwift
    backend and offers helpers to push WSGI requests through either.
    """

    def setUp(self):
        # Middleware under test, configured to allow versioned writes.
        self.app = FakeSwift()
        self.vw = versioned_writes.filter_factory(
            {'allow_versioned_writes': 'true'})(self.app)

    def tearDown(self):
        # Every response body handed out by FakeSwift must have been
        # closed by the code under test.
        self.assertEqual(self.app.unclosed_requests, {})

    def call_app(self, req, app=None):
        """Run *req* through *app* (defaults to the bare FakeSwift).

        Installs an authorize callback that records authorized requests
        on ``self.authorized`` and returns the WSGI exchange as a
        ``(status, headers, body)`` tuple.
        """
        target = self.app if app is None else app
        self.authorized = []

        def authorize(request):
            self.authorized.append(request)

        req.environ.setdefault('swift.authorize', authorize)
        req.headers.setdefault("User-Agent", "Marula Kruger")

        # Capture what the app hands to start_response.
        captured = {'status': None, 'headers': None}

        def start_response(status, headers, exc_info=None):
            captured['status'] = status
            captured['headers'] = headers

        body_iter = target(req.environ, start_response)
        with utils.closing_if_possible(body_iter):
            body = b''.join(body_iter)

        return captured['status'], captured['headers'], body

    def call_vw(self, req):
        # Same as call_app, but through the versioned-writes middleware.
        return self.call_app(req, app=self.vw)

    def assertRequestEqual(self, req, other):
        # Requests are "equal" for these tests when method and path
        # match; headers and bodies are asserted separately.
        self.assertEqual(req.method, other.method)
        self.assertEqual(req.path, other.path)
class VersionedWritesTestCase(VersionedWritesBaseTestCase):
def test_put_container(self):
self.app.register('PUT', '/v1/a/c', swob.HTTPOk, {}, 'passed')
req = Request.blank('/v1/a/c',
headers={'X-Versions-Location': 'ver_cont'},
environ={'REQUEST_METHOD': 'PUT'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '200 OK')
# check for sysmeta header
calls = self.app.calls_with_headers
method, path, req_headers = calls[0]
self.assertEqual('PUT', method)
self.assertEqual('/v1/a/c', path)
self.assertIn('x-container-sysmeta-versions-location', req_headers)
self.assertEqual(req.headers['x-container-sysmeta-versions-location'],
'ver_cont')
self.assertIn('x-container-sysmeta-versions-mode', req_headers)
self.assertEqual(req.headers['x-container-sysmeta-versions-mode'],
'stack')
self.assertEqual(len(self.authorized), 1)
self.assertRequestEqual(req, self.authorized[0])
def test_put_container_history_header(self):
self.app.register('PUT', '/v1/a/c', swob.HTTPOk, {}, 'passed')
req = Request.blank('/v1/a/c',
headers={'X-History-Location': 'ver_cont'},
environ={'REQUEST_METHOD': 'PUT'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '200 OK')
# check for sysmeta header
calls = self.app.calls_with_headers
method, path, req_headers = calls[0]
self.assertEqual('PUT', method)
self.assertEqual('/v1/a/c', path)
self.assertIn('x-container-sysmeta-versions-location', req_headers)
self.assertEqual('ver_cont',
req_headers['x-container-sysmeta-versions-location'])
self.assertIn('x-container-sysmeta-versions-mode', req_headers)
self.assertEqual('history',
req_headers['x-container-sysmeta-versions-mode'])
self.assertEqual(len(self.authorized), 1)
self.assertRequestEqual(req, self.authorized[0])
def test_put_container_both_headers(self):
req = Request.blank('/v1/a/c',
headers={'X-Versions-Location': 'ver_cont',
'X-History-Location': 'ver_cont'},
environ={'REQUEST_METHOD': 'PUT'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '400 Bad Request')
self.assertFalse(self.app.calls)
def test_container_allow_versioned_writes_false(self):
self.vw.conf = {'allow_versioned_writes': 'false'}
# PUT/POST container must fail as 412 when allow_versioned_writes
# set to false
for method in ('PUT', 'POST'):
for header in ('X-Versions-Location', 'X-History-Location'):
req = Request.blank('/v1/a/c',
headers={header: 'ver_cont'},
environ={'REQUEST_METHOD': method})
status, headers, body = self.call_vw(req)
self.assertEqual(status, "412 Precondition Failed",
'Got %s instead of 412 when %sing '
'with %s header' % (status, method, header))
# GET performs as normal
self.app.register('GET', '/v1/a/c', swob.HTTPOk, {}, 'passed')
for method in ('GET', 'HEAD'):
req = Request.blank('/v1/a/c',
headers={'X-Versions-Location': 'ver_cont'},
environ={'REQUEST_METHOD': method})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '200 OK')
def _test_removal(self, headers):
self.app.register('POST', '/v1/a/c', swob.HTTPNoContent, {}, 'passed')
req = Request.blank('/v1/a/c',
headers=headers,
environ={'REQUEST_METHOD': 'POST'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '204 No Content')
# check for sysmeta header
calls = self.app.calls_with_headers
method, path, req_headers = calls[0]
self.assertEqual('POST', method)
self.assertEqual('/v1/a/c', path)
for header in ['x-container-sysmeta-versions-location',
'x-container-sysmeta-versions-mode',
'x-versions-location']:
self.assertIn(header, req_headers)
self.assertEqual('', req_headers[header])
self.assertEqual(len(self.authorized), 1)
self.assertRequestEqual(req, self.authorized[0])
def test_remove_headers(self):
self._test_removal({'X-Remove-Versions-Location': 'x'})
self._test_removal({'X-Remove-History-Location': 'x'})
def test_empty_versions_location(self):
self._test_removal({'X-Versions-Location': ''})
self._test_removal({'X-History-Location': ''})
def test_remove_add_versions_precedence(self):
self.app.register(
'POST', '/v1/a/c', swob.HTTPOk,
{'x-container-sysmeta-versions-location': 'ver_cont'},
'passed')
req = Request.blank('/v1/a/c',
headers={'X-Remove-Versions-Location': 'x',
'X-Versions-Location': 'ver_cont'},
environ={'REQUEST_METHOD': 'POST'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '200 OK')
self.assertIn(('X-Versions-Location', 'ver_cont'), headers)
# check for sysmeta header
calls = self.app.calls_with_headers
method, path, req_headers = calls[0]
self.assertEqual('POST', method)
self.assertEqual('/v1/a/c', path)
self.assertIn('x-container-sysmeta-versions-location', req_headers)
self.assertNotIn('x-remove-versions-location', req_headers)
self.assertEqual(len(self.authorized), 1)
self.assertRequestEqual(req, self.authorized[0])
def _test_blank_add_versions_precedence(self, blank_header, add_header):
self.app.register(
'POST', '/v1/a/c', swob.HTTPOk,
{'x-container-sysmeta-versions-location': 'ver_cont'},
'passed')
req = Request.blank('/v1/a/c',
headers={blank_header: '',
add_header: 'ver_cont'},
environ={'REQUEST_METHOD': 'POST'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '200 OK')
# check for sysmeta header
calls = self.app.calls_with_headers
method, path, req_headers = calls[-1]
self.assertEqual('POST', method)
self.assertEqual('/v1/a/c', path)
self.assertIn('x-container-sysmeta-versions-location', req_headers)
self.assertEqual('ver_cont',
req_headers['x-container-sysmeta-versions-location'])
self.assertIn('x-container-sysmeta-versions-mode', req_headers)
self.assertEqual('history' if add_header == 'X-History-Location'
else 'stack',
req_headers['x-container-sysmeta-versions-mode'])
self.assertNotIn('x-remove-versions-location', req_headers)
self.assertIn('x-versions-location', req_headers)
self.assertEqual('', req_headers['x-versions-location'])
self.assertEqual(len(self.authorized), 1)
self.assertRequestEqual(req, self.authorized[0])
def test_blank_add_versions_precedence(self):
self._test_blank_add_versions_precedence(
'X-Versions-Location', 'X-History-Location')
self._test_blank_add_versions_precedence(
'X-History-Location', 'X-Versions-Location')
def test_get_container(self):
self.app.register(
'GET', '/v1/a/c', swob.HTTPOk,
{'x-container-sysmeta-versions-location': 'ver_cont',
'x-container-sysmeta-versions-mode': 'stack'}, None)
req = Request.blank(
'/v1/a/c',
environ={'REQUEST_METHOD': 'GET'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '200 OK')
self.assertIn(('X-Versions-Location', 'ver_cont'), headers)
self.assertEqual(len(self.authorized), 1)
self.assertRequestEqual(req, self.authorized[0])
def test_head_container(self):
self.app.register(
'HEAD', '/v1/a/c', swob.HTTPOk,
{'x-container-sysmeta-versions-location': 'other_ver_cont',
'x-container-sysmeta-versions-mode': 'history'}, None)
req = Request.blank(
'/v1/a/c',
environ={'REQUEST_METHOD': 'HEAD'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '200 OK')
self.assertIn(('X-History-Location', 'other_ver_cont'), headers)
self.assertEqual(len(self.authorized), 1)
self.assertRequestEqual(req, self.authorized[0])
def test_get_head(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, None)
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'GET'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '200 OK')
self.assertEqual(len(self.authorized), 1)
self.assertRequestEqual(req, self.authorized[0])
self.app.register('HEAD', '/v1/a/c/o', swob.HTTPOk, {}, None)
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '200 OK')
self.assertEqual(len(self.authorized), 1)
self.assertRequestEqual(req, self.authorized[0])
def test_put_object_no_versioning(self):
self.app.register(
'PUT', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
cache = FakeCache({})
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT', 'swift.cache': cache,
'CONTENT_LENGTH': '100'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '200 OK')
self.assertEqual(len(self.authorized), 1)
self.assertRequestEqual(req, self.authorized[0])
    def test_put_first_object_success(self):
        """PUT of a brand-new object: the versioning GET 404s, so no
        version copy is made and the client PUT proceeds normally."""
        self.app.register(
            'PUT', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
        self.app.register(
            'GET', '/v1/a/c/o', swob.HTTPNotFound, {}, None)
        cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'PUT', 'swift.cache': cache,
                     'CONTENT_LENGTH': '100',
                     'swift.trans_id': 'fake_trans_id'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '200 OK')
        self.assertEqual(len(self.authorized), 2)
        # Versioned writes middleware now calls auth on the incoming request
        # before we try the GET and then at the proxy, so there are 2
        # authorized for the same request.
        self.assertRequestEqual(req, self.authorized[0])
        self.assertRequestEqual(req, self.authorized[1])
        self.assertEqual(2, self.app.call_count)
        # the middleware-issued GET is tagged with swift_source 'VW' and
        # reuses the client's transaction id
        self.assertEqual(['VW', None], self.app.swift_sources)
        self.assertEqual({'fake_trans_id'}, set(self.app.txn_ids))
    def test_put_object_no_versioning_with_container_config_true(self):
        # allow_versioned_writes is disabled in the filter conf, so no
        # versioning GET should occur even though the container has a
        # versions location configured
        self.vw.conf = {'allow_versioned_writes': 'false'}
        self.app.register(
            'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, 'passed')
        cache = FakeCache({'versions': 'ver_cont'})
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'PUT', 'swift.cache': cache,
                     'CONTENT_LENGTH': '100'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '201 Created')
        self.assertEqual(len(self.authorized), 1)
        self.assertRequestEqual(req, self.authorized[0])
        # only the client PUT reached the backend
        called_method = [method for (method, path, hdrs) in self.app._calls]
        self.assertNotIn('GET', called_method)
    def test_put_request_is_dlo_manifest_with_container_config_true(self):
        """PUT of a DLO manifest: the existing object is archived to the
        versions container first, and the X-Object-Manifest header stays on
        the client PUT."""
        self.app.register(
            'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, 'passed')
        self.app.register(
            'GET', '/v1/a/c/o', swob.HTTPOk,
            {'last-modified': 'Thu, 1 Jan 1970 00:01:00 GMT'}, 'old version')
        # archived version is named for the original's last-modified
        # timestamp (60s after the epoch)
        self.app.register(
            'PUT', '/v1/a/ver_cont/001o/0000000060.00000', swob.HTTPCreated,
            {}, '')
        cache = FakeCache({'versions': 'ver_cont'})
        req = Request.blank(
            '/v1/a/c/o',
            headers={'X-Object-Manifest': 'req/manifest'},
            environ={'REQUEST_METHOD': 'PUT', 'swift.cache': cache,
                     'CONTENT_LENGTH': '100'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '201 Created')
        self.assertEqual(len(self.authorized), 2)
        self.assertRequestEqual(req, self.authorized[0])
        self.assertRequestEqual(req, self.authorized[1])
        self.assertEqual(3, self.app.call_count)
        self.assertEqual([
            ('GET', '/v1/a/c/o'),
            ('PUT', '/v1/a/ver_cont/001o/0000000060.00000'),
            ('PUT', '/v1/a/c/o'),
        ], self.app.calls)
        # the client's manifest header is on the final (client) PUT
        self.assertIn('x-object-manifest',
                      self.app.calls_with_headers[2].headers)
    def test_put_version_is_dlo_manifest_with_container_config_true(self):
        """PUT over an existing DLO manifest: the archived copy keeps the
        X-Object-Manifest header returned by the GET."""
        self.app.register('GET', '/v1/a/c/o', swob.HTTPOk,
                          {'X-Object-Manifest': 'resp/manifest',
                           'last-modified': 'Thu, 1 Jan 1970 01:00:00 GMT'},
                          'passed')
        # archived version is named for the original's last-modified
        # timestamp (3600s after the epoch)
        self.app.register(
            'PUT', '/v1/a/ver_cont/001o/0000003600.00000', swob.HTTPCreated,
            {}, '')
        self.app.register(
            'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, 'passed')
        cache = FakeCache({'versions': 'ver_cont'})
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'PUT', 'swift.cache': cache,
                     'CONTENT_LENGTH': '100'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '201 Created')
        # The middleware now auths the request before the initial GET, the
        # same GET that gets the X-Object-Manifest back. So a second auth is
        # now done.
        self.assertEqual(len(self.authorized), 2)
        self.assertRequestEqual(req, self.authorized[0])
        self.assertRequestEqual(req, self.authorized[1])
        self.assertEqual(3, self.app.call_count)
        self.assertEqual([
            ('GET', '/v1/a/c/o'),
            ('PUT', '/v1/a/ver_cont/001o/0000003600.00000'),
            ('PUT', '/v1/a/c/o'),
        ], self.app.calls)
        # the manifest header travels with the archived-version PUT
        self.assertIn('x-object-manifest',
                      self.app.calls_with_headers[1].headers)
    def test_delete_object_no_versioning_with_container_config_true(self):
        # allow_versioned_writes is disabled in the filter conf, so expect
        # no versions-container listing and no GET/PUT restore -- the
        # object is just deleted as normal
        self.vw.conf = {'allow_versioned_writes': 'false'}
        self.app.register(
            'DELETE', '/v1/a/c/o', swob.HTTPNoContent, {}, 'passed')
        cache = FakeCache({'versions': 'ver_cont'})
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '204 No Content')
        self.assertEqual(len(self.authorized), 1)
        self.assertRequestEqual(req, self.authorized[0])
        # only the client DELETE reached the backend
        called_method = \
            [method for (method, path, rheaders) in self.app._calls]
        self.assertNotIn('PUT', called_method)
        self.assertNotIn('GET', called_method)
        self.assertEqual(1, self.app.call_count)
    def test_new_version_success(self):
        """PUT over an existing object: the current object is archived to
        the versions container before the client PUT succeeds."""
        self.app.register(
            'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, 'passed')
        self.app.register(
            'GET', '/v1/a/c/o', swob.HTTPOk,
            {'last-modified': 'Thu, 1 Jan 1970 00:00:01 GMT'}, 'passed')
        # archived version is named for the original's last-modified
        # timestamp (1s after the epoch)
        self.app.register(
            'PUT', '/v1/a/ver_cont/001o/0000000001.00000', swob.HTTPCreated,
            {}, None)
        cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'PUT', 'swift.cache': cache,
                     'CONTENT_LENGTH': '100',
                     'swift.trans_id': 'fake_trans_id'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '201 Created')
        # authorized twice now because versioned_writes now makes a check on
        # PUT
        self.assertEqual(len(self.authorized), 2)
        self.assertRequestEqual(req, self.authorized[0])
        # middleware subrequests are tagged with swift_source 'VW' and
        # reuse the client's transaction id
        self.assertEqual(['VW', 'VW', None], self.app.swift_sources)
        self.assertEqual({'fake_trans_id'}, set(self.app.txn_ids))
def test_new_version_get_errors(self):
# GET on source fails, expect client error response,
# no PUT should happen
self.app.register(
'GET', '/v1/a/c/o', swob.HTTPBadRequest, {}, None)
cache = FakeCache({'versions': 'ver_cont'})
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT', 'swift.cache': cache,
'CONTENT_LENGTH': '100'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '412 Precondition Failed')
self.assertEqual(1, self.app.call_count)
# GET on source fails, expect server error response
self.app.register(
'GET', '/v1/a/c/o', swob.HTTPBadGateway, {}, None)
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT', 'swift.cache': cache,
'CONTENT_LENGTH': '100'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '503 Service Unavailable')
self.assertEqual(2, self.app.call_count)
    def test_new_version_put_errors(self):
        """If the PUT of the archived version fails, the client PUT is not
        attempted and the backend error is mapped for the client."""
        # PUT of version fails with a client error, expect 412 response
        self.app.register(
            'GET', '/v1/a/c/o', swob.HTTPOk,
            {'last-modified': 'Thu, 1 Jan 1970 00:00:01 GMT'}, 'passed')
        self.app.register(
            'PUT', '/v1/a/ver_cont/001o/0000000001.00000',
            swob.HTTPUnauthorized, {}, None)
        cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'PUT', 'swift.cache': cache,
                     'CONTENT_LENGTH': '100'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '412 Precondition Failed')
        self.assertEqual(2, self.app.call_count)
        # PUT of version fails with a server error, expect 503 response
        self.app.register(
            'PUT', '/v1/a/ver_cont/001o/0000000001.00000', swob.HTTPBadGateway,
            {}, None)
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'PUT', 'swift.cache': cache,
                     'CONTENT_LENGTH': '100'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '503 Service Unavailable')
        self.assertEqual(4, self.app.call_count)
    @local_tz
    def test_new_version_sysmeta_precedence(self):
        """When the container info carries both the legacy 'versions' value
        and the newer sysmeta value, the sysmeta location must win."""
        self.app.register(
            'PUT', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
        self.app.register(
            'GET', '/v1/a/c/o', swob.HTTPOk,
            {'last-modified': 'Thu, 1 Jan 1970 00:00:00 GMT'}, 'passed')
        self.app.register(
            'PUT', '/v1/a/ver_cont/001o/0000000000.00000', swob.HTTPOk,
            {}, None)
        # fill cache with two different values for versions location
        # new middleware should use sysmeta first
        cache = FakeCache({'versions': 'old_ver_cont',
                           'sysmeta': {'versions-location': 'ver_cont'}})
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'PUT', 'swift.cache': cache,
                     'CONTENT_LENGTH': '100'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '200 OK')
        # authorized twice now because versioned_writes now makes a check on
        # PUT
        self.assertEqual(len(self.authorized), 2)
        self.assertRequestEqual(req, self.authorized[0])
        # check that sysmeta header was used: the archive PUT targets
        # ver_cont, not old_ver_cont
        calls = self.app.calls_with_headers
        method, path, req_headers = calls[1]
        self.assertEqual('PUT', method)
        self.assertEqual('/v1/a/ver_cont/001o/0000000000.00000', path)
    def test_delete_no_versions_container_success(self):
        """DELETE when the versions container doesn't exist (listing 404s):
        the object DELETE still proceeds normally."""
        self.app.register(
            'DELETE', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
        self.app.register(
            'GET',
            '/v1/a/ver_cont?prefix=001o/&marker=&reverse=on',
            swob.HTTPNotFound, {}, None)
        cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache,
                     'CONTENT_LENGTH': '0', 'swift.trans_id': 'fake_trans_id'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '200 OK')
        self.assertEqual(len(self.authorized), 1)
        self.assertRequestEqual(req, self.authorized[0])
        self.assertEqual(2, self.app.call_count)
        # the listing subrequest is tagged 'VW' and reuses the client txn id
        self.assertEqual(['VW', None], self.app.swift_sources)
        self.assertEqual({'fake_trans_id'}, set(self.app.txn_ids))
        prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
        self.assertEqual(self.app.calls, [
            ('GET', prefix_listing_prefix + 'marker=&reverse=on'),
            ('DELETE', '/v1/a/c/o'),
        ])
    def test_delete_first_object_success(self):
        """DELETE when the versions listing is empty: nothing to restore,
        so the object DELETE just passes through."""
        self.app.register(
            'DELETE', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
        self.app.register(
            'GET',
            '/v1/a/ver_cont?prefix=001o/&marker=&reverse=on',
            swob.HTTPOk, {}, '[]')
        cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache,
                     'CONTENT_LENGTH': '0'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '200 OK')
        self.assertEqual(len(self.authorized), 1)
        self.assertRequestEqual(req, self.authorized[0])
        prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
        self.assertEqual(self.app.calls, [
            ('GET', prefix_listing_prefix + 'marker=&reverse=on'),
            ('DELETE', '/v1/a/c/o'),
        ])
    def test_delete_latest_version_no_marker_success(self):
        """Stack-mode DELETE: the newest archived version (001o/2) is
        copied back over the object and then removed from the versions
        container."""
        self.app.register(
            'GET',
            '/v1/a/ver_cont?prefix=001o/&marker=&reverse=on',
            swob.HTTPOk, {},
            '[{"hash": "y", '
            '"last_modified": "2014-11-21T14:23:02.206740", '
            '"bytes": 3, '
            '"name": "001o/2", '
            '"content_type": "text/plain"}, '
            '{"hash": "x", '
            '"last_modified": "2014-11-21T14:14:27.409100", '
            '"bytes": 3, '
            '"name": "001o/1", '
            '"content_type": "text/plain"}]')
        self.app.register(
            'GET', '/v1/a/ver_cont/001o/2', swob.HTTPCreated,
            {'content-length': '3'}, None)
        self.app.register(
            'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, None)
        self.app.register(
            'DELETE', '/v1/a/ver_cont/001o/2', swob.HTTPOk,
            {}, None)
        cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
        req = Request.blank(
            '/v1/a/c/o',
            headers={'X-If-Delete-At': 1},
            environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache,
                     'CONTENT_LENGTH': '0', 'swift.trans_id': 'fake_trans_id'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '200 OK')
        self.assertEqual(len(self.authorized), 1)
        self.assertRequestEqual(req, self.authorized[0])
        self.assertEqual(4, self.app.call_count)
        # all four subrequests are middleware-generated ('VW') and share
        # the client's transaction id
        self.assertEqual(['VW', 'VW', 'VW', 'VW'], self.app.swift_sources)
        self.assertEqual({'fake_trans_id'}, set(self.app.txn_ids))
        # check that X-If-Delete-At was removed from DELETE request
        req_headers = self.app.headers[-1]
        self.assertNotIn('x-if-delete-at', [h.lower() for h in req_headers])
        prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
        self.assertEqual(self.app.calls, [
            ('GET', prefix_listing_prefix + 'marker=&reverse=on'),
            ('GET', '/v1/a/ver_cont/001o/2'),
            ('PUT', '/v1/a/c/o'),
            ('DELETE', '/v1/a/ver_cont/001o/2'),
        ])
    def test_delete_latest_version_restores_marker_success(self):
        """Listing topped by a delete marker, but the base object still
        exists: the DELETE passes through to the object (keeping
        X-If-Delete-At) rather than restoring an archived version."""
        self.app.register(
            'GET',
            '/v1/a/ver_cont?prefix=001o/&marker=&reverse=on',
            swob.HTTPOk, {},
            '[{"hash": "x", '
            '"last_modified": "2014-11-21T14:23:02.206740", '
            '"bytes": 3, '
            '"name": "001o/2", '
            '"content_type": "application/x-deleted;swift_versions_deleted=1"'
            '}, {"hash": "y", '
            '"last_modified": "2014-11-21T14:14:27.409100", '
            '"bytes": 3, '
            '"name": "001o/1", '
            '"content_type": "text/plain"'
            '}]')
        self.app.register(
            'HEAD', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
        self.app.register(
            'DELETE', '/v1/a/c/o', swob.HTTPNoContent, {})
        cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
        req = Request.blank(
            '/v1/a/c/o',
            headers={'X-If-Delete-At': 1},
            environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache,
                     'CONTENT_LENGTH': '0'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '204 No Content')
        self.assertEqual(len(self.authorized), 2)
        self.assertRequestEqual(req, self.authorized[0])
        self.assertRequestEqual(req, self.authorized[1])
        calls = self.app.calls_with_headers
        self.assertEqual(['GET', 'HEAD', 'DELETE'],
                         [c.method for c in calls])
        # the HEAD must ask for the newest copy of the object
        self.assertIn('X-Newest', calls[1].headers)
        self.assertEqual('True', calls[1].headers['X-Newest'])
        method, path, req_headers = calls.pop()
        self.assertTrue(path.startswith('/v1/a/c/o'))
        # Since we're deleting the original, this *should* still be present:
        self.assertEqual('1', req_headers.get('X-If-Delete-At'))
    def test_delete_latest_version_is_marker_success(self):
        # Test popping a delete marker off the stack. So, there's data in the
        # versions container, topped by a delete marker, and there's nothing
        # in the base versioned container.
        self.app.register(
            'GET',
            '/v1/a/ver_cont?prefix=001o/&marker=&reverse=on',
            swob.HTTPOk, {},
            '[{"hash": "y", '
            '"last_modified": "2014-11-21T14:23:02.206740", '
            '"bytes": 3, '
            '"name": "001o/2", '
            '"content_type": "application/x-deleted;swift_versions_deleted=1"'
            '},{"hash": "x", '
            '"last_modified": "2014-11-21T14:14:27.409100", '
            '"bytes": 3, '
            '"name": "001o/1", '
            '"content_type": "text/plain"'
            '}]')
        self.app.register(
            'HEAD', '/v1/a/c/o', swob.HTTPNotFound, {}, 'passed')
        self.app.register(
            'GET', '/v1/a/ver_cont/001o/1', swob.HTTPOk, {}, 'passed')
        self.app.register(
            'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, None)
        self.app.register(
            'DELETE', '/v1/a/ver_cont/001o/2', swob.HTTPOk, {}, 'passed')
        self.app.register(
            'DELETE', '/v1/a/ver_cont/001o/1', swob.HTTPOk, {}, 'passed')
        cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
        req = Request.blank(
            '/v1/a/c/o',
            headers={'X-If-Delete-At': 1},
            environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache,
                     'CONTENT_LENGTH': '0'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '200 OK')
        self.assertEqual(len(self.authorized), 1)
        self.assertRequestEqual(req, self.authorized[0])
        # the data version is restored over the object, then both it and
        # the delete marker are removed from the versions container
        prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
        self.assertEqual(self.app.calls, [
            ('GET', prefix_listing_prefix + 'marker=&reverse=on'),
            ('HEAD', '/v1/a/c/o'),
            ('GET', '/v1/a/ver_cont/001o/1'),
            ('PUT', '/v1/a/c/o'),
            ('DELETE', '/v1/a/ver_cont/001o/1'),
            ('DELETE', '/v1/a/ver_cont/001o/2'),
        ])
        # both the HEAD and the restore GET must ask for the newest copy
        self.assertIn('X-Newest', self.app.headers[1])
        self.assertEqual('True', self.app.headers[1]['X-Newest'])
        self.assertIn('X-Newest', self.app.headers[2])
        self.assertEqual('True', self.app.headers[2]['X-Newest'])
        # check that X-If-Delete-At was removed from DELETE request
        for req_headers in self.app.headers[-2:]:
            self.assertNotIn('x-if-delete-at',
                             [h.lower() for h in req_headers])
    def test_delete_latest_version_doubled_up_markers_success(self):
        """Two consecutive delete markers on top of the stack and no base
        object: only the newest marker (001o/3) is popped."""
        self.app.register(
            'GET', '/v1/a/ver_cont?prefix=001o/'
            '&marker=&reverse=on',
            swob.HTTPOk, {},
            '[{"hash": "x", '
            '"last_modified": "2014-11-21T14:23:02.206740", '
            '"bytes": 3, '
            '"name": "001o/3", '
            '"content_type": "application/x-deleted;swift_versions_deleted=1"'
            '}, {"hash": "y", '
            '"last_modified": "2014-11-21T14:14:27.409100", '
            '"bytes": 3, '
            '"name": "001o/2", '
            '"content_type": "application/x-deleted;swift_versions_deleted=1"'
            '}, {"hash": "y", '
            '"last_modified": "2014-11-20T14:23:02.206740", '
            '"bytes": 30, '
            '"name": "001o/1", '
            '"content_type": "text/plain"'
            '}]')
        self.app.register(
            'HEAD', '/v1/a/c/o', swob.HTTPNotFound, {}, 'passed')
        self.app.register(
            'DELETE', '/v1/a/ver_cont/001o/3', swob.HTTPOk, {}, 'passed')
        cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
        req = Request.blank(
            '/v1/a/c/o',
            headers={'X-If-Delete-At': 1},
            environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache,
                     'CONTENT_LENGTH': '0'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '200 OK')
        self.assertEqual(len(self.authorized), 1)
        self.assertRequestEqual(req, self.authorized[0])
        # check that X-If-Delete-At was removed from DELETE request
        calls = self.app.calls_with_headers
        self.assertEqual(['GET', 'HEAD', 'DELETE'],
                         [c.method for c in calls])
        method, path, req_headers = calls.pop()
        self.assertTrue(path.startswith('/v1/a/ver_cont/001o/3'))
        self.assertNotIn('x-if-delete-at', [h.lower() for h in req_headers])
    @mock.patch('swift.common.middleware.versioned_writes.time.time',
                return_value=1234)
    def test_history_delete_marker_no_object_success(self, mock_time):
        """History-mode DELETE of a missing object: a delete marker named
        for the (mocked) current time is written, and the backend 404 is
        returned to the client."""
        self.app.register(
            'GET', '/v1/a/c/o', swob.HTTPNotFound,
            {}, 'passed')
        self.app.register(
            'PUT', '/v1/a/ver_cont/001o/0000001234.00000', swob.HTTPCreated,
            {}, 'passed')
        self.app.register(
            'DELETE', '/v1/a/c/o', swob.HTTPNotFound, {}, None)
        cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont',
                                       'versions-mode': 'history'}})
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache,
                     'CONTENT_LENGTH': '0'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '404 Not Found')
        self.assertEqual(len(self.authorized), 2)
        # the marker write is authorized as a PUT of the client's request
        req.environ['REQUEST_METHOD'] = 'PUT'
        self.assertRequestEqual(req, self.authorized[0])
        calls = self.app.calls_with_headers
        self.assertEqual(['GET', 'PUT', 'DELETE'], [c.method for c in calls])
        # the delete marker is written with the reserved content type
        self.assertEqual('application/x-deleted;swift_versions_deleted=1',
                         calls[1].headers.get('Content-Type'))
    @mock.patch('swift.common.middleware.versioned_writes.time.time',
                return_value=123456789.54321)
    def test_history_delete_marker_over_object_success(self, mock_time):
        """History-mode DELETE of an existing object: the current object is
        archived (named for its last-modified time), then a delete marker
        named for the (mocked) current time is written, then the object is
        deleted."""
        self.app.register(
            'GET', '/v1/a/c/o', swob.HTTPOk,
            {'last-modified': 'Wed, 19 Nov 2014 18:19:02 GMT'}, 'passed')
        self.app.register(
            'PUT', '/v1/a/ver_cont/001o/1416421142.00000', swob.HTTPCreated,
            {}, 'passed')
        self.app.register(
            'PUT', '/v1/a/ver_cont/001o/0123456789.54321', swob.HTTPCreated,
            {}, 'passed')
        self.app.register(
            'DELETE', '/v1/a/c/o', swob.HTTPNoContent, {}, None)
        cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont',
                                       'versions-mode': 'history'}})
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache,
                     'CONTENT_LENGTH': '0'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '204 No Content')
        self.assertEqual('', body)
        self.assertEqual(len(self.authorized), 2)
        # the archive/marker writes are authorized as a PUT of the client's
        # request
        req.environ['REQUEST_METHOD'] = 'PUT'
        self.assertRequestEqual(req, self.authorized[0])
        calls = self.app.calls_with_headers
        self.assertEqual(['GET', 'PUT', 'PUT', 'DELETE'],
                         [c.method for c in calls])
        self.assertEqual('/v1/a/ver_cont/001o/1416421142.00000',
                         calls[1].path)
        # the delete marker is written with the reserved content type
        self.assertEqual('application/x-deleted;swift_versions_deleted=1',
                         calls[2].headers.get('Content-Type'))
    def test_delete_single_version_success(self):
        # check that if the first listing page has just a single item then
        # it is not erroneously inferred to be a non-reversed listing
        self.app.register(
            'DELETE', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
        self.app.register(
            'GET',
            '/v1/a/ver_cont?prefix=001o/&marker=&reverse=on',
            swob.HTTPOk, {},
            '[{"hash": "y", '
            '"last_modified": "2014-11-21T14:23:02.206740", '
            '"bytes": 3, '
            '"name": "001o/1", '
            '"content_type": "text/plain"}]')
        self.app.register(
            'GET', '/v1/a/ver_cont/001o/1', swob.HTTPOk,
            {'content-length': '3'}, None)
        self.app.register(
            'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, None)
        self.app.register(
            'DELETE', '/v1/a/ver_cont/001o/1', swob.HTTPOk,
            {}, None)
        cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache,
                     'CONTENT_LENGTH': '0'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '200 OK')
        self.assertEqual(len(self.authorized), 1)
        self.assertRequestEqual(req, self.authorized[0])
        # the single version is restored over the object, then removed
        prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
        self.assertEqual(self.app.calls, [
            ('GET', prefix_listing_prefix + 'marker=&reverse=on'),
            ('GET', '/v1/a/ver_cont/001o/1'),
            ('PUT', '/v1/a/c/o'),
            ('DELETE', '/v1/a/ver_cont/001o/1'),
        ])
    def test_DELETE_on_expired_versioned_object(self):
        """If the newest archived version has expired (its GET 404s), the
        next older version is restored instead."""
        self.app.register(
            'GET',
            '/v1/a/ver_cont?prefix=001o/&marker=&reverse=on',
            swob.HTTPOk, {},
            '[{"hash": "y", '
            '"last_modified": "2014-11-21T14:23:02.206740", '
            '"bytes": 3, '
            '"name": "001o/2", '
            '"content_type": "text/plain"}, '
            '{"hash": "x", '
            '"last_modified": "2014-11-21T14:14:27.409100", '
            '"bytes": 3, '
            '"name": "001o/1", '
            '"content_type": "text/plain"}]')
        # expired object
        self.app.register(
            'GET', '/v1/a/ver_cont/001o/2', swob.HTTPNotFound,
            {}, None)
        self.app.register(
            'GET', '/v1/a/ver_cont/001o/1', swob.HTTPCreated,
            {'content-length': '3'}, None)
        self.app.register(
            'PUT', '/v1/a/c/o', swob.HTTPOk, {}, None)
        self.app.register(
            'DELETE', '/v1/a/ver_cont/001o/1', swob.HTTPOk,
            {}, None)
        cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache,
                     'CONTENT_LENGTH': '0'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '200 OK')
        self.assertEqual(len(self.authorized), 1)
        self.assertRequestEqual(req, self.authorized[0])
        self.assertEqual(5, self.app.call_count)
        prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
        self.assertEqual(self.app.calls, [
            ('GET', prefix_listing_prefix + 'marker=&reverse=on'),
            ('GET', '/v1/a/ver_cont/001o/2'),
            ('GET', '/v1/a/ver_cont/001o/1'),
            ('PUT', '/v1/a/c/o'),
            ('DELETE', '/v1/a/ver_cont/001o/1'),
        ])
    def test_denied_DELETE_of_versioned_object(self):
        """If auth denies the object DELETE, the middleware stops after
        the versions listing and returns 403 without touching the
        object."""
        authorize_call = []
        self.app.register(
            'GET',
            '/v1/a/ver_cont?prefix=001o/&marker=&reverse=on',
            swob.HTTPOk, {},
            '[{"hash": "y", '
            '"last_modified": "2014-11-21T14:23:02.206740", '
            '"bytes": 3, '
            '"name": "001o/2", '
            '"content_type": "text/plain"}, '
            '{"hash": "x", '
            '"last_modified": "2014-11-21T14:14:27.409100", '
            '"bytes": 3, '
            '"name": "001o/1", '
            '"content_type": "text/plain"}]')
        def fake_authorize(req):
            # the container GET is pre-auth'd so here we deny the object DELETE
            authorize_call.append(req)
            return swob.HTTPForbidden()
        cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache,
                     'swift.authorize': fake_authorize,
                     'CONTENT_LENGTH': '0'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '403 Forbidden')
        self.assertEqual(len(authorize_call), 1)
        self.assertRequestEqual(req, authorize_call[0])
        # only the listing reached the backend
        prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
        self.assertEqual(self.app.calls, [
            ('GET', prefix_listing_prefix + 'marker=&reverse=on'),
        ])
    def test_denied_PUT_of_versioned_object(self):
        """If auth denies the PUT up front, nothing at all reaches the
        backend."""
        authorize_call = []
        self.app.register(
            'GET', '/v1/a/c/o', swob.HTTPOk,
            {'last-modified': 'Thu, 1 Jan 1970 00:00:01 GMT'}, 'passed')
        def fake_authorize(req):
            # we should deny the object PUT
            authorize_call.append(req)
            return swob.HTTPForbidden()
        cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'PUT', 'swift.cache': cache,
                     'swift.authorize': fake_authorize,
                     'CONTENT_LENGTH': '0'})
        # Save off a copy, as the middleware may modify the original
        expected_req = Request(req.environ.copy())
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '403 Forbidden')
        self.assertEqual(len(authorize_call), 1)
        self.assertRequestEqual(expected_req, authorize_call[0])
        self.assertEqual(self.app.calls, [])
class VersionedWritesOldContainersTestCase(VersionedWritesBaseTestCase):
    def test_delete_latest_version_success(self):
        """Old-style (forward-ordered) listing: the first page comes back
        un-reversed, paging continues until exhausted, then the newest
        version (001o/2) is restored and removed."""
        self.app.register(
            'DELETE', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
        self.app.register(
            'GET', '/v1/a/ver_cont?prefix=001o/&'
            'marker=&reverse=on',
            swob.HTTPOk, {},
            '[{"hash": "x", '
            '"last_modified": "2014-11-21T14:14:27.409100", '
            '"bytes": 3, '
            '"name": "001o/1", '
            '"content_type": "text/plain"}, '
            '{"hash": "y", '
            '"last_modified": "2014-11-21T14:23:02.206740", '
            '"bytes": 3, '
            '"name": "001o/2", '
            '"content_type": "text/plain"}]')
        self.app.register(
            'GET', '/v1/a/ver_cont?prefix=001o/'
            '&marker=001o/2',
            swob.HTTPNotFound, {}, None)
        self.app.register(
            'GET', '/v1/a/ver_cont/001o/2', swob.HTTPCreated,
            {'content-length': '3'}, None)
        self.app.register(
            'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, None)
        self.app.register(
            'DELETE', '/v1/a/ver_cont/001o/2', swob.HTTPOk,
            {}, None)
        cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
        req = Request.blank(
            '/v1/a/c/o',
            headers={'X-If-Delete-At': 1},
            environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache,
                     'CONTENT_LENGTH': '0', 'swift.trans_id': 'fake_trans_id'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '200 OK')
        self.assertEqual(len(self.authorized), 1)
        self.assertRequestEqual(req, self.authorized[0])
        self.assertEqual(5, self.app.call_count)
        # all subrequests are tagged 'VW' and share the client txn id
        self.assertEqual(['VW', 'VW', 'VW', 'VW', 'VW'],
                         self.app.swift_sources)
        self.assertEqual({'fake_trans_id'}, set(self.app.txn_ids))
        # check that X-If-Delete-At was removed from DELETE request
        req_headers = self.app.headers[-1]
        self.assertNotIn('x-if-delete-at', [h.lower() for h in req_headers])
        prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
        self.assertEqual(self.app.calls, [
            ('GET', prefix_listing_prefix + 'marker=&reverse=on'),
            ('GET', prefix_listing_prefix + 'marker=001o/2'),
            ('GET', '/v1/a/ver_cont/001o/2'),
            ('PUT', '/v1/a/c/o'),
            ('DELETE', '/v1/a/ver_cont/001o/2'),
        ])
    def test_DELETE_on_expired_versioned_object(self):
        """Old-style listing with the newest version expired: fall back to
        restoring the next older version."""
        self.app.register(
            'GET', '/v1/a/ver_cont?prefix=001o/&'
            'marker=&reverse=on',
            swob.HTTPOk, {},
            '[{"hash": "x", '
            '"last_modified": "2014-11-21T14:14:27.409100", '
            '"bytes": 3, '
            '"name": "001o/1", '
            '"content_type": "text/plain"}, '
            '{"hash": "y", '
            '"last_modified": "2014-11-21T14:23:02.206740", '
            '"bytes": 3, '
            '"name": "001o/2", '
            '"content_type": "text/plain"}]')
        self.app.register(
            'GET', '/v1/a/ver_cont?prefix=001o/'
            '&marker=001o/2',
            swob.HTTPNotFound, {}, None)
        # expired object
        self.app.register(
            'GET', '/v1/a/ver_cont/001o/2', swob.HTTPNotFound,
            {}, None)
        self.app.register(
            'GET', '/v1/a/ver_cont/001o/1', swob.HTTPCreated,
            {'content-length': '3'}, None)
        self.app.register(
            'PUT', '/v1/a/c/o', swob.HTTPOk, {}, None)
        self.app.register(
            'DELETE', '/v1/a/ver_cont/001o/1', swob.HTTPOk,
            {}, None)
        cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache,
                     'CONTENT_LENGTH': '0'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '200 OK')
        self.assertEqual(len(self.authorized), 1)
        self.assertRequestEqual(req, self.authorized[0])
        self.assertEqual(6, self.app.call_count)
        prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
        self.assertEqual(self.app.calls, [
            ('GET', prefix_listing_prefix + 'marker=&reverse=on'),
            ('GET', prefix_listing_prefix + 'marker=001o/2'),
            ('GET', '/v1/a/ver_cont/001o/2'),
            ('GET', '/v1/a/ver_cont/001o/1'),
            ('PUT', '/v1/a/c/o'),
            ('DELETE', '/v1/a/ver_cont/001o/1'),
        ])
    def test_denied_DELETE_of_versioned_object(self):
        """Auth denies the DELETE: only the two listing pages are fetched
        before the 403 is returned."""
        authorize_call = []
        self.app.register(
            'DELETE', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
        self.app.register(
            'GET', '/v1/a/ver_cont?prefix=001o/&'
            'marker=&reverse=on',
            swob.HTTPOk, {},
            '[{"hash": "x", '
            '"last_modified": "2014-11-21T14:14:27.409100", '
            '"bytes": 3, '
            '"name": "001o/1", '
            '"content_type": "text/plain"}, '
            '{"hash": "y", '
            '"last_modified": "2014-11-21T14:23:02.206740", '
            '"bytes": 3, '
            '"name": "001o/2", '
            '"content_type": "text/plain"}]')
        self.app.register(
            'GET', '/v1/a/ver_cont?prefix=001o/'
            '&marker=001o/2',
            swob.HTTPNotFound, {}, None)
        self.app.register(
            'DELETE', '/v1/a/c/o', swob.HTTPForbidden,
            {}, None)
        def fake_authorize(req):
            authorize_call.append(req)
            return swob.HTTPForbidden()
        cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache,
                     'swift.authorize': fake_authorize,
                     'CONTENT_LENGTH': '0'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '403 Forbidden')
        self.assertEqual(len(authorize_call), 1)
        self.assertRequestEqual(req, authorize_call[0])
        prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
        self.assertEqual(self.app.calls, [
            ('GET', prefix_listing_prefix + 'marker=&reverse=on'),
            ('GET', prefix_listing_prefix + 'marker=001o/2'),
        ])
    def test_partially_upgraded_cluster(self):
        """Mixed-version cluster: the first container server honors
        reverse=on but all of its listed objects are already gone; a second
        server ignores reverse, so the middleware falls back to forward
        (end_marker) paging to find the newest surviving version."""
        old_versions = [
            {'hash': 'etag%d' % x,
             'last_modified': "2014-11-21T14:14:%02d.409100" % x,
             'bytes': 3,
             'name': '001o/%d' % x,
             'content_type': 'text/plain'}
            for x in range(5)]
        # first container server can reverse
        self.app.register(
            'GET', '/v1/a/ver_cont?prefix=001o/&'
            'marker=&reverse=on',
            swob.HTTPOk, {}, json.dumps(list(reversed(old_versions[2:]))))
        # but all objects are already gone
        self.app.register(
            'GET', '/v1/a/ver_cont/001o/4', swob.HTTPNotFound,
            {}, None)
        self.app.register(
            'GET', '/v1/a/ver_cont/001o/3', swob.HTTPNotFound,
            {}, None)
        self.app.register(
            'GET', '/v1/a/ver_cont/001o/2', swob.HTTPNotFound,
            {}, None)
        # second container server can't reverse
        self.app.register(
            'GET', '/v1/a/ver_cont?prefix=001o/&'
            'marker=001o/2&reverse=on',
            swob.HTTPOk, {}, json.dumps(old_versions[3:]))
        # subsequent requests shouldn't reverse
        self.app.register(
            'GET', '/v1/a/ver_cont?prefix=001o/&'
            'marker=&end_marker=001o/2',
            swob.HTTPOk, {}, json.dumps(old_versions[:1]))
        self.app.register(
            'GET', '/v1/a/ver_cont?prefix=001o/&'
            'marker=001o/0&end_marker=001o/2',
            swob.HTTPOk, {}, json.dumps(old_versions[1:2]))
        self.app.register(
            'GET', '/v1/a/ver_cont?prefix=001o/&'
            'marker=001o/1&end_marker=001o/2',
            swob.HTTPOk, {}, '[]')
        self.app.register(
            'GET', '/v1/a/ver_cont/001o/1', swob.HTTPOk,
            {'content-length': '3'}, None)
        self.app.register(
            'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, None)
        self.app.register(
            'DELETE', '/v1/a/ver_cont/001o/1', swob.HTTPNoContent,
            {}, None)
        cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache,
                     'CONTENT_LENGTH': '0'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '204 No Content')
        prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
        self.assertEqual(self.app.calls, [
            ('GET', prefix_listing_prefix + 'marker=&reverse=on'),
            ('GET', '/v1/a/ver_cont/001o/4'),
            ('GET', '/v1/a/ver_cont/001o/3'),
            ('GET', '/v1/a/ver_cont/001o/2'),
            ('GET', prefix_listing_prefix + 'marker=001o/2&reverse=on'),
            ('GET', prefix_listing_prefix + 'marker=&end_marker=001o/2'),
            ('GET', prefix_listing_prefix + 'marker=001o/0&end_marker=001o/2'),
            ('GET', prefix_listing_prefix + 'marker=001o/1&end_marker=001o/2'),
            ('GET', '/v1/a/ver_cont/001o/1'),
            ('PUT', '/v1/a/c/o'),
            ('DELETE', '/v1/a/ver_cont/001o/1'),
        ])
def test_partially_upgraded_cluster_single_result_on_second_page(self):
old_versions = [
{'hash': 'etag%d' % x,
'last_modified': "2014-11-21T14:14:%02d.409100" % x,
'bytes': 3,
'name': '001o/%d' % x,
'content_type': 'text/plain'}
for x in range(5)]
# first container server can reverse
self.app.register(
'GET', '/v1/a/ver_cont?prefix=001o/&'
'marker=&reverse=on',
swob.HTTPOk, {}, json.dumps(list(reversed(old_versions[-2:]))))
# but both objects are already gone
self.app.register(
'GET', '/v1/a/ver_cont/001o/4', swob.HTTPNotFound,
{}, None)
self.app.register(
'GET', '/v1/a/ver_cont/001o/3', swob.HTTPNotFound,
{}, None)
# second container server can't reverse
self.app.register(
'GET', '/v1/a/ver_cont?prefix=001o/&'
'marker=001o/3&reverse=on',
swob.HTTPOk, {}, json.dumps(old_versions[4:]))
# subsequent requests shouldn't reverse
self.app.register(
'GET', '/v1/a/ver_cont?prefix=001o/&'
'marker=&end_marker=001o/3',
swob.HTTPOk, {}, json.dumps(old_versions[:2]))
self.app.register(
'GET', '/v1/a/ver_cont?prefix=001o/&'
'marker=001o/1&end_marker=001o/3',
swob.HTTPOk, {}, json.dumps(old_versions[2:3]))
self.app.register(
'GET', '/v1/a/ver_cont?prefix=001o/&'
'marker=001o/2&end_marker=001o/3',
swob.HTTPOk, {}, '[]')
self.app.register(
'GET', '/v1/a/ver_cont/001o/2', swob.HTTPOk,
{'content-length': '3'}, None)
self.app.register(
'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, None)
self.app.register(
'DELETE', '/v1/a/ver_cont/001o/2', swob.HTTPNoContent,
{}, None)
cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache,
'CONTENT_LENGTH': '0'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '204 No Content')
prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
self.assertEqual(self.app.calls, [
('GET', prefix_listing_prefix + 'marker=&reverse=on'),
('GET', '/v1/a/ver_cont/001o/4'),
('GET', '/v1/a/ver_cont/001o/3'),
('GET', prefix_listing_prefix + 'marker=001o/3&reverse=on'),
('GET', prefix_listing_prefix + 'marker=&end_marker=001o/3'),
('GET', prefix_listing_prefix + 'marker=001o/1&end_marker=001o/3'),
('GET', prefix_listing_prefix + 'marker=001o/2&end_marker=001o/3'),
('GET', '/v1/a/ver_cont/001o/2'),
('PUT', '/v1/a/c/o'),
('DELETE', '/v1/a/ver_cont/001o/2'),
])
class VersionedWritesCopyingTestCase(VersionedWritesBaseTestCase):
    # verify interaction of copy and versioned_writes middlewares
    """Exercise COPY requests flowing through copy -> versioned_writes."""

    def setUp(self):
        # Build the middleware stack by hand: FakeSwift at the bottom,
        # versioned_writes above it, copy outermost (requests enter there).
        self.app = FakeSwift()
        conf = {'allow_versioned_writes': 'true'}
        self.vw = versioned_writes.filter_factory(conf)(self.app)
        self.filter = copy.filter_factory({})(self.vw)

    def call_filter(self, req, **kwargs):
        # Route requests through the full copy+versioned_writes stack.
        return self.call_app(req, app=self.filter, **kwargs)

    def test_copy_first_version(self):
        """COPY onto a target with no existing object: no archival PUT."""
        # no existing object to move to the versions container
        self.app.register(
            'GET', '/v1/a/tgt_cont/tgt_obj', swob.HTTPNotFound, {}, None)
        self.app.register(
            'GET', '/v1/a/src_cont/src_obj', swob.HTTPOk, {}, 'passed')
        self.app.register(
            'PUT', '/v1/a/tgt_cont/tgt_obj', swob.HTTPCreated, {}, 'passed')
        cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
        req = Request.blank(
            '/v1/a/src_cont/src_obj',
            environ={'REQUEST_METHOD': 'COPY', 'swift.cache': cache,
                     'CONTENT_LENGTH': '100'},
            headers={'Destination': 'tgt_cont/tgt_obj'})
        status, headers, body = self.call_filter(req)
        self.assertEqual(status, '201 Created')
        self.assertEqual(len(self.authorized), 3)
        self.assertEqual('GET', self.authorized[0].method)
        self.assertEqual('/v1/a/src_cont/src_obj', self.authorized[0].path)
        # At the moment we are calling authorize on the incoming request in
        # the middleware before we do the PUT (and the source GET) and again
        # on the incoming request when it gets to the proxy. So the 2nd and
        # 3rd auths look the same.
        self.assertEqual('PUT', self.authorized[1].method)
        self.assertEqual('/v1/a/tgt_cont/tgt_obj', self.authorized[1].path)
        self.assertEqual('PUT', self.authorized[2].method)
        self.assertEqual('/v1/a/tgt_cont/tgt_obj', self.authorized[2].path)
        # note the GET on tgt_cont/tgt_obj is pre-authed
        self.assertEqual(3, self.app.call_count, self.app.calls)

    def test_copy_new_version(self):
        """COPY over an existing target: old version is archived first."""
        # existing object should be moved to versions container
        self.app.register(
            'GET', '/v1/a/src_cont/src_obj', swob.HTTPOk, {}, 'passed')
        self.app.register(
            'GET', '/v1/a/tgt_cont/tgt_obj', swob.HTTPOk,
            {'last-modified': 'Thu, 1 Jan 1970 00:00:01 GMT'}, 'passed')
        # Archive path: <len(name)><name>/<last-modified timestamp>.
        self.app.register(
            'PUT', '/v1/a/ver_cont/007tgt_obj/0000000001.00000', swob.HTTPOk,
            {}, None)
        self.app.register(
            'PUT', '/v1/a/tgt_cont/tgt_obj', swob.HTTPCreated, {}, 'passed')
        cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
        req = Request.blank(
            '/v1/a/src_cont/src_obj',
            environ={'REQUEST_METHOD': 'COPY', 'swift.cache': cache,
                     'CONTENT_LENGTH': '100'},
            headers={'Destination': 'tgt_cont/tgt_obj'})
        status, headers, body = self.call_filter(req)
        self.assertEqual(status, '201 Created')
        self.assertEqual(len(self.authorized), 3)
        self.assertEqual('GET', self.authorized[0].method)
        self.assertEqual('/v1/a/src_cont/src_obj', self.authorized[0].path)
        self.assertEqual('PUT', self.authorized[1].method)
        self.assertEqual('/v1/a/tgt_cont/tgt_obj', self.authorized[1].path)
        self.assertEqual(4, self.app.call_count)

    def test_copy_new_version_different_account(self):
        """Same as above but the destination lives in another account."""
        self.app.register(
            'GET', '/v1/src_a/src_cont/src_obj', swob.HTTPOk, {}, 'passed')
        self.app.register(
            'GET', '/v1/tgt_a/tgt_cont/tgt_obj', swob.HTTPOk,
            {'last-modified': 'Thu, 1 Jan 1970 00:00:01 GMT'}, 'passed')
        self.app.register(
            'PUT', '/v1/tgt_a/ver_cont/007tgt_obj/0000000001.00000',
            swob.HTTPOk, {}, None)
        self.app.register(
            'PUT', '/v1/tgt_a/tgt_cont/tgt_obj', swob.HTTPCreated, {},
            'passed')
        cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
        req = Request.blank(
            '/v1/src_a/src_cont/src_obj',
            environ={'REQUEST_METHOD': 'COPY', 'swift.cache': cache,
                     'CONTENT_LENGTH': '100'},
            headers={'Destination': 'tgt_cont/tgt_obj',
                     'Destination-Account': 'tgt_a'})
        status, headers, body = self.call_filter(req)
        self.assertEqual(status, '201 Created')
        self.assertEqual(len(self.authorized), 3)
        self.assertEqual('GET', self.authorized[0].method)
        self.assertEqual('/v1/src_a/src_cont/src_obj', self.authorized[0].path)
        self.assertEqual('PUT', self.authorized[1].method)
        self.assertEqual('/v1/tgt_a/tgt_cont/tgt_obj', self.authorized[1].path)
        self.assertEqual(4, self.app.call_count)

    def test_copy_object_no_versioning_with_container_config_true(self):
        """With allow_versioned_writes off, COPY behaves like a plain copy."""
        # set False to versions_write obviously and expect no extra
        # COPY called (just copy object as normal)
        self.vw.conf = {'allow_versioned_writes': 'false'}
        self.app.register(
            'GET', '/v1/a/src_cont/src_obj', swob.HTTPOk, {}, 'passed')
        self.app.register(
            'PUT', '/v1/a/tgt_cont/tgt_obj', swob.HTTPCreated, {}, 'passed')
        cache = FakeCache({'versions': 'ver_cont'})
        req = Request.blank(
            '/v1/a/src_cont/src_obj',
            environ={'REQUEST_METHOD': 'COPY', 'swift.cache': cache},
            headers={'Destination': '/tgt_cont/tgt_obj'})
        status, headers, body = self.call_filter(req)
        self.assertEqual(status, '201 Created')
        self.assertEqual(len(self.authorized), 2)
        self.assertEqual('GET', self.authorized[0].method)
        self.assertEqual('/v1/a/src_cont/src_obj', self.authorized[0].path)
        self.assertEqual('PUT', self.authorized[1].method)
        self.assertEqual('/v1/a/tgt_cont/tgt_obj', self.authorized[1].path)
        self.assertEqual(2, self.app.call_count)
class TestSwiftInfo(unittest.TestCase):
    """Check what versioned_writes advertises through the /info registry."""

    def setUp(self):
        # Reset the process-wide capability registries so every test starts
        # from a clean slate.
        utils._swift_info = {}
        utils._swift_admin_info = {}

    def test_registered_defaults(self):
        # Instantiating the filter with an empty config must not advertise
        # the capability.
        versioned_writes.filter_factory({})('have to pass in an app')
        info = utils.get_swift_info()
        # in default, versioned_writes is not in swift_info
        self.assertNotIn('versioned_writes', info)

    def test_registered_explicitly_set(self):
        # Explicit opt-in registers the capability along with the header
        # flags clients may use.
        versioned_writes.filter_factory(
            {'allow_versioned_writes': 'true'})('have to pass in an app')
        info = utils.get_swift_info()
        self.assertIn('versioned_writes', info)
        self.assertEqual(
            info['versioned_writes'].get('allowed_flags'),
            ('x-versions-location', 'x-history-location'))
if __name__ == '__main__':
unittest.main()
| apache-2.0 | 4,790,709,038,947,844,000 | 5,204,423,332,885,829,000 | 41.825151 | 79 | 0.545312 | false |
classicboyir/BuildingMachineLearningSystemsWithPython | ch10/simple_classification.py | 21 | 2299 | # This code is supporting material for the book
# Building Machine Learning Systems with Python
# by Willi Richert and Luis Pedro Coelho
# published by PACKT Publishing
#
# It is made available under the MIT License
#
# Leave-one-out evaluation of a 3-class image classifier using Haralick
# texture features, color histograms, and their concatenation.

import mahotas as mh
import numpy as np
from glob import glob
from features import texture, color_histogram
from sklearn.linear_model import LogisticRegression
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import StandardScaler

# Directory containing the building/scene/text JPEG dataset.
basedir = '../SimpleImageDataset/'

haralicks = []  # texture feature vectors, one per image
labels = []     # class label derived from the filename
chists = []     # color histogram feature vectors, one per image

print('This script will test (with cross-validation) classification of the simple 3 class dataset')
print('Computing features...')
# Use glob to get all the images
images = glob('{}/*.jpg'.format(basedir))

# We sort the images to ensure that they are always processed in the same order
# Otherwise, this would introduce some variation just based on the random
# ordering that the filesystem uses
for fname in sorted(images):
    imc = mh.imread(fname)
    # Texture features are computed on the greyscale image.
    haralicks.append(texture(mh.colors.rgb2grey(imc)))
    chists.append(color_histogram(imc))

    # Files are named like building00.jpg, scene23.jpg...
    # Stripping the two digits + extension leaves the class name (the path
    # prefix is identical for every file, so labels stay consistent).
    labels.append(fname[:-len('xx.jpg')])

print('Finished computing features.')

haralicks = np.array(haralicks)
labels = np.array(labels)
chists = np.array(chists)

# Combined feature set: histograms followed by texture features.
haralick_plus_chists = np.hstack([chists, haralicks])

# We use Logistic Regression because it achieves high accuracy on small(ish) datasets
# Feel free to experiment with other classifiers
clf = Pipeline([('preproc', StandardScaler()),
                ('classifier', LogisticRegression())])

# NOTE(review): sklearn.cross_validation was deprecated in 0.18 and removed
# in 0.20 (replaced by sklearn.model_selection) -- confirm the targeted
# scikit-learn version before running.
from sklearn import cross_validation
cv = cross_validation.LeaveOneOut(len(images))
scores = cross_validation.cross_val_score(
    clf, haralicks, labels, cv=cv)
print('Accuracy (Leave-one-out) with Logistic Regression [haralick features]: {:.1%}'.format(
    scores.mean()))

scores = cross_validation.cross_val_score(
    clf, chists, labels, cv=cv)
print('Accuracy (Leave-one-out) with Logistic Regression [color histograms]: {:.1%}'.format(
    scores.mean()))

scores = cross_validation.cross_val_score(
    clf, haralick_plus_chists, labels, cv=cv)
print('Accuracy (Leave-one-out) with Logistic Regression [texture features + color histograms]: {:.1%}'.format(
    scores.mean()))
| mit | 5,339,207,821,907,481,000 | 966,396,863,060,336,800 | 31.842857 | 111 | 0.740757 | false |
lopezloo/mtasa-blue | vendor/google-breakpad/src/tools/python/deps-to-manifest.py | 19 | 4857 | #!/usr/bin/python
# Copyright 2016 Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Convert gclient's DEPS file to repo's manifest xml file."""
from __future__ import print_function
import argparse
import os
import sys
# Known git hosts: maps a repo-style remote name to its fetch URL prefix.
REMOTES = {
    'chromium': 'https://chromium.googlesource.com/',
    'github': 'https://github.com/',
}

# Gerrit review servers for remotes that have one.
REVIEWS = {
    'chromium': 'https://chromium-review.googlesource.com',
}

MANIFEST_HEAD = """<?xml version='1.0' encoding='UTF-8'?>
<!-- AUTOGENERATED BY %(prog)s; DO NOT EDIT -->
<manifest>

  <default revision='refs/heads/master'
           remote='chromium'
           sync-c='true'
           sync-j='8' />
"""

MANIFEST_REMOTE = """
  <remote name='%(name)s'
          fetch='%(fetch)s'
          review='%(review)s' />
"""

MANIFEST_PROJECT = """
  <project path='%(path)s'
           name='%(name)s'
           revision='%(revision)s'
           remote='%(remote)s' />
"""

MANIFEST_TAIL = """
</manifest>
"""


def ConvertDepsToManifest(deps, manifest):
    """Convert the |deps| file to the |manifest|.

    Reads the gclient DEPS file at |deps| (a Python file defining a `deps`
    dict of path -> "<url>@<revision>") and writes a repo manifest XML to
    |manifest|.  The manifest is rewritten only when its content actually
    changes, to avoid needlessly touching the file's mtime.

    Raises:
        ValueError: if a DEPS url does not start with a known remote prefix.
    """
    # Load the DEPS file data.  Use exec(compile(...)) instead of execfile()
    # so this works on both Python 2 and Python 3 (execfile was removed in
    # Python 3; the rest of the script is already py3-ready via
    # `from __future__ import print_function`).
    ctx = {}
    with open(deps) as fp:
        exec(compile(fp.read(), deps, 'exec'), ctx)

    new_contents = ''

    # Write out the common header.
    data = {
        'prog': os.path.basename(__file__),
    }
    new_contents += MANIFEST_HEAD % data

    # Write out the <remote> sections.
    for name, fetch in REMOTES.items():
        data = {
            'name': name,
            'fetch': fetch,
            'review': REVIEWS.get(name, ''),
        }
        new_contents += MANIFEST_REMOTE % data

    # Write out the main repo itself.
    data = {
        'path': 'src',
        'name': 'breakpad/breakpad',
        'revision': 'refs/heads/master',
        'remote': 'chromium',
    }
    new_contents += MANIFEST_PROJECT % data

    # Write out the <project> sections.
    for path, url in ctx['deps'].items():
        for name, fetch in REMOTES.items():
            if url.startswith(fetch):
                remote = name
                break
        else:
            raise ValueError('Unknown DEPS remote: %s: %s' % (path, url))

        # The DEPS url will look like:
        # https://chromium.googlesource.com/external/gyp/@e8ab0833a42691cd2
        remote_path, rev = url.split('@')
        remote_path = remote_path[len(fetch):]

        # If it's not a revision, assume it's a tag.  Repo wants full ref names.
        if len(rev) != 40:
            rev = 'refs/tags/%s' % rev

        data = {
            'path': path,
            'name': remote_path,
            'revision': rev,
            'remote': remote,
        }
        new_contents += MANIFEST_PROJECT % data

    # Write out the common footer.
    new_contents += MANIFEST_TAIL

    # See if the manifest has actually changed contents to avoid thrashing.
    try:
        # Close the handle deterministically (the old bare open() leaked it
        # until garbage collection).
        with open(manifest) as fp:
            old_contents = fp.read()
    except IOError:
        # In case the file doesn't exist yet.
        old_contents = ''

    if old_contents != new_contents:
        print('Updating %s due to changed %s' % (manifest, deps))
        with open(manifest, 'w') as fp:
            fp.write(new_contents)
def GetParser():
    """Build and return the command-line argument parser."""
    parser = argparse.ArgumentParser(description=__doc__)
    # Two required positionals: the input DEPS file and the output manifest.
    for arg_name, arg_help in (
            ('deps', 'The DEPS file to convert'),
            ('manifest', 'The manifest xml to generate')):
        parser.add_argument(arg_name, help=arg_help)
    return parser
def main(argv):
    """CLI entry point: parse |argv| and run the DEPS -> manifest conversion.

    Returns None (interpreted as exit status 0 by sys.exit).
    """
    parser = GetParser()
    opts = parser.parse_args(argv)
    ConvertDepsToManifest(opts.deps, opts.manifest)


if __name__ == '__main__':
    # Strip the program name; argparse only wants the arguments.
    sys.exit(main(sys.argv[1:]))
| gpl-3.0 | 8,502,639,454,997,077,000 | -2,692,609,690,749,713,400 | 28.083832 | 76 | 0.652872 | false |
fevxie/sale-workflow | sale_jit_on_services/__openerp__.py | 34 | 1916 | # -*- coding: utf-8 -*-
#
#
# Author: Joël Grand-Guillaume, Yannick Vaucher
# Copyright 2013 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
{
'name': 'Sale Service Just In Time',
'version': '1.0',
'category': 'Generic Modules/Sale',
'description': """
Sale Service Just In Time
=========================
When you make a SO with products and services, the workflow of the SO will not
reach the state done unless you deliver all products and procurements
linked to service products are done.
Usually, when the MRP runs, it marks procurements of services' lines as
done. But, you may want to mark them as done like if you were using the
mrp_jit module.
This module provide that feature: It bring the behavior of the mrp_jit module
but only on services products.
Contributors
------------
* Joël Grand-Guillaume <joel.grand-guillaume@camptocamp.com>
* Yannick Vaucher <yannick.vaucher@camptocamp.com>
""",
'author': "Camptocamp,Odoo Community Association (OCA)",
'depends': ['procurement'],
'website': 'http://www.camptocamp.com',
'data': [
'procurement_workflow.xml'
],
'test': [
'test/sale_service_jit_test.yml',
],
'demo': [],
'installable': False,
'active': False,
}
| agpl-3.0 | -3,028,989,947,018,595,300 | -6,655,458,644,440,890,000 | 30.9 | 78 | 0.682863 | false |
lezizi/A-Framework | python/local-source/source.py | 1 | 2324 | #!/usr/bin/env python
#
# Copyright (C) 2012 LeZiZi Studio
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class SourceHandler():
    '''
    Provides basic source handling.

    Wraps a Source object and offers duplicate-free append/delete/join
    operations on its ``list`` of actions.

    Property:
        source: source object
    '''
    from base import Source

    def __init__(self, source=None):
        '''
        Argument:
            source: an existing Source to wrap, or None to create a new one.
        '''
        if source is None:
            self.source = self.Source()
        else:
            self.source = source

    def append(self, action):
        '''
        Append an Action to current source.

        Argument:
            action: An Action.
        Return:
            Boolean. True for success and False when action exists.
        '''
        # Bug fix: honor the documented contract -- previously the action
        # was always appended (allowing duplicates) and None was returned.
        if self.source.list.count(action) == 0:
            self.source.list.append(action)
            return True
        return False

    def delete(self, act):
        '''
        Remove an action from the current source.

        Argument:
            act: An Action OR a string of action key.
        Return:
            Boolean. True for success.
        '''
        # Bug fix: the old logic removed the action only when it was
        # *absent* (count == 0) and indexed ``self.list``, which does not
        # exist on this class; delete when present instead.
        if self.source.list.count(act) != 0:
            del self.source.list[self.source.list.index(act)]
            return True
        return False

    def join(self, source):
        '''
        Copy actions from another source into the current source,
        skipping entries that are already present.
        '''
        # Bug fix: operate on self.source.list (``self.list`` never existed).
        for each in source:
            if self.source.list.count(each) == 0:
                self.source.list.append(each)

    def match(self, ingroups=[], outgroups=[], implementation=None, key=None):
        # NOTE(review): mutable default arguments are kept for interface
        # compatibility; harmless while the method is an unimplemented stub.
        ### NOT YET IMP ##
        pass
def test():
    """Ad-hoc smoke test: exercise append/delete on two equal-keyed Actions.

    Requires the project-local ``base`` module; prints results rather than
    asserting them.
    """
    from base import Action
    b = Action()
    b.key = "1"
    c = Action()
    c.key = "1"
    # NOTE(review): cmp() is Python-2-only -- this helper will not run on
    # Python 3 as written.
    print(cmp(b,c))
    a = SourceHandler()
    print(a.append(b))
    print(a.append(c))
    print(a.source.list)
    print(a.delete(b))
    #for each in dir(a):
    #    print(getattr(a,each))

# test()
lyndsysimon/hgrid-git-example | app.py | 1 | 1874 | from flask import Flask, jsonify, render_template, request
import json
import os
import tempfile
app = Flask(__name__)
from git_subprocess import Repository
repo_path = '/tmp/test/'
# Set up a git repository for a storage backend
repo = Repository(repo_path or tempfile.mkdtemp())
repo.init()
# Homepage - just render the template
@app.route('/')
def index():
    """Serve the single-page HGrid UI."""
    return render_template('index.html')
# DELETE verb
@app.route('/api/files/', methods=['DELETE', ])
def delete_files():
    """Delete the files named in the request's ``ids`` form field.

    ``ids`` is a JSON-encoded list of filenames; each is removed from the
    git working tree, then a single commit records all the deletions.
    Returns a JSON object echoing the raw ``ids`` payload.
    """
    # since multiple items could be deleted at once, iterate the list.
    # (Renamed the loop variable: ``id`` shadowed the builtin.)
    for file_id in json.loads(request.form.get('ids', '[]')):
        repo._rm_file(file_id)
    repo.commit(
        author='Internet User <anon@inter.net>',
        message='Deleted file(s)',
    )
    return jsonify({'deleted': request.form.get('ids')})
# GET verb
@app.route('/api/files/', methods=['GET', ])
def get_files():
    """List the plain files in the repository working tree as JSON.

    Directories (including .git) are skipped by the isfile() filter.
    """
    return jsonify({
        'files': [
            _file_dict(f)
            for f in os.listdir(repo.path)
            if os.path.isfile(os.path.join(repo.path, f))
        ]
    })
# POST verb
@app.route('/api/files/', methods=['POST', ])
def add_file():
    """Store an uploaded file in the repository and commit it.

    Expects a multipart upload under the ``file`` key; returns a JSON list
    with the stored file's metadata.
    """
    f = request.files.get('file')

    # Write the file out to its new location.  Open in binary mode and use
    # a context manager: the old text-mode handle corrupted non-text
    # uploads and was never closed.
    new_path = os.path.join(repo.path, f.filename)
    with open(new_path, 'wb') as outfile:
        outfile.write(f.read())

    # add it to git and commit
    repo.add_file(
        file_path=f.filename,
        commit_author='Internet User <anon@inter.net>',
        commit_message='Commited file {}'.format(f.filename)
    )

    # NOTE(review): this passes the absolute path to _file_dict while the
    # GET handler passes bare filenames, so the returned uid/name differ in
    # form between the two endpoints -- confirm which the UI expects.
    return json.dumps([_file_dict(new_path), ])
def _file_dict(f):
    """Build the HGrid-style metadata dict for filename ``f``.

    ``f`` is resolved relative to the repository working tree for the size
    lookup.  'parent_uid' is the literal string 'null' -- presumably the
    value the HGrid frontend expects for a root-level item; verify against
    the client code.
    """
    return {
        'uid': f,
        'name': f,
        'size': os.path.getsize(os.path.join(repo.path, f)),
        'type': 'file',
        'parent_uid': 'null'
    }
if __name__ == '__main__':
app.run(debug=True, port=5000)
| bsd-2-clause | -2,824,141,090,408,491,500 | 6,131,456,454,845,527,000 | 23.337662 | 70 | 0.593917 | false |
junrao/kafka | tests/kafkatest/tests/replication_test.py | 4 | 6417 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ducktape.utils.util import wait_until
from ducktape.mark import matrix
from kafkatest.services.zookeeper import ZookeeperService
from kafkatest.services.kafka import KafkaService
from kafkatest.services.verifiable_producer import VerifiableProducer
from kafkatest.services.console_consumer import ConsoleConsumer, is_int
from kafkatest.tests.produce_consume_validate import ProduceConsumeValidateTest
import signal
def clean_shutdown(test):
    """Discover leader node for our topic and shut it down cleanly."""
    # SIGTERM lets the broker run its controlled-shutdown path.
    test.kafka.signal_leader(test.topic, partition=0, sig=signal.SIGTERM)
def hard_shutdown(test):
    """Discover leader node for our topic and shut it down with a hard kill."""
    # SIGKILL gives the broker no chance to shut down gracefully.
    test.kafka.signal_leader(test.topic, partition=0, sig=signal.SIGKILL)
def clean_bounce(test):
    """Chase the leader of one partition and restart it cleanly."""
    # Each iteration re-discovers the current leader, since leadership
    # moves after every restart.
    for _ in range(5):
        leader = test.kafka.leader(topic=test.topic, partition=0)
        test.kafka.restart_node(leader, clean_shutdown=True)
def hard_bounce(test):
    """Chase the leader and restart it with a hard kill."""
    for i in range(5):
        prev_leader_node = test.kafka.leader(topic=test.topic, partition=0)
        test.kafka.signal_node(prev_leader_node, sig=signal.SIGKILL)

        # Since this is a hard kill, we need to make sure the process is down and that
        # zookeeper and the broker cluster have registered the loss of the leader.
        # Waiting for a new leader to be elected on the topic-partition is a reasonable heuristic for this.
        def leader_changed():
            # A new leader exists and it is not the node we just killed.
            current_leader = test.kafka.leader(topic=test.topic, partition=0)
            return current_leader is not None and current_leader != prev_leader_node

        # First wait for the killed process to actually disappear, then for
        # leadership to move, before restarting the old leader.
        wait_until(lambda: len(test.kafka.pids(prev_leader_node)) == 0, timeout_sec=5)
        wait_until(leader_changed, timeout_sec=10, backoff_sec=.5)
        test.kafka.start_node(prev_leader_node)
# Map of failure-mode name (the strings used by the test's @matrix
# parameters) to the function that drives that failure.
failures = {
    "clean_shutdown": clean_shutdown,
    "hard_shutdown": hard_shutdown,
    "clean_bounce": clean_bounce,
    "hard_bounce": hard_bounce
}
class ReplicationTest(ProduceConsumeValidateTest):
    """
    Note that consuming is a bit tricky, at least with console consumer. The goal is to consume all messages
    (foreach partition) in the topic. In this case, waiting for the last message may cause the consumer to stop
    too soon since console consumer is consuming multiple partitions from a single thread and therefore we lose
    ordering guarantees.

    Waiting on a count of consumed messages can be unreliable: if we stop consuming when num_consumed == num_acked,
    we might exit early if some messages are duplicated (though not an issue here since producer retries==0)

    Therefore rely here on the consumer.timeout.ms setting which times out on the interval between successively
    consumed messages. Since we run the producer to completion before running the consumer, this is a reliable
    indicator that nothing is left to consume.
    """

    def __init__(self, test_context):
        """:type test_context: ducktape.tests.test.TestContext"""
        super(ReplicationTest, self).__init__(test_context=test_context)

        self.topic = "test_topic"
        self.zk = ZookeeperService(test_context, num_nodes=1)
        # 3 brokers, one topic with 3 partitions replicated 3 ways and
        # min.insync.replicas=2, so one broker can fail without data loss.
        self.kafka = KafkaService(test_context, num_nodes=3, zk=self.zk, topics={self.topic: {
            "partitions": 3,
            "replication-factor": 3,
            "min.insync.replicas": 2}
        })
        self.producer_throughput = 10000
        self.num_producers = 1
        self.num_consumers = 1

    def setUp(self):
        self.zk.start()

    def min_cluster_size(self):
        """Override this since we're adding services outside of the constructor"""
        return super(ReplicationTest, self).min_cluster_size() + self.num_producers + self.num_consumers

    @matrix(failure_mode=["clean_shutdown", "hard_shutdown", "clean_bounce", "hard_bounce"],
            security_protocol=["PLAINTEXT", "SSL", "SASL_PLAINTEXT", "SASL_SSL"])
    def test_replication_with_broker_failure(self, failure_mode, security_protocol):
        """Replication tests.
        These tests verify that replication provides simple durability guarantees by checking that data acked by
        brokers is still available for consumption in the face of various failure scenarios.

        Setup: 1 zk, 3 kafka nodes, 1 topic with partitions=3, replication-factor=3, and min.insync.replicas=2

            - Produce messages in the background
            - Consume messages in the background
            - Drive broker failures (shutdown, or bounce repeatedly with kill -15 or kill -9)
            - When done driving failures, stop producing, and finish consuming
            - Validate that every acked message was consumed
        """

        # Clients always use PLAINTEXT; only the inter-broker protocol is
        # varied by the matrix parameter.
        self.kafka.security_protocol = 'PLAINTEXT'
        self.kafka.interbroker_security_protocol = security_protocol
        self.producer = VerifiableProducer(self.test_context, self.num_producers, self.kafka, self.topic, throughput=self.producer_throughput)
        self.consumer = ConsoleConsumer(self.test_context, self.num_consumers, self.kafka, self.topic, consumer_timeout_ms=60000, message_validator=is_int)
        self.kafka.start()

        # Drive the selected failure while producing/consuming, then validate.
        self.run_produce_consume_validate(core_test_action=lambda: failures[failure_mode](self))
sarvex/tensorflow | tensorflow/python/data/util/traverse_test.py | 8 | 4741 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for utilities for traversing the dataset construction graph."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
from tensorflow.python.data.experimental.ops import data_service_ops
from tensorflow.python.data.kernel_tests import test_base
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.data.util import traverse
from tensorflow.python.framework import combinations
from tensorflow.python.ops import gen_dataset_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import test
class _TestDataset(dataset_ops.UnaryUnchangedStructureDataset):
  """Test-only transformation that wraps its input in a PrefetchDataset op
  followed by a ModelDataset op, so a single dataset contributes two
  variant-tensor ops to the graph."""

  def __init__(self, input_dataset):
    self._input_dataset = input_dataset
    temp_variant_tensor = gen_dataset_ops.prefetch_dataset(
        input_dataset._variant_tensor,
        buffer_size=1,
        **self._flat_structure)
    variant_tensor = gen_dataset_ops.model_dataset(
        temp_variant_tensor, **self._flat_structure)
    super(_TestDataset, self).__init__(input_dataset, variant_tensor)
class TraverseTest(test_base.DatasetTestBase, parameterized.TestCase):
  """Tests for traverse.obtain_all_variant_tensor_ops and
  traverse.obtain_capture_by_value_ops."""

  @combinations.generate(test_base.graph_only_combinations())
  def testOnlySource(self):
    """A bare source dataset yields exactly its own op."""
    ds = dataset_ops.Dataset.range(10)
    variant_tensor_ops = traverse.obtain_all_variant_tensor_ops(ds)
    self.assertAllEqual(["RangeDataset"], [x.name for x in variant_tensor_ops])

  @combinations.generate(test_base.graph_only_combinations())
  def testSimplePipeline(self):
    """A chained transformation surfaces both its op and the source op."""
    ds = dataset_ops.Dataset.range(10).map(math_ops.square)
    variant_tensor_ops = traverse.obtain_all_variant_tensor_ops(ds)
    self.assertSetEqual(
        set(["MapDataset", "RangeDataset"]),
        set(x.name for x in variant_tensor_ops))

  @combinations.generate(test_base.graph_only_combinations())
  def testConcat(self):
    """Both inputs of a concatenate are traversed."""
    ds1 = dataset_ops.Dataset.range(10)
    ds2 = dataset_ops.Dataset.range(10)
    ds = ds1.concatenate(ds2)
    variant_tensor_ops = traverse.obtain_all_variant_tensor_ops(ds)
    self.assertSetEqual(
        set(["ConcatenateDataset", "RangeDataset", "RangeDataset_1"]),
        set(x.name for x in variant_tensor_ops))

  @combinations.generate(test_base.graph_only_combinations())
  def testZip(self):
    """Both inputs of a zip are traversed."""
    ds1 = dataset_ops.Dataset.range(10)
    ds2 = dataset_ops.Dataset.range(10)
    ds = dataset_ops.Dataset.zip((ds1, ds2))
    variant_tensor_ops = traverse.obtain_all_variant_tensor_ops(ds)
    self.assertSetEqual(
        set(["ZipDataset", "RangeDataset", "RangeDataset_1"]),
        set(x.name for x in variant_tensor_ops))

  @combinations.generate(test_base.graph_only_combinations())
  def testMultipleVariantTensors(self):
    """A dataset built from several chained ops reports all of them."""
    ds = dataset_ops.Dataset.range(10)
    ds = _TestDataset(ds)
    variant_tensor_ops = traverse.obtain_all_variant_tensor_ops(ds)
    self.assertSetEqual(
        set(["RangeDataset", "ModelDataset", "PrefetchDataset"]),
        set(x.name for x in variant_tensor_ops))

  @combinations.generate(test_base.graph_only_combinations())
  def testFlatMap(self):
    """Datasets captured inside a flat_map function are traversed too."""
    ds1 = dataset_ops.Dataset.range(10).repeat(10)

    def map_fn(ds):
      def _map(x):
        return ds.batch(x)
      return _map

    ds2 = dataset_ops.Dataset.range(20).prefetch(1)
    ds2 = ds2.flat_map(map_fn(ds1))
    variant_tensor_ops = traverse.obtain_all_variant_tensor_ops(ds2)
    self.assertSetEqual(
        set([
            "FlatMapDataset", "PrefetchDataset", "RepeatDataset",
            "RangeDataset", "RangeDataset_1"
        ]), set(x.name for x in variant_tensor_ops))

  @combinations.generate(test_base.graph_only_combinations())
  def testTfDataService(self):
    """obtain_capture_by_value_ops includes data-service-related ops."""
    ds = dataset_ops.Dataset.range(10)
    ds = ds.apply(
        data_service_ops.distribute("parallel_epochs", "grpc://foo:0"))
    ops = traverse.obtain_capture_by_value_ops(ds)
    self.assertContainsSubset(
        ["RangeDataset", "DataServiceDatasetV2", "DummyIterationCounter"],
        set(x.name for x in ops))
if __name__ == "__main__":
test.main()
| apache-2.0 | -2,383,973,866,510,417,400 | -820,511,705,265,966,600 | 37.544715 | 80 | 0.701962 | false |
foss-transportationmodeling/rettina-server | flask/lib/python2.7/site-packages/sqlalchemy/dialects/__init__.py | 21 | 1027 | # dialects/__init__.py
# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
__all__ = (
'drizzle',
'firebird',
'mssql',
'mysql',
'oracle',
'postgresql',
'sqlite',
'sybase',
)
from .. import util
def _auto_fn(name):
"""default dialect importer.
plugs into the :class:`.PluginLoader`
as a first-hit system.
"""
if "." in name:
dialect, driver = name.split(".")
else:
dialect = name
driver = "base"
try:
module = __import__('sqlalchemy.dialects.%s' % (dialect, )).dialects
except ImportError:
return None
module = getattr(module, dialect)
if hasattr(module, driver):
module = getattr(module, driver)
return lambda: module.dialect
else:
return None
registry = util.PluginLoader("sqlalchemy.dialects", auto_fn=_auto_fn)
| apache-2.0 | 4,230,963,994,394,617,300 | -218,513,727,398,239,900 | 21.326087 | 76 | 0.61149 | false |
pombredanne/RESTandra | drivers/py/cql/connection.py | 2 | 7350 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from os.path import exists, abspath, dirname, join
from thrift.transport import TTransport, TSocket
from thrift.protocol import TBinaryProtocol
from thrift.Thrift import TApplicationException
from errors import CQLException, InvalidCompressionScheme
from marshal import prepare
from decoders import SchemaDecoder
from results import RowsProxy
import zlib, re
try:
from cassandra import Cassandra
from cassandra.ttypes import Compression, InvalidRequestException, \
CqlResultType, AuthenticationRequest
except ImportError:
# Hack to run from a source tree
import sys
sys.path.append(join(abspath(dirname(__file__)),
'..',
'..',
'..',
'interface',
'thrift',
'gen-py'))
from cassandra import Cassandra
from cassandra.ttypes import Compression, InvalidRequestException, \
CqlResultType, AuthenticationRequest
COMPRESSION_SCHEMES = ['GZIP']
DEFAULT_COMPRESSION = 'GZIP'
__all__ = ['COMPRESSION_SCHEMES', 'DEFAULT_COMPRESSION', 'Connection']
class Connection(object):
    """
    CQL connection object.
    Example usage:
    >>> conn = Connection("localhost", keyspace="Keyspace1")
    >>> r = conn.execute('SELECT "age" FROM Users')
    >>> for row in r.rows:
    ...     for column in row.columns:
    ...         print "%s is %s years of age" % (r.key, column.age)
    """
    # Regexes used by prepare() to track which keyspace / column family the
    # connection is currently pointed at; they only need to match USE and
    # SELECT statements, not arbitrary CQL.
    _keyspace_re = re.compile("USE (\w+);?", re.I | re.M)
    _cfamily_re = re.compile("SELECT\s+.+\s+FROM\s+(\w+)", re.I | re.M)
    def __init__(self, host, port=9160, keyspace=None, username=None,
                 password=None, decoder=None):
        """
        Params:
        * host .........: hostname of Cassandra node.
        * port .........: port number to connect to (optional).
        * keyspace .....: keyspace name (optional).
        * username .....: username used in authentication (optional).
        * password .....: password used in authentication (optional).
        * decoder ......: result decoder instance (optional, defaults to none).
        """
        # Thrift framed transport over a plain socket, opened immediately.
        socket = TSocket.TSocket(host, port)
        self.transport = TTransport.TFramedTransport(socket)
        protocol = TBinaryProtocol.TBinaryProtocolAccelerated(self.transport)
        self.client = Cassandra.Client(protocol)
        socket.open()
        # XXX: "current" is probably a misnomer.
        self._cur_keyspace = None
        self._cur_column_family = None
        if username and password:
            credentials = {"username": username, "password": password}
            self.client.login(AuthenticationRequest(credentials=credentials))
        if keyspace:
            self.execute('USE %s;' % keyspace)
            self._cur_keyspace = keyspace
        # Default decoder is schema-driven; the schema is fetched once at
        # connect time, so later DDL changes are not reflected here.
        if not decoder:
            self.decoder = SchemaDecoder(self.__get_schema())
        else:
            self.decoder = decoder
    def __get_schema(self):
        # Build a nested dict {keyspace: {cf: {comparator, default_validation_class,
        # columns: {name: validation_class}}}} from describe_keyspaces().
        def columns(metadata):
            results = {}
            for col in metadata:
                results[col.name] = col.validation_class
            return results
        def column_families(cf_defs):
            cfresults = {}
            for cf in cf_defs:
                cfresults[cf.name] = {"comparator": cf.comparator_type}
                cfresults[cf.name]["default_validation_class"] = \
                    cf.default_validation_class
                cfresults[cf.name]["columns"] = columns(cf.column_metadata)
            return cfresults
        schema = {}
        for ksdef in self.client.describe_keyspaces():
            schema[ksdef.name] = column_families(ksdef.cf_defs)
        return schema
    def prepare(self, query, *args):
        """Substitute `args` into `query` and record keyspace/CF context."""
        prepared_query = prepare(query, *args)
        # Snag the keyspace or column family and stash it for later use in
        # decoding columns. These regexes don't match every query, but the
        # current column family only needs to be current for SELECTs.
        match = Connection._cfamily_re.match(prepared_query)
        if match:
            self._cur_column_family = match.group(1)
        else:
            match = Connection._keyspace_re.match(prepared_query)
            if match:
                self._cur_keyspace = match.group(1)
        return prepared_query
    def execute(self, query, *args, **kwargs):
        """
        Execute a CQL query on a remote node.
        Params:
        * query .........: CQL query string.
        * args ..........: Query parameters.
        * compression ...: Query compression type (optional).
        Returns a RowsProxy for ROWS results, an int for INT results,
        and None otherwise.  Raises CQLException on any server error.
        """
        if kwargs.has_key("compression"):
            compress = kwargs.get("compression").upper()
        else:
            compress = DEFAULT_COMPRESSION
        compressed_query = Connection.compress_query(self.prepare(query, *args),
                                                     compress)
        request_compression = getattr(Compression, compress)
        try:
            response = self.client.execute_cql_query(compressed_query,
                                                     request_compression)
        except InvalidRequestException, ire:
            raise CQLException("Bad Request: %s" % ire.why)
        except TApplicationException, tapp:
            raise CQLException("Internal application error")
        except Exception, exc:
            raise CQLException(exc)
        if response.type == CqlResultType.ROWS:
            return RowsProxy(response.rows,
                             self._cur_keyspace,
                             self._cur_column_family,
                             self.decoder)
        if response.type == CqlResultType.INT:
            return response.num
        return None
    def close(self):
        # Closes the underlying framed transport (and its socket).
        self.transport.close()
    def is_open(self):
        # True while the underlying transport is connected.
        return self.transport.isOpen()
    @classmethod
    def compress_query(cls, query, compression):
        """
        Returns a query string compressed with the specified compression type.
        Params:
        * query .........: The query string to compress.
        * compression ...: Type of compression to use.
        Raises InvalidCompressionScheme for unknown schemes.
        """
        if not compression in COMPRESSION_SCHEMES:
            raise InvalidCompressionScheme(compression)
        if compression == 'GZIP':
            return zlib.compress(query)
# vi: ai ts=4 tw=0 sw=4 et
| apache-2.0 | -1,263,381,176,564,202,800 | 3,254,334,885,135,317,000 | 36.5 | 80 | 0.587483 | false |
Michaelmwirigi/lynsays | node_modules/grunt-docker/node_modules/docker/node_modules/pygmentize-bundled/vendor/pygments/build-3.3/pygments/lexers/_postgres_builtins.py | 95 | 9692 | # -*- coding: utf-8 -*-
"""
pygments.lexers._postgres_builtins
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Self-updating data files for PostgreSQL lexer.
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
import urllib.request, urllib.parse, urllib.error
# One man's constant is another man's variable.
SOURCE_URL = 'https://github.com/postgres/postgres/raw/master'
KEYWORDS_URL = SOURCE_URL + '/doc/src/sgml/keywords.sgml'
DATATYPES_URL = SOURCE_URL + '/doc/src/sgml/datatype.sgml'
def update_myself():
    """Regenerate this module's constant lists from the PostgreSQL docs.

    Downloads the keyword and datatype SGML sources (network I/O) and
    rewrites the DATATYPES, PSEUDO_TYPES and KEYWORDS lists in this file
    in place via update_consts().
    NOTE(review): urllib.request.urlopen yields bytes under Python 3 while
    the parse_* helpers operate on str -- confirm decoding behavior.
    """
    # The datatype page is read twice (datatypes + pseudotypes), so buffer it.
    data_file = list(fetch(DATATYPES_URL))
    datatypes = parse_datatypes(data_file)
    pseudos = parse_pseudos(data_file)
    keywords = parse_keywords(fetch(KEYWORDS_URL))
    update_consts(__file__, 'DATATYPES', datatypes)
    update_consts(__file__, 'PSEUDO_TYPES', pseudos)
    update_consts(__file__, 'KEYWORDS', keywords)
def parse_keywords(f):
    """Extract the sorted list of keyword tokens from keywords.sgml.

    `f` is a file-like object; raises ValueError if no keyword is found.
    """
    pattern = re.compile(r'\s*<entry><token>([^<]+)</token></entry>\s*'
                         r'<entry>([^<]+)</entry>')
    keywords = [match.group(1) for match in pattern.finditer(f.read())]
    if not keywords:
        raise ValueError('no keyword found')
    return sorted(keywords)
def parse_datatypes(f):
    """Extract the sorted list of datatype names from datatype.sgml lines.

    Scans `f` (an iterable of lines) up to the first '<sect1' marker,
    considering only lines that declare an '<entry><type>'.
    """
    found = set()
    for raw_line in f:
        if '<sect1' in raw_line:
            break
        if '<entry><type>' not in raw_line:
            continue
        # Parse a string such as
        #   time [ (<replaceable>p</replaceable>) ] [ without time zone ]
        # into types "time" and "without time zone":
        # drop <replaceable> payloads first, then any remaining markup.
        cleaned = re.sub("<replaceable>[^<]+</replaceable>", "", raw_line)
        cleaned = re.sub("<[^>]+>", "", cleaned)
        # Split around the bracketed sections, discarding parts with parens.
        fragments = [piece
                     for bracketed in cleaned.split('[')
                     for piece in bracketed.split(']')
                     if "(" not in piece]
        for fragment in fragments:
            for candidate in fragment.split(','):
                candidate = candidate.strip()
                if not candidate: continue
                found.add(" ".join(candidate.split()))
    return sorted(found)
def parse_pseudos(f):
    """Extract pseudo-type names from the pseudotypes table in datatype.sgml.

    `f` is an iterable of lines.  Raises ValueError if the table start,
    table end, or any entry cannot be found.
    """
    start_pat = re.compile(r'\s*<table id="datatype-pseudotypes-table">')
    entry_pat = re.compile(r'\s*<entry><type>([^<]+)</></entry>')
    end_pat = re.compile(r'\s*</table>')
    lines = iter(f)
    # Skip forward to the start of the pseudotypes table.
    for line in lines:
        if start_pat.match(line) is not None:
            break
    else:
        raise ValueError('pseudo datatypes table not found')
    names = []
    # Collect entries until the table closes.
    for line in lines:
        entry = entry_pat.match(line)
        if entry is not None:
            names.append(entry.group(1))
        if end_pat.match(line) is not None:
            break
    else:
        raise ValueError('end of pseudo datatypes table not found')
    if not names:
        raise ValueError('pseudo datatypes not found')
    return names
def fetch(url):
    """Open `url` and return the response object (network I/O)."""
    return urllib.request.urlopen(url)
def update_consts(filename, constname, content):
    """Rewrite the body of the list literal `constname = [...]` in `filename`.

    `content` is a sequence of strings; their repr()s replace the current
    list items, re-wrapped into lines of at most ~75 characters.  Raises
    ValueError if the assignment or its closing bracket cannot be found,
    or the assignment appears more than once.
    """
    # Fix: use context managers so the file handle is closed even when a
    # ValueError is raised below (the original leaked the open handle).
    with open(filename) as f:
        lines = f.readlines()
    # Line to start/end inserting
    re_start = re.compile(r'^%s\s*=\s*\[\s*$' % constname)
    re_end = re.compile(r'^\s*\]\s*$')
    start = [ n for n, l in enumerate(lines) if re_start.match(l) ]
    if not start:
        raise ValueError("couldn't find line containing '%s = ['" % constname)
    if len(start) > 1:
        raise ValueError("too many lines containing '%s = ['" % constname)
    start = start[0] + 1
    end = [ n for n, l in enumerate(lines) if n >= start and re_end.match(l) ]
    if not end:
        raise ValueError("couldn't find line containing ']' after %s " % constname)
    end = end[0]
    # Pack the new content in lines not too long: ", " between items plus
    # 4 characters of indent/trailing punctuation per line.
    content = [repr(item) for item in content ]
    new_lines = [[]]
    for item in content:
        if sum(map(len, new_lines[-1])) + 2 * len(new_lines[-1]) + len(item) + 4 > 75:
            new_lines.append([])
        new_lines[-1].append(item)
    lines[start:end] = [ "    %s,\n" % ", ".join(items) for items in new_lines ]
    with open(filename, 'w') as f:
        f.write(''.join(lines))
# Autogenerated: please edit them if you like wasting your time.
KEYWORDS = [
'ABORT', 'ABSOLUTE', 'ACCESS', 'ACTION', 'ADD', 'ADMIN', 'AFTER',
'AGGREGATE', 'ALL', 'ALSO', 'ALTER', 'ALWAYS', 'ANALYSE', 'ANALYZE',
'AND', 'ANY', 'ARRAY', 'AS', 'ASC', 'ASSERTION', 'ASSIGNMENT',
'ASYMMETRIC', 'AT', 'ATTRIBUTE', 'AUTHORIZATION', 'BACKWARD', 'BEFORE',
'BEGIN', 'BETWEEN', 'BIGINT', 'BINARY', 'BIT', 'BOOLEAN', 'BOTH', 'BY',
'CACHE', 'CALLED', 'CASCADE', 'CASCADED', 'CASE', 'CAST', 'CATALOG',
'CHAIN', 'CHAR', 'CHARACTER', 'CHARACTERISTICS', 'CHECK', 'CHECKPOINT',
'CLASS', 'CLOSE', 'CLUSTER', 'COALESCE', 'COLLATE', 'COLLATION',
'COLUMN', 'COMMENT', 'COMMENTS', 'COMMIT', 'COMMITTED', 'CONCURRENTLY',
'CONFIGURATION', 'CONNECTION', 'CONSTRAINT', 'CONSTRAINTS', 'CONTENT',
'CONTINUE', 'CONVERSION', 'COPY', 'COST', 'CREATE', 'CROSS', 'CSV',
'CURRENT', 'CURRENT_CATALOG', 'CURRENT_DATE', 'CURRENT_ROLE',
'CURRENT_SCHEMA', 'CURRENT_TIME', 'CURRENT_TIMESTAMP', 'CURRENT_USER',
'CURSOR', 'CYCLE', 'DATA', 'DATABASE', 'DAY', 'DEALLOCATE', 'DEC',
'DECIMAL', 'DECLARE', 'DEFAULT', 'DEFAULTS', 'DEFERRABLE', 'DEFERRED',
'DEFINER', 'DELETE', 'DELIMITER', 'DELIMITERS', 'DESC', 'DICTIONARY',
'DISABLE', 'DISCARD', 'DISTINCT', 'DO', 'DOCUMENT', 'DOMAIN', 'DOUBLE',
'DROP', 'EACH', 'ELSE', 'ENABLE', 'ENCODING', 'ENCRYPTED', 'END',
'ENUM', 'ESCAPE', 'EXCEPT', 'EXCLUDE', 'EXCLUDING', 'EXCLUSIVE',
'EXECUTE', 'EXISTS', 'EXPLAIN', 'EXTENSION', 'EXTERNAL', 'EXTRACT',
'FALSE', 'FAMILY', 'FETCH', 'FIRST', 'FLOAT', 'FOLLOWING', 'FOR',
'FORCE', 'FOREIGN', 'FORWARD', 'FREEZE', 'FROM', 'FULL', 'FUNCTION',
'FUNCTIONS', 'GLOBAL', 'GRANT', 'GRANTED', 'GREATEST', 'GROUP',
'HANDLER', 'HAVING', 'HEADER', 'HOLD', 'HOUR', 'IDENTITY', 'IF',
'ILIKE', 'IMMEDIATE', 'IMMUTABLE', 'IMPLICIT', 'IN', 'INCLUDING',
'INCREMENT', 'INDEX', 'INDEXES', 'INHERIT', 'INHERITS', 'INITIALLY',
'INLINE', 'INNER', 'INOUT', 'INPUT', 'INSENSITIVE', 'INSERT', 'INSTEAD',
'INT', 'INTEGER', 'INTERSECT', 'INTERVAL', 'INTO', 'INVOKER', 'IS',
'ISNULL', 'ISOLATION', 'JOIN', 'KEY', 'LABEL', 'LANGUAGE', 'LARGE',
'LAST', 'LC_COLLATE', 'LC_CTYPE', 'LEADING', 'LEAST', 'LEFT', 'LEVEL',
'LIKE', 'LIMIT', 'LISTEN', 'LOAD', 'LOCAL', 'LOCALTIME',
'LOCALTIMESTAMP', 'LOCATION', 'LOCK', 'MAPPING', 'MATCH', 'MAXVALUE',
'MINUTE', 'MINVALUE', 'MODE', 'MONTH', 'MOVE', 'NAME', 'NAMES',
'NATIONAL', 'NATURAL', 'NCHAR', 'NEXT', 'NO', 'NONE', 'NOT', 'NOTHING',
'NOTIFY', 'NOTNULL', 'NOWAIT', 'NULL', 'NULLIF', 'NULLS', 'NUMERIC',
'OBJECT', 'OF', 'OFF', 'OFFSET', 'OIDS', 'ON', 'ONLY', 'OPERATOR',
'OPTION', 'OPTIONS', 'OR', 'ORDER', 'OUT', 'OUTER', 'OVER', 'OVERLAPS',
'OVERLAY', 'OWNED', 'OWNER', 'PARSER', 'PARTIAL', 'PARTITION',
'PASSING', 'PASSWORD', 'PLACING', 'PLANS', 'POSITION', 'PRECEDING',
'PRECISION', 'PREPARE', 'PREPARED', 'PRESERVE', 'PRIMARY', 'PRIOR',
'PRIVILEGES', 'PROCEDURAL', 'PROCEDURE', 'QUOTE', 'RANGE', 'READ',
'REAL', 'REASSIGN', 'RECHECK', 'RECURSIVE', 'REF', 'REFERENCES',
'REINDEX', 'RELATIVE', 'RELEASE', 'RENAME', 'REPEATABLE', 'REPLACE',
'REPLICA', 'RESET', 'RESTART', 'RESTRICT', 'RETURNING', 'RETURNS',
'REVOKE', 'RIGHT', 'ROLE', 'ROLLBACK', 'ROW', 'ROWS', 'RULE',
'SAVEPOINT', 'SCHEMA', 'SCROLL', 'SEARCH', 'SECOND', 'SECURITY',
'SELECT', 'SEQUENCE', 'SEQUENCES', 'SERIALIZABLE', 'SERVER', 'SESSION',
'SESSION_USER', 'SET', 'SETOF', 'SHARE', 'SHOW', 'SIMILAR', 'SIMPLE',
'SMALLINT', 'SOME', 'STABLE', 'STANDALONE', 'START', 'STATEMENT',
'STATISTICS', 'STDIN', 'STDOUT', 'STORAGE', 'STRICT', 'STRIP',
'SUBSTRING', 'SYMMETRIC', 'SYSID', 'SYSTEM', 'TABLE', 'TABLES',
'TABLESPACE', 'TEMP', 'TEMPLATE', 'TEMPORARY', 'TEXT', 'THEN', 'TIME',
'TIMESTAMP', 'TO', 'TRAILING', 'TRANSACTION', 'TREAT', 'TRIGGER',
'TRIM', 'TRUE', 'TRUNCATE', 'TRUSTED', 'TYPE', 'UNBOUNDED',
'UNCOMMITTED', 'UNENCRYPTED', 'UNION', 'UNIQUE', 'UNKNOWN', 'UNLISTEN',
'UNLOGGED', 'UNTIL', 'UPDATE', 'USER', 'USING', 'VACUUM', 'VALID',
'VALIDATE', 'VALIDATOR', 'VALUE', 'VALUES', 'VARCHAR', 'VARIADIC',
'VARYING', 'VERBOSE', 'VERSION', 'VIEW', 'VOLATILE', 'WHEN', 'WHERE',
'WHITESPACE', 'WINDOW', 'WITH', 'WITHOUT', 'WORK', 'WRAPPER', 'WRITE',
'XML', 'XMLATTRIBUTES', 'XMLCONCAT', 'XMLELEMENT', 'XMLEXISTS',
'XMLFOREST', 'XMLPARSE', 'XMLPI', 'XMLROOT', 'XMLSERIALIZE', 'YEAR',
'YES', 'ZONE',
]
DATATYPES = [
'bigint', 'bigserial', 'bit', 'bit varying', 'bool', 'boolean', 'box',
'bytea', 'char', 'character', 'character varying', 'cidr', 'circle',
'date', 'decimal', 'double precision', 'float4', 'float8', 'inet',
'int', 'int2', 'int4', 'int8', 'integer', 'interval', 'json', 'line',
'lseg', 'macaddr', 'money', 'numeric', 'path', 'point', 'polygon',
'real', 'serial', 'serial2', 'serial4', 'serial8', 'smallint',
'smallserial', 'text', 'time', 'timestamp', 'timestamptz', 'timetz',
'tsquery', 'tsvector', 'txid_snapshot', 'uuid', 'varbit', 'varchar',
'with time zone', 'without time zone', 'xml',
]
PSEUDO_TYPES = [
'any', 'anyelement', 'anyarray', 'anynonarray', 'anyenum', 'anyrange',
'cstring', 'internal', 'language_handler', 'fdw_handler', 'record',
'trigger', 'void', 'opaque',
]
# Remove 'trigger' from types
PSEUDO_TYPES = sorted(set(PSEUDO_TYPES) - set(map(str.lower, KEYWORDS)))
PLPGSQL_KEYWORDS = [
'ALIAS', 'CONSTANT', 'DIAGNOSTICS', 'ELSIF', 'EXCEPTION', 'EXIT',
'FOREACH', 'GET', 'LOOP', 'NOTICE', 'OPEN', 'PERFORM', 'QUERY', 'RAISE',
'RETURN', 'REVERSE', 'SQLSTATE', 'WHILE',
]
if __name__ == '__main__':
update_myself()
| mit | 492,828,964,723,890,050 | -1,035,246,798,717,358,000 | 40.596567 | 86 | 0.576868 | false |
autopulated/ninja | misc/ninja_syntax_test.py | 24 | 6158 | #!/usr/bin/env python
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
import ninja_syntax
LONGWORD = 'a' * 10
LONGWORDWITHSPACES = 'a'*5 + '$ ' + 'a'*5
INDENT = ' '
class TestLineWordWrap(unittest.TestCase):
    """Tests for Writer._line(): wrapping long lines with '$' continuations."""
    def setUp(self):
        # Narrow width (8 columns) forces wrapping in most tests below.
        self.out = StringIO()
        self.n = ninja_syntax.Writer(self.out, width=8)
    def test_single_long_word(self):
        # We shouldn't wrap a single long word.
        self.n._line(LONGWORD)
        self.assertEqual(LONGWORD + '\n', self.out.getvalue())
    def test_few_long_words(self):
        # We should wrap a line where the second word is overlong.
        self.n._line(' '.join(['x', LONGWORD, 'y']))
        self.assertEqual(' $\n'.join(['x',
                                      INDENT + LONGWORD,
                                      INDENT + 'y']) + '\n',
                         self.out.getvalue())
    def test_short_words_indented(self):
        # Test that indent is taking into acount when breaking subsequent lines.
        # The second line should not be ' to tree', as that's longer than the
        # test layout width of 8.
        self.n._line('line_one to tree')
        self.assertEqual('''\
line_one $
    to $
    tree
''',
                         self.out.getvalue())
    def test_few_long_words_indented(self):
        # Check wrapping in the presence of indenting.
        self.n._line(' '.join(['x', LONGWORD, 'y']), indent=1)
        self.assertEqual(' $\n'.join(['  ' + 'x',
                                      '  ' + INDENT + LONGWORD,
                                      '  ' + INDENT + 'y']) + '\n',
                         self.out.getvalue())
    def test_escaped_spaces(self):
        # '$ ' escapes a space; the escaped word must not be split at it.
        self.n._line(' '.join(['x', LONGWORDWITHSPACES, 'y']))
        self.assertEqual(' $\n'.join(['x',
                                      INDENT + LONGWORDWITHSPACES,
                                      INDENT + 'y']) + '\n',
                         self.out.getvalue())
    def test_fit_many_words(self):
        # Realistic long command line at the default-ish width of 78.
        self.n = ninja_syntax.Writer(self.out, width=78)
        self.n._line('command = cd ../../chrome; python ../tools/grit/grit/format/repack.py ../out/Debug/obj/chrome/chrome_dll.gen/repack/theme_resources_large.pak ../out/Debug/gen/chrome/theme_resources_large.pak', 1)
        self.assertEqual('''\
command = cd ../../chrome; python ../tools/grit/grit/format/repack.py $
    ../out/Debug/obj/chrome/chrome_dll.gen/repack/theme_resources_large.pak $
    ../out/Debug/gen/chrome/theme_resources_large.pak
''',
                         self.out.getvalue())
    def test_leading_space(self):
        self.n = ninja_syntax.Writer(self.out, width=14) # force wrapping
        self.n.variable('foo', ['', '-bar', '-somethinglong'], 0)
        self.assertEqual('''\
foo = -bar $
    -somethinglong
''',
                         self.out.getvalue())
    def test_embedded_dollar_dollar(self):
        # '$$' is a literal dollar; it must not be treated as an escape.
        self.n = ninja_syntax.Writer(self.out, width=15) # force wrapping
        self.n.variable('foo', ['a$$b', '-somethinglong'], 0)
        self.assertEqual('''\
foo = a$$b $
    -somethinglong
''',
                         self.out.getvalue())
    def test_two_embedded_dollar_dollars(self):
        self.n = ninja_syntax.Writer(self.out, width=17) # force wrapping
        self.n.variable('foo', ['a$$b', '-somethinglong'], 0)
        self.assertEqual('''\
foo = a$$b $
    -somethinglong
''',
                         self.out.getvalue())
    def test_leading_dollar_dollar(self):
        self.n = ninja_syntax.Writer(self.out, width=14) # force wrapping
        self.n.variable('foo', ['$$b', '-somethinglong'], 0)
        self.assertEqual('''\
foo = $$b $
    -somethinglong
''',
                         self.out.getvalue())
    def test_trailing_dollar_dollar(self):
        self.n = ninja_syntax.Writer(self.out, width=14) # force wrapping
        self.n.variable('foo', ['a$$', '-somethinglong'], 0)
        self.assertEqual('''\
foo = a$$ $
    -somethinglong
''',
                         self.out.getvalue())
class TestBuild(unittest.TestCase):
    """Tests for Writer.build(): per-build variables in dict or list form."""
    def setUp(self):
        self.out = StringIO()
        self.n = ninja_syntax.Writer(self.out)
    def test_variables_dict(self):
        # Variables may be supplied as a mapping.
        self.n.build('out', 'cc', 'in', variables={'name': 'value'})
        self.assertEqual('''\
build out: cc in
  name = value
''',
                         self.out.getvalue())
    def test_variables_list(self):
        # ...or as a list of (name, value) pairs, yielding identical output.
        self.n.build('out', 'cc', 'in', variables=[('name', 'value')])
        self.assertEqual('''\
build out: cc in
  name = value
''',
                         self.out.getvalue())
class TestExpand(unittest.TestCase):
    """Tests for ninja_syntax.expand(): $var substitution semantics."""
    def test_basic(self):
        # No variables referenced: the string passes through unchanged.
        vars = {'x': 'X'}
        self.assertEqual('foo', ninja_syntax.expand('foo', vars))
    def test_var(self):
        vars = {'xyz': 'XYZ'}
        self.assertEqual('fooXYZ', ninja_syntax.expand('foo$xyz', vars))
    def test_vars(self):
        vars = {'x': 'X', 'y': 'YYY'}
        self.assertEqual('XYYY', ninja_syntax.expand('$x$y', vars))
    def test_space(self):
        # '$ ' expands to a literal space.
        vars = {}
        self.assertEqual('x y z', ninja_syntax.expand('x$ y$ z', vars))
    def test_locals(self):
        # Local variables shadow the outer scope.
        vars = {'x': 'a'}
        local_vars = {'x': 'b'}
        self.assertEqual('a', ninja_syntax.expand('$x', vars))
        self.assertEqual('b', ninja_syntax.expand('$x', vars, local_vars))
    def test_double(self):
        # '$$' is a literal dollar sign.
        self.assertEqual('a b$c', ninja_syntax.expand('a$ b$$c', {}))
if __name__ == '__main__':
unittest.main()
| apache-2.0 | 243,358,982,308,435,550 | 8,729,949,634,256,872,000 | 33.595506 | 218 | 0.559272 | false |
lliss/tr-55 | tr55/model.py | 1 | 14151 | # -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
"""
TR-55 Model Implementation
A mapping between variable/parameter names found in the TR-55 document
and variables used in this program are as follows:
* `precip` is referred to as P in the report
* `runoff` is Q
* `evaptrans` maps to ET, the evapotranspiration
* `inf` is the amount of water that infiltrates into the soil (in inches)
* `init_abs` is Ia, the initial abstraction, another form of infiltration
"""
import copy
from tr55.tablelookup import lookup_cn, lookup_bmp_infiltration, \
lookup_ki, is_bmp, is_built_type, make_precolumbian, get_pollutants
from tr55.water_quality import get_volume_of_runoff, get_pollutant_load
from tr55.operations import dict_plus
def runoff_pitt(precip, land_use):
    """
    Estimate small-storm runoff (inches) for a built land use via the
    Pitt Small Storm Hydrology method.

    Raises Exception when `land_use` is not one of the built types.
    """
    # Polynomial coefficients fitted for fully impervious cover (c1-c4)
    # and urban grass (c5-c9).
    c1 = +3.638858398e-2
    c2 = -1.243464039e-1
    c3 = +1.295682223e-1
    c4 = +9.375868043e-1
    c5 = -2.235170859e-2
    c6 = +0.170228067e+0
    c7 = -3.971810782e-1
    c8 = +3.887275538e-1
    c9 = -2.289321859e-2
    second = precip ** 2
    third = precip ** 3
    fourth = precip ** 4
    impervious = (c1 * third) + (c2 * second) + (c3 * precip) + c4
    urb_grass = (c5 * fourth) + (c6 * third) + (c7 * second) + (c8 * precip) + c9
    # Each built type is a weighted blend of the two surface responses.
    runoff_by_use = {
        'open_water': impervious,
        'developed_low': 0.20 * impervious + 0.80 * urb_grass,
        'cluster_housing': 0.20 * impervious + 0.80 * urb_grass,
        'developed_med': 0.65 * impervious + 0.35 * urb_grass,
        'developed_high': impervious,
        'developed_open': urb_grass
    }
    try:
        # Runoff can never exceed the precipitation that fell.
        return min(runoff_by_use[land_use], precip)
    except KeyError:
        raise Exception('Land use %s not a built-type.' % land_use)
def nrcs_cutoff(precip, curve_number):
    """
    Return True for precipitation/curve number pairs that have zero
    runoff by definition under the TR-55 method, False otherwise.
    """
    # Idiom fix: return the comparison directly instead of an
    # if/else that returns True/False.  P <= 2*(100-CN)/CN means the
    # rainfall never exceeds the initial abstraction.
    return precip <= -1 * (2 * (curve_number - 100.0) / curve_number)
def runoff_nrcs(precip, evaptrans, soil_type, land_use):
    """
    TR-55 (NRCS) runoff equation.  Returns runoff in inches, capped so
    it never exceeds precipitation minus evapotranspiration.
    """
    # Cluster housing shares the low-intensity development curve number.
    effective_use = 'developed_low' if land_use == 'cluster_housing' else land_use
    curve_number = lookup_cn(soil_type, effective_use)
    if nrcs_cutoff(precip, curve_number):
        return 0.0
    potential_retention = (1000.0 / curve_number) - 10
    initial_abs = 0.2 * potential_retention
    effective_precip = precip - initial_abs
    runoff = pow(effective_precip, 2) / (effective_precip + potential_retention)
    return min(runoff, precip - evaptrans)
def simulate_cell_day(precip, evaptrans, cell, cell_count):
    """
    Simulate a bunch of cells of the same type during a one-day event.
    `precip` is the amount of precipitation in inches.
    `evaptrans` is evapotranspiration.
    `cell` is a string which contains a soil type and land use
    separated by a colon.
    `cell_count` is the number of cells to simulate.
    The return value is a dictionary of runoff, evapotranspiration, and
    infiltration as volumes of water.
    """
    def clamp(runoff, et, inf, precip):
        """
        This function ensures that runoff + et + inf <= precip.
        NOTE: infiltration is normally independent of the
        precipitation level, but this function introduces a slight
        dependency (that is, at very low levels of precipitation, this
        function can cause infiltration to be smaller than it
        ordinarily would be.
        """
        total = runoff + et + inf
        if (total > precip):
            # Scale all three down proportionally.
            scale = precip / total
            runoff *= scale
            et *= scale
            inf *= scale
        return (runoff, et, inf)
    precip = max(0.0, precip)
    soil_type, land_use, bmp = cell.lower().split(':')
    # If there is no precipitation, then there is no runoff or
    # infiltration. There is evapotranspiration, however (it is
    # understood that over a period of time, this can lead to the sum
    # of the three values exceeding the total precipitation).
    if precip == 0.0:
        return {
            'runoff-vol': 0.0,
            # 'et-vol': cell_count * evaptrans,
            'et-vol': 0.0,
            'inf-vol': 0.0,
        }
    # Deal with the Best Management Practices (BMPs). For most BMPs,
    # the infiltration is read from the table and the runoff is what
    # is left over after infiltration and evapotranspiration. Rain
    # gardens are treated differently.
    if bmp and is_bmp(bmp) and bmp != 'rain_garden':
        inf = lookup_bmp_infiltration(soil_type, bmp) # infiltration
        runoff = max(0.0, precip - (evaptrans + inf)) # runoff
        (runoff, evaptrans, inf) = clamp(runoff, evaptrans, inf, precip)
        return {
            'runoff-vol': cell_count * runoff,
            'et-vol': cell_count * evaptrans,
            'inf-vol': cell_count * inf
        }
    elif bmp and bmp == 'rain_garden':
        # Here, return a mixture of 20% ideal rain garden and 80%
        # high-intensity residential.
        inf = lookup_bmp_infiltration(soil_type, bmp)
        runoff = max(0.0, precip - (evaptrans + inf))
        # Recurse once to simulate the residential portion of the mix.
        hi_res_cell = soil_type + ':developed_med:'
        hi_res = simulate_cell_day(precip, evaptrans, hi_res_cell, 1)
        hir_run = hi_res['runoff-vol']
        hir_et = hi_res['et-vol']
        hir_inf = hi_res['inf-vol']
        final_runoff = (0.2 * runoff + 0.8 * hir_run)
        final_et = (0.2 * evaptrans + 0.8 * hir_et)
        final_inf = (0.2 * inf + 0.8 * hir_inf)
        final = clamp(final_runoff, final_et, final_inf, precip)
        (final_runoff, final_et, final_inf) = final
        return {
            'runoff-vol': cell_count * final_runoff,
            'et-vol': cell_count * final_et,
            'inf-vol': cell_count * final_inf
        }
    # At this point, if the `bmp` string has non-zero length, it is
    # equal to either 'no_till' or 'cluster_housing'.
    if bmp and bmp != 'no_till' and bmp != 'cluster_housing':
        raise KeyError('Unexpected BMP: %s' % bmp)
    land_use = bmp or land_use
    # When the land use is a built-type and the level of precipitation
    # is two inches or less, use the Pitt Small Storm Hydrology Model.
    # When the land use is a built-type but the level of precipitation
    # is higher, the runoff is the larger of that predicted by the
    # Pitt model and NRCS model. Otherwise, return the NRCS amount.
    if is_built_type(land_use) and precip <= 2.0:
        runoff = runoff_pitt(precip, land_use)
    elif is_built_type(land_use):
        pitt_runoff = runoff_pitt(2.0, land_use)
        nrcs_runoff = runoff_nrcs(precip, evaptrans, soil_type, land_use)
        runoff = max(pitt_runoff, nrcs_runoff)
    else:
        runoff = runoff_nrcs(precip, evaptrans, soil_type, land_use)
    inf = max(0.0, precip - (evaptrans + runoff))
    (runoff, evaptrans, inf) = clamp(runoff, evaptrans, inf, precip)
    return {
        'runoff-vol': cell_count * runoff,
        'et-vol': cell_count * evaptrans,
        'inf-vol': cell_count * inf,
    }
def create_unmodified_census(census):
    """
    Build a cell census with any 'modifications' entry dropped; the
    result is suitable for use with `simulate_water_quality`.
    The input census is left untouched.
    """
    return {key: copy.deepcopy(value)
            for key, value in census.items()
            if key != 'modifications'}
def create_modified_census(census):
    """
    This creates a cell census, with modifications, that is suitable
    for use with `simulate_water_quality`.
    For every type of cell that undergoes modification, the
    modifications are indicated with a sub-distribution under that
    cell type.
    """
    mod = copy.deepcopy(census)
    mod.pop('modifications', None)
    # Seed every cell type with a self-distribution so that the
    # modification pass below can subtract cells from it.
    for (cell, subcensus) in mod['distribution'].items():
        n = subcensus['cell_count']
        changes = {
            'distribution': {
                cell: {
                    'distribution': {
                        cell: {'cell_count': n}
                    }
                }
            }
        }
        mod = dict_plus(mod, changes)
    # Move cells from their original type into the changed type; blank
    # soil/land fields in the change string inherit the original values.
    for modification in (census.get('modifications') or []):
        for (orig_cell, subcensus) in modification['distribution'].items():
            n = subcensus['cell_count']
            soil1, land1 = orig_cell.split(':')
            soil2, land2, bmp = modification['change'].split(':')
            changed_cell = '%s:%s:%s' % (soil2 or soil1, land2 or land1, bmp)
            changes = {
                'distribution': {
                    orig_cell: {
                        'distribution': {
                            orig_cell: {'cell_count': -n},
                            changed_cell: {'cell_count': n}
                        }
                    }
                }
            }
            mod = dict_plus(mod, changes)
    return mod
def simulate_water_quality(tree, cell_res, fn,
                           current_cell=None, precolumbian=False):
    """
    Perform a water quality simulation by doing simulations on each of
    the cell types (leaves), then adding them together by summing the
    values of a node's subtrees and storing them at that node.
    `tree` is the (sub)tree of cell distributions that is currently
    under consideration.
    `cell_res` is the size of each cell (used for turning inches of
    water into volumes of water).
    `fn` is a function that takes a cell type and a number of cells
    and returns a dictionary containing runoff, et, and inf as
    volumes.
    `current_cell` is the cell type for the present node.
    `precolumbian`, when True, maps artificial land uses to their
    pre-development equivalents before simulating each leaf.
    """
    # Internal node.
    if 'cell_count' in tree and 'distribution' in tree:
        n = tree['cell_count']
        # simulate subtrees
        if n != 0:
            tally = {}
            for cell, subtree in tree['distribution'].items():
                simulate_water_quality(subtree, cell_res, fn,
                                       cell, precolumbian)
                # Sum the subtree's scalar results (not its distribution)
                # into this node's tally.
                subtree_ex_dist = subtree.copy()
                subtree_ex_dist.pop('distribution', None)
                tally = dict_plus(tally, subtree_ex_dist)
            tree.update(tally) # update this node
        # effectively a leaf
        elif n == 0:
            for pol in get_pollutants():
                tree[pol] = 0.0
    # Leaf node.
    elif 'cell_count' in tree and 'distribution' not in tree:
        # the number of cells covered by this leaf
        n = tree['cell_count']
        # canonicalize the current_cell string
        split = current_cell.split(':')
        if (len(split) == 2):
            split.append('')
        if precolumbian:
            split[1] = make_precolumbian(split[1])
        current_cell = '%s:%s:%s' % tuple(split)
        # run the runoff model on this leaf
        result = fn(current_cell, n) # runoff, et, inf
        tree.update(result)
        # perform water quality calculation
        if n != 0:
            soil_type, land_use, bmp = split
            runoff_per_cell = result['runoff-vol'] / n
            liters = get_volume_of_runoff(runoff_per_cell, n, cell_res)
            for pol in get_pollutants():
                tree[pol] = get_pollutant_load(land_use, pol, liters)
def postpass(tree):
    """
    Convert the per-node water volumes produced by the simulation back
    into inches ('runoff', 'et', 'inf'), removing the '*-vol' keys.
    Recurses through the whole distribution tree in place.
    """
    if 'cell_count' in tree:
        count = tree['cell_count']
        if count > 0:
            # Volume divided by cell count gives depth in inches.
            tree['runoff'] = tree['runoff-vol'] / count
            tree['et'] = tree['et-vol'] / count
            tree['inf'] = tree['inf-vol'] / count
        else:
            tree['runoff'] = 0
            tree['et'] = 0
            tree['inf'] = 0
        for volume_key in ('runoff-vol', 'et-vol', 'inf-vol'):
            tree.pop(volume_key, None)
    for subtree in tree.get('distribution', {}).values():
        postpass(subtree)
def simulate_modifications(census, fn, cell_res, precolumbian=False):
    """
    Run the water-quality simulation over both the modified and the
    unmodified versions of `census` and return both result trees.
    `fn` and `cell_res` are as described in `simulate_water_quality`.
    """
    modified = create_modified_census(census)
    simulate_water_quality(modified, cell_res, fn, precolumbian=precolumbian)
    postpass(modified)
    unmodified = create_unmodified_census(census)
    simulate_water_quality(unmodified, cell_res, fn, precolumbian=precolumbian)
    postpass(unmodified)
    return {
        'unmodified': unmodified,
        'modified': modified
    }
def simulate_day(census, precip, cell_res=10, precolumbian=False):
    """
    Simulate a day, including water quality effects of modifications.
    `census` contains a distribution of cell-types in the area of interest.
    `precip` is the day's precipitation in inches.
    `cell_res` is as described in `simulate_water_quality`.
    `precolumbian` indicates that artificial types should be turned
    into forest.
    """
    # Maximum daily evapotranspiration (inches); scaled per land use by Ki.
    et_max = 0.207
    if 'modifications' in census:
        verify_census(census)
    def fn(cell, cell_count):
        # Compute et for cell type
        split = cell.split(':')
        if (len(split) == 2):
            (land_use, bmp) = split
        else:
            (_, land_use, bmp) = split
        et = et_max * lookup_ki(bmp or land_use)
        # Simulate the cell for one day
        return simulate_cell_day(precip, et, cell, cell_count)
    return simulate_modifications(census, fn, cell_res, precolumbian)
def verify_census(census):
    """Assure that every soil type/land cover pair appearing in a
    modification census also appears in the AoI census; raise
    ValueError otherwise.
    """
    known = census['distribution']
    covers = (cover
              for mod in census['modifications']
              for cover in mod['distribution'])
    if any(cover not in known for cover in covers):
        raise ValueError("Invalid modification census")
| apache-2.0 | 3,415,994,465,098,827,000 | 482,401,713,128,284,800 | 33.098795 | 77 | 0.596636 | false |
spaceof7/QGIS | python/plugins/processing/algs/qgis/HubDistanceLines.py | 6 | 6462 | # -*- coding: utf-8 -*-
"""
***************************************************************************
HubDistanceLines.py
---------------------
Date : May 2010
Copyright : (C) 2010 by Michael Minn
Email : pyqgis at michaelminn dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Michael Minn'
__date__ = 'May 2010'
__copyright__ = '(C) 2010, Michael Minn'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from qgis.PyQt.QtCore import QVariant
from qgis.core import (QgsField,
QgsGeometry,
QgsDistanceArea,
QgsFeature,
QgsFeatureSink,
QgsFeatureRequest,
QgsWkbTypes,
QgsUnitTypes,
QgsProcessing,
QgsProcessingParameterFeatureSource,
QgsProcessingParameterField,
QgsProcessingParameterEnum,
QgsProcessingParameterFeatureSink,
QgsProcessingException,
QgsSpatialIndex)
from processing.algs.qgis.QgisAlgorithm import QgisAlgorithm
from math import sqrt
class HubDistanceLines(QgisAlgorithm):
    """Processing algorithm: for every source point, find the nearest hub
    and emit a line feature connecting the point to that hub, carrying
    the hub's name attribute and the distance in the chosen unit."""

    INPUT = 'INPUT'
    HUBS = 'HUBS'
    FIELD = 'FIELD'
    UNIT = 'UNIT'
    OUTPUT = 'OUTPUT'
    LAYER_UNITS = 'LAYER_UNITS'

    # Order must stay parallel to self.units built in initAlgorithm().
    UNITS = [QgsUnitTypes.DistanceMeters,
             QgsUnitTypes.DistanceFeet,
             QgsUnitTypes.DistanceMiles,
             QgsUnitTypes.DistanceKilometers,
             LAYER_UNITS
             ]

    def group(self):
        return self.tr('Vector analysis')

    def groupId(self):
        return 'vectoranalysis'

    def __init__(self):
        super().__init__()

    def initAlgorithm(self, config=None):
        # Human-readable unit names, parallel to UNITS above.
        self.units = [self.tr('Meters'),
                      self.tr('Feet'),
                      self.tr('Miles'),
                      self.tr('Kilometers'),
                      self.tr('Layer units')]

        self.addParameter(QgsProcessingParameterFeatureSource(self.INPUT,
                                                              self.tr('Source points layer')))
        self.addParameter(QgsProcessingParameterFeatureSource(self.HUBS,
                                                              self.tr('Destination hubs layer')))
        self.addParameter(QgsProcessingParameterField(self.FIELD,
                                                      self.tr('Hub layer name attribute'), parentLayerParameterName=self.HUBS))
        self.addParameter(QgsProcessingParameterEnum(self.UNIT,
                                                     self.tr('Measurement unit'), self.units))
        self.addParameter(QgsProcessingParameterFeatureSink(self.OUTPUT, self.tr('Hub distance'), QgsProcessing.TypeVectorLine))

    def name(self):
        return 'distancetonearesthublinetohub'

    def displayName(self):
        return self.tr('Distance to nearest hub (line to hub)')

    def processAlgorithm(self, parameters, context, feedback):
        if parameters[self.INPUT] == parameters[self.HUBS]:
            raise QgsProcessingException(
                self.tr('Same layer given for both hubs and spokes'))

        point_source = self.parameterAsSource(parameters, self.INPUT, context)
        hub_source = self.parameterAsSource(parameters, self.HUBS, context)
        fieldName = self.parameterAsString(parameters, self.FIELD, context)
        units = self.UNITS[self.parameterAsEnum(parameters, self.UNIT, context)]

        # Output schema: all source fields plus hub name and distance.
        fields = point_source.fields()
        fields.append(QgsField('HubName', QVariant.String))
        fields.append(QgsField('HubDist', QVariant.Double))

        (sink, dest_id) = self.parameterAsSink(parameters, self.OUTPUT, context,
                                               fields, QgsWkbTypes.LineString, point_source.sourceCrs())

        # Spatial index over hubs, reprojected to the point layer CRS so
        # nearest-neighbour queries and measurements are consistent.
        index = QgsSpatialIndex(hub_source.getFeatures(QgsFeatureRequest().setSubsetOfAttributes([]).setDestinationCrs(point_source.sourceCrs(), context.transformContext())))

        distance = QgsDistanceArea()
        distance.setSourceCrs(point_source.sourceCrs(), context.transformContext())
        distance.setEllipsoid(context.project().ellipsoid())

        # Scan source points, find nearest hub, and write to output file
        features = point_source.getFeatures()
        total = 100.0 / point_source.featureCount() if point_source.featureCount() else 0
        for current, f in enumerate(features):
            if feedback.isCanceled():
                break

            if not f.hasGeometry():
                # Pass geometry-less features through unchanged.
                sink.addFeature(f, QgsFeatureSink.FastInsert)
                continue

            # Bounding-box centers stand in for the geometries themselves.
            src = f.geometry().boundingBox().center()

            neighbors = index.nearestNeighbor(src, 1)
            ft = next(hub_source.getFeatures(QgsFeatureRequest().setFilterFid(neighbors[0]).setSubsetOfAttributes([fieldName], hub_source.fields()).setDestinationCrs(point_source.sourceCrs(), context.transformContext())))
            closest = ft.geometry().boundingBox().center()
            hubDist = distance.measureLine(src, closest)

            # measureLine() yields meters when an ellipsoid is set; convert
            # unless the user asked for raw layer units.
            if units != self.LAYER_UNITS:
                hub_dist_in_desired_units = distance.convertLengthMeasurement(hubDist, units)
            else:
                hub_dist_in_desired_units = hubDist

            attributes = f.attributes()
            attributes.append(ft[fieldName])
            attributes.append(hub_dist_in_desired_units)

            feat = QgsFeature()
            feat.setAttributes(attributes)
            feat.setGeometry(QgsGeometry.fromPolylineXY([src, closest]))
            sink.addFeature(feat, QgsFeatureSink.FastInsert)

            feedback.setProgress(int(current * total))

        return {self.OUTPUT: dest_id}
| gpl-2.0 | -3,813,129,090,989,495,300 | -4,227,043,921,121,730,600 | 40.159236 | 221 | 0.556175 | false |
resmo/cloudstack | test/integration/smoke/test_affinity_groups_projects.py | 3 | 6214 | #!/usr/bin/env python
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from marvin.codes import FAILED
from marvin.cloudstackTestCase import *
from marvin.cloudstackAPI import *
from marvin.lib.utils import *
from marvin.lib.base import *
from marvin.lib.common import *
from marvin.sshClient import SshClient
from nose.plugins.attrib import attr
class TestDeployVmWithAffinityGroup(cloudstackTestCase):
    """
    This test deploys a virtual machine for a project
    using the small service offering and builtin template
    """

    @classmethod
    def setUpClass(cls):
        # Create the shared fixtures: an account, a project owned by it,
        # a tiny service offering, and a host anti-affinity group scoped
        # to the project.
        cls.testClient = super(TestDeployVmWithAffinityGroup, cls).getClsTestClient()
        zone_name = cls.testClient.getZoneForTests()
        cls.apiclient = cls.testClient.getApiClient()
        cls.domain = get_domain(cls.apiclient)
        cls.services = cls.testClient.getParsedTestDataConfig()
        # Get Zone, Domain and templates
        cls.zone = get_zone(cls.apiclient, cls.testClient.getZoneForTests())
        cls.template = get_template(
            cls.apiclient,
            cls.zone.id,
            cls.services["ostype"]
        )
        if cls.template == FAILED:
            assert False, "get_template() failed to return template with description %s" % cls.services["ostype"]

        cls.services["virtual_machine"]["zoneid"] = cls.zone.id
        cls.services["template"] = cls.template.id
        cls.services["zoneid"] = cls.zone.id

        cls.account = Account.create(
            cls.apiclient,
            cls.services["account"],
            domainid=cls.domain.id
        )

        projectData = {
            "name": "Project",
            "displaytext": "Test project",
        }
        cls.project = Project.create(
            cls.apiclient,
            projectData,
            account=cls.account.name,
            domainid=cls.account.domainid
        )
        # Add user to the project
        cls.project.addAccount(
            cls.apiclient,
            cls.account.name
        )

        cls.service_offering = ServiceOffering.create(
            cls.apiclient,
            cls.services["service_offerings"]["tiny"]
        )

        # Host anti-affinity group bound to the project.
        cls.ag = AffinityGroup.create(cls.apiclient, cls.services["virtual_machine"]["affinity"],projectid=cls.project.id)

        # Cleanup order matters: dependent resources are removed first.
        cls._cleanup = [
            cls.service_offering,
            cls.ag,
            cls.project,
            cls.account,
        ]
        return

    @attr(tags=["basic", "advanced", "multihost"], required_hardware="false")
    def test_DeployVmAntiAffinityGroup_in_project(self):
        """
        test DeployVM in anti-affinity groups for project

        deploy VM1 and VM2 in the same host-anti-affinity groups
        Verify that the vms are deployed on separate hosts
        """
        #deploy VM1 in affinity group created in setUp
        vm1 = VirtualMachine.create(
            self.apiclient,
            self.services["virtual_machine"],
            templateid=self.template.id,
            projectid=self.project.id,
            serviceofferingid=self.service_offering.id,
            affinitygroupnames=[self.ag.name]
        )

        list_vm1 = list_virtual_machines(
            self.apiclient,
            id=vm1.id
        )
        self.assertEqual(
            isinstance(list_vm1, list),
            True,
            "Check list response returns a valid list"
        )
        self.assertNotEqual(
            len(list_vm1),
            0,
            "Check VM available in List Virtual Machines"
        )
        vm1_response = list_vm1[0]
        self.assertEqual(
            vm1_response.state,
            'Running',
            msg="VM is not in Running state"
        )
        self.assertEqual(
            vm1_response.projectid,
            self.project.id,
            msg="VM1 is not deployed in project"
        )
        host_of_vm1 = vm1_response.hostid

        #deploy VM2 in affinity group created in setUp
        vm2 = VirtualMachine.create(
            self.apiclient,
            self.services["virtual_machine"],
            templateid=self.template.id,
            projectid=self.project.id,
            serviceofferingid=self.service_offering.id,
            affinitygroupnames=[self.ag.name]
        )
        list_vm2 = list_virtual_machines(
            self.apiclient,
            id=vm2.id
        )
        self.assertEqual(
            isinstance(list_vm2, list),
            True,
            "Check list response returns a valid list"
        )
        self.assertNotEqual(
            len(list_vm2),
            0,
            "Check VM available in List Virtual Machines"
        )
        vm2_response = list_vm2[0]
        self.assertEqual(
            vm2_response.state,
            'Running',
            msg="VM is not in Running state"
        )
        self.assertEqual(
            vm2_response.projectid,
            self.project.id,
            msg="VM2 is not deployed in project"
        )
        host_of_vm2 = vm2_response.hostid

        # The anti-affinity guarantee under test: both VMs must have
        # landed on different hosts.
        self.assertNotEqual(host_of_vm1, host_of_vm2,
            msg="Both VMs of affinity group %s are on the same host" % self.ag.name)

    @classmethod
    def tearDownClass(cls):
        try:
            #Clean up, terminate the created templates
            cleanup_resources(cls.apiclient, cls._cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
| apache-2.0 | 4,829,454,065,248,978,000 | -2,446,501,529,246,200,300 | 32.053191 | 122 | 0.597683 | false |
ryanmockabee/golfr | flask/lib/python3.6/site-packages/jinja2/idtracking.py | 130 | 8760 | from jinja2.visitor import NodeVisitor
from jinja2._compat import iteritems
# Symbolic tags describing how a referenced template variable obtains
# its value in the generated code.
VAR_LOAD_PARAMETER = 'param'      # bound as a macro/function parameter
VAR_LOAD_RESOLVE = 'resolve'      # resolved from the context at runtime
VAR_LOAD_ALIAS = 'alias'          # aliased to a reference in an outer frame
VAR_LOAD_UNDEFINED = 'undefined'  # starts out undefined
def find_symbols(nodes, parent_symbols=None):
    """Collect symbol usage for *nodes* into a fresh Symbols frame."""
    symbols = Symbols(parent=parent_symbols)
    node_visitor = FrameSymbolVisitor(symbols)
    for child in nodes:
        node_visitor.visit(child)
    return symbols
def symbols_for_node(node, parent_symbols=None):
    """Analyze a single *node* and return the resulting Symbols frame."""
    symbols = Symbols(parent=parent_symbols)
    symbols.analyze_node(node)
    return symbols
class Symbols(object):
    """Tracks variable references, loads and stores for one template frame.

    Frames nest via `parent`; `level` makes the generated identifiers
    (``l_<level>_<name>``) unique per nesting depth.
    """

    def __init__(self, parent=None):
        if parent is None:
            self.level = 0
        else:
            self.level = parent.level + 1
        self.parent = parent
        self.refs = {}       # template name -> mangled identifier
        self.loads = {}      # identifier -> (load instruction, argument)
        self.stores = set()  # names assigned in this frame

    def analyze_node(self, node, **kwargs):
        visitor = RootVisitor(self)
        visitor.visit(node, **kwargs)

    def _define_ref(self, name, load=None):
        # Mangle the name with the frame level; optionally record how the
        # identifier receives its value.
        ident = 'l_%d_%s' % (self.level, name)
        self.refs[name] = ident
        if load is not None:
            self.loads[ident] = load
        return ident

    def find_load(self, target):
        # Look up a load instruction, walking outwards through parents;
        # None if the identifier is unknown anywhere.
        if target in self.loads:
            return self.loads[target]
        if self.parent is not None:
            return self.parent.find_load(target)

    def find_ref(self, name):
        # Resolve a template name to its mangled identifier, walking
        # outwards through parent frames; None if unknown.
        if name in self.refs:
            return self.refs[name]
        if self.parent is not None:
            return self.parent.find_ref(name)

    def ref(self, name):
        # Like find_ref() but an unknown name is a hard error.
        rv = self.find_ref(name)
        if rv is None:
            raise AssertionError('Tried to resolve a name to a reference that '
                                 'was unknown to the frame (%r)' % name)
        return rv

    def copy(self):
        # Shallow frame copy with independent refs/loads/stores containers,
        # used to analyze conditional branches separately.
        rv = object.__new__(self.__class__)
        rv.__dict__.update(self.__dict__)
        rv.refs = self.refs.copy()
        rv.loads = self.loads.copy()
        rv.stores = self.stores.copy()
        return rv

    def store(self, name):
        self.stores.add(name)

        # If we have not see the name referenced yet, we need to figure
        # out what to set it to.
        if name not in self.refs:
            # If there is a parent scope we check if the name has a
            # reference there.  If it does it means we might have to alias
            # to a variable there.
            if self.parent is not None:
                outer_ref = self.parent.find_ref(name)
                if outer_ref is not None:
                    self._define_ref(name, load=(VAR_LOAD_ALIAS, outer_ref))
                    return

            # Otherwise we can just set it to undefined.
            self._define_ref(name, load=(VAR_LOAD_UNDEFINED, None))

    def declare_parameter(self, name):
        self.stores.add(name)
        return self._define_ref(name, load=(VAR_LOAD_PARAMETER, None))

    def load(self, name):
        # A name never seen before must be resolved from the context
        # at runtime.
        target = self.find_ref(name)
        if target is None:
            self._define_ref(name, load=(VAR_LOAD_RESOLVE, name))

    def branch_update(self, branch_symbols):
        # Merge symbol information from conditional branches.  Names not
        # stored in *every* branch may be undefined after the conditional
        # and need a fallback load (outer alias or runtime resolve).
        stores = {}
        for branch in branch_symbols:
            for target in branch.stores:
                if target in self.stores:
                    continue
                stores[target] = stores.get(target, 0) + 1

        for sym in branch_symbols:
            self.refs.update(sym.refs)
            self.loads.update(sym.loads)
            self.stores.update(sym.stores)

        for name, branch_count in iteritems(stores):
            if branch_count == len(branch_symbols):
                continue  # stored in all branches: definitely defined
            target = self.find_ref(name)
            assert target is not None, 'should not happen'

            if self.parent is not None:
                outer_target = self.parent.find_ref(name)
                if outer_target is not None:
                    self.loads[target] = (VAR_LOAD_ALIAS, outer_target)
                    continue
            self.loads[target] = (VAR_LOAD_RESOLVE, name)

    def dump_stores(self):
        # Map every stored name visible from this frame to its identifier.
        rv = {}
        node = self
        while node is not None:
            for name in node.stores:
                if name not in rv:
                    rv[name] = self.find_ref(name)
            node = node.parent
        return rv

    def dump_param_targets(self):
        # Collect identifiers declared as parameters in the frame chain.
        # NOTE(review): each pass iterates self.loads rather than
        # node.loads — looks like a latent bug; confirm against upstream.
        rv = set()
        node = self
        while node is not None:
            for target, (instr, _) in iteritems(self.loads):
                if instr == VAR_LOAD_PARAMETER:
                    rv.add(target)
            node = node.parent
        return rv
class RootVisitor(NodeVisitor):
    """Dispatches over scope-introducing nodes; direct children are
    analyzed with a FrameSymbolVisitor against a single Symbols frame."""

    def __init__(self, symbols):
        self.sym_visitor = FrameSymbolVisitor(symbols)

    def _simple_visit(self, node, **kwargs):
        # These node types just analyze all their direct children.
        for child in node.iter_child_nodes():
            self.sym_visitor.visit(child)

    visit_Template = visit_Block = visit_Macro = visit_FilterBlock = \
        visit_Scope = visit_If = visit_ScopedEvalContextModifier = \
        _simple_visit

    def visit_AssignBlock(self, node, **kwargs):
        for child in node.body:
            self.sym_visitor.visit(child)

    def visit_CallBlock(self, node, **kwargs):
        # The call expression itself belongs to the outer frame.
        for child in node.iter_child_nodes(exclude=('call',)):
            self.sym_visitor.visit(child)

    def visit_For(self, node, for_branch='body', **kwargs):
        # A for loop has separately scoped parts; `for_branch` selects
        # which one is analyzed in this frame.
        if for_branch == 'body':
            self.sym_visitor.visit(node.target, store_as_param=True)
            branch = node.body
        elif for_branch == 'else':
            branch = node.else_
        elif for_branch == 'test':
            self.sym_visitor.visit(node.target, store_as_param=True)
            if node.test is not None:
                self.sym_visitor.visit(node.test)
            return
        else:
            raise RuntimeError('Unknown for branch')
        for item in branch or ():
            self.sym_visitor.visit(item)

    def visit_With(self, node, **kwargs):
        for target in node.targets:
            self.sym_visitor.visit(target)
        for child in node.body:
            self.sym_visitor.visit(child)

    def generic_visit(self, node, *args, **kwargs):
        # Any other node type reaching the root visitor is a programming
        # error in the compiler.
        raise NotImplementedError('Cannot find symbols for %r' %
                                  node.__class__.__name__)
class FrameSymbolVisitor(NodeVisitor):
    """A visitor for `Frame.inspect`."""

    def __init__(self, symbols):
        self.symbols = symbols

    def visit_Name(self, node, store_as_param=False, **kwargs):
        """All assignments to names go through this function."""
        if store_as_param or node.ctx == 'param':
            self.symbols.declare_parameter(node.name)
        elif node.ctx == 'store':
            self.symbols.store(node.name)
        elif node.ctx == 'load':
            self.symbols.load(node.name)

    def visit_If(self, node, **kwargs):
        self.visit(node.test, **kwargs)

        original_symbols = self.symbols

        def inner_visit(nodes):
            # Analyze each branch against its own copy so the branches
            # stay independent of one another.
            self.symbols = rv = original_symbols.copy()
            for subnode in nodes:
                self.visit(subnode, **kwargs)
            self.symbols = original_symbols
            return rv

        body_symbols = inner_visit(node.body)
        else_symbols = inner_visit(node.else_ or ())
        # Merge both branches back into the enclosing frame.
        self.symbols.branch_update([body_symbols, else_symbols])

    def visit_Macro(self, node, **kwargs):
        self.symbols.store(node.name)

    def visit_Import(self, node, **kwargs):
        self.generic_visit(node, **kwargs)
        self.symbols.store(node.target)

    def visit_FromImport(self, node, **kwargs):
        self.generic_visit(node, **kwargs)
        for name in node.names:
            if isinstance(name, tuple):
                # (source name, alias) pair: the alias is what is stored.
                self.symbols.store(name[1])
            else:
                self.symbols.store(name)

    def visit_Assign(self, node, **kwargs):
        """Visit assignments in the correct order."""
        self.visit(node.node, **kwargs)
        self.visit(node.target, **kwargs)

    def visit_For(self, node, **kwargs):
        """Visiting stops at for blocks.  However the block sequence
        is visited as part of the outer scope.
        """
        self.visit(node.iter, **kwargs)

    def visit_CallBlock(self, node, **kwargs):
        self.visit(node.call, **kwargs)

    def visit_FilterBlock(self, node, **kwargs):
        self.visit(node.filter, **kwargs)

    def visit_With(self, node, **kwargs):
        for target in node.values:
            self.visit(target)

    def visit_AssignBlock(self, node, **kwargs):
        """Stop visiting at block assigns."""
        self.visit(node.target, **kwargs)

    def visit_Scope(self, node, **kwargs):
        """Stop visiting at scopes."""

    def visit_Block(self, node, **kwargs):
        """Stop visiting at blocks."""
| mit | 8,066,246,232,400,774,000 | -8,800,272,815,726,671,000 | 31.087912 | 79 | 0.568379 | false |
SCSSG/Odoo-SCS | addons/base_report_designer/plugin/openerp_report_designer/bin/script/LoginTest.py | 384 | 1320 | #########################################################################
#
# Copyright (c) 2003-2004 Danny Brewer d29583@groovegarden.com
# Copyright (C) 2004-2010 OpenERP SA (<http://openerp.com>).
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# See: http://www.gnu.org/licenses/lgpl.html
#
#############################################################################
# Run the GUI imports only when executed as a script/macro, not when
# bundled as the "package" module.  `!=` replaces the Python-2-only `<>`
# operator (removed in Python 3) and is valid in both versions.
if __name__ != "package":
    from ServerParameter import *
    from lib.gui import *
class LoginTest:
    # Prompts for server connection parameters when no login is active.
    def __init__(self):
        # `loginstatus` and `Change` come from the star imports above
        # (lib.gui / ServerParameter).
        if not loginstatus:
            Change(None)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -471,481,813,620,537,600 | 2,520,199,120,383,266,000 | 37.823529 | 81 | 0.639394 | false |
kmonsoor/npyscreen | npyscreen/wgmultilineeditable.py | 15 | 4413 | import curses
from . import wgwidget
from . import wgmultiline
from . import wgtextbox as textbox
from . import wgboxwidget
class MultiLineEditable(wgmultiline.MultiLine):
    """MultiLine widget whose individual lines can be edited in place."""
    _contained_widgets = textbox.Textfield
    # Validate edited lines with check_line_value(); failing lines are
    # removed again.
    CHECK_VALUE = True
    # Allow the user to keep adding lines by exiting a line editor
    # downwards.
    ALLOW_CONTINUE_EDITING = True
    CONTINUE_EDITING_AFTER_EDITING_ONE_LINE = True

    def get_new_value(self):
        # Value used for freshly inserted lines; subclasses may override.
        return ''

    def check_line_value(self, vl):
        # Reject empty values by default; subclasses may override.
        if not vl:
            return False
        else:
            return True

    def edit_cursor_line_value(self):
        # Edit the line under the cursor in place.  Returns True when
        # editing may continue, False when the value was rejected.
        if len(self.values) == 0:
            self.insert_line_value()
            return False
        try:
            active_line = self._my_widgets[(self.cursor_line-self.start_display_at)]
        except IndexError:
            self._my_widgets[0]
            self.cursor_line = 0
            self.insert_line_value()
            return True
        active_line.highlight = False
        active_line.edit()
        try:
            self.values[self.cursor_line] = active_line.value
        except IndexError:
            self.values.append(active_line.value)
            if not self.cursor_line:
                self.cursor_line = 0
            self.cursor_line = len(self.values) - 1
        self.reset_display_cache()

        if self.CHECK_VALUE:
            if not self.check_line_value(self.values[self.cursor_line]):
                self.delete_line_value()
                return False

        self.display()
        return True

    def insert_line_value(self):
        # Insert a fresh line at the cursor and immediately edit it.
        if self.cursor_line is None:
            self.cursor_line = 0
        self.values.insert(self.cursor_line, self.get_new_value())
        self.display()
        cont = self.edit_cursor_line_value()
        if cont and self.ALLOW_CONTINUE_EDITING:
            self._continue_editing()

    def delete_line_value(self):
        if len(self.values) > 0:
            del self.values[self.cursor_line]
            self.display()

    def _continue_editing(self):
        # While the user exits line editors downwards, keep appending and
        # editing new lines.
        active_line = self._my_widgets[(self.cursor_line-self.start_display_at)]
        continue_editing = self.ALLOW_CONTINUE_EDITING
        if hasattr(active_line, 'how_exited'):
            while active_line.how_exited == wgwidget.EXITED_DOWN and continue_editing:
                self.values.insert(self.cursor_line+1, self.get_new_value())
                self.cursor_line += 1
                self.display()
                continue_editing = self.edit_cursor_line_value()
                active_line = self._my_widgets[(self.cursor_line-self.start_display_at)]

    def h_insert_next_line(self, ch):
        # NOTE(review): `len(self.values) == self.cursor_line - 1` looks
        # suspicious (cursor at end is usually len-1, not len+1) — confirm
        # against upstream before changing.
        if len(self.values) == self.cursor_line - 1 or len(self.values) == 0:
            self.values.append(self.get_new_value())
            self.cursor_line += 1
            self.display()
            cont = self.edit_cursor_line_value()
            if cont and self.ALLOW_CONTINUE_EDITING:
                self._continue_editing()
        else:
            self.cursor_line += 1
            self.insert_line_value()

    def h_edit_cursor_line_value(self, ch):
        continue_line = self.edit_cursor_line_value()
        if continue_line and self.CONTINUE_EDITING_AFTER_EDITING_ONE_LINE:
            self._continue_editing()

    def h_insert_value(self, ch):
        return self.insert_line_value()

    def h_delete_line_value(self, ch):
        self.delete_line_value()

    def set_up_handlers(self):
        # Vi-like bindings: i/o insert, Enter/Space edit, Del/Backspace
        # delete the current line.
        super(MultiLineEditable, self).set_up_handlers()
        self.handlers.update ( {
            ord('i'): self.h_insert_value,
            ord('o'): self.h_insert_next_line,
            curses.ascii.CR: self.h_edit_cursor_line_value,
            curses.ascii.NL: self.h_edit_cursor_line_value,
            curses.ascii.SP: self.h_edit_cursor_line_value,
            curses.ascii.DEL: self.h_delete_line_value,
            curses.ascii.BS: self.h_delete_line_value,
            curses.KEY_BACKSPACE: self.h_delete_line_value,
        } )
class MultiLineEditableTitle(wgmultiline.TitleMultiLine):
    # Titled wrapper: the editable multiline is the contained entry widget.
    _entry_type = MultiLineEditable
class MultiLineEditableBoxed(wgboxwidget.BoxTitle):
    # Boxed (framed) wrapper around the editable multiline widget.
    _contained_widget = MultiLineEditable
| bsd-2-clause | -7,289,030,044,937,701,000 | 2,953,958,849,712,597,000 | 34.596774 | 88 | 0.557444 | false |
Lukc/ospace-lukc | client-pygame/lib/pygameui/ProgressBar.py | 2 | 1287 | #
# Copyright 2001 - 2006 Ludek Smid [http://www.ospace.net/]
#
# This file is part of Pygame.UI.
#
# Pygame.UI is free software; you can redistribute it and/or modify
# it under the terms of the Lesser GNU General Public License as published by
# the Free Software Foundation; either version 2.1 of the License, or
# (at your option) any later version.
#
# Pygame.UI is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# Lesser GNU General Public License for more details.
#
# You should have received a copy of the Lesser GNU General Public License
# along with Pygame.UI; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
from pygame.locals import *
from Const import *
from Widget import Widget, registerWidget
class ProgressBar(Widget):
    """Simple progress bar widget; rendering is delegated to the theme."""

    def __init__(self, parent, **kwargs):
        Widget.__init__(self, parent)
        # data
        self.min = 0    # lower bound of the value range
        self.max = 100  # upper bound of the value range
        self.value = 0  # current progress value
        # flags
        # Apply keyword overrides (min/max/value, ...), then register with
        # the parent so it takes part in layout and event dispatch.
        self.processKWArguments(kwargs)
        parent.registerWidget(self)

    def draw(self, surface):
        # The active theme knows how to render a progress bar.
        self.theme.drawProgressBar(surface, self)
        return self.rect

registerWidget(ProgressBar, 'progressbar')
| gpl-2.0 | -6,095,014,898,533,244,000 | 4,852,241,521,822,423,000 | 30.390244 | 78 | 0.734266 | false |
kevin-coder/tensorflow-fork | tensorflow/lite/experimental/micro/examples/micro_speech/apollo3/captured_data_to_wav.py | 11 | 1442 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Converts values pulled from the microcontroller into audio files."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import struct
# import matplotlib.pyplot as plt
import numpy as np
import soundfile as sf
def new_data_to_array(fn):
    """Parse hex byte values from *fn* into little-endian int16 samples.

    The first line of the file is treated as a header and skipped; the
    remaining lines contain whitespace-separated hexadecimal byte values.

    Returns a tuple of signed 16-bit integers.
    """
    vals = []
    with open(fn) as f:
        for line_number, line in enumerate(f):
            if line_number != 0:  # skip the header line
                vals.extend(int(v, 16) for v in line.split())
    # Pack the raw bytes directly.  The previous ''.join(map(chr, vals))
    # produced a str, which struct.unpack() rejects on Python 3; a
    # bytes object works on both Python 2 and 3.
    b = bytes(bytearray(vals))
    y = struct.unpack('<' + 'h' * (len(b) // 2), b)
    return y
return y
# Load the microcontroller capture and convert the int16 samples to float.
data = 'captured_data.txt'
values = np.array(new_data_to_array(data)).astype(float)

# plt.plot(values, 'o-')
# plt.show(block=False)

# Normalize to [-1, 1] and write out as a 16 kHz mono WAV file.
wav = values / np.max(np.abs(values))
sf.write('captured_data.wav', wav, 16000)
| apache-2.0 | -3,774,084,623,712,656,000 | 2,426,558,034,309,517,000 | 30.347826 | 80 | 0.666436 | false |
luwei0917/awsemmd_script | small_script/computeRg.py | 1 | 2040 | from Bio.PDB.PDBParser import PDBParser
import argparse
# Command-line interface: a single positional argument naming the PDB file.
parser = argparse.ArgumentParser(description="Compute Rg of pdb")
parser.add_argument("pdb", help="pdb file")
args = parser.parse_args()
def computeRg(pdb_file, chain="A"):
    """Compute the radius of gyration of a PDB structure from CA atoms.

    Uses the pairwise-distance form Rg = sqrt(sum_{i<j} r_ij**2 / n**2)
    over the structure's "regular" residues (HETATM/water excluded).

    pdb_file -- path to the PDB file
    chain    -- kept for interface compatibility; the computation uses
                all regular residues of the structure, as before
    """
    parser = PDBParser()
    structure = parser.get_structure('X', pdb_file)
    # Only regular residues (id[0] == ' '), i.e. no HETATM/water records.
    regular_res_list = [res for res in structure.get_residues()
                        if res.get_id()[0] == ' ']
    n = len(regular_res_list)
    rg = 0.0
    for i, residue_i in enumerate(regular_res_list):
        for residue_j in regular_res_list[i + 1:]:
            try:
                r = residue_i["CA"] - residue_j["CA"]
            except KeyError:
                # A residue without a CA atom: skip this pair.  The old
                # bare `except` fell through and re-used a stale (or
                # undefined) distance, silently corrupting the result.
                print(residue_i, residue_j)
                continue
            rg += r ** 2
    return (rg / (n ** 2)) ** 0.5
# Report the radius of gyration for the requested structure.
rg = computeRg(args.pdb)
print(rg)
def cylindrical_rg_bias_term(oa, k_rg=4.184, rg0=0, atomGroup=-1, forceGroup=27):
    """Harmonic bias on the cylindrical radius of gyration (x/y only).

    oa         -- object exposing `nres` and `ca` (CA atom indices)
    k_rg       -- force constant (4.184 presumably converts 1 kcal/mol to
                  kJ/mol -- TODO confirm)
    rg0        -- target radius of gyration
    atomGroup  -- -1 means all residues; otherwise a list of residue indices
    forceGroup -- force group id assigned to the returned force

    NOTE(review): relies on CustomBondForce/CustomCVForce (OpenMM) being in
    scope via an import elsewhere -- they are not imported in this file.
    """
    nres, ca = oa.nres, oa.ca
    if atomGroup == -1:
        group = list(range(nres))
    else:
        group = atomGroup  # atomGroup = [0, 1, 10, 12] means include residue 1, 2, 11, 13.
    n = len(group)
    # Mean squared x/y separation over all CA pairs; z is ignored, hence
    # "cylindrical".
    rg_square = CustomBondForce("1/normalization*(x^2+y^2)")
    # rg = CustomBondForce("1")
    rg_square.addGlobalParameter("normalization", n*n)
    for i in group:
        for j in group:
            if j <= i:
                continue  # each unordered pair contributes once
            rg_square.addBond(ca[i], ca[j], [])
    rg = CustomCVForce(f"{k_rg}*(rg_square^0.5-{rg0})^2")
    rg.addCollectiveVariable("rg_square", rg_square)
    rg.setForceGroup(forceGroup)
    return rg
| mit | -2,794,268,491,933,733,000 | 3,639,994,715,854,312,400 | 35.428571 | 141 | 0.59951 | false |
batisteo/pasportaservo | hosting/migrations/0042_create_visibility.py | 4 | 3311 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2017-07-17 10:47
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Introduces VisibilitySettings -- a generic per-object visibility
    # record (online public / online authenticated / printed book) -- plus
    # proxy models specializing it per kind, and links profiles, phones
    # and places to their settings rows via one-to-one fields.

    dependencies = [
        ('hosting', '0041_auto_20170929_1743'),
    ]

    operations = [
        migrations.CreateModel(
            name='VisibilitySettings',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('model_type', models.CharField(default='Unknown', max_length=25, verbose_name='type')),
                ('model_id', models.PositiveIntegerField(null=True)),
                ('content_type', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType')),
                ('visible_online_public', models.BooleanField(verbose_name='visible online for all')),
                ('visible_online_authed', models.BooleanField(verbose_name='visible online w/authorization')),
                ('visible_in_book', models.BooleanField(verbose_name='visible in the book')),
            ],
            options={'verbose_name': 'visibility settings', 'verbose_name_plural': 'visibility settings'},
        ),
        # Proxy models: same table, specialized behavior per settings kind.
        migrations.CreateModel(
            name='VisibilitySettingsForPublicEmail',
            fields=[
            ],
            options={
                'proxy': True,
            },
            bases=('hosting.visibilitysettings',),
        ),
        migrations.CreateModel(
            name='VisibilitySettingsForPhone',
            fields=[
            ],
            options={
                'proxy': True,
            },
            bases=('hosting.visibilitysettings',),
        ),
        migrations.CreateModel(
            name='VisibilitySettingsForPlace',
            fields=[
            ],
            options={
                'proxy': True,
            },
            bases=('hosting.visibilitysettings',),
        ),
        migrations.CreateModel(
            name='VisibilitySettingsForFamilyMembers',
            fields=[
            ],
            options={
                'proxy': True,
            },
            bases=('hosting.visibilitysettings',),
        ),
        # One-to-one links from the hosting models to their settings rows;
        # PROTECT keeps settings from being deleted while referenced.
        migrations.AddField(
            model_name='profile',
            name='email_visibility',
            field=models.OneToOneField(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to='hosting.VisibilitySettings'),
        ),
        migrations.AddField(
            model_name='phone',
            name='visibility',
            field=models.OneToOneField(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to='hosting.VisibilitySettings'),
        ),
        migrations.AddField(
            model_name='place',
            name='visibility',
            field=models.OneToOneField(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to='hosting.VisibilitySettings'),
        ),
        migrations.AddField(
            model_name='place',
            name='family_members_visibility',
            field=models.OneToOneField(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to='hosting.VisibilitySettings'),
        ),
    ]
| agpl-3.0 | -2,028,935,733,629,041,200 | -507,927,191,394,018,600 | 37.952941 | 146 | 0.56569 | false |
freakynit/kaggle-ndsb | configurations/featharalick_bagged_convroll4_big_wd_maxout512.py | 6 | 2632 | import numpy as np
import theano
import theano.tensor as T
import lasagne as nn
import data
import load
import nn_plankton
import dihedral
import tmp_dnn
import tta
# Handcrafted feature sets to feed alongside the convnet predictions;
# only the haralick texture features are enabled in this configuration.
features = [
    # "hu",
    # "tutorial",
    "haralick",
    # "aaronmoments",
    # "lbp",
    # "pftas",
    # "zernike_moments",
    # "image_size",
]

batch_size = 128
chunk_size = 32768
num_chunks_train = 240

momentum = 0.9
# Chunk index -> learning rate (step decay).
learning_rate_schedule = {
    0: 0.001,
    100: 0.0001,
    200: 0.00001,
}

validate_every = 40
save_every = 240
if save_every > num_chunks_train: print "\n"*5; print "WARNING: NOT SAVING METADATA!!!"; print "\n"*5;

# Bagged predictions of the base convnet model (test set only here).
sdir = "/mnt/storage/users/sedielem/git/kaggle-plankton/predictions/"
train_pred_file = ""
valid_pred_file = ""
test_pred_file = sdir+"bagged--test--convroll4_big_wd_maxout512--avg-probs.npy"

data_loader = load.PredictionsWithFeaturesDataLoader(
    features = features,
    train_pred_file=train_pred_file,
    valid_pred_file=valid_pred_file,
    test_pred_file=test_pred_file,
    num_chunks_train=num_chunks_train,
    chunk_size=chunk_size)

# Generator factories used by the training/evaluation harness.
create_train_gen = lambda: data_loader.create_random_gen()
create_eval_train_gen = lambda: data_loader.create_fixed_gen("train")
create_eval_valid_gen = lambda: data_loader.create_fixed_gen("valid")
create_eval_test_gen = lambda: data_loader.create_fixed_gen("test")
def build_model():
    """Build the meta-model: refine convnet class predictions with a small
    MLP over 52 handcrafted features, combined in log-probability space."""
    # Inputs: base-model class probabilities and per-sample features.
    l0 = nn.layers.InputLayer((batch_size, data.num_classes))
    l0_size = nn.layers.InputLayer((batch_size, 52))
    l1_size = nn.layers.DenseLayer(l0_size, num_units=80, W=nn_plankton.Orthogonal('relu'), b=nn.init.Constant(0.1))
    l2_size = nn.layers.DenseLayer(l1_size, num_units=80, W=nn_plankton.Orthogonal('relu'), b=nn.init.Constant(0.1))
    # Linear output head producing an additive correction per class.
    l3_size = nn.layers.DenseLayer(l2_size, num_units=data.num_classes, W=nn_plankton.Orthogonal(), b=nn.init.Constant(0.1), nonlinearity=None)

    # Add the correction to the log-probabilities, then renormalize.
    l1 = nn_plankton.NonlinLayer(l0, T.log)
    ltot = nn.layers.ElemwiseSumLayer([l1, l3_size])
    # norm_by_sum = lambda x: x / x.sum(1).dimshuffle(0, "x")
    lout = nn_plankton.NonlinLayer(ltot, nonlinearity=T.nnet.softmax)

    return [l0, l0_size], lout
def build_objective(l_ins, l_out):
reg_param = 0.0002
alpha = 0. # 0 -> L2 1-> L1
print "regu", reg_param, alpha
# lambda_reg = 0.005
params = nn.layers.get_all_non_bias_params(l_out)
# reg_term = sum(T.sum(p**2) for p in params)
L2 = sum(T.sum(p**2) for p in params)
L1 = sum(T.sum(T.abs_(p)) for p in params)
def loss(y, t):
return nn_plankton.log_loss(y, t) + reg_param*(alpha * L1 + (1-alpha) * L2)
return nn.objectives.Objective(l_out, loss_function=loss) | mit | 1,874,599,336,342,878,000 | -1,600,086,405,239,381,800 | 28.255556 | 143 | 0.667553 | false |
thomsonreuters/electron | script/dump-symbols.py | 6 | 1697 | #!/usr/bin/env python
import os
import sys
from lib.config import PLATFORM
from lib.util import electron_gyp, execute, rm_rf
SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
DIST_DIR = os.path.join(SOURCE_ROOT, 'dist')
OUT_DIR = os.path.join(SOURCE_ROOT, 'out', 'R')
CHROMIUM_DIR = os.path.join(SOURCE_ROOT, 'vendor', 'download',
'libchromiumcontent', 'static_library')
def main(destination):
  """Regenerate the breakpad symbol tree under `destination`.

  Any previous contents of `destination` are removed first; the actual
  dumping is delegated to the platform-specific helper script.
  """
  rm_rf(destination)
  project_name, product_name = get_names_from_gyp()

  if PLATFORM in ['darwin', 'linux']:
    script = os.path.join(SOURCE_ROOT, 'tools', 'posix',
                          'generate_breakpad_symbols.py')
    # on macOS the binary lives inside the .app bundle
    if PLATFORM == 'darwin':
      binary = os.path.join(OUT_DIR, '{0}.app'.format(product_name),
                            'Contents', 'MacOS', product_name)
    else:
      binary = os.path.join(OUT_DIR, project_name)
    args = [
      '--build-dir={0}'.format(OUT_DIR),
      '--binary={0}'.format(binary),
      '--symbols-dir={0}'.format(destination),
      '--libchromiumcontent-dir={0}'.format(CHROMIUM_DIR),
      '--clear',
      '--jobs=16',
    ]
  else:
    script = os.path.join(SOURCE_ROOT, 'tools', 'win',
                          'generate_breakpad_symbols.py')
    args = [
      '--symbols-dir={0}'.format(destination),
      '--jobs=16',
      os.path.relpath(OUT_DIR),
    ]

  execute([sys.executable, script] + args)
def get_names_from_gyp():
  """Return (project_name, product_name) as declared in the gyp variables."""
  gyp_vars = electron_gyp()
  return (gyp_vars['project_name%'], gyp_vars['product_name%'])


if __name__ == '__main__':
  # usage: dump-symbols.py <destination-directory>
  sys.exit(main(sys.argv[1]))
| mit | 8,579,790,120,858,073,000 | 9,070,927,142,811,973,000 | 29.854545 | 79 | 0.586329 | false |
Jai-Chaudhary/gensim | gensim/corpora/wikicorpus.py | 4 | 12990 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2010 Radim Rehurek <radimrehurek@seznam.cz>
# Copyright (C) 2012 Lars Buitinck <larsmans@gmail.com>
# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html
"""
Construct a corpus from a Wikipedia (or other MediaWiki-based) database dump.
If you have the `pattern` package installed, this module will use a fancy
lemmatization to get a lemma of each token (instead of plain alphabetic
tokenizer). The package is available at https://github.com/clips/pattern .
See scripts/process_wiki.py for a canned (example) script based on this
module.
"""
import bz2
import logging
import re
from xml.etree.cElementTree import iterparse # LXML isn't faster, so let's go with the built-in solution
import multiprocessing
from gensim import utils
# cannot import whole gensim.corpora, because that imports wikicorpus...
from gensim.corpora.dictionary import Dictionary
from gensim.corpora.textcorpus import TextCorpus
logger = logging.getLogger('gensim.corpora.wikicorpus')
# ignore articles shorter than ARTICLE_MIN_WORDS characters (after full preprocessing)
ARTICLE_MIN_WORDS = 50
# Precompiled regexes used by filter_wiki()/remove_markup() to strip
# MediaWiki markup. RE_P3/RE_P4/RE_P7/RE_P8 are kept for backward
# compatibility; the current pipeline uses remove_template()/remove_file()
# for templates and images instead.
RE_P0 = re.compile('<!--.*?-->', re.DOTALL | re.UNICODE) # comments
RE_P1 = re.compile('<ref([> ].*?)(</ref>|/>)', re.DOTALL | re.UNICODE) # footnotes
RE_P2 = re.compile("(\n\[\[[a-z][a-z][\w-]*:[^:\]]+\]\])+$", re.UNICODE) # links to languages
RE_P3 = re.compile("{{([^}{]*)}}", re.DOTALL | re.UNICODE) # template
RE_P4 = re.compile("{{([^}]*)}}", re.DOTALL | re.UNICODE) # template
RE_P5 = re.compile('\[(\w+):\/\/(.*?)(( (.*?))|())\]', re.UNICODE) # remove URL, keep description
RE_P6 = re.compile("\[([^][]*)\|([^][]*)\]", re.DOTALL | re.UNICODE) # simplify links, keep description
RE_P7 = re.compile('\n\[\[[iI]mage(.*?)(\|.*?)*\|(.*?)\]\]', re.UNICODE) # keep description of images
RE_P8 = re.compile('\n\[\[[fF]ile(.*?)(\|.*?)*\|(.*?)\]\]', re.UNICODE) # keep description of files
RE_P9 = re.compile('<nowiki([> ].*?)(</nowiki>|/>)', re.DOTALL | re.UNICODE) # outside links
RE_P10 = re.compile('<math([> ].*?)(</math>|/>)', re.DOTALL | re.UNICODE) # math content
RE_P11 = re.compile('<(.*?)>', re.DOTALL | re.UNICODE) # all other tags
RE_P12 = re.compile('\n(({\|)|(\|-)|(\|}))(.*?)(?=\n)', re.UNICODE) # table formatting
RE_P13 = re.compile('\n(\||\!)(.*?\|)*([^|]*?)', re.UNICODE) # table cell formatting
RE_P14 = re.compile('\[\[Category:[^][]*\]\]', re.UNICODE) # categories
# Remove File and Image template
RE_P15 = re.compile('\[\[([fF]ile:|[iI]mage)[^]]*(\]\])', re.UNICODE)
def filter_wiki(raw):
    """
    Strip all wiki mark-up from `raw`, returning only the plain text.

    `raw` may be a unicode string or a utf-8 encoded bytestring.
    """
    # markup parsing is best-effort rather than a full grammar;
    # contributions to improving this code are welcome :)
    unicode_text = utils.to_unicode(raw, 'utf8', errors='ignore')
    unicode_text = utils.decode_htmlentities(unicode_text)  # '&nbsp;' --> '\xa0'
    return remove_markup(unicode_text)
def remove_markup(text):
    """Strip MediaWiki markup from unicode `text`, returning plain text.

    Templates and File:/Image: markup are handled first; the remaining
    regex substitutions are applied repeatedly (markup can nest) until
    nothing changes or an iteration cap is hit. The substitution order
    matters: tags and links must be removed before table markup.
    """
    text = re.sub(RE_P2, "", text) # remove the last list (=languages)
    # the wiki markup is recursive (markup inside markup etc)
    # instead of writing a recursive grammar, here we deal with that by removing
    # markup in a loop, starting with inner-most expressions and working outwards,
    # for as long as something changes.
    text = remove_template(text)
    text = remove_file(text)
    iters = 0
    while True:
        old, iters = text, iters + 1
        text = re.sub(RE_P0, "", text) # remove comments
        text = re.sub(RE_P1, '', text) # remove footnotes
        text = re.sub(RE_P9, "", text) # remove outside links
        text = re.sub(RE_P10, "", text) # remove math content
        text = re.sub(RE_P11, "", text) # remove all remaining tags
        text = re.sub(RE_P14, '', text) # remove categories
        text = re.sub(RE_P5, '\\3', text) # remove urls, keep description
        text = re.sub(RE_P6, '\\2', text) # simplify links, keep description only
        # remove table markup
        text = text.replace('||', '\n|') # each table cell on a separate line
        text = re.sub(RE_P12, '\n', text) # remove formatting lines
        text = re.sub(RE_P13, '\n\\3', text) # leave only cell content
        # remove empty mark-up
        text = text.replace('[]', '')
        if old == text or iters > 2: # stop if nothing changed between two iterations or after a fixed number of iterations
            break
    # the following is needed to make the tokenizer see '[[socialist]]s' as a single word 'socialists'
    # TODO is this really desirable?
    text = text.replace('[', '').replace(']', '') # promote all remaining markup to plain text
    return text
def remove_template(s):
    """Return a copy of `s` with all wikimedia {{...}} templates removed.

    Templates may be nested, which is why they are matched by counting
    brace pairs instead of with a regular expression. See
    http://meta.wikimedia.org/wiki/Help:Template for the markup details.
    Text belonging to an unterminated template is dropped.
    """
    # Record the [start, end] index pair of every top-level template.
    starts, ends = [], []
    opened = closed = 0
    inside = False
    previous = None
    for pos, ch in enumerate(s):
        if not inside and ch == '{' and previous == '{':
            # second '{' of an opening '{{'
            starts.append(pos - 1)
            inside = True
            opened = 1
        if inside:
            # note: the opening '{' above is counted again here, so the
            # brace counts balance out at the matching '}}'
            if ch == '{':
                opened += 1
            elif ch == '}':
                closed += 1
                if opened == closed:
                    ends.append(pos)
                    inside = False
                    opened = closed = 0
        previous = ch

    # Keep only the text between consecutive templates.
    pieces = [s[end + 1:start]
              for start, end in zip(starts + [None], [-1] + ends)]
    return ''.join(pieces)
def remove_file(s):
    """Replace each 'File:'/'Image:' markup in `s` with its caption.

    See http://www.mediawiki.org/wiki/Help:Images for the markup details.
    """
    # RE_P15 matches a complete File:/Image: markup block
    for match in re.finditer(RE_P15, s):
        markup = match.group(0)
        # the caption is the last |-separated field, minus the closing ']]'
        caption = markup[:-2].split('|')[-1]
        s = s.replace(markup, caption, 1)
    return s
def tokenize(content):
    """
    Tokenize a piece of markup-free wikipedia text (see `filter_wiki()`).

    Return a list of utf8 bytestring tokens. Tokens shorter than 2 or longer
    than 15 characters (not bytes!), or starting with '_', are dropped.
    """
    # TODO maybe ignore tokens with non-latin characters? (no chinese, arabic, russian etc.)
    result = []
    for token in utils.tokenize(content, lower=True, errors='ignore'):
        if 2 <= len(token) <= 15 and not token.startswith('_'):
            result.append(token.encode('utf8'))
    return result
def get_namespace(tag):
"""Returns the namespace of tag."""
m = re.match("^{(.*?)}", tag)
namespace = m.group(1) if m else ""
if not namespace.startswith("http://www.mediawiki.org/xml/export-"):
raise ValueError("%s not recognized as MediaWiki dump namespace"
% namespace)
return namespace
_get_namespace = get_namespace
def extract_pages(f, filter_namespaces=False):
    """
    Extract pages from a MediaWiki database dump (file-like object `f`).

    Yields (title, content, pageid) 3-tuples, where `content` is the raw
    article markup; it is "" for empty pages and for pages whose namespace
    is excluded by `filter_namespaces` (an iterable of namespace ids, or
    False to disable filtering).
    """
    elems = (elem for _, elem in iterparse(f, events=("end",)))
    # We can't rely on the namespace for database dumps, since it's changed
    # it every time a small modification to the format is made. So, determine
    # those from the first element we find, which will be part of the metadata,
    # and construct element paths.
    elem = next(elems)
    namespace = get_namespace(elem.tag)
    ns_mapping = {"ns": namespace}
    page_tag = "{%(ns)s}page" % ns_mapping
    text_path = "./{%(ns)s}revision/{%(ns)s}text" % ns_mapping
    title_path = "./{%(ns)s}title" % ns_mapping
    ns_path = "./{%(ns)s}ns" % ns_mapping
    pageid_path = "./{%(ns)s}id" % ns_mapping
    for elem in elems:
        if elem.tag == page_tag:
            title = elem.find(title_path).text
            text = elem.find(text_path).text
            ns = elem.find(ns_path).text
            # drop the content (but still yield the page) when the
            # namespace is filtered out
            if filter_namespaces and ns not in filter_namespaces:
                text = None
            pageid = elem.find(pageid_path).text
            yield title, text or "", pageid # empty page will yield None
            # Prune the element tree, as per
            # http://www.ibm.com/developerworks/xml/library/x-hiperfparse/
            # except that we don't need to prune backlinks from the parent
            # because we don't use LXML.
            # We do this only for <page>s, since we need to inspect the
            # ./revision/text element. The pages comprise the bulk of the
            # file, so in practice we prune away enough.
            elem.clear()
_extract_pages = extract_pages  # for backward compatibility
def process_article(args):
    """
    Parse one wikipedia article; designed for use with multiprocessing map.

    `args` is a (text, lemmatize, title, pageid) 4-tuple; returns
    (tokens, title, pageid) where tokens is a list of utf8-encoded strings.
    """
    text, lemmatize, title, pageid = args
    plain_text = filter_wiki(text)
    tokens = utils.lemmatize(plain_text) if lemmatize else tokenize(plain_text)
    return tokens, title, pageid
class WikiCorpus(TextCorpus):
    """
    Treat a wikipedia articles dump (\*articles.xml.bz2) as a (read-only) corpus.
    The documents are extracted on-the-fly, so that the whole (massive) dump
    can stay compressed on disk.
    >>> wiki = WikiCorpus('enwiki-20100622-pages-articles.xml.bz2') # create word->word_id mapping, takes almost 8h
    >>> wiki.saveAsText('wiki_en_vocab200k') # another 8h, creates a file in MatrixMarket format plus file with id->word
    """
    def __init__(self, fname, processes=None, lemmatize=utils.HAS_PATTERN, dictionary=None, filter_namespaces=('0',)):
        """
        Initialize the corpus. Unless a dictionary is provided, this scans the
        corpus once, to determine its vocabulary.
        If `pattern` package is installed, use fancier shallow parsing to get
        token lemmas. Otherwise, use simple regexp tokenization. You can override
        this automatic logic by forcing the `lemmatize` parameter explicitly.
        """
        self.fname = fname
        self.filter_namespaces = filter_namespaces
        # when True, get_texts() yields (tokens, (pageid, title)) pairs
        # instead of bare token lists
        self.metadata = False
        if processes is None:
            # leave one core free for the parent process
            processes = max(1, multiprocessing.cpu_count() - 1)
        self.processes = processes
        self.lemmatize = lemmatize
        if dictionary is None:
            # NOTE: this performs a full pass over the dump (slow for big dumps)
            self.dictionary = Dictionary(self.get_texts())
        else:
            self.dictionary = dictionary
    def get_texts(self):
        """
        Iterate over the dump, returning text version of each article as a list
        of tokens.
        Only articles of sufficient length are returned (short articles & redirects
        etc are ignored).
        Note that this iterates over the **texts**; if you want vectors, just use
        the standard corpus interface instead of this function::
        >>> for vec in wiki_corpus:
        >>> print(vec)
        """
        articles, articles_all = 0, 0
        positions, positions_all = 0, 0
        # lazily pair each raw article with the parameters process_article() needs
        texts = ((text, self.lemmatize, title, pageid) for title, text, pageid in extract_pages(bz2.BZ2File(self.fname), self.filter_namespaces))
        pool = multiprocessing.Pool(self.processes)
        # process the corpus in smaller chunks of docs, because multiprocessing.Pool
        # is dumb and would load the entire input into RAM at once...
        ignore_namespaces = 'Wikipedia Category File Portal Template MediaWiki User Help Book Draft'.split()
        for group in utils.chunkize(texts, chunksize=10 * self.processes, maxsize=1):
            for tokens, title, pageid in pool.imap(process_article, group): # chunksize=10):
                articles_all += 1
                positions_all += len(tokens)
                # article redirects and short stubs are pruned here
                if len(tokens) < ARTICLE_MIN_WORDS or any(title.startswith(ignore + ':') for ignore in ignore_namespaces):
                    continue
                articles += 1
                positions += len(tokens)
                if self.metadata:
                    yield (tokens, (pageid, title))
                else:
                    yield tokens
        pool.terminate()
        logger.info("finished iterating over Wikipedia corpus of %i documents with %i positions"
            " (total %i articles, %i positions before pruning articles shorter than %i words)" %
            (articles, positions, articles_all, positions_all, ARTICLE_MIN_WORDS))
        self.length = articles # cache corpus length
# endclass WikiCorpus
| gpl-3.0 | 8,383,650,196,059,681,000 | 4,495,442,670,770,881,000 | 40.369427 | 145 | 0.619246 | false |
Soovox/django-socialregistration | socialregistration/contrib/openid/storage.py | 10 | 2208 | import base64
from openid.association import Association
from openid.store.interface import OpenIDStore
from socialregistration.contrib.openid.models import (OpenIDNonce,
OpenIDStore as OpenIDStoreModel)
class OpenIDStore(OpenIDStore):
    """Django-model backed store for OpenID associations and nonces."""

    # maximum nonce age in seconds (part of the python-openid store interface)
    max_nonce_age = 6 * 60 * 60

    def storeAssociation(self, server_url, assoc=None):
        """Persist an association handed out by the OpenID provider."""
        stored_assoc = OpenIDStoreModel.objects.create(
            server_url=server_url,
            handle=assoc.handle,
            secret=base64.encodestring(assoc.secret),
            issued=assoc.issued,
            # bugfix: this previously stored `assoc.issued`, so the real
            # lifetime of the association was lost
            lifetime=assoc.lifetime,
            assoc_type=assoc.assoc_type
        )

    def getAssociation(self, server_url, handle=None):
        """Return the newest valid association for `server_url` (and `handle`).

        Expired associations encountered during the scan are deleted.
        Returns None when nothing (valid) is stored.
        """
        stored_assocs = OpenIDStoreModel.objects.filter(
            server_url=server_url
        )
        if handle:
            stored_assocs = stored_assocs.filter(handle=handle)

        # bugfix: QuerySet.order_by() returns a *new* queryset; the old code
        # discarded the result, so rows were never actually sorted
        stored_assocs = stored_assocs.order_by('-issued')

        if stored_assocs.count() == 0:
            return None

        return_val = None

        for stored_assoc in stored_assocs:
            assoc = Association(
                stored_assoc.handle, base64.decodestring(stored_assoc.secret),
                stored_assoc.issued, stored_assoc.lifetime, stored_assoc.assoc_type
            )
            # prune expired associations as we go; keep the first valid one
            # (newest, thanks to the ordering above)
            if assoc.getExpiresIn() == 0:
                stored_assoc.delete()
            else:
                if return_val is None:
                    return_val = assoc

        return return_val

    def removeAssociation(self, server_url, handle):
        """Delete stored associations for `server_url` (optionally by handle)."""
        stored_assocs = OpenIDStoreModel.objects.filter(
            server_url=server_url
        )
        if handle:
            stored_assocs = stored_assocs.filter(handle=handle)
        stored_assocs.delete()

    def useNonce(self, server_url, timestamp, salt):
        """Return True the first time a nonce is seen, False on replay."""
        try:
            OpenIDNonce.objects.get(
                server_url=server_url,
                timestamp=timestamp,
                salt=salt
            )
        except OpenIDNonce.DoesNotExist:
            OpenIDNonce.objects.create(
                server_url=server_url,
                timestamp=timestamp,
                salt=salt
            )
            return True
        return False
| mit | -3,457,080,033,908,379,000 | -5,735,750,157,505,803,000 | 29.246575 | 83 | 0.582428 | false |
s-hertel/ansible | lib/ansible/cli/arguments/option_helpers.py | 12 | 17132 | # Copyright: (c) 2018, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import copy
import operator
import argparse
import os
import os.path
import sys
import time
import yaml
try:
import _yaml
HAS_LIBYAML = True
except ImportError:
HAS_LIBYAML = False
from jinja2 import __version__ as j2_version
import ansible
from ansible import constants as C
from ansible.module_utils._text import to_native
from ansible.release import __version__
from ansible.utils.path import unfrackpath
#
# Special purpose OptionParsers
#
class SortingHelpFormatter(argparse.HelpFormatter):
def add_arguments(self, actions):
actions = sorted(actions, key=operator.attrgetter('option_strings'))
super(SortingHelpFormatter, self).add_arguments(actions)
class AnsibleVersion(argparse.Action):
    """argparse action that prints the full ansible version report and exits."""

    def __call__(self, parser, namespace, values, option_string=None):
        print(to_native(version(getattr(parser, 'prog'))))
        parser.exit()
class UnrecognizedArgument(argparse.Action):
    """Action that rejects a (retired) option as if argparse never knew it."""

    def __init__(self, option_strings, dest, const=True, default=None, required=False, help=None,
                 metavar=None, nargs=0):
        super(UnrecognizedArgument, self).__init__(
            option_strings=option_strings, dest=dest, nargs=nargs, const=const,
            default=default, required=required, help=help)

    def __call__(self, parser, namespace, values, option_string=None):
        # parser.error() prints usage to stderr and exits with status 2
        parser.error('unrecognized arguments: %s' % option_string)
class PrependListAction(argparse.Action):
    """A near clone of ``argparse._AppendAction``, but designed to prepend list values
    instead of appending.

    The option's ``type=`` callable is expected to return a list; each
    occurrence of the option pushes its values onto the *front* of the
    accumulated list.
    """

    def __init__(self, option_strings, dest, nargs=None, const=None, default=None, type=None,
                 choices=None, required=False, help=None, metavar=None):
        if nargs == 0:
            raise ValueError('nargs for append actions must be > 0; if arg '
                             'strings are not supplying the value to append, '
                             'the append const action may be more appropriate')
        if const is not None and nargs != argparse.OPTIONAL:
            raise ValueError('nargs must be %r to supply const' % argparse.OPTIONAL)
        super(PrependListAction, self).__init__(
            option_strings=option_strings,
            dest=dest,
            nargs=nargs,
            const=const,
            default=default,
            type=type,
            choices=choices,
            required=required,
            help=help,
            metavar=metavar,
        )

    def __call__(self, parser, namespace, values, option_string=None):
        # copy so the default list object is never mutated in place
        current = copy.copy(ensure_value(namespace, self.dest, []))
        current[0:0] = values
        setattr(namespace, self.dest, current)


def ensure_value(namespace, name, value):
    """Return ``namespace.name``, first initialising it to `value` if unset or None."""
    if getattr(namespace, name, None) is None:
        setattr(namespace, name, value)
    return getattr(namespace, name)
#
# Callbacks to validate and normalize Options
#
def unfrack_path(pathsep=False):
    """Build an argparse ``type=`` callable that normalizes path option values.

    With ``pathsep=True`` the value is split on os.pathsep and a list of
    normalized paths is returned (empty entries are dropped).
    """
    def inner(value):
        if pathsep:
            return [unfrackpath(part) for part in value.split(os.pathsep) if part]
        if value == '-':
            # '-' is passed through untouched (conventional stdin/stdout marker)
            return value
        return unfrackpath(value)
    return inner
def _git_repo_info(repo_path):
""" returns a string containing git branch, commit id and commit date """
result = None
if os.path.exists(repo_path):
# Check if the .git is a file. If it is a file, it means that we are in a submodule structure.
if os.path.isfile(repo_path):
try:
gitdir = yaml.safe_load(open(repo_path)).get('gitdir')
# There is a possibility the .git file to have an absolute path.
if os.path.isabs(gitdir):
repo_path = gitdir
else:
repo_path = os.path.join(repo_path[:-4], gitdir)
except (IOError, AttributeError):
return ''
with open(os.path.join(repo_path, "HEAD")) as f:
line = f.readline().rstrip("\n")
if line.startswith("ref:"):
branch_path = os.path.join(repo_path, line[5:])
else:
branch_path = None
if branch_path and os.path.exists(branch_path):
branch = '/'.join(line.split('/')[2:])
with open(branch_path) as f:
commit = f.readline()[:10]
else:
# detached HEAD
commit = line[:10]
branch = 'detached HEAD'
branch_path = os.path.join(repo_path, "HEAD")
date = time.localtime(os.stat(branch_path).st_mtime)
if time.daylight == 0:
offset = time.timezone
else:
offset = time.altzone
result = "({0} {1}) last updated {2} (GMT {3:+04d})".format(branch, commit, time.strftime("%Y/%m/%d %H:%M:%S", date), int(offset / -36))
else:
result = ''
return result
def _gitinfo():
    """Return git branch/commit info for the ansible checkout, or ''."""
    # four levels up from this file is the repository root
    source_root = os.path.normpath(
        os.path.join(os.path.dirname(__file__), '..', '..', '..', '..'))
    return _git_repo_info(os.path.join(source_root, '.git'))
def version(prog=None):
    """Assemble the multi-line ansible version report shown by --version."""
    first_line = " ".join((prog, __version__)) if prog else __version__
    gitinfo = _gitinfo()
    if gitinfo:
        first_line = "{0} {1}".format(first_line, gitinfo)

    if C.DEFAULT_MODULE_PATH is None:
        cpath = "Default w/o overrides"
    else:
        cpath = C.DEFAULT_MODULE_PATH

    report = [
        first_line,
        "  config file = %s" % C.CONFIG_FILE,
        "  configured module search path = %s" % cpath,
        "  ansible python module location = %s" % ':'.join(ansible.__path__),
        "  ansible collection location = %s" % ':'.join(C.COLLECTIONS_PATHS),
        "  executable location = %s" % sys.argv[0],
        "  python version = %s" % ''.join(sys.version.splitlines()),
        "  jinja version = %s" % j2_version,
        "  libyaml = %s" % HAS_LIBYAML,
    ]
    return "\n".join(report)
#
# Functions to add pre-canned options to an OptionParser
#
def create_base_parser(prog, usage="", desc=None, epilog=None):
    """
    Create an options parser for all ansible scripts
    """
    # NOTE(review): `usage` is accepted but never passed to ArgumentParser —
    # confirm whether callers rely on it.
    # base opts
    parser = argparse.ArgumentParser(
        prog=prog,
        formatter_class=SortingHelpFormatter,
        epilog=epilog,
        description=desc,
        conflict_handler='resolve',
    )
    version_help = "show program's version number, config file location, configured module search path," \
                   " module location, executable location and exit"
    # --version uses a custom action so it can print the full report
    parser.add_argument('--version', action=AnsibleVersion, nargs=0, help=version_help)
    add_verbosity_options(parser)
    return parser
def add_verbosity_options(parser):
    """Add options for verbosity"""
    # action="count" makes each repeated -v bump the level
    parser.add_argument('-v', '--verbose', dest='verbosity', default=C.DEFAULT_VERBOSITY, action="count",
                        help="verbose mode (-vvv for more, -vvvv to enable connection debugging)")
def add_async_options(parser):
    """Add options for commands which can launch async tasks"""
    parser.add_argument('-P', '--poll', default=C.DEFAULT_POLL_INTERVAL, type=int, dest='poll_interval',
                        help="set the poll interval if using -B (default=%s)" % C.DEFAULT_POLL_INTERVAL)
    parser.add_argument('-B', '--background', dest='seconds', type=int, default=0,
                        help='run asynchronously, failing after X seconds (default=N/A)')
def add_basedir_options(parser):
    """Add options for commands which can set a playbook basedir"""
    parser.add_argument('--playbook-dir', default=C.config.get_config_value('PLAYBOOK_DIR'), dest='basedir', action='store',
                        # bugfix: the two fragments previously concatenated without a
                        # separating space ("...directory.This sets...")
                        help="Since this tool does not use playbooks, use this as a substitute playbook directory. "
                             "This sets the relative path for many features including roles/ group_vars/ etc.",
                        type=unfrack_path())
def add_check_options(parser):
    """Add options for commands which can run with diagnostic information of tasks"""
    parser.add_argument("-C", "--check", default=False, dest='check', action='store_true',
                        help="don't make any changes; instead, try to predict some of the changes that may occur")
    parser.add_argument('--syntax-check', dest='syntax', action='store_true',
                        help="perform a syntax check on the playbook, but do not execute it")
    parser.add_argument("-D", "--diff", default=C.DIFF_ALWAYS, dest='diff', action='store_true',
                        help="when changing (small) files and templates, show the differences in those"
                             " files; works great with --check")
def add_connect_options(parser):
    """Add options for commands which need to connect to other hosts"""
    connect_group = parser.add_argument_group("Connection Options", "control as whom and how to connect to hosts")
    connect_group.add_argument('-k', '--ask-pass', default=C.DEFAULT_ASK_PASS, dest='ask_pass', action='store_true',
                               help='ask for connection password')
    connect_group.add_argument('--private-key', '--key-file', default=C.DEFAULT_PRIVATE_KEY_FILE, dest='private_key_file',
                               help='use this file to authenticate the connection', type=unfrack_path())
    connect_group.add_argument('-u', '--user', default=C.DEFAULT_REMOTE_USER, dest='remote_user',
                               help='connect as this user (default=%s)' % C.DEFAULT_REMOTE_USER)
    connect_group.add_argument('-c', '--connection', dest='connection', default=C.DEFAULT_TRANSPORT,
                               help="connection type to use (default=%s)" % C.DEFAULT_TRANSPORT)
    connect_group.add_argument('-T', '--timeout', default=C.DEFAULT_TIMEOUT, type=int, dest='timeout',
                               help="override the connection timeout in seconds (default=%s)" % C.DEFAULT_TIMEOUT)
    connect_group.add_argument('--ssh-common-args', default='', dest='ssh_common_args',
                               help="specify common arguments to pass to sftp/scp/ssh (e.g. ProxyCommand)")
    connect_group.add_argument('--sftp-extra-args', default='', dest='sftp_extra_args',
                               help="specify extra arguments to pass to sftp only (e.g. -f, -l)")
    connect_group.add_argument('--scp-extra-args', default='', dest='scp_extra_args',
                               help="specify extra arguments to pass to scp only (e.g. -l)")
    connect_group.add_argument('--ssh-extra-args', default='', dest='ssh_extra_args',
                               help="specify extra arguments to pass to ssh only (e.g. -R)")
    # NOTE(review): re-registering the already-created group looks redundant —
    # confirm whether this call is needed
    parser.add_argument_group(connect_group)
def add_fork_options(parser):
    """Add options for commands that can fork worker processes"""
    parser.add_argument('-f', '--forks', dest='forks', default=C.DEFAULT_FORKS, type=int,
                        help="specify number of parallel processes to use (default=%s)" % C.DEFAULT_FORKS)
def add_inventory_options(parser):
    """Add options for commands that utilize inventory"""
    # action="append" lets -i be given multiple times
    parser.add_argument('-i', '--inventory', '--inventory-file', dest='inventory', action="append",
                        help="specify inventory host path or comma separated host list. --inventory-file is deprecated")
    parser.add_argument('--list-hosts', dest='listhosts', action='store_true',
                        help='outputs a list of matching hosts; does not execute anything else')
    parser.add_argument('-l', '--limit', default=C.DEFAULT_SUBSET, dest='subset',
                        help='further limit selected hosts to an additional pattern')
def add_meta_options(parser):
    """Add options for commands which can launch meta tasks from the command line"""
    parser.add_argument('--force-handlers', default=C.DEFAULT_FORCE_HANDLERS, dest='force_handlers', action='store_true',
                        help="run handlers even if a task fails")
    parser.add_argument('--flush-cache', dest='flush_cache', action='store_true',
                        help="clear the fact cache for every host in inventory")
def add_module_options(parser):
    """Add options for commands that load modules"""
    # default path only used to render the help text; the option itself defaults to None
    module_path = C.config.get_configuration_definition('DEFAULT_MODULE_PATH').get('default', '')
    parser.add_argument('-M', '--module-path', dest='module_path', default=None,
                        help="prepend colon-separated path(s) to module library (default=%s)" % module_path,
                        type=unfrack_path(pathsep=True), action=PrependListAction)
def add_output_options(parser):
    """Add options for commands which can change their output"""
    parser.add_argument('-o', '--one-line', dest='one_line', action='store_true',
                        help='condense output')
    parser.add_argument('-t', '--tree', dest='tree', default=None,
                        help='log output to this directory')
def add_runas_options(parser):
    """
    Add options for commands which can run tasks as another user
    Note that this includes the options from add_runas_prompt_options(). Only one of these
    functions should be used.
    """
    runas_group = parser.add_argument_group("Privilege Escalation Options", "control how and which user you become as on target hosts")
    # consolidated privilege escalation (become)
    runas_group.add_argument("-b", "--become", default=C.DEFAULT_BECOME, action="store_true", dest='become',
                             help="run operations with become (does not imply password prompting)")
    runas_group.add_argument('--become-method', dest='become_method', default=C.DEFAULT_BECOME_METHOD,
                             help='privilege escalation method to use (default=%s)' % C.DEFAULT_BECOME_METHOD +
                                  ', use `ansible-doc -t become -l` to list valid choices.')
    runas_group.add_argument('--become-user', default=None, dest='become_user', type=str,
                             help='run operations as this user (default=%s)' % C.DEFAULT_BECOME_USER)
    # the password-prompt options are shared with add_runas_prompt_options()
    add_runas_prompt_options(parser, runas_group=runas_group)
def add_runas_prompt_options(parser, runas_group=None):
    """
    Add options for commands which need to prompt for privilege escalation credentials
    Note that add_runas_options() includes these options already. Only one of the two functions
    should be used.
    """
    if runas_group is None:
        runas_group = parser.add_argument_group("Privilege Escalation Options",
                                                "control how and which user you become as on target hosts")
    runas_group.add_argument('-K', '--ask-become-pass', dest='become_ask_pass', action='store_true',
                             default=C.DEFAULT_BECOME_ASK_PASS,
                             help='ask for privilege escalation password')
    parser.add_argument_group(runas_group)
def add_runtask_options(parser):
    """Add options for commands that run a task"""
    parser.add_argument('-e', '--extra-vars', dest="extra_vars", action="append",
                        help="set additional variables as key=value or YAML/JSON, if filename prepend with @", default=[])
def add_tasknoplay_options(parser):
    """Add options for commands that run a task w/o a defined play"""
    parser.add_argument('--task-timeout', type=int, dest="task_timeout", action="store", default=C.TASK_TIMEOUT,
                        help="set task timeout limit in seconds, must be positive integer.")
def add_subset_options(parser):
    """Add options for commands which can run a subset of tasks"""
    parser.add_argument('-t', '--tags', dest='tags', default=C.TAGS_RUN, action='append',
                        help="only run plays and tasks tagged with these values")
    parser.add_argument('--skip-tags', dest='skip_tags', default=C.TAGS_SKIP, action='append',
                        help="only run plays and tasks whose tags do not match these values")
def add_vault_options(parser):
    """Add options for loading vault files"""
    parser.add_argument('--vault-id', default=[], dest='vault_ids', action='append', type=str,
                        help='the vault identity to use')
    # prompting for the password and reading it from a file are mutually exclusive
    base_group = parser.add_mutually_exclusive_group()
    base_group.add_argument('--ask-vault-password', '--ask-vault-pass', default=C.DEFAULT_ASK_VAULT_PASS, dest='ask_vault_pass', action='store_true',
                            help='ask for vault password')
    base_group.add_argument('--vault-password-file', '--vault-pass-file', default=[], dest='vault_password_files',
                            help="vault password file", type=unfrack_path(), action='append')
| gpl-3.0 | -8,720,541,197,535,171,000 | -5,797,028,480,873,329,000 | 45.053763 | 149 | 0.621877 | false |
akretion/purchase-workflow | purchase_requisition_delivery_address/__openerp__.py | 4 | 1242 | # -*- coding: utf-8 -*-
#
#
# Author: Yannick Vaucher
# Copyright 2014 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
# Odoo/OpenERP addon manifest for the purchase_requisition_delivery_address module.
{'name': "Purchase Requisition - Delivery Address",
 'summary': "Adds delivery address on Purchase requisition",
 'version': "0.1",
 'author': "Camptocamp",
 'category': "Purchase Management",
 'license': "AGPL-3",
 'complexity': "easy",
 'images': [],
 # modules that must be installed before this one
 'depends': ['purchase_requisition',
             'stock_dropshipping',
             ],
 'demo': [],
 # XML views loaded on install/update
 'data': ['view/purchase_requisition.xml'],
 'test': [],
 'installable': True,
 'auto_install': False,
 }
| agpl-3.0 | 2,471,914,387,362,457,000 | -2,621,962,823,514,537,000 | 32.567568 | 77 | 0.675523 | false |
abtreece/ansible | lib/ansible/plugins/connection/jail.py | 8 | 8028 | # Based on local.py (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
# and chroot.py (c) 2013, Maykel Moya <mmoya@speedyrails.com>
# (c) 2013, Michael Scherer <misc@zarb.org>
# (c) 2015, Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import distutils.spawn
import os
import os.path
import pipes
import subprocess
import traceback
from ansible.errors import AnsibleError
from ansible.module_utils._text import to_bytes
from ansible.plugins.connection import ConnectionBase, BUFSIZE
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
class Connection(ConnectionBase):
    ''' Local BSD Jail based connections

    Runs commands and transfers files inside a FreeBSD jail on the local
    host by shelling out to jls(8)/jexec(8).  File transfers are piped
    through a ``dd`` process executed inside the jail, so the jail must
    provide a ``dd`` binary.
    '''

    transport = 'jail'
    # Pipelining may work. Someone needs to test by setting this to True and
    # having pipelining=True in their ansible.cfg
    has_pipelining = True
    # Some become_methods may work in v2 (sudo works for other chroot-based
    # plugins while su seems to be failing). If some work, check chroot.py to
    # see how to disable just some methods.
    become_methods = frozenset()

    def __init__(self, play_context, new_stdin, *args, **kwargs):
        """Resolve jls/jexec and validate root privileges and jail name.

        :raises AnsibleError: if not running as root, if jls/jexec are not
            in PATH, or if the jail named by ``remote_addr`` is not running.
        """
        super(Connection, self).__init__(play_context, new_stdin, *args, **kwargs)

        # remote_addr carries the jail name for this connection plugin
        self.jail = self._play_context.remote_addr

        if os.geteuid() != 0:
            raise AnsibleError("jail connection requires running as root")

        self.jls_cmd = self._search_executable('jls')
        self.jexec_cmd = self._search_executable('jexec')

        if self.jail not in self.list_jails():
            raise AnsibleError("incorrect jail name %s" % self.jail)

    @staticmethod
    def _search_executable(executable):
        """Return the absolute path of *executable* or raise AnsibleError."""
        cmd = distutils.spawn.find_executable(executable)
        if not cmd:
            raise AnsibleError("%s command not found in PATH" % executable)
        return cmd

    def list_jails(self):
        """Return the names of all currently running jails (list of bytes)."""
        p = subprocess.Popen([self.jls_cmd, '-q', 'name'],
                             stdin=subprocess.PIPE,
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        stdout, stderr = p.communicate()
        return stdout.split()

    def get_jail_path(self):
        """Return the jail's root path on the host filesystem."""
        p = subprocess.Popen([self.jls_cmd, '-j', to_bytes(self.jail), '-q', 'path'],
                             stdin=subprocess.PIPE,
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        stdout, stderr = p.communicate()
        # strip the trailing newline emitted by jls
        return stdout[:-1]

    def _connect(self):
        ''' connect to the jail; nothing to do here '''
        super(Connection, self)._connect()
        if not self._connected:
            display.vvv(u"ESTABLISH JAIL CONNECTION FOR USER: {0}".format(self._play_context.remote_user), host=self.jail)
            self._connected = True

    def _buffered_exec_command(self, cmd, stdin=subprocess.PIPE):
        ''' run a command on the jail.  This is only needed for implementing
        put_file() get_file() so that we don't have to read the whole file
        into memory.

        compared to exec_command() it looses some niceties like being able to
        return the process's exit code immediately.
        '''
        local_cmd = [self.jexec_cmd]
        set_env = ''
        if self._play_context.remote_user is not None:
            local_cmd += ['-U', self._play_context.remote_user]
            # update HOME since -U does not update the jail environment
            set_env = 'HOME=~' + self._play_context.remote_user + ' '

        local_cmd += [self.jail, self._play_context.executable, '-c', set_env + cmd]

        display.vvv("EXEC %s" % (local_cmd,), host=self.jail)
        local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd]
        p = subprocess.Popen(local_cmd, shell=False, stdin=stdin,
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        return p

    def exec_command(self, cmd, in_data=None, sudoable=False):
        ''' run a command on the jail '''
        super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)

        p = self._buffered_exec_command(cmd)
        stdout, stderr = p.communicate(in_data)
        return (p.returncode, stdout, stderr)

    def _prefix_login_path(self, remote_path):
        ''' Make sure that we put files into a standard path

            If a path is relative, then we need to choose where to put it.
            ssh chooses $HOME but we aren't guaranteed that a home dir will
            exist in any given chroot.  So for now we're choosing "/" instead.
            This also happens to be the former default.

            Can revisit using $HOME instead if it's a problem
        '''
        if not remote_path.startswith(os.path.sep):
            remote_path = os.path.join(os.path.sep, remote_path)
        return os.path.normpath(remote_path)

    def put_file(self, in_path, out_path):
        ''' transfer a file from local to jail '''
        super(Connection, self).put_file(in_path, out_path)
        display.vvv("PUT %s TO %s" % (in_path, out_path), host=self.jail)

        out_path = pipes.quote(self._prefix_login_path(out_path))
        try:
            with open(to_bytes(in_path, errors='surrogate_or_strict'), 'rb') as in_file:
                try:
                    p = self._buffered_exec_command('dd of=%s bs=%s' % (out_path, BUFSIZE), stdin=in_file)
                except OSError:
                    raise AnsibleError("jail connection requires dd command in the jail")
                try:
                    stdout, stderr = p.communicate()
                # FIX: was a bare "except:" which also swallowed
                # KeyboardInterrupt/SystemExit and reported them as a
                # transfer failure; narrowed to Exception.
                except Exception:
                    traceback.print_exc()
                    raise AnsibleError("failed to transfer file %s to %s" % (in_path, out_path))
                if p.returncode != 0:
                    raise AnsibleError("failed to transfer file %s to %s:\n%s\n%s" % (in_path, out_path, stdout, stderr))
        except IOError:
            raise AnsibleError("file or module does not exist at: %s" % in_path)

    def fetch_file(self, in_path, out_path):
        ''' fetch a file from jail to local '''
        super(Connection, self).fetch_file(in_path, out_path)
        display.vvv("FETCH %s TO %s" % (in_path, out_path), host=self.jail)

        in_path = pipes.quote(self._prefix_login_path(in_path))
        try:
            p = self._buffered_exec_command('dd if=%s bs=%s' % (in_path, BUFSIZE))
        except OSError:
            raise AnsibleError("jail connection requires dd command in the jail")

        with open(to_bytes(out_path, errors='surrogate_or_strict'), 'wb+') as out_file:
            try:
                chunk = p.stdout.read(BUFSIZE)
                while chunk:
                    out_file.write(chunk)
                    chunk = p.stdout.read(BUFSIZE)
            # FIX: was a bare "except:" — see put_file above.
            except Exception:
                traceback.print_exc()
                raise AnsibleError("failed to transfer file %s to %s" % (in_path, out_path))
            stdout, stderr = p.communicate()
            if p.returncode != 0:
                raise AnsibleError("failed to transfer file %s to %s:\n%s\n%s" % (in_path, out_path, stdout, stderr))

    def close(self):
        ''' terminate the connection; nothing to do here '''
        super(Connection, self).close()
        self._connected = False
| mit | 5,752,387,712,759,958,000 | 7,143,415,394,699,727,000 | 39.751269 | 122 | 0.615471 | false |
"""
Utilities for export a course's XML into a git repository,
committing and pushing the changes.
"""
import logging
import os
import subprocess
from urlparse import urlparse
from django.conf import settings
from django.contrib.auth.models import User
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from xmodule.contentstore.django import contentstore
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.xml_exporter import export_to_xml
log = logging.getLogger(__name__)
GIT_REPO_EXPORT_DIR = getattr(settings, 'GIT_REPO_EXPORT_DIR', None)
GIT_EXPORT_DEFAULT_IDENT = getattr(settings, 'GIT_EXPORT_DEFAULT_IDENT',
{'name': 'STUDIO_EXPORT_TO_GIT',
'email': 'STUDIO_EXPORT_TO_GIT@example.com'})
class GitExportError(Exception):
    """
    Convenience exception class for git export error conditions.

    The class attributes are pre-translated, human-readable messages; call
    sites raise ``GitExportError(GitExportError.SOME_MESSAGE)``.  Note the
    messages are evaluated (and translated) once, at class-definition time.
    """
    # GIT_REPO_EXPORT_DIR is interpolated into the message at import time.
    NO_EXPORT_DIR = _("GIT_REPO_EXPORT_DIR not set or path {0} doesn't exist, "
                      "please create it, or configure a different path with "
                      "GIT_REPO_EXPORT_DIR".format(GIT_REPO_EXPORT_DIR))
    URL_BAD = _('Non writable git url provided. Expecting something like:'
                ' git@github.com:mitocw/edx4edx_lite.git')
    URL_NO_AUTH = _('If using http urls, you must provide the username '
                    'and password in the url. Similar to '
                    'https://user:pass@github.com/user/course.')
    DETACHED_HEAD = _('Unable to determine branch, repo in detached HEAD mode')
    CANNOT_PULL = _('Unable to update or clone git repository.')
    XML_EXPORT_FAIL = _('Unable to export course to xml.')
    CONFIG_ERROR = _('Unable to configure git username and password')
    CANNOT_COMMIT = _('Unable to commit changes. This is usually '
                      'because there are no changes to be committed')
    CANNOT_PUSH = _('Unable to push changes. This is usually '
                    'because the remote repository cannot be contacted')
    BAD_COURSE = _('Bad course location provided')
    MISSING_BRANCH = _('Missing branch on fresh clone')
def cmd_log(cmd, cwd):
    """
    Run *cmd* in directory *cwd* with stderr folded into stdout, log both
    the command line and the captured output at DEBUG level, and return
    the output.

    Raises subprocess.CalledProcessError when the command exits non-zero.
    """
    captured = subprocess.check_output(cmd, cwd=cwd, stderr=subprocess.STDOUT)
    cmd_text = ' '.join(cmd)
    log.debug('Command was: {0!r}. Working directory was: {1!r}'.format(cmd_text, cwd))
    log.debug('Command output was: {0!r}'.format(captured))
    return captured
def export_to_git(course_id, repo, user='', rdir=None):
    """
    Export a course to a git repository.

    Clones *repo* under GIT_REPO_EXPORT_DIR (or resets/pulls an existing
    checkout), exports the course identified by *course_id* to XML inside
    the working tree, then commits and pushes on the repository's current
    branch.

    Arguments:
        course_id: course key accepted by export_to_xml/modulestore.
        repo: writable git URL; must end in ``.git`` or start with
            http(s)/file, and http(s) URLs must embed username:password.
        user: username used for the commit identity when it matches an
            existing Django User; otherwise GIT_EXPORT_DEFAULT_IDENT is used.
        rdir: optional checkout directory name; defaults to the repo name.

    Raises:
        GitExportError: on any validation, git, or XML-export failure.
    """
    # pylint: disable=too-many-statements
    if not GIT_REPO_EXPORT_DIR:
        raise GitExportError(GitExportError.NO_EXPORT_DIR)
    if not os.path.isdir(GIT_REPO_EXPORT_DIR):
        raise GitExportError(GitExportError.NO_EXPORT_DIR)
    # Check for valid writable git url
    if not (repo.endswith('.git') or
            repo.startswith(('http:', 'https:', 'file:'))):
        raise GitExportError(GitExportError.URL_BAD)
    # Check for username and password if using http[s]
    if repo.startswith('http:') or repo.startswith('https:'):
        parsed = urlparse(repo)
        if parsed.username is None or parsed.password is None:
            raise GitExportError(GitExportError.URL_NO_AUTH)
    # Derive the checkout directory name from rdir or the repo URL.
    if rdir:
        rdir = os.path.basename(rdir)
    else:
        rdir = repo.rsplit('/', 1)[-1].rsplit('.git', 1)[0]
    log.debug("rdir = %s", rdir)
    # Pull or clone repo before exporting to xml
    # and update url in case origin changed.
    rdirp = '{0}/{1}'.format(GIT_REPO_EXPORT_DIR, rdir)
    branch = None
    if os.path.exists(rdirp):
        log.info('Directory already exists, doing a git reset and pull '
                 'instead of git clone.')
        cwd = rdirp
        # Get current branch
        cmd = ['git', 'symbolic-ref', '--short', 'HEAD']
        try:
            branch = cmd_log(cmd, cwd).strip('\n')
        except subprocess.CalledProcessError as ex:
            log.exception('Failed to get branch: %r', ex.output)
            raise GitExportError(GitExportError.DETACHED_HEAD)
        # Hard-reset to the remote branch and drop untracked files so the
        # export starts from a clean tree.
        cmds = [
            ['git', 'remote', 'set-url', 'origin', repo],
            ['git', 'fetch', 'origin'],
            ['git', 'reset', '--hard', 'origin/{0}'.format(branch)],
            ['git', 'pull'],
            ['git', 'clean', '-d', '-f'],
        ]
    else:
        cmds = [['git', 'clone', repo]]
        cwd = GIT_REPO_EXPORT_DIR
    cwd = os.path.abspath(cwd)
    for cmd in cmds:
        try:
            cmd_log(cmd, cwd)
        except subprocess.CalledProcessError as ex:
            log.exception('Failed to pull git repository: %r', ex.output)
            raise GitExportError(GitExportError.CANNOT_PULL)
    # export course as xml before commiting and pushing
    root_dir = os.path.dirname(rdirp)
    course_dir = os.path.basename(rdirp).rsplit('.git', 1)[0]
    try:
        export_to_xml(modulestore(), contentstore(), course_id,
                      root_dir, course_dir)
    except (EnvironmentError, AttributeError):
        log.exception('Failed export to xml')
        raise GitExportError(GitExportError.XML_EXPORT_FAIL)
    # Get current branch if not already set (fresh clone path above).
    if not branch:
        cmd = ['git', 'symbolic-ref', '--short', 'HEAD']
        try:
            branch = cmd_log(cmd, os.path.abspath(rdirp)).strip('\n')
        except subprocess.CalledProcessError as ex:
            log.exception('Failed to get branch from freshly cloned repo: %r',
                          ex.output)
            raise GitExportError(GitExportError.MISSING_BRANCH)
    # Now that we have fresh xml exported, set identity, add
    # everything to git, commit, and push to the right branch.
    ident = {}
    try:
        # NOTE(review): this rebinds the *user* parameter to a User object
        # when the lookup succeeds.
        user = User.objects.get(username=user)
        ident['name'] = user.username
        ident['email'] = user.email
    except User.DoesNotExist:
        # That's ok, just use default ident
        ident = GIT_EXPORT_DEFAULT_IDENT
    time_stamp = timezone.now()
    cwd = os.path.abspath(rdirp)
    # NOTE(review): the format string only uses {1} (time_stamp); the user
    # argument ({0}) is ignored — confirm whether the username was meant to
    # appear in the commit message.
    commit_msg = 'Export from Studio at {1}'.format(user, time_stamp)
    try:
        cmd_log(['git', 'config', 'user.email', ident['email']], cwd)
        cmd_log(['git', 'config', 'user.name', ident['name']], cwd)
    except subprocess.CalledProcessError as ex:
        log.exception('Error running git configure commands: %r', ex.output)
        raise GitExportError(GitExportError.CONFIG_ERROR)
    try:
        cmd_log(['git', 'add', '.'], cwd)
        cmd_log(['git', 'commit', '-a', '-m', commit_msg], cwd)
    except subprocess.CalledProcessError as ex:
        log.exception('Unable to commit changes: %r', ex.output)
        raise GitExportError(GitExportError.CANNOT_COMMIT)
    try:
        cmd_log(['git', 'push', '-q', 'origin', branch], cwd)
    except subprocess.CalledProcessError as ex:
        log.exception('Error running git push command: %r', ex.output)
        raise GitExportError(GitExportError.CANNOT_PUSH)
| agpl-3.0 | 5,470,797,802,104,666,000 | -8,974,517,686,703,559,000 | 39.570621 | 81 | 0.624008 | false |
from core.vectors import PhpFile, ModuleExec
from core.module import Module
from core import messages
from core import modules
from core.loggers import log
import os
class Tar(Module):
    """Compress or expand tar archives."""

    aliases = ['tar']

    def init(self):
        """Declare module metadata, payload vectors, and CLI arguments."""
        self.register_info({
            'author': ['Emilio Pinna'],
            'license': 'GPLv3',
        })

        tar_vector = PhpFile(
            payload_path=os.path.join(self.folder, 'php_tar.tpl'),
            name='php_tar',
        )
        self.register_vectors([tar_vector])

        self.register_arguments([
            {'name': 'rtar', 'help': 'Remote Tar file'},
            {'name': 'rfiles', 'help': 'Remote files to compress. If decompressing, set destination folder.', 'nargs': '+'},
            {'name': '--decompress', 'action': 'store_true', 'default': False, 'help': 'Simulate tar -x'},
            {'name': '-z', 'action': 'store_true', 'default': False, 'help': 'Simulate tar -xz for gzip compressed archives'},
            {'name': '-j', 'action': 'store_true', 'default': False, 'help': 'Simulate tar -xj for bzip2 compressed archives'},
        ])

    def run(self):
        """Run the php_tar vector, stripping a gzip/bzip2 wrapper first if asked."""
        # Peel off an outer gzip or bzip2 layer before handing the plain
        # .tar archive to the PHP vector; drop the extra extension too.
        if self.args.get('z'):
            ModuleExec('file_gzip', ['--keep', '--decompress', self.args['rtar']]).run()
            self.args['rtar'] = '.'.join(self.args['rtar'].split('.')[:-1])
        elif self.args.get('j'):
            ModuleExec('file_bzip2', ['--keep', '--decompress', self.args['rtar']]).run()
            self.args['rtar'] = '.'.join(self.args['rtar'].split('.')[:-1])

        # The vector produces output only when something went wrong.
        error_output = self.vectors.get_result(
            name='php_tar',
            format_args=self.args,
        )
        if error_output:
            log.warn(error_output)
            return

        return True
| gpl-3.0 | -5,788,845,709,804,750,000 | 6,618,367,451,727,790,000 | 31.47541 | 131 | 0.504291 | false |
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import sys
from spack import *
from spack.error import SpackError
def _process_manager_validator(values):
if len(values) > 1 and 'slurm' in values:
raise SpackError(
'slurm cannot be activated along with other process managers'
)
class Mvapich2(AutotoolsPackage):
    """MVAPICH2 is an MPI implementation for Infiniband networks."""
    homepage = "http://mvapich.cse.ohio-state.edu/"
    url = "http://mvapich.cse.ohio-state.edu/download/mvapich/mv2/mvapich2-2.2.tar.gz"
    list_url = "http://mvapich.cse.ohio-state.edu/downloads/"
    version('2.3rc2', '6fcf22fe2a16023b462ef57614daa357')
    version('2.3rc1', '386d79ae36b2136d203826465ad8b6cc')
    version('2.3a', '87c3fbf8a755b53806fa9ecb21453445')
    # Prefer the latest stable release
    version('2.3', sha256='01d5fb592454ddd9ecc17e91c8983b6aea0e7559aa38f410b111c8ef385b50dd', preferred=True)
    version('2.2', '939b65ebe5b89a5bc822cdab0f31f96e')
    version('2.1', '0095ceecb19bbb7fb262131cb9c2cdd6')
    version('2.0', '9fbb68a4111a8b6338e476dc657388b4')
    provides('mpi')
    provides('mpi@:3.0')
    variant('debug', default=False,
            description='Enable debug info and error messages at run-time')
    variant('cuda', default=False,
            description='Enable CUDA extension')
    variant('regcache', default=True,
            description='Enable memory registration cache')
    # Accepted values are:
    #   single     - No threads (MPI_THREAD_SINGLE)
    #   funneled   - Only the main thread calls MPI (MPI_THREAD_FUNNELED)
    #   serialized - User serializes calls to MPI (MPI_THREAD_SERIALIZED)
    #   multiple   - Fully multi-threaded (MPI_THREAD_MULTIPLE)
    #   runtime    - Alias to "multiple"
    variant(
        'threads',
        default='multiple',
        values=('single', 'funneled', 'serialized', 'multiple'),
        multi=False,
        description='Control the level of thread support'
    )
    # 32 is needed when job size exceeds 32768 cores
    variant(
        'ch3_rank_bits',
        default='32',
        values=('16', '32'),
        multi=False,
        description='Number of bits allocated to the rank field (16 or 32)'
    )
    # Multi-valued; _process_manager_validator forbids mixing slurm with
    # any other process manager.
    variant(
        'process_managers',
        description='List of the process managers to activate',
        values=('slurm', 'hydra', 'gforker', 'remshell'),
        multi=True,
        validator=_process_manager_validator
    )
    # Single-valued: exactly one network fabric per build.
    variant(
        'fabrics',
        description='The fabric enabled for this build',
        default='psm',
        values=(
            'psm', 'sock', 'nemesisib', 'nemesis', 'mrail', 'nemesisibtcp',
            'nemesistcpib'
        )
    )
    variant(
        'alloca',
        default=False,
        description='Use alloca to allocate temporary memory if available'
    )
    variant(
        'file_systems',
        description='List of the ROMIO file systems to activate',
        values=('lustre', 'gpfs', 'nfs', 'ufs'),
        multi=True
    )
    depends_on('bison', type='build')
    depends_on('libpciaccess', when=(sys.platform != 'darwin'))
    depends_on('cuda', when='+cuda')
    depends_on('psm', when='fabrics=psm')
    # Replace the compiler wrappers' hard-coded compiler paths at install time.
    filter_compiler_wrappers(
        'mpicc', 'mpicxx', 'mpif77', 'mpif90', 'mpifort', relative_root='bin'
    )
    @property
    def libs(self):
        """Libraries for a ``spec['mpi'].libs`` query; adds the C++ bindings
        library when the query asks for 'cxx'."""
        query_parameters = self.spec.last_query.extra_parameters
        libraries = ['libmpi']
        if 'cxx' in query_parameters:
            libraries = ['libmpicxx'] + libraries
        return find_libraries(
            libraries, root=self.prefix, shared=True, recursive=True
        )
    @property
    def process_manager_options(self):
        """Configure flags selecting the process manager(s).

        slurm is exclusive (enforced by the variant validator) and uses PMI2;
        any combination of the other managers is joined with ':'.
        """
        spec = self.spec
        other_pms = []
        for x in ('hydra', 'gforker', 'remshell'):
            if 'process_managers={0}'.format(x) in spec:
                other_pms.append(x)
        opts = []
        if len(other_pms) > 0:
            opts = ['--with-pm=%s' % ':'.join(other_pms)]
        # See: http://slurm.schedmd.com/mpi_guide.html#mvapich2
        if 'process_managers=slurm' in spec:
            opts = [
                '--with-pmi=pmi2',
                '--with-pm=slurm'
            ]
        return opts
    @property
    def network_options(self):
        """Configure flags selecting the ch3 channel for the chosen fabric."""
        opts = []
        # From here on I can suppose that only one variant has been selected
        if 'fabrics=psm' in self.spec:
            opts = [
                "--with-device=ch3:psm",
                "--with-psm={0}".format(self.spec['psm'].prefix)
            ]
        elif 'fabrics=sock' in self.spec:
            opts = ["--with-device=ch3:sock"]
        elif 'fabrics=nemesistcpib' in self.spec:
            opts = ["--with-device=ch3:nemesis:tcp,ib"]
        elif 'fabrics=nemesisibtcp' in self.spec:
            opts = ["--with-device=ch3:nemesis:ib,tcp"]
        elif 'fabrics=nemesisib' in self.spec:
            opts = ["--with-device=ch3:nemesis:ib"]
        elif 'fabrics=nemesis' in self.spec:
            opts = ["--with-device=ch3:nemesis"]
        elif 'fabrics=mrail' in self.spec:
            opts = ["--with-device=ch3:mrail", "--with-rdma=gen2"]
        return opts
    @property
    def file_system_options(self):
        """Configure flag enabling the selected ROMIO file systems, if any."""
        spec = self.spec
        fs = []
        for x in ('lustre', 'gpfs', 'nfs', 'ufs'):
            if 'file_systems={0}'.format(x) in spec:
                fs.append(x)
        opts = []
        if len(fs) > 0:
            opts.append('--with-file-system=%s' % '+'.join(fs))
        return opts
    def setup_environment(self, spack_env, run_env):
        """Sanitize the build environment and set run-time SLURM hints."""
        spec = self.spec
        # mvapich2 configure fails when F90 and F90FLAGS are set
        spack_env.unset('F90')
        spack_env.unset('F90FLAGS')
        if 'process_managers=slurm' in spec:
            run_env.set('SLURM_MPI_TYPE', 'pmi2')
    def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
        """Point dependents' MPI* variables at the installed wrappers and
        make the wrappers use Spack's compiler wrappers underneath."""
        spack_env.set('MPICC', join_path(self.prefix.bin, 'mpicc'))
        spack_env.set('MPICXX', join_path(self.prefix.bin, 'mpicxx'))
        spack_env.set('MPIF77', join_path(self.prefix.bin, 'mpif77'))
        spack_env.set('MPIF90', join_path(self.prefix.bin, 'mpif90'))
        spack_env.set('MPICH_CC', spack_cc)
        spack_env.set('MPICH_CXX', spack_cxx)
        spack_env.set('MPICH_F77', spack_f77)
        spack_env.set('MPICH_F90', spack_fc)
        spack_env.set('MPICH_FC', spack_fc)
    def setup_dependent_package(self, module, dependent_spec):
        """Expose wrapper paths and C++ link libraries on the spec object."""
        self.spec.mpicc = join_path(self.prefix.bin, 'mpicc')
        self.spec.mpicxx = join_path(self.prefix.bin, 'mpicxx')
        self.spec.mpifc = join_path(self.prefix.bin, 'mpif90')
        self.spec.mpif77 = join_path(self.prefix.bin, 'mpif77')
        self.spec.mpicxx_shared_libs = [
            join_path(self.prefix.lib, 'libmpicxx.{0}'.format(dso_suffix)),
            join_path(self.prefix.lib, 'libmpi.{0}'.format(dso_suffix))
        ]
    @run_before('configure')
    def die_without_fortran(self):
        """Fail fast when no Fortran compiler is available."""
        # Until we can pass variants such as +fortran through virtual
        # dependencies depends_on('mpi'), require Fortran compiler to
        # avoid delayed build errors in dependents.
        if (self.compiler.f77 is None) or (self.compiler.fc is None):
            raise InstallError(
                'Mvapich2 requires both C and Fortran compilers!'
            )
    def configure_args(self):
        """Assemble the full ./configure argument list from the variants."""
        spec = self.spec
        args = [
            '--enable-shared',
            '--enable-romio',
            '--disable-silent-rules',
            '--disable-new-dtags',
            '--enable-fortran=all',
            "--enable-threads={0}".format(spec.variants['threads'].value),
            "--with-ch3-rank-bits={0}".format(
                spec.variants['ch3_rank_bits'].value),
        ]
        args.extend(self.enable_or_disable('alloca'))
        if '+debug' in self.spec:
            args.extend([
                '--disable-fast',
                '--enable-error-checking=runtime',
                '--enable-error-messages=all',
                # Permits debugging with TotalView
                '--enable-g=dbg',
                '--enable-debuginfo'
            ])
        else:
            args.append('--enable-fast=all')
        if '+cuda' in self.spec:
            args.extend([
                '--enable-cuda',
                '--with-cuda={0}'.format(spec['cuda'].prefix)
            ])
        else:
            args.append('--disable-cuda')
        if '+regcache' in self.spec:
            args.append('--enable-registration-cache')
        else:
            args.append('--disable-registration-cache')
        args.extend(self.process_manager_options)
        args.extend(self.network_options)
        args.extend(self.file_system_options)
        return args
| lgpl-2.1 | 9,199,561,519,514,030,000 | 6,824,015,567,760,331,000 | 34.260563 | 109 | 0.586379 | false |
#!/usr/bin/env python3
# Copyright 2015 The Meson development team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# CLI usage text; the %s placeholder is presumably filled with the program
# name (sys.argv[0]) where the message is printed — confirm at the call site.
help_message = """Usage: %s <config.h.meson>
This script reads config.h.meson, looks for header
checks and writes the corresponding meson declaration.
Copy config.h.in to config.h.meson, replace #undef
with #mesondefine and run this. We can't do this automatically
because some configure scripts have #undef statements
that are unrelated to configure checks.
"""
import sys
# Add stuff here as it is encountered.
function_data = \
{'HAVE_FEENABLEEXCEPT': ('feenableexcept', 'fenv.h'),
'HAVE_FECLEAREXCEPT': ('feclearexcept', 'fenv.h'),
'HAVE_FEDISABLEEXCEPT': ('fedisableexcept', 'fenv.h'),
'HAVE_MMAP': ('mmap', 'sys/mman.h'),
'HAVE_GETPAGESIZE': ('getpagesize', 'unistd.h'),
'HAVE_GETISAX': ('getisax', 'sys/auxv.h'),
'HAVE_GETTIMEOFDAY': ('gettimeofday', 'sys/time.h'),
'HAVE_MPROTECT': ('mprotect', 'sys/mman.h'),
'HAVE_POSIX_MEMALIGN': ('posix_memalign', 'stdlib.h'),
'HAVE_SIGACTION': ('sigaction', 'signal.h'),
'HAVE_ALARM': ('alarm', 'unistd.h'),
'HAVE_CTIME_R': ('ctime_r', 'time.h'),
'HAVE_DRAND48': ('drand48', 'stdlib.h'),
'HAVE_FLOCKFILE': ('flockfile', 'stdio.h'),
'HAVE_FORK': ('fork', 'unistd.h'),
'HAVE_FUNLOCKFILE': ('funlockfile', 'stdio.h'),
'HAVE_GETLINE': ('getline', 'stdio.h'),
'HAVE_LINK': ('link', 'unistd.h'),
'HAVE_RAISE': ('raise', 'signal.h'),
'HAVE_STRNDUP': ('strndup', 'string.h'),
'HAVE_SCHED_GETAFFINITY': ('sched_getaffinity', 'sched.h'),
'HAVE_WAITPID': ('waitpid', 'sys/wait.h'),
'HAVE_XRENDERCREATECONICALGRADIENT': ('XRenderCreateConicalGradient', 'xcb/render.h'),
'HAVE_XRENDERCREATELINEARGRADIENT': ('XRenderCreateLinearGradient', 'xcb/render.h'),
'HAVE_XRENDERCREATERADIALGRADIENT': ('XRenderCreateRadialGradient', 'xcb/render.h'),
'HAVE_XRENDERCREATESOLIDFILL': ('XRenderCreateSolidFill', 'xcb/render.h'),
'HAVE_DCGETTEXT': ('dcgettext', 'libintl.h'),
'HAVE_ENDMNTENT': ('endmntent', 'mntent.h'),
'HAVE_ENDSERVENT': ('endservent', 'netdb.h'),
'HAVE_EVENTFD': ('eventfd', 'sys/eventfd.h'),
'HAVE_FALLOCATE': ('fallocate', 'fcntl.h'),
'HAVE_FCHMOD': ('fchmod', 'sys/stat.h'),
'HAVE_FCHOWN': ('fchown', 'unistd.h'),
'HAVE_FDWALK': ('fdwalk', 'stdlib.h'),
'HAVE_FSYNC': ('fsync', 'unistd.h'),
'HAVE_GETC_UNLOCKED': ('getc_unlocked', 'stdio.h'),
'HAVE_GETFSSTAT': ('getfsstat', 'sys/mount.h'),
'HAVE_GETMNTENT_R': ('getmntent_r', 'mntent.h'),
'HAVE_GETPROTOBYNAME_R': ('getprotobyname_r', 'netdb.h'),
'HAVE_GETRESUID': ('getresuid', 'unistd.h'),
'HAVE_GETVFSSTAT': ('getvfsstat', 'sys/statvfs.h'),
'HAVE_GMTIME_R': ('gmtime_r', 'time.h'),
'HAVE_HASMNTOPT': ('hasmntopt', 'mntent.h'),
'HAVE_IF_INDEXTONAME': ('if_indextoname', 'net/if.h'),
'HAVE_IF_NAMETOINDEX': ('if_nametoindex', 'net/if.h'),
'HAVE_INOTIFY_INIT1': ('inotify_init1', 'sys/inotify.h'),
'HAVE_ISSETUGID': ('issetugid', 'unistd.h'),
'HAVE_KEVENT': ('kevent', 'sys/event.h'),
'HAVE_KQUEUE': ('kqueue', 'sys/event.h'),
'HAVE_LCHMOD': ('lchmod', 'sys/stat.h'),
'HAVE_LCHOWN': ('lchown', 'unistd.h'),
'HAVE_LSTAT': ('lstat', 'sys/stat.h'),
'HAVE_MEMCPY': ('memcpy', 'string.h'),
'HAVE_MEMALIGN': ('memalign', 'stdlib.h'),
'HAVE_MEMMEM': ('memmem', 'string.h'),
'HAVE_NEWLOCALE': ('newlocale', 'locale.h'),
'HAVE_PIPE2': ('pipe2', 'fcntl.h'),
'HAVE_POLL': ('poll', 'poll.h'),
'HAVE_PRLIMIT': ('prlimit', 'sys/resource.h'),
'HAVE_PTHREAD_ATTR_SETSTACKSIZE': ('pthread_attr_setstacksize', 'pthread.h'),
'HAVE_PTHREAD_CONDATTR_SETCLOCK': ('pthread_condattr_setclock', 'pthread.h'),
'HAVE_PTHREAD_COND_TIMEDWAIT_RELATIVE_NP': ('pthread_cond_timedwait_relative_np', 'pthread.h'),
'HAVE_READLINK': ('readlink', 'unistd.h'),
'HAVE_RES_INIT': ('res_init', 'resolv.h'),
'HAVE_SENDMMSG': ('sendmmsg', 'sys/socket.h'),
'HAVE_SOCKET': ('socket', 'sys/socket.h'),
'HAVE_GETENV': ('getenv', 'stdlib.h'),
'HAVE_SETENV': ('setenv', 'stdlib.h'),
'HAVE_PUTENV': ('putenv', 'stdlib.h'),
'HAVE_UNSETENV': ('unsetenv', 'stdlib.h'),
'HAVE_SETMNTENT': ('setmntent', 'mntent.h'),
'HAVE_SNPRINTF': ('snprintf', 'stdio.h'),
'HAVE_SPLICE': ('splice', 'fcntl.h'),
'HAVE_STATFS': ('statfs', 'mount.h'),
'HAVE_STATVFS': ('statvfs', 'sys/statvfs.h'),
'HAVE_STPCOPY': ('stpcopy', 'string.h'),
'HAVE_STRCASECMP': ('strcasecmp', 'strings.h'),
'HAVE_STRLCPY': ('strlcpy', 'string.h'),
'HAVE_STRNCASECMP': ('strncasecmp', 'strings.h'),
'HAVE_STRSIGNAL': ('strsignal', 'signal.h'),
'HAVE_STRTOD_L': ('strtod_l', 'stdlib.h'),
'HAVE_STRTOLL_L': ('strtoll_l', 'stdlib.h'),
'HAVE_STRTOULL_L': ('strtoull_l', 'stdlib.h'),
'HAVE_SYMLINK': ('symlink', 'unistd.h'),
'HAVE_SYSCTLBYNAME': ('sysctlbyname', 'sys/sysctl.h'),
'HAVE_TIMEGM': ('timegm', 'time.h'),
'HAVE_USELOCALE': ('uselocale', 'xlocale.h'),
'HAVE_UTIMES': ('utimes', 'sys/time.h'),
'HAVE_VALLOC': ('valloc', 'stdlib.h'),
'HAVE_VASPRINTF': ('vasprintf', 'stdio.h'),
'HAVE_VSNPRINTF': ('vsnprintf', 'stdio.h'),
'HAVE_BCOPY': ('bcopy', 'strings.h'),
'HAVE_STRERROR': ('strerror', 'string.h'),
'HAVE_MEMMOVE': ('memmove', 'string.h'),
'HAVE_STRTOIMAX': ('strtoimax', 'inttypes.h'),
'HAVE_STRTOLL': ('strtoll', 'stdlib.h'),
'HAVE_STRTOQ': ('strtoq', 'stdlib.h'),
'HAVE_ACCEPT4': ('accept4', 'sys/socket.h'),
'HAVE_CHMOD': ('chmod', 'sys/stat.h'),
'HAVE_CHOWN': ('chown', 'unistd.h'),
'HAVE_FSTAT': ('fstat', 'sys/stat.h'),
'HAVE_GETADDRINFO': ('getaddrinfo', 'netdb.h'),
'HAVE_GETGRGID_R': ('getgrgid_r', 'grp.h'),
'HAVE_GETGRNAM_R': ('getgrnam_r', 'grp.h'),
'HAVE_GETGROUPS': ('getgroups', 'grp.h'),
'HAVE_GETOPT_LONG': ('getopt_long', 'getopt.h'),
'HAVE_GETPWNAM_R': ('getpwnam', 'pwd.h'),
'HAVE_GETPWUID_R': ('getpwuid_r', 'pwd.h'),
'HAVE_GETUID': ('getuid', 'unistd.h'),
'HAVE_LRINTF': ('lrintf', 'math.h'),
'HAVE_DECL_ISNAN': ('isnan', 'math.h'),
'HAVE_DECL_ISINF': ('isinf', 'math.h'),
'HAVE_ROUND': ('round', 'math.h'),
'HAVE_NEARBYINT': ('nearbyint', 'math.h'),
'HAVE_RINT': ('rint', 'math.h'),
'HAVE_MKFIFO': ('mkfifo', 'sys/stat.h'),
'HAVE_MLOCK': ('mlock', 'sys/mman.h'),
'HAVE_NANOSLEEP': ('nanosleep', 'time.h'),
'HAVE_PIPE': ('pipe', 'unistd.h'),
'HAVE_PPOLL': ('ppoll', 'poll.h'),
'HAVE_REGEXEC': ('regexec', 'regex.h'),
'HAVE_SETEGID': ('setegid', 'unistd.h'),
'HAVE_SETEUID': ('seteuid', 'unistd.h'),
'HAVE_SETPGID': ('setpgid', 'unistd.h'),
'HAVE_SETREGID': ('setregid', 'unistd.h'),
'HAVE_SETRESGID': ('setresgid', 'unistd.h'),
'HAVE_SETRESUID': ('setresuid', 'unistd.h'),
'HAVE_SHM_OPEN': ('shm_open', 'fcntl.h'),
'HAVE_SLEEP': ('sleep', 'unistd.h'),
'HAVE_STRERROR_R': ('strerror_r', 'string.h'),
'HAVE_STRTOF': ('strtof', 'stdlib.h'),
'HAVE_SYSCONF': ('sysconf', 'unistd.h'),
'HAVE_USLEEP': ('usleep', 'unistd.h'),
'HAVE_VFORK': ('vfork', 'unistd.h'),
'HAVE_MALLOC': ('malloc', 'stdlib.h'),
'HAVE_CALLOC': ('calloc', 'stdlib.h'),
'HAVE_REALLOC': ('realloc', 'stdlib.h'),
'HAVE_FREE': ('free', 'stdlib.h'),
'HAVE_ALLOCA': ('alloca', 'alloca.h'),
'HAVE_QSORT': ('qsort', 'stdlib.h'),
'HAVE_ABS': ('abs', 'stdlib.h'),
'HAVE_MEMSET': ('memset', 'string.h'),
'HAVE_MEMCMP': ('memcmp', 'string.h'),
'HAVE_STRLEN': ('strlen', 'string.h'),
'HAVE_STRLCAT': ('strlcat', 'string.h'),
'HAVE_STRDUP': ('strdup', 'string.h'),
'HAVE__STRREV': ('_strrev', 'string.h'),
'HAVE__STRUPR': ('_strupr', 'string.h'),
'HAVE__STRLWR': ('_strlwr', 'string.h'),
'HAVE_INDEX': ('index', 'strings.h'),
'HAVE_RINDEX': ('rindex', 'strings.h'),
'HAVE_STRCHR': ('strchr', 'string.h'),
'HAVE_STRRCHR': ('strrchr', 'string.h'),
'HAVE_STRSTR': ('strstr', 'string.h'),
'HAVE_STRTOL': ('strtol', 'stdlib.h'),
'HAVE_STRTOUL': ('strtoul', 'stdlib.h'),
'HAVE_STRTOULL': ('strtoull', 'stdlib.h'),
'HAVE_STRTOD': ('strtod', 'stdlib.h'),
'HAVE_ATOI': ('atoi', 'stdlib.h'),
'HAVE_ATOF': ('atof', 'stdlib.h'),
'HAVE_STRCMP': ('strcmp', 'string.h'),
'HAVE_STRNCMP': ('strncmp', 'string.h'),
'HAVE_VSSCANF': ('vsscanf', 'stdio.h'),
'HAVE_CHROOT': ('chroot', 'unistd.h'),
'HAVE_CLOCK': ('clock', 'time.h'),
'HAVE_CLOCK_GETRES': ('clock_getres', 'time.h'),
'HAVE_CLOCK_GETTIME': ('clock_gettime', 'time.h'),
'HAVE_CLOCK_SETTIME': ('clock_settime', 'time.h'),
'HAVE_CONFSTR': ('confstr', 'time.h'),
'HAVE_CTERMID': ('ctermid', 'stdio.h'),
'HAVE_DIRFD': ('dirfd', 'dirent.h'),
'HAVE_DLOPEN': ('dlopen', 'dlfcn.h'),
'HAVE_DUP2': ('dup2', 'unistd.h'),
'HAVE_DUP3': ('dup3', 'unistd.h'),
'HAVE_EPOLL_CREATE1': ('epoll_create1', 'sys/epoll.h'),
'HAVE_ERF': ('erf', 'math.h'),
'HAVE_ERFC': ('erfc', 'math.h'),
'HAVE_EXECV': ('execv', 'unistd.h'),
'HAVE_FACCESSAT': ('faccessat', 'unistd.h'),
'HAVE_FCHDIR': ('fchdir', 'unistd.h'),
'HAVE_FCHMODAT': ('fchmodat', 'sys/stat.h'),
'HAVE_FDATASYNC': ('fdatasync', 'unistd.h'),
'HAVE_FDOPENDIR': ('fdopendir', 'dirent.h'),
'HAVE_FEXECVE': ('fexecve', 'unistd.h'),
'HAVE_FLOCK': ('flock', 'sys/file.h'),
'HAVE_FORKPTY': ('forkpty', 'pty.h'),
'HAVE_FPATHCONF': ('fpathconf', 'unistd.h'),
'HAVE_FSTATAT': ('fstatat', 'unistd.h'),
'HAVE_FSTATVFS': ('fstatvfs', 'sys/statvfs.h'),
'HAVE_FTELLO': ('ftello', 'stdio.h'),
'HAVE_FTIME': ('ftime', 'sys/timeb.h'),
'HAVE_FTRUNCATE': ('ftruncate', 'unistd.h'),
'HAVE_FUTIMENS': ('futimens', 'sys/stat.h'),
'HAVE_FUTIMES': ('futimes', 'sys/time.h'),
'HAVE_GAI_STRERROR': ('gai_strerror', 'netdb.h'),
'HAVE_GETGROUPLIST': ('getgrouplist', 'grp.h'),
'HAVE_GETHOSTBYNAME': ('gethostbyname', 'netdb.h'),
'HAVE_GETHOSTBYNAME_R': ('gethostbyname_r', 'netdb.h'),
'HAVE_GETITIMER': ('getitimer', 'sys/time.h'),
'HAVE_GETLOADAVG': ('getloadavg', 'stdlib.h'),
'HAVE_GETLOGIN': ('getlogin', 'unistd.h'),
'HAVE_GETNAMEINFO': ('getnameinfo', 'netdb.h'),
'HAVE_GETPEERNAME': ('getpeername', 'sys/socket.h'),
'HAVE_GETPGID': ('getpgid', 'unistd.h'),
'HAVE_GETPGRP': ('getpgrp', 'unistd.h'),
'HAVE_GETPID': ('getpid', 'unistd.h'),
'HAVE_GETPRIORITY': ('getpriority', 'sys/resource.h'),
'HAVE_GETPWENT': ('getpwent', 'pwd.h'),
'HAVE_GETRANDOM': ('getrandom', 'linux/random.h'),
'HAVE_GETRESGID': ('getresgid', 'unistd.h'),
'HAVE_GETSID': ('getsid', 'unistd.h'),
'HAVE_GETSPENT': ('getspent', 'shadow.h'),
'HAVE_GETSPNAM': ('getspnam', 'shadow.h'),
'HAVE_GETWD': ('getwd', 'unistd.h'),
'HAVE_HSTRERROR': ('hstrerror', 'netdb.h'),
'HAVE_HTOLE64': ('htole64', 'endian.h'),
'HAVE_IF_NAMEINDEX': ('if_nameindex', 'net/if.h'),
'HAVE_INET_ATON': ('inet_aton', 'arpa/inet.h'),
'HAVE_INET_PTON': ('inet_pton', 'arpa/inet.h'),
'HAVE_INITGROUPS': ('initgroups', 'grp.h'),
'HAVE_KILL': ('kill', 'signal.h'),
'HAVE_KILLPG': ('killpg', 'signal.h'),
'HAVE_LINKAT': ('linkat', 'unistd.h'),
'HAVE_LOCKF': ('lockf', 'unistd.h'),
'HAVE_LUTIMES': ('lutimes', 'sys/time.h'),
'HAVE_MAKEDEV': ('makedev', 'sys/sysmacros.h'),
'HAVE_MBRTOWC': ('mbrtowc', 'wchar.h'),
'HAVE_MEMRCHR': ('memrchr', 'string.h'),
'HAVE_MKDIRAT': ('mkdirat', 'sys/stat.h'),
'HAVE_MKFIFOAT': ('mkfifoat', 'sys/stat.h'),
'HAVE_MKNOD': ('mknod', 'unistd.h'),
'HAVE_MKNODAT': ('mknodat', 'unistd.h'),
'HAVE_MKTIME': ('mktime', 'unistd.h'),
'HAVE_MKREMAP': ('mkremap', 'sys/mman.h'),
'HAVE_NICE': ('nice', 'unistd.h'),
'HAVE_OPENAT': ('openat', 'fcntl.h'),
'HAVE_OPENPTY': ('openpty', 'pty.h'),
'HAVE_PATHCONF': ('pathconf', 'unistd.h'),
'HAVE_PAUSE': ('pause', 'unistd.h'),
'HAVE_PREAD': ('pread', 'unistd.h'),
'HAVE_PTHREAD_KILL': ('pthread_kill', 'signal.h'),
'HAVE_PTHREAD_SIGMASK': ('pthread_sigmask', 'signal.h'),
'HAVE_PWRITE': ('pwrite', 'unistd.h'),
'HAVE_READLINKAT': ('readlinkat', 'unistd.h'),
'HAVE_READV': ('readv', 'sys/uio.h'),
'HAVE_RENAMEAT': ('renamat', 'stdio.h'),
'HAVE_SCHED_GET_PRIORITY_MAX': ('sched_get_priority_max', 'sched.h'),
'HAVE_SCHED_RR_GET_INTERVAL': ('sched_rr_get_interval', 'sched.h'),
'HAVE_SCHED_SETAFFINITY': ('sched_setaffinity', 'sched.h'),
'HAVE_SCHED_SETPARAM': ('sched_setparam', 'sched.h'),
'HAVE_SCHED_SETSCHEDULER': ('sched_setscheduler', 'sched.h'),
'HAVE_SELECT': ('select', 'sys/select.h'),
'HAVE_SEM_GETVALUE': ('sem_getvalue', 'semaphore.h'),
'HAVE_SEM_OPEN': ('sem_open', 'semaphore.h'),
'HAVE_SEM_TIMEDWAIT': ('sem_timedwait', 'semaphore.h'),
'HAVE_SEM_UNLINK': ('sem_unlink', 'semaphore.h'),
'HAVE_SENDFILE': ('sendfile', 'sys/sendfile.h'),
'HAVE_SETGID': ('setgid', 'unistd.h'),
'HAVE_SETGROUPS': ('setgroups', 'grp.h'),
'HAVE_SETHOSTNAME': ('sethostname', 'unistd.h'),
'HAVE_SETITIMER': ('setitimer', 'sys/time.h'),
'HAVE_SETLOCALE': ('setlocale', 'locale.h'),
'HAVE_SETPGRP': ('setpgrp', 'unistd.h'),
'HAVE_SETPRIORITY': ('setpriority', 'sys/resource.h'),
'HAVE_SETREUID': ('setreuid', 'unistd.h'),
'HAVE_SETSID': ('setsid', 'unistd.h'),
'HAVE_SETUID': ('setuid', 'unistd.h'),
'HAVE_SETVBUF': ('setvbuf', 'unistd.h'),
'HAVE_SIGALTSTACK': ('sigaltstack', 'signal.h'),
'HAVE_SIGINTERRUPT': ('siginterrupt', 'signal.h'),
'HAVE_SIGPENDING': ('sigpending', 'signal.h'),
'HAVE_SIGRELSE': ('sigrelse', 'signal.h'),
'HAVE_SIGTIMEDWAIT': ('sigtimedwait', 'signal.h'),
'HAVE_SIGWAIT': ('sigwait', 'signal.h'),
'HAVE_SIGWAITINFO': ('sigwaitinfo', 'signal.h'),
'HAVE_SOCKETPAIR': ('socketpair', 'sys/socket.h'),
'HAVE_STRFTIME': ('strftime', 'time.h'),
'HAVE_SYMLINKAT': ('symlinkat', 'unistd.h'),
'HAVE_SYNC': ('sync', 'unistd.h'),
'HAVE_TCGETPGRP': ('tcgetpgrp', 'unistd.h'),
'HAVE_TCSETPGRP': ('tcsetpgrp', 'unistd.h'),
'HAVE_TEMPNAM': ('tempnam', 'stdio.h'),
'HAVE_TIMES': ('times', 'sys/times.h'),
'HAVE_TEMPFILE': ('tempfile', 'stdio.h'),
'HAVE_TMPNAM': ('tmpnam', 'stdio.h'),
'HAVE_TMPNAM_R': ('tmpnam_r', 'stdio.h'),
'HAVE_TRUNCATE': ('truncate', 'unistd.h'),
'HAVE_TZNAME': ('tzname', 'time.h'),
'HAVE_UNAME': ('uname', 'sys/utsname.h'),
'HAVE_UNLINKAT': ('unlinkat', 'unistd.h'),
'HAVE_UTIMENSAT': ('utimensat', 'sys/stat.h'),
'HAVE_WAIT3': ('wait3', 'sys/wait.h'),
'HAVE_WAIT4': ('wait4', 'sys/wait.h'),
'HAVE_WAITID': ('waitid', 'sys/wait.h'),
'HAVE_WRITEV': ('writev', 'sys/uio.h'),
'HAVE_WMEMCMP': ('wmemcmp', 'wchar.h'),
'HAVE_ATAN': ('atan', 'math.h'),
'HAVE_ATAN2': ('atan2', 'math.h'),
'HAVE_ACOS': ('acos', 'math.h'),
'HAVE_ACOSH': ('acosh', 'math.h'),
'HAVE_ASIN': ('asin', 'math.h'),
'HAVE_ASINH': ('asinh', 'math.h'),
'HAVE_ATANH': ('atanh', 'math.h'),
'HAVE_CEIL': ('ceil', 'math.h'),
'HAVE_COPYSIGN': ('copysign', 'math.h'),
'HAVE_COS': ('cos', 'math.h'),
'HAVE_COSH': ('cosh', 'math.h'),
'HAVE_COSF': ('cosf', 'math.h'),
'HAVE_EXPM1': ('expm1', 'math.h'),
'HAVE_FABS': ('fabs', 'math.h'),
'HAVE_FINITE': ('finite', 'math.h'),
'HAVE_FLOOR': ('floor', 'math.h'),
'HAVE_GAMMA': ('gamma', 'math.h'),
'HAVE_HYPOT': ('hypot', 'math.h'),
'HAVE_ISINF': ('isinf', 'math.h'),
'HAVE_LOG': ('log', 'math.h'),
'HAVE_LOG1P': ('log1p', 'math.h'),
'HAVE_LOG2': ('log2', 'math.h'),
'HAVE_LGAMMA': ('lgamma', 'math.h'),
'HAVE_POW': ('pow', 'math.h'),
'HAVE_SCALBN': ('scalbn', 'math.h'),
'HAVE_SIN': ('sin', 'math.h'),
'HAVE_SINF': ('sinf', 'math.h'),
'HAVE_SINH': ('sinh', 'math.h'),
'HAVE_SQRT': ('sqrt', 'math.h'),
'HAVE_TGAMMA': ('tgamma', 'math.h'),
'HAVE_FSEEKO': ('fseeko', 'stdio.h'),
'HAVE_FSEEKO64': ('fseeko64', 'stdio.h'),
'HAVE_SETJMP': ('setjmp', 'setjmp.h'),
'HAVE_PTHREAD_SETNAME_NP': ('pthread_setname_np', 'pthread.h'),
'HAVE_PTHREAD_SET_NAME_NP': ('pthread_set_name_np', 'pthread.h'),
}
# Collected results: (DEFINE, header-name), (DEFINE, func, header) and
# (DEFINE, type-name) tuples pulled out of the autoconf-style template.
headers = []
functions = []
sizes = []

# Require exactly one argument: the config.h.meson template to convert.
if len(sys.argv) != 2:
    print(help_message % sys.argv[0])
    sys.exit(0)

with open(sys.argv[1]) as f:
    for line in f:
        line = line.strip()
        arr = line.split()

        # Check for headers: '#mesondefine HAVE_FOO_BAR_H' -> 'foo/bar.h'.
        if line.startswith('#mesondefine') and line.endswith('_H'):
            token = line.split()[1]
            tarr = token.split('_')[1:-1]
            tarr = [x.lower() for x in tarr]
            hname = '/'.join(tarr) + '.h'
            headers.append((token, hname))

        # Check for functions.  Known tokens get their (func, header) pair
        # from function_data; unknown HAVE_* tokens are kept as 1-tuples so
        # they can be emitted as comments for manual review.
        try:
            token = arr[1]
            if token in function_data:
                fdata = function_data[token]
                functions.append((token, fdata[0], fdata[1]))
            elif token.startswith('HAVE_') and not token.endswith('_H'):
                functions.append((token, ))
        except Exception:
            # Lines without a second word simply carry no function check.
            pass

        # Check for sizeof tests: 'SIZEOF_UNSIGNED_INT_P' -> 'unsigned int*'.
        if len(arr) != 2:
            continue
        elem = arr[1]
        if elem.startswith('SIZEOF_'):
            typename = elem.split('_', 1)[1] \
                .replace('_P', '*') \
                .replace('_', ' ') \
                .lower() \
                .replace('size t', 'size_t')
            sizes.append((elem, typename))

# Emit the meson.build prologue.
print('''cc = meson.get_compiler('c')
cdata = configuration_data()''')

# Convert header checks.

print('check_headers = [')
for token, hname in headers:
    print("  ['%s', '%s']," % (token, hname))
print(']\n')

print('''foreach h : check_headers
  if cc.has_header(h.get(1))
    cdata.set(h.get(0), 1)
  endif
endforeach
''')

# Convert function checks.  Tokens with no known prototype are printed as
# comments so the maintainer can fill them in by hand.
print('check_functions = [')
for token in functions:
    if len(token) == 3:
        token, fdata0, fdata1 = token
        print("  ['%s', '%s', '#include<%s>']," % (token, fdata0, fdata1))
    else:
        print('# check token', token)
print(']\n')

print('''foreach f : check_functions
  if cc.has_function(f.get(1), prefix : f.get(2))
    cdata.set(f.get(0), 1)
  endif
endforeach
''')

# Convert sizeof checks.
for elem, typename in sizes:
    print("cdata.set('%s', cc.sizeof('%s'))" % (elem, typename))

print('''
configure_file(input : 'config.h.meson',
  output : 'config.h',
  configuration : cdata)''')
| apache-2.0 | -7,068,034,337,356,909,000 | -106,974,121,388,615,440 | 41.562914 | 100 | 0.560604 | false |
VitalPet/odoo | addons/email_template/__init__.py | 65 | 1126 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2009 Sharoon Thomas
# Copyright (C) 2010-Today OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
import email_template
import wizard
import res_partner
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -9,061,226,431,668,120,000 | -1,241,408,566,779,977,500 | 42.307692 | 78 | 0.625222 | false |
thegrill/checkin-control | docs/source/conf.py | 1 | 6111 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# grill-checkin-control documentation build configuration file, created by
# sphinx-quickstart on Sun Jun 25 22:20:49 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.coverage',
# 'sphinx.ext.imgmath',
# 'sphinx.ext.ifconfig',
'sphinx.ext.viewcode',
# 'sphinx.ext.githubpages',
'sphinx.ext.graphviz',
'sphinx.ext.inheritance_diagram',
'sphinx_autodoc_typehints']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'checkin-control'
copyright = '2017, Christian Lopez Barron'
author = 'Christian Lopez Barron'
# inheritance_graph_attrs = dict(rankdir="LR", size='"6.0, 8.0"',fontsize=14, ratio='compress')
inheritance_graph_attrs = dict(rankdir="TB", bgcolor='transparent')
# inheritance_node_attrs = dict(shape='Mrecord', fontsize=14, height=0.75, color='dodgerblue1', style='filled')
inheritance_node_attrs = dict(shape='Mrecord', color='"#2573a7"', style='filled', fillcolor='"#eaf4fa"',
size='"6.0, 8.0"')
inheritance_edge_attrs = dict(color='"#123a54"')
autodoc_member_order = 'groupwise'
autodoc_default_flags = ['members', 'show-inheritance']
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = []
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'grill-checkin-controldoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'grill-checkin-control.tex', 'grill-checkin-control Documentation',
'Christian Lopez Barron', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'grill-checkin-control', 'grill-checkin-control Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'grill-checkin-control', 'grill-checkin-control Documentation',
author, 'checkin-control', 'One line description of project.',
'Miscellaneous'),
]
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'python': ('https://docs.python.org/3.6', None),
'fs': ('https://pyfilesystem2.readthedocs.io/en/latest/', None)}
| mit | 7,054,008,676,310,207,000 | -7,961,249,818,972,062,000 | 32.211957 | 111 | 0.663067 | false |
lahnerml/espresso | maintainer/check_features.py | 9 | 1064 | # Copyright (C) 2013,2014,2015,2016 The ESPResSo project
# Copyright (C) 2012 Olaf Lenz
#
# This file is part of ESPResSo.
#
# ESPResSo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ESPResSo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Check whether all features used in the code are defined
#
from __future__ import print_function
import sys, os
sys.path.append(os.path.join(sys.path[0], '..', 'config'))
import featuredefs
# Require exactly one argument: the path to the feature definition file.
if len(sys.argv) != 2:
    print("Usage: %s FILE" % sys.argv[0])
    exit(2)

# Parse the feature definitions; the defs object is consumed further below.
fdefs = featuredefs.defs(sys.argv[1])
waldenner/robotframework | install.py | 6 | 3087 | #!/usr/bin/env python
"""Custom Robot Framework installation script.
Usage: python install.py [ in(stall) | un(install) | re(install) ]
Using `python install.py install` simply runs `python setup.py install`
internally. You need to use `setup.py` directly, if you want to alter the
default installation somehow.
To install with with Jython or IronPython instead of Python, replace `python`
with `jython` or `ipy`, respectively.
For more information about installation in general see
http://code.google.com/p/robotframework/wiki/Installation
"""
import glob
import os
import shutil
import sys
def install():
    """Run 'python setup.py install' and exit with its code on failure."""
    # A stale 'build' directory could make setup.py install outdated files.
    _remove(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'build'))
    print 'Installing Robot Framework...'
    setup = os.path.join(os.path.dirname(sys.argv[0]), 'setup.py')
    # Quote the interpreter path in case it contains spaces.
    rc = os.system('"%s" %s install' % (sys.executable, setup))
    if rc != 0:
        print 'Installation failed.'
        sys.exit(rc)
    print 'Installation was successful.'
def uninstall():
    """Remove an existing Robot Framework installation and its runners."""
    print 'Uninstalling Robot Framework...'
    try:
        instdir = _get_installation_directory()
    except Exception:
        print 'Robot Framework is not installed or the installation is corrupted.'
        sys.exit(1)
    _remove(instdir)
    # A plain 'robot' package directory keeps its egg-info metadata next to
    # it; an egg directory named 'robotframework-...' already contains it.
    if not 'robotframework' in instdir:
        _remove_egg_info(instdir)
    _remove_runners()
    print 'Uninstallation was successful.'
def reinstall():
    """Uninstall and then install Robot Framework again."""
    uninstall()
    install()
def _get_installation_directory():
    """Return the directory that should be removed to uninstall the package.

    Raises an exception when the 'robot' module cannot be imported or does
    not look like a genuine Robot Framework installation.
    """
    import robot
    # Ensure we got correct robot module
    if 'Robot' not in robot.pythonpathsetter.__doc__:
        raise TypeError
    robot_dir = os.path.dirname(robot.__file__)
    parent_dir = os.path.dirname(robot_dir)
    # Egg installations nest the package inside a 'robotframework-...'
    # directory; in that case the whole egg directory must be removed.
    if 'robotframework' in os.path.basename(parent_dir):
        return parent_dir
    return robot_dir
def _remove_runners():
    """Delete the pybot/jybot/... start-up scripts from the system paths."""
    runners = ['pybot', 'jybot', 'ipybot', 'rebot', 'jyrebot', 'ipyrebot']
    # On Windows the runner scripts are batch files.
    if os.sep == '\\':
        runners = [r + '.bat' for r in runners]
    for name in runners:
        if os.name == 'java':
            _remove(os.path.join(sys.prefix, 'bin', name))
        elif os.sep == '\\':
            _remove(os.path.join(sys.prefix, 'Scripts', name))
        else:
            # On POSIX the install location varies, so try the usual ones.
            for dirpath in ['/bin', '/usr/bin/', '/usr/local/bin']:
                _remove(os.path.join(dirpath, name))
def _remove_egg_info(instdir):
    """Remove 'robotframework-*.egg-info' metadata next to the installation."""
    pattern = os.path.join(os.path.dirname(instdir), 'robotframework-*.egg-info')
    for path in glob.glob(pattern):
        _remove(path)
def _remove(path):
    """Delete a file or directory tree; missing paths are silently ignored.

    Failures are reported but never raised so that the overall
    (un)installation can continue with the remaining items.
    """
    if not os.path.exists(path):
        return
    try:
        if os.path.isdir(path):
            shutil.rmtree(path)
        else:
            os.remove(path)
    except Exception, err:
        print "Removing '%s' failed: %s" % (path, err)
    else:
        print "Removed '%s'" % path
if __name__ == '__main__':
    # Both the long and the abbreviated command names map to the same action.
    actions = {'install': install, 'in': install,
               'uninstall': uninstall, 'un': uninstall,
               'reinstall': reinstall, 're': reinstall}
    try:
        actions[sys.argv[1]]()
    except (KeyError, IndexError):
        # Unknown or missing command: show the usage text from the module doc.
        print __doc__
| apache-2.0 | 8,835,020,318,493,850,000 | -2,417,091,828,454,357,000 | 29.264706 | 82 | 0.621963 | false |
ProcessOut/processout-python | processout/token.py | 1 | 10402 | try:
from urllib.parse import quote_plus
except ImportError:
from urllib import quote_plus
import processout
from processout.networking.request import Request
from processout.networking.response import Response
# The content of this file was automatically generated
class Token(object):
def __init__(self, client, prefill = None):
self._client = client
self._id = None
self._customer = None
self._customer_id = None
self._gateway_configuration = None
self._gateway_configuration_id = None
self._card = None
self._card_id = None
self._type = None
self._metadata = None
self._is_subscription_only = None
self._is_default = None
self._created_at = None
if prefill != None:
self.fill_with_data(prefill)
@property
def id(self):
"""Get id"""
return self._id
@id.setter
def id(self, val):
"""Set id
Keyword argument:
val -- New id value"""
self._id = val
return self
@property
def customer(self):
"""Get customer"""
return self._customer
@customer.setter
def customer(self, val):
"""Set customer
Keyword argument:
val -- New customer value"""
if val is None:
self._customer = val
return self
if isinstance(val, dict):
obj = processout.Customer(self._client)
obj.fill_with_data(val)
self._customer = obj
else:
self._customer = val
return self
@property
def customer_id(self):
"""Get customer_id"""
return self._customer_id
@customer_id.setter
def customer_id(self, val):
"""Set customer_id
Keyword argument:
val -- New customer_id value"""
self._customer_id = val
return self
@property
def gateway_configuration(self):
"""Get gateway_configuration"""
return self._gateway_configuration
@gateway_configuration.setter
def gateway_configuration(self, val):
"""Set gateway_configuration
Keyword argument:
val -- New gateway_configuration value"""
if val is None:
self._gateway_configuration = val
return self
if isinstance(val, dict):
obj = processout.GatewayConfiguration(self._client)
obj.fill_with_data(val)
self._gateway_configuration = obj
else:
self._gateway_configuration = val
return self
@property
def gateway_configuration_id(self):
"""Get gateway_configuration_id"""
return self._gateway_configuration_id
@gateway_configuration_id.setter
def gateway_configuration_id(self, val):
"""Set gateway_configuration_id
Keyword argument:
val -- New gateway_configuration_id value"""
self._gateway_configuration_id = val
return self
@property
def card(self):
"""Get card"""
return self._card
@card.setter
def card(self, val):
"""Set card
Keyword argument:
val -- New card value"""
if val is None:
self._card = val
return self
if isinstance(val, dict):
obj = processout.Card(self._client)
obj.fill_with_data(val)
self._card = obj
else:
self._card = val
return self
@property
def card_id(self):
"""Get card_id"""
return self._card_id
@card_id.setter
def card_id(self, val):
"""Set card_id
Keyword argument:
val -- New card_id value"""
self._card_id = val
return self
@property
def type(self):
"""Get type"""
return self._type
@type.setter
def type(self, val):
"""Set type
Keyword argument:
val -- New type value"""
self._type = val
return self
@property
def metadata(self):
"""Get metadata"""
return self._metadata
@metadata.setter
def metadata(self, val):
"""Set metadata
Keyword argument:
val -- New metadata value"""
self._metadata = val
return self
@property
def is_subscription_only(self):
"""Get is_subscription_only"""
return self._is_subscription_only
@is_subscription_only.setter
def is_subscription_only(self, val):
"""Set is_subscription_only
Keyword argument:
val -- New is_subscription_only value"""
self._is_subscription_only = val
return self
@property
def is_default(self):
"""Get is_default"""
return self._is_default
@is_default.setter
def is_default(self, val):
"""Set is_default
Keyword argument:
val -- New is_default value"""
self._is_default = val
return self
@property
def created_at(self):
"""Get created_at"""
return self._created_at
@created_at.setter
def created_at(self, val):
"""Set created_at
Keyword argument:
val -- New created_at value"""
self._created_at = val
return self
def fill_with_data(self, data):
"""Fill the current object with the new values pulled from data
Keyword argument:
data -- The data from which to pull the new values"""
if "id" in data.keys():
self.id = data["id"]
if "customer" in data.keys():
self.customer = data["customer"]
if "customer_id" in data.keys():
self.customer_id = data["customer_id"]
if "gateway_configuration" in data.keys():
self.gateway_configuration = data["gateway_configuration"]
if "gateway_configuration_id" in data.keys():
self.gateway_configuration_id = data["gateway_configuration_id"]
if "card" in data.keys():
self.card = data["card"]
if "card_id" in data.keys():
self.card_id = data["card_id"]
if "type" in data.keys():
self.type = data["type"]
if "metadata" in data.keys():
self.metadata = data["metadata"]
if "is_subscription_only" in data.keys():
self.is_subscription_only = data["is_subscription_only"]
if "is_default" in data.keys():
self.is_default = data["is_default"]
if "created_at" in data.keys():
self.created_at = data["created_at"]
return self
def verify(self, options = {}):
"""Verify a customer token's card is valid.
Keyword argument:
options -- Options for the request"""
self.fill_with_data(options)
request = Request(self._client)
path = "/customers/" + quote_plus(self.customer_id) + "/tokens/" + quote_plus(self.id) + "/verify"
data = {
}
response = Response(request.post(path, data, options))
return_values = []
return_values.append(response.success)
return return_values[0]
def fetch_customer_tokens(self, customer_id, options = {}):
"""Get the customer's tokens.
Keyword argument:
customer_id -- ID of the customer
options -- Options for the request"""
self.fill_with_data(options)
request = Request(self._client)
path = "/customers/" + quote_plus(customer_id) + "/tokens"
data = {
}
response = Response(request.get(path, data, options))
return_values = []
a = []
body = response.body
for v in body['tokens']:
tmp = processout.Token(self._client)
tmp.fill_with_data(v)
a.append(tmp)
return_values.append(a)
return return_values[0]
def find(self, customer_id, token_id, options = {}):
"""Find a customer's token by its ID.
Keyword argument:
customer_id -- ID of the customer
token_id -- ID of the token
options -- Options for the request"""
self.fill_with_data(options)
request = Request(self._client)
path = "/customers/" + quote_plus(customer_id) + "/tokens/" + quote_plus(token_id) + ""
data = {
}
response = Response(request.get(path, data, options))
return_values = []
body = response.body
body = body["token"]
obj = processout.Token(self._client)
return_values.append(obj.fill_with_data(body))
return return_values[0]
def create(self, options = {}):
"""Create a new token for the given customer ID.
Keyword argument:
options -- Options for the request"""
self.fill_with_data(options)
request = Request(self._client)
path = "/customers/" + quote_plus(self.customer_id) + "/tokens"
data = {
'metadata': self.metadata,
'source': options.get("source"),
'settings': options.get("settings"),
'target': options.get("target"),
'verify': options.get("verify"),
'verify_metadata': options.get("verify_metadata"),
'set_default': options.get("set_default")
}
response = Response(request.post(path, data, options))
return_values = []
body = response.body
body = body["token"]
return_values.append(self.fill_with_data(body))
return return_values[0]
def delete(self, options = {}):
"""Delete a customer token
Keyword argument:
options -- Options for the request"""
self.fill_with_data(options)
request = Request(self._client)
path = "/customers/" + quote_plus(self.customer_id) + "/tokens/" + quote_plus(self.id) + ""
data = {
}
response = Response(request.delete(path, data, options))
return_values = []
return_values.append(response.success)
return return_values[0]
| mit | -1,755,301,519,321,824,000 | 2,063,353,535,887,300,000 | 26.44591 | 109 | 0.54278 | false |
BDAsdeCorazones/TestAirlines | tabo/cherrypy/cherrypy/test/test_refleaks.py | 22 | 1438 | """Tests for refleaks."""
from cherrypy._cpcompat import HTTPConnection, HTTPSConnection, ntob
import threading
import cherrypy
data = object()
from cherrypy.test import helper
class ReferenceTests(helper.CPWebCase):
    """Exercise the server from many threads to surface thread-local leaks."""

    def setup_server():
        class Root:

            def index(self, *args, **kwargs):
                # Attach a module-level object to the request so each request
                # holds a reference in thread-local state.
                cherrypy.request.thing = data
                return "Hello world!"
            index.exposed = True

        cherrypy.tree.mount(Root())
    setup_server = staticmethod(setup_server)

    def test_threadlocal_garbage(self):
        # One True is appended per client thread that completes successfully.
        success = []

        def getpage():
            host = '%s:%s' % (self.interface(), self.PORT)
            if self.scheme == 'https':
                c = HTTPSConnection(host)
            else:
                c = HTTPConnection(host)
            try:
                c.putrequest('GET', '/')
                c.endheaders()
                response = c.getresponse()
                body = response.read()
                self.assertEqual(response.status, 200)
                self.assertEqual(body, ntob("Hello world!"))
            finally:
                # Always close the connection, even when an assertion fails.
                c.close()
            success.append(True)

        ITERATIONS = 25

        # Fire all requests concurrently, then wait for every thread.
        ts = []
        for _ in range(ITERATIONS):
            t = threading.Thread(target=getpage)
            ts.append(t)
            t.start()

        for t in ts:
            t.join()

        self.assertEqual(len(success), ITERATIONS)
Airbitz/airbitz-ofx | qbo.py | 1 | 7851 | #####################################################################
# #
# File: qbo.py #
# Developer: Justin Leto #
# #
# qbo class provides an interface from main csv iterator method #
# to handle qbo formatting, validations, and writing to file. #
# #
# Usage: python csvtoqbo.py <options> <csvfiles> #
# #
#####################################################################
import sys, traceback
import os
from datetime import datetime
import logging
import qboconst
class qbo:
# Holds a list of valid transactions via the addTransaction() method
__transactions = list()
# The full QBO document build from constants and transactions
__document = None
# Flag indicating whether the QBO document is valid
__isValid = None
# constructor
    def __init__(self):
        """Load the QBO document constants and mark the document valid."""
        # Reads in constant values from file, set to private (const) variables
        self.__HEADER = qboconst.HEADER
        self.__FOOTER = qboconst.FOOTER
        self.__DATE_START = qboconst.DATE_START
        self.__DATE_END = qboconst.DATE_END
        self.__BANKTRANLIST_START = qboconst.BANKTRANLIST_START
        self.__BANKTRANLIST_END = qboconst.BANKTRANLIST_END
        self.__TRANSACTION_START = qboconst.TRANSACTION_START
        self.__TRANSACTION_END = qboconst.TRANSACTION_END

        # Set document to valid
        self.__isValid = True
# PUBLIC GET METHODS for constant values - used in unit testing.
#
#
    def getHEADER(self):
        """Return the QBO document header constant."""
        return self.__HEADER

    def getFOOTER(self):
        """Return the QBO document footer constant."""
        return self.__FOOTER

    def getDATE_START(self):
        """Return the statement start-date constant."""
        return self.__DATE_START

    def getDATE_END(self):
        """Return the statement end-date constant."""
        return self.__DATE_END

    def getBANKTRANLIST_START(self):
        """Return the bank-transaction-list opening constant."""
        return self.__BANKTRANLIST_START

    def getBANKTRANLIST_END(self):
        """Return the bank-transaction-list closing constant."""
        return self.__BANKTRANLIST_END

    def getTRANSACTION_START(self):
        """Return the transaction opening constant."""
        return self.__TRANSACTION_START

    def getTRANSACTION_END(self):
        """Return the transaction closing constant."""
        return self.__TRANSACTION_END
# method to validate paramters used to submit transactions
def validateTransaction(self, status, date_posted, txn_type, to_from_flag, txn_amount, txn_exrate, name):
# if str.lower(status) != 'completed':
# #log status failure
# logging.info("Transaction status [" + status + "] invalid.")
# raise Exception("Transaction status [" + status + "] invalid.")
#
#if type(datetime.strptime(str(date_posted), '%m/%d/%Y')) is not datetime:
# logging.info("Transaction posted date [" + date_posted + "] invalid.")
# raise Exception("Transaction posted date [" + date_posted + "] invalid.")
# if str.lower(txn_type) not in ('payment','refund','withdrawal', 'withdraw funds', 'send', 'receive'):
# logging.info("Transaction type [" + str(txn_type) + "] not 'Payment', 'Refund', 'Withdraw Funds', or 'Withdrawal'.")
# raise Exception("Transaction type [" + str(txn_type) + "] not 'Payment', 'Refund', 'Withdraw Funds', or 'Withdrawal'.")
#
# if str.lower(to_from_flag) not in ('to', 'from'):
# logging.info("Transaction 'To/From' field [" + to_from_flag + "] invalid.")
# raise Exception("Transaction 'To/From' field [" + to_from_flag + "] invalid.")
#
# #logical test of txn_type and to_from_flag
# if ((str.lower(txn_type) == 'refund' and str.lower(to_from_flag) != 'to') or (str.lower(txn_type) == 'payment' and str.lower(to_from_flag) != 'from')):
# logging.info("Transaction type inconsistent with 'To/From' field.")
# raise Exception("Transaction type inconsistent with 'To/From' field.")
#
if len(name) == 0 or not name:
logging.info("Transaction name empty or null.")
raise Exception("Transaction name empty or null.")
return True
    # Add transaction: takes in param values, applies the required QBO
    # formatting and pushes the formatted entry onto the internal list.
    def addTransaction(self, denom, date_posted, txn_memo, txn_id, txn_amount, txn_curamt, txn_category, name):
        """Format one transaction as QBO text and queue it for the document.

        Args:
            denom: multiplier converting txn_amount into base units.
            date_posted: date string in 'YYYY-M-D' form (dash separated).
            txn_memo: free-text memo embedded in the MEMO field.
            txn_id: unique transaction identifier (FITID).
            txn_amount: signed amount; positive -> CREDIT, otherwise DEBIT.
            txn_curamt: amount in USD, used to derive the exchange rate.
            txn_category: category label embedded in the memo.
            name: optional payee name; the NAME line is omitted when falsy.

        Returns:
            True after the transaction text is appended to __transactions.
        """
        # NOTE(review): the validateTransaction() call was disabled in the
        # original source, so parameters are NOT validated here.
        # Construct QBO formatted transaction
        transaction = ""
        day = ""
        month = ""
        date_array = date_posted.split('-')
        day = date_array[2]
        month = date_array[1]
        year = date_array[0]
        # Zero-pad day and month to two digits so strptime accepts them.
        if len(day) == 1:
            day = "0"+day
        if len(month) == 1:
            month = "0"+month
        # Normalise to the OFX timestamp layout YYYYMMDDHHMMSS.000.
        rec_date = datetime.strptime(year+"/"+month+"/"+day, '%Y/%m/%d')
        rec_date = rec_date.strftime('%Y%m%d%H%M%S') + '.000'
        dtposted = ' <DTPOSTED>' + rec_date
        # The sign of the amount selects the transaction type.
        if float(txn_amount) > 0:
            trtype = ' <TRNTYPE>CREDIT'
        else:
            trtype = ' <TRNTYPE>DEBIT'
        # Amount expressed in base units via the denomination multiplier.
        tramtbits = float(txn_amount) * denom
        tramt = ' <TRNAMT>' + str(tramtbits)
        if name:
            trname = ' <NAME>' + str(name) + "\n"
        else:
            trname = ''
        # NOTE(review): tramtbits == 0 raises ZeroDivisionError here --
        # confirm callers never pass txn_amount == 0.
        exrate = float(txn_curamt) / (tramtbits)
        curamt = "{0:0.2f}".format(abs(float(txn_curamt)))
        fmtexrate = "{0:0.6f}".format(float(exrate))
        # Memo text is truncated to 253 chars before the closing quote.
        rawmemo = 'Rate=' + fmtexrate + " USD=" + curamt + " category=\"" + str(txn_category) + "\" memo=\"" + str(txn_memo)
        memo = ' <MEMO>' + rawmemo[:253] + "\"\n"
        fitid = ' <FITID>' + str(txn_id)
        exrate = ' <CURRATE>' + fmtexrate
        transaction = ("" + self.__TRANSACTION_START + "\n"
                       "" + trtype + "\n"
                       "" + dtposted + "\n"
                       "" + tramt + "\n"
                       "" + fitid + "\n"
                       "" + trname +
                       "" + memo +
                       "" + " <CURRENCY>" + "\n"
                       "" + exrate + "\n"
                       "" + " <CURSYM>USD" + "\n"
                       "" + " </CURRENCY>" + "\n"
                       "" + self.__TRANSACTION_END + "\n")
        # Commit transaction to the document by adding to private member list
        self.__transactions.append(transaction)
        logging.info("Transaction [" + str(self.getCount()) + "] Accepted.")
        return True
    # get the current number of valid committed transactions
    def getCount(self):
        """Return how many transactions have been queued so far."""
        return len(self.__transactions)
    # get the valid status of the document
    def isValid(self):
        """Return the document's validity flag.

        NOTE(review): the flag is only ever cleared here (set False when no
        transactions exist) and never reset to True -- confirm this one-way
        behaviour is intended.
        """
        # If number of valid transactions is 0 the document is invalid.
        if self.getCount() == 0:
            self.__isValid = False
        return self.__isValid
    # get the text of the document
    def getDocument(self):
        """Build (or rebuild) the document and return its full text."""
        self.Build()
        return self.__document
# Construct the document, add the transactions
# save str into private member variable __document
def Build(self):
if not self.isValid():
logging.info("Error: QBO document is not valid.")
raise Exception("Error: QBO document is not valid.")
self.__document = ("" + self.__HEADER + "\n"
"" + self.__BANKTRANLIST_START + "\n"
"" + self.__DATE_START + "\n"
"" + self.__DATE_END + "\n")
for txn in self.__transactions:
self.__document = self.__document + str(txn)
self.__document = self.__document + ("" + self.__BANKTRANLIST_END + "\n"
"" + self.__FOOTER + "")
# Write QBO document to file
def Write(self, filename):
try:
with open(filename, 'w') as f:
# getDocument method will build document
# test for validity and return string for write
f.write(self.getDocument())
return True
except:
#log io error return False
exc_type, exc_value, exc_traceback = sys.exc_info()
lines = traceback.format_exception(exc_type, exc_value, exc_traceback)
print(''.join('!! ' + line for line in lines))
logging.info('qbo.Write() method: '.join('!! ' + line for line in lines))
return False
| mit | -7,222,893,552,280,705,000 | 4,905,500,952,374,224,000 | 31.126582 | 155 | 0.603235 | false |
jamesmarva/d3status | d3status/libs/options.py | 3 | 1419 | ## -*- coding: utf-8 -*-
#
# Copyright (c) 2012 feilong.me. All rights reserved.
#
# @author: Felinx Lee <felinx.lee@gmail.com>
# Created on Jun 30, 2012
#
import logging
import os
from tornado.options import parse_command_line, options, define
def parse_config_file(path):
    """Rewrite tornado default parse_config_file.

    Parses and loads the Python config file at the given path.

    This version allows customized new options which are not defined before
    to be loaded from a configuration file.
    """
    config = {}
    # Python 2 builtin: executes the file with `config` as globals/locals.
    execfile(path, config, config)
    for name, value in config.items():
        if name in options:
            # Known option: update its value in place.
            options[name].set(value)
        else:
            # Unknown option: register it on the fly.
            define(name, value)
def parse_options():
    """Load settings.py, optionally override it with settings_local.py, then
    parse command-line options.

    Both settings files are optional; failures are logged, not raised.
    """
    _root = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..")
    _settings = os.path.join(_root, "settings.py")
    _settings_local = os.path.join(_root, "settings_local.py")

    try:
        parse_config_file(_settings)
        logging.info("Using settings.py as default settings.")
    # `except Exception, e` is Python-2-only syntax; `as` works on 2.6+ and 3.x.
    except Exception as e:
        logging.error("No any default settings, are you sure? Exception: %s" % e)

    try:
        parse_config_file(_settings_local)
        logging.info("Override some settings with local settings.")
    except Exception as e:
        logging.error("No local settings. Exception: %s" % e)

    parse_command_line()
| apache-2.0 | 6,861,515,853,556,441,000 | -8,684,482,651,924,388,000 | 26.959184 | 81 | 0.625793 | false |
jnewland/home-assistant | homeassistant/components/toon/climate.py | 6 | 4030 | """Support for Toon thermostat."""
from datetime import timedelta
import logging
from typing import Any, Dict, List
from homeassistant.components.climate import ClimateDevice
from homeassistant.components.climate.const import (
STATE_AUTO, STATE_COOL, STATE_ECO, STATE_HEAT, SUPPORT_OPERATION_MODE,
SUPPORT_TARGET_TEMPERATURE)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_TEMPERATURE, TEMP_CELSIUS
from homeassistant.helpers.typing import HomeAssistantType
from . import ToonDisplayDeviceEntity
from .const import DATA_TOON_CLIENT, DEFAULT_MAX_TEMP, DEFAULT_MIN_TEMP, DOMAIN
_LOGGER = logging.getLogger(__name__)

# This entity supports setting a target temperature and an operation mode.
SUPPORT_FLAGS = SUPPORT_TARGET_TEMPERATURE | SUPPORT_OPERATION_MODE

MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=5)
SCAN_INTERVAL = timedelta(seconds=300)

# Mapping from Home Assistant operation modes to Toon preset names.
HA_TOON = {
    STATE_AUTO: 'Comfort',
    STATE_HEAT: 'Home',
    STATE_ECO: 'Away',
    STATE_COOL: 'Sleep',
}

# Reverse mapping: Toon preset name -> Home Assistant mode.
TOON_HA = {value: key for key, value in HA_TOON.items()}
async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry,
                            async_add_entities) -> None:
    """Set up a Toon climate device based on a config entry."""
    # Look up the Toon client created for this config entry and register
    # one thermostat entity (True -> update before first state write).
    toon = hass.data[DATA_TOON_CLIENT][entry.entry_id]
    async_add_entities([ToonThermostatDevice(toon)], True)
class ToonThermostatDevice(ToonDisplayDeviceEntity, ClimateDevice):
    """Representation of a Toon climate device."""

    def __init__(self, toon) -> None:
        """Initialize the Toon climate device."""
        # Local state mirrors, refreshed by update().
        self._state = None
        self._current_temperature = None
        self._target_temperature = None
        self._next_target_temperature = None
        self._heating_type = None

        super().__init__(toon, "Toon Thermostat", 'mdi:thermostat')

    @property
    def unique_id(self) -> str:
        """Return the unique ID for this thermostat."""
        parts = [DOMAIN, self.toon.agreement.id, 'climate']
        return '_'.join(parts)

    @property
    def supported_features(self) -> int:
        """Return the list of supported features."""
        return SUPPORT_FLAGS

    @property
    def temperature_unit(self) -> str:
        """Return the unit of measurement."""
        return TEMP_CELSIUS

    @property
    def current_operation(self) -> str:
        """Return the current operation, e.g. comfort, home, away."""
        return TOON_HA.get(self._state)

    @property
    def operation_list(self) -> List[str]:
        """Return a list of available operation modes."""
        return list(HA_TOON)

    @property
    def current_temperature(self) -> float:
        """Return the current temperature."""
        return self._current_temperature

    @property
    def target_temperature(self) -> float:
        """Return the temperature we try to reach."""
        return self._target_temperature

    @property
    def min_temp(self) -> float:
        """Return the minimum settable temperature."""
        return DEFAULT_MIN_TEMP

    @property
    def max_temp(self) -> float:
        """Return the maximum settable temperature."""
        return DEFAULT_MAX_TEMP

    @property
    def device_state_attributes(self) -> Dict[str, Any]:
        """Return the current state of the burner."""
        return {'heating_type': self._heating_type}

    def set_temperature(self, **kwargs) -> None:
        """Change the setpoint of the thermostat."""
        self.toon.thermostat = kwargs.get(ATTR_TEMPERATURE)

    def set_operation_mode(self, operation_mode: str) -> None:
        """Set a new operation mode."""
        self.toon.thermostat_state = HA_TOON[operation_mode]

    def update(self) -> None:
        """Refresh local state from the Toon client."""
        state = self.toon.thermostat_state
        self._state = None if state is None else state.name

        self._current_temperature = self.toon.temperature
        self._target_temperature = self.toon.thermostat
        self._heating_type = self.toon.agreement.heating_type
| apache-2.0 | -9,056,414,336,587,887,000 | -584,677,445,226,830,200 | 30.732283 | 79 | 0.654342 | false |
arostm/mbed-os | features/FEATURE_LWIP/TESTS/mbedmicro-net/host_tests/udp_shotgun.py | 39 | 4553 | """
mbed SDK
Copyright (c) 2011-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import sys
import socket
import json
import random
import itertools
import time
from sys import stdout
from threading import Thread
from SocketServer import BaseRequestHandler, UDPServer
from mbed_host_tests import BaseHostTest, event_callback
class UDPEchoClientHandler(BaseRequestHandler):
    def handle(self):
        """ UDP packet handler. Responds with multiple simultaneous packets
        """
        data, sock = self.request
        # Each byte of the request encodes (packet_size >> 4); shifting keeps
        # the size within one byte and avoids any endianness/decoding issues.
        pattern = [ord(d) << 4 for d in data]  # Python 2: data is a byte str

        for packet in pattern:
            # Payload is random bytes followed by one XOR-checksum byte.
            data = [random.randint(0, 255) for _ in range(packet-1)]
            data.append(reduce(lambda a,b: a^b, data))  # py2 builtin reduce
            data = ''.join(map(chr, data))
            sock.sendto(data, self.client_address)

            # Sleep a tiny bit to compensate for local network
            time.sleep(0.01)
class UDPEchoClientTest(BaseHostTest):
    """Host side of the UDP shotgun test: serves bursts of random packets."""

    def __init__(self):
        """
        Initialise test parameters.
        :return:
        """
        BaseHostTest.__init__(self)
        self.SERVER_IP = None  # Will be determined after knowing the target IP
        self.SERVER_PORT = 0   # Let the server choose an arbitrary free port
        self.server = None
        self.server_thread = None
        self.target_ip = None

    @staticmethod
    def find_interface_to_target_addr(target_ip):
        """
        Finds IP address of the interface through which it is connected to the target.
        :return:
        """
        # Connecting a UDP socket sends no packets; it only resolves which
        # local interface routes to the target.
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        try:
            s.connect((target_ip, 0))  # Target IP, any port
        except socket.error:
            s.connect((target_ip, 8000))  # Target IP, 'random' port
        ip = s.getsockname()[0]
        s.close()
        return ip

    def setup_udp_server(self):
        """
        Sets up a UDP server for the target to connect and send test data.
        :return:
        """
        # !NOTE: There should be a mechanism to assert in the host test
        if self.SERVER_IP is None:
            self.log("setup_udp_server() called before determining server IP!")
            self.notify_complete(False)

        # Returning none will suppress host test from printing success code
        self.server = UDPServer((self.SERVER_IP, self.SERVER_PORT), UDPEchoClientHandler)
        ip, port = self.server.server_address
        self.SERVER_PORT = port
        self.server.allow_reuse_address = True
        self.log("HOST: Listening for UDP packets: " + self.SERVER_IP + ":" + str(self.SERVER_PORT))

        # Serve requests on a background thread so the host test can continue.
        self.server_thread = Thread(target=UDPEchoClientTest.server_thread_func, args=(self,))
        self.server_thread.start()

    @staticmethod
    def server_thread_func(this):
        """
        Thread function to run the server forever (until shutdown()).
        :param this:
        :return:
        """
        this.server.serve_forever()

    @event_callback("target_ip")
    def _callback_target_ip(self, key, value, timestamp):
        """
        Callback to handle reception of target's IP address.
        :param key:
        :param value:
        :param timestamp:
        :return:
        """
        # Knowing the target IP lets us pick the right local interface and
        # only then start the UDP server.
        self.target_ip = value
        self.SERVER_IP = self.find_interface_to_target_addr(self.target_ip)
        self.setup_udp_server()

    @event_callback("host_ip")
    def _callback_host_ip(self, key, value, timestamp):
        """
        Callback for request for host IP Addr

        """
        self.send_kv("host_ip", self.SERVER_IP)

    @event_callback("host_port")
    def _callback_host_port(self, key, value, timestamp):
        """
        Callback for request for host port
        """
        self.send_kv("host_port", self.SERVER_PORT)

    def teardown(self):
        # Stop serve_forever() and wait for the server thread to exit.
        if self.server:
            self.server.shutdown()
            self.server_thread.join()
| apache-2.0 | 220,133,062,098,927,700 | 6,459,545,347,821,612,000 | 31.06338 | 100 | 0.628816 | false |
franosincic/edx-platform | openedx/core/djangoapps/programs/tests/test_models.py | 8 | 4051 | """Tests for models supporting Program-related functionality."""
import ddt
from django.test import TestCase
import mock
from openedx.core.djangoapps.programs.models import ProgramsApiConfig
from openedx.core.djangoapps.programs.tests.mixins import ProgramsApiConfigMixin
@ddt.ddt
# ConfigurationModels use the cache. Make every cache get a miss.
@mock.patch('config_models.models.cache.get', return_value=None)
class TestProgramsApiConfig(ProgramsApiConfigMixin, TestCase):
    """Tests covering the ProgramsApiConfig model."""

    def test_url_construction(self, _mock_cache):
        """Verify that URLs returned by the model are constructed correctly."""
        programs_config = self.create_programs_config()

        # API URLs are the service URL (trailing slash stripped) plus a
        # versioned /api/vN/ suffix.
        self.assertEqual(
            programs_config.internal_api_url,
            programs_config.internal_service_url.strip('/') + '/api/v{}/'.format(programs_config.api_version_number)
        )
        self.assertEqual(
            programs_config.public_api_url,
            programs_config.public_service_url.strip('/') + '/api/v{}/'.format(programs_config.api_version_number)
        )

        # Authoring-app asset URLs are rooted at the public service URL.
        authoring_app_config = programs_config.authoring_app_config

        self.assertEqual(
            authoring_app_config.js_url,
            programs_config.public_service_url.strip('/') + programs_config.authoring_app_js_path
        )

        self.assertEqual(
            authoring_app_config.css_url,
            programs_config.public_service_url.strip('/') + programs_config.authoring_app_css_path
        )

    @ddt.data(
        (0, False),
        (1, True),
    )
    @ddt.unpack
    def test_cache_control(self, cache_ttl, is_cache_enabled, _mock_cache):
        """Verify the behavior of the property controlling whether API responses are cached."""
        # A TTL of 0 disables caching; any positive TTL enables it.
        programs_config = self.create_programs_config(cache_ttl=cache_ttl)
        self.assertEqual(programs_config.is_cache_enabled, is_cache_enabled)

    def test_is_student_dashboard_enabled(self, _mock_cache):
        """
        Verify that the property controlling display on the student dashboard is only True
        when configuration is enabled and all required configuration is provided.
        """
        programs_config = self.create_programs_config(enabled=False)
        self.assertFalse(programs_config.is_student_dashboard_enabled)

        programs_config = self.create_programs_config(enable_student_dashboard=False)
        self.assertFalse(programs_config.is_student_dashboard_enabled)

        programs_config = self.create_programs_config()
        self.assertTrue(programs_config.is_student_dashboard_enabled)

    def test_is_studio_tab_enabled(self, _mock_cache):
        """
        Verify that the property controlling display of the Studio tab is only True
        when configuration is enabled and all required configuration is provided.
        """
        programs_config = self.create_programs_config(enabled=False)
        self.assertFalse(programs_config.is_studio_tab_enabled)

        programs_config = self.create_programs_config(enable_studio_tab=False)
        self.assertFalse(programs_config.is_studio_tab_enabled)

        # Missing authoring-app asset paths also disable the tab.
        programs_config = self.create_programs_config(authoring_app_js_path='', authoring_app_css_path='')
        self.assertFalse(programs_config.is_studio_tab_enabled)

        programs_config = self.create_programs_config()
        self.assertTrue(programs_config.is_studio_tab_enabled)

    def test_is_certification_enabled(self, _mock_cache):
        """
        Verify that the property controlling certification-related functionality
        for Programs behaves as expected.
        """
        programs_config = self.create_programs_config(enabled=False)
        self.assertFalse(programs_config.is_certification_enabled)

        programs_config = self.create_programs_config(enable_certification=False)
        self.assertFalse(programs_config.is_certification_enabled)

        programs_config = self.create_programs_config()
        self.assertTrue(programs_config.is_certification_enabled)
| agpl-3.0 | -2,541,271,596,620,416,000 | 5,317,716,192,100,504,000 | 43.032609 | 116 | 0.696618 | false |
yashu-seth/networkx | networkx/release.py | 24 | 7759 | """Release data for NetworkX.
When NetworkX is imported a number of steps are followed to determine
the version information.
1) If the release is not a development release (dev=False), then version
information is read from version.py, a file containing statically
defined version information. This file should exist on every
downloadable release of NetworkX since setup.py creates it during
packaging/installation. However, version.py might not exist if one
is running NetworkX from the mercurial repository. In the event that
version.py does not exist, then no vcs information will be available.
2) If the release is a development release, then version information
is read dynamically, when possible. If no dynamic information can be
read, then an attempt is made to read the information from version.py.
If version.py does not exist, then no vcs information will be available.
Clarification:
version.py is created only by setup.py
When setup.py creates version.py, it does so before packaging/installation.
So the created file is included in the source distribution. When a user
downloads a tar.gz file and extracts the files, the files will not be in a
live version control repository. So when the user runs setup.py to install
NetworkX, we must make sure write_versionfile() does not overwrite the
revision information contained in the version.py that was included in the
tar.gz file. This is why write_versionfile() includes an early escape.
"""
# Copyright (C) 2004-2015 by
# Aric Hagberg <hagberg@lanl.gov>
# Dan Schult <dschult@colgate.edu>
# Pieter Swart <swart@lanl.gov>
# All rights reserved.
# BSD license.
from __future__ import absolute_import
import os
import sys
import time
import datetime
# Directory containing this file; version.py is written alongside it.
basedir = os.path.abspath(os.path.split(__file__)[0])


def write_versionfile():
    """Creates a static file containing version information.

    Returns the version string (either freshly computed or read back from an
    existing version.py).
    """
    versionfile = os.path.join(basedir, 'version.py')

    text = '''"""
Version information for NetworkX, created during installation.
Do not add this file to the repository.
"""

import datetime

version = %(version)r
date = %(date)r

# Was NetworkX built from a development version? If so, remember that the major
# and minor versions reference the "target" (rather than "current") release.
dev = %(dev)r

# Format: (name, major, min, revision)
version_info = %(version_info)r

# Format: a 'datetime.datetime' instance
date_info = %(date_info)r

# Format: (vcs, vcs_tuple)
vcs_info = %(vcs_info)r
'''

    # Try to update all information
    date, date_info, version, version_info, vcs_info = get_info(dynamic=True)

    def writefile():
        # Use a context manager so the handle is closed even if the
        # string formatting below raises.
        subs = {
            'dev' : dev,
            'version': version,
            'version_info': version_info,
            'date': date,
            'date_info': date_info,
            'vcs_info': vcs_info
        }
        with open(versionfile, 'w') as fh:
            fh.write(text % subs)

    if vcs_info[0] == 'mercurial':
        # Then, we want to update version.py.
        writefile()
    else:
        if os.path.isfile(versionfile):
            # This is *good*, and the most likely place users will be when
            # running setup.py. We do not want to overwrite version.py.
            # Grab the version so that setup can use it.
            sys.path.insert(0, basedir)
            from version import version
            del sys.path[0]
        else:
            # This is *bad*. It means the user might have a tarball that
            # does not include version.py. Let this error raise so we can
            # fix the tarball.
            ##raise Exception('version.py not found!')

            # We no longer require that prepared tarballs include a version.py
            # So we use the possibly truncated value from get_info()
            # Then we write a new file.
            writefile()

    return version
def get_revision():
    """Returns revision and vcs information, dynamically obtained.

    Only a git checkout is detected; revision and tag are currently always
    None. (A leftover, unused mercurial-directory lookup was removed.)
    """
    vcs, revision, tag = None, None, None

    gitdir = os.path.join(basedir, '..', '.git')

    if os.path.isdir(gitdir):
        vcs = 'git'
        # For now, we are not bothering with revision and tag.

    vcs_info = (vcs, (revision, tag))

    return revision, vcs_info
def get_info(dynamic=True):
    """Return (date, date_info, version, version_info, vcs_info).

    When *dynamic* is True, revision info is read from the working copy;
    otherwise (or on failure) it falls back to the static version.py.
    Relies on the module-level globals `name`, `major`, `minor` and `dev`,
    which are defined later in this module (resolved at call time).
    """
    ## Date information
    date_info = datetime.datetime.now()
    date = time.asctime(date_info.timetuple())

    revision, version, version_info, vcs_info = None, None, None, None

    import_failed = False
    dynamic_failed = False

    if dynamic:
        revision, vcs_info = get_revision()
        if revision is None:
            dynamic_failed = True

    if dynamic_failed or not dynamic:
        # This is where most final releases of NetworkX will be.
        # All info should come from version.py. If it does not exist, then
        # no vcs information will be provided.
        sys.path.insert(0, basedir)
        try:
            from version import date, date_info, version, version_info, vcs_info
        except ImportError:
            import_failed = True
            vcs_info = (None, (None, None))
        else:
            revision = vcs_info[1][0]
        del sys.path[0]

    if import_failed or (dynamic and not dynamic_failed):
        # We are here if:
        # we failed to determine static versioning info, or
        # we successfully obtained dynamic revision info
        version = ''.join([str(major), '.', str(minor)])
        if dev:
            # Development builds get a timestamped suffix.
            version += '.dev_' + date_info.strftime("%Y%m%d%H%M%S")
        version_info = (name, major, minor, revision)

    return date, date_info, version, version_info, vcs_info
## Version information
name = 'networkx'
major = "2"
minor = "0"

## Declare current release as a development release.
## Change to False before tagging a release; then change back.
dev = True

description = "Python package for creating and manipulating graphs and networks"

long_description = \
"""
NetworkX is a Python package for the creation, manipulation, and
study of the structure, dynamics, and functions of complex networks.
"""
license = 'BSD'
authors = {'Hagberg' : ('Aric Hagberg','hagberg@lanl.gov'),
           'Schult' : ('Dan Schult','dschult@colgate.edu'),
           'Swart' : ('Pieter Swart','swart@lanl.gov')
           }
maintainer = "NetworkX Developers"
maintainer_email = "networkx-discuss@googlegroups.com"
url = 'http://networkx.github.io/'
download_url= 'https://pypi.python.org/pypi/networkx/'
platforms = ['Linux','Mac OSX','Windows','Unix']
keywords = ['Networks', 'Graph Theory', 'Mathematics', 'network', 'graph', 'discrete mathematics', 'math']
classifiers = [
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'Intended Audience :: Science/Research',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.2',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Topic :: Software Development :: Libraries :: Python Modules',
        'Topic :: Scientific/Engineering :: Bio-Informatics',
        'Topic :: Scientific/Engineering :: Information Analysis',
        'Topic :: Scientific/Engineering :: Mathematics',
        'Topic :: Scientific/Engineering :: Physics']

# Evaluated at import time so `version`, `date`, etc. are module globals.
date, date_info, version, version_info, vcs_info = get_info()

if __name__ == '__main__':
    # Write versionfile for nightly snapshots.
    write_versionfile()
| bsd-3-clause | 7,663,610,306,512,692,000 | -3,189,663,154,297,699,000 | 33.180617 | 106 | 0.648279 | false |
guettli/django | tests/m2m_through_regress/tests.py | 31 | 10463 | from __future__ import unicode_literals
from django.contrib.auth.models import User
from django.core import management
from django.test import TestCase
from django.utils.six import StringIO
from .models import (
Car, CarDriver, Driver, Group, Membership, Person, UserMembership,
)
class M2MThroughTestCase(TestCase):
    """Tests of m2m relations that go through an explicit intermediary model."""

    @classmethod
    def setUpTestData(cls):
        cls.bob = Person.objects.create(name="Bob")
        cls.jim = Person.objects.create(name="Jim")

        cls.rock = Group.objects.create(name="Rock")
        cls.roll = Group.objects.create(name="Roll")

        cls.frank = User.objects.create_user("frank", "frank@example.com", "password")
        cls.jane = User.objects.create_user("jane", "jane@example.com", "password")

        # normal intermediate model
        cls.bob_rock = Membership.objects.create(person=cls.bob, group=cls.rock)
        cls.bob_roll = Membership.objects.create(person=cls.bob, group=cls.roll, price=50)
        cls.jim_rock = Membership.objects.create(person=cls.jim, group=cls.rock, price=50)

        # intermediate model with custom id column
        cls.frank_rock = UserMembership.objects.create(user=cls.frank, group=cls.rock)
        cls.frank_roll = UserMembership.objects.create(user=cls.frank, group=cls.roll)
        cls.jane_rock = UserMembership.objects.create(user=cls.jane, group=cls.rock)

    def test_retrieve_reverse_m2m_items(self):
        # Reverse accessor (person -> groups) works through the intermediary.
        self.assertQuerysetEqual(
            self.bob.group_set.all(), [
                "<Group: Rock>",
                "<Group: Roll>",
            ],
            ordered=False
        )

    def test_retrieve_forward_m2m_items(self):
        # Forward accessor (group -> people) works through the intermediary.
        self.assertQuerysetEqual(
            self.roll.members.all(), [
                "<Person: Bob>",
            ]
        )

    def test_cannot_use_setattr_on_reverse_m2m_with_intermediary_model(self):
        # Direct assignment is disallowed when a through model exists.
        msg = (
            "Cannot set values on a ManyToManyField which specifies an "
            "intermediary model. Use m2m_through_regress.Membership's Manager "
            "instead."
        )
        with self.assertRaisesMessage(AttributeError, msg):
            self.bob.group_set.set([])

    def test_cannot_use_setattr_on_forward_m2m_with_intermediary_model(self):
        msg = (
            "Cannot set values on a ManyToManyField which specifies an "
            "intermediary model. Use m2m_through_regress.Membership's Manager "
            "instead."
        )
        with self.assertRaisesMessage(AttributeError, msg):
            self.roll.members.set([])

    def test_cannot_use_create_on_m2m_with_intermediary_model(self):
        # create() is also unavailable, since the through row can't be built.
        with self.assertRaises(AttributeError):
            self.rock.members.create(name="Anne")

    def test_cannot_use_create_on_reverse_m2m_with_intermediary_model(self):
        with self.assertRaises(AttributeError):
            self.bob.group_set.create(name="Funk")

    def test_retrieve_reverse_m2m_items_via_custom_id_intermediary(self):
        # Same reverse retrieval, via the through model with a custom id column.
        self.assertQuerysetEqual(
            self.frank.group_set.all(), [
                "<Group: Rock>",
                "<Group: Roll>",
            ],
            ordered=False
        )

    def test_retrieve_forward_m2m_items_via_custom_id_intermediary(self):
        self.assertQuerysetEqual(
            self.roll.user_members.all(), [
                "<User: frank>",
            ]
        )

    def test_join_trimming_forwards(self):
        "Check that we don't involve too many copies of the intermediate table when doing a join. Refs #8046, #8254"
        self.assertQuerysetEqual(
            self.rock.members.filter(membership__price=50), [
                "<Person: Jim>",
            ]
        )

    def test_join_trimming_reverse(self):
        self.assertQuerysetEqual(
            self.bob.group_set.filter(membership__price=50), [
                "<Group: Roll>",
            ]
        )
class M2MThroughSerializationTestCase(TestCase):
    """Serialization of models linked through an explicit m2m through model."""

    @classmethod
    def setUpTestData(cls):
        cls.bob = Person.objects.create(name="Bob")
        cls.roll = Group.objects.create(name="Roll")
        cls.bob_roll = Membership.objects.create(person=cls.bob, group=cls.roll)

    def test_serialization(self):
        "m2m-through models aren't serialized as m2m fields. Refs #8134"
        # Primary keys are substituted into the expected output so the test
        # is independent of auto-assigned id values.
        pks = {"p_pk": self.bob.pk, "g_pk": self.roll.pk, "m_pk": self.bob_roll.pk}

        out = StringIO()
        management.call_command("dumpdata", "m2m_through_regress", format="json", stdout=out)
        self.assertJSONEqual(
            out.getvalue().strip(),
            '[{"pk": %(m_pk)s, "model": "m2m_through_regress.membership", "fields": {"person": %(p_pk)s, "price": '
            '100, "group": %(g_pk)s}}, {"pk": %(p_pk)s, "model": "m2m_through_regress.person", "fields": {"name": '
            '"Bob"}}, {"pk": %(g_pk)s, "model": "m2m_through_regress.group", "fields": {"name": "Roll"}}]'
            % pks
        )

        out = StringIO()
        management.call_command("dumpdata", "m2m_through_regress", format="xml", indent=2, stdout=out)
        self.assertXMLEqual(out.getvalue().strip(), """
<?xml version="1.0" encoding="utf-8"?>
<django-objects version="1.0">
<object pk="%(m_pk)s" model="m2m_through_regress.membership">
<field to="m2m_through_regress.person" name="person" rel="ManyToOneRel">%(p_pk)s</field>
<field to="m2m_through_regress.group" name="group" rel="ManyToOneRel">%(g_pk)s</field>
<field type="IntegerField" name="price">100</field>
</object>
<object pk="%(p_pk)s" model="m2m_through_regress.person">
<field type="CharField" name="name">Bob</field>
</object>
<object pk="%(g_pk)s" model="m2m_through_regress.group">
<field type="CharField" name="name">Roll</field>
</object>
</django-objects>
        """.strip() % pks)
class ToFieldThroughTests(TestCase):
    """Tests for m2m-through relations that use to_field on the FK."""

    def setUp(self):
        self.car = Car.objects.create(make="Toyota")
        self.driver = Driver.objects.create(name="Ryan Briscoe")
        CarDriver.objects.create(car=self.car, driver=self.driver)
        # We are testing if wrong objects get deleted due to using wrong
        # field value in m2m queries. So, it is essential that the pk
        # numberings do not match.
        # Create one intentionally unused driver to mix up the autonumbering
        self.unused_driver = Driver.objects.create(name="Barney Gumble")
        # And two intentionally unused cars.
        self.unused_car1 = Car.objects.create(make="Trabant")
        self.unused_car2 = Car.objects.create(make="Wartburg")

    def test_to_field(self):
        self.assertQuerysetEqual(
            self.car.drivers.all(),
            ["<Driver: Ryan Briscoe>"]
        )

    def test_to_field_reverse(self):
        self.assertQuerysetEqual(
            self.driver.car_set.all(),
            ["<Car: Toyota>"]
        )

    def test_to_field_clear_reverse(self):
        self.driver.car_set.clear()
        self.assertQuerysetEqual(
            self.driver.car_set.all(), [])

    def test_to_field_clear(self):
        self.car.drivers.clear()
        self.assertQuerysetEqual(
            self.car.drivers.all(), [])

    # Low level tests for _add_items and _remove_items. We test these methods
    # because .add/.remove aren't available for m2m fields with through, but
    # through is the only way to set to_field currently. We do want to make
    # sure these methods are ready if the ability to use .add or .remove with
    # to_field relations is added some day.
    def test_add(self):
        self.assertQuerysetEqual(
            self.car.drivers.all(),
            ["<Driver: Ryan Briscoe>"]
        )
        # Yikes - barney is going to drive...
        self.car.drivers._add_items('car', 'driver', self.unused_driver)
        self.assertQuerysetEqual(
            self.car.drivers.all(),
            ["<Driver: Barney Gumble>", "<Driver: Ryan Briscoe>"]
        )

    def test_add_null(self):
        # A null to_field value on either side must be rejected.
        nullcar = Car.objects.create(make=None)
        with self.assertRaises(ValueError):
            nullcar.drivers._add_items('car', 'driver', self.unused_driver)

    def test_add_related_null(self):
        nulldriver = Driver.objects.create(name=None)
        with self.assertRaises(ValueError):
            self.car.drivers._add_items('car', 'driver', nulldriver)

    def test_add_reverse(self):
        car2 = Car.objects.create(make="Honda")
        self.assertQuerysetEqual(
            self.driver.car_set.all(),
            ["<Car: Toyota>"]
        )
        self.driver.car_set._add_items('driver', 'car', car2)
        self.assertQuerysetEqual(
            self.driver.car_set.all(),
            ["<Car: Toyota>", "<Car: Honda>"],
            ordered=False
        )

    def test_add_null_reverse(self):
        nullcar = Car.objects.create(make=None)
        with self.assertRaises(ValueError):
            self.driver.car_set._add_items('driver', 'car', nullcar)

    def test_add_null_reverse_related(self):
        nulldriver = Driver.objects.create(name=None)
        with self.assertRaises(ValueError):
            nulldriver.car_set._add_items('driver', 'car', self.car)

    def test_remove(self):
        self.assertQuerysetEqual(
            self.car.drivers.all(),
            ["<Driver: Ryan Briscoe>"]
        )
        self.car.drivers._remove_items('car', 'driver', self.driver)
        self.assertQuerysetEqual(
            self.car.drivers.all(), [])

    def test_remove_reverse(self):
        self.assertQuerysetEqual(
            self.driver.car_set.all(),
            ["<Car: Toyota>"]
        )
        self.driver.car_set._remove_items('driver', 'car', self.car)
        self.assertQuerysetEqual(
            self.driver.car_set.all(), [])
class ThroughLoadDataTestCase(TestCase):
    # Fixture defines Person/Group/UserMembership rows used below.
    fixtures = ["m2m_through"]

    def test_sequence_creation(self):
        """
        Sequences on an m2m_through are created for the through model, not a
        phantom auto-generated m2m table (#11107).
        """
        out = StringIO()
        management.call_command("dumpdata", "m2m_through_regress", format="json", stdout=out)
        self.assertJSONEqual(
            out.getvalue().strip(),
            '[{"pk": 1, "model": "m2m_through_regress.usermembership", "fields": {"price": 100, "group": 1, "user"'
            ': 1}}, {"pk": 1, "model": "m2m_through_regress.person", "fields": {"name": "Guido"}}, {"pk": 1, '
            '"model": "m2m_through_regress.group", "fields": {"name": "Python Core Group"}}]'
        )
| bsd-3-clause | -7,445,935,394,258,643,000 | 7,779,002,305,382,380,000 | 37.751852 | 116 | 0.604989 | false |
CARTAvis/carta | carta/html5/common/skel/source/class/skel/simulation/tStack.py | 3 | 9489 | import unittest
import Util
import time
import selectBrowser
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait
#Stack functionality.
class tStack(unittest.TestCase):
    """Browser-driven tests of image-stack functionality: hiding/showing
    layers, grouping/ungrouping layers into an RGB group, and renaming a
    group."""

    def setUp(self):
        # Launch the configured browser and navigate to the application.
        browser = selectBrowser._getBrowser()
        Util.setUp(self, browser)

    def verifyCompositionMode(self, driver, mode):
        """Assert that the layer-composition combo box currently shows *mode*."""
        print "verifying mode=", mode
        combineCombo = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.ID, "layerCompositionMode")))
        # Scroll the widget into view so its rendered text is reachable.
        driver.execute_script( "arguments[0].scrollIntoView(true);", combineCombo )
        combineText = combineCombo.find_element_by_xpath( ".//div/div")
        combMode = combineText.text
        print "Combine mode=",combMode
        self.assertTrue( mode==combMode, "Combine modes not as expected")

    def _isColorChecked(self, colorBox ):
        """Return True if the given color box element is rendered as checked.

        A checked box carries the "qx-line-border" CSS class; any other
        class value is treated as unchecked.
        """
        colorBorder = colorBox.get_attribute( "class")
        checked = False
        if colorBorder == "qx-line-border":
            checked = True
        return checked

    def _testColor(self, colorBoxId, colorExpected, colorStr, driver ):
        """Assert the checked state of one color-filter box.

        *colorExpected* is treated as a flag: 0 means the box must be
        unchecked, any non-zero value means it must be checked.
        *colorStr* is only used to label the failure message.
        """
        filterBox = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.ID, colorBoxId)))
        driver.execute_script( "arguments[0].scrollIntoView(true);", filterBox )
        colorChecked = self._isColorChecked( filterBox )
        print "Color checked=", colorChecked
        colorCheckExpected = True
        if colorExpected == 0:
            colorCheckExpected = False
        self.assertEqual( colorChecked, colorCheckExpected, colorStr + " amount is not correct")

    def verifyColor(self, driver, redExpected, greenExpected, blueExpected ):
        """Assert the checked state of the red, green and blue filter boxes."""
        self._testColor( "filterRedBox", redExpected, "Red", driver )
        self._testColor( "filterGreenBox", greenExpected, "Green", driver)
        self._testColor( "filterBlueBox", blueExpected, "Blue", driver )

    def _verifyRGB(self, driver, imageName, rgbStr ):
        """Assert that the stack-tree icon for *imageName* has CSS
        background-color *rgbStr* (e.g. "rgba(255, 0, 0, 1)")."""
        xPath = "//div[@qxclass='skel.widgets.Image.Stack.TreeItem']/div[text()='" + imageName + "']/../div[@qxclass='skel.widgets.Image.Stack.CustomIcon']"
        item = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.XPATH, xPath)))
        styleStr = item.get_attribute("style")
        print "Style=",styleStr
        rgb = item.value_of_css_property( 'background-color')
        print "RGB color=",rgb
        print "RGBSTR=", rgbStr
        # NOTE(review): the message says "Red Icon" but this helper is used
        # for all three colors; the message is misleading on green/blue failures.
        self.assertTrue( rgb==rgbStr, "Red Icon not correct color")

    # Load 3 images
    # Hide the second image; check the count goes down to 2
    # Show the second image; check the count goes up to 3
    def test_hideShow(self):
        driver = self.driver
        # NOTE(review): 'timeout' is assigned but never used in this test.
        timeout = selectBrowser._getSleep()
        #Load images
        Util.load_image( self, driver, "Default")
        Util.load_image( self, driver, "aJ.fits")
        Util.load_image( self, driver, "aH.fits")
        #Verify the image animator sees three images.
        Util.verifyAnimatorUpperBound( self, driver, 2, "Image" )
        #Open the image settings
        #Open the stack tab
        Util.openSettings( self, driver, "Image", True )
        Util.clickTab( driver, "Stack" )
        #Turn off auto select
        autoSelectCheck = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.ID, "autoSelectImages")))
        ActionChains(driver).click( autoSelectCheck ).perform()
        #Hide the second image via its context menu (two arrow-downs reach
        #the hide entry, then ENTER activates it).
        secondItem = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.XPATH, "//div[@qxclass='skel.widgets.Image.Stack.TreeItem']/div[text()='aJ.fits']/..")))
        ActionChains(driver).context_click( secondItem ).perform()
        ActionChains(driver).send_keys( Keys.ARROW_DOWN ).send_keys( Keys.ARROW_DOWN).send_keys( Keys.ENTER ).perform()
        #Verify the animator sees two images
        time.sleep( 2 )
        Util.verifyAnimatorUpperBound(self, driver, 1, "Image" )
        #Show the second image
        secondItem = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.XPATH, "//div[@qxclass='skel.widgets.Image.Stack.TreeItem']/div[text()='aJ.fits']/..")))
        ActionChains(driver).context_click( secondItem ).perform()
        ActionChains(driver).send_keys( Keys.ARROW_DOWN ).send_keys( Keys.ARROW_DOWN).send_keys( Keys.ENTER ).perform()
        #Verify the animator sees three images
        time.sleep( 2 )
        Util.verifyAnimatorUpperBound( self, driver, 2, "Image")

    # Load 3 images
    # Test that we can group the images into an RGB layer.
    # Test that we can ungroup the images.
    def test_groupUngroup(self):
        driver = self.driver
        # NOTE(review): 'timeout' is assigned but never used in this test.
        timeout = selectBrowser._getSleep()
        #Load images
        Util.load_image( self, driver, "Orion.methanol.cbc.contsub.image.fits")
        Util.load_image( self, driver, "Orion.cont.image.fits")
        Util.load_image( self, driver, "TWHydra_CO2_1line.image.fits")
        time.sleep( 2 )
        #Open the image settings
        #Open the stack tab
        Util.openSettings( self, driver, "Image", True )
        Util.clickTab( driver, "Stack" )
        #Turn off auto select
        autoSelectCheck = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.ID, "autoSelectImages")))
        ActionChains(driver).click( autoSelectCheck ).perform()
        #Select all images (The third should already be selected so selecting
        #the first with a shift should do it).
        firstItem = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.XPATH, "//div[@qxclass='skel.widgets.Image.Stack.TreeItem']/div[text()='Orion.methanol.cbc.contsub.image.fits']/..")))
        actions = ActionChains( driver).key_down(Keys.SHIFT).click( firstItem )
        actions.key_up( Keys.SHIFT ).perform()
        #Click the group check box.
        groupCheck = WebDriverWait( driver, 10).until( EC.presence_of_element_located((By.ID, "stackGroupImages")))
        ActionChains(driver).click( groupCheck ).perform()
        time.sleep(2)
        #Verify that the images now have RGB boxes.
        self._verifyRGB( driver, "Orion.methanol.cbc.contsub.image.fits", "rgba(255, 0, 0, 1)")
        self._verifyRGB( driver, "Orion.cont.image.fits", "rgba(0, 255, 0, 1)")
        self._verifyRGB( driver, "TWHydra_CO2_1line.image.fits", "rgba(0, 0, 255, 1)")
        #Ungroup the images.
        groupCheck = WebDriverWait( driver, 10).until( EC.presence_of_element_located((By.ID, "stackGroupImages")))
        ActionChains(driver).click( groupCheck ).perform()
        time.sleep(2)
        #Verify the images have transparent RGB boxes.
        self._verifyRGB( driver, "Orion.methanol.cbc.contsub.image.fits", "rgba(0, 0, 0, 0)")
        self._verifyRGB( driver, "Orion.cont.image.fits", "rgba(0, 0, 0, 0)")
        self._verifyRGB( driver, "TWHydra_CO2_1line.image.fits", "rgba(0, 0, 0, 0)")

    #Test that we can rename a group.
    def test_groupRename(self):
        driver = self.driver
        # NOTE(review): 'timeout' is assigned but never used in this test.
        timeout = selectBrowser._getSleep()
        #Load images
        Util.load_image( self, driver, "Orion.methanol.cbc.contsub.image.fits")
        Util.load_image( self, driver, "Orion.cont.image.fits")
        Util.load_image( self, driver, "TWHydra_CO2_1line.image.fits")
        time.sleep( 2 )
        #Open the image settings
        #Open the stack tab
        Util.openSettings( self, driver, "Image", True )
        time.sleep(4)
        Util.clickTab( driver, "Stack" )
        #Turn off auto select
        autoSelectCheck = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.ID, "autoSelectImages")))
        ActionChains(driver).click( autoSelectCheck ).perform()
        #Group the bottom two images.
        secondItem = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.XPATH, "//div[@qxclass='skel.widgets.Image.Stack.TreeItem']/div[text()='Orion.cont.image.fits']/..")))
        actions = ActionChains( driver).key_down(Keys.SHIFT).click( secondItem )
        actions.key_up( Keys.SHIFT ).perform()
        #Click the group check box.
        groupCheck = WebDriverWait( driver, 10).until( EC.presence_of_element_located((By.ID, "stackGroupImages")))
        ActionChains(driver).click( groupCheck ).perform()
        time.sleep(2)
        #Change the name of the group to twoImageRGB & verify that there is a tree node with that name..
        nameText = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.XPATH, "//input[starts-with(@id, 'stackLayerName')]")))
        Util._changeElementText(self, driver, nameText, "twoImageRGB")
        WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.XPATH, "//div[contains(text(),'twoImageRGB')]")))

    def tearDown(self):
        #Close the browser
        self.driver.close()
        #Allow browser to fully close before continuing
        time.sleep(2)
        #Close the session and delete temporary files
        self.driver.quit()
# Allow this test module to be executed directly as a script.
if __name__ == "__main__":
    unittest.main()
Teamxrtc/webrtc-streaming-node | third_party/depot_tools/external_bin/gsutil/gsutil_4.15/gsutil/gslib/commands/help.py | 25 | 8710 | # -*- coding: utf-8 -*-
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of gsutil help command."""
from __future__ import absolute_import
import itertools
import os
import pkgutil
import re
from subprocess import PIPE
from subprocess import Popen
import gslib.addlhelp
from gslib.command import Command
from gslib.command import OLD_ALIAS_MAP
import gslib.commands
from gslib.exception import CommandException
from gslib.help_provider import HelpProvider
from gslib.help_provider import MAX_HELP_NAME_LEN
from gslib.util import IsRunningInteractively
# Usage synopsis, shared between the command spec and the detailed help text.
_SYNOPSIS = """
gsutil help [command or topic]
"""

# Full help text rendered by "gsutil help help".
_DETAILED_HELP_TEXT = ("""
<B>SYNOPSIS</B>
""" + _SYNOPSIS + """
<B>DESCRIPTION</B>
Running:
gsutil help
will provide a summary of all commands and additional topics on which
help is available.
Running:
gsutil help command or topic
will provide help about the specified command or topic.
Running:
gsutil help command sub-command
will provide help about the specified sub-command. For example, running:
gsutil help acl set
will provide help about the "set" subcommand of the "acl" command.
If you set the PAGER environment variable to the path to a pager program
(such as /bin/less on Linux), long help sections will be piped through
the specified pager.
""")

# Top-level usage line printed when "gsutil help" is run without arguments.
top_level_usage_string = ('Usage: gsutil [-D] [-DD] [-h header]... '
                          '[-m] [-o] [-q] [command [opts...] args...]')
class HelpCommand(Command):
  """Implementation of gsutil help command."""

  # Command specification. See base class for documentation.
  command_spec = Command.CreateCommandSpec(
      'help',
      command_name_aliases=['?', 'man'],
      usage_synopsis=_SYNOPSIS,
      min_args=0,
      max_args=2,
      supported_sub_args='',
      file_url_ok=True,
      provider_url_ok=False,
      urls_start_arg=0,
  )
  # Help specification. See help_provider.py for documentation.
  help_spec = Command.HelpSpec(
      help_name='help',
      help_name_aliases=['?'],
      help_type='command_help',
      help_one_line_summary='Get help about commands and topics',
      help_text=_DETAILED_HELP_TEXT,
      subcommand_help_text={},
  )

  def RunCommand(self):
    """Command entry point for the help command.

    With no arguments, prints the top-level usage plus a one-line summary
    of every command and additional help topic.  With one argument, prints
    the full help for that command/topic; with two, the help for that
    command's subcommand.  Returns 0 in all cases.
    """
    (help_type_map, help_name_map) = self._LoadHelpMaps()
    output = []
    if not self.args:
      # No topic requested: list everything.
      output.append('%s\nAvailable commands:\n' % top_level_usage_string)
      format_str = ' %-' + str(MAX_HELP_NAME_LEN) + 's%s\n'
      for help_prov in sorted(help_type_map['command_help'],
                              key=lambda hp: hp.help_spec.help_name):
        output.append(format_str % (
            help_prov.help_spec.help_name,
            help_prov.help_spec.help_one_line_summary))
      output.append('\nAdditional help topics:\n')
      for help_prov in sorted(help_type_map['additional_help'],
                              key=lambda hp: hp.help_spec.help_name):
        output.append(format_str % (
            help_prov.help_spec.help_name,
            help_prov.help_spec.help_one_line_summary))
      output.append('\nUse gsutil help <command or topic> for detailed help.')
    else:
      invalid_subcommand = False
      arg = self.args[0]
      if arg not in help_name_map:
        output.append('No help available for "%s"' % arg)
      else:
        help_prov = help_name_map[arg]
        help_name = None
        if len(self.args) > 1:  # We also have a subcommand argument.
          subcommand_map = help_prov.help_spec.subcommand_help_text
          if subcommand_map and self.args[1] in subcommand_map:
            help_name = arg + ' ' + self.args[1]
            help_text = subcommand_map[self.args[1]]
          else:
            # Unknown subcommand: explain what is available instead.
            invalid_subcommand = True
            if not subcommand_map:
              output.append((
                  'The "%s" command has no subcommands. You can ask for the '
                  'full help by running:\n\n\tgsutil help %s\n') %
                  (arg, arg))
            else:
              subcommand_examples = []
              for subcommand in subcommand_map:
                subcommand_examples.append(
                    '\tgsutil help %s %s' % (arg, subcommand))
              output.append(
                  ('Subcommand "%s" does not exist for command "%s".\n'
                   'You can either ask for the full help about the command by '
                   'running:\n\n\tgsutil help %s\n\n'
                   'Or you can ask for help about one of the subcommands:\n\n%s'
                  ) % (self.args[1], arg, arg, '\n'.join(subcommand_examples)))
        if not invalid_subcommand:
          if not help_name:  # No subcommand or invalid subcommand.
            help_name = help_prov.help_spec.help_name
            help_text = help_prov.help_spec.help_text
          output.append('<B>NAME</B>\n')
          output.append(' %s - %s\n' % (
              help_name, help_prov.help_spec.help_one_line_summary))
          output.append('\n\n')
          output.append(help_text.strip('\n'))
          # Warn about deprecated command aliases (e.g. old names kept for
          # backwards compatibility in OLD_ALIAS_MAP).
          new_alias = OLD_ALIAS_MAP.get(arg, [None])[0]
          if new_alias:
            deprecation_warning = """
The "%s" alias is deprecated, and will eventually be removed completely.
Please use the "%s" command instead.""" % (arg, new_alias)
            output.append('\n\n\n<B>DEPRECATION WARNING</B>\n')
            output.append(deprecation_warning)
    self._OutputHelp(''.join(output))
    return 0

  def _OutputHelp(self, help_str):
    """Outputs simply formatted string.

    This function paginates if the string is too long, PAGER is defined, and
    the output is a tty.

    Args:
      help_str: String to format.
    """
    # Replace <B> and </B> with terminal formatting strings if connected to tty.
    if not IsRunningInteractively():
      help_str = re.sub('<B>', '', help_str)
      help_str = re.sub('</B>', '', help_str)
      print help_str
      return
    help_str = re.sub('<B>', '\033[1m', help_str)
    help_str = re.sub('</B>', '\033[0;0m', help_str)
    num_lines = len(help_str.split('\n'))
    if 'PAGER' in os.environ and num_lines >= gslib.util.GetTermLines():
      # Use -r option for less to make bolding work right.
      pager = os.environ['PAGER'].split(' ')
      if pager[0].endswith('less'):
        pager.append('-r')
      try:
        Popen(pager, stdin=PIPE).communicate(input=help_str)
      except OSError, e:
        raise CommandException('Unable to open pager (%s): %s' %
                               (' '.join(pager), e))
    else:
      print help_str

  def _LoadHelpMaps(self):
    """Returns tuple of help type and help name.

    help type is a dict with key: help type
    value: list of HelpProviders
    help name is a dict with key: help command name or alias
    value: HelpProvider

    Returns:
      (help type, help name)
    """
    # Import all gslib.commands submodules so their HelpProvider subclasses
    # are registered via __subclasses__ below.
    for _, module_name, _ in pkgutil.iter_modules(gslib.commands.__path__):
      __import__('gslib.commands.%s' % module_name)
    # Import all gslib.addlhelp submodules.
    for _, module_name, _ in pkgutil.iter_modules(gslib.addlhelp.__path__):
      __import__('gslib.addlhelp.%s' % module_name)
    help_type_map = {}
    help_name_map = {}
    for s in gslib.help_provider.ALL_HELP_TYPES:
      help_type_map[s] = []
    # Only include HelpProvider subclasses in the dict.
    for help_prov in itertools.chain(
        HelpProvider.__subclasses__(), Command.__subclasses__()):
      if help_prov is Command:
        # Skip the Command base class itself; we just want its subclasses,
        # where the help command text lives (in addition to non-Command
        # HelpProviders, like naming.py).
        continue
      gslib.help_provider.SanityCheck(help_prov, help_name_map)
      help_name_map[help_prov.help_spec.help_name] = help_prov
      for help_name_aliases in help_prov.help_spec.help_name_aliases:
        help_name_map[help_name_aliases] = help_prov
      help_type_map[help_prov.help_spec.help_type].append(help_prov)
    return (help_type_map, help_name_map)
rackerlabs/cache-busters | cache_buster/test/test_driver.py | 1 | 5320 | """
Copyright 2013 Rackspace
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import pretend
from twisted.internet.defer import Deferred, succeed, fail
from twisted.python.failure import Failure
from twisted.trial import unittest
from cache_buster.driver import Driver, count_cache_results
from cache_buster.keys import FormattingKeyMaker
from cache_buster.test.doubles import DummyLogger
class DriverTests(unittest.TestCase):
    """Tests for Driver.invalidate_row: cache deletes, deferred behaviour,
    failure handling, and the counts logged for each outcome."""

    def test_construct(self):
        # Construction alone must not touch the cache or the logger.
        Driver(FormattingKeyMaker({}), None, None)

    def test_invalidate_row_calls_cache_delete(self):
        """Every key derived for the table is deleted from the cache."""
        cache = pretend.stub(
            delete=pretend.call_recorder(lambda key: succeed(None))
        )
        d = Driver(FormattingKeyMaker({
            "foo_table": ["bar", "baz"]
        }), cache, DummyLogger())
        d.invalidate_row("foo_table", {})
        self.assertEqual(cache.delete.calls, [
            pretend.call("bar"), pretend.call("baz")
        ])

    def test_invalidate_row_returns_deferred(self):
        """invalidate_row returns a Deferred firing with None."""
        d = Driver(FormattingKeyMaker({}), None, DummyLogger())
        res = self.successResultOf(d.invalidate_row("foo_table", {}))
        self.assertIs(res, None)

    def test_invalidate_row_waits_for_cache_delete(self):
        """The returned Deferred does not fire until the cache delete does."""
        d1 = Deferred()
        cache = pretend.stub(
            delete=lambda key: d1,
        )
        d = Driver(FormattingKeyMaker({
            "foo_table": ["bar"]
        }), cache, DummyLogger())
        invalidate_d = d.invalidate_row("foo_table", {})
        self.assertNoResult(invalidate_d)
        d1.callback(None)
        res = self.successResultOf(invalidate_d)
        self.assertIs(res, None)

    def test_invalidate_row_succeeds_on_cache_delete_failure(self):
        """A failing cache delete does not fail the invalidation itself."""
        cache = pretend.stub(
            delete=lambda key: fail(Exception()),
        )
        d = Driver(FormattingKeyMaker({
            "foo_table": ["bar"]
        }), cache, DummyLogger())
        invalidate_d = d.invalidate_row("foo_table", {})
        res = self.successResultOf(invalidate_d)
        self.assertIs(res, None)

    def test_invalidate_row_logs_on_cache_delete_failure(self):
        """A failing cache delete is reported via logger.err with context."""
        f = Failure(Exception())
        cache = pretend.stub(
            delete=lambda key: fail(f),
        )
        logger = pretend.stub(
            msg=lambda s, **kwargs: None,
            err=pretend.call_recorder(lambda failure, table, key: None)
        )
        d = Driver(FormattingKeyMaker({
            "foo_table": ["bar"]
        }), cache, logger)
        d.invalidate_row("foo_table", {})
        self.assertEqual(logger.err.calls, [
            pretend.call(f, table="foo_table", key="bar")
        ])

    def test_invalidate_row_logs_counts(self):
        """Successful deletes are tallied in the 'deletes' count."""
        cache = pretend.stub(
            delete=lambda key: succeed(True)
        )
        logger = pretend.stub(
            err=None,
            msg=pretend.call_recorder(lambda *args, **kwargs: None),
        )
        d = Driver(FormattingKeyMaker({
            "foo_table": ["bar", "baz"]
        }), cache, logger)
        d.invalidate_row("foo_table", {})
        self.assertEqual(logger.msg.calls, [
            pretend.call("cache_buster.driver.invalidated_rows",
                         deletes=2, nonexistant=0, failures=0,
                        )
        ])

    def test_invalidate_row_logs_nonexistant_counts(self):
        """Deletes of missing keys (False result) count as 'nonexistant'."""
        cache = pretend.stub(
            delete=lambda key: succeed(False)
        )
        logger = pretend.stub(
            err=None,
            msg=pretend.call_recorder(lambda *args, **kwargs: None)
        )
        d = Driver(FormattingKeyMaker({
            "foo_table": ["bar"]
        }), cache, logger)
        d.invalidate_row("foo_table", {})
        self.assertEqual(logger.msg.calls, [
            pretend.call("cache_buster.driver.invalidated_rows",
                         deletes=0, nonexistant=1, failures=0,
                        )
        ])

    def test_invalidate_row_logs_failure_counts(self):
        """Failed deletes are tallied in the 'failures' count."""
        cache = pretend.stub(
            delete=lambda key: fail(Exception())
        )
        logger = pretend.stub(
            err=lambda failure, table, key: None,
            msg=pretend.call_recorder(lambda *args, **kwargs: None)
        )
        d = Driver(FormattingKeyMaker({
            "foo_table": ["bar"]
        }), cache, logger)
        d.invalidate_row("foo_table", {})
        self.assertEqual(logger.msg.calls, [
            pretend.call("cache_buster.driver.invalidated_rows",
                         deletes=0, nonexistant=0, failures=1,
                        )
        ])
class CountCacheResultsTests(unittest.TestCase):
    """Unit tests for the count_cache_results helper."""

    def test_many_results(self):
        """A mixed result list is tallied into deletes/nonexistant/failures.

        True -> delete succeeded, False -> key did not exist, None -> failure.
        """
        outcomes = [True, False, None, False, True]
        deletes, nonexistant, failures = count_cache_results(outcomes)
        self.assertEqual(deletes, 2)
        self.assertEqual(nonexistant, 2)
        self.assertEqual(failures, 1)
Azure/azure-sdk-for-python | sdk/core/azure-servicemanagement-legacy/tests/test_legacy_mgmt_sqldatabase.py | 12 | 9488 | # coding: utf-8
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
import unittest
from azure.servicemanagement import (
EventLog,
ServerQuota,
Server,
Servers,
ServiceObjective,
Database,
FirewallRule,
SqlDatabaseManagementService,
)
from testutils.common_recordingtestcase import (
TestMode,
record,
)
from tests.legacy_mgmt_testcase import LegacyMgmtTestCase
class LegacyMgmtSqlDatabaseTest(LegacyMgmtTestCase):
    """Recorded live-service tests for SqlDatabaseManagementService:
    server lifecycle, firewall rules, and database CRUD."""

    def setUp(self):
        super(LegacyMgmtSqlDatabaseTest, self).setUp()
        self.sqlms = self.create_service_management(SqlDatabaseManagementService)
        # Name of the server created by a test, so tearDown can clean it up.
        self.created_server = None

    def tearDown(self):
        # Best-effort cleanup; only contacts the live service when recording.
        if not self.is_playback():
            if self.created_server:
                try:
                    self.sqlms.delete_server(self.created_server)
                # NOTE(review): bare except silently swallows every cleanup
                # error (including KeyboardInterrupt); deliberate best-effort.
                except:
                    pass
        return super(LegacyMgmtSqlDatabaseTest, self).tearDown()

    #--Helpers-----------------------------------------------------------------
    def _create_server(self):
        """Create a test server and remember its generated name for cleanup."""
        result = self.sqlms.create_server('azuredb', 'T5ii-B48x', 'West US')
        self.created_server = result.server_name

    def _server_exists(self, server_name):
        """Return True if exactly one server with *server_name* is listed."""
        result = self.sqlms.list_servers()
        match = [s for s in result if s.name == server_name]
        return len(match) == 1

    def _create_database(self, name):
        """Create a Basic-edition database named *name* on the test server."""
        # NOTE(review): 'result' is assigned but unused.
        result = self.sqlms.create_database(
            self.created_server,
            name,
            'dd6d99bb-f193-4ec1-86f2-43d3bccbc49c',
            edition='Basic'
        )

    #--Operations for servers -------------------------------------------------
    @record
    def test_create_server(self):
        """Creating a server returns its name and FQDN, and it is listed."""
        # Arrange
        # Act
        result = self.sqlms.create_server('azuredb', 'T5ii-B48x', 'West US')
        self.created_server = result.server_name
        # Assert
        self.assertGreater(len(result.server_name), 0)
        self.assertGreater(len(result.fully_qualified_domain_name), 0)
        self.assertTrue(self._server_exists(self.created_server))

    @record
    def test_set_server_admin_password(self):
        """The admin password can be changed; call returns None on success."""
        # Arrange
        self._create_server()
        # Act
        result = self.sqlms.set_server_admin_password(self.created_server, 'U6jj-C59y')
        # Assert
        self.assertIsNone(result)

    @record
    def test_delete_server(self):
        """A deleted server no longer appears in the server list."""
        # Arrange
        self._create_server()
        # Act
        result = self.sqlms.delete_server(self.created_server)
        # Assert
        self.assertIsNone(result)
        self.assertFalse(self._server_exists(self.created_server))

    @record
    def test_list_servers(self):
        """list_servers returns Server objects with the expected fields."""
        # Arrange
        self._create_server()
        # Act
        result = self.sqlms.list_servers()
        # Assert
        self.assertIsNotNone(result)
        self.assertIsInstance(result, Servers)
        for server in result:
            self.assertIsInstance(server, Server)
        match = [s for s in result if s.name == self.created_server][0]
        self.assertEqual(match.name, self.created_server)
        self.assertEqual(match.administrator_login, 'azuredb')
        self.assertEqual(match.location, 'West US')
        self.assertEqual(match.geo_paired_region, '')
        self.assertTrue(match.fully_qualified_domain_name.startswith(self.created_server))
        self.assertGreater(len(match.version), 0)

    @record
    def test_list_quotas(self):
        """list_quotas returns named ServerQuota entries with positive values."""
        # Arrange
        self._create_server()
        # Act
        result = self.sqlms.list_quotas(self.created_server)
        # Assert
        self.assertIsNotNone(result)
        self.assertIsInstance(result, list)
        for quota in result:
            self.assertIsInstance(quota, ServerQuota)
            self.assertGreater(len(quota.name), 0)
            self.assertGreater(quota.value, 0)

    #--Operations for firewall rules ------------------------------------------
    @record
    def test_create_firewall_rule(self):
        """A firewall rule can be created; call returns None on success."""
        # Arrange
        self._create_server()
        # Act
        result = self.sqlms.create_firewall_rule(self.created_server,
                                                 'AllowAll',
                                                 '192.168.144.0',
                                                 '192.168.144.255')
        # Assert
        self.assertIsNone(result)

    @record
    def test_delete_firewall_rule(self):
        """An existing firewall rule can be deleted by name."""
        # Arrange
        self._create_server()
        result = self.sqlms.create_firewall_rule(self.created_server,
                                                 'AllowAll',
                                                 '192.168.144.0',
                                                 '192.168.144.255')
        # Act
        result = self.sqlms.delete_firewall_rule(self.created_server,
                                                 'AllowAll')
        # Assert
        self.assertIsNone(result)

    @record
    def test_update_firewall_rule(self):
        """An existing firewall rule's IP range can be updated."""
        # Arrange
        self._create_server()
        result = self.sqlms.create_firewall_rule(self.created_server,
                                                 'AllowAll',
                                                 '192.168.144.0',
                                                 '192.168.144.255')
        # Act
        result = self.sqlms.update_firewall_rule(self.created_server,
                                                 'AllowAll',
                                                 '192.168.116.0',
                                                 '192.168.116.255')
        # Assert
        self.assertIsNone(result)

    @record
    def test_list_firewall_rules(self):
        """list_firewall_rules returns FirewallRule objects."""
        # Arrange
        self._create_server()
        result = self.sqlms.create_firewall_rule(self.created_server,
                                                 'AllowAll',
                                                 '192.168.144.0',
                                                 '192.168.144.255')
        # Act
        result = self.sqlms.list_firewall_rules(self.created_server)
        # Assert
        self.assertIsNotNone(result)
        self.assertIsInstance(result, list)
        for rule in result:
            self.assertIsInstance(rule, FirewallRule)

    @record
    def test_list_service_level_objectives(self):
        """list_service_level_objectives returns ServiceObjective objects."""
        # Arrange
        self._create_server()
        # Act
        result = self.sqlms.list_service_level_objectives(self.created_server)
        # Assert
        self.assertIsNotNone(result)
        self.assertIsInstance(result, list)
        for rule in result:
            self.assertIsInstance(rule, ServiceObjective)

    @record
    def test_create_database(self):
        """A Basic-edition database can be created; returns None on success."""
        # Arrange
        self._create_server()
        # Act
        result = self.sqlms.create_database(
            self.created_server,
            'testdb',
            'dd6d99bb-f193-4ec1-86f2-43d3bccbc49c',
            edition='Basic'
        )
        # Assert
        self.assertIsNone(result)

    @record
    def test_delete_database(self):
        """A deleted database no longer appears in the database list."""
        # Arrange
        self._create_server()
        self._create_database('temp')
        # Act
        result = self.sqlms.delete_database(self.created_server, 'temp')
        # Assert
        result = self.sqlms.list_databases(self.created_server)
        match = [d for d in result if d.name == 'temp']
        self.assertEqual(len(match), 0)

    @record
    def test_update_database(self):
        """Renaming a database makes it listable under the new name."""
        # Arrange
        self._create_server()
        self._create_database('temp')
        # Act
        result = self.sqlms.update_database(self.created_server,
                                            'temp',
                                            'newname')
        # Assert
        result = self.sqlms.list_databases(self.created_server)
        match = [d for d in result if d.name == 'newname']
        self.assertEqual(len(match), 1)

    @record
    def test_list_databases(self):
        """list_databases returns Database objects with populated fields."""
        # Arrange
        self._create_server()
        self._create_database('temp')
        # Act
        result = self.sqlms.list_databases(self.created_server)
        # Assert
        self.assertIsNotNone(result)
        self.assertIsInstance(result, list)
        for db in result:
            self.assertIsInstance(db, Database)
        match = [d for d in result if d.name == 'temp'][0]
        self.assertEqual(match.name, 'temp')
        self.assertEqual(match.state, 'Normal')
        self.assertGreater(match.max_size_bytes, 0)
        self.assertGreater(match.id, 0)
        self.assertGreater(len(match.edition), 0)
        self.assertGreater(len(match.collation_name), 0)
#------------------------------------------------------------------------------
# Allow this test module to be executed directly as a script.
if __name__ == '__main__':
    unittest.main()
| mit | 907,850,727,181,444,700 | 4,920,137,234,563,561,000 | 30.006536 | 90 | 0.536573 | false |